From 96a08e3b3a8f84bc775c4459f8ad84ec4bc97c88 Mon Sep 17 00:00:00 2001 From: Shauheen Date: Wed, 28 Nov 2018 09:18:12 -0800 Subject: [PATCH 001/100] Bump master to 0.9 (#1750) --- build/BranchInfo.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/BranchInfo.props b/build/BranchInfo.props index 02e98d6b5d..1627429558 100644 --- a/build/BranchInfo.props +++ b/build/BranchInfo.props @@ -1,7 +1,7 @@ 0 - 8 + 9 0 preview From 4d1223d06bcda37ad1049bdde7c134f9d3421797 Mon Sep 17 00:00:00 2001 From: Shahab Moradi Date: Wed, 28 Nov 2018 13:20:58 -0500 Subject: [PATCH 002/100] Added VarVector static extension for OneHotHashEncoding (#1745) Added VarVector static extension for OneHotHashEncoding --- .../OneHotHashEncodingTransformer.cs | 15 +++++++++++++++ .../CategoricalHash/featurized.tsv | 9 +++++---- .../CategoricalHash/featurized.tsv | 9 +++++---- .../Transformers/CategoricalHashTests.cs | 19 +++++++++++++------ 4 files changed, 38 insertions(+), 14 deletions(-) diff --git a/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs b/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs index f94f1aa8f7..53e75dfd8a 100644 --- a/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs +++ b/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs @@ -445,5 +445,20 @@ public static Vector OneHotHashEncoding(this Vector input, OneHot return new ImplVector(input, new Config(outputKind, hashBits, seed, ordered, invertHash)); } + /// + /// Converts the categorical value into an indicator array by building a dictionary of categories based on the data and using the id in the dictionary as the index in the array + /// + /// Incoming data. + /// Specify the output type of indicator array: array or binary encoded data. + /// Amount of bits to use for hashing. + /// Seed value used for hashing. + /// Whether the position of each term should be included in the hash. 
+ /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + public static Vector OneHotHashEncoding(this VarVector input, OneHotHashVectorOutputKind outputKind = DefOut, + int hashBits = DefHashBits, uint seed = DefSeed, bool ordered = DefOrdered, int invertHash = DefInvertHash) + { + Contracts.CheckValue(input, nameof(input)); + return new ImplVector(input, new Config(outputKind, hashBits, seed, ordered, invertHash)); + } } } diff --git a/test/BaselineOutput/SingleDebug/CategoricalHash/featurized.tsv b/test/BaselineOutput/SingleDebug/CategoricalHash/featurized.tsv index 54f30cc6a6..4bd60fd39a 100644 --- a/test/BaselineOutput/SingleDebug/CategoricalHash/featurized.tsv +++ b/test/BaselineOutput/SingleDebug/CategoricalHash/featurized.tsv @@ -5,8 +5,9 @@ #@ col=C:R4:327680-393215 #@ col=D:R4:393216-393233 #@ col=E:R4:393234-393305 +#@ col=F:R4:393306-458841 #@ } -393306 11529:1 77065:1 165873:1 196777:1 326564:1 327849:1 339209:1 362481:1 392100:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 -393306 11529:1 77065:1 192621:1 236060:1 323071:1 339209:1 367132:1 388607:1 389229:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393255:1 393256:1 393257:1 393263:1 393264:1 393266:1 393267:1 393269:1 393272:1 393275:1 393276:1 393278:1 393283:1 393284:1 393285:1 393290:1 393291:1 393292:1 393294:1 393295:1 393297:1 393298:1 393299:1 393300:1 393301:1 393302:1 393303:1 393304:1 393305:1 -393306 47483:1 113019:1 165873:1 196777:1 326564:1 327849:1 362481:1 375163:1 392100:1 393218:1 393220:1 393221:1 393222:1 393225:1 393227:1 393228:1 393229:1 393230:1 393232:1 393233:1 393236:1 
393238:1 393239:1 393240:1 393243:1 393245:1 393246:1 393247:1 393248:1 393250:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 -393306 42588:1 108124:1 173921:1 212446:1 326564:1 343518:1 370268:1 370529:1 392100:1 393218:1 393220:1 393223:1 393224:1 393227:1 393229:1 393230:1 393231:1 393236:1 393238:1 393241:1 393242:1 393245:1 393247:1 393248:1 393249:1 393254:1 393256:1 393259:1 393260:1 393261:1 393263:1 393264:1 393269:1 393274:1 393275:1 393276:1 393277:1 393279:1 393280:1 393281:1 393283:1 393284:1 393285:1 393286:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 +458842 11529:1 77065:1 165873:1 196777:1 326564:1 327849:1 339209:1 362481:1 392100:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 404835:1 +458842 11529:1 77065:1 192621:1 236060:1 323071:1 339209:1 367132:1 388607:1 389229:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393255:1 393256:1 393257:1 393263:1 393264:1 393266:1 393267:1 393269:1 393272:1 393275:1 393276:1 393278:1 393283:1 393284:1 393285:1 393290:1 393291:1 393292:1 393294:1 393295:1 393297:1 393298:1 393299:1 393300:1 393301:1 393302:1 393303:1 393304:1 393305:1 404835:1 +458842 47483:1 113019:1 165873:1 196777:1 326564:1 327849:1 362481:1 375163:1 392100:1 393218:1 393220:1 393221:1 393222:1 393225:1 393227:1 393228:1 393229:1 393230:1 393232:1 393233:1 393236:1 393238:1 393239:1 393240:1 393243:1 393245:1 393246:1 393247:1 393248:1 393250:1 393251:1 393254:1 393259:1 393260:1 393261:1 
393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 440789:1 +458842 42588:1 108124:1 173921:1 212446:1 326564:1 343518:1 370268:1 370529:1 392100:1 393218:1 393220:1 393223:1 393224:1 393227:1 393229:1 393230:1 393231:1 393236:1 393238:1 393241:1 393242:1 393245:1 393247:1 393248:1 393249:1 393254:1 393256:1 393259:1 393260:1 393261:1 393263:1 393264:1 393269:1 393274:1 393275:1 393276:1 393277:1 393279:1 393280:1 393281:1 393283:1 393284:1 393285:1 393286:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 435894:1 diff --git a/test/BaselineOutput/SingleRelease/CategoricalHash/featurized.tsv b/test/BaselineOutput/SingleRelease/CategoricalHash/featurized.tsv index 54f30cc6a6..4bd60fd39a 100644 --- a/test/BaselineOutput/SingleRelease/CategoricalHash/featurized.tsv +++ b/test/BaselineOutput/SingleRelease/CategoricalHash/featurized.tsv @@ -5,8 +5,9 @@ #@ col=C:R4:327680-393215 #@ col=D:R4:393216-393233 #@ col=E:R4:393234-393305 +#@ col=F:R4:393306-458841 #@ } -393306 11529:1 77065:1 165873:1 196777:1 326564:1 327849:1 339209:1 362481:1 392100:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 -393306 11529:1 77065:1 192621:1 236060:1 323071:1 339209:1 367132:1 388607:1 389229:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393255:1 393256:1 393257:1 393263:1 393264:1 393266:1 393267:1 393269:1 393272:1 393275:1 393276:1 393278:1 393283:1 393284:1 393285:1 393290:1 393291:1 393292:1 393294:1 393295:1 393297:1 393298:1 393299:1 393300:1 393301:1 393302:1 393303:1 393304:1 393305:1 
-393306 47483:1 113019:1 165873:1 196777:1 326564:1 327849:1 362481:1 375163:1 392100:1 393218:1 393220:1 393221:1 393222:1 393225:1 393227:1 393228:1 393229:1 393230:1 393232:1 393233:1 393236:1 393238:1 393239:1 393240:1 393243:1 393245:1 393246:1 393247:1 393248:1 393250:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 -393306 42588:1 108124:1 173921:1 212446:1 326564:1 343518:1 370268:1 370529:1 392100:1 393218:1 393220:1 393223:1 393224:1 393227:1 393229:1 393230:1 393231:1 393236:1 393238:1 393241:1 393242:1 393245:1 393247:1 393248:1 393249:1 393254:1 393256:1 393259:1 393260:1 393261:1 393263:1 393264:1 393269:1 393274:1 393275:1 393276:1 393277:1 393279:1 393280:1 393281:1 393283:1 393284:1 393285:1 393286:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 +458842 11529:1 77065:1 165873:1 196777:1 326564:1 327849:1 339209:1 362481:1 392100:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 404835:1 +458842 11529:1 77065:1 192621:1 236060:1 323071:1 339209:1 367132:1 388607:1 389229:1 393220:1 393222:1 393223:1 393225:1 393230:1 393233:1 393238:1 393240:1 393241:1 393243:1 393248:1 393251:1 393254:1 393255:1 393256:1 393257:1 393263:1 393264:1 393266:1 393267:1 393269:1 393272:1 393275:1 393276:1 393278:1 393283:1 393284:1 393285:1 393290:1 393291:1 393292:1 393294:1 393295:1 393297:1 393298:1 393299:1 393300:1 393301:1 393302:1 393303:1 393304:1 393305:1 404835:1 +458842 47483:1 113019:1 165873:1 196777:1 326564:1 327849:1 362481:1 375163:1 392100:1 393218:1 393220:1 393221:1 393222:1 
393225:1 393227:1 393228:1 393229:1 393230:1 393232:1 393233:1 393236:1 393238:1 393239:1 393240:1 393243:1 393245:1 393246:1 393247:1 393248:1 393250:1 393251:1 393254:1 393259:1 393260:1 393261:1 393262:1 393263:1 393264:1 393265:1 393269:1 393280:1 393282:1 393284:1 393287:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 440789:1 +458842 42588:1 108124:1 173921:1 212446:1 326564:1 343518:1 370268:1 370529:1 392100:1 393218:1 393220:1 393223:1 393224:1 393227:1 393229:1 393230:1 393231:1 393236:1 393238:1 393241:1 393242:1 393245:1 393247:1 393248:1 393249:1 393254:1 393256:1 393259:1 393260:1 393261:1 393263:1 393264:1 393269:1 393274:1 393275:1 393276:1 393277:1 393279:1 393280:1 393281:1 393283:1 393284:1 393285:1 393286:1 393290:1 393291:1 393292:1 393293:1 393294:1 393296:1 393297:1 393298:1 393300:1 393303:1 435894:1 diff --git a/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs b/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs index 43646f5ef1..7de79b6ad2 100644 --- a/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs @@ -8,6 +8,7 @@ using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; +using Microsoft.ML.StaticPipe; using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Categorical; using System; @@ -74,11 +75,17 @@ public void CategoricalHashStatic() var invalidData = ComponentCreation.CreateDataView(Env, wrongCollection); var est = data.MakeNewEstimator(). 
Append(row => ( - A: row.ScalarString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashScalarOutputKind.Ind), - B: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Ind), - C: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Bag), - D: row.ScalarString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashScalarOutputKind.Bin), - E: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Bin) + row.ScalarString, + row.VectorString, + // Create a VarVector column + VarVectorString: row.ScalarString.TokenizeText())). + Append(row => ( + A: row.ScalarString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashScalarOutputKind.Ind), + B: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Ind), + C: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Bag), + D: row.ScalarString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashScalarOutputKind.Bin), + E: row.VectorString.OneHotHashEncoding(outputKind: CategoricalHashStaticExtensions.OneHotHashVectorOutputKind.Bin), + F: row.VarVectorString.OneHotHashEncoding() )); TestEstimatorCore(est.AsDynamic, data.AsDynamic, invalidInput: invalidData); @@ -88,7 +95,7 @@ public void CategoricalHashStatic() { var saver = new TextSaver(Env, new TextSaver.Arguments { Silent = true }); var savedData = TakeFilter.Create(Env, est.Fit(data).Transform(data).AsDynamic, 4); - var view = ColumnSelectingTransformer.CreateKeep(Env, savedData, new[] { "A", "B", "C", "D", "E" }); + var view = ColumnSelectingTransformer.CreateKeep(Env, savedData, new[] { "A", "B", "C", "D", "E", "F" }); using (var fs = File.Create(outputPath)) DataSaverUtils.SaveDataView(ch, saver, view, fs, keepHidden: true); 
} From 3d33e20f33da70cdd3da2ad9e0b2b03df929bef4 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Wed, 28 Nov 2018 12:59:44 -0800 Subject: [PATCH 003/100] Adding a new CI leg for netcoreapp 3.0 (#1700) * adding new ci leg * removing . * Fixing and disabling tests for netcoreapp3.0 * skipping some more tests and name change * added comments and conditional fact * name changed to NotNetCore30 * name changed and reding cli version from the file * name changed to DotnetCLIVersion.netcoreapp.latest.txt * contracts assert cprrected for avx=intrinscis fixing new tests broken by other commits Fixing Tests --- .vsts-dotnet-ci.yml | 14 +++++++++ DotnetCLIVersion.netcoreapp.latest.txt | 1 + build/ci/phase-template.yml | 16 ++++++---- config.json | 14 ++++++++- init-tools.cmd | 6 ++-- src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 28 +---------------- src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 30 ------------------- .../UnitTests/TestCSharpApi.cs | 2 +- .../UnitTests/TestEntryPoints.cs | 12 ++++---- .../TestPredictors.cs | 16 +++++----- .../Training.cs | 2 +- .../BaseTestBaseline.cs | 4 +++ .../TestCommandBase.cs | 2 +- test/Microsoft.ML.Tests/CSharpCodeGen.cs | 2 +- test/Microsoft.ML.Tests/OnnxTests.cs | 2 +- .../Transformers/NormalizerTests.cs | 2 +- .../Transformers/RffTests.cs | 2 +- .../TimeSeries.cs | 2 +- .../TimeSeriesDirectApi.cs | 25 ++++++++-------- .../TimeSeriesEstimatorTests.cs | 4 +-- 20 files changed, 84 insertions(+), 102 deletions(-) create mode 100644 DotnetCLIVersion.netcoreapp.latest.txt diff --git a/.vsts-dotnet-ci.yml b/.vsts-dotnet-ci.yml index 2699c9b7b3..ff6a0d6bec 100644 --- a/.vsts-dotnet-ci.yml +++ b/.vsts-dotnet-ci.yml @@ -23,6 +23,20 @@ phases: queue: name: Hosted VS2017 +- template: /build/ci/phase-template.yml + parameters: + name: core30 + buildScript: build.cmd + customMatrixes: + Build_Debug_Intrinsics: + _configuration: Debug-Intrinsics + _config_short: DI + Build_Release_Intrinsics: + _configuration: Release-Intrinsics + _config_short: 
RI + queue: + name: Hosted VS2017 + - template: /build/ci/phase-template.yml parameters: name: Windows_x86 diff --git a/DotnetCLIVersion.netcoreapp.latest.txt b/DotnetCLIVersion.netcoreapp.latest.txt new file mode 100644 index 0000000000..23f4ef9a4c --- /dev/null +++ b/DotnetCLIVersion.netcoreapp.latest.txt @@ -0,0 +1 @@ +3.0.100-alpha1-009622 \ No newline at end of file diff --git a/build/ci/phase-template.yml b/build/ci/phase-template.yml index 9929ba182d..a9d2a35943 100644 --- a/build/ci/phase-template.yml +++ b/build/ci/phase-template.yml @@ -3,6 +3,7 @@ parameters: architecture: x64 buildScript: '' queue: {} + customMatrixes: '' phases: - phase: ${{ parameters.name }} @@ -14,12 +15,15 @@ phases: timeoutInMinutes: 45 parallel: 99 matrix: - Build_Debug: - _configuration: Debug - _config_short: D - Build_Release: - _configuration: Release - _config_short: R + ${{ if eq(parameters.customMatrixes, '') }}: + Build_Debug: + _configuration: Debug + _config_short: D + Build_Release: + _configuration: Release + _config_short: R + ${{ if ne(parameters.customMatrixes, '') }}: + ${{ insert }}: ${{ parameters.customMatrixes }} ${{ insert }}: ${{ parameters.queue }} steps: - script: $(_buildScript) -$(_configuration) -buildArch=$(_arch) diff --git a/config.json b/config.json index 8436586e61..54e56ddeb1 100644 --- a/config.json +++ b/config.json @@ -3,7 +3,7 @@ "Configuration": { "description": "Sets the optimization level for the Build Configuration you want to build.", "valueType": "property", - "values": [ "Debug", "Release" ], + "values": [ "Debug", "Release", "Debug-Intrinsics", "Release-Intrinsics" ], "defaultValue": "Debug" }, "TargetArchitecture": { @@ -94,6 +94,18 @@ "Configuration": "Release" } }, + "debug-intrinsics": { + "description": "Sets optimization level to debug for managed build configuration and builds against netcoreapp3.0. 
(/p:Configuration=Debug-Intrinsics)", + "settings": { + "Configuration": "Debug-Intrinsics" + } + }, + "release-intrinsics": { + "description": "Sets optimization level to release for managed build configuration and builds against netcoreapp3.0. (/p:Configuration=Release-Intrinsics)", + "settings": { + "Configuration": "Release-Intrinsics" + } + }, "buildArch": { "description": "Sets the architecture for the native build. (/p:TargetArchitecture=[value])", "settings": { diff --git a/init-tools.cmd b/init-tools.cmd index 3743cb413d..349f7b1461 100644 --- a/init-tools.cmd +++ b/init-tools.cmd @@ -46,15 +46,17 @@ if exist "%DotNetBuildToolsDir%" ( echo Running %0 > "%INIT_TOOLS_LOG%" set /p DOTNET_VERSION=< "%~dp0DotnetCLIVersion.txt" -if exist "%DOTNET_CMD%" goto :afterdotnetrestore :Arg_Loop if [%1] == [] goto :ArchSet -if /i [%1] == [x86] ( set ARCH=x86&&goto ArchSet) +if /i [%1] == [x86] ( set ARCH=x86) +if /i [%1] == [-Debug-Intrinsics] ( set /p DOTNET_VERSION=< "%~dp0DotnetCLIVersion.netcoreapp.latest.txt") +if /i [%1] == [-Release-Intrinsics] ( set /p DOTNET_VERSION=< "%~dp0DotnetCLIVersion.netcoreapp.latest.txt") shift goto :Arg_Loop :ArchSet +if exist "%DOTNET_CMD%" goto :afterdotnetrestore echo Installing dotnet cli... if NOT exist "%DOTNET_PATH%" mkdir "%DOTNET_PATH%" diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index 55e692eb63..5b402c6ba1 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -9,6 +9,7 @@ // * P suffix means sparse (unaligned) partial vector - the vector is only part of a larger sparse vector. // * Tran means the matrix is transposed. 
+using Microsoft.ML.Runtime.Internal.CpuMath.Core; using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -158,9 +159,6 @@ public static unsafe void MatMul(AlignedArray mat, AlignedArray src, AlignedArra public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 4 == 0); - Contracts.Assert(ccol % 4 == 0); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pmat = &MemoryMarshal.GetReference(mat)) @@ -313,9 +311,6 @@ public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 8 == 0); - Contracts.Assert(ccol % 8 == 0); - // REVIEW: For extremely sparse inputs, interchanging the loops would // likely be more efficient. 
fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -473,9 +468,6 @@ public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, Aligned public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 4 == 0); - Contracts.Assert(ccol % 4 == 0); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pmat = &MemoryMarshal.GetReference(mat)) @@ -951,8 +943,6 @@ public static unsafe void Scale(float scale, Span dst) public static unsafe void ScaleSrcU(float scale, ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1046,8 +1036,6 @@ public static unsafe void ScaleAddU(float a, float b, Span dst) public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1100,8 +1088,6 @@ public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span src, ReadOnlySpan dst, Span result, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pres = &MemoryMarshal.GetReference(result)) @@ -1156,8 +1142,6 @@ public static unsafe void AddScaleCopyU(float scale, ReadOnlySpan src, Re public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1206,8 +1190,6 @@ public static unsafe void AddScaleSU(float 
scale, ReadOnlySpan src, ReadO public static unsafe void AddU(ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1255,8 +1237,6 @@ public static unsafe void AddU(ReadOnlySpan src, Span dst, int cou public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1738,8 +1718,6 @@ public static unsafe float MaxAbsDiffU(float mean, ReadOnlySpan src) public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1792,8 +1770,6 @@ public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan dst, ReadOnlySpan idx, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) @@ -1848,8 +1824,6 @@ public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan ds public static unsafe float Dist2(ReadOnlySpan src, ReadOnlySpan dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index 5d6f2ed134..9583fbe652 100644 --- a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -122,9 +122,6 @@ public static unsafe void 
MatMul(AlignedArray mat, AlignedArray src, AlignedArra public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 4 == 0); - Contracts.Assert(ccol % 4 == 0); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pmat = &MemoryMarshal.GetReference(mat)) @@ -285,9 +282,6 @@ public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 4 == 0); - Contracts.Assert(ccol % 4 == 0); - // REVIEW: For extremely sparse inputs, interchanging the loops would // likely be more efficient. fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -448,9 +442,6 @@ public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, Aligned public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { - Contracts.Assert(crow % 4 == 0); - Contracts.Assert(ccol % 4 == 0); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pmat = &MemoryMarshal.GetReference(mat)) @@ -893,8 +884,6 @@ public static unsafe void Scale(float scale, Span dst) public static unsafe void ScaleSrcU(float scale, ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -963,8 +952,6 @@ public static unsafe void ScaleAddU(float a, float b, Span dst) public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1004,8 +991,6 @@ 
public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span src, ReadOnlySpan dst, Span result, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pres = &MemoryMarshal.GetReference(result)) @@ -1047,8 +1032,6 @@ public static unsafe void AddScaleCopyU(float scale, ReadOnlySpan src, Re public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1085,8 +1068,6 @@ public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadO public static unsafe void AddU(ReadOnlySpan src, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1122,8 +1103,6 @@ public static unsafe void AddU(ReadOnlySpan src, Span dst, int cou public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1157,9 +1136,6 @@ public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, public static unsafe void MulElementWiseU(ReadOnlySpan src1, ReadOnlySpan src2, Span dst, int count) { - Contracts.Assert(src1.Length == dst.Length); - Contracts.Assert(src2.Length == dst.Length); - fixed (float* psrc1 = &MemoryMarshal.GetReference(src1)) fixed (float* psrc2 = &MemoryMarshal.GetReference(src2)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1494,8 +1470,6 @@ public static unsafe float MaxAbsDiffU(float 
mean, ReadOnlySpan src) public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1535,8 +1509,6 @@ public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan dst, ReadOnlySpan idx, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) @@ -1578,8 +1550,6 @@ public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan ds public static unsafe float Dist2(ReadOnlySpan src, ReadOnlySpan dst, int count) { - Contracts.Assert(src.Length == dst.Length); - fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs index 362e440935..f5d9006800 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs @@ -255,7 +255,7 @@ public void TestCrossValidationBinaryMacro() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void TestCrossValidationMacro() { var dataPath = GetDataPath(TestDatasets.generatedRegressionDatasetmacro.trainFilename); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index ec9cbd85e3..5e6b541acb 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -961,7 +961,7 @@ public void EntryPointPipelineEnsemble() 
getterAnom(ref scoreAnom); Assert.True(Single.IsNaN(scoreBin) && Single.IsNaN(score) || scoreBin == score); Assert.True(Single.IsNaN(scoreBinCali) && Single.IsNaN(score) || scoreBinCali == score); - Assert.True(Single.IsNaN(scoreSaved) && Single.IsNaN(score) || scoreSaved == score); + Assert.True(Single.IsNaN(scoreSaved) && Single.IsNaN(score) || CompareNumbersWithTolerance(scoreSaved, score, null, 6)); Assert.True(Single.IsNaN(scoreAnom) && Single.IsNaN(score) || scoreAnom == score); Single avg = 0; @@ -1341,7 +1341,7 @@ public void EntryPointMulticlassPipelineEnsemble() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] public void EntryPointPipelineEnsembleGetSummary() { var dataPath = GetDataPath("breast-cancer-withheader.txt"); @@ -1431,14 +1431,14 @@ public void EntryPointPipelineEnsembleGetSummary() var saver = Env.CreateSaver("Text"); using (var file = Env.CreateOutputFile(summary)) DataSaverUtils.SaveDataView(ch, saver, summaryDataViews.Summaries[i], file); - CheckEquality(@"../Common/EntryPoints", $"ensemble-model{i}-summary.txt"); + CheckEquality(@"../Common/EntryPoints", $"ensemble-model{i}-summary.txt", digitsOfPrecision: 4); if (summaryDataViews.Stats[i] != null) { var stats = DeleteOutputPath(@"../Common/EntryPoints", $"ensemble-model{i}-stats.txt"); using (var file = Env.CreateOutputFile(stats)) DataSaverUtils.SaveDataView(ch, saver, summaryDataViews.Stats[i], file); - CheckEquality(@"../Common/EntryPoints", $"ensemble-model{i}-stats.txt"); + CheckEquality(@"../Common/EntryPoints", $"ensemble-model{i}-stats.txt", digitsOfPrecision: 4); } } } @@ -1448,7 +1448,7 @@ public void EntryPointPipelineEnsembleGetSummary() using (var writer = Utils.OpenWriter(file)) summarizable.SaveSummary(writer, null); - CheckEquality(@"../Common/EntryPoints", "ensemble-summary.txt"); + CheckEquality(@"../Common/EntryPoints", "ensemble-summary.txt", digitsOfPrecision: 4); var summaryKvps = 
binaryEnsembleCalibrated.Predictor as ICanGetSummaryInKeyValuePairs; Assert.NotNull(summaryKvps); @@ -1472,7 +1472,7 @@ public void EntryPointPipelineEnsembleGetSummary() } } } - CheckEquality(@"../Common/EntryPoints", "ensemble-summary-key-value-pairs.txt"); + CheckEquality(@"../Common/EntryPoints", "ensemble-summary-key-value-pairs.txt", digitsOfPrecision: 4); Done(); } diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 3d4c7b63aa..9f900b5440 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -161,7 +161,7 @@ public void EarlyStoppingTest() /// /// Multiclass Logistic Regression test. /// - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory("Multiclass")] [TestCategory("Logistic Regression")] public void MulticlassLRTest() @@ -173,7 +173,7 @@ public void MulticlassLRTest() /// /// Multiclass Logistic Regression with non-negative coefficients test. 
/// - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory("Multiclass")] [TestCategory("Logistic Regression")] public void MulticlassLRNonNegativeTest() @@ -313,7 +313,7 @@ public void BinaryClassifierLogisticRegressionNormTest() /// ///A test for binary classifiers with non-negative coefficients /// - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // netcore3.0 and x86 output differs from Baseline [TestCategory("Binary")] public void BinaryClassifierLogisticRegressionNonNegativeTest() { @@ -326,7 +326,7 @@ public void BinaryClassifierLogisticRegressionNonNegativeTest() /// ///A test for binary classifiers /// - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory("Binary")] public void BinaryClassifierLogisticRegressionBinNormTest() { @@ -904,7 +904,7 @@ private void CombineAndTestEnsembles(IDataView idv, string name, string options, predGetters[i](ref preds[i]); } if (scores.All(s => !float.IsNaN(s))) - Assert.Equal(score, scores.Sum() / predCount, 3); + CompareNumbersWithTolerance(score, scores.Sum() / predCount); for (int i = 0; i < predCount; i++) Assert.Equal(vectorScore.Length, vectorScores[i].Length); for (int i = 0; i < vectorScore.Length; i++) @@ -1561,7 +1561,7 @@ public IList GetDatasetsForCalibratorTest() /// ///A test for no calibrators /// - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory("Calibrator")] public void DefaultCalibratorPerceptronTest() { @@ -1573,7 +1573,7 @@ public void DefaultCalibratorPerceptronTest() /// ///A test for 
PAV calibrators /// - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory("Calibrator")] public void PAVCalibratorPerceptronTest() { @@ -1585,7 +1585,7 @@ public void PAVCalibratorPerceptronTest() /// ///A test for random calibrators /// - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // netcore3.0 and x86 output differs from Baseline [TestCategory("Calibrator")] public void RandomCalibratorPerceptronTest() { diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index 7a22368843..83836447e9 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -961,7 +961,7 @@ public void HogwildSGDBinaryClassification() Assert.InRange(metrics.Auprc, 0, 1); } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // This test is being fixed as part of issue #1441. + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // netcore3.0 and x86 output differs from Baseline. This test is being fixed as part of issue #1441. public void MatrixFactorization() { // Create a new context for ML.NET operations. 
It can be used for exception tracking and logging, diff --git a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs index 0a1e7ada2f..dbdbee29ae 100644 --- a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs +++ b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs @@ -24,6 +24,10 @@ public abstract partial class BaseTestBaseline : BaseTestClass { public const int DigitsOfPrecision = 7; + public static bool LessThanNetCore30OrNotNetCore { get; } = AppDomain.CurrentDomain.GetData("FX_PRODUCT_VERSION") == null ? true : false; + + public static bool LessThanNetCore30OrNotNetCoreAnd64BitProcess { get; } = LessThanNetCore30OrNotNetCore && Environment.Is64BitProcess; + protected BaseTestBaseline(ITestOutputHelper output) : base(output) { } diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs index a4593edbb1..bc62b2c9d0 100644 --- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs +++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs @@ -1172,7 +1172,7 @@ public void CommandTrainMlrWithLabelNames() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline [TestCategory(Cat), TestCategory("Multiclass"), TestCategory("Logistic Regression")] public void CommandTrainMlrWithStats() { diff --git a/test/Microsoft.ML.Tests/CSharpCodeGen.cs b/test/Microsoft.ML.Tests/CSharpCodeGen.cs index d16a9924f1..a2f2fa7459 100644 --- a/test/Microsoft.ML.Tests/CSharpCodeGen.cs +++ b/test/Microsoft.ML.Tests/CSharpCodeGen.cs @@ -22,7 +22,7 @@ public void RegenerateCSharpApi() Runtime.Tools.Maml.Main(new[] { $"? 
generator=cs{{csFilename={basePath}}}" }); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(LessThanNetCore30OrNotNetCore))] public void TestGeneratedCSharpAPI() { var dataPath = GetOutputPath("Api.cs"); diff --git a/test/Microsoft.ML.Tests/OnnxTests.cs b/test/Microsoft.ML.Tests/OnnxTests.cs index 320af0adde..9b53f9aba8 100644 --- a/test/Microsoft.ML.Tests/OnnxTests.cs +++ b/test/Microsoft.ML.Tests/OnnxTests.cs @@ -351,7 +351,7 @@ public void WordEmbeddingsTest() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 differs from Baseline public void KmeansTest() { string dataPath = GetDataPath(@"breast-cancer.txt"); diff --git a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs index aa7d6f2b5e..258f9d6e4a 100644 --- a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs @@ -393,7 +393,7 @@ public void TestLpNormOldSavingAndLoading() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void GcnWorkout() { string dataSource = GetDataPath(TestDatasets.generatedRegressionDataset.trainFilename); diff --git a/test/Microsoft.ML.Tests/Transformers/RffTests.cs b/test/Microsoft.ML.Tests/Transformers/RffTests.cs index d647eddbca..f231082757 100644 --- a/test/Microsoft.ML.Tests/Transformers/RffTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/RffTests.cs @@ -37,7 +37,7 @@ private class TestClassInvalidSchema public int A; } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void RffWorkout() { Random rand = new Random(); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs 
index 308e45c247..f20de461f4 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -72,7 +72,7 @@ public void SavePipeSsaSpike() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void SavePipeSsaSpikeNoData() { string pathData = DeleteOutputPath("SavePipe", "SsaSpikeNoData.txt"); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs index 8cc54896a8..0bd4604daa 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs @@ -9,6 +9,7 @@ using Microsoft.ML.Data; using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.TimeSeriesProcessing; using Microsoft.ML.TimeSeries; using Xunit; @@ -86,7 +87,7 @@ public void ChangeDetection() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void ChangePointDetectionWithSeasonality() { var env = new MLContext(conc: 1); @@ -136,7 +137,7 @@ public void ChangePointDetectionWithSeasonality() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() { const int ChangeHistorySize = 10; @@ -189,10 +190,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() var engine2 = model2.CreateTimeSeriesPredictionFunction(ml); var prediction2 = engine2.Predict(new Data(1)); //Raw score after first input. 
- Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 7); // Raw score + Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 5); // Raw score prediction2 = engine2.Predict(new Data(1)); //Raw score after second input. - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 7); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score //Even though time series column is not requested it will // pass the observation through time series transform and update the state with the first input. @@ -209,10 +210,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() //and raw score should match the raw score obtained by passing the two input in the first model. var engine3 = model3.CreateTimeSeriesPredictionFunction(ml); var prediction3 = engine3.Predict(new Data(1)); - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 7); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] public void ChangePointDetectionWithSeasonalityPredictionEngine() { const int ChangeHistorySize = 10; @@ -252,7 +253,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() var engine = model.CreateTimeSeriesPredictionFunction(ml); var prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(1.1661833524703979, prediction.Change[1], precision: 7); // Raw score + Assert.Equal(1.1661833524703979, prediction.Change[1], precision: 5); // Raw score Assert.Equal(0.5, prediction.Change[2], precision: 7); // P-Value score Assert.Equal(5.1200000000000114E-08, prediction.Change[3], precision: 7); // Martingale score @@ -263,8 +264,8 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() //Model 1: Prediction #2 prediction = engine.Predict(new 
Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 7); // Raw score - Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 7); // P-Value score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score + Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 7); // Martingale score // Load Model 1. @@ -276,9 +277,9 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() engine = model2.CreateTimeSeriesPredictionFunction(ml); prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 7); // Raw score - Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 7); // P-Value score - Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 7); // Martingale score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score + Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score + Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 5); // Martingale score } } } diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index 3d7cfd5d32..a7892701d7 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -41,7 +41,7 @@ public TimeSeriesEstimatorTests(ITestOutputHelper output) : base(output) { } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline void TestSsaChangePointEstimator() { int Confidence = 95; @@ -75,7 +75,7 @@ void 
TestSsaChangePointEstimator() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline void TestSsaSpikeEstimator() { int Confidence = 95; From 533e18647a77c31f8149ccf2618ccb2e354f8e62 Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Wed, 28 Nov 2018 15:06:11 -0800 Subject: [PATCH 004/100] Bug in ApplyAt (#1757) --- .../Utilities/VBufferUtils.cs | 2 +- .../UnitTests/TestVBuffer.cs | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs diff --git a/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs b/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs index f730d61724..ac07e3e614 100644 --- a/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs +++ b/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs @@ -405,7 +405,7 @@ public static void ApplyAt(ref VBuffer dst, int slot, SlotValueManipulator // we are modifying in the sparse vector, in which case the vector becomes // dense. Then there is no need to do anything with indices. bool needIndices = dstValuesCount + 1 < dst.Length; - editor = VBufferEditor.Create(ref dst, dst.Length, dstValuesCount + 1); + editor = VBufferEditor.Create(ref dst, dst.Length, dstValuesCount + 1, keepOldOnResize: true); if (idx != dstValuesCount) { // We have to do some sort of shift copy. diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs new file mode 100644 index 0000000000..3712b06038 --- /dev/null +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs @@ -0,0 +1,25 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Internal.Utilities; +using Xunit; + +namespace Microsoft.ML.Core.Tests.UnitTests +{ + public sealed class TestVBuffer + { + [Fact] + public void TestApplyAt() + { + var buffer = new VBuffer(10, 3, new[] { 0.5f, 1.2f, -3.8f }, new[] { 1, 5, 8 }); + VBufferUtils.ApplyAt(ref buffer, 6, (int slot, ref float value) => { value = value + 1; }); + Assert.Equal(4, buffer.GetValues().Length); + Assert.Equal(1, buffer.GetValues()[2]); + } + } +} From 4387205bbb5a9c538864e1b816d02b3f4901acb9 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Thu, 29 Nov 2018 12:16:54 -0800 Subject: [PATCH 005/100] Correcting Documentation, adding asserts and disabling failing ci tests (#1777) * correcting Documentation, adding asserts and disabling failing ci tests * Correcting asserts --- docs/building/netcoreapp3.0-instructions.md | 8 +++--- src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 25 ++++++++++++++++++ src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 26 +++++++++++++++++++ .../UnitTests/TestEntryPoints.cs | 2 +- .../TestPredictors.cs | 2 +- .../TimeSeries.cs | 2 +- 6 files changed, 57 insertions(+), 8 deletions(-) diff --git a/docs/building/netcoreapp3.0-instructions.md b/docs/building/netcoreapp3.0-instructions.md index 338e06ef83..d65bcba34b 100644 --- a/docs/building/netcoreapp3.0-instructions.md +++ b/docs/building/netcoreapp3.0-instructions.md @@ -1,10 +1,8 @@ In order to build ML.NET for .NET Core 3.0, you need to do a few manual steps. -1. Pick a version of the .NET Core 3.0 SDK you want to use. As of this writing, I'm using `3.0.100-alpha1-009622`. You can get the latest available version from the [dotnet/core-sdk README](https://github.com/dotnet/core-sdk#installers-and-binaries) page. -2. Change the [DotnetCLIVersion.txt](https://github.com/dotnet/machinelearning/blob/master/DotnetCLIVersion.txt) file to use that version number. -3. 
Delete the local `.\Tools\` folder from the root of the repo, to ensure you download the new version. -4. Run `.\build.cmd -- /p:Configuration=Release-Intrinsics` from the root of the repo. -5. If you want to build the NuGet packages, `.\build.cmd -buildPackages` after step 4. +1. Delete the local `.\Tools\` folder from the root of the repo, to ensure you download the new version of the .NET Core SDK. +2. Run `.\build.cmd -- /p:Configuration=Release-Intrinsics` or `.\build.cmd -Release-Intrinsics` from the root of the repo. +3. If you want to build the NuGet packages, `.\build.cmd -buildPackages` after step 2. If you are using Visual Studio, you will need to do the following: diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index 5b402c6ba1..2156ddf5fa 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -943,6 +943,8 @@ public static unsafe void Scale(float scale, Span dst) public static unsafe void ScaleSrcU(float scale, ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1036,6 +1038,8 @@ public static unsafe void ScaleAddU(float a, float b, Span dst) public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1088,6 +1092,9 @@ public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span src, ReadOnlySpan dst, Span result, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= result.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = 
&MemoryMarshal.GetReference(dst)) fixed (float* pres = &MemoryMarshal.GetReference(result)) @@ -1142,6 +1149,9 @@ public static unsafe void AddScaleCopyU(float scale, ReadOnlySpan src, Re public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1190,6 +1200,8 @@ public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadO public static unsafe void AddU(ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1237,6 +1249,9 @@ public static unsafe void AddU(ReadOnlySpan src, Span dst, int cou public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1282,6 +1297,9 @@ public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, public static unsafe void MulElementWiseU(ReadOnlySpan src1, ReadOnlySpan src2, Span dst, int count) { + Contracts.Assert(count <= src1.Length); + Contracts.Assert(count <= src2.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc1 = &MemoryMarshal.GetReference(src1)) fixed (float* psrc2 = &MemoryMarshal.GetReference(src2)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1718,6 +1736,8 @@ public static unsafe float MaxAbsDiffU(float mean, ReadOnlySpan src) public 
static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1770,6 +1790,9 @@ public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan dst, ReadOnlySpan idx, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) @@ -1824,6 +1847,8 @@ public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan ds public static unsafe float Dist2(ReadOnlySpan src, ReadOnlySpan dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index 9583fbe652..b83fd6bbc6 100644 --- a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -13,6 +13,7 @@ // * D suffix means convolution matrix, with implicit source padding. // * Tran means the matrix is transposed. 
+using Microsoft.ML.Runtime.Internal.CpuMath.Core; using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -884,6 +885,8 @@ public static unsafe void Scale(float scale, Span dst) public static unsafe void ScaleSrcU(float scale, ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -952,6 +955,8 @@ public static unsafe void ScaleAddU(float a, float b, Span dst) public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -991,6 +996,9 @@ public static unsafe void AddScaleU(float scale, ReadOnlySpan src, Span src, ReadOnlySpan dst, Span result, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= result.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (float* pres = &MemoryMarshal.GetReference(result)) @@ -1032,6 +1040,9 @@ public static unsafe void AddScaleCopyU(float scale, ReadOnlySpan src, Re public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1068,6 +1079,8 @@ public static unsafe void AddScaleSU(float scale, ReadOnlySpan src, ReadO public static unsafe void AddU(ReadOnlySpan src, Span dst, int count) { + Contracts.Assert(count <= 
src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1103,6 +1116,9 @@ public static unsafe void AddU(ReadOnlySpan src, Span dst, int cou public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, Span dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1136,6 +1152,9 @@ public static unsafe void AddSU(ReadOnlySpan src, ReadOnlySpan idx, public static unsafe void MulElementWiseU(ReadOnlySpan src1, ReadOnlySpan src2, Span dst, int count) { + Contracts.Assert(count <= src1.Length); + Contracts.Assert(count <= src2.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc1 = &MemoryMarshal.GetReference(src1)) fixed (float* psrc2 = &MemoryMarshal.GetReference(src2)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) @@ -1470,6 +1489,8 @@ public static unsafe float MaxAbsDiffU(float mean, ReadOnlySpan src) public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { @@ -1509,6 +1530,9 @@ public static unsafe float DotU(ReadOnlySpan src, ReadOnlySpan dst public static unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan dst, ReadOnlySpan idx, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); + Contracts.Assert(count <= idx.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) fixed (int* pidx = &MemoryMarshal.GetReference(idx)) @@ -1550,6 +1574,8 @@ public static 
unsafe float DotSU(ReadOnlySpan src, ReadOnlySpan ds public static unsafe float Dist2(ReadOnlySpan src, ReadOnlySpan dst, int count) { + Contracts.Assert(count <= src.Length); + Contracts.Assert(count <= dst.Length); fixed (float* psrc = &MemoryMarshal.GetReference(src)) fixed (float* pdst = &MemoryMarshal.GetReference(dst)) { diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index 5e6b541acb..107c83eb7d 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -961,7 +961,7 @@ public void EntryPointPipelineEnsemble() getterAnom(ref scoreAnom); Assert.True(Single.IsNaN(scoreBin) && Single.IsNaN(score) || scoreBin == score); Assert.True(Single.IsNaN(scoreBinCali) && Single.IsNaN(score) || scoreBinCali == score); - Assert.True(Single.IsNaN(scoreSaved) && Single.IsNaN(score) || CompareNumbersWithTolerance(scoreSaved, score, null, 6)); + Assert.True(Single.IsNaN(scoreSaved) && Single.IsNaN(score) || CompareNumbersWithTolerance(scoreSaved, score, null, 5)); Assert.True(Single.IsNaN(scoreAnom) && Single.IsNaN(score) || scoreAnom == score); Single avg = 0; diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 9f900b5440..4342fcc49f 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -339,7 +339,7 @@ public void BinaryClassifierLogisticRegressionBinNormTest() /// ///A test for binary classifiers /// - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // x86 output differs from Baseline and flaky on netcore 3.0 [TestCategory("Binary")] public void BinaryClassifierLogisticRegressionGaussianNormTest() 
{ diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs index f20de461f4..39340d225b 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -158,7 +158,7 @@ public void SavePipePercentileThreshold() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // Test is Flaky on netcore 3.0 public void SavePipeMovingAverageUniform() { TestCore(null, true, From c3a20faa31c22eb85806d325dfe9f12d308c772e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20S=C4=99k?= Date: Thu, 29 Nov 2018 22:44:59 +0100 Subject: [PATCH 006/100] Move evaluation results to separate classes (#1663) * Move evaluation results to stand alone metrics classes --- .../Evaluators/BinaryClassifierEvaluator.cs | 161 +----------------- .../Evaluators/ClusteringEvaluator.cs | 46 +---- .../Evaluators/EvaluatorStaticExtensions.cs | 15 +- .../Metrics/BinaryClassificationMetrics.cs | 113 ++++++++++++ .../CalibratedBinaryClassificationMetrics.cs | 54 ++++++ .../Evaluators/Metrics/ClusteringMetrics.cs | 49 ++++++ .../Metrics/MultiClassClassifierMetrics.cs | 100 +++++++++++ .../Evaluators/Metrics/RankerMetrics.cs | 44 +++++ .../Evaluators/Metrics/RegressionMetrics.cs | 76 +++++++++ ...or.cs => MultiClassClassifierEvaluator.cs} | 97 +---------- .../Evaluators/RankerEvaluator.cs | 40 +---- .../Evaluators/RegressionEvaluator.cs | 79 +-------- src/Microsoft.ML.Data/TrainContext.cs | 29 ++-- .../Training/TrainingStaticExtensions.cs | 18 +- .../RecommenderCatalog.cs | 9 +- .../PermutationFeatureImportanceExtensions.cs | 21 +-- src/Native/MatrixFactorizationNative/libmf | 2 +- .../Scenarios/SentimentPredictionTests.cs | 6 +- .../IrisPlantClassificationTests.cs | 2 +- .../SentimentPredictionTests.cs | 5 +- 20 files changed, 512 insertions(+), 454 deletions(-) create mode 100644 
src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs create mode 100644 src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs create mode 100644 src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs create mode 100644 src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs create mode 100644 src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs create mode 100644 src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs rename src/Microsoft.ML.Data/Evaluators/{MulticlassClassifierEvaluator.cs => MultiClassClassifierEvaluator.cs} (91%) diff --git a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs index a06e038571..528f24efce 100644 --- a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs @@ -790,155 +790,6 @@ private void ComputePrCurves() } } - /// - /// Evaluation results for binary classifiers, excluding probabilistic metrics. - /// - public class Result - { - /// - /// Gets the area under the ROC curve. - /// - /// - /// The area under the ROC curve is equal to the probability that the classifier ranks - /// a randomly chosen positive instance higher than a randomly chosen negative one - /// (assuming 'positive' ranks higher than 'negative'). - /// - public double Auc { get; } - - /// - /// Gets the accuracy of a classifier which is the proportion of correct predictions in the test set. - /// - public double Accuracy { get; } - - /// - /// Gets the positive precision of a classifier which is the proportion of correctly predicted - /// positive instances among all the positive predictions (i.e., the number of positive instances - /// predicted as positive, divided by the total number of instances predicted as positive). 
- /// - public double PositivePrecision { get; } - - /// - /// Gets the positive recall of a classifier which is the proportion of correctly predicted - /// positive instances among all the positive instances (i.e., the number of positive instances - /// predicted as positive, divided by the total number of positive instances). - /// - public double PositiveRecall { get; private set; } - - /// - /// Gets the negative precision of a classifier which is the proportion of correctly predicted - /// negative instances among all the negative predictions (i.e., the number of negative instances - /// predicted as negative, divided by the total number of instances predicted as negative). - /// - public double NegativePrecision { get; } - - /// - /// Gets the negative recall of a classifier which is the proportion of correctly predicted - /// negative instances among all the negative instances (i.e., the number of negative instances - /// predicted as negative, divided by the total number of negative instances). - /// - public double NegativeRecall { get; } - - /// - /// Gets the F1 score of the classifier. - /// - /// - /// F1 score is the harmonic mean of precision and recall: 2 * precision * recall / (precision + recall). - /// - public double F1Score { get; } - - /// - /// Gets the area under the precision/recall curve of the classifier. - /// - /// - /// The area under the precision/recall curve is a single number summary of the information in the - /// precision/recall curve. It is increasingly used in the machine learning community, particularly - /// for imbalanced datasets where one class is observed more frequently than the other. On these - /// datasets, AUPRC can highlight performance differences that are lost with AUC. 
- /// - public double Auprc { get; } - - protected private static T Fetch(IExceptionContext ectx, IRow row, string name) - { - if (!row.Schema.TryGetColumnIndex(name, out int col)) - throw ectx.Except($"Could not find column '{name}'"); - T val = default; - row.GetGetter(col)(ref val); - return val; - } - - internal Result(IExceptionContext ectx, IRow overallResult) - { - double Fetch(string name) => Fetch(ectx, overallResult, name); - Auc = Fetch(BinaryClassifierEvaluator.Auc); - Accuracy = Fetch(BinaryClassifierEvaluator.Accuracy); - PositivePrecision = Fetch(BinaryClassifierEvaluator.PosPrecName); - PositiveRecall = Fetch(BinaryClassifierEvaluator.PosRecallName); - NegativePrecision = Fetch(BinaryClassifierEvaluator.NegPrecName); - NegativeRecall = Fetch(BinaryClassifierEvaluator.NegRecallName); - F1Score = Fetch(BinaryClassifierEvaluator.F1); - Auprc = Fetch(BinaryClassifierEvaluator.AuPrc); - } - - [BestFriend] - internal Result(double auc, double accuracy, double positivePrecision, double positiveRecall, - double negativePrecision, double negativeRecall, double f1Score, double auprc) - { - Auc = auc; - Accuracy = accuracy; - PositivePrecision = positivePrecision; - PositiveRecall = positiveRecall; - NegativePrecision = negativePrecision; - NegativeRecall = negativeRecall; - F1Score = f1Score; - Auprc = auprc; - } - } - - /// - /// Evaluation results for binary classifiers, including probabilistic metrics. - /// - public sealed class CalibratedResult : Result - { - /// - /// Gets the log-loss of the classifier. - /// - /// - /// The log-loss metric, is computed as follows: - /// LL = - (1/m) * sum( log(p[i])) - /// where m is the number of instances in the test set. - /// p[i] is the probability returned by the classifier if the instance belongs to class 1, - /// and 1 minus the probability returned by the classifier if the instance belongs to class 0. 
- /// - public double LogLoss { get; } - - /// - /// Gets the log-loss reduction (also known as relative log-loss, or reduction in information gain - RIG) - /// of the classifier. - /// - /// - /// The log-loss reduction is scaled relative to a classifier that predicts the prior for every example: - /// (LL(prior) - LL(classifier)) / LL(prior) - /// This metric can be interpreted as the advantage of the classifier over a random prediction. - /// For example, if the RIG equals 20, it can be interpreted as "the probability of a correct prediction is - /// 20% better than random guessing." - /// - public double LogLossReduction { get; } - - /// - /// Gets the test-set entropy (prior Log-Loss/instance) of the classifier. - /// - public double Entropy { get; } - - internal CalibratedResult(IExceptionContext ectx, IRow overallResult) - : base(ectx, overallResult) - { - double Fetch(string name) => Fetch(ectx, overallResult, name); - LogLoss = Fetch(BinaryClassifierEvaluator.LogLoss); - LogLossReduction = Fetch(BinaryClassifierEvaluator.LogLossReduction); - Entropy = Fetch(BinaryClassifierEvaluator.Entropy); - } - } - /// /// Evaluates scored binary classification data. /// @@ -948,7 +799,7 @@ internal CalibratedResult(IExceptionContext ectx, IRow overallResult) /// The name of the probability column in , the calibrated version of . /// The name of the predicted label column in . /// The evaluation results for these calibrated outputs. 
- public CalibratedResult Evaluate(IDataView data, string label, string score, string probability, string predictedLabel) + public CalibratedBinaryClassificationMetrics Evaluate(IDataView data, string label, string score, string probability, string predictedLabel) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -966,12 +817,12 @@ public CalibratedResult Evaluate(IDataView data, string label, string score, str Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - CalibratedResult result; + CalibratedBinaryClassificationMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new CalibratedResult(Host, cursor); + result = new CalibratedBinaryClassificationMetrics(Host, cursor); moved = cursor.MoveNext(); Host.Assert(!moved); } @@ -987,7 +838,7 @@ public CalibratedResult Evaluate(IDataView data, string label, string score, str /// The name of the predicted label column in . /// The evaluation results for these uncalibrated outputs. 
/// - public Result Evaluate(IDataView data, string label, string score, string predictedLabel) + public BinaryClassificationMetrics Evaluate(IDataView data, string label, string score, string predictedLabel) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -1003,12 +854,12 @@ public Result Evaluate(IDataView data, string label, string score, string predic Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - Result result; + BinaryClassificationMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new Result(Host, cursor); + result = new BinaryClassificationMetrics(Host, cursor); moved = cursor.MoveNext(); Host.Assert(!moved); } diff --git a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs index 7e3a39b175..7d015da046 100644 --- a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs @@ -62,7 +62,7 @@ public ClusteringEvaluator(IHostEnvironment env, Arguments args) /// The name of the optional label column in . /// The name of the optional feature column in . /// The evaluation results. 
- public Result Evaluate(IDataView data, string score, string label = null, string features = null) + public ClusteringMetrics Evaluate(IDataView data, string score, string label = null, string features = null) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(score, nameof(score)); @@ -82,12 +82,12 @@ public Result Evaluate(IDataView data, string score, string label = null, string Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - Result result; + ClusteringMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new Result(Host, cursor, _calculateDbi); + result = new ClusteringMetrics(Host, cursor, _calculateDbi); moved = cursor.MoveNext(); Host.Assert(!moved); } @@ -559,46 +559,6 @@ private void AssertValid(bool assertGetters) } } } - - /// - /// The metrics generated after evaluating the clustering predictions. - /// - public sealed class Result - { - /// - /// Normalized Mutual Information - /// NMI is a measure of the mutual dependence of the variables. - /// Normalized variants work on data that already has cluster labels. - /// Its value ranged from 0 to 1, where higher numbers are better. - /// - public double Nmi { get; } - - /// - /// Average Score. For the K-Means algorithm, the 'score' is the distance from the centroid to the example. - /// The average score is, therefore, a measure of proximity of the examples to cluster centroids. - /// In other words, it's the 'cluster tightness' measure. - /// Note however, that this metric will only decrease if the number of clusters is increased, - /// and in the extreme case (where each distinct example is its own cluster) it will be equal to zero. - /// - public double AvgMinScore { get; } - - /// - /// Davies-Bouldin Index - /// DBI is a measure of the how much scatter is in the cluster and the cluster separation. 
- /// - public double Dbi { get; } - - internal Result(IExceptionContext ectx, IRow overallResult, bool calculateDbi) - { - double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); - - Nmi = Fetch(ClusteringEvaluator.Nmi); - AvgMinScore = Fetch(ClusteringEvaluator.AvgMinScore); - - if (calculateDbi) - Dbi = Fetch(ClusteringEvaluator.Dbi); - } - } } public sealed class ClusteringPerInstanceEvaluator : PerInstanceEvaluatorBase diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorStaticExtensions.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorStaticExtensions.cs index df58e4d46c..46972dc75d 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorStaticExtensions.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorStaticExtensions.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. using System; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Training; using Microsoft.ML.StaticPipe; using Microsoft.ML.StaticPipe.Runtime; @@ -24,7 +25,7 @@ public static class EvaluatorStaticExtensions /// The index delegate for columns from calibrated prediction of a binary classifier. /// Under typical scenarios, this will just be the same tuple of results returned from the trainer. /// The evaluation results for these calibrated outputs. - public static BinaryClassifierEvaluator.CalibratedResult Evaluate( + public static CalibratedBinaryClassificationMetrics Evaluate( this BinaryClassificationContext ctx, DataView data, Func> label, @@ -60,7 +61,7 @@ public static BinaryClassifierEvaluator.CalibratedResult Evaluate( /// The index delegate for columns from uncalibrated prediction of a binary classifier. /// Under typical scenarios, this will just be the same tuple of results returned from the trainer. /// The evaluation results for these uncalibrated outputs. 
- public static BinaryClassifierEvaluator.Result Evaluate( + public static BinaryClassificationMetrics Evaluate( this BinaryClassificationContext ctx, DataView data, Func> label, @@ -94,7 +95,7 @@ public static BinaryClassifierEvaluator.Result Evaluate( /// The optional index delegate for the label column. /// The optional index delegate for the features column. /// The evaluation metrics. - public static ClusteringEvaluator.Result Evaluate( + public static ClusteringMetrics Evaluate( this ClusteringContext ctx, DataView data, Func> score, @@ -127,11 +128,11 @@ public static ClusteringEvaluator.Result Evaluate( /// The index delegate for the label column. /// The index delegate for columns from the prediction of a multiclass classifier. /// Under typical scenarios, this will just be the same tuple of results returned from the trainer. - /// If given a positive value, the will be filled with + /// If given a positive value, the will be filled with /// the top-K accuracy, that is, the accuracy assuming we consider an example with the correct class within /// the top-K values as being stored "correctly." /// The evaluation metrics. - public static MultiClassClassifierEvaluator.Result Evaluate( + public static MultiClassClassifierMetrics Evaluate( this MulticlassClassificationContext ctx, DataView data, Func> label, @@ -178,7 +179,7 @@ private sealed class TrivialRegressionLossFactory : ISupportRegressionLossFactor /// The index delegate for predicted score column. /// Potentially custom loss function. If left unspecified defaults to . /// The evaluation metrics. - public static RegressionEvaluator.Result Evaluate( + public static RegressionMetrics Evaluate( this RegressionContext ctx, DataView data, Func> label, @@ -212,7 +213,7 @@ public static RegressionEvaluator.Result Evaluate( /// The index delegate for the groupId column. /// The index delegate for predicted score column. /// The evaluation metrics. 
- public static RankerEvaluator.Result Evaluate( + public static RankerMetrics Evaluate( this RankingContext ctx, DataView data, Func> label, diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs new file mode 100644 index 0000000000..102e43a894 --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs @@ -0,0 +1,113 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + /// + /// Evaluation results for binary classifiers, excluding probabilistic metrics. + /// + public class BinaryClassificationMetrics + { + /// + /// Gets the area under the ROC curve. + /// + /// + /// The area under the ROC curve is equal to the probability that the classifier ranks + /// a randomly chosen positive instance higher than a randomly chosen negative one + /// (assuming 'positive' ranks higher than 'negative'). + /// + public double Auc { get; } + + /// + /// Gets the accuracy of a classifier which is the proportion of correct predictions in the test set. + /// + public double Accuracy { get; } + + /// + /// Gets the positive precision of a classifier which is the proportion of correctly predicted + /// positive instances among all the positive predictions (i.e., the number of positive instances + /// predicted as positive, divided by the total number of instances predicted as positive). 
+ /// + public double PositivePrecision { get; } + + /// + /// Gets the positive recall of a classifier which is the proportion of correctly predicted + /// positive instances among all the positive instances (i.e., the number of positive instances + /// predicted as positive, divided by the total number of positive instances). + /// + public double PositiveRecall { get; private set; } + + /// + /// Gets the negative precision of a classifier which is the proportion of correctly predicted + /// negative instances among all the negative predictions (i.e., the number of negative instances + /// predicted as negative, divided by the total number of instances predicted as negative). + /// + public double NegativePrecision { get; } + + /// + /// Gets the negative recall of a classifier which is the proportion of correctly predicted + /// negative instances among all the negative instances (i.e., the number of negative instances + /// predicted as negative, divided by the total number of negative instances). + /// + public double NegativeRecall { get; } + + /// + /// Gets the F1 score of the classifier. + /// + /// + /// F1 score is the harmonic mean of precision and recall: 2 * precision * recall / (precision + recall). + /// + public double F1Score { get; } + + /// + /// Gets the area under the precision/recall curve of the classifier. + /// + /// + /// The area under the precision/recall curve is a single number summary of the information in the + /// precision/recall curve. It is increasingly used in the machine learning community, particularly + /// for imbalanced datasets where one class is observed more frequently than the other. On these + /// datasets, AUPRC can highlight performance differences that are lost with AUC. 
+ /// + public double Auprc { get; } + + protected private static T Fetch(IExceptionContext ectx, IRow row, string name) + { + if (!row.Schema.TryGetColumnIndex(name, out int col)) + throw ectx.Except($"Could not find column '{name}'"); + T val = default; + row.GetGetter(col)(ref val); + return val; + } + + internal BinaryClassificationMetrics(IExceptionContext ectx, IRow overallResult) + { + double Fetch(string name) => Fetch(ectx, overallResult, name); + Auc = Fetch(BinaryClassifierEvaluator.Auc); + Accuracy = Fetch(BinaryClassifierEvaluator.Accuracy); + PositivePrecision = Fetch(BinaryClassifierEvaluator.PosPrecName); + PositiveRecall = Fetch(BinaryClassifierEvaluator.PosRecallName); + NegativePrecision = Fetch(BinaryClassifierEvaluator.NegPrecName); + NegativeRecall = Fetch(BinaryClassifierEvaluator.NegRecallName); + F1Score = Fetch(BinaryClassifierEvaluator.F1); + Auprc = Fetch(BinaryClassifierEvaluator.AuPrc); + } + + [BestFriend] + internal BinaryClassificationMetrics(double auc, double accuracy, double positivePrecision, double positiveRecall, + double negativePrecision, double negativeRecall, double f1Score, double auprc) + { + Auc = auc; + Accuracy = accuracy; + PositivePrecision = positivePrecision; + PositiveRecall = positiveRecall; + NegativePrecision = negativePrecision; + NegativeRecall = negativeRecall; + F1Score = f1Score; + Auprc = auprc; + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs new file mode 100644 index 0000000000..c4f0861224 --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs @@ -0,0 +1,54 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + /// + /// Evaluation results for binary classifiers, including probabilistic metrics. + /// + public sealed class CalibratedBinaryClassificationMetrics : BinaryClassificationMetrics + { + /// + /// Gets the log-loss of the classifier. + /// + /// + /// The log-loss metric, is computed as follows: + /// LL = - (1/m) * sum( log(p[i])) + /// where m is the number of instances in the test set. + /// p[i] is the probability returned by the classifier if the instance belongs to class 1, + /// and 1 minus the probability returned by the classifier if the instance belongs to class 0. + /// + public double LogLoss { get; } + + /// + /// Gets the log-loss reduction (also known as relative log-loss, or reduction in information gain - RIG) + /// of the classifier. + /// + /// + /// The log-loss reduction is scaled relative to a classifier that predicts the prior for every example: + /// (LL(prior) - LL(classifier)) / LL(prior) + /// This metric can be interpreted as the advantage of the classifier over a random prediction. + /// For example, if the RIG equals 20, it can be interpreted as "the probability of a correct prediction is + /// 20% better than random guessing." + /// + public double LogLossReduction { get; } + + /// + /// Gets the test-set entropy (prior Log-Loss/instance) of the classifier. 
+ /// + public double Entropy { get; } + + internal CalibratedBinaryClassificationMetrics(IExceptionContext ectx, IRow overallResult) + : base(ectx, overallResult) + { + double Fetch(string name) => Fetch(ectx, overallResult, name); + LogLoss = Fetch(BinaryClassifierEvaluator.LogLoss); + LogLossReduction = Fetch(BinaryClassifierEvaluator.LogLossReduction); + Entropy = Fetch(BinaryClassifierEvaluator.Entropy); + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs new file mode 100644 index 0000000000..fc7e87e150 --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs @@ -0,0 +1,49 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + /// + /// The metrics generated after evaluating the clustering predictions. + /// + public sealed class ClusteringMetrics + { + /// + /// Normalized Mutual Information + /// NMI is a measure of the mutual dependence of the variables. + /// Normalized variants work on data that already has cluster labels. + /// Its value ranged from 0 to 1, where higher numbers are better. + /// + public double Nmi { get; } + + /// + /// Average Score. For the K-Means algorithm, the 'score' is the distance from the centroid to the example. + /// The average score is, therefore, a measure of proximity of the examples to cluster centroids. + /// In other words, it's the 'cluster tightness' measure. + /// Note however, that this metric will only decrease if the number of clusters is increased, + /// and in the extreme case (where each distinct example is its own cluster) it will be equal to zero. 
+ /// + public double AvgMinScore { get; } + + /// + /// Davies-Bouldin Index + /// DBI is a measure of the how much scatter is in the cluster and the cluster separation. + /// + public double Dbi { get; } + + internal ClusteringMetrics(IExceptionContext ectx, IRow overallResult, bool calculateDbi) + { + double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); + + Nmi = Fetch(ClusteringEvaluator.Nmi); + AvgMinScore = Fetch(ClusteringEvaluator.AvgMinScore); + + if (calculateDbi) + Dbi = Fetch(ClusteringEvaluator.Dbi); + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs new file mode 100644 index 0000000000..4eff184abc --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs @@ -0,0 +1,100 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + public sealed class MultiClassClassifierMetrics + { + /// + /// Gets the micro-average accuracy of the model. + /// + /// + /// The micro-average is the fraction of instances predicted correctly. + /// + /// The micro-average metric weighs each class according to the number of instances that belong + /// to it in the dataset. + /// + public double AccuracyMicro { get; } + + /// + /// Gets the macro-average accuracy of the model. + /// + /// + /// The macro-average is computed by taking the average over all the classes of the fraction + /// of correct predictions in this class (the number of correctly predicted instances in the class, + /// divided by the total number of instances in the class). 
+ /// + /// The macro-average metric gives the same weight to each class, no matter how many instances from + /// that class the dataset contains. + /// + public double AccuracyMacro { get; } + + /// + /// Gets the average log-loss of the classifier. + /// + /// + /// The log-loss metric, is computed as follows: + /// LL = - (1/m) * sum( log(p[i])) + /// where m is the number of instances in the test set. + /// p[i] is the probability returned by the classifier if the instance belongs to class 1, + /// and 1 minus the probability returned by the classifier if the instance belongs to class 0. + /// + public double LogLoss { get; } + + /// + /// Gets the log-loss reduction (also known as relative log-loss, or reduction in information gain - RIG) + /// of the classifier. + /// + /// + /// The log-loss reduction is scaled relative to a classifier that predicts the prior for every example: + /// (LL(prior) - LL(classifier)) / LL(prior) + /// This metric can be interpreted as the advantage of the classifier over a random prediction. + /// For example, if the RIG equals 20, it can be interpreted as "the probability of a correct prediction is + /// 20% better than random guessing". + /// + public double LogLossReduction { get; private set; } + + /// + /// If positive, this is the top-K for which the is calculated. + /// + public int TopK { get; } + + /// + /// If is positive, this is the relative number of examples where + /// the true label is one of the top k predicted labels by the predictor. + /// + public double TopKAccuracy { get; } + + /// + /// Gets the log-loss of the classifier for each class. + /// + /// + /// The log-loss metric, is computed as follows: + /// LL = - (1/m) * sum( log(p[i])) + /// where m is the number of instances in the test set. + /// p[i] is the probability returned by the classifier if the instance belongs to the class, + /// and 1 minus the probability returned by the classifier if the instance does not belong to the class. 
+ /// + public double[] PerClassLogLoss { get; } + + internal MultiClassClassifierMetrics(IExceptionContext ectx, IRow overallResult, int topK) + { + double FetchDouble(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); + AccuracyMicro = FetchDouble(MultiClassClassifierEvaluator.AccuracyMicro); + AccuracyMacro = FetchDouble(MultiClassClassifierEvaluator.AccuracyMacro); + LogLoss = FetchDouble(MultiClassClassifierEvaluator.LogLoss); + LogLossReduction = FetchDouble(MultiClassClassifierEvaluator.LogLossReduction); + TopK = topK; + if (topK > 0) + TopKAccuracy = FetchDouble(MultiClassClassifierEvaluator.TopKAccuracy); + + var perClassLogLoss = RowCursorUtils.Fetch>(ectx, overallResult, MultiClassClassifierEvaluator.PerClassLogLoss); + PerClassLogLoss = new double[perClassLogLoss.Length]; + perClassLogLoss.CopyTo(PerClassLogLoss); + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs new file mode 100644 index 0000000000..975d4a494c --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs @@ -0,0 +1,44 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + public sealed class RankerMetrics + { + /// + /// Array of normalized discounted cumulative gains where i-th element represent NDCG@i. + /// + /// + public double[] Ndcg { get; } + + /// + ///Array of discounted cumulative gains where i-th element represent DCG@i. + /// Discounted Cumulative gain + /// is the sum of the gains, for all the instances i, normalized by the natural logarithm of the instance + 1. + /// Note that unline the Wikipedia article, ML.Net uses the natural logarithm. 
+ /// + /// + public double[] Dcg { get; } + + private static T Fetch(IExceptionContext ectx, IRow row, string name) + { + if (!row.Schema.TryGetColumnIndex(name, out int col)) + throw ectx.Except($"Could not find column '{name}'"); + T val = default; + row.GetGetter(col)(ref val); + return val; + } + + internal RankerMetrics(IExceptionContext ectx, IRow overallResult) + { + VBuffer Fetch(string name) => Fetch>(ectx, overallResult, name); + + Dcg = Fetch(RankerEvaluator.Dcg).GetValues().ToArray(); + Ndcg = Fetch(RankerEvaluator.Ndcg).GetValues().ToArray(); + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs new file mode 100644 index 0000000000..8a9fd96b31 --- /dev/null +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs @@ -0,0 +1,76 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; + +namespace Microsoft.ML.Data +{ + public sealed class RegressionMetrics + { + /// + /// Gets the absolute loss of the model. + /// + /// + /// The absolute loss is defined as + /// L1 = (1/m) * sum( abs( yi - y'i)) + /// where m is the number of instances in the test set. + /// y'i are the predicted labels for each instance. + /// yi are the correct labels of each instance. + /// + public double L1 { get; } + + /// + /// Gets the squared loss of the model. + /// + /// + /// The squared loss is defined as + /// L2 = (1/m) * sum(( yi - y'i)^2) + /// where m is the number of instances in the test set. + /// y'i are the predicted labels for each instance. + /// yi are the correct labels of each instance. 
+ /// + public double L2 { get; } + + /// + /// Gets the root mean square loss (or RMS) which is the square root of the L2 loss. + /// + public double Rms { get; } + + /// + /// Gets the result of user defined loss function. + /// + /// + /// This is the average of a loss function defined by the user, + /// computed over all the instances in the test set. + /// + public double LossFn { get; } + + /// + /// Gets the R squared value of the model, which is also known as + /// the coefficient of determination​. + /// + public double RSquared { get; } + + internal RegressionMetrics(IExceptionContext ectx, IRow overallResult) + { + double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); + L1 = Fetch(RegressionEvaluator.L1); + L2 = Fetch(RegressionEvaluator.L2); + Rms = Fetch(RegressionEvaluator.Rms); + LossFn = Fetch(RegressionEvaluator.Loss); + RSquared = Fetch(RegressionEvaluator.RSquared); + } + + [BestFriend] + internal RegressionMetrics(double l1, double l2, double rms, double lossFunction, double rSquared) + { + L1 = l1; + L2 = l2; + Rms = rms; + LossFn = lossFunction; + RSquared = rSquared; + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/MulticlassClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs similarity index 91% rename from src/Microsoft.ML.Data/Evaluators/MulticlassClassifierEvaluator.cs rename to src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs index 93515e5315..273599f7f5 100644 --- a/src/Microsoft.ML.Data/Evaluators/MulticlassClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs @@ -496,97 +496,6 @@ public void GetSlotNames(ref VBuffer> slotNames) } } - public sealed class Result - { - /// - /// Gets the micro-average accuracy of the model. - /// - /// - /// The micro-average is the fraction of instances predicted correctly. 
- /// - /// The micro-average metric weighs each class according to the number of instances that belong - /// to it in the dataset. - /// - public double AccuracyMicro { get; } - - /// - /// Gets the macro-average accuracy of the model. - /// - /// - /// The macro-average is computed by taking the average over all the classes of the fraction - /// of correct predictions in this class (the number of correctly predicted instances in the class, - /// divided by the total number of instances in the class). - /// - /// The macro-average metric gives the same weight to each class, no matter how many instances from - /// that class the dataset contains. - /// - public double AccuracyMacro { get; } - - /// - /// Gets the average log-loss of the classifier. - /// - /// - /// The log-loss metric, is computed as follows: - /// LL = - (1/m) * sum( log(p[i])) - /// where m is the number of instances in the test set. - /// p[i] is the probability returned by the classifier if the instance belongs to class 1, - /// and 1 minus the probability returned by the classifier if the instance belongs to class 0. - /// - public double LogLoss { get; } - - /// - /// Gets the log-loss reduction (also known as relative log-loss, or reduction in information gain - RIG) - /// of the classifier. - /// - /// - /// The log-loss reduction is scaled relative to a classifier that predicts the prior for every example: - /// (LL(prior) - LL(classifier)) / LL(prior) - /// This metric can be interpreted as the advantage of the classifier over a random prediction. - /// For example, if the RIG equals 20, it can be interpreted as "the probability of a correct prediction is - /// 20% better than random guessing". - /// - public double LogLossReduction { get; private set; } - - /// - /// If positive, this is the top-K for which the is calculated. 
- /// - public int TopK { get; } - - /// - /// If is positive, this is the relative number of examples where - /// the true label is one of the top k predicted labels by the predictor. - /// - public double TopKAccuracy { get; } - - /// - /// Gets the log-loss of the classifier for each class. - /// - /// - /// The log-loss metric, is computed as follows: - /// LL = - (1/m) * sum( log(p[i])) - /// where m is the number of instances in the test set. - /// p[i] is the probability returned by the classifier if the instance belongs to the class, - /// and 1 minus the probability returned by the classifier if the instance does not belong to the class. - /// - public double[] PerClassLogLoss { get; } - - internal Result(IExceptionContext ectx, IRow overallResult, int topK) - { - double FetchDouble(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); - AccuracyMicro = FetchDouble(MultiClassClassifierEvaluator.AccuracyMicro); - AccuracyMacro = FetchDouble(MultiClassClassifierEvaluator.AccuracyMacro); - LogLoss = FetchDouble(MultiClassClassifierEvaluator.LogLoss); - LogLossReduction = FetchDouble(MultiClassClassifierEvaluator.LogLossReduction); - TopK = topK; - if (topK > 0) - TopKAccuracy = FetchDouble(MultiClassClassifierEvaluator.TopKAccuracy); - - var perClassLogLoss = RowCursorUtils.Fetch>(ectx, overallResult, MultiClassClassifierEvaluator.PerClassLogLoss); - PerClassLogLoss = new double[perClassLogLoss.Length]; - perClassLogLoss.CopyTo(PerClassLogLoss); - } - } - /// /// Evaluates scored multiclass classification data. /// @@ -595,7 +504,7 @@ internal Result(IExceptionContext ectx, IRow overallResult, int topK) /// The name of the score column in . /// The name of the predicted label column in . /// The evaluation results for these outputs. 
- public Result Evaluate(IDataView data, string label, string score, string predictedLabel) + public MultiClassClassifierMetrics Evaluate(IDataView data, string label, string score, string predictedLabel) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -611,12 +520,12 @@ public Result Evaluate(IDataView data, string label, string score, string predic Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - Result result; + MultiClassClassifierMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new Result(Host, cursor, _outputTopKAcc ?? 0); + result = new MultiClassClassifierMetrics(Host, cursor, _outputTopKAcc ?? 0); moved = cursor.MoveNext(); Host.Assert(!moved); } diff --git a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs index bffe37be58..a7aa5bc434 100644 --- a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs @@ -243,7 +243,7 @@ protected override void GetAggregatorConsolidationFuncs(Aggregator aggregator, A /// The name of the groupId column. /// The name of the predicted score column. /// The evaluation metrics for these outputs. 
- public Result Evaluate(IDataView data, string label, string groupId, string score) + public RankerMetrics Evaluate(IDataView data, string label, string groupId, string score) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -257,12 +257,12 @@ public Result Evaluate(IDataView data, string label, string groupId, string scor Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - Result result; + RankerMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new Result(Host, cursor); + result = new RankerMetrics(Host, cursor); moved = cursor.MoveNext(); Host.Assert(!moved); } @@ -538,40 +538,6 @@ public void GetSlotNames(ref VBuffer> slotNames) slotNames = editor.Commit(); } } - - public sealed class Result - { - /// - /// Normalized Discounted Cumulative Gain - /// - /// - public double[] Ndcg { get; } - - /// - /// Discounted Cumulative gain - /// is the sum of the gains, for all the instances i, normalized by the natural logarithm of the instance + 1. - /// Note that unline the Wikipedia article, ML.Net uses the natural logarithm. 
- /// - /// - public double[] Dcg { get; } - - private static T Fetch(IExceptionContext ectx, IRow row, string name) - { - if (!row.Schema.TryGetColumnIndex(name, out int col)) - throw ectx.Except($"Could not find column '{name}'"); - T val = default; - row.GetGetter(col)(ref val); - return val; - } - - internal Result(IExceptionContext ectx, IRow overallResult) - { - VBuffer Fetch(string name) => Fetch>(ectx, overallResult, name); - - Dcg = Fetch(RankerEvaluator.Dcg).GetValues().ToArray(); - Ndcg = Fetch(RankerEvaluator.Ndcg).GetValues().ToArray(); - } - } } public sealed class RankerPerInstanceTransform : IDataTransform diff --git a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs index df3c4a12fa..729e0bffe0 100644 --- a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs @@ -2,15 +2,15 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Float = System.Single; using Microsoft.ML.Data; -using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Model; -using System; +using Microsoft.ML.Runtime; using System.Collections.Generic; -using Float = System.Single; +using System; [assembly: LoadableClass(typeof(RegressionEvaluator), typeof(RegressionEvaluator), typeof(RegressionEvaluator.Arguments), typeof(SignatureEvaluator), "Regression Evaluator", RegressionEvaluator.LoadName, "Regression")] @@ -157,73 +157,6 @@ public override void AddColumn(ArrayDataViewBuilder dvBldr, string metricName, p } } - public sealed class Result - { - /// - /// Gets the absolute loss of the model. - /// - /// - /// The absolute loss is defined as - /// L1 = (1/m) * sum( abs( yi - y'i)) - /// where m is the number of instances in the test set. 
- /// y'i are the predicted labels for each instance. - /// yi are the correct labels of each instance. - /// - public double L1 { get; } - - /// - /// Gets the squared loss of the model. - /// - /// - /// The squared loss is defined as - /// L2 = (1/m) * sum(( yi - y'i)^2) - /// where m is the number of instances in the test set. - /// y'i are the predicted labels for each instance. - /// yi are the correct labels of each instance. - /// - public double L2 { get; } - - /// - /// Gets the root mean square loss (or RMS) which is the square root of the L2 loss. - /// - public double Rms { get; } - - /// - /// Gets the user defined loss function. - /// - /// - /// This is the average of a loss function defined by the user, - /// computed over all the instances in the test set. - /// - public double LossFn { get; } - - /// - /// Gets the R squared value of the model, which is also known as - /// the coefficient of determination​. - /// - public double RSquared { get; } - - internal Result(IExceptionContext ectx, IRow overallResult) - { - double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); - L1 = Fetch(RegressionEvaluator.L1); - L2 = Fetch(RegressionEvaluator.L2); - Rms = Fetch(RegressionEvaluator.Rms); - LossFn = Fetch(RegressionEvaluator.Loss); - RSquared = Fetch(RegressionEvaluator.RSquared); - } - - [BestFriend] - internal Result(double l1, double l2, double rms, double lossFunction, double rSquared) - { - L1 = l1; - L2 = l2; - Rms = rms; - LossFn = lossFunction; - RSquared = rSquared; - } - } - /// /// Evaluates scored regression data. /// @@ -231,7 +164,7 @@ internal Result(double l1, double l2, double rms, double lossFunction, double rS /// The name of the label column. /// The name of the predicted score column. /// The evaluation metrics for these outputs. 
- public Result Evaluate(IDataView data, string label, string score) + public RegressionMetrics Evaluate(IDataView data, string label, string score) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -244,12 +177,12 @@ public Result Evaluate(IDataView data, string label, string score) Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; - Result result; + RegressionMetrics result; using (var cursor = overall.GetRowCursor(i => true)) { var moved = cursor.MoveNext(); Host.Assert(moved); - result = new Result(Host, cursor); + result = new RegressionMetrics(Host, cursor); moved = cursor.MoveNext(); Host.Assert(!moved); } diff --git a/src/Microsoft.ML.Data/TrainContext.cs b/src/Microsoft.ML.Data/TrainContext.cs index 9418efae89..13eda65945 100644 --- a/src/Microsoft.ML.Data/TrainContext.cs +++ b/src/Microsoft.ML.Data/TrainContext.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Transforms; @@ -207,7 +208,7 @@ internal BinaryClassificationTrainers(BinaryClassificationContext ctx) /// The name of the probability column in , the calibrated version of . /// The name of the predicted label column in . /// The evaluation results for these calibrated outputs. 
- public BinaryClassifierEvaluator.CalibratedResult Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, + public CalibratedBinaryClassificationMetrics Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, string probability = DefaultColumnNames.Probability, string predictedLabel = DefaultColumnNames.PredictedLabel) { Host.CheckValue(data, nameof(data)); @@ -228,7 +229,7 @@ public BinaryClassifierEvaluator.CalibratedResult Evaluate(IDataView data, strin /// The name of the score column in . /// The name of the predicted label column in . /// The evaluation results for these uncalibrated outputs. - public BinaryClassifierEvaluator.Result EvaluateNonCalibrated(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, + public BinaryClassificationMetrics EvaluateNonCalibrated(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, string predictedLabel = DefaultColumnNames.PredictedLabel) { Host.CheckValue(data, nameof(data)); @@ -254,7 +255,7 @@ public BinaryClassifierEvaluator.Result EvaluateNonCalibrated(IDataView data, st /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. 
- public (BinaryClassifierEvaluator.Result metrics, ITransformer model, IDataView scoredTestData)[] CrossValidateNonCalibrated( + public (BinaryClassificationMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidateNonCalibrated( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); @@ -276,7 +277,7 @@ public BinaryClassifierEvaluator.Result EvaluateNonCalibrated(IDataView data, st /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. - public (BinaryClassifierEvaluator.CalibratedResult metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( + public (CalibratedBinaryClassificationMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); @@ -318,11 +319,11 @@ internal ClusteringTrainers(ClusteringContext ctx) /// The scored data. /// The name of the score column in . /// The name of the optional label column in . - /// If present, the metric will be computed. + /// If present, the metric will be computed. /// The name of the optional features column in . - /// If present, the metric will be computed. + /// If present, the metric will be computed. /// The evaluation result. - public ClusteringEvaluator.Result Evaluate(IDataView data, + public ClusteringMetrics Evaluate(IDataView data, string label = null, string score = DefaultColumnNames.Score, string features = null ) @@ -355,7 +356,7 @@ public ClusteringEvaluator.Result Evaluate(IDataView data, /// they are guaranteed to appear in the same subset (train or test). 
Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. - public (ClusteringEvaluator.Result metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( + public (ClusteringMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = null, string featuresColumn = null, string stratificationColumn = null) { var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); @@ -394,11 +395,11 @@ internal MulticlassClassificationTrainers(MulticlassClassificationContext ctx) /// The name of the label column in . /// The name of the score column in . /// The name of the predicted label column in . - /// If given a positive value, the will be filled with + /// If given a positive value, the will be filled with /// the top-K accuracy, that is, the accuracy assuming we consider an example with the correct class within /// the top-K values as being stored "correctly." /// The evaluation results for these calibrated outputs. - public MultiClassClassifierEvaluator.Result Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, + public MultiClassClassifierMetrics Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score, string predictedLabel = DefaultColumnNames.PredictedLabel, int topK = 0) { Host.CheckValue(data, nameof(data)); @@ -427,7 +428,7 @@ public MultiClassClassifierEvaluator.Result Evaluate(IDataView data, string labe /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. 
- public (MultiClassClassifierEvaluator.Result metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( + public (MultiClassClassifierMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); @@ -467,7 +468,7 @@ internal RegressionTrainers(RegressionContext ctx) /// The name of the label column in . /// The name of the score column in . /// The evaluation results for these calibrated outputs. - public RegressionEvaluator.Result Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score) + public RegressionMetrics Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -491,7 +492,7 @@ public RegressionEvaluator.Result Evaluate(IDataView data, string label = Defaul /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. - public (RegressionEvaluator.Result metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( + public (RegressionMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); @@ -532,7 +533,7 @@ internal RankingTrainers(RankingContext ctx) /// The name of the groupId column in . /// The name of the score column in . /// The evaluation results for these calibrated outputs. 
- public RankerEvaluator.Result Evaluate(IDataView data, string label, string groupId, string score = DefaultColumnNames.Score) + public RankerMetrics Evaluate(IDataView data, string label, string groupId, string score = DefaultColumnNames.Score) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); diff --git a/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs b/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs index 8cfda0485e..1107a24a83 100644 --- a/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs +++ b/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs @@ -3,14 +3,12 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Data; -using System; -using System.Collections.Generic; +using Microsoft.ML.StaticPipe.Runtime; +using Microsoft.ML.StaticPipe; using System.Linq; -using System.Text; +using System; namespace Microsoft.ML { @@ -73,7 +71,7 @@ public static (DataView trainSet, DataView testSet) TrainTestSplit(this /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. - public static (RegressionEvaluator.Result metrics, Transformer model, DataView scoredTestData)[] CrossValidate( + public static (RegressionMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this RegressionContext context, DataView data, Estimator estimator, @@ -129,7 +127,7 @@ public static (RegressionEvaluator.Result metrics, Transformer /// Per-fold results: metrics, models, scored datasets. 
- public static (MultiClassClassifierEvaluator.Result metrics, Transformer model, DataView scoredTestData)[] CrossValidate( + public static (MultiClassClassifierMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this MulticlassClassificationContext context, DataView data, Estimator estimator, @@ -185,7 +183,7 @@ public static (MultiClassClassifierEvaluator.Result metrics, Transformer /// Per-fold results: metrics, models, scored datasets. - public static (BinaryClassifierEvaluator.Result metrics, Transformer model, DataView scoredTestData)[] CrossValidateNonCalibrated( + public static (BinaryClassificationMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidateNonCalibrated( this BinaryClassificationContext context, DataView data, Estimator estimator, @@ -241,7 +239,7 @@ public static (BinaryClassifierEvaluator.Result metrics, Transformer /// Per-fold results: metrics, models, scored datasets. - public static (BinaryClassifierEvaluator.CalibratedResult metrics, Transformer model, DataView scoredTestData)[] CrossValidate( + public static (CalibratedBinaryClassificationMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this BinaryClassificationContext context, DataView data, Estimator estimator, diff --git a/src/Microsoft.ML.Recommender/RecommenderCatalog.cs b/src/Microsoft.ML.Recommender/RecommenderCatalog.cs index 1100b66013..2be553736f 100644 --- a/src/Microsoft.ML.Recommender/RecommenderCatalog.cs +++ b/src/Microsoft.ML.Recommender/RecommenderCatalog.cs @@ -3,11 +3,12 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Trainers; -using System; using System.Linq; +using System; namespace Microsoft.ML { @@ -71,7 +72,7 @@ public MatrixFactorizationTrainer MatrixFactorization( /// The name of the label column in . 
/// The name of the score column in . /// The evaluation results for these calibrated outputs. - public RegressionEvaluator.Result Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score) + public RegressionMetrics Evaluate(IDataView data, string label = DefaultColumnNames.Label, string score = DefaultColumnNames.Score) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(label, nameof(label)); @@ -95,7 +96,7 @@ public RegressionEvaluator.Result Evaluate(IDataView data, string label = Defaul /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. /// Per-fold results: metrics, models, scored datasets. - public (RegressionEvaluator.Result metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( + public (RegressionMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs index 5b726f2d41..07bfa4ebfc 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Transforms; @@ -51,7 +52,7 @@ public static class PermutationFeatureImportanceExtensions /// Use features weight to pre-filter features. /// Limit the number of examples to evaluate on. 
null means examples (up to ~ 2 bln) from input will be used. /// Array of per-feature 'contributions' to the score. - public static ImmutableArray + public static ImmutableArray PermutationFeatureImportance( this RegressionContext ctx, IPredictionTransformer model, @@ -61,7 +62,7 @@ public static ImmutableArray bool useFeatureWeightFilter = false, int? topExamples = null) { - return PermutationFeatureImportance.GetImportanceMetricsMatrix( + return PermutationFeatureImportance.GetImportanceMetricsMatrix( CatalogUtils.GetEnvironment(ctx), model, data, @@ -72,10 +73,10 @@ public static ImmutableArray topExamples); } - private static RegressionEvaluator.Result RegressionDelta( - RegressionEvaluator.Result a, RegressionEvaluator.Result b) + private static RegressionMetrics RegressionDelta( + RegressionMetrics a, RegressionMetrics b) { - return new RegressionEvaluator.Result( + return new RegressionMetrics( l1: a.L1 - b.L1, l2: a.L2 - b.L2, rms: a.Rms - b.Rms, @@ -96,7 +97,7 @@ private static RegressionEvaluator.Result RegressionDelta( /// Use features weight to pre-filter features. /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. /// Array of per-feature 'contributions' to the score. - public static ImmutableArray + public static ImmutableArray PermutationFeatureImportance( this BinaryClassificationContext ctx, IPredictionTransformer model, @@ -106,7 +107,7 @@ public static ImmutableArray bool useFeatureWeightFilter = false, int? 
topExamples = null) { - return PermutationFeatureImportance.GetImportanceMetricsMatrix( + return PermutationFeatureImportance.GetImportanceMetricsMatrix( CatalogUtils.GetEnvironment(ctx), model, data, @@ -117,10 +118,10 @@ public static ImmutableArray topExamples); } - private static BinaryClassifierEvaluator.Result BinaryClassifierDelta( - BinaryClassifierEvaluator.Result a, BinaryClassifierEvaluator.Result b) + private static BinaryClassificationMetrics BinaryClassifierDelta( + BinaryClassificationMetrics a, BinaryClassificationMetrics b) { - return new BinaryClassifierEvaluator.Result( + return new BinaryClassificationMetrics( auc: a.Auc - b.Auc, accuracy: a.Accuracy - b.Accuracy, positivePrecision: a.PositivePrecision - b.PositivePrecision, diff --git a/src/Native/MatrixFactorizationNative/libmf b/src/Native/MatrixFactorizationNative/libmf index f92a18161b..53a91e7e8c 160000 --- a/src/Native/MatrixFactorizationNative/libmf +++ b/src/Native/MatrixFactorizationNative/libmf @@ -1 +1 @@ -Subproject commit f92a18161b6824fda4c4ab698a69d299a836841a +Subproject commit 53a91e7e8c88463e97acfbbafb7134a6030860b3 diff --git a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs index 226257fc48..a8be68d31a 100644 --- a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs @@ -189,7 +189,7 @@ public void CrossValidateSentimentModelTest() Assert.True(predictions.ElementAt(1).Sentiment); } - private void ValidateBinaryMetricsSymSGD(BinaryClassificationMetrics metrics) + private void ValidateBinaryMetricsSymSGD(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics) { Assert.Equal(.8889, metrics.Accuracy, 4); @@ -222,7 +222,7 @@ private void ValidateBinaryMetricsSymSGD(BinaryClassificationMetrics metrics) } - private void ValidateBinaryMetricsLightGBM(BinaryClassificationMetrics metrics) + private void 
ValidateBinaryMetricsLightGBM(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics) { Assert.Equal(0.61111111111111116, metrics.Accuracy, 4); @@ -255,7 +255,7 @@ private void ValidateBinaryMetricsLightGBM(BinaryClassificationMetrics metrics) } - private void ValidateBinaryMetrics(BinaryClassificationMetrics metrics) + private void ValidateBinaryMetrics(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics) { Assert.Equal(0.6111, metrics.Accuracy, 4); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 9d7ac5cf81..01b3d75d13 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -99,7 +99,7 @@ private void ComparePredictions(PredictionFunction mod Assert.Equal(0, prediction.PredictedLabels[2], 2); } - private void CompareMatrics(MultiClassClassifierEvaluator.Result metrics) + private void CompareMatrics(MultiClassClassifierMetrics metrics) { Assert.Equal(.98, metrics.AccuracyMacro); Assert.Equal(.98, metrics.AccuracyMicro, 2); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs index 0d298a51b5..b04a360168 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs @@ -152,7 +152,7 @@ public void TrainAndPredictSentimentModelWithDirectionInstantiationTestWithWordE Assert.Equal(1.0, (double)summary[0].Value, 1); } - private BinaryClassificationMetrics EvaluateBinary(IHostEnvironment env, IDataView scoredData) + private Microsoft.ML.Legacy.Models.BinaryClassificationMetrics EvaluateBinary(IHostEnvironment 
env, IDataView scoredData) { var dataEval = new RoleMappedData(scoredData, label: "Label", feature: "Features", opt: true); @@ -163,7 +163,8 @@ private BinaryClassificationMetrics EvaluateBinary(IHostEnvironment env, IDataVi var evaluator = new BinaryClassifierMamlEvaluator(env, new BinaryClassifierMamlEvaluator.Arguments()); var metricsDic = evaluator.Evaluate(dataEval); - return BinaryClassificationMetrics.FromMetrics(env, metricsDic["OverallMetrics"], metricsDic["ConfusionMatrix"])[0]; + return Microsoft.ML.Legacy.Models.BinaryClassificationMetrics + .FromMetrics(env, metricsDic["OverallMetrics"], metricsDic["ConfusionMatrix"])[0]; } } #pragma warning restore 612 From 83e33db3fbb697b5c210dc16d13afd0ec3c3fc51 Mon Sep 17 00:00:00 2001 From: Vadim Eksarevskiy <42353187+vaeksare@users.noreply.github.com> Date: Thu, 29 Nov 2018 14:12:04 -0800 Subject: [PATCH 007/100] Add fixes to prevent potential build failures due to redist copying (#1775) * Prevent models from copying from each leg * Copying now checks for condition * Add correct property * Minor change to condition logic --- build/vsts-ci.yml | 6 +++--- config.json | 12 ++++++++++++ .../Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj | 1 + 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/build/vsts-ci.yml b/build/vsts-ci.yml index d0171921ac..ab77d0ed4a 100644 --- a/build/vsts-ci.yml +++ b/build/vsts-ci.yml @@ -24,7 +24,7 @@ phases: container: LinuxContainer steps: # Only build native assets to avoid conflicts. - - script: ./build.sh -buildNative -$(BuildConfig) + - script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets displayName: Build - task: PublishBuildArtifacts@1 @@ -49,7 +49,7 @@ phases: - agent.os -equals Darwin steps: # Only build native assets to avoid conflicts. 
- - script: ./build.sh -buildNative -$(BuildConfig) + - script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets displayName: Build - task: PublishBuildArtifacts@1 @@ -89,7 +89,7 @@ phases: condition: and(succeeded(), in(variables._SignType, 'real', 'test')) # Only build native assets to avoid conflicts. - - script: ./build.cmd -buildNative -$(BuildConfig) -buildArch=x86 + - script: ./build.cmd -buildNative -$(BuildConfig) -buildArch=x86 -skipRIDAgnosticAssets displayName: Build - task: MSBuild@1 diff --git a/config.json b/config.json index 54e56ddeb1..99f72f7051 100644 --- a/config.json +++ b/config.json @@ -30,6 +30,12 @@ "values": [], "defaultValue": "" }, + "SkipRIDAgnosticAssets": { + "description": "Prevents RID agnostic assets in redist from being built.", + "valueType": "property", + "values": [], + "defaultValue": "" + }, "MsBuildLogging": { "description": "MsBuild logging options.", "valueType": "passThrough", @@ -118,6 +124,12 @@ "BuildNative": "default" } }, + "skipRIDAgnosticAssets": { + "description": "Avoid building RID agnostic assets in redist.", + "settings": { + "SkipRIDAgnosticAssets": "default" + } + }, "buildPackages": { "description": "Builds the NuGet packages.", "settings": { diff --git a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj b/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj index 2c99b80db6..2401de0df0 100644 --- a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj +++ b/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj @@ -106,6 +106,7 @@ DependsOnTargets="DownloadDnnModelFiles"> From 8022c4fb9e8f0965367f571d95dd07892fec4b45 Mon Sep 17 00:00:00 2001 From: Abhishek Goswami Date: Thu, 29 Nov 2018 15:38:26 -0800 Subject: [PATCH 008/100] Add LDA example to Microsoft.ML.Samples (#1782) * Adding LDA sample 
to Microsoft.ML.Samples * review comments - 1. nit changes --- .../Dynamic/LdaTransform.cs | 63 +++++++++++++++++++ .../Microsoft.ML.Samples.csproj | 1 + docs/samples/Microsoft.ML.Samples/Program.cs | 2 +- .../Text/TextCatalog.cs | 7 +++ 4 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs new file mode 100644 index 0000000000..f2cbec7c28 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs @@ -0,0 +1,63 @@ +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; +using System; +using System.Collections.Generic; + +namespace Microsoft.ML.Samples.Dynamic +{ + public class LdaTransformExample + { + public static void LdaTransform() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Get a small dataset as an IEnumerable. + IEnumerable data = SamplesUtils.DatasetUtils.GetTopicsData(); + var trainData = ml.CreateStreamingDataView(data); + + // Preview of one of the columns of the the topics data. + // The Review column contains the keys associated with a particular body of text. + // + // Review + // "animals birds cats dogs fish horse" + // "horse birds house fish duck cats" + // "car truck driver bus pickup" + // "car truck driver bus pickup horse" + + string review = nameof(SamplesUtils.DatasetUtils.SampleTopicsData.Review); + string ldaFeatures = "LdaFeatures"; + + // A pipeline for featurizing the "Review" column + var pipeline = ml.Transforms.Text.ProduceWordBags(review). 
+ Append(ml.Transforms.Text.LatentDirichletAllocation(review, ldaFeatures, numTopic:3)); + + // The transformed data + var transformer = pipeline.Fit(trainData); + var transformed_data = transformer.Transform(trainData); + + // Column obtained after processing the input. + var ldaFeaturesColumn = transformed_data.GetColumn>(ml, ldaFeatures); + + Console.WriteLine($"{ldaFeatures} column obtained post-transformation."); + foreach (var featureRow in ldaFeaturesColumn) + { + foreach (var value in featureRow.GetValues()) + Console.Write($"{value} "); + Console.WriteLine(""); + } + + Console.WriteLine("==================================================="); + + // LdaFeatures column obtained post-transformation. + // For LDA, we had specified numTopic:3. Hence each row of text has been featurized as a vector of floats with length 3. + + //0.1818182 0.4545455 0.3636364 + //0.3636364 0.1818182 0.4545455 + //0.2222222 0.2222222 0.5555556 + //0.2727273 0.09090909 0.6363636 + } + } +} diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj index 2b9041c51b..abea9f3e8d 100644 --- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj +++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj @@ -17,6 +17,7 @@ + false diff --git a/docs/samples/Microsoft.ML.Samples/Program.cs b/docs/samples/Microsoft.ML.Samples/Program.cs index d8730391b2..83a7e8c2e8 100644 --- a/docs/samples/Microsoft.ML.Samples/Program.cs +++ b/docs/samples/Microsoft.ML.Samples/Program.cs @@ -6,7 +6,7 @@ internal static class Program { static void Main(string[] args) { - MatrixFactorizationExample.MatrixFactorizationInMemoryData(); + LdaTransformExample.LdaTransform(); } } } diff --git a/src/Microsoft.ML.Transforms/Text/TextCatalog.cs b/src/Microsoft.ML.Transforms/Text/TextCatalog.cs index aaed670b0b..52cf642d2f 100644 --- a/src/Microsoft.ML.Transforms/Text/TextCatalog.cs +++ 
b/src/Microsoft.ML.Transforms/Text/TextCatalog.cs @@ -507,6 +507,13 @@ public static NgramHashEstimator ProduceHashedNgrams(this TransformsCatalog.Text /// The number of words to summarize the topic. /// The number of burn-in iterations. /// Reset the random number generator for each document. + /// + /// + /// + /// + /// public static LatentDirichletAllocationEstimator LatentDirichletAllocation(this TransformsCatalog.TextTransforms catalog, string inputColumn, string outputColumn = null, From 706ef73a4bf81a977438e480a4c86e3adaee712d Mon Sep 17 00:00:00 2001 From: Eric Erhardt Date: Fri, 30 Nov 2018 18:18:21 +0000 Subject: [PATCH 009/100] Remove IRandom and replace with System.Random. (#1786) * Remove IRandom and replace with System.Random. * Make TauswortheHybrid internal/BestFriend. * Add NextSingle and NextSigned back. * Override Next(int, int) and NextBytes since they are public. * Change Single and Double to float and double. --- .../DataViewConstructionUtils.cs | 12 +- .../StatefulFilterTransform.cs | 4 +- src/Microsoft.ML.Api/TypedCursor.cs | 4 +- src/Microsoft.ML.Core/Data/IDataView.cs | 6 +- .../Data/IHostEnvironment.cs | 2 +- .../Environment/ConsoleEnvironment.cs | 8 +- .../Environment/HostEnvironmentBase.cs | 16 +- src/Microsoft.ML.Core/Utilities/Random.cs | 281 +++++------------- .../Utilities/ReservoirSampler.cs | 9 +- src/Microsoft.ML.Core/Utilities/Stats.cs | 24 +- src/Microsoft.ML.Core/Utilities/Utils.cs | 28 -- src/Microsoft.ML.Data/Data/DataViewUtils.cs | 2 +- src/Microsoft.ML.Data/Data/RowCursorUtils.cs | 4 +- .../DataLoadSave/Binary/BinaryLoader.cs | 8 +- .../DataLoadSave/Binary/BinarySaver.cs | 2 +- .../DataLoadSave/CompositeDataLoader.cs | 4 +- .../DataLoadSave/PartitionedFileLoader.cs | 8 +- .../DataLoadSave/Text/TextLoader.cs | 4 +- .../DataLoadSave/Transpose/TransposeLoader.cs | 4 +- .../DataView/AppendRowsDataView.cs | 12 +- .../DataView/ArrayDataViewBuilder.cs | 6 +- .../DataView/CacheDataView.cs | 10 +- .../DataView/EmptyDataView.cs | 4 
+- .../DataView/LambdaFilter.cs | 4 +- .../DataView/OpaqueDataView.cs | 4 +- .../DataView/RowToRowMapperTransform.cs | 4 +- src/Microsoft.ML.Data/DataView/Transposer.cs | 14 +- src/Microsoft.ML.Data/DataView/ZipDataView.cs | 4 +- .../Dirty/ChooseColumnsByIndexTransform.cs | 4 +- .../Evaluators/AucAggregator.cs | 12 +- .../Evaluators/RankerEvaluator.cs | 4 +- .../Scorers/RowToRowScorerBase.cs | 4 +- .../Training/TrainerUtils.cs | 20 +- .../Transforms/ColumnSelecting.cs | 4 +- .../Transforms/GenerateNumberTransform.cs | 4 +- src/Microsoft.ML.Data/Transforms/NAFilter.cs | 4 +- .../Transforms/NopTransform.cs | 4 +- .../Transforms/PerGroupTransformBase.cs | 4 +- .../Transforms/RangeFilter.cs | 4 +- .../Transforms/RowShufflingTransformer.cs | 16 +- .../Transforms/SkipTakeFilter.cs | 4 +- .../Transforms/TransformBase.cs | 10 +- .../Utilities/LocalEnvironment.cs | 6 +- .../FeatureSelector/AllFeatureSelector.cs | 3 +- .../FeatureSelector/RandomFeatureSelector.cs | 2 +- .../Selector/IFeatureSelector.cs | 3 +- .../Selector/ISubsetSelector.cs | 4 +- .../SubsetSelector/AllInstanceSelector.cs | 5 +- .../SubsetSelector/BaseSubsetSelector.cs | 4 +- .../SubsetSelector/BootstrapSelector.cs | 5 +- .../SubsetSelector/RandomPartitionSelector.cs | 2 +- .../SumupPerformanceCommand.cs | 8 +- .../KMeansPlusPlusTrainer.cs | 20 +- .../LightGbmTrainerBase.cs | 8 +- src/Microsoft.ML.PCA/PcaTrainer.cs | 2 +- src/Microsoft.ML.Parquet/ParquetLoader.cs | 8 +- .../Standard/SdcaBinary.cs | 8 +- .../Standard/SdcaMultiClass.cs | 2 +- .../Standard/Simple/SimpleTrainers.cs | 2 +- .../Algorithms/KdoSweeper.cs | 2 +- .../PValueTransform.cs | 2 +- .../SequentialTransformBase.cs | 4 +- .../SequentialTransformerBase.cs | 8 +- .../BootstrapSamplingTransformer.cs | 8 +- .../FourierDistributionSampler.cs | 7 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 4 +- .../OptionalColumnTransform.cs | 4 +- .../ProduceIdTransform.cs | 4 +- .../Text/NgramHashingTransformer.cs | 4 +- .../UngroupTransform.cs | 4 +- 
.../DataPipe/TestDataPipeBase.cs | 2 +- 71 files changed, 286 insertions(+), 449 deletions(-) diff --git a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs index 253c86aa5b..1d05d67f3b 100644 --- a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs @@ -400,10 +400,10 @@ protected DataViewBase(IHostEnvironment env, string name, InternalSchemaDefiniti public abstract long? GetRowCount(); - public abstract IRowCursor GetRowCursor(Func predicate, IRandom rand = null); + public abstract IRowCursor GetRowCursor(Func predicate, Random rand = null); public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, - int n, IRandom rand = null) + int n, Random rand = null) { consolidator = null; return new[] { GetRowCursor(predicate, rand) }; @@ -561,7 +561,7 @@ public override bool CanShuffle return _data.Count; } - public override IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public override IRowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); return new Cursor(Host, "ListDataView", this, predicate, rand); @@ -578,7 +578,7 @@ private int Index } public Cursor(IHostEnvironment env, string name, ListDataView dataView, - Func predicate, IRandom rand) + Func predicate, Random rand) : base(env, dataView, predicate) { Ch.AssertValueOrNull(rand); @@ -660,7 +660,7 @@ public override bool CanShuffle return (_data as ICollection)?.Count; } - public override IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public override IRowCursor GetRowCursor(Func predicate, Random rand = null) { return new Cursor(Host, this, predicate); } @@ -747,7 +747,7 @@ public void SetCurrentRowObject(TRow value) _current = value; } - public override IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public override IRowCursor GetRowCursor(Func predicate, Random rand = null) { 
Contracts.Assert(_current != null, "The current object must be set prior to cursoring"); return new Cursor(Host, this, predicate); diff --git a/src/Microsoft.ML.Api/StatefulFilterTransform.cs b/src/Microsoft.ML.Api/StatefulFilterTransform.cs index 04edfb42dd..332c3e202a 100644 --- a/src/Microsoft.ML.Api/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Api/StatefulFilterTransform.cs @@ -106,7 +106,7 @@ private StatefulFilterTransform(IHostEnvironment env, StatefulFilterTransform predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -118,7 +118,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) return new Cursor(this, input, predicate); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Contracts.CheckValue(predicate, nameof(predicate)); Contracts.CheckParam(n >= 0, nameof(n)); diff --git a/src/Microsoft.ML.Api/TypedCursor.cs b/src/Microsoft.ML.Api/TypedCursor.cs index c7944c7704..c13d277305 100644 --- a/src/Microsoft.ML.Api/TypedCursor.cs +++ b/src/Microsoft.ML.Api/TypedCursor.cs @@ -192,7 +192,7 @@ public IRowCursor GetCursor(Func additionalColumnsPredicate, in { _host.CheckValue(additionalColumnsPredicate, nameof(additionalColumnsPredicate)); - IRandom rand = randomSeed.HasValue ? RandomUtils.Create(randomSeed.Value) : null; + Random rand = randomSeed.HasValue ? 
RandomUtils.Create(randomSeed.Value) : null; var cursor = _data.GetRowCursor(GetDependencies(additionalColumnsPredicate), rand); return new TypedCursor(this, cursor); @@ -212,7 +212,7 @@ public Func GetDependencies(Func additionalColumnsPredicat /// Number of cursors to create /// Random generator to use public IRowCursor[] GetCursorSet(out IRowCursorConsolidator consolidator, - Func additionalColumnsPredicate, int n, IRandom rand) + Func additionalColumnsPredicate, int n, Random rand) { _host.CheckValue(additionalColumnsPredicate, nameof(additionalColumnsPredicate)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Core/Data/IDataView.cs b/src/Microsoft.ML.Core/Data/IDataView.cs index cb0da9a194..d621cb031e 100644 --- a/src/Microsoft.ML.Core/Data/IDataView.cs +++ b/src/Microsoft.ML.Core/Data/IDataView.cs @@ -99,7 +99,7 @@ public interface IDataView : ISchematized /// a getter for an inactive columns will throw. The predicate must be /// non-null. To activate all columns, pass "col => true". /// - IRowCursor GetRowCursor(Func needCol, IRandom rand = null); + IRowCursor GetRowCursor(Func needCol, Random rand = null); /// /// This constructs a set of parallel batch cursors. The value n is a recommended limit @@ -109,7 +109,7 @@ public interface IDataView : ISchematized /// an implementation can return a different number of cursors. /// /// The cursors should return the same data as returned through - /// , except partitioned: no two cursors + /// , except partitioned: no two cursors /// should return the "same" row as would have been returned through the regular serial cursor, /// but all rows should be returned by exactly one of the cursors returned from this cursor. 
/// The cursors can have their values reconciled downstream through the use of the @@ -123,7 +123,7 @@ public interface IDataView : ISchematized /// An instance /// IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func needCol, int n, IRandom rand = null); + Func needCol, int n, Random rand = null); } /// diff --git a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs index eb0d57845c..3f93005544 100644 --- a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs +++ b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs @@ -94,7 +94,7 @@ public interface IHost : IHostEnvironment /// The random number generator issued to this component. Note that random number /// generators are NOT thread safe. /// - IRandom Rand { get; } + Random Rand { get; } /// /// Signal to stop exection in this host and all its children. diff --git a/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs b/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs index 3e27ce2516..4dfd92b11f 100644 --- a/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs +++ b/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs @@ -368,7 +368,7 @@ public ConsoleEnvironment(int? seed = null, bool verbose = false, /// Concurrency level. Set to 1 to run single-threaded. Set to 0 to pick automatically. /// Text writer to print normal messages to. /// Text writer to print error messages to. 
- private ConsoleEnvironment(IRandom rand, bool verbose = false, + private ConsoleEnvironment(Random rand, bool verbose = false, MessageSensitivity sensitivity = MessageSensitivity.All, int conc = 0, TextWriter outWriter = null, TextWriter errWriter = null) : base(rand, verbose, conc, nameof(ConsoleEnvironment)) @@ -401,7 +401,7 @@ protected override IFileHandle CreateTempFileCore(IHostEnvironment env, string s return base.CreateTempFileCore(env, suffix, "TLC_" + prefix); } - protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? conc) { Contracts.AssertValue(rand); Contracts.AssertValueOrNull(parentFullName); @@ -472,7 +472,7 @@ public void Dispose() private sealed class Host : HostBase { - public Host(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + public Host(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? conc) : base(source, shortName, parentFullName, rand, verbose, conc) { IsCancelled = source.IsCancelled; @@ -494,7 +494,7 @@ protected override IPipe CreatePipe(ChannelProviderBase pare return new Pipe(parent, name, GetDispatchDelegate()); } - protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? 
conc) { return new Host(source, shortName, parentFullName, rand, verbose, conc); } diff --git a/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs b/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs index 31ab23e28f..0641d28369 100644 --- a/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs +++ b/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs @@ -107,12 +107,12 @@ public abstract class HostBase : HostEnvironmentBase, IHost { public override int Depth { get; } - public IRandom Rand => _rand; + public Random Rand => _rand; // We don't have dispose mechanism for hosts, so to let GC collect children hosts we make them WeakReference. private readonly List> _children; - public HostBase(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + public HostBase(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? conc) : base(source, rand, verbose, conc, shortName, parentFullName) { Depth = source.Depth + 1; @@ -139,7 +139,7 @@ public void StopExecution() IHost host; lock (_cancelLock) { - IRandom rand = (seed.HasValue) ? RandomUtils.Create(seed.Value) : RandomUtils.Create(_rand); + Random rand = (seed.HasValue) ? RandomUtils.Create(seed.Value) : RandomUtils.Create(_rand); host = RegisterCore(this, name, Master?.FullName, rand, verbose ?? Verbose, conc ?? _conc); if (!IsCancelled) _children.Add(new WeakReference(host)); @@ -342,7 +342,7 @@ public void RemoveListener(Action listenerFunc) private readonly object _cancelLock; // The random number generator for this host. - private readonly IRandom _rand; + private readonly Random _rand; // A dictionary mapping the type of message to the Dispatcher that gets the strongly typed dispatch delegate. protected readonly ConcurrentDictionary ListenerDict; @@ -367,7 +367,7 @@ public void RemoveListener(Action listenerFunc) /// /// The main constructor. 
/// - protected HostEnvironmentBase(IRandom rand, bool verbose, int conc, + protected HostEnvironmentBase(Random rand, bool verbose, int conc, string shortName = null, string parentFullName = null) : base(shortName, parentFullName, verbose) { @@ -386,7 +386,7 @@ protected HostEnvironmentBase(IRandom rand, bool verbose, int conc, /// /// This constructor is for forking. /// - protected HostEnvironmentBase(HostEnvironmentBase source, IRandom rand, bool verbose, + protected HostEnvironmentBase(HostEnvironmentBase source, Random rand, bool verbose, int? conc, string shortName = null, string parentFullName = null) : base(shortName, parentFullName, verbose) { @@ -433,12 +433,12 @@ public virtual void Dispose() public IHost Register(string name, int? seed = null, bool? verbose = null, int? conc = null) { Contracts.CheckNonEmpty(name, nameof(name)); - IRandom rand = (seed.HasValue) ? RandomUtils.Create(seed.Value) : RandomUtils.Create(_rand); + Random rand = (seed.HasValue) ? RandomUtils.Create(seed.Value) : RandomUtils.Create(_rand); return RegisterCore(this, name, Master?.FullName, rand, verbose ?? Verbose, conc); } protected abstract IHost RegisterCore(HostEnvironmentBase source, string shortName, - string parentFullName, IRandom rand, bool verbose, int? conc); + string parentFullName, Random rand, bool verbose, int? conc); public IFileHandle OpenInputFile(string path) { diff --git a/src/Microsoft.ML.Core/Utilities/Random.cs b/src/Microsoft.ML.Core/Utilities/Random.cs index d5bbf2d4ec..50093698b6 100644 --- a/src/Microsoft.ML.Core/Utilities/Random.cs +++ b/src/Microsoft.ML.Core/Utilities/Random.cs @@ -8,44 +8,42 @@ namespace Microsoft.ML.Runtime { - public interface IRandom + [BestFriend] + internal static class RandomUtils { - /// - /// Generates a Single in the range [0, 1). - /// - Single NextSingle(); - - /// - /// Generates a Double in the range [0, 1). - /// - Double NextDouble(); - - /// - /// Generates an int in the range [0, int.MaxValue]. 
Note that this differs - /// from the contract for System.Random.Next, which claims to never return - /// int.MaxValue. - /// - int Next(); - - /// - /// Generates an int in the range [int.MinValue, int.MaxValue]. - /// - int NextSigned(); - - /// - /// Generates an int in the range [0, limit), unless limit == 0, in which case this advances the generator - /// and returns 0. - /// Throws if limit is less than 0. - /// - int Next(int limit); - } + public static float NextSingle(this Random random) + { + if (random is TauswortheHybrid tauswortheHybrd) + { + return tauswortheHybrd.NextSingle(); + } + + for (; ; ) + { + // Since the largest value that NextDouble() can return rounds to 1 when cast to float, + // we need to protect against returning 1. + var res = (float)random.NextDouble(); + if (res < 1.0f) + return res; + } + } + + public static int NextSigned(this Random random) + { + if (random is TauswortheHybrid tauswortheHybrd) + { + return tauswortheHybrd.NextSigned(); + } + + // Note that, according to the documentation for System.Random, + // this won't ever achieve int.MaxValue, but oh well. + return random.Next(int.MinValue, int.MaxValue); + } - public static class RandomUtils - { public static TauswortheHybrid Create() { // Seed from a system random. - return new TauswortheHybrid(new SysRandom()); + return new TauswortheHybrid(new Random()); } public static TauswortheHybrid Create(int? 
seed) @@ -74,81 +72,17 @@ public static TauswortheHybrid Create(uint seed) return new TauswortheHybrid(state); } - public static TauswortheHybrid Create(IRandom seed) + public static TauswortheHybrid Create(Random seed) { return new TauswortheHybrid(seed); } } - public sealed class SysRandom : IRandom - { - private readonly Random _rnd; - - public SysRandom() - { - _rnd = new Random(); - } - - public SysRandom(int seed) - { - _rnd = new Random(seed); - } - - public static SysRandom Wrap(Random rnd) - { - if (rnd != null) - return new SysRandom(rnd); - return null; - } - - private SysRandom(Random rnd) - { - Contracts.AssertValue(rnd); - _rnd = rnd; - } - - public Single NextSingle() - { - // Since the largest value that NextDouble() can return rounds to 1 when cast to Single, - // we need to protect against returning 1. - for (;;) - { - var res = (Single)_rnd.NextDouble(); - if (res < 1.0f) - return res; - } - } - - public Double NextDouble() - { - return _rnd.NextDouble(); - } - - public int Next() - { - // Note that, according to the documentation for System.Random, - // this won't ever achieve int.MaxValue, but oh well. - return _rnd.Next(); - } - - public int Next(int limit) - { - Contracts.CheckParam(limit >= 0, nameof(limit), "limit must be non-negative"); - return _rnd.Next(limit); - } - - public int NextSigned() - { - // Note that, according to the documentation for System.Random, - // this won't ever achieve int.MaxValue, but oh well. - return _rnd.Next(int.MinValue, int.MaxValue); - } - } - /// /// Tausworthe hybrid random number generator. 
/// - public sealed class TauswortheHybrid : IRandom + [BestFriend] + internal sealed class TauswortheHybrid : Random { public readonly struct State { @@ -204,7 +138,7 @@ public TauswortheHybrid(State state) _z4 = state.U4; } - public TauswortheHybrid(IRandom rng) + public TauswortheHybrid(Random rng) { _z1 = GetSeed(rng); _z2 = GetSeed(rng); @@ -212,14 +146,14 @@ public TauswortheHybrid(IRandom rng) _z4 = GetU(rng); } - private static uint GetU(IRandom rng) + private static uint GetU(Random rng) { return ((uint)rng.Next(0x00010000) << 16) | ((uint)rng.Next(0x00010000)); } - private static uint GetSeed(IRandom rng) + private static uint GetSeed(Random rng) { - for (;;) + for (; ; ) { uint u = GetU(rng); if (u >= 128) @@ -227,19 +161,19 @@ private static uint GetSeed(IRandom rng) } } - public Single NextSingle() + public float NextSingle() { NextState(); return GetSingle(); } - public Double NextDouble() + public override double NextDouble() { NextState(); return GetDouble(); } - public int Next() + public override int Next() { NextState(); uint u = GetUint(); @@ -248,17 +182,35 @@ public int Next() return n; } - public int Next(int limit) + public override int Next(int maxValue) { - Contracts.CheckParam(limit >= 0, nameof(limit), "limit must be non-negative"); + Contracts.CheckParam(maxValue >= 0, nameof(maxValue), "maxValue must be non-negative"); NextState(); uint u = GetUint(); - ulong uu = (ulong)u * (ulong)limit; + ulong uu = (ulong)u * (ulong)maxValue; int res = (int)(uu >> 32); - Contracts.Assert(0 <= res && (res < limit || res == 0)); + Contracts.Assert(0 <= res && (res < maxValue || res == 0)); return res; } + public override int Next(int minValue, int maxValue) + { + Contracts.CheckParam(minValue <= maxValue, nameof(minValue), "minValue must be less than or equal to maxValue."); + + long range = (long)maxValue - minValue; + return (int)((long)(NextDouble() * range) + minValue); + } + + public override void NextBytes(byte[] buffer) + { + 
Contracts.CheckValue(buffer, nameof(buffer)); + + for (int i = 0; i < buffer.Length; i++) + { + buffer[i] = (byte)Next(); + } + } + public int NextSigned() { NextState(); @@ -270,23 +222,23 @@ private uint GetUint() return _z1 ^ _z2 ^ _z3 ^ _z4; } - private Single GetSingle() + private float GetSingle() { - const Single scale = (Single)1 / (1 << 23); + const float scale = (float)1 / (1 << 23); // Drop the low 9 bits so the conversion to Single is exact. Allowing rounding would cause // issues with biasing values and, worse, the possibility of returning exactly 1. uint u = GetUint() >> 9; - Contracts.Assert((uint)(Single)u == u); + Contracts.Assert((uint)(float)u == u); - return (Single)u * scale; + return (float)u * scale; } - private Double GetDouble() + private double GetDouble() { - const Double scale = (Double)1 / (1 << 16) / (1 << 16); + const double scale = (double)1 / (1 << 16) / (1 << 16); uint u = GetUint(); - return (Double)u * scale; + return (double)u * scale; } private void NextState() @@ -315,97 +267,4 @@ public State GetState() return new State(_z1, _z2, _z3, _z4); } } - -#if false // REVIEW: This was written for NN drop out but turned out to be too slow, so I inlined it instead. - public sealed class BooleanSampler - { - public const int CbitRand = 25; - - private readonly IRandom _rand; - private readonly uint _k; // probability of "true" is _k / (1U << _qlog). - private readonly int _qlog; // Number of bits consumed by each call to Sample(). - private readonly int _cv; // Number of calls to Sample() covered by a call to _rand.Next(...). - private readonly uint _mask; // (1U << _qlog) - 1 - - // Mutable state. - private int _c; - private uint _v; - - /// - /// Create a boolean sampler using the given random number generator, quantizing the true rate - /// to cbitQuant bits, assuming that sampling the random number generator is capable of producing - /// cbitRand good bits. 
- /// - /// For example, new BooleanSampler(0.5f, 1, 25, new Random()) will produce a reasonable fair coin flipper. - /// Note that this reduces the parameters, so new BooleanSampler(0.5f, 6, 25, new Random()) will produce - /// the same flipper. In other words, since 0.5 quantized to 6 bits can be reduced to only needing one - /// bit, it reduces cbitQuant to 1. - /// - public static BooleanSampler Create(Single rate, int cbitQuant, IRandom rand) - { - Contracts.Assert(0 < rate && rate < 1); - Contracts.Assert(0 < cbitQuant && cbitQuant <= CbitRand / 2); - - int qlog = cbitQuant; - uint k = (uint)(rate * (1 << qlog)); - if (k == 0) - k = 1; - Contracts.Assert(0 <= k && k < (1U << qlog)); - - while ((k & 1) == 0 && k > 0) - { - qlog--; - k >>= 1; - } - Contracts.Assert(qlog > 0); - uint q = 1U << qlog; - Contracts.Assert(0 < k && k < q); - - int cv = CbitRand / qlog; - Contracts.Assert(cv > 1); - return new BooleanSampler(qlog, k, rand); - } - - private BooleanSampler(int qlog, uint k, IRandom rand) - { - _qlog = qlog; - _k = k; - _rand = rand; - _qlog = qlog; - _cv = CbitRand / _qlog; - _mask = (1U << _qlog) - 1; - } - - public bool Sample() - { - _v >>= _qlog; - if (--_c <= 0) - { - _v = (uint)_rand.Next(1 << (_cv * _qlog)); - _c = _cv; - } - return (_v & _mask) < _k; - } - - public void SampleMany(out uint bits, out int count) - { - uint u = (uint)_rand.Next(1 << (_cv * _qlog)); - count = _cv; - if (_qlog == 1) - { - bits = u; - return; - } - - bits = 0; - for (int i = 0; i < count; i++) - { - bits <<= 1; - if ((u & _mask) < _k) - bits |= 1; - u >>= _qlog; - } - } - } -#endif } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Utilities/ReservoirSampler.cs b/src/Microsoft.ML.Core/Utilities/ReservoirSampler.cs index 8438a2e742..91fc4d3a62 100644 --- a/src/Microsoft.ML.Core/Utilities/ReservoirSampler.cs +++ b/src/Microsoft.ML.Core/Utilities/ReservoirSampler.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. 
// See the LICENSE file in the project root for more information. +using System; using System.Collections.Generic; using System.Linq; using Microsoft.ML.Runtime.Data; @@ -56,7 +57,7 @@ internal sealed class ReservoirSamplerWithoutReplacement : IReservoirSampler< // This array contains a cache of the elements composing the reservoir. private readonly T[] _cache; - private readonly IRandom _rnd; + private readonly Random _rnd; private long _numSampled; private readonly ValueGetter _getter; @@ -67,7 +68,7 @@ internal sealed class ReservoirSamplerWithoutReplacement : IReservoirSampler< public long NumSampled { get { return _numSampled; } } - public ReservoirSamplerWithoutReplacement(IRandom rnd, int size, ValueGetter getter) + public ReservoirSamplerWithoutReplacement(Random rnd, int size, ValueGetter getter) { Contracts.CheckValue(rnd, nameof(rnd)); Contracts.CheckParam(size > 0, nameof(size), "Reservoir size must be positive"); @@ -135,7 +136,7 @@ internal sealed class ReservoirSamplerWithReplacement : IReservoirSampler private readonly T[] _cache; private readonly int[] _counts; - private readonly IRandom _rnd; + private readonly Random _rnd; private long _numSampled; private readonly ValueGetter _getter; @@ -146,7 +147,7 @@ internal sealed class ReservoirSamplerWithReplacement : IReservoirSampler public long NumSampled { get { return _numSampled; } } - public ReservoirSamplerWithReplacement(IRandom rnd, int size, ValueGetter getter) + public ReservoirSamplerWithReplacement(Random rnd, int size, ValueGetter getter) { Contracts.CheckValue(rnd, nameof(rnd)); Contracts.CheckParam(size > 0, nameof(size), "Reservoir size must be positive"); diff --git a/src/Microsoft.ML.Core/Utilities/Stats.cs b/src/Microsoft.ML.Core/Utilities/Stats.cs index 182239adde..26c538084e 100644 --- a/src/Microsoft.ML.Core/Utilities/Stats.cs +++ b/src/Microsoft.ML.Core/Utilities/Stats.cs @@ -20,7 +20,7 @@ internal static class Stats /// Size of range to sample from, between 0 and int.MaxValue^2 
/// Random number generator /// Sampled value - public static long SampleLong(long rangeSize, IRandom rand) + public static long SampleLong(long rangeSize, Random rand) { Contracts.CheckParam(rangeSize > 0, nameof(rangeSize), "rangeSize must be positive."); @@ -51,7 +51,7 @@ public static long SampleLong(long rangeSize, IRandom rand) /// A Random to use for the sampling /// a sample /// uses Joseph L. Leva's algorithm from "A fast normal random number generator", 1992 - public static double SampleFromGaussian(IRandom rand) + public static double SampleFromGaussian(Random rand) { double u; double v; @@ -75,7 +75,7 @@ public static double SampleFromGaussian(IRandom rand) /// The random number generator to use /// Sample from gamma distribution /// Uses Marsaglia and Tsang's fast algorithm - public static double SampleFromGamma(IRandom r, double alpha) + public static double SampleFromGamma(Random r, double alpha) { Contracts.CheckParam(alpha > 0, nameof(alpha), "alpha must be positive"); @@ -111,7 +111,7 @@ public static double SampleFromGamma(IRandom r, double alpha) /// first parameter /// second parameter /// Sample from distribution - public static double SampleFromBeta(IRandom rand, double alpha1, double alpha2) + public static double SampleFromBeta(Random rand, double alpha1, double alpha2) { double gamma1 = SampleFromGamma(rand, alpha1); double gamma2 = SampleFromGamma(rand, alpha2); @@ -124,7 +124,7 @@ public static double SampleFromBeta(IRandom rand, double alpha1, double alpha2) /// Random generator to use /// array of parameters /// array in which to store resulting sample - public static void SampleFromDirichlet(IRandom rand, double[] alphas, double[] result) + public static void SampleFromDirichlet(Random rand, double[] alphas, double[] result) { Contracts.Check(alphas.Length == result.Length, "Dirichlet parameters must have the same dimensionality as sample space."); @@ -141,7 +141,7 @@ public static void SampleFromDirichlet(IRandom rand, double[] 
alphas, double[] r } } - public static int SampleFromPoisson(IRandom rand, double lambda) + public static int SampleFromPoisson(Random rand, double lambda) { if (lambda < 5) { @@ -201,7 +201,7 @@ public static int SampleFromPoisson(IRandom rand, double lambda) // Mean refers to the mu parameter. Scale refers to the b parameter. // https://en.wikipedia.org/wiki/Laplace_distribution - public static Float SampleFromLaplacian(IRandom rand, Float mean, Float scale) + public static Float SampleFromLaplacian(Random rand, Float mean, Float scale) { Float u = rand.NextSingle(); u = u - 0.5f; @@ -220,7 +220,7 @@ public static Float SampleFromLaplacian(IRandom rand, Float mean, Float scale) /// /// /// - public static Float SampleFromCauchy(IRandom rand) + public static Float SampleFromCauchy(Random rand) { return (Float)Math.Tan(Math.PI * (rand.NextSingle() - 0.5)); } @@ -233,7 +233,7 @@ public static Float SampleFromCauchy(IRandom rand) /// Parameter p of binomial /// /// Should be robust for all values of n, p - public static int SampleFromBinomial(IRandom r, int n, double p) + public static int SampleFromBinomial(Random r, int n, double p) { return BinoRand.Next(r, n, p); } @@ -266,7 +266,7 @@ private static double Fc(int k) } } - public static int Next(IRandom rand, int n, double p) + public static int Next(Random rand, int n, double p) { int x; double pin = Math.Min(p, 1 - p); @@ -284,7 +284,7 @@ public static int Next(IRandom rand, int n, double p) // For small n // Inverse transformation algorithm // Described in Kachitvichyanukul and Schmeiser: "Binomial Random Variate Generation" - private static int InvTransform(int n, double p, IRandom rn) + private static int InvTransform(int n, double p, Random rn) { int x = 0; double u = rn.NextDouble(); @@ -308,7 +308,7 @@ private static int InvTransform(int n, double p, IRandom rn) // For large n // Algorithm from W. 
Hormann: "The Generation of Binomial Random Variables" // This is algorithm BTRD - private static int GenerateLarge(int n, double p, IRandom rn) + private static int GenerateLarge(int n, double p, Random rn) { double np = n * p; double q = 1 - p; diff --git a/src/Microsoft.ML.Core/Utilities/Utils.cs b/src/Microsoft.ML.Core/Utilities/Utils.cs index d0f6f6256e..0fcf55a449 100644 --- a/src/Microsoft.ML.Core/Utilities/Utils.cs +++ b/src/Microsoft.ML.Core/Utilities/Utils.cs @@ -534,24 +534,6 @@ public static int[] GetRandomPermutation(Random rand, int size) return res; } - public static int[] GetRandomPermutation(IRandom rand, int size) - { - Contracts.AssertValue(rand); - Contracts.Assert(size >= 0); - - var res = GetIdentityPermutation(size); - Shuffle(rand, res); - return res; - } - - public static void Shuffle(IRandom rand, T[] rgv) - { - Contracts.AssertValue(rand); - Contracts.AssertValue(rgv); - - Shuffle(rand, rgv, 0, rgv.Length); - } - public static void Shuffle(Random rand, T[] rgv) { Contracts.AssertValue(rand); @@ -560,16 +542,6 @@ public static void Shuffle(Random rand, T[] rgv) Shuffle(rand, rgv, 0, rgv.Length); } - public static void Shuffle(IRandom rand, T[] rgv, int min, int lim) - { - Contracts.AssertValue(rand); - Contracts.AssertValue(rgv); - Contracts.Check(0 <= min & min <= lim & lim <= rgv.Length); - - for (int iv = min; iv < lim; iv++) - Swap(ref rgv[iv], ref rgv[iv + rand.Next(lim - iv)]); - } - public static void Shuffle(Random rand, T[] rgv, int min, int lim) { Contracts.AssertValue(rand); diff --git a/src/Microsoft.ML.Data/Data/DataViewUtils.cs b/src/Microsoft.ML.Data/Data/DataViewUtils.cs index 30ba97bc67..4d8752a6fe 100644 --- a/src/Microsoft.ML.Data/Data/DataViewUtils.cs +++ b/src/Microsoft.ML.Data/Data/DataViewUtils.cs @@ -115,7 +115,7 @@ public static int GetThreadCount(IHost host, int num = 0, bool preferOne = false /// the target cardinality of the cursor set. 
/// public static bool TryCreateConsolidatingCursor(out IRowCursor curs, - IDataView view, Func predicate, IHost host, IRandom rand) + IDataView view, Func predicate, IHost host, Random rand) { Contracts.CheckValue(host, nameof(host)); host.CheckValue(view, nameof(view)); diff --git a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs index 1104a36fb1..869f263f3a 100644 --- a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs +++ b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs @@ -519,7 +519,7 @@ public OneRowDataView(IHostEnvironment env, IRow row) _row = row; } - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); @@ -527,7 +527,7 @@ public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) return new Cursor(_host, this, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs index 473ee0d8d1..172a6d695e 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs @@ -1234,7 +1234,7 @@ private TableOfContentsEntry CreateRowIndexEntry(string rowIndexName) return entry; } - private IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + private IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { if (rand != null && _randomShufflePoolRows > 0) { @@ -1247,7 +1247,7 @@ private IRowCursor GetRowCursorCore(Func predicate, IRandom rand = nu return new Cursor(this, predicate, rand); } 
- public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -1255,7 +1255,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) } public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -1293,7 +1293,7 @@ public override long Batch get { return 0; } } - public Cursor(BinaryLoader parent, Func predicate, IRandom rand) + public Cursor(BinaryLoader parent, Func predicate, Random rand) : base(parent._host) { _parent = parent; diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs index 2b434e8e3c..9a47f932de 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs @@ -741,7 +741,7 @@ private int RowsPerBlockHeuristic(IDataView data, ColumnCodec[] actives) // First get the cursor. HashSet active = new HashSet(actives.Select(cc => cc.SourceIndex)); - IRandom rand = data.CanShuffle ? new TauswortheHybrid(_host.Rand) : null; + Random rand = data.CanShuffle ? new TauswortheHybrid(_host.Rand) : null; // Get the estimators. 
EstimatorDelegate del = EstimatorCore; MethodInfo methInfo = del.GetMethodInfo().GetGenericMethodDefinition(); diff --git a/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs index 222d5c994a..e281cd0876 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs @@ -568,7 +568,7 @@ private static string GenerateTag(int index) public ITransposeSchema TransposeSchema { get; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -576,7 +576,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) } public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs index 4ccda7c147..4efb11f0fe 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs @@ -293,12 +293,12 @@ public void Save(ModelSaveContext ctx) return null; } - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { return new Cursor(_host, this, _files, needCol, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { consolidator = null; var cursor = new Cursor(_host, this, _files, needCol, rand); @@ -376,7 +376,7 @@ 
private sealed class Cursor : RootCursorBase, IRowCursor private IEnumerator _fileOrder; - public Cursor(IChannelProvider provider, PartitionedFileLoader parent, IMultiStreamSource files, Func predicate, IRandom rand) + public Cursor(IChannelProvider provider, PartitionedFileLoader parent, IMultiStreamSource files, Func predicate, Random rand) : base(provider) { Contracts.AssertValue(parent); @@ -624,7 +624,7 @@ private bool IsSubColumn(int col) private int SubColumnCount => Schema.Count - _parent._srcDirIndex.Length; - private IEnumerable CreateFileOrder(IRandom rand) + private IEnumerable CreateFileOrder(Random rand) { if (rand == null) { diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs index a2fa37d08f..225dfe1e98 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs @@ -1364,7 +1364,7 @@ public BoundLoader(TextLoader reader, IMultiStreamSource files) public Schema Schema => _reader._bindings.AsSchema; - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -1373,7 +1373,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) } public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index 6490b91483..0954bae382 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -668,7 +668,7 @@ public VectorType GetSlotType(int 
col) return _header.RowCount; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -677,7 +677,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) return new Cursor(this, predicate); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); if (HasRowData) diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index 64f158a603..3ad08e94f2 100644 --- a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -146,7 +146,7 @@ private void CheckSchemaConsistency() return sum; } - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); if (rand == null || !_canShuffle) @@ -154,7 +154,7 @@ public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) return new RandCursor(this, needCol, rand, _counts); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; return new IRowCursor[] { GetRowCursor(predicate, rand) }; @@ -301,10 +301,10 @@ private sealed class RandCursor : CursorBase { private readonly IRowCursor[] _cursorSet; private readonly MultinomialWithoutReplacementSampler _sampler; - private readonly IRandom _rand; + private readonly Random _rand; private int _currentSourceIndex; - 
public RandCursor(AppendRowsDataView parent, Func needCol, IRandom rand, int[] counts) + public RandCursor(AppendRowsDataView parent, Func needCol, Random rand, int[] counts) : base(parent) { Ch.AssertValue(needCol); @@ -397,7 +397,7 @@ private sealed class MultinomialWithoutReplacementSampler private const int BatchSize = 1000; private readonly int[] _rowsLeft; - private readonly IRandom _rand; + private readonly Random _rand; private readonly int[] _batch; private readonly IExceptionContext _ectx; @@ -405,7 +405,7 @@ private sealed class MultinomialWithoutReplacementSampler private int _batchPos; private int _totalLeft; - public MultinomialWithoutReplacementSampler(IExceptionContext context, int[] counts, IRandom rand) + public MultinomialWithoutReplacementSampler(IExceptionContext context, int[] counts, Random rand) { Contracts.AssertValue(context); _ectx = context; diff --git a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs index acd69db40d..c11d49923c 100644 --- a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs +++ b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs @@ -226,7 +226,7 @@ public DataView(IHostEnvironment env, ArrayDataViewBuilder builder, int rowCount _rowCount = rowCount; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -234,7 +234,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) } public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -256,7 +256,7 @@ public override long Batch get { return 0; } } - public RowCursor(IChannelProvider provider, DataView view, Func predicate, IRandom 
rand) + public RowCursor(IChannelProvider provider, DataView view, Func predicate, Random rand) : base(provider) { Ch.AssertValue(view); diff --git a/src/Microsoft.ML.Data/DataView/CacheDataView.cs b/src/Microsoft.ML.Data/DataView/CacheDataView.cs index 3674ec40ca..6d670db5ef 100644 --- a/src/Microsoft.ML.Data/DataView/CacheDataView.cs +++ b/src/Microsoft.ML.Data/DataView/CacheDataView.cs @@ -203,7 +203,7 @@ public int MapInputToCacheColumnIndex(int inputIndex) return _rowCount; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -221,7 +221,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) /// Returns a permutation or null. This function will return null if either /// is null, or if the row count of this cache exceeds the maximum array size. /// - private int[] GetPermutationOrNull(IRandom rand) + private int[] GetPermutationOrNull(Random rand) { if (rand == null) return null; @@ -235,7 +235,7 @@ private int[] GetPermutationOrNull(IRandom rand) return Utils.GetRandomPermutation(rand, (int)_rowCount); } - private IRowCursor GetRowCursorWaiterCore(TWaiter waiter, Func predicate, IRandom rand) + private IRowCursor GetRowCursorWaiterCore(TWaiter waiter, Func predicate, Random rand) where TWaiter : struct, IWaiter { _host.AssertValue(predicate); @@ -248,7 +248,7 @@ private IRowCursor GetRowCursorWaiterCore(TWaiter waiter, Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -279,7 +279,7 @@ public IRowCursor CreateCursor(IChannelProvider provider, IRowCursor[] inputs) } } - private IRowCursor[] GetRowCursorSetWaiterCore(TWaiter waiter, Func predicate, int n, IRandom rand) + private IRowCursor[] GetRowCursorSetWaiterCore(TWaiter waiter, Func predicate, int n, 
Random rand) where TWaiter : struct, IWaiter { _host.AssertValue(predicate); diff --git a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs index 6e0d779958..90aedca48a 100644 --- a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs +++ b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs @@ -28,14 +28,14 @@ public EmptyDataView(IHostEnvironment env, Schema schema) public long? GetRowCount() => 0; - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); return new Cursor(_host, Schema, needCol); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs index d778ddbc11..87173d71d8 100644 --- a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs +++ b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs @@ -106,7 +106,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -118,7 +118,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git 
a/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs b/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs index 1172142972..7d93533625 100644 --- a/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs +++ b/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs @@ -27,13 +27,13 @@ public OpaqueDataView(IDataView source) return _source.GetRowCount(); } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { return _source.GetRowCursor(predicate, rand); } public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { return _source.GetRowCursorSet(out consolidator, predicate, n, rand); } diff --git a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs index 2de71f0a78..e472f6546f 100644 --- a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs +++ b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs @@ -178,14 +178,14 @@ private Func GetActiveOutputColumns(bool[] active) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Func predicateInput; var active = GetActive(predicate, out predicateInput); return new RowCursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/DataView/Transposer.cs b/src/Microsoft.ML.Data/DataView/Transposer.cs index eae5b4fe06..2befe3a726 100644 --- 
a/src/Microsoft.ML.Data/DataView/Transposer.cs +++ b/src/Microsoft.ML.Data/DataView/Transposer.cs @@ -265,12 +265,12 @@ private ISlotCursor GetSlotCursorCore(int col) public bool CanShuffle { get { return _view.CanShuffle; } } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { return _view.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { return _view.GetRowCursorSet(out consolidator, predicate, n, rand); } @@ -867,7 +867,7 @@ private void OutputColumnToSplitterIndices(int col, out int splitInd, out int sp splitCol = _colToSplitCol[col]; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); bool[] activeSplitters; @@ -875,7 +875,7 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) return new Cursor(_host, this, _input.GetRowCursor(srcPred, rand), predicate, activeSplitters); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -891,7 +891,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun /// Given a possibly null predicate for this data view, produce the dependency predicate for the sources, /// as well as a list of all the splitters for which we should produce rowsets. /// - /// The predicate input into the method. + /// The predicate input into the method. 
/// A boolean indicator array of length equal to the number of splitters, /// indicating whether that splitter has any active columns in its outputs or not /// The predicate to use when constructing the row cursor from the source @@ -1536,7 +1536,7 @@ public SlotDataView(IHostEnvironment env, ITransposeDataView data, int col) return valueCount; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); return Utils.MarshalInvoke(GetRowCursor, _type.ItemType.RawType, predicate(0)); @@ -1547,7 +1547,7 @@ private IRowCursor GetRowCursor(bool active) return new Cursor(this, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); consolidator = null; diff --git a/src/Microsoft.ML.Data/DataView/ZipDataView.cs b/src/Microsoft.ML.Data/DataView/ZipDataView.cs index e481a96504..6bd6356b2a 100644 --- a/src/Microsoft.ML.Data/DataView/ZipDataView.cs +++ b/src/Microsoft.ML.Data/DataView/ZipDataView.cs @@ -71,7 +71,7 @@ private ZipDataView(IHost host, IDataView[] sources) return min; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -99,7 +99,7 @@ private IRowCursor GetMinimumCursor(IDataView dv) return dv.GetRowCursor(x => false); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; return new IRowCursor[] { 
GetRowCursor(predicate, rand) }; diff --git a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs index ec12f9bc52..dbe5cafa20 100644 --- a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs +++ b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs @@ -254,7 +254,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -266,7 +266,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Evaluators/AucAggregator.cs b/src/Microsoft.ML.Data/Evaluators/AucAggregator.cs index 87bbf897d6..f2fbe9d238 100644 --- a/src/Microsoft.ML.Data/Evaluators/AucAggregator.cs +++ b/src/Microsoft.ML.Data/Evaluators/AucAggregator.cs @@ -41,7 +41,7 @@ internal abstract class AucAggregatorBase : AucAggregatorBase protected IEnumerable PosSample; protected IEnumerable NegSample; - protected AucAggregatorBase(IRandom rand, int reservoirSize) + protected AucAggregatorBase(Random rand, int reservoirSize) { Contracts.Assert(reservoirSize >= -1); @@ -119,7 +119,7 @@ public override Double ComputeWeightedAuc(out Double unweighted) internal sealed class UnweightedAucAggregator : AucAggregatorBase { - public UnweightedAucAggregator(IRandom rand, int reservoirSize) + public UnweightedAucAggregator(Random rand, int reservoirSize) : base(rand, reservoirSize) { } @@ -220,7 +220,7 @@ public struct AucInfo private Single _weight; - public 
WeightedAucAggregator(IRandom rand, int reservoirSize) + public WeightedAucAggregator(Random rand, int reservoirSize) : base(rand, reservoirSize) { } @@ -368,7 +368,7 @@ private protected abstract class AuPrcAggregatorBase : AuPrcAggregatorBase { protected readonly ReservoirSamplerWithoutReplacement Reservoir; - protected AuPrcAggregatorBase(IRandom rand, int reservoirSize) + protected AuPrcAggregatorBase(Random rand, int reservoirSize) { Contracts.Assert(reservoirSize > 0); @@ -401,7 +401,7 @@ public struct Info public Single Label; } - public UnweightedAuPrcAggregator(IRandom rand, int reservoirSize) + public UnweightedAuPrcAggregator(Random rand, int reservoirSize) : base(rand, reservoirSize) { } @@ -475,7 +475,7 @@ public struct Info public Single Weight; } - public WeightedAuPrcAggregator(IRandom rand, int reservoirSize) + public WeightedAuPrcAggregator(Random rand, int reservoirSize) : base(rand, reservoirSize) { } diff --git a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs index a7aa5bc434..ef6d005e38 100644 --- a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs @@ -601,12 +601,12 @@ public void Save(ModelSaveContext ctx) return _transform.GetRowCount(); } - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { return _transform.GetRowCursor(needCol, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { return _transform.GetRowCursorSet(out consolidator, needCol, n, rand); } diff --git a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs index e0d7d5e9ac..bcd12302a5 100644 --- 
a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs @@ -114,7 +114,7 @@ private static bool[] GetActive(BindingsBase bindings, Func predicate /// protected abstract bool WantParallelCursors(Func predicate); - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Contracts.AssertValue(predicate); Contracts.AssertValueOrNull(rand); @@ -127,7 +127,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando return new RowCursor(Host, this, input, active, predicateMapper); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index f6947cd2f9..41e984f522 100644 --- a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -237,7 +237,7 @@ private static Func CreatePredicate(RoleMappedData data, CursOpt opt, /// Create a row cursor for the RoleMappedData with the indicated standard columns active. /// This does not verify that the columns exist, but merely activates the ones that do exist. 
/// - public static IRowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, IRandom rand, IEnumerable extraCols = null) + public static IRowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, Random rand, IEnumerable extraCols = null) => data.Data.GetRowCursor(CreatePredicate(data, opt, extraCols), rand); /// @@ -245,7 +245,7 @@ public static IRowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, /// This does not verify that the columns exist, but merely activates the ones that do exist. /// public static IRowCursor[] CreateRowCursorSet(this RoleMappedData data, out IRowCursorConsolidator consolidator, - CursOpt opt, int n, IRandom rand, IEnumerable extraCols = null) + CursOpt opt, int n, Random rand, IEnumerable extraCols = null) => data.Data.GetRowCursorSet(out consolidator, CreatePredicate(data, opt, extraCols), n, rand); private static void AddOpt(HashSet cols, ColumnInfo info) @@ -428,7 +428,7 @@ protected TrainingCursorBase(IRowCursor input, Action signal) _signal = signal; } - protected static IRowCursor CreateCursor(RoleMappedData data, CursOpt opt, IRandom rand, params int[] extraCols) + protected static IRowCursor CreateCursor(RoleMappedData data, CursOpt opt, Random rand, params int[] extraCols) { Contracts.AssertValue(data); Contracts.AssertValueOrNull(rand); @@ -531,13 +531,13 @@ private void SignalCore(CursOpt opt) } /// - /// The typed analog to . + /// The typed analog to . /// /// Non-null if we are requesting a shuffled cursor. /// The extra columns to activate on the row cursor /// in addition to those required by the factory's options. /// The wrapping typed cursor. - public TCurs Create(IRandom rand = null, params int[] extraCols) + public TCurs Create(Random rand = null, params int[] extraCols) { CursOpt opt; lock (_lock) @@ -558,7 +558,7 @@ public TCurs Create(IRandom rand = null, params int[] extraCols) /// in addition to those required by the factory's options. /// The cursor set. 
Note that this needn't necessarily be of size /// . - public TCurs[] CreateSet(int n, IRandom rand = null, params int[] extraCols) + public TCurs[] CreateSet(int n, Random rand = null, params int[] extraCols) { CursOpt opt; lock (_lock) @@ -653,7 +653,7 @@ public class StandardScalarCursor : TrainingCursorBase public ulong Group; public UInt128 Id; - public StandardScalarCursor(RoleMappedData data, CursOpt opt, IRandom rand = null, params int[] extraCols) + public StandardScalarCursor(RoleMappedData data, CursOpt opt, Random rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { } @@ -743,7 +743,7 @@ public class FeatureFloatVectorCursor : StandardScalarCursor public VBuffer Features; public FeatureFloatVectorCursor(RoleMappedData data, CursOpt opt = CursOpt.Features, - IRandom rand = null, params int[] extraCols) + Random rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { } @@ -811,7 +811,7 @@ public class FloatLabelCursor : FeatureFloatVectorCursor public float Label; public FloatLabelCursor(RoleMappedData data, CursOpt opt = CursOpt.Label, - IRandom rand = null, params int[] extraCols) + Random rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { } @@ -880,7 +880,7 @@ public class MultiClassLabelCursor : FeatureFloatVectorCursor public int Label; public MultiClassLabelCursor(int classCount, RoleMappedData data, CursOpt opt = CursOpt.Label, - IRandom rand = null, params int[] extraCols) + Random rand = null, params int[] extraCols) : this(classCount, CreateCursor(data, opt, rand, extraCols), data, opt) { } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index 204421d1ad..486e921f54 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -631,7 +631,7 @@ public 
SelectColumnsDataTransform(IHostEnvironment env, ColumnSelectingTransform public long? GetRowCount() => Source.GetRowCount(); - public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) + public IRowCursor GetRowCursor(Func needCol, Random rand = null) { _host.AssertValue(needCol, nameof(needCol)); _host.AssertValueOrNull(rand); @@ -645,7 +645,7 @@ public IRowCursor GetRowCursor(Func needCol, IRandom rand = null) return new RowCursor(_host, _mapper, inputRowCursor, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index 85d91e4301..0a7a95370a 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -332,7 +332,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -344,7 +344,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/NAFilter.cs b/src/Microsoft.ML.Data/Transforms/NAFilter.cs index 280546f785..25f6242064 100644 --- a/src/Microsoft.ML.Data/Transforms/NAFilter.cs +++ 
b/src/Microsoft.ML.Data/Transforms/NAFilter.cs @@ -204,7 +204,7 @@ private static bool TestType(ColumnType type) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -216,7 +216,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/NopTransform.cs b/src/Microsoft.ML.Data/Transforms/NopTransform.cs index 2384c4abb4..d5ea3bdc8e 100644 --- a/src/Microsoft.ML.Data/Transforms/NopTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/NopTransform.cs @@ -109,12 +109,12 @@ public bool CanShuffle return Source.GetRowCount(); } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { return Source.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { return Source.GetRowCursorSet(out consolidator, predicate, n, rand); } diff --git a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs index 7fddf36dbd..44537b0647 100644 --- a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs @@ -150,7 +150,7 @@ public virtual void Save(ModelSaveContext ctx) return Source.GetRowCount(); } - public IRowCursor[] GetRowCursorSet(out 
IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -158,7 +158,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun return new IRowCursor[] { GetRowCursor(predicate, rand) }; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs index 59542b13aa..54a1e42161 100644 --- a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs @@ -204,7 +204,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -216,7 +216,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 4a74df460e..9276f5c08e 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -223,7 +223,7 @@ internal static bool CanShuffleAll(ISchema schema) /// /// Utility to take a cursor, 
and get a shuffled version of this cursor. /// - public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRows, IRowCursor cursor, IRandom rand) + public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRows, IRowCursor cursor, Random rand) { Contracts.CheckValue(provider, nameof(provider)); @@ -249,7 +249,7 @@ public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRo return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -257,7 +257,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando // REVIEW: This is slightly interesting. Our mechanism for inducing // randomness in the source cursor is this Random object, but this can change // from release to release. The correct solution, it seems, is to instead have - // randomness injected into cursor creation by using IRandom (or something akin + // randomness injected into cursor creation by using Random (or something akin // to it), vs. just a straight system Random. // The desired functionality is to support some permutations of whether we allow @@ -275,10 +275,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando bool shouldShuffleMe = _forceShuffle || rand != null; bool shouldShuffleSource = _forceShuffleSource || (!_poolOnly && rand != null); - IRandom myRandom = rand ?? (shouldShuffleMe || shouldShuffleSource ? RandomUtils.Create(_forceShuffleSeed) : null); + Random myRandom = rand ?? (shouldShuffleMe || shouldShuffleSource ? RandomUtils.Create(_forceShuffleSeed) : null); if (shouldShuffleMe) rand = myRandom; - IRandom sourceRand = shouldShuffleSource ? RandomUtils.Create(myRandom) : null; + Random sourceRand = shouldShuffleSource ? 
RandomUtils.Create(myRandom) : null; var input = _subsetInput.GetRowCursor(predicate, sourceRand); // If rand is null (so we're not doing pool shuffling) or number of pool rows is 1 @@ -290,7 +290,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -466,7 +466,7 @@ public void Fetch(int idx, ref T value) private readonly int _poolRows; private readonly IRowCursor _input; - private readonly IRandom _rand; + private readonly Random _rand; // This acts as mapping from the "circular" index to the actual index within the pipe. private readonly int[] _pipeIndices; @@ -504,7 +504,7 @@ public override long Batch get { return 0; } } - public RowCursor(IChannelProvider provider, int poolRows, IRowCursor input, IRandom rand) + public RowCursor(IChannelProvider provider, int poolRows, IRowCursor input, Random rand) : base(provider) { Ch.AssertValue(input); diff --git a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs index bf3331d0a5..c684dc139c 100644 --- a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs @@ -188,7 +188,7 @@ public override void Save(ModelSaveContext ctx) return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate); Host.AssertValueOrNull(rand); @@ -199,7 +199,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { 
Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Transforms/TransformBase.cs b/src/Microsoft.ML.Data/Transforms/TransformBase.cs index 83dce385ad..e517710543 100644 --- a/src/Microsoft.ML.Data/Transforms/TransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/TransformBase.cs @@ -51,7 +51,7 @@ protected TransformBase(IHost host, IDataView input) public abstract Schema Schema { get; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -84,10 +84,10 @@ public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) /// /// Create a single (non-parallel) row cursor. /// - protected abstract IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null); + protected abstract IRowCursor GetRowCursorCore(Func predicate, Random rand = null); public abstract IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null); + Func predicate, int n, Random rand = null); } /// @@ -718,7 +718,7 @@ protected virtual bool WantParallelCursors(Func predicate) return _bindings.AnyNewColumnsActive(predicate); } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -730,7 +730,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Data/Utilities/LocalEnvironment.cs 
b/src/Microsoft.ML.Data/Utilities/LocalEnvironment.cs index f53f6f3558..73e4c03e9f 100644 --- a/src/Microsoft.ML.Data/Utilities/LocalEnvironment.cs +++ b/src/Microsoft.ML.Data/Utilities/LocalEnvironment.cs @@ -71,7 +71,7 @@ public void RemoveListener(Action listener) protected override IFileHandle CreateTempFileCore(IHostEnvironment env, string suffix = null, string prefix = null) => base.CreateTempFileCore(env, suffix, "Local_" + prefix); - protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? conc) { Contracts.AssertValue(rand); Contracts.AssertValueOrNull(parentFullName); @@ -105,7 +105,7 @@ public override CompositionContainer GetCompositionContainer() private sealed class Host : HostBase { - public Host(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + public Host(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? conc) : base(source, shortName, parentFullName, rand, verbose, conc) { IsCancelled = source.IsCancelled; @@ -127,7 +127,7 @@ protected override IPipe CreatePipe(ChannelProviderBase pare return new Pipe(parent, name, GetDispatchDelegate()); } - protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, IRandom rand, bool verbose, int? conc) + protected override IHost RegisterCore(HostEnvironmentBase source, string shortName, string parentFullName, Random rand, bool verbose, int? 
conc) { return new Host(source, shortName, parentFullName, rand, verbose, conc); } diff --git a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs index 84a70a6ee4..ad854e975f 100644 --- a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs @@ -6,6 +6,7 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Ensemble.Selector; using Microsoft.ML.Runtime.Ensemble.Selector.FeatureSelector; +using System; [assembly: LoadableClass(typeof(AllFeatureSelector), null, typeof(SignatureEnsembleFeatureSelector), AllFeatureSelector.UserName, AllFeatureSelector.LoadName)] @@ -21,7 +22,7 @@ public AllFeatureSelector(IHostEnvironment env) { } - public Subset SelectFeatures(RoleMappedData data, IRandom rand) + public Subset SelectFeatures(RoleMappedData data, Random rand) { return new Subset(data); } diff --git a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs index c0c9b8968f..b3650dd139 100644 --- a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs @@ -45,7 +45,7 @@ public RandomFeatureSelector(IHostEnvironment env, Arguments args) "The feature proportion for RandomFeatureSelector should be greater than 0 and lesser than 1"); } - public Subset SelectFeatures(RoleMappedData data, IRandom rand) + public Subset SelectFeatures(RoleMappedData data, Random rand) { _host.CheckValue(data, nameof(data)); data.CheckFeatureFloatVector(); diff --git a/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs index 99e90c5c01..c53f7d244e 100644 --- a/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs +++ 
b/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs @@ -4,12 +4,13 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; +using System; namespace Microsoft.ML.Runtime.Ensemble.Selector { public interface IFeatureSelector { - Subset SelectFeatures(RoleMappedData data, IRandom rand); + Subset SelectFeatures(RoleMappedData data, Random rand); } public delegate void SignatureEnsembleFeatureSelector(); diff --git a/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs b/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs index 2a0f088219..704bcd919b 100644 --- a/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs @@ -12,8 +12,8 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector public interface ISubsetSelector { void Initialize(RoleMappedData data, int size, int batchSize, Single validationDatasetProportion); - IEnumerable GetBatches(IRandom rand); - IEnumerable GetSubsets(Batch batch, IRandom rand); + IEnumerable GetBatches(Random rand); + IEnumerable GetSubsets(Batch batch, Random rand); RoleMappedData GetTestData(Subset subset, Batch batch); } diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs index 98e6eeb8eb..483d6a5b0f 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs @@ -2,11 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System.Collections.Generic; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Ensemble.Selector; using Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector; using Microsoft.ML.Runtime.EntryPoints; +using System; +using System.Collections.Generic; [assembly: LoadableClass(typeof(AllInstanceSelector), typeof(AllInstanceSelector.Arguments), typeof(SignatureEnsembleDataSelector), AllInstanceSelector.UserName, AllInstanceSelector.LoadName)] @@ -31,7 +32,7 @@ public AllInstanceSelector(IHostEnvironment env, Arguments args) { } - public override IEnumerable GetSubsets(Batch batch, IRandom rand) + public override IEnumerable GetSubsets(Batch batch, Random rand) { for (int i = 0; i < Size; i++) yield return FeatureSelector.SelectFeatures(batch.TrainInstances, rand); diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs index 78564618db..6e4d2a6b27 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs @@ -52,9 +52,9 @@ public void Initialize(RoleMappedData data, int size, int batchSize, Single vali ValidationDatasetProportion = validationDatasetProportion; } - public abstract IEnumerable GetSubsets(Batch batch, IRandom rand); + public abstract IEnumerable GetSubsets(Batch batch, Random rand); - public IEnumerable GetBatches(IRandom rand) + public IEnumerable GetBatches(Random rand) { Host.Assert(Data != null, "Must call Initialize first!"); Host.AssertValue(rand); diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs index fd045f71a8..93cc361a9e 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs @@ -2,13 +2,14 @@ // The .NET Foundation 
licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System.Collections.Generic; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Ensemble.Selector; using Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector; using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Transforms; +using System; +using System.Collections.Generic; [assembly: LoadableClass(typeof(BootstrapSelector), typeof(BootstrapSelector.Arguments), typeof(SignatureEnsembleDataSelector), BootstrapSelector.UserName, BootstrapSelector.LoadName)] @@ -41,7 +42,7 @@ public BootstrapSelector(IHostEnvironment env, Arguments args) { } - public override IEnumerable GetSubsets(Batch batch, IRandom rand) + public override IEnumerable GetSubsets(Batch batch, Random rand) { for (int i = 0; i < Size; i++) { diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs index 0768b2a79e..7ddf7bb9d7 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs @@ -34,7 +34,7 @@ public RandomPartitionSelector(IHostEnvironment env, Arguments args) { } - public override IEnumerable GetSubsets(Batch batch, IRandom rand) + public override IEnumerable GetSubsets(Batch batch, Random rand) { string name = Data.Data.Schema.GetTempColumnName(); var args = new GenerateNumberTransform.Arguments(); diff --git a/src/Microsoft.ML.FastTree/SumupPerformanceCommand.cs b/src/Microsoft.ML.FastTree/SumupPerformanceCommand.cs index 14fcad759e..fb48563dd6 100644 --- a/src/Microsoft.ML.FastTree/SumupPerformanceCommand.cs +++ b/src/Microsoft.ML.FastTree/SumupPerformanceCommand.cs @@ -183,7 +183,7 @@ private IntArray[] CreateRandomIntArrays(IChannel ch) } } - private IEnumerator Geometric(double p, IRandom rgen) + 
private IEnumerator Geometric(double p, Random rgen) { double denom = Math.Log(1 - p); @@ -209,7 +209,7 @@ private IEnumerator Geometric(double p, IRandom rgen) } } - private IEnumerable CreateDocIndicesCore(double sparsity, IRandom rgen) + private IEnumerable CreateDocIndicesCore(double sparsity, Random rgen) { _host.Assert(0 < sparsity && sparsity < 1); int remaining = _len; @@ -227,7 +227,7 @@ private IEnumerable CreateDocIndicesCore(double sparsity, IRandom rgen) } } - private IEnumerable CreateDocIndices(double sparsity, IRandom rgen) + private IEnumerable CreateDocIndices(double sparsity, Random rgen) { _host.Assert(0 <= sparsity && sparsity <= 1); if (sparsity == 1) @@ -237,7 +237,7 @@ private IEnumerable CreateDocIndices(double sparsity, IRandom rgen) return CreateDocIndicesCore(sparsity, rgen); } - private void InitSumupInputData(SumupInputData data, double sparsity, IRandom rgen) + private void InitSumupInputData(SumupInputData data, double sparsity, Random rgen) { int count = 0; foreach (int d in CreateDocIndices(sparsity, rgen)) diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs index 41084bd5ab..389a8be9c2 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs @@ -875,7 +875,7 @@ public static void Initialize(IHost host, int numThreads, IChannel ch, FeatureFl KMeansUtils.ParallelMapReduce( numThreads, host, cursorFactory, initializationState.RowIndexGetter, (ref float[] weights) => weights = new float[totalSamples], - (ref VBuffer point, int pointRowIndex, float[] weights, IRandom rand) => + (ref VBuffer point, int pointRowIndex, float[] weights, Random rand) => { int bestCluster; float discardBestWeight; @@ -887,7 +887,7 @@ public static void Initialize(IHost host, int numThreads, IChannel ch, FeatureFl #endif weights[bestCluster]++; }, - (float[][] workStateWeights, IRandom rand, ref 
float[] weights) => + (float[][] workStateWeights, Random rand, ref float[] weights) => { weights = new float[totalSamples]; for (int i = 0; i < workStateWeights.Length; i++) @@ -902,8 +902,8 @@ public static void Initialize(IHost host, int numThreads, IChannel ch, FeatureFl KMeansUtils.ParallelMapReduce( numThreads, host, cursorFactory, (FeatureFloatVectorCursor cur) => -1, (ref float[] weights) => weights = new float[totalSamples], - (ref VBuffer point, int discard, float[] weights, IRandom rand) => weights[KMeansUtils.FindBestCluster(in point, clusters, clustersL2s)]++, - (float[][] workStateWeights, IRandom rand, ref float[] weights) => + (ref VBuffer point, int discard, float[] weights, Random rand) => weights[KMeansUtils.FindBestCluster(in point, clusters, clustersL2s)]++, + (float[][] workStateWeights, Random rand, ref float[] weights) => { weights = new float[totalSamples]; for (int i = 0; i < workStateWeights.Length; i++) @@ -1572,7 +1572,7 @@ public static RowStats ParallelWeightedReservoirSample( else heap.Clear(); }, - (ref VBuffer point, int pointRowIndex, Heap heap, IRandom rand) => + (ref VBuffer point, int pointRowIndex, Heap heap, Random rand) => { // We use distance as a proxy for 'is the same point'. 
By excluding // all points that lie within a very small distance of our current set of @@ -1608,7 +1608,7 @@ public static RowStats ParallelWeightedReservoirSample( Utils.Swap(ref wRow.Point, ref point); heap.Add(wRow); }, - (Heap[] heaps, IRandom rand, ref Heap finalHeap) => + (Heap[] heaps, Random rand, ref Heap finalHeap) => { host.Assert(finalHeap == null); finalHeap = new Heap((x, y) => x.Weight > y.Weight, numSamples); @@ -1647,13 +1647,13 @@ public static RowStats ParallelWeightedReservoirSample( public delegate void InitAction(ref TPartitionState val); public delegate int RowIndexGetter(FeatureFloatVectorCursor cur); - public delegate void MapAction(ref VBuffer point, int rowIndex, TPartitionState state, IRandom rand); - public delegate void ReduceAction(TPartitionState[] intermediates, IRandom rand, ref TGlobalState result); + public delegate void MapAction(ref VBuffer point, int rowIndex, TPartitionState state, Random rand); + public delegate void ReduceAction(TPartitionState[] intermediates, Random rand, ref TGlobalState result); /// /// Takes a data cursor and perform an in-memory parallel aggregation operation on it. This /// helper wraps some of the behavior common to parallel operations over a IRowCursor set, - /// including building the set, creating separate IRandom instances, and IRowCursor disposal. + /// including building the set, creating separate Random instances, and IRowCursor disposal. /// /// The type that each parallel cursor will be expected to aggregate to. /// The type of the final output from combining each per-thread instance of TInterAgg. 
@@ -1688,7 +1688,7 @@ public static RowStats ParallelMapReduce( var cur = set[i]; initChunk(ref buffer[i]); var innerWorkState = buffer[i]; - IRandom rand = RandomUtils.Create(baseHost.Rand); + Random rand = RandomUtils.Create(baseHost.Rand); workArr[i] = () => { using (cur) diff --git a/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs b/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs index 53e6ba543b..755dd32cff 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs @@ -482,7 +482,7 @@ private static bool MoveMany(FloatLabelCursor cursor, long count) return true; } - private void GetFeatureValueDense(IChannel ch, FloatLabelCursor cursor, CategoricalMetaData catMetaData, IRandom rand, out ReadOnlySpan featureValues) + private void GetFeatureValueDense(IChannel ch, FloatLabelCursor cursor, CategoricalMetaData catMetaData, Random rand, out ReadOnlySpan featureValues) { var cursorFeaturesValues = cursor.Features.GetValues(); if (catMetaData.CategoricalBoudaries != null) @@ -520,7 +520,7 @@ private void GetFeatureValueDense(IChannel ch, FloatLabelCursor cursor, Categori } private void GetFeatureValueSparse(IChannel ch, FloatLabelCursor cursor, - CategoricalMetaData catMetaData, IRandom rand, out ReadOnlySpan indices, + CategoricalMetaData catMetaData, Random rand, out ReadOnlySpan indices, out ReadOnlySpan featureValues, out int cnt) { var cursorFeaturesValues = cursor.Features.GetValues(); @@ -758,7 +758,7 @@ private void LoadDataset(IChannel ch, FloatLabelCursor.Factory factory, Dataset } } - private void CopyToArray(IChannel ch, FloatLabelCursor cursor, float[] features, CategoricalMetaData catMetaData, IRandom rand, ref int numElem) + private void CopyToArray(IChannel ch, FloatLabelCursor cursor, float[] features, CategoricalMetaData catMetaData, Random rand, ref int numElem) { ch.Assert(features.Length >= numElem + catMetaData.NumCol); if (catMetaData.CategoricalBoudaries != null) @@ -797,7 +797,7 @@ 
private void CopyToArray(IChannel ch, FloatLabelCursor cursor, float[] features, } private void CopyToCsr(IChannel ch, FloatLabelCursor cursor, - int[] indices, float[] features, CategoricalMetaData catMetaData, IRandom rand, ref int numElem) + int[] indices, float[] features, CategoricalMetaData catMetaData, Random rand, ref int numElem) { int numValue = cursor.Features.GetValues().Length; if (numValue > 0) diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs index f6ab01820f..d37cf58851 100644 --- a/src/Microsoft.ML.PCA/PcaTrainer.cs +++ b/src/Microsoft.ML.PCA/PcaTrainer.cs @@ -229,7 +229,7 @@ private static float[][] Zeros(int k, int d) private static float[][] GaussianMatrix(int k, int d, int seed) { var rv = Zeros(k, d); - var rng = new SysRandom(seed); + var rng = new Random(seed); // REVIEW: use a faster Gaussian random matrix generator //MKL has a fast vectorized random number generation. diff --git a/src/Microsoft.ML.Parquet/ParquetLoader.cs b/src/Microsoft.ML.Parquet/ParquetLoader.cs index c4b36f501c..5627f9c0a1 100644 --- a/src/Microsoft.ML.Parquet/ParquetLoader.cs +++ b/src/Microsoft.ML.Parquet/ParquetLoader.cs @@ -391,14 +391,14 @@ private static Stream OpenStream(string filename) return _rowCount; } - public IRowCursor GetRowCursor(Func predicate, IRandom rand = null) + public IRowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); return new Cursor(this, predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -446,9 +446,9 @@ private sealed class Cursor : RootCursorBase, IRowCursor private IEnumerator _dataSetEnumerator; private IEnumerator 
_blockEnumerator; private IList[] _columnValues; - private IRandom _rand; + private Random _rand; - public Cursor(ParquetLoader parent, Func predicate, IRandom rand) + public Cursor(ParquetLoader parent, Func predicate, Random rand) : base(parent._host) { Ch.AssertValue(predicate); diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index 0ffc0f929d..42e447004a 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -520,7 +520,7 @@ protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, Lin ch.Assert(Args.MaxIterations.HasValue); var maxIterations = Args.MaxIterations.Value; - var rands = new IRandom[maxIterations]; + var rands = new Random[maxIterations]; for (int i = 0; i < maxIterations; i++) rands[i] = RandomUtils.Create(Host.Rand.Next()); @@ -748,7 +748,7 @@ private void InitializeConvergenceMetrics(out string[] names, out Double[] initi /// The array holding the pre-computed squared L2-norm of features for each training example. It may be null. It is always null for /// binary classification and regression because this quantity is not needed. /// - protected virtual void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, IRandom rand, + protected virtual void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, float[] biasReg, float[] invariants, float lambdaNInv, VBuffer[] weights, float[] biasUnreg, VBuffer[] l1IntermediateWeights, float[] l1IntermediateBias, float[] featureNormSquared) { @@ -1831,8 +1831,8 @@ protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, //Reference: Leon Bottou. Stochastic Gradient Descent Tricks. 
//https://research.microsoft.com/pubs/192769/tricks-2012.pdf - var trainingTasks = new Action[_args.MaxIterations]; - var rands = new IRandom[_args.MaxIterations]; + var trainingTasks = new Action[_args.MaxIterations]; + var rands = new Random[_args.MaxIterations]; var ilr = _args.InitLearningRate; long t = 0; for (int epoch = 1; epoch <= _args.MaxIterations; epoch++) diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs index 042d6b4f72..f60b822827 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs @@ -124,7 +124,7 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) } /// - protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, IRandom rand, + protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, Float[] biasReg, Float[] invariants, Float lambdaNInv, VBuffer[] weights, Float[] biasUnreg, VBuffer[] l1IntermediateWeights, Float[] l1IntermediateBias, Float[] featureNormSquared) { diff --git a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs index 59dd072f7f..2e031433e1 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs @@ -126,7 +126,7 @@ private static VersionInfo GetVersionInfo() // Keep all the serializable state here. 
private readonly int _seed; private readonly object _instanceLock; - private readonly IRandom _random; + private readonly Random _random; public override PredictionKind PredictionKind => PredictionKind.BinaryClassification; public ColumnType InputType { get; } diff --git a/src/Microsoft.ML.Sweeper/Algorithms/KdoSweeper.cs b/src/Microsoft.ML.Sweeper/Algorithms/KdoSweeper.cs index 4ec0c95694..f739dc2df2 100644 --- a/src/Microsoft.ML.Sweeper/Algorithms/KdoSweeper.cs +++ b/src/Microsoft.ML.Sweeper/Algorithms/KdoSweeper.cs @@ -259,7 +259,7 @@ private ParameterSet SampleChild(ParameterSet parent, double fitness, int n, IRu else { // If Beta flag set, sample from independent Beta distributions instead. - SysRandom rng = new SysRandom(); + Random rng = new Random(); double alpha = 1 + 15 * fitness; foreach (int index in numericParamIndices) { diff --git a/src/Microsoft.ML.TimeSeries/PValueTransform.cs b/src/Microsoft.ML.TimeSeries/PValueTransform.cs index 36e8bebf4b..27f5911266 100644 --- a/src/Microsoft.ML.TimeSeries/PValueTransform.cs +++ b/src/Microsoft.ML.TimeSeries/PValueTransform.cs @@ -109,7 +109,7 @@ public override void Save(ModelSaveContext ctx) public sealed class State : StateBase { - private IRandom _randomGen; + private Random _randomGen; private PValueTransform _parent; diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs index e7c151e9d2..dc47a4fa4a 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs @@ -352,7 +352,7 @@ private void InitFunction(TState state) return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { var srcCursor = _transform.GetRowCursor(predicate, rand); return new Cursor(this, srcCursor); @@ -365,7 +365,7 @@ protected override IRowCursor GetRowCursorCore(Func 
predicate, IRando return _transform.GetRowCount(); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; return new IRowCursor[] { GetRowCursorCore(predicate, rand) }; diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 3f0d425ab9..7a0ed541d5 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -429,7 +429,7 @@ private void InitFunction(TState state) public override bool CanShuffle { get { return false; } } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { var srcCursor = _transform.GetRowCursor(predicate, rand); var clone = (SequentialDataTransform)MemberwiseClone(); @@ -450,7 +450,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando return _transform.GetRowCount(); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; return new IRowCursor[] { GetRowCursorCore(predicate, rand) }; @@ -686,14 +686,14 @@ private Func GetActiveOutputColumns(bool[] active) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Func predicateInput; var active = GetActive(predicate, out predicateInput); return new RowCursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); } - 
public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs index 391b5ed0e2..7476920482 100644 --- a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs +++ b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs @@ -164,7 +164,7 @@ public static BootstrapSamplingTransformer Create(IHostEnvironment env, ModelLoa return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { // We do not use the input random because this cursor does not support shuffling. 
var rgen = new TauswortheHybrid(_state); @@ -175,7 +175,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando return cursor; } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { var cursor = GetRowCursorCore(predicate, rand); consolidator = null; @@ -186,13 +186,13 @@ private sealed class RowCursor : LinkedRootCursorBase, IRowCursor { private int _remaining; private readonly BootstrapSamplingTransformer _parent; - private readonly IRandom _rgen; + private readonly Random _rgen; public override long Batch { get { return 0; } } public Schema Schema { get { return Input.Schema; } } - public RowCursor(BootstrapSamplingTransformer parent, IRowCursor input, IRandom rgen) + public RowCursor(BootstrapSamplingTransformer parent, IRowCursor input, Random rgen) : base(parent.Host, input) { Ch.AssertValue(rgen); diff --git a/src/Microsoft.ML.Transforms/FourierDistributionSampler.cs b/src/Microsoft.ML.Transforms/FourierDistributionSampler.cs index f05d39a6fa..1cefd54c46 100644 --- a/src/Microsoft.ML.Transforms/FourierDistributionSampler.cs +++ b/src/Microsoft.ML.Transforms/FourierDistributionSampler.cs @@ -8,6 +8,7 @@ using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Transforms; +using System; [assembly: LoadableClass(typeof(GaussianFourierSampler), typeof(GaussianFourierSampler.Arguments), typeof(SignatureFourierDistributionSampler), "Gaussian Kernel", GaussianFourierSampler.LoadName, "Gaussian")] @@ -33,7 +34,7 @@ namespace Microsoft.ML.Transforms public interface IFourierDistributionSampler : ICanSaveModel { - float Next(IRandom rand); + float Next(Random rand); } [TlcModule.ComponentKind("FourierDistributionSampler")] @@ -116,7 +117,7 @@ public void Save(ModelSaveContext ctx) 
ctx.Writer.Write(_gamma); } - public float Next(IRandom rand) + public float Next(Random rand) { return (float)Stats.SampleFromGaussian(rand) * MathUtils.Sqrt(2 * _gamma); } @@ -197,7 +198,7 @@ public void Save(ModelSaveContext ctx) ctx.Writer.Write(_a); } - public float Next(IRandom rand) + public float Next(Random rand) { return _a * Stats.SampleFromCauchy(rand); } diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index f0571e5f4c..a41a59bf33 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -156,7 +156,7 @@ public override void Save(ModelSaveContext ctx) public override Schema Schema => _groupSchema.AsSchema; - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -173,7 +173,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando public override bool CanShuffle { get { return false; } } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs index 0232f050ae..55bc3d18ee 100644 --- a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs +++ b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs @@ -297,7 +297,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, 
Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -309,7 +309,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs index 12f738e973..9a64ddd511 100644 --- a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs +++ b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs @@ -136,7 +136,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(ctx); } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -148,7 +148,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando return new RowCursor(Host, _bindings, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, IRandom rand = null) + public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs index 1ff3c4320c..c22fe779ec 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs @@ -628,7 +628,7 @@ private void AssertValid(uint[] ngram, int ngramLength, int lim, int icol) return null; } - protected override IRowCursor GetRowCursorCore(Func 
predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -640,7 +640,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, IRandom rand = null) + Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 8d091e21ec..ad786e1c8c 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -179,7 +179,7 @@ public override bool CanShuffle get { return false; } } - protected override IRowCursor GetRowCursorCore(Func predicate, IRandom rand = null) + protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { var activeInput = _schemaImpl.GetActiveInput(predicate); var inputCursor = Source.GetRowCursor(col => activeInput[col], null); @@ -187,7 +187,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, IRando } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, - int n, IRandom rand = null) + int n, Random rand = null) { var activeInput = _schemaImpl.GetActiveInput(predicate); var inputCursors = Source.GetRowCursorSet(out consolidator, col => activeInput[col], n, null); diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 03ca7d8600..77e0ece94c 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -257,7 +257,7 @@ protected IDataLoader TestCore(string pathData, bool keepHidden, string[] argsPi var 
newPipe = ApplyTransformUtils.ApplyAllTransformsToData(_env, comp.View, cachedData); if (!newPipe.CanShuffle) { - using (var c1 = newPipe.GetRowCursor(col => true, new SysRandom(123))) + using (var c1 = newPipe.GetRowCursor(col => true, new Random(123))) using (var c2 = newPipe.GetRowCursor(col => true)) { if (!CheckSameValues(c1, c2, true, true, true)) From be115f416a77f88f56f6d3abe0931ca1f74ef40b Mon Sep 17 00:00:00 2001 From: Rogan Carr Date: Fri, 30 Nov 2018 13:51:36 -0800 Subject: [PATCH 010/100] Update the PFI Binary Classification XML Docs (#1792) Updating Binary Classification XML Docs to the latest version of the PFI documentation text. --- .../PermutationFeatureImportanceExtensions.cs | 37 ++++++++++++++++--- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs index 07bfa4ebfc..8d596c94df 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs @@ -18,7 +18,7 @@ public static class PermutationFeatureImportanceExtensions /// /// /// Permutation feature importance (PFI) is a technique to determine the global importance of features in a trained - /// machine learning model. PFI is a simple yet powerul technique motivated by Breiman in his Random Forest paper, section 10 + /// machine learning model. PFI is a simple yet powerful technique motivated by Breiman in his Random Forest paper, section 10 /// (Breiman. "Random Forests." Machine Learning, 2001.) /// The advantage of the PFI method is that it is model agnostic -- it works with any model that can be /// evaluated -- and it can use any dataset, not just the training set, to compute feature importance metrics. 
@@ -33,7 +33,7 @@ public static class PermutationFeatureImportanceExtensions /// /// /// In this implementation, PFI computes the change in all possible regression evaluation metrics for each feature, and an - /// ImmutableArray of RegressionEvaluator.Result objects is returned. See the sample below for an + /// ImmutableArray of RegressionMetrics objects is returned. See the sample below for an /// example of working with these results to analyze the feature importance of a model. /// /// @@ -85,10 +85,37 @@ private static RegressionMetrics RegressionDelta( } /// - /// Permutation Feature Importance is a technique that calculates how much each feature 'matters' to the predictions. - /// Namely, how much the model's predictions will change if we randomly permute the values of one feature across the evaluation set. - /// If the quality doesn't change much, this feature is not very important. If the quality drops drastically, this was a really important feature. + /// Permutation Feature Importance (PFI) for Binary Classification /// + /// + /// + /// Permutation feature importance (PFI) is a technique to determine the global importance of features in a trained + /// machine learning model. PFI is a simple yet powerful technique motivated by Breiman in his Random Forest paper, section 10 + /// (Breiman. "Random Forests." Machine Learning, 2001.) + /// The advantage of the PFI method is that it is model agnostic -- it works with any model that can be + /// evaluated -- and it can use any dataset, not just the training set, to compute feature importance metrics. + /// + /// + /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values + /// for that feature across all the examples, so that each example now has a random value for the feature and + /// the original values for all other features. The evalution metric (e.g. 
AUC or R-squared) is then calculated + /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. + /// The larger the change in the evaluation metric, the more important the feature is to the model. + /// PFI works by performing this permutation analysis across all the features of a model, one after another. + /// + /// + /// In this implementation, PFI computes the change in all possible binary classification evaluation metrics for each feature, and an + /// ImmutableArray of BinaryClassificationMetrics objects is returned. See the sample below for an + /// example of working with these results to analyze the feature importance of a model. + /// + /// + /// + /// + /// + /// + /// /// The binary classification context. /// The model to evaluate. /// The evaluation data set. From cb37c7e7f1e1b29b5608a2755db793c5435d10b1 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Fri, 30 Nov 2018 14:06:40 -0800 Subject: [PATCH 011/100] Remove ISchematized interface from the codebase. (#1759) * Remove ISchematized interface from the codebase. 1. Remove ISchematized 2. For any class that requires a Schema, we add an Schema as its field 3. 
Rename Schema to OutputSchema in IRowToRowMapper * Address comments * Clean redundant Schema field and point Schema to OutputSchema * Replace Schema in SingleValueRowMapper with OutputSchema --- src/Microsoft.ML.Api/PredictionEngine.cs | 2 +- .../StatefulFilterTransform.cs | 4 ++- src/Microsoft.ML.Core/Data/IDataView.cs | 27 ++++++++-------- .../Data/ISchemaBindableMapper.cs | 25 ++++++++++++--- .../Data/RoleMappedSchema.cs | 4 +-- src/Microsoft.ML.Core/Data/Schema.cs | 2 +- .../Commands/ScoreCommand.cs | 4 +-- src/Microsoft.ML.Data/Data/IRowSeekable.cs | 5 ++- .../Data/ITransposeDataView.cs | 2 +- .../DataLoadSave/TransformerChain.cs | 2 +- .../DataView/CompositeRowToRowMapper.cs | 4 +-- .../DataView/LambdaFilter.cs | 2 +- .../DataView/RowToRowMapperTransform.cs | 10 +++--- .../Dirty/ChooseColumnsByIndexTransform.cs | 2 +- .../EntryPoints/TransformModel.cs | 2 ++ .../Evaluators/RankerEvaluator.cs | 11 ++++++- .../Prediction/Calibrator.cs | 24 +++++++------- .../Scorers/BinaryClassifierScorer.cs | 7 ++-- .../Scorers/ClusteringScorer.cs | 2 +- ...FeatureContributionCalculationTransform.cs | 10 +++--- .../Scorers/GenericScorer.cs | 10 +++--- .../Scorers/MultiClassClassifierScorer.cs | 13 ++++---- .../Scorers/PredictedLabelScorerBase.cs | 32 +++++++++---------- .../Scorers/RowToRowScorerBase.cs | 18 +++++------ .../Scorers/SchemaBindablePredictorWrapper.cs | 19 +++++------ .../Transforms/ColumnSelecting.cs | 22 +++++++------ .../Transforms/GenerateNumberTransform.cs | 2 +- src/Microsoft.ML.Data/Transforms/NAFilter.cs | 2 +- .../Transforms/NopTransform.cs | 11 ++++++- .../Transforms/PerGroupTransformBase.cs | 6 ++-- .../Transforms/RangeFilter.cs | 2 +- .../Transforms/RowShufflingTransformer.cs | 2 +- .../Transforms/SkipTakeFilter.cs | 4 +-- .../Transforms/TransformBase.cs | 23 +++++++++---- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 10 +++--- .../TreeEnsembleFeaturizer.cs | 8 ++--- .../VectorToImageTransform.cs | 2 +- .../MatrixFactorizationPredictor.cs | 12 
+++---- .../FieldAwareFactorizationMachineUtils.cs | 9 +++--- .../PredictionFunction.cs | 4 +-- .../SequentialTransformBase.cs | 6 ++-- .../SequentialTransformerBase.cs | 20 ++++++------ .../SlidingWindowTransformBase.cs | 2 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 4 +-- .../OptionalColumnTransform.cs | 2 +- .../ProduceIdTransform.cs | 2 +- .../Text/NgramHashingTransformer.cs | 2 +- .../UngroupTransform.cs | 2 +- src/Native/MatrixFactorizationNative/libmf | 2 +- test/Microsoft.ML.Benchmarks/HashBench.cs | 2 +- .../DataPipe/TestDataPipeBase.cs | 2 +- .../Transformers/HashTests.cs | 12 +++---- 52 files changed, 238 insertions(+), 182 deletions(-) diff --git a/src/Microsoft.ML.Api/PredictionEngine.cs b/src/Microsoft.ML.Api/PredictionEngine.cs index 67734d0950..78d4810568 100644 --- a/src/Microsoft.ML.Api/PredictionEngine.cs +++ b/src/Microsoft.ML.Api/PredictionEngine.cs @@ -196,7 +196,7 @@ private protected PredictionEngineBase(IHostEnvironment env, ITransformer transf internal virtual void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, SchemaDefinition inputSchemaDefinition, SchemaDefinition outputSchemaDefinition, out Action disposer, out IRowReadableAs outputRow) { - var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.Schema), ignoreMissingColumns, outputSchemaDefinition); + var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.OutputSchema), ignoreMissingColumns, outputSchemaDefinition); var outputRowLocal = mapper.GetRow(_inputRow, col => true, out disposer); outputRow = cursorable.GetRow(outputRowLocal); } diff --git a/src/Microsoft.ML.Api/StatefulFilterTransform.cs b/src/Microsoft.ML.Api/StatefulFilterTransform.cs index 332c3e202a..a8341639ff 100644 --- a/src/Microsoft.ML.Api/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Api/StatefulFilterTransform.cs @@ -98,7 +98,9 @@ private 
StatefulFilterTransform(IHostEnvironment env, StatefulFilterTransform _bindings.Schema; + Schema IDataView.Schema => OutputSchema; + + public Schema OutputSchema => _bindings.Schema; public long? GetRowCount() { diff --git a/src/Microsoft.ML.Core/Data/IDataView.cs b/src/Microsoft.ML.Core/Data/IDataView.cs index d621cb031e..7d5e0c1790 100644 --- a/src/Microsoft.ML.Core/Data/IDataView.cs +++ b/src/Microsoft.ML.Core/Data/IDataView.cs @@ -60,22 +60,11 @@ public interface ISchema void GetMetadata(string kind, int col, ref TValue value); } - /// - /// Base interface for schematized information. IDataView and IRowCursor both derive from this. - /// - public interface ISchematized - { - /// - /// Gets an instance of Schema. - /// - Schema Schema { get; } - } - /// /// The input and output of Query Operators (Transforms). This is the fundamental data pipeline /// type, comparable to IEnumerable for LINQ. /// - public interface IDataView : ISchematized + public interface IDataView { /// /// Whether this IDataView supports shuffling of rows, to any degree. @@ -124,6 +113,11 @@ public interface IDataView : ISchematized /// IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null); + + /// + /// Gets an instance of Schema. + /// + Schema Schema { get; } } /// @@ -148,7 +142,7 @@ public interface IRowCursorConsolidator /// A logical row. May be a row of an IDataView or a stand-alone row. If/when its contents /// change, its ICounted.Counter value is incremented. /// - public interface IRow : ISchematized, ICounted + public interface IRow : ICounted { /// /// Returns whether the given column is active in this row. @@ -161,6 +155,13 @@ public interface IRow : ISchematized, ICounted /// differs from this column's type. /// ValueGetter GetGetter(int col); + + /// + /// Gets a , which provides name and type information for variables + /// (i.e., columns in ML.NET's type system) stored in this row. 
+ /// + Schema Schema { get; } + } /// diff --git a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs index 525c7926a0..c4b3147d2c 100644 --- a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs +++ b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs @@ -30,13 +30,18 @@ public interface ISchemaBindableMapper /// This interface is used to map a schema from input columns to output columns. The should keep track /// of the input columns that are needed for the mapping. /// - public interface ISchemaBoundMapper : ISchematized + public interface ISchemaBoundMapper { /// /// The that was passed to the in the binding process. /// RoleMappedSchema InputRoleMappedSchema { get; } + /// + /// Gets schema of this mapper's output. + /// + Schema OutputSchema { get; } + /// /// A property to get back the that produced this . /// @@ -53,6 +58,11 @@ public interface ISchemaBoundMapper : ISchematized /// public interface ISchemaBoundRowMapper : ISchemaBoundMapper, IRowToRowMapper { + /// + /// There are two schemas from and . + /// Since the two parent schema's are identical in all derived classes, we merge them into . + /// + new Schema OutputSchema { get; } } /// @@ -61,15 +71,20 @@ public interface ISchemaBoundRowMapper : ISchemaBoundMapper, IRowToRowMapper /// return a subset of the input columns. /// This interface is similar to , except it does not have any input role mappings, /// so to rebind, the same input column names must be used. - /// Implementing of this object are typically created using a definie input . + /// Implementations of this interface are typically created over defined input . /// - public interface IRowToRowMapper : ISchematized + public interface IRowToRowMapper { /// /// Mappers are defined as accepting inputs with this very specific schema. /// Schema InputSchema { get; } + /// + /// Gets an instance of which describes the columns' names and types in the output generated by this mapper. 
+ /// + Schema OutputSchema { get; } + /// /// Given a predicate specifying which columns are needed, return a predicate indicating which input columns are /// needed. The domain of the function is defined over the indices of the columns of @@ -82,9 +97,9 @@ public interface IRowToRowMapper : ISchematized /// The active columns are those for which returns true. Getting values on inactive /// columns of the returned row will throw. Null predicates are disallowed. /// - /// The of should be the same object as + /// The of should be the same object as /// . Implementors of this method should throw if that is not the case. Conversely, - /// the returned value must have the same schema as . + /// the returned value must have the same schema as . /// /// This method creates a live connection between the input and the output . In particular, when the getters of the output are invoked, they invoke the diff --git a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs index a8d22319b3..d3c571c62a 100644 --- a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs +++ b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs @@ -467,7 +467,7 @@ public RoleMappedSchema(Schema schema, string label, string feature, /// /// Encapsulates an plus a corresponding . /// Note that the schema of of is - /// guaranteed to equal the the of . + /// guaranteed to equal the the of . /// public sealed class RoleMappedData { @@ -478,7 +478,7 @@ public sealed class RoleMappedData /// /// The role mapped schema. Note that 's is - /// guaranteed to be the same as 's . + /// guaranteed to be the same as 's . 
/// public RoleMappedSchema Schema { get; } diff --git a/src/Microsoft.ML.Core/Data/Schema.cs b/src/Microsoft.ML.Core/Data/Schema.cs index 9be55ca48f..e21bd41b86 100644 --- a/src/Microsoft.ML.Core/Data/Schema.cs +++ b/src/Microsoft.ML.Core/Data/Schema.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Data { /// - /// This class represents the schema of an object (like an or an ). + /// This class represents the of an object like, for interstance, an or an . /// On the high level, the schema is a collection of 'columns'. Each column has the following properties: /// - Column name. /// - Column type. diff --git a/src/Microsoft.ML.Data/Commands/ScoreCommand.cs b/src/Microsoft.ML.Data/Commands/ScoreCommand.cs index ee3d05b546..868dd83b19 100644 --- a/src/Microsoft.ML.Data/Commands/ScoreCommand.cs +++ b/src/Microsoft.ML.Data/Commands/ScoreCommand.cs @@ -306,8 +306,8 @@ public static TScorerFactory GetScorerComponent( ComponentCatalog.LoadableClassInfo info = null; ReadOnlyMemory scoreKind = default; - if (mapper.Schema.Count > 0 && - mapper.Schema.TryGetMetadata(TextType.Instance, MetadataUtils.Kinds.ScoreColumnKind, 0, ref scoreKind) && + if (mapper.OutputSchema.Count > 0 && + mapper.OutputSchema.TryGetMetadata(TextType.Instance, MetadataUtils.Kinds.ScoreColumnKind, 0, ref scoreKind) && !scoreKind.IsEmpty) { var loadName = scoreKind.ToString(); diff --git a/src/Microsoft.ML.Data/Data/IRowSeekable.cs b/src/Microsoft.ML.Data/Data/IRowSeekable.cs index 3c0bf0db08..9270ddc7f3 100644 --- a/src/Microsoft.ML.Data/Data/IRowSeekable.cs +++ b/src/Microsoft.ML.Data/Data/IRowSeekable.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using System; namespace Microsoft.ML.Runtime.Data @@ -11,9 +12,11 @@ namespace Microsoft.ML.Runtime.Data /// /// Represents a data view that supports random access to a specific row. 
/// - public interface IRowSeekable : ISchematized + public interface IRowSeekable { IRowSeeker GetSeeker(Func predicate); + + Schema Schema { get; } } /// diff --git a/src/Microsoft.ML.Data/Data/ITransposeDataView.cs b/src/Microsoft.ML.Data/Data/ITransposeDataView.cs index f247bc9859..4c68dcbdd1 100644 --- a/src/Microsoft.ML.Data/Data/ITransposeDataView.cs +++ b/src/Microsoft.ML.Data/Data/ITransposeDataView.cs @@ -28,7 +28,7 @@ public interface ITransposeDataView : IDataView /// /// An enhanced schema, containing information on the transposition properties, if any, /// of each column. Note that there is no contract or suggestion that this property - /// should be equal to . + /// should be equal to . /// ITransposeSchema TransposeSchema { get; } diff --git a/src/Microsoft.ML.Data/DataLoadSave/TransformerChain.cs b/src/Microsoft.ML.Data/DataLoadSave/TransformerChain.cs index 2f355a04a1..f1a7ca5308 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/TransformerChain.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/TransformerChain.cs @@ -227,7 +227,7 @@ public IRowToRowMapper GetRowToRowMapper(Schema inputSchema) for (int i = 0; i < mappers.Length; ++i) { mappers[i] = _transformers[i].GetRowToRowMapper(schema); - schema = mappers[i].Schema; + schema = mappers[i].OutputSchema; } return new CompositeRowToRowMapper(inputSchema, mappers); } diff --git a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs index 4d79c0d6f5..eb319cb7bc 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs @@ -18,7 +18,7 @@ public sealed class CompositeRowToRowMapper : IRowToRowMapper private static readonly IRowToRowMapper[] _empty = new IRowToRowMapper[0]; public Schema InputSchema { get; } - public Schema Schema { get; } + public Schema OutputSchema { get; } /// /// Out of a series of mappers, construct a seemingly unitary mapper that is able to apply 
them in sequence. @@ -32,7 +32,7 @@ public CompositeRowToRowMapper(Schema inputSchema, IRowToRowMapper[] mappers) Contracts.CheckValueOrNull(mappers); InnerMappers = Utils.Size(mappers) > 0 ? mappers : _empty; InputSchema = inputSchema; - Schema = Utils.Size(mappers) > 0 ? mappers[mappers.Length - 1].Schema : inputSchema; + OutputSchema = Utils.Size(mappers) > 0 ? mappers[mappers.Length - 1].OutputSchema : inputSchema; } public Func GetDependencies(Func predicate) diff --git a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs index 87173d71d8..0cec2a7a58 100644 --- a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs +++ b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs @@ -154,7 +154,7 @@ private sealed class RowCursor : LinkedRowFilterCursorBase private T1 _src; public RowCursor(Impl parent, IRowCursor input, bool[] active) - : base(parent.Host, input, parent.Schema, active) + : base(parent.Host, input, parent.OutputSchema, active) { _getSrc = Input.GetGetter(parent._colSrc); if (parent._conv == null) diff --git a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs index e472f6546f..d5ccb2d063 100644 --- a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs +++ b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs @@ -75,7 +75,7 @@ private static VersionInfo GetVersionInfo() loaderAssemblyName: typeof(RowToRowMapperTransform).Assembly.FullName); } - public override Schema Schema => _bindings.Schema; + public override Schema OutputSchema => _bindings.Schema; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => _mapper is ICanSaveOnnx onnxMapper ? 
onnxMapper.CanSaveOnnx(ctx) : false; @@ -233,7 +233,7 @@ public Func GetDependencies(Func predicate) return predicateInput; } - Schema IRowToRowMapper.InputSchema => Source.Schema; + public Schema InputSchema => Source.Schema; public IRow GetRow(IRow input, Func active, out Action disposer) { @@ -245,13 +245,13 @@ public IRow GetRow(IRow input, Func active, out Action disposer) using (var ch = Host.Start("GetEntireRow")) { Action disp; - var activeArr = new bool[Schema.ColumnCount]; - for (int i = 0; i < Schema.ColumnCount; i++) + var activeArr = new bool[OutputSchema.ColumnCount]; + for (int i = 0; i < OutputSchema.ColumnCount; i++) activeArr[i] = active(i); var pred = GetActiveOutputColumns(activeArr); var getters = _mapper.CreateGetters(input, pred, out disp); disposer += disp; - return new Row(input, this, Schema, getters); + return new Row(input, this, OutputSchema, getters); } } diff --git a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs index dbe5cafa20..14a23ab580 100644 --- a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs +++ b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs @@ -245,7 +245,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(ctx); } - public override Schema Schema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.AsSchema; protected override bool? 
ShouldUseParallelCursors(Func predicate) { diff --git a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs index 4d94dc9760..8eae667c53 100644 --- a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs @@ -193,6 +193,8 @@ private sealed class CompositeRowToRowMapper : IRowToRowMapper public Schema Schema => _chain.Schema; + public Schema OutputSchema => Schema; + public CompositeRowToRowMapper(IExceptionContext ectx, IDataView chain, ISchema rootSchema) { Contracts.CheckValue(ectx, nameof(ectx)); diff --git a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs index ef6d005e38..fe750fce59 100644 --- a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs @@ -566,7 +566,16 @@ private static VersionInfo GetVersionInfo() public bool CanShuffle { get { return _transform.CanShuffle; } } - public Schema Schema => _transform.Schema; + /// + /// Explicit implementation prevents Schema from being accessed from derived classes. + /// It's our first step to separate data produced by transform from transform. + /// + Schema IDataView.Schema => OutputSchema; + + /// + /// Shape information of the produced output. Note that the input and the output of this transform (and their types) are identical. 
+ /// + public Schema OutputSchema => _transform.OutputSchema; public RankerPerInstanceTransform(IHostEnvironment env, IDataView input, string labelCol, string scoreCol, string groupCol, int truncationLevel, Double[] labelGains) diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index ea2126b91e..0e966a44dc 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -528,7 +528,7 @@ private sealed class Bound : ISchemaBoundRowMapper public ISchemaBindableMapper Bindable => _parent; public RoleMappedSchema InputRoleMappedSchema => _predictor.InputRoleMappedSchema; public Schema InputSchema => _predictor.InputSchema; - public Schema Schema { get; } + public Schema OutputSchema { get; } public Bound(IHostEnvironment env, SchemaBindableCalibratedPredictor parent, RoleMappedSchema schema) { @@ -537,16 +537,16 @@ public Bound(IHostEnvironment env, SchemaBindableCalibratedPredictor parent, Rol _parent = parent; _predictor = _parent._bindable.Bind(env, schema) as ISchemaBoundRowMapper; env.Check(_predictor != null, "Predictor is not a row-to-row mapper"); - if (!_predictor.Schema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out _scoreCol)) + if (!_predictor.OutputSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out _scoreCol)) throw env.Except("Predictor does not output a score"); - var scoreType = _predictor.Schema.GetColumnType(_scoreCol); + var scoreType = _predictor.OutputSchema.GetColumnType(_scoreCol); env.Check(!scoreType.IsVector && scoreType.IsNumber); - Schema = Schema.Create(new BinaryClassifierSchema()); + OutputSchema = Schema.Create(new BinaryClassifierSchema()); } public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return _predictor.GetDependencies(col => true); @@ -562,7 +562,7 @@ public Func 
GetDependencies(Func predicate) public IRow GetRow(IRow input, Func predicate, out Action disposer) { Func predictorPredicate = col => false; - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) { @@ -571,17 +571,17 @@ public IRow GetRow(IRow input, Func predicate, out Action disposer) } } var predictorRow = _predictor.GetRow(input, predictorPredicate, out disposer); - var getters = new Delegate[Schema.ColumnCount]; - for (int i = 0; i < Schema.ColumnCount - 1; i++) + var getters = new Delegate[OutputSchema.ColumnCount]; + for (int i = 0; i < OutputSchema.ColumnCount - 1; i++) { var type = predictorRow.Schema.GetColumnType(i); if (!predicate(i)) continue; getters[i] = Utils.MarshalInvoke(GetPredictorGetter, type.RawType, predictorRow, i); } - if (predicate(Schema.ColumnCount - 1)) - getters[Schema.ColumnCount - 1] = GetProbGetter(predictorRow); - return new SimpleRow(Schema, predictorRow, getters); + if (predicate(OutputSchema.ColumnCount - 1)) + getters[OutputSchema.ColumnCount - 1] = GetProbGetter(predictorRow); + return new SimpleRow(OutputSchema, predictorRow, getters); } private Delegate GetPredictorGetter(IRow input, int col) @@ -728,7 +728,7 @@ private static bool NeedCalibration(IHostEnvironment env, IChannel ch, ICalibrat var bindable = ScoreUtils.GetSchemaBindableMapper(env, predictor); var bound = bindable.Bind(env, schema); - var outputSchema = bound.Schema; + var outputSchema = bound.OutputSchema; int scoreCol; if (!outputSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreCol)) { diff --git a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs index 56a72d3f92..68fdb15ab7 100644 --- a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs @@ -94,11 +94,10 @@ private static bool CanWrap(ISchemaBoundMapper mapper, ColumnType labelNameType) 
if (rowMapper == null) return false; // We could cover this case, but it is of no practical worth as far as I see, so I decline to do so. - ISchema outSchema = mapper.Schema; int scoreIdx; - if (!outSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx)) + if (!mapper.OutputSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx)) return false; // The mapper doesn't even publish a score column to attach the metadata to. - if (outSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.TrainingLabelValues, scoreIdx) != null) + if (mapper.OutputSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.TrainingLabelValues, scoreIdx) != null) return false; // The mapper publishes a score column, and already produces its own slot names. return labelNameType.IsVector && labelNameType.VectorSize == 2; @@ -224,7 +223,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) { Host.AssertValue(output); - Host.Assert(output.Schema == Bindings.RowMapper.Schema); + Host.Assert(output.Schema == Bindings.RowMapper.OutputSchema); Host.Assert(output.IsColumnActive(Bindings.ScoreColumnIndex)); ValueGetter mapperScoreGetter = output.GetGetter(Bindings.ScoreColumnIndex); diff --git a/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs b/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs index 40fd5b621f..1035735bc3 100644 --- a/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs @@ -94,7 +94,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) { Contracts.AssertValue(output); - Contracts.Assert(output.Schema == Bindings.RowMapper.Schema); + Contracts.Assert(output.Schema == Bindings.RowMapper.OutputSchema); 
Contracts.Assert(output.IsColumnActive(Bindings.ScoreColumnIndex)); ValueGetter> mapperScoreGetter = output.GetGetter>(Bindings.ScoreColumnIndex); diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index b2a37cba4b..80a98881a3 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -335,7 +335,7 @@ private sealed class RowMapper : ISchemaBoundRowMapper public Schema InputSchema => InputRoleMappedSchema.Schema; - public Schema Schema { get; } + public Schema OutputSchema { get; } public ISchemaBindableMapper Bindable => _parent; @@ -368,8 +368,8 @@ public RowMapper(IHostEnvironment env, BindableMapper parent, RoleMappedSchema s InputSchema, InputRoleMappedSchema.Feature.Index); } - _outputGenericSchema = _genericRowMapper.Schema; - Schema = new CompositeSchema(new ISchema[] { _outputGenericSchema, _outputSchema, }).AsSchema; + _outputGenericSchema = _genericRowMapper.OutputSchema; + OutputSchema = new CompositeSchema(new ISchema[] { _outputGenericSchema, _outputSchema, }).AsSchema; } /// @@ -377,7 +377,7 @@ public RowMapper(IHostEnvironment env, BindableMapper parent, RoleMappedSchema s /// public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => col == InputRoleMappedSchema.Feature.Index; @@ -406,7 +406,7 @@ public IRow GetOutputRow(IRow input, Func predicate, out Action dispo getters[i] = RowCursorUtils.GetGetterAsDelegate(genericRow, i); } - return new SimpleRow(Schema, input, getters); + return new SimpleRow(OutputSchema, input, getters); } public Func GetGenericPredicate(Func predicate) diff --git a/src/Microsoft.ML.Data/Scorers/GenericScorer.cs b/src/Microsoft.ML.Data/Scorers/GenericScorer.cs index b1a1955452..99097dc12b 
100644 --- a/src/Microsoft.ML.Data/Scorers/GenericScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/GenericScorer.cs @@ -140,7 +140,7 @@ private static VersionInfo GetVersionInfo() private readonly Bindings _bindings; protected override BindingsBase GetBindings() => _bindings; - public override Schema Schema { get; } + public override Schema OutputSchema { get; } bool ICanSavePfa.CanSavePfa => (Bindable as ICanSavePfa)?.CanSavePfa == true; @@ -160,7 +160,7 @@ public GenericScorer(IHostEnvironment env, ScorerArgumentsBase args, IDataView d var rowMapper = mapper as ISchemaBoundRowMapper; Host.CheckParam(rowMapper != null, nameof(mapper), "mapper should implement ISchemaBoundRowMapper"); _bindings = Bindings.Create(data.Schema, rowMapper, args.Suffix); - Schema = Schema.Create(_bindings); + OutputSchema = Schema.Create(_bindings); } /// @@ -170,7 +170,7 @@ private GenericScorer(IHostEnvironment env, GenericScorer transform, IDataView d : base(env, data, RegistrationName, transform.Bindable) { _bindings = transform._bindings.ApplyToSchema(env, data.Schema); - Schema = Schema.Create(_bindings); + OutputSchema = Schema.Create(_bindings); } /// @@ -181,7 +181,7 @@ private GenericScorer(IHost host, ModelLoadContext ctx, IDataView input) { Contracts.AssertValue(ctx); _bindings = Bindings.Create(ctx, host, Bindable, input.Schema); - Schema = Schema.Create(_bindings); + OutputSchema = Schema.Create(_bindings); } /// @@ -266,7 +266,7 @@ protected override Delegate[] GetGetters(IRow output, Func predicate) Host.Assert(_bindings.DerivedColumnCount == 0); Host.AssertValue(output); Host.AssertValue(predicate); - Host.Assert(output.Schema == _bindings.RowMapper.Schema); + Host.Assert(output.Schema == _bindings.RowMapper.OutputSchema); return GetGettersFromRow(output, predicate); } diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index 66f30f565c..3ee4377443 100644 --- 
a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -250,10 +250,9 @@ private sealed class Bound : ISchemaBoundRowMapper private LabelNameBindableMapper _bindable; private readonly Func _canWrap; - public Schema Schema => _outSchema.AsSchema; - public RoleMappedSchema InputRoleMappedSchema => _mapper.InputRoleMappedSchema; public Schema InputSchema => _mapper.InputSchema; + public Schema OutputSchema => _outSchema.AsSchema; public ISchemaBindableMapper Bindable { @@ -286,7 +285,7 @@ public Bound(IHostEnvironment env, ISchemaBoundRowMapper mapper, VectorType type _mapper = mapper; int scoreIdx; - bool result = mapper.Schema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx); + bool result = mapper.OutputSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx); if (!result) throw env.ExceptParam(nameof(mapper), "Mapper did not have a '{0}' column", MetadataUtils.Const.ScoreValueKind.Score); @@ -294,7 +293,7 @@ public Bound(IHostEnvironment env, ISchemaBoundRowMapper mapper, VectorType type _labelNameGetter = getter; _metadataKind = metadataKind; - _outSchema = new SchemaImpl(mapper.Schema, scoreIdx, _labelNameType, _labelNameGetter, _metadataKind); + _outSchema = new SchemaImpl(mapper.OutputSchema, scoreIdx, _labelNameType, _labelNameGetter, _metadataKind); _canWrap = canWrap; } @@ -311,7 +310,7 @@ public Func GetDependencies(Func predicate) public IRow GetRow(IRow input, Func predicate, out Action disposer) { var innerRow = _mapper.GetRow(input, predicate, out disposer); - return new RowImpl(innerRow, Schema); + return new RowImpl(innerRow, OutputSchema); } private sealed class SchemaImpl : ISchema @@ -470,7 +469,7 @@ public static bool CanWrap(ISchemaBoundMapper mapper, ColumnType labelNameType) if (rowMapper == null) return false; // We could cover this case, but it is of no practical worth as far as I see, so I decline to do so. 
- ISchema outSchema = mapper.Schema; + ISchema outSchema = mapper.OutputSchema; int scoreIdx; if (!outSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx)) return false; // The mapper doesn't even publish a score column to attach the metadata to. @@ -555,7 +554,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) { Host.AssertValue(output); - Host.Assert(output.Schema == Bindings.RowMapper.Schema); + Host.Assert(output.Schema == Bindings.RowMapper.OutputSchema); Host.Assert(output.IsColumnActive(Bindings.ScoreColumnIndex)); ValueGetter> mapperScoreGetter = output.GetGetter>(Bindings.ScoreColumnIndex); diff --git a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs index 6345914b90..e080e9f7be 100644 --- a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs @@ -61,24 +61,24 @@ private BindingsImpl(Schema input, ISchemaBoundRowMapper mapper, string suffix, // bearing object makes pushing the logic into the multiclass scorer almost impossible. 
if (predColType.IsKey) { - ColumnType scoreSlotsType = mapper.Schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.SlotNames, scoreColIndex); + ColumnType scoreSlotsType = mapper.OutputSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.SlotNames, scoreColIndex); if (scoreSlotsType != null && scoreSlotsType.IsKnownSizeVector && scoreSlotsType.VectorSize == predColType.KeyCount) { Contracts.Assert(scoreSlotsType.VectorSize > 0); IColumn col = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, - scoreSlotsType.RawType, mapper.Schema, scoreColIndex, MetadataUtils.Kinds.SlotNames); + scoreSlotsType.RawType, mapper.OutputSchema, scoreColIndex, MetadataUtils.Kinds.SlotNames); _predColMetadata = RowColumnUtils.GetRow(null, col); } else { - scoreSlotsType = mapper.Schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.TrainingLabelValues, scoreColIndex); + scoreSlotsType = mapper.OutputSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.TrainingLabelValues, scoreColIndex); if (scoreSlotsType != null && scoreSlotsType.IsKnownSizeVector && scoreSlotsType.VectorSize == predColType.KeyCount) { Contracts.Assert(scoreSlotsType.VectorSize > 0); IColumn col = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, - scoreSlotsType.RawType, mapper.Schema, scoreColIndex, MetadataUtils.Kinds.TrainingLabelValues); + scoreSlotsType.RawType, mapper.OutputSchema, scoreColIndex, MetadataUtils.Kinds.TrainingLabelValues); _predColMetadata = RowColumnUtils.GetRow(null, col); } } @@ -115,7 +115,7 @@ public BindingsImpl ApplyToSchema(Schema input, ISchemaBindableMapper bindable, env.AssertValue(input); env.AssertValue(bindable); - string scoreCol = RowMapper.Schema.GetColumnName(ScoreColumnIndex); + string scoreCol = RowMapper.OutputSchema.GetColumnName(ScoreColumnIndex); var schema = new RoleMappedSchema(input, RowMapper.GetInputColumnRoles()); // Checks compatibility of the predictor input types. 
@@ -123,7 +123,7 @@ public BindingsImpl ApplyToSchema(Schema input, ISchemaBindableMapper bindable, var rowMapper = mapper as ISchemaBoundRowMapper; env.CheckParam(rowMapper != null, nameof(bindable), "Mapper must implement ISchemaBoundRowMapper"); int mapperScoreColumn; - bool tmp = rowMapper.Schema.TryGetColumnIndex(scoreCol, out mapperScoreColumn); + bool tmp = rowMapper.OutputSchema.TryGetColumnIndex(scoreCol, out mapperScoreColumn); env.Check(tmp, "Mapper doesn't have expected score column"); return new BindingsImpl(input, rowMapper, Suffix, ScoreColumnKind, true, mapperScoreColumn, PredColType); @@ -153,9 +153,9 @@ public static BindingsImpl Create(ModelLoadContext ctx, Schema input, // Find the score column of the mapper. int scoreColIndex; - env.CheckDecode(mapper.Schema.TryGetColumnIndex(scoreCol, out scoreColIndex)); + env.CheckDecode(mapper.OutputSchema.TryGetColumnIndex(scoreCol, out scoreColIndex)); - var scoreType = mapper.Schema.GetColumnType(scoreColIndex); + var scoreType = mapper.OutputSchema.GetColumnType(scoreColIndex); env.CheckDecode(outputTypeMatches(scoreType)); var predColType = getPredColType(scoreType, rowMapper); @@ -172,7 +172,7 @@ public override void Save(ModelSaveContext ctx) // int: id of the column used for deriving the predicted label column SaveBase(ctx); ctx.SaveNonEmptyString(ScoreColumnKind); - ctx.SaveNonEmptyString(RowMapper.Schema.GetColumnName(ScoreColumnIndex)); + ctx.SaveNonEmptyString(RowMapper.OutputSchema.GetColumnName(ScoreColumnIndex)); } protected override ColumnType GetColumnTypeCore(int iinfo) @@ -277,7 +277,7 @@ public override Func GetActiveMapperColumns(bool[] active) protected readonly BindingsImpl Bindings; protected override BindingsBase GetBindings() => Bindings; - public override Schema Schema { get; } + public override Schema OutputSchema { get; } bool ICanSavePfa.CanSavePfa => (Bindable as ICanSavePfa)?.CanSavePfa == true; @@ -298,15 +298,15 @@ protected PredictedLabelScorerBase(ScorerArgumentsBase 
args, IHostEnvironment en Host.CheckParam(rowMapper != null, nameof(mapper), "mapper should implement " + nameof(ISchemaBoundRowMapper)); int scoreColIndex; - if (!mapper.Schema.TryGetColumnIndex(scoreColName, out scoreColIndex)) + if (!mapper.OutputSchema.TryGetColumnIndex(scoreColName, out scoreColIndex)) throw Host.ExceptParam(nameof(scoreColName), "mapper does not contain a column '{0}'", scoreColName); - var scoreType = mapper.Schema.GetColumnType(scoreColIndex); + var scoreType = mapper.OutputSchema.GetColumnType(scoreColIndex); Host.Check(outputTypeMatches(scoreType), "Unexpected predictor output type"); var predColType = getPredColType(scoreType, rowMapper); Bindings = BindingsImpl.Create(data.Schema, rowMapper, args.Suffix, scoreColKind, scoreColIndex, predColType); - Schema = Schema.Create(Bindings); + OutputSchema = Schema.Create(Bindings); } protected PredictedLabelScorerBase(IHostEnvironment env, PredictedLabelScorerBase transform, @@ -314,7 +314,7 @@ protected PredictedLabelScorerBase(IHostEnvironment env, PredictedLabelScorerBas : base(env, newSource, registrationName, transform.Bindable) { Bindings = transform.Bindings.ApplyToSchema(newSource.Schema, Bindable, env); - Schema = Schema.Create(Bindings); + OutputSchema = Schema.Create(Bindings); } protected PredictedLabelScorerBase(IHost host, ModelLoadContext ctx, IDataView input, @@ -327,7 +327,7 @@ protected PredictedLabelScorerBase(IHost host, ModelLoadContext ctx, IDataView i Host.AssertValue(getPredColType); Bindings = BindingsImpl.Create(ctx, input.Schema, host, Bindable, outputTypeMatches, getPredColType); - Schema = Schema.Create(Bindings); + OutputSchema = Schema.Create(Bindings); } protected override void SaveCore(ModelSaveContext ctx) @@ -408,7 +408,7 @@ protected override Delegate[] GetGetters(IRow output, Func predicate) Host.Assert(Bindings.DerivedColumnCount == 1); Host.AssertValue(output); Host.AssertValue(predicate); - Host.Assert(output.Schema == Bindings.RowMapper.Schema); + 
Host.Assert(output.Schema == Bindings.RowMapper.OutputSchema); Host.Assert(Bindings.InfoCount == output.Schema.ColumnCount + 1); var getters = new Delegate[Bindings.InfoCount]; diff --git a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs index bcd12302a5..8cb39d2f82 100644 --- a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs @@ -232,14 +232,14 @@ public RowCursor(IChannelProvider provider, RowToRowScorerBase parent, IRowCurso Ch.AssertValue(predicateMapper); _bindings = parent.GetBindings(); - Schema = parent.Schema; + Schema = parent.OutputSchema; Ch.Assert(active.Length == _bindings.ColumnCount); _active = active; var output = _bindings.RowMapper.GetRow(input, predicateMapper, out _disposer); try { - Ch.Assert(output.Schema == _bindings.RowMapper.Schema); + Ch.Assert(output.Schema == _bindings.RowMapper.OutputSchema); _getters = parent.GetGetters(output, iinfo => active[_bindings.MapIinfoToCol(iinfo)]); } catch (Exception) @@ -337,7 +337,7 @@ private static string[] GetOutputNames(ISchemaBoundMapper mapper, string suffix, Contracts.AssertValueOrNull(suffix); Contracts.AssertValue(namesDerived); - var schema = mapper.Schema; + var schema = mapper.OutputSchema; int count = namesDerived.Length + schema.ColumnCount; var res = new string[count]; int dst = 0; @@ -400,7 +400,7 @@ protected void SaveBase(ModelSaveContext ctx) protected override ColumnType GetColumnTypeCore(int iinfo) { Contracts.Assert(DerivedColumnCount <= iinfo && iinfo < InfoCount); - return Mapper.Schema.GetColumnType(iinfo - DerivedColumnCount); + return Mapper.OutputSchema.GetColumnType(iinfo - DerivedColumnCount); } protected override IEnumerable> GetMetadataTypesCore(int iinfo) @@ -410,7 +410,7 @@ protected override IEnumerable> GetMetadataType yield return MetadataUtils.ScoreColumnSetIdType.GetPair(MetadataUtils.Kinds.ScoreColumnSetId); if (iinfo < DerivedColumnCount) yield 
break; - foreach (var pair in Mapper.Schema.GetMetadataTypes(iinfo - DerivedColumnCount)) + foreach (var pair in Mapper.OutputSchema.GetMetadataTypes(iinfo - DerivedColumnCount)) yield return pair; } @@ -421,7 +421,7 @@ protected override ColumnType GetMetadataTypeCore(string kind, int iinfo) return MetadataUtils.ScoreColumnSetIdType; if (iinfo < DerivedColumnCount) return null; - return Mapper.Schema.GetMetadataTypeOrNull(kind, iinfo - DerivedColumnCount); + return Mapper.OutputSchema.GetMetadataTypeOrNull(kind, iinfo - DerivedColumnCount); } protected override void GetMetadataCore(string kind, int iinfo, ref TValue value) @@ -435,7 +435,7 @@ protected override void GetMetadataCore(string kind, int iinfo, ref TVal default: if (iinfo < DerivedColumnCount) throw MetadataUtils.ExceptGetMetadata(); - Mapper.Schema.GetMetadata(kind, iinfo - DerivedColumnCount, ref value); + Mapper.OutputSchema.GetMetadata(kind, iinfo - DerivedColumnCount, ref value); break; } } @@ -452,8 +452,8 @@ public virtual Func GetActiveMapperColumns(bool[] active) return col => { - Contracts.Assert(0 <= col && col < Mapper.Schema.ColumnCount); - return 0 <= col && col < Mapper.Schema.ColumnCount && + Contracts.Assert(0 <= col && col < Mapper.OutputSchema.ColumnCount); + return 0 <= col && col < Mapper.OutputSchema.ColumnCount && active[MapIinfoToCol(col + DerivedColumnCount)]; }; } diff --git a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs index a2fdf96af4..4d44d07543 100644 --- a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs +++ b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs @@ -191,7 +191,7 @@ protected sealed class SingleValueRowMapper : ISchemaBoundRowMapper private readonly SchemaBindablePredictorWrapperBase _parent; public RoleMappedSchema InputRoleMappedSchema { get; } - public Schema Schema { get; } + public Schema OutputSchema { get; } public ISchemaBindableMapper 
Bindable => _parent; public SingleValueRowMapper(RoleMappedSchema schema, SchemaBindablePredictorWrapperBase parent, Schema outputSchema) @@ -203,12 +203,12 @@ public SingleValueRowMapper(RoleMappedSchema schema, SchemaBindablePredictorWrap _parent = parent; InputRoleMappedSchema = schema; - Schema = outputSchema; + OutputSchema = outputSchema; } public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => col == InputRoleMappedSchema.Feature.Index; @@ -232,7 +232,7 @@ public IRow GetRow(IRow input, Func predicate, out Action disposer) if (predicate(0)) getters[0] = _parent.GetPredictionGetter(input, InputRoleMappedSchema.Feature.Index); disposer = null; - return new SimpleRow(Schema, input, getters); + return new SimpleRow(OutputSchema, input, getters); } } } @@ -472,7 +472,8 @@ private sealed class CalibratedRowMapper : ISchemaBoundRowMapper public RoleMappedSchema InputRoleMappedSchema { get; } public Schema InputSchema => InputRoleMappedSchema.Schema; - public Schema Schema { get; } + public Schema OutputSchema { get; } + public ISchemaBindableMapper Bindable => _parent; public CalibratedRowMapper(RoleMappedSchema schema, SchemaBindableBinaryPredictorWrapper parent) @@ -484,7 +485,7 @@ public CalibratedRowMapper(RoleMappedSchema schema, SchemaBindableBinaryPredicto _parent = parent; InputRoleMappedSchema = schema; - Schema = Schema.Create(new BinaryClassifierSchema()); + OutputSchema = Schema.Create(new BinaryClassifierSchema()); if (schema.Feature != null) { @@ -496,7 +497,7 @@ public CalibratedRowMapper(RoleMappedSchema schema, SchemaBindableBinaryPredicto public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i) && InputRoleMappedSchema.Feature != null) return col => col == InputRoleMappedSchema.Feature.Index; @@ -568,10 +569,10 @@ 
private static void EnsureCachedResultValueMapper(ValueMapper, Fl public IRow GetRow(IRow input, Func predicate, out Action disposer) { Contracts.AssertValue(input); - var active = Utils.BuildArray(Schema.ColumnCount, predicate); + var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); var getters = CreateGetters(input, active); disposer = null; - return new SimpleRow(Schema, input, getters); + return new SimpleRow(OutputSchema, input, getters); } } } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index 486e921f54..afbbbe59b8 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -448,7 +448,7 @@ public Schema GetOutputSchema(Schema inputSchema) throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", string.Join(",", invalidColumns)); } - return new Mapper(this, inputSchema).Schema; + return new Mapper(this, inputSchema).OutputSchema; } public IRowToRowMapper GetRowToRowMapper(Schema inputSchema) @@ -485,7 +485,7 @@ private sealed class Mapper public ISchema InputSchema => _inputSchema; - public Schema Schema { get; } + public Schema OutputSchema { get; } public Mapper(ColumnSelectingTransformer transform, Schema inputSchema) { @@ -496,7 +496,7 @@ public Mapper(ColumnSelectingTransformer transform, Schema inputSchema) transform.KeepColumns, transform.KeepHidden, _inputSchema); - Schema = GenerateOutputSchema(_outputToInputMap, _inputSchema); + OutputSchema = GenerateOutputSchema(_outputToInputMap, _inputSchema); } public int GetInputIndex(int outputIndex) @@ -593,7 +593,7 @@ public Row(IRow input, Mapper mapper) public long Batch => _input.Batch; - Schema ISchematized.Schema => _mapper.Schema; + Schema IRow.Schema => _mapper.OutputSchema; public ValueGetter GetGetter(int col) { @@ -625,9 +625,11 @@ public SelectColumnsDataTransform(IHostEnvironment env, ColumnSelectingTransform public IDataView Source { 
get; } - Schema IRowToRowMapper.InputSchema => Source.Schema; + public Schema InputSchema => Source.Schema; - Schema ISchematized.Schema => _mapper.Schema; + Schema IDataView.Schema => OutputSchema; + + public Schema OutputSchema => _mapper.OutputSchema; public long? GetRowCount() => Source.GetRowCount(); @@ -641,7 +643,7 @@ public IRowCursor GetRowCursor(Func needCol, Random rand = null) var inputRowCursor = Source.GetRowCursor(inputPred, rand); // Build the active state for the output - var active = Utils.BuildArray(_mapper.Schema.ColumnCount, needCol); + var active = Utils.BuildArray(_mapper.OutputSchema.ColumnCount, needCol); return new RowCursor(_host, _mapper, inputRowCursor, active); } @@ -655,7 +657,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun var inputs = Source.GetRowCursorSet(out consolidator, inputPred, n, rand); // Build out the acitve state for the output - var active = Utils.BuildArray(_mapper.Schema.ColumnCount, needCol); + var active = Utils.BuildArray(_mapper.OutputSchema.ColumnCount, needCol); _host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. 
@@ -672,7 +674,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun public Func GetDependencies(Func activeOutput) { var active = new bool[_mapper.InputSchema.ColumnCount]; - var columnCount = _mapper.Schema.ColumnCount; + var columnCount = _mapper.OutputSchema.ColumnCount; for (int colIdx = 0; colIdx < columnCount; ++colIdx) { if (activeOutput(colIdx)) @@ -705,7 +707,7 @@ public RowCursor(IChannelProvider provider, Mapper mapper, IRowCursor input, boo _active = active; } - Schema ISchematized.Schema => _mapper.Schema; + public Schema Schema => _mapper.OutputSchema; public ValueGetter GetGetter(int col) { diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index 0a7a95370a..72231d5eb9 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -318,7 +318,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(ctx); } - public override Schema Schema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.AsSchema; public override bool CanShuffle { get { return false; } } diff --git a/src/Microsoft.ML.Data/Transforms/NAFilter.cs b/src/Microsoft.ML.Data/Transforms/NAFilter.cs index 25f6242064..36c9f43368 100644 --- a/src/Microsoft.ML.Data/Transforms/NAFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/NAFilter.cs @@ -375,7 +375,7 @@ public override Delegate GetGetter() private readonly Value[] _values; public RowCursor(NAFilter parent, IRowCursor input, bool[] active) - : base(parent.Host, input, parent.Schema, active) + : base(parent.Host, input, parent.OutputSchema, active) { _parent = parent; _values = new Value[_parent._infos.Length]; diff --git a/src/Microsoft.ML.Data/Transforms/NopTransform.cs b/src/Microsoft.ML.Data/Transforms/NopTransform.cs index d5ea3bdc8e..0d0a619c1c 100644 --- a/src/Microsoft.ML.Data/Transforms/NopTransform.cs +++ 
b/src/Microsoft.ML.Data/Transforms/NopTransform.cs @@ -102,7 +102,16 @@ public bool CanShuffle get { return Source.CanShuffle; } } - public Schema Schema => Source.Schema; + /// + /// Explicit implementation prevents Schema from being accessed from derived classes. + /// It's our first step to separate data produced by transform from transform. + /// + Schema IDataView.Schema => OutputSchema; + + /// + /// Shape information of the produced output. Note that the input and the output of this transform (and their types) are identical. + /// + public Schema OutputSchema => Source.Schema; public long? GetRowCount() { diff --git a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs index 44537b0647..38eea76459 100644 --- a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs @@ -91,7 +91,9 @@ public Func GetDependencies(Func predicate) protected readonly string ScoreCol; protected readonly string GroupCol; - public Schema Schema => GetBindings().AsSchema; + Schema IDataView.Schema => OutputSchema; + + public Schema OutputSchema => GetBindings().AsSchema; public IDataView Source { get; } @@ -235,7 +237,7 @@ private sealed class RowCursor : RootCursorBase, IRowCursor private readonly ValueGetter _labelGetter; private readonly ValueGetter _scoreGetter; - public Schema Schema => _parent.Schema; + public Schema Schema => _parent.OutputSchema; public override long Batch { get { return 0; } } diff --git a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs index 54a1e42161..f02a05dfd9 100644 --- a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs @@ -269,7 +269,7 @@ private abstract class RowCursorBase : LinkedRowFilterCursorBase private readonly Double _max; protected RowCursorBase(RangeFilter parent, IRowCursor input, bool[] active) - : 
base(parent.Host, input, parent.Schema, active) + : base(parent.Host, input, parent.OutputSchema, active) { Parent = parent; _min = Parent._min; diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 9276f5c08e..121d9c43fb 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -241,7 +241,7 @@ public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRo public override bool CanShuffle { get { return true; } } - public override Schema Schema { get { return _subsetInput.Schema; } } + public override Schema OutputSchema { get { return _subsetInput.Schema; } } protected override bool? ShouldUseParallelCursors(Func predicate) { diff --git a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs index c684dc139c..7f0399fc6d 100644 --- a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs @@ -194,8 +194,8 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random Host.AssertValueOrNull(rand); var input = Source.GetRowCursor(predicate); - var activeColumns = Utils.BuildArray(Schema.ColumnCount, predicate); - return new RowCursor(Host, input, Schema, activeColumns, _skip, _take); + var activeColumns = Utils.BuildArray(OutputSchema.ColumnCount, predicate); + return new RowCursor(Host, input, OutputSchema, activeColumns, _skip, _take); } public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, diff --git a/src/Microsoft.ML.Data/Transforms/TransformBase.cs b/src/Microsoft.ML.Data/Transforms/TransformBase.cs index e517710543..cbf60b6967 100644 --- a/src/Microsoft.ML.Data/Transforms/TransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/TransformBase.cs @@ -49,7 +49,16 @@ protected TransformBase(IHost host, IDataView input) public virtual bool 
CanShuffle { get { return Source.CanShuffle; } } - public abstract Schema Schema { get; } + /// + /// The field is the type information of the produced IDataView of this transformer. + /// + /// Explicit interface implementation hides in all derived classes. The reason + /// is that a transformer should know the type it will produce but shouldn't contain the type of the data it produces. + /// Thus, this field will be eventually removed while legacy code can still access for now. + /// + Schema IDataView.Schema => OutputSchema; + + public abstract Schema OutputSchema { get; } public IRowCursor GetRowCursor(Func predicate, Random rand = null) { @@ -127,7 +136,7 @@ private protected FilterBase(IHost host, IDataView input) public override long? GetRowCount() => null; - public sealed override Schema Schema => Source.Schema; + public override Schema OutputSchema => Source.Schema; bool ICanSavePfa.CanSavePfa => true; @@ -157,7 +166,7 @@ public Func GetDependencies(Func predicate) protected abstract Func GetDependenciesCore(Func predicate); - Schema IRowToRowMapper.InputSchema => Source.Schema; + public Schema InputSchema => Source.Schema; public IRow GetRow(IRow input, Func active, out Action disposer) { @@ -171,7 +180,7 @@ public IRow GetRow(IRow input, Func active, out Action disposer) Action disp; var getters = CreateGetters(input, active, out disp); disposer += disp; - return new Row(input, this, Schema, getters); + return new Row(input, this, OutputSchema, getters); } } @@ -601,7 +610,7 @@ void ISaveAsOnnx.SaveAsOnnx(OnnxContext ctx) } if (!SaveAsOnnxCore(ctx, iinfo, info, ctx.GetVariableName(sourceColumnName), - ctx.AddIntermediateVariable(Schema[_bindings.MapIinfoToCol(iinfo)].Type, info.Name))) + ctx.AddIntermediateVariable(OutputSchema[_bindings.MapIinfoToCol(iinfo)].Type, info.Name))) { ctx.RemoveColumn(info.Name, true); } @@ -635,7 +644,7 @@ private protected virtual JToken SaveAsPfaCore(BoundPfaContext ctx, int iinfo, C private protected virtual bool 
SaveAsOnnxCore(OnnxContext ctx, int iinfo, ColInfo info, string srcVariableName, string dstVariableName) => false; - public sealed override Schema Schema => _bindings.AsSchema; + public sealed override Schema OutputSchema => _bindings.AsSchema; public ITransposeSchema TransposeSchema => _bindings; @@ -758,7 +767,7 @@ protected Exception ExceptGetSlotCursor(int col) { Host.Assert(0 <= col && col < _bindings.ColumnCount); return Host.ExceptParam(nameof(col), "Bad call to GetSlotCursor on untransposable column '{0}'", - Schema[col].Name); + OutputSchema[col].Name); } public ISlotCursor GetSlotCursor(int col) diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index 385d8dc7b5..067a4c37bb 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -43,13 +43,13 @@ private abstract class BoundBase : ISchemaBoundRowMapper public ISchemaBindableMapper Bindable => Parent; public RoleMappedSchema InputRoleMappedSchema { get; } public Schema InputSchema => InputRoleMappedSchema.Schema; - public Schema Schema { get; } + public Schema OutputSchema { get; } public BoundBase(SchemaBindablePipelineEnsembleBase parent, RoleMappedSchema schema) { Parent = parent; InputRoleMappedSchema = schema; - Schema = Schema.Create(new ScoreMapperSchema(Parent.ScoreType, Parent._scoreColumnKind)); + OutputSchema = Schema.Create(new ScoreMapperSchema(Parent.ScoreType, Parent._scoreColumnKind)); _inputColIndices = new HashSet(); for (int i = 0; i < Parent._inputCols.Length; i++) { @@ -75,7 +75,7 @@ public BoundBase(SchemaBindablePipelineEnsembleBase parent, RoleMappedSchema sch throw Parent.Host.Except("Predictor {0} is not a row to row mapper", i); // Make sure there is a score column, and remember its index. 
- if (!Mappers[i].Schema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out ScoreCols[i])) + if (!Mappers[i].OutputSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out ScoreCols[i])) throw Parent.Host.Except("Predictor {0} does not contain a score column", i); // Get the pipeline. @@ -90,7 +90,7 @@ public BoundBase(SchemaBindablePipelineEnsembleBase parent, RoleMappedSchema sch public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => _inputColIndices.Contains(col); @@ -105,7 +105,7 @@ public Func GetDependencies(Func predicate) public IRow GetRow(IRow input, Func predicate, out Action disposer) { - return new SimpleRow(Schema, input, new[] { CreateScoreGetter(input, predicate, out disposer) }); + return new SimpleRow(OutputSchema, input, new[] { CreateScoreGetter(input, predicate, out disposer) }); } public abstract Delegate CreateScoreGetter(IRow input, Func mapperPredicate, out Action disposer); diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index e8b57dda6e..5f1e46b6dc 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -174,8 +174,8 @@ private void IsNormalized(int iinfo, ref bool dst) public RoleMappedSchema InputRoleMappedSchema { get; } - public Schema Schema { get; } public Schema InputSchema => InputRoleMappedSchema.Schema; + public Schema OutputSchema { get; } public ISchemaBindableMapper Bindable => _owner; @@ -205,7 +205,7 @@ public BoundMapper(IExceptionContext ectx, TreeEnsembleFeaturizerBindableMapper // which means that #internal = #leaf - 1. // Therefore, the number of internal nodes in the ensemble is #leaf - #trees. 
var pathIdType = new VectorType(NumberType.Float, _owner._totalLeafCount - _owner._ensemble.TrainedEnsemble.NumTrees); - Schema = Schema.Create(new SchemaImpl(ectx, owner, treeValueType, leafIdType, pathIdType)); + OutputSchema = Schema.Create(new SchemaImpl(ectx, owner, treeValueType, leafIdType, pathIdType)); } public IRow GetRow(IRow input, Func predicate, out Action disposer) @@ -213,7 +213,7 @@ public IRow GetRow(IRow input, Func predicate, out Action disposer) _ectx.CheckValue(input, nameof(input)); _ectx.CheckValue(predicate, nameof(predicate)); disposer = null; - return new SimpleRow(Schema, input, CreateGetters(input, predicate)); + return new SimpleRow(OutputSchema, input, CreateGetters(input, predicate)); } private Delegate[] CreateGetters(IRow input, Func predicate) @@ -397,7 +397,7 @@ private void EnsureCachedPosition() public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => col == InputRoleMappedSchema.Feature.Index; diff --git a/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs b/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs index 5468714696..54b9148e86 100644 --- a/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs @@ -340,7 +340,7 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou var ex = _exes[iinfo]; bool needScale = ex.Offset != 0 || ex.Scale != 1; disposer = null; - var sourceType = Schema.GetColumnType(Infos[iinfo].Source); + var sourceType = InputSchema.GetColumnType(Infos[iinfo].Source); if (sourceType.ItemType == NumberType.R4 || sourceType.ItemType == NumberType.R8) return GetterFromType(input, iinfo, ex, needScale); else diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 71219ee19e..465989a265 100644 --- 
a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -278,8 +278,8 @@ private sealed class RowMapper : ISchemaBoundRowMapper private readonly string _matrixColumnIndexColumnName; private readonly string _matrixRowIndexColumnName; private IHostEnvironment _env; - public Schema Schema { get; } public Schema InputSchema => InputRoleMappedSchema.Schema; + public Schema OutputSchema { get; } public RoleMappedSchema InputRoleMappedSchema { get; } @@ -307,12 +307,12 @@ public RowMapper(IHostEnvironment env, MatrixFactorizationPredictor parent, Role CheckInputSchema(schema.Schema, _matrixColumnIndexColumnIndex, _matrixRowIndexCololumnIndex); InputRoleMappedSchema = schema; - Schema = outputSchema; + OutputSchema = outputSchema; } public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => (col == _matrixColumnIndexColumnIndex || col == _matrixRowIndexCololumnIndex); @@ -342,7 +342,7 @@ private void CheckInputSchema(ISchema schema, int matrixColumnIndexCol, int matr private Delegate[] CreateGetter(IRow input, bool[] active) { _env.CheckValue(input, nameof(input)); - _env.Assert(Utils.Size(active) == Schema.ColumnCount); + _env.Assert(Utils.Size(active) == OutputSchema.ColumnCount); var getters = new Delegate[1]; if (active[0]) @@ -360,10 +360,10 @@ private Delegate[] CreateGetter(IRow input, bool[] active) public IRow GetRow(IRow input, Func predicate, out Action disposer) { - var active = Utils.BuildArray(Schema.ColumnCount, predicate); + var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); var getters = CreateGetter(input, active); disposer = null; - return new SimpleRow(Schema, input, getters); + return new SimpleRow(OutputSchema, input, getters); } public ISchemaBindableMapper Bindable { get { return _parent; } } diff --git 
a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs index 451f22da80..11ce748cb2 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs @@ -60,7 +60,8 @@ internal sealed class FieldAwareFactorizationMachineScalarRowMapper : ISchemaBou public RoleMappedSchema InputRoleMappedSchema { get; } - public Schema Schema { get; } + public Schema OutputSchema { get; } + public Schema InputSchema => InputRoleMappedSchema.Schema; public ISchemaBindableMapper Bindable => _pred; @@ -85,7 +86,7 @@ public FieldAwareFactorizationMachineScalarRowMapper(IHostEnvironment env, RoleM var inputFeatureColumns = _columns.Select(c => new KeyValuePair(RoleMappedSchema.ColumnRole.Feature, c.Name)).ToList(); InputRoleMappedSchema = new RoleMappedSchema(schema.Schema, inputFeatureColumns); - Schema = outputSchema; + OutputSchema = outputSchema; _inputColumnIndexes = new List(); foreach (var kvp in inputFeatureColumns) @@ -130,12 +131,12 @@ public IRow GetRow(IRow input, Func predicate, out Action action) getters[1] = probGetter; } - return new SimpleRow(Schema, input, getters); + return new SimpleRow(OutputSchema, input, getters); } public Func GetDependencies(Func predicate) { - if (Enumerable.Range(0, Schema.ColumnCount).Any(predicate)) + if (Enumerable.Range(0, OutputSchema.ColumnCount).Any(predicate)) return index => _inputColumnIndexes.Any(c => c == index); else return index => false; diff --git a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs index 114aa0e059..4c1d1681f6 100644 --- a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs +++ b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs @@ -171,7 +171,7 @@ internal override void 
PredictionEngineCore(IHostEnvironment env, DataViewConstr { List rows = new List(); IRow outputRowLocal = outputRowLocal = GetStatefulRows(inputRow, mapper, col => true, rows, out disposer); - var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.Schema), ignoreMissingColumns, outputSchemaDefinition); + var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.OutputSchema), ignoreMissingColumns, outputSchemaDefinition); _pinger = CreatePinger(rows); outputRow = cursorable.GetRow(outputRowLocal); } @@ -208,7 +208,7 @@ private IRowToRowMapper GetRowToRowMapper(Schema inputSchema) else mappers[i] = transformers[i].GetRowToRowMapper(schema); - schema = mappers[i].Schema; + schema = mappers[i].OutputSchema; } return new CompositeRowToRowMapper(inputSchema, mappers); } diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs index dc47a4fa4a..c6f38380f4 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs @@ -358,7 +358,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random return new Cursor(this, srcCursor); } - public override Schema Schema => _transform.Schema; + public override Schema OutputSchema => _transform.Schema; public override long? 
GetRowCount() { @@ -381,11 +381,11 @@ private sealed class Cursor : SynchronizedCursorBase, IRowCursor public Cursor(SequentialTransformBase parent, IRowCursor input) : base(parent.Host, input) { - Ch.Assert(input.Schema.ColumnCount == parent.Schema.ColumnCount); + Ch.Assert(input.Schema.ColumnCount == parent.OutputSchema.ColumnCount); _parent = parent; } - public Schema Schema { get { return _parent.Schema; } } + public Schema Schema { get { return _parent.OutputSchema; } } public bool IsColumnActive(int col) { diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 7a0ed541d5..8b6d74426c 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -443,8 +443,6 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random return false; } - public override Schema Schema => _bindings.Schema; - public override long? GetRowCount() { return _transform.GetRowCount(); @@ -467,9 +465,12 @@ public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) } public Schema InputSchema => Source.Schema; + + public override Schema OutputSchema => _bindings.Schema; + public Func GetDependencies(Func predicate) { - for (int i = 0; i < Schema.ColumnCount; i++) + for (int i = 0; i < OutputSchema.ColumnCount; i++) { if (predicate(i)) return col => true; @@ -542,11 +543,11 @@ private sealed class Cursor : SynchronizedCursorBase, IRowCursor public Cursor(IHost host, SequentialDataTransform parent, IRowCursor input) : base(host, input) { - Ch.Assert(input.Schema.ColumnCount == parent.Schema.ColumnCount); + Ch.Assert(input.Schema.ColumnCount == parent.OutputSchema.ColumnCount); _parent = parent; } - public Schema Schema { get { return _parent.Schema; } } + public Schema Schema { get { return _parent.OutputSchema; } } public bool IsColumnActive(int col) { @@ -586,7 +587,8 @@ private static VersionInfo 
GetVersionInfo() loaderAssemblyName: typeof(TimeSeriesRowToRowMapperTransform).Assembly.FullName); } - public override Schema Schema => _bindings.Schema; + public override Schema OutputSchema => _bindings.Schema; + bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => _mapper is ICanSaveOnnx onnxMapper ? onnxMapper.CanSaveOnnx(ctx) : false; bool ICanSavePfa.CanSavePfa => _mapper is ICanSavePfa pfaMapper ? pfaMapper.CanSavePfa : false; @@ -753,13 +755,13 @@ public IRow GetRow(IRow input, Func active, out Action disposer) using (var ch = Host.Start("GetEntireRow")) { Action disp; - var activeArr = new bool[Schema.ColumnCount]; - for (int i = 0; i < Schema.ColumnCount; i++) + var activeArr = new bool[OutputSchema.ColumnCount]; + for (int i = 0; i < OutputSchema.ColumnCount; i++) activeArr[i] = active(i); var pred = GetActiveOutputColumns(activeArr); var getters = _mapper.CreateGetters(input, pred, out disp); disposer += disp; - return new StatefulRow(input, this, Schema, getters, + return new StatefulRow(input, this, OutputSchema, getters, _mapper.CreatePinger(input, pred, out disp)); } } diff --git a/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs b/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs index 199969726d..3f18093791 100644 --- a/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs @@ -99,7 +99,7 @@ protected SlidingWindowTransformBase(IHostEnvironment env, ModelLoadContext ctx, private TInput GetNaValue() { - var sch = Schema; + var sch = OutputSchema; int index; sch.TryGetColumnIndex(InputColumnName, out index); ColumnType col = sch.GetColumnType(index); diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index a41a59bf33..1de6dea27b 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -154,7 +154,7 @@ public override void Save(ModelSaveContext ctx) return 
null; } - public override Schema Schema => _groupSchema.AsSchema; + public override Schema OutputSchema => _groupSchema.AsSchema; protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) { @@ -554,7 +554,7 @@ public override void ReadValue(int position) public override long Batch { get { return 0; } } - public Schema Schema => _parent.Schema; + public Schema Schema => _parent.OutputSchema; public Cursor(GroupTransform parent, Func predicate) : base(parent.Host) diff --git a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs index 55bc3d18ee..d2b3fe483e 100644 --- a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs +++ b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs @@ -289,7 +289,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(Host, ctx); } - public override Schema Schema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.AsSchema; protected override bool? 
ShouldUseParallelCursors(Func predicate) { diff --git a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs index 9a64ddd511..e2fc0359ad 100644 --- a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs +++ b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs @@ -92,7 +92,7 @@ private static VersionInfo GetVersionInfo() private readonly Bindings _bindings; - public override Schema Schema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.AsSchema; public override bool CanShuffle { get { return Source.CanShuffle; } } diff --git a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs index c22fe779ec..a84c7102d0 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs @@ -616,7 +616,7 @@ private void AssertValid(uint[] ngram, int ngramLength, int lim, int icol) Host.Assert(icol >= 0); } - public override Schema Schema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.AsSchema; protected override bool? ShouldUseParallelCursors(Func predicate) { diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index ad786e1c8c..6c58376f84 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -163,7 +163,7 @@ public override void Save(ModelSaveContext ctx) return null; } - public override Schema Schema => _schemaImpl.AsSchema; + public override Schema OutputSchema => _schemaImpl.AsSchema; protected override bool? 
ShouldUseParallelCursors(Func predicate) { diff --git a/src/Native/MatrixFactorizationNative/libmf b/src/Native/MatrixFactorizationNative/libmf index 53a91e7e8c..f92a18161b 160000 --- a/src/Native/MatrixFactorizationNative/libmf +++ b/src/Native/MatrixFactorizationNative/libmf @@ -1 +1 @@ -Subproject commit 53a91e7e8c88463e97acfbbafb7134a6030860b3 +Subproject commit f92a18161b6824fda4c4ab698a69d299a836841a diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index c4e0a66166..027411f016 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -44,7 +44,7 @@ private void InitMap(T val, ColumnType type, int hashBits = 20) var info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: hashBits); var xf = new HashingTransformer(_env, new[] { info }); var mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out int outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out int outCol); var outRow = mapper.GetRow(inRow, c => c == outCol, out var _); if (type is VectorType) _vecGetter = outRow.GetGetter>(outCol); diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 77e0ece94c..e2bc679830 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -100,7 +100,7 @@ protected void TestEstimatorCore(IEstimator estimator, { var mapper = transformer.GetRowToRowMapper(data.Schema); Check(mapper.InputSchema == data.Schema, "InputSchemas were not identical to actual input schema"); - CheckSameSchemas(schema, mapper.Schema); + CheckSameSchemas(schema, mapper.OutputSchema); } else { diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index 4863c5dd46..417d9ed388 100644 --- 
a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -151,7 +151,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe var info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits); var xf = new HashingTransformer(Env, new[] { info }); var mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out int outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out int outCol); var outRow = mapper.GetRow(inRow, c => c == outCol, out var _); var getter = outRow.GetGetter(outCol); @@ -163,7 +163,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: true); xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); outRow = mapper.GetRow(inRow, c => c == outCol, out var _); getter = outRow.GetGetter(outCol); @@ -180,7 +180,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: false); xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); outRow = mapper.GetRow(inRow, c => c == outCol, out var _); var vecGetter = outRow.GetGetter>(outCol); @@ -195,7 +195,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: true); xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); outRow = 
mapper.GetRow(inRow, c => c == outCol, out var _); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); @@ -213,7 +213,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: false); xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); outRow = mapper.GetRow(inRow, c => c == outCol, out var _); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); @@ -226,7 +226,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: true); xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); - mapper.Schema.TryGetColumnIndex("Bar", out outCol); + mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); outRow = mapper.GetRow(inRow, c => c == outCol, out var _); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); From 7679a662987f6fb31de88bea57d7a1a6d344064d Mon Sep 17 00:00:00 2001 From: Gal Oshri Date: Sat, 1 Dec 2018 12:09:14 -0800 Subject: [PATCH 012/100] Add release notes for ML.NET 0.8 (#1785) * Add release notes for 0.8 --- docs/release-notes/0.8/dataPreview.gif | Bin 0 -> 194634 bytes docs/release-notes/0.8/release-0.8.md | 126 +++++++++++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 docs/release-notes/0.8/dataPreview.gif create mode 100644 docs/release-notes/0.8/release-0.8.md diff --git a/docs/release-notes/0.8/dataPreview.gif b/docs/release-notes/0.8/dataPreview.gif new file mode 100644 index 0000000000000000000000000000000000000000..553f4b45641778229b64363430272581310f13a7 GIT binary patch literal 194634 zcma&sWlSVY)F^1&t#Nm!VQ`1Rx?ym4cOTpt+ z!2@^^&=}zmiD6(-VPIfkU|{|mm|)-qV35IZNID1zG8kCAWF$;NglJ!pVZXt{e?!Ik 
zhR)}Qspg4gY>x9y06ppxax@Swi4QJYAFj|9w#ovbP6w^a3NYDSc6Hu!a$R-v9S#WS z4fg4dvTCnYZC;ltU*#*9Wy~34NEx6@=wL}L5zdSP=lUvVdFcM|c1j3INJ-7e$jr{p z{`s?@s;04TY-Dg~;y**f|6_P~Vq|oBdS-oUW^;OW>;H0b@$mn4`EdEaU0*-k-2891 z4>z|DxBtuC-TnV^_y35S`}@m>`?IHq!(R`lzaQ>?KR*2WKRv!YK0ZG^y}Z2q{`Kqc z|IP3Jmi~7;F3W2!&olp=Z#KBk0FI^kd`WWA62R;`Iaa`=jIjz53!K|KKBa^DTDiHD>%Jef(GR z;=|tQ#mU+E|2RGS&+*CG{?X}w_Kr?=Pxkk&HV!XVkB;U}kB9Fr+kfBHzdu%fJe5J8 zOQ28H&|h89-viJ;5a^#-=%0C-f9T2}O$}*DRS-7^C;b21a+<#g2)Hn0Fu?!jF#q#R zU{Ihi3#jCB4b;NgWAKUOv&92(Uar=g9ro&Wgn!@dw?qB0 zRldDZh&(M;o6Pm~zWJUUP5o5q@B4WB^K`X0*Wdr|9}E)nelP-|?S2RfW5s?b2LHKf z$S6U$p>&gFkt|iSAQ8Yr`Ahp`R5b?NVKjZZ?O_aKZ1iC)Fu%e)jx*n9*PnMa(>Px6 zwBjgH^w;@Ok^~aVak31d-EoQ>W99L8MgEK9R8=s`Nt&kigk?B_Qb#h-e|;f@f`svz z+zge3aYE|mIqg*0VKY36;t|4fmg})mD|GhxG%rn?)TSiUy6U1V*X#14+!w%_oQ^s{i&Es(ayTDpNo&Da0cBOPzo_v@ z_n1R)@aa-OMbuqdt?T7S7=hgnIGLh!g}@>UF(Tq3ldYvKtB^YfG|zvwr#Emi?#a9d zw+M9Z53~n%xdaOkdMY?ebKoaq*4&ReWK$U<*E(s^X@8#L#~*Ja6hg0US=M*pp7ibv zmJk$*>8?}S>8f7B18`0ycb@A*c*qD>cm`)48Ec<)o@v{+fqypv^6nT3B%IvvbUbn@ z26xMzau4x?E>Z#ag=!GoIeFJQYNOmnW6GuQPD2QILw)ux62fyP_Z7l(hz45= zIIUsheO_!?%hy^mKr@e`cvW^vS)mZ`nqLzJB&9x(kGMy!qyGrx&$V0XY8xSTm}VQ} z=}eg3_3lp-;Qr)s*G}o$up@X(MFD+0q+SU7*39W1?nJgpk1uzv{(&cTiQ0*kwvfNH zHx0IdTL`iAL_OH(#9HU^V&|mW@!Ow6vh7CGcVX@ji5J?Rj}luh#JVm3gM-@%FMtqS zG06jEO_VksV3OJF6bdDZX|UUPCB-pwa4wozFeNDrIn<3i3U*pDk@6Bpg?xkT3|KDH z)p(+*sWeG0mttzP){|4UBMHfKA_`M7bmcTLvt^21+C0Q&WSkjewLEGIYOq zi^T552HdnfBu)tzWjMKyW?ooU*W&vU2b~9CYjI z@MJBnKt6osz;Qvs4??)fL8~d(O}DJ(?^@vBEZ@@-$xdDswS^)O4g)dtPMQZ^+JDdt z|M*o+`1g={Dx8-?udn2g)AcVLnlPs=>cSpb2@Y(RQImmZcUDJDCyu9@Mv^#ufmA%B z@ca#cq&%+Z{8^`6ftHizQ}6&od(5b}8BgpPl$VH#aX=|@x{d~{kO!sr84M*|X6VR4 zq}hi-5lkyqD4a(@MYoXBWtoeZmIh<6KQbyzUXUZ{O6SflL<%0dLuLj+d{I9kM-3c@j zmjPaWM>+KkUCSkut$w~!=GSO;!C%0ODX5b43$vS;=J)q0KOlRp%Mw^($;DKT(a1(A zu<~l^+%%;vR_~{q@`fCcdX~7o#x_3owjK4lpsm&S%QXcTSUi&IX@WLwubS^4ZsZHg zmMzYea*I}wpaqN&c%*SHqCfFAa(K{ksfJF8tOdok53>Eww>Z2+xp*XHH)W^@L@IN= zczJ;PZS-~U>>>?O(BY;9kBLwR4zWucBQ>*IkpKLxUpu{0F%tFY)x1?gEe#&K21wFV 
zE#BI;7}Hk+D=d0Z_(8Sg5r_8Mi5eD#zGo~?tDNSJ;7B}sE@2Q3*3@IVwZQqYD zdHG>HNkZA(5JE9SbfuI2Ug|+u=^F+usmw^+^x}wxG9K7Uj~iCF8H?uvr>knXXnwDU zQxi^nQI1Ye3E8qSW<$URx*$ z%~o1l7I@Jtop5Duz3+fUx@eS|Bbs2;u%aeI=`K81{8G4Z^E^nIek$jq%ZgzqWGh8_ z?H6LYo~w(5;P*?lnAP%rKL|(Ypkc{x0BMq^@}+ap2vcy?<>|{NIT4ezgG2rRM>{1? zU!Bk&bQ{GC52X$TYlS0^<8|UKhfKqfZ?6F$$ORP|rwrx#dZapdjA+uoLW8Tl#e zz`kuu96w?Q%xZQNLYEX>Bk8+nXoFfNb^4OaCg89*nPh}G66~~%*d4~Zzsx!`J;k^D z#wd^cB)tn!=O2eJX-d=(A|+W&TWwG#tK@59OUEgQOdOckmJ2P-d@&~TTvt0bU)k{S zny|FTe`_8%d#tx=(4Q49U?;5 z0+Zd_^%=Fw^htT z29u{zy!8R$pFH^$Q%~0)dNcE`J&^y_jX^#Dpom%Bz!3$zQ3Z|tC_K|g>1k3qHIelt#!WfiuR7E*9#UdfL zAmwsaSoL}O|ihu*sshNAT*G{UykhW{Id(fwa zAWT3s6iLopnIPH$AK1Q%<2V+C!)dnJ%(f#Pox&Vsup*Wa?MWmUBJ|6=paUlYf)t^H zhG>elRt8`h_sQqeD>>FDLKsdM65(p?YM7ZQpDJl% z=Dr@qcV8sx{3ntekx6hF>4gEC2~kGWBdQU}lPpAQ1|QbZU8A!|#tuISZNXIQIVj?f zCniA4$VL`f2%IPgOXH4>Zwjj(k!bA^EQF{m#0PrN35tr=K)GP!Dor7e5ko}zQa}uK z7UEA#rkAl^k><6ts@K#jkhMx=ij!?qxMd7$mh(0CiWgwE_T$tYE0SAk;tt}GFOdDZ z^4-@E92W#H^bDgPz>tQS;yT(MKg*Y}VhiU*7p~3h+N$G*wrt?vA|#L+9?ht-qNOso z;wMbUF^eC8PQ+`!>SDp-zu77@U6B}x%6k2W=vG&1$U^N&F6qGvnLrhm+!ThDGt0wP z=C$-2%wIG07J>9;P!?W@7sT`a8Ae$qn!_yGv^;14EKs~5AOQjrg^AhOWK(T;4ROcl z2{`T0c^9JQ9PgVoqhhswF}rOK;_Qs&&ay~&HWzKtmTCz=O9|EIXP-Q@P>1@;Cl<%; z*s-UD$?JcK>(W%{FY)b5^G$^RF`SmSu$pg%pYO=&2d>OFwv$g97Cwzmu+h%!ID>KA z7akG_;4My{h7+cZG4kNyd4uQpIP%9vQ*O@iyLd*@wN)0UOPpNu6Jp`m^vtB*WQqBL zYuRD9BY?+5ipL@!OUQ+O)`|nrfL)Mgq1hw|(k?cH%@icHiSV_xyeJSMEeWG#Ein&- zc}f&)4>S(L;}S0RrY+H>Nl~pLaMLRm5Ka)1oFe_tO zC{fRzkrC(A85GVx!S8@j448z}Q$EBW23`(CRDNNa|KYaseHqkc8x*)^{;)tO+VS$U*d zb7G8;GIq&YHnUo`1DNk!wcDGuyRWr;gEeGkl}lA6BTIOjXmyueb=RA9`>b`ipz6yX zfb}Zemss52vA7qPb?=+?(AWA%)OtGl`t0oF-m_W`6%> zuEr*7i7`9cArj<`kVBECzOHQkP zcPmkKbM<0N>tw6BNSmcWTlQh=zabiBhc?HpHs`lC=a4q@p=QiMDPgmA!H{;B?)HGK z_G{Mmaf!OkvB2=F_DKJZXoZSkwhlC!7Mg<&D%j2#kn^E8nW&tk#2&kF4?X&49Ql3!fw97?v}Ui0kR%r zvu=B`HcXmM@{k_V!k(e6p4qn^pk&WNSI;MZ(RrwU?|M!zN^?u>-!4Q40IdVwfdbx9 zb?>Qv-#I$&qCeloR^RPgUshA!9ZLVRLI1Dr@@<78lZJj1&R!CLy#VY0AI-pTg8`)6 
zuE)6Uub&|2mw2LE?Z8T#Rn~MZA>^JgVG5TG1h^?!mpOh@pKr z7#JN4Sa=xIw7|n5PEi8Y23R&lB9?$*l0t0q`yraa;eIbk?sy{6cv#|sVJcV%W6BUX z7gn+dPaXr0(-cCykEA~c0h1&DLLwO8gi{V6;G6zI@QhAZFrt1vj0r#_qaGzQ8v;xP z{Nsd^+#b<(B#?f`RqMg=R>U`(Hszllg0d})Sd-%mVT@x84%_BJ$nPPpYU7+KV`zyK06S33d;Lk#!tz$Q@@7nS{LK1?J*~_Kb*zOw2e= z_?XQA52mNLaW?}d*9~#^dWH`iaVvTNXYsffJ-E#Q1g?%#TZXe93$y5U(>9{RjFNLQ zYE#R(xR9Eu%dM%v*C`9qY5u_;60R9cu31dLm~9QVy$jx4&Fo`52HpqhsyL#VJIXi zPZt5le+|IevVW#aUL6s|5}HPgby^8dfcVy~PvqiA09Udwaovp8UPM=1ehj%LuY1h! zi{`F!!fZMOZa8p`gd|`D&#cH{Y!SoE5pu06P4ip#uA@C{bYm=KDs2Hp*TW2lh2yt( z1~v?xR>%uy3<5XnpfelkPTRjum#39h)A4o+dMDx(cYu!TUlZ2W7S}NYS7qPjgLif~ z$g$6F)(aeWhuAmKxz@#NwicZhY(=LeFt!RXw@;MTgRFL50@w0-0c|rYo`%EzO1r3L zTlayx?s>ZzN_$N^!=Yj02g&=+6yxVcBWM6bSRAZ#r>Vi@oen1e6V}>O*{D&iT=UKj zG0Yf%nh-n8$TujhPamJ)}#3*1`5-P_x&Cdc{Co9<8aa{2Hm^Is3CJ-AjA)k>p zo_73LCgPZ^p83Iv0~YPYAqzZkYgmK9K~}$??0%6e0gjqcD$%(Ni;}O|7_EW&08Utl zbh8jo_6g{w=!hrALU!%CKE^VT{n)SIltXn*Qkh5bde|TlLksIdjN?MU@Qf&bTDNXh zsBWxa=j1c_$s)y!^6c?o?|csBgp&A$Vx8XC*&&`8oK*}A4r;;bgiZ1W*s!|GB=O5S zBistZVNb)W*!ZhW&P%KhIalM)akEG1wR4rV7jJ>c(YqIoP8ZBHYcmPQowye`wbx&< zj@0`09*rPuxffnWm-f7e6v=e->(&d4tg**)!rXnW?{#irZ;;Q+3JtjKka#MpOV1t zuwegUCC75YK5=xqOiy^U#&{_ae}0)Bt)G3yXk1`anZ@isi+MjM`FMC%oKTsA9W?$W z^KXgb(?wXo?CR`H=A#Q;xO1zYUjOt$3iyBtx!yw&7!OW}F2*F`HsgTtFd$YlheTpCNo{a)6p3Rw z&|*Q72a9DA>6r3!bpf>UVJHOY@zD!(-~@Ct=j-wBCrY`9bVD#W%H=b;=vI_1avn;c zLUiO;4|&zYrD$Zl{E}D<+oc-mUbHm%CT86VbEU_365cA6L@Sj=nM!VJa4sEJ)rT-& zjZ6o7y>p(y5}Ro=)o)5j@SxLLPXdW>IB^6=lL<$dq_OVbDvMfs26QOW^wX165S>hs zOosl8{pz`{YvP~M>1*x><;w*jlwY?oUA|AhK60KU+tJCKp{%K;k1^4BpU0*igd$7w za-g~Sf1Y((&`~u!#VI~bMQ%u=jb;)HHT5yovTDY*khv3Y6!v9A|Fo6-n_Gy0G8Q3p zc|IBuek_H=j?hb1-!N9SWe1Ewak!vC_^KY$5it~|T+uc@d|ivV9F`=4N^&gEn%oqI z10lScauzA(*Znkz_oVMKdQo?X%1B(Ex&Tgnyu@NB=EeaIH487!fdvT3QG`ZIrktCp z@SZNZd&4c_djU5*vxw9goh>yBmdOk2!4X}#NQ>U?YGnn3fk)~VwIxD%hPkEUB?=?<0)PCi}ko>mvdb4 zS_Xvl)ka(UCVJ*}OAOW8wki8v&FKG z&66grs#++M)@)Uq@TZ#Q)h|XoWYU*id=@93MRZG;Tsqo9IZKC4WNcYQEf&t1697u# 
z%iAAa>KRf4MNaW_d?+5Ui2%_>uW@o|Z6EsjoTaxCN_Y7l*3jT=KC@93x4#BydiHNe zj!UN)r(^=lTM;F5Edowus%MQAYZkYH^&Bq_{Kgv;xRYO7H#Jflx7oHufbGTlJ6=Cv z6L7`zrM{xkwLNR`sd(D`)^*HtyF>5q02#Ngb@=GC>G}xSyHqrb$y%SKKYYh9j-ltN zJWmNFtm6I2s?cXzn&GUpB>2>_xgF_%EMp1>h4&WD0Wk~P2NTF6;WAyc9$O0PC5=9& zWi(_dd5CB2pG7VXo@%nOs>kN~Uoy~JI+?OmEk%1ws*CqRuuNGTG33xfQxde4opBb| z<(^>&<^E2x5qF|-KEtG}y^KX+d{;$bknKHruMTJVPIbI^#-Dpr7K*!2(9O>XS}v_O zDPg0MGq)ikimVKBctb-yzN$Im|}sj#M66N!=kc-W+87l!0i(A@I7@r9Uguxc6~TQDyBUHF>Ri5MN3zZ^JxsfJXI8PWuF`PM5qRVLRh+J z7GDySFG;E>yS2RV>Uhf0jLW<+ZB6-en+1MdzDRD4XDM;gU3F7IPx$O{DO8-vmy3N> zb_11u5QWw5y+u2Hl|>*}7)kYzoJkjHB!W3s6U=#e1}-0`6~9cYVDn_Y+_#$oTrM;1 zRIh!m&L(nAM6T}Nl2D)bhUVR;A#zcl7z%S8_07nEGmy-$zF<@n4{^%VtHG2NHiUDy zS-%8LO>6SlX$+`bKya5uCu}6i01N6G(zo)*wp!i#t8J^T^$|{vRw4$QtE+8G2**yI z-3GhdP3`p~*Y*hK#5Saynzf)6-qbjW0JFRHxd>;EQW2v|?X|A8JVuhnZlhcIH0vEP zQx~4@${BHYnuTjjH@$Ad`?j^-yNIs=Cn6@VP_S-H?bmM>Z#o~h;GRknXN%Y^z4Vsi zRwQHg(6??=B!Tro>`0ez22nG#yI=iv3y$US24+~^>%&w?uF)V-bKDFcO(HB8VAiYyW4QRewqSwHg``N25(Y6ld7aJ0j8Ts=zFs?J z@6Dy3NS+lSF?;ol&E;~iXO&j3y>|QNN`0hfjg^>#QnfX1O{T0^QNxvAgEan;r@pU1 zz;|h;hIM4own|(j2Wx??jk!p#mQpb%d!4P#wNbCO)?O#)HByQ!Vvv||LnwB@00L9; zkSIOuJ_c0UX&sNH*8T`{)0fF*V`tR6@1^(ax8GZP|B!qJ5XD_WKao--nS#W|G2yV| z_pulR2hrELP%%<N+P#9^dJ6YgZ8di$(pdlqsQ4SqAskI~?KrH*$jiEeX6EgkCrZQ*?xF zZM6_!^p6em5~uM|rtv@Uh!XzkRdVQ473t0=6TVz(FLdbok<)qXE-~RCsXZ*25(b5t zM3*27kU%*X>2{Ztdm2<;>Ic(F$g&Oh7We&l>XWVhtcE@eCL1Pu7NpOGVMuE!Yz{+f z=!eZ6a^R6b0*WvN2$f2Ae+iT5l#Vj+2|s+ag5JY|=4`wx!jijxknw#! z;}CyNXA@~!g2Q1Jf{kyvk?!A<23?W~-#T<=h=@E7cPEm;5OH?WL%O&9M;M|(2O2|9 z3jGYK-P`D+>K3pF$(?FInTuwzZ8mT(-G4PEFUkBcE;A_20d#B7lhO?ay@`Ache4Q! 
zb2#p^5goBMY%wDj?~)#|>Jbti7rR>m!9EWlFc>_bbHHQB}4Msi%wDj_f z5X+1Swu~WofV|^JpO0jNmdEnYVX!%c2~l8jyZZuog@#k*oE1OoJAxRFM-w1ouJPbA zk^Tes!LKDC0E#dPF$@3&20?KQ)uoRcVFERI1SA5sG92Xs_Bi5;Z%nmVkcZ?phoBLM zq&^LSc=}!(qzMOlXvx1E6if4tk2SGLfl}qUY-Q8m#*r+C*G0rdMLYARq`8Q@9-F%h z9Hg6<#`B9o2IMe?yyLL<<9m3Mrqf_|58jxPafSxD+^ue>_+UeP*?7wq-S|FZgsIjk za1jEys~F5hJr;_=hZH}N5kHXy4A~*;>JyQXC7bw@1H-E;46WH7DR)3bYZ(9|hygf5 zuz+HDf@0K5G1|85JBIAL({&5v(j@2IL*byLxW4%k-cXs&Hp|hhEupt+DG=R2Vlzxn zO)#27m^HpKh;BCfeJJHhN_+WBhQES^Aq@I_C~5Qqy?F23*f8BEc`OY8`GJeDbj55>0?zCTu07oEaIfjnD;VI95SD=|^DbMDpy+ zF;Ky82u72e%jFRd+m@CiQvw_GbIB;10sCt$c@YVwp*6z4-sf?C$l;^FP!x>go=c7A z2Ip_}6*&wy(}|yYsK6Qu9g4`Dg!j?Ve;&@6N@at>)`)g4@Ty;2563T$5jzd#iOLkn zjGZsHSd@U-y|Qnyry0+;@gOXbslv7HuYe23u;{=yhA z1z=a%ep*N^R=$ELZ*6sA6NI(DFODrO96l@HpQw?qs7)aV(NSnpmX2(NeTLV9ePbIQ zah$U|9zsKueT$b;T~Z`UR%dO6V^iwKQ9yhaF9 zqL^#lJZo@bi(3W@U^*S-Kq2CYNev@jA}Gc1YuK6{f~o@EJeAm5aKv23)0_Z-2C>J& zN4VM`uxn^J6dk1OM9>pdx)v57>T$9Z?33}(^TZVJ-;c-B)!dgq+Et0uCs_9x?KCm}NMv7)2wK2rj{8ilR zLW+ebt`ru5&A*86tE z`q=fwwzXPCXBgJ!5R`XhuO`>UrvH0rC-eh2+GZCKEx*`9UedlrU8aqKKGbBgt0Xw}icNWN^q^j`Zt! 
z%F1NodL}(oOk{TWa$C|6Mmj8}|M~Ce(JO4U>V$b%(EhUE@Z7qiXx(X@*bSW9J2ABh zm_Wo@IwVlkX_2N5AFg#W9s*gmQ^<$^*sY-W;@4wL;V6BhwFhUkAl#=b$hb*mth%!` z{PeQbwr#9#Z4&Re>6if4RmMRXQM zLTXi6G58@S)R?z>^kVgJvxNoj!A81N?vA_0d?r^o-w+!K#T0=xX*fdut4#CpWqKR`&dk(9heo8w$lXc4@p)So=G}pZ_giIDpK-;`*{}Wv0X+-FdE=SjPpq;#f{FReo7)7V@^^%VAULp8^TubpmN!OlV)QCgG7`#}?YgG6w ze%-8jM%FVVefC*=U)<7cP|N->;X-lCCLhvMCu8lg((kBj_4(Jn(yG1-6gZYNk(Pj}?&{BSmVbw3V) znBo4iAQr!2|Km*2vhG(ZElJ2%$bbA;tetcsccbVyyL;M6Fjnw!M1(npRCg`mb9VSC^PW%cByX!H|MNG)zK@nmhi|&%CB1`NlWj8>e|Dft zU$J!V*dFsdLq-hj&`uqn(+`?wJL&FZw|0-nz!S^wM=x|5V#{Cm32w-}Zitb4Fm^7#t~i(4c`Gkv#d-WT9F|@-saV}<;INE0a*3*uWJNya51NZ=-4HF+ zF6O`7MzYWR&{NM)5f^v)_%>^@dx@F9YU<^3?CEga5~5J78*buXoM7nO*D~G|ks;Y#L}ZZf_u`E^G58h{oa1EXzT(Xmbe8|-<<$lQDPGm~ z5O+2dcfz*8S$h!M?GFrb zG$S}E$Ihc!-=-N@{I_lIH{WuUp0}}o?`!`wo(A#v26K?28Gyv|dgk}kL*ZzcaFT@* z(?ij?h~l}nYDEdC@HK|l9WvnXP)b?GY3A?rli%^+3YKBbEhaO$ZRDTT;06h^g*~mq zJKYblqi}+hWo(|A7fa4{X&f$}S(eL9CRN$-K2_ycC?nh5Krde$H0w;qzB{qD0XNJ4 zMK?KJ{bJwla64P9b@lmF{q0mRIGU+k#$P^drPyBgA&pyd z(vwXfzx$s0FyAR8L0(q<{H#`Hm!n+kM_|8NXQQ*kZ}9f(ddqap@Al?R;C_E73NNWU z!(wPmMv}~9L$GY7KlnWZPTBpu$?(7)~~0ig47|1q&@|~V=N`xZVZC=1@Hl-$BUlVTCx32v)G~WXLe``^%7N7AwT~U5W99Uf(3Sk7AH=}j zJ>IFn=vdyLqwAuoo1>df zEsnkCVbUO{O#$;tze>5a`&0kN>)WS+(_WEJ&_P79ZOz{8#1p5$F{EjrHQGkFxeUH= z@WC)a*gdr*$x$iI$mdB_Nm~YIO{p+oPg=G=-@r5`^sjWbpe_i>{XEYq^&MZ5hu z=Orz2?t7n3w(zAij!U176#cK&Ac3eLb@RfBksW0;v5Dwy#|C(aa<9U={X0g4VD6{_Qf-#J`+8W#Q4q1P?@UmB@xQgdOLbJo&`ej!do7 z++2Lfc$4QSw4C_IT#Li`rWxb}Q2Ifma?7Xt^RMsv?N}B{UZv3c9PIe3Y#lrRt{Ayp zXsdy*QBA=EC>1JHA#-gSN_jIFl?`ualue~CR`>8sy*__rQ`hw@-5pc*f^h@~cFpdn zkwj5^n7Pw%wMsm0@o;=$#=E1&RYF7-OpTH7x>2$*{Qw8Mztt;->cX+*k3>kham+co zr;|DO5SAcd!tA_GAXotfK;iP6e3fF(57v}oOZ!+jT=YJQR!R@Ya}08p>_XGQW{#y) zLV?+u=vKO=YVAS@#u=ZMsqHS10-h1EJJv+Qg(Q2Bat9Nr-9|ZCZppr9>5XRdn%b-AOTGtX>rKoY1!?{H4qC_f?zny(0mjgp9)QRJHXGv9=946><>4D>7y&# znBWvU8brzWIJI8A#njh_Py8fkIO-CjDCA&&$X6tzFVCAPU~hwiL$dV|xYbcyF`D|8jEyE8d-C2{omr#EdGUr$J) z1ADb10Ymh{Rf=5FYfHYlE}2>wmQ^cFiX-?_lbZG;FAa*8y-k@P&V?9G;V#$oy*xfl 
z_u?!qTs)~6JkD1A;^J3nz8k5k^TD#A@D^Q-Iso<%H3I8=aG}wJS3g_p7goza{Xa0I zqSTRdbs4-+^_^PahlA0k7gA$6CYzD?nrD_7XZEw4ry`EOyZ0j{i4ZXzkYDK|me z%$sy;g0T&TZ5IxP#HpB0-o`1GND)BlPfHI2oIU-~k+vU{9uc@($g>*u38-WVTYqV4 zxuWB6(dt0_`a37DNe#4vS!cjCi-~953c*53CzkW|Y<%yOb-OxwcF0b&;e+H!s|#4f z){N2SG&Lpx#gB^lGn6KI{Bgv{4<-88**P4M@SI2a)bK#qTBF=}c0vUacr>G{n2No; zU+m;yH7@hV)61(m3}eP>`9_F_?3S|v_18f}#2=+N1F7TV@Ae%vehL#}WHOi+pNX&P zr#GP(O$ok{7K@zNt3WKJA50Yc2?&4tsq8&~L9%(upS2gUZ|wMrQ4gZ)xZTxn-4Mb) zh=B^SG0#dU{|Z<@I@Zq9+7MOwINPhmc%39?aIxE5&IC_-RIgJSz5i+3SRI!^FY~&b zR&~1bPtEQuGFl8E*9TGT=IegEakHxX*ouErcgs_$huMSe#Og>&O8!>MPEC|zc!2OS zBUN@AK2ka)%MVifZE9)@*PVfnycQ7H7-0dt*m$Kk@3d~#_N}!rnbNKx{YRP{*KKQS zGS7&5F5&l@2ez^BZ0mG-Y?0*^`e~H&Nwe%`q%%S6nU^!g=+t>n_@3N-jYxVD16q}x zb$N?6Og2FKRV{LG>M2=3_eOikJ%gQgj8KdH?%#@F+COx>6JB8(>gg#U$-BsfOxLzv zf9_~j-k%gp%}WL5zZ23wYtx8ob#p-FlDXDID@zeCUnb=&{zT2sa_$c0XgDW3m4CHB z7K7=O8%JHq~w8?Xvzsx)KMZ4Pu7D$PBv>?Mp za-iK40<^=i6vH{^lv4!}JpC}bTfu@@tq8>ABTY_K)EZX)n5*<8G}J{tfsT_Ycz}y2 zkWg6&mS|)l?@H3T!A>u!^J0+1+Po=pB%JM}ml{oT0y1aVL?|ku5j*bIHi1^B#yHEy zEerv%nn?|+ww>I5-5>(0)83B=h#y2NCnQhK%6LCT8+Ql+m&0UUZwXqc1h`G{s*$s6 zD0r4!ei;AxQYo{YSt!LkkE4w*n|447UY!jg-~5A3$+3p-7LECAD2GF%z?(MGNE1qs zJbVEGY9$@&g+#s+qs(JSr4-1?nTixD({}CCCWc#bZH%!XoI6po-O3?v*%n6UIx!9|+v8{vhM6OwqEQ5XTdN~pMI zslsvA7-9yqQJ3880Hp6^vh#&cnaYSDGX4wmcf);N{o8YmkYNTTR|&l2I8N3A zz-w036~G@$N2&}tT?pq6u>=6rV0cq-T~UY{>@S)|bLLXDUYo!34sJ@6>~vp*$7Gb5RHqKv|* zgu|GTTWYP!V8C(Oe@(MdgDD0sg8Qz_~*(9IRlzY~@&n6J5A{dJ~_V-W*16ZKlu?I?(%S4WSH7A(EVbK?$|igca&L+QEmND>d)dA33{X8pUDlj#iF|? 
zT`Bh3siQAQs}<+X^NdmQO3{=E5sbFh?6y;lXLg&Ql*N%0j?*x!GFPq(Wnxo#pM2S1 z<~>a0w5Es{B%`%`W~JcdIJk+7E|mCRNtUCKKfP*6vm=>@BMGx~YPlC711uEz=Hz~m zq*u!eS0-I(`4P1|4JNYt%DIf3vBLLrkRi(#ITm1Pr8RyP1yqHl&X)QRv1x=ZGq@zY zhGkiiRcW9S@sLanMzy%`M1|VET)5IT;mA7I&M1`jFUckDHcRFEv5hga|4_N&zPP$o_v+cc$HIh>4+$yc#3&rt7>F?Dm7k}HGk>6X&>~RSS@|3 z@EB*JWl17Z&pkRCC-5f;zz~%@MS}4_{8bST7J#oeCTaE^)d(;we2rIB!f*9o1M33h zhuUpC`Ay&&0j9< zEQ&mJ=MV1Ox(->&<=+Q#YOVzt+S>jZHO3U&gva;rSE65#k9DqD{_6axJM`6ou=Ey+cQpr+0gQ-BXH; z{Uv&_Dts^2!tOqKP}_clh9pl4eT!Au&eqFu zsu8OFix7^f9Rka(#_`Z5^W$F7iSmEVy^nAQ zha$=MfEsr%p8XDCV^vcvlXel-rT0jtz_i4?fXc{7xA&IH8A~~kB9O#XJYnw$fP->C zOM163MXTFEpz|!Qr`}b+BC4YJ!{ zMi6Fq8>n3Y)u^%QXS^_Eb~of~j^(>vf|Az~K#C4FXLiU9VkT!y>?oS`W{! zDJuAS*c4gxR~FlZ#gnnkGsd?v43-ql4hwc!8z@zj?yP8J_9t0+Ypy&{^Lx2_S2!;t z-;T%9Ld}`j<$C`6&HPWS%BH0GU%@OiYC^>kgU}$d*dPhbP;p z7Pt?u%I<2&-s{R*c1mBQJPMF}j*QvKZZ8CiP^a8M_-7 zdPwpbj_Een0Ddx%9bv-d2uyMN!k9UzZ666E?_=B@>&fMjm{=EBPPTkqsj)U?2(C2%uqH;0SNo1YuAnP3NHQ` z80C+1#?n6|R4(3(U+~|iX}xox2s)04ZA-#9C%kP=0BUG5ku0yubsCnZ&fl!1a=%t$==@6>D&+Jg}gA`DcgMUoOA%hPM^UpWt)i z;$5!RH>m#DLz8Wf=?slUugZCc|xO~%fKFANBU#EW&2xy+m>Pv3SIEWxcpHF;r5Z)pI_mi@-V^p4GrceFgJQ=3Xm( za7^(dvRH#~F?M}a=h#lY5=T=h7){oV%HmVzxtm{MrTGc=xXw2%@T~KFN6!xE>w{b~ zpLwwJr>0A|bRKz%@lZ2Uom(|0v+)ROjO86Z*9#rK`7C0V)N^E~3&553Koa@(v zBi@7~Qs;5M2J7*ddPN9z;DMS#_6~KN*Yk#f5l2s=9k*{#E!fR*kz`6ZOJr@$`e*p&!g^s1{MDVsZ%X%yR~lkV7)d31o#W zXA9@@3K{+vZYrt_w>TsPr;EnEB8HI%Z6`?WJmGcJ#R7ts@@0D1J&7W8ljPwKj zS!6^30q+&mg;|@P!G;aK>7**XEvv zlSW+WgQ)P-yTn#+duyBtD6!8=o8f!vvs4WN#oqZFxPoLap6+QW8Ot*@Wp0==J)Ovn zdLY#pez`yBF;%i8hgl?j`VNR1BpDizdCSC7;x^^(Jg2&$rHAjdRvWX#j@>obJRDj0 zo+X&t+XJ=R&3vuV&7M~{+|60mtI@+8{zvlV-!R=SgGq*q`_S+h@Q z>efiS$ii#QezEn!k$#E&Ud;i?^R1DAI}jYLK`9LG(Lref6|Esz3fs{kdHOJ|VMX?$ z(cyc1eOj6-cmy#y(!w|oM%5L$$3`{PRUV8z(6b#I(=iTvpsgTrOgyUR*!N(<&~tli z;;|2o_M~wLbG0^~J%U?%>S>bg_>_5OnD(?~Ufl5{Z%o!NN9T)U7x%;$ z=OGoHS=TAsiCOoBFr7Ki^`eQnXZw9RU!4lJC%(Rf?9_~S-^!f_>?8FueFHe*?^u0t zqI$LaC930`zlMqJOfE)9;p#0#De_D$IU}j+EywBgA4eL>(4kR~V-B|G5KXRO 
zpawb0tsp0H785PLUc5E6_AW|Qe?2S7Zrak>AY6YVFYm)qq&HhTrExmToR?Ypl*8K< zX6_fx7H=1x>u*<0+0AU%ETGnH)U6lK*xe~Pfl-?p&86g|Kl2s)_NHg%WBAAI$;A<| z0lDvewe%4W54sTSC0~{r_y!&x)=9Yf8iz)$>$8>7qh5-BB-+ihG3Z|zKbSC%Fg%&! z{`%!)#zXqDs1OyGZ$z2o zvh1o+p${q%-$=?5Wx4I5UKt*JBWop>;|q^^{Zw#~Vkl8gs5mOj)@6}ufm~j!KPud{ za*<{qRMhQ6MR*@A-hfajNa03D1_&7LHx)#TzZM&>WEKm$MztBuSu3VMePrC2D6Z7WsX!Q<+Qq6}YCLKv=O^P5{Er2IB z6UTK8Ax)n@=qnfdmZWM;mM>X7%r5pF&GL$jB&9}Fc;*|9lOU%yw zZZD4v>XKL}wU^jP^0)>j@>!sg52aQnZd|Uckd+eWUU&TXFfK{0G}o>gM534Qy@>Zy zV)2$hoqAY7`6xSgv-myFU>L63*}D2rvUXK*T#>CSr)W%aNEZKCUdihXtvrq%w#fy8 zBnK(l(MSx>FUC1#*abS4Ry|&cWAUEwER)9=Q_2eQ1=NJv1|c2? zM1;hbB~r4wz6Zt5N#7r?%O%wJRPnf!NX%|&lQqna6}#7cH(Vbs&*O^QP>Q3}F%Tv( zS zoaQffHZ1YC#TZs@sibxHkiqOWexSQ?RA3^sr@XiqoSyFb3EghX%@})*{FO+fP0Urgxm&O2mD%cJ~KO&M+^bt%amM5lc*WxFp{zx0`xhqLgJfm~H_ z)##o|QTK!5X~AYBwho=s?r$Zu+SxoW_EHw8&BX?i`qyqfisuLKvar|L?k9IgRSRY@8(Jzq)*nghKgtoq*-(j?9?29I z%9Ay;(qPo=scPKh{GO_#hp5RGIJRVYANh?fvF;OtsN<8VRE+QANt2(2Ps*awwoI$N z26h5m6HM}qo3qd2*jw_9WO?D`S z+)Q^2vrk>5xXPATLq4Dg|Dwgfdfuhp&l!_*OXCKfSdXz?UR+1&iV*8quWdSlEnxs- zRqRwO(+lC8iD#%T#VVfR#>4vVM%ucozSl@5t!vIbLp8;Sk&(Q9mqPQjP5G~06N{#< zqJ3XzY2iqz-8{IZtIt0axZHR*`h54#O4G~>pPyWIKGU* z0B*9j;-&AiY@L5QP7g^xrNKj<59Fr%>JdF~3lr@Obw%=~dy{4Q5yNxvR8OPGxBu#bZ#Zz+_xy+j8fFibb#OkzJNnQ{y9K90#@mPOZ8_}#%l1dU_i=6y9{LVIB-Nc@d#Kwtx7|w7qIjK^21!3{ZLio}gsZI?;cu(ipHh}y)Jg%)duQFaSJ9E) zUbv?RxL8$c_SyDll%-hB4mhi37~G|ZTl3p)YB*VH2akAAg(5Mic+WWCz>yhNT!it% zNSwT!RpdRDvq3x>-WMEnfZRgKmy$U8es#i4!eGLjtgO|HEIHG&PTArh4VQb%>?be=QHNX*01bt^tHAqC=qGV;p+ z@>U-5up!?_M~bw4z2vCP+x;}TWt2txlx2KWRgP43WmHZ3RIPl}pB<^Y%BcJHsfYMz z#vEy;%4lZyX%_fsmmO)>%V@XvY4`bV96R1PFS~KMe*?mQW1od6y^Id$fX;S5Wy7Bc zpWm0foSvRvpO*iMI9ip3pMh+RfzOFCM3ynFgkkj)qgc7Oj1!ZJla5mK5lll>GlGs=gGG^$)m1~=!f8*NU)9Trb(@7PpqvdM5WqIE!@7XX9_D0~Sk9hF zsFAKgY0J;_?!YbkfTM~~y*zp&_CTfHiSv?~>vK8Rl|Pqd8He65*Dfme)B*SIC+?&j zPFpFSg=oF41D@k@9@a>nL#LZT%Qqo0+$e{uKb&~8vUv%dIY|yt#t7NSDxOjb@Mk*l 
zTkr6)Yd+-?5D0J*Fu@fNBQm-rAo$ctP;VzilwRn8vyfhekl~?_v4HSXXJN|tpc|;KS_M9xYegA(RFxh%vlmmKyvC3A-SN*H7js?z2dgJ)9r16JIBt9 z`_6YR5AURv-GR8kFkDDbDq#eIQZaill1eF>BgPwtQp_%Q+4=9XAKm2}W#tx>7ORxj zbd;79l#w0fldqIfsr(K*l6l}Fs}d}wcO+{Z>+?`h&azUj$Xw1=P~KJ0*}+BL`$#@3 zTi(Y-A!PKif2Bf{pkm6hLR_U{+L2D;kz$sMk_od?-jPz-kyfGLy}HVKcxRbH-C-5yn0KToWN^{6_%tA?JI2K%wbJ;bO6pR1;XmZsRTCi|$Sl&cn%mX_kN z7RIQSy6b~e%?EnN57tK>7`tjuXlh#?YkwZmc68OL(bVxg))5=g@p089(9{h%*4-V} zjdIoN(a=je)_XUsm+7h>prN04tZy=`U*>8creRQbY(O<^(CYf|jrzl`RDZKYk=T_-Nnt@tgaP&yODq4LpXp8Lg`tVVoFM^cxYl8T;_v zr#LZAP&cM`d!ioug!_b!`Q(XE{1U&Li6m*D_=$;Zwf|i=Qr4!Q!C%zhPPYuU? z^iQ5Xt%g5wGqa8NwmLC$b@Os^Gxv6L_Bt^~1dQAHx>t+l@fP1t>`}-pk=z|{5-hP!9Z1Nmh}<1%60E3B9hu3j8Qq<@ z6RbH;orK72L~5KVCY+^o-1$XZ6v?!|Rl7)>x~S_oKX7;Du5mTgaWWQhqj7h$)N!;u zb;A*Hchzz5tZ@fF3Vn3!1Kd5ft3ASW?4m?GXWcxLbZpa3J^O^cvUF_nYP`UwU8ZAQ z<^HUw`dO2XRjbJJG`HtnI+lH>&%=bRy~lJcrfR&sPrMg&%$MC?*jB&T)-l@`d1>hO z@?7WX<>^ZmVK|DeDMl?^@&rzxYeM4T!(Ht|iq5)rB1N1x$o7w4pF3f|Vg1hp#$^dNO9zYHR@%Qmxfji*< z;M>ah+_w=D5*%8cA_%PxKRQIGXm1ZO_zi(5+CwZXkJmo?F7&dF)NviW^Y7d52>jm> z0rUs}f(|Z>%xLvPC*pX>hVKLJPbEY~Zz+BI`4u_jR%d6~JNaN*=};|nCvagTnrc3E zmr+(vIOe17##*C!I4UjxBj+{Bz7L>cpZqwlc(3wSHh|&SQfq!N>t+g+4tQV&@zW}( zZ`6-`s~v+!A4s!yTrf~>_VlgFrW3b8oZZ*t*PJ>`v-K3L8l$g7OkvkcEg^VdaJi1b z9K`|f|CXbOK~FG22R->WHV*J0H*+=c`c4rGxf(i?lXj#)9h2CV{7|sfbw~-hY^!38Ve(bLSEFj z*4LzrAKsIXEc6Bok@*TnMz!>(i=r_wO-5$I;XkvC0t^z@cQDJ)|Hv}%Yw&Xq{5*u* z3|Kw8$ z8MpAKT6M;c5OgZv_oDnyx&qNK)s)ZoGo%0r6#6rp(7;FKN(CRo9MJmnGwbMh7r=c1 zmX_f6z>g}hzyudigE_Ul0&ajF-Pzp&(C#dJU7cw-IyyQA3(U=}_223CPycs*U;IBS z0^lO@pLHdj6goCGxQM(o^J5S`qqvp>?7FTL(2p&Jy#p7KGh@=>m%mqxVZ5LJ(36k3 zCxH?PnU^gl^IoDTGq{nwbfqmt%uo1BmuANO09XJAw?gHbx*wE*>w&%zn06{5l-!ud zt)qE5HNu~}t1fpP3Z&aJ=e=Q>KZ)yM+k$;qUb>ser#`D9qkzb3* zPxlssxL`McOX%8ufqm%<*wxj>IT;;%9YdGSXN@9}&UXoCo({m}ID7D`1jYydzqwQZ zdEf$K`x8zqEG)oJ=G%9agoI_T_Q6^J=MGG%p|P!{_0!tghK8m#6*bG%SXTjU+z)C!XHzn zsQBe;pi*D?pZXg7=Wv4fM>zS#tyojstNR z_7jIe9~2OWcXkdy`xDS~gp7<4#93?EsD z-;(F!lVoETIy?WKl$3;tN!H%p&BDSD0(Dzk7l_`BOuQA9jbFaZ!KCi}i!Av4t=|#& 
z-x~oChmrn97yeP!DWxiu{VePL0f+y!zl_qKINV*C_XiID-Cw2Fo?sy{AC#LY1`BuEcGM`SkLv=fjDf%S%LB=4x=h( zR9R0H{PLHvZ7;R|=`UkHUH1~}#x)MNv_@Q)b)Nf6{nusPrNGg~wZ9DF^x9wMBjOd_ zruTJO7lEhy2K1LXZJfY^U;E1pAMAquGFOfW1n4hA_vnD6a}Eyr%NXgRlGcJayfZQX zl0e(S*o){9h{NBy%v+yR2eL7^5r>2RGXF6S{|kSatVmQk-sL!IN(XqHt{X)r1ZuOs zoQUa5!IEH_J@qEZ9Q28KVN-p+lDhkuoi)uP*?cw4^Q3Dwog{dDHDhX<9ggZtm~Y_{ zh}k0b_LZF1!*@_#2l)F$$__Iq)ZeouC&7$k!`_S~1C9oT1FgA*(H!tRF?3P2Jfx!W z`V>ehw6p~k%2k?Q)Y8w=ih}2V3`2to!lX)jr>kK|Q02j`%5v{WDP(fBf^CyOu=FRb zs|BQa(#?DYiMXy^F8b(TSD0;%r^EAv9l7(!)^>#Vs_=oemV`c!v-(Z;l7jf_u zAzTdtB_$q22avQN7#UE}mSeb2$z#L2D)4zrZih{~a6EXg zT|(HGf6O{jEYa-S#O}7$TWEIZ6%IyrPXrgog67FOOD}ZG|*8+8w(}qEB(t?vUoLKl26bIb_V#WmHl|qe1 z`2~AFSvWah2*3kK{|0-10p71d1%$m{9UlmOAk;NBwfrhqz)lb>S3sP*eMj+E)A#qf z1q3!uPLbb5%YRXh{(jj1=@CFr0xK45d>F{Wp%JjjfFm*~UmOTN+x(ufR3I`caVa_h zJp@82_DJxIU`Z|^4qhQMBd&g<(E5v=ji45EKslmM6ZO^ z+TgIOOtp=^3}=76amnY|Q(n-+*LGqwe))RiG*i}a%1K4atDe= zetsSq)fqB%xpRJ=CZ2Wz4gk;23N9}EFE39(``FXd-jkELkWaA~l%l7n4O1H{$H!ae z=i%UZP_J+h4-fC^YRkCv^Q{}$IP9vSpQBGt`!6?kz{%ms$uc-R^#1Vf?5yJCWO#cw z;OZ(myd%uMrtZ;)MwCS(W;?(#``NqoY=vmpfYmZ7m{%TDsW9RB>@ccaZ z>@3Hq?PPO1=Hz7k^tA2jD&*>_mC2?ytUdfibLQ)U6?pR-m%8HbKLRetcT}HC9Z#ByqU)@b|$k4I`c@aW&K$H~%V9Owt2y-20_)hrf?F#a0BJ&%+AHG)&iGWq&*1jk&3 z@Vg$tc^-TlxE{et?j7x1djWqH_;GRV1-yHD`==K$oh}0Brx!3fu_~tU+W&vf{9kZ2 z*IvN?1)TZ6c>!kxa%|+jkGI;$%gxT;$SHrD(hq^*edT8-rTAfl;hm49M!hou9`F}*slKKwz*w1AIP~=yA*G|Q@55~ zuv5QPy}8q{*Uq`ycr4u>0Z1$>we|fX21gf=p(!*NV$EnyrH#-VUzAUcm{Q!#>f~LeSac zbn9>chQ@s~C_`p@G^D^@2bUgmhefxOAD2V%H(lo*L zWXd9^=w#ZaX8UBup@aMMi_3)V>8!_U5h%8Je!6}76^_PpHt$bnceW7BT735HwaCud zVx&CJ`BJQo-T89j9qN{qRQH|p)r=sXi?#O&b{FfpImH(ng*7`Do24B*-?u6!?7nZ; zt`>jaX*}KezT1L!^T*yNGW#F66_J@YdN6P+g8mk;`?65&8r_r6ZTh^XR9Swh##lB;5!f-oLVciKq>d&=*k&T zvd$LNcw7j+f5x^9;M>RE0>PT3M&*gD$1>w}Ay!bzp?OXR(9a{$LXCU~o|ihyU^Ae~=D?Xk?SNFj#g!#;Kzy1n+#}V5cC)ra zVq-tzIr94(NQ#|Ol;sh@#HI{bb)C{22T`y6-!n2Qb}8zYML^C-k>P}}GsBz)vnlqoeNWPkuXBluQ)qO0Gn z{xI!W!i-!;$gC@7ZYb6m^n^WAZDrxV-a}%`JO9herZV 
zhz)VdEb4Jq_JZz=(ji-x0`z7rC|D#nY6qxEpu>V38YuPcS*Y{LxB+<&%_%h#rmKoQ(l!l`lXdD3j!RzPY&}Am znP{b~E)7Q6GA2pUO&aFO$@DV=>M2$5@Q;g_@r?mq_#pOHX0A0Cn}NWZcCE&7Wv)t* zl}?ZTh?HA_@D_slsUjKF-%b^o`UeX%oHqd5EXMB(c8O!^0Jjv5raTXA#zYa0Y{k-TfwBj;;@;wQ=u*o-A|#)0Bmc06 zorrIY;kSh<*;yLQM~w>em^6_q-3!*ag(Q&bVIrcoA8P)^pUeasg+9G397!FO%0L-S z9Jd^Tm^TT{gtgy&?*7?B5>67W6DNQr18=nygsk(QW=h=A=wgWW2=m7x4 zp7)xm3Lugv3Ri?H&im}bogum~2-4_zzgxdR`l$_gIFq6uj;P2$QdyItsviu#UHO)% zZ%tOBekk%q<$Fezbwz{v;lvM>*#dp*D$nXiG7y!y(kdJ3DfOedx2y6s`!*gl)Q{PO zqeGBkoBCJ)P~Bfu?B2Ke=sP7E8hUkUkT)E|0zUcaq^dljZ_7;L`_y1~b!Cpqww1y6 z>B&Ih;)=d)yJz2L=5cFkJ5+X@Qoes#Q?03g*R<{0@O^e~#Wjh(ZPTse8BQn539`7u z)^jrYIRLM=1x?k#aHe4%{eEp5S^r+3#E%8wj_4=W4-O%{)ALYw=?_7YKFE6#Uu`pL zI}!4#PA>Nvm)P#tb?fvW#7q2G;)(FAS-p((!uo+mMdsEaIk#&(BDDg;6YnzSIeJTV zxu$U6>tjgqQTELAw+1uudgDRgR5A&O?$udo-TYDUGs7<$SM`$}swd^Fl3QkI;(atv zoPADPHzWqU1}-B{>SrW3+y@%wPWw-N-Zkwq2Z^_%;35ev-P&=w1H;0EoV80_?uJJ+ zF0t-wdrB7^?)7XokyK-DR^pL4adpm}vMIe@4rLX=$E zHzU?D-bfQ4g}D}L(xW4Vyf~fM^*(91y4>3pI$eBhyLc1%9OcRvKOWe^xFv;eF^_^; z207A!&zQr}6XBmkUSh7d&?*}XQ%qciBmWDxu;$d^3WUo<#SH<&8eBRDiM_;qb? 
z_++qSZcyY|aI9!Zyhku00sc=a7Y|hUbKwA;fE|PyxbB9)Gce#F1|%`=N~@5QGj(?L zg1r(*Wt3Ocf>V`y$~w|AsvwW?AJv(^um6s~|G5$PcN1+|;+E1(=>UAjf0$?siBpU5 zg9l*P{^*h-m`sG9D^_IprHDFjRI*j%z|&xU*Ig2u)SKI39ER5u?O=td+i6Nw`45uS zV;;?RR~6{w>Q%kCo@kGhKIsFMMdK{f>9&jAX;e<|*#?ipji!6o6Kyp-bbMux(uKBQ zG^9tpi7aqXS@cJj6oEzF@@JRycPiHz0!=aKNmps8-kzy--h5;T09&&yem^JLjw4Op z9k>4wDjKL3Kl~+Bys6T3aRN@X?O&PC&3-?-7AmgZK0mzxg^DFN@iKg#or7Ickm?N> z{QGwlwt0B8Zbo5U!C{4Tt!;2dX zMsO&U-2qeM9EKa<1eCy=QzLm zZ$wQb>ds%&Bmo=%ehoo*oT|MZWsB{RHzZi-#LAbu6K`7g_j*>W_GjFAg{$IKxjvkG zzd*Imt7>z+^l?wP%CqY2>00~E{=R25yK^lsvG7!%pKSr1F;uEN4)FUBXew6viwAZ8 zPAmP-#Emz(KvB*2v(F>)b3Z&?HUN#duW)42pF|A^LD<5gZ$NP)`qvlZ>XVP}Ljhh` zAjC~H(f{AXjVKzyptw;#cfpzR?&`v8g|ffIjlMZgB4FdEj4!(lJ2eCQyA&3q@1m?P z#i+@#FUKz6C@#mX)v?V+-GpW^#TXsL?nE5d;oGb^k zlCwjHbAJ*wg(Pj=i|=}6^NF%iGV?cz8M>~CnmKl>QqK5#qT)J+p3QPff@`8ioN_y) z^1yskCDF+vFQeDZdZ-d4YKr|ohErvh)u3`P*34Dki>kXOYWxnxb-9fvrehbXA8~B% zG-Hl|x<=$@I(Dt>cQ1&VH!raEJ|dC8_dXG0aXEfwzx$x29csJ9_vsI!hRdjk5BMelvyuxB7zGcG;r)bf@N2M+44X!n0)hxc6=FKf6ac0X;NOCRf?)t4 zXj+933=2b4nF{{DkkG$h_^EKJ_6!#__8#dN#xfs*OkT z!6hw5@G1weH#0b4`q`Tab^WC_{zsY3dFdP|vx%`SxKQ z3!hGHyT00X_z^hDvW;3D#w0YwwZLzs3&2GYC?+W z4kT2h20VAB;7XloNh?xIlDc_so%qSjr8jNY*<}m^t<>zkgym5M(Yzpk;pO&`?i>bYd#=KasL_sZ^Yzurh(v!34{KaDO$+>Xhe zJgi~OD?4S^!!6@pKQWJ#CX5V8jW}p9FY?{sVDiKx&bLHD6Bc{ec_TZ;@pA|BghP;s z%j?{1toqco45#?lPM^?Le`Fb=S$0z0)t0L%@I~3mUyWs}uca;eHr#^}d`=5*!>BIK zc;cfo!^!jR9{1G4LBca(S0_waI^nSg9J61FA8l@(pQWtc8YCZnUd5OYve@%81%W!!wzu}jMkzodNKH#>oFP~vU!-V>Z)eW(B@SP11H`^>zgdYl2I+Tm4`n%t5cN0qJey{+1af%{J86D(&t$jD zZq3bl6i`0$C#;&HM^(^(GaZa>dWju|nw;H|LU5-%{`e%M2wumTOZPCaU^3e8obT4{+BHuFnU^x4Trfd@|Yf6k+lq;*ZzVYhoy9Gl9 zS$t}Qa!|*GaY60r$Huf*mdKX(>(XW0R`5$yrVrCkx!OB82yDVjFv)`p{b9H23nGOm zVZ;;^K4*CZJQ&v0K;u}$@xH_DCH+X6@4>|l>KV~*iOJciT(TuQF>ucq-&4qXc5(&q zi}`&?VI=924_Q9Yjc;-UMgu}0?Ab~zBBfP0^X%>6aJ3+DEt_-VT!lDH^#rTY z_u?d+_n56T-zbgc%2eed59P_lt`eOrm*lrK=Bwo-!^?4*P#6^8q=f*yoW-bsFNOZ> z8;Z6^XdIHMq_uuoDTMZ6D=Ly_few}#ZTwjq(dio-v0r2v;LFWF_SO1H8$PmcuW57v)jE060)pi*&Fd1y7l6;mL{Ro 
zE%nDW#R2d3Q1QFiZbwHKN52~%j&Gk0Cua8zYqck$^Cd>sfjbf}ML2}0EOLFS2s8|F zR>Yk3X*9wZF0zwCHi)#ubVyTU%#pzQCi(Q+cx)d{31ueL6q#-UT~XF3gd%##T?6)( zwR?52GO+Ub23fkkcd37b(C=Vsy$4q#KkXr=p8^ z-2D-WwV0Vfg*~>}%QWK0?t3gE2Nw^*r3bTajCw0y5%-3Bm)UFTdZ^C%fdp)+k0I{z z(&f_{loSFYdKkB;nM#+Gn6ii-ZTa19Ay3D8pCgOZCCfoPeEZHk5)WnaP>3p?7>*H> zBGsOZWsiPN2=?m8+W|Mb_X7CaTfy`w zF1JqT;hoYB#6zujUkV;3Gq0O`7JHJXcxzE3N^z_}HO3?9xYwM_D0V*kT_u}hnJ%9H zOpktOImd-9?tz%tlD2>cN>^Pc)q*U%QsZ-S_CT~8#nJw*aAKVaY%JpTmz{c zJ{<*$fQft40*P*47WqmkH-T1`tQj;_b8OZnI=3H9J!YQ!%_^r~ZlWA=ZcRp#+uci}#;G1# zst`*#P5S95=DyY&?Vvizkd;mdBljrQXRjd@%$2th3EPd%61sYn?sYnGpWRA0=MG<- z5=#36aJKpFrW~O(Q+E)l>U+W{P}7C zyVon57e{+s-Vc6+de_kxlw~6=GYyZO|x3UEEmng5atICC1kn+5BJiWgUE+br8 zwN|4R zM>C5+kj*dA*^gQ18EU4V=$Y_s53f79;!>S{TO$54y1rCCp30N{s%QRcSa@q<3lm6X~v{?5L@4UKF!9(uPxHWDXPm?D}3 zR;OI^5zadwRb%PfTmnQ$pM5~vn<4&z0fMSKsuJ%{eV z)WP=IB3f`lc^O%1()4jk^y`jVL^L+5{9zo^-m=xv(=ePNwD%`bou`h6$T!5RT_pL0 zd39qgoM8k+Zph0-Si-JogTgi@>KW~hk!P+b!gi0%O0LmqXJIEau2 zYvNn##Xpg9H!>ERGPSdrikA~js8|eeqY(E^uSCCoZ6-%|Mq}YeE{J zXc(N7=xCCN?3n0PYi`R%XFo~bt4`#cdq-PdV21F|B2~KM7%l8JSvx?UMQ*+fnez8a{>jQ)b4e<c6zNSewUs${~k1cD)pq&Zk06rkiaq1je$Z$$>DB9x3t)z zXe#174v2)>31!$RM50nSk|MqB|77&+FoQ~jhveJ}$31e^Epys~u&XN*r7RJnH1)2S zlZR9E4P9rFiZ_QSE`3KTkDZaV{9Glb-|}EaC zkqObEGY6~bzgLxaQ4Hm!5jJ#@x@AI1{2JWQkm2i4CT)2^WR3aIVJO%!Kl_DePDxCT zmldziRL;?U4%~_c;j2#oamWo$&JC^4eLbBUevunVnHMdd7ptEa@0FLBoR?gmmpYx7 z?v)pyMDRvD|Gj>GwpV^`a(;e&exX-xrZ_ha8h%>e2>N7LoxF`?B0zic~)<;{JU4 z-mc*3rC|4%qESaBdk8S+RkV;ylIn;$Bwkcs7cy{eDnSUyJ`qjex8fQV-uEJTTn(6J z1Ty5<7LmS2CT3RgQ3dDDVF%Gqg-dN52?z8`Q1VN$`NGkxIJqN}D5PnAC~-wwl&Sg< z->xWwOdvur&fx^^GV2=Uo4Q;@A?0^7%ZE6MxD2A@#hnF+fWBNI!Fy%%p%qSEgcs`W z_?6}|d9J2@Fhlurh?;03Ng*XsoXD$6#e2jN8^xjBRc2i|H(CqLo>g0@&^P+W|GpOUt_idm5vYRSxoSv+yjy9Te9M6sV; z-mP?}VA?$k@a0f zH#XqJ9dB)hRgi7Uv0t?;l18C8-^1^|QytWNVcm)faY9g0-!6E^Dpq+r=r#HU3vcNM zvX2~xD9(#ZHg}bDCq-LH-`YzuHW(~b4u{05TMMghah8#{sZBMClcEh+s}ZWXQa#{o z^>f==G2+p0Rp1h7ooUE&jJNngX;;^7qg-wGgCdlw!-B8F<%?!qREJk%$MY{8FMf0& 
z;MASIw@*na4Cg9wpziX3bj3aoj`mp!2%*e~O9munvV+78%Wd*1Q}e0|bM0Jnn`*QngMA4XT^Mvo!S%>7 zO5pE8Q6EHlJcy)DhXTpyk5__nWI&Ie=L)Iy3aRB~N%VeJ?oCN98w>@KHXuVAkiJwv zE;3h6O?&9|%Zc8+snh~!OKur8&^@8^@%53?^J4fBJzOtkf%03Xv%tiOM%k%*|_W%sbK_ zOb8u>*-j+sqslaOW2m7j+ZL$W=85{`0@{5TrTWdTWAXa%<}4(GG*pZEen~j0CEa*@ z{s@Go$Yyr@yJIe@q)1HY6a;?)(I|l?LFL*6aW9%q($^NwKn}@A7A~EL!t9YznR$6x zWqhv(X}{R$6>*UMNbV5GaZ_%rmuxm?6A#EsDldX2-GMxo>8t zGK)r#nkrC>^4xGn3onN-_NL1uXXMgm%JhetXuhbb;1^qwZBJ_lbx#H+K|gcnbr{0K z4fD{qzhE>Cb6*bg6^#gnDb+`G(u+c!d4p10W4yX>8344?T(gSsVIuroVz&@|Jm z@)a<9v?go#o?UnEav{{~3qwX8lkG5i*f2-a@M+^Hzbyo#cwQuJc9v%Ft6@pX#fbYh zZ5+t~$ljp*+)_T}SGDxMah=tJ?HbRW8bnCgnpnS`A4XYaALm5Bv-ricXu(IKjwDrQ+7k@cM-Suklz#n5xLOEdzj68WnOzh$$Qv0 z_6hIjfz1k1kfO;4~S$GZ6NbwHX-W-Hr?)e&_Ne}LG+&CoK zg&M#P{Pqs`-yBlhM17foW~GKET5@=IV9$UK&6W-oj(;TGe1uejYHb9yhoLJC9BJLz zp?-X+Xi2cjqY#${rov0NMCwGO8=A{fOI-Yo&TcNL2eu*)j<{=WYj@k z{hCq#Nv(r?JIJMjygJCGUlZ&gT~6x&a_k_H4l?i{uMS=XNptW9lHTAqLDKveiw=_K zAcYR{=O7J!&68jMr~~+@Ald%Ej70tYi+)GopGM$svI8`7bbR=H#`TJ}e^5S2KY88a z0FjRRQ~3n4>vDoVtxz8f)~A2U4h)isOXw`89=7G0|$T{|GoqY zXO>(Fp=}^r;wEa}_k4?2?;(lRhT^SO0EtyQZQ_j4;ipf`i|LYk?z@|JkeD9k8eO0V zV6q?pqcuZ%-!@DXm;3!q{1_cjCJS?bP8w+pAAM1)l%@p z=Wm=e7|nZk@&-US&pP;mfHGk5E-CuNJ2xXJoEeawa)G0rC&dqX>{Fo6nvTm_vdL*` zuP?{v#ChsFJr>3{bC=CuWlM{`@JNN7MES$QLzwlY(3}{wb<>_`h;cxkomlF<1uJbU zX3GFtt4*0Oj99jJBUy%f|LiJ>>bK0Ry!Uou%?_0lTn^oj?U6l~9dN#Y^^Aa?&bUs&xdSk=|`4-&jX?j^So)a_$}$>nblVmc3;7Z|uEwSX6!A zuRU~kmvo1KG=hM1cS?wK2}pN0NOyO4cXxMphXP9DxEGh#{p_cY_uc!S_uc!RLPiut&fg2I1V5CQxE9_sB%{4a_AcwF z#q0k$*s(kkWwL{{bw4Um@WWWj6McRwj}^T{$~wwgT^Xy0aJ%FXC!4H{a1z=Frf?vn z6smkz1w=&G@f~I;(7jF@GLBvFT8jyyH5aqWAfVi!qBsC7)|w?um`lIt!ti-2cBUS+ zj>U+XMyKZt_0~)6Eo4}kI~(yla*XnOsBCE6$S{MdW$G6A9!v6TZB*fJ4bdLDCUoFd zWe-gC(g9fm?UNXxUfjm$c>yLlU1_4AdY)oo8#X#!Xhc}<-iG&t3^DJt^2G^HB1p1DFpX=&)3h&eY9vxW1nx$xCW@hU`$R z23;GY3BIT3g%&awmEddkBJ*MsBXdg3O`LjSi4K8BbhZXhWIwAz!-SM{BTNU$7Zlee z!QU7Q2XPYgCj{H=@lNdx=kY_!p+fDsZoA`Nr(PIEaaJfz^}QXMF&-#Vb>WMWD67w2 
zQdI?^xRq#6_!errM{M(m)=j7q%*$Q(m-X`$jdc;QlD@pU$9iyf@zJ7SGUZ9~Bbi zMlZqHOxl5np&fmJ_79cJL3WtPNK?o0Cb||k zbw9j{>?D?Z)gQiboKXh`QuwiHSkl#;C~V-0Z>oX4Igcg2x5Sbbj%CEdgQB&8&m}_Z zC+f9^Dq?lI8|I}4+{ZDOg1wNt@IM|G=2(y*B#kOo67RKNDJQfIXXL6+rKG7n+|uDm z7mKicdObnH6mdyHdA4dSF1RSsVt;P-D(Rt;o26fWY?cxhnyh-M5%w$b2BpEr(Uf<$ zKicI&X;@UQ4d9_FikM2O1HEC#63txt$WUuSBxH9n)9(A}bs?gv<-Y9dTM zV%|N#<#UKr7u&y;cBhUTN;uC}@;M(q2PY5fa7c9pMeNh?Ka40w)y5ae9x&)UjA~BS zCN}-)hZvyNC2N`Q{f^)@;}`s~HR(}ql8NvzY^_Himp-u1vm`qLYOKq=8$S|!dYJM- ztJL zalTl-p;TkyRK4wSp*pIe+*B7=a?XtrsKteAeEf6v)lS}jt2oeSp(gmNb+Xo5)5}#F4$xtE+T_8v`L?}-r zfR=zo7XMEq>n{Ze8E||)zeKYBbY)&x@C626$PXY!fEE9B2J3H1ytx5(|4Cv2xCj8` zU!VwV`@@RBAEki56=2SPVMQQi1+XGOj6l%p1tQ~YfjkrNP<;t@y?`a~bs7MA1Yi<) z5Cb_XU;_|-dI8Iqz!DHt0`it1Ac^%S@C59B$vpid@bo9`1jL?zhddB<0#aE3u>uJv zAm{Wi38(+l{}=r8{{8PBfqy`wfiNr#D*ivb-v1sNLFf$7e?+{0)n$wW0+|1bWC3*< zS~xPgX;9JB-;u1pMZEuSp;5OM3Lf6JxXxz_5(yC&|C|-%591F-XJ)Y7yBQM7Cf+II zYOy(l>?m&w+BR135BrFyM-1}WiSa}5SGTsaKUk7~{X?vp00fPQ!-~eilxhHi!o&pO zlTiqt%mV|AO;Fle6aqkm@tqfULVDfYoDFC{OJY0tZ0AXMce%j9)fxBIm+)L3E+bg7k%`PI2);$@g)h5TntD z_Fcj2Hb!Hq#s?ur_}~YZiyfx=6r%7L__C&AYeTd^n5LUw>#A?+qxhJvy8L(u)C>wk zh+q4RA0O=tB0W{pbALo z^BV3FlRP4Uo}9KspjP&Wv`+;)rkM`;T#3gy%O*sOxlK-tlwUT3kW4Ce6J^L+4*yH3 zKx%zY7YHgsyBs_7pg$=o$#g7@-aHCYner@R! 
zJ9oEXjz~z!l%hDa;-x%2N#}{_fiE(y1xzb}NY=lT3atGkhdllG zJl#|BeT~%&ZPw$6>-6`s`Ah_At&Y4?1aIv#JNAP@ zw1~!zMQI*$qv%=g2vW8aRpKe{GlJx%f#VV7^s<-{a4cwJ9+OaCA!s^v{@cBNGD7>U3Fv>JM z45v|`Zut$p+>A3L31R+NEJqJ95bik@-^|lwDxK)7AKAeY?QAJ`@kyMi!V-K_?gXBl zlyBBDYQ*?@3KESRE4HPSgUtQ&HbL3_6CdbUEuj@Qd*SG3nIugyf9gFf{-nG>K1?y- zpDcOy8$%}vu@pX&g_#|0-pRJ$N9#Ulm>i~%!JSLi1NVQV)=@2-dkbx760XfC7dSD5 zBSa>!3n({m^LY@g#YOoI-Y@lWfX2VRx!IeU#8)i0Y#8A|T3=-S;o5fxU6pnFKwGfs zKN}MPC;D0@P6#aN`v`(fC%E3aSH6^)9`>utsoliv1TApJ(j#0N9=kc~&-K7$Lp~6CT+>HeZP1ILWxYih^%7 zL6=qv)|n1ee&@((8KNgjCJlt>pyl%jkMi!d&!tE-(j0VG5pb$xEu?Qqz2sIi zEs5~b#E&=^BcpII3?s%t+JWMJ)P-9*ZtGswpX*7CrRVlRZPmf3yHu+s@aGL`&o*uO zF7h@sYH=2Lv0S}?k*&FVhI%``k4DIE!=+EhNjd3koxhR(UIIxd<_SDk^bUrhSmAa8 zig|ep?+Hahtza$?XPO&XPtil%-*Slf*(|Yz3~n%G+eY1yLW6qsD?jg!5z12i@9OCO zi<#YdtL=`7#nryn_N^d{ads^2W!Kp-{f27y)3oP@sR6z4j}c~su&n$mCN;M1{q5zr z?=e@GCmS1^{+0@eSGccXB|lXLbWTR@`}YZ?(mnDfKk9N{(1~rs&?apJ5>Qpec(#GM zDyQH(xjXSFKE@89c{@94hdR?=KqC7(o*^uT8>>SQ4{Hw)32xp}1ZN)is?wn?72bUxq^8VWHtt*#%wc*4Va^ z|A}NJPqiO%Pq_{cA8|#H-!7qVxqdz8-VQ#wOOttR;918E`THQbOG_cvkF-jPsu*Ql zO6Z&3p4&}5x99K>FJ{b>1{><`)xQ5{wlwYwzU7Iw)z)zjr~}o>B(1!kEnpAMZ4G_O z1?ID$p!Nk7t;Z+26&B-+Z|68^H;Ym4yPl5}em}1L2H#J0f$ye%gH8Jl`Mjy&-6-+VHO4)?#=Rp+hDQP-BnKqAZ8=uw zyT53=7-MPg=eMe4ZMJXC0n3s5(L*NDBT?O9iyV@{z=QyfyeGf|ft3QmL5;!L|2>cI z{oscw2@n~&=cgjmjVAM-*jBB$&Id|1{!T_Nw;=h(fTbaay*y7PO-^0$KxQ7t?}rYr z9fc+u{p`8bN%6cQ4(-j=y@cZYV}FppJ@yyAGed0*CN}g^7;=0o_GRXj+w>wh_~NJg z+T1ICGQZMm=}^bO1a{7#8)rM`&^ae-@Lgy&epoI~SiWXhp-WhCa#(3|Sov~T<$YK+ zet0cUc)ezLqf2;ma(HWVc>8jA=Y4oLenc-%M89Ul&NT!~4#c1t`miMWXmbSgOa#u69zr-9f^ZsYT{9ADCPLy8iUt%lcn`rW6eT$wbxeoO zY5*-F1SKf+^=29Jtt}L*BJ^`e6o(=dVK$T$YxHVz#0@-XYdIQC>+9bq1pkuza|5r5 zHUB;#2=E+$S1;!Q8*Blv17Pii!=zrl@R#l1Fb8b=b2boQ!2jKs)Bn8vKaarwXO6%N z+kxl|$b^8IX22}a|3dDMjYCXji~g@v2nk5&9FpDH`(rOEg!PfSBk@=W)a9R2|GnJ* z|KSNiK%u@`}C!*!YCSbP}zm;8OkphWGB>lHm z+jQoDz2O+Vzh^cnQNU*NwBE#}S6OapYTev^$KnK=!YRC>FiTAwEAu7>IX>Q8z6j&~ z>LCNdxT&tLzl3qyfA^3Rxpf^8FuC>oyBy)jZFf7k$!(58opv;!9FB8>RWlM-gY-Pj 
zC_-CO?#aWNOW-NO8(Wsi|MifSzlCx6Zb&R#`xa=gTvGz14b^j;<=7K*0+n!SC=%5E zt05~9I3tC6Ql0d8arTopMj2n^5U4Y)bz5k&d|>~RAuGGbCi#W~QYGpRdaVl?sWt_s zMIRmz{(~^i=0zB%9XHt;U(om%Z(b>CH_E7#1=fx)?)^`Mad->T*3!*-V>6YjPWzTk zBrr&o68%yCQ5c7+CG-9L$|ip_1jxYkd($Hgl}$Hdf{g7yrd)CTqwhEm3U;Q#vLnGa z$3ppljVMn6_gWbVOB(?}5&;$q8UY#-4iX9t3IP!T9}blY7MBg0g&$s88qrt{$x;X2 z)C@+)2TC^{Lca`T(gHH+MbKPCliNTS-9c7cg*99NxsC$`%70l99%Imc-K0@jjIl9q zA$N~;p-IT9OPpkXZts6f*pAK?_ZYSjG*DBp~bd^^|qD6 zioM^YTl~CF`dWD5Rz}lu^VsCd_{&=T$M3)=t7DU^-7~v23ugt3R~gH<3G=t%Q@1`N zH=jGd8`T|ZSMF<;Y-<#4Xcnw}$X_7$f(WCuFB1;EGVo2+TYjuj#6r#tqzAJIY-#ZEfiQ6BSQ z8TWYu|Bqhrn|_7+3Ax8L^&-;2$hac~cO|DKY56|s4E}Un-dre*X%w75{UHWWZ z`aE3vfdAvV@cDJ<`@HM-v>oFao}x0M_Hp#P{#BhbQy9hYP^k z0oK97_Wr`w9nT?%rBrmb z`65B6Xd)l};i3VafN95kDfC$)%xw#&{!V0=k0c8FOQML>XDfNH)S5&VG?cyc9sLd% z01-sB(Nrmlj8)Pft@xQ!#b15LmqdnsgCet2pVNena1IJ7Vf{d|UCiKLE*jVCz0u@1 zDxKvmZ;UmIME+>Sr!!T`{_zCtj4F$<#=`oklEfrg^U&~9cO(wNAFcRTG=<4_-s^+O zJjx}<)>g-vSzx-#-$V1j5=D{dI`}vY%FnAPDt5v;uqxyU-{ZU{_4BU(JOMw;zqn|y zW1OQ=j!Vb}xz14zUI|}f(CtXx|1#YP1jz%_RRjq%(YcJWlOPqL$<=X$HN=K@!m!ms zcEX>114ck<)ydeLwwm3MkvKcWlp?TBZ!|;RRmVtkBV0M_fg~(SJ#L4`z%2uM!Wvt;$sC2$(fV53{`C{>O@YPdgCO+ zDA7YXXF7voNK~fxbJ@^#kRWTa-0nccchv+lX`LOW=$Qg`t#L(MEyc7!9u_0U*?x!k zg>v4HduF*{Mz+(bSExdPa#+;S!!G3^yx<#WUT86dEJU3JzEjGb37;SD{_h+{r7Sx%Pr!lQACyKa>H;rUN^(KTADRO zZ4w~)WSnp zCR7*K-iOy|aI`_-Je14|k1s^|UDPr2^ZyWg(|9%&;i$!i;rwg2;1*SN zFHa_+V-h5s0Ee&`97^zfsUb;KGiXZZ(-acr{N*cs{|?GkX(YM&nNyn$>by&F@@Ou( z=&KqqKDAYjju=f|i2W#W<+D{ix6ayDJ?j>7-Gri$yHcN!#J*V%iV6qk6jXYd&>RjM z2~9=VqV?-p7~Rgi6T9EVcFE;iJ3?!^I`gLyC$aL;<<=lxGoOdPO$s{m~};U-kDDb z$4LC+n;c=ud**2+=s-AOec{1-L3}4UFT46J6a_JfYl8f*@2*L^gO1TY8|2|5ufjv7 zN#tg7*wcQ^--J$T@J*btGT|6AfT^0#Tg3?ry8Wd3gxK52z6%`%OK>nCveIyia4xa? 
zChZjl1?zkRqQq_7r!LJ0N(~mFfYHln%(XarcnljcoY@svZ0-?bbu|AE^VxR{%A*2r zeL$Y0sm)&a)iBLU4rj`g(uyg3pmT+^O}_)sDrX*KfVI<6nlUl86NgYFMx-(L{ZaG$ z$RtGb7d1z$!s|rMaUPO7+a+aO8eMT_ns%ntwDIT*_`_n`2}>uh_#8-kZdTiq>i}CK zeWE^x8M^sz=Ao_cdpKH^gcevGkg5wvE}Gtl6O1e3!{AFN<;<#?%Vy(hGUWik2lo#umlW$&K zF!#Huz+_jU!qHOi$-t?U{b;Gf8@kj9YgV3A%J^fGGOd^Jwz>jh!Vy=8u%GX?COWd* zNCj#+7J*AFZlc^oEl+dM?G`%?rNT@?W_3_?QSP1O1zMs*g1;FKbrmOA%IcT&+N#%n zy%fcvDkb|`>`y1Eu7(QRU>)>zu43*sF-cpo+7HnjNzKz2Wp*+SE63V^v2$bC;TQS* zIbiHu+OToPU|&j)zoR*VsdC$5*gRFNYp{Rn+=Cu<89+F$ji7ViE1$!fjw-^Q>7YqVI#?a@J9V@8aH;Z% zvouU%+z`Utr8{l+wZie*E@ANgz=*(byj$&F#?R$Jp+kfyQsR9Iq85tx;#VWMX3nz#lTzg2;gmyly=vs%cE^rX^N z*iE^rRwkS#>jJLIj|6`^x4lnBRO7OWJ~`>1<`J&Xx)?vgF1wit*K^40NIxWy;G2yT zl*II`HP3W-oOPXa$lR!bLtLIF^9t$4T1-1t`*Dm3!+PT*YHg}8--C!zBTgdOoughw zg$2`nBkaLUrYYk~QJu&s?V#&t6dkar_KO`VPUPGlKaDb<%cn6RGR`6odbwA;p%l)+ zI+Jy6h0>Qxn-S$OFP(@y|FBW)t#Gj26D5}RbZCUCkRqLRk+u#utAA3KL4kBw+GekU!)mJ#rW6Ia9qsEg~ z24zHJSAvehU=j>BJkMQGg`i4f0VlB@?d5zgH=xw0yw_Cfawa4a5>HLPBNWC$_$Ur7`9-&8rt&g%z+_3(Ga?$r4zmzq06F zO4W2 zhg!<=OBk$&-q*Id0ETa`9IE)H@V)<$x5ukVAFPkQ1NUyZaEt#kZ_3j!fNGm zd(#A$DB-q!=jnd*h0^r1+>zaS<7fUR`OK8My7^xE&IYJfHEb zz#6`!_+KE+T-k;#;WcgNjZJWRd{Bx_-jaKqH+jLZ`a)lMGc>zn+~8rJ zOIShhcoFwF!p2zZ!}&v*Ik0Q`j48Tt(*oN)ctC=MZO2>(0d> z;j17k^?=}EjP63;heBsv3Ak&TKt4^lI}LRCU+lq z_)vY?&_XAtSZ5bf31?0?=U_PdRD8_629pm%Ax*_$YwEA1C4DI2TwXcDM436`!Fh7J zgyE3eJ1%*J@^~yYV=!tu>}p1gX@KPMLf!>icJl;nAAP~NwC63dKZXCYC+S*SXhWWe z1>uSX)q;sO?2L);oGq?{kZg;}5ukExOW7QaJmSAT5L$TZ zxNE#*O1yMSylhFlqCw0=h)J}GIo}RKlx>1$N`iJv!iSXvy@vz?fwUR_zf+Tz1BnPb|C)XrRgy;c#Y?u^G_0S|Af@DA5l0Q z_&^GXu!m%e6*v+YhzL-MzH72jVsc_=V#En3v^yc91Qf-a&`M?enUZ5p*Kqy3&)ogB9aY;%>J&$ z_Ui<`9U%wLEy-;g6ng@?qfgaJ$vg&TOtLhMe3bYsa3O^7W49kzrX z6JX$KVe<1~3XEc8lBK{gr2JZeW4z1&vF5?Jr55m}>LX;8o9Akjq)I=+4)PXMt|Tmm z7QlfZJi9^Amr!L|DdJ)IZ`=}eQWIi%Q~L%3hX{(=DZydI8=={OvsqM+2@Y-rRESyfr3qid63hux z-&Gf?mZn9!<(QWi_3)PB^MQJ#@(mDjw(N6tRtgKGQZ8Hz;9HB#w2QM#%1R%LT2f10 zwR7@HiaAXZl1|}MR}(4;i<lNf 
zmdKr!gHy8hTNBO@i@%+e_LOFPMXa*oO*jk1Qa8_F3M+dm$&FKxYTIp|I85mYs%#ewDR80c%T2^R{wo?yEJvq4m-_C9#Ahc4M{W1Pu=<aC!y$9W-$+Ug} zYo{N%mBBnRbC$JptSW)dnxc_%Jo&Pk_zh=VGfvw&&n=VH7!7zA)2Dd5xMMmm+`I16 zx{Sx0nSdU7>u@{V|p5UPHD=_t43>UEe+4#Xc^ne9nmit{eEU3eTZiFVY2KIs_DF@ zeZ6>wC})kK;Cwv8)~!{9q4;xnD~>kE#SMfyosHc#-gn+I*wWH+I$88or#&)R{nVVD zS{a=-iUb-w93LP_g->{F{>C>g^_W_@Ix;a@cRSwPdRphw-V$d~lhqEcSZ~eEDNAK> zpMw8Yf6tVEb3Sc~FnNT~C7PI^R-P;%m;7#HpgMgKdc~xC@Ug*VUwX|#J#;0Au9`M@gx1>cu0(i?npGrkgY(T)kTKQ zuL`5LbDwoqw%l`-GM3)Uj6d_0=tyO13e0LV*Mw!1q1ra*x2|px&+IV8LB}Lien@T- zNN!>7FOXVb*Qp^5PtQm%FJxMls3;oHnFaYQEfcMd%eG~ZYz%@um(uwfbBRiks!KAo zrvs2%F$HG0-)8BxE$p`zaOyUmtxmg_Wf-)zJKKU-e+_4Ctf!5RxAGU{jjv`AOci_P zuy{7&PgF#eZ04?QK%P!)3ak;qZpyB&HCt?XFpuMDFJmH=p)$7ATGC-ErV@>g|3HD> zADx(H%3Qaa2sSMKmC=e_I)QzsmAet{Xd59!I}A5t%%8WI0K!EOgQxGz=ZgCmz(qu!Ab)IZee9Llk0mgCVL1#zl33hX%7~g&I+;=qP6%5+auqy}y#S>=Pj`-teEK$$ z#h;#&zSZ8j;2N>5*HWO}zULQu;Zt&dJkdrxp7E(ZH=}ik zslBgd{G(+@l{?)r>iE0|3yt8!Ma0CKZ2N80xzHKBnc(T&*v?6N*%|WkMSGno;uP}h zDO|a!%Cov_6wu@I=CAgJagSk2#93(OE9tdiL;-MXIpK1CPF6un zR`VWszwymsV{t5V-oN5!T1KPe#mX4-f>76zNL77NlkFA^J zjFZ%h48M0vC1dMkUbhQwXNA@W5w9=GO z*Jn2$hIFU(Ngo4W&uyKJe?q&(>OA*~yk7##wXF&4`MM{-{JxH+-?Z*3i!$G=4By-d zJLvvi9)Yk0t{5V%T%ue}zPC;<9o@33D#{|-P8Qsz*Wdo&x=l>g(Sg%tX33842Has zm(|hGxOjE3(G^Up58k*wY|0JRa|L^WfBpLXu3YfxnScTw%3o5U?B@9@Usyrjj#UJK zHosZ~k-VPuJqlyL?R&I$&}^a@5>z#!SPG(SVmKt&(}K@5Un)W%OpWBl2|u5YiW58U z*GQ0hL$gbg2UE>*QbmcfOHn5o)k@J?`>;#X7vXl{o=6_5tJ&u1-5^KqhMOKoaZctTG5aU!+(=cvO)6fs(RM#>sXi(Sr zJiw`;)G$ls152zqm?Z-bf2$}@;24jjSTx_DWttVprEOkR(5S7`aJ>iVeco)iM$d5r zhaeHB=CrO~E(V;hEGhZE9_0BH$!)U0=;khfFBwF-DZLwd!YzOd6+wF!(gWuYZ&=$4ZkG&px`(=XrsQt^-J2(M{8421BhdBia0mlUm(+)r?t;R0?r%GM;*>v-f)60JHfP_E_+`k1YHl3mfk`iWd#emofH*yx}8-G z3A$f29d){Y?}B^haWzESboRX#hiz^t!Wmcce)DGo2S5Uxfi83GbY61_yjLqjKd}yjd;uF z2XDG8!nZgRbqlovFM}^}BzGBsC%g0GEvp1!*i5v(s1ZIEuecajY|PiUz?36|5i$AM z*o6LksyDol6%PJLl*GPoAWMeurK6*rMU5HM2s#xlassjSKc z=;_%fb)Fm3_=6v0K0?kV4~Q0`N?Z;Zb=Ug2rysnLF&`7EoK0QoH~G1!8A+pB9Wkf) 
zsb*Dj%>4E$aYfXWZ=prrsc|mjdw}WN16~EU#ktJ8epA7l76q@{xvbyN#rLmp=n4W) zl%P6Ph0!Ev11T((KFqF&O!bGc3;a|-xlk5mG-i2Y=OFt^4om!fm}0P-RNh|faS~lA zSFAX_GU`pdnB1eXxG;8MjKxVJVc2Yh-_N|aIB^Q5(sS9P@x|J+X3|6Tvsvoq*?jJ$ z!ukv%jP+(E!e7Ps}nZA3NMvrz$He8@puA;V9 zmo#prs~CgQo3fR-PRDBVj9H>u^a3j-sT##$6%ZmT3|&WKjZMd?nH19MB*lECnj^g| z?byoHH^*in7$zx4_f>A3#s)<=9qvo=3-!(G z0%*wgZ30q!jpz-3>4AsK)>oocEYDsl~o zeZX!HIS^X+fuzK;y6@;*nh0njNiDj zd*w~4X#A{v*MJOuZRou|LopZGFE@A@`!&LZ8^@^!$99m&z!HjxJ9*4`@Ox-ewdRAD z$fVvk_N-82Et)ll;aI*mY0lcmX^AN-L9^A3=fQ3$XOqv?b|8=P zo%d)?6ISAmE9+KJ(2U7*BbPrd72d);iy)xs2--o>TCAJ#ieklzUunMhQol_jW zseEU>r;N3>GG%jJYKmxWq2azf`>mv*q2prCP^W4g*0~w!=%PAoZT%>Us3m3;(Mh6d zZR2~8bNhk7XScdEh?_y@&YOtzh5V)C_Rq^dkHG&mM*yA_&?N&R z0Wx8K7&w}@#ppSGL5K**bm&491Hs4$uzyBczxYp>=s|~|Qc%iYeKsKN2q?yZgyYR$ zl{O$G^QYAY$ie~3_)F*P<_7SE13Q6eH^pVBqjm* zH=z9n0+K*X5{OI!ig7^u{YUuyNAC@|PyRA|zuX$AzyUJvKPGR$*bR8QU&?Gi;vM)$A4q7K3tRcCvroeO!oKqC-#Do z#J?2dal2!wFPTS#ZJL)pTOu~uOXiU)ltOnDyJ)IVF)!DH0T}-jrCMdNk?L5cQKH)w zrc3Blu9agvRl1SlRH2hq&D+&|pCG%Ll(?^Fwr#IeQv-dy8 zKgCJhc9?BfElpBn8G=|_Db1!d6#Uym$)Q7GsYK!ziwPU%S6|7cf^2j9VR4D1(?Qkz zls0Q}Uq7kwi9iQKPxFT0*C^ctBM?(6^4Mnk2c|$IC<^z^~bWc-(RR z^c4m)(hax%5ib)No%j+IyGM}H69wWCG7^K9^x05u4yB_Jr!Ixpg+w82B8_B?I_gdW zjo*`e6_jQ@Nc$kIXoLvmz)PL9>v%!}>G61<1*NhJS0tvbZl-E&DDE~RGa;}f1UaiT zlpBsM1@8m9ymMz7rK#+YlcOfWS{jAzS=}87*0Mhm&Vp#w&Ww?RyzD8*Qno!%DivcW z7i+yb)Pd?rS8lQ9k{&jvxz4 z#1qR)DC~w|vbRz)C4F8wf!2O?$y9lt`1XB+4c}8vOuJW`F3Qa>m;JzImePb)D!mAd zAtsw=Hhc9e*}|!J&f^3#Y6JrGv+w5kIp&mj<(TG4wd8K(OJTNemh|;A@J;><$xXAy zShfo9xY4)r2PWmZ50E&C<;I9NfpwhR1)STeM;h<6JmJW0Z91swi0%Bt)tc85GE?X! 
zm(1sl>!KhzSraS;qMDS?8iG&UNhdV)#nX@)Tu-C!E}Wp?XqpXDiG`0(>KAbv+`bW5 zn^gwOx@KaFCDGU#hMA47<%(O6ntzL4TgA&nU6+$$u294*5x#j6b3OQ~!S3o42V?YE zplS2didiNf619gyc9xdMHOa_|NusTu4Lg*s;ZgV9?~*v@dIwri>j{KcZ*(@cI$feM zy%M^aemOf{{GHM$uUYTBLwpm4f-i%7DSJXgr9>|%RZ(2Y0<&D2rdnGJO=E8krr`VaT6hG7w3IXD+4*48?L49TwEtJKMs(FSz7cN#WzJ z(S8^h6L1g?8RZv^t^i++i+L4*fSr>Dt5zHoQz}ofH}fmeuP@vvf`aI5`I-rnJ=~|% zXzP>uO(k9#RC$EDsVo(|+Hil$0$>GyuV2}&JQN9%Gmos3re+QukySU2ON>vYEr1+V zDlLdFbD&h)gK0-@P=D;?e5;n%LlQfWztMFTaaPU#6M~8L*8(aEQ4~SLnh7xs%=w0-(g?&jNx7S-e8BlgPQkm>aqNm6Nt=&}Ol_8=taqJ*7)|%1 z+a<OhP3JgsOP8OxbS@?+dLrmV) zYHO;qJH%3gGc3NCQW2)#ynD`%WuuoITN*^=sjDj=L%gMbQyX9Ga(!kg7eIm2Gp9Yt zNYQH)D5*r>RP|%*+m{QhjkQt9(03@)w=QR^o#Nk=uuT`I7b z?_}6E_whckhFC&=9FEeYg?G|0R**GpcEaU(FlLjJ4HuqQ9-`D^K8(JkW}A(=Za5ER zzNj6c4Lk1+Vp=%#dYt<-Q#jiM56R-s*E=~a__e+H%eAz}5;@gprC;iG!5NDF80R&Np zJsjT2-h6u~iVkO@^A& za+C=3qBd4SK`}VDgto%eiCQz9xc#ii3r0dHCAegZ<7XeN!NS+o)#bxn%}ks&872{~ zudefwBjA={eD)I=-k=nU2@6YQ%aqSia)f-w>#>MXa?9s5QbDgl-(9|n_TTCubpFVQ z_5ATf1qR=B%rRf4K(}v`(O>ytM8#q=;#2W%4&zV#lBcr|9>Tn{E7i5q$MXg|S%1~zo!JGrRfP_H z*uQBnN_Cs+BVo#cxl*zf5jE@^Wm+@$+eAXF@si*dd+y=S|1o4|B$5pieU<)mvf zeT9TYOTJ`{)f|E?Y!mR)+MLb{4bE%+$djfpl%d8m^Bx1JwE6r!HW&A0G!s4W8nYDS z4e>umGre@h0L)kb3?Kj-y#nY7=#BkBCjgoNbOLbYrA7u^qrn!SB!HOy(9<7o`h!b1 zHwyqL0rd1&;|xG20CQgbr`+^XNc*dk_V2g}_;&&51n?B#@&MlW0LTY;69m900FwZ8 z0ubrNDgtnott}8W|0AJ%AuQVlfT;k60!Zo)H2p%h+7GaJHA3m2pOsnZRf!vHLFoH}Qo7 z+*F@G3viSAyktY^pV3Uf*xWy(nVwWjSL$HLa&;Mj!6cy1E2FxpDkAWh$dUme{7tW? 
z<7hzXbiTeYL7224=-aV-lW`0RcDm$r;r@_NL^R@ie5Lnr6X__JtcuxobIHisl7;Jj zzWvbj(on}U+uLRa{}Bvq8GbjACX^wXbw8o;_ly?pawf36^6|kknMxv zF)b8|GN_p*g7PXL+s{hi_>(q~FwKf{g09Icw^I}c9h0lTWGZ(BzwK_X0wrEDbB#hx z>O?ZzkZ4o((SyBY7zi5tkI2MaGY&F>Db%1D;gpPcbs(yhQR~~1X{ragL?Z08Q-BBX zimg!&oE^G}7dCt`6)+CHSqGUqvv|{bMyz`$OvmDZCREdc%-_ljtTnA;$878CTE-WU z?F&X&aTf9?8*fc3CpA7S2u_8`+0E#7 zUltU#Zd^B+w`$s!f3j}gw*XFK9Vfclw4T_Eu{7EA+}JcbZX_(}VY3mWevbBfoUVp; zgAo8lieUe;<`OrD13KN zrdM&+L@LR&X(Gmcb9Okpw60a)o7d8N%waCuZb%m5#?ggFb2Ig zQC?JM=o@nk)&L4&VjsGid;}#6Ly@G-D5u~^zi`&nVCqJE(JDw!OApYzcF!pcysQ`| zOgjVRehume&#bCfx`c5-uQ#l+@2)wk>aW7y2|k=I(m1n-j)EO-r zBlvWK5sqW@@z8*P+aFP5noyin^c8NLma(h$)QlUFCvphren%#WWDCDag}_3L7}wL> z=&;yFJPK`!L4}14r6ZNnTw@q3it`PUJ3>M=SVW;V!4&c{bjove$#-G!udX!VDZDAu zk@>0XyAW933pM*9I&V zAyzVg7;{u`!p!##s-cD^y%iZ=Ii|oX-)Kinm3Iw_Vo&o3L7?Ok(X>!C`kr?T+D}PE zH36BJ#jji$E{{phTndi#q0KrJ&kB6ohLXOQMGJK*Nb6)}prAeh6cxnO7R8~j+;;tf zcm|eL*&qwXcQJ=>%Q1U~E#(e$PZu%g{TnZ=O(Rz0=9g!MrIkhP(m#<{YOCc72qmSy z|ESW+hyE^%r5p##=x809_r0`oOoRg!m9X@JMr^5lY&@gNF{01^VDCMEqTJSP(Qa~7 zvSbh?ND=|bAcz436a>j4NwQ?gU=y3D_p{=7=tkaXix;Xv5ig+Q zq-?>3D>mNY+IHMGYFY~O?NWLpR$e-metEZLU+hY{X?x5zf4QUB!lAQbq*k+>DeIJG zI*WgL@CMTEFR8`Ukm7Y*UnUlTPUZIm-k+65R$(eU&cU~ z=9L>G+0vNt9{qltV!hUI41(yUgU_Pt_}WW@ah@!pjND<1K;@+5wYJYS>#mj4$;5)B zwV32)BcaMF{M=z?vJKSAHEW~PkIB(A+h~|w)gChwYviL7$I2gMjS})vLdelJzqN@} z4UD~WZZ`Y8GQ|E=gaY|7DwrIly@yjW%Y$3F(!Xb2nkXd6`@GnACBN+=PZ%k%A4b8W z7mw{VSsfULzW=41Ip1|z-i4^;`n|dE2X{&p>3Peu9=*M8eyjm&w~Wf*A|x#sfqr@E z@GWAvmPc^CGwju<9oy+HFMl2J&hDIf9_1j{!nG3>S~q*!Ss-sHX~(c!a9Y~6 zSlS-Td?Qb@3HSZ`(F+bKOpepq`G&R6Qj{`;Gv_^YMrpI(6!>W5!uV~>UXz~~q^P57 zdC=0ce|jKR!Eo!E%is#n~md2~^M3m0-5oe>fVw(2L7>1GgZ-|cW8a}$Y z{}w~i4nx<@keR_+#ptu`hs3J)XRqXl7eAye-u_7}`m{>Tl?grQr?K*Vi4|kT$s_sp z$83JK^b=?h5*(Q0uqfRkh8mBCB9^EULW4la#l6bXZ=bVp-NeFkj3(y(3F{6ExJy+(BUXq&j*^e2u`^D}bt zGl}&xq4fJA=%*<}5`FAPObo{$hB!FEonqlG)o`~-xW@?`M(OV*sE(j_$uW76c-7rTDn zVf!FN+?YtVD;P5kURox+QTkDj1s0Vh5MO%#unS)A#JO_wBfjFt^R7?+szF1PMu98R z&f|~>2_h!+b 
z5gk(vdYjGrhuUF9+^Y$?=z2#}L)^$loEv*(RdB5iyjRLjA3!889HrtM_8!Z7h)kOE{6A7(F--1dm-Xi{q#et>tX018=o3CQ? zh>P>`Lxf%M=(0LXl#=5LJE^*6|F|oCB}-_F@{gwxx-13{FgW7U#8=z+WkwA!V_vOK zN~_&s41RfQt+nj8T%AgAAN?EN5xw?Ca*txC%Hmt3UcSe&_q-#E>1k52 zE?CtayZ(?xa#;85sYr^j=I7{-&!UW?D@9MxZxXlgRQ!yoLXpKzpg)$*5F3^a#ZHYI z6Q=#D>|&f|KjNeF5{icJqp&neDyA&;oh-$6o$FYZ^e8T6PKp9`ngXFtMH5NYYfj-V zO~Hx}#bqbKsZBjzOSKD#A~NI7B#9XxO%-o;xWNwbS_`~^^d|wOH+%e9h#>hl#Gs1l z`Zej!ImDhgfk7vh!EDx#ThayC18C3SMgAC$Y?xl^>8yd2ck=vk1w{&NJ_~N7e^?80 zsAl(;`5ay9-!&0O0Lu`_%kbz59@nS$R?lF4mH9}-os0ycZI-DUkm=qP+82xdI?v|C zt;`1DEF<=4lUn$xKc`RZTcg|AzH27;HM8vUqTa{DQLnZff~*zu9MNoQPX?71J= zKF7y?O4!JIww~+EmLEfze#<3L#6Mp?H{S^-s^wWul}kY`PHxY1fpPrj0qR0+;hbU3 z!Wz87@%Tbn*~01R!kDhYdFrAd|1=Cw%wyS9H9gF=+M=!LBD4*37aa6&BE`Nq#m;QS zKjVvGCdE$v#b;;5u0rSv5OkgrOnxE=3aQzKrQ7piC>hFuWqn+2M6h@NSL z4o875O=XctrQv1{=+0ypUukBZX)#kNcvflMn{F#wrLR%tfFBLIGilXSxoJgtoL4DR zReRM%Dr{B+vB7<1s#7(qKTKA8IU$;HYH-%8VKOx_Gu1EeL`HAIW6!H2&U3MDhD6QO zB#QdCpAcoA=O&BR%9?&$IY9*B=jCbDrk~fO%LP{l1(e8DJDS%y$JSo6snu|Xm*z({ z)Ya7{)Y)%_zvifanTx0pt!)j0zeo8;j*8Z&J2fzz26xU>NjujMaRg@8HNpv z)u7KJYvdZML>mcc8^-Dym9**?ruT86xvPWn@s|F^#ch_ zBMDIi^({r_jdy^5A-F+^v$A}qme-;-pT^%*tDf7U=J9M(T<<4Ig8GMUZQM0&FM^v% z`f3Bt{rJV&u?gBGg4>@Lw3p|%gv-_H%=+WWx05Ee%V~G4yLP0BcF?*tt7vx=Zbne| zMRwB^n$fl@TYMUrY0q{7{UGr>$+SBc&ujKYt5saP@Pj*WIJ;aD8;<;2nJ${b>uY|P zw~_`o$GcU1lyC8IYv1B%pOWjI7VQbpCO$Xs3NG+>-s+xB2o9ocjkjoJ>+860(S47q zDe$5;NxQ}B9686)ZA{y|9n?}m(A-?#Q!v||EMIGGf!yKfDYxk1+7qMQJ;pguDc=*PUD+q!S+~`(T;GzK-{;obZ`U^vhu`5LKd3c}+}-Nf4;~D$ z7(}@RT+$A`A{fF@=vde8!Y&**3$9Dy?0HVmNkG_qCNgwvUTu!*S%mUcD8S#pq8v~_~v#`-qz4(x4Px6USWdnpJrc38oE;p z`di)l#0y&#X@;e^Mg_!MhAl?vlZMBp2l?iPMYnq&hx98o49c(%z7qe!o7AH9ZeSv@ zc=lopUwpK|^^582L2cUpciZEHbQ5nb#_dCD@4HWgaketdjdK)ExLZuXE(g36g7w8F z6}AV8I49g+Bh_bm#O6AvE(i5o$K@1;BexOJA)|}=<4f`bFY2aTwYxOMyG`Czq`OZ? 
zB#wn$4n0noE^3&5qR{xVp{%5#w=AT8e0CBKf6Bsb(vz!?@g_n?eA=pSrmb)k*x?v7izNwdRvfX_edwb@K=w#Jp@Ar4Jh{DqK z*KN?*she&ia_93j+pQnPrxLna-t_kA6@DcWpUy07b$d5_`EJN|c1mz-bfkU(ePAIr zWB^B}CqiLisBbtnadB{FUfXp3yZG1c#II0-v3rEySVCqG5(Z|@oAGq|E)u%+6vl<; z7Wzeo-dW79iY;}DjkAd_&5O(l?F^0-j@XNTH=><*@^RjIZcdhaUan|Pp=d(MV?uy_ z{$~;Pgo|Z1`skyO6{`May8h8a3k1r2%GP5H z>v>P+ML!Bnz8~9i0{2=!{`|mg+J#cBZzmj;?+2!CST24c=xYt0swG^BpIotNoFUMu zQ{-BAdA|OkeoX_l#+|%=BXQ&w_bkIsrM}0!&t+HG$5oM?)q?Hy`k>`p?(aPfD~e*v zk5S(z`Zwp^%{9-rwm#o#|2WZ!>flM-x?4Y~a50o7@#T=Nw!mZE&13Y9*zgp0k$3Qn z+0K{FqyXBF17mcn$y`gt+T%KaxtAE5-`Wl%Typ<7wlTlp@39%EvqSzIdH1q0(c?!E zUDuiASnTsY7Wzoj`E@bz;<2$N`gJ|{Nl$)SX-fLH$~ResX;7|_#Ympi|6jZVe_6zUcL z2~Q}ieARQwc@Cr=ZEtUvl$4-0Ouv2mW@BSBK0dy>kaM~n6u=DyguX2 z0|QVB4)$hGPfr2=KRY`M(x9}VP@WHfX1}8G!T0ZkD;`2tK-U9GA7})#$Fo390Hi%o z5Wu+uxUscAKpCtM!5f!XjC{y611%L``vHf~L;2v*Ti&^oIl%c_F_O+ui^f4*fHU z2#r}~DD+>*a7tWu!`MHQh&ZcD7rP=rC#61h+st0jNvS9CFLitVQWQHFJ)9ko;khH( zil3h)Jp)NZx%}9)bJR+k^2OS%)EG1o1(W4QZ?eRD-)}!F#Ji^pL54~`LUY2EQCw=) z)v08i%k!z@@x~)y+oHI@huxd^4u-<*^M!a|;dn}t9<)<>#$kyJq_5AZGA3H!qHXPv zY`=#PYl%fu12tbPmAhhVz*zT$VVas@nNFhqo{d+S+_}7~+jr9%f5$-*N+v-lIi1RF ztlhd=>>m^5C{|UyY;+hao&T>_R9w8nAwdCH`SIIQKjRE4n6S+YC^ydXi_288zDE9}tJ5e>*6*^&7Zgb_D;*uE zf8SA0ksi3-6)r@yfk*F1qhKS~Ebq7@RtE95nE(~+0QBJ#Vf|`T7 zSP&BGO~4?t=Sd(8A+jz~+7RVT{)(pi106P+H<`PdRcBc@6rHDo&k2JxcQKX>p_S3K zisrRCjLGduCZ_i3AklBQS+_u+(lTSd@|U+IC*$vXSJl5UGud{6rrV6(HXVPXOF~+4 zznjxQrul}>3$RLXKu^21AHP{pH^AP6c|mRnK4l`C4f)O3sqF zA4A#NdL`)A9PzT{izKCvjiOZk71_@^#7l@bkIt~OZWUNRy}+|CZjhVtsM}y@Zrqa% z{%S7vxC>6^_RMG%CH6>8>q8{w^qcD%2J^i%M|n%P^0)Gr;V;s@4Dz~hM6Q~jAI@DL z%;h^>SfT#V$hbW?e%TxTwW^-tE}ABR$MAlNI)v@PQMkEhO=~K5X)7xX=X%w0hT<(CL3EioK&qgo;d+B20q$aPL}U*%PHh zG=*9zr*{b@g7pd0*xK3gV1cT^=~Us0KGXw@_}W7HB(J+3QHhlL5p?O3kK{aIC@T#S zy`4^MDul%wd6Ph2MW0H~sa3GW2JUfj>l*qUnY&)4esVJ2B#zY>4`*!x_{DA!E-7}= zy(|kCs=v*$E`z0+lkTUr#dJNO>#>Z0O+=1-Cj0ByuE#x!L?y8X*J--j#cLJA^ZPQn z^C_Q6``rtCMCQwd36rDR(2cn(#=;v_^GtZWh_?gC6ki+;G?`gQ|UOK%( 
z?z0CQgVelsF?ajl$_oYzM@_3_GR|c`$J7`}IJ5sOJeR9jJ~@&?eLsiWXO%lDUM_2R zG)J+W?Iot}uz>3QAhkn1rO##~xkh7wZ=&?o9sMEKGITMhmOY6qhA@IP_0X(ec@gul zqg`WbCV2?s&=MATbuV?KxC^6zop`}ZN&Dt!Zrz8tJR4|Qx+;aXgp#jW83S6)^sZ?w zn9IR?G&q9Z`dIbw7U+`o8vy;;2+3$Bi zC#9D~ywO#h%P%RZ+Nu)Mo>1++O$o2*;}GXV#ZS~5Y&Kf=S*Yg5FV%k$9yuL&d0F`6 z1Z~O#8GsU;$wOocKcI+zS2#xdu<7O6~R}{1=SUwaLg?Q21*Phq! z*rFfKj*i>ZD~bKk4x;{YxMJ35nzYWyIQ`WRtGZ6Z#T-g-vG7Sku;m8*k_QFt0DN9h zh56d1>(h&G1Xf3_At6@5#)%VnzpA@XRw6rL31aOuojFMChJA@Dw?zs;{`LO z>Pzx5k!aHH6UR0yVx$vBYT91tr~2=#FHXB3H15X^iY?zQJ{@pH9i+Ut8}RgjAq~aC z&zu(x8_LBJ$+MpzFF7SQ9rw>B2X$BxpVGGyC<@Q!l9_6b7kAR$CgO8;K)PNu?#B#X ztcdwQuxmLVe!<;@Z^JN8 z9^%BpHq^MUS9{V@dJvmHL@Lm+OgtNhDXC&V(8YSOuemX(e^6?1->UNBW%K6p_qg`O z9pj$2NUYa=Hn%1?Y4e2ijT3KPVjnSnpGP>P#Z@ly)h@E?q;iu!FNJ(n)O}T*eAQ!p zHLHEKCw+BJe03@P^o9Hk)&C^J&8qz@CjBf={H!VAwnA`wb@*S&@R#arf0E(q{(esW zDF4`hkl`o(p_BpPLIIKL0nxw7aCQGQMO+q8gM%_q*#^&V7msH*ApNQ+HkpuhpkgWH z88k@tW)Qk$P$gwhwNOxvdQhEHQ0+-jJx*{VWpJBNaK~g&7h7;QsKXOWR_Gs?PZ^SS z68zF8xW_4^&p%{5He|LsWWGB1Yfi{Qb?_)#$VyJY@*3IVWXKZR$DOW^KdL|OPk!9( z3K>`nT#fw{9sBWY@)PRh6ND-hO*j-oBNWRy6elheuO<{@N@68~*vXrC2~DDD)s^m6 z7=;QkL52i{#;t3zx3=tvX{aRVx;c<=DT;gH^UC2=aZ+r#Va@ueu)x-ECJb&aQ}(;U zu8lDf0vZvdrBYRD4?qE*^(Cqw`#vtsSd>G!tSzoT`2?5(STILjFd(?@p13P|5)1+C zH*v$roFgiwAK9GXo?#6~v#f1yUMdHQtf7G{~f_G0GJ2k99myo}^JKvT=rhq^VQjd`q0GA^l2~{NP^V z_bJj9=VZ~a&A6x%~#{f(HJpAz+8v~FGf&2nsxZ3snpGf#B;p`Cc8wM{f zj{~F@)-(3odjo)|Rrwe|zW^#j2~GjjoT>Zi%z248JS2Fs2jKBPbISlWUi=Ec>FRGB zTe3U`urWZe0LOAbFFKaTuHf+v@J?kHr+)x24251iid=jNy#$@87MG_IlrP7VPXL;w zuvrI)7bJ+St!)6f3n1Kjak^hO4 zGlUf=#MD#G#xPaO&E|R#9BKtRt%2Kk4h2(1Z%A@}cN{iDJfmYPhj`WDP%QlJIP7tV zMlXL-ywKwROFtd?+ib(PA}T03=Y3aOqOTzC=IQ{HpNLYSwqkuG=jp8iH>!%w3Hry` z$qHJBQsXLmoAZ~74%=U|)H?eVO0a)GIviQCw`r>F7HI>nYg;FtS#I_vnX^JJGk!uQ z-=p9k#OL20Y**C1U{+sP2wD2!*lB!?$q#Q>%w&>{S0oVr>nY?OH6A0a4G!V2?asM( zt)-5$-kqjAt&H-a0_eot!2+$UpOLYe{c?15=1#?cfe=3GW*I&gjqrT|!?$J(2?59z|8f#zm;>~dwklam^-Z zsELC(F}xYVQ5RZVd+#E?6<6!FbsL$Ms7*VivCE#M|Hk;9IfF5+btAeUK1~a`l{{xO 
zXOLJ}Ps%#QfjM7YUuciO>MYfO$Z>sm&%rvyPjl&WH}-DqG_mmEoA;7RdY*feA)YF$ zRrZf9@5edPEZ7X3*vNayg$Pve(TrFITxxJ0!CmOG4|SN5pWPMlh%SV=g2%fEKh z=Y|H)BVS@EU3;N*PTQOQ)C9|mPnfCnEKhgp3g^!rzAd;1f1D+le%>9mX(URm&uJ*K zL1v?bz1i7WSYz3Iy(yI0Z%86+yK@$P{;!6e7E!NuZy1(fb~0B zE&heNIQ{`RT!K^xwhR)DdKZQxpNe*)3yCGN>q!)xioq(=jHkKlMcJ2%CD_$`&1Kh{ zksuA{smwocGM-vj3q|e!d-EuQbRxJ+8x!@Ozl?l3NlaH8i^yJpVsJWHwoE&_=3bzB zUphrqS39@MUXU)qXUa~Q4&L~^U=#Vz)DvAD0=0V~*1?}?S7kayruRNN^?jy4?&_2{ z+xr9~$Y8{l?RrSPAL=ik!E~d$>xsyISSWz2*pl7PH21?}`!ZMryStye>_?;$WU@V# zeFlx)kG#UkR~?6IK|)w?Ca1A%ujW8_cqPEej@`XFXSOlb7nwY8**<;hk;vwZOvVV| z-nZ9~(Yvma7%YW3RZ$U%h?G&tdl}O+n#KDn2FpKBCHvV}p5|emIw-|Z)iYM)@+u#} zX~s^tFjDsLe!({}W-N@7akcQV0_&t@Evq5swtkbWcxB$6;~dPjgP28Z>Ch z0tQW2GuCq?8kEc-$_ul=@CDVB8kQXf(*g`#pchIU%vi#7nSI~<6vkn%TZUCwz~PMx zr89{x7lEN5CVU(H*HZ9CS^b@ild`CJ)3KcE#3tmO*c-(B{ooyJjzPus z57nzE&)(o?xA{++LwOD&6PyiHiV%f(p-B5f3l)YS=ZbR#7hRvVHb#DRQx>5~`f5R+ zNcqdbk$$@rDnd29UXskvqy3mWuT7s;y+n(j=g;mnTW`*KpZJ*tksY-zfnGm2R_K8! zFi1n)yG%4*6DWZfZtDN49W5a<&x_%LWQ&|n4SlSv;OpKpO8~~3 z@$KC5{%s?xx|vB;fof*X9h+NKv&;?UHN_#Cb>ChUyS+YUzmxlbd5a2036X4sR--{^ z$E$)dC7YsTtS%m(UHfGx7=66mgg>L!D$iMcxm5`9sVgQr+dOU}r`h#9C`J^&hhU@h zY4)I$7xR#z0~GCm#vtlCTKyrH^-ab7h#yqL=jf-cvO5nhL~hg`VMN$>N~dW!$DR9c6C&f{{7@>d3nn1*qd+2zPYR9Vf^^!w<0^RGY8|hvzsi zBz;?HJXHq|h|w9EY8XQI@cSwzQ|>$vzd`St%2NBCLD=DGWn);evEkMYVb{m$Gf_dI zj&j-JkO#Y{W90Ql<{uLhCvO+R+CO~VqlP4C-66*Qf zr1>nc!kbA93K92#UIff~V2!6>{k%1d8#Cm!&tM7{_t8W3seQnj{&aCt^DA_+(+%}h z=1&ws3F0qK5lA#77NYQnW&gL%22cSAy+LFOls6FlgK+=1K_7(mAlwH*Id}+w01$8k zNevty`y*Ti;W!Am|Eej0NE$p51kJ$k0hA&TF@w} z_Xz94^_xPe^Y6%FkF~6tl62`kuKOAfIg!&bL$Im){ATQXpe(U|^Y=_by)mWM(o`5p zZh!Mvj1n3OU=ztqm*lMM8V!Q9u`RtcT9z3P5&N@E(f80t6sF(9WLR>)hsz7Be~(an za%VYGMb&gUO8viRQzS?Q%aS!Kl6{$A8Hiz=NYyX9`q*J!Ln+zid7E(x@g?q>wAVE= z#WX(>ACq)6uAZFF0T0bgem4ce2A>K}VNlfv=Rd0Y0A2v#dC=bas>QV-0&oWa>Kh_Z zhWuB3tw9^?-v%|nI~e}a*dFx4Hst^BtU3Sh%$&bp^MBz8;5@iWF2G@gG<#xQr%_OOYbsGa_(<|fZ@8AJM4j@#xdI0p32L}(wKokLPAe;b99O92y 
z0z?ZSmH@mPhy;Ge65uIUC%Ot7z>ll2;XlO;{}+z>-*5T9^9bP7Luei;5@q664&q|6dYz$a!=h zN~Ot|R*J>KFuWj-@QF@j(!3z(F9KZD*}~Ev7eJACFl$JQSLyX@IMzJGkf)zWko4bc zbtSQ|ZgLx%q%N|_Mi?)9KyW3IScHrs{0ltaxLeUU)4b*;D@(j9%pa?)#P6Do*I*1# z_=-?V{@x^UuO_r4BmzPQQT$^G5uWiZf&Z~ia2E;dvy}TUj>u|BOa}h!ocA9Ycz<@v&1TPvMNkL_>duH&2>11||S~7f9&cIqpbJa}AD9BRg zAH6PX%9eXUli~Tkn)21bRO#QnE=O{d^C|A6>Owvj;x=+qyDnIyBQWRb<7tqVX$>E+ z$&pAY2o<{zdb=bO7VP@1$X}e?skJ`yQdHG4dNXW(*XV1BM!4x2VHoQY-mMM|I+b63 zhw0>C_&H_48o}}(YXn#zz%l{W1u*l$q5y(*FmJDBI}oKKWWj6)>j79hz)ArQfMoz2 z2P*=Y%wWq0-(X?`qztOY@L%0R!0Q2G1|?j;vH_j~f`SOJ5CAP5VGXc!Z?7!?(g;I- zW+*gd8Uc{?71#!l8mubdjx{}EIs_btGTQ=x9fAlk1UJOrWp@8vljeWe-}%cL!9yZc zJtPV7!X?HN6Ot+YhQvg}VG|0qxdjbkyl&~Ei$sT`-6r$X77o>e5))C#P~Hs7?8n6A zW#nOG{H%~5#_R@L+s*#+U+(HCm9C+iZSgPenSB>JG8jd_t&pxlg=hw|zWGUiO?mcI zjuiX}TOOkLYZL4r#X0>+IsTiPBtj1l>e>a9>`dHb%XpUsvn(!4 zNa&&$8U&w1=WaU^4T42T08!CR)mkSYy`wlf2}wuy{U&Q_uc(C97DP$&T(VOeT5V0! zTt&%1r+d?9d%v=s<{g*f<%=W8?2OOdl()YT0M*r6aR(9t3R0>5LlyXKy*WI*k_3Py z=gNI^Itbi1z+3~K4O}z88UtJ~V2uD47+`(@O9XJ`fV3lE%mHo`Fwubzj0SXVCews2JnSkP5)^Wu$cgZ2{4raM+tC|0RISZkpSBWaEpLc8n7t+ z_K5&X$p5fU?%$XD?;ZiLR&bC}%2=ZTnv8_~D!1sbs=y6AF?HlZbMPMpsmcu7$Y@1v zA0bIReSa77wOhF=-2Fp5rxNW> z_$@h!M5hgXNC++)k!b3FiYJ~E-moyh#N#baR;a7oo~g#J*g3!9u+@xVi_h@9UTA$T z`0@<3eWQGDb?9@kD&9@6LmVP39o0tK61(+k4Ilid zt#ax7aj=;ZqRnYBaE{5AYI1psy#T|(Q(rJ75Ljc>$CqGR^dV8j8hcA-l%r>eVZD4) zEzXzioB!=-lWzfRSvlVVSv}Ui1@UySEd}$Bn=FM0t>i3y{Qsm1oa#fg9oCm)bkQIy zv4+v6D{&@SxhwG&m9kl}mJYbaiT2~Bt4U5p!p4cNpwoK_3`@i)3i(;UUsXq)CGO%!rlrL8O+TV);WGo0nkwRq-v1*+zS6*(Oun`*e$n}up~ zUgw2{OMY@i)oaQ5MKxQ|o0hfPE$2J62OV;Et6w+97a}f>^0OMJB{+D>hD79ckyxzS zyDd0;Zo948Xj?zp$Ufb<$}K9`>!5Yq+UsP5bMANDjk+l&%$pEiynZ zu#sqa?H(i$;Fjz{mXR@HFsvcbXk?^H$RyEdD0MLfaiU55kr>q98L{tMdlL94`jFpF z!)5hV#YWZ!3=$|ni5QRI zgYr3**V@>C>jvXDUR_?hzOkdPe>EN|R~MyUz4Qg#o0(l68T$(5J98>;2bUGQG#gMH zP<%8Jo8JE8$DU0@1Q_3dk&*GqZ zA|LpZP*nBNtUkE$=^q`PfOn&k-VWr9<>gHBgOjqP!vabyR41P&dwf`?{aVN z01+Vz#vU9906+PlU~CQ!embSMS091@0$=e+00Lpd$EQCz_3huiZT-9c9@rlBPGw0` 
zQ=7g2TIW7d=3naE6OQmaTgE1XU0GEBgCP6QjCae0y|EH?8rUBHX;FQxVg1i~jU4@X zAocc2G}kE7)v#179hLkD<#TjW43K;ROsSUR4Kz3J%3)l;#y>}s=D!e69ZXvk01Lor zaks+T!(ZqKZVNIsDp^EAgYX5U+MuuP-UX0d)0S}wyS=e8kfbFuxA?8Kd*YfQI+Dv3 zzKU<#gk^%O-;EoKhKGEo`3+}10){z}v?pl&ew{a`I9OuRC6q-X871Ih;ePf$I$85iD@}{{J z&Dvdejz{q<(%R@C)IS+p9LB?()ctP_zxwua)?FypfFa$4Nsjm9#Jp?WV%MlIWL7Xm zPOp9_@g8K`@b zu^v3y#Q-T~@oGWiO4Oc|Nve2xy2(0tv9vD?U+U;)>uBb2qTtE)(xjh!%yMxyp~xgG z7bhHpv{I4UoY$Yw7&#%~s#v*&F)N_sSk>a%Jm$`#5)BT2hN=u+Z6T32qmaJV!nHC5 z4imQ5XR&WcPSd=)@nMNX-|_FNdy-U^8}_2nCzUc6q3=KLjz&WDXVM~)`rm)Y#Hh4R zWlR(vT9nToGkP1P{N&C|q$-oJiM-Z)md`uq_-+`4&CRnVfjT-7*-cas2f-%1U9uwU zj0cfQu_R0_@8ka=z?%kV#;42Y*a)=$0=IQUCwT7(r^?12ZPy|4(;$S5ztTFkf1Tih zO+Wbz|H$5@V}mGZgeSvMn#vhdYRW0B!^M6g&W6$-jJuFwqW^&75PInqVJ)TSe$#NGRgeR>&`stPHnFQ?(DU5JzDpy zE+1KFZs9n^N^oO;b-~#2!fZ<r33Lduda zOqKb)o_Bvq_$bz0ZJc@DT4H`F=0|QiL(x+NmWD<(>rSANv%y_$!S#dLAK9{YE61^ z-rynm=uzPwwz)(MiT7)HJrsTKcmzh~-JuI(Et&lg3j(7O@`4uf zdz9_Q>HKZ+C)XTWN$ER_MsKZl-8Ot!9s;e13CEBuHZ+)}A84$i#oN8(YPO|w$X_A0 z=PC1ujfvrWU%ioXQR&emY1I+c_mGRI{?KFej1z4PNvgC*macM!fuWCmq)M9c{T8Gz z8P}c?@7jD~8otF$72EWXJv=m2F9_d7cLl#@g^R%qG@m9*Q-rQRjpW?&{!!aTO|!4p_OQ{BC$S1%f{BSQL| z`<`jgOX~F9YW1hzOEcs|@FP=RR+eo@NhaH&w68(fta+E$f{mwpvr~gfWeWRyu$2$u zN6AJW9EB2!@OH`38@%@}=w2|RXg3*S(j3Y-{KU(+|A_NRg}_G%5^E_O44HM+INxLcG*ZNbYrm?Gb{8lzit+5uH82% z*0;T8-+hr5rk?6gCch_}aSMa+J87z4nz8^+tM$s?XSk%N6y(mT*gKYK;c2w~F zjtsGQw<9n}a$pBX6)?B>FA63YBRC68L+w&z-d@2FssYB>NBu~}@JvoU0`Kr+t_`Sd zHNGo1rRPhhZSK!Z@65#I5Rq*%u+e*{NCyW@EuuRemCoM8r9w1Q-Ks}Qv4`8TM$=u> z1NK=Xyf>ajp5LuZ%#vm%OK*B|i*2K)?5R=``@O?f+?Na1IT}%Os&`>L{9?IrHkBmt{%KCgTq z&SaE!9Lt+|*+EO*mE=faR=wzMR9tD9QC5bq%bEOM#QnB#L1VT7?zFO~b+-skxdrB5 z@&S_O`$nSZ&O==H?BT5iCU+00m_x>G!Z9yRo-GNpgmq{{@gkW6diQRm0o5`#06j|htkK|&$049fexcM zB@-(`!fjGAW?5w+wPV{^JI!$lKaNH~kC_Vnrjfwe)gs37h9%pMEhV*>|zK# zwjK;Rrw(%dY^T;ShHYy|(4=hSx77w48yYk&S4{G+&>mngx=c+8iO1P^K(N-3DPOmo zD-7m_+0~w}D}|qp310qGlPFkQJ`7vK$Xxg>#uXRuhVB5pZKU7epubjr60&_vRzpGg;En; z-tQe0JFEK&!=+6**Nc9FS~bg9b-xgVA+p`(Iy{QAoiKBr%&C4T%N9qk_PBI(yVLwa 
z7yIdVBQ#4Sx@HNM^{$6`qzB%WJCuZ4orTuPiXO*{Nye)b?!ji_m9qL|rQ<_ICkzeY zC8F+SJ?_O9YyV)Z^(ax0QjT z4B=!jxC@Re%J0zckrRpTg!PG`z3FX58zDFvw&R)_Jh}_Mal_H@#9nXB$%OI+wm;N8 z9FlqxAP`G__r}|(1vN5=y4QP;1|wD#M$G$;tR|i2pYMTgDt^5U!)T zq4B%wNtQ>filZ{Fl}|htWyIs*9}fl8J90?iW41m~w05wdRvGN7r@^`Y^{GC7)kl+irZiOHOv2%!o?*A0!`WoRS>nPur@{$p z!nvtX5xla7Jh(=qPaqU`$kZu6-7EFg_T(kXAvO9QrVovj6pr9kjo{m3Hvjg-6^9Is z5%adX|BE<%+-g$vShAh3O$Jk_2%(&6j_Xtg2o6p zzkhogb*qy3eG5=jSuG#g+c^l}$OK3Zazb3#;Ze(ofx_GE>0AiANc0U>>ou0x8E@~wgZNxoY@(sKuow$&B~M;WS7S$q zkEW5s(ogOlBqNMHC0D)Fk*+d%F3(&txXO9q%VK&g85|Lq)77a$5xhJ&*h!%o4wL4T z{+VVf;){+xs;V#W%+eik!?}7`{e>ZC0S=EG)n7*^`I%;0dwPo);0Z5a5y5a!6uOp* zs!{Pr#-C5$V`W=VD6+gh^?-Gz3zjFF^<)-3b^U>z5mn2AE0l{Xl<0~R6B6s9Je0L_ zY@9g_RYszR?#3zU`j9I|Kds68fMuQQ;t*{H`O%SY>5|Uw>1~(C$4{C}!4qrtGTWf0 z!0PsAMT*Zt&iQ>KYFz=jhZKIi()L-;GAkVTPy(>%XUVZ=g?IdOpWi4N;>$G@fzaT* z7haD(uV7imim7EU&R9#cO2o>TE0xxy=ZW}gCG1~39*71(>q@V6M!$K;C!O#KO{3ggkd@t~gpeZo zyQK{O`wc0 zu7fn+ET|0R&xRTR4g=u=XbnJU0Qv$@8GslGeuH~Jo4Ar34I%%OC;na7`R~8;_Xzy| zJ_3NCLH|o5LUn7TB$VJ5$YA;xf(Gap6>Ex&ZDD^QX!JR$7Bc=L9|A(i7<(am)re3g zqESo{DXEb8M-8#DH3$YqvMTUKgN7R9*gT4%NS4Vf{Q_~O>xo`&z6A7(8{Z*WSPSD< z-sm@;=+ljtSNcV{e)ooWAMi#etCe$m$k&v;jdVijfP4tM4lIrPuJj7E{8I4|3i&$y zwTR7$5`*49^ost&@bdto;QG&_LJr2Uk4iM zj(_x_kHZ*VG}fOUtc~Qq83g*p@$OP@%8RDP%kz_i&98$^O<-BTVq1V=P?#)u;;`f_ zcme%l!JFs_&@aeTO%{DAjdB+KXdTuT;f#L&qF-=U{%`6Rxy#YN^$Wi{|EgcCuOwJ^ z+*$oozgWp#O?Er_L%(2OOZA~J`$NCDsv)Ln{D)Et`6E|T6RdR5D?`K80{8dr?-BT) zIs!O2zjHNpf2T40krpWm>4s2%uPbK)d&~iI-!D*}}D=YK!^BVcPQwuAg zHp%+>x>n&nC}Fa@yL)y9N~i2^Y-}7KAN$sA_|&f7PuS;=-}^E-FO|9%i2Pwtv|Ze@ z2E@Ah}^h&67DcJK5&olPA4fW=}(+kaem@L8mP=T!Il*|MeJvi47(O7zQDfat=Y zYeOM!GOjMpNg)mljR-O_s^ksz2Wy&lK4IIpvBZ9C_;b0=?_oV}Eebvq*g*a7-dRI3 zKYsiWtJ{*xTh%m3vVoxbk%kkUt7=_a;)UxaLvIM(uccfYiU&u(el0Jw!RmFa#SgxX z8ZH0d`Z4A$YOX|#j2(cp7goOw_F^TNPhH0K1nuGM*Fy1r)94>*4TbdJJMJ7|y z3(4||Z{(db7pCGx`*=d7x@cst|FnaaR69F^G$;Mrc0#m>U2>c!CxRT!Czu`0Y0^=8 zlaj$2!ecdri7-kUrB|?a2NZ;si*`045m+j%QFtVd)cG2Y2uBr}cIy+V=(W^zO*Hao 
z;LI02nKrv=KZG(WU*UYF1BVzykA~>tAUFa}9T}_8u7ROpKOpJTmzb^f@rIO%DxDN= zpj+$SvgWcN>IcgIaAs(TA+prm$o0f@db{i!;;{f$5KZ5HI9BKP_o>a zZ!q1X6V9Ji1wDwh7h&kXRTY3s{$de8A%G_Ue*hxc;|ahJfJ*>X03rb_0>}VR2w)39 z6@U_AdzNH7wpBYf0Tuz~{yjf+e_qwU=?U!7$?xmU;b=6;fj~!%7A4GqRl$h=biKJ= zO&+-3T$ZMB4&4cbQwUnLrqzI5>7Un|_t!4nCL?5f3QA^PWB<3czxN_yMhTSJ z3@FxplLP`qHY=UU&#^C1MNu;22`AbK2x_GG#yd|~Xr2GpchiHX7AcUL6uQw6Ofx)B zlS}Q*;7`XT6G~o`;j?*IM&d7(wGfdeS(5g!2~1-NsH{DXVhFJ31GJndYCj{wGF097U#g0^PU_fSRmJt+MhYC@ZI*&SuHk?<@GIAl zPu~UD2U^7;7pQ%AmsARpgP#U%@};|1(|6~0cjt$9+go?#TX#vHc6VoXt%i54(syw< z+|`*~;L$b@zW8aEl0g{6e2_uV?>BOiyx#t4w|!<;JYWcznv+_0(|l@&?oWREw%++- z&A`9_II`Vvt8}jZVVJ)I%-WKBHsMs9Ba3T3TA9Auw}CzR{QTnM$B%znuuV(?i}5I% z!q)p_QBk@1nO!+8BWiBtTm3^;+u2oV}N!Y#d#fzE!2aFPQk)+{Wk|}~w`V?>|2mWpx zibnsYAo%mL{`m>~CvuRmi6&)tM$-PXmIE+S_(=}`NfYAzsxy<}{7-WDhZPLlDW<~5 zw+;7f%l~Gi@P53|p2`0=IsBANHk6N59iRR!nfyr(&si_pwEc&?djHms%so3cgDk^C zB6#90$-k{&Jd)#6R{3eiW)4~o+)j~CtK>0p^ey#3GU@$(#>g9jX31A+8I7+rE-&<5 zm!wLt$9;W0cvtmqTWjFT`-xho*^2wB>z`*jcf$B(+SE3`zJ8p0y0Y!?_WGyA&bu-X z)PHQReO;Wbd~jrUcZU)+M}V{I+FwFmJ7Eg_t>us%&Uppw_Hkc zN;!1V0#zM+bK<3Fc5HE(kbb5_^n{tDK%K1q_8AQ=1_Ut;MVUyXl_g_{^nzq`80~)d zzVfiIim10RRV)(@(Lazzpd3)QcdtaSd@s$V4kz2+=aT!dK!IkpcY~PCzn2v=B;&o> zfn>6}osr!>w0ex8)e%N7rB>A^ta^UACRL7BK5}G$x4d6MhWEpeoFa{QBhmY%-;FU37W7$-zRQj+)gUpUP3<$#MuCbmuk29ew8 zbtQu-sEn2Ti_6M#0&!InNXft|v)WBqDW$*3yQO^f#7hu^@Swy88WZgHM->Fx589RB zS^r`ixUXie?RbXVhnDR--Y@|vPaH*_n>CK@spyd(;cS&gs)(6F+s5}a*sE?|$?H3W z6()<6=7x?NDI;dymsa+0OBokbG9OF}rNZ3ic9rG}mu81ZBJ~jN(n~?zSIUG4sWJlu zHMQkHXnBtxz65NOrOEQIbJHD*-dn1?fu{0$? 
zKZp0kn(`RpV?=Ut_N%0$K%)ggEPeUXVpaKbucS^Eg^$+}l*`X?2~eMqcR)x^gbFg3 zUd^TDqtt||s_H*p4jFQZZ*bPp47>-Dj>g9{6g^lGEAP|wm zi|4*(ZS17Xw&(4~J-D!Z=5yJSfS97OH$%eR3=4aSsB{f`J7#y$v&wP zMFUsP$tEb$_zlKAgM}cQh=x!kRl38N1UZyu?oz(qVm>>p!PIkWI3S7zk7le27n&X} zpzp|7SEsR{=O(2yet{X4zG=InQm=gQtwy8{yqejn`Hk_$z551o`APAL7%nvYm)wpfyt_B_;GY*gr`?1^M2g{?~3 z$7Zw}6Gw8pa?Ty&%Di`yCOo1s?u>@SS%tj%Z~0AeZ!2}r5vD?!&1K{-RQm-&V(ug= z+0LEu2P#NZA3ZnG;F%0c_8QN4vVfN>k|^UMS=W%)`ECNraavd zq8Kj@Wl932?a#@q;L^(~VDm}tR_@M7 zhU*U)UvwJP0E5!CnggFMWp1~;R9EsUJXrmml1J-l=&(N=2L(RQgvN{SwS9j0Ie2?} zhLcp!21kdB^3Ks%yt&u0`|wMIYU@1uYGaoWz@~NU0_TIq9{J(r_@LHB%&zi-ovWCY z#LU*0;zybW%!gM}TUuXXuQm-i8GlWG-uhbkLDTR)7?reP}E3=+lxrHn=~ zT=FDv<3g28t~Wm^G+r&?(S{87Ytl2|5T##{@9h{{Cdc7x)pzqg=-tp{AdnEXv&fGZ zueZD?hOakW&HLo{s73!v#d_Ozu-KK|79F_EMu$iEm&oI-3p&J&ZlS#8gzMimFfyA1 z);r6$ZlPYuf8KcLz4P@BRj{U-OkjI*+qa^p+i$h*ZVgqotyb}Gzh^tLJ(+obt>IDd z6R*?$Gw<8pwe8;jV$E{p0ms#i)5qFYJnn8U%J3@grZWlls~{&%2z! zTXp*d75zr%@ep#{>9tq4wl^O?+G#F3^5L_`Rp$kn-L1lion|Hiur1%+*#-@JP6Aw+ zfWQ-w{siDqPSZ%BnHuW}s1`ug0SXCFfiR+lK}`Y*5>PW7(=`Y60H_4W z?OmV-;N%nr2|XwSKn4%8e@!hTkp1ta{KON8XR=gMT(f9$Yf%9XDAm}mpG(F2xFs#OF@Z^)i?$^^=N1W z3MU}NSWy<_^jaJv4+a&E)&N)OAn1z}Atpj36VHA)yGM4*(6I4_XkQ z9r3dZ@o9Gt`T*+y`k)a3IuZc!rDfFs@c{Q#twW#%0YEP&XY?27RgM?|&{yRS(H}6{ z1O2gpq5tkT_0Qw|Ie~vX0o3yEX&?WPA-5rMK20=VLcbgS?bn9=q4}CNKTkS zB5U-MvoR^ON{*B0j<-keKqX_}M&`SE*986F>nriXotnxflJtZ}$QsP&}x8 zP)-Ykex*Z4|M4CB^XES&@ZWs`s9A`m5yDlGG>9-buUb)iSWE;Ji$pjI6Gq@fi3_Ia zdWAwTdO5R{G;~hyb$N9YUfiXe_GEqsUP7N)4kaCxLx=*_!Qo-Jk_KnpMzt&A2IW;z z?e_Nk$Q&J=aD^fT!nNy?C{cpdH|_8&L}>$}D)oIYt2r$Cg5^XRM9tLEaQpem_G2jI z7fqBk1~>a4Y>etsW z1ihiC{~VD(Bd4ExvL#(Gj0Udans+CCA(oa$_d^9eW)JhfB0np@JS)Wc3-chBgRl<< z1t9DLz=Mn*j1BU=Y9pU~T|X|Gf#oUi%+p93b@v$Oo9mX&CI? 
z=knkEF8_JFKPT`XCxBY{mEw;;NFs`b;U&}&))58Y?~{myayk&g;wS5Lu^RN;bY@sx zT}Yz?L4mXYiUCLyPHE_8cnHcTP#~xg(RwC=I+;|8@R25SQ7Dp+Q`p{9J|CB(L+glM z7HE$upz_q5-h>D+3L!-lnkVJ;1SG?E#rFK1%S72(iZp+y9cPPz3%-2F!_iI6Mgylk ziN#)o*kvij?QE0@i@gc9tmsDkvUy*cm|?|-OK%n_6KJE^jnb=^AX;uR3M)j>F>uRV zs0wXQq)b3KQ+SRD5-6UcX%!N$;j{>}a>{}}o&q@PR|qFpRCR~ro&@1nrOybxfS7(O zng9m?CIAEgXaJxeKp!vxK>r@}0q6nZL81@fz6W#w>j&gcdU`H^X@F*cX}}G8$vuea z0NEg>gYEe}K!a3a57Yl~@_&B!=LG&`P5@N{Xn>UyLneozsMrLOsMBYgaf&rS69g)~HM~+B^44dn!+6j;Ds#Cu6xn}Cz zep@;jF7D=21VLc(2KI9}C+k*-)fr79R7}ID<>2Cw1}+Ru&m!T9A!s^>^JBEhBfm3v#-BW8hZ$j!|=Akc4aMsIE=0Kl)Uy;@%%-rS7aL;l7_>BdGai1L5}*4IHd@bkt- z;pS%In>WDC_0#%#-`d)f_4TIB%_zVQ>+3z6n<;=Dt^_03)}|~hsCIV;_6TBiH9I|B zY;A23Fvaff*!uc?Ko%Yz$j!~nt5;cechdlCtgXFOQbMh-cdV}u0Dp@YFI+Y^(?M>q zyIZ`mk+-%sv%cQq?2N3eI<&ic?c2A~?e7s6FDn0R6z*1_(KH}`z+k9XS3j<YM&%FhX}MJ6pIvrxz)ij#JC7t18C{ zVqnwZMi-NHySQky?QHfZQM4b}=WM5qV{wY5r?7m}I!P$hMZ*p9c&F@%qlq{@I+2); zbH`&3!wG3_)f(f490w3pWHy|66^A@R*|g!!i8vU}_yGFV0UL}BE!Lk@s|B$HP<}`) z7AC}yiJOtmOax61h!$gz&`Kf@2I-s)kt7}a=tP8ORKpcZT!L_p7hN7(gWsckdr*x0>b=KsMnQBCmH{>Hnv*gZueaQshF z9gYd-8GND#<{S1QHOyR;Ny#0*?!PV#mRQ z=;?Xr=>a0+_wWE>BtUTddLGHi9yK*Jz(vKw16a`h`VaV0ef`f{W5EAB^fEIuJv=<> z!4K90Kev9b6am=^ikj|!@Pqm1q5pkPU{9f>Y!VI&Losq={=w8u-t9$S_3sNK|A|7W z)C9tsM>X`K3^bwmBn|)Hv%c{mazN?<77>?&ht<>p0|o;nTj$$^vK!wUqXw z!orCJgw`*GQit}<8!SibgpV4FqQkV&h9)=|4}LBYLeN2Dlr(f*1PUr2l1u8*>_cL@ zer`VPX&1)cis9+vuEF3^5rUhl*-+Qr)uI`7a)PB4}bEoA0J`I z=_Nw+q$%-UCnXaX(giuJ_|mCNV{yt0ZFDLWG_3;=&4rqZI=z(F)T-|!?#>*7SUs`i z@*HbY^HYcOAJCciluoB)7qxAz!^b+^e|+jro(U?Ta<2IB`L3iyTSisXv%=LuqKw$* zl2b=UUOeL|S6Rv(yt*~B=uzRiJkNMq^228XoA3Hds=dC#u*5KYp~O<28ReiO<88h- zqR~foed8|hDZ}pDPbil9K7t>DG>}Sf)NYuRgq?4hgVdE9Xsjv)UrT1wMO-Rt8YROE z-kHybBW2hE4-j{bq*#3h6r+;wn3P=YtyfheNt5WCn+vw=S&H}8b zm2U)$mtK!fp{A_pQF%`7>`ZIVg_%}PR49-jx@!xf<2iK=kB&uZ`J;yW0-dBn72;1-_Z^+thZ zKXsXW%-l|LAtQe=;mje~o0DZE>@~eMx!{Cz%i$u2CG(4}@{L}#e-V9vpJ;G2g5H#i zWoEUfj`+Ra>u$WG>rjVZePSf*?dv5Oqf<9K;@Qb(-KNY9sMq>red+U+52sDp*Xu}R 
z7!^#t6i{7yq8LXs%q2O!ttg3MUsw6JAKB+j{#?x4#9~Yo%IR9dj z%QRkhSb*)}+p}>+6AhxT&cJhC9UXp$_GlWH=NNfvyRO2@-s3AWKOl0NKdhmXeyvK8 zI{7G*@To49wdzDRBU?d(X9HGiHHVCtT$Bu09$s3jRbQ5JJyyy*x^u09l{(d4FP8Oj z&04*n*G~Ce6P_E=!RPEHesJidJvCD9r*7b8I$$&_f{o?NOEzr zHdpFVU;iM?P>XwGFl(&2uxF%medZDX}cvEP$?$v>$$>SY5Z=T`5*A>NI za|?;Rv@^O=VAxO~;cj3ompu~cQu;BmPvtzz)XUdh2W5;st+(Y{x7d}*0_jj#?aV{F zV&$Z4Y7tZ%ua5YsrorGx9tNwoZ#(xIK7TgsyBW5x@O;#(QFY5aiT7tI%$8IwY?J+S z<=v^8`lqEYZw_9SJMhh-p&NhVddg%eKdh;@cfI%#vCd=mkm>*s(*8l$qn6e_pq-? zM2V(GU|r|-*rmITYMwHS&0YB_sq|hOkaNP50{M?snaLY^wCr9jp42if*-9)8Hi&~hCe%M<)7 zCurmgc6TPsHJE6Mq{@Hf#Be*$dRQ0VHqSDD=CZ@5KYq9Px}Zq$5-0|hd5J#4(y1pc~?Gh#qoGRnvjEwCs{~s zr(KLWRM0jRegU!U)}0y>lE!pd*&!%2h!JL&OO$1-J6)t0&A9B`yyAcT6jlitbD+ac z+*9m!EnWn6e5yXCNAZ$>pQ;QM(-xck%_0s4(C>}D$oyI~t}(Vq7v13-6Zz6k!q2u4 zA1BdaC+De7XL40++0ITRezjL}aY_H~23ib>9qeP5C_FH;;53_S802@x!$sNlIM6vca)8ryWubrZ}-V>SNm7Ad9d*oA^4evR&8nj-7wlYU zkG#<7;!DuA+I*$o8sSTvj2`yWA6GmwGNF#V6$dp5a<9ovFH$UiP5Op79=~$F*Cqa3 zP6{76McO4Y@Ere z$I1lgy~y>_77H=x@_ON$vM;TBdmfG^fTq?OsA#nXW+M!A0jaVe|M$%*&BS$b4ag{E^W3fp^G1Vr%FY)73 zW6H#MElT-EtJgnY!yHG#8C)0EIvpuvzbBt*)UeQ8KlX>P-OhK-8xEr@DOE4$iiD5SKF-!%{ zF}f~+1?l+(*Ha7Dt0O$Q3ooe@`dAhE1s3{e6b3dIUYRNk*(xM(7kP5WG`p9uUbHH@ z=UwzIyC^z?Ic}>?bI;c0*G+Q_&@99NuBQ5_a|9g=(0SM!eSnq% zXlQ`W02qCM_9*Cw{{8f0ulou5n|pJQzn@?J{qXYNx8D8p%KqI>096W6Aohe0a4J5v zG?RJFaC&}08#G}7ehn$-rtqc8K|7LscY`2A~i`3JoJZlB#huLpuq&40sh7{IR$3X`oFNx4rwT-i;o>W`rx_a3*&s zW4T2Ltc0z!+lwJ{+?` zTmK(oguPe{fV`(S`W2Mf_jCt9JOpqJ)(-Y`2Vgd7VrtFE$OC4IdlC7kmZ-E!OkDo( zVU0hZmp>=)FMa~3tlw~M{{?4VEPyj|9};5%Ich~AkQU+MHwaKXuPn_*Ba^^UNJt~Y zP!~Egki=oh%&RB2(4Q_t>!+YR7OGEy3vI~L^X{Oaf&7^BuW96t-FEi7Yh*(P2<;$f5HD3LaW*bQO-M6Gq35YW#AK6p3V4 zL;qeN_;vp|7qkh{LmdA(0)zA!%;G@e45oj3;vq1;1Hc6{Iv^O@gD=1E}qIfil(uf;pLWU728Q_S-L-uYU8CHgL zM;y*WaEp3H0tPq@)>&R;~F(Yv|uPCrxebUUgo5^l3AqC(HlQNc8}sdPFl9e&l-Wz&FcHck zn*dY$fm8{xIw@+XV1Sn6+(ccvL-z0;EF0ZM0PYnb_n0pqfuw<>q7s*9hN@UKC}1?) 
z+VlbTFtA65Md^Zg*ax$dXZHt=UR1X*i^MY+9NeafOGz!-UV{|+0Bp?d7|1Q!cCy@a zGrYp-uqUSfwNtYXS_bXpy=~gR+R1w%72p+s6#y3mQvh55Sr8>bJOzm^fGoiD9%}y* zqwT3=K}h}k5*CEc|JynJ`TCy|_}4iB)J-5t=d}NcS|k-(obbh-018?uckwR{+SllK zGzp%jw_Q|^c4Iy>&DQSxH!SweAe z2qk{z+FU3S;^jmT4&mk~D5%3l(`X3!p%B<`H6cTyV-+wO1#}gOVhCawF}4M$9csL! zq!4W%nqyND!mqCOJN2dhDVXPk7QphG;C~U!gAouA%mcMNP|gG70-ZcaTtP7lr1Ai& zKsmox(t=P7unaWwps)qncz|ditp|E|AYTUpd!RW7r7plbkkbSGJkY@JrOqePg3xPdN{S_dq!hH1nyc!hi>c&qML?qPx3KK#sk;8xQpMKsg^7DgKvues^~T zi04gARDqxH?rsfWiOtOz_X~#rS8Q&Et*uR_rV0T?{l-Qy(9(kj7?9HgJ^#weljMGI z!{mHZlOagO_Z0Q6uJU_wdZ49WUypuS4h02Ct*!O`67z=wMg95nvVe9#-hMa>m>7?} ztbp9=oBdi!Ez3rXjMNAOiGQ6>-JfUj|8N4o1@rcF8S(;lyf?B7^tp*>svd0aD?KiV z2kX`vD!71?vqVMn>Y2S{WZ|>6hsZVggSYS)Zp+P@*MoEvhwz@~$Sy`47(_wQv&V&l z!Z7@mq5x8n=}i;4NsEzh`wfJ}3e1YOEW#p-B^y-g&uLKNt4}_hu)wfugjJZA3i^MU za7igMj1@%VZtZ6%*0%oi%KFwT7^Qtm8?S1brvW9+6$GbSU9}TL)qr|pX-PQyC^(eC zXCeQk?qF7siH4p@zR?qS8=V8%D<(0c===~WiJ+*>pcvCwM;Cr96s{c@q}~2XKU!2j z4c|Zds?nE`T8#leXHSNx;j|YFm9t1!0zRS}uW~30bR@wYb^UpjAx+9pbI;|`rsS~b zP5AiJ_F(2WN@Pk^R?pBYU#b01!n&t0&DgZKhD z2&^JxQdr~*$}~ZOLe3k9cB{;BERXp3nGO;|y%tfg}K^;Qx})1LsVzPPeC` z2NHUqj|W@wKu8Z{^1z@2+-(k2^nk4ZD*;74kkbPdJ>VvwYX2#!2M_d1PXF^edda_( z_IhA7A2Q7ZYI;Pn9EDVFy|DV2~&tKu=TL4Rf z=Uoq;i(WFYGX5#uPxb)neQ*!$o)mvihY#o#EEof}1-gB(R}5B#!Co;C@B@30`qgn@ zHwsKg!6Gp@8?Z}E8r}tK!(i(e>=N^PBm)amAnFH7exT~#+ZF~(10H%FpfQ0&GJ|zx z^YLnrW4JdZ%@dXn0SC`JjxK2KopiRRaj)Ah>f zj^UJX8|w4Q?M=WOiJ{ZGl-Hjme!Rr6|5EQRE-Q^D+BcEHJ-sME@<`& zTp0fn&zBlWHLW-hugY`yP9bnCgCwn3zQd_bqeGYCS0zcqV!x2{!rE%Af7L| zxxmo7Df+^y?eK_ToWa|p!sbWWs}_@5KNj*Qn{1xVH#awXCW!cAYbj{8b2#l8?&^KK z<1G`nK2b|<`=_`xjfq1UK?;fo7t+k`>$zlkOS7FN!MZlZeEmdA-sBMTbvpy-zY|_r zS~(U!&o$I(ex5=j;R(y(T6rcMmqnau2X+|<1sUU2nBBWb*k-?m*x~mjvVn=?kLUX!;Ea6{M*<3;=e??ld)T>T~ zn=BpLu6e%n(Y+Oy4?b)%+4H67jC2e_f8dmGf8o(-0jf6HrPCC2XqED+R)>AA6slN` zvyJo8?uvFPsZ&ob1!ho*Jm%)jc>`Xl*g^*u{}zc-Tf+N=xcxyYbUE&+|Ue>xmCYz4_^60QD_zn#IA~1U|PXqR(G6 z$9nhfkjLqsJcI?O>Qmo)oJ~#GF0>Z8cI%HR_*s*pHVGGU83HY3qAH@r*4jgf0Ksr 
z0I^Rh7-cwj-AtQ|zF_3|nFs$Ja)o{P?h%srJi)JS75C=;u(NQ?%vaWzt#1RU!)=rK zzUiax9}e(1g?)!fO}p2QeuxgjmW_ENM72H?m$BYad@)zJW>GE>S>;<9JGVe5qHUrP zM~&muWYg4cppf_yW2cu{GJLOf;~}=tS(8sX?!baZ!TM0$rF5{jxRA|8jYt_K3%eNM*HcZnk#u* zteAs7HQSWcL3f2o96?JZd^cANJBkA<)AijCbH~ab0k@Q!`2_5m? zPn;5jE+CC&i^sAf-aV2I>ftfx6VYTDi!;B?QRnq)4%u3rfKa0un-W-dc@SfpR&mR0 zE4m_M7B+F{tV+{)vn5;d{IlnM5o1EO^vX%_H=IFBt!)8)gy>5%H7NsqRz{EVr4P8p z_vt+@v0}SWnMp=pI%jgpcc6H`8oO`YtH+!@an4q+Bq~wiLXgq*Jb`gGo987R`Q-6a zLepfJ`n#59m-mj?JlJs^P75aF!zy*v^T^}Z)C!yxj0#6@?yJ-v@{w>!B<|-qqcr%e zp|zma)-;FNz-jcMLcaW+w+3!Wk74OV_Sn(oodgGk(p!FQYYmMb2hMs2r84mP?r)oB zKfxKRQ0zR{*hfj{amzYwT5i2*#E9gbd;Zk@n1g=g8|AWRG^al)xZ zFmgSTd+(t|yOB8|E=5yyYc$dC8%fCMt-kfvSlX};Nv!;>u~Yg(cB60RAun8wKPw|y zNjS6%ZF8BjK9Pq6$#WJ;SG&l;|;I5O>Y%KF(;{T_{yGxy&+0(;LsW54TS4~Z7$ z5T*Um`Vz|@PNylJ_ciqRChvnjVozK5&aTPci@oy3Zl5@4@x*(*yJ~^>}WEmAc z;&wqbeL}TIqc!#6=a6mxv3WbWDhlJ50b$Q)c8nW4IEFoC8K)*j6gLv7L!A8wGiSy_ z4t58{dx~A(9$;l$@7baJvb_IROVrY^Ci2O{#RnV8$n7lu-dl8Do|jwZmDd%>XR}_0 zetpp=yAd!PW&ACNCwN2(zk1`Cv2z;7vnkD4*~d3*RwSZ$=Y=W*6OSjX6p^+YrDm%K z18lxt&cuvwM{c$2Nr%@KlBhb7m$??Cp~A(?mz52^C8ng$_17_sM;TjhUp8K=J(Bs- zyAY~gt$?3W#f;0?Z$G;>{WX(zXC>`XYchVOW2-}@h57+h5;XN)fg zWRh1Zj%`l`2X9@B&H8qu@c#VcEPuz?tlmZA`_Jbq{5?yvR=az*+r4BUhg&ln$?Gli z2l&4)vfQ0yfPA0c-C6OiQhhZW5^$NRi2l3(S?f)8vCWdq1Mgk5p2KcjegFkx-nZrM zj!T)q+AK{FmXA+=h2ESA7xUc^JO5=ppoqi6v(T&YHT3?nfAx~PY_C%_JK?9jXX81U zdx3X30v_IiU%uxzaO>(jC5jJG1SKW-qS(dPm(Q;&5o_2fQVOrUc}nEwxcZ2lNL~+N zoD3;?8YZqA=0J-ss9gCu>xUEj~(+lj5B2g$nj-8e-A1RaBCb zQ9a6Mc#p+CQHttLi&eloJNn0sDaSu?buLi6dgIvD-9jhNrRacDv52(jHO1J9rx)&t z$IPLk-r}##`6nn$#^PJdC-`kkB^x^Ms%mf9KV6UkHi=*#|VsY zXiT_oyLd8;5YzCNmnYmCU87b|mm`Y2Y9C|z|+j2ah@_t3p0uX~+F7seJ4<=huq z`#Mx$GYNZWCtf$|yc{Y@XO41h?JC8or1a=;ske#fO(Fv}>>yXd!J-7F%{Y}}FFnia z3*t9W8xf?-;Wa0{Cwh{FpQf+}#J~=*>EU3M0f}&+XHJAw&oHuq!7zM!seACc7(FtB{xjaKw(PLtWNmaHmgcDyHz%4U^aI~ zHg9t_-&8hcD_f8|M_46C)G9|DT!@;HBh{QEJ(Yvq%8}#F<>k(n56o4F$yLe7Rcp>w zpUTB;#RzFa=tO6w7I!df0Wf`ilbqaH6#>FEGg9HC?KfxD3b^@r$#qY_H?|YRi(Vv zN?f~^CuCIOS)jtv8p}j#Y~gnT;q3 
ztmnIV52sY>c2??TmtPw#9pkQgw#9SKioK2r9>Xr#>%`L?gE;1i(Df{PrY{5~3Y8}b zy=|_pqp8}6DX$`G@6yx=1erm&6TF!<5-l~?-|Yx@dR3z_ydpeqdY7t&&(|K;JiNBW z@maQJy}4XP7^bdTcevDs0#>!)R#zierMO*tHlr^628<@Q3T42`7=%zEQ7{|8g-xny zN6Orv2(k~>IF}Z)zpwrpSgU+el>dF*{5uhjAnd@21|REtemsS@;1v(KE##m3as|}} zWbW69XwWs4xG-3o{;mGp`v#rt@<^U4UF&+pc7vfoqmx8g#toPpwm!{()rPj(daz!2 zj@3njmHqjV`4vl;WYgv6&7Oe`_zpRP8hQ3``KfmO-o?hzB&kPJkSB!qiWT>M-`r*0 z+OsWzM9X1VWh>;RA?%SIICgTiRoqvacV|)dLQboYyh?$oJWPwVg?PWlnteu9UhXjr z8j*ZuAPISv*BO|K`{H{9A2g^+@!2cH85G>v=HRkNWVTfG3N^!}nlB5Kakp@VYFejM zHyZ}EGPvtUmSW!L8c^o*z%|rbLR+de^}E7g2A)F47L_}<$-*`r6{Y*$&_XvmjV~sf z+!0cNAbz-M?e`~jmqR;V5mmH91?C4-6b)D>10~{HRBFtRXD{4u%vJrat@J?16s$`a zQ$WX(RqLFa9Gf46j6T?GXL=G}P!MCp$Ft zHTvV#c!e}srL;9Kl-Wp!Ythv79ZccRrRzc<1}-{QxyE$Q(?Hg(y>>(Oy)isT&zo-w zq8FVd=y=#dUJRx)wPSNSQtP&jOeGi+qe2zKPZ9~|L*So_XUaO9Yc)6i*Q z-9gP`F>xAkFY2znJa*%XX}rRaj+dT{s@@x3-5^J+S5mDzqk2x-S_$q0k+Iq*c?)d5 zH$bGSPOCDw+F(o%`EZiaMNMP4YSUQjNY!=?+_$nW#fZhX<=DaA(>eDPSK7Z?u#2~h z@ckIQvt|AsG0cN)SNUe#s=d?w`P)5b1vNFH?v1>Lt#E$Gp7-eT$Y!3f@(=y;mvu66 zCfHIFtu7N|UQ;w%cZ3srUflrwZO)zV9Bd}AZi$gA*-xlCEgU6V^I|!->BfR@i*3@J z(iXsYQBX9*ALk zWZm60J9YAHkT8U+xDs&+nB9l?o)Q$9xaiB!xI{>kxD(*zwJ z(|zN$UgQmx2H)wa%{YCd3#KL0Q+2k_G5`HP|H7vWRmcKd9mck~}e(@YNsPy1}KPTrq>@vyk;`{*~#7xVEirfp|l2hY5@ zGxP5L%!lcjPd{e9?3-OVJp0Xdb}e{z)bG%-8ZM5UA7ZEhsv6x zv73SH<}Te~?Q$#Y+rlq!zX+fx@47#?|GbC}Q+ML{T*iF)Qw^TM@_Esa1xf=}#Mr&v zKuqxRl*0gq>iOL5!Kw-x=#*qNYpj#1d$pWgLv8%RRd?tRZLQq){27UM-1Ehk2VNRi z%yH3*a_ER?OTLOKEx=y92uasd{1`epNP*6*&(0iaf8K~2o;p5U8mfxUNP1w-uu!~j z$iVLPIlCp)r^dTur8i@jc8s!K<-b~b&hb3MdNFCci76HVe<4O?p;KTs=qFu@`HB@J zw~8O!_cBnLOTLW;$NEk3WSO(2R%gq@t(FIxOV>l*ZPC}rgtf^!RcT7LytjT(_q?u7 z{lTQ$dmozzUBn0S7pHZyyTdQCRvVJbGR9v9Vcl-mwI#gEn36c&+VSH3L7xRB`7#5> z2vzPlQQq6Cd~_X0o(~W4efnnGcFR!JMCg&NRySK*`GwUka;JhxYi(EZ6GB2vV=ybZ zt?HQU2b-2bq^V+@bZ?&ICz8s*zHge8KJtk@#cHUtl z)~e5r>@RHe==n7evD(dtm+RUlg_FJ--d)!CK73tRA=N_kGw;DSGj&tMRRQ8^KuEvd zqZK1t_T%*P(1B))!(WSez9`RrdzesuIr*vH!|{z8W##EEH2wY+8qOEtqtoig4TskV 
zkEUPbJ$^rP{P5uojvtDe`}79J%8YqGGf-}xNS;{bOUyI6iTqSzc3{5f;h@6tul>V= zp+}0tf7DCmOVTH{1t;6aJvfDlz>|JRJ_|8BKRsbCWLBWQ)jaz`jz<+5nM^J}b){SK zac)QUVX2o3Pp>?Gwm$H^XXmcmhl+-+piNlfChMiC;`rBtB|lnDH#7%tGCD3#AK5Xk zcs|Qm*mj?FVs}Y4^FinI%a@fkV@5m&PP2;e?QY1t{Cb>q+oNdp?k)tw&EbfY^nBVs zmvtiPf!JT!vm=s%N=+&{&2;f-!oI^H2ri(BND?v1(oQ#frGN8)t>10@vLGRiUAC<& z-F(S7>u~T6g4^PaNsdlDpH7CwTeAYAJL(bKC*E0J&N%V@WQ8&XZ#lX_^Hj~5 zhgmwACqG&>T$mR9VWl8N$LTOT)17(hvt3*8j~&VnSn0uP`lSlI={-;Tp7?!7B6%%W zoChEziwiJaa%}OVSKLVqNX{m*P~w@f_RTA96XmuKBKHNodzC7{&i|y74#A-t> zf@s{vDX%T6Nn3ATdj0UuktpNDh5@<;mXm~PE?GLF&tY13h}cPf*<&trgx~f@;M&LO z2ai+^pFwg>c%6Nkb#?m!9LBbC3_i>w0=ty*9E@IK>yZ*lM)PAk zN=$9;0i8gZNhmD`E*{C)MiSTDIO$xi!*M!%8MA3JK#txDWYLxX1mR)OcV$U&QRE;6 zp>Z)fvq+P;#ll#(%T4~$isZCUq7Z`-lC)7>Lgj?Tf)AhjVNSxehVqZJ#frQ{8g*(CB z-GjTkTW}BVmJlR_^pnkg_qWg2qt7^_`+Y~Bi_J~0cm`Qjv*uc>{_{8SCTT>Zy^@6y zh44AnlzebFqLkMlVX+!WPb?iZL7x$fMUlCX9&4KfFn-VBRVXSO^=?JrojL)LbX7C7 zZC3J#M|;#L*|mK8s43p{4U^B_nWIcgCPddYua#BWEr8~aq~D)`}4gB(s7JuFeATlSP6QvUH|gq^W%aSkq-=| z`z%BxpPI#L8ES2&uL&la0U(tIb7BL}d9`Hc?FNf<);n|>%S^w!2FvV^lKfV=CKSe2 z`4e_GR)zCnZq~(Yr8oPLwAK)KS&cutO?jVr(qZYNMNtg8?m6#GIxrR=9Ubnz@BlKy z?&M*7r4Z+F#4USRb_4GI53L4Qg@R+R<57W}2Vvq%g&o**AOsFf-eyKR=2>@!3V~Uu zp-)x;9D}ora|4aDzra|%GdE@LR0F@!okmh*)qS8}Vm*ac`+WeVasbW4x{QUBkrak? 
zpZ)i1+XGd%t}%~SDK8_0>h3a43D@Z`I2=JnA)NKfyUu}eQvK6|eq(nGt;HxZH?!Et zX?I@<{hsTG&ZJZQT6eWhyB;KNxh?c{`(=|ZbyptW?q|FILR`4(vNHggE=Eaqy&^(^ z=o<&Z6v+5mHu4KoXT}0$zA|w`Dr#=KaA30%?;P4kJPO1t_jz<)rHlw+G&6)>%bWqE zo0-;FrZ7xDOYv&7F&D*pIOZ#KUMhldZw7BRcFcZXdRf{W%|JnK87~)c-7IW@O~G3$ zY0>8x*X(l$m8Ss=WPzY>JXKbKEd1 ztC41L^p?JvI3DS^7pJO@IB1nL&>s0z=Z$CC8v<`S#&SENNyyH*0xCSCNLPa3HRD=4 zfo_IMI35q}0|uz2e@xoqR$=_2Do7f4yDfZ>;N#*xi|?VtgTQ_DEaF_tM;yTu39n)_ zsKTtIrL3pC7&LMie(oGP?yC8WOlk66;A@L_;Z}b8Pg6YTol-62_a}2_;X= zGy=x>4H<&#^*&g?cG6+pxbwDkrpbA_mn=3!)e)BKRh5Vk z6krqxrBU#oig=G-&vMcb;k99>7LcO&^fQ4bhQP zx7uHCS*%qX0J&&$QtU&t=)NJ94B#7k=^ zeU~+>(RO}M1er0MUAo;N4nbN=?@^mCL{cKW_O+W~1eY%TB9QYOvlJSNF=65dsCAoM z;tp#adTZiBZ5?p#K6qA#J>E!} zNbwnU=Ub}n7%dC7m#}CAxETA8c7H=)A;{qEN9d`C>~ljF`sYSH77uM?C=9{GFIH+l zSATsx2K^-&<=lO7r}*u?vznN)VQL3{>#lZZMm7nL?5Z?vhvpIwnwD-qho&dZMd?eb zeCus2Py6tg*Zh)G?sZ$wKVKf%nECNhQ_ITE!XTilZXeIOjngm zkogv?&O9U!A+Lf@uWlG$PUae(mBR+`L>>{eW%9iBYS<1DM#dwjEu`C=&vE3=XClov zmxFE83*X&*V(o~_SdT-fjzen>RN$|0&tk^3?Wi2&)WhRSa%286iAU7U%<-jdN-b~( zy`9>J#lVChU-u3c4ugfeu?0a2^IsI-mGWYA3Ei@lH=A!Tx>C0bc%Yfvt-{bwrYch?Cg05Lh}un^JikYdJ*ST zRyN7oa&(^_RGxCk*M|b%gdRM?%Jr$3C=$@tPbvwDdoc9yu{IA%c9amRlxS#G{lP~B zf_N(Ka8=@5#NDzEl#Z#5;kJBP}O_*RRHIz+UzYpi~dgS3yU39}C74i&9 znzyA>g^<`H{k14#DULCupS!w?UI5!eg-2+<@?>!!mqI*7xF#e;CYPo`K%lfVluvDu zzp$ak3Q;a+S9X!MvC6T&!L8#vsbHw7TsF;9^QNYTm|F3qK$A|nTKS>Y>iV`A5?}h^ zPM+Z|@!@Xu;U3fBUdQ1+-{Jn4;enjtK?eDD42+KM;o7C)(c5QZf^s8=q|t29!f3g1 zQ`EZ&@evC(g)~|PFX5-L*^~k=o2PvrD<=g#c9W1}K!sN;6k0LvY~*Uv4n%;$Y7irg zyqU0lM8xJt{Gg2sOCiAwz=IS(1&!X?O`Sg!8eRx~;cOm$B-SC*e5{^yIS=0U2DP#W zy~h~SmQgy_R?5z1+45-dog4kJgx;q1EWE3~;|IoOjN(^uXse9iUSs-MbHh2qc(yAw zS$XnP0aP$75_-|RaM*f@p}e?0^3zw`PrC)j@);y1hN-A+ZK&7f&C+E3elMbMGhDWgoMk}31gXB9H(W|PL!%?sd=v$Si2RYP%9Z+Ha1Cfz-k zPkQdgI)TIA`XvPK=tm~36wSvvP)5H+76UKX7zaMp?g7E4UJ^F4QTc=z6W>(I7Og~a z`^mF1iY6tW{fh|V(L|0z=3)>lsV*VU5z(c%aCxwZ4Orj}?i3!)REG?f0fi_Xmx!2I zex2Qiq=DbcamELkXi1F*`?1WQrLGd=}lT>iOOE%sPx= zU6jU-SHiryBOEgKuvEHKKhRkokH@USn=$++Z1lB$l!hQwY^%>rRoX+J1G{0`X_+*p 
z5pUf=TLf2nf~7w?cbc=Se4M`+Jw{s<)(J75V`G~KclAsK&KF5*SJ^f}7zV}2c{YOw z$4j)g4um}+2?&UTG3-2?*%J`>BPt}FG?7A767Qc})wv4cqNGfGUv+OxWRM1Q_1-8N ztrroWS9^*?6o@ZMq3g=fx7p|{7WK=fm+D4=gt9UgnoARb5&Ft+N^QKT6%(Km{9rA{ zrMBFPdi|v+&{Cg4Zhx%)O_6?3wthjy@W|HE=*7|)((*Xr@&w~@`y}u3XNhG;w(%jq z;9)a^Bp$Na_Xd#>%kxJm3z$SpHOoDV%T}KaC)sStYSr-Fv@! zF;x7V<~uVEv9!6EvBfpuS%^#?140dx$6rx%)d@le*6=G??D3jHAu45Se;)PE3jF6*06Op&Ap!*nOT3>mv(q03%tAmC`9(aK5R(a7 zUh%Tm8(B0M<633CCxBE*QN9q5u{(~2~+`-9M&DrAwapIYin!t8r?`h;B~y6%M^sh0#zQbUk4#-p$p=)8;hwT zVIT@i0~9zUbVLNmA2=s`V5!(2fRMnLKrH9~fn&S(T42l_P`9O(?Y-9mV*kr-ffOA+ z0Tp1k_o?*zNIH<81JX8s$b10M_&q%WNWB0u{|6xPpL<*W^W=Za3IH&PKXGgzbUZ3S z7Ui{0Z{Qmq0?Ujq-C!h`EDwn>Ff>nw@&@3bAWDbe$UxxsZ(jC!vy0(*=$!ia27)Y! z7?j)T6u^jH-zlzC;VK{q%W2Bf_Fa{1k~k;F8+ABxnpzVOIaV%xm}0uTxLa5%wSa4c*(?SH|W{;=kU zAT%%ri1u&d{kt^-5fnhcz#bP+F_0|*3>pwJAYmXYa&OasiUDf|Gz$nBFl&G+agTQT zpX2LnY)^nV+P`I;|9QwiEAVe$0cg{|#OsBE0V7BNSZv-H#C-ZBG#v;**rbL)Xz~Rl z2#QEm(yx*s9SD|95@hAvl#K>K>OEwt@)VMg-vDh8<-!U+2p+xqG7)fihL{P2)2&)~ z(n}6W8dH|1Xetja(NIEH`9L#Y;*()d`}Dyyru2y65svzi1`0woEP+4cm3|=_nF|h% zx2P7_6`(k+(TL8rgGlUg+S?z(F^HLPQfNd#h&b4&+;9LJV-BKbP#+imt5OOuV|EY; z7#qa+H$4OUY(Ub0jPG~bfTaJ@^Szh>R?Oe^46t7SRtz9!K<)Rge82eyL=J#r0MEXU zw*#h5OUHii(7;jQ-lPGA|3i8Dx4$+2dEo!X3P7g;1O*1pJ%WM=*lG)dy)kg{JnA{= zK;BRm-DVOR5y?PYR3`1yiWi;X*wWT9fvUNparAPnJS?htoxvomD~JOsJBlen7HPLu zZ`OuVq1Y^h1=V|!*)%95(5G)fUWH0v3>;-FgE3Zl z1fqMjSWDbU6*DyBW9a2E?|DU@V21!f?t>Z2WjJhf+Y)k;(5`hOH$`EJLZbG zgX}Xz;0U--RL*v5VQ&zWsZtkClOZ~ThbI~#{AU&(4OkC$dJqgu2qaSfYuo*I=l(q{vHgGUULHW&`_{+xsqp#RM5C3Nc{!dl_>IEWZf&M{lL_ou#aly$1sEy+|J~BPB!PW5V88dS3+VCQbpba9j?BPT8n9+y2Mvhs?>At80|Q%Z1O%M> z(?0ba++$TWZ z04)m8`PEe+0CWP_O#m?JkHY0Z03HQ&E(1Nt06Yp%KJd8!0Hc6*X5hFDFr$`NEC5Ip z0F1i-2mrqE0zM|<6KFqtz`VYW0eCj-a9v-JjX7uCo3^Z;O8( z`+t1}?wd}f%K@+(6d$Jl|FP**E*!ydFa@~V8c7D4PL~G$)pQDb3s?F5wUfc$U@iM% z+1jPn%Y!w4G@S-h$~m#lRGED)G8p<3*5c<%bhPvv*78@=>2Fxem##qkzhEtLY4Qz~ zTSEzt{%Sf6p;Bn9{u9>nyXmz2n?;^$+H&zb$24(XsorxJ$P@UEnTUt^~crT(ZTB2a{2eD3rwSx|7beZwAzZ*HZR*)H5k^Z35ElMY9Q?PmBA 
zS?^{B{)V-LKHlHWj*x`y<-}-O@8u?#7wzSxIPLG{XZXYR3v%MD_X`X1iuQ|2YWMex z@5*~&2PM@r)(55a+eHUu&ENMAKD8sW9hP?!*&J5%GZi0J4nICPtQwbOJF1@2v^lDo z13Cd~mz)lc>el?(j_Wt$Y>pdt^NNof4{HyOn@)S#PMW{X*qpRnZ5N-k-hMwgX#=6K zpSB|s+nxdmjgr$&j3$r`Bi3C`hxs+#8G(q^==NAg?U&qwd z43MU@-{y8tX*e&xMb-&;Mu>#%@*X}5wl}VtvwgA!SpcUT@uJzyrmAC5x0k~2mJEuG zFIPp(K+Yh-!=ra-aOM?!bRVq@uP+*M8CRQ6WyY`2KYl&>iV)*$_!%wUVx?v$rwx&R ziSicYI-OhB8nspM(#vNyrRi0ZOlTJ-A{dizyk@fw=khEB|43Gfw6DV(FrmS2?X(uxgD8X+(JJj?HVfVO95p-MMum)coG;9*HVI{ zXzWekSnlWT0)mijW}xA614TXzkO#gX5!L!mGOSI=gHT~;5*+?82JYYZ7=sREU(kgV z2m({#fN}h^yU@jrQKU}6$Zl{D#%)B9D54K>1Sb-ReGL&xB0?M&*hBNe7!g_yCLPl5 zVo4E37SyIE-;wU)UjSe&!`W1q{e40=XCL7hIW#CT{i0;&5rOhKbi@PwlKkh95urH@ z%rXPAy5~^|!#PY(1_l(}&Z9Fha#^Hg233>JV+!PRVOj%&nvLhN<)OLk7BWM+jX4jT zD$IKAN%0L{l{{!Y)YkN7mG?%xpCDoRZRPi+YjQI%c;(+q%K`3MnZYNE6Tr&j_uF&f2j{u z6&rmK^uMi~d%ZXAkrERNi4~|l{W*S*y(0|HL@Qd-UeW+qd8|&?pH}{IX9e@}wMBMT zG%|^zAZTw=1)PJ0yH#Qf#(_z{?szAKmd~yqNF(A^cX<-5b0@YN?sTg~L@3E4Vf3XD z^!>QwcWJrM>zTvPV>PrtQNjwh6Z5B?HocI%+#FGNW4u)<0|_nKz?>=eJ-7iKcpbR0 z6xPE}XZ>uqGXsn@=bzRfy!)P8kfpm$Xi4LWVQuDvjwPCJ2!GO8zaB35eK%V#!7e`y zDyC0JueF#d%>ZAfoX-e1nNEm{9^aETQ&ZL4T72Ejs*&j+=i5%8KA2le24V{&jfHE; zGBU)`-?8kZDAw>1B`aHR83#f7BOvLXldOQ1qXmuE!6oyQS-v%rtlcU!SwW?K z$O6f!ibrl61d|Or)@U0NT3+bmm?EO?22V+L-5=-=<(C6iZfCdN<9y(DccvwRJ}PQs zTm89~Myyh)1$DZNMd;?B`evTrps{r_gt2mJ-~Ct(OO)oQ;RM;%z6rG{fVoXT8ik_? zZ7le83KI3m0m?(v)6|mE&FSypR`)Ti9<=ONV*- zE|12-aoG;sXGO}ZJaLpSUqy4?Fu$m{YHiGOTQEOZsTH;;y_oSB=%p^hPY>ix6Bpaz zsZn~uD2n|{;)dI~b=@NsQz$z1TZ;nKGsxWx`ijfTXm+sPC{f;>bH&Vt+#WP1w zFn7V>JigREhl%|r!K^0v%1HLGK{ookyk_Pyg)S_ya=WgQBJ$RQs=VwK{@Lmi#%~AX z>sB}8rau~EYqa!QR=R75U*DdYJd4i>gK&y;Ii2m1y*u{DE`5yZuo7Zi@^Ssar;Ck9 z#nfYlD~$XTC#^BQlg%w>zLE_!xIo8K@70_>T0KvK?IBhzTJto$(E-s6Zl;Sp{`D$C zq!bGuSa~VyV`+oM&VZKul5j69Tk`YA^>K?D_9A8OYd7O4te`>^WqzB%8ovHcqRP{! 
z@4e&FHLuF9tUem=zXXP`E<}#Gj@<87=}jQ+T4Fbm5%0eXWPO{e;$IC5@tzkjI)R8k z{UYAamwGd4vHz`skRSl&O$;b%_}GM=GUx4&#>CG~B& zC5OI78ne(_dFmwvij4GY%3wjq9~#&RYgxh-h2b(76YZWFIZZ7ChB&7>bRzu%T-#@G zhvIa+w{xiunLG9+y82A7Nob^rLSraHB8;o+8-DR3DGZ&_X(BQp^n3{s4AEP)H-6e&%9^xkE^U;+KSwZ9&PvlTU<#Fu5$g9Di#4+xP?(=La>tn7IHILOY zyc#wE-4e5L*Vv~aI$+hPM=`E<)w8(sV%f(D(a)b7GdQNu@Vbe9jg2hHV*g5<^zdnZ z2TVe>Sy3p@DKyEG2_%E@xO_IbWkd zVw^-KT`9$meFItYjbrXhF0EEi*{Q6V{H$BnOqEaEF+nWCAY|NjwS3rgk?KW}<_Ju) zu4uaK%|)>xCYyF2Y^E~mqQqQ*O?S3vrnd5;^i>#}euFGh!@|2V=Mgr;%c9xV^NUZO znC!*?;iwbud%3>?`wL?7nMUsK6~STbro?QSgZ_Dy2q0neC&lxlKxugzCWoaI+rnhj z_v$p?Lo44KFI!IE zS$gcqt==qLwqA{V^1K{cy}P+=17X?6Bg(Hq$hBLSwvL^>-mW1FTy@}tpSs(yenHc_ z>LeL0_kS{M@=LY8i>B4y`%`_Jk^)P}N)NnWba8hRDvm(?&euHfBsuwSuE8Ll4 zbuc_ZE&d0AABP?-VR}0v6CaqUMGlT^n*jnRw1cs5yjZ9~h^T87ado$A*zTYq@OXPn zv8^G51T^^VUHctU|M1r1rt1;=Z7#p$6fZQ_wE=WxaMBK&8G-$poMY|R=#gfNbspD2 zb;Fa)zR(p3X5A3qnUl;%-(I32CGxQ;p9KYl_Rx2&Aw7hPM77C*P)!@APy>02Hiq{# zyPQWYu-YNvIB@Cs*_m$!HO0;&2ZleA+RBkT(~@sDe>_eE*M!$(V$#@(2{eG}4IjrK zU|O0zdO9;zrd*wCaDY73u;5qwF!R0s9*vP3U(p1QilQKeQX36a1bTr$C2XxGb-hSc zP+9f?;I56$uXH=KqjXZP*mXIt{=Da|X^+82@C&?Z-i93oC0~E(4!TOje06}NO*6ER z##etAu;CCv+_yF$`F21mZB;s6;KL3RAvX8V=j+2iKA)#OX-Tv_@xZo#yp!bX3Rv0x zklZ{2q7F~W4?RS3gzvFxUG!qw`{X}rM+klV2#=)Y8v>2qt~hqnT;Hpg+6V0`klBu@ z%%yk8`V>Qo0d9+CKOH|keHV%}g4~&(9UNPK+suOAmk94~r@LCfXz|gU1^V-x;2; z|F~U|e$GGd{O~2(rxZRi<40SwhlhCwiQ}pPP6)T`TQDW~x0P`2I4?sAFZ`6txv|#o zEUgrgC=nAL(oOQTU#CK^smg%zh0VAdbS5pjKo^B{k(W?Yqs-z zpUC2aH?b8y4p?E^!WG#rW!BR<&!c|^j%W$TEO%9ob6dW6&N#zZvg3we{-VC-5rNqG zW4^7pr4JI0AM1|4ufM-vzO`$BTdbrUsf!K;)V=G>>ua3pr+mBbwVp!xK0+>mcc5+e zhc15X{k{*NZ)_4g22kL(d=%ONwxNN6N%cV~fiG^E?Vr&2@>=?4^!a{e_BIUgRpWC_ zzXE6WgL(VkFlxP-arUK-52CzusOGb;O#syt*jDWZMm~n8paeW&@qNt_dTQYnv(1X> z$JAHxD*cUVB}$+dOQ4SCo1!?!)VZ*0GspT%Mz(&>-OE5zn|EMcpx`NkByJ4p! 
zADd?1Yf5_UNPFDQ+9~V=Ue9tYlNJp#?)#>|Nr+DJN-o$_lcMq3BJ*id zhKd}}INmK|)TNWspO_-%oKipk&UibevCcT2H#JTpb;>n$#ws=WE-z(C$7pyyb&WLb z_4m|uE5mlzwB5wC{f4x|`LyHfv{Tab^QYqj{bc?F(81%X}#k$VM6as^pa1;t_o)sG4q@=7{^N(Q}3 zCihB~rc7VwBG%-)Os>I7lSYEmqJq#T#Q<7?Be{y-Jxh|mNvZIBo;iA1uoRd97DX^sA3jk;U4;bO)sH%EsXM-XTJbE|yM zr2OQ%cijBWj3h7h$bWO!5XsEhZR(&4jG<)qS0sg7B$1239z%U#h-;x&ULh$>Sg1>~ zOmSTVdBb&sca(d^j4nl?b+B4}cbo_piYe0>31M(Yh@8w!Y# znU)MxY{m8|M!HGdAxJ62ysfb&x2dt6ycxCDQkS1vY%4r!$r|GMkD<`WFZ$Q`^ESyDQaUz$qinNB9;jLkTKGLkrwy!Kh!qkfoi zG*+U&t3j1)hb>h)IuT$d!0G2LC_r5yHUv1*`Q2!7+>Hj|admB6J>m!5*{+rdWMs!g z*4dhkxxGdDr-6}0ZG0XH7A18Xk8SG7X@ydZjmf*2ZOl703A!a~0$gqFC_cR#E*jge zy4!f%Efd`1BVXfvSmYAg4{z!Y_DFl3k`7*bbsG0pW2oqcjaCZ1)@nEu*|yOE12n@d z_mRCPp1qIJ-GRN>A;{LtE3~ITpP&e%-;t~-%%jIeeyF!`&`EHJTC0Ee>EI=)J2w0+ zC&-)BwzpoZcVgZvr^$GewQiNPCjEARn?1jJaS+WVUu&nu{igQq-n&8D!MC=c_b7wZ z8BU7}POsxZGx{U%cASiZoZae%wr-sl7y7+XKtawfH-ckLp?%lQW5x~jL9@Nr6wW#E zuH3;RJt8yMgB+ zLpLdP!p+U)$q{oNaDSV^@w)op#R6rvQ3g57Rg|U+DZhtyo}8(0VT-<9;?F(@gXewC zA{;}YWtU$VpNe6H*A$;FL#H*DT{bk~JRJ42DN~f!`3mwihR?j*DKf~Py(S@Eb zzYV81D9sL;Wa1!mDxG{v-uF;}pk96`(#0)}ZLW{3*!9PNAZDcli?7$Re@p0e`Hwjn zXk(;7mrcS<2!~%!(ojoduBppd`p>acyBRemfAO<<&G-KAv=YP}<`Wf~RG^E+sqy~$ z0SJu~a2K8Ko4n@hcW-`nIKH3l7zya6q!`g}W+89N^dR`aF`q0qCui61cNf+dZ#hbm zI!EOmUd9qEaWz+uJ~!wVv^FyS3nO?bbvnynzNxBCc?SP~3gkzzbW!a9! zx4d-KVRw0xGK19$GVKBh(-|0~02!6Xp0F)F9$tHwn$V*#OUyFz+HFdjYf5Ih`5I#u zLM3y#nd!@DT#(e{;GHwE|VFYa^3 z1I>rQO~?GpOyR3)q@#3Hl^c1n7FhN2QmZf1w)ulrOAQk2Mq^BtWAA+R7E~1Cf3n9r z&hHq^6~8Tv^Gb^|8;A=E^>=NEUlZ(lCbRA>vxm#E>Ta~et01y*K2pOxHXL2IB?sVCe}1i;NIU5V&Uf0jk0DTE8m^B zoU1I~lXZJaXV$}0v{_=i2p;TV5+0U8CTU)_Lwmi?P+H9wa(Dc5s5b{|{&9+$n&%1R z`T0)TvE|atx1g?J{ga1d1Ef)3W99mSJoGShWixxWS^c}U;p^d#=`GG#Qb$FUu6VeULwfI;b|UVMj>I!^e00&3X0BGNEh|Nl34hbox1zrf>9LV|3E?juse~_1*W-r*FWqDX2zF?;BrOk47+y8`pDjZH|RKv+b|H z{B8~Ug#NLW+~XKwy(jEbYy=HFpaUdpU!QQxJXawV{b^o0kQVDiM`1AV9SG}z2`{<^ynoAt%DEplkuvpIzZIjioyu! 
zwh^q5ngi>OGMa8!`!9-aWkFmih6-_fiSB}jSSw=6uL0tw1$(m#@r!nt*AQkzba?Q<6Pe zQL!p$V_a@Yeq1WOI6O0An;zDy!w#C`rB%S-Je@1?vQXt}yfE**d?_IIO2EEjF>Z7M zhELb85O6LQH9>D9t^PuSm7klTrMSpm6j|U%?`r|*;X89Bu6*UKNgd7ctw^KMR~PyE zN~G^JQ1V-x^l0A-2fj|$xYdC<;Ge@Iu5NzrPVtmi&M!Hmlz4U7kmFsAJm3QbP)Lgj z`S=vIQMoaUY*4wg)zt}Q??#&nx+0j9R95xnQgI_+VMnA_f9v9D{SMkrYdRi2b|M1V zJrRZwl|R_aQVY6MG<%5fc-#xy*BFtJuLE)evm_`PkhPx%?SXUYySj{_5?2qg;mW2= zlQG7U^`bEV;)j++Rzs9w)E^^<&DI}!HU_a(&^ zUtdE3ZlGiLOR9@{DWUHZ9}%iw2Ciu4*L1TPeh@dMBW#eCd|Vmkg}9!i>P*yF!=~UP zk1htcsE_cle-@6oH7I+OUF#se4`;qdK|UBcA~@C*5X>8oit| zq(HSttnYcZ_sE3*wRCq%s3lMAp!$PuS0}F=8ZF@`Ah4Ojk#`5WA;?G0{QQ;M1R8<~ z>JSozh|Ifh)OvIbsl4HzQ%|Noi^FkPES!{$-ZccyMlg;EZ{feNAw|{2DM<3j>(=Ra zQxWDxOft*+%%3Q3sh>4pi6j4(Ke}@p-5bZ(4o`~X`vT|B%$e$0YyP!}sEY99~u zVh(mU;}JXk=|gD<*pVhRZns@%5dmH?Lmc%@B-kzJeSdtY*)R!7mGK2>$s4qg>V$FT zPgoOh*)r34P|SH{whWtP$0k#h?~H8djjbsdCn~6)Fu^{I{U>{8{moWfZHR`b_!sVJ z83+xcL^+H%|4rVbpB#$ZDg***5}NHv6I!{;O1iix1c3nFitG!NQvpvonq879(%S0X zbdw=mm7&_BV%QohuD*QhtphF?B%>+HQ-%}c+;1-0k3s%S7eJabywObc%4ZD9G3(}d@x{~+R!aHD6~hwsjt zK$eLMW@n*1B;atyByn>c6#4`0C7l*A_t;YcPvqRS_&FEJ8y%U7P@+u-TAd19r$P~Z z)u^vQNV162m9iO|^AA@MmG{()GAd)Tc9sJ4K_s*BlPOBvnLGkQoT6BdwAUL`ufT6? z3WG0uUj$@aD0;JV6aK0;Q=ZqR<-?kRyV^FhJ`5<`ZIQ|1{W+`8QLH6r9urOZ!sDasLA-{0x5cZE2XI$H{Hn##?*aL4u5z;KG>I$~rU%HOyv5n@Vz|UiyJf_&v zFYVK2PhAb%joVgw#-vzae*W)YwJ?HS4Vo>tZc4Ybr1u^P#)Z2J&I4FRE;o)M@jdJ-*aJ=^E`z2=uztK9Ri+R~lhd!Boq$5Tft2ep11Y4iMExBBy?y6nxP)kDoP zImXlvAMP&NKHU9cW7>O-E+d;O8=Q>a&cJ^yNGgJ-wBLs8*p9+L@2`xihKC+N#uNx? 
zIU&KvJ@cxshbdPuf%*&V_L&fGnO?XFLe)D6O+~1z$$a?H5Zy$)@E#-`1=jK7N>f}-H;w>Q^w&a{8o6W`)Sm+}D3%s3MXrDyf*y!ec}6YGa1JH|$dV zoUm6FHMWJkSVO!_(dAV1@s{$qEK7wp;;m?iphz)v3%2m6yQ-cclLS$=kNcc`KH~jG zf_wq25+N-0e#6Y&4)uX-$x9%Rdb4zmOaU84_#*n#+bX6gpLlEsu3lCWhp84{^?_J@ z(X|1#vL>o3`t)P2yb8TuPFI1;iH7UzuBHPq^I__<{yu)NjO;qKzD%c|v}~N|V4kDw zNr52CQgmi>aMmy>7UUVS$6)E^U|CMc>}S;S=ERw=gOxl(RT$W7$yE6QuxiJlI^UuC zn4yLoIi{S!#_pl!DY=D)p_Xq$ZHV%^dveF+gPzT8?ErU8e5iwAxUN1AA}DOpA=~A^ z&n8XRlM^z@#%O9C0*eWm>Kf)zXGzV-_4*nf9@ug>Uz@iMEupMb#c~IQ zoHtA}>M)EY#ZAPqiK$WCprU1ha7z6U>P;3-4|u1I9G&Cn4g*GXQCyw&=vkoB^-^16 zQDHLPNB2;SB)a<4kOF3k(YKJXT(Ys@1kpm%(NmT&!THXtVVKDGeEXQq>NTfzj#G(hY5QkAUa7!WK zl|H4W->b~zFMJ`7X~lsVFZ{^5Ig*Wy*e$fm%9hMVpA?mMTJ2fID*_hKIb3!`=~|y> z{41d@VVw)33L_~I>z8Si!)gsZOyj#{A``+mDK9O=5iN5tEH{gHrM@PUf6iC|WG939 z8XWJ>mE>B&ZAnm@4N+F*!2hNn@8nvWRZaX%;xkVez8Ek16UHgOOz3Cmle8+0N%m2L z7Ol(Aq|Xi$eP!pfE9+9;>1ZsG%B9X0TS=FaF67OJ1jHm|zGEWp;VZ{Ywxvyuo90v= z39Q+xc*udz#VadECIfFIM<&xD3Bw-@Rp4r!&%lf+_VZ=~jYO+7hl zsKOj96eDY}ODleqexfsUE;Ct$PF?X5vl^ee6SFBvJpEzEKoUCDuWvHurA^7h+I6ha zol`*_VuJ>li&!mVF1oXIj0SfLL9&UlMZ-UpN(9kM*_S8L6SO&&=j~`Jnj@!po-Ir- zFZ&7Cs1G%9Dy`$+h zZW<~yuZXNK^m{0*J;md1kvEJr9Fd<;gLOAWEFT-R>=2HHc4L1&#F`Qm?Ohu+gpDsa z3WD-Ry3Q2OhZXcDbPowd@UJ@YzKv&48g%EZ+Nmgpn~q<$gp9?mzDr(njZ(xIUE7yu z!tPb1>(M#(7)QSw^%-m@H8sIQHNgEcSFJooGpzgul?YQ){w7xmN7LkV{0rtvH7b!Z z*H|?69jfDb%g8J3QV*=;bc9jI6W$q?>DG&*_kmh`D-^Ixl_zxVl2F+bd1UL z?yrE0&B5S>chq7~9)`@s`>q?PGf zNrt<_aEEJ(w7__07G?fE$|7viLlV#8r^SYp2*EnNZXA5q071DNe2aupAPqXt5&()1aSwU(@Ft7dc822iKU2ohE!wxt*cX z=S(z@Jfh+xuATZ=*^@C$eoNE0Ov)KcRxi;+_I&Qc;lhJsOtKfBgYV?1?uer8J_|pa z${_6zX0ULJvn-&pYA@Q#p;dW87@gv$lch3WBSEH#ywLDWH&;TJ`Yl<#lldnrT?@Ab z|1jO^3ZtT*N$CTdcrt4dn+pvl=V37->ns>r+!H>;YnEGXUde*ScMs0^x5Ul7nH4fK}K7>kGK6<7i$cT z{ArGnUmFpeIGh`N5!*SdO+D8lDf26xsO{CEyeqe~OxaidJ`u-go-I`lQzN1XHOC%cu?x`XhSs z2UdmVZ=taNm#F6dDx~?}LfHQ#zy6~$mH#~aKeqx<4-f(d%^#sK2s(~npgkOjYGR@x zp4!uP1rY$<<;Lafy*^Z!hHFr6Z^R%50nzb5Tn3~-4iQ~t5`0r{2niBNh6@g}R4NK5 
zbgmu5s0=|zWYC6Bwu2#H9CaLc1`8a7lxD~{E)0iY;-X7x(5#8f6_b+jB`E)$i$V)} z1SSU2|7(C8m;=A(qTEAK?l}oSmr848Gg5f|MTuSF4nt#>-X++-fLgy zI%i*d|K8<)IEgU_<2&wqJlvnp6UnNn<8en~S&K|ctd=E|gRdu*iw2W_W(%(i*9z9? z@_^cjVC;;sDHGLrA{YsgUI{5y6$K;9hNCeMCk#o&>Viop_~R;XD~lRpp$^k}>=`{9 z+=A%bvxmQx2Iw-1FLdxw@Ly}LUs~wIva8nedrEfog^1nc)orM}`6_DDBx2wFn?-xj zdDTCZ2qfSX1KkljP$KaC-^Cgb9-wXlT;o`GbnHPnc9;EWK%t}K04YC4utCZJg$X1H zfbvvTRR>xB(~$5dY6-GV&%k-iUxWL^-$IoCwwL+8+kv46AQ(CSah)Z>sCgXMSvenB zj!Ybk@!X{lS!yn|09HlO*pP~)&L{?bQo@ErP9G(`EG_vRNu*j5FEhrefWYPZT%J$R z7^{K{MN^>2e{SG#{=^i)C!`jXaYWfD5@BPs3Xl?U3&R;<$^=ve6Sv_6HqH_c*~6Lx zh1^7EL)<8gs}JeL-Zc?Xq1l8P7v41xQ*ntV?1?RQ`+S&Ex4=riL&kpANlV7VK6JU! z$Zn7~mf5`WVt?~F<*J<(0WGvA@`vT?C(CdEBslvK~9g&Mj+InP=io| z$RFDkK%(8v$V8)P&bS5iM!tvq=^5k{5-PK`V(Bh9D7=(} zh@P?9j4k1mJNuJJ(9O+;kw7l0;%c&wfdUXPxD}IdITq|o$4en8Ue1As5u;ET@erJ{ zKSZMv5NNUi!4a%HHb^H(#h>`(a^r1GjRo%ahh1WD`hWnK1!R=&e=owv5)_cU>2#M+ui^%1r_+%PIMg8za@kJQ!d+o ztK&qcfQ7nAx?l`Tv&18RYN?U?8Hf<>V$UxZWts?%8x7O=#8Y;P6Dbyo)iJMi4>R(a?DUF~q@w1jZw)i*O30u#I@l8omg2 z!VVKjfW3ed^XlJz?FM$Q@F$gRut)ON8t2z+G7C1@(rTkCH1%(&5i~bDnapMfa{%3XNlTbM>upqV& z{i(mbV*ge__@DA-(J#QmFwziDAo!zE^o$nbmK|_7qnsLA;znmE5@iFL>w%HW{u|%fQC)5t+i2k z6+aZE8;WYq+o@tP={ox&AalvV$NsN@<`)1Dk)zsCiT2v*#bB`1j}G zQxFM^9Afzo$p*C-gd4QKAhICSpeBPrgJgr|__$*Rfff)ree5GUPBUF)8q47drfikW!cGBYnF9z4OK9x-UQ#UVNo)vE1cox3wVH{ir{fVPh*#fU-N3`C;3^8n9t4#jG@M>d zd^HA#M=~L~8A&bh@H`k=7BdVX+aZfiUK=dFW4zqrtZaf42Lx3+wbS!s80?9WF6%_| z5X^DL@4oHH?nu#(+mu=8I4&l-?@i2wXb^E>1p=m*Dh+5s_h7a5Rvf->;3ThTE0#&t)kv z#t_u*_Z$W-abxBQC>SvdCL5d44wkeS(T3%|*pX~~IyBh84*U&?i4m8N-@I#@{g1tE zewS=ZLcsO}_OANhB-?*>%74w~aWVbtA{4N8{V+iNCDB zkNMx^44_jXvOl(uLlKcH*p%ZTG6qI^7k*rZS^!+q2*hX@Ty}omNx+u~qLm5} ztig{$BvOzTbb1X!A~R2TKzJD1-zgB>F4jm7r%_3K=%n;pTGeMbf2KI0?{Xk@xeg=kbVNcC(wF=)n%am z1f4MueS%;E`zH{69t%K$>k|k-f%Fs9;P2mKLC8Tp29+4NK7oi6_&4`|7kz&J9`d{B zvlh5Mfp+s)^a=E!phCO4k^<2u5Qc(M4Md;7_z8+P5PbsOC%7Ykat`hYo0~Jgn?0j} z;uEypK>7)K?mx|*dwc%C>|Iti}I}^s}~jvg)5rVYB}~OFs?H#|(og<(L=5G_ 
zz9c8Vi3d_M@dN26<5qw&njAU55X?SOt{lW2XTzy27y{jZP{oWmBB~UAApO*FQ4SUb z4$u>)cUKpma?P)D!sSXC7oTeJhH?;5*Y9T_)EX0%19gD(6VEUEJsWktRIB5eMNk1d zjfl<141|!=)umv~5Fq{BP_$T1yyFH29i4#m(^FvJBpzWaceOn&fPT#!^-!gOH-wrI zNIyqY=krnxRBS)y^k~cF!AJ)}f%NkrC_fR!IE>3PkY)nX&ylf}+#^JapiNQrSYJ{~ zd3V@aYAuj{+UV0N6MjiQ2~h7=x>^PD)5dz$utQulPb*Lw0xdWu1q;6R;qkd$87_u%P{q)IM<;1KHL7h}2OZ{Kl(fu_Uk+3eP zlu8D`lNb~qkbd5kSWPDpBaaH8IFCWXh)FDf^wU}&^2O1jV+s8{WcaNCDfb*nh*k@S zgwDN%%m;DA`p^2#h|R+w7LSdpVLh3Ww@7ue4PQ#gXa3u$F*8pfl1!EuM+xEQ)y8hc z&%PPKLRkL+tGdr_y02k)BqTad*q!!RNgbga@!I<=qvi9P$o@P6b!?LSHJkH?Niz2z z9UU-9?(EdWjP5wHYqPPj0mUM4NCK1O&S=cBIub}DfjSZ>B|&`v#zW==?ILGz#a(}Nq{R7thWG%tO3T%+JHQ(WiW@LV0y!j5P3q+w#dLoMKFPCbM@1w1z&r^Sbb*Bu zcqV~xGG=u2*Ea-)!m#ufZo2B;=M`wCQ(Fc`(?=r7eI zFj9i8kHMS!*Y}~nU;XcS27c8Cc>2Se(wjv)dh+F$aubO4#0iJ6aWe80 zxsY@`5;oZzBgCeVPS~s*eD3Z_;84Cu-0V+GaKJTfeVqqV==j+LZ`TG|@r~U`W!rD7 zrjK>^P)(lxkWY^yuSkYbOO30f&m~S?j}NO1yPfk(3SDG7pr3A@`EV>f4W{{|(I%Ib z-l-5qWjtJPQAgtgf5j6Wrd7|6n`1(bIZRCaRmpBlB31hLY;4nCxE+X8bi4J~WMp2D z&u?5LTKSYk;w2%Ppn(hs2eU_dd-vkDV>TRlA;j_G4wwo311tO$?A``btm#TRAe6r8NV#?vN`RzrgSQgWK|-(?Vf0j?DO?IVTOaV7O#D2dc5jln%KNP z)2Ru6jf%-T+8LkTT)y|POrGKJZTnWK&;9*M^XZG~9S?5C*Iu;fz?^wI+(nNgws^SYsf;;XjVehuQtn+EFKJ}gJBp_Ks6c9}I_CrbAgk?dfw2;27xC4*qs?u)aYGG}t0=Cn{wCbLQmf0VMLg_qlQUe2Rs-{1 z_&g=<#?XXnHH#8;u*DBXR)OB!<(!RELBfWQvv0wai281(Ne&@+PQF}8`En&XmWod1 zBYVbaQQ7`NdQ=0VaM6r#Z_EE;MExSx$QekX?JktO9gQU`A5a)O^P#Mir;j{r(|7#f z4VTOW|25L{hK)W9-1mZXsoJCb1I!p51Dv?bH6EkQ>ar=<}O@ z^L%EXs+oZaU-cMHWYf}-5>^iFB^B#CU)!9Xg{3HnR}vhlX8e}TuV0{ZrEesz@!Kzx zq@K0To2034xw3-=2JU@t(hk1m;V23XgMmqsX(wMxzwknQ-`iZdodUz~!b^ENqlG3r zg~o%0MsOb}J&oi*NDiPWGAXRwfJzCMG>7N>F+^s1OKPs`a)}5Z3+O4f0EOBt}pPoC|ts_vBItA;_yl2|0 z@6|7Lj_)^np9Vp%!bKrU@ne(Zpr~72(d-xJy{0(|F^?Xhxvl8E=4E{`ukoU}-O9a| zFX3W7D?;xMM)q2F2gM%l6}>y!+G{(a5XTb>&%%KU_ZsB~h-JUV#GFD+ehmc@=uCTYN5diz8OH(pIF z@O2D5L~2xyw?j-%mr&N#TmM+o^tT{A=};RwYQO04^l)|~qOLjohb{XI(?r|QCR``- zi#K9=j@jW`)?~?o)n3y0(UZ_6W^6gl}S;h8yV%WAzuQx88k~u@FH2Bpdre#&0c$ZcM 
z@;%|?HA!aNS#e5Xw>B?%{o1Rhxjm&7XMYwH1=_7@eaJCMcj}Yd`0gb2_5GX7$qPS^ zyyuD8zL{p>-=ziNirK~R1FEg7EK8#b5iF!2rGIYr)ra{#;nL@+S4O-<|9^ zdQ%+imo32dxzGn%yfiM`1C*I@RViP4zitO5ms&DllfS%rSGQ(g@~v77Hy%alntk<% zef{&hlurD)*8QGy4+Y|a4wQP-z@p0~foznQ&-9R#}yLMN+2BiaW z41qs$gYFmyCCT_==6%_<=s5WOF7MG^siW13bal^jJ-Z)#j>u|&;USAvfR?fUkqp{P zYQips9v^@XiDV3oHg=ejMy*}(*T{DX=7+*PLiY3U{1%~28ve-o0Onzr1x;`9{g8Ya zmwZ`#*+iIDe5n6Qz+I2fJ0Xv=G#-~TxC*ET8*@Nf>9krK$n%6?`HA4@`X`D+Zr{c& zMH3u$d%QmPI7Sf#jqL@bpdWbvG3$P~xn}5Oe8_8A-#7V|vK#o7kVi){5h3|_BJ1!* zUSA5?r+dNSTa3PYT6cCso_?f%kP#F%mf*`M>x3e)drj=^Y-4zX13JHf>@=o*;qgSk z-R~hff>GPSvM02&)(%zYaofiK+3x*MA>rFHc9U*CCmW)L1sr4Z?FB-Q!mZt1o-YU1 z%RD$u;+0nGX%PaK2z9*A=W#>J9g~hQ+OUx?@X$De@3Zn$_l(VDxO=lM7MJKK`#mTq z-h*orA7LF+>;6n((y=w+{C29m+tuuNve$uP3D4B2qr}vihSWJ=lKh_f z{$$$HnY87rX{#SoiJj6uC8m9uOxyULwsrFPw=>UouRh=Re14Gl{71v{qsiyc!Sk;T zX>g`=B9nBqSGwp9{Fh0xFFoV?otqX!R0r@u*WXQ&2S!HJ8J*e*HnkbYCdtdA8LR?H zSldkDsZ7y>%(JK@fx^sCBxJ2Ydpe1h9Y-rqnx%A*g=5MNK;R&-e4N8fm&xLs@2^%!GMtOs;kunfTjZ5{nY7^h`$8>=!mO~u z+N2B4stA9HqH-qg^ivo~Np1pH{!(mKrR}*SBxK=Kc+>Wr=lSAWqy_RG3#W>j`|<*9wTc9}=S*m;ZP`CJ zmc3Ww{Vb7v+N7+;SLjutR;O1P`a`iJjf!n%xqFz{oM6%Gg|n#{+@Ja`8lKPgc~2{B zft*ds`p{RfWKz0pi^(i3TMx?#Gb#EJc69NpU>W=#Os%4F`-Uh1nU^4u*QG+6cL6I& z28EGAr$40 z+5+?B4bkZ>$a(>H=TNx4Ljc0C`1jRLb-}JUaal8GDiF?Tw8PB7Ce{nEMwL^KoprL` zb1AqY1mDsM<4|ngSz5}Kk-C)_@5;QS%80OP9r=r5@@2tcsy<6nnuzlF`?=2#SyGv$ zZTxda+e_JF8%lMVIc`B}=TA{No!ZJeb@_fx;ayfZhoN=5ls{=BxfCSZCw`Zyj9|*5 zCIJfz!;)ocMcx$ls%B;EY;HR8may&Ow?is6y)KXJ6R%F@X;b+&4nGNQJfp-8OVexj3g>_IBIK z33=K3jr)=^cak~B5EsFRv`pHy6>L++dRTFKs`UeDj^bM3N2cm^F9J#Pg=()hoh{`MK^5AWHlbU} z8d1FZ12Xe2jV20(CCOda$jgtoT4A@8kxNRiU0BqV%Rk?5FF&P3&syRc8Sy;M#TvS>dtfZ^(Hz~}G35HGqY*IdD}-l??-GazkHnaM)+mpS2I}>f44_wl?p{pjyd%f%jzrqy@-i(9n6DckB z)lwr%%>E`Cj+dlzn>85cSv7rCTkYC8gjtSDzDlUI(n>qY58Amj-k8$DAJlqg zK&D*-m5+0BrY~xRwHj9e4-Kn!ZTIVF^tV@}Tqu+ioR!F%xFt2+B@zLz z?h+cphqXCPy=u*PWyW0{H_%t=Qy6ddN@raRVX2FJz}A>uGn(1sL0a7YmMx-E&+M*V zqpu!BQGka-*pefKL2jNI%c_i6IX?3dvIpE*e(e{WVG-OlG=q}QXqygaE}qdbK)KSc 
z6%P%|z0%nn#(Y-LVa1Loy_Nk@Zh6r6t@-WsA8!+H+l+^mE785xo06v1?V@qY(TvKy zY(E~bBfX8e9+W*(1VGP z)QPZ`i6^rY5kDs)Stp|vCu7YgYd?(2kyiM<&Wg61O#3;RZqB`vovk{X{Yf~7SE%r& zNEx~^r-*rqwR!5~eOgWAL<(~PS1Rin_54T5^(KPcMUCTXFQ&D_U$xom>QsohnJ`Z* z!0uSE_;Jj19JSE4&gPdq*61^jw&bX<&$gM1j+@g49BS7@O}`7DQI5`m3`LtO z8cQgk*_I}8^%;AA-R-_n<;<#n`|>um;_vqF%GdLL+7J2lO`jku2yM)7E2(^HJ1up2 zqTaF~p)d=AO4CRS3&P6|k~oM4#>(vr11k~YMPrn@=g#Y&d&7zq%6y@GVn$%-g3yrk z&e|Z!1KM;|S`hKE?O zuw29|E>4h_nOs;>Wn;Z|?Sta8qNb_#OKgm72mL{6YS<1H&63`ijm0GE#d}HfK5`}a z^Cjm+FC87Ewmx}6J18MoIa-Xou~7%l;}rjn42+}Ym@YUAHN-qFEf@Y|%{ui}4!P~ExKWtfv<12A)(hVs$wBHt8-~^gi^bPi z&8KVBVZygfTLfniTm%&MQ<;~~MseTRz60&JU~ceSsZHQb*_zEs>-JF{N0kj+9^&mBnQsV}Al=^Td9ZamX*x5TS!G^` z?Oi+j!@}$9LLB5@b;Q298;E3x4r@J!HksR|N9MEB1RfOSP!1C{2duNJZ3p*#>*xG% z?eUB90r5bCTy4j#Y`-m=vvnlHHEfO>sPs*WuUfKqY4xT*88ivCWWMnb+p4##`kW;^ zu2Rb}n4??Paf1wc@^!nSsVbDc(Bo^Z)8jowT%DWgSC67P_4YbERsF*z?7l<&1nC$5 zjMiD5FF3Q(ul<#u*xoy|aSti;yDg$34_YX=Si)?%tu9S7+{|BPmCF;(m0LW#9MLp* zG<`bGr*D+$lkB%2*5NBgN=<{N;-3e%y&g5b5&Lj2wK*-N@hX?}u=&~&RaL}+`0Ufo zZ{|7sW-_>&AN{??jo1bki-DcuH&eDHNErg?q_MaHqcJYs9}Tf6`FQxYcfwAbfGo7m zbEx?tMX{H#9sL@?tf%1n=+_x)5IygWC-Wl(vhodM#g)j z5BqYUT{{5{e?u#_(>F6QEM`a zj!Rles&!KN(Ctgqqxc`B4~vXeJ`6tBe17ex*Y4-pwqW+NEo0Lfiw@axQXQ9Oe;mp& zhMn$}OS0uhY=qefd~y*qY$KxbS#3wVGz#aXJnU;P&*A6(C^&Qp?sPim$&E}DwXIed zR`gmUxhh4@h=mYiUE@;$GE**roE$5&mx2Xqz3E*v7cYf$7R$vJ z%At!N$MUrL-V4_6O#(6zt?w`Bgrm#9`c@UcwZ)TjY*psOGwMY1#x^=1IdhQ|sqvqY zn04v($YjO zWS0)YldjSWU)lWM4sg;%myZmgpGcJP-zB{$d3jxEM-1*QLXa>y*(usipB`3xg@K(p z8fq^_iVA+8@E2`tHO~Lih~#)5ASjWN#H_JG^(l5gOc^5yV>yRTi+u zy?tpZMXpEWJ1mahBuVIDac4xOV9(^%cw{TzeXSCR$H?x1?+BmZ@;Hv1c z5X{%=vG_{yjdVKil>SCv0keOg8$(Kg zUxwCU8y4ppsS!C>s+niYRDBxz_m-DW6rhMDPQS)p?X^}4=`Fa>sWU`$)O?<1Z&5jV!L*@X5H|e-a*u&bcv*UUafV^mhS}1UQs!FC&W=Yn<@_dk#Z}$(jA)E@`Sjs026KIi2s@ugciPcBvIX`pC%FjLeh!>csLl^C9$OUs5QxP}BpO6=%U zISiS&iH67}5lhvRDcJWzA-H_pEK9*d#?xKCIxaapB6%Tb-e;ziYROWguU?67Fv~MEM4MQ5e zv(bGVSM&>|+wwB)JfO#@HiJ4Y|I9M)q(EALvMtSM`iSdas5|gKO*UKp5_dI-+Wvly 
zy!o2Zc$Mot5v$P~rcDlYQIV+}&VhH$mp$T}SJdM@Uz1HQdot9l2RizEkiUK~Hr~4T z!tvow)Agg>@wVSkPI#1p1)O1mK&;{9N7rmYBs0-Y7337aqhLvTWuk+j+bQUDvn8eb zL?=7Zy%2SU8*UUAi@7!KgoVV&yzU;jn$zj)Jx!%E7muGD zu-3Srq@GAESu{E57<517!%RwCn$wVb_x-e;W^1Lr$ybkvT+&gBHYyBL!@(LZnRG2S z8ZuL_p9Z;P^C;fYxiU2p-|dony5)|6`_!A~M6UVjinbTxO_Xf!g)r*3*c#PM^&@0e zPns*ean9r`|fr@tIMMU{s8g7-(W-Uk`x=+3ea;wf!w70!- z*k>EUd7-4m-of3p>f)wb{R>40=lJQF6%F^s(G~}{qq^zYuR-q39~2$E#;51@y4_oM zS{xtlO~3n%$b*1lvwrZwZJt);DS)F=fDG&E3y4E%QAHmwzd)X)AnXo&JueK2P(N_ zGR&@OYI==4Yjw$$nf-V!*z0YMl563W*|jU|we;p!nTp+K*WD=WW80M6Dw`rmZqj>C zj<)XZTf~2M3_h&3VsURA|5@#o5i`5fy7%c`+}B6fZVt#Bra={Cm7x|2br_OFxdZc8A(8J7$qe4 zkBJE|2LWacz+eIxF#ux=U{nqa!-26mFxv1ZQvxRB{v1*Na|ZVB5%vF`g8+XY)qnB~ zpobs?62g_IEBnET*%T5C7gYR6;cAyQBGY{zGYU!T?K^R42C71jRKp`|I+#sGe^c?3;%KF!dx(jfW-w8&{4j&A5sf3hZva>K+a<#<) zH9d;;ueimagG(NpH>suXIHAyQgd=LWzV5)d;cGG1NJg~J>J5FuclyD66eIEe5Qdua z+HEs}FCIn7eUS){^M{a8WY_W`Uu*y|oXKN9G#$7j&&q=lTHimgu|MDw0Wjvy_Lqns z&$xr2gGr|2G52F~3?v>*x`Ut}gDA%wHyAtujOQ_k0!D@Y`zFs+uT)-U#xt7k;9tX0ym}A;T7@B5ks)}C@-MS6Dx{PLv?B(gn34SR zy5$hO7$Q=`%wq$(Iw_Ht>VI&+1Ihn^tbn~OAYmy;3V>vPMOHwTL6DC<3?QeVT!ZQj zg8WY@204e6AO4FJgV9DXr1)o+{|@qh{q_0xPyElH0d&Mavh0hXU{lKFa6 zaZWrbJ*`c(EcK1f2s$o5z5OUo?LbzJ6k#pPOkzqR6owWL!v#>FP^-xFi=DpcQ?7lq zjq}jCR3euX#$NL{cs5E7jb4D@?Ri9`=a4Gl0&8c+`$NYEmMMY%ie7 zfE`rMryfK!lvd=%z=R=Q^C^bP0J*UVCyrtx@v6o|&ucw{OIQ1l(5Pu8iPMlfC2m3O zFnM3jZyxE&Xo#ByX{=-DhtEXPLt5n)cOV3$#Ik|ofcz*S41bF)4iSW=f@AOyzC9#| zs2r}Vebn|^ROgLl!$w@fNlCDSjWhju0O=Do3v%NclgZLO9_`Oq>gd{+9J@%0u zhZZ~!16<&^M+T+!pNc9dr(oFruO3m5Rgh#5*W;{$&;s@gq#88QAg&;^$38O9EQ5~w zPb=Af@h|4@ANThe_}6Cu?E%Sq{2pD1n~g<57K_IR5-}?UXc=<82%{8(mwQBUsgqEP zx>QF-y@x1yI4E%#f(6VEcB59DUZQqV$Z z3kche_M?;zz zvz{QH$8aZLJpuFy>MOuK0pbaw3UE)rckb+b27D*T?=jyAsw~LwpOhy6H;=0?pgjHk zsgIv@k3mnscLLB8;GX;YwSdUn-_HghCWtu*JD@y|!wx9VU*ODs5lH$m=m|@?F@yE35LqW3swY@6BRt`r%8N*Tn3iuncRxN}{AQm| zfoi6ItSs!32P33Dl)?oso{y{~oPz|gOHxTOYN07)3As-&9M*pMvaAW^Cdj*eC9*X5 ziG|a*M$56)Qc+>A+uxMyA+d&dNxzYD9I`iuEV|1ug^YZ4mP}IpTnY+FmF6vB1Chw6 
zGF@&?_%h_HVGxvUmdLFf!+&PIMI}I4P4K)`Si}Nn2st}7pDhvLdvG{J8vv$@Ut>FH zG^23^=L4}Q3SwD7stw${@5;+~O&=$iHYCh}wuv{0l=G*ANu9>8%>9HT;Ec|O1dLxZ zjz#7g22H@JK~zRF#1;}up}22shLHb5>N6ZFWKtM(l-o24gZX`ln-5$H01DiJERg~K zXvqHx1%l_~V<-^Nfi>XX@;es@pg`~sK!E@a)Q%YilpJ{BcRKJF4hYadfCG;HdXXKJ zdVtk|uKt(~1g`)t5CDPz4Fp6WU;%$En-Bd4kUs$5fiFGg0s#>SD8N67K!6E?UI37S zfCU7oA7B9ing=LAfB*sz5PT_ko(E_kxOo5~5FF4iD6nP+fPlwO{s0gJ%pU*>{{#U6 z8VEo@fCCl00s}xK!5`R5D?&iK)DF8LTYMOaGN~F0gqWg z1dcGyzU`_jE&r_aG^6K=ft&vdH@AJ;?PagYXCyD zgj08kiW|0G7e25nx13HezD!U)or3K5lU3j z=tG}Hs>T_^yfwBoulP|Kc{18$bJSagG8dv3<1KRCpT!&LNBYHHllV@bXju!OaVJVC zE0`<>?TgXOO}LddGkRw4Vc)%&$u0YEN$KuWDe1^uvw<| z4f0q=JP-^nEbIwO5G;jn8N%EQ*BI>r1u*@BNxXjXi>@SwSgiNFQIqZEIU>U^^=lWr z9iWe*MJ(=R@B|CL{w>Un*27#mu~c?L>dYLZOyI|hG!9ca`K{7Fn5-XP%YpfB1nn&r zY<_lkOs9K0p5pdW0Drv~lW>c;x)1NrWW{b`0!kB-md*> zHNiT-*{63u`y(lZOV(H3mcmG;udDWc7&V;PgAG{8J=B>-2JUf~hknf{-O>ze4$9PP zBPVIMPCQGvXEtd%XzI!3uBWBw`$uop*jg&aW;1juN}U-T_7|)hnjzAYpY%`7t-PnG zn)kf&!Q=SRQyjPQx+mW0NaH8y;HGJRxM zzh}_oHbr^V8tb%Y|5BhcgpYmkGWGKmx}|7$%KdNW>P(&$pOGv3u{+}~wi0PILXdqI zxky&fcqS=j4XbQ^8=cywiI@X5B1Xi6!9_b zrt;~7&8IAW3&_&)6z-1mUST@V!guRaZcm1|HOo(7$zDpxIM}<)2|S}Pb4pn^#KojE zFR{G7?TfUlj9Gwx7#F`oHyQfaps;X}bv?gqF`+ch`3d(jBf5LeEbz%Kq%Qj*y7z`H zDkzR4S0etU-n7&yk;r8Bnfj4_9TZyY>YbtcX)gy04Xu-7GAw0!w1@gFJ601^Au$sJ-TySNqfz!`ek0@{d2pGdo3H`Wj-sq?+&K+TK7fnUfJq@cXV(i z$MSm_o>(UF;5}Va{R{Dj6;E#JEuNgN@0M+?h`2wn$a+}cgFC7K{L~T$b3?C|Qe||^ zz!Hyq!%M@5m9bfR9|TMr`ixsEjGY;{aAaTo*$=?TT462Tj#s`+Ao4ybJ zf&y3cR^*Wd`bC0A7uY}u@n3@RRR;W6Wl1|IV?>kyQR8y*6vSxmG zFixPXEf4;=evRgEBB0dev}Niu$yZYXwbxD|ScV~3H33E`>kAp-?WwSb)APKpWq0Qx z$LsRbE>^f2lsVi!_!tUYd9Zx#6dq;G>yA(n_j5%gP0NHRCw)zIv?asMB6_47zbe?< zHF*zFT;C`BG+({bG)1tO#L*=E{FJXXJqtYdY{eca}qHkM&>tZ)CT0Vpkba)tOe&k@atuc38`OGx*|w^OCu9As$j%9{pS@2qrnsGw{PWWeuiW5E1-B5P2d{2xHqERU>@~(r&gpsVlNwv> z;8cHpAn)}~+>6*~r80+pT=Pz(IQPw!@7mgB_aA85M;@w0hwuA$!@6$|yB4G8*SoJZ zzh7i?Z%S60+huEcuZy?O*lyZU{q3+lN*qociJu{}CY)p$a+`?%@r^*$y79DStNv-r z#=2F@XR4t9=eCF6;fAg2ciVmhJx^cIcW>BaKEnI-#^Y93x@C4t!askya&)-lj^FQp 
zew5q8^7GZ6G{WY&cV$&G7|nWAOU?FZIo@>kDzQcqG0|m$=QSBR%PUqQ8%{YgK9t6G zR=Xag`%ZS6zRJNKKWkkodmvbk{ZzO8Rk zH!${4$#W!*45_otTiyv^%- zmOsGD=FydUM=qRi8*#9oY)C-9wGadRNGu^(liyc0IGCH@_+>fd!GuQ$BO+`yAY3zG zwA+5dE#OvnD3NtQQoTLtN@(^(Xo9TUtGtH>9--P-LPd!mmuWpHM!QmC@Dyr6X7MOW zr=UA%I<-i=y}OHpaY%QsO(Cyur*W_pU#PciC{u4R9Ot0I@POCj$wWf9f4%eTV5Ash z_>+2Ze;fz%n zvq!{foc{}tD8Yv4yq<8j-T=FaBY(D!5x3oI^((GCsPCmZ5U$T*kp`S|pP`e;V`?7OGiI8*G$;ZG*W_JqG)iRUKq=C+Q{ zGj?DM2`e#voNEjZ@^m7V36wVuQ(J{(#8IPe_WRvl$sa?h^8%7yCNAwI=6EJXOeWM! z_zVZfpI>#VXn1r$oG5A?N&J%T+V^Lc+EzEUk=6oH4!iMm0twv$)&bhKU7jfelPO&* zDW3EZ3-wl}FQ1uLBsZUgJdT1~th+-6NKyyRI6OIH%Sw{A?3q?i@3lHB+g*sK94aPoO)-SeXYpGAT6cXzDljVu8aNSc&R-k4595Ai}7CzvvB zr#q4I5m`YH8VfN`Ww0D%urXy~|0e>SUF#Or^2e zrROS+3Yg_f_vFe``%31S$|`QFrQf654J*xzl3FG$=6hRo)mfamU)rEknj|@o?69h1 zij~E%IJkmep#K8Q0%>W8RAq)FHxNus(ww>_972^`kIO%kLi|DnFHEY)mU%>rU^-E? zC_}OL9Ad!10Cbn}zs7y!V zb*xsQvx*BDRF_#_^+rr5N(f~r1UU;kz|Lf?i*ar@0O9piMOkjLF#JBtAe>2Q*g$U+ zW@@P3%K;C$tG@JJ6ybD1#k9zT3}yD1=^RIGuTU+yX{~i~v2BrsT~qN?<7opaY4W2( zF1BqJxN^(MVO5TpNT1#zYjqr4e0AH3vXy0O)go;|n9Da$ ziS$cQ;o5XIxuXkN6c?H6xP*8vonEIV65^ipg5-T)1;Hv?SRw zKa)%2XW9yG$|p6yh}_CYA-my+tx$TGQItyD4<*e@{7>yJxcIb~OxF-s!9v2CJ%m}# zl9kHQ7CbbwzzetN#!8sessv0|OBS~xF@qj&d-2}ok;NwFP!oL>gqw$_w8QjuxdR># zR!y-S$Z?*z#P5TE*7a2N+uINsUHLwWMl_tG6_w`$daqYCPJB}Ss35>nUKBf}^Ff%s zQnJvXx;@>U&YYRU@PrnEq`+65~uw7k```Otkqjh^U(!yuU61D)DO>w~BI#~!iN$4OSS(iY>% z$4^n1I_n!x$yKzGTVzCyU42~5L&j~OTo|u2@oejLa~Mx{VVgUvY4T0uk}g@j!-=>D zlYA()ZV6-KjL9Fqlj*EenTk`{=2N*3rt(v#3R|X%Q_cKr^NYL~eWs?W&8J;>@)64U z9sW#=6bNsfqK>eMcXuZ!g9@Ml+H3yey6d<^uQG|l3$K38WF5Vpaj;_?5uqFZ`G#Y3 zA~)$;0cjb*T%eS+NO-ors;?b$zo;pyXriJh2sYE!SKVm~lVG8v^6uChsN}4fVf{qk z(JVc30Jrcic|4VW`!0uX(FB~n<K9=Q-1^ z$$GaK&M>F36dxt={b*^%r15AnuiP`}5nHd`5uv+KG=Ic}TJ&qFq5DjG-Lynryo0E9 zDP(Bnh^}QklXlXtwC|qq-~~~@s@HeDg`UwGmA?B?DMB3H_^^pK(^Pb>awK)&+)p8S z+?S$U?>BEps%0_asC3ljTWk74b;S;X<)wudrKM?tjk@PWpR=u#IyH0*HIk%@RX^<0 
zpq)v6yxw~xmVUP*uXH{8+IsZb+^*o8kLvZ>_v=HO>K8`p^A77*eX!5&%e;EufV}R^5av0sC{hX!R#uix|zRQ#1v%EU=Hs#n1hZ)GhB5-Yb`WT$}$XBz+@|@J_za zGi``Ed0a`UZ8LQ!TIaK=YQf?8ZQR4tv+`nQs%?k|jOGv5-cr6$EZe>uQ@Xu=8rQa^ z!Y&!(M}Szi_Z44UWT%Vp;hi%Vlzi0saazgvU2CjPzxqw~Ez@s$x7v!5+GfMw8!xqe zGb`rqWJ9LN6Ol_9Ki&JSgK(Z(g+7Ljy10bHvE4k2RV_>E#OyemLJx=8CPP{6Nyl-L z?#<-;XQ25ml@u*EKW!pfS@Gf?L{&kQ;;X{zrGEa!r+vD0UmqM6zVaM=;c>lFjOuX8 zyf6L7hpCe7*1}i%n{wNBhtK3R3)uM^%eL7a4%v^`4lgSdmWfH?p3IU&d|!OgsigN) zS)og#rJ>2Ct5R6MrA?&vY~MxU7as)=cT1&)uH)3rpld8IEI!fS(CbE6cF$awvbEE- z`*aAwaPwgMBrs*fFHY!yNKO((Zr>+;w{k%v(cQ%0uG-_-KOs`IKPUihFB zCt}_jaPs1^Ueam%TqzO&_`zu|4CA6K`F#Dz&v1K`Z>;*G7-{#p%ds~241KywNc^+( zQQr%vR+rAu3d1UHkcifh0$V)Vjv~`l(6H0L^d2f#Upa1NqQ;&JBitYJf3XnRkXdnXlqN~-fQ0nj%~en-B0(!>un2ua4pwzg|kn7|Nq|m`9FKR zsZT74I!KpDn{OSCs4*~Y6t29i%l+cyKU#gWqPur-+MjRO`LXByd_SWvgLcP1%eZBro>@dStULCu5h#~a!T5(fX_l^3}#`<#Z zg=GPB^VB98({_!^4x2H3P(owS zonfsm=<|S0x(>sQ@d#mFyxvWVs-iP{&fo*tD1YWRPtHEaZp=Su(QZgZx|!ZwFHceyL;aXZ{wDPtxc z<~%xi`Qll0nbjs&zjYkX$kPpVk^`NE#gV@I4>~-W|D(I~JyLV`Xk~NoHshzh27Q-4 zQz#DPecOL1_Z!ybr_#htvk{4R>BSpmr&_}0EP20O;Zwme9r80RD-Yc>bEH0?&iZ)# z8%Mg1Pp>%2d0f)SrHN6ta~N`3(H3bRn8FuLytwPf#M&NH z?Yxej+1Ic)cV{+>wX;inHO8Eb*qdvv?XEnMzh`sU?Vg!(oMubOsl<21Hq0LSIrebi>|~`b(cbxgc1z-EnJ48{JdlU;;Y`wD$KlMLS6 z(Ser9qCUNm#4nHr*_-tjqX|9tN?U`ReEK)e%TLY|Vwx$l`%4ydCYv2g@2V3o-?SJ{ z5q3$tjj#4>NDWG{ywV!%bG(1c%KQ}3*Rg@#qX&@HohdfYTIU2D&(GX|r;6!*3c1-p zSy51`z3B->u+PAITk}&L9Q{HCh^Tc^4*EH_7J86;vxnG#*(S1&&-4d^DC&C2(>o8TH zy2N|0-$mBRw6`yJF7Y|`)2t(hc~v*?rP8j4{{K^uS^Xer>5NHBMd=4G?fxI{G!hvv zp1w#kO5kc`!LP+rD2!6^V#r@W;S%bbP=%zz$Iz4j31gm1IHZsvC*{2E0)!hOSOJ$O zL*)^|7jQZ@WG({64;(dD7D846@)1z1gm460wmi){03s3$LkIs^pPyK2~fapM86G9|P)rNApuDD7E{XsO5LvByg9en`} zSJVr2DQ62@@$~A3zPdRD_CaiEO;XLd3c7iNiKIQ~T$L@WxB3f2E31~QefoU}T%U7??L?md^3$6ib$UZ3aAJ60rr>E%K|p{)xO_}PycI< z0E8mw;<2b>TOJ{BP3UQ=>BwWUB_`65nyyX4Y4-k2WUai!bb5&>#8|IEtWAT5l&Gwy zT$1AIzj5HJHUmLSOoO;(|GWxRyU$;$LOvsmoc^8=TE0)Y3*QvFt@o&F@cwzch+7%Q z-UvJtSLY*?9Cwd9l5TXI==C9sjciywyXRJs*FQmwkEYF)=d&OL$*~a$J>yTmomVT- 
ztD92L!qfH@Kt-cL5vXPcuz%p+Z%rfBH3)b^Cj$t<>V{6>f0lGA zb%PZcKaD0}(*y)d02w0zOJL0s_*pEZL;)0mNfUr5u$cmV!&)b4D2;7xz#I!nT1!F& zEE{W<4a6he+YEPVLbAVm#Ea7-i3MwK zJhl#NO-9joP^QO))+ZeuSbUg5C_M7j`5hd&S=QYnvf~K8)aGK&wB*n6Yx@}CC!Z}c z!IID3?$dpkajo@EM%k2IU8!9wR&#uJY1U>MxP@^S2S^K^42oTIRT0*Yiyt(HfZ#xya)HH6&>VeRIfxZ#4&*yQZ9sE8Z3K}21kHi0Cj>ny<$>sD zrNaV4oTY0=8O9zCgMCSD zC}tm#Djb5T0b}Zt$_Axw4rVsHG$H+I9jSGlwof@)H%V*bV-_uS)-9c9IGOv7zsinj zK$sJ%k%!X?OSBF|wTZMt#@IL0?@-yO?BHBm_e05l^OL7U)|z5u8JhAi7In6NI+Bsx zhB49Pn$ZCwFT%I)7h@7A-TTPif-iF+1d)XFBm!mt>~FQr0zv5EBSq5 zybW)dR~LaK)9B-C4`oWq>;j%WF|69vBC`~D7yml7;KS>3--Sw{&5F7WlUjRFumj^j zIxr60{kEcjl?DI~Y@(?ZtazC05T@<$v2q6qP97;m#;8A*f*5T($dTn-}( zZm13{G4nJYEz7p28(P!EM1Qt6uf&L!nve7}!kld@o-3^oG5m%)W$j{wXUn5BRPLV;OW-UFCmkw7lx%dkuU za>18{LI*nE0sOS;+y$^xd0=?YxeH*>Kryv608<8Q281^hT^RFaxM|6DWmV+`;CHOe z!gi%sU^r0RaNt9*WAI{^DdB*y%dGG};!B_So&OLcK-@w&4&!X*(Yajz9zF?&>sZ-b z;3M|Tjr8f!+>yI@aFG?QFLrPwNkKEW%HUe)yGsg9;tEGbG&LdHo$T#XPrO$Uq4ixC zA!%x{&nw|`9ST$uZNHHOi!JW&bID_JT=5;snnzdYm`Fzq>F_{KEK0;?p-6$HEj@oP zLUtmXS1nCIWXTA+aTSd-^0ux9p&RpC2gz8ZqkD?P`>jm=RPA|drzocevb#{flV_mN zpxzpqhG!G8S^V9y4lTW7dt=OF#FQI#?W%Q^Z#P44#pgnk#7{VY4sKk9I)fXVzeGGN z^Z{|eotk%JOd-$@2m-hi+_3ZqQh`E;%vP4A)X-xA&^$*G#dC?E%o%_N^Z{s~86XCB zAn_;vC5Z~ebMyE@AEy(-gdJkCy?|~t zBbqQG)tlHh(Wi2Zu&x5pHgfxL`p3zMCx7jShA`Y1r;e~trjGsrLg5D*72z5~%9#OJpcT3f!IJ^cXV1nK z&;TO<3;l**$k3Sxh72r2b0W|O%nDx11Yp66VNMNJ45${hv;xn)^%;{U?Xz6+Rwu13?Lq zv`#f8VeB|vn z;;2DsX`db1_;iVS$D}!{(pmcZNZE`z+H@Cj+nhbV6&bj)y^z5&EtOk~-+V-)pQW_+@QfwY z)t0BP+QC!m*6Fos!r3Dw`27(%jyeV9b#5NXr*pkLJV#loy<9Y77d1LkAApBgLY0jl89z&iyy z=mXqA`Wx>+ICx*cN( zN#~ZO7x(yPKk^^E^x*SKS;1rX*vrHHr6JqAme2TZSyzL*AD2#EIdG*QHg3JpvH~i7 zxUv!{Ys*(3G3@vVuW9)<4s-WbAR~52LX(u%k}J`BTyH7*$Mf(V-spL>%+AWkYISzP zc9uysUtNPmv9QPX$GOrLl#SUaot;$$aY$u<`I1aab)Eqx%DEr^{@N>FFqqD<8DrvL zZ?c~LL7xmqCsqEkM0xVwLkI>A}JRNM|`_`o_v zIaohT>?yCOTplO~)2I9$Gywcv%;y5)<6GmQVDYdDfGIs#{N70Xw>doU4U|(0a1f0D z=2t!OyZx0%fY2hgVilqT&@mUje~tv#BBlWnZ)X*vM(9Ksu51Eru$f8{U7#IwJu5g>j>nEc=UwAsVtqzh*IvW_Smu>-G1lwoO?a@eq+exadGD 
zHsi(L8}8rh`$6#Top+%h#_H2T>xFkRuOBnK-MxF)(2Cn9-mQ4NT$zL1?OHi$PV3tz zR$Ok}ZJuhKxW4_;)g!({pISwQR>oqg?uSq#)b^i_;RZbbF@WM2l*_=ESK5w+R-jx4 zg)-2F-^yZ84TFj_l(e8a4TwW&3q-;!B7gsynGl7Oi)=t4Ox@=S=oX+5P~rwR2Xz5a zc~R?z$7P!mEN7UxLwW@SgTpn05*Y{vgC(?x-wtX4#q(p&oD>jR5fof!>=J^Kq;x%f6_+h>vplq)Ohoi7}ZyN}`k02rAAn zfmwQRnVo5Aea?`5ZzZvSJ)*Wx*6#8Z^ro_&IQHzov~0Rew^zPr)--nF!P?>Z^%6&q z#Ov)Y?HFAwp3)lCK*LxywGPs9nG#FyFEsu6l3dBKR}n;W-DCxgh>R|4*2y$%fko?% zKAAetlHXh^5zE{b(a~70O4m`(GseOaa*Cg3t3Qq9@b&#hB*PN3xjB?;j^3kBvsslR zR?)seKCqZ|S()6m=>`KKpWoDfb-glR%O|47n=1yWBDn>;L-$(1-|SyAYZU%KJ%vB$ z0O0>J`w)Ev-hoXxGjQzhD-{rFVH-`_cBmQHPYj+8Qv*{oUqBw%2jpSC06GAQ0TT+L zPy~~CYQ1i28v^RlxP7_MLImvVw=RTq3@88y3xwCq9A>VbF%ymrgb14_IhJLE56eGu zn?o3z+j&7|ZNb(~7TeAVG=Vj?m`vm#H%vqulZ?+W@y?u6i z&DQhy%Ai*E{5%>u)vQo;nqO4Ron)5rm@5|lo`jO=H} zfLoaAgZ%@Uup)r27;te|t3$qpPB#MM2YbFkMQX-R1*OyZ6!2i`0JfEtH3d1)4v<3( z2G|C|DZF9EFm!yT{qQG8Yv9SBl+hFYgHrPBY5d17eATIy$8=d%%6C?0%M}Qa| ztGX%1cHZdBdUC8!CSMTi54-Syx-dqZtJ2k;-GnJc(jFu2oDwmeFG-S+2#*sH5ay!N zZq}m0>Kr3-N#IV5>=O@vls<0{Q*%({(w9EJp0RDcSbtP~r7@<)INDX+S!9G%TV8Q3 zYeY<=yGpNQ#E>|aam_yb0I3eea_)3*m~rhxEklA|ZjfYoF5YAQi%r>3j6A7hj*%N< zNSBW=^HKMl_r)@UDxwF|_uVNZMP|6NssE@mt(5Cg>@%#^s?84DIUxLYM9Z^SA}~p6 z>*8uS{7!yPn{U<~Q(k8&YeTIDIoW9YA}TXplIuWua3ndk4G*xx1|69GZ+q<&)sz@e zGC+mbU>ZOv0VM;#E@;Nhn|t95UJ8Af$-@Q!W%Mwm2LS;3fq0-FD3?a2!}7t%e5?T$ z5C^!8=;r_S6;1r(#0dQVi~wAgHzo#3u?C}a80lqYVhu`IVMG?92|}E3vQlBxM%#$v z>6;sx?&cx9z>=G>1CzS)T^E#_2z5&?((Ku=4c~Xa9XD{6nuM&mNA6KDADe8{I1bb! 
z+e^N0>IX51`k)1@w)=xD(B+_5~Hg1=zsk_i_s58HA={9!RLa`Ph zj7ADy?=?qgd`x(AcX~y33PLjra)*TgZh{%5XNj9|CEMA`t4utEon?p&6i0R1PR2%- naeYHp^VrJ%xoLP(YMOZMvNh@AsFYP{X)B^v&RMnudF}rI&f%$Q literal 0 HcmV?d00001 diff --git a/docs/release-notes/0.8/release-0.8.md b/docs/release-notes/0.8/release-0.8.md new file mode 100644 index 0000000000..bc3bee06d3 --- /dev/null +++ b/docs/release-notes/0.8/release-0.8.md @@ -0,0 +1,126 @@ +# ML.NET 0.8 Release Notes + +Today we are excited to release ML.NET 0.8 and we can finally explain why it +is the best version so far! This release enables model explainability to +understand which features (inputs) are most important, improved debuggability, +easier to use time series predictions, several API improvements, a new +recommendation use case, and more. + +### Installation + +ML.NET supports Windows, MacOS, and Linux. See [supported OS versions of .NET +Core +2.0](https://github.com/dotnet/core/blob/master/release-notes/2.0/2.0-supported-os.md) +for more details. + +You can install ML.NET NuGet from the CLI using: +``` +dotnet add package Microsoft.ML +``` + +From package manager: +``` +Install-Package Microsoft.ML +``` + +### Release Notes + +Below are some of the highlights from this release. + +* Added first steps towards model explainability + ([#1735](https://github.com/dotnet/machinelearning/pull/1735), + [#1692](https://github.com/dotnet/machinelearning/pull/1692)) + + * Enabled explainability in the form of overall feature importance and + generalized additive models. + * Overall feature importance gives a sense of which features are overall + most important for the model. For example, when predicting the sentiment + of a tweet, the presence of "amazing" might be more important than + whether the tweet contains "bird". This is enabled through Permutation + Feature Importance. 
Example usage can be found + [here](https://github.com/dotnet/machinelearning/blob/3d33e20f33da70cdd3da2ad9e0b2b03df929bef4/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs). + * Generalized Additive Models have very explainable predictions. They are + similar to linear models in terms of ease of understanding but are more + flexible and can have better performance. Example usage can be found + [here](https://github.com/dotnet/machinelearning/blob/3d33e20f33da70cdd3da2ad9e0b2b03df929bef4/docs/samples/Microsoft.ML.Samples/Dynamic/GeneralizedAdditiveModels.cs). + +* Improved debuggability by previewing IDataViews + ([#1518](https://github.com/dotnet/machinelearning/pull/1518)) + + * It is often useful to peek at the data that is read into an ML.NET + pipeline and even look at it after some intermediate steps to ensure the + data is transformed as expected. + * You can now preview an IDataView by going to the Watch window in the VS + debugger, entering a variable name you want to preview and calling its + `Preview()` method. + + ![](dataPreview.gif) + +* Enabled a stateful prediction engine for time series problems + ([#1727](https://github.com/dotnet/machinelearning/pull/1727)) + + * [ML.NET + 0.7](https://github.com/dotnet/machinelearning/blob/483ec04a11fbdc056a88bc581d7e5cee9092a936/docs/release-notes/0.7/release-0.7.md) + enabled anomaly detection scenarios. However, the prediction engine was + stateless, which means that every time you want to figure out whether + the latest data point is anomolous, you need to provide historical data + as well. This is unnatural. + * The prediction engine can now keep state of time series data seen so + far, so you can now get predictions by just providing the latest data + point. This is enabled by using `CreateTimeSeriesPredictionFunction` + instead of `MakePredictionFunction`. 
Example usage can be found + [here](https://github.com/dotnet/machinelearning/blob/3d33e20f33da70cdd3da2ad9e0b2b03df929bef4/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs#L141). + You'll need to add the Microsoft.ML.TimeSeries NuGet to your project. + +* Improved support for recommendation scenarios with implicit feedback + ([#1664](https://github.com/dotnet/machinelearning/pull/1664)) + + * [ML.NET + 0.7](https://github.com/dotnet/machinelearning/blob/483ec04a11fbdc056a88bc581d7e5cee9092a936/docs/release-notes/0.7/release-0.7.md) + included Matrix Factorization which enables using ratings provided by + users to recommend other items they might like. + * In some cases, you don't have specific ratings from users but only + implicit feedback (e.g. they watched the movie but didn't rate it). + * Matrix Factorization in ML.NET can now use this type of implicit data to + train models for recommendation scenarios. + * Example usage can be found + [here](https://github.com/dotnet/machinelearning/blob/71d58fa83f77abb630d815e5cf8aa9dd3390aa65/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs#L335). + You'll need to add the Microsoft.ML.MatrixFactorization NuGet to your + project. + +* Enabled saving and loading data as a binary file (IDataView/IDV) + ([#1678](https://github.com/dotnet/machinelearning/pull/1678)) + + * It is sometimes useful to save data after it has been transformed. For + example, you might have featurized all the text into sparse vectors and + want to perform repeated experimentation with different trainers without + continuously repeating the data transformation. + * Saving and loading files in ML.NET's binary format can help efficiency + as it is compressed and already schematized. + * Reading a binary data file can be done using + `mlContext.Data.ReadFromBinary("pathToFile")` and writing a binary data + file can be done using `mlContext.Data.SaveAsBinary("pathToFile")`. 
+ +* Added filtering and caching APIs + ([#1569](https://github.com/dotnet/machinelearning/pull/1569)) + + * There is sometimes a need to filter the data used for training a model. + For example, you need to remove rows that don't have a label, or focus + your model on certain categories of inputs. This can now be done with + additional filters as shown + [here](https://github.com/dotnet/machinelearning/blob/71d58fa83f77abb630d815e5cf8aa9dd3390aa65/test/Microsoft.ML.Tests/RangeFilterTests.cs#L30). + + * Some estimators iterate over the data multiple times. Instead of always + reading from file, you can choose to cache the data to potentially speed + things up. An example can be found + [here](https://github.com/dotnet/machinelearning/blob/71d58fa83f77abb630d815e5cf8aa9dd3390aa65/test/Microsoft.ML.Tests/CachingTests.cs#L56). + +### Acknowledgements + +Shoutout to [jwood803](https://github.com/jwood803), +[feiyun0112](https://github.com/feiyun0112), +[bojanmisic](https://github.com/bojanmisic), +[rantri](https://github.com/rantri), [Caraul](https://github.com/Caraul), +[van-tienhoang](https://github.com/van-tienhoang), +[Thomas-S-B](https://github.com/Thomas-S-B), and the ML.NET team for their +contributions as part of this release! 
\ No newline at end of file From bc7e0cdd2b8d233f1bef0ca66e030031c3759b15 Mon Sep 17 00:00:00 2001 From: jignparm Date: Sat, 1 Dec 2018 20:10:35 +0000 Subject: [PATCH 013/100] Update to use OnnxRuntime library instead of Sonoma (#1717) * update transform to use onnxruntime library instead of sonoma * Updated to conform to newer API * Resolved PR comments * minor fix * updated per PR comments * Resolved more PR comments * added functionality to pull input/output names from model, without user enumerating them explicitly * update version to mlscoring 0.1.5 --- build/Dependencies.props | 2 +- .../Dynamic/OnnxTransform.cs | 110 ++++++ .../Microsoft.ML.Samples.csproj | 1 + .../Microsoft.ML.OnnxTransform.nupkgproj | 2 +- .../Microsoft.ML.OnnxTransform.csproj | 2 +- .../OnnxTransform.cs | 143 +++++--- src/Microsoft.ML.OnnxTransform/OnnxUtils.cs | 337 ++++++------------ .../OnnxTransformTests.cs | 5 +- 8 files changed, 321 insertions(+), 281 deletions(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs diff --git a/build/Dependencies.props b/build/Dependencies.props index 46304fc56a..8d2f7abdc1 100644 --- a/build/Dependencies.props +++ b/build/Dependencies.props @@ -15,7 +15,7 @@ 3.5.1 2.2.1.1 - 1.1.0 + 0.1.5 0.0.0.7 2.1.3 4.5.0 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs new file mode 100644 index 0000000000..6db1884c00 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs @@ -0,0 +1,110 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using Microsoft.ML.OnnxRuntime; +using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Transforms; +using System; +using System.Linq; + +namespace Microsoft.ML.Samples.Dynamic +{ + class OnnxTransformExample + { + /// + /// Example use of OnnxEstimator in an ML.NET pipeline + /// + public static void OnnxTransformSample() + { + // Download the squeeznet image model from ONNX model zoo, version 1.2 + // https://github.com/onnx/models/tree/master/squeezenet + var modelPath = @"squeezenet\model.onnx"; + + // Inspect the model's inputs and outputs + var session = new InferenceSession(modelPath); + var inputInfo = session.InputMetadata.First(); + var outputInfo = session.OutputMetadata.First(); + Console.WriteLine($"Input Name is {String.Join(",", inputInfo.Key)}"); + Console.WriteLine($"Input Dimensions are {String.Join(",", inputInfo.Value.Dimensions)}"); + Console.WriteLine($"Output Name is {String.Join(",", outputInfo.Key)}"); + Console.WriteLine($"Output Dimensions are {String.Join(",", outputInfo.Value.Dimensions)}"); + // Results.. 
+ // Input Name is data_0 + // Input Dimensions are 1,3,224,224 + // Output Name is softmaxout_1 + // Output Dimensions are 1,1000,1,1 + + // Create ML pipeline to score the data using OnnxScoringEstimator + var mlContext = new MLContext(); + var data = GetTensorData(); + var idv = mlContext.CreateStreamingDataView(data); + var pipeline = new OnnxScoringEstimator(mlContext, modelPath, new[] { inputInfo.Key }, new[] { outputInfo.Key }); + + // Run the pipeline and get the transformed values + var transformedValues = pipeline.Fit(idv).Transform(idv); + + // Retrieve model scores into Prediction class + var predictions = transformedValues.AsEnumerable(mlContext, reuseRowObject: false); + + // Iterate rows + foreach (var prediction in predictions) + { + int numClasses = 0; + foreach (var classScore in prediction.softmaxout_1.Take(3)) + { + Console.WriteLine($"Class #{numClasses++} score = {classScore}"); + } + Console.WriteLine(new string('-', 10)); + } + + // Results look like below... + // Class #0 score = 4.544065E-05 + // Class #1 score = 0.003845858 + // Class #2 score = 0.0001249467 + // ---------- + // Class #0 score = 4.491953E-05 + // Class #1 score = 0.003848222 + // Class #2 score = 0.0001245592 + // ---------- + } + + /// + /// inputSize is the overall dimensions of the model input tensor. + /// + private const int inputSize = 224 * 224 * 3; + + /// + /// A class to hold sample tensor data. Member name should match + /// the inputs that the model expects (in this case, data_0) + /// + public class TensorData + { + [VectorType(inputSize)] + public float[] data_0 { get; set; } + } + + /// + /// Method to generate sample test data. Returns 2 sample rows. + /// + /// + public static TensorData[] GetTensorData() + { + // This can be any numerical data. Assume image pixel values. 
+ var image1 = Enumerable.Range(0, inputSize).Select(x => (float)x / inputSize).ToArray(); + var image2 = Enumerable.Range(0, inputSize).Select(x => (float)(x + 10000) / inputSize).ToArray(); + return new TensorData[] { new TensorData() { data_0 = image1 }, new TensorData() { data_0 = image2 } }; + } + + /// + /// Class to contain the output values from the transformation. + /// This model generates a vector of 1000 floats. + /// + class Prediction + { + [VectorType(1000)] + public float[] softmaxout_1 { get; set; } + } + } +} \ No newline at end of file diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj index abea9f3e8d..63568930aa 100644 --- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj +++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj @@ -13,6 +13,7 @@ + diff --git a/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj b/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj index 4d64d756fe..b817e809d1 100644 --- a/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj +++ b/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj @@ -7,7 +7,7 @@ - + diff --git a/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj b/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj index 2298f8d0d7..ce2ac23746 100644 --- a/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj +++ b/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj @@ -9,7 +9,7 @@ - + diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index b6eb29286e..e414261336 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -12,13 +12,13 @@ using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using 
Microsoft.ML.Scoring; +using Microsoft.ML.OnnxRuntime; using Microsoft.ML.Transforms; using Microsoft.ML.StaticPipe; using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using OnnxShape = System.Collections.Generic.List; +using OnnxShape = System.Collections.Generic.List; [assembly: LoadableClass(OnnxTransform.Summary, typeof(IDataTransform), typeof(OnnxTransform), typeof(OnnxTransform.Arguments), typeof(SignatureDataTransform), OnnxTransform.UserName, OnnxTransform.ShortName, "OnnxTransform", "OnnxScorer")] @@ -36,6 +36,22 @@ namespace Microsoft.ML.Transforms { + /// + ///

A transform for scoring ONNX models in the ML.NET framework.

+ /// + /// + /// + ///
+ /// + ///

Supports inferencing of models in 1.2 and 1.3 format, using the + /// Microsoft.ML.OnnxRuntime library + ///

+ ///

The inputs and outputs of the onnx models must of of Tensors. Sequence and Maps are not yet supported.

+ ///

Visit https://github.com/onnx/models to see a list of readily available models to get started with.

+ ///

Refer to http://onnx.ai' for more information about ONNX.

+ ///
public sealed class OnnxTransform : RowToRowTransformerBase { public sealed class Arguments : TransformInputBase @@ -75,6 +91,12 @@ private static VersionInfo GetVersionInfo() loaderAssemblyName: typeof(OnnxTransform).Assembly.FullName); } + public static IDataTransform Create(IHostEnvironment env, IDataView input, string modelFile) + { + var args = new Arguments { ModelFile = modelFile, InputColumns = new string[] { }, OutputColumns = new string[] { } }; + return Create(env, args, input); + } + public static IDataTransform Create(IHostEnvironment env, IDataView input, string modelFile, string[] inputColumns, string[] outputColumns) { var args = new Arguments { ModelFile = modelFile, InputColumns = inputColumns, OutputColumns = outputColumns }; @@ -147,15 +169,15 @@ private OnnxTransform(IHostEnvironment env, Arguments args, byte[] modelBytes = Model = OnnxModel.CreateFromBytes(modelBytes); var modelInfo = Model.ModelInfo; - Inputs = args.InputColumns; - Outputs = args.OutputColumns; - OutputTypes = new ColumnType[args.OutputColumns.Length]; + Inputs = (args.InputColumns.Count() == 0 ) ? Model.InputNames.ToArray() : args.InputColumns; + Outputs = (args.OutputColumns.Count() == 0 ) ? 
Model.OutputNames.ToArray() : args.OutputColumns; + OutputTypes = new ColumnType[Outputs.Length]; var numModelOutputs = Model.ModelInfo.OutputsInfo.Length; - for (int i=0; i < args.OutputColumns.Length; i++) + for (int i=0; i < Outputs.Length; i++) { - var idx = Model.OutputNames.IndexOf(args.OutputColumns[i]); + var idx = Model.OutputNames.IndexOf(Outputs[i]); if (idx < 0) - throw Host.Except($"Column {args.OutputColumns[i]} doesn't match output node names of model"); + throw Host.Except($"Column {Outputs[i]} doesn't match output node names of model"); var outputNodeInfo = Model.ModelInfo.OutputsInfo[idx]; var shape = outputNodeInfo.Shape; @@ -165,6 +187,11 @@ private OnnxTransform(IHostEnvironment env, Arguments args, byte[] modelBytes = _args = args; } + public OnnxTransform(IHostEnvironment env, string modelFile) + : this(env, new Arguments() { ModelFile = modelFile, InputColumns = new string[] { }, OutputColumns = new string[] { } }) + { + } + public OnnxTransform(IHostEnvironment env, string modelFile, string inputColumn, string outputColumn) : this(env, new Arguments() { ModelFile = modelFile, InputColumns = new[] { inputColumn }, OutputColumns = new[] { outputColumn } }) { @@ -223,7 +250,7 @@ private sealed class Mapper : MapperBase private readonly int[] _inputColIndices; private readonly bool[] _isInputVector; private readonly OnnxShape[] _inputTensorShapes; - private readonly DataType[] _inputOnnxTypes; + private readonly System.Type[] _inputOnnxTypes; public Mapper(OnnxTransform parent, Schema inputSchema) : base(Contracts.CheckRef(parent, nameof(parent)).Host.Register(nameof(Mapper)), inputSchema) @@ -233,7 +260,7 @@ public Mapper(OnnxTransform parent, Schema inputSchema) : _inputColIndices = new int[_parent.Inputs.Length]; _isInputVector = new bool[_parent.Inputs.Length]; _inputTensorShapes = new OnnxShape[_parent.Inputs.Length]; - _inputOnnxTypes = new DataType[_parent.Inputs.Length]; + _inputOnnxTypes = new System.Type[_parent.Inputs.Length]; var 
model = _parent.Model; for (int i = 0; i < _parent.Inputs.Length; i++) @@ -289,36 +316,39 @@ public override Func GetDependencies(Func activeOutput) public override void Save(ModelSaveContext ctx) => _parent.Save(ctx); - private interface ITensorValueGetter + private interface INamedOnnxValueGetter { - Tensor GetTensor(); + NamedOnnxValue GetNamedOnnxValue(); } private class OutputCache { public long Position; - public Dictionary Outputs; + public Dictionary Outputs; public OutputCache() { Position = -1; - Outputs = new Dictionary(); + Outputs = new Dictionary(); } } - private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) + private void UpdateCacheIfNeeded(long position, INamedOnnxValueGetter[] srcNamedOnnxValueGetters, string[] activeOutputColNames, OutputCache outputCache) { if (outputCache.Position != position) { - var inputTensors = new List(); + var inputNameOnnxValues = new List(); for (int i = 0; i < _inputColIndices.Length; i++) - inputTensors.Add(srcTensorGetters[i].GetTensor()); - - var outputTensors = _parent.Model.Run(inputTensors); - Contracts.Assert(outputTensors.Count > 0); + { + inputNameOnnxValues.Add(srcNamedOnnxValueGetters[i].GetNamedOnnxValue()); + } - for (int j = 0; j < outputTensors.Count; j++) - outputCache.Outputs[activeOutputColNames[j]] = outputTensors[j]; + var outputNamedOnnxValues = _parent.Model.Run(inputNameOnnxValues); + Contracts.Assert(outputNamedOnnxValues.Count > 0); + foreach (var outputNameOnnxValue in outputNamedOnnxValues) + { + outputCache.Outputs[outputNameOnnxValue.Name] = outputNameOnnxValue; + } outputCache.Position = position; } } @@ -333,93 +363,110 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac var activeOutputColNames = _parent.Outputs.Where((x, i) => activeOutput(i)).ToArray(); var type = OnnxUtils.OnnxToMlNetType(_parent.Model.ModelInfo.OutputsInfo[iinfo].Type).RawType; Host.Assert(type == 
_parent.OutputTypes[iinfo].ItemType.RawType); - var srcTensorGetters = GetTensorValueGetters(input, _inputColIndices, _isInputVector, _inputOnnxTypes, _inputTensorShapes); - return Utils.MarshalInvoke(MakeGetter, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache); + var srcNamedValueGetters = GetNamedOnnxValueGetters(input, _parent.Inputs, _inputColIndices, _isInputVector, _inputOnnxTypes, _inputTensorShapes); + return Utils.MarshalInvoke(MakeGetter, type, input, iinfo, srcNamedValueGetters, activeOutputColNames, outputCache); } - private Delegate MakeGetter(IRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) + private Delegate MakeGetter(IRow input, int iinfo, INamedOnnxValueGetter[] srcNamedValueGetters, string[] activeOutputColNames, OutputCache outputCache) { Host.AssertValue(input); ValueGetter> valuegetter = (ref VBuffer dst) => { - UpdateCacheIfNeeded(input.Position, srcTensorGetters, activeOutputColNames, outputCache); - var tensor = outputCache.Outputs[_parent.Outputs[iinfo]]; - var editor = VBufferEditor.Create(ref dst, tensor.GetSize()); - OnnxUtils.CopyTo(tensor, editor.Values); + UpdateCacheIfNeeded(input.Position, srcNamedValueGetters, activeOutputColNames, outputCache); + var namedOnnxValue = outputCache.Outputs[_parent.Outputs[iinfo]]; + var denseTensor = namedOnnxValue.AsTensor() as System.Numerics.Tensors.DenseTensor; + if (denseTensor == null) + throw Host.Except($"Output column {namedOnnxValue.Name} doesn't contain a DenseTensor of expected type {typeof(T)}"); + var editor = VBufferEditor.Create(ref dst, (int) denseTensor.Length); + denseTensor.Buffer.Span.CopyTo(editor.Values); dst = editor.Commit(); }; return valuegetter; } - private static ITensorValueGetter[] GetTensorValueGetters(IRow input, + private static INamedOnnxValueGetter[] GetNamedOnnxValueGetters(IRow input, + string[] inputColNames, int[] inputColIndices, bool[] isInputVector, - DataType[] 
onnxInputTypes, + System.Type[] onnxInputTypes, OnnxShape[] onnxInputShapes) { - var srcTensorGetters = new ITensorValueGetter[inputColIndices.Length]; + var srcNamedOnnxValueGetters = new INamedOnnxValueGetter[inputColIndices.Length]; for (int i = 0; i < inputColIndices.Length; i++) { int colIndex = inputColIndices[i]; - srcTensorGetters[i] = CreateTensorValueGetter(input, onnxInputTypes[i], isInputVector[i], colIndex, onnxInputShapes[i]); + srcNamedOnnxValueGetters[i] = CreateNamedOnnxValueGetter(input, onnxInputTypes[i], isInputVector[i], inputColNames[i], colIndex, onnxInputShapes[i]); } - return srcTensorGetters; + return srcNamedOnnxValueGetters; } - private static ITensorValueGetter CreateTensorValueGetter(IRow input, DataType onnxType, bool isVector, int colIndex, OnnxShape onnxShape) + private static INamedOnnxValueGetter CreateNamedOnnxValueGetter(IRow input, System.Type onnxType, bool isVector, string colName, int colIndex, OnnxShape onnxShape) { var type = OnnxUtils.OnnxToMlNetType(onnxType).RawType; Contracts.AssertValue(type); - return Utils.MarshalInvoke(CreateTensorValueGetter, type, input, isVector, colIndex, onnxShape); + return Utils.MarshalInvoke(CreateNameOnnxValueGetter, type, input, isVector, colName, colIndex, onnxShape); } - private static ITensorValueGetter CreateTensorValueGetter(IRow input, bool isVector, int colIndex, OnnxShape onnxShape) + private static INamedOnnxValueGetter CreateNameOnnxValueGetter(IRow input, bool isVector, string colName, int colIndex, OnnxShape onnxShape) { if (isVector) - return new TensorValueGetterVec(input, colIndex, onnxShape); - return new TensorValueGetter(input, colIndex); + return new NamedOnnxValueGetterVec(input, colName, colIndex, onnxShape); + return new NameOnnxValueGetter(input, colName, colIndex); } - private class TensorValueGetter : ITensorValueGetter + private class NameOnnxValueGetter : INamedOnnxValueGetter { private readonly ValueGetter _srcgetter; + private readonly string _colName; - 
public TensorValueGetter(IRow input, int colIndex) + public NameOnnxValueGetter(IRow input, string colName, int colIndex) { + _colName = colName; _srcgetter = input.GetGetter(colIndex); } - public Tensor GetTensor() + public NamedOnnxValue GetNamedOnnxValue() { var scalar = default(T); _srcgetter(ref scalar); - return OnnxUtils.CreateScalarTensor(scalar); + return OnnxUtils.CreateScalarNamedOnnxValue(_colName, scalar); } } - private class TensorValueGetterVec : ITensorValueGetter + private class NamedOnnxValueGetterVec : INamedOnnxValueGetter { private readonly ValueGetter> _srcgetter; private readonly OnnxShape _tensorShape; + private readonly string _colName; private VBuffer _vBuffer; private VBuffer _vBufferDense; - public TensorValueGetterVec(IRow input, int colIndex, OnnxShape tensorShape) + public NamedOnnxValueGetterVec(IRow input, string colName, int colIndex, OnnxShape tensorShape) { _srcgetter = input.GetGetter>(colIndex); _tensorShape = tensorShape; + _colName = colName; _vBuffer = default; _vBufferDense = default; } - public Tensor GetTensor() + public NamedOnnxValue GetNamedOnnxValue() { _srcgetter(ref _vBuffer); _vBuffer.CopyToDense(ref _vBufferDense); - return OnnxUtils.CreateTensor(_vBufferDense.GetValues(), _tensorShape); + return OnnxUtils.CreateNamedOnnxValue(_colName, _vBufferDense.GetValues(), _tensorShape); } } } } + + /// + /// A class implementing the estimator interface of the OnnxTransform. 
+ /// public sealed class OnnxScoringEstimator : TrivialEstimator { + public OnnxScoringEstimator(IHostEnvironment env, string modelFile) + : this(env, new OnnxTransform(env, modelFile, new string[] { }, new string[] { })) + { + } + public OnnxScoringEstimator(IHostEnvironment env, string modelFile, string[] inputs, string[] outputs) : this(env, new OnnxTransform(env, modelFile, inputs, outputs)) { @@ -459,7 +506,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { resultDic[Transformer.Outputs[i]] = new SchemaShape.Column(Transformer.Outputs[i], Transformer.OutputTypes[i].IsKnownSizeVector ? SchemaShape.Column.VectorKind.Vector - : SchemaShape.Column.VectorKind.VariableVector, NumberType.R4, false); + : SchemaShape.Column.VectorKind.VariableVector, Transformer.OutputTypes[i].ItemType, false); } return new SchemaShape(resultDic.Values); } diff --git a/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs index 611359d9cc..001f2ce9d8 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs @@ -4,40 +4,39 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.Scoring; +using Microsoft.ML.OnnxRuntime; +using System.Numerics.Tensors; using System; using System.Collections.Generic; using System.IO; using System.Linq; -using Microsoft.ML.StaticPipe; -using OnnxShape = System.Collections.Generic.List; -using Microsoft.ML.Data; +using OnnxShape = System.Collections.Generic.List; namespace Microsoft.ML.Transforms { /// - /// OnnxModel is a facad for ModelManager. ModelManager is provided by Sonoma API, - /// and it has a lot of functionality (multiple models, multiple versions) that are not - /// needed by Onnx transform, which only needs a single model. This facad simplifies the - /// usage of onnx model. 
+ /// OnnxModel is a utility class to load ONNX models and retrieve metadata + /// for inputs and outputs. The metadata includes the names, shapes and types + /// It provides API to open a session, score tensors (NamedOnnxValues) and return + /// the results. /// internal sealed class OnnxModel { + /// /// OnnxModelInfo contains the data that we should get from - /// Sonoma API once that functionality is added. + /// OnnxRuntime API once that functionality is added. /// public sealed class OnnxModelInfo { public readonly OnnxNodeInfo[] InputsInfo; public readonly OnnxNodeInfo[] OutputsInfo; - public OnnxModelInfo(OnnxNodeInfo[] inputsInfo, OnnxNodeInfo[] outputsInfo) + public OnnxModelInfo(IEnumerable inputsInfo, IEnumerable outputsInfo) { - InputsInfo = inputsInfo; - OutputsInfo = outputsInfo; + InputsInfo = inputsInfo.ToArray(); + OutputsInfo = outputsInfo.ToArray(); } } @@ -47,11 +46,20 @@ public OnnxModelInfo(OnnxNodeInfo[] inputsInfo, OnnxNodeInfo[] outputsInfo) ///
public class OnnxNodeInfo { + /// + /// The Name of the node + /// public readonly string Name; + /// + /// The shape of the node + /// public readonly OnnxShape Shape; - public readonly DataType Type; + /// + /// The type of the node + /// + public readonly System.Type Type; - public OnnxNodeInfo(string name, OnnxShape shape, DataType type) + public OnnxNodeInfo(string name, OnnxShape shape, System.Type type) { Name = name; Shape = shape; @@ -60,29 +68,25 @@ public OnnxNodeInfo(string name, OnnxShape shape, DataType type) } public readonly OnnxModelInfo ModelInfo; - - private static readonly int _ignoredVersion = int.MaxValue; - private readonly ModelManager _modelManager; + private readonly InferenceSession _session; private readonly string _modelFile; - private readonly string _modelName; public readonly List InputNames; public readonly List OutputNames; public OnnxModel(string modelFile) { _modelFile = modelFile; - - // Load the onnx model - var modelFileInfo = new FileInfo(modelFile); - _modelName = Path.GetFileNameWithoutExtension(modelFileInfo.Name); - _modelManager = new ModelManager(modelFileInfo.Directory.FullName, true); - _modelManager.InitOnnxModel(_modelName, _ignoredVersion); - + _session = new InferenceSession(modelFile); ModelInfo = new OnnxModelInfo(GetInputsInfo(), GetOutputsInfo()); InputNames = ModelInfo.InputsInfo.Select(i => i.Name).ToList(); OutputNames = ModelInfo.OutputsInfo.Select(i => i.Name).ToList(); } + /// + /// Create an OnnxModel from a byte[] + /// + /// + /// OnnxModel public static OnnxModel CreateFromBytes(byte[] modelBytes) { var tempModelDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); @@ -98,239 +102,114 @@ public static OnnxModel CreateFromBytes(byte[] modelBytes) // or keep the dir/file and write proper cleanup when application closes } - public List Run(List inputTensors) + /// + /// Uses an open session to score a list of NamedOnnxValues. 
+ /// + /// The NamedOnnxValues to score + /// Resulting output NamedOnnxValues list + public IReadOnlyCollection Run(List inputNamedOnnxValues) { - var outputTensors = _modelManager.RunModel( - _modelName, _ignoredVersion, InputNames, inputTensors, OutputNames); - - return outputTensors; + return _session.Run(inputNamedOnnxValues); } + /// + /// Convert the model to a byte array. + /// + /// byte[] public byte[] ToByteArray() { return File.ReadAllBytes(_modelFile); } - private OnnxNodeInfo[] GetInputsInfo() - { - return DictToNodesInfo( - _modelManager.GetInputTypeDict(_modelName, _ignoredVersion), - _modelManager.GetInputShapesDict(_modelName, _ignoredVersion)); - } - - private OnnxNodeInfo[] GetOutputsInfo() + /// + /// Returns input metadata of the ONNX model. + /// + /// OnnxNodeInfo[] + private IEnumerable GetInputsInfo() { - return DictToNodesInfo( - _modelManager.GetOutputTypeDict(_modelName, _ignoredVersion), - _modelManager.GetOutputShapesDict(_modelName, _ignoredVersion)); + return _session.InputMetadata.Select(kv => new OnnxNodeInfo(kv.Key, kv.Value.Dimensions.ToList(), kv.Value.ElementType)); } - private static OnnxNodeInfo[] DictToNodesInfo( - Dictionary typeDict, - Dictionary shapeDictArray) + /// + /// Returns output metadata of the ONNX model. 
+ /// + /// + private IEnumerable GetOutputsInfo() { - var shapeDict = new Dictionary>(); - foreach (var key in shapeDictArray.Keys) - shapeDict.Add(key, shapeDictArray[key].ToList()); - - var sameKey = typeDict.Count == shapeDict.Count && - typeDict.Keys.SequenceEqual(shapeDict.Keys); - Contracts.Assert(sameKey, "Type and shape dictionaries should have the same keys"); - return typeDict.Select(kv => new OnnxNodeInfo( - name: kv.Key, type: kv.Value, shape: shapeDict[kv.Key])).OrderBy(x => x.Name).ToArray(); + return _session.OutputMetadata.Select(kv => new OnnxNodeInfo(kv.Key, kv.Value.Dimensions.ToList(), kv.Value.ElementType)); } } internal sealed class OnnxUtils { - /// - /// Sonoma API only provides Tensor() constructors with overloaded - /// versions based on data type. - /// - - private static Dictionary _typeMap; - - public static Tensor CreateScalarTensor(T data) - { - if (typeof(T) == typeof(System.Boolean)) - { - return new Tensor((System.Boolean)(object)data); - } - else if (typeof(T) == typeof(System.Byte)) - { - return new Tensor((System.Byte)(object)data); - } - else if (typeof(T) == typeof(System.Char)) - { - return new Tensor((System.Char)(object)data); - } - else if (typeof(T) == typeof(System.Double)) - { - return new Tensor((System.Double)(object)data); - } - else if (typeof(T) == typeof(System.Single)) - { - return new Tensor((System.Single)(object)data); - } - else if (typeof(T) == typeof(System.Int32)) - { - return new Tensor((System.Int32)(object)data); - } - else if (typeof(T) == typeof(System.Int64)) - { - return new Tensor((System.Int64)(object)data); - } - else if (typeof(T) == typeof(System.SByte)) - { - return new Tensor((System.SByte)(object)data); - } - else if (typeof(T) == typeof(System.Int16)) - { - return new Tensor((System.Int16)(object)data); - } - else if (typeof(T) == typeof(System.UInt32)) - { - return new Tensor((System.UInt32)(object)data); - } - else if (typeof(T) == typeof(System.UInt64)) - { - return new 
Tensor((System.UInt64)(object)data); - } - else if (typeof(T) == typeof(System.UInt16)) - { - return new Tensor((System.UInt16)(object)data); - } - throw new NotSupportedException($"Unsupported type {typeof(T)}"); - } + private static HashSet _onnxTypeMap = + new HashSet + { + typeof(Double), + typeof(Single), + typeof(Int16), + typeof(Int32), + typeof(Int64), + typeof(UInt16), + typeof(UInt32), + typeof(UInt64) + }; + private static Dictionary _typeToKindMap= + new Dictionary + { + { typeof(Single) , DataKind.R4}, + { typeof(Double) , DataKind.R8}, + { typeof(Int16) , DataKind.I2}, + { typeof(Int32) , DataKind.I4}, + { typeof(Int64) , DataKind.I8}, + { typeof(UInt16) , DataKind.U2}, + { typeof(UInt32) , DataKind.U4}, + { typeof(UInt64) , DataKind.U8}, + { typeof(String) , DataKind.TX}, + { typeof(Boolean) , DataKind.BL}, + }; /// - /// Sonoma API only provides Tensor() constructors with overloaded versions - /// based on data type. ML.NET cannot use the overloaded version and requires - /// generic version. CreateTensor<T> is generic wrapper on top of - /// overloaded Tensor(T[] data, OnnxShape shape) constructors. + /// Creates a NamedOnnxValue from a scalar value. 
/// - public static Tensor CreateTensor(ReadOnlySpan data, OnnxShape shape) + /// The type of the Tensor contained in the NamedOnnxValue + /// The name of the NamedOnnxValue + /// The data values of the Tensor + /// NamedOnnxValue + public static NamedOnnxValue CreateScalarNamedOnnxValue(string name, T data) { - if (typeof(T) == typeof(System.Boolean)) - { - return new Tensor((System.Boolean[])(object)data.ToArray(), shape.ToArray()); - } - else if (typeof(T) == typeof(System.Double)) - { - return new Tensor((System.Double[])(object)data.ToArray(), shape.ToArray()); - } - else if (typeof(T) == typeof(System.Single)) - { - return new Tensor((System.Single[])(object)data.ToArray(), shape.ToArray()); - } - else if (typeof(T) == typeof(System.Int32)) - { - return new Tensor((System.Int32[])(object)data.ToArray(), shape.ToArray()); - } - else if (typeof(T) == typeof(System.Int64)) - { - return new Tensor((System.Int64[])(object)data.ToArray(), shape.ToArray()); - } - throw new NotImplementedException($"Not implemented type {typeof(T)}"); + if (!_onnxTypeMap.Contains(typeof(T))) + throw new NotImplementedException($"Not implemented type {typeof(T)}"); + return NamedOnnxValue.CreateFromTensor(name, new DenseTensor(new T[] { data }, new int[] { 1 })); } /// - /// Sonoma API only provides CopyTo() functions with overloaded versions - /// based on data type. ML.NET cannot use the overloaded version and requires - /// generic version. CopyTo<T> is generic wrapper on top of - /// overloaded Tensor.CopyTo(List<T> dst) methods. - /// Also Tensor.CopyTo(List<T> dst) requires a list input, whereas ML.NET - /// provides array buffers to copy values to. This mismatch causes an extra copy. + /// Create a NamedOnnxValue from vbuffer span. Checks if the tensor type + /// is supported by OnnxRuntime prior to execution. 
/// - public static unsafe void CopyTo(Tensor tensor, Span dst) + /// The type of the Tensor contained in the NamedOnnxValue + /// The name of the NamedOnnxValue + /// A span containing the data + /// The shape of the Tensor being created + /// NamedOnnxValue + public static NamedOnnxValue CreateNamedOnnxValue(string name, ReadOnlySpan data, OnnxShape shape) { - var typeMap = SystemTypeToOnnxType(); - if (typeMap.ContainsKey(typeof(T))) - { - if (tensor.GetDataType() != typeMap[typeof(T)]) - { - throw new InvalidOperationException( string.Format("Cannot copy source tensor of type {0} to managed type {1}.", tensor.GetDataType(), typeof(T))); - } - Span tensorSpan = new Span(tensor.UnsafeGetData().ToPointer(), tensor.GetSize()); - tensorSpan.CopyTo(dst); - // TODO: the CopyTo() function is susceptible to GC reclaiming tensor - // during the method call. Use KeepAlive for now, and remove - // after permanent fix in CopyTo(). - } - else + if (!_onnxTypeMap.Contains(typeof(T))) throw new NotImplementedException($"Not implemented type {typeof(T)}"); - GC.KeepAlive(tensor); + return NamedOnnxValue.CreateFromTensor(name, new DenseTensor(data.ToArray(), shape.Select(x => (int)x).ToArray())); } - public static PrimitiveType OnnxToMlNetType(DataType type) - { - DataKind kind; - switch (type) - { - case DataType.Type_Float: - kind = DataKind.R4; - break; - - case DataType.Type_Double: - kind = DataKind.R8; - break; - - case DataType.Type_Int8: - kind = DataKind.I1; - break; - - case DataType.Type_Int16: - kind = DataKind.I2; - break; - - case DataType.Type_Int32: - kind = DataKind.I4; - break; - - case DataType.Type_Int64: - kind = DataKind.I8; - break; - - case DataType.Type_Uint8: - kind = DataKind.U1; - break; - - case DataType.Type_Uint16: - kind = DataKind.U2; - break; - - case DataType.Type_String: - kind = DataKind.TX; - break; - - case DataType.Type_Bool: - kind = DataKind.BL; - break; - - case DataType.Type_Invalid: - default: - throw Contracts.ExceptNotSupp("Onnx 
type not supported", type); - } - - return PrimitiveType.FromKind(kind); - } - - internal static Dictionary SystemTypeToOnnxType() + /// + /// Converts a Onnx type, that follows the System.Type convention + /// to the type system ML.NET recognizes (e.g. I4, I8, R4 etc.) + /// + /// + /// + public static PrimitiveType OnnxToMlNetType(System.Type type) { - if (_typeMap == null) - { - _typeMap = new Dictionary - { - { typeof(Boolean) , DataType.Type_Bool }, - { typeof(Double) , DataType.Type_Double }, - { typeof(Single) , DataType.Type_Float }, - { typeof(Int16) , DataType.Type_Int16 }, - { typeof(Int32) , DataType.Type_Int32 }, - { typeof(Int64) , DataType.Type_Int64 }, - { typeof(UInt16) , DataType.Type_Uint16 } - }; - } - return _typeMap; + if (!_typeToKindMap.ContainsKey(type)) + throw Contracts.ExceptNotSupp("Onnx type not supported", type); + return PrimitiveType.FromKind(_typeToKindMap[type]); } } } diff --git a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs index af6a26d440..cf60bf28d3 100644 --- a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs @@ -13,6 +13,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Linq; using System.Runtime.InteropServices; using Xunit; using Xunit.Abstractions; @@ -298,8 +299,10 @@ public void OnnxModelMultiInput() { getScoresa(ref buffera); getScoresb(ref bufferb); - Console.WriteLine(buffera.GetValues().ToArray()); Assert.Equal(5, buffera.Length); + Assert.Equal(5, bufferb.Length); + Assert.Equal(0, buffera.GetValues().ToArray().Sum()); + Assert.Equal(30, bufferb.GetValues().ToArray().Sum()); } } } From 05284d62b167a4a8b98fdfabbba178b7609071b6 Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Sat, 1 Dec 2018 14:56:57 -0800 Subject: [PATCH 014/100] Fix CodeGen command and add a unit test (#1654) * Fix CodeGen command and add a unit test * Disable new unit 
test on Linux and Mac * PR comments * Address PR comments * Fix generated code template * Update baseline * Update baseline --- src/Microsoft.ML.Api/GenerateCodeCommand.cs | 1 - .../GeneratedCodeTemplate.csresource | 13 +- .../Command/CommandShowSchemaModel-out.txt | 273 ++++++++++++++++++ .../Common/Command/codegen-out.cs | 88 ++++++ .../TestParallelFasttreeInterface.cs | 5 +- .../BaseTestBaseline.cs | 7 +- .../EnvironmentExtensions.cs | 2 + .../TestCommandBase.cs | 60 ++-- 8 files changed, 423 insertions(+), 26 deletions(-) create mode 100644 test/BaselineOutput/Common/Command/CommandShowSchemaModel-out.txt create mode 100644 test/BaselineOutput/Common/Command/codegen-out.cs diff --git a/src/Microsoft.ML.Api/GenerateCodeCommand.cs b/src/Microsoft.ML.Api/GenerateCodeCommand.cs index 4855a94219..44fe07479e 100644 --- a/src/Microsoft.ML.Api/GenerateCodeCommand.cs +++ b/src/Microsoft.ML.Api/GenerateCodeCommand.cs @@ -121,7 +121,6 @@ public void Run() bool isScoreColumn = scorer.Schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.ScoreColumnSetId, i) != null; var sb = isScoreColumn ? scoreSb : nonScoreSb; - if (sb.Length > 0) sb.AppendLine(); diff --git a/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource b/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource index c75126e377..b550860bdc 100644 --- a/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource +++ b/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; -using Microsoft.ML; +using Microsoft.ML.Legacy; +using Microsoft.ML.Runtime.Api; namespace MLGeneratedCode { @@ -31,6 +32,14 @@ public class ScoredOutput { /*#SCORE_CLASS_DECL#*/ /*#/SCORE_CLASS_DECL#*/ + + // These are all remaining available columns, either supplied as the input, or intermediate + // columns generated by the transforms. Materializing these columns has a performance cost, + // so they are commented out. Feel free to uncomment any column that is useful for your scenario. 
+#if false +/*#SCORED_EXAMPLE_CLASS_DECL#*/ +/*#/SCORED_EXAMPLE_CLASS_DECL#*/ +#endif } /*public static void Main(string[] args) @@ -45,7 +54,7 @@ PredictAsync(modelPath); /// This method demonstrates how to run prediction. /// ///
-public static void Predict(string modelPath) +public static async void PredictAsync(string modelPath) { var model = await PredictionModel.ReadAsync(modelPath); diff --git a/test/BaselineOutput/Common/Command/CommandShowSchemaModel-out.txt b/test/BaselineOutput/Common/Command/CommandShowSchemaModel-out.txt new file mode 100644 index 0000000000..fcb90ce80c --- /dev/null +++ b/test/BaselineOutput/Common/Command/CommandShowSchemaModel-out.txt @@ -0,0 +1,273 @@ +---- BoundLoader ---- +3 columns: + NumFeatures: Vec + Metadata 'SlotNames': Vec: Length=6, Count=6 + [0] 'age', [1] 'fnlwgt', [2] 'education-num', [3] 'capital-gain', [4] 'capital-loss', [5] 'hours-per-week' + CatFeaturesText: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + Label: R4 +---- RowToRowMapperTransform ---- +4 columns: + NumFeatures: Vec + Metadata 'SlotNames': Vec: Length=6, Count=6 + [0] 'age', [1] 'fnlwgt', [2] 'education-num', [3] 'capital-gain', [4] 'capital-loss', [5] 'hours-per-week' + CatFeaturesText: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + Label: R4 + CatFeatures: Vec, 8> + Metadata 'KeyValues': Vec: Length=76, Count=76 + [0] 'Private', [1] '11th', [2] 'Never-married', [3] 'Machine-op-inspct', [4] 'Own-child', [5] 'Black', [6] 'Male', [7] 'United-States', [8] 'HS-grad', [9] 'Married-civ-spouse' + [10] 'Farming-fishing', [11] 'Husband', [12] 'White', [13] 'Local-gov', [14] 'Assoc-acdm', [15] 'Protective-serv', [16] 'Some-college', [17] '?', [18] 'Female', [19] '10th' + [20] 'Other-service', [21] 'Not-in-family', [22] 'Unmarried', [23] 'Self-emp-not-inc', [24] 'Prof-school', [25] 'Prof-specialty', [26] '7th-8th', [27] 'Craft-repair', [28] 'Federal-gov', [29] 'Bachelors' + [30] 
'Adm-clerical', [31] 'Masters', [32] 'Exec-managerial', [33] 'State-gov', [34] 'Wife', [35] 'Widowed', [36] 'Doctorate', [37] 'Asian-Pac-Islander', [38] 'Tech-support', [39] 'Divorced' + [40] 'Peru', [41] 'Separated', [42] 'Sales', [43] '5th-6th', [44] 'Priv-house-serv', [45] 'Guatemala', [46] 'Self-emp-inc', [47] 'Assoc-voc', [48] 'Mexico', [49] 'Transport-moving' + [50] 'Handlers-cleaners', [51] '9th', [52] 'Married-spouse-absent', [53] 'Other', [54] 'Dominican-Republic', [55] 'Armed-Forces', [56] 'Amer-Indian-Inuit', [57] 'Ireland', [58] 'Germany', [59] '12th' + [60] 'Other-relative', [61] 'Philippines', [62] 'Thailand', [63] 'Haiti', [64] 'El-Salvador', [65] 'Puerto-Rico', [66] 'Vietnam', [67] '1st-4th', [68] 'South', [69] 'Married-AF-spouse' + [70] 'Columbia', [71] 'Japan', [72] 'India', [73] 'Cambodia', [74] 'Poland', [75] 'Laos' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' +---- RowToRowMapperTransform ---- +5 columns: + NumFeatures: Vec + Metadata 'SlotNames': Vec: Length=6, Count=6 + [0] 'age', [1] 'fnlwgt', [2] 'education-num', [3] 'capital-gain', [4] 'capital-loss', [5] 'hours-per-week' + CatFeaturesText: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + Label: R4 + CatFeatures: Vec, 8> + Metadata 'KeyValues': Vec: Length=76, Count=76 + [0] 'Private', [1] '11th', [2] 'Never-married', [3] 'Machine-op-inspct', [4] 'Own-child', [5] 'Black', [6] 'Male', [7] 'United-States', [8] 'HS-grad', [9] 'Married-civ-spouse' + [10] 'Farming-fishing', [11] 'Husband', [12] 'White', [13] 'Local-gov', [14] 'Assoc-acdm', [15] 'Protective-serv', [16] 'Some-college', [17] '?', [18] 'Female', [19] '10th' + [20] 'Other-service', [21] 'Not-in-family', [22] 'Unmarried', [23] 
'Self-emp-not-inc', [24] 'Prof-school', [25] 'Prof-specialty', [26] '7th-8th', [27] 'Craft-repair', [28] 'Federal-gov', [29] 'Bachelors' + [30] 'Adm-clerical', [31] 'Masters', [32] 'Exec-managerial', [33] 'State-gov', [34] 'Wife', [35] 'Widowed', [36] 'Doctorate', [37] 'Asian-Pac-Islander', [38] 'Tech-support', [39] 'Divorced' + [40] 'Peru', [41] 'Separated', [42] 'Sales', [43] '5th-6th', [44] 'Priv-house-serv', [45] 'Guatemala', [46] 'Self-emp-inc', [47] 'Assoc-voc', [48] 'Mexico', [49] 'Transport-moving' + [50] 'Handlers-cleaners', [51] '9th', [52] 'Married-spouse-absent', [53] 'Other', [54] 'Dominican-Republic', [55] 'Armed-Forces', [56] 'Amer-Indian-Inuit', [57] 'Ireland', [58] 'Germany', [59] '12th' + [60] 'Other-relative', [61] 'Philippines', [62] 'Thailand', [63] 'Haiti', [64] 'El-Salvador', [65] 'Puerto-Rico', [66] 'Vietnam', [67] '1st-4th', [68] 'South', [69] 'Married-AF-spouse' + [70] 'Columbia', [71] 'Japan', [72] 'India', [73] 'Cambodia', [74] 'Poland', [75] 'Laos' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + CatFeatures: Vec + Metadata 'CategoricalSlotRanges': Vec: Length=16, Count=16 + [0] '0', [1] '75', [2] '76', [3] '151', [4] '152', [5] '227', [6] '228', [7] '303', [8] '304', [9] '379' + [10] '380', [11] '455', [12] '456', [13] '531', [14] '532', [15] '607' + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=608, Count=608 + [0] 'Workclass.Private', [1] 'Workclass.11th', [2] 'Workclass.Never-married', [3] 'Workclass.Machine-op-inspct', [4] 'Workclass.Own-child', [5] 'Workclass.Black', [6] 'Workclass.Male', [7] 'Workclass.United-States', [8] 'Workclass.HS-grad', [9] 'Workclass.Married-civ-spouse' + [10] 'Workclass.Farming-fishing', [11] 'Workclass.Husband', [12] 'Workclass.White', [13] 'Workclass.Local-gov', [14] 'Workclass.Assoc-acdm', [15] 'Workclass.Protective-serv', [16] 
'Workclass.Some-college', [17] 'Workclass.?', [18] 'Workclass.Female', [19] 'Workclass.10th' + [20] 'Workclass.Other-service', [21] 'Workclass.Not-in-family', [22] 'Workclass.Unmarried', [23] 'Workclass.Self-emp-not-inc', [24] 'Workclass.Prof-school', [25] 'Workclass.Prof-specialty', [26] 'Workclass.7th-8th', [27] 'Workclass.Craft-repair', [28] 'Workclass.Federal-gov', [29] 'Workclass.Bachelors' + [30] 'Workclass.Adm-clerical', [31] 'Workclass.Masters', [32] 'Workclass.Exec-managerial', [33] 'Workclass.State-gov', [34] 'Workclass.Wife', [35] 'Workclass.Widowed', [36] 'Workclass.Doctorate', [37] 'Workclass.Asian-Pac-Islander', [38] 'Workclass.Tech-support', [39] 'Workclass.Divorced' + [40] 'Workclass.Peru', [41] 'Workclass.Separated', [42] 'Workclass.Sales', [43] 'Workclass.5th-6th', [44] 'Workclass.Priv-house-serv', [45] 'Workclass.Guatemala', [46] 'Workclass.Self-emp-inc', [47] 'Workclass.Assoc-voc', [48] 'Workclass.Mexico', [49] 'Workclass.Transport-moving' + [50] 'Workclass.Handlers-cleaners', [51] 'Workclass.9th', [52] 'Workclass.Married-spouse-absent', [53] 'Workclass.Other', [54] 'Workclass.Dominican-Republic', [55] 'Workclass.Armed-Forces', [56] 'Workclass.Amer-Indian-Inuit', [57] 'Workclass.Ireland', [58] 'Workclass.Germany', [59] 'Workclass.12th' + [60] 'Workclass.Other-relative', [61] 'Workclass.Philippines', [62] 'Workclass.Thailand', [63] 'Workclass.Haiti', [64] 'Workclass.El-Salvador', [65] 'Workclass.Puerto-Rico', [66] 'Workclass.Vietnam', [67] 'Workclass.1st-4th', [68] 'Workclass.South', [69] 'Workclass.Married-AF-spouse' + [70] 'Workclass.Columbia', [71] 'Workclass.Japan', [72] 'Workclass.India', [73] 'Workclass.Cambodia', [74] 'Workclass.Poland', [75] 'Workclass.Laos', [76] 'education.Private', [77] 'education.11th', [78] 'education.Never-married', [79] 'education.Machine-op-inspct' + [80] 'education.Own-child', [81] 'education.Black', [82] 'education.Male', [83] 'education.United-States', [84] 'education.HS-grad', [85] 
'education.Married-civ-spouse', [86] 'education.Farming-fishing', [87] 'education.Husband', [88] 'education.White', [89] 'education.Local-gov' + [90] 'education.Assoc-acdm', [91] 'education.Protective-serv', [92] 'education.Some-college', [93] 'education.?', [94] 'education.Female', [95] 'education.10th', [96] 'education.Other-service', [97] 'education.Not-in-family', [98] 'education.Unmarried', [99] 'education.Self-emp-not-inc' + [100] 'education.Prof-school', [101] 'education.Prof-specialty', [102] 'education.7th-8th', [103] 'education.Craft-repair', [104] 'education.Federal-gov', [105] 'education.Bachelors', [106] 'education.Adm-clerical', [107] 'education.Masters', [108] 'education.Exec-managerial', [109] 'education.State-gov' + [110] 'education.Wife', [111] 'education.Widowed', [112] 'education.Doctorate', [113] 'education.Asian-Pac-Islander', [114] 'education.Tech-support', [115] 'education.Divorced', [116] 'education.Peru', [117] 'education.Separated', [118] 'education.Sales', [119] 'education.5th-6th' + [120] 'education.Priv-house-serv', [121] 'education.Guatemala', [122] 'education.Self-emp-inc', [123] 'education.Assoc-voc', [124] 'education.Mexico', [125] 'education.Transport-moving', [126] 'education.Handlers-cleaners', [127] 'education.9th', [128] 'education.Married-spouse-absent', [129] 'education.Other' + [130] 'education.Dominican-Republic', [131] 'education.Armed-Forces', [132] 'education.Amer-Indian-Inuit', [133] 'education.Ireland', [134] 'education.Germany', [135] 'education.12th', [136] 'education.Other-relative', [137] 'education.Philippines', [138] 'education.Thailand', [139] 'education.Haiti' + [140] 'education.El-Salvador', [141] 'education.Puerto-Rico', [142] 'education.Vietnam', [143] 'education.1st-4th', [144] 'education.South', [145] 'education.Married-AF-spouse', [146] 'education.Columbia', [147] 'education.Japan', [148] 'education.India', [149] 'education.Cambodia' + [150] 'education.Poland', [151] 'education.Laos', [152] 
'marital-status.Private', [153] 'marital-status.11th', [154] 'marital-status.Never-married', [155] 'marital-status.Machine-op-inspct', [156] 'marital-status.Own-child', [157] 'marital-status.Black', [158] 'marital-status.Male', [159] 'marital-status.United-States' + [160] 'marital-status.HS-grad', [161] 'marital-status.Married-civ-spouse', [162] 'marital-status.Farming-fishing', [163] 'marital-status.Husband', [164] 'marital-status.White', [165] 'marital-status.Local-gov', [166] 'marital-status.Assoc-acdm', [167] 'marital-status.Protective-serv', [168] 'marital-status.Some-college', [169] 'marital-status.?' + [170] 'marital-status.Female', [171] 'marital-status.10th', [172] 'marital-status.Other-service', [173] 'marital-status.Not-in-family', [174] 'marital-status.Unmarried', [175] 'marital-status.Self-emp-not-inc', [176] 'marital-status.Prof-school', [177] 'marital-status.Prof-specialty', [178] 'marital-status.7th-8th', [179] 'marital-status.Craft-repair' + [180] 'marital-status.Federal-gov', [181] 'marital-status.Bachelors', [182] 'marital-status.Adm-clerical', [183] 'marital-status.Masters', [184] 'marital-status.Exec-managerial', [185] 'marital-status.State-gov', [186] 'marital-status.Wife', [187] 'marital-status.Widowed', [188] 'marital-status.Doctorate', [189] 'marital-status.Asian-Pac-Islander' + [190] 'marital-status.Tech-support', [191] 'marital-status.Divorced', [192] 'marital-status.Peru', [193] 'marital-status.Separated', [194] 'marital-status.Sales', [195] 'marital-status.5th-6th', [196] 'marital-status.Priv-house-serv', [197] 'marital-status.Guatemala', [198] 'marital-status.Self-emp-inc', [199] 'marital-status.Assoc-voc' + [200] 'marital-status.Mexico', [201] 'marital-status.Transport-moving', [202] 'marital-status.Handlers-cleaners', [203] 'marital-status.9th', [204] 'marital-status.Married-spouse-absent', [205] 'marital-status.Other', [206] 'marital-status.Dominican-Republic', [207] 'marital-status.Armed-Forces', [208] 
'marital-status.Amer-Indian-Inuit', [209] 'marital-status.Ireland' + [210] 'marital-status.Germany', [211] 'marital-status.12th', [212] 'marital-status.Other-relative', [213] 'marital-status.Philippines', [214] 'marital-status.Thailand', [215] 'marital-status.Haiti', [216] 'marital-status.El-Salvador', [217] 'marital-status.Puerto-Rico', [218] 'marital-status.Vietnam', [219] 'marital-status.1st-4th' + [220] 'marital-status.South', [221] 'marital-status.Married-AF-spouse', [222] 'marital-status.Columbia', [223] 'marital-status.Japan', [224] 'marital-status.India', [225] 'marital-status.Cambodia', [226] 'marital-status.Poland', [227] 'marital-status.Laos', [228] 'occupation.Private', [229] 'occupation.11th' + [230] 'occupation.Never-married', [231] 'occupation.Machine-op-inspct', [232] 'occupation.Own-child', [233] 'occupation.Black', [234] 'occupation.Male', [235] 'occupation.United-States', [236] 'occupation.HS-grad', [237] 'occupation.Married-civ-spouse', [238] 'occupation.Farming-fishing', [239] 'occupation.Husband' + [240] 'occupation.White', [241] 'occupation.Local-gov', [242] 'occupation.Assoc-acdm', [243] 'occupation.Protective-serv', [244] 'occupation.Some-college', [245] 'occupation.?', [246] 'occupation.Female', [247] 'occupation.10th', [248] 'occupation.Other-service', [249] 'occupation.Not-in-family' + [250] 'occupation.Unmarried', [251] 'occupation.Self-emp-not-inc', [252] 'occupation.Prof-school', [253] 'occupation.Prof-specialty', [254] 'occupation.7th-8th', [255] 'occupation.Craft-repair', [256] 'occupation.Federal-gov', [257] 'occupation.Bachelors', [258] 'occupation.Adm-clerical', [259] 'occupation.Masters' + [260] 'occupation.Exec-managerial', [261] 'occupation.State-gov', [262] 'occupation.Wife', [263] 'occupation.Widowed', [264] 'occupation.Doctorate', [265] 'occupation.Asian-Pac-Islander', [266] 'occupation.Tech-support', [267] 'occupation.Divorced', [268] 'occupation.Peru', [269] 'occupation.Separated' + [270] 'occupation.Sales', [271] 
'occupation.5th-6th', [272] 'occupation.Priv-house-serv', [273] 'occupation.Guatemala', [274] 'occupation.Self-emp-inc', [275] 'occupation.Assoc-voc', [276] 'occupation.Mexico', [277] 'occupation.Transport-moving', [278] 'occupation.Handlers-cleaners', [279] 'occupation.9th' + [280] 'occupation.Married-spouse-absent', [281] 'occupation.Other', [282] 'occupation.Dominican-Republic', [283] 'occupation.Armed-Forces', [284] 'occupation.Amer-Indian-Inuit', [285] 'occupation.Ireland', [286] 'occupation.Germany', [287] 'occupation.12th', [288] 'occupation.Other-relative', [289] 'occupation.Philippines' + [290] 'occupation.Thailand', [291] 'occupation.Haiti', [292] 'occupation.El-Salvador', [293] 'occupation.Puerto-Rico', [294] 'occupation.Vietnam', [295] 'occupation.1st-4th', [296] 'occupation.South', [297] 'occupation.Married-AF-spouse', [298] 'occupation.Columbia', [299] 'occupation.Japan' + [300] 'occupation.India', [301] 'occupation.Cambodia', [302] 'occupation.Poland', [303] 'occupation.Laos', [304] 'relationship.Private', [305] 'relationship.11th', [306] 'relationship.Never-married', [307] 'relationship.Machine-op-inspct', [308] 'relationship.Own-child', [309] 'relationship.Black' + [310] 'relationship.Male', [311] 'relationship.United-States', [312] 'relationship.HS-grad', [313] 'relationship.Married-civ-spouse', [314] 'relationship.Farming-fishing', [315] 'relationship.Husband', [316] 'relationship.White', [317] 'relationship.Local-gov', [318] 'relationship.Assoc-acdm', [319] 'relationship.Protective-serv' + [320] 'relationship.Some-college', [321] 'relationship.?', [322] 'relationship.Female', [323] 'relationship.10th', [324] 'relationship.Other-service', [325] 'relationship.Not-in-family', [326] 'relationship.Unmarried', [327] 'relationship.Self-emp-not-inc', [328] 'relationship.Prof-school', [329] 'relationship.Prof-specialty' + [330] 'relationship.7th-8th', [331] 'relationship.Craft-repair', [332] 'relationship.Federal-gov', [333] 'relationship.Bachelors', 
[334] 'relationship.Adm-clerical', [335] 'relationship.Masters', [336] 'relationship.Exec-managerial', [337] 'relationship.State-gov', [338] 'relationship.Wife', [339] 'relationship.Widowed' + [340] 'relationship.Doctorate', [341] 'relationship.Asian-Pac-Islander', [342] 'relationship.Tech-support', [343] 'relationship.Divorced', [344] 'relationship.Peru', [345] 'relationship.Separated', [346] 'relationship.Sales', [347] 'relationship.5th-6th', [348] 'relationship.Priv-house-serv', [349] 'relationship.Guatemala' + [350] 'relationship.Self-emp-inc', [351] 'relationship.Assoc-voc', [352] 'relationship.Mexico', [353] 'relationship.Transport-moving', [354] 'relationship.Handlers-cleaners', [355] 'relationship.9th', [356] 'relationship.Married-spouse-absent', [357] 'relationship.Other', [358] 'relationship.Dominican-Republic', [359] 'relationship.Armed-Forces' + [360] 'relationship.Amer-Indian-Inuit', [361] 'relationship.Ireland', [362] 'relationship.Germany', [363] 'relationship.12th', [364] 'relationship.Other-relative', [365] 'relationship.Philippines', [366] 'relationship.Thailand', [367] 'relationship.Haiti', [368] 'relationship.El-Salvador', [369] 'relationship.Puerto-Rico' + [370] 'relationship.Vietnam', [371] 'relationship.1st-4th', [372] 'relationship.South', [373] 'relationship.Married-AF-spouse', [374] 'relationship.Columbia', [375] 'relationship.Japan', [376] 'relationship.India', [377] 'relationship.Cambodia', [378] 'relationship.Poland', [379] 'relationship.Laos' + [380] 'ethnicity.Private', [381] 'ethnicity.11th', [382] 'ethnicity.Never-married', [383] 'ethnicity.Machine-op-inspct', [384] 'ethnicity.Own-child', [385] 'ethnicity.Black', [386] 'ethnicity.Male', [387] 'ethnicity.United-States', [388] 'ethnicity.HS-grad', [389] 'ethnicity.Married-civ-spouse' + [390] 'ethnicity.Farming-fishing', [391] 'ethnicity.Husband', [392] 'ethnicity.White', [393] 'ethnicity.Local-gov', [394] 'ethnicity.Assoc-acdm', [395] 'ethnicity.Protective-serv', [396] 
'ethnicity.Some-college', [397] 'ethnicity.?', [398] 'ethnicity.Female', [399] 'ethnicity.10th' + [400] 'ethnicity.Other-service', [401] 'ethnicity.Not-in-family', [402] 'ethnicity.Unmarried', [403] 'ethnicity.Self-emp-not-inc', [404] 'ethnicity.Prof-school', [405] 'ethnicity.Prof-specialty', [406] 'ethnicity.7th-8th', [407] 'ethnicity.Craft-repair', [408] 'ethnicity.Federal-gov', [409] 'ethnicity.Bachelors' + [410] 'ethnicity.Adm-clerical', [411] 'ethnicity.Masters', [412] 'ethnicity.Exec-managerial', [413] 'ethnicity.State-gov', [414] 'ethnicity.Wife', [415] 'ethnicity.Widowed', [416] 'ethnicity.Doctorate', [417] 'ethnicity.Asian-Pac-Islander', [418] 'ethnicity.Tech-support', [419] 'ethnicity.Divorced' + [420] 'ethnicity.Peru', [421] 'ethnicity.Separated', [422] 'ethnicity.Sales', [423] 'ethnicity.5th-6th', [424] 'ethnicity.Priv-house-serv', [425] 'ethnicity.Guatemala', [426] 'ethnicity.Self-emp-inc', [427] 'ethnicity.Assoc-voc', [428] 'ethnicity.Mexico', [429] 'ethnicity.Transport-moving' + [430] 'ethnicity.Handlers-cleaners', [431] 'ethnicity.9th', [432] 'ethnicity.Married-spouse-absent', [433] 'ethnicity.Other', [434] 'ethnicity.Dominican-Republic', [435] 'ethnicity.Armed-Forces', [436] 'ethnicity.Amer-Indian-Inuit', [437] 'ethnicity.Ireland', [438] 'ethnicity.Germany', [439] 'ethnicity.12th' + [440] 'ethnicity.Other-relative', [441] 'ethnicity.Philippines', [442] 'ethnicity.Thailand', [443] 'ethnicity.Haiti', [444] 'ethnicity.El-Salvador', [445] 'ethnicity.Puerto-Rico', [446] 'ethnicity.Vietnam', [447] 'ethnicity.1st-4th', [448] 'ethnicity.South', [449] 'ethnicity.Married-AF-spouse' + [450] 'ethnicity.Columbia', [451] 'ethnicity.Japan', [452] 'ethnicity.India', [453] 'ethnicity.Cambodia', [454] 'ethnicity.Poland', [455] 'ethnicity.Laos', [456] 'sex.Private', [457] 'sex.11th', [458] 'sex.Never-married', [459] 'sex.Machine-op-inspct' + [460] 'sex.Own-child', [461] 'sex.Black', [462] 'sex.Male', [463] 'sex.United-States', [464] 'sex.HS-grad', [465] 
'sex.Married-civ-spouse', [466] 'sex.Farming-fishing', [467] 'sex.Husband', [468] 'sex.White', [469] 'sex.Local-gov' + [470] 'sex.Assoc-acdm', [471] 'sex.Protective-serv', [472] 'sex.Some-college', [473] 'sex.?', [474] 'sex.Female', [475] 'sex.10th', [476] 'sex.Other-service', [477] 'sex.Not-in-family', [478] 'sex.Unmarried', [479] 'sex.Self-emp-not-inc' + [480] 'sex.Prof-school', [481] 'sex.Prof-specialty', [482] 'sex.7th-8th', [483] 'sex.Craft-repair', [484] 'sex.Federal-gov', [485] 'sex.Bachelors', [486] 'sex.Adm-clerical', [487] 'sex.Masters', [488] 'sex.Exec-managerial', [489] 'sex.State-gov' + [490] 'sex.Wife', [491] 'sex.Widowed', [492] 'sex.Doctorate', [493] 'sex.Asian-Pac-Islander', [494] 'sex.Tech-support', [495] 'sex.Divorced', [496] 'sex.Peru', [497] 'sex.Separated', [498] 'sex.Sales', [499] 'sex.5th-6th' + [500] 'sex.Priv-house-serv', [501] 'sex.Guatemala', [502] 'sex.Self-emp-inc', [503] 'sex.Assoc-voc', [504] 'sex.Mexico', [505] 'sex.Transport-moving', [506] 'sex.Handlers-cleaners', [507] 'sex.9th', [508] 'sex.Married-spouse-absent', [509] 'sex.Other' + [510] 'sex.Dominican-Republic', [511] 'sex.Armed-Forces', [512] 'sex.Amer-Indian-Inuit', [513] 'sex.Ireland', [514] 'sex.Germany', [515] 'sex.12th', [516] 'sex.Other-relative', [517] 'sex.Philippines', [518] 'sex.Thailand', [519] 'sex.Haiti' + [520] 'sex.El-Salvador', [521] 'sex.Puerto-Rico', [522] 'sex.Vietnam', [523] 'sex.1st-4th', [524] 'sex.South', [525] 'sex.Married-AF-spouse', [526] 'sex.Columbia', [527] 'sex.Japan', [528] 'sex.India', [529] 'sex.Cambodia' + [530] 'sex.Poland', [531] 'sex.Laos', [532] 'native-country-region.Private', [533] 'native-country-region.11th', [534] 'native-country-region.Never-married', [535] 'native-country-region.Machine-op-inspct', [536] 'native-country-region.Own-child', [537] 'native-country-region.Black', [538] 'native-country-region.Male', [539] 'native-country-region.United-States' + [540] 'native-country-region.HS-grad', [541] 
'native-country-region.Married-civ-spouse', [542] 'native-country-region.Farming-fishing', [543] 'native-country-region.Husband', [544] 'native-country-region.White', [545] 'native-country-region.Local-gov', [546] 'native-country-region.Assoc-acdm', [547] 'native-country-region.Protective-serv', [548] 'native-country-region.Some-college', [549] 'native-country-region.?' + [550] 'native-country-region.Female', [551] 'native-country-region.10th', [552] 'native-country-region.Other-service', [553] 'native-country-region.Not-in-family', [554] 'native-country-region.Unmarried', [555] 'native-country-region.Self-emp-not-inc', [556] 'native-country-region.Prof-school', [557] 'native-country-region.Prof-specialty', [558] 'native-country-region.7th-8th', [559] 'native-country-region.Craft-repair' + [560] 'native-country-region.Federal-gov', [561] 'native-country-region.Bachelors', [562] 'native-country-region.Adm-clerical', [563] 'native-country-region.Masters', [564] 'native-country-region.Exec-managerial', [565] 'native-country-region.State-gov', [566] 'native-country-region.Wife', [567] 'native-country-region.Widowed', [568] 'native-country-region.Doctorate', [569] 'native-country-region.Asian-Pac-Islander' + [570] 'native-country-region.Tech-support', [571] 'native-country-region.Divorced', [572] 'native-country-region.Peru', [573] 'native-country-region.Separated', [574] 'native-country-region.Sales', [575] 'native-country-region.5th-6th', [576] 'native-country-region.Priv-house-serv', [577] 'native-country-region.Guatemala', [578] 'native-country-region.Self-emp-inc', [579] 'native-country-region.Assoc-voc' + [580] 'native-country-region.Mexico', [581] 'native-country-region.Transport-moving', [582] 'native-country-region.Handlers-cleaners', [583] 'native-country-region.9th', [584] 'native-country-region.Married-spouse-absent', [585] 'native-country-region.Other', [586] 'native-country-region.Dominican-Republic', [587] 'native-country-region.Armed-Forces', [588] 
'native-country-region.Amer-Indian-Inuit', [589] 'native-country-region.Ireland' + [590] 'native-country-region.Germany', [591] 'native-country-region.12th', [592] 'native-country-region.Other-relative', [593] 'native-country-region.Philippines', [594] 'native-country-region.Thailand', [595] 'native-country-region.Haiti', [596] 'native-country-region.El-Salvador', [597] 'native-country-region.Puerto-Rico', [598] 'native-country-region.Vietnam', [599] 'native-country-region.1st-4th' + [600] 'native-country-region.South', [601] 'native-country-region.Married-AF-spouse', [602] 'native-country-region.Columbia', [603] 'native-country-region.Japan', [604] 'native-country-region.India', [605] 'native-country-region.Cambodia', [606] 'native-country-region.Poland', [607] 'native-country-region.Laos' +---- RowToRowMapperTransform ---- +6 columns: + NumFeatures: Vec + Metadata 'SlotNames': Vec: Length=6, Count=6 + [0] 'age', [1] 'fnlwgt', [2] 'education-num', [3] 'capital-gain', [4] 'capital-loss', [5] 'hours-per-week' + CatFeaturesText: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + Label: R4 + CatFeatures: Vec, 8> + Metadata 'KeyValues': Vec: Length=76, Count=76 + [0] 'Private', [1] '11th', [2] 'Never-married', [3] 'Machine-op-inspct', [4] 'Own-child', [5] 'Black', [6] 'Male', [7] 'United-States', [8] 'HS-grad', [9] 'Married-civ-spouse' + [10] 'Farming-fishing', [11] 'Husband', [12] 'White', [13] 'Local-gov', [14] 'Assoc-acdm', [15] 'Protective-serv', [16] 'Some-college', [17] '?', [18] 'Female', [19] '10th' + [20] 'Other-service', [21] 'Not-in-family', [22] 'Unmarried', [23] 'Self-emp-not-inc', [24] 'Prof-school', [25] 'Prof-specialty', [26] '7th-8th', [27] 'Craft-repair', [28] 'Federal-gov', [29] 'Bachelors' + [30] 'Adm-clerical', [31] 'Masters', [32] 'Exec-managerial', [33] 'State-gov', [34] 'Wife', [35] 'Widowed', 
[36] 'Doctorate', [37] 'Asian-Pac-Islander', [38] 'Tech-support', [39] 'Divorced' + [40] 'Peru', [41] 'Separated', [42] 'Sales', [43] '5th-6th', [44] 'Priv-house-serv', [45] 'Guatemala', [46] 'Self-emp-inc', [47] 'Assoc-voc', [48] 'Mexico', [49] 'Transport-moving' + [50] 'Handlers-cleaners', [51] '9th', [52] 'Married-spouse-absent', [53] 'Other', [54] 'Dominican-Republic', [55] 'Armed-Forces', [56] 'Amer-Indian-Inuit', [57] 'Ireland', [58] 'Germany', [59] '12th' + [60] 'Other-relative', [61] 'Philippines', [62] 'Thailand', [63] 'Haiti', [64] 'El-Salvador', [65] 'Puerto-Rico', [66] 'Vietnam', [67] '1st-4th', [68] 'South', [69] 'Married-AF-spouse' + [70] 'Columbia', [71] 'Japan', [72] 'India', [73] 'Cambodia', [74] 'Poland', [75] 'Laos' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'Workclass', [1] 'education', [2] 'marital-status', [3] 'occupation', [4] 'relationship', [5] 'ethnicity', [6] 'sex', [7] 'native-country-region' + CatFeatures: Vec + Metadata 'CategoricalSlotRanges': Vec: Length=16, Count=16 + [0] '0', [1] '75', [2] '76', [3] '151', [4] '152', [5] '227', [6] '228', [7] '303', [8] '304', [9] '379' + [10] '380', [11] '455', [12] '456', [13] '531', [14] '532', [15] '607' + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=608, Count=608 + [0] 'Workclass.Private', [1] 'Workclass.11th', [2] 'Workclass.Never-married', [3] 'Workclass.Machine-op-inspct', [4] 'Workclass.Own-child', [5] 'Workclass.Black', [6] 'Workclass.Male', [7] 'Workclass.United-States', [8] 'Workclass.HS-grad', [9] 'Workclass.Married-civ-spouse' + [10] 'Workclass.Farming-fishing', [11] 'Workclass.Husband', [12] 'Workclass.White', [13] 'Workclass.Local-gov', [14] 'Workclass.Assoc-acdm', [15] 'Workclass.Protective-serv', [16] 'Workclass.Some-college', [17] 'Workclass.?', [18] 'Workclass.Female', [19] 'Workclass.10th' + [20] 'Workclass.Other-service', [21] 'Workclass.Not-in-family', [22] 'Workclass.Unmarried', [23] 'Workclass.Self-emp-not-inc', [24] 
'Workclass.Prof-school', [25] 'Workclass.Prof-specialty', [26] 'Workclass.7th-8th', [27] 'Workclass.Craft-repair', [28] 'Workclass.Federal-gov', [29] 'Workclass.Bachelors' + [30] 'Workclass.Adm-clerical', [31] 'Workclass.Masters', [32] 'Workclass.Exec-managerial', [33] 'Workclass.State-gov', [34] 'Workclass.Wife', [35] 'Workclass.Widowed', [36] 'Workclass.Doctorate', [37] 'Workclass.Asian-Pac-Islander', [38] 'Workclass.Tech-support', [39] 'Workclass.Divorced' + [40] 'Workclass.Peru', [41] 'Workclass.Separated', [42] 'Workclass.Sales', [43] 'Workclass.5th-6th', [44] 'Workclass.Priv-house-serv', [45] 'Workclass.Guatemala', [46] 'Workclass.Self-emp-inc', [47] 'Workclass.Assoc-voc', [48] 'Workclass.Mexico', [49] 'Workclass.Transport-moving' + [50] 'Workclass.Handlers-cleaners', [51] 'Workclass.9th', [52] 'Workclass.Married-spouse-absent', [53] 'Workclass.Other', [54] 'Workclass.Dominican-Republic', [55] 'Workclass.Armed-Forces', [56] 'Workclass.Amer-Indian-Inuit', [57] 'Workclass.Ireland', [58] 'Workclass.Germany', [59] 'Workclass.12th' + [60] 'Workclass.Other-relative', [61] 'Workclass.Philippines', [62] 'Workclass.Thailand', [63] 'Workclass.Haiti', [64] 'Workclass.El-Salvador', [65] 'Workclass.Puerto-Rico', [66] 'Workclass.Vietnam', [67] 'Workclass.1st-4th', [68] 'Workclass.South', [69] 'Workclass.Married-AF-spouse' + [70] 'Workclass.Columbia', [71] 'Workclass.Japan', [72] 'Workclass.India', [73] 'Workclass.Cambodia', [74] 'Workclass.Poland', [75] 'Workclass.Laos', [76] 'education.Private', [77] 'education.11th', [78] 'education.Never-married', [79] 'education.Machine-op-inspct' + [80] 'education.Own-child', [81] 'education.Black', [82] 'education.Male', [83] 'education.United-States', [84] 'education.HS-grad', [85] 'education.Married-civ-spouse', [86] 'education.Farming-fishing', [87] 'education.Husband', [88] 'education.White', [89] 'education.Local-gov' + [90] 'education.Assoc-acdm', [91] 'education.Protective-serv', [92] 'education.Some-college', [93] 
'education.?', [94] 'education.Female', [95] 'education.10th', [96] 'education.Other-service', [97] 'education.Not-in-family', [98] 'education.Unmarried', [99] 'education.Self-emp-not-inc' + [100] 'education.Prof-school', [101] 'education.Prof-specialty', [102] 'education.7th-8th', [103] 'education.Craft-repair', [104] 'education.Federal-gov', [105] 'education.Bachelors', [106] 'education.Adm-clerical', [107] 'education.Masters', [108] 'education.Exec-managerial', [109] 'education.State-gov' + [110] 'education.Wife', [111] 'education.Widowed', [112] 'education.Doctorate', [113] 'education.Asian-Pac-Islander', [114] 'education.Tech-support', [115] 'education.Divorced', [116] 'education.Peru', [117] 'education.Separated', [118] 'education.Sales', [119] 'education.5th-6th' + [120] 'education.Priv-house-serv', [121] 'education.Guatemala', [122] 'education.Self-emp-inc', [123] 'education.Assoc-voc', [124] 'education.Mexico', [125] 'education.Transport-moving', [126] 'education.Handlers-cleaners', [127] 'education.9th', [128] 'education.Married-spouse-absent', [129] 'education.Other' + [130] 'education.Dominican-Republic', [131] 'education.Armed-Forces', [132] 'education.Amer-Indian-Inuit', [133] 'education.Ireland', [134] 'education.Germany', [135] 'education.12th', [136] 'education.Other-relative', [137] 'education.Philippines', [138] 'education.Thailand', [139] 'education.Haiti' + [140] 'education.El-Salvador', [141] 'education.Puerto-Rico', [142] 'education.Vietnam', [143] 'education.1st-4th', [144] 'education.South', [145] 'education.Married-AF-spouse', [146] 'education.Columbia', [147] 'education.Japan', [148] 'education.India', [149] 'education.Cambodia' + [150] 'education.Poland', [151] 'education.Laos', [152] 'marital-status.Private', [153] 'marital-status.11th', [154] 'marital-status.Never-married', [155] 'marital-status.Machine-op-inspct', [156] 'marital-status.Own-child', [157] 'marital-status.Black', [158] 'marital-status.Male', [159] 
'marital-status.United-States' + [160] 'marital-status.HS-grad', [161] 'marital-status.Married-civ-spouse', [162] 'marital-status.Farming-fishing', [163] 'marital-status.Husband', [164] 'marital-status.White', [165] 'marital-status.Local-gov', [166] 'marital-status.Assoc-acdm', [167] 'marital-status.Protective-serv', [168] 'marital-status.Some-college', [169] 'marital-status.?' + [170] 'marital-status.Female', [171] 'marital-status.10th', [172] 'marital-status.Other-service', [173] 'marital-status.Not-in-family', [174] 'marital-status.Unmarried', [175] 'marital-status.Self-emp-not-inc', [176] 'marital-status.Prof-school', [177] 'marital-status.Prof-specialty', [178] 'marital-status.7th-8th', [179] 'marital-status.Craft-repair' + [180] 'marital-status.Federal-gov', [181] 'marital-status.Bachelors', [182] 'marital-status.Adm-clerical', [183] 'marital-status.Masters', [184] 'marital-status.Exec-managerial', [185] 'marital-status.State-gov', [186] 'marital-status.Wife', [187] 'marital-status.Widowed', [188] 'marital-status.Doctorate', [189] 'marital-status.Asian-Pac-Islander' + [190] 'marital-status.Tech-support', [191] 'marital-status.Divorced', [192] 'marital-status.Peru', [193] 'marital-status.Separated', [194] 'marital-status.Sales', [195] 'marital-status.5th-6th', [196] 'marital-status.Priv-house-serv', [197] 'marital-status.Guatemala', [198] 'marital-status.Self-emp-inc', [199] 'marital-status.Assoc-voc' + [200] 'marital-status.Mexico', [201] 'marital-status.Transport-moving', [202] 'marital-status.Handlers-cleaners', [203] 'marital-status.9th', [204] 'marital-status.Married-spouse-absent', [205] 'marital-status.Other', [206] 'marital-status.Dominican-Republic', [207] 'marital-status.Armed-Forces', [208] 'marital-status.Amer-Indian-Inuit', [209] 'marital-status.Ireland' + [210] 'marital-status.Germany', [211] 'marital-status.12th', [212] 'marital-status.Other-relative', [213] 'marital-status.Philippines', [214] 'marital-status.Thailand', [215] 
'marital-status.Haiti', [216] 'marital-status.El-Salvador', [217] 'marital-status.Puerto-Rico', [218] 'marital-status.Vietnam', [219] 'marital-status.1st-4th' + [220] 'marital-status.South', [221] 'marital-status.Married-AF-spouse', [222] 'marital-status.Columbia', [223] 'marital-status.Japan', [224] 'marital-status.India', [225] 'marital-status.Cambodia', [226] 'marital-status.Poland', [227] 'marital-status.Laos', [228] 'occupation.Private', [229] 'occupation.11th' + [230] 'occupation.Never-married', [231] 'occupation.Machine-op-inspct', [232] 'occupation.Own-child', [233] 'occupation.Black', [234] 'occupation.Male', [235] 'occupation.United-States', [236] 'occupation.HS-grad', [237] 'occupation.Married-civ-spouse', [238] 'occupation.Farming-fishing', [239] 'occupation.Husband' + [240] 'occupation.White', [241] 'occupation.Local-gov', [242] 'occupation.Assoc-acdm', [243] 'occupation.Protective-serv', [244] 'occupation.Some-college', [245] 'occupation.?', [246] 'occupation.Female', [247] 'occupation.10th', [248] 'occupation.Other-service', [249] 'occupation.Not-in-family' + [250] 'occupation.Unmarried', [251] 'occupation.Self-emp-not-inc', [252] 'occupation.Prof-school', [253] 'occupation.Prof-specialty', [254] 'occupation.7th-8th', [255] 'occupation.Craft-repair', [256] 'occupation.Federal-gov', [257] 'occupation.Bachelors', [258] 'occupation.Adm-clerical', [259] 'occupation.Masters' + [260] 'occupation.Exec-managerial', [261] 'occupation.State-gov', [262] 'occupation.Wife', [263] 'occupation.Widowed', [264] 'occupation.Doctorate', [265] 'occupation.Asian-Pac-Islander', [266] 'occupation.Tech-support', [267] 'occupation.Divorced', [268] 'occupation.Peru', [269] 'occupation.Separated' + [270] 'occupation.Sales', [271] 'occupation.5th-6th', [272] 'occupation.Priv-house-serv', [273] 'occupation.Guatemala', [274] 'occupation.Self-emp-inc', [275] 'occupation.Assoc-voc', [276] 'occupation.Mexico', [277] 'occupation.Transport-moving', [278] 
'occupation.Handlers-cleaners', [279] 'occupation.9th' + [280] 'occupation.Married-spouse-absent', [281] 'occupation.Other', [282] 'occupation.Dominican-Republic', [283] 'occupation.Armed-Forces', [284] 'occupation.Amer-Indian-Inuit', [285] 'occupation.Ireland', [286] 'occupation.Germany', [287] 'occupation.12th', [288] 'occupation.Other-relative', [289] 'occupation.Philippines' + [290] 'occupation.Thailand', [291] 'occupation.Haiti', [292] 'occupation.El-Salvador', [293] 'occupation.Puerto-Rico', [294] 'occupation.Vietnam', [295] 'occupation.1st-4th', [296] 'occupation.South', [297] 'occupation.Married-AF-spouse', [298] 'occupation.Columbia', [299] 'occupation.Japan' + [300] 'occupation.India', [301] 'occupation.Cambodia', [302] 'occupation.Poland', [303] 'occupation.Laos', [304] 'relationship.Private', [305] 'relationship.11th', [306] 'relationship.Never-married', [307] 'relationship.Machine-op-inspct', [308] 'relationship.Own-child', [309] 'relationship.Black' + [310] 'relationship.Male', [311] 'relationship.United-States', [312] 'relationship.HS-grad', [313] 'relationship.Married-civ-spouse', [314] 'relationship.Farming-fishing', [315] 'relationship.Husband', [316] 'relationship.White', [317] 'relationship.Local-gov', [318] 'relationship.Assoc-acdm', [319] 'relationship.Protective-serv' + [320] 'relationship.Some-college', [321] 'relationship.?', [322] 'relationship.Female', [323] 'relationship.10th', [324] 'relationship.Other-service', [325] 'relationship.Not-in-family', [326] 'relationship.Unmarried', [327] 'relationship.Self-emp-not-inc', [328] 'relationship.Prof-school', [329] 'relationship.Prof-specialty' + [330] 'relationship.7th-8th', [331] 'relationship.Craft-repair', [332] 'relationship.Federal-gov', [333] 'relationship.Bachelors', [334] 'relationship.Adm-clerical', [335] 'relationship.Masters', [336] 'relationship.Exec-managerial', [337] 'relationship.State-gov', [338] 'relationship.Wife', [339] 'relationship.Widowed' + [340] 'relationship.Doctorate', 
[341] 'relationship.Asian-Pac-Islander', [342] 'relationship.Tech-support', [343] 'relationship.Divorced', [344] 'relationship.Peru', [345] 'relationship.Separated', [346] 'relationship.Sales', [347] 'relationship.5th-6th', [348] 'relationship.Priv-house-serv', [349] 'relationship.Guatemala' + [350] 'relationship.Self-emp-inc', [351] 'relationship.Assoc-voc', [352] 'relationship.Mexico', [353] 'relationship.Transport-moving', [354] 'relationship.Handlers-cleaners', [355] 'relationship.9th', [356] 'relationship.Married-spouse-absent', [357] 'relationship.Other', [358] 'relationship.Dominican-Republic', [359] 'relationship.Armed-Forces' + [360] 'relationship.Amer-Indian-Inuit', [361] 'relationship.Ireland', [362] 'relationship.Germany', [363] 'relationship.12th', [364] 'relationship.Other-relative', [365] 'relationship.Philippines', [366] 'relationship.Thailand', [367] 'relationship.Haiti', [368] 'relationship.El-Salvador', [369] 'relationship.Puerto-Rico' + [370] 'relationship.Vietnam', [371] 'relationship.1st-4th', [372] 'relationship.South', [373] 'relationship.Married-AF-spouse', [374] 'relationship.Columbia', [375] 'relationship.Japan', [376] 'relationship.India', [377] 'relationship.Cambodia', [378] 'relationship.Poland', [379] 'relationship.Laos' + [380] 'ethnicity.Private', [381] 'ethnicity.11th', [382] 'ethnicity.Never-married', [383] 'ethnicity.Machine-op-inspct', [384] 'ethnicity.Own-child', [385] 'ethnicity.Black', [386] 'ethnicity.Male', [387] 'ethnicity.United-States', [388] 'ethnicity.HS-grad', [389] 'ethnicity.Married-civ-spouse' + [390] 'ethnicity.Farming-fishing', [391] 'ethnicity.Husband', [392] 'ethnicity.White', [393] 'ethnicity.Local-gov', [394] 'ethnicity.Assoc-acdm', [395] 'ethnicity.Protective-serv', [396] 'ethnicity.Some-college', [397] 'ethnicity.?', [398] 'ethnicity.Female', [399] 'ethnicity.10th' + [400] 'ethnicity.Other-service', [401] 'ethnicity.Not-in-family', [402] 'ethnicity.Unmarried', [403] 'ethnicity.Self-emp-not-inc', [404] 
'ethnicity.Prof-school', [405] 'ethnicity.Prof-specialty', [406] 'ethnicity.7th-8th', [407] 'ethnicity.Craft-repair', [408] 'ethnicity.Federal-gov', [409] 'ethnicity.Bachelors' + [410] 'ethnicity.Adm-clerical', [411] 'ethnicity.Masters', [412] 'ethnicity.Exec-managerial', [413] 'ethnicity.State-gov', [414] 'ethnicity.Wife', [415] 'ethnicity.Widowed', [416] 'ethnicity.Doctorate', [417] 'ethnicity.Asian-Pac-Islander', [418] 'ethnicity.Tech-support', [419] 'ethnicity.Divorced' + [420] 'ethnicity.Peru', [421] 'ethnicity.Separated', [422] 'ethnicity.Sales', [423] 'ethnicity.5th-6th', [424] 'ethnicity.Priv-house-serv', [425] 'ethnicity.Guatemala', [426] 'ethnicity.Self-emp-inc', [427] 'ethnicity.Assoc-voc', [428] 'ethnicity.Mexico', [429] 'ethnicity.Transport-moving' + [430] 'ethnicity.Handlers-cleaners', [431] 'ethnicity.9th', [432] 'ethnicity.Married-spouse-absent', [433] 'ethnicity.Other', [434] 'ethnicity.Dominican-Republic', [435] 'ethnicity.Armed-Forces', [436] 'ethnicity.Amer-Indian-Inuit', [437] 'ethnicity.Ireland', [438] 'ethnicity.Germany', [439] 'ethnicity.12th' + [440] 'ethnicity.Other-relative', [441] 'ethnicity.Philippines', [442] 'ethnicity.Thailand', [443] 'ethnicity.Haiti', [444] 'ethnicity.El-Salvador', [445] 'ethnicity.Puerto-Rico', [446] 'ethnicity.Vietnam', [447] 'ethnicity.1st-4th', [448] 'ethnicity.South', [449] 'ethnicity.Married-AF-spouse' + [450] 'ethnicity.Columbia', [451] 'ethnicity.Japan', [452] 'ethnicity.India', [453] 'ethnicity.Cambodia', [454] 'ethnicity.Poland', [455] 'ethnicity.Laos', [456] 'sex.Private', [457] 'sex.11th', [458] 'sex.Never-married', [459] 'sex.Machine-op-inspct' + [460] 'sex.Own-child', [461] 'sex.Black', [462] 'sex.Male', [463] 'sex.United-States', [464] 'sex.HS-grad', [465] 'sex.Married-civ-spouse', [466] 'sex.Farming-fishing', [467] 'sex.Husband', [468] 'sex.White', [469] 'sex.Local-gov' + [470] 'sex.Assoc-acdm', [471] 'sex.Protective-serv', [472] 'sex.Some-college', [473] 'sex.?', [474] 'sex.Female', [475] 
'sex.10th', [476] 'sex.Other-service', [477] 'sex.Not-in-family', [478] 'sex.Unmarried', [479] 'sex.Self-emp-not-inc' + [480] 'sex.Prof-school', [481] 'sex.Prof-specialty', [482] 'sex.7th-8th', [483] 'sex.Craft-repair', [484] 'sex.Federal-gov', [485] 'sex.Bachelors', [486] 'sex.Adm-clerical', [487] 'sex.Masters', [488] 'sex.Exec-managerial', [489] 'sex.State-gov' + [490] 'sex.Wife', [491] 'sex.Widowed', [492] 'sex.Doctorate', [493] 'sex.Asian-Pac-Islander', [494] 'sex.Tech-support', [495] 'sex.Divorced', [496] 'sex.Peru', [497] 'sex.Separated', [498] 'sex.Sales', [499] 'sex.5th-6th' + [500] 'sex.Priv-house-serv', [501] 'sex.Guatemala', [502] 'sex.Self-emp-inc', [503] 'sex.Assoc-voc', [504] 'sex.Mexico', [505] 'sex.Transport-moving', [506] 'sex.Handlers-cleaners', [507] 'sex.9th', [508] 'sex.Married-spouse-absent', [509] 'sex.Other' + [510] 'sex.Dominican-Republic', [511] 'sex.Armed-Forces', [512] 'sex.Amer-Indian-Inuit', [513] 'sex.Ireland', [514] 'sex.Germany', [515] 'sex.12th', [516] 'sex.Other-relative', [517] 'sex.Philippines', [518] 'sex.Thailand', [519] 'sex.Haiti' + [520] 'sex.El-Salvador', [521] 'sex.Puerto-Rico', [522] 'sex.Vietnam', [523] 'sex.1st-4th', [524] 'sex.South', [525] 'sex.Married-AF-spouse', [526] 'sex.Columbia', [527] 'sex.Japan', [528] 'sex.India', [529] 'sex.Cambodia' + [530] 'sex.Poland', [531] 'sex.Laos', [532] 'native-country-region.Private', [533] 'native-country-region.11th', [534] 'native-country-region.Never-married', [535] 'native-country-region.Machine-op-inspct', [536] 'native-country-region.Own-child', [537] 'native-country-region.Black', [538] 'native-country-region.Male', [539] 'native-country-region.United-States' + [540] 'native-country-region.HS-grad', [541] 'native-country-region.Married-civ-spouse', [542] 'native-country-region.Farming-fishing', [543] 'native-country-region.Husband', [544] 'native-country-region.White', [545] 'native-country-region.Local-gov', [546] 'native-country-region.Assoc-acdm', [547] 
'native-country-region.Protective-serv', [548] 'native-country-region.Some-college', [549] 'native-country-region.?' + [550] 'native-country-region.Female', [551] 'native-country-region.10th', [552] 'native-country-region.Other-service', [553] 'native-country-region.Not-in-family', [554] 'native-country-region.Unmarried', [555] 'native-country-region.Self-emp-not-inc', [556] 'native-country-region.Prof-school', [557] 'native-country-region.Prof-specialty', [558] 'native-country-region.7th-8th', [559] 'native-country-region.Craft-repair' + [560] 'native-country-region.Federal-gov', [561] 'native-country-region.Bachelors', [562] 'native-country-region.Adm-clerical', [563] 'native-country-region.Masters', [564] 'native-country-region.Exec-managerial', [565] 'native-country-region.State-gov', [566] 'native-country-region.Wife', [567] 'native-country-region.Widowed', [568] 'native-country-region.Doctorate', [569] 'native-country-region.Asian-Pac-Islander' + [570] 'native-country-region.Tech-support', [571] 'native-country-region.Divorced', [572] 'native-country-region.Peru', [573] 'native-country-region.Separated', [574] 'native-country-region.Sales', [575] 'native-country-region.5th-6th', [576] 'native-country-region.Priv-house-serv', [577] 'native-country-region.Guatemala', [578] 'native-country-region.Self-emp-inc', [579] 'native-country-region.Assoc-voc' + [580] 'native-country-region.Mexico', [581] 'native-country-region.Transport-moving', [582] 'native-country-region.Handlers-cleaners', [583] 'native-country-region.9th', [584] 'native-country-region.Married-spouse-absent', [585] 'native-country-region.Other', [586] 'native-country-region.Dominican-Republic', [587] 'native-country-region.Armed-Forces', [588] 'native-country-region.Amer-Indian-Inuit', [589] 'native-country-region.Ireland' + [590] 'native-country-region.Germany', [591] 'native-country-region.12th', [592] 'native-country-region.Other-relative', [593] 'native-country-region.Philippines', [594] 
'native-country-region.Thailand', [595] 'native-country-region.Haiti', [596] 'native-country-region.El-Salvador', [597] 'native-country-region.Puerto-Rico', [598] 'native-country-region.Vietnam', [599] 'native-country-region.1st-4th' + [600] 'native-country-region.South', [601] 'native-country-region.Married-AF-spouse', [602] 'native-country-region.Columbia', [603] 'native-country-region.Japan', [604] 'native-country-region.India', [605] 'native-country-region.Cambodia', [606] 'native-country-region.Poland', [607] 'native-country-region.Laos' + Features: Vec + Metadata 'CategoricalSlotRanges': Vec: Length=16, Count=16 + [0] '6', [1] '81', [2] '82', [3] '157', [4] '158', [5] '233', [6] '234', [7] '309', [8] '310', [9] '385' + [10] '386', [11] '461', [12] '462', [13] '537', [14] '538', [15] '613' + Metadata 'SlotNames': Vec: Length=614, Count=614 + [0] 'NumFeatures.age', [1] 'NumFeatures.fnlwgt', [2] 'NumFeatures.education-num', [3] 'NumFeatures.capital-gain', [4] 'NumFeatures.capital-loss', [5] 'NumFeatures.hours-per-week', [6] 'CatFeatures.Workclass.Private', [7] 'CatFeatures.Workclass.11th', [8] 'CatFeatures.Workclass.Never-married', [9] 'CatFeatures.Workclass.Machine-op-inspct' + [10] 'CatFeatures.Workclass.Own-child', [11] 'CatFeatures.Workclass.Black', [12] 'CatFeatures.Workclass.Male', [13] 'CatFeatures.Workclass.United-States', [14] 'CatFeatures.Workclass.HS-grad', [15] 'CatFeatures.Workclass.Married-civ-spouse', [16] 'CatFeatures.Workclass.Farming-fishing', [17] 'CatFeatures.Workclass.Husband', [18] 'CatFeatures.Workclass.White', [19] 'CatFeatures.Workclass.Local-gov' + [20] 'CatFeatures.Workclass.Assoc-acdm', [21] 'CatFeatures.Workclass.Protective-serv', [22] 'CatFeatures.Workclass.Some-college', [23] 'CatFeatures.Workclass.?', [24] 'CatFeatures.Workclass.Female', [25] 'CatFeatures.Workclass.10th', [26] 'CatFeatures.Workclass.Other-service', [27] 'CatFeatures.Workclass.Not-in-family', [28] 'CatFeatures.Workclass.Unmarried', [29] 
'CatFeatures.Workclass.Self-emp-not-inc' + [30] 'CatFeatures.Workclass.Prof-school', [31] 'CatFeatures.Workclass.Prof-specialty', [32] 'CatFeatures.Workclass.7th-8th', [33] 'CatFeatures.Workclass.Craft-repair', [34] 'CatFeatures.Workclass.Federal-gov', [35] 'CatFeatures.Workclass.Bachelors', [36] 'CatFeatures.Workclass.Adm-clerical', [37] 'CatFeatures.Workclass.Masters', [38] 'CatFeatures.Workclass.Exec-managerial', [39] 'CatFeatures.Workclass.State-gov' + [40] 'CatFeatures.Workclass.Wife', [41] 'CatFeatures.Workclass.Widowed', [42] 'CatFeatures.Workclass.Doctorate', [43] 'CatFeatures.Workclass.Asian-Pac-Islander', [44] 'CatFeatures.Workclass.Tech-support', [45] 'CatFeatures.Workclass.Divorced', [46] 'CatFeatures.Workclass.Peru', [47] 'CatFeatures.Workclass.Separated', [48] 'CatFeatures.Workclass.Sales', [49] 'CatFeatures.Workclass.5th-6th' + [50] 'CatFeatures.Workclass.Priv-house-serv', [51] 'CatFeatures.Workclass.Guatemala', [52] 'CatFeatures.Workclass.Self-emp-inc', [53] 'CatFeatures.Workclass.Assoc-voc', [54] 'CatFeatures.Workclass.Mexico', [55] 'CatFeatures.Workclass.Transport-moving', [56] 'CatFeatures.Workclass.Handlers-cleaners', [57] 'CatFeatures.Workclass.9th', [58] 'CatFeatures.Workclass.Married-spouse-absent', [59] 'CatFeatures.Workclass.Other' + [60] 'CatFeatures.Workclass.Dominican-Republic', [61] 'CatFeatures.Workclass.Armed-Forces', [62] 'CatFeatures.Workclass.Amer-Indian-Inuit', [63] 'CatFeatures.Workclass.Ireland', [64] 'CatFeatures.Workclass.Germany', [65] 'CatFeatures.Workclass.12th', [66] 'CatFeatures.Workclass.Other-relative', [67] 'CatFeatures.Workclass.Philippines', [68] 'CatFeatures.Workclass.Thailand', [69] 'CatFeatures.Workclass.Haiti' + [70] 'CatFeatures.Workclass.El-Salvador', [71] 'CatFeatures.Workclass.Puerto-Rico', [72] 'CatFeatures.Workclass.Vietnam', [73] 'CatFeatures.Workclass.1st-4th', [74] 'CatFeatures.Workclass.South', [75] 'CatFeatures.Workclass.Married-AF-spouse', [76] 'CatFeatures.Workclass.Columbia', [77] 
'CatFeatures.Workclass.Japan', [78] 'CatFeatures.Workclass.India', [79] 'CatFeatures.Workclass.Cambodia' + [80] 'CatFeatures.Workclass.Poland', [81] 'CatFeatures.Workclass.Laos', [82] 'CatFeatures.education.Private', [83] 'CatFeatures.education.11th', [84] 'CatFeatures.education.Never-married', [85] 'CatFeatures.education.Machine-op-inspct', [86] 'CatFeatures.education.Own-child', [87] 'CatFeatures.education.Black', [88] 'CatFeatures.education.Male', [89] 'CatFeatures.education.United-States' + [90] 'CatFeatures.education.HS-grad', [91] 'CatFeatures.education.Married-civ-spouse', [92] 'CatFeatures.education.Farming-fishing', [93] 'CatFeatures.education.Husband', [94] 'CatFeatures.education.White', [95] 'CatFeatures.education.Local-gov', [96] 'CatFeatures.education.Assoc-acdm', [97] 'CatFeatures.education.Protective-serv', [98] 'CatFeatures.education.Some-college', [99] 'CatFeatures.education.?' + [100] 'CatFeatures.education.Female', [101] 'CatFeatures.education.10th', [102] 'CatFeatures.education.Other-service', [103] 'CatFeatures.education.Not-in-family', [104] 'CatFeatures.education.Unmarried', [105] 'CatFeatures.education.Self-emp-not-inc', [106] 'CatFeatures.education.Prof-school', [107] 'CatFeatures.education.Prof-specialty', [108] 'CatFeatures.education.7th-8th', [109] 'CatFeatures.education.Craft-repair' + [110] 'CatFeatures.education.Federal-gov', [111] 'CatFeatures.education.Bachelors', [112] 'CatFeatures.education.Adm-clerical', [113] 'CatFeatures.education.Masters', [114] 'CatFeatures.education.Exec-managerial', [115] 'CatFeatures.education.State-gov', [116] 'CatFeatures.education.Wife', [117] 'CatFeatures.education.Widowed', [118] 'CatFeatures.education.Doctorate', [119] 'CatFeatures.education.Asian-Pac-Islander' + [120] 'CatFeatures.education.Tech-support', [121] 'CatFeatures.education.Divorced', [122] 'CatFeatures.education.Peru', [123] 'CatFeatures.education.Separated', [124] 'CatFeatures.education.Sales', [125] 'CatFeatures.education.5th-6th', 
[126] 'CatFeatures.education.Priv-house-serv', [127] 'CatFeatures.education.Guatemala', [128] 'CatFeatures.education.Self-emp-inc', [129] 'CatFeatures.education.Assoc-voc' + [130] 'CatFeatures.education.Mexico', [131] 'CatFeatures.education.Transport-moving', [132] 'CatFeatures.education.Handlers-cleaners', [133] 'CatFeatures.education.9th', [134] 'CatFeatures.education.Married-spouse-absent', [135] 'CatFeatures.education.Other', [136] 'CatFeatures.education.Dominican-Republic', [137] 'CatFeatures.education.Armed-Forces', [138] 'CatFeatures.education.Amer-Indian-Inuit', [139] 'CatFeatures.education.Ireland' + [140] 'CatFeatures.education.Germany', [141] 'CatFeatures.education.12th', [142] 'CatFeatures.education.Other-relative', [143] 'CatFeatures.education.Philippines', [144] 'CatFeatures.education.Thailand', [145] 'CatFeatures.education.Haiti', [146] 'CatFeatures.education.El-Salvador', [147] 'CatFeatures.education.Puerto-Rico', [148] 'CatFeatures.education.Vietnam', [149] 'CatFeatures.education.1st-4th' + [150] 'CatFeatures.education.South', [151] 'CatFeatures.education.Married-AF-spouse', [152] 'CatFeatures.education.Columbia', [153] 'CatFeatures.education.Japan', [154] 'CatFeatures.education.India', [155] 'CatFeatures.education.Cambodia', [156] 'CatFeatures.education.Poland', [157] 'CatFeatures.education.Laos', [158] 'CatFeatures.marital-status.Private', [159] 'CatFeatures.marital-status.11th' + [160] 'CatFeatures.marital-status.Never-married', [161] 'CatFeatures.marital-status.Machine-op-inspct', [162] 'CatFeatures.marital-status.Own-child', [163] 'CatFeatures.marital-status.Black', [164] 'CatFeatures.marital-status.Male', [165] 'CatFeatures.marital-status.United-States', [166] 'CatFeatures.marital-status.HS-grad', [167] 'CatFeatures.marital-status.Married-civ-spouse', [168] 'CatFeatures.marital-status.Farming-fishing', [169] 'CatFeatures.marital-status.Husband' + [170] 'CatFeatures.marital-status.White', [171] 'CatFeatures.marital-status.Local-gov', [172] 
'CatFeatures.marital-status.Assoc-acdm', [173] 'CatFeatures.marital-status.Protective-serv', [174] 'CatFeatures.marital-status.Some-college', [175] 'CatFeatures.marital-status.?', [176] 'CatFeatures.marital-status.Female', [177] 'CatFeatures.marital-status.10th', [178] 'CatFeatures.marital-status.Other-service', [179] 'CatFeatures.marital-status.Not-in-family' + [180] 'CatFeatures.marital-status.Unmarried', [181] 'CatFeatures.marital-status.Self-emp-not-inc', [182] 'CatFeatures.marital-status.Prof-school', [183] 'CatFeatures.marital-status.Prof-specialty', [184] 'CatFeatures.marital-status.7th-8th', [185] 'CatFeatures.marital-status.Craft-repair', [186] 'CatFeatures.marital-status.Federal-gov', [187] 'CatFeatures.marital-status.Bachelors', [188] 'CatFeatures.marital-status.Adm-clerical', [189] 'CatFeatures.marital-status.Masters' + [190] 'CatFeatures.marital-status.Exec-managerial', [191] 'CatFeatures.marital-status.State-gov', [192] 'CatFeatures.marital-status.Wife', [193] 'CatFeatures.marital-status.Widowed', [194] 'CatFeatures.marital-status.Doctorate', [195] 'CatFeatures.marital-status.Asian-Pac-Islander', [196] 'CatFeatures.marital-status.Tech-support', [197] 'CatFeatures.marital-status.Divorced', [198] 'CatFeatures.marital-status.Peru', [199] 'CatFeatures.marital-status.Separated' + [200] 'CatFeatures.marital-status.Sales', [201] 'CatFeatures.marital-status.5th-6th', [202] 'CatFeatures.marital-status.Priv-house-serv', [203] 'CatFeatures.marital-status.Guatemala', [204] 'CatFeatures.marital-status.Self-emp-inc', [205] 'CatFeatures.marital-status.Assoc-voc', [206] 'CatFeatures.marital-status.Mexico', [207] 'CatFeatures.marital-status.Transport-moving', [208] 'CatFeatures.marital-status.Handlers-cleaners', [209] 'CatFeatures.marital-status.9th' + [210] 'CatFeatures.marital-status.Married-spouse-absent', [211] 'CatFeatures.marital-status.Other', [212] 'CatFeatures.marital-status.Dominican-Republic', [213] 'CatFeatures.marital-status.Armed-Forces', [214] 
'CatFeatures.marital-status.Amer-Indian-Inuit', [215] 'CatFeatures.marital-status.Ireland', [216] 'CatFeatures.marital-status.Germany', [217] 'CatFeatures.marital-status.12th', [218] 'CatFeatures.marital-status.Other-relative', [219] 'CatFeatures.marital-status.Philippines' + [220] 'CatFeatures.marital-status.Thailand', [221] 'CatFeatures.marital-status.Haiti', [222] 'CatFeatures.marital-status.El-Salvador', [223] 'CatFeatures.marital-status.Puerto-Rico', [224] 'CatFeatures.marital-status.Vietnam', [225] 'CatFeatures.marital-status.1st-4th', [226] 'CatFeatures.marital-status.South', [227] 'CatFeatures.marital-status.Married-AF-spouse', [228] 'CatFeatures.marital-status.Columbia', [229] 'CatFeatures.marital-status.Japan' + [230] 'CatFeatures.marital-status.India', [231] 'CatFeatures.marital-status.Cambodia', [232] 'CatFeatures.marital-status.Poland', [233] 'CatFeatures.marital-status.Laos', [234] 'CatFeatures.occupation.Private', [235] 'CatFeatures.occupation.11th', [236] 'CatFeatures.occupation.Never-married', [237] 'CatFeatures.occupation.Machine-op-inspct', [238] 'CatFeatures.occupation.Own-child', [239] 'CatFeatures.occupation.Black' + [240] 'CatFeatures.occupation.Male', [241] 'CatFeatures.occupation.United-States', [242] 'CatFeatures.occupation.HS-grad', [243] 'CatFeatures.occupation.Married-civ-spouse', [244] 'CatFeatures.occupation.Farming-fishing', [245] 'CatFeatures.occupation.Husband', [246] 'CatFeatures.occupation.White', [247] 'CatFeatures.occupation.Local-gov', [248] 'CatFeatures.occupation.Assoc-acdm', [249] 'CatFeatures.occupation.Protective-serv' + [250] 'CatFeatures.occupation.Some-college', [251] 'CatFeatures.occupation.?', [252] 'CatFeatures.occupation.Female', [253] 'CatFeatures.occupation.10th', [254] 'CatFeatures.occupation.Other-service', [255] 'CatFeatures.occupation.Not-in-family', [256] 'CatFeatures.occupation.Unmarried', [257] 'CatFeatures.occupation.Self-emp-not-inc', [258] 'CatFeatures.occupation.Prof-school', [259] 
'CatFeatures.occupation.Prof-specialty' + [260] 'CatFeatures.occupation.7th-8th', [261] 'CatFeatures.occupation.Craft-repair', [262] 'CatFeatures.occupation.Federal-gov', [263] 'CatFeatures.occupation.Bachelors', [264] 'CatFeatures.occupation.Adm-clerical', [265] 'CatFeatures.occupation.Masters', [266] 'CatFeatures.occupation.Exec-managerial', [267] 'CatFeatures.occupation.State-gov', [268] 'CatFeatures.occupation.Wife', [269] 'CatFeatures.occupation.Widowed' + [270] 'CatFeatures.occupation.Doctorate', [271] 'CatFeatures.occupation.Asian-Pac-Islander', [272] 'CatFeatures.occupation.Tech-support', [273] 'CatFeatures.occupation.Divorced', [274] 'CatFeatures.occupation.Peru', [275] 'CatFeatures.occupation.Separated', [276] 'CatFeatures.occupation.Sales', [277] 'CatFeatures.occupation.5th-6th', [278] 'CatFeatures.occupation.Priv-house-serv', [279] 'CatFeatures.occupation.Guatemala' + [280] 'CatFeatures.occupation.Self-emp-inc', [281] 'CatFeatures.occupation.Assoc-voc', [282] 'CatFeatures.occupation.Mexico', [283] 'CatFeatures.occupation.Transport-moving', [284] 'CatFeatures.occupation.Handlers-cleaners', [285] 'CatFeatures.occupation.9th', [286] 'CatFeatures.occupation.Married-spouse-absent', [287] 'CatFeatures.occupation.Other', [288] 'CatFeatures.occupation.Dominican-Republic', [289] 'CatFeatures.occupation.Armed-Forces' + [290] 'CatFeatures.occupation.Amer-Indian-Inuit', [291] 'CatFeatures.occupation.Ireland', [292] 'CatFeatures.occupation.Germany', [293] 'CatFeatures.occupation.12th', [294] 'CatFeatures.occupation.Other-relative', [295] 'CatFeatures.occupation.Philippines', [296] 'CatFeatures.occupation.Thailand', [297] 'CatFeatures.occupation.Haiti', [298] 'CatFeatures.occupation.El-Salvador', [299] 'CatFeatures.occupation.Puerto-Rico' + [300] 'CatFeatures.occupation.Vietnam', [301] 'CatFeatures.occupation.1st-4th', [302] 'CatFeatures.occupation.South', [303] 'CatFeatures.occupation.Married-AF-spouse', [304] 'CatFeatures.occupation.Columbia', [305] 
'CatFeatures.occupation.Japan', [306] 'CatFeatures.occupation.India', [307] 'CatFeatures.occupation.Cambodia', [308] 'CatFeatures.occupation.Poland', [309] 'CatFeatures.occupation.Laos' + [310] 'CatFeatures.relationship.Private', [311] 'CatFeatures.relationship.11th', [312] 'CatFeatures.relationship.Never-married', [313] 'CatFeatures.relationship.Machine-op-inspct', [314] 'CatFeatures.relationship.Own-child', [315] 'CatFeatures.relationship.Black', [316] 'CatFeatures.relationship.Male', [317] 'CatFeatures.relationship.United-States', [318] 'CatFeatures.relationship.HS-grad', [319] 'CatFeatures.relationship.Married-civ-spouse' + [320] 'CatFeatures.relationship.Farming-fishing', [321] 'CatFeatures.relationship.Husband', [322] 'CatFeatures.relationship.White', [323] 'CatFeatures.relationship.Local-gov', [324] 'CatFeatures.relationship.Assoc-acdm', [325] 'CatFeatures.relationship.Protective-serv', [326] 'CatFeatures.relationship.Some-college', [327] 'CatFeatures.relationship.?', [328] 'CatFeatures.relationship.Female', [329] 'CatFeatures.relationship.10th' + [330] 'CatFeatures.relationship.Other-service', [331] 'CatFeatures.relationship.Not-in-family', [332] 'CatFeatures.relationship.Unmarried', [333] 'CatFeatures.relationship.Self-emp-not-inc', [334] 'CatFeatures.relationship.Prof-school', [335] 'CatFeatures.relationship.Prof-specialty', [336] 'CatFeatures.relationship.7th-8th', [337] 'CatFeatures.relationship.Craft-repair', [338] 'CatFeatures.relationship.Federal-gov', [339] 'CatFeatures.relationship.Bachelors' + [340] 'CatFeatures.relationship.Adm-clerical', [341] 'CatFeatures.relationship.Masters', [342] 'CatFeatures.relationship.Exec-managerial', [343] 'CatFeatures.relationship.State-gov', [344] 'CatFeatures.relationship.Wife', [345] 'CatFeatures.relationship.Widowed', [346] 'CatFeatures.relationship.Doctorate', [347] 'CatFeatures.relationship.Asian-Pac-Islander', [348] 'CatFeatures.relationship.Tech-support', [349] 'CatFeatures.relationship.Divorced' + [350] 
'CatFeatures.relationship.Peru', [351] 'CatFeatures.relationship.Separated', [352] 'CatFeatures.relationship.Sales', [353] 'CatFeatures.relationship.5th-6th', [354] 'CatFeatures.relationship.Priv-house-serv', [355] 'CatFeatures.relationship.Guatemala', [356] 'CatFeatures.relationship.Self-emp-inc', [357] 'CatFeatures.relationship.Assoc-voc', [358] 'CatFeatures.relationship.Mexico', [359] 'CatFeatures.relationship.Transport-moving' + [360] 'CatFeatures.relationship.Handlers-cleaners', [361] 'CatFeatures.relationship.9th', [362] 'CatFeatures.relationship.Married-spouse-absent', [363] 'CatFeatures.relationship.Other', [364] 'CatFeatures.relationship.Dominican-Republic', [365] 'CatFeatures.relationship.Armed-Forces', [366] 'CatFeatures.relationship.Amer-Indian-Inuit', [367] 'CatFeatures.relationship.Ireland', [368] 'CatFeatures.relationship.Germany', [369] 'CatFeatures.relationship.12th' + [370] 'CatFeatures.relationship.Other-relative', [371] 'CatFeatures.relationship.Philippines', [372] 'CatFeatures.relationship.Thailand', [373] 'CatFeatures.relationship.Haiti', [374] 'CatFeatures.relationship.El-Salvador', [375] 'CatFeatures.relationship.Puerto-Rico', [376] 'CatFeatures.relationship.Vietnam', [377] 'CatFeatures.relationship.1st-4th', [378] 'CatFeatures.relationship.South', [379] 'CatFeatures.relationship.Married-AF-spouse' + [380] 'CatFeatures.relationship.Columbia', [381] 'CatFeatures.relationship.Japan', [382] 'CatFeatures.relationship.India', [383] 'CatFeatures.relationship.Cambodia', [384] 'CatFeatures.relationship.Poland', [385] 'CatFeatures.relationship.Laos', [386] 'CatFeatures.ethnicity.Private', [387] 'CatFeatures.ethnicity.11th', [388] 'CatFeatures.ethnicity.Never-married', [389] 'CatFeatures.ethnicity.Machine-op-inspct' + [390] 'CatFeatures.ethnicity.Own-child', [391] 'CatFeatures.ethnicity.Black', [392] 'CatFeatures.ethnicity.Male', [393] 'CatFeatures.ethnicity.United-States', [394] 'CatFeatures.ethnicity.HS-grad', [395] 
'CatFeatures.ethnicity.Married-civ-spouse', [396] 'CatFeatures.ethnicity.Farming-fishing', [397] 'CatFeatures.ethnicity.Husband', [398] 'CatFeatures.ethnicity.White', [399] 'CatFeatures.ethnicity.Local-gov' + [400] 'CatFeatures.ethnicity.Assoc-acdm', [401] 'CatFeatures.ethnicity.Protective-serv', [402] 'CatFeatures.ethnicity.Some-college', [403] 'CatFeatures.ethnicity.?', [404] 'CatFeatures.ethnicity.Female', [405] 'CatFeatures.ethnicity.10th', [406] 'CatFeatures.ethnicity.Other-service', [407] 'CatFeatures.ethnicity.Not-in-family', [408] 'CatFeatures.ethnicity.Unmarried', [409] 'CatFeatures.ethnicity.Self-emp-not-inc' + [410] 'CatFeatures.ethnicity.Prof-school', [411] 'CatFeatures.ethnicity.Prof-specialty', [412] 'CatFeatures.ethnicity.7th-8th', [413] 'CatFeatures.ethnicity.Craft-repair', [414] 'CatFeatures.ethnicity.Federal-gov', [415] 'CatFeatures.ethnicity.Bachelors', [416] 'CatFeatures.ethnicity.Adm-clerical', [417] 'CatFeatures.ethnicity.Masters', [418] 'CatFeatures.ethnicity.Exec-managerial', [419] 'CatFeatures.ethnicity.State-gov' + [420] 'CatFeatures.ethnicity.Wife', [421] 'CatFeatures.ethnicity.Widowed', [422] 'CatFeatures.ethnicity.Doctorate', [423] 'CatFeatures.ethnicity.Asian-Pac-Islander', [424] 'CatFeatures.ethnicity.Tech-support', [425] 'CatFeatures.ethnicity.Divorced', [426] 'CatFeatures.ethnicity.Peru', [427] 'CatFeatures.ethnicity.Separated', [428] 'CatFeatures.ethnicity.Sales', [429] 'CatFeatures.ethnicity.5th-6th' + [430] 'CatFeatures.ethnicity.Priv-house-serv', [431] 'CatFeatures.ethnicity.Guatemala', [432] 'CatFeatures.ethnicity.Self-emp-inc', [433] 'CatFeatures.ethnicity.Assoc-voc', [434] 'CatFeatures.ethnicity.Mexico', [435] 'CatFeatures.ethnicity.Transport-moving', [436] 'CatFeatures.ethnicity.Handlers-cleaners', [437] 'CatFeatures.ethnicity.9th', [438] 'CatFeatures.ethnicity.Married-spouse-absent', [439] 'CatFeatures.ethnicity.Other' + [440] 'CatFeatures.ethnicity.Dominican-Republic', [441] 'CatFeatures.ethnicity.Armed-Forces', [442] 
'CatFeatures.ethnicity.Amer-Indian-Inuit', [443] 'CatFeatures.ethnicity.Ireland', [444] 'CatFeatures.ethnicity.Germany', [445] 'CatFeatures.ethnicity.12th', [446] 'CatFeatures.ethnicity.Other-relative', [447] 'CatFeatures.ethnicity.Philippines', [448] 'CatFeatures.ethnicity.Thailand', [449] 'CatFeatures.ethnicity.Haiti' + [450] 'CatFeatures.ethnicity.El-Salvador', [451] 'CatFeatures.ethnicity.Puerto-Rico', [452] 'CatFeatures.ethnicity.Vietnam', [453] 'CatFeatures.ethnicity.1st-4th', [454] 'CatFeatures.ethnicity.South', [455] 'CatFeatures.ethnicity.Married-AF-spouse', [456] 'CatFeatures.ethnicity.Columbia', [457] 'CatFeatures.ethnicity.Japan', [458] 'CatFeatures.ethnicity.India', [459] 'CatFeatures.ethnicity.Cambodia' + [460] 'CatFeatures.ethnicity.Poland', [461] 'CatFeatures.ethnicity.Laos', [462] 'CatFeatures.sex.Private', [463] 'CatFeatures.sex.11th', [464] 'CatFeatures.sex.Never-married', [465] 'CatFeatures.sex.Machine-op-inspct', [466] 'CatFeatures.sex.Own-child', [467] 'CatFeatures.sex.Black', [468] 'CatFeatures.sex.Male', [469] 'CatFeatures.sex.United-States' + [470] 'CatFeatures.sex.HS-grad', [471] 'CatFeatures.sex.Married-civ-spouse', [472] 'CatFeatures.sex.Farming-fishing', [473] 'CatFeatures.sex.Husband', [474] 'CatFeatures.sex.White', [475] 'CatFeatures.sex.Local-gov', [476] 'CatFeatures.sex.Assoc-acdm', [477] 'CatFeatures.sex.Protective-serv', [478] 'CatFeatures.sex.Some-college', [479] 'CatFeatures.sex.?' 
+ [480] 'CatFeatures.sex.Female', [481] 'CatFeatures.sex.10th', [482] 'CatFeatures.sex.Other-service', [483] 'CatFeatures.sex.Not-in-family', [484] 'CatFeatures.sex.Unmarried', [485] 'CatFeatures.sex.Self-emp-not-inc', [486] 'CatFeatures.sex.Prof-school', [487] 'CatFeatures.sex.Prof-specialty', [488] 'CatFeatures.sex.7th-8th', [489] 'CatFeatures.sex.Craft-repair' + [490] 'CatFeatures.sex.Federal-gov', [491] 'CatFeatures.sex.Bachelors', [492] 'CatFeatures.sex.Adm-clerical', [493] 'CatFeatures.sex.Masters', [494] 'CatFeatures.sex.Exec-managerial', [495] 'CatFeatures.sex.State-gov', [496] 'CatFeatures.sex.Wife', [497] 'CatFeatures.sex.Widowed', [498] 'CatFeatures.sex.Doctorate', [499] 'CatFeatures.sex.Asian-Pac-Islander' + [500] 'CatFeatures.sex.Tech-support', [501] 'CatFeatures.sex.Divorced', [502] 'CatFeatures.sex.Peru', [503] 'CatFeatures.sex.Separated', [504] 'CatFeatures.sex.Sales', [505] 'CatFeatures.sex.5th-6th', [506] 'CatFeatures.sex.Priv-house-serv', [507] 'CatFeatures.sex.Guatemala', [508] 'CatFeatures.sex.Self-emp-inc', [509] 'CatFeatures.sex.Assoc-voc' + [510] 'CatFeatures.sex.Mexico', [511] 'CatFeatures.sex.Transport-moving', [512] 'CatFeatures.sex.Handlers-cleaners', [513] 'CatFeatures.sex.9th', [514] 'CatFeatures.sex.Married-spouse-absent', [515] 'CatFeatures.sex.Other', [516] 'CatFeatures.sex.Dominican-Republic', [517] 'CatFeatures.sex.Armed-Forces', [518] 'CatFeatures.sex.Amer-Indian-Inuit', [519] 'CatFeatures.sex.Ireland' + [520] 'CatFeatures.sex.Germany', [521] 'CatFeatures.sex.12th', [522] 'CatFeatures.sex.Other-relative', [523] 'CatFeatures.sex.Philippines', [524] 'CatFeatures.sex.Thailand', [525] 'CatFeatures.sex.Haiti', [526] 'CatFeatures.sex.El-Salvador', [527] 'CatFeatures.sex.Puerto-Rico', [528] 'CatFeatures.sex.Vietnam', [529] 'CatFeatures.sex.1st-4th' + [530] 'CatFeatures.sex.South', [531] 'CatFeatures.sex.Married-AF-spouse', [532] 'CatFeatures.sex.Columbia', [533] 'CatFeatures.sex.Japan', [534] 'CatFeatures.sex.India', [535] 
'CatFeatures.sex.Cambodia', [536] 'CatFeatures.sex.Poland', [537] 'CatFeatures.sex.Laos', [538] 'CatFeatures.native-country-region.Private', [539] 'CatFeatures.native-country-region.11th' + [540] 'CatFeatures.native-country-region.Never-married', [541] 'CatFeatures.native-country-region.Machine-op-inspct', [542] 'CatFeatures.native-country-region.Own-child', [543] 'CatFeatures.native-country-region.Black', [544] 'CatFeatures.native-country-region.Male', [545] 'CatFeatures.native-country-region.United-States', [546] 'CatFeatures.native-country-region.HS-grad', [547] 'CatFeatures.native-country-region.Married-civ-spouse', [548] 'CatFeatures.native-country-region.Farming-fishing', [549] 'CatFeatures.native-country-region.Husband' + [550] 'CatFeatures.native-country-region.White', [551] 'CatFeatures.native-country-region.Local-gov', [552] 'CatFeatures.native-country-region.Assoc-acdm', [553] 'CatFeatures.native-country-region.Protective-serv', [554] 'CatFeatures.native-country-region.Some-college', [555] 'CatFeatures.native-country-region.?', [556] 'CatFeatures.native-country-region.Female', [557] 'CatFeatures.native-country-region.10th', [558] 'CatFeatures.native-country-region.Other-service', [559] 'CatFeatures.native-country-region.Not-in-family' + [560] 'CatFeatures.native-country-region.Unmarried', [561] 'CatFeatures.native-country-region.Self-emp-not-inc', [562] 'CatFeatures.native-country-region.Prof-school', [563] 'CatFeatures.native-country-region.Prof-specialty', [564] 'CatFeatures.native-country-region.7th-8th', [565] 'CatFeatures.native-country-region.Craft-repair', [566] 'CatFeatures.native-country-region.Federal-gov', [567] 'CatFeatures.native-country-region.Bachelors', [568] 'CatFeatures.native-country-region.Adm-clerical', [569] 'CatFeatures.native-country-region.Masters' + [570] 'CatFeatures.native-country-region.Exec-managerial', [571] 'CatFeatures.native-country-region.State-gov', [572] 'CatFeatures.native-country-region.Wife', [573] 
'CatFeatures.native-country-region.Widowed', [574] 'CatFeatures.native-country-region.Doctorate', [575] 'CatFeatures.native-country-region.Asian-Pac-Islander', [576] 'CatFeatures.native-country-region.Tech-support', [577] 'CatFeatures.native-country-region.Divorced', [578] 'CatFeatures.native-country-region.Peru', [579] 'CatFeatures.native-country-region.Separated' + [580] 'CatFeatures.native-country-region.Sales', [581] 'CatFeatures.native-country-region.5th-6th', [582] 'CatFeatures.native-country-region.Priv-house-serv', [583] 'CatFeatures.native-country-region.Guatemala', [584] 'CatFeatures.native-country-region.Self-emp-inc', [585] 'CatFeatures.native-country-region.Assoc-voc', [586] 'CatFeatures.native-country-region.Mexico', [587] 'CatFeatures.native-country-region.Transport-moving', [588] 'CatFeatures.native-country-region.Handlers-cleaners', [589] 'CatFeatures.native-country-region.9th' + [590] 'CatFeatures.native-country-region.Married-spouse-absent', [591] 'CatFeatures.native-country-region.Other', [592] 'CatFeatures.native-country-region.Dominican-Republic', [593] 'CatFeatures.native-country-region.Armed-Forces', [594] 'CatFeatures.native-country-region.Amer-Indian-Inuit', [595] 'CatFeatures.native-country-region.Ireland', [596] 'CatFeatures.native-country-region.Germany', [597] 'CatFeatures.native-country-region.12th', [598] 'CatFeatures.native-country-region.Other-relative', [599] 'CatFeatures.native-country-region.Philippines' + [600] 'CatFeatures.native-country-region.Thailand', [601] 'CatFeatures.native-country-region.Haiti', [602] 'CatFeatures.native-country-region.El-Salvador', [603] 'CatFeatures.native-country-region.Puerto-Rico', [604] 'CatFeatures.native-country-region.Vietnam', [605] 'CatFeatures.native-country-region.1st-4th', [606] 'CatFeatures.native-country-region.South', [607] 'CatFeatures.native-country-region.Married-AF-spouse', [608] 'CatFeatures.native-country-region.Columbia', [609] 'CatFeatures.native-country-region.Japan' + [610] 
'CatFeatures.native-country-region.India', [611] 'CatFeatures.native-country-region.Cambodia', [612] 'CatFeatures.native-country-region.Poland', [613] 'CatFeatures.native-country-region.Laos' diff --git a/test/BaselineOutput/Common/Command/codegen-out.cs b/test/BaselineOutput/Common/Command/codegen-out.cs new file mode 100644 index 0000000000..052a079dad --- /dev/null +++ b/test/BaselineOutput/Common/Command/codegen-out.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using Microsoft.ML.Legacy; +using Microsoft.ML.Runtime.Api; + +namespace MLGeneratedCode +{ +public class Program +{ +/// +/// This is the input to the trained model. +/// +/// In most pipelines, not all columns that are used in training are also used in scoring. Namely, the label +/// and weight columns are almost never required at scoring time. Since we don't know which columns +/// are 'optional' in this sense, all the columns are listed below. +/// +/// You are free to remove any fields from the below class. If the fields are not required for scoring, the model +/// will continue to work. Otherwise, the exception will be thrown when a prediction engine is created. +/// +/// +public class InputData +{ + public Single Label; + + [VectorType(5)] + [ColumnName("F!1")] + public Single[] Column1 = new Single[5]; + + [VectorType(4)] + public Single[] F2 = new Single[4]; +} + +/// +/// This is the output of the scored model, the prediction. +/// +/// +public class ScoredOutput +{ + public Boolean PredictedLabel; + + public Single Score; + + public Single Probability; + + // These are all remaining available columns, either supplied as the input, or intermediate + // columns generated by the transforms. Materializing these columns has a performance cost, + // so they are commented out. Feel free to uncomment any column that is useful for your scenario. 
+#if false + public Single Label; + + [VectorType(5)] + [ColumnName("F!1")] + public Single[] Column1; + + [VectorType(4)] + public Single[] F2; + + [VectorType(9)] + public Single[] Features; +#endif +} + +/*public static void Main(string[] args) +{ +string modelPath; +modelPath = "model.zip"; +PredictAsync(modelPath); +}*/ + +/// +/// This method demonstrates how to run prediction. +/// +/// +public static async void PredictAsync(string modelPath) +{ + var model = await PredictionModel.ReadAsync(modelPath); + + var inputData = new InputData(); + // TODO: populate the example's features. + + var score = model.Predict(inputData); + // TODO: consume the resulting score. + + var scores = model.Predict(new List { inputData, inputData }); + // TODO: consume the resulting scores. + } +} +} diff --git a/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs b/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs index 280451fd2b..ef45fd8bfa 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs @@ -173,7 +173,7 @@ public TestParallelFasttreeInterface(ITestOutputHelper helper) { } - [Fact] + [Fact(Skip = "'checker' is not a valid value for the 'parag' argument in FastTree")] [TestCategory("ParallelFasttree")] public void CheckFastTreeParallelInterface() { @@ -185,7 +185,8 @@ public void CheckFastTreeParallelInterface() var trainArgs = string.Format( "train data={{{0}}} loader=Text{{col=Label:0 col=F!1:1-5 col=F2:6-9}} xf=Concat{{col=Features:F!1,F2}} tr=FastTreeBinaryClassification{{lr=0.1 nl=12 mil=10 iter=1 parag=checker}} out={{{1}}}", dataPath, modelOutPath); - MainForTest(trainArgs); + var res = MainForTest(trainArgs); + Assert.Equal(0, res); } } } diff --git a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs index dbdbee29ae..795d219e44 100644 --- 
a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs +++ b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Tools; @@ -104,6 +105,7 @@ protected override void Initialize() _env = new ConsoleEnvironment(42, outWriter: LogWriter, errWriter: LogWriter) .AddStandardComponents(); ML = new MLContext(42); + ML.AddStandardComponents(); } // This method is used by subclass to dispose of disposable objects @@ -819,10 +821,9 @@ protected static StreamReader OpenReader(string path) /// This method is used in unit tests when the output is not baselined. /// If the output is to be baselined and compared, the other overload should be used. ///
- protected static int MainForTest(string args) + protected int MainForTest(string args) { - var env = new MLContext(); - return Maml.MainCore(env, args, false); + return Maml.MainCore(ML, args, false); } } diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index d410a24995..92eb3c7aa9 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Ensemble; using Microsoft.ML.Runtime.Learners; @@ -28,6 +29,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment #pragma warning disable 612 env.ComponentCatalog.RegisterAssembly(typeof(Experiment).Assembly); // ML.Legacy #pragma warning restore 612 + env.ComponentCatalog.RegisterAssembly(typeof(ComponentCreation).Assembly); // ML.Api return env; } } diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs index bc62b2c9d0..d3c3591571 100644 --- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs +++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs @@ -672,16 +672,15 @@ public void CommandShowSchema() } [TestCategory(Cat)] - [Fact(Skip = "Need CoreTLC specific baseline update")] + [Fact] public void CommandShowSchemaModel() { - string trainDataPath = GetDataPath(@"..\UCI", "adult.test.tiny"); + string trainDataPath = GetDataPath("adult.tiny.with-schema.txt"); string modelPath = ModelPath().Path; string args = string.Format( @"train data={{{0}}} loader=Text{{ - sep=, header=+ col=NumFeatures:Num:9-14 col=CatFeaturesText:TX:0~* @@ -922,29 +921,27 @@ public void CommandCrossValidationAndSave() // multiple different FastTree (Ranking and Classification for example) instances in different threads. 
// FastTree internally fails if we try to run it simultaneously and if this happens we wouldn't get model file for training. [TestCategory(Cat)] - [Fact(Skip = "Need CoreTLC specific baseline update")] + [Fact] public void CommandTrainFastTreeInDifferentThreads() { - var dataPath = GetDataPath("vw.dat"); - var firstModelOutPath = CreateOutputPath("TreeTransform-model2.zip"); - var secondModelOutPath = CreateOutputPath("TreeTransform-model1.zip"); - var trainArgs = "Train tr=SDCA loader=TextLoader{sep=space col=Label:R4:0 col=Features:R4:1 col=Name:TX:2,5-17 col=Cat:TX:3 col=Cat01:TX:4}" + "xf=CategoricalTransform{col=Cat col=Cat01} xf=Concat{col=Features:Features,Cat,Cat01} xf=TreeFeat{tr=FastTreeBinaryClassification} xf=TreeFeat" + "{tr=FastTreeRanking} xf=Concat{col=Features:Features,Leaves,Paths,Trees}"; + var dataPath = GetDataPath(TestDatasets.adult.testFilename); + var firstModelOutPath = DeleteOutputPath("TreeTransform-model2.zip"); + var secondModelOutPath = DeleteOutputPath("TreeTransform-model1.zip"); + var trainArgs = $"Train tr=SDCA {TestDatasets.adult.loaderSettings} {TestDatasets.adult.mamlExtraSettings[0]} {TestDatasets.adult.mamlExtraSettings[1]}" + + " xf=TreeFeat{tr=FastTreeBinaryClassification} xf=TreeFeat{tr=FastTreeRanking} xf=Concat{col=Features:Features,Leaves,Paths,Trees}"; - var firsttrainArgs = string.Format("{0} data={1} out={2}", trainArgs, dataPath, firstModelOutPath.Path); - var secondTrainArgs = string.Format("{0} data={1} out={2}", trainArgs, dataPath, secondModelOutPath.Path); + var firsttrainArgs = $"{trainArgs} data={dataPath} out={firstModelOutPath}"; + var secondTrainArgs = $"{trainArgs} data={dataPath} out={secondModelOutPath}"; - var t = new Task[2]; - t[0] = new Task(() => { MainForTest(firsttrainArgs); }); - t[1] = new Task(() => { MainForTest(secondTrainArgs); }); + var t = new Task[2]; + t[0] = new Task(() => MainForTest(firsttrainArgs)); + t[1] = new Task(() => MainForTest(secondTrainArgs)); t[0].Start(); t[1].Start(); 
Task.WaitAll(t); - if (!File.Exists(firstModelOutPath.Path)) - Fail("First model doesn't exist"); - if (!File.Exists(secondModelOutPath.Path)) - Fail("Second model doesn't exist"); - Done(); + Assert.Equal(0, t[0].Result); + Assert.Equal(0, t[1].Result); } [TestCategory(Cat), TestCategory("FastTree")] @@ -2096,5 +2093,32 @@ public void Datatypes() TestCore("savedata", intermediateData.Path, "loader=binary", "saver=text", textOutputPath.Arg("dout")); Done(); } + + [Fact] + public void CommandCodeGen() + { + if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + return; + + // REVIEW: this tests that the generated output matches the baseline. This does NOT baseline + // the console output. Currently, there's no console output either, but if some is added, a baseline test + // will be in order. + + // First, train a model on breast-cancer. + var dataPath = GetDataPath("breast-cancer.txt"); + var modelOutPath = DeleteOutputPath("Command", "codegen-model.zip"); + var csOutPath = DeleteOutputPath("Command", "codegen-out.cs"); + + var trainArgs = string.Format( + "train data={{{0}}} loader=Text{{col=Label:0 col=F!1:1-5 col=F2:6-9}} xf=Concat{{col=Features:F!1,F2}} tr=lr out={{{1}}}", + dataPath, modelOutPath); + MainForTest(trainArgs); + + // Now, generate the prediction code. 
+ MainForTest(string.Format("codegen in={{{0}}} cs={{{1}}} modelNameOverride=model.zip", modelOutPath, csOutPath)); + CheckEquality("Command", "codegen-out.cs"); + + Done(); + } } } From 0678df590e7b15c8151f235372c7180d998b8c0c Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Sun, 2 Dec 2018 07:54:14 -0800 Subject: [PATCH 015/100] Calibrator trainer needs to clear data so it can train another calibrator with new data (#1805) --- .../Prediction/Calibrator.cs | 4 +- .../Common/OVA/OVA-CV-iris-out.txt | 58 +++++++ .../Common/OVA/OVA-CV-iris-rp.txt | 4 + .../BaselineOutput/Common/OVA/OVA-CV-iris.txt | 151 ++++++++++++++++++ .../Common/OVA/OVA-FastForest-CV-iris-out.txt | 100 ++++++++++++ .../Common/OVA/OVA-FastForest-CV-iris-rp.txt | 4 + .../Common/OVA/OVA-FastForest-CV-iris.txt | 151 ++++++++++++++++++ .../OVA/OVA-FastForest-TrainTest-iris-out.txt | 57 +++++++ .../OVA/OVA-FastForest-TrainTest-iris-rp.txt | 4 + .../OVA/OVA-FastForest-TrainTest-iris.txt | 151 ++++++++++++++++++ .../Common/OVA/OVA-TrainTest-iris-out.txt | 36 +++++ .../Common/OVA/OVA-TrainTest-iris-rp.txt | 4 + .../Common/OVA/OVA-TrainTest-iris.txt | 151 ++++++++++++++++++ .../Common/PKPD/PKPD-CV-iris-out.txt | 70 ++++++++ .../Common/PKPD/PKPD-CV-iris-rp.txt | 4 + .../Common/PKPD/PKPD-CV-iris.txt | 151 ++++++++++++++++++ .../Common/PKPD/PKPD-TrainTest-iris-out.txt | 42 +++++ .../Common/PKPD/PKPD-TrainTest-iris-rp.txt | 4 + .../Common/PKPD/PKPD-TrainTest-iris.txt | 151 ++++++++++++++++++ .../TestPredictors.cs | 11 ++ test/Microsoft.ML.TestFramework/Learners.cs | 19 +++ 21 files changed, 1326 insertions(+), 1 deletion(-) create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt create mode 100644 
test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index 0e966a44dc..a44cf64753 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -1112,7 +1112,9 @@ public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) public ICalibrator FinishTraining(IChannel ch) { ch.Check(Data != null, "Calibrator trained on zero instances."); - return CreateCalibrator(ch); + var calibrator = CreateCalibrator(ch); + Data = null; + return calibrator; } public abstract ICalibrator CreateCalibrator(IChannel ch); diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt new file mode 100644 index 0000000000..21874465e6 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt @@ -0,0 +1,58 @@ +maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 +Not adding a normalizer. 
+Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. +Not adding a normalizer. +Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 21 | 0 | 0 | 1.0000 + 1 || 0 | 22 | 8 | 0.7333 + 2 || 0 | 0 | 28 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.7778 | +Accuracy(micro-avg): 0.898734 +Accuracy(macro-avg): 0.911111 +Log-loss: 0.372620 +Log-loss reduction: 65.736556 + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 29 | 0 | 0 | 1.0000 + 1 || 0 | 18 | 2 | 0.9000 + 2 || 0 | 0 | 22 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.9167 | +Accuracy(micro-avg): 0.971831 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.357704 +Log-loss reduction: 67.051654 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.935283 (0.0365) +Accuracy(macro-avg): 0.938889 (0.0278) +Log-loss: 0.365162 (0.0075) +Log-loss reduction: 66.394105 (0.6575) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt new file mode 100644 index 0000000000..7f9d1ab2be --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.935283 0.938889 0.365162 66.3941 AvgPer{lr=0.8} OVA 
%Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt new file mode 100644 index 0000000000..ea773d1bba --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +5 0 0 0.043587643807703136 0.957348645 0.04264102 1.03425764E-05 0 1 2 +6 0 0 0.20844569128859777 0.8118451 0.188126311 2.8563165E-05 0 1 2 +8 0 0 0.44491771498326443 0.640877 0.359043151 7.987263E-05 0 1 2 +9 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 +10 0 0 0.064111239185181926 0.937900662 0.0620922744 7.04143076E-06 0 1 2 +11 0 0 0.19511668953898065 0.822738647 0.17722775 3.361244E-05 0 1 2 +18 0 0 0.040957067767296483 0.959870338 0.0401218459 7.82396E-06 0 1 2 +20 0 0 0.12310363986545093 0.884172 0.115805365 2.26346256E-05 0 1 2 +21 0 0 0.080695089616231355 0.9224749 0.07751174 1.33279436E-05 0 1 2 +25 0 0 0.30682306393325992 0.7357808 0.2641595 5.97413928E-05 0 1 2 +28 0 0 0.13141817305409223 0.876851 0.12313617 1.279574E-05 0 1 2 +31 0 0 0.10895984751128654 0.8967664 0.103215657 1.78892569E-05 0 1 2 +32 0 0 0.035477802883361699 0.965144157 0.0348526426 3.21791072E-06 0 1 2 +35 0 0 0.20274726386806977 0.8164846 0.183501333 1.40994789E-05 0 1 2 +37 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 +40 0 0 0.12298365201239185 0.8842781 0.115710318 1.158336E-05 0 1 2 +41 0 0 0.63401679194266458 0.5304568 0.4693291 0.000214140542 0 1 2 +44 0 0 0.077454598775344219 0.925469041 0.07449977 3.11931653E-05 0 1 2 +45 0 0 0.3215830979624606 0.7250004 0.274949133 5.0463128E-05 0 1 2 +46 0 0 0.072538640149662562 0.9300298 0.06995974 1.0462416E-05 0 1 2 +48 0 0 0.070505947028000213 0.9319222 0.0680698752 7.905172E-06 0 1 2 +50 1 1 0.97585559443809855 0.376869768 0.358608037 
0.264522225 1 2 0 +51 1 1 0.820648723050456 0.440146029 0.3583106 0.201543346 1 2 0 +52 1 2 1.0835275133336952 0.5653485 0.3383997 0.0962518156 2 1 0 +54 1 2 0.75898112148691677 0.472428739 0.468143165 0.0594281144 2 1 0 +56 1 2 1.0174162545586878 0.5111817 0.36152783 0.127290443 2 1 0 +60 1 1 0.30253484094402477 0.738942742 0.209481522 0.051575724 1 2 0 +63 1 2 0.77949402405350943 0.499299049 0.458638 0.04206293 2 1 0 +64 1 1 0.45505022537231249 0.6344161 0.288069278 0.0775146 1 0 2 +66 1 1 0.7154835565078782 0.488955617 0.46298942 0.04805498 1 2 0 +68 1 1 0.68322766519277334 0.504984438 0.482306838 0.0127087329 1 2 0 +69 1 1 0.31084089328775633 0.732830465 0.183353335 0.08381622 1 2 0 +70 1 2 1.1682944017762613 0.6530067 0.310896754 0.0360965841 2 1 0 +71 1 1 0.43377030209255479 0.6480611 0.185485169 0.166453749 1 0 2 +72 1 2 0.88766165771254424 0.578003168 0.41161713 0.0103796953 2 1 0 +73 1 1 0.66368933400718488 0.514948 0.4451455 0.03990646 1 2 0 +74 1 1 0.54404239638263385 0.5803973 0.245238408 0.174364269 1 2 0 +76 1 2 0.84677727980192752 0.5165659 0.4287946 0.0546395145 2 1 0 +77 1 2 1.2789386167391619 0.688494444 0.278332561 0.0331729874 2 1 0 +79 1 1 0.34033011681215469 0.7115354 0.2354252 0.0530394167 1 0 2 +82 1 1 0.35118632841443026 0.7038526 0.15209128 0.144056112 1 2 0 +88 1 1 0.4571578145475656 0.6330804 0.217808262 0.149111286 1 2 0 +90 1 1 0.54303381152243435 0.580983 0.390706122 0.0283109024 1 2 0 +91 1 1 0.7255881783753686 0.484039783 0.444033325 0.0719268844 1 2 0 +92 1 1 0.35286862388238727 0.7026695 0.20336625 0.09396427 1 2 0 +93 1 1 0.24150358472221847 0.785446 0.122569308 0.0919846743 1 0 2 +95 1 1 0.45747345580807686 0.6328806 0.223053813 0.144065529 1 2 0 +96 1 1 0.46692162584127184 0.6269292 0.2688824 0.1041884 1 2 0 +97 1 1 0.52181706235134551 0.593441248 0.265519917 0.14103885 1 2 0 +98 1 1 0.33964763199167614 0.7120212 0.255649149 0.03232969 1 0 2 +99 1 1 0.42298578084071409 0.655087948 0.2430654 0.10184665 1 2 0 +100 2 2 
0.13591733259440952 0.8729148 0.125132382 0.00195282849 2 1 0 +102 2 2 0.13809510857610402 0.871015847 0.125785753 0.00319840666 2 1 0 +104 2 2 0.19932133588014422 0.8192866 0.178226635 0.00248679356 2 1 0 +105 2 2 0.09978434131070596 0.9050326 0.09390649 0.00106095837 2 1 0 +106 2 2 0.65516062299283195 0.519358635 0.4732639 0.00737748668 2 1 0 +108 2 2 0.36038464423836569 0.697408 0.300992548 0.00159944966 2 1 0 +109 2 2 0.042800052177573163 0.958102942 0.03757144 0.00432561943 2 1 0 +111 2 2 0.33893424257144178 0.7125293 0.282670647 0.004800048 2 1 0 +112 2 2 0.17819193567707683 0.8367818 0.156975582 0.006242614 2 1 0 +113 2 2 0.49781014911918742 0.6078603 0.388630718 0.00350892986 2 1 0 +115 2 2 0.1683952699484349 0.845019758 0.146008313 0.008971939 2 1 0 +117 2 2 0.023365514010699712 0.976905346 0.02015102 0.00294363522 2 1 0 +120 2 2 0.11133724227002473 0.894637 0.100207157 0.005155826 2 1 0 +121 2 2 0.43666882240878063 0.6461854 0.346793234 0.007021348 2 1 0 +122 2 2 0.13629671282280101 0.8725837 0.126684025 0.000732263 2 1 0 +123 2 2 0.4310483662194341 0.6498275 0.338038325 0.0121342046 2 1 0 +125 2 2 0.11330052370098145 0.8928823 0.101871319 0.0052463985 2 1 0 +128 2 2 0.27949760674881013 0.756163538 0.2411889 0.00264759478 2 1 0 +129 2 2 0.17530740569786113 0.839199 0.153847516 0.006953467 2 1 0 +131 2 2 0.031839393778411017 0.968662143 0.0223613773 0.008976495 2 1 0 +132 2 2 0.27137481365798816 0.7623307 0.235219285 0.00245000049 2 1 0 +133 2 2 0.43700297433440277 0.6459695 0.341389537 0.01264096 2 1 0 +137 2 2 0.23063259534491895 0.794031143 0.198468238 0.00750062242 2 1 0 +138 2 2 0.43845130281190237 0.6450346 0.3309319 0.0240335166 2 1 0 +141 2 2 0.17626166414917829 0.8383986 0.142890155 0.018711295 2 1 0 +144 2 2 0.099717233123952864 0.9050933 0.09041383 0.00449282629 2 1 0 +145 2 2 0.18787613378173548 0.828717351 0.161682889 0.009599784 2 1 0 +147 2 2 0.24798062433245444 0.780375063 0.20853655 0.01108838 2 1 0 +0 0 0 0.34881132522048625 0.705526233 
0.294473559 1.92144441E-07 0 1 2 +1 0 0 0.36141580969752651 0.696689248 0.303309947 7.8389877E-07 0 1 2 +2 0 0 0.35568660847624228 0.7006922 0.299307227 5.929496E-07 0 1 2 +3 0 0 0.36470718348091719 0.694399953 0.30559817 1.84990029E-06 0 1 2 +4 0 0 0.34775770739677259 0.70627 0.293729782 2.00147142E-07 0 1 2 +7 0 0 0.35382023048081196 0.702001154 0.297998428 4.17606344E-07 0 1 2 +12 0 0 0.36098727532383801 0.696987867 0.303011417 7.32556146E-07 0 1 2 +13 0 0 0.35788558263546733 0.699153066 0.300846159 7.88259E-07 0 1 2 +14 0 0 0.33437356737542145 0.715786338 0.284213632 7.41558059E-09 0 1 2 +15 0 0 0.33259729807630167 0.7170589 0.2829411 2.302074E-08 0 1 2 +16 0 0 0.33963038748907248 0.712033451 0.2879665 5.7538923E-08 0 1 2 +17 0 0 0.34952968472792562 0.7050196 0.294980139 2.583559E-07 0 1 2 +19 0 0 0.34579385759256209 0.70765835 0.292341441 2.096021E-07 0 1 2 +22 0 0 0.34605819997965914 0.7074713 0.29252857 1.20912986E-07 0 1 2 +23 0 0 0.36811690986051288 0.6920363 0.307961673 2.02298725E-06 0 1 2 +24 0 0 0.37119922981165249 0.6899065 0.310090721 2.80658514E-06 0 1 2 +26 0 0 0.35941763729273518 0.698082745 0.301916152 1.08453048E-06 0 1 2 +27 0 0 0.35009366263991337 0.7046221 0.295377672 2.18394433E-07 0 1 2 +29 0 0 0.36473963008629695 0.6943774 0.305620819 1.74348168E-06 0 1 2 +30 0 0 0.36694890288891646 0.692845047 0.307153255 1.67791779E-06 0 1 2 +33 0 0 0.33532989874849606 0.715102136 0.284897834 2.06016182E-08 0 1 2 +34 0 0 0.36074853902438908 0.6971543 0.302845 6.899852E-07 0 1 2 +36 0 0 0.3442039710581144 0.708784342 0.2912156 5.37456621E-08 0 1 2 +38 0 0 0.36249420192434484 0.695938349 0.304059923 1.72375007E-06 0 1 2 +39 0 0 0.35302378267720547 0.7025605 0.2974392 3.31980715E-07 0 1 2 +42 0 0 0.35802404250832931 0.699056268 0.30094254 1.18013247E-06 0 1 2 +43 0 0 0.35964093968844252 0.6979269 0.3020715 1.62606943E-06 0 1 2 +47 0 0 0.35894549345005311 0.6984124 0.301586539 1.06725963E-06 0 1 2 +49 0 0 0.35354270878931848 0.702196 0.2978036 3.52685419E-07 
0 1 2 +53 1 1 0.43814530313713385 0.645232 0.30703035 0.0477376431 1 2 0 +55 1 1 0.52991693789558192 0.588653862 0.3812489 0.0300972275 1 2 0 +57 1 1 0.3144098829942828 0.730219662 0.222309768 0.047470592 1 0 2 +58 1 1 0.16338480577647163 0.8492643 0.10643021 0.0443054661 1 2 0 +59 1 1 0.43732800775193598 0.6457596 0.2802832 0.0739572 1 2 0 +61 1 1 0.26826825521335035 0.7647026 0.165891945 0.06940546 1 2 0 +62 1 1 0.16519114388964468 0.84773165 0.07620503 0.0760633051 1 2 0 +65 1 1 0.12274628026245138 0.884488046 0.0737581253 0.0417538173 1 0 2 +67 1 1 0.18131529311314101 0.8341723 0.09860581 0.0672218949 1 0 2 +75 1 1 0.13597202822637602 0.872867048 0.06550142 0.0616315342 1 0 2 +78 1 1 0.45281685214847861 0.6358346 0.332585216 0.0315802023 1 2 0 +80 1 1 0.23111914057709354 0.7936449 0.105801627 0.10055349 1 0 2 +81 1 1 0.21336965456040224 0.807857454 0.137252137 0.0548904277 1 0 2 +83 1 2 1.1418187241491273 0.6765539 0.3192379 0.00420819456 2 1 0 +84 1 2 0.79904921193122347 0.5272309 0.449756384 0.0230127368 2 1 0 +85 1 1 0.32376577391953759 0.723419666 0.223107859 0.05347247 1 2 0 +86 1 1 0.19858499419365563 0.8198901 0.142534047 0.037575867 1 2 0 +87 1 1 0.32077518585346926 0.725586355 0.24517718 0.0292364415 1 2 0 +89 1 1 0.36379979176298916 0.695030332 0.242874637 0.06209502 1 2 0 +94 1 1 0.37955450575815275 0.684166133 0.262924 0.0529098734 1 2 0 +101 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 +103 2 2 0.33454245635946434 0.71566546 0.282688916 0.00164566189 2 1 0 +107 2 2 0.32176538137222749 0.724868238 0.27461502 0.000516714761 2 1 0 +110 2 2 0.4735643051120339 0.622778535 0.371121377 0.006100062 2 1 0 +114 2 2 0.3247388954422577 0.722716033 0.2752217 0.00206224318 2 1 0 +116 2 2 0.36482478597312679 0.6943183 0.3033064 0.00237530912 2 1 0 +118 2 2 0.30234231913723036 0.739085 0.260826528 8.847632E-05 2 1 0 +119 2 2 0.37792268388420569 0.6852835 0.3114479 0.0032686044 2 1 0 +124 2 2 0.33777190179266953 0.713358 0.284956157 
0.00168584171 2 1 0 +126 2 2 0.51721400727433164 0.5961792 0.395325035 0.008495758 2 1 0 +127 2 2 0.48223827874761288 0.617399931 0.374503255 0.008096788 2 1 0 +130 2 2 0.33112825051245398 0.718113065 0.281247973 0.0006389589 2 1 0 +134 2 2 0.34240991810493487 0.7100571 0.288253874 0.00168902089 2 1 0 +135 2 2 0.3238549270121831 0.7233552 0.2760729 0.0005719304 2 1 0 +136 2 2 0.31869296062169472 0.727098763 0.271312952 0.00158829393 2 1 0 +139 2 2 0.39792518591800413 0.6717123 0.325529337 0.002758371 2 1 0 +140 2 2 0.32133155702629967 0.7251828 0.273566216 0.0012509838 2 1 0 +142 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 +143 2 2 0.31559105025409723 0.72935766 0.269767135 0.0008751799 2 1 0 +146 2 2 0.3760214987664387 0.6865876 0.310142934 0.00326948427 2 1 0 +148 2 2 0.3305544580259554 0.718525231 0.2789816 0.00249314215 2 1 0 +149 2 2 0.37408822283240173 0.6879162 0.307514042 0.00456974143 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt new file mode 100644 index 0000000000..1a5da994b3 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt @@ -0,0 +1,100 @@ +maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1 +Not adding a normalizer. +Training learner 0 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 71 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 1 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 71 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... 
+Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 2 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 71 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Not training a calibrator because it is not needed. +Not adding a normalizer. +Training learner 0 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 79 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 1 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 79 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 2 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 79 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Not training a calibrator because it is not needed. 
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 21 | 0 | 0 | 1.0000 + 1 || 0 | 25 | 5 | 0.8333 + 2 || 0 | 1 | 27 | 0.9643 + ||======================== +Precision ||1.0000 |0.9615 |0.8438 | +Accuracy(micro-avg): 0.924051 +Accuracy(macro-avg): 0.932540 +Log-loss: 0.197783 +Log-loss reduction: 81.813342 + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 29 | 0 | 0 | 1.0000 + 1 || 0 | 19 | 1 | 0.9500 + 2 || 0 | 2 | 20 | 0.9091 + ||======================== +Precision ||1.0000 |0.9048 |0.9524 | +Accuracy(micro-avg): 0.957746 +Accuracy(macro-avg): 0.953030 +Log-loss: 0.103360 +Log-loss reduction: 90.479422 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.940899 (0.0168) +Accuracy(macro-avg): 0.942785 (0.0102) +Log-loss: 0.150571 (0.0472) +Log-loss reduction: 86.146382 (4.3330) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt new file mode 100644 index 0000000000..56ab2aa973 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.940899 0.942785 0.150571 86.14639 FastForest{} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:FastForest{} + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt new file mode 100644 index 0000000000..8e1c60c346 --- /dev/null +++ 
b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +5 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +6 0 0 0.023177365350252416 0.977089167 0.01466024 0.008250585 0 2 1 +8 0 0 0.03879484521448328 0.961948037 0.0225592032 0.0154927764 0 1 2 +9 0 0 0.026096982166513804 0.9742406 0.0147748869 0.01098449 0 2 1 +10 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +11 0 0 0.023212076237087643 0.977055252 0.0146819549 0.008262806 0 2 1 +18 0 0 0.028815318823320137 0.9715959 0.0160096437 0.0123944618 0 2 1 +20 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +21 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 2 1 +25 0 0 0.028704838697499659 0.971703231 0.0150171826 0.0132795852 0 2 1 +28 0 0 0.024710550769104819 0.975592256 0.0147331525 0.009674597 0 2 1 +31 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +32 0 0 0.024756068218349039 0.97554785 0.0147599624 0.009692201 0 2 1 +35 0 0 0.023464117069036436 0.976809 0.0147707965 0.008420168 0 2 1 +37 0 0 0.026096982166513804 0.9742406 0.0147748869 0.01098449 0 2 1 +40 0 0 0.023182306545303632 0.977084339 0.01466333 0.008252324 0 2 1 +41 0 0 0.075381369255567446 0.927389741 0.0563764051 0.0162338577 0 1 2 +44 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 2 1 +45 0 0 0.028697907258661438 0.971709967 0.0150136231 0.0132764373 0 2 1 +46 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 2 1 +48 0 0 0.024756068218349039 0.97554785 0.0147599624 0.009692201 0 2 1 +50 1 1 0.063133538332774491 0.9388181 0.03674654 0.02443534 1 2 0 +51 1 1 0.063273160443441889 0.938687 0.036751762 0.0245612152 1 2 0 +52 1 2 1.5448652812576653 0.7616039 0.21334061 0.02505552 2 1 0 +54 1 1 0.058659395969803671 0.9430279 0.03797895 0.0189931467 1 2 0 +56 1 1 0.062932806186675683 0.939006567 0.0365141444 0.0244792849 1 2 0 
+60 1 1 0.10446634255003935 0.9008051 0.08302587 0.0161690265 1 0 2 +63 1 1 0.05614180204487236 0.945405066 0.0355887674 0.0190061387 1 2 0 +64 1 1 0.046482192945595177 0.954581559 0.0288921539 0.0165262986 1 0 2 +66 1 1 0.04066444950323992 0.960151255 0.0232533664 0.016595358 1 0 2 +68 1 1 0.056871265867723821 0.9447157 0.03689458 0.0183897614 1 2 0 +69 1 1 0.039875310060784264 0.960909247 0.0222220439 0.0168687385 1 0 2 +70 1 2 2.2263563135006348 0.867944062 0.107920945 0.0241349712 2 1 0 +71 1 1 0.059013852579431925 0.9426937 0.0366258025 0.0206804741 1 2 0 +72 1 2 1.0876589583679515 0.645501137 0.3370045 0.0174943563 2 1 0 +73 1 1 0.056708184586295521 0.944869757 0.0361831523 0.01894711 1 2 0 +74 1 1 0.057253869975114946 0.9443543 0.0349945128 0.02065122 1 2 0 +76 1 2 0.89537371392369014 0.5719312 0.408454925 0.0196138732 2 1 0 +77 1 2 1.9303219693474667 0.831519544 0.145101473 0.0233789887 2 1 0 +79 1 1 0.05999253183033975 0.941771567 0.0405655652 0.01766284 1 0 2 +82 1 1 0.050023969260699123 0.9512066 0.02679572 0.02199766 1 2 0 +88 1 1 0.045065919607380718 0.955934465 0.02764862 0.0164169129 1 0 2 +90 1 1 0.03943592815149323 0.961331546 0.02179231 0.0168761518 1 0 2 +91 1 1 0.058344871168396539 0.943324566 0.0352619849 0.0214134734 1 2 0 +92 1 1 0.050023969260699123 0.9512066 0.02679572 0.02199766 1 2 0 +93 1 1 0.10446634255003935 0.9008051 0.08302587 0.0161690265 1 0 2 +95 1 1 0.044750654086148499 0.9562359 0.0272751376 0.0164890066 1 0 2 +96 1 1 0.040619629909546248 0.9601943 0.02274274 0.0170629937 1 0 2 +97 1 1 0.057039737516047594 0.944556534 0.0348673128 0.0205761548 1 2 0 +98 1 1 0.10393389655676435 0.9012849 0.0826243 0.0160908233 1 0 2 +99 1 1 0.042296455668819607 0.95858556 0.0228082649 0.0186061822 1 0 2 +100 2 2 0.04325137116698035 0.9576706 0.0220750477 0.0202543456 2 0 1 +102 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +104 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +105 2 2 0.042999956444440428 
0.957911432 0.02217581 0.01991274 2 1 0 +106 2 1 4.0557668267792941 0.9352778 0.04739997 0.017322192 1 0 2 +108 2 2 0.070150335263153035 0.932253659 0.04979825 0.0179480817 2 1 0 +109 2 2 0.043858138364186193 0.9570897 0.0223862287 0.0205240529 2 0 1 +111 2 2 0.070161652013398079 0.9322431 0.0498059951 0.0179508738 2 1 0 +112 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +113 2 2 0.11690579430486296 0.889669 0.09017117 0.0201598033 2 1 0 +115 2 2 0.043217015723945179 0.957703531 0.0220578834 0.0202385988 2 0 1 +117 2 2 0.043858138364186193 0.9570897 0.0223862287 0.0205240529 2 0 1 +120 2 2 0.043209920725147559 0.9577103 0.02205432 0.02023533 2 0 1 +121 2 2 0.098561098118739199 0.9061403 0.0728506148 0.02100908 2 1 0 +122 2 2 0.0471715874197976 0.9539237 0.0277284756 0.0183478333 2 1 0 +123 2 2 0.099357467789256262 0.905419 0.0767814144 0.0177995767 2 1 0 +125 2 2 0.043416567622914827 0.957512438 0.0220497642 0.0204377677 2 0 1 +128 2 2 0.047179147971301752 0.9539165 0.0277327932 0.01835069 2 1 0 +129 2 2 0.16789945151543201 0.845438838 0.13190572 0.0226554666 2 1 0 +131 2 2 0.043858138364186193 0.9570897 0.0223862287 0.0205240529 2 0 1 +132 2 2 0.047179147971301752 0.9539165 0.0277327932 0.01835069 2 1 0 +133 2 2 0.20824894842479216 0.812004864 0.168109149 0.0198859684 2 1 0 +137 2 2 0.044668440799718316 0.9563145 0.02215964 0.0215258449 2 1 0 +138 2 2 0.15583318384491099 0.8557019 0.124324195 0.0199738927 2 1 0 +141 2 2 0.044522979110667826 0.9564536 0.0219786763 0.0215677191 2 1 0 +144 2 2 0.043244151446937412 0.957677543 0.0220714156 0.0202510133 2 0 1 +145 2 2 0.043081348153097068 0.957833469 0.0222168975 0.0199496336 2 1 0 +147 2 2 0.043081348153097068 0.957833469 0.0222168975 0.0199496336 2 1 0 +0 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +1 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +2 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +3 0 0 0.028429764932604316 0.971970558 
0.01715604 0.0108733922 0 2 1 +4 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +7 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +12 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +13 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +14 0 0 0.043610929364968049 0.957326353 0.0244053546 0.0182683 0 1 2 +15 0 0 0.042852621944898722 0.9580526 0.0241116975 0.0178357288 0 1 2 +16 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +17 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +19 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +22 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +23 0 0 0.027786259158249255 0.9725962 0.0171605088 0.0102432566 0 2 1 +24 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +26 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +27 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +29 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +30 0 0 0.028429764932604316 0.971970558 0.01715604 0.0108733922 0 2 1 +33 0 0 0.030899673422883547 0.969572842 0.0168408789 0.013586306 0 2 1 +34 0 0 0.028429764932604316 0.971970558 0.01715604 0.0108733922 0 2 1 +36 0 0 0.030899673422883547 0.969572842 0.0168408789 0.013586306 0 2 1 +38 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +39 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +42 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +43 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +47 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +49 0 0 0.027755249905110149 0.9726264 0.0171416216 0.0102319829 0 2 1 +53 1 1 0.037556735021321959 0.9631398 0.0201520827 0.0167081431 1 2 0 +55 1 1 0.044305386985996155 0.956661761 0.0299111586 0.0134270638 1 2 0 +57 1 1 0.093536483034406898 0.9107048 0.07308376 0.0162114669 1 0 2 +58 1 
1 0.061044837158525642 0.940781057 0.04666032 0.012558599 1 2 0 +59 1 1 0.057238090893367552 0.9443692 0.0346155576 0.02101522 1 0 2 +61 1 1 0.045384715012467255 0.955629766 0.0309758075 0.0133944545 1 2 0 +62 1 1 0.033237106686933228 0.9673092 0.01762867 0.0150621245 1 2 0 +65 1 1 0.05778179219177005 0.9438559 0.04312986 0.01301424 1 2 0 +67 1 1 0.033103587181904308 0.96743834 0.0176091753 0.0149524612 1 2 0 +75 1 1 0.058650104789816478 0.9430367 0.0440750532 0.0128882658 1 2 0 +78 1 1 0.056192871154629086 0.9453568 0.04159324 0.0130499722 1 2 0 +80 1 1 0.036130172143532695 0.964514732 0.0192673262 0.0162179433 1 0 2 +81 1 1 0.036130172143532695 0.964514732 0.0192673262 0.0162179433 1 0 2 +83 1 2 1.2480745375737912 0.69912 0.287056983 0.013823038 2 1 0 +84 1 1 0.083086720865945554 0.920271337 0.04498237 0.0347462669 1 2 0 +85 1 1 0.17039047641137059 0.84333545 0.141331524 0.0153330155 1 2 0 +86 1 1 0.080997463473013107 0.922196031 0.06443846 0.0133655285 1 2 0 +87 1 1 0.051908113898915885 0.9494161 0.03799608 0.01258784 1 2 0 +89 1 1 0.037556735021321959 0.9631398 0.0201520827 0.0167081431 1 2 0 +94 1 1 0.035086091646095253 0.9655223 0.0208149366 0.0136627667 1 2 0 +101 2 2 0.049619880098472589 0.9515911 0.0341902152 0.014218702 2 1 0 +103 2 2 0.03226036779292963 0.968254447 0.018891599 0.0128539279 2 1 0 +107 2 2 0.027233200921188156 0.9731343 0.0140761612 0.0127895456 2 1 0 +110 2 2 0.028649511205966538 0.971757 0.015087897 0.0131550925 2 1 0 +114 2 2 0.047580126549381846 0.953534067 0.03241034 0.0140556078 2 1 0 +116 2 2 0.026689263578566873 0.973663747 0.0134749562 0.0128613133 2 1 0 +118 2 2 0.024777391870297136 0.975527048 0.0127499122 0.0117230108 2 0 1 +119 2 1 1.7884441874760408 0.819463134 0.167220131 0.0133167468 1 2 0 +124 2 2 0.022308948384583055 0.977938056 0.0128816022 0.009180347 2 0 1 +126 2 2 0.58862166918290049 0.555091858 0.433407664 0.0115004806 2 1 0 +127 2 2 0.36319554960125283 0.6954504 0.293058 0.011491593 2 1 0 +130 2 2 
0.026158164658917963 0.974181 0.0130336434 0.0127853788 2 1 0 +134 2 1 0.71868836890731069 0.493089527 0.4873911 0.019519357 1 2 0 +135 2 2 0.02241305521632829 0.977836251 0.0127510056 0.0094127655 2 0 1 +136 2 2 0.026674020694690079 0.9736786 0.0157151483 0.0106062358 2 0 1 +139 2 2 0.024212560266198765 0.9760782 0.0129676694 0.010954137 2 0 1 +140 2 2 0.023236661302344995 0.977031231 0.0128853433 0.0100834481 2 0 1 +142 2 2 0.049619880098472589 0.9515911 0.0341902152 0.014218702 2 1 0 +143 2 2 0.022719770126189896 0.9775364 0.012872614 0.009590993 2 0 1 +146 2 2 0.17992962432883589 0.835329 0.1525824 0.0120885922 2 1 0 +148 2 2 0.033195268246711006 0.967349648 0.0165074 0.0161429718 2 0 1 +149 2 2 0.04654326171654332 0.954523265 0.03170268 0.0137740513 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt new file mode 100644 index 0000000000..685dbd1c35 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt @@ -0,0 +1,57 @@ +maml.exe TrainTest test=%Data% tr=OVA{p=FastForest{ }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 +Not adding a normalizer. +Training learner 0 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 1 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 3 of the boosting iterations failed to grow a tree. 
This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 2 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Not training a calibrator because it is not needed. + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 50 | 0 | 0 | 1.0000 + 1 || 0 | 48 | 2 | 0.9600 + 2 || 0 | 2 | 48 | 0.9600 + ||======================== +Precision ||1.0000 |0.9600 |0.9600 | +Accuracy(micro-avg): 0.973333 +Accuracy(macro-avg): 0.973333 +Log-loss: 0.088201 +Log-loss reduction: 91.971614 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.973333 (0.0000) +Accuracy(macro-avg): 0.973333 (0.0000) +Log-loss: 0.088201 (0.0000) +Log-loss reduction: 91.971614 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt new file mode 100644 index 0000000000..ccedd71525 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.973333 0.973333 0.088201 91.97161 FastForest{} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=FastForest{ }} norm=No dout=%Output% 
data=%Data% out=%Output% seed=1 /p:FastForest{} + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt new file mode 100644 index 0000000000..f6208a78c9 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +0 0 0 0.017464080270232062 0.982687533 0.0105327293 0.006779738 0 2 1 +1 0 0 0.019626027175872099 0.9805653 0.0113047613 0.008129944 0 2 1 +2 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +3 0 0 0.018512197155923133 0.9816581 0.0112163927 0.00712551735 0 2 1 +4 0 0 0.017492831024486424 0.9826593 0.0106203947 0.006720314 0 2 1 +5 0 0 0.01899702913087822 0.9811823 0.0107108289 0.008106917 0 2 1 +6 0 0 0.017808415781631996 0.9823492 0.0106611857 0.00698957127 0 2 1 +7 0 0 0.017417437872362801 0.982733369 0.0104801161 0.006786526 0 2 1 +8 0 0 0.040166580610096203 0.9606294 0.0283070058 0.0110635748 0 1 2 +9 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +10 0 0 0.017611300130802875 0.9825429 0.0106790941 0.00677801576 0 2 1 +11 0 0 0.01778869640055902 0.9823686 0.0106666889 0.00696469564 0 2 1 +12 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +13 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +14 0 0 0.026412295611523272 0.973933458 0.01333986 0.01272667 0 1 2 +15 0 0 0.025199192606323598 0.975115657 0.0133637665 0.0115205906 0 1 2 +16 0 0 0.018670259471363557 0.98150295 0.0106890509 0.0078079775 0 2 1 +17 0 0 0.017685676541357578 0.9824698 0.0105369147 0.006993265 0 2 1 +18 0 0 0.025319862054157889 0.974998 0.0134827839 0.0115192011 0 1 2 +19 0 0 0.017769887507106903 0.982387066 0.0106839687 0.006928955 0 2 1 +20 0 0 0.018154812191360413 0.982009 0.0104912994 0.00749973 0 2 1 +21 0 0 0.018599999721891122 0.9815719 0.0106898025 0.007738281 0 2 1 +22 0 0 
0.017675059667948433 0.9824802 0.01080584 0.006713943 0 2 1 +23 0 0 0.019144535618423879 0.981037557 0.010790579 0.0081718415 0 2 1 +24 0 0 0.01786921459800335 0.9822895 0.0106762443 0.007034263 0 2 1 +25 0 0 0.019668760648275045 0.9805234 0.0111163342 0.008360277 0 2 1 +26 0 0 0.01871422746097379 0.9814598 0.0105026765 0.008037531 0 2 1 +27 0 0 0.017464080270232062 0.982687533 0.0105327293 0.006779738 0 2 1 +28 0 0 0.017410644882776549 0.982740045 0.0104760742 0.006783909 0 2 1 +29 0 0 0.018627325744847049 0.9815451 0.01122066 0.007234277 0 2 1 +30 0 0 0.018712891388611139 0.9814611 0.0112197 0.007319177 0 2 1 +31 0 0 0.018981599313886478 0.9811974 0.0104813036 0.008321293 0 2 1 +32 0 0 0.017550274352973697 0.982602835 0.010679746 0.00671739224 0 2 1 +33 0 0 0.021116150761496876 0.979105234 0.0108751021 0.0100196926 0 2 1 +34 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +35 0 0 0.018250049751936549 0.9819155 0.0110310521 0.007053445 0 2 1 +36 0 0 0.021060206686807477 0.97916 0.0107265348 0.0101134721 0 2 1 +37 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +38 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +39 0 0 0.017410644882776549 0.982740045 0.0104760742 0.006783909 0 2 1 +40 0 0 0.017692532068078491 0.982463062 0.0105409911 0.00699597038 0 2 1 +41 0 0 0.043281619871080874 0.957641661 0.0299625713 0.0123957563 0 1 2 +42 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +43 0 0 0.01876481733250969 0.981410146 0.0105582057 0.00803162251 0 2 1 +44 0 0 0.018924134452175709 0.9812538 0.0107116094 0.008034597 0 2 1 +45 0 0 0.019874004021370295 0.9803222 0.0112994574 0.008378385 0 2 1 +46 0 0 0.017750532918328971 0.9824061 0.0106895706 0.00690434966 0 2 1 +47 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +48 0 0 0.017550274352973697 0.982602835 0.010679746 0.00671739224 0 2 1 +49 0 0 0.017778139107128599 0.98237896 0.01077697 0.006844043 0 2 1 +50 1 1 0.041011093232678458 
0.9598185 0.0318259969 0.008355537 1 2 0 +51 1 1 0.035006088801506997 0.965599537 0.0262838528 0.008116591 1 2 0 +52 1 1 0.40445010925845948 0.6673437 0.325442761 0.00721360464 1 2 0 +53 1 1 0.023231963861032118 0.9770358 0.0135212354 0.00944295153 1 2 0 +54 1 1 0.037938333582228756 0.9627723 0.0292570144 0.007970667 1 2 0 +55 1 1 0.026037026955485622 0.974299 0.01712974 0.008571236 1 2 0 +56 1 1 0.044224954690087047 0.9567387 0.034255214 0.009006079 1 2 0 +57 1 1 0.03303643367073926 0.9675033 0.0199500173 0.0125466976 1 0 2 +58 1 1 0.026098634044639583 0.974239 0.0178385321 0.007922452 1 2 0 +59 1 1 0.034668122729441544 0.965925932 0.0176332947 0.0164408013 1 2 0 +60 1 1 0.028925872510284695 0.9714885 0.0161692444 0.01234228 1 0 2 +61 1 1 0.032514946101960943 0.968008 0.0239325147 0.008059508 1 2 0 +62 1 1 0.024235704335496926 0.9760556 0.0151068689 0.008837529 1 2 0 +63 1 1 0.035760629302617933 0.9648712 0.02719415 0.007934644 1 2 0 +64 1 1 0.022137329869205608 0.9781059 0.0122523606 0.009641748 1 2 0 +65 1 1 0.02697255414826619 0.973387957 0.0186308678 0.007981158 1 2 0 +66 1 1 0.036729417078252675 0.9639369 0.02751693 0.008546158 1 2 0 +67 1 1 0.022503395570932342 0.9777479 0.013421339 0.008830723 1 2 0 +68 1 1 0.038855074517123001 0.9618901 0.0301695466 0.007940366 1 2 0 +69 1 1 0.023358863535601033 0.976911843 0.0131962541 0.009891883 1 2 0 +70 1 2 2.0041753304058187 0.856793344 0.134771392 0.008435239 2 1 0 +71 1 1 0.023006329648145647 0.9772563 0.014135709 0.00860798452 1 2 0 +72 1 1 0.43301575781706009 0.6485503 0.343778223 0.007671475 1 2 0 +73 1 1 0.028976552148168057 0.971439242 0.02040257 0.008158188 1 2 0 +74 1 1 0.022902770892273123 0.9773575 0.0147489849 0.007893479 1 2 0 +75 1 1 0.02691058704543008 0.9734483 0.01860106 0.007950675 1 2 0 +76 1 1 0.18688860192812912 0.82953614 0.162185773 0.008278089 1 2 0 +77 1 2 0.92579454557270313 0.5967647 0.396216482 0.00701882 2 1 0 +78 1 1 0.037027566079820731 0.9636496 0.028475076 0.007875373 1 2 0 +79 1 1 
0.022937167310755541 0.9773239 0.0127002485 0.009975866 1 2 0 +80 1 1 0.022877401242447332 0.9773823 0.0125660188 0.0100516649 1 2 0 +81 1 1 0.022877401242447332 0.9773823 0.0125660188 0.0100516649 1 2 0 +82 1 1 0.023404014419184039 0.976867735 0.0133821117 0.009750146 1 2 0 +83 1 1 0.70075639726830674 0.49620983 0.496017843 0.007772315 1 2 0 +84 1 1 0.044236542522837867 0.9567276 0.0263874587 0.0168849323 1 2 0 +85 1 1 0.043024535046640382 0.9578879 0.03138008 0.01073204 1 2 0 +86 1 1 0.042293035786450502 0.958588839 0.0332719 0.008139238 1 2 0 +87 1 1 0.025156527765232962 0.975157261 0.0169389956 0.0079037305 1 2 0 +88 1 1 0.022358318540260842 0.9778898 0.0133089311 0.008801297 1 2 0 +89 1 1 0.023121976834571175 0.9771433 0.0134127168 0.009443991 1 2 0 +90 1 1 0.022650566634269925 0.977604032 0.0135507975 0.008845187 1 2 0 +91 1 1 0.030649193522063412 0.969815731 0.0222469531 0.00793733 1 2 0 +92 1 1 0.023666418009492051 0.976611435 0.0139777046 0.009410832 1 2 0 +93 1 1 0.032170618995321684 0.968341351 0.0193278752 0.0123307668 1 0 2 +94 1 1 0.023670568200763999 0.9766074 0.0147704128 0.008622234 1 2 0 +95 1 1 0.022473768887261287 0.9777769 0.0132312737 0.008991833 1 2 0 +96 1 1 0.022786722203040281 0.977470934 0.013828597 0.008700479 1 2 0 +97 1 1 0.022998705699126043 0.977263749 0.0148692625 0.00786699 1 2 0 +98 1 1 0.1831783948365549 0.8326196 0.154965982 0.0124144387 1 0 2 +99 1 1 0.023075496769295524 0.9771887 0.0140385861 0.008772707 1 2 0 +100 2 2 0.021566800361238793 0.9786641 0.0126012722 0.008734606 2 1 0 +101 2 2 0.042422564225694827 0.9584647 0.03307351 0.008461786 2 1 0 +102 2 2 0.016595760358213897 0.9835412 0.008572242 0.007886566 2 1 0 +103 2 2 0.022044646180015223 0.978196561 0.0139042465 0.007899184 2 1 0 +104 2 2 0.016709516937862282 0.9834293 0.008683964 0.007886705 2 1 0 +105 2 2 0.016595760358213897 0.9835412 0.008572242 0.007886566 2 1 0 +106 2 1 2.1090311772834194 0.862226963 0.121355481 0.0164175835 1 2 0 +107 2 2 0.020456222190452011 
0.9797516 0.0123546757 0.00789376 2 1 0 +108 2 2 0.021078468826306502 0.9791421 0.0129446462 0.00791325 2 1 0 +109 2 2 0.019468846928401097 0.980719447 0.0106313359 0.008649208 2 0 1 +110 2 2 0.022319614569829012 0.9779276 0.0139611857 0.008111184 2 1 0 +111 2 2 0.019269823866623015 0.980914652 0.0112084318 0.007876894 2 1 0 +112 2 2 0.016977687007301158 0.9831656 0.00894971 0.007884688 2 1 0 +113 2 2 0.058042636346825223 0.9436097 0.047512807 0.008877498 2 1 0 +114 2 2 0.041556106366798046 0.9592955 0.0322531648 0.008451329 2 1 0 +115 2 2 0.019791317974830646 0.980403244 0.0115674054 0.008029342 2 1 0 +116 2 2 0.020432982922414907 0.979774356 0.0123304194 0.007895238 2 1 0 +117 2 2 0.019466962860278662 0.9807213 0.0106303059 0.008648369 2 0 1 +118 2 2 0.0188647899585574 0.981312037 0.0108158737 0.007872073 2 1 0 +119 2 1 0.94392904693686375 0.6026611 0.389096051 0.008242885 1 2 0 +120 2 2 0.017366248980001619 0.9827837 0.009171124 0.008045211 2 1 0 +121 2 2 0.088866562817115968 0.914967656 0.07653753 0.008494817 2 1 0 +122 2 2 0.018631090722093534 0.9815414 0.0105879167 0.007870668 2 1 0 +123 2 2 0.06145008441574909 0.9403999 0.05166457 0.007935554 2 1 0 +124 2 2 0.018444680647443288 0.9817244 0.009521433 0.008754187 2 1 0 +125 2 2 0.020508786296772101 0.9797001 0.01224504 0.008054874 2 1 0 +126 2 2 0.11925013225824062 0.887585759 0.104224905 0.008189328 2 1 0 +127 2 2 0.10605093599317304 0.899378836 0.0929255262 0.00769562228 2 1 0 +128 2 2 0.018565509274527169 0.981605768 0.010519552 0.007874679 2 1 0 +129 2 2 0.39821823307895049 0.671515465 0.320187718 0.008296814 2 1 0 +130 2 2 0.018932334838268155 0.981245756 0.0108812321 0.007873024 2 1 0 +131 2 2 0.019466962860278662 0.9807213 0.0106303059 0.008648369 2 0 1 +132 2 2 0.018565509274527169 0.981605768 0.010519552 0.007874679 2 1 0 +133 2 2 0.62873836754113976 0.53326416 0.458078653 0.008657171 2 1 0 +134 2 2 0.60851248990646989 0.5441597 0.448350728 0.00748954341 2 1 0 +135 2 2 0.016595760358213897 0.9835412 
0.008572242 0.007886566 2 1 0 +136 2 2 0.022577039360831119 0.9776759 0.0117904674 0.0105336225 2 1 0 +137 2 2 0.022975285291276865 0.977286637 0.0148325013 0.007880885 2 1 0 +138 2 2 0.16372490438447862 0.848975539 0.142925009 0.008099449 2 1 0 +139 2 2 0.017566167387754482 0.9825872 0.00952361 0.0078891525 2 1 0 +140 2 2 0.017952470071537514 0.9822077 0.009910649 0.007881647 2 1 0 +141 2 2 0.02156308521875348 0.978667736 0.0133916177 0.007940661 2 1 0 +142 2 2 0.042422564225694827 0.9584647 0.03307351 0.008461786 2 1 0 +143 2 2 0.01734514342237381 0.9828044 0.009152353 0.008043245 2 1 0 +144 2 2 0.018444680647443288 0.9817244 0.009521433 0.008754187 2 1 0 +145 2 2 0.018486635063884913 0.9816832 0.0104312422 0.007885565 2 1 0 +146 2 2 0.035952581762754975 0.964686036 0.0272667371 0.008047254 2 1 0 +147 2 2 0.018195479765991927 0.981969059 0.0101450672 0.007885858 2 1 0 +148 2 2 0.02575522444396355 0.9745736 0.0148916543 0.0105347475 2 1 0 +149 2 2 0.043968997509596895 0.9569836 0.0350129567 0.008003409 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt new file mode 100644 index 0000000000..4d9f2f452b --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt @@ -0,0 +1,36 @@ +maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 +Not adding a normalizer. +Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. 
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 50 | 0 | 0 | 1.0000 + 1 || 0 | 48 | 2 | 0.9600 + 2 || 0 | 0 | 50 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.9615 | +Accuracy(micro-avg): 0.986667 +Accuracy(macro-avg): 0.986667 +Log-loss: 0.246444 +Log-loss reduction: 77.567746 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.986667 (0.0000) +Accuracy(macro-avg): 0.986667 (0.0000) +Log-loss: 0.246444 (0.0000) +Log-loss reduction: 77.567746 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt new file mode 100644 index 0000000000..971b18dd55 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.986667 0.986667 0.246444 77.56775 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt new file mode 100644 index 0000000000..cbb0c14c4f --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +0 0 0 0.064187757670407464 0.9378289 0.0621710941 8.6083265E-09 0 1 2 +1 0 0 0.1893002350721239 0.827538 0.172461912 5.35715046E-08 0 1 2 +2 0 0 0.13195967886821539 0.876376331 0.123623639 3.315678E-08 0 1 2 +3 0 0 
0.18799826814680765 0.828616142 0.1713837 1.33305463E-07 0 1 2 +4 0 0 0.053931560367015983 0.947496951 0.0525030419 8.35157454E-09 0 1 2 +5 0 0 0.024550920036862604 0.975748 0.0242519863 8.78616E-09 0 1 2 +6 0 0 0.092104047221280363 0.912010252 0.08798967 5.086476E-08 0 1 2 +7 0 0 0.0890750453193484 0.9147769 0.08522307 2.25849668E-08 0 1 2 +8 0 0 0.2649651905225579 0.767232656 0.23276712 2.323326E-07 0 1 2 +9 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +10 0 0 0.038712934227392123 0.962026834 0.037973173 3.67671626E-09 0 1 2 +11 0 0 0.10398316683257577 0.901240468 0.09875945 5.7562815E-08 0 1 2 +12 0 0 0.20701490311777249 0.813007534 0.186992422 5.15469765E-08 0 1 2 +13 0 0 0.20677930042755727 0.8131991 0.186800852 4.620836E-08 0 1 2 +14 0 0 0.012900958726204939 0.9871819 0.0128180888 1.53412311E-10 0 1 2 +15 0 0 0.0056796126449842428 0.9943365 0.00566351926 4.0924758E-10 0 1 2 +16 0 0 0.018169258095637245 0.9819948 0.018005196 1.47014889E-09 0 1 2 +17 0 0 0.059814766253305368 0.941939 0.0580609739 1.13122178E-08 0 1 2 +18 0 0 0.028932621466359262 0.9714819 0.02851807 4.23276969E-09 0 1 2 +19 0 0 0.032923391844126633 0.9676127 0.0323873 7.642096E-09 0 1 2 +20 0 0 0.08651372619399908 0.91712296 0.08287702 2.06279154E-08 0 1 2 +21 0 0 0.038368820295834986 0.962357938 0.03764207 1.32810882E-08 0 1 2 +22 0 0 0.04847884988937378 0.9526775 0.0473225228 3.781359E-09 0 1 2 +23 0 0 0.099972123278544239 0.904862642 0.09513722 1.30113463E-07 0 1 2 +24 0 0 0.12884486436940729 0.879110336 0.120889448 2.201515E-07 0 1 2 +25 0 0 0.20701321690255622 0.8130089 0.186990991 1.01994196E-07 0 1 2 +26 0 0 0.083707914672920755 0.919699848 0.08030011 6.121948E-08 0 1 2 +27 0 0 0.065971839514429781 0.9361572 0.063842766 1.05041069E-08 0 1 2 +28 0 0 0.076179360015026509 0.92665 0.0733500347 8.859791E-09 0 1 2 +29 0 0 0.16075048860491603 0.8515045 0.148495391 1.25408391E-07 0 1 2 +30 0 0 0.18626475341925278 0.8300538 0.169946045 1.27813465E-07 0 1 2 +31 0 0 
0.065274392613973067 0.9368104 0.0631895959 1.46311239E-08 0 1 2 +32 0 0 0.018341836022086215 0.981825352 0.018174639 1.48765988E-09 0 1 2 +33 0 0 0.010831873835231624 0.9892266 0.0107734147 4.45536885E-10 0 1 2 +34 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +35 0 0 0.1094558082339183 0.8963218 0.103678234 1.02393658E-08 0 1 2 +36 0 0 0.049986936620370084 0.951241851 0.04875815 2.05979567E-09 0 1 2 +37 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +38 0 0 0.2126495148617171 0.808439434 0.191560462 1.15782285E-07 0 1 2 +39 0 0 0.085343924605329857 0.91819644 0.08180357 1.76697252E-08 0 1 2 +40 0 0 0.058200059976790904 0.9434612 0.05653884 9.271867E-09 0 1 2 +41 0 0 0.49728774260829139 0.60817796 0.391821355 7.12752467E-07 0 1 2 +42 0 0 0.14839409412278926 0.8620913 0.137908638 6.888182E-08 0 1 2 +43 0 0 0.058733854938205904 0.9429577 0.0570422448 8.043525E-08 0 1 2 +44 0 0 0.041432529938052375 0.959414065 0.0405859053 6.019737E-08 0 1 2 +45 0 0 0.18582725015020429 0.830417037 0.169582859 9.04475E-08 0 1 2 +46 0 0 0.038175474655158276 0.962544 0.03745597 9.106087E-09 0 1 2 +47 0 0 0.14649366336893571 0.8637312 0.13626872 6.586965E-08 0 1 2 +48 0 0 0.040528072578562842 0.9602822 0.03971778 4.706963E-09 0 1 2 +49 0 0 0.10224582319060745 0.9028076 0.09719238 1.898811E-08 0 1 2 +50 1 1 0.13228530826110765 0.876091 0.07536332 0.04854567 1 0 2 +51 1 1 0.19869535635760471 0.8197996 0.101386108 0.0788142756 1 2 0 +52 1 1 0.22406365925144375 0.799264252 0.175007358 0.0257283952 1 2 0 +53 1 1 0.22846786324242829 0.79575187 0.190147012 0.0141011067 1 2 0 +54 1 1 0.2217374832444966 0.801125646 0.1814971 0.0173772536 1 2 0 +55 1 1 0.36194510206226777 0.6963206 0.290218145 0.0134612536 1 2 0 +56 1 1 0.38227740308279812 0.682305753 0.26903218 0.04866208 1 2 0 +57 1 1 0.11711832955706668 0.889479935 0.09261171 0.01790834 1 0 2 +58 1 1 0.11588080546894164 0.890581369 0.0796031356 0.0298154745 1 2 0 +59 1 1 0.22549808191140069 0.7981186 
0.1629105 0.0389709137 1 2 0 +60 1 1 0.11452610106565105 0.891788661 0.08094173 0.027269613 1 2 0 +61 1 1 0.19606227523723149 0.821961045 0.1124982 0.0655407459 1 2 0 +62 1 1 0.065211469379248863 0.9368693 0.0428994 0.0202313047 1 2 0 +63 1 1 0.38674137023255867 0.679266751 0.3082701 0.0124631459 1 2 0 +64 1 1 0.23030100927747191 0.7942945 0.193664417 0.0120411161 1 0 2 +65 1 1 0.13921020030567513 0.8700451 0.0984156 0.0315392464 1 0 2 +66 1 1 0.52761682473815386 0.5900094 0.392468572 0.0175220035 1 2 0 +67 1 1 0.084603908782305962 0.9188762 0.0453156643 0.03580814 1 0 2 +68 1 1 0.4618931830241112 0.630089641 0.3666358 0.00327458978 1 2 0 +69 1 1 0.090485397720604446 0.9134877 0.04816054 0.03835176 1 2 0 +70 1 2 1.0140603005132554 0.6271839 0.362743139 0.0100729465 2 1 0 +71 1 1 0.11008255968443192 0.8957602 0.07861323 0.0256266128 1 0 2 +72 1 1 0.64771052952961239 0.523242354 0.474618584 0.00213904912 1 2 0 +73 1 1 0.27755598196422471 0.75763315 0.230862036 0.011504828 1 2 0 +74 1 1 0.10244027519493581 0.902632058 0.0589159 0.0384520665 1 0 2 +75 1 1 0.11841763901117131 0.888325 0.0673675239 0.0443075225 1 0 2 +76 1 1 0.19025677181646344 0.8267468 0.160040483 0.0132126883 1 2 0 +77 1 1 0.61336152114875442 0.54152745 0.4505507 0.00792186148 1 2 0 +78 1 1 0.33449082060539098 0.7157024 0.264204919 0.0200926811 1 2 0 +79 1 1 0.1490904339680772 0.8614912 0.134173632 0.00433517434 1 0 2 +80 1 1 0.089822225657751095 0.9140937 0.04908715 0.0368192 1 2 0 +81 1 1 0.077764627112278675 0.925182164 0.0500315838 0.0247862767 1 0 2 +82 1 1 0.10062242034830922 0.9042744 0.06707825 0.028647339 1 0 2 +83 1 2 0.87242455033259836 0.580679059 0.417937 0.001383926 2 1 0 +84 1 1 0.64643118238444741 0.5239122 0.462312371 0.0137754688 1 2 0 +85 1 1 0.38078103791751566 0.6833275 0.219007626 0.09766489 1 2 0 +86 1 1 0.18654240234510375 0.8298234 0.129826292 0.0403503366 1 2 0 +87 1 1 0.19556197322229293 0.8223724 0.169585884 0.008041753 1 2 0 +88 1 1 0.16967653754303061 0.843937755 
0.07920382 0.07685845 1 2 0 +89 1 1 0.17942883979161328 0.8357474 0.140764222 0.0234883726 1 2 0 +90 1 1 0.37073556760095389 0.690226436 0.2999358 0.009837814 1 2 0 +91 1 1 0.28153328535230204 0.7546258 0.221098259 0.02427597 1 2 0 +92 1 1 0.095651081105254609 0.908781052 0.0509764329 0.0402425081 1 2 0 +93 1 1 0.10201305873404877 0.903017759 0.07907509 0.0179071445 1 0 2 +94 1 1 0.21320461976679056 0.8079908 0.167838141 0.02417106 1 2 0 +95 1 1 0.14772366482527016 0.862669468 0.07083947 0.06649103 1 2 0 +96 1 1 0.1676055742511661 0.8456873 0.107580893 0.04673176 1 2 0 +97 1 1 0.11592859305433842 0.8905388 0.058125712 0.051335495 1 2 0 +98 1 1 0.24343470732109662 0.783930659 0.212896168 0.00317314919 1 0 2 +99 1 1 0.14288965000739814 0.8668497 0.0867102742 0.0464400165 1 2 0 +100 2 2 0.22003618725213686 0.802489758 0.197204947 0.000305285153 2 1 0 +101 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 +102 2 2 0.32248110542186914 0.7243496 0.2752483 0.000402083038 2 1 0 +103 2 2 0.4318088621640172 0.6493335 0.3500793 0.00058722886 2 1 0 +104 2 2 0.33701376028625557 0.713899 0.2857394 0.000361559069 2 1 0 +105 2 2 0.36111778351144497 0.6968969 0.303009778 9.33285046E-05 2 1 0 +106 2 2 0.57324912019977703 0.56369096 0.434159666 0.00214935932 2 1 0 +107 2 2 0.43241433229369408 0.648940444 0.3508895 0.000170046318 2 1 0 +108 2 2 0.5524780213978463 0.5755219 0.424283057 0.000195083121 2 1 0 +109 2 2 0.10018846863192231 0.9046669 0.0947069 0.000626212 2 1 0 +110 2 2 0.34368591687548161 0.7091516 0.2858574 0.00499094324 2 1 0 +111 2 2 0.46723123458330745 0.626735151 0.3724764 0.000788469857 2 1 0 +112 2 2 0.31582627414124387 0.7291861 0.2698556 0.0009583179 2 1 0 +113 2 2 0.52239827082819301 0.593096435 0.406178564 0.00072502665 2 1 0 +114 2 2 0.36058132074789678 0.6972709 0.301918417 0.000810695637 2 1 0 +115 2 2 0.21578587862382262 0.805907845 0.192418143 0.00167402264 2 1 0 +116 2 2 0.38868733060861138 0.6779462 0.3210066 0.00104722043 2 1 0 +117 2 2 
0.097428111747803359 0.907167554 0.0924843 0.000348151079 2 1 0 +118 2 2 0.51096864540412867 0.5999142 0.400067 1.88108879E-05 2 1 0 +119 2 2 0.64730667194394087 0.5234537 0.4758455 0.000700797769 2 1 0 +120 2 2 0.21506259321317989 0.806490958 0.192730322 0.0007787307 2 1 0 +121 2 2 0.42372860353635938 0.6546015 0.3437634 0.00163504237 2 1 0 +122 2 2 0.45634525198899917 0.633595049 0.366349727 5.52281235E-05 2 1 0 +123 2 2 0.56778894609208741 0.5667772 0.4307018 0.00252098124 2 1 0 +124 2 2 0.22282641548991947 0.800253749 0.198714435 0.00103183649 2 1 0 +125 2 2 0.31027244305630025 0.733247161 0.266095132 0.0006576972 2 1 0 +126 2 2 0.59157505509617891 0.5534549 0.442328155 0.004216949 2 1 0 +127 2 2 0.48357066476934346 0.616577864 0.3786479 0.00477422541 2 1 0 +128 2 2 0.41595305224046536 0.659711242 0.339890242 0.0003985048 2 1 0 +129 2 2 0.45550524433223816 0.6341275 0.364983171 0.0008893516 2 1 0 +130 2 2 0.43130825350797314 0.6496586 0.3501238 0.000217600667 2 1 0 +131 2 2 0.11700817008638008 0.8895779 0.109250523 0.00117157528 2 1 0 +132 2 2 0.40127211138702135 0.669467866 0.330161959 0.000370198279 2 1 0 +133 2 2 0.63157763358593388 0.5317522 0.465779215 0.002468573 2 1 0 +134 2 2 0.58314815792662678 0.5581385 0.441456854 0.000404651684 2 1 0 +135 2 2 0.27204979854142897 0.7618163 0.2379036 0.000280094158 2 1 0 +136 2 2 0.1629980271721547 0.849592865 0.149376482 0.00103064778 2 1 0 +137 2 2 0.35495650373339338 0.701203942 0.297558725 0.00123733515 2 1 0 +138 2 2 0.51228059795956926 0.59912765 0.394762278 0.00611006049 2 1 0 +139 2 2 0.29479453684898038 0.7446846 0.253554732 0.00176069664 2 1 0 +140 2 2 0.23636956568495673 0.789488852 0.209835038 0.000676139141 2 1 0 +141 2 2 0.28994324565081614 0.748306036 0.247889653 0.0038042888 2 1 0 +142 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 +143 2 2 0.23597765808861054 0.7897983 0.209748372 0.0004533111 2 1 0 +144 2 2 0.16769889506754851 0.8456084 0.153658465 0.000733090041 2 1 0 +145 2 2 
0.27656426046073113 0.7583849 0.239901781 0.001713358 2 1 0 +146 2 2 0.5356947512214818 0.585262537 0.413683951 0.00105350767 2 1 0 +147 2 2 0.35725414522256133 0.6995947 0.298375219 0.00203008 2 1 0 +148 2 2 0.16789042737345514 0.845446467 0.152793139 0.00176038919 2 1 0 +149 2 2 0.4005334114812254 0.6699626 0.327896535 0.002140856 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt new file mode 100644 index 0000000000..f71990cd92 --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt @@ -0,0 +1,70 @@ +maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 +Not adding a normalizer. +Training learner (0,0) +Training calibrator. +Training learner (1,0) +Training calibrator. +Training learner (1,1) +Training calibrator. +Training learner (2,0) +Training calibrator. +Training learner (2,1) +Training calibrator. +Training learner (2,2) +Training calibrator. +Not training a calibrator because it is not needed. +Not adding a normalizer. +Training learner (0,0) +Training calibrator. +Training learner (1,0) +Training calibrator. +Training learner (1,1) +Training calibrator. +Training learner (2,0) +Training calibrator. +Training learner (2,1) +Training calibrator. +Training learner (2,2) +Training calibrator. +Not training a calibrator because it is not needed. 
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 21 | 0 | 0 | 1.0000 + 1 || 0 | 26 | 4 | 0.8667 + 2 || 0 | 0 | 28 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.8750 | +Accuracy(micro-avg): 0.949367 +Accuracy(macro-avg): 0.955556 +Log-loss: 0.343967 +Log-loss reduction: 68.371359 + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 29 | 0 | 0 | 1.0000 + 1 || 0 | 19 | 1 | 0.9500 + 2 || 0 | 0 | 22 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.9565 | +Accuracy(micro-avg): 0.985915 +Accuracy(macro-avg): 0.983333 +Log-loss: 0.277101 +Log-loss reduction: 74.475991 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.967641 (0.0183) +Accuracy(macro-avg): 0.969444 (0.0139) +Log-loss: 0.310534 (0.0334) +Log-loss reduction: 71.423675 (3.0523) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt new file mode 100644 index 0000000000..4ee447e298 --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt @@ -0,0 +1,4 @@ +PKPD +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.967641 0.969444 0.310534 71.42368 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt new file mode 100644 index 0000000000..100ad1843e --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt @@ -0,0 
+1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +5 0 0 0.023753880830600982 0.976526 0.0234627537 1.12415537E-05 0 1 2 +6 0 0 0.1302157926188138 0.877905965 0.122053728 4.028206E-05 0 1 2 +8 0 0 0.38835109531985795 0.6781742 0.321734548 9.125436E-05 0 1 2 +9 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 +10 0 0 0.043651773791107358 0.957287252 0.0427056365 7.135807E-06 0 1 2 +11 0 0 0.13521420369658141 0.8735288 0.126428857 4.23762431E-05 0 1 2 +18 0 0 0.028430623462091707 0.9719697 0.02802308 7.212694E-06 0 1 2 +20 0 0 0.10458121712711159 0.900701642 0.0992766 2.17436227E-05 0 1 2 +21 0 0 0.045363134495307413 0.9556504 0.0443333276 1.62607685E-05 0 1 2 +25 0 0 0.28906012724616753 0.7489672 0.250976235 5.657253E-05 0 1 2 +28 0 0 0.10227830625980082 0.902778268 0.0972085 1.322162E-05 0 1 2 +31 0 0 0.08562861703311947 0.9179351 0.08204699 1.79389826E-05 0 1 2 +32 0 0 0.017136477098684464 0.9830095 0.0169868153 3.65674259E-06 0 1 2 +35 0 0 0.16731981579854963 0.845929 0.1540563 1.46633747E-05 0 1 2 +37 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 +40 0 0 0.079660704938754975 0.9234296 0.0765565857 1.37818333E-05 0 1 2 +41 0 0 0.64555225289552742 0.5243729 0.475452363 0.0001747969 0 1 2 +44 0 0 0.040687480851420016 0.960129142 0.0398315266 3.935343E-05 0 1 2 +45 0 0 0.27950273039157197 0.756159663 0.243786544 5.377483E-05 0 1 2 +46 0 0 0.040918754924550584 0.9599071 0.0400804244 1.24371918E-05 0 1 2 +48 0 0 0.046121164137562148 0.954926252 0.04506536 8.38812048E-06 0 1 2 +50 1 1 0.72887025154137375 0.482453734 0.272260427 0.245285824 1 0 2 +51 1 1 0.8088987568127185 0.445348233 0.336972356 0.2176794 1 2 0 +52 1 1 0.78136433840766417 0.457781017 0.438075066 0.104143932 1 2 0 +54 1 1 0.54403068907517893 0.5804041 0.36454007 0.0550558232 1 2 0 +56 1 2 1.1200462355861258 0.531909943 0.3262647 0.141825333 2 1 0 +60 1 1 0.23491005150903682 0.790641963 0.1784061 0.0309519637 1 2 0 +63 
1 1 0.68939365760121074 0.5018803 0.458269477 0.0398502052 1 2 0 +64 1 1 0.4797019767355476 0.618967831 0.291831881 0.0892002955 1 0 2 +66 1 2 0.86155959099850343 0.5307835 0.422502637 0.04671388 2 1 0 +68 1 1 0.49420884947307447 0.61005336 0.3815175 0.008429126 1 2 0 +69 1 1 0.23145347728671642 0.7933796 0.145455226 0.061165195 1 2 0 +70 1 2 1.4851413739165595 0.736525655 0.226470321 0.03700401 2 1 0 +71 1 1 0.3561851307666371 0.700342953 0.164127111 0.13552995 1 0 2 +72 1 1 0.67173548994464938 0.5108213 0.4814403 0.007738397 1 2 0 +73 1 1 0.50612015658391962 0.602829933 0.3625389 0.034631148 1 2 0 +74 1 1 0.41484498646830853 0.66044265 0.18213135 0.157426015 1 2 0 +76 1 1 0.50271064477753691 0.6048888 0.345186323 0.0499248542 1 2 0 +77 1 2 1.0467776624316247 0.6128986 0.3510672 0.0360342041 2 1 0 +79 1 1 0.25594672318320155 0.7741832 0.183944046 0.0418727621 1 0 2 +82 1 1 0.28889429116536303 0.7490914 0.130277559 0.120631076 1 2 0 +88 1 1 0.52394753213635203 0.5921783 0.2563273 0.151494384 1 2 0 +90 1 1 0.45035315262659448 0.637403 0.3411746 0.0214223787 1 2 0 +91 1 1 0.68738585386581597 0.502889 0.425832123 0.07127889 1 2 0 +92 1 1 0.2697499931611898 0.763570368 0.162871167 0.0735584348 1 2 0 +93 1 1 0.17823538741474004 0.836745441 0.0821188241 0.08113571 1 2 0 +95 1 1 0.48391980144761892 0.616362631 0.241359085 0.1422783 1 2 0 +96 1 1 0.47873437290577431 0.619567037 0.281721354 0.09871157 1 2 0 +97 1 1 0.4309836113144338 0.649869561 0.220002741 0.130127683 1 2 0 +98 1 1 0.27001090521188814 0.76337117 0.202765256 0.0338635966 1 0 2 +99 1 1 0.40366256068064504 0.667869449 0.2403792 0.09175138 1 2 0 +100 2 2 0.098423959135663375 0.9062646 0.09183126 0.001904126 2 1 0 +102 2 2 0.18784362467414789 0.8287443 0.16809994 0.00315576326 2 1 0 +104 2 2 0.20305260298998334 0.8162353 0.181571469 0.00219323137 2 1 0 +105 2 2 0.16738689651485589 0.8458723 0.153207168 0.000920576451 2 1 0 +106 2 2 0.58800293393782621 0.5554354 0.439967334 0.00459726434 2 1 0 +108 2 2 
0.4731297912724482 0.6230492 0.37598455 0.0009662311 2 1 0 +109 2 2 0.035500468049352432 0.9651223 0.0299819969 0.00489571551 2 1 0 +111 2 2 0.39672088491078095 0.6725217 0.3237172 0.00376107777 2 1 0 +112 2 2 0.20709995070378101 0.8129384 0.180399537 0.00666203769 2 1 0 +113 2 2 0.49689364308238115 0.6084177 0.3893122 0.002270126 2 1 0 +115 2 2 0.12645623913049378 0.8812127 0.109346546 0.009440767 2 1 0 +117 2 2 0.025087766630295084 0.9752243 0.02114348 0.00363221765 2 1 0 +120 2 2 0.11093517828288008 0.894996762 0.09939285 0.005610376 2 1 0 +121 2 2 0.37719650665097176 0.6857813 0.3084725 0.00574616855 2 1 0 +122 2 2 0.25512580405327051 0.774819 0.224642664 0.0005383256 2 1 0 +123 2 2 0.53470400837092702 0.585842669 0.4030093 0.0111480216 2 1 0 +125 2 2 0.15798263383297981 0.8538646 0.139957428 0.00617795158 2 1 0 +128 2 2 0.30543205932131445 0.736804962 0.2611207 0.00207435014 2 1 0 +129 2 2 0.31304921188329871 0.7312139 0.2608706 0.00791547 2 1 0 +131 2 2 0.050064073835890829 0.9511685 0.03442935 0.01440218 2 1 0 +132 2 2 0.29000752749814246 0.748257935 0.249804661 0.00193742709 2 1 0 +133 2 2 0.57582423964286222 0.562241256 0.4260323 0.0117264669 2 1 0 +137 2 2 0.23369102254431959 0.791606367 0.2005785 0.007815139 2 1 0 +138 2 2 0.38238249988924056 0.682234049 0.29368493 0.0240810253 2 1 0 +141 2 2 0.18873995406873867 0.8280018 0.150254741 0.0217434336 2 1 0 +144 2 2 0.080216932325286969 0.9229161 0.0723539 0.00473001366 2 1 0 +145 2 2 0.18503665462933813 0.8310738 0.158534229 0.0103919385 2 1 0 +147 2 2 0.25370059016834895 0.7759241 0.212361827 0.0117140962 2 1 0 +0 0 0 0.10211748577393479 0.902923465 0.09707638 1.31251113E-07 0 1 2 +1 0 0 0.18414005419872645 0.8318193 0.1681801 6.10566246E-07 0 1 2 +2 0 0 0.14492385261258312 0.865088165 0.134911478 3.69817315E-07 0 1 2 +3 0 0 0.18018182361642154 0.835118353 0.164880455 1.1808728E-06 0 1 2 +4 0 0 0.091854748075686457 0.912237644 0.08776226 1.22251592E-07 0 1 2 +7 0 0 0.12233785238606552 0.88484937 0.115150325 
2.89829131E-07 0 1 2 +12 0 0 0.18838937109230286 0.828292131 0.1717073 5.786816E-07 0 1 2 +13 0 0 0.17483938525704146 0.839591861 0.1604077 4.46853E-07 0 1 2 +14 0 0 0.045135070620344352 0.955868363 0.0441316478 5.3407363E-09 0 1 2 +15 0 0 0.030612994376980014 0.969850838 0.0301491246 1.0891493E-08 0 1 2 +16 0 0 0.055112338614887763 0.9463788 0.05362115 3.08450865E-08 0 1 2 +17 0 0 0.10062993461057677 0.9042676 0.09573224 1.63700875E-07 0 1 2 +19 0 0 0.073413324188812371 0.9292167 0.07078323 1.13846653E-07 0 1 2 +22 0 0 0.080101141800871592 0.923023 0.07697697 5.50810348E-08 0 1 2 +23 0 0 0.14576182189759379 0.864363551 0.135635108 1.32143555E-06 0 1 2 +24 0 0 0.15527358603878322 0.8561809 0.143817171 1.903017E-06 0 1 2 +26 0 0 0.12564253084337382 0.881930053 0.118069261 6.645889E-07 0 1 2 +27 0 0 0.10587439997529158 0.8995376 0.100462213 1.60874819E-07 0 1 2 +29 0 0 0.16779715917645741 0.8455253 0.154473513 1.14837974E-06 0 1 2 +30 0 0 0.18480739195523371 0.8312644 0.168734416 1.2252525E-06 0 1 2 +33 0 0 0.04032543480505283 0.9604768 0.03952316 1.14692389E-08 0 1 2 +34 0 0 0.17552640171742001 0.839015245 0.160984188 5.622917E-07 0 1 2 +36 0 0 0.09204810470455442 0.9120613 0.0879386961 4.5151662E-08 0 1 2 +38 0 0 0.18661279637066258 0.829764962 0.170234054 9.896429E-07 0 1 2 +39 0 0 0.12071723979290006 0.88628453 0.113715246 2.43832517E-07 0 1 2 +42 0 0 0.15054831358788412 0.860236168 0.139763221 6.199213E-07 0 1 2 +43 0 0 0.10889597554551136 0.8968237 0.103175469 8.1666E-07 0 1 2 +47 0 0 0.15393456680529674 0.8573281 0.142671227 6.39328334E-07 0 1 2 +49 0 0 0.13066902060601812 0.877508163 0.122491583 2.52864339E-07 0 1 2 +53 1 1 0.2827592626915541 0.7537012 0.21389237 0.03240643 1 2 0 +55 1 1 0.40583815374571869 0.666418 0.308232874 0.0253491253 1 2 0 +57 1 1 0.22836674855401806 0.795832336 0.173427463 0.0307402182 1 0 2 +58 1 1 0.17705481273015428 0.837733865 0.126026779 0.0362393446 1 2 0 +59 1 1 0.28623948897938623 0.7510827 0.179181576 0.06973568 1 2 0 +61 1 1 
0.23046176000020194 0.7941668 0.132671311 0.07316189 1 2 0 +62 1 1 0.13173560241334381 0.8765727 0.08075007 0.04267718 1 2 0 +65 1 1 0.14168260820038769 0.8678967 0.07845409 0.05364923 1 0 2 +67 1 1 0.15281502745602504 0.858288467 0.07580672 0.06590483 1 0 2 +75 1 1 0.14692167558386063 0.8633616 0.0731101856 0.06352824 1 2 0 +78 1 1 0.36772458144318781 0.69230783 0.278533429 0.0291587356 1 2 0 +80 1 1 0.16590807826044235 0.8471241 0.07986628 0.0730095953 1 2 0 +81 1 1 0.15418537033222618 0.8571131 0.09555078 0.0473361239 1 0 2 +83 1 2 0.87913156238340517 0.5822821 0.415143281 0.00257462286 2 1 0 +84 1 1 0.60329559653785991 0.547005951 0.428067416 0.0249266215 1 2 0 +85 1 1 0.30763923553748801 0.7351805 0.189976588 0.07484292 1 2 0 +86 1 1 0.22077802043432937 0.801894665 0.161515683 0.0365896225 1 2 0 +87 1 1 0.26521281231157245 0.7670427 0.216186985 0.0167703368 1 2 0 +89 1 1 0.24918952543906914 0.779432237 0.173652634 0.0469151475 1 2 0 +94 1 1 0.27882348553441511 0.756673455 0.19962126 0.04370527 1 2 0 +101 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 +103 2 2 0.46298590319892402 0.6294015 0.369625777 0.000972708163 2 1 0 +107 2 2 0.47875168972271287 0.6195563 0.380170017 0.000273669633 2 1 0 +110 2 2 0.50353121321209937 0.604392648 0.38902545 0.00658191368 2 1 0 +114 2 2 0.42627427461954132 0.652937233 0.345770031 0.00129272381 2 1 0 +116 2 2 0.46330187402047324 0.629202664 0.369202226 0.00159509375 2 1 0 +118 2 2 0.54017556235004138 0.582645953 0.4173134 4.062326E-05 2 1 0 +119 2 2 0.59238374503353441 0.5530075 0.445467323 0.00152520277 2 1 0 +124 2 2 0.37578192391690202 0.6867521 0.311931521 0.0013163907 2 1 0 +126 2 2 0.65538868877846068 0.5192402 0.4731488 0.007610987 2 1 0 +127 2 2 0.58488203770280534 0.5571716 0.4347405 0.008087933 2 1 0 +130 2 2 0.48483716808745803 0.61579746 0.383864969 0.0003375506 2 1 0 +134 2 2 0.53564556246675443 0.5852913 0.4138755 0.0008331971 2 1 0 +135 2 2 0.4222335993197745 0.6555809 0.344069272 0.0003498588 
2 1 0 +136 2 2 0.3225183821825246 0.7243226 0.2744004 0.00127698807 2 1 0 +139 2 2 0.45570650730743484 0.6339999 0.3636862 0.002313912 2 1 0 +140 2 2 0.38195300929629561 0.6825271 0.316598237 0.00087465957 2 1 0 +142 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 +143 2 2 0.37769091414905154 0.6854423 0.313964784 0.0005928655 2 1 0 +146 2 2 0.54054677541576512 0.5824297 0.415693641 0.00187665562 2 1 0 +148 2 2 0.33311820419531579 0.7166855 0.281080544 0.00223395228 2 1 0 +149 2 2 0.47519601475299389 0.62176317 0.374690771 0.00354602537 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt new file mode 100644 index 0000000000..c319ba3d49 --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt @@ -0,0 +1,42 @@ +maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 +Not adding a normalizer. +Training learner (0,0) +Training calibrator. +Training learner (1,0) +Training calibrator. +Training learner (1,1) +Training calibrator. +Training learner (2,0) +Training calibrator. +Training learner (2,1) +Training calibrator. +Training learner (2,2) +Training calibrator. +Not training a calibrator because it is not needed. 
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 50 | 0 | 0 | 1.0000 + 1 || 0 | 45 | 5 | 0.9000 + 2 || 0 | 1 | 49 | 0.9800 + ||======================== +Precision ||1.0000 |0.9783 |0.9074 | +Accuracy(micro-avg): 0.960000 +Accuracy(macro-avg): 0.960000 +Log-loss: 0.293938 +Log-loss reduction: 73.244600 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.960000 (0.0000) +Accuracy(macro-avg): 0.960000 (0.0000) +Log-loss: 0.293938 (0.0000) +Log-loss reduction: 73.244600 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt new file mode 100644 index 0000000000..b8bbf72cd9 --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt @@ -0,0 +1,4 @@ +PKPD +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.96 0.96 0.293938 73.2446 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt new file mode 100644 index 0000000000..abaafba303 --- /dev/null +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +0 0 0 0.10709083790126951 0.898444057 0.101555951 1.09862652E-08 0 1 2 +1 0 0 0.14972937943085041 0.860940933 0.139058977 1.03412368E-07 0 1 2 +2 0 0 0.13145025814375275 0.8768229 0.123177059 6.145917E-08 0 1 2 +3 0 
0 0.15528019966440448 0.856175244 0.143824473 2.62822567E-07 0 1 2 +4 0 0 0.10252830274490825 0.9025526 0.09744736 1.04376809E-08 0 1 2 +5 0 0 0.090053147743181336 0.9138826 0.08611736 8.101242E-09 0 1 2 +6 0 0 0.12357314644074496 0.883757 0.116242908 8.74405E-08 0 1 2 +7 0 0 0.12105140488742108 0.8859884 0.114011578 3.08569952E-08 0 1 2 +8 0 0 0.17274984965939472 0.841348052 0.158651352 5.97416147E-07 0 1 2 +9 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 +10 0 0 0.093967949932628358 0.910311937 0.08968807 3.63372E-09 0 1 2 +11 0 0 0.13108575869963462 0.877142549 0.122857377 8.249825E-08 0 1 2 +12 0 0 0.15257914743793716 0.858490944 0.141508967 9.911802E-08 0 1 2 +13 0 0 0.14535615625152426 0.864714265 0.1352856 1.173372E-07 0 1 2 +14 0 0 0.06340435568818982 0.9385639 0.0614361 1.239382E-10 0 1 2 +15 0 0 0.056683708917569194 0.9448929 0.0551071428 2.807663E-10 0 1 2 +16 0 0 0.075170002932813201 0.9275858 0.07241419 1.54777513E-09 0 1 2 +17 0 0 0.10666806288443777 0.898824 0.101176038 1.5170011E-08 0 1 2 +18 0 0 0.091241664716928128 0.9127971 0.0872029141 3.50579832E-09 0 1 2 +19 0 0 0.092742902606664304 0.9114278 0.08857219 8.39613445E-09 0 1 2 +20 0 0 0.12374770832324052 0.883602738 0.116397209 2.285497E-08 0 1 2 +21 0 0 0.098256721071890374 0.9064162 0.093583785 1.62108957E-08 0 1 2 +22 0 0 0.092301570249172765 0.9118301 0.08816987 6.23606855E-09 0 1 2 +23 0 0 0.1380404335556189 0.8710635 0.12893635 2.00292092E-07 0 1 2 +24 0 0 0.15120025555722924 0.8596755 0.1403242 2.922401E-07 0 1 2 +25 0 0 0.16166208373075255 0.850728631 0.14927116 1.80764E-07 0 1 2 +26 0 0 0.12599234088657019 0.8816216 0.118378319 8.963828E-08 0 1 2 +27 0 0 0.11003318759164467 0.8958044 0.104195595 1.25205668E-08 0 1 2 +28 0 0 0.11184598225922286 0.894181967 0.105818 1.1564893E-08 0 1 2 +29 0 0 0.1505769302299601 0.860211551 0.139788255 2.15031562E-07 0 1 2 +30 0 0 0.15733441851850752 0.8544183 0.145581514 2.27139253E-07 0 1 2 +31 0 0 0.11219313224767015 0.8938716 
0.1061284 1.89737328E-08 0 1 2 +32 0 0 0.076296562873109264 0.9265414 0.0734586 1.21595978E-09 0 1 2 +33 0 0 0.06454049206643403 0.937498152 0.06250186 3.385217E-10 0 1 2 +34 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 +35 0 0 0.11957675391026261 0.8872959 0.112704061 1.73822077E-08 0 1 2 +36 0 0 0.095551393128538914 0.908871651 0.09112834 2.340219E-09 0 1 2 +37 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 +38 0 0 0.15557611714722408 0.8559219 0.144077763 2.80571015E-07 0 1 2 +39 0 0 0.11898397051842637 0.887822032 0.112177923 2.32430128E-08 0 1 2 +40 0 0 0.10383152796029192 0.901377141 0.0986228138 1.3316324E-08 0 1 2 +41 0 0 0.23267843022808721 0.792408347 0.2075885 3.16824639E-06 0 1 2 +42 0 0 0.13825641388336071 0.870875359 0.129124492 1.43657843E-07 0 1 2 +43 0 0 0.11780844245896904 0.8888663 0.11113359 1.22518585E-07 0 1 2 +44 0 0 0.11106878201188827 0.8948772 0.105122752 6.136807E-08 0 1 2 +45 0 0 0.1519822314490463 0.859003544 0.140996248 1.90226359E-07 0 1 2 +46 0 0 0.097403275899291555 0.9071901 0.09280992 9.201566E-09 0 1 2 +47 0 0 0.13975224176627257 0.869573653 0.130426213 1.23595186E-07 0 1 2 +48 0 0 0.095640521579255233 0.908790648 0.09120932 4.824783E-09 0 1 2 +49 0 0 0.12297630490939666 0.8842846 0.115715332 2.84802919E-08 0 1 2 +50 1 1 0.072140726875920644 0.930399954 0.05064142 0.018958617 1 0 2 +51 1 1 0.12052363925502865 0.886456132 0.0577918626 0.0557519831 1 2 0 +52 1 1 0.11754125629409608 0.88910383 0.0897584558 0.0211377256 1 2 0 +53 1 1 0.55234194460144326 0.5756002 0.407715857 0.01668393 1 2 0 +54 1 1 0.22033072960732095 0.8022534 0.179187492 0.018559115 1 2 0 +55 1 1 0.48471715245043789 0.61587137 0.369235158 0.0148934675 1 2 0 +56 1 1 0.19649325323392461 0.8216069 0.144622579 0.03377054 1 2 0 +57 1 1 0.23907513650017656 0.7873557 0.144207269 0.06843699 1 0 2 +58 1 1 0.10057641320176978 0.904316 0.06448696 0.03119705 1 2 0 +59 1 1 0.4382277992649205 0.6451788 0.3130982 0.0417229943 1 2 0 +60 1 
1 0.40378849464830396 0.667785347 0.294686764 0.0375279263 1 2 0 +61 1 1 0.19328713066283934 0.8242453 0.117469594 0.058285147 1 2 0 +62 1 1 0.14347483184211016 0.8663426 0.101038948 0.0326184332 1 2 0 +63 1 1 0.39254042807911971 0.675339043 0.3112865 0.013374473 1 2 0 +64 1 1 0.24625037004233435 0.7817265 0.199262485 0.01901104 1 0 2 +65 1 1 0.096962049648401036 0.907590449 0.07445438 0.0179551709 1 0 2 +66 1 1 0.58127624794437671 0.559184253 0.424495757 0.01631998 1 2 0 +67 1 1 0.12717766992503393 0.8805772 0.06211802 0.0573047921 1 0 2 +68 1 2 0.76418847827461567 0.5308523 0.4657117 0.003435955 2 1 0 +69 1 1 0.17854309398479379 0.836488 0.108136833 0.05537514 1 2 0 +70 1 2 0.7782997543015937 0.532311857 0.459186077 0.008502028 2 1 0 +71 1 1 0.12963404144590399 0.878416836 0.08763818 0.0339449868 1 0 2 +72 1 2 0.82527364026390571 0.5597655 0.4381151 0.00211936235 2 1 0 +73 1 1 0.31236741131512197 0.731712639 0.2543256 0.0139617641 1 2 0 +74 1 1 0.10091901291460656 0.904006243 0.0600393079 0.03595447 1 0 2 +75 1 1 0.094227600597530603 0.9100756 0.0583470054 0.0315773822 1 0 2 +76 1 1 0.16139006589793164 0.8509601 0.13421455 0.0148253879 1 2 0 +77 1 1 0.42271312925322674 0.6552666 0.336988866 0.00774457539 1 2 0 +78 1 1 0.36102952185045717 0.6969584 0.28298 0.0200616084 1 2 0 +79 1 1 0.22191911287049176 0.800980151 0.189535379 0.009484478 1 0 2 +80 1 1 0.20087047707276379 0.8180184 0.127419531 0.05456211 1 2 0 +81 1 1 0.15758181984651309 0.8542069 0.07838817 0.0674049258 1 0 2 +82 1 1 0.14551884398560022 0.8645736 0.08538146 0.0500449426 1 0 2 +83 1 2 1.0041035759831962 0.6325462 0.3663729 0.00108090381 2 1 0 +84 1 2 0.75476486164367673 0.517363548 0.470121145 0.0125153111 2 1 0 +85 1 1 0.1936571586064853 0.823940337 0.11649999 0.05955967 1 2 0 +86 1 1 0.1117176732744818 0.8942967 0.07367582 0.032027483 1 2 0 +87 1 1 0.35151004083460119 0.7036248 0.285515 0.0108601972 1 2 0 +88 1 1 0.18955293586722866 0.8273289 0.09654 0.0761311054 1 2 0 +89 1 1 0.38389360242414333 
0.6812039 0.289862335 0.0289337616 1 2 0 +90 1 1 0.62618827467785798 0.534625769 0.454234719 0.0111395335 1 2 0 +91 1 1 0.25387959125212917 0.7757852 0.200217441 0.02399734 1 2 0 +92 1 1 0.16226915412183221 0.850212336 0.09567342 0.0541142225 1 2 0 +93 1 1 0.22196815331677711 0.8009409 0.127181739 0.07187738 1 0 2 +94 1 1 0.35124306791543597 0.703812659 0.268056452 0.02813091 1 2 0 +95 1 1 0.16237179398556273 0.8501251 0.08078788 0.06908702 1 2 0 +96 1 1 0.20996992406982243 0.8106086 0.139289573 0.05010183 1 2 0 +97 1 1 0.12058893069745849 0.886398256 0.0598800667 0.05372166 1 2 0 +98 1 1 0.37956731249976106 0.6841574 0.304465353 0.01137725 1 0 2 +99 1 1 0.20277814400339583 0.816459358 0.130446717 0.0530939251 1 2 0 +100 2 2 0.39358357371715191 0.674634933 0.32519117 0.00017387759 2 1 0 +101 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 +102 2 2 0.41936704076824455 0.657462835 0.342273951 0.00026323882 2 1 0 +103 2 2 0.42921904997620342 0.6510173 0.348579019 0.000403696467 2 1 0 +104 2 2 0.41208274684418961 0.6622695 0.337504864 0.00022567909 2 1 0 +105 2 2 0.44160572409172411 0.6430031 0.3569401 5.67683346E-05 2 1 0 +106 2 2 0.42934476448549436 0.6509355 0.347463518 0.00160101533 2 1 0 +107 2 2 0.45168489764494285 0.6365547 0.363331944 0.000113324531 2 1 0 +108 2 2 0.46645653711885576 0.627220869 0.372643322 0.000135785289 2 1 0 +109 2 2 0.35953291146180011 0.6980023 0.301614881 0.000382813538 2 1 0 +110 2 2 0.60830855615290891 0.5442707 0.451300323 0.004428956 2 1 0 +111 2 2 0.43425567366017515 0.6477466 0.35168618 0.0005671874 2 1 0 +112 2 2 0.42224814643798159 0.655571342 0.343755931 0.000672725844 2 1 0 +113 2 2 0.43378520195992787 0.648051441 0.351433426 0.000515127 2 1 0 +114 2 2 0.39828542770724013 0.671470344 0.328003943 0.00052572944 2 1 0 +115 2 2 0.38044631650760474 0.683556259 0.3153522 0.00109154719 2 1 0 +116 2 2 0.44214870680472357 0.642654061 0.3565694 0.0007765472 2 1 0 +117 2 2 0.39204321311235024 0.6756749 0.3240996 
0.000225500859 2 1 0 +118 2 2 0.49534872735975943 0.6093584 0.390629977 1.165714E-05 2 1 0 +119 2 2 0.47792706875518332 0.6200674 0.379401267 0.0005313261 2 1 0 +120 2 2 0.38996755329866611 0.677078843 0.322423726 0.0004974509 2 1 0 +121 2 2 0.40418317857084346 0.667521834 0.331344754 0.00113342493 2 1 0 +122 2 2 0.46674185713969735 0.627041936 0.372923285 3.47993519E-05 2 1 0 +123 2 2 0.50860202326236292 0.601335645 0.396473944 0.00219042762 2 1 0 +124 2 2 0.40115690949067428 0.669545 0.3297638 0.0006912321 2 1 0 +125 2 2 0.47796417410708669 0.6200444 0.379430741 0.000524864765 2 1 0 +126 2 2 0.55674263926678424 0.573072731 0.422982275 0.00394499162 2 1 0 +127 2 2 0.53745603926068541 0.5842326 0.4114353 0.004332072 2 1 0 +128 2 2 0.42435197720848 0.6541936 0.3455436 0.000262821937 2 1 0 +129 2 2 0.59539649422184571 0.5513439 0.447734416 0.000921661733 2 1 0 +130 2 2 0.45136920579127138 0.6367557 0.363094628 0.000149650616 2 1 0 +131 2 1 0.87799286296834833 0.58302784 0.415616274 0.00135587773 1 2 0 +132 2 2 0.4217459387360496 0.655900657 0.343858719 0.0002406203 2 1 0 +133 2 2 0.58858419496856473 0.55511266 0.442362428 0.00252491026 2 1 0 +134 2 2 0.46954434981246995 0.6252871 0.3744188 0.000294059661 2 1 0 +135 2 2 0.41926206364690544 0.657531857 0.3422878 0.000180329866 2 1 0 +136 2 2 0.35983106354881655 0.6977942 0.301589876 0.000615945552 2 1 0 +137 2 2 0.43598868576509842 0.646625042 0.3524654 0.000909582246 2 1 0 +138 2 2 0.55146544914408413 0.576104939 0.418423772 0.00547132 2 1 0 +139 2 2 0.47595285901721079 0.62129277 0.377271771 0.00143543689 2 1 0 +140 2 2 0.38358127985275575 0.6814167 0.318166882 0.000416446419 2 1 0 +141 2 2 0.55824128131859518 0.572214544 0.4245338 0.00325164315 2 1 0 +142 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 +143 2 2 0.39101453828348787 0.6763703 0.32335642 0.000273291429 2 1 0 +144 2 2 0.36601283135925683 0.6934939 0.306069136 0.0004369896 2 1 0 +145 2 2 0.41354127957626624 0.661304235 0.337492228 
0.00120350742 2 1 0 +146 2 2 0.44489957920965528 0.640888631 0.358320177 0.000791185652 2 1 0 +147 2 2 0.45420191711312169 0.6349545 0.363470852 0.00157464272 2 1 0 +148 2 2 0.36235400852316502 0.6960359 0.302856565 0.00110750983 2 1 0 +149 2 2 0.42624816689431871 0.6529543 0.3454746 0.00157109811 2 1 0 diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 4342fcc49f..a08795ca4c 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -233,6 +233,17 @@ public void MultiClassCVTest() Done(); } + [Fact] + [TestCategory("Multiclass")] + public void MulticlassReductionTest() + { + RunOneAllTests(TestLearners.Ova, TestDatasets.iris); + RunOneAllTests(TestLearners.OvaWithFastForest, TestDatasets.iris); + RunOneAllTests(TestLearners.Pkpd, TestDatasets.iris); + + Done(); + } + [Fact(Skip = "Need CoreTLC specific baseline update")] [TestCategory("Clustering")] [TestCategory("KMeans")] diff --git a/test/Microsoft.ML.TestFramework/Learners.cs b/test/Microsoft.ML.TestFramework/Learners.cs index f7062c3c4a..fd0e7d6aaa 100644 --- a/test/Microsoft.ML.TestFramework/Learners.cs +++ b/test/Microsoft.ML.TestFramework/Learners.cs @@ -130,6 +130,25 @@ static TestLearnersBase() Tag = "AveragedPerceptron-Reg" }; + public static PredictorAndArgs Ova = new PredictorAndArgs + { + Trainer = new SubComponent("OVA", "p=AvgPer{ lr=0.8 }"), + MamlArgs = new[] { "norm=no" }, + }; + + public static PredictorAndArgs OvaWithFastForest = new PredictorAndArgs + { + Trainer = new SubComponent("OVA", "p=FastForest{ }"), + MamlArgs = new[] { "norm=no" }, + Tag = "OVA-FastForest", + }; + + public static PredictorAndArgs Pkpd = new PredictorAndArgs + { + Trainer = new SubComponent("PKPD", "p=AvgPer { lr=0.8 }"), + MamlArgs = new[] { "norm=no" }, + }; + // Old. 
public static PredictorAndArgs perceptronDefault = new PredictorAndArgs { From d6ad1b4befd2e9a3b2a0c2913795c32640e7dd19 Mon Sep 17 00:00:00 2001 From: Tom Finley Date: Mon, 3 Dec 2018 09:56:06 -0800 Subject: [PATCH 016/100] Delete IColumn (#1801) * Stop using IColumn in predicted label scorers. * Stop using IColumn in static schema shape analysis. * Stop using IColumn in linear model statistics creation. * Stop using IColumn in FastTree statistics creation. * Stop using IColumn in benchmarking. * Stop using IColumn in many tests. * Add minor conveniences to metadata builder. * Allow metadata builder to have metadata of metadata. * Add appropriate validation in certain places. * Put warnings on an inappropriate method dealing with Batch that should not exist. * Remove IColumn. --- src/Microsoft.ML.Core/Data/MetadataBuilder.cs | 46 +- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 29 + src/Microsoft.ML.Core/Data/Schema.cs | 7 +- src/Microsoft.ML.Core/Data/SchemaBuilder.cs | 4 + src/Microsoft.ML.Core/Utilities/Utils.cs | 9 + src/Microsoft.ML.Data/Data/IColumn.cs | 667 ------------------ src/Microsoft.ML.Data/Data/RowCursorUtils.cs | 24 +- .../Scorers/PredictedLabelScorerBase.cs | 47 +- .../StaticPipe/StaticSchemaShape.cs | 17 +- src/Microsoft.ML.FastTree/FastTree.cs | 11 +- .../Standard/LinearPredictor.cs | 31 +- .../MulticlassLogisticRegression.cs | 11 +- .../Standard/ModelStatistics.cs | 61 +- test/Microsoft.ML.Benchmarks/HashBench.cs | 58 +- .../StaticPipeTests.cs | 33 +- .../Transformers/HashTests.cs | 23 +- 16 files changed, 238 insertions(+), 840 deletions(-) delete mode 100644 src/Microsoft.ML.Data/Data/IColumn.cs diff --git a/src/Microsoft.ML.Core/Data/MetadataBuilder.cs b/src/Microsoft.ML.Core/Data/MetadataBuilder.cs index 0b72f53a25..06bf090567 100644 --- a/src/Microsoft.ML.Core/Data/MetadataBuilder.cs +++ b/src/Microsoft.ML.Core/Data/MetadataBuilder.cs @@ -16,11 +16,11 @@ namespace Microsoft.ML.Data ///
public sealed class MetadataBuilder { - private readonly List<(string Name, ColumnType Type, Delegate Getter)> _items; + private readonly List<(string Name, ColumnType Type, Delegate Getter, Schema.Metadata Metadata)> _items; public MetadataBuilder() { - _items = new List<(string Name, ColumnType Type, Delegate Getter)>(); + _items = new List<(string Name, ColumnType Type, Delegate Getter, Schema.Metadata Metadata)>(); } /// @@ -40,7 +40,7 @@ public void Add(Schema.Metadata metadata, Func selector) foreach (var column in metadata.Schema) { if (selector(column.Name)) - _items.Add((column.Name, column.Type, metadata.Getters[column.Index])); + _items.Add((column.Name, column.Type, metadata.Getters[column.Index], column.Metadata)); } } @@ -51,13 +51,17 @@ public void Add(Schema.Metadata metadata, Func selector) /// The metadata name. /// The metadata type. /// The getter delegate. - public void Add(string name, ColumnType type, ValueGetter getter) + /// Metadata of the input column. Note that metadata on a metadata column is somewhat rare + /// except for certain types (for example, slot names for a vector, key values for something of key type). + public void Add(string name, ColumnType type, ValueGetter getter, Schema.Metadata metadata = null) { Contracts.CheckNonEmpty(name, nameof(name)); Contracts.CheckValue(type, nameof(type)); Contracts.CheckValue(getter, nameof(getter)); - Contracts.CheckParam(type.RawType == typeof(TValue), nameof(getter)); - _items.Add((name, type, getter)); + Contracts.CheckParam(type.RawType == typeof(TValue), nameof(type)); + Contracts.CheckValueOrNull(metadata); + + _items.Add((name, type, getter, metadata)); } /// @@ -67,11 +71,31 @@ public void Add(string name, ColumnType type, ValueGetter getter /// The metadata type. /// The getter delegate that provides the value. Note that the type of the getter is still checked /// inside this method. - public void Add(string name, ColumnType type, Delegate getter) + /// Metadata of the input column. 
Note that metadata on a metadata column is somewhat rare + /// except for certain types (for example, slot names for a vector, key values for something of key type). + public void Add(string name, ColumnType type, Delegate getter, Schema.Metadata metadata = null) { Contracts.CheckNonEmpty(name, nameof(name)); Contracts.CheckValue(type, nameof(type)); - Utils.MarshalActionInvoke(AddDelegate, type.RawType, name, type, getter); + Contracts.CheckValueOrNull(metadata); + Utils.MarshalActionInvoke(AddDelegate, type.RawType, name, type, getter, metadata); + } + + /// + /// Add one metadata column for a primitive value type. + /// + /// The metadata name. + /// The metadata type. + /// The value of the metadata. + /// Metadata of the input column. Note that metadata on a metadata column is somewhat rare + /// except for certain types (for example, slot names for a vector, key values for something of key type). + public void AddPrimitiveValue(string name, PrimitiveType type, TValue value, Schema.Metadata metadata = null) + { + Contracts.CheckNonEmpty(name, nameof(name)); + Contracts.CheckValue(type, nameof(type)); + Contracts.CheckParam(type.RawType == typeof(TValue), nameof(type)); + Contracts.CheckValueOrNull(metadata); + Add(name, type, (ref TValue dst) => dst = value, metadata); } /// @@ -100,11 +124,11 @@ public Schema.Metadata GetMetadata() { var builder = new SchemaBuilder(); foreach (var item in _items) - builder.AddColumn(item.Name, item.Type, null); + builder.AddColumn(item.Name, item.Type, item.Metadata); return new Schema.Metadata(builder.GetSchema(), _items.Select(x => x.Getter).ToArray()); } - private void AddDelegate(string name, ColumnType type, Delegate getter) + private void AddDelegate(string name, ColumnType type, Delegate getter, Schema.Metadata metadata) { Contracts.AssertNonEmpty(name); Contracts.AssertValue(type); @@ -112,7 +136,7 @@ private void AddDelegate(string name, ColumnType type, Delegate getter) var typedGetter = getter as ValueGetter; 
Contracts.CheckParam(typedGetter != null, nameof(getter)); - _items.Add((name, type, typedGetter)); + _items.Add((name, type, typedGetter, metadata)); } } } diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index 9178c1c129..6e2bd5230f 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -494,5 +494,34 @@ public static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, cols.AddRange(GetTrainerOutputMetadata()); return cols; } + + private sealed class MetadataRow : IRow + { + private readonly Schema.Metadata _metadata; + + public MetadataRow(Schema.Metadata metadata) + { + Contracts.AssertValue(metadata); + _metadata = metadata; + } + + public Schema Schema => _metadata.Schema; + public long Position => 0; + public long Batch => 0; + public ValueGetter GetGetter(int col) => _metadata.GetGetter(col); + public ValueGetter GetIdGetter() => (ref UInt128 dst) => dst = default; + public bool IsColumnActive(int col) => true; + } + + /// + /// Presents a as a an . + /// + /// The metadata to wrap. + /// A row that wraps an input metadata. + public static IRow MetadataAsRow(Schema.Metadata metadata) + { + Contracts.CheckValue(metadata, nameof(metadata)); + return new MetadataRow(metadata); + } } } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/Schema.cs b/src/Microsoft.ML.Core/Data/Schema.cs index e21bd41b86..ec45753331 100644 --- a/src/Microsoft.ML.Core/Data/Schema.cs +++ b/src/Microsoft.ML.Core/Data/Schema.cs @@ -194,7 +194,7 @@ public sealed class Metadata /// public Schema Schema { get; } - public static Metadata Empty { get; } = new Metadata(new Schema(Enumerable.Empty()), new Delegate[0]); + public static Metadata Empty { get; } = new Metadata(new Schema(new Column[0]), new Delegate[0]); /// /// Create a metadata row by supplying the schema columns and the getter delegates for all the values. 
@@ -256,11 +256,12 @@ public void GetValue(string kind, ref TValue value) /// /// This constructor should only be called by . /// - internal Schema(IEnumerable columns) + /// The input columns. The constructed instance takes ownership of the array. + internal Schema(Column[] columns) { Contracts.CheckValue(columns, nameof(columns)); - _columns = columns.ToArray(); + _columns = columns; _nameMap = new Dictionary(); for (int i = 0; i < _columns.Length; i++) { diff --git a/src/Microsoft.ML.Core/Data/SchemaBuilder.cs b/src/Microsoft.ML.Core/Data/SchemaBuilder.cs index 9fb22a7026..28017f1e7d 100644 --- a/src/Microsoft.ML.Core/Data/SchemaBuilder.cs +++ b/src/Microsoft.ML.Core/Data/SchemaBuilder.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; @@ -32,6 +33,9 @@ public SchemaBuilder() /// The column metadata. public void AddColumn(string name, ColumnType type, Schema.Metadata metadata) { + Contracts.CheckNonEmpty(name, nameof(name)); + Contracts.CheckValue(type, nameof(type)); + Contracts.CheckValueOrNull(metadata); _items.Add((name, type, metadata)); } diff --git a/src/Microsoft.ML.Core/Utilities/Utils.cs b/src/Microsoft.ML.Core/Utilities/Utils.cs index 0fcf55a449..3395bdfe06 100644 --- a/src/Microsoft.ML.Core/Utilities/Utils.cs +++ b/src/Microsoft.ML.Core/Utilities/Utils.cs @@ -1076,6 +1076,15 @@ public static void MarshalActionInvoke(Action + /// A four-argument version of . 
+ /// + public static void MarshalActionInvoke(Action act, Type genArg, TArg1 arg1, TArg2 arg2, TArg3 arg3, TArg4 arg4) + { + var meth = MarshalActionInvokeCheckAndCreate(genArg, act); + meth.Invoke(act.Target, new object[] { arg1, arg2, arg3, arg4 }); + } + public static string GetDescription(this Enum value) { Type type = value.GetType(); diff --git a/src/Microsoft.ML.Data/Data/IColumn.cs b/src/Microsoft.ML.Data/Data/IColumn.cs deleted file mode 100644 index 9d2146ee37..0000000000 --- a/src/Microsoft.ML.Data/Data/IColumn.cs +++ /dev/null @@ -1,667 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Internal.Utilities; - -namespace Microsoft.ML.Runtime.Data -{ - /// - /// This interface is an analogy to that encapsulates the contents of a single - /// column. - /// - /// Note that in the same sense that is not thread safe, implementors of this interface - /// by similar token must not be considered thread safe by users of the interface, and by the same token - /// implementors should feel free to write their implementations with the expectation that only one thread - /// will be calling it at a time. - /// - /// Similarly, in the same sense that an can have its values "change under it" by having - /// the underlying cursor move, so too might this item have its values change under it, and they will if - /// they were directly instantiated from a row. - /// - /// Generally actual implementors of this interface should not implement this directly, but instead implement - /// . - /// - // REVIEW: It is possible we may want to make this ICounted, but let's not start with - // that assumption. 
The use cases I have in mind are that we'll still, on the side, have an - // IRow lying around. - public interface IColumn - { - /// - /// The name of a column. This string should always be non-empty. - /// - string Name { get; } - - /// - /// The type of the column. - /// - ColumnType Type { get; } - - // REVIEW: This property anticipates a time when we get away with metadata accessors - // altogether, and just have the metadata for a column be represented as a row. - /// - /// The metadata for a column, or null if this column has no metadata. - /// - IRow Metadata { get; } - - /// - /// Whether the column should be considered active or not. - /// - bool IsActive { get; } - - /// - /// The value getter, as a . Implementators should just pass through - /// . - /// - /// The generic getter delegate - Delegate GetGetter(); - } - - /// - /// The type specific interface for a . - /// - /// The type of values in this column. This should agree with the - /// field of . - public interface IValueColumn : IColumn - { - new ValueGetter GetGetter(); - } - - public static class RowColumnUtils - { - /// - /// Exposes a single column in a row. - /// - /// The row to wrap - /// The column to expose - /// A row column instance - public static IColumn GetColumn(IRow row, int col) - { - Contracts.CheckValue(row, nameof(row)); - Contracts.CheckParam(0 <= col && col < row.Schema.ColumnCount, nameof(col)); - - Func func = GetColumnCore; - return Utils.MarshalInvoke(func, row.Schema.GetColumnType(col).RawType, row, col); - } - - private static IColumn GetColumnCore(IRow row, int col) - { - Contracts.AssertValue(row); - Contracts.Assert(0 <= col && col < row.Schema.ColumnCount); - Contracts.Assert(row.Schema.GetColumnType(col).RawType == typeof(T)); - - return new RowWrap(row, col); - } - - /// - /// Exposes a single column in a schema. The column is considered inactive. 
- /// - /// The schema to get the data for - /// The column to get - /// A column with false - public static IColumn GetColumn(ISchema schema, int col) - { - Contracts.CheckValue(schema, nameof(schema)); - Contracts.CheckParam(0 <= col && col < schema.ColumnCount, nameof(col)); - - Func func = GetColumnCore; - return Utils.MarshalInvoke(func, schema.GetColumnType(col).RawType, schema, col); - } - - private static IColumn GetColumnCore(ISchema schema, int col) - { - Contracts.AssertValue(schema); - Contracts.Assert(0 <= col && col < schema.ColumnCount); - Contracts.Assert(schema.GetColumnType(col).RawType == typeof(T)); - - return new SchemaWrap(schema, col); - } - - /// - /// Constructs a column out of a value. This will store the input value, not make a copy. - /// - /// The type of the value - /// The column name, which must be non-empty - /// The type of the column, whose raw type must be - /// The value to store in the column - /// Optionally, metadata for the column - /// A column with this value - public static IColumn GetColumn(string name, ColumnType type, ref T value, IRow meta = null) - { - Contracts.CheckNonEmpty(name, nameof(name)); - Contracts.CheckValue(type, nameof(type)); - Contracts.CheckParam(type.RawType == typeof(T), nameof(type), "Mismatch on object type and column type"); - if (type.IsVector) - return Utils.MarshalInvoke(GetColumnVecCore, type.ItemType.RawType, name, type.AsVector, (object)value, meta); - Contracts.CheckParam(type.IsPrimitive, nameof(type), "Type must be either vector or primitive"); - Contracts.CheckValueOrNull(meta); - return Utils.MarshalInvoke(GetColumnOneCore, type.RawType, name, type, (object)value, meta); - } - - private static IColumn GetColumnVecCore(string name, VectorType type, object value, IRow meta) - { - // REVIEW: Ugh. Nasty. Any alternative to boxing? 
- Contracts.AssertNonEmpty(name); - Contracts.AssertValue(type); - Contracts.Assert(type.IsVector); - Contracts.Assert(type.ItemType.RawType == typeof(T)); - Contracts.Assert(value is VBuffer); - Contracts.AssertValueOrNull(meta); - VBuffer typedVal = (VBuffer)value; - return new ConstVecImpl(name, meta, type, typedVal); - } - - private static IColumn GetColumnOneCore(string name, ColumnType type, object value, IRow meta) - { - Contracts.AssertNonEmpty(name); - Contracts.AssertValue(type); - Contracts.Assert(type.IsPrimitive); - Contracts.Assert(type.RawType == typeof(T)); - Contracts.Assert(value is T); - Contracts.AssertValueOrNull(meta); - T typedVal = (T)value; - return new ConstOneImpl(name, meta, type, typedVal); - } - - /// - /// Constructs a column out of a getter. - /// - /// The type of the value - /// The column name, which must be non-empty - /// The type of the column, whose raw type must be - /// The getter for the column - /// Optionally, metadata for the column - /// A column with this getter - public static IColumn GetColumn(string name, ColumnType type, ValueGetter getter, IRow meta = null) - { - Contracts.CheckNonEmpty(name, nameof(name)); - Contracts.CheckValue(type, nameof(type)); - Contracts.CheckParam(type.RawType == typeof(T), nameof(type), "Mismatch on object type and column type"); - Contracts.CheckValue(getter, nameof(getter)); - Contracts.CheckValueOrNull(meta); - - return new GetterImpl(name, meta, type, getter); - } - - /// - /// Wraps a set of row columns as a row. 
- /// - /// The counted object that the output row will wrap for its own implementation of - /// , or if null, the output row will yield default values for those implementations, - /// that is, a totally static row - /// A set of row columns - /// A row with items derived from - public static IRow GetRow(ICounted counted, params IColumn[] columns) - { - Contracts.CheckValueOrNull(counted); - Contracts.CheckValue(columns, nameof(columns)); - return new RowColumnRow(counted, columns); - } - - /// - /// Given a column, returns a deep-copied memory-materialized version of it. Note that - /// it is acceptable for the column to be inactive: the returned column will likewise - /// be inactive. - /// - /// - /// A memory materialized version of which may be, - /// under appropriate circumstances, the input object itself - public static IColumn CloneColumn(IColumn column) - { - Contracts.CheckValue(column, nameof(column)); - return Utils.MarshalInvoke(CloneColumnCore, column.Type.RawType, column); - } - - private static IColumn CloneColumnCore(IColumn column) - { - Contracts.Assert(column is IValueColumn); - IRow meta = column.Metadata; - if (meta != null) - meta = RowCursorUtils.CloneRow(meta); - - var tcolumn = (IValueColumn)column; - if (!tcolumn.IsActive) - return new InactiveImpl(tcolumn.Name, meta, tcolumn.Type); - T val = default(T); - tcolumn.GetGetter()(ref val); - return GetColumn(tcolumn.Name, tcolumn.Type, ref val, meta); - } - - /// - /// The implementation for a simple wrapping of an . 
- /// - private sealed class RowWrap : IValueColumn - { - private readonly IRow _row; - private readonly int _col; - private MetadataRow _meta; - - public string Name => _row.Schema.GetColumnName(_col); - public ColumnType Type => _row.Schema.GetColumnType(_col); - public bool IsActive => _row.IsColumnActive(_col); - - public IRow Metadata - { - get - { - if (_meta == null) - Interlocked.CompareExchange(ref _meta, new MetadataRow(_row.Schema, _col, x => true), null); - return _meta; - } - } - - public RowWrap(IRow row, int col) - { - Contracts.AssertValue(row); - Contracts.Assert(0 <= col && col < row.Schema.ColumnCount); - Contracts.Assert(row.Schema.GetColumnType(col).RawType == typeof(T)); - - _row = row; - _col = col; - } - - Delegate IColumn.GetGetter() - => GetGetter(); - - public ValueGetter GetGetter() - => _row.GetGetter(_col); - } - - /// - /// The base class for a few implementations that do not "go" anywhere. - /// - private abstract class DefaultCounted : ICounted - { - public long Position => 0; - public long Batch => 0; - public ValueGetter GetIdGetter() - => IdGetter; - - private static void IdGetter(ref UInt128 id) - => id = default; - } - - /// - /// Simple wrapper for a schema column, considered inctive with no getter. 
- /// - /// The type of the getter - private sealed class SchemaWrap : IValueColumn - { - private readonly ISchema _schema; - private readonly int _col; - private MetadataRow _meta; - - public string Name => _schema.GetColumnName(_col); - public ColumnType Type => _schema.GetColumnType(_col); - public bool IsActive => false; - - public IRow Metadata - { - get - { - if (_meta == null) - Interlocked.CompareExchange(ref _meta, new MetadataRow(_schema, _col, x => true), null); - return _meta; - } - } - - public SchemaWrap(ISchema schema, int col) - { - Contracts.AssertValue(schema); - Contracts.Assert(0 <= col && col < schema.ColumnCount); - Contracts.Assert(schema.GetColumnType(col).RawType == typeof(T)); - - _schema = schema; - _col = col; - } - - Delegate IColumn.GetGetter() - => GetGetter(); - - public ValueGetter GetGetter() - => throw Contracts.Except("Column not active"); - } - - /// - /// This class exists to present metadata as stored in an for one particular - /// column as an . This class will cease to be necessary at the point when all - /// metadata implementations are just simple s. 
- /// - public sealed class MetadataRow : IRow - { - public Schema Schema => _schemaImpl.AsSchema; - - private readonly ISchema _metaSchema; - private readonly int _col; - private readonly SchemaImpl _schemaImpl; - - private readonly KeyValuePair[] _map; - - long ICounted.Position => 0; - long ICounted.Batch => 0; - ValueGetter ICounted.GetIdGetter() - => IdGetter; - - private static void IdGetter(ref UInt128 id) - => id = default; - - private sealed class SchemaImpl : ISchema - { - private readonly MetadataRow _parent; - private readonly Dictionary _nameToCol; - public Schema AsSchema { get; } - - public int ColumnCount { get { return _parent._map.Length; } } - - public SchemaImpl(MetadataRow parent) - { - Contracts.AssertValue(parent); - _parent = parent; - _nameToCol = new Dictionary(ColumnCount); - for (int i = 0; i < _parent._map.Length; ++i) - _nameToCol[_parent._map[i].Key] = i; - - AsSchema = Schema.Create(this); - } - - public string GetColumnName(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _parent._map[col].Key; - } - - public ColumnType GetColumnType(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _parent._map[col].Value; - } - - public bool TryGetColumnIndex(string name, out int col) - { - return _nameToCol.TryGetValue(name, out col); - } - - public IEnumerable> GetMetadataTypes(int col) - { - return Enumerable.Empty>(); - } - - public void GetMetadata(string kind, int col, ref TValue value) - { - throw MetadataUtils.ExceptGetMetadata(); - } - - public ColumnType GetMetadataTypeOrNull(string kind, int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return null; - } - } - - public MetadataRow(ISchema schema, int col, Func takeMetadata) - { - Contracts.CheckValue(schema, nameof(schema)); - Contracts.CheckParam(0 <= col && col < schema.ColumnCount, nameof(col)); - Contracts.CheckValue(takeMetadata, nameof(takeMetadata)); - - _metaSchema = 
schema; - _col = col; - _map = _metaSchema.GetMetadataTypes(_col).Where(x => takeMetadata(x.Key)).ToArray(); - _schemaImpl = new SchemaImpl(this); - } - - public bool IsColumnActive(int col) - { - Contracts.CheckParam(0 <= col && col < _map.Length, nameof(col)); - return true; - } - - public ValueGetter GetGetter(int col) - { - Contracts.CheckParam(0 <= col && col < _map.Length, nameof(col)); - // REVIEW: On type mismatch, this will throw a metadata exception, which is not really - // appropriate. However, since this meant to be a shim anyway, we will tolerate imperfection. - return (ref TValue dst) => _metaSchema.GetMetadata(_map[col].Key, _col, ref dst); - } - } - - /// - /// This is used for a few implementations that need to store their own name, - /// metadata, and type themselves. - /// - private abstract class SimpleColumnBase : IValueColumn - { - public string Name { get; } - public IRow Metadata { get; } - public ColumnType Type { get; } - public abstract bool IsActive { get; } - - public SimpleColumnBase(string name, IRow meta, ColumnType type) - { - Contracts.CheckNonEmpty(name, nameof(name)); - Contracts.CheckValueOrNull(meta); - Contracts.CheckValue(type, nameof(type)); - Contracts.CheckParam(type.RawType == typeof(T), nameof(type), "Mismatch between CLR type and column type"); - - Name = name; - Metadata = meta; - Type = type; - } - - Delegate IColumn.GetGetter() - { - return GetGetter(); - } - - public abstract ValueGetter GetGetter(); - } - - private sealed class InactiveImpl : SimpleColumnBase - { - public override bool IsActive { get { return false; } } - - public InactiveImpl(string name, IRow meta, ColumnType type) - : base(name, meta, type) - { - } - - public override ValueGetter GetGetter() - { - throw Contracts.Except("Can't get getter for inactive column"); - } - } - - private sealed class ConstOneImpl : SimpleColumnBase - { - private readonly T _value; - - public override bool IsActive => true; - - public ConstOneImpl(string name, IRow 
meta, ColumnType type, T value) - : base(name, meta, type) - { - Contracts.Assert(type.IsPrimitive); - _value = value; - } - - public override ValueGetter GetGetter() - { - return Getter; - } - - private void Getter(ref T val) - { - val = _value; - } - } - - private sealed class ConstVecImpl : SimpleColumnBase> - { - private readonly VBuffer _value; - - public override bool IsActive { get { return true; } } - - public ConstVecImpl(string name, IRow meta, ColumnType type, VBuffer value) - : base(name, meta, type) - { - _value = value; - } - - public override ValueGetter> GetGetter() - { - return Getter; - } - - private void Getter(ref VBuffer val) - { - _value.CopyTo(ref val); - } - } - - private sealed class GetterImpl : SimpleColumnBase - { - private readonly ValueGetter _getter; - - public override bool IsActive => _getter != null; - - public GetterImpl(string name, IRow meta, ColumnType type, ValueGetter getter) - : base(name, meta, type) - { - Contracts.CheckValueOrNull(getter); - _getter = getter; - } - - public override ValueGetter GetGetter() - { - Contracts.Check(IsActive, "column is not active"); - return _getter; - } - } - - /// - /// An that is an amalgation of multiple implementers. - /// - private sealed class RowColumnRow : IRow - { - private static readonly DefaultCountedImpl _defCount = new DefaultCountedImpl(); - private readonly ICounted _counted; - private readonly IColumn[] _columns; - private readonly SchemaImpl _schema; - - public Schema Schema => _schema.AsSchema; - public long Position => _counted.Position; - public long Batch => _counted.Batch; - - public RowColumnRow(ICounted counted, IColumn[] columns) - { - Contracts.AssertValueOrNull(counted); - Contracts.AssertValue(columns); - _counted = counted ?? 
_defCount; - _columns = columns; - _schema = new SchemaImpl(this); - } - - public ValueGetter GetGetter(int col) - { - Contracts.CheckParam(IsColumnActive(col), nameof(col), "requested column not active"); - var rowCol = _columns[col] as IValueColumn; - if (rowCol == null) - throw Contracts.Except("Invalid TValue: '{0}'", typeof(TValue)); - return rowCol.GetGetter(); - } - - public bool IsColumnActive(int col) - { - Contracts.CheckParam(0 <= col && col < _columns.Length, nameof(col)); - return _columns[col].IsActive; - } - - public ValueGetter GetIdGetter() - { - return _counted.GetIdGetter(); - } - - private sealed class SchemaImpl : ISchema - { - private readonly RowColumnRow _parent; - private readonly Dictionary _nameToIndex; - - public Schema AsSchema { get; } - - public int ColumnCount => _parent._columns.Length; - - public SchemaImpl(RowColumnRow parent) - { - Contracts.AssertValue(parent); - _parent = parent; - _nameToIndex = new Dictionary(); - for (int i = 0; i < _parent._columns.Length; ++i) - _nameToIndex[_parent._columns[i].Name] = i; - AsSchema = Schema.Create(this); - } - - public void GetMetadata(string kind, int col, ref TValue value) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - var meta = _parent._columns[col].Metadata; - int mcol; - if (meta == null || !meta.Schema.TryGetColumnIndex(kind, out mcol)) - throw MetadataUtils.ExceptGetMetadata(); - // REVIEW: Again, since this is a shim, not going to sweat the potential for inappropriate exception message. 
- meta.GetGetter(mcol)(ref value); - } - - public ColumnType GetMetadataTypeOrNull(string kind, int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - var meta = _parent._columns[col].Metadata; - int mcol; - if (meta == null || !meta.Schema.TryGetColumnIndex(kind, out mcol)) - return null; - return meta.Schema.GetColumnType(mcol); - } - - public IEnumerable> GetMetadataTypes(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - // REVIEW: An IRow can have collisions in names, whereas there is no notion of this in metadata types. - // Since I intend to remove this soon anyway and the number of usages of this will be very low, I am just going - // to tolerate the potential for strangeness here, since it will practically never arise until we reorganize - // the whole thing. - var meta = _parent._columns[col].Metadata; - if (meta == null) - yield break; - var schema = meta.Schema; - for (int i = 0; i < schema.ColumnCount; ++i) - yield return new KeyValuePair(schema.GetColumnName(i), schema.GetColumnType(i)); - } - - public ColumnType GetColumnType(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _parent._columns[col].Type; - } - - public string GetColumnName(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _parent._columns[col].Name; - } - - public bool TryGetColumnIndex(string name, out int col) - { - return _nameToIndex.TryGetValue(name, out col); - } - } - - private sealed class DefaultCountedImpl : DefaultCounted - { - } - } - } -} diff --git a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs index 869f263f3a..57098d2895 100644 --- a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs +++ b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs @@ -326,7 +326,10 @@ private static Func GetIsNewGroupDelegateCore(IRow cursor, int col) }; } - public static Func GetIsNewBatchDelegate(IRow cursor, int 
batchSize) + [Obsolete("The usages of this appear to be based on a total misunderstanding of what Batch actually is. It is a mechanism " + + "to enable sharding and recovery of parallelized data, and has nothing to do with actual data.")] + [BestFriend] + internal static Func GetIsNewBatchDelegate(IRow cursor, int batchSize) { Contracts.CheckParam(batchSize > 0, nameof(batchSize), "Batch size must be > 0"); long lastNewBatchPosition = -1; @@ -454,20 +457,6 @@ public static ValueGetter> GetLabelGetter(ISlotCursor cursor) }; } - /// - /// Returns a row that is a deep in-memory copy of an input row. Note that inactive - /// columns are allowed in this row, and their activity or inactivity will be reflected - /// in the output row. Note that the deep copy includes a copy of the metadata as well. - /// - /// The input row - /// A deep in-memory copy of the input row - public static IRow CloneRow(IRow row) - { - Contracts.CheckValue(row, nameof(row)); - return RowColumnUtils.GetRow(null, - Utils.BuildArray(row.Schema.ColumnCount, c => RowColumnUtils.GetColumn(row, c))); - } - /// /// Fetches the value of the column by name, in the given row. /// Used by the evaluators to retrieve the metrics from the results IDataView. @@ -487,8 +476,7 @@ public static T Fetch(IExceptionContext ectx, IRow row, string name) /// but want to save it somewhere using a .) /// Note that it is not possible for this method to ensure that the input does not /// change, so users of this convenience must take care of what they do with the input row or the data - /// source it came from, while the returned dataview is potentially being used; if this is somehow - /// difficult it may be wise to use to first have a deep copy of the resulting row. + /// source it came from, while the returned dataview is potentially being used. 
/// /// An environment used to create the host for the resulting data view /// A row, whose columns must all be active @@ -507,7 +495,7 @@ private sealed class OneRowDataView : IDataView private readonly IHost _host; // A channel provider is required for creating the cursor. public Schema Schema => _row.Schema; - public bool CanShuffle { get { return true; } } // The shuffling is even uniformly IID!! :) + public bool CanShuffle => true; // The shuffling is even uniformly IID!! :) public OneRowDataView(IHostEnvironment env, IRow row) { diff --git a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs index e080e9f7be..8aff6ff69a 100644 --- a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs @@ -41,7 +41,7 @@ protected sealed class BindingsImpl : BindingsBase private readonly MetadataUtils.MetadataGetter> _getScoreColumnKind; private readonly MetadataUtils.MetadataGetter> _getScoreValueKind; - private readonly IRow _predColMetadata; + private readonly Schema.Metadata _predColMetadata; private BindingsImpl(Schema input, ISchemaBoundRowMapper mapper, string suffix, string scoreColumnKind, bool user, int scoreColIndex, ColumnType predColType) : base(input, mapper, suffix, user, DefaultColumnNames.PredictedLabel) @@ -59,42 +59,39 @@ private BindingsImpl(Schema input, ISchemaBoundRowMapper mapper, string suffix, // REVIEW: This logic is very specific to multiclass, which is deeply // regrettable, but the class structure as designed and the status of this schema // bearing object makes pushing the logic into the multiclass scorer almost impossible. 
- if (predColType.IsKey) + if (predColType is KeyType predColKeyType && predColKeyType.Count > 0) { - ColumnType scoreSlotsType = mapper.OutputSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.SlotNames, scoreColIndex); - if (scoreSlotsType != null && scoreSlotsType.IsKnownSizeVector && - scoreSlotsType.VectorSize == predColType.KeyCount) + var scoreColMetadata = mapper.OutputSchema[scoreColIndex].Metadata; + + var slotColumn = scoreColMetadata.Schema.GetColumnOrNull(MetadataUtils.Kinds.SlotNames); + if (slotColumn?.Type is VectorType slotColVecType && slotColVecType.Size == predColKeyType.Count) { - Contracts.Assert(scoreSlotsType.VectorSize > 0); - IColumn col = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, - scoreSlotsType.RawType, mapper.OutputSchema, scoreColIndex, MetadataUtils.Kinds.SlotNames); - _predColMetadata = RowColumnUtils.GetRow(null, col); + Contracts.Assert(slotColVecType.Size > 0); + _predColMetadata = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, slotColVecType.RawType, + scoreColMetadata, slotColumn.Value); } else { - scoreSlotsType = mapper.OutputSchema.GetMetadataTypeOrNull(MetadataUtils.Kinds.TrainingLabelValues, scoreColIndex); - if (scoreSlotsType != null && scoreSlotsType.IsKnownSizeVector && - scoreSlotsType.VectorSize == predColType.KeyCount) + var trainLabelColumn = scoreColMetadata.Schema.GetColumnOrNull(MetadataUtils.Kinds.TrainingLabelValues); + if (trainLabelColumn?.Type is VectorType trainLabelColVecType && trainLabelColVecType.Size == predColKeyType.Count) { - Contracts.Assert(scoreSlotsType.VectorSize > 0); - IColumn col = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, - scoreSlotsType.RawType, mapper.OutputSchema, scoreColIndex, MetadataUtils.Kinds.TrainingLabelValues); - _predColMetadata = RowColumnUtils.GetRow(null, col); + Contracts.Assert(trainLabelColVecType.Size > 0); + _predColMetadata = Utils.MarshalInvoke(KeyValueMetadataFromMetadata, trainLabelColVecType.RawType, + scoreColMetadata, trainLabelColumn.Value); } 
} } } - private static IColumn KeyValueMetadataFromMetadata(ISchema schema, int col, string metadataName) + private static Schema.Metadata KeyValueMetadataFromMetadata(Schema.Metadata meta, Schema.Column metaCol) { - Contracts.AssertValue(schema); - Contracts.Assert(0 <= col && col < schema.ColumnCount); - var type = schema.GetMetadataTypeOrNull(metadataName, col); - Contracts.AssertValue(type); - Contracts.Assert(type.RawType == typeof(T)); - - ValueGetter getter = (ref T val) => schema.GetMetadata(metadataName, col, ref val); - return RowColumnUtils.GetColumn(MetadataUtils.Kinds.KeyValues, type, getter); + Contracts.AssertValue(meta); + Contracts.Assert(0 <= metaCol.Index && metaCol.Index < meta.Schema.ColumnCount); + Contracts.Assert(metaCol.Type.RawType == typeof(T)); + var getter = meta.GetGetter(metaCol.Index); + var builder = new MetadataBuilder(); + builder.Add(MetadataUtils.Kinds.KeyValues, metaCol.Type, meta.GetGetter(metaCol.Index)); + return builder.GetMetadata(); } public static BindingsImpl Create(Schema input, ISchemaBoundRowMapper mapper, string suffix, diff --git a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs index 08d6936721..6ffd089f24 100644 --- a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs +++ b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Reflection; using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; @@ -51,7 +52,7 @@ public static StaticSchemaShape Make(ParameterInfo info) /// /// The context on which to throw exceptions /// The schema to check - public void Check(IExceptionContext ectx, ISchema schema) + public void Check(IExceptionContext ectx, Schema schema) { Contracts.AssertValue(ectx); ectx.AssertValue(schema); @@ -60,7 +61,7 @@ public void Check(IExceptionContext ectx, ISchema schema) { if (!schema.TryGetColumnIndex(pair.Key, 
out int colIdx)) throw ectx.ExceptParam(nameof(schema), $"Column named '{pair.Key}' was not found"); - var col = RowColumnUtils.GetColumn(schema, colIdx); + var col = schema[colIdx]; var type = GetTypeOrNull(col); if ((type != null && !pair.Value.IsAssignableFromStaticPipeline(type)) || (type == null && IsStandard(ectx, pair.Value))) { @@ -234,9 +235,8 @@ private static bool IsStandardCore(Type t) /// The column /// The .NET type for the static pipelines that should be used to reflect this type, given /// both the characteristics of the as well as one or two crucial pieces of metadata - private static Type GetTypeOrNull(IColumn col) + private static Type GetTypeOrNull(Schema.Column col) { - Contracts.AssertValue(col); var t = col.Type; Type vecType = null; @@ -278,13 +278,14 @@ private static Type GetTypeOrNull(IColumn col) { // Check to see if we have key value metadata of the appropriate type, size, and whatnot. var meta = col.Metadata; - if (meta.Schema.TryGetColumnIndex(MetadataUtils.Kinds.KeyValues, out int kvcol)) + if (meta.Schema.TryGetColumnIndex(MetadataUtils.Kinds.KeyValues, out int kvcolIndex)) { - var kvType = meta.Schema.GetColumnType(kvcol); - if (kvType.VectorSize == kt.Count) + var kvcol = meta.Schema[kvcolIndex]; + var kvType = kvcol.Type; + if (kvType is VectorType kvVecType && kvVecType.Size == kt.Count) { Contracts.Assert(kt.Count > 0); - var subtype = GetTypeOrNull(RowColumnUtils.GetColumn(meta, kvcol)); + var subtype = GetTypeOrNull(kvcol); if (subtype != null && subtype.IsGenericType) { var sgtype = subtype.GetGenericTypeDefinition(); diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 1238b8392c..73d032dce5 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -3305,13 +3306,15 @@ public IRow GetSummaryIRowOrNull(RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names); - var slotNamesCol = RowColumnUtils.GetColumn(MetadataUtils.Kinds.SlotNames, - new VectorType(TextType.Instance, NumFeatures), ref names); - var slotNamesRow = RowColumnUtils.GetRow(null, slotNamesCol); + var metaBuilder = new MetadataBuilder(); + metaBuilder.AddSlotNames(NumFeatures, names.CopyTo); var weights = default(VBuffer); GetFeatureWeights(ref weights); - return RowColumnUtils.GetRow(null, RowColumnUtils.GetColumn("Gains", new VectorType(NumberType.R4, NumFeatures), ref weights, slotNamesRow)); + var builder = new MetadataBuilder(); + builder.Add>("Gains", new VectorType(NumberType.R4, NumFeatures), weights.CopyTo, metaBuilder.GetMetadata()); + + return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } public IRow GetStatsIRowOrNull(RoleMappedSchema schema) diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs index f8fb515d9f..72692c4016 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs @@ -20,6 +20,7 @@ using Microsoft.ML.Runtime.Model.Pfa; using Microsoft.ML.Runtime.Numeric; using Newtonsoft.Json.Linq; +using Microsoft.ML.Data; // This is for deserialization from a model repository. 
[assembly: LoadableClass(typeof(IPredictorProducing), typeof(LinearBinaryPredictor), null, typeof(SignatureLoadModel), @@ -347,27 +348,18 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) public virtual IRow GetSummaryIRowOrNull(RoleMappedSchema schema) { - var cols = new List(); - var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, Weight.Length, ref names); - var slotNamesCol = RowColumnUtils.GetColumn(MetadataUtils.Kinds.SlotNames, - new VectorType(TextType.Instance, Weight.Length), ref names); - var slotNamesRow = RowColumnUtils.GetRow(null, slotNamesCol); + var subBuilder = new MetadataBuilder(); + subBuilder.AddSlotNames(Weight.Length, (ref VBuffer> dst) => names.CopyTo(ref dst)); var colType = new VectorType(NumberType.R4, Weight.Length); - - // Add the bias and the weight columns. - var bias = Bias; - cols.Add(RowColumnUtils.GetColumn("Bias", NumberType.R4, ref bias)); - var weights = Weight; - cols.Add(RowColumnUtils.GetColumn("Weights", colType, ref weights, slotNamesRow)); - return RowColumnUtils.GetRow(null, cols.ToArray()); + var builder = new MetadataBuilder(); + builder.AddPrimitiveValue("Bias", NumberType.R4, Bias); + builder.Add("Weights", colType, (ref VBuffer dst) => Weight.CopyTo(ref dst), subBuilder.GetMetadata()); + return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } - public virtual IRow GetStatsIRowOrNull(RoleMappedSchema schema) - { - return null; - } + public virtual IRow GetStatsIRowOrNull(RoleMappedSchema schema) => null; public abstract void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null); @@ -514,13 +506,10 @@ public override IRow GetStatsIRowOrNull(RoleMappedSchema schema) { if (_stats == null) return null; - var cols = new List(); var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, Weight.Length, ref names); - - // Add the stat columns. 
- _stats.AddStatsColumns(cols, this, schema, in names); - return RowColumnUtils.GetRow(null, cols.ToArray()); + var meta = _stats.MakeStatisticsMetadata(this, schema, in names); + return MetadataUtils.MetadataAsRow(meta); } public override void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null) diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index b159aaa4be..6d8a411e7c 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -440,9 +440,9 @@ public MulticlassLogisticRegressionPredictor(IHostEnvironment env, VBuffer= 2, "numClasses must be at least 2."); + Contracts.CheckParam(numClasses >= 2, nameof(numClasses), "Must be at least 2."); _numClasses = numClasses; - Contracts.Check(numFeatures >= 1, "numFeatures must be positive."); + Contracts.CheckParam(numFeatures >= 1, nameof(numFeatures), "Must be positive."); _numFeatures = numFeatures; Contracts.Check(Utils.Size(weights) == _numClasses); Contracts.Check(Utils.Size(bias) == _numClasses); @@ -992,10 +992,9 @@ public IRow GetStatsIRowOrNull(RoleMappedSchema schema) if (_stats == null) return null; - var cols = new List(); - var names = default(VBuffer>); - _stats.AddStatsColumns(cols, null, schema, in names); - return RowColumnUtils.GetRow(null, cols.ToArray()); + VBuffer> names = default; + var meta = _stats.MakeStatisticsMetadata(null, schema, in names); + return MetadataUtils.MetadataAsRow(meta); } } diff --git a/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs b/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs index e71cb85257..4626ee5b31 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs +++ 
b/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.CpuMath; @@ -408,54 +409,50 @@ public void SaveSummaryInKeyValuePairs(LinearBinaryPredictor parent, } } - public void AddStatsColumns(List list, LinearBinaryPredictor parent, RoleMappedSchema schema, in VBuffer> names) + internal Schema.Metadata MakeStatisticsMetadata(LinearBinaryPredictor parent, RoleMappedSchema schema, in VBuffer> names) { - _env.AssertValue(list); _env.AssertValueOrNull(parent); _env.AssertValue(schema); - long count = _trainingExampleCount; - list.Add(RowColumnUtils.GetColumn("Count of training examples", NumberType.I8, ref count)); - var dev = _deviance; - list.Add(RowColumnUtils.GetColumn("Residual Deviance", NumberType.R4, ref dev)); - var nullDev = _nullDeviance; - list.Add(RowColumnUtils.GetColumn("Null Deviance", NumberType.R4, ref nullDev)); - var aic = 2 * _paramCount + _deviance; - list.Add(RowColumnUtils.GetColumn("AIC", NumberType.R4, ref aic)); + var builder = new MetadataBuilder(); + + builder.AddPrimitiveValue("Count of training examples", NumberType.I8, _trainingExampleCount); + builder.AddPrimitiveValue("Residual Deviance", NumberType.R4, _deviance); + builder.AddPrimitiveValue("Null Deviance", NumberType.R4, _nullDeviance); + builder.AddPrimitiveValue("AIC", NumberType.R4, 2 * _paramCount + _deviance); if (parent == null) - return; + return builder.GetMetadata(); - Single biasStdErr; - Single biasZScore; - Single biasPValue; - if (!TryGetBiasStatistics(parent.Statistics, parent.Bias, out biasStdErr, out biasZScore, out biasPValue)) - return; + if (!TryGetBiasStatistics(parent.Statistics, parent.Bias, out float biasStdErr, out float biasZScore, out float biasPValue)) + return 
builder.GetMetadata(); var biasEstimate = parent.Bias; - list.Add(RowColumnUtils.GetColumn("BiasEstimate", NumberType.R4, ref biasEstimate)); - list.Add(RowColumnUtils.GetColumn("BiasStandardError", NumberType.R4, ref biasStdErr)); - list.Add(RowColumnUtils.GetColumn("BiasZScore", NumberType.R4, ref biasZScore)); - list.Add(RowColumnUtils.GetColumn("BiasPValue", NumberType.R4, ref biasPValue)); + builder.AddPrimitiveValue("BiasEstimate", NumberType.R4, biasEstimate); + builder.AddPrimitiveValue("BiasStandardError", NumberType.R4, biasStdErr); + builder.AddPrimitiveValue("BiasZScore", NumberType.R4, biasZScore); + builder.AddPrimitiveValue("BiasPValue", NumberType.R4, biasPValue); - var weights = default(VBuffer); + var weights = default(VBuffer); parent.GetFeatureWeights(ref weights); - var estimate = default(VBuffer); - var stdErr = default(VBuffer); - var zScore = default(VBuffer); - var pValue = default(VBuffer); + var estimate = default(VBuffer); + var stdErr = default(VBuffer); + var zScore = default(VBuffer); + var pValue = default(VBuffer); ValueGetter>> getSlotNames; GetUnorderedCoefficientStatistics(parent.Statistics, in weights, in names, ref estimate, ref stdErr, ref zScore, ref pValue, out getSlotNames); - var slotNamesCol = RowColumnUtils.GetColumn(MetadataUtils.Kinds.SlotNames, - new VectorType(TextType.Instance, stdErr.Length), getSlotNames); - var slotNamesRow = RowColumnUtils.GetRow(null, slotNamesCol); + var subMetaBuilder = new MetadataBuilder(); + subMetaBuilder.AddSlotNames(stdErr.Length, getSlotNames); + var subMeta = subMetaBuilder.GetMetadata(); var colType = new VectorType(NumberType.R4, stdErr.Length); - list.Add(RowColumnUtils.GetColumn("Estimate", colType, ref estimate, slotNamesRow)); - list.Add(RowColumnUtils.GetColumn("StandardError", colType, ref stdErr, slotNamesRow)); - list.Add(RowColumnUtils.GetColumn("ZScore", colType, ref zScore, slotNamesRow)); - list.Add(RowColumnUtils.GetColumn("PValue", colType, ref pValue, slotNamesRow)); 
+ builder.Add("Estimate", colType, (ref VBuffer dst) => estimate.CopyTo(ref dst), subMeta); + builder.Add("StandardError", colType, (ref VBuffer dst) => stdErr.CopyTo(ref dst), subMeta); + builder.Add("ZScore", colType, (ref VBuffer dst) => zScore.CopyTo(ref dst), subMeta); + builder.Add("PValue", colType, (ref VBuffer dst) => pValue.CopyTo(ref dst), subMeta); + + return builder.GetMetadata(); } private string DecorateProbabilityString(Single probZ) diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index 027411f016..6db0768287 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -17,35 +17,69 @@ namespace Microsoft.ML.Benchmarks { public class HashBench { - private sealed class Counted : ICounted + private sealed class Row : IRow { + public Schema Schema { get; } + public long Position { get; set; } public long Batch => 0; - public ValueGetter GetIdGetter() => (ref UInt128 val) => val = new UInt128((ulong)Position, 0); + + private readonly Delegate _getter; + + public bool IsColumnActive(int col) + { + if (col != 0) + throw new Exception(); + return true; + } + + public ValueGetter GetGetter(int col) + { + if (col != 0) + throw new Exception(); + if (_getter is ValueGetter typedGetter) + return typedGetter; + throw new Exception(); + } + + public static Row Create(ColumnType type, ValueGetter getter) + { + if (type.RawType != typeof(T)) + throw new Exception(); + return new Row(type, getter); + } + + private Row(ColumnType type, Delegate getter) + { + var builder = new SchemaBuilder(); + builder.AddColumn("Foo", type, null); + Schema = builder.GetSchema(); + _getter = getter; + } } private const int Count = 100_000; private readonly IHostEnvironment _env = new MLContext(); - private Counted _counted; + private Row _inRow; private ValueGetter _getter; private ValueGetter> _vecGetter; - private void InitMap(T val, ColumnType type, int hashBits = 20) + private void 
InitMap(T val, ColumnType type, int hashBits = 20, ValueGetter getter = null) { - var col = RowColumnUtils.GetColumn("Foo", type, ref val); - _counted = new Counted(); - var inRow = RowColumnUtils.GetRow(_counted, col); + if (getter == null) + getter = (ref T dst) => dst = val; + _inRow = Row.Create(type, getter); // One million features is a nice, typical number. var info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: hashBits); var xf = new HashingTransformer(_env, new[] { info }); - var mapper = xf.GetRowToRowMapper(inRow.Schema); + var mapper = xf.GetRowToRowMapper(_inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out int outCol); - var outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + var outRow = mapper.GetRow(_inRow, c => c == outCol, out var _); if (type is VectorType) _vecGetter = outRow.GetGetter>(outCol); else @@ -61,14 +95,14 @@ private void RunScalar() for (int i = 0; i < Count; ++i) { _getter(ref val); - ++_counted.Position; + ++_inRow.Position; } } private void InitDenseVecMap(T[] vals, PrimitiveType itemType, int hashBits = 20) { var vbuf = new VBuffer(vals.Length, vals); - InitMap(vbuf, new VectorType(itemType, vals.Length), hashBits); + InitMap(vbuf, new VectorType(itemType, vals.Length), hashBits, vbuf.CopyTo); } /// @@ -80,7 +114,7 @@ private void RunVector() for (int i = 0; i < Count; ++i) { _vecGetter(ref val); - ++_counted.Position; + ++_inRow.Position; } } diff --git a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs index 4b69791e8c..35da6afbc0 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs @@ -254,44 +254,37 @@ public void AssertStaticSimpleFailure() hello: c.Text.Scalar))); } - private sealed class MetaCounted : ICounted - { - public long Position => 0; - public long Batch => 0; - public ValueGetter GetIdGetter() => (ref UInt128 v) => v = 
default; - } - [Fact] public void AssertStaticKeys() { var env = new MLContext(0); - var counted = new MetaCounted(); // We'll test a few things here. First, the case where the key-value metadata is text. var metaValues1 = new VBuffer>(3, new[] { "a".AsMemory(), "b".AsMemory(), "c".AsMemory() }); - var meta1 = RowColumnUtils.GetColumn(MetadataUtils.Kinds.KeyValues, new VectorType(TextType.Instance, 3), ref metaValues1); - uint value1 = 2; - var col1 = RowColumnUtils.GetColumn("stay", new KeyType(typeof(uint), 0, 3), ref value1, RowColumnUtils.GetRow(counted, meta1)); + var metaBuilder = new MetadataBuilder(); + metaBuilder.AddKeyValues>(3, TextType.Instance, metaValues1.CopyTo); + + var builder = new MetadataBuilder(); + builder.AddPrimitiveValue("stay", new KeyType(typeof(uint), 0, 3), 2u, metaBuilder.GetMetadata()); // Next the case where those values are ints. var metaValues2 = new VBuffer(3, new int[] { 1, 2, 3, 4 }); - var meta2 = RowColumnUtils.GetColumn(MetadataUtils.Kinds.KeyValues, new VectorType(NumberType.I4, 4), ref metaValues2); + metaBuilder = new MetadataBuilder(); + metaBuilder.AddKeyValues(3, NumberType.I4, metaValues2.CopyTo); var value2 = new VBuffer(2, 0, null, null); - var col2 = RowColumnUtils.GetColumn("awhile", new VectorType(new KeyType(typeof(byte), 2, 4), 2), ref value2, RowColumnUtils.GetRow(counted, meta2)); + builder.Add>("awhile", new VectorType(new KeyType(typeof(byte), 2, 3), 2), value2.CopyTo, metaBuilder.GetMetadata()); // Then the case where a value of that kind exists, but is of not of the right kind, in which case it should not be identified as containing that metadata. 
- var metaValues3 = (float)2; - var meta3 = RowColumnUtils.GetColumn(MetadataUtils.Kinds.KeyValues, NumberType.R4, ref metaValues3); - var value3 = (ushort)1; - var col3 = RowColumnUtils.GetColumn("and", new KeyType(typeof(ushort), 0, 2), ref value3, RowColumnUtils.GetRow(counted, meta3)); + metaBuilder = new MetadataBuilder(); + metaBuilder.AddPrimitiveValue(MetadataUtils.Kinds.KeyValues, NumberType.R4, 2f); + builder.AddPrimitiveValue("and", new KeyType(typeof(ushort), 0, 2), (ushort)1, metaBuilder.GetMetadata()); // Then a final case where metadata of that kind is actaully simply altogether absent. var value4 = new VBuffer(5, 0, null, null); - var col4 = RowColumnUtils.GetColumn("listen", new VectorType(new KeyType(typeof(uint), 0, 2)), ref value4); + builder.Add>("listen", new VectorType(new KeyType(typeof(uint), 0, 2)), value4.CopyTo); // Finally compose a trivial data view out of all this. - var row = RowColumnUtils.GetRow(counted, col1, col2, col3, col4); - var view = RowCursorUtils.RowAsDataView(env, row); + var view = RowCursorUtils.RowAsDataView(env, MetadataUtils.MetadataAsRow(builder.GetMetadata())); // Whew! I'm glad that's over with. Let us start running the test in ernest. // First let's do a direct match of the types to ensure that works. diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index 417d9ed388..e5845db090 100644 --- a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; @@ -133,19 +134,13 @@ public void TestOldSavingAndLoading() } } - private sealed class Counted : ICounted - { - public long Position => 0; - public long Batch => 0; - public ValueGetter GetIdGetter() => (ref UInt128 val) => val = default; - } - private void HashTestCore(T val, PrimitiveType type, uint expected, uint expectedOrdered, uint expectedOrdered3) { const int bits = 10; - var col = RowColumnUtils.GetColumn("Foo", type, ref val); - var inRow = RowColumnUtils.GetRow(new Counted(), col); + var builder = new MetadataBuilder(); + builder.AddPrimitiveValue("Foo", type, val); + var inRow = MetadataUtils.MetadataAsRow(builder.GetMetadata()); // First do an unordered hash. var info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits); @@ -174,8 +169,9 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe // at least in the first position, and in the unordered case, the last position. const int vecLen = 5; var denseVec = new VBuffer(vecLen, Utils.CreateArray(vecLen, val)); - col = RowColumnUtils.GetColumn("Foo", new VectorType(type, vecLen), ref denseVec); - inRow = RowColumnUtils.GetRow(new Counted(), col); + builder = new MetadataBuilder(); + builder.Add("Foo", new VectorType(type, vecLen), (ref VBuffer dst) => denseVec.CopyTo(ref dst)); + inRow = MetadataUtils.MetadataAsRow(builder.GetMetadata()); info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: false); xf = new HashingTransformer(Env, new[] { info }); @@ -207,8 +203,9 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe // Let's now do a sparse vector. 
var sparseVec = new VBuffer(10, 3, Utils.CreateArray(3, val), new[] { 0, 3, 7 }); - col = RowColumnUtils.GetColumn("Foo", new VectorType(type, vecLen), ref sparseVec); - inRow = RowColumnUtils.GetRow(new Counted(), col); + builder = new MetadataBuilder(); + builder.Add("Foo", new VectorType(type, vecLen), (ref VBuffer dst) => sparseVec.CopyTo(ref dst)); + inRow = MetadataUtils.MetadataAsRow(builder.GetMetadata()); info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: bits, ordered: false); xf = new HashingTransformer(Env, new[] { info }); From f40fb0238034101f2164edb7bd1354c4351b6e1c Mon Sep 17 00:00:00 2001 From: Zeeshan Siddiqui Date: Mon, 3 Dec 2018 10:53:30 -0800 Subject: [PATCH 017/100] Sample for IID spike and changepoint detection using time series stateful prediction engine. (#1762) * Sample for IID spike and changepoint detection using time series stateful prediction engine. --- .../IidChangePointDetectorTransform.cs | 129 +++++++++++++++++- .../Dynamic/IidSpikeDetectorTransform.cs | 96 ++++++++++++- .../IidChangePointDetector.cs | 3 +- .../IidSpikeDetector.cs | 3 +- .../PredictionFunction.cs | 26 ++++ .../SsaChangePointDetector.cs | 3 +- .../SsaSpikeDetector.cs | 3 +- 7 files changed, 245 insertions(+), 18 deletions(-) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs index 179dda7444..52729c6c97 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs @@ -4,6 +4,10 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.TimeSeriesProcessing; +using Microsoft.ML.Core.Data; +using Microsoft.ML.TimeSeries; +using System.IO; +using Microsoft.ML.Data; namespace Microsoft.ML.Samples.Dynamic { @@ -34,26 +38,26 @@ public static void IidChangePointDetectorTransform() var ml = new 
MLContext(); // Generate sample series data with a change - const int size = 16; - var data = new List(size); - for (int i = 0; i < size / 2; i++) + const int Size = 16; + var data = new List(Size); + for (int i = 0; i < Size / 2; i++) data.Add(new IidChangePointData(5)); // This is a change point - for (int i = 0; i < size / 2; i++) + for (int i = 0; i < Size / 2; i++) data.Add(new IidChangePointData(7)); // Convert data to IDataView. var dataView = ml.CreateStreamingDataView(data); // Setup IidSpikeDetector arguments - string outputColumnName = "Prediction"; - string inputColumnName = "Value"; + string outputColumnName = nameof(ChangePointPrediction.Prediction); + string inputColumnName = nameof(IidChangePointData.Value); var args = new IidChangePointDetector.Arguments() { Source = inputColumnName, Name = outputColumnName, Confidence = 95, // The confidence for spike detection in the range [0, 100] - ChangeHistoryLength = size / 4, // The length of the sliding window on p-values for computing the martingale score. + ChangeHistoryLength = Size / 4, // The length of the sliding window on p-values for computing the martingale score. }; // The transformed data. @@ -88,5 +92,116 @@ public static void IidChangePointDetectorTransform() // 7 0 7.00 0.50 0.00 // 7 0 7.00 0.50 0.00 } + + // This example creates a time series (list of Data with the i-th element corresponding to the i-th time slot). + // IidChangePointDetector is applied then to identify points where data distribution changed using time series + // prediction engine. The engine is checkpointed and then loaded back from disk into memory and used for prediction. + public static void IidChangePointDetectorPrediction() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. 
+ var ml = new MLContext(); + + // Generate sample series data with a change + const int Size = 16; + var data = new List(Size); + for (int i = 0; i < Size / 2; i++) + data.Add(new IidChangePointData(5)); + // This is a change point + for (int i = 0; i < Size / 2; i++) + data.Add(new IidChangePointData(7)); + + // Convert data to IDataView. + var dataView = ml.CreateStreamingDataView(data); + + // Setup IidSpikeDetector arguments + string outputColumnName = nameof(ChangePointPrediction.Prediction); + string inputColumnName = nameof(IidChangePointData.Value); + var args = new IidChangePointDetector.Arguments() + { + Source = inputColumnName, + Name = outputColumnName, + Confidence = 95, // The confidence for spike detection in the range [0, 100] + ChangeHistoryLength = Size / 4, // The length of the sliding window on p-values for computing the martingale score. + }; + + // Time Series model. + ITransformer model = new IidChangePointEstimator(ml, args).Fit(dataView); + + // Create a time series prediction engine from the model. + var engine = model.CreateTimeSeriesPredictionFunction(ml); + for(int index = 0; index < 8; index++) + { + // Anomaly change point detection. + var prediction = engine.Predict(new IidChangePointData(5)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 5, prediction.Prediction[0], + prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Change point + var changePointPrediction = engine.Predict(new IidChangePointData(7)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, changePointPrediction.Prediction[0], + changePointPrediction.Prediction[1], changePointPrediction.Prediction[2], changePointPrediction.Prediction[3]); + + // Checkpoint the model. + var modelPath = "temp.zip"; + engine.CheckPoint(ml, modelPath); + + // Reference to current time series engine because in the next step "engine" will point to the + // checkpointed model being loaded from disk. 
+ var timeseries1 = engine; + + // Load the model. + using (var file = File.OpenRead(modelPath)) + model = TransformerChain.LoadFrom(ml, file); + + // Create a time series prediction engine from the checkpointed model. + engine = model.CreateTimeSeriesPredictionFunction(ml); + for (int index = 0; index < 8; index++) + { + // Anomaly change point detection. + var prediction = engine.Predict(new IidChangePointData(7)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, prediction.Prediction[0], + prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Prediction from the original time series engine should match the prediction from + // check pointed model. + engine = timeseries1; + for (int index = 0; index < 8; index++) + { + // Anomaly change point detection. + var prediction = engine.Predict(new IidChangePointData(7)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, prediction.Prediction[0], + prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Data Alert Score P-Value Martingale value + // 5 0 5.00 0.50 0.00 <-- Time Series 1. + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 5 0 5.00 0.50 0.00 + // 7 1 7.00 0.00 10298.67 <-- alert is on, predicted changepoint (and model is checkpointed). + + // 7 0 7.00 0.13 33950.16 <-- Time Series 2 : Model loaded back from disk and prediction is made. + // 7 0 7.00 0.26 60866.34 + // 7 0 7.00 0.38 78362.04 + // 7 0 7.00 0.50 0.01 + // 7 0 7.00 0.50 0.00 + // 7 0 7.00 0.50 0.00 + // 7 0 7.00 0.50 0.00 + + // 7 0 7.00 0.13 33950.16 <-- Time Series 1 and prediction is made. 
+ // 7 0 7.00 0.26 60866.34 + // 7 0 7.00 0.38 78362.04 + // 7 0 7.00 0.50 0.01 + // 7 0 7.00 0.50 0.00 + // 7 0 7.00 0.50 0.00 + // 7 0 7.00 0.50 0.00 + } } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs index 3c6ae7a9c9..5dcb7e1774 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs @@ -1,9 +1,13 @@ using System; +using System.IO; using System.Linq; using System.Collections.Generic; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.TimeSeriesProcessing; +using Microsoft.ML.Core.Data; +using Microsoft.ML.TimeSeries; namespace Microsoft.ML.Samples.Dynamic { @@ -34,27 +38,27 @@ public static void IidSpikeDetectorTransform() var ml = new MLContext(); // Generate sample series data with a spike - const int size = 10; - var data = new List(size); - for (int i = 0; i < size / 2; i++) + const int Size = 10; + var data = new List(Size); + for (int i = 0; i < Size / 2; i++) data.Add(new IidSpikeData(5)); // This is a spike data.Add(new IidSpikeData(10)); - for (int i = 0; i < size / 2; i++) + for (int i = 0; i < Size / 2; i++) data.Add(new IidSpikeData(5)); // Convert data to IDataView. 
var dataView = ml.CreateStreamingDataView(data); // Setup IidSpikeDetector arguments - string outputColumnName = "Prediction"; - string inputColumnName = "Value"; + string outputColumnName = nameof(IidSpikePrediction.Prediction); + string inputColumnName = nameof(IidSpikeData.Value); var args = new IidSpikeDetector.Arguments() { Source = inputColumnName, Name = outputColumnName, Confidence = 95, // The confidence for spike detection in the range [0, 100] - PvalueHistoryLength = size / 4 // The size of the sliding window for computing the p-value + PvalueHistoryLength = Size / 4 // The size of the sliding window for computing the p-value; shorter windows are more sensitive to spikes. }; // The transformed data. @@ -83,5 +87,83 @@ public static void IidSpikeDetectorTransform() // 0 5.00 0.50 // 0 5.00 0.50 } + + public static void IidSpikeDetectorPrediction() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Generate sample series data with a spike + const int Size = 10; + var data = new List(Size); + for (int i = 0; i < Size / 2; i++) + data.Add(new IidSpikeData(5)); + // This is a spike + data.Add(new IidSpikeData(10)); + for (int i = 0; i < Size / 2; i++) + data.Add(new IidSpikeData(5)); + + // Convert data to IDataView. + var dataView = ml.CreateStreamingDataView(data); + + // Setup IidSpikeDetector arguments + string outputColumnName = nameof(IidSpikePrediction.Prediction); + string inputColumnName = nameof(IidSpikeData.Value); + var args = new IidSpikeDetector.Arguments() + { + Source = inputColumnName, + Name = outputColumnName, + Confidence = 95, // The confidence for spike detection in the range [0, 100] + PvalueHistoryLength = Size / 4 // The size of the sliding window for computing the p-value; shorter windows are more sensitive to spikes. + }; + + // The transformed model. 
+ ITransformer model = new IidSpikeEstimator(ml, args).Fit(dataView); + + // Create a time series prediction engine from the model. + var engine = model.CreateTimeSeriesPredictionFunction(ml); + for (int index = 0; index < 5; index++) + { + // Anomaly spike detection. + var prediction = engine.Predict(new IidSpikeData(5)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 5, prediction.Prediction[0], + prediction.Prediction[1], prediction.Prediction[2]); + } + + // Spike. + var spikePrediction = engine.Predict(new IidSpikeData(10)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 10, spikePrediction.Prediction[0], + spikePrediction.Prediction[1], spikePrediction.Prediction[2]); + + // Checkpoint the model. + var modelPath = "temp.zip"; + engine.CheckPoint(ml, modelPath); + + // Load the model. + using (var file = File.OpenRead(modelPath)) + model = TransformerChain.LoadFrom(ml, file); + + for (int index = 0; index < 5; index++) + { + // Anomaly spike detection. + var prediction = engine.Predict(new IidSpikeData(5)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 5, prediction.Prediction[0], + prediction.Prediction[1], prediction.Prediction[2]); + } + + // Data Alert Score P-Value + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + // 10 1 10.00 0.00 <-- alert is on, predicted spike (check-point model) + // 5 0 5.00 0.26 <-- load model from disk. + // 5 0 5.00 0.26 + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + // 5 0 5.00 0.50 + } } } diff --git a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs index 487a01a4d4..3a899f4339 100644 --- a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs @@ -212,7 +212,8 @@ public sealed class IidChangePointEstimator : TrivialEstimator /// Host Environment. /// Name of the input column. - /// Name of the output column. + /// Name of the output column. 
Column is a vector of type double and size 4. + /// The vector contains Alert, Raw Score, P-Value and Martingale score as first four values. /// The confidence for change point detection in the range [0, 100]. /// The length of the sliding window on p-values for computing the martingale score. /// The martingale used for scoring. diff --git a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs index 7bc8c6b7a2..3fdf968025 100644 --- a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs @@ -191,7 +191,8 @@ public sealed class IidSpikeEstimator : TrivialEstimator /// /// Host Environment. /// Name of the input column. - /// Name of the output column. + /// Name of the output column. Column is a vector of type double and size 3. + /// The vector contains Alert, Raw Score, P-Value as first three values. /// The confidence for spike detection in the range [0, 100]. /// The size of the sliding window for computing the p-value. /// The argument that determines whether to detect positive or negative anomalies, or both. diff --git a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs index 4c1d1681f6..792b06d6b3 100644 --- a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs +++ b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs @@ -53,6 +53,12 @@ public sealed class TimeSeriesPredictionFunction : PredictionEngineB private long _rowPosition; private ITransformer InputTransformer { get; set; } + /// + /// Checkpoints to disk with the updated + /// state. + /// + /// Usually . + /// Path to file on disk where the updated model needs to be saved. 
public void CheckPoint(IHostEnvironment env, string modelPath) { using (var file = File.Create(modelPath)) @@ -246,6 +252,26 @@ public override void Predict(TSrc example, ref TDst prediction) public static class PredictionFunctionExtensions { + /// + /// creates a prediction function/engine for a time series pipeline + /// It updates the state of time series model with observations seen at prediction phase and allows checkpointing the model. + /// + /// Class describing input schema to the model. + /// Class describing the output schema of the prediction. + /// The time series pipeline in the form of a . + /// Usually + /// To ignore missing columns. Default is false. + /// Input schema definition. Default is null. + /// Output schema definition. Default is null. + ///

Example code can be found by searching for TimeSeriesPredictionFunction in ML.NET.

+ /// + /// + /// + /// + /// public static TimeSeriesPredictionFunction CreateTimeSeriesPredictionFunction(this ITransformer transformer, IHostEnvironment env, bool ignoreMissingColumns = false, SchemaDefinition inputSchemaDefinition = null, SchemaDefinition outputSchemaDefinition = null) where TSrc : class diff --git a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs index 856ba1a0e3..9171c34662 100644 --- a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs @@ -225,7 +225,8 @@ public sealed class SsaChangePointEstimator : IEstimator ///
/// Host Environment. /// Name of the input column. - /// Name of the output column. + /// Name of the output column. Column is a vector of type double and size 4. + /// The vector contains Alert, Raw Score, P-Value and Martingale score as first four values. /// The confidence for change point detection in the range [0, 100]. /// The number of points from the beginning of the sequence used for training. /// The size of the sliding window for computing the p-value. diff --git a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs index 9c3d72cd2f..f98c8bf34b 100644 --- a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs @@ -206,7 +206,8 @@ public sealed class SsaSpikeEstimator : IEstimator ///
/// Host Environment. /// Name of the input column. - /// Name of the output column. + /// Name of the output column. Column is a vector of type double and size 3. + /// The vector contains Alert, Raw Score, P-Value as first three values. /// The confidence for spike detection in the range [0, 100]. /// The size of the sliding window for computing the p-value. /// The number of points from the beginning of the sequence used for training. From 93156b6b979896cbd7e1427c21d21ef706c326ac Mon Sep 17 00:00:00 2001 From: Monte Hoover <37886197+montebhoover@users.noreply.github.com> Date: Mon, 3 Dec 2018 10:57:19 -0800 Subject: [PATCH 018/100] SSA time series samples (#1788) * Added SsaChangePointDetectorPrediction sample. * Completed SsaChangePointDetectorPrediction sample. * Added SsaSpikeDetectorPrediction sample. * Cleanup usings, etc. * Added note when saved and loaded from disk. --- .../SsaChangePointDetectorTransform.cs | 183 +++++++++++++---- .../Dynamic/SsaSpikeDetectorTransform.cs | 190 ++++++++++++++---- 2 files changed, 300 insertions(+), 73 deletions(-) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs index 42bc17e54f..e03ba68606 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs @@ -1,9 +1,12 @@ -using System; -using System.Linq; -using System.Collections.Generic; -using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.TimeSeriesProcessing; +using Microsoft.ML.TimeSeries; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; namespace Microsoft.ML.Samples.Dynamic { @@ -21,35 +24,41 @@ public SsaChangePointData(float value) // This example creates a time series (list of Data with the i-th element 
corresponding to the i-th time slot). // SsaChangePointDetector is applied then to identify points where data distribution changed. + // SsaChangePointDetector differs from IidChangePointDetector in that it can account for temporal seasonality + // in the data. public static void SsaChangePointDetectorTransform() { // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, // as well as the source of randomness. var ml = new MLContext(); - // Generate sample series data with a change - const int size = 16; - var data = new List(size); - for (int i = 0; i < size / 2; i++) - data.Add(new SsaChangePointData(5)); + // Generate sample series data with a recurring pattern and then a change in trend + const int SeasonalitySize = 5; + const int TrainingSeasons = 3; + const int TrainingSize = SeasonalitySize * TrainingSeasons; + var data = new List(); + for (int i = 0; i < TrainingSeasons; i++) + for (int j = 0; j < SeasonalitySize; j++) + data.Add(new SsaChangePointData(j)); // This is a change point - for (int i = 0; i < size / 2; i++) - data.Add(new SsaChangePointData(7)); + for (int i = 0; i < SeasonalitySize; i++) + data.Add(new SsaChangePointData(i * 100)); // Convert data to IDataView. var dataView = ml.CreateStreamingDataView(data); - // Setup IidSpikeDetector arguments - string outputColumnName = "Prediction"; - string inputColumnName = "Value"; + // Setup SsaChangePointDetector arguments + var inputColumnName = nameof(SsaChangePointData.Value); + var outputColumnName = nameof(ChangePointPrediction.Prediction); var args = new SsaChangePointDetector.Arguments() { Source = inputColumnName, Name = outputColumnName, - Confidence = 95, // The confidence for spike detection in the range [0, 100] - ChangeHistoryLength = size / 4, // The length of the sliding window on p-values for computing the martingale score. - TrainingWindowSize = size / 2, // The number of points from the beginning of the sequence used for training. 
- SeasonalWindowSize = size / 8, // An upper bound on the largest relevant seasonality in the input time - series." + Confidence = 95, // The confidence for spike detection in the range [0, 100] + ChangeHistoryLength = 8, // The length of the window for detecting a change in trend; shorter windows are more sensitive to spikes. + TrainingWindowSize = TrainingSize, // The number of points from the beginning of the sequence used for training. + SeasonalWindowSize = SeasonalitySize + 1 // An upper bound on the largest relevant seasonality in the input time series." + }; // The transformed data. @@ -66,23 +75,127 @@ public static void SsaChangePointDetectorTransform() Console.WriteLine(""); // Prediction column obtained post-transformation. - // Data Alert Score P-Value Martingale value - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 5 0 0.00 0.50 0.00 - // 7 1 2.00 0.00 10298.67 <-- alert is on, predicted changepoint - // 7 0 1.00 0.31 15741.58 - // 7 0 0.00 0.28 26487.48 - // 7 0 0.00 0.28 44569.02 - // 7 0 0.00 0.28 0.01 - // 7 0 0.00 0.38 0.01 - // 7 0 0.00 0.50 0.00 - // 7 0 0.00 0.50 0.00 + // Data Alert Score P-Value Martingale value + // 0 0 - 2.53 0.50 0.00 + // 1 0 - 0.01 0.01 0.00 + // 2 0 0.76 0.14 0.00 + // 3 0 0.69 0.28 0.00 + // 4 0 1.44 0.18 0.00 + // 0 0 - 1.84 0.17 0.00 + // 1 0 0.22 0.44 0.00 + // 2 0 0.20 0.45 0.00 + // 3 0 0.16 0.47 0.00 + // 4 0 1.33 0.18 0.00 + // 0 0 - 1.79 0.07 0.00 + // 1 0 0.16 0.50 0.00 + // 2 0 0.09 0.50 0.00 + // 3 0 0.08 0.45 0.00 + // 4 0 1.31 0.12 0.00 + // 0 0 - 1.79 0.07 0.00 + // 100 1 99.16 0.00 4031.94 <-- alert is on, predicted changepoint + // 200 0 185.23 0.00 731260.87 + // 300 0 270.40 0.01 3578470.47 + // 400 0 357.11 0.03 45298370.86 + } + + // This example shows change point detection as above, but demonstrates how to train a model + // that can run predictions on streaming data, and 
how to persist the trained model and then re-load it. + public static void SsaChangePointDetectorPrediction() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Generate sample series data with a recurring pattern + const int SeasonalitySize = 5; + const int TrainingSeasons = 3; + const int TrainingSize = SeasonalitySize * TrainingSeasons; + var data = new List(); + for (int i = 0; i < TrainingSeasons; i++) + for (int j = 0; j < SeasonalitySize; j++) + data.Add(new SsaChangePointData(j)); + + // Convert data to IDataView. + var dataView = ml.CreateStreamingDataView(data); + + // Setup SsaChangePointDetector arguments + var inputColumnName = nameof(SsaChangePointData.Value); + var outputColumnName = nameof(ChangePointPrediction.Prediction); + var args = new SsaChangePointDetector.Arguments() + { + Source = inputColumnName, + Name = outputColumnName, + Confidence = 95, // The confidence for spike detection in the range [0, 100] + ChangeHistoryLength = 8, // The length of the window for detecting a change in trend; shorter windows are more sensitive to spikes. + TrainingWindowSize = TrainingSize, // The number of points from the beginning of the sequence used for training. + SeasonalWindowSize = SeasonalitySize + 1 // An upper bound on the largest relevant seasonality in the input time series." + + }; + + // Train the change point detector. + ITransformer model = new SsaChangePointEstimator(ml, args).Fit(dataView); + + // Create a prediction engine from the model for feeding new data. + var engine = model.CreateTimeSeriesPredictionFunction(ml); + + // Start streaming new data points with no change point to the prediction engine. 
+ Console.WriteLine($"Output from ChangePoint predictions on new data:"); + Console.WriteLine("Data\tAlert\tScore\tP-Value\tMartingale value"); + ChangePointPrediction prediction = null; + for (int i = 0; i < 5; i++) + { + var value = i; + prediction = engine.Predict(new SsaChangePointData(value)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Now stream data points that reflect a change in trend. + for (int i = 0; i < 5; i++) + { + var value = (i + 1) * 100; + prediction = engine.Predict(new SsaChangePointData(value)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Now we demonstrate saving and loading the model. + + // Save the model that exists within the prediction engine. + // The engine has been updating this model with every new data point. + var modelPath = "model.zip"; + engine.CheckPoint(ml, modelPath); + + // Load the model. + using (var file = File.OpenRead(modelPath)) + model = TransformerChain.LoadFrom(ml, file); + + // We must create a new prediction engine from the persisted model. + engine = model.CreateTimeSeriesPredictionFunction(ml); + + // Run predictions on the loaded model. 
+ for (int i = 0; i < 5; i++) + { + var value = (i + 1) * 100; + prediction = engine.Predict(new SsaChangePointData(value)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); + } + + // Output from ChangePoint predictions on new data: + // Data Alert Score P-Value Martingale value + // 0 0 - 1.01 0.50 0.00 + // 1 0 - 0.24 0.22 0.00 + // 2 0 - 0.31 0.30 0.00 + // 3 0 0.44 0.01 0.00 + // 4 0 2.16 0.00 0.24 + // 100 0 86.23 0.00 2076098.24 + // 200 0 171.38 0.00 809668524.21 + // 300 1 256.83 0.01 22130423541.93 <-- alert is on, note that delay is expected + // 400 0 326.55 0.04 241162710263.29 + // 500 0 364.82 0.08 597660527041.45 <-- saved to disk + // 100 0 - 58.58 0.15 1096021098844.34 <-- loaded from disk and running new predictions + // 200 0 - 41.24 0.20 97579154688.98 + // 300 0 - 30.61 0.24 95319753.87 + // 400 0 58.87 0.38 14.24 + // 500 0 219.28 0.36 0.05 } } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs index 82ff8a891f..fe9e981ecd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs @@ -1,9 +1,13 @@ -using System; -using System.Linq; -using System.Collections.Generic; -using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.TimeSeriesProcessing; +using Microsoft.ML.TimeSeries; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; namespace Microsoft.ML.Samples.Dynamic { @@ -27,36 +31,41 @@ class SsaSpikePrediction // This example creates a time series (list of Data with the i-th element corresponding to the i-th time slot). 
// SsaSpikeDetector is applied then to identify spiking points in the series. + // SsaSpikeDetector differs from IidSpikeDetector in that it can account for temporal seasonality + // in the data. public static void SsaSpikeDetectorTransform() { // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, // as well as the source of randomness. var ml = new MLContext(); - // Generate sample series data with a spike - const int size = 16; - var data = new List(size); - for (int i = 0; i < size / 2; i++) - data.Add(new SsaSpikeData(5)); + // Generate sample series data with a recurring pattern and a spike within the pattern + const int SeasonalitySize = 5; + const int TrainingSeasons = 3; + const int TrainingSize = SeasonalitySize * TrainingSeasons; + var data = new List(); + for (int i = 0; i < TrainingSeasons; i++) + for (int j = 0; j < SeasonalitySize; j++) + data.Add(new SsaSpikeData(j)); // This is a spike - data.Add(new SsaSpikeData(10)); - for (int i = 0; i < size / 2; i++) - data.Add(new SsaSpikeData(5)); + data.Add(new SsaSpikeData(100)); + for (int i = 0; i < SeasonalitySize; i++) + data.Add(new SsaSpikeData(i)); // Convert data to IDataView. var dataView = ml.CreateStreamingDataView(data); // Setup IidSpikeDetector arguments - string outputColumnName = "Prediction"; - string inputColumnName = "Value"; + var inputColumnName = nameof(SsaSpikeData.Value); + var outputColumnName = nameof(SsaSpikePrediction.Prediction); var args = new SsaSpikeDetector.Arguments() { Source = inputColumnName, Name = outputColumnName, - Confidence = 95, // The confidence for spike detection in the range [0, 100] - PvalueHistoryLength = size / 4, // The size of the sliding window for computing the p-value - TrainingWindowSize = size / 2, // The number of points from the beginning of the sequence used for training. - SeasonalWindowSize = size / 8, // An upper bound on the largest relevant seasonality in the input time - series." 
+ Confidence = 95, // The confidence for spike detection in the range [0, 100] + PvalueHistoryLength = 8, // The size of the sliding window for computing the p-value; shorter windows are more sensitive to spikes. + TrainingWindowSize = TrainingSize, // The number of points from the beginning of the sequence used for training. + SeasonalWindowSize = SeasonalitySize + 1 // An upper bound on the largest relevant seasonality in the input time series." }; // The transformed data. @@ -66,30 +75,135 @@ public static void SsaSpikeDetectorTransform() var predictionColumn = transformedData.AsEnumerable(ml, reuseRowObject: false); Console.WriteLine($"{outputColumnName} column obtained post-transformation."); - Console.WriteLine("Alert\tScore\tP-Value"); + Console.WriteLine("Data\tAlert\tScore\tP-Value"); + int k = 0; foreach (var prediction in predictionColumn) - Console.WriteLine("{0}\t{1:0.00}\t{2:0.00}", prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", data[k++].Value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); Console.WriteLine(""); // Prediction column obtained post-transformation. 
- // Alert Score P-Value - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 0 0.00 0.50 - // 1 5.00 0.00 <-- alert is on, predicted spike - // 0 -2.50 0.09 - // 0 -2.50 0.22 - // 0 0.00 0.47 - // 0 0.00 0.47 - // 0 0.00 0.26 - // 0 0.00 0.38 - // 0 0.00 0.50 - // 0 0.00 0.50 + // Data Alert Score P-Value + // 0 0 - 2.53 0.50 + // 1 0 - 0.01 0.01 + // 2 0 0.76 0.14 + // 3 0 0.69 0.28 + // 4 0 1.44 0.18 + // 0 0 - 1.84 0.17 + // 1 0 0.22 0.44 + // 2 0 0.20 0.45 + // 3 0 0.16 0.47 + // 4 0 1.33 0.18 + // 0 0 - 1.79 0.07 + // 1 0 0.16 0.50 + // 2 0 0.09 0.50 + // 3 0 0.08 0.45 + // 4 0 1.31 0.12 + // 100 1 98.21 0.00 <-- alert is on, predicted spike + // 0 0 - 13.83 0.29 + // 1 0 - 1.74 0.44 + // 2 0 - 0.47 0.46 + // 3 0 - 16.50 0.29 + // 4 0 - 29.82 0.21 + } + + // This example shows spike detection as above, but demonstrates how to train a model + // that can run predictions on streaming data, and how to persist the trained model and then re-load it. + public static void SsaSpikeDetectorPrediction() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Generate sample series data with a recurring pattern + const int SeasonalitySize = 5; + const int TrainingSeasons = 3; + const int TrainingSize = SeasonalitySize * TrainingSeasons; + var data = new List(); + for (int i = 0; i < TrainingSeasons; i++) + for (int j = 0; j < SeasonalitySize; j++) + data.Add(new SsaSpikeData(j)); + + // Convert data to IDataView. 
+ var dataView = ml.CreateStreamingDataView(data); + + // Setup IidSpikeDetector arguments + var inputColumnName = nameof(SsaSpikeData.Value); + var outputColumnName = nameof(SsaSpikePrediction.Prediction); + var args = new SsaSpikeDetector.Arguments() + { + Source = inputColumnName, + Name = outputColumnName, + Confidence = 95, // The confidence for spike detection in the range [0, 100] + PvalueHistoryLength = 8, // The size of the sliding window for computing the p-value; shorter windows are more sensitive to spikes. + TrainingWindowSize = TrainingSize, // The number of points from the beginning of the sequence used for training. + SeasonalWindowSize = SeasonalitySize + 1 // An upper bound on the largest relevant seasonality in the input time series." + }; + + // Train the change point detector. + ITransformer model = new SsaSpikeEstimator(ml, args).Fit(dataView); + + // Create a prediction engine from the model for feeding new data. + var engine = model.CreateTimeSeriesPredictionFunction(ml); + + // Start streaming new data points with no change point to the prediction engine. + Console.WriteLine($"Output from spike predictions on new data:"); + Console.WriteLine("Data\tAlert\tScore\tP-Value"); + SsaSpikePrediction prediction = null; + for (int j = 0; j < 2; j++) + { + for (int i = 0; i < 5; i++) + { + var value = i; + prediction = engine.Predict(new SsaSpikeData(value)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); + } + } + + // Now send a data point that reflects a spike. + var newValue = 100; + prediction = engine.Predict(new SsaSpikeData(newValue)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", newValue, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); + + // Now we demonstrate saving and loading the model. + + // Save the model that exists within the prediction engine. 
+ // The engine has been updating this model with every new data point. + var modelPath = "model.zip"; + engine.CheckPoint(ml, modelPath); + + // Load the model. + using (var file = File.OpenRead(modelPath)) + model = TransformerChain.LoadFrom(ml, file); + + // We must create a new prediction engine from the persisted model. + engine = model.CreateTimeSeriesPredictionFunction(ml); + + // Run predictions on the loaded model. + for (int i = 0; i < 5; i++) + { + var value = i; + prediction = engine.Predict(new SsaSpikeData(value)); + Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); + } + + // Output from spike predictions on new data: + // Data Alert Score P-Value + // 0 0 - 1.01 0.50 + // 1 0 - 0.24 0.22 + // 2 0 - 0.31 0.30 + // 3 0 0.44 0.01 + // 4 0 2.16 0.00 + // 0 0 - 0.78 0.27 + // 1 0 - 0.80 0.30 + // 2 0 - 0.84 0.31 + // 3 0 0.33 0.31 + // 4 0 2.21 0.07 + // 100 1 86.17 0.00 <-- alert is on, predicted spike + // 0 0 - 2.74 0.40 <-- saved to disk, re-loaded, and running new predictions + // 1 0 - 1.47 0.42 + // 2 0 - 17.50 0.24 + // 3 0 - 30.82 0.16 + // 4 0 - 23.24 0.28 } } } From f0fb2a0cc5316dc5abce55e3f0893e4d3d94ba1e Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Mon, 3 Dec 2018 14:20:55 -0800 Subject: [PATCH 019/100] Add test coverage for VBuffer (#1804) * Add ScaleBy test * Add VBuffer unit tests, and fix a couple of bugs * Fix test tolerance * Address PR comments and fix unit test tolerance * Fix AlignedArray bug --- src/Microsoft.ML.Core/Utilities/MathUtils.cs | 2 +- src/Microsoft.ML.Core/Utilities/Utils.cs | 28 +- .../Utilities/VBufferUtils.cs | 2 +- src/Microsoft.ML.CpuMath/AlignedArray.cs | 8 +- .../DataView/AppendRowsDataView.cs | 2 +- .../Vector/GenericSpanSortHelper.cs | 36 +- .../Depricated/Vector/VBufferMathUtils.cs | 3 +- .../Depricated/Vector/VectorUtils.cs | 2 +- .../Properties/AssemblyInfo.cs | 1 + ...FeatureContributionCalculationTransform.cs | 2 +- 
.../Transforms/KeyToVector.cs | 2 +- .../Utils/LinqExtensions.cs | 8 +- .../PermutationFeatureImportance.cs | 2 +- .../UnitTests/TestVBuffer.cs | 1230 ++++++++++++++++- .../UnitTests/TestVectorUtils.cs | 62 - 15 files changed, 1273 insertions(+), 117 deletions(-) delete mode 100644 test/Microsoft.ML.Core.Tests/UnitTests/TestVectorUtils.cs diff --git a/src/Microsoft.ML.Core/Utilities/MathUtils.cs b/src/Microsoft.ML.Core/Utilities/MathUtils.cs index 4ce1bb4cf0..859469db51 100644 --- a/src/Microsoft.ML.Core/Utilities/MathUtils.cs +++ b/src/Microsoft.ML.Core/Utilities/MathUtils.cs @@ -726,7 +726,7 @@ public static Float GetMedianInPlace(Float[] src, int count) return (src[iv - 1] + src[iv]) / 2; } - public static Double CosineSimilarity(Float[] a, Float[] b, int aIdx, int bIdx, int len) + public static Double CosineSimilarity(ReadOnlySpan a, ReadOnlySpan b, int aIdx, int bIdx, int len) { const Double epsilon = 1e-12f; Contracts.Assert(len > 0); diff --git a/src/Microsoft.ML.Core/Utilities/Utils.cs b/src/Microsoft.ML.Core/Utilities/Utils.cs index 3395bdfe06..971c5fb1d1 100644 --- a/src/Microsoft.ML.Core/Utilities/Utils.cs +++ b/src/Microsoft.ML.Core/Utilities/Utils.cs @@ -530,28 +530,10 @@ public static int[] GetRandomPermutation(Random rand, int size) Contracts.Assert(size >= 0); var res = GetIdentityPermutation(size); - Shuffle(rand, res); + Shuffle(rand, res); return res; } - public static void Shuffle(Random rand, T[] rgv) - { - Contracts.AssertValue(rand); - Contracts.AssertValue(rgv); - - Shuffle(rand, rgv, 0, rgv.Length); - } - - public static void Shuffle(Random rand, T[] rgv, int min, int lim) - { - Contracts.AssertValue(rand); - Contracts.AssertValue(rgv); - Contracts.Check(0 <= min & min <= lim & lim <= rgv.Length); - - for (int iv = min; iv < lim; iv++) - Swap(ref rgv[iv], ref rgv[iv + rand.Next(lim - iv)]); - } - public static bool AreEqual(Single[] arr1, Single[] arr2) { if (arr1 == arr2) @@ -586,6 +568,14 @@ public static bool AreEqual(Double[] arr1, 
Double[] arr2) return true; } + public static void Shuffle(Random rand, Span rgv) + { + Contracts.AssertValue(rand); + + for (int iv = 0; iv < rgv.Length; iv++) + Swap(ref rgv[iv], ref rgv[iv + rand.Next(rgv.Length - iv)]); + } + public static bool AreEqual(int[] arr1, int[] arr2) { if (arr1 == arr2) diff --git a/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs b/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs index ac07e3e614..974af42339 100644 --- a/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs +++ b/src/Microsoft.ML.Core/Utilities/VBufferUtils.cs @@ -1322,7 +1322,7 @@ public static void ApplyInto(in VBuffer a, in VBuffer // REVIEW: Worth optimizing the newCount == a.Length case? // Probably not... - editor = VBufferEditor.Create(ref dst, a.Length, newCount); + editor = VBufferEditor.Create(ref dst, a.Length, newCount, requireIndicesOnDense: true); Span indices = editor.Indices; if (newCount == bValues.Length) diff --git a/src/Microsoft.ML.CpuMath/AlignedArray.cs b/src/Microsoft.ML.CpuMath/AlignedArray.cs index 4602e884e4..0a631be0e9 100644 --- a/src/Microsoft.ML.CpuMath/AlignedArray.cs +++ b/src/Microsoft.ML.CpuMath/AlignedArray.cs @@ -110,21 +110,21 @@ public Float this[int index] } } - public void CopyTo(Float[] dst, int index, int count) + public void CopyTo(Span dst, int index, int count) { Contracts.Assert(0 <= count && count <= _size); Contracts.Assert(dst != null); Contracts.Assert(0 <= index && index <= dst.Length - count); - Array.Copy(Items, _base, dst, index, count); + Items.AsSpan(_base, count).CopyTo(dst.Slice(index)); } - public void CopyTo(int start, Float[] dst, int index, int count) + public void CopyTo(int start, Span dst, int index, int count) { Contracts.Assert(0 <= count); Contracts.Assert(0 <= start && start <= _size - count); Contracts.Assert(dst != null); Contracts.Assert(0 <= index && index <= dst.Length - count); - Array.Copy(Items, start + _base, dst, index, count); + Items.AsSpan(start + _base, count).CopyTo(dst.Slice(index)); } 
public void CopyFrom(ReadOnlySpan src) diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index 3ad08e94f2..d20d832e88 100644 --- a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -444,7 +444,7 @@ private void GenerateNextBatch() _batchEnd = newEnd; } _totalLeft -= _batchEnd; - Utils.Shuffle(_rand, _batch, 0, _batchEnd); + Utils.Shuffle(_rand, _batch.AsSpan(0, _batchEnd)); } public int Next() diff --git a/src/Microsoft.ML.Data/Depricated/Vector/GenericSpanSortHelper.cs b/src/Microsoft.ML.Data/Depricated/Vector/GenericSpanSortHelper.cs index f94993e419..3f90ddca23 100644 --- a/src/Microsoft.ML.Data/Depricated/Vector/GenericSpanSortHelper.cs +++ b/src/Microsoft.ML.Data/Depricated/Vector/GenericSpanSortHelper.cs @@ -41,10 +41,10 @@ internal static int FloorLog2PlusOne(int n) } } - internal partial class GenericSpanSortHelper + internal partial class GenericSpanSortHelper where TKey : IComparable { - public static void Sort(Span keys, Span values, int index, int length) + public static void Sort(Span keys, Span values, int index, int length) { Contracts.Assert(keys != null, "Check the arguments in the caller!"); Contracts.Assert(index >= 0 && length >= 0 && (keys.Length - index >= length), "Check the arguments in the caller!"); @@ -52,38 +52,41 @@ public static void Sort(Span keys, Span values, int index, int len IntrospectiveSort(keys, values, index, length); } - private static void SwapIfGreaterWithItems(Span keys, Span values, int a, int b) + public static void Sort(Span keys, int index, int length) + { + Sort(keys, keys, index, length); + } + + private static void SwapIfGreaterWithItems(Span keys, Span values, int a, int b) { if (a != b) { if (keys[a] != null && keys[a].CompareTo(keys[b]) > 0) { TKey key = keys[a]; - keys[a] = keys[b]; - keys[b] = key; - TValue value = values[a]; + keys[a] = keys[b]; values[a] = values[b]; + 
keys[b] = key; values[b] = value; } } } - private static void Swap(Span keys, Span values, int i, int j) + private static void Swap(Span keys, Span values, int i, int j) { if (i != j) { TKey k = keys[i]; - keys[i] = keys[j]; - keys[j] = k; - TValue v = values[i]; + keys[i] = keys[j]; values[i] = values[j]; + keys[j] = k; values[j] = v; } } - internal static void IntrospectiveSort(Span keys, Span values, int left, int length) + internal static void IntrospectiveSort(Span keys, Span values, int left, int length) { Contracts.Assert(keys != null); Contracts.Assert(values != null); @@ -99,7 +102,7 @@ internal static void IntrospectiveSort(Span keys, Span values, int IntroSort(keys, values, left, length + left - 1, 2 * IntrospectiveSortUtilities.FloorLog2PlusOne(length)); } - private static void IntroSort(Span keys, Span values, int lo, int hi, int depthLimit) + private static void IntroSort(Span keys, Span values, int lo, int hi, int depthLimit) { Contracts.Assert(keys != null); Contracts.Assert(values != null); @@ -146,7 +149,7 @@ private static void IntroSort(Span keys, Span values, int lo, int } } - private static int PickPivotAndPartition(Span keys, Span values, int lo, int hi) + private static int PickPivotAndPartition(Span keys, Span values, int lo, int hi) { Contracts.Assert(keys != null); Contracts.Assert(values != null); @@ -191,7 +194,7 @@ private static int PickPivotAndPartition(Span keys, Span values, i return left; } - private static void Heapsort(Span keys, Span values, int lo, int hi) + private static void Heapsort(Span keys, Span values, int lo, int hi) { Contracts.Assert(keys != null); Contracts.Assert(values != null); @@ -211,7 +214,7 @@ private static void Heapsort(Span keys, Span values, int lo, int h } } - private static void DownHeap(Span keys, Span values, int i, int n, int lo) + private static void DownHeap(Span keys, Span values, int i, int n, int lo) { Contracts.Assert(keys != null); Contracts.Assert(lo >= 0); @@ -237,7 +240,7 @@ private static 
void DownHeap(Span keys, Span values, int i, int n, values[lo + i - 1] = dValue; } - private static void InsertionSort(Span keys, Span values, int lo, int hi) + private static void InsertionSort(Span keys, Span values, int lo, int hi) { Contracts.Assert(keys != null); Contracts.Assert(values != null); @@ -265,5 +268,4 @@ private static void InsertionSort(Span keys, Span values, int lo, } } } - } diff --git a/src/Microsoft.ML.Data/Depricated/Vector/VBufferMathUtils.cs b/src/Microsoft.ML.Data/Depricated/Vector/VBufferMathUtils.cs index 9be89b4f83..4fcf0b40ac 100644 --- a/src/Microsoft.ML.Data/Depricated/Vector/VBufferMathUtils.cs +++ b/src/Microsoft.ML.Data/Depricated/Vector/VBufferMathUtils.cs @@ -298,7 +298,8 @@ public static void AddMultWithOffset(in VBuffer src, Float c, ref VBuffer editor = VBufferEditor.Create(ref dst, dst.Length, dstValues.Length + gapCount, - keepOldOnResize: true); + keepOldOnResize: true, + requireIndicesOnDense: true); var indices = editor.Indices; values = editor.Values; if (gapCount > 0) diff --git a/src/Microsoft.ML.Data/Depricated/Vector/VectorUtils.cs b/src/Microsoft.ML.Data/Depricated/Vector/VectorUtils.cs index 58655063d2..b0c810633d 100644 --- a/src/Microsoft.ML.Data/Depricated/Vector/VectorUtils.cs +++ b/src/Microsoft.ML.Data/Depricated/Vector/VectorUtils.cs @@ -150,7 +150,7 @@ public static void SparsifyNormalize(ref VBuffer a, int top, int bottom, } if (!aEditor.Indices.IsEmpty) - GenericSpanSortHelper.Sort(aEditor.Indices, aEditor.Values, 0, newCount); + GenericSpanSortHelper.Sort(aEditor.Indices, aEditor.Values, 0, newCount); a = aEditor.Commit(); } diff --git a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs index 849318c1e0..23e658a818 100644 --- a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs @@ -7,6 +7,7 @@ [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TestFramework" + PublicKey.TestValue)] [assembly: 
InternalsVisibleTo(assemblyName: "Microsoft.ML.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Core.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.InferenceTesting" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.OnnxTransformTest" + PublicKey.TestValue)] diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 80a98881a3..9c4c03ae86 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -273,7 +273,7 @@ private ValueGetter> GetTextValueGetter(IRow input, i contributions.GetValues().CopyTo(values); var count = values.Length; var sb = new StringBuilder(); - GenericSpanSortHelper.Sort(indices, values, 0, count); + GenericSpanSortHelper.Sort(indices, values, 0, count); for (var i = 0; i < count; i++) { var val = values[i]; diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index 68bb844a3f..363bc4c83f 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -478,7 +478,7 @@ private ValueGetter> MakeGetterOne(IRow input, int iinfo) return; } - var editor = VBufferEditor.Create(ref dst, size, 1); + var editor = VBufferEditor.Create(ref dst, size, 1, requireIndicesOnDense: true); editor.Values[0] = 1; editor.Indices[0] = (int)src - 1; diff --git a/src/Microsoft.ML.FastTree/Utils/LinqExtensions.cs b/src/Microsoft.ML.FastTree/Utils/LinqExtensions.cs index 21e19dfd60..88d8825205 100644 --- a/src/Microsoft.ML.FastTree/Utils/LinqExtensions.cs +++ b/src/Microsoft.ML.FastTree/Utils/LinqExtensions.cs @@ -23,14 +23,14 @@ public static int ArgMin(this T[] arr) where T : IComparable return argMin; } - public static int 
ArgMax(this T[] arr) where T : IComparable + public static int ArgMax(this ReadOnlySpan span) where T : IComparable { - if (arr.Length == 0) + if (span.Length == 0) return -1; int argMax = 0; - for (int i = 1; i < arr.Length; i++) + for (int i = 1; i < span.Length; i++) { - if (arr[i].CompareTo(arr[argMax]) > 0) + if (span[i].CompareTo(span[argMax]) > 0) argMax = i; } return argMax; diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs index 0d215d842b..7293ce8302 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs @@ -180,7 +180,7 @@ public static ImmutableArray int nextValuesIndex = 0; - Utils.Shuffle(RandomUtils.Create(shuffleSeed), featureValuesBuffer); + Utils.Shuffle(RandomUtils.Create(shuffleSeed), featureValuesBuffer); Action permuter = (src, dst, state) => diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs index 3712b06038..799e2e64cb 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs @@ -4,15 +4,32 @@ using System; using System.Collections.Generic; -using System.Text; +using System.Linq; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Runtime.Internal.Utilities; +using Microsoft.ML.Runtime.Numeric; +using Microsoft.ML.Runtime.RunTests; using Xunit; +using Xunit.Abstractions; namespace Microsoft.ML.Core.Tests.UnitTests { - public sealed class TestVBuffer + public sealed class TestVBuffer : BaseTestBaseline { + // How many random trials to perform per test. + private const int _trials = 1000; + // Controls the maximum length of the vectors to generate. Should be + // small enough so that among _trials trials, there will be a few + // likely to have zero counts. 
+ private const int _maxLen = 20; + + public TestVBuffer(ITestOutputHelper output) + : base(output) + { + } + [Fact] public void TestApplyAt() { @@ -21,5 +38,1212 @@ public void TestApplyAt() Assert.Equal(4, buffer.GetValues().Length); Assert.Equal(1, buffer.GetValues()[2]); } + + [Fact(Skip = "Bug 1802: https://github.com/dotnet/machinelearning/issues/1802")] + public void VBufferOpScaleBy() + { + var rgen = RandomUtils.Create(9); + VBuffer a = default; + VBuffer actualDst = default; + VBuffer dst = default; + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + FullCopy(ref a, ref dst); + FullCopy(ref a, ref actualDst); + VectorUtils.ScaleBy(ref actualDst, c); + VBufferUtils.Apply(ref dst, (int i, ref float v) => v *= c); + TestSame(ref dst, ref actualDst, 1e-5); + } + } + + [Fact(Skip = "Bug 1802: https://github.com/dotnet/machinelearning/issues/1802")] + public void VBufferOpScaleByCopy() + { + var rgen = RandomUtils.Create(9); + VBuffer a = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + FullCopy(ref a, ref dst); + VectorUtils.ScaleBy(ref dst, c); + VectorUtils.ScaleBy(a, ref actualDst, c); + TestSame(ref dst, ref actualDst, 1e-5); + } + } + + [Fact] + public void VBufferOpMath() + { + const int tol = 4; + var rgen = RandomUtils.Create(42); + + VBuffer a; + VBuffer aOrig = default(VBuffer); + for (int trial = 0; trial < _trials; ++trial) + { + GenerateSingle(rgen, rgen.Next(_maxLen) + 1, out a); + a.CopyTo(ref aOrig); + + float sum = a.Items().Sum(iv => iv.Value); + float l1 = a.Items().Sum(iv => Math.Abs(iv.Value)); + float l2Squared = a.Items().Sum(iv => iv.Value * iv.Value); + float l2 = MathUtils.Sqrt(l2Squared); + + float infNorm = 
a.GetValues().Length == 0 ? 0 : a.Items().Max(iv => Math.Abs(iv.Value)); + + Assert.True(CompareNumbersWithTolerance(sum, VectorUtils.Sum(in a), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(l1, VectorUtils.L1Norm(in a), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(l2Squared, VectorUtils.NormSquared(in a), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(l2, VectorUtils.Norm(in a), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(infNorm, VectorUtils.MaxNorm(in a), digitsOfPrecision: tol)); + + float d = 0; + switch (trial % 3) + { + case 0: + d = 0; + break; + case 1: + d = 1; + break; + case 2: + d = rgen.NextDouble().ToFloat(); + break; + } + VectorUtils.ScaleBy(ref a, d); + var editor = VBufferEditor.CreateFromBuffer(ref aOrig); + for (int i = 0; i < editor.Values.Length; ++i) + editor.Values[i] *= d; + aOrig = editor.Commit(); + TestSame(ref aOrig, ref a, 1e-5); + + } + } + + [Fact] + public void VBufferOpMathMul() + { + VBuffer a; + // Test element-wise multiplication. + var rgen = RandomUtils.Create(0); + GenerateSingle(rgen, rgen.Next(_maxLen) + 1, out a); + var length = 15; + var values = Enumerable.Range(0, length).Select(x => x + 0.1f).ToArray(); + var indicies = Enumerable.Range(0, 15).Where(x => x % 2 == 0).ToArray(); + a = new VBuffer(length, values, indicies); + float[] aDense = a.DenseValues().ToArray(); + float[] a2Values = aDense.Select(x => x + 1).ToArray(); + VBuffer a2DenseVbuff = new VBuffer(length, a2Values); + var multResExpected = new float[length]; + for (var i = 0; i < length; i++) + multResExpected[i] = aDense[i] * a2Values[i]; + + var vbufMultExpected = new VBuffer(length, multResExpected); + VectorUtils.MulElementWise(in a, ref a2DenseVbuff); + TestSame(ref vbufMultExpected, ref a2DenseVbuff, 1e-5); + } + + [Fact] + public void VBufferOpMathMul2() + { + VBuffer a; + // Test element-wise multiplication. 
+ var rgen = RandomUtils.Create(0); + GenerateSingle(rgen, rgen.Next(_maxLen) + 1, out a); + var length = 15; + var values = Enumerable.Range(0, length).Select(x => x + 0.1f).ToArray(); + var indicies = Enumerable.Range(0, 15).Where(x => x % 2 == 0).ToArray(); + a = new VBuffer(length, values, indicies); + float[] aDense = a.DenseValues().ToArray(); + float[] a2Values = aDense.Select(x => x + 1).ToArray(); + VBuffer a2DenseVbuff = new VBuffer(length, a2Values); + var multResExpected = new float[length]; + for (var i = 0; i < length; i++) + multResExpected[i] = aDense[i] * a2Values[i]; + + var vbufMultExpected = new VBuffer(length, multResExpected); + VectorUtils.MulElementWise(in a2DenseVbuff, ref a); + TestSame(ref vbufMultExpected, ref a, 1e-5); + } + + [Fact] + public void SparsifyNormalize() + { + var a = new VBuffer(5, new float[5] { 1, 2, 3, 4, 5 }); + var length = a.Length; + float[] aDense = a.DenseValues().ToArray(); + float[] a2Values = aDense.Select(x => x + 1).ToArray(); + VBuffer a2 = new VBuffer(length, a2Values); + var multResExpected = new float[2]; + multResExpected[0] = aDense[3] * a2Values[3]; + multResExpected[1] = aDense[4] * a2Values[4]; + + var vbufMultExpected = new VBuffer(5, 2, multResExpected, new int[2] { 3, 4 }); + var multResActual = new VBuffer(length, new float[length]); + VectorUtils.MulElementWise(in a, ref a2); + VectorUtils.SparsifyNormalize(ref a2, 2, 2, normalize: false); + TestSame(ref vbufMultExpected, ref a2, 1e-5); + } + + [Fact] + public void SparsifyNormalizeTop2() + { + var a = new VBuffer(5, new float[5] { 1, 2, 3, 4, 5 }); + var length = a.Length; + float[] aDense = a.DenseValues().ToArray(); + float[] a2Values = new float[5] { -1, -2, 3, 4, 5 }; + VBuffer a2 = new VBuffer(length, a2Values); + var multResExpected = new float[4]; + multResExpected[0] = aDense[0] * a2Values[0]; + multResExpected[1] = aDense[1] * a2Values[1]; + multResExpected[2] = aDense[3] * a2Values[3]; + multResExpected[3] = aDense[4] * a2Values[4]; + 
+ var vbufMultExpected = new VBuffer(5, 4, multResExpected, new int[4] { 0, 1, 3, 4 }); + VectorUtils.MulElementWise(in a, ref a2); + VectorUtils.SparsifyNormalize(ref a2, 2, 2, normalize: false); + TestSame(ref vbufMultExpected, ref a2, 1e-5); + } + + [Fact] + public void SparsifyNormalizeTopSparse() + { + for (var i = 0; i < 2; i++) + { + var norm = i != 0; + var a = new VBuffer(7, 6, new float[6] { 10, 20, 40, 50, 60, -70 }, + new int[] { 0, 1, /*2 is missed*/3, 4, 5, 6 }); + var length = a.Length; + float[] aDense = a.DenseValues().ToArray(); + float[] a2Values = aDense.Select(x => x).ToArray(); + a2Values[6] = -a2Values[6]; + VBuffer a2 = new VBuffer(length, a2Values); + var multResExpected = new float[3]; + multResExpected[0] = 2500; + multResExpected[1] = 3600; + multResExpected[2] = -4900; + + if (norm) + for (var j = 0; j < multResExpected.Length; j++) + multResExpected[j] = multResExpected[j] / 4900; + + var vbufMultExpected = new VBuffer(7, 3, multResExpected, new int[3] { 4, 5, 6 }); + VectorUtils.MulElementWise(in a, ref a2); + VectorUtils.SparsifyNormalize(ref a2, 2, 3, normalize: norm); + TestSame(ref vbufMultExpected, ref a2, 1e-5); + } + } + + [Fact] + public void SparsifyNormalizeTopSparse2() + { + for (var i = 0; i < 2; i++) + { + var norm = i != 0; + var a = new VBuffer(7, 6, new float[6] { 100, 20, 40, 50, 60, 70 }, + new int[] { 0, 1, /*2 is missed*/3, 4, 5, 6 }); + var b = new VBuffer(7, 6, new float[6] { 100, 20, 30, 40, 50, 70 }, + new int[] { 0, 1, 2, 3, 4, /*5 is missed*/ 6 }); + var length = a.Length; + var multResExpected = new float[2]; + multResExpected[0] = 10000; + multResExpected[1] = 4900; + + if (norm) + for (var j = 0; j < multResExpected.Length; j++) + multResExpected[j] = multResExpected[j] / 10000; + + var vbufMultExpected = new VBuffer(7, 2, multResExpected, new int[2] { 0, 6 }); + VectorUtils.MulElementWise(in a, ref b); + VectorUtils.SparsifyNormalize(ref b, 2, 2, normalize: norm); + TestSame(ref vbufMultExpected, ref b, 
1e-5); + } + } + + /// + /// Tests SparsifyNormalize works correctly. + /// + [Theory] + [InlineData(1, true, new[] { 0.8f, 0.9f, 1f }, new[] { 7, 8, 9 })] + [InlineData(1, false, new[] { 8f, 9f, 10f }, new[] { 7, 8, 9 })] + [InlineData(-4, true, new[] { -0.8f, -0.6f, -0.4f, 0.6f, 0.8f, 1f }, new[] { 0, 1, 2, 7, 8, 9 })] + [InlineData(-4, false, new[] { -4f, -3f, -2f, 3f, 4f, 5f }, new[] { 0, 1, 2, 7, 8, 9 })] + [InlineData(-10, true, new[] { -1f, -0.9f, -0.8f }, new[] { 0, 1, 2 })] + [InlineData(-10, false, new[] { -10f, -9f, -8f }, new[] { 0, 1, 2 })] + public void TestSparsifyNormalize(int startRange, bool normalize, float[] expectedValues, int[] expectedIndices) + { + float[] values = Enumerable.Range(startRange, 10).Select(i => (float)i).ToArray(); + var a = new VBuffer(10, values); + + VectorUtils.SparsifyNormalize(ref a, 3, 3, normalize); + + Assert.False(a.IsDense); + Assert.Equal(10, a.Length); + Assert.Equal(expectedIndices, a.GetIndices().ToArray()); + + var actualValues = a.GetValues().ToArray(); + Assert.Equal(expectedValues.Length, actualValues.Length); + for (int i = 0; i < expectedValues.Length; i++) + Assert.Equal(expectedValues[i], actualValues[i], precision: 6); + } + + /// + /// Tests SparsifyNormalize works when asked for all values. + /// + [Theory] + [InlineData(10, 0)] + [InlineData(10, 10)] + [InlineData(20, 20)] + public void TestSparsifyNormalizeReturnsDense(int top, int bottom) + { + float[] values = Enumerable.Range(1, 10).Select(i => (float)i).ToArray(); + var a = new VBuffer(10, values); + + VectorUtils.SparsifyNormalize(ref a, top, bottom, false); + + Assert.True(a.IsDense); + Assert.Equal(10, a.Length); + Assert.True(a.GetIndices().IsEmpty); + + Assert.Equal(values, a.GetValues().ToArray()); + } + + /// + /// A trivial inefficient implementation equivalent to . 
+ /// + private static void NaiveApplyWith(ref VBuffer a, ref VBuffer b, VBufferUtils.PairManipulator manip) + { + Contracts.Assert(a.Length == b.Length); + var aIndices = new HashSet(a.Items().Select(iv => iv.Key)); + int[] indices = aIndices.Union(b.Items().Select(iv => iv.Key)).OrderBy(i => i).ToArray(); + T2[] values = new T2[indices.Length]; + T1 temp = default(T1); + for (int ii = 0; ii < indices.Length; ++ii) + { + int i = indices[ii]; + b.GetItemOrDefault(i, ref values[ii]); + if (aIndices.Contains(i)) + { + a.GetItemOrDefault(i, ref temp); + manip(i, temp, ref values[ii]); + } + } + b = new VBuffer(a.Length, indices.Length, values, indices); + } + + [Fact] + public void VBufferOpApplyWith() + { + var rgen = RandomUtils.Create(1); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer aOrig = default(VBuffer); + VBuffer bOrig = default(VBuffer); + + VBufferUtils.PairManipulator manip = (int ind, float av, ref float bv) => bv = 2 * bv + av - ind; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic = 0; + GeneratePair(rgen, len, out a, out b, out genLogic); + FullCopy(ref a, ref aOrig); + FullCopy(ref b, ref bOrig); + VBufferUtils.ApplyWith(in a, ref b, manip); + NaiveApplyWith(ref aOrig, ref bOrig, manip); + TestSame(ref bOrig, ref b); + } + } + + /// + /// A trivial inefficient implementation equivalent to . 
+ /// + private static void NaiveApplyWithEither(ref VBuffer a, ref VBuffer b, VBufferUtils.PairManipulator manip) + { + int[] indices = a.Items().Select(iv => iv.Key).Union(b.Items().Select(iv => iv.Key)).OrderBy(i => i).ToArray(); + T2[] values = new T2[indices.Length]; + T1 temp = default(T1); + for (int ii = 0; ii < indices.Length; ++ii) + { + int i = indices[ii]; + a.GetItemOrDefault(i, ref temp); + b.GetItemOrDefault(i, ref values[ii]); + manip(i, temp, ref values[ii]); + } + b = new VBuffer(a.Length, indices.Length, values, indices); + } + + [Fact] + public void VBufferOpApplyWithEither() + { + var rgen = RandomUtils.Create(2); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer aOrig = default(VBuffer); + VBuffer bOrig = default(VBuffer); + VBuffer dst = default(VBuffer); + + VBufferUtils.PairManipulator manip = (int ind, float av, ref float bv) => bv = 2 * bv + av - ind; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic = 0; + GeneratePair(rgen, len, out a, out b, out genLogic); + FullCopy(ref a, ref aOrig); + FullCopy(ref b, ref bOrig); + FullCopy(ref b, ref dst); + VBufferUtils.ApplyWithEitherDefined(in a, ref b, manip); + NaiveApplyWithEither(ref aOrig, ref dst, manip); + TestSame(ref dst, ref b); + } + } + + /// + /// A trivial inefficient implementation equivalent to + /// if is true, or if false . + /// + private static void NaiveForEach(ref VBuffer a, ref VBuffer b, Action vis, bool union) + { + var aIndices = a.Items().Select(iv => iv.Key); + var bIndices = b.Items().Select(iv => iv.Key); + var indices = union ? 
aIndices.Union(bIndices) : aIndices.Intersect(bIndices); + indices = indices.Distinct().OrderBy(x => x); + T1 aValue = default(T1); + T2 bValue = default(T2); + foreach (var index in indices) + { + a.GetItemOrDefault(index, ref aValue); + b.GetItemOrDefault(index, ref bValue); + vis(index, aValue, bValue); + } + } + + private void VBufferOpForEachHelper(bool union) + { + var rgen = RandomUtils.Create(union ? 3 : 4); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + + float accum = 0; + Action vis = (int ind, float av, float bv) => accum += 2 * bv + av - ind; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic = 0; + GeneratePair(rgen, len, out a, out b, out genLogic); + if (union) + VBufferUtils.ForEachEitherDefined(in a, in b, vis); + else + VBufferUtils.ForEachBothDefined(in a, in b, vis); + var actualAccum = accum; + accum = 0; + NaiveForEach(ref a, ref b, vis, union); + Assert.Equal(accum, actualAccum); + accum = 0; + } + } + + [Fact] + public void VBufferOpForEachEither() + { + VBufferOpForEachHelper(union: true); + } + + [Fact] + public void VBufferOpForEachBoth() + { + VBufferOpForEachHelper(union: false); + } + + private static void NaiveApplyInto(ref VBuffer a, ref VBuffer dst, Func func) + { + List indices = new List(a.GetIndices().Length); + TDst[] values = new TDst[a.GetValues().Length]; + foreach (var iv in a.Items()) + { + values[indices.Count] = func(iv.Key, iv.Value); + indices.Add(iv.Key); + } + dst = new VBuffer(a.Length, indices.Count, values, indices.ToArray()); + } + + [Fact] + public void VBufferOpApplyIntoSingle() + { + var rgen = RandomUtils.Create(5); + VBuffer a = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + Func func = (int ind, float av) => av - ind; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + VBufferUtils.ApplyIntoEitherDefined(in 
a, ref actualDst, func); + NaiveApplyInto(ref a, ref dst, func); + TestSame(ref dst, ref actualDst); + } + } + + private static void NaiveApplyInto(ref VBuffer a, ref VBuffer b, ref VBuffer dst, Func func) + { + int[] indices = a.Items().Select(iv => iv.Key) + .Union(b.Items().Select(iv => iv.Key)).Distinct().OrderBy(x => x).ToArray(); + TDst[] values = new TDst[indices.Length]; + T1 aValue = default(T1); + T2 bValue = default(T2); + for (int i = 0; i < indices.Length; ++i) + { + a.GetItemOrDefault(indices[i], ref aValue); + b.GetItemOrDefault(indices[i], ref bValue); + values[i] = func(indices[i], aValue, bValue); + } + dst = new VBuffer(a.Length, indices.Length, values, indices); + } + + [Fact] + public void VBufferOpApplyIntoPair() + { + var rgen = RandomUtils.Create(6); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + Func func = (int ind, float av, float bv) => 2 * bv + av - ind; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic = 0; + GeneratePair(rgen, len, out a, out b, out genLogic); + VBufferUtils.ApplyInto(in a, in b, ref actualDst, func); + NaiveApplyInto(ref a, ref b, ref dst, func); + TestSame(ref dst, ref actualDst); + } + } + + /// + /// Naive version of , + /// which can be used to generalize all the other add-mult functions. 
+ /// + private static void NaiveAddMult(ref VBuffer a, float c, ref VBuffer b, int offset) + { + Contracts.Assert(0 <= offset && a.Length <= b.Length - offset); + if (a.GetValues().Length == 0 || c == 0) + return; + VBuffer aa = default(VBuffer); + if (offset == 0 && a.Length == b.Length) + aa = a; + else + { + a.CopyTo(ref aa); + var editor = VBufferEditor.Create(ref aa, b.Length, aa.GetValues().Length, requireIndicesOnDense: true); + var indices = editor.Indices; + if (aa.IsDense) + { + for (int i = 0; i < aa.Length; i++) + indices[i] = i; + } + for (int i = 0; i < editor.Indices.Length; ++i) + indices[i] += offset; + aa = editor.Commit(); + } + VBufferUtils.PairManipulator manip = + (int ind, float av, ref float bv) => bv = bv + c * av; + NaiveApplyWith(ref aa, ref b, manip); + } + + [Fact] + public void VBufferOpAddMult() + { + var rgen = RandomUtils.Create(7); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic; + GeneratePair(rgen, len, out a, out b, out genLogic); + // Keep b around as an original for debugging purposes. + FullCopy(ref b, ref dst); + FullCopy(ref b, ref actualDst); + NaiveAddMult(ref a, c, ref dst, 0); + VectorUtils.AddMult(in a, c, ref actualDst); + TestSame(ref dst, ref actualDst, 1e-4); + if (c == 1) + { + // While we're at it, test Add. 
+ FullCopy(ref b, ref actualDst); + VectorUtils.Add(in a, ref actualDst); + TestSame(ref dst, ref actualDst); + } + } + } + + [Fact] + public void VBufferOpAddMultCopy() + { + var rgen = RandomUtils.Create(7); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic; + GeneratePair(rgen, len, out a, out b, out genLogic); + FullCopy(ref b, ref dst); + NaiveAddMult(ref a, c, ref dst, 0); + VectorUtils.AddMult(in a, c, ref b, ref actualDst); + TestEquivalent(ref dst, ref actualDst, 1e-4); + } + } + + [Fact] + public void VBufferOpAddMultWithOffset() + { + var rgen = RandomUtils.Create(8); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out b); + GenerateSingle(rgen, rgen.Next(len) + 1, out a); + int offset = rgen.Next(b.Length - a.Length + 1); + // Keep b around as an original for debugging purposes. 
+ FullCopy(ref b, ref dst); + FullCopy(ref b, ref actualDst); + NaiveAddMult(ref a, c, ref dst, offset); + VectorUtils.AddMultWithOffset(in a, c, ref actualDst, offset); + TestSame(ref dst, ref actualDst, 1e-4); + } + } + + [Fact] + public void VBufferOpScaleInto() + { + var rgen = RandomUtils.Create(10); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + GenerateSingle(rgen, rgen.Next(_maxLen) + 1, out b); + FullCopy(ref b, ref dst); + FullCopy(ref b, ref actualDst); + // Inefficient ScaleInto, including the c==0 deviation from ApplyInto. + if (c == 0 && !a.IsDense) + dst = VBufferEditor.Create(ref dst, a.Length, 0).Commit(); + else + NaiveApplyInto(ref a, ref dst, (i, av) => c * av); + VectorUtils.ScaleInto(in a, c, ref actualDst); + TestSame(ref dst, ref actualDst, 1e-5); + } + } + + [Fact] + public void VBufferOpAddMultInto() + { + var rgen = RandomUtils.Create(11); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + VBuffer actualDst = default(VBuffer); + VBuffer dst = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + float c = ScaleFactor(trial, rgen); + int len = rgen.Next(_maxLen) + 1; + GenLogic genLogic; + GeneratePair(rgen, len, out a, out b, out genLogic); + FullCopy(ref a, ref dst); + NaiveAddMult(ref b, c, ref dst, 0); + VectorUtils.AddMultInto(in a, c, in b, ref actualDst); + TestSame(ref dst, ref actualDst); + } + } + + [Fact] + public void VBufferOpApplySlot() + { + var rgen = RandomUtils.Create(12); + VBuffer a = default(VBuffer); + float[] expected = new float[_maxLen]; + float[] actual = new float[_maxLen]; + + VBufferUtils.SlotValueManipulator manip = (int i, ref float value) => value += i; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = 
rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + a.CopyTo(expected); + int slot = rgen.Next(len); + manip(slot, ref expected[slot]); + VBufferUtils.ApplyAt(ref a, slot, manip); + Assert.Equal(len, a.Length); + a.CopyTo(actual); + for (int i = 0; i < len; ++i) + Assert.Equal(expected[i], actual[i]); + } + } + + [Fact] + public void VBufferOpCopyRange() + { + var rgen = RandomUtils.Create(13); + VBuffer a = default(VBuffer); + VBuffer dst = default(VBuffer); + + VBufferUtils.SlotValueManipulator manip = (int i, ref float value) => value += i; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + int copyMin = rgen.Next(len + 1); + int copyLen = rgen.Next(len - copyMin + 1); + a.CopyTo(ref dst, copyMin, copyLen); + Assert.Equal(copyLen, dst.Length); + float value = 0; + foreach (var iv in dst.Items(all: true)) + { + a.GetItemOrDefault(iv.Key + copyMin, ref value); + Assert.Equal(value, iv.Value); + } + } + } + + [Fact] + public void VBufferOpDensify() + { + var rgen = RandomUtils.Create(14); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + + VBufferUtils.SlotValueManipulator manip = (int i, ref float value) => value += i; + + for (int trial = 0; trial < _trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + FullCopy(ref a, ref b); + VBufferUtils.Densify(ref b); + + Assert.Equal(len, b.Length); + Assert.True(b.IsDense, "Result was not dense, as expected"); + int count = 0; + float value = 0; + foreach (var iv in b.Items(all: false)) + { + a.GetItemOrDefault(iv.Key, ref value); + Assert.Equal(value, iv.Value); + count++; + } + Assert.Equal(len, count); + } + } + + [Fact] + public void VBufferOpDensifyFirst() + { + var rgen = RandomUtils.Create(15); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + + VBufferUtils.SlotValueManipulator manip = (int i, ref float value) => value += i; + + for (int trial = 0; trial < 
_trials; ++trial) + { + int len = rgen.Next(_maxLen) + 1; + GenerateSingle(rgen, len, out a); + FullCopy(ref a, ref b); + int dense = rgen.Next(len + 1); + VBufferUtils.DensifyFirst(ref b, dense); + + Assert.Equal(len, b.Length); + Assert.True(b.IsDense || !a.IsDense, "Density of a did not imply density of b"); + float value = 0; + + HashSet aIndices = new HashSet(a.Items().Select(iv => iv.Key)); + HashSet bIndices = new HashSet(b.Items().Select(iv => iv.Key)); + + foreach (var iv in b.Items(all: false)) + { + a.GetItemOrDefault(iv.Key, ref value); + Assert.Equal(value, iv.Value); + } + for (int i = 0; i < dense; ++i) + { + Assert.True(bIndices.Remove(i), $"Slot {i} not explicitly represented"); + aIndices.Remove(i); + } + // Now we consider the set of indices beyond those we explicitly densified. + Assert.True(aIndices.SetEquals(bIndices), "Indices disagreed on explicit representation"); + } + } + + [Fact] + public void VBufferOpPairwiseMath() + { + var rgen = RandomUtils.Create(16); + VBuffer a = default(VBuffer); + VBuffer b = default(VBuffer); + + for (int trial = 0; trial < _trials; ++trial) + { + GenLogic genLogic; + int len = rgen.Next(_maxLen) + 1; + GeneratePair(rgen, len, out a, out b, out genLogic); + + var l1Dist = a.Items(all: true).Zip(b.Items(all: true), (av, bv) => Math.Abs(av.Value - bv.Value)).Sum(); + var l2Dist2 = a.Items(all: true).Zip(b.Items(all: true), (av, bv) => MathUtils.Pow(av.Value - bv.Value, 2)).Sum(); + var l2Dist = MathUtils.Sqrt(l2Dist2); + var dot = a.Items(all: true).Zip(b.Items(all: true), (av, bv) => av.Value * bv.Value).Sum(); + + const int tol = 4; + Assert.True(CompareNumbersWithTolerance(l1Dist, VectorUtils.L1Distance(in a, in b), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(l2Dist2, VectorUtils.L2DistSquared(in a, in b), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(l2Dist, VectorUtils.Distance(in a, in b), digitsOfPrecision: tol)); + Assert.True(CompareNumbersWithTolerance(dot, 
VectorUtils.DotProduct(in a, in b), digitsOfPrecision: tol)); + } + } + + [Fact] + public void VBufferDropSlots() + { + var rgen = RandomUtils.Create(16); + var a = default(VBuffer); + int dropSlotSparseOpCount = 0; + int dropSlotDenseOpCount = 0; + var minSlots = new List(); + var maxSlots = new List(); + bool dropAll = true; + while (dropSlotSparseOpCount < _trials || dropSlotDenseOpCount < _trials) + { + int len = rgen.Next(_maxLen + 300); + GenerateSingle(rgen, len, out a); + minSlots.Clear(); + maxSlots.Clear(); + int min; + int max = -2; + while (max + 2 < len) + { + if (dropAll) + { + min = 0; + max = len; + } + else + { + min = rgen.Next(Math.Min(max + 10, len + 100) - max - 2) + max + 2; + max = rgen.Next(Math.Min(min + 10, len + 100) - min) + min; + } + + minSlots.Add(min); + maxSlots.Add(max); + + if (maxSlots.Count > 20 || len < 200 && rgen.Next(20) < 2 || dropAll) + { + dropAll = false; + var slotDropper = new SlotDropper(len, minSlots.ToArray(), maxSlots.ToArray()); + int slotsDropped = 0; + int nonExistentSlotsDropped = 0; + + for (int i = 0; i < minSlots.Count; i++) + { + if (maxSlots[i] < len) + slotsDropped += maxSlots[i] - minSlots[i] + 1; + else if (minSlots[i] >= len) + nonExistentSlotsDropped += maxSlots[i] - minSlots[i] + 1; + else + { + slotsDropped += len - minSlots[i]; + nonExistentSlotsDropped += maxSlots[i] - len + 1; + } + } + + Assert.True(slotsDropped + nonExistentSlotsDropped > 0); + + int expectedLength = Math.Max(1, a.Length - slotsDropped); + + Assert.True(expectedLength >= 1); + + var expectedIndices = new List(); + var expectedValues = new List(); + int index = 0; + int dropSlotMinIndex = 0; + int slotsDroppedSoFar = 0; + while (index < a.GetValues().Length) + { + int logicalIndex = a.IsDense ? 
index : a.GetIndices()[index]; + if (dropSlotMinIndex >= minSlots.Count || logicalIndex < minSlots[dropSlotMinIndex]) + { + Assert.True(logicalIndex - slotsDroppedSoFar >= 0); + expectedIndices.Add(logicalIndex - slotsDroppedSoFar); + expectedValues.Add(a.GetValues()[index]); + index++; + } + else if (logicalIndex <= maxSlots[dropSlotMinIndex]) + index++; + else + { + slotsDroppedSoFar += maxSlots[dropSlotMinIndex] - minSlots[dropSlotMinIndex] + 1; + dropSlotMinIndex++; + } + } + + Assert.Equal(expectedIndices.Count, expectedValues.Count); + Assert.True(expectedIndices.Count <= a.GetValues().Length); + + if (a.IsDense) + dropSlotDenseOpCount++; + else + dropSlotSparseOpCount++; + + var expectedVector = new VBuffer(Math.Max(1, expectedLength), + expectedIndices.Count, expectedValues.ToArray(), a.IsDense ? null : expectedIndices.ToArray()); + + var dst = rgen.Next(2) == 0 ? a : default(VBuffer); + slotDropper.DropSlots(ref a, ref dst); + TestSame(ref expectedVector, ref dst); + minSlots.Clear(); + maxSlots.Clear(); + max = -1; + len = dst.Length; + Utils.Swap(ref a, ref dst); + } + } + } + } + + private static float ScaleFactor(int trial, Random rgen) + { + switch (trial % 4) + { + case 0: + return 0; + case 1: + return 1; + case 2: + return -1; + default: + return rgen.NextDouble().ToFloat() * 10 - 5; + } + } + + private static void GenerateSingle(Random rgen, int len, out VBuffer a) + { + int count = rgen.Next(2) == 0 ? 
len : rgen.Next(len); + GenerateVBuffer(rgen, len, count, out a); + } + + private static void GenerateVBuffer(Random rgen, int len, int count, out VBuffer dst) + { + const int excess = 4; + Contracts.Assert(count <= len); + int[] indices = null; + if (count != len) + { + indices = new int[count + rgen.Next(excess)]; + FillRandomIndices(rgen, indices, len, count); + for (int i = count; i < indices.Length; ++i) + indices[i] = rgen.Next(200) - 100; + if (indices.Length == 0 && rgen.Next(2) == 0) + indices = null; + } + float[] values = new float[count + rgen.Next(excess)]; + for (int i = 0; i < values.Length; ++i) + values[i] = rgen.NextDouble().ToFloat() * 10 - 5; + if (values.Length == 0 && rgen.Next(2) == 0) + values = null; + dst = new VBuffer(len, count, values, indices); + } + + private static void FillRandomIndices(Random rgen, int[] indices, int len, int count) + { + Contracts.Assert(Utils.Size(indices) >= count); + int max = len - count + 1; + for (int i = 0; i < count; ++i) + indices[i] = rgen.Next(max); + Array.Sort(indices, 0, count); + for (int i = 0; i < count; ++i) + indices[i] += i; + Assert.True(Utils.IsIncreasing(0, indices, count, len)); + } + + /// + /// Copy everything about the , to , + /// not just something logically equivalent but possibly with different internal + /// structure. This will produce a totally inefficient copy right down to the + /// length and contents of the excess indices/values. 
+ /// + private static void FullCopy(ref VBuffer src, ref VBuffer dst) + { + var editor = VBufferEditor.Create(ref dst, src.Length, src.GetValues().Length); + var indices = editor.Indices; + var values = editor.Values; + src.GetIndices().CopyTo(indices); + src.GetValues().CopyTo(values); + dst = editor.Commit(); + } + + private static void TestSame(ref VBuffer expected, ref VBuffer actual, Double tol = 0) + { + TestSame(ref expected, ref actual, FloatEquality((float)tol)); + } + + private static void TestSame(ref VBuffer expected, ref VBuffer actual, Func equalityFunc) + { + Assert.Equal(expected.Length, actual.Length); + Assert.Equal(expected.GetValues().Length, actual.GetValues().Length); + Assert.Equal(expected.IsDense, actual.IsDense); + if (!expected.IsDense) + { + for (int i = 0; i < expected.GetIndices().Length; ++i) + Assert.Equal(expected.GetIndices()[i], actual.GetIndices()[i]); + } + for (int i = 0; i < expected.GetValues().Length; ++i) + { + var result = equalityFunc(expected.GetValues()[i], actual.GetValues()[i]); + Assert.True(result, $"Value [{i}] mismatch on expected {expected.GetValues()[i]} vs. actual {actual.GetValues()[i]}"); + } + } + + private static Func FloatEquality(float tol) + { + if (tol == 0) + return (i, j) => FloatUtils.GetBits(i) == FloatUtils.GetBits(j); + return (i, j) => + { + if (FloatUtils.GetBits(i) == FloatUtils.GetBits(j) || Math.Abs(i - j) == 0) + return true; + // Seemingly needlessly verbose here for the purpose of setting breakpoints. + float comp = Math.Abs(i - j) / (Math.Abs(i) + Math.Abs(j)); + return comp <= tol; + }; + } + + /// + /// Returned out of so that debugging runs can see what + /// specific subcase of pair relationship was being explored. 
+ /// + private enum GenLogic : byte + { + BothDense, + ASparseBDense, + ADenseBSparse, + BothSparseASameB, + BothSparseASubsetB, + BothSparseBSubsetA, + BothSparseAUnrelatedB, + BothSparseADisjointB, + } + + /// + /// Generates a pair of vectors, where the pairs are generated in such a way that we + /// have a good chance (across many trials) to exercise all of the special casing logic + /// we see in and , e.g., various + /// density/sparsity settings, different ways of the indices overlapping each other, etc. + /// + /// The random number generator + /// The length of the vectors to generate + /// The first of the pair + /// The second of the pair + /// An enum describing the specific case of logic that generated + /// the two generated vectors, which goes some way to describing the relationship between + /// the two + private static void GeneratePair(Random rgen, int len, out VBuffer a, out VBuffer b, out GenLogic subcase) + { + // 0. Both dense. + // 1. a sparse, b dense. + // 2. a dense, b sparse. + // Both sparse: + // 3. a indices the same as b's. + // 4. a indices a subset of b's. + // 5. b indices a subset of a's. + // 6. a and b may overlap. (But I don't prevent distinct.) + // 7. a and b totally disjoint. + const int cases = 8; + Contracts.Assert(cases == Enum.GetValues(typeof(GenLogic)).Length); + subcase = (GenLogic)rgen.Next(cases); + VBufferEditor bEditor; + switch (subcase) + { + case GenLogic.BothDense: + // Both dense. 
+ GenerateVBuffer(rgen, len, len, out a); + GenerateVBuffer(rgen, len, len, out b); + break; + case GenLogic.ASparseBDense: + case GenLogic.ADenseBSparse: + GenerateVBuffer(rgen, len, len, out a); + GenerateVBuffer(rgen, len, rgen.Next(len), out b); + if (subcase == GenLogic.ASparseBDense) + Utils.Swap(ref a, ref b); + break; + case GenLogic.BothSparseASameB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, len, a.GetValues().Length, out b); + bEditor = VBufferEditor.CreateFromBuffer(ref b); + for (int i = 0; i < a.GetIndices().Length; ++i) + bEditor.Indices[i] = a.GetIndices()[i]; + b = bEditor.Commit(); + break; + case GenLogic.BothSparseASubsetB: + case GenLogic.BothSparseBSubsetA: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, a.GetValues().Length, rgen.Next(a.GetValues().Length), out b); + bEditor = VBufferEditor.Create(ref b, len, b.GetValues().Length); + for (int i = 0; i < bEditor.Values.Length; ++i) + bEditor.Indices[i] = a.GetIndices()[bEditor.Indices[i]]; + b = bEditor.Commit(); + if (subcase == GenLogic.BothSparseASubsetB) + Utils.Swap(ref a, ref b); + break; + case GenLogic.BothSparseAUnrelatedB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, len, rgen.Next(len), out b); + break; + case GenLogic.BothSparseADisjointB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + int boundary = rgen.Next(a.GetValues().Length + 1); + GenerateVBuffer(rgen, len, a.GetValues().Length - boundary, out b); + if (a.GetValues().Length != 0 && b.GetValues().Length != 0 && a.GetValues().Length != b.GetValues().Length) + { + var aEditor = VBufferEditor.CreateFromBuffer(ref a); + bEditor = VBufferEditor.CreateFromBuffer(ref b); + Utils.Shuffle(rgen, aEditor.Indices); + aEditor.Indices.Slice(boundary).CopyTo(bEditor.Indices); + + GenericSpanSortHelper.Sort(aEditor.Indices, 0, boundary); + GenericSpanSortHelper.Sort(bEditor.Indices, 0, bEditor.Indices.Length); + a = 
aEditor.CommitTruncated(boundary); + b = bEditor.Commit(); + } + if (rgen.Next(2) == 0) + Utils.Swap(ref a, ref b); + break; + default: + throw Contracts.Except("Whoops, did you miss a case?"); + } + Contracts.Assert(a.Length == len); + Contracts.Assert(b.Length == len); + } + + private static void TestEquivalent(ref VBuffer expected, ref VBuffer actual, Double tol = 0) + { + TestEquivalent(ref expected, ref actual, FloatEquality((float)tol)); + } + + private static void TestEquivalent(ref VBuffer expected, ref VBuffer actual, Func equalityFunc) + { + Contracts.AssertValue(equalityFunc); + Assert.Equal(expected.Length, actual.Length); + + int length = expected.Length; + if (length == 0) + return; + + Contracts.Assert(length > 0); + if (expected.IsDense && actual.IsDense) + { + for (int i = 0; i < length; ++i) + Assert.True(equalityFunc(expected.GetValues()[i], actual.GetValues()[i])); + } + else if (expected.IsDense) + { + // expected is dense, actual is sparse. + int jj = 0; + int j = actual.GetValues().Length == 0 ? length : actual.GetIndices()[jj]; + for (int i = 0; i < length; ++i) + { + if (i == j) + { + Assert.True(equalityFunc(expected.GetValues()[i], actual.GetValues()[jj])); + j = ++jj == actual.GetValues().Length ? length : actual.GetIndices()[jj]; + } + else + Assert.True(equalityFunc(expected.GetValues()[i], default(T))); + } + } + else if (actual.IsDense) + { + // expected is sparse, actual is dense. + int ii = 0; + int i = expected.GetValues().Length == 0 ? length : expected.GetIndices()[ii]; + for (int j = 0; j < length; ++j) + { + if (j == i) + { + Assert.True(equalityFunc(expected.GetValues()[ii], actual.GetValues()[j])); + i = ++ii == expected.GetValues().Length ? length : expected.GetIndices()[ii]; + } + else + Assert.True(equalityFunc(actual.GetValues()[j], default(T))); + } + } + else + { + // Both expected and actual are sparse. + int ii = 0; + int jj = 0; + int i = expected.GetValues().Length == 0 ? 
length : expected.GetIndices()[ii]; + int j = actual.GetValues().Length == 0 ? length : actual.GetIndices()[jj]; + + while (i < length || j < length) + { + if (i == j) + { + // Common slot for expected and actual. + Assert.True(equalityFunc(expected.GetValues()[ii], actual.GetValues()[jj])); + i = ++ii == expected.GetValues().Length ? length : expected.GetIndices()[ii]; + j = ++jj == actual.GetValues().Length ? length : actual.GetIndices()[jj]; + } + else if (i < j) + { + Assert.True(equalityFunc(expected.GetValues()[ii], default(T))); + i = ++ii == expected.GetValues().Length ? length : expected.GetIndices()[ii]; + } + else + { + // i > j + Assert.True(equalityFunc(actual.GetValues()[ii], default(T))); + j = ++jj == actual.GetValues().Length ? length : actual.GetIndices()[jj]; + } + } + } + } } -} +} \ No newline at end of file diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestVectorUtils.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestVectorUtils.cs deleted file mode 100644 index ee3e531e4d..0000000000 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestVectorUtils.cs +++ /dev/null @@ -1,62 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Numeric; -using System.Linq; -using Xunit; - -namespace Microsoft.ML.Runtime.RunTests -{ - public class TestVectorUtils - { - /// - /// Tests SparsifyNormalize works correctly. 
- /// - [Theory] - [InlineData(1, true, new[] { 0.8f, 0.9f, 1f }, new[] { 7, 8, 9 })] - [InlineData(1, false, new[] { 8f, 9f, 10f }, new[] { 7, 8, 9 })] - [InlineData(-4, true, new[] { -0.8f, -0.6f, -0.4f, 0.6f, 0.8f, 1f }, new[] { 0, 1, 2, 7, 8, 9 })] - [InlineData(-4, false, new[] { -4f, -3f, -2f, 3f, 4f, 5f }, new[] { 0, 1, 2, 7, 8, 9 })] - [InlineData(-10, true, new[] { -1f, -0.9f, -0.8f }, new[] { 0, 1, 2 })] - [InlineData(-10, false, new[] { -10f, -9f, -8f }, new[] { 0, 1, 2 })] - public void TestSparsifyNormalize(int startRange, bool normalize, float[] expectedValues, int[] expectedIndices) - { - float[] values = Enumerable.Range(startRange, 10).Select(i => (float)i).ToArray(); - var a = new VBuffer(10, values); - - VectorUtils.SparsifyNormalize(ref a, 3, 3, normalize); - - Assert.False(a.IsDense); - Assert.Equal(10, a.Length); - Assert.Equal(expectedIndices, a.GetIndices().ToArray()); - - var actualValues = a.GetValues().ToArray(); - Assert.Equal(expectedValues.Length, actualValues.Length); - for (int i = 0; i < expectedValues.Length; i++) - Assert.Equal(expectedValues[i], actualValues[i], precision: 6); - } - - /// - /// Tests SparsifyNormalize works when asked for all values. 
- /// - [Theory] - [InlineData(10, 0)] - [InlineData(10, 10)] - [InlineData(20, 20)] - public void TestSparsifyNormalizeReturnsDense(int top, int bottom) - { - float[] values = Enumerable.Range(1, 10).Select(i => (float)i).ToArray(); - var a = new VBuffer(10, values); - - VectorUtils.SparsifyNormalize(ref a, top, bottom, false); - - Assert.True(a.IsDense); - Assert.Equal(10, a.Length); - Assert.True(a.GetIndices().IsEmpty); - - Assert.Equal(values, a.GetValues().ToArray()); - } - } -} From a06a0b79e4fc23d570806389d20305db7972b957 Mon Sep 17 00:00:00 2001 From: Rogan Carr Date: Mon, 3 Dec 2018 14:59:55 -0800 Subject: [PATCH 020/100] Fixing incorrect link (#1808) --- .../Scorers/FeatureContributionCalculationTransform.cs | 2 +- src/Microsoft.ML.FastTree/GamTrainer.cs | 2 +- .../PermutationFeatureImportanceExtensions.cs | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 9c4c03ae86..5062068a40 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -37,7 +37,7 @@ namespace Microsoft.ML.Runtime.Data /// /// /// /// /// diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 421a6d9c60..05fc8eab4f 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -57,7 +57,7 @@ namespace Microsoft.ML.Trainers.FastTree /// /// /// /// /// diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs index 8d596c94df..35b7db1ad9 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs @@ -40,7 +40,7 @@ public static class 
PermutationFeatureImportanceExtensions /// /// /// /// /// @@ -112,7 +112,7 @@ private static RegressionMetrics RegressionDelta( /// /// /// /// /// From 8a9b0161562d2b639ea4ec7ee6f8c46de52a14d0 Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Tue, 4 Dec 2018 09:42:32 -0800 Subject: [PATCH 021/100] Nadrop (#1810) * Fix MissingValueDroppingTransformer bug * Add unit test * Add baseline --- .../MissingValueDroppingTransformer.cs | 2 +- .../Common/SavePipe/SavePipeDropNAs-Data.txt | 17 +++++++++++ .../DataPipe/TestDataPipe.cs | 29 +++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeDropNAs-Data.txt diff --git a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index 3c52227bec..456ba0dc58 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -204,7 +204,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac private ValueGetter> MakeVecGetter(IRow input, int iinfo) { - var srcGetter = input.GetGetter>(iinfo); + var srcGetter = input.GetGetter>(_srcCols[iinfo]); var buffer = default(VBuffer); var isNA = (InPredicate)_isNAs[iinfo]; var def = default(TDst); diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeDropNAs-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeDropNAs-Data.txt new file mode 100644 index 0000000000..f53121a8ab --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeDropNAs-Data.txt @@ -0,0 +1,17 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Num:R4:0-1 +#@ col=Sep:TX:2 +#@ col=NumNAsDropped:R4:3-** +#@ col={name=Sep2 type=TX src={ min=-1}} +#@ col={name=Text type=TX src={ min=-1 var=+}} +#@ col={name=Sep3 type=TX src={ min=-1}} +#@ col={name=TextNAsDropped type=U4 src={ min=-1 var=+} key=0-3} +#@ } +"" "" Sep Sep2 Sep3 +2 0 | 2 0 | Hello World! 
| 0 +3 4 | 3 4 | | +0 ? | 0 | Bye all | 3 1 +7 8 | 7 8 | Good bye | 2 +? ? | | this is a | diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs index e2b35aeb1a..115ebede8f 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs @@ -785,6 +785,35 @@ public void SavePipeTokenizerAndStopWords() Done(); } + [Fact] + public void SavePipeDropNAs() + { + string pathData = DeleteOutputPath("SavePipe", "DropNAs.txt"); + File.WriteAllLines(pathData, + new[] + { + "2,0,|,Hello World!", + "3,4,|,", + "0,nan,|,Bye all", + "7,8,|,Good bye", + "?,nan,|,this is a" + }); + + TestCore(pathData, false, + new[] + { + "loader=Text{header- sep=, col=Num:R4:0-1 col=Sep:TX:2 col=Text:TX:3}", + "xf=NADrop{col=NumNAsDropped:Num}", + "xf=Token{col=Text}", + "xf=Term{col=Text2:Text terms=Hello,all,Good,Bye}", + "xf=NADrop{col=TextNAsDropped:Text2}", + "xf=Copy{col=Sep2:Sep col=Sep3:Sep}", + "xf=Select{keepcol=Num keepcol=Sep keepcol=NumNAsDropped keepcol=Sep2 keepcol=Text keepcol=Sep3 keepcol=TextNAsDropped}" + }, baselineSchema: false, roundTripText: false); + + Done(); + } + [Fact] public void TestHashTransformFloat() { From 72ec121afe1a889218c750f7bda7ee5093c140b7 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Tue, 4 Dec 2018 12:51:35 -0800 Subject: [PATCH 022/100] Removed AlignedArray (#1657) * Removing Aligned Array usage from rff and timeseries. Removing aligned matrix and cpumathaligned entirely. 
moving Aligned array to FactorAware where it is only being used * adding some asserts * unrolling the loop, aligned removed from name, using new float comparision * enabling some more tests * case comment added, tanners feedback --- src/Microsoft.ML.CpuMath/AlignedMatrix.cs | 681 ------------------ src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 306 ++++---- .../CpuAligenedMathUtils.cs | 148 ---- .../CpuMathUtils.netcoreapp.cs | 166 ++--- .../CpuMathUtils.netstandard.cs | 15 +- src/Microsoft.ML.CpuMath/Sse.cs | 78 +- src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 295 ++++---- src/Microsoft.ML.CpuMath/Thunk.cs | 6 - .../FactorizationMachine}/AlignedArray.cs | 93 +-- ...AdaptiveSingularSpectrumSequenceModeler.cs | 71 +- .../RandomFourierFeaturizing.cs | 67 +- src/Native/CpuMathNative/Sse.cpp | 353 +++++---- .../UnitTests.cs | 189 +++-- .../DataPipe/TestDataPipeBase.cs | 10 +- .../TimeSeries.cs | 2 +- .../TimeSeriesDirectApi.cs | 24 +- .../TimeSeriesEstimatorTests.cs | 4 +- 17 files changed, 809 insertions(+), 1699 deletions(-) delete mode 100644 src/Microsoft.ML.CpuMath/AlignedMatrix.cs delete mode 100644 src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs rename src/{Microsoft.ML.CpuMath => Microsoft.ML.StandardLearners/FactorizationMachine}/AlignedArray.cs (53%) diff --git a/src/Microsoft.ML.CpuMath/AlignedMatrix.cs b/src/Microsoft.ML.CpuMath/AlignedMatrix.cs deleted file mode 100644 index 6d550fc3fc..0000000000 --- a/src/Microsoft.ML.CpuMath/AlignedMatrix.cs +++ /dev/null @@ -1,681 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. 
- -using Float = System.Single; - -using Microsoft.ML.Runtime.Internal.CpuMath.Core; -using System; -using System.Collections; -using System.Collections.Generic; - -namespace Microsoft.ML.Runtime.Internal.CpuMath -{ - using Conditional = System.Diagnostics.ConditionalAttribute; - - /// - /// This implements a logical array of Floats that is automatically aligned for SSE/AVX operations. - /// This is a thin wrapper around the AlignedArray type implemented in C++. This simply couples - /// the AlignedArray with a logical size, which does not include padding, while the AlignedArray - /// size does include padding. - /// - [BestFriend] - internal sealed class CpuAlignedVector : ICpuVector - { - private readonly AlignedArray _items; - private readonly int _size; // The logical size. - - /// - /// The value count. - /// - public int ValueCount { get { return _size; } } - - /// - /// The logical size of the vector. - /// - public int VectorSize { get { return _size; } } - - // Round cflt up to a multiple of cfltAlign. - private static int RoundUp(int cflt, int cfltAlign) - { - Contracts.Assert(0 < cflt); - // cfltAlign should be a power of two. - Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); - - // Determine the number of "blobs" of size cfltAlign. - int cblob = (cflt + cfltAlign - 1) / cfltAlign; - return cblob * cfltAlign; - } - - /// - /// Allocate an aligned vector with the given alignment (in bytes). - /// The alignment must be a power of two and at least sizeof(Float). - /// - public CpuAlignedVector(int size, int cbAlign) - { - Contracts.Assert(0 < size); - // cbAlign should be a power of two. 
- Contracts.Assert(sizeof(Float) <= cbAlign); - Contracts.Assert((cbAlign & (cbAlign - 1)) == 0); - - int cfltAlign = cbAlign / sizeof(Float); - int cflt = RoundUp(size, cfltAlign); - _items = new AlignedArray(cflt, cbAlign); - _size = size; - AssertValid(); - } - - public void Dispose() - { - } - - [Conditional("DEBUG")] - private void AssertValid() - { -#if DEBUG - Contracts.Assert(0 < _size && _size <= _items.Size); - - // The padding, [_size, _items.Size), should contain zeros. - for (int i = _size; i < _items.Size; i++) - Contracts.Assert(_items[i] == 0); -#endif - } - - /// - /// The physical AligenedArray items. - /// - public AlignedArray Items { get { return _items; } } - - /// - /// The alignment. - /// - public int CbAlign - { - get { return _items.CbAlign; } - } - - /// - /// Set and get the value of the vector at the given index. - /// - /// The index - /// The value at the given index - public Float this[int index] - { - get - { - Contracts.Assert(0 <= index && index < _size); - return _items[index]; - } - set - { - Contracts.Assert(0 <= index && index < _size); - _items[index] = value; - } - } - - /// - /// Get the value of the vector at the given index. - /// - /// The index - /// The value at the given index - public Float GetValue(int i) - { - Contracts.Assert(0 <= i && i < _size); - return _items[i]; - } - - /// - /// Assign randomized values to the vector elements via the input function. - /// - /// The input rand om function that takes no arguments and returns a float value - public void Randomize(Func rand) - { - Contracts.AssertValue(rand); - for (int i = 0; i < _size; i++) - _items[i] = rand(); - } - - /// - /// Assign zeros to the vector elements. - /// - public void Zero() - { - _items.ZeroItems(); - } - - /// - /// Copy the values into dst, starting at slot ivDst and advancing ivDst. 
- /// - /// The destination array - /// The starting index in the destination array - public void CopyTo(Float[] dst, ref int ivDst) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - _size); - _items.CopyTo(dst, ivDst, _size); - ivDst += _size; - } - - /// - /// Copy the values from this vector starting at slot ivSrc into dst, starting at slot ivDst. - /// The number of values that are copied is determined by count. - /// - /// The staring index in this vector - /// The destination array - /// The starting index in the destination array - /// The number of elements to be copied - public void CopyTo(int ivSrc, Float[] dst, int ivDst, int count) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= count && count <= dst.Length); - Contracts.Assert(0 <= ivSrc && ivSrc <= _size - count); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - count); - _items.CopyTo(ivSrc, dst, ivDst, count); - } - - /// - /// Copy the values from src, starting at slot index and advancing index, into this vector. - /// - /// The source array - /// The starting index in the source array - public void CopyFrom(Float[] src, ref int index) - { - Contracts.AssertValue(src); - Contracts.Assert(0 <= index && index <= src.Length - _size); - _items.CopyFrom(src.AsSpan(index, _size)); - index += _size; - } - - /// - /// Copy the values from src, starting at slot index and advancing index, into this vector, starting at slot ivDst. - /// The number of values that are copied is determined by count. 
- /// - /// The staring index in this vector - /// The source array - /// The starting index in the source array - /// The number of elements to be copied - public void CopyFrom(int ivDst, Float[] src, int ivSrc, int count) - { - Contracts.AssertValue(src); - Contracts.Assert(0 <= count && count <= src.Length); - Contracts.Assert(0 <= ivDst && ivDst <= _size - count); - Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - count); - _items.CopyFrom(ivDst, src.AsSpan(ivSrc, _size)); - } - - /// - /// Copy the values of src vector into this vector. The src vector must have the same size as this vector. - /// - /// The source vector - public void CopyFrom(CpuAlignedVector src) - { - Contracts.AssertValue(src); - Contracts.Assert(src._size == _size); - _items.CopyFrom(src._items); - } - - /// - /// Get the underlying AlignedArray as IEnumerator<Float>. - /// - public IEnumerator GetEnumerator() - { - for (int i = 0; i < _size; i++) - yield return _items[i]; - } - - IEnumerator IEnumerable.GetEnumerator() - { - return GetEnumerator(); - } - } - - /// - /// This implements a logical matrix of Floats that is automatically aligned for SSE/AVX operations. - /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). - /// - [BestFriend] - internal abstract class CpuAlignedMatrixBase - { - // _items includes "head" items filled with NaN, followed by RunLenPhy * RunCntPhy entries, followed by - // "tail" items, also filled with NaN. Note that RunLenPhy and RunCntPhy are divisible by the alignment - // specified in the ctor and are >= RunLen and RunCnt, respectively. It is illegal to access any slot - // outsize [_base, _base + RunLenPhy * RunCntPhy). The padding should all be zero (and maintained as such). - // The items are arranged in "runs" of length RunLen. There are RunCnt such runs. Each run ends with - // (RunLenPhy - RunLen) padding slots. 
There are an addition (RunCntPhy - RunCnt) padding runs of length - // RunLenPhy, which are entirely zero. Any native code should be able to assume and should maintain - // these invariants. - public AlignedArray Items { get; } - - protected readonly int FloatAlign; // The alignment. - - // Since FloatAlign is a power of two, shifting by Shift = log_2(FloatAlign) is the same as multiplying/dividing by FloatAlign. - protected readonly int Shift; - // Since FloatAlign is a power of two, bitwise and with Mask = FloatAlign - 1 will be the same as moding by FloatAlign. - protected readonly int Mask; - - // Logical length of runs (RunLen) and number of runs (RunCnt). - public readonly int RunLen; - public readonly int RunCnt; - - // Physical (padded) length and number of runs. - public readonly int RunLenPhy; - public readonly int RunCntPhy; - - /// - /// The logical number values in the matrix - /// - public int ValueCount => RunLen * RunCnt; - - /// - /// The logical number of rows - /// - public abstract int RowCount { get; } - - /// - /// The logical number of columns - /// - public abstract int ColCount { get; } - - /// - /// The physical number of rows - /// - public abstract int RowCountPhy { get; } - - /// - /// The pysical number of columns - /// - public abstract int ColCountPhy { get; } - - // Round cflt up to a multiple of cfltAlign. - protected static int RoundUp(int cflt, int cfltAlign) - { - Contracts.Assert(0 < cflt); - // cfltAlign should be a power of two. - Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); - - // Determine the number of "blobs" of size cfltAlign. - int cblob = (cflt + cfltAlign - 1) / cfltAlign; - return cblob * cfltAlign; - } - - /// - /// Allocate an aligned matrix with the given alignment (in bytes). - /// - protected CpuAlignedMatrixBase(int runLen, int runCnt, int cbAlign) - { - Contracts.Assert(0 < runLen); - Contracts.Assert(0 < runCnt); - // cbAlign should be a power of two. 
- Contracts.Assert(sizeof(Float) <= cbAlign); - Contracts.Assert((cbAlign & (cbAlign - 1)) == 0); - - RunLen = runLen; - RunCnt = runCnt; - - FloatAlign = cbAlign / sizeof(Float); - Shift = GeneralUtils.CbitLowZero((uint)FloatAlign); - Mask = FloatAlign - 1; - - RunLenPhy = RoundUp(runLen, FloatAlign); - RunCntPhy = RoundUp(runCnt, FloatAlign); - Items = new AlignedArray(RunLenPhy * RunCntPhy, cbAlign); - - AssertValid(); - } - - [Conditional("DEBUG")] - protected void AssertValid() - { -#if DEBUG - Contracts.Assert(0 < RunLen && RunLen <= RunLenPhy); - Contracts.Assert(0 < RunCnt && RunCnt <= RunCntPhy); - Contracts.Assert(RunLenPhy * RunCntPhy == Items.Size); - - // Assert that the padding at the end of each run contains zeros. - for (int i = 0; i < RunCnt; i++) - { - for (int j = RunLen; j < RunLenPhy; j++) - Contracts.Assert(Items[i * RunLenPhy + j] == 0); - } - - // Assert that the padding runs contain zeros. - for (int i = RunCnt; i < RunCntPhy; i++) - { - for (int j = 0; j < RunLenPhy; j++) - Contracts.Assert(Items[i * RunLenPhy + j] == 0); - } -#endif - } - - public void Dispose() - { - } - - /// - /// Assign randomized values to the matrix elements via the input function. - /// - /// The input rand om function that takes no arguments and returns a float value - public void Randomize(Func rand) - { - Contracts.AssertValue(rand); - for (int i = 0, k = 0; i < RunCnt; i++) - { - Contracts.Assert(k == i * RunLenPhy); - for (int j = 0; j < RunLen; j++) - Items[k + j] = rand(); - k += RunLenPhy; - } - } - - /// - /// Assign zeros to the matrix elements. - /// - public void Zero() - { - Items.ZeroItems(); - } - - /// - /// Copy the values of src matrix into this matrix. The src matrix must have the same physical and logical size as this matrix. 
- /// - /// The source matrix - public void CopyFrom(CpuAlignedMatrixBase src) - { - AssertValid(); - Contracts.AssertValue(src); - src.AssertValid(); - Contracts.Assert(src.RunLen == RunLen); - Contracts.Assert(src.RunCnt == RunCnt); - Contracts.Assert(src.RunLenPhy == RunLenPhy); - Contracts.Assert(src.RunCntPhy == RunCntPhy); - Items.CopyFrom(src.Items); - } - } - - /// - /// This implements a logical row-major matrix of Floats that is automatically aligned for SSE/AVX operations. - /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). - /// - [BestFriend] - internal abstract class CpuAlignedMatrixRowBase : CpuAlignedMatrixBase, ICpuBuffer - { - protected CpuAlignedMatrixRowBase(int crow, int ccol, int cbAlign) - : base(ccol, crow, cbAlign) - { - } - - /// - /// The logical number of rows - /// - public override int RowCount => RunCnt; - - /// - /// The logical number of columns - /// - public override int ColCount { get { return RunLen; } } - - /// - /// The physical number of rows - /// - public override int RowCountPhy { get { return RunCntPhy; } } - - /// - /// The physical number of columns - /// - public override int ColCountPhy { get { return RunLenPhy; } } - - /// - /// Copy the values into dst, starting at slot ivDst and advancing ivDst. - /// - /// The destination array - /// The starting index in the destination array - public void CopyTo(Float[] dst, ref int ivDst) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ValueCount); - - if (ColCount == ColCountPhy) - { - // Can copy all at once. - Items.CopyTo(0, dst, ivDst, ValueCount); - ivDst += ValueCount; - } - else - { - // Copy each row. - int ivSrc = 0; - for (int row = 0; row < RowCount; row++) - { - Items.CopyTo(ivSrc, dst, ivDst, ColCount); - ivSrc += ColCountPhy; - ivDst += ColCount; - } - } - } - - /// - /// Copy the values from src, starting at slot ivSrc and advancing ivSrc. 
- /// - /// The source array - /// The starting index in the source array - public void CopyFrom(Float[] src, ref int ivSrc) - { - Contracts.AssertValue(src); - Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - ValueCount); - - if (ColCount == ColCountPhy) - { - Items.CopyFrom(src.AsSpan(ivSrc, ValueCount)); - ivSrc += ValueCount; - } - else - { - for (int row = 0; row < RowCount; row++) - { - Items.CopyFrom(row * ColCountPhy, src.AsSpan(ivSrc, ColCount)); - ivSrc += ColCount; - } - } - } - - /// - /// Get the underlying AlignedArray as IEnumerator<Float>. - /// - public IEnumerator GetEnumerator() - { - for (int row = 0; row < RowCount; row++) - { - int ivBase = row * ColCountPhy; - for (int col = 0; col < ColCount; col++) - yield return Items[ivBase + col]; - } - } - - IEnumerator IEnumerable.GetEnumerator() - { - return GetEnumerator(); - } - } - - /// - /// This implements a row-major matrix of Floats that is automatically aligned for SSE/AVX operations. - /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). - /// - [BestFriend] - internal sealed class CpuAlignedMatrixRow : CpuAlignedMatrixRowBase, ICpuFullMatrix - { - public CpuAlignedMatrixRow(int crow, int ccol, int cbAlign) - : base(crow, ccol, cbAlign) - { - } - - /// - /// The logical number of rows - /// - public override int RowCount { get { return RunCnt; } } - - /// - /// The logical number of columns - /// - public override int ColCount { get { return RunLen; } } - - /// - /// The physical number of rows - /// - public override int RowCountPhy { get { return RunCntPhy; } } - - /// - /// The physical number of columns - /// - public override int ColCountPhy { get { return RunLenPhy; } } - - /// - /// Copy the values from this matrix, starting from the row into dst, starting at slot ivDst and advancing ivDst. 
- /// - /// The starting row in this matrix - /// The destination array - /// The starting index in the destination array - public void CopyTo(int row, Float[] dst, ref int ivDst) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= row && row < RowCount); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ColCount); - - Items.CopyTo(row * ColCountPhy, dst, ivDst, ColCount); - ivDst += ColCount; - } - - /// - /// Assign zeros to the values at the indices - /// - /// The indices - public void ZeroItems(int[] indices) - { - Contracts.AssertValue(indices); - - // REVIEW: Ideally, we'd adjust the indices once so we wouldn't need to - // repeatedly deal with padding adjustments. - CpuMathUtils.ZeroMatrixItems(Items, ColCount, ColCountPhy, indices); - } - } - - /// - /// This implements a logical matrix of Floats that is automatically aligned for SSE/AVX operations. - /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). - /// - [BestFriend] - internal sealed class CpuAlignedMatrixCol : CpuAlignedMatrixBase, ICpuFullMatrix - { - /// - /// Allocate an aligned matrix with the given alignment (in bytes). - /// - public CpuAlignedMatrixCol(int crow, int ccol, int cbAlign) - : base(crow, ccol, cbAlign) - { - } - - /// - /// The logical number of rows - /// - public override int RowCount { get { return RunCnt; } } - - /// - /// The logical number of columns - /// - public override int ColCount { get { return RunLen; } } - - /// - /// The physical number of rows - /// - public override int RowCountPhy { get { return RunCntPhy; } } - - /// - /// The physical number of columns - /// - public override int ColCountPhy { get { return RunLenPhy; } } - - /// - /// Copy the values into dst, starting at slot ivDst and advancing ivDst. 
- /// - /// The destination array - /// The starting index in the destination array - public void CopyTo(Float[] dst, ref int ivDst) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ValueCount); - - for (int row = 0; row < RowCount; row++) - { - for (int col = 0; col < ColCount; col++) - dst[ivDst++] = Items[row + col * RowCountPhy]; - } - } - - /// - /// Copy the values from this matrix, starting from the row into dst, starting at slot ivDst and advancing ivDst. - /// - /// The starting row in this matrix - /// The destination array - /// The starting index in the destination array - public void CopyTo(int row, Float[] dst, ref int ivDst) - { - Contracts.AssertValue(dst); - Contracts.Assert(0 <= row && row < RowCount); - Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ColCount); - - for (int col = 0; col < ColCount; col++) - dst[ivDst++] = Items[row + col * RowCountPhy]; - } - - /// - /// Copy the values from src, starting at slot ivSrc and advancing ivSrc. - /// - /// The source array - /// The starting index in the source array - public void CopyFrom(Float[] src, ref int ivSrc) - { - Contracts.AssertValue(src); - Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - ValueCount); - for (int row = 0; row < RowCount; row++) - { - for (int col = 0; col < ColCount; col++) - Items[row + col * RowCountPhy] = src[ivSrc++]; - } - } - - /// - /// Assign zeros to the values at the indices - /// - /// The indices - public void ZeroItems(int[] indices) - { - Contracts.AssertValue(indices); - - // REVIEW: Ideally, we'd adjust the indices once so we wouldn't need to - // repeatedly deal with padding adjustments. - foreach (int iv in indices) - { - int row = iv / ColCount; - int col = iv % ColCount; - Items[row + col * ColCountPhy] = 0; - } - } - - /// - /// Get the underlying AlignedArray as IEnumerator<Float>. 
- /// - public IEnumerator GetEnumerator() - { - for (int row = 0; row < RowCount; row++) - { - for (int col = 0; col < ColCount; col++) - yield return Items[row + col * RowCountPhy]; - } - } - - IEnumerator IEnumerable.GetEnumerator() - { - return GetEnumerator(); - } - } -} \ No newline at end of file diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index 2156ddf5fa..d6fb169094 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -152,11 +152,6 @@ private static Vector256 MultiplyAdd(Vector256 src1, Vector256 mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -169,8 +164,9 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; float* pMatCurrent = pmat; + int numRows = crow; - while (pDstCurrent < pDstEnd) + while (pDstCurrent + 4 <= pDstEnd) { Vector256 res0 = Avx.SetZeroVector256(); Vector256 res1 = Avx.SetZeroVector256(); @@ -184,10 +180,11 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr int misalignment = (int)(address % 32); int remainder = 0; - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (ccol % 8 != 0)) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) + remainder = length % 8; + while (pSrcCurrent + 8 <= pSrcEnd) { Vector256 vector = Avx.LoadVector256(pSrcCurrent); @@ -259,32 +256,32 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr // unaligned loads where we mask the input each time. 
remainder = length; } + } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (8 - remainder); - pSrcCurrent -= (8 - remainder); + pMatCurrent -= (8 - remainder); + pSrcCurrent -= (8 - remainder); - Vector256 mask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); + Vector256 mask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - float* pMatTemp = pMatCurrent; - Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); - Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); + float* pMatTemp = pMatCurrent; + Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); + Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); - res0 = MultiplyAdd(x01, vector, res0); - res1 = MultiplyAdd(x11, vector, res1); - res2 = MultiplyAdd(x21, vector, res2); - res3 = MultiplyAdd(x31, vector, res3); + res0 = MultiplyAdd(x01, vector, res0); + res1 = MultiplyAdd(x11, vector, res1); + res2 = MultiplyAdd(x21, vector, res2); + res3 = MultiplyAdd(x31, vector, res3); - pMatCurrent += 8; - pSrcCurrent += 8; - } + pMatCurrent += 8; + pSrcCurrent += 8; } // Add up the entries of each, 
with the 4 results in res0 @@ -297,17 +294,58 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr pDstCurrent += 4; pMatCurrent += 3 * ccol; + numRows -= 4; + } + + // falling through the case statements + switch (numRows) + { + case 3: + *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); + goto case 2; + case 2: + *(pDstCurrent + 1) = RowMultiply(pMatCurrent + ccol, psrc, pSrcEnd, ccol); + goto case 1; + case 1: + *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); + break; } } } - // Partial sparse source vector. - public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, - int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) + private static unsafe float RowMultiply(float* pMatCurrent, float* pSrcCurrent, float* pSrcEnd, int ccol) { - MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); + Vector256 res0 = Avx.SetZeroVector256(); + int remainder = ccol % 8; + while (pSrcCurrent + 8 <= pSrcEnd) + { + Vector256 vector = Avx.LoadVector256(pSrcCurrent); + + float* pMatTemp = pMatCurrent; + res0 = MultiplyAdd(pMatTemp, vector, res0); + + pSrcCurrent += 8; + pMatCurrent += 8; + } + + res0 = VectorSum256(in res0); + float sum = Sse.ConvertToSingle(Sse.AddScalar(Avx.GetLowerHalf(res0), GetHigh(res0))); + + // falling through the case statements + switch (remainder) + { + case 7: sum += *(pSrcCurrent + 6) * *(pMatCurrent + 6); goto case 6; + case 6: sum += *(pSrcCurrent + 5) * *(pMatCurrent + 5); goto case 5; + case 5: sum += *(pSrcCurrent + 4) * *(pMatCurrent + 4); goto case 4; + case 4: sum += *(pSrcCurrent + 3) * *(pMatCurrent + 3); goto case 3; + case 3: sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); goto case 2; + case 2: sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); goto case 1; + case 1: sum += *(pSrcCurrent) * *(pMatCurrent); break; + } + return sum; } + // Partial sparse source vector. 
public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { @@ -461,11 +499,6 @@ Vector256 SparseMultiplicationAcrossRow() } } - public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) - { - MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); - } - public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -478,43 +511,31 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan h01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each slot of h01 (ABCD) into its own register. - Vector128 h11 = Avx.Permute(h01, 0x55); // B - Vector128 h21 = Avx.Permute(h01, 0xAA); // C - Vector128 h31 = Avx.Permute(h01, 0xFF); // D h01 = Avx.Permute(h01, 0x00); // A Vector256 x01 = Avx.SetHighLow(h01, h01); - Vector256 x11 = Avx.SetHighLow(h11, h11); - Vector256 x21 = Avx.SetHighLow(h21, h21); - Vector256 x31 = Avx.SetHighLow(h31, h31); - int length = crow; float* pDstCurrent = pdst; nuint address = (nuint)(pMatCurrent); int misalignment = (int)(address % 32); - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (crow % 8 != 0)) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) + remainder = crow % 8; + while (pDstCurrent + 8 <= pDstEnd) { float* pMatTemp = pMatCurrent; Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); Avx.Store(pDstCurrent, x02); pDstCurrent += 8; @@ -523,7 +544,6 @@ public static unsafe 
void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan x02 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); Avx.Store(pDstCurrent, x02); pMatCurrent += misalignment; @@ -569,15 +578,7 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); Avx.Store(pDstCurrent, x02); pDstCurrent += 8; @@ -591,47 +592,36 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 256-bit block by moving back so 
that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); + pMatCurrent -= (8 - remainder); + pDstCurrent -= (8 - remainder); + Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); + float* pMatTemp = pMatCurrent; + Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); + x02 = Avx.Multiply(x01, x02); - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); + Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); + Vector256 x3 = Avx.LoadVector256(pDstCurrent); + x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } + Avx.Store(pDstCurrent, x02); + pDstCurrent += 8; + pMatCurrent += 8; } - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + pSrcCurrent += 1; + numCol -= 1; } // We do 4-way unrolling - while (pSrcCurrent < pSrcEnd) + while (pSrcCurrent + 4 <= pSrcEnd) { Vector128 h01 = Sse.LoadVector128(pSrcCurrent); // Replicate each slot of h01 (ABCD) into its own register. 
@@ -651,9 +641,10 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); @@ -674,7 +665,6 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); @@ -738,46 +728,92 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); + if (remainder != 0) + { + pMatCurrent -= (8 - remainder); + pDstCurrent -= (8 - remainder); + Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + float* pMatTemp = pMatCurrent; + Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); + x02 = Avx.Multiply(x01, x02); + x12 = Avx.Multiply(x11, x12); + x22 = Avx.Multiply(x21, x22); + x32 = Avx.Multiply(x31, x32); - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); 
- Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); + Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); + Vector256 x3 = Avx.LoadVector256(pDstCurrent); + x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); - x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } + Avx.Store(pDstCurrent, x02); + pDstCurrent += 8; + pMatCurrent += 8; } pMatCurrent += 3 * crow; pSrcCurrent += 4; + numCol -= 4; + } + + // falling through the case statements + switch (numCol) + { + case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); goto case 2; + case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); goto case 1; + case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; } } } + private static unsafe void ColumnMultiply(float* pMatCurrent, float* pSrcCurrent, float* pdst, float* pDstEnd, int crow) + { + Vector128 h01 = Sse.LoadVector128(pSrcCurrent); + // Replicate each slot of h01 (ABCD) into its own register. + h01 = Avx.Permute(h01, 0x00); // A + Vector256 x01 = Avx.SetHighLow(h01, h01); + int remainder = crow % 8; + float* pDstCurrent = pdst; + + while (pDstCurrent + 8 <= pDstEnd) + { + // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads + // (due to semantics of the legacy encoding). + // We don't need an assert, since the instruction will throw for unaligned inputs. 
+ float* pMatTemp = pMatCurrent; + Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); + x02 = Avx.Add(x02, Avx.LoadVector256(pDstCurrent)); + + Avx.Store(pDstCurrent, x02); + pDstCurrent += 8; + pMatCurrent += 8; + } + + // falling through the case statements + switch (remainder) + { + case 7: *(pDstCurrent + 6) += *(pSrcCurrent) * *(pMatCurrent + 6); goto case 6; + case 6: *(pDstCurrent + 5) += *(pSrcCurrent) * *(pMatCurrent + 5); goto case 5; + case 5: *(pDstCurrent + 4) += *(pSrcCurrent) * *(pMatCurrent + 4); goto case 4; + case 4: *(pDstCurrent + 3) += *(pSrcCurrent) * *(pMatCurrent + 3); goto case 3; + case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); goto case 2; + case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); goto case 1; + case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; + } + return; + } + // dst[i] += scale public static unsafe void AddScalarU(float scalar, Span dst) { diff --git a/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs b/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs deleted file mode 100644 index 33690055b2..0000000000 --- a/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs +++ /dev/null @@ -1,148 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using Microsoft.ML.Runtime.Internal.CpuMath.Core; - -namespace Microsoft.ML.Runtime.Internal.CpuMath -{ - [BestFriend] - internal static class CpuAligenedMathUtils - where TMatrix : CpuAlignedMatrixBase, ICpuFullMatrix - { - /// - /// Assert the compatibility of the underlying AlignedArray for the input matrix in terms of alignment amount. 
- /// - /// The input matrix - public static void AssertCompatible(ICpuFullMatrix values) - { -#if DEBUG - var mat = values as TMatrix; - Contracts.AssertValue(mat); - Contracts.Assert((mat.Items.CbAlign % CpuMathUtils.GetVectorAlignment()) == 0); -#endif - } - - /// - /// Assert the compatibility of the underlying AlignedArray for the input vector in terms of alignment amount. - /// - /// The input vector - public static void AssertCompatible(ICpuVector values) - { -#if DEBUG - CpuAlignedVector vec = values as CpuAlignedVector; - Contracts.AssertValue(vec); - Contracts.Assert((vec.Items.CbAlign % CpuMathUtils.GetVectorAlignment()) == 0); -#endif - } - - private static TMatrix A(ICpuFullMatrix x) - { - AssertCompatible(x); - return (TMatrix)x; - } - - private static CpuAlignedVector A(ICpuVector x) - { - AssertCompatible(x); - return (CpuAlignedVector)x; - } - - private static void AssertCompatibleCore(ICpuMatrix mat, ICpuVector src, ICpuVector dst) - { - AssertCompatible(src); - AssertCompatible(dst); - Contracts.Assert(mat.ColCount == src.VectorSize); - Contracts.Assert(mat.RowCount == dst.VectorSize); - } - - /// - /// Asserts the following: - /// 1. The compatibility of the underlying AlignedArray for mat in terms of alignment amount. - /// 2. The compatibility of the underlying AlignedArray for src in terms of alignment amount. - /// 3. The compatibility of the underlying AlignedArray for dst in terms of alignment amount. - /// 4. The compatibility of the matrix-vector multiplication mat * src = dst. - /// - /// - /// - /// - public static void AssertCompatible(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) - { - // Also check the physical sizes. 
- AssertCompatible(mat); - AssertCompatibleCore(mat, src, dst); - var m = A(mat); - Contracts.Assert(m.ColCountPhy == A(src).Items.Size); - Contracts.Assert(m.RowCountPhy == A(dst).Items.Size); - } - - /// - /// Matrix multiplication: - /// dst = mat * src - /// - /// The multiplier matrix - /// The source vector - /// The destination vector - public static void MatTimesSrc(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) - { - bool colMajor = typeof(TMatrix) == typeof(CpuAlignedMatrixCol); - AssertCompatible(mat, src, dst); - var m = A(mat); - CpuMathUtils.MatrixTimesSource(colMajor, m.Items, A(src).Items, A(dst).Items, m.RunCnt); - } - - /// - /// Matrix transpose multiplication: - /// dst = mat' * src - /// - /// The multiplier matrix - /// The source vector - /// The destination vector - public static void MatTranTimesSrc(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) - { - bool colMajor = typeof(TMatrix) == typeof(CpuAlignedMatrixCol); - AssertCompatible(mat, dst, src); - var m = A(mat); - CpuMathUtils.MatrixTimesSource(!colMajor, m.Items, A(src).Items, A(dst).Items, m.RunCnt); - } - } - - public static class GeneralUtils - { - /// - /// Count the number of zero bits in the lonest string of zero's from the lowest significant bit of the input integer. 
- /// - /// The input integer - /// - public static int CbitLowZero(uint u) - { - if (u == 0) - return 32; - - int cbit = 0; - if ((u & 0x0000FFFF) == 0) - { - cbit += 16; - u >>= 16; - } - if ((u & 0x000000FF) == 0) - { - cbit += 8; - u >>= 8; - } - if ((u & 0x0000000F) == 0) - { - cbit += 4; - u >>= 4; - } - if ((u & 0x00000003) == 0) - { - cbit += 2; - u >>= 2; - } - if ((u & 0x00000001) == 0) - cbit += 1; - return cbit; - } - } -} diff --git a/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs b/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs index d895e590a9..6546eeef31 100644 --- a/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs +++ b/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs @@ -11,76 +11,62 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath { internal static partial class CpuMathUtils { - // The count of bytes in Vector128, corresponding to _cbAlign in AlignedArray - private const int Vector128Alignment = 16; - - // The count of bytes in Vector256, corresponding to _cbAlign in AlignedArray - private const int Vector256Alignment = 32; - - // The count of bytes in a 32-bit float, corresponding to _cbAlign in AlignedArray - private const int FloatAlignment = 4; - - // If neither AVX nor SSE is supported, return basic alignment for a 4-byte float. - [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] - public static int GetVectorAlignment() - => Avx.IsSupported ? Vector256Alignment : (Sse.IsSupported ? 
Vector128Alignment : FloatAlignment); - - public static void MatrixTimesSource(bool transpose, AlignedArray matrix, AlignedArray source, AlignedArray destination, int stride) + public static void MatrixTimesSource(bool transpose, ReadOnlySpan matrix, ReadOnlySpan source, Span destination, int stride) { - Contracts.Assert(matrix.Size == destination.Size * source.Size); + Contracts.AssertNonEmpty(matrix); + Contracts.AssertNonEmpty(source); + Contracts.AssertNonEmpty(destination); + Contracts.Assert(matrix.Length == destination.Length * source.Length); Contracts.Assert(stride >= 0); - if (Avx.IsSupported) + if (!transpose) { - if (!transpose) + if (Avx.IsSupported && source.Length >= 8) { - Contracts.Assert(stride <= destination.Size); - AvxIntrinsics.MatMul(matrix, source, destination, stride, source.Size); + Contracts.Assert(stride <= destination.Length); + AvxIntrinsics.MatMul(matrix, source, destination, stride, source.Length); } - else + else if (Sse.IsSupported && source.Length >= 4) { - Contracts.Assert(stride <= source.Size); - AvxIntrinsics.MatMulTran(matrix, source, destination, destination.Size, stride); - } - } - else if (Sse.IsSupported) - { - if (!transpose) - { - Contracts.Assert(stride <= destination.Size); - SseIntrinsics.MatMul(matrix, source, destination, stride, source.Size); + Contracts.Assert(stride <= destination.Length); + SseIntrinsics.MatMul(matrix, source, destination, stride, source.Length); } else { - Contracts.Assert(stride <= source.Size); - SseIntrinsics.MatMulTran(matrix, source, destination, destination.Size, stride); - } - } - else - { - if (!transpose) - { - Contracts.Assert(stride <= destination.Size); + Contracts.Assert(stride <= destination.Length); for (int i = 0; i < stride; i++) { float dotProduct = 0; - for (int j = 0; j < source.Size; j++) + for (int j = 0; j < source.Length; j++) { - dotProduct += matrix[i * source.Size + j] * source[j]; + dotProduct += matrix[i * source.Length + j] * source[j]; } destination[i] = 
dotProduct; } } + } + else + { + if (Avx.IsSupported && destination.Length >= 8) + { + Contracts.Assert(stride <= source.Length); + AvxIntrinsics.MatMulTran(matrix, source, destination, destination.Length, stride); + } + else if (Sse.IsSupported && destination.Length >=4) + { + Contracts.Assert(stride <= source.Length); + SseIntrinsics.MatMulTran(matrix, source, destination, destination.Length, stride); + } else { - Contracts.Assert(stride <= source.Size); - for (int i = 0; i < destination.Size; i++) + Contracts.Assert(stride <= source.Length); + for (int i = 0; i < destination.Length; i++) { float dotProduct = 0; for (int j = 0; j < stride; j++) { - dotProduct += matrix[j * source.Size + i] * source[j]; + dotProduct += matrix[j * destination.Length + i] * source[j]; } destination[i] = dotProduct; @@ -89,17 +75,22 @@ public static void MatrixTimesSource(bool transpose, AlignedArray matrix, Aligne } } - public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgposSrc, AlignedArray sourceValues, - int posMin, int iposMin, int iposLimit, AlignedArray destination, int stride) + public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan rgposSrc, ReadOnlySpan sourceValues, + int posMin, int iposMin, int iposLimit, Span destination, int stride) { Contracts.Assert(iposMin >= 0); Contracts.Assert(iposMin <= iposLimit); Contracts.Assert(iposLimit <= rgposSrc.Length); - Contracts.Assert(matrix.Size == destination.Size * sourceValues.Size); + Contracts.AssertNonEmpty(matrix); + Contracts.AssertNonEmpty(sourceValues); + Contracts.AssertNonEmpty(destination); + Contracts.AssertNonEmpty(rgposSrc); + Contracts.Assert(stride > 0); + Contracts.Assert(matrix.Length == destination.Length * sourceValues.Length); if (iposMin >= iposLimit) { - destination.ZeroItems(); + destination.Clear(); return; } @@ -108,24 +99,24 @@ public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgpo if (Avx.IsSupported) { - Contracts.Assert(stride <= 
destination.Size); - AvxIntrinsics.MatMulP(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride, sourceValues.Size); + Contracts.Assert(stride <= destination.Length); + AvxIntrinsics.MatMulP(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride, sourceValues.Length); } else if (Sse.IsSupported) { - Contracts.Assert(stride <= destination.Size); - SseIntrinsics.MatMulP(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride, sourceValues.Size); + Contracts.Assert(stride <= destination.Length); + SseIntrinsics.MatMulP(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride, sourceValues.Length); } else { - Contracts.Assert(stride <= destination.Size); + Contracts.Assert(stride <= destination.Length); for (int i = 0; i < stride; i++) { float dotProduct = 0; for (int j = iposMin; j < iposLimit; j++) { int col = rgposSrc[j] - posMin; - dotProduct += matrix[i * sourceValues.Size + col] * sourceValues[col]; + dotProduct += matrix[i * sourceValues.Length + col] * sourceValues[col]; } destination[i] = dotProduct; } @@ -636,71 +627,6 @@ public static float L2DistSquared(ReadOnlySpan left, ReadOnlySpan } } - public static void ZeroMatrixItems(AlignedArray destination, int ccol, int cfltRow, int[] indices) - { - Contracts.Assert(ccol > 0); - Contracts.Assert(ccol <= cfltRow); - - if (ccol == cfltRow) - { - ZeroItemsU(destination, destination.Size, indices, indices.Length); - } - else - { - ZeroMatrixItemsCore(destination, destination.Size, ccol, cfltRow, indices, indices.Length); - } - } - - private static unsafe void ZeroItemsU(AlignedArray destination, int c, int[] indices, int cindices) - { - fixed (float* pdst = &destination.Items[0]) - fixed (int* pidx = &indices[0]) - { - for (int i = 0; i < cindices; ++i) - { - int index = pidx[i]; - Contracts.Assert(index >= 0); - Contracts.Assert(index < c); - pdst[index] = 0; - } - } - } - - private static unsafe void 
ZeroMatrixItemsCore(AlignedArray destination, int c, int ccol, int cfltRow, int[] indices, int cindices) - { - fixed (float* pdst = &destination.Items[0]) - fixed (int* pidx = &indices[0]) - { - int ivLogMin = 0; - int ivLogLim = ccol; - int ivPhyMin = 0; - - for (int i = 0; i < cindices; ++i) - { - int index = pidx[i]; - Contracts.Assert(index >= 0); - Contracts.Assert(index < c); - - int col = index - ivLogMin; - if ((uint)col >= (uint)ccol) - { - Contracts.Assert(ivLogMin > index || index >= ivLogLim); - - int row = index / ccol; - ivLogMin = row * ccol; - ivLogLim = ivLogMin + ccol; - ivPhyMin = row * cfltRow; - - Contracts.Assert(index >= ivLogMin); - Contracts.Assert(index < ivLogLim); - col = index - ivLogMin; - } - - pdst[ivPhyMin + col] = 0; - } - } - } - public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan source, float threshold, Span v, Span w) { Contracts.AssertNonEmpty(source); diff --git a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs index 5ecbc62be1..400b70d651 100644 --- a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs +++ b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs @@ -11,17 +11,10 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath [BestFriend] internal static partial class CpuMathUtils { - // The count of bytes in Vector128, corresponding to _cbAlign in AlignedArray - private const int Vector128Alignment = 16; + public static void MatrixTimesSource(bool transpose, ReadOnlySpan matrix, ReadOnlySpan source, Span destination, int stride) => SseUtils.MatTimesSrc(transpose, matrix, source, destination, stride); - [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] - public static int GetVectorAlignment() - => Vector128Alignment; - - public static void MatrixTimesSource(bool transpose, AlignedArray matrix, AlignedArray source, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(transpose, matrix, source, destination, stride); - 
- public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgposSrc, AlignedArray sourceValues, - int posMin, int iposMin, int iposLimit, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride); + public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan rgposSrc, ReadOnlySpan sourceValues, + int posMin, int iposMin, int iposLimit, Span destination, int stride) => SseUtils.MatTimesSrc(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride); public static void Add(float value, Span destination) => SseUtils.Add(value, destination); @@ -63,8 +56,6 @@ public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgpo public static float L2DistSquared(ReadOnlySpan left, ReadOnlySpan right, int count) => SseUtils.L2DistSquared(left, right, count); - public static void ZeroMatrixItems(AlignedArray destination, int ccol, int cfltRow, int[] indices) => SseUtils.ZeroMatrixItems(destination, ccol, cfltRow, indices); - public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan source, float threshold, Span v, Span w) => SseUtils.SdcaL1UpdateDense(primalUpdate, count, source, threshold, v, w); diff --git a/src/Microsoft.ML.CpuMath/Sse.cs b/src/Microsoft.ML.CpuMath/Sse.cs index 8b1c4da70f..d57b400e9c 100644 --- a/src/Microsoft.ML.CpuMath/Sse.cs +++ b/src/Microsoft.ML.CpuMath/Sse.cs @@ -15,74 +15,53 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath [BestFriend] internal static class SseUtils { - public const int CbAlign = 16; - - private static bool Compat(AlignedArray a) - { - Contracts.AssertValue(a); - Contracts.Assert(a.Size > 0); - return a.CbAlign == CbAlign; - } - - private static unsafe float* Ptr(AlignedArray a, float* p) - { - Contracts.AssertValue(a); - float* q = p + a.GetBase((long)p); - Contracts.Assert(((long)q & (CbAlign - 1)) == 0); - return q; - } - - public static void MatTimesSrc(bool tran, 
AlignedArray mat, AlignedArray src, AlignedArray dst, int crun) + public static void MatTimesSrc(bool tran, ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crun) { - Contracts.Assert(Compat(mat)); - Contracts.Assert(Compat(src)); - Contracts.Assert(Compat(dst)); - Contracts.Assert(mat.Size == dst.Size * src.Size); + Contracts.Assert(mat.Length == dst.Length * src.Length); unsafe { - fixed (float* pmat = &mat.Items[0]) - fixed (float* psrc = &src.Items[0]) - fixed (float* pdst = &dst.Items[0]) + fixed (float* pmat = &mat[0]) + fixed (float* psrc = &src[0]) + fixed (float* pdst = &dst[0]) { if (!tran) { - Contracts.Assert(0 <= crun && crun <= dst.Size); - Thunk.MatMul(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), crun, src.Size); + Contracts.Assert(0 <= crun && crun <= dst.Length); + Thunk.MatMul(pmat, psrc, pdst, crun, src.Length); } else { - Contracts.Assert(0 <= crun && crun <= src.Size); - Thunk.MatMulTran(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), dst.Size, crun); + Contracts.Assert(0 <= crun && crun <= src.Length); + Thunk.MatMulTran(pmat, psrc, pdst, dst.Length, crun); } } } } - public static void MatTimesSrc(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray srcValues, - int posMin, int iposMin, int iposLim, AlignedArray dst, int crun) + public static void MatTimesSrc(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan srcValues, + int posMin, int iposMin, int iposLim, Span dst, int crun) { - Contracts.Assert(Compat(mat)); - Contracts.Assert(Compat(srcValues)); - Contracts.Assert(Compat(dst)); Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); - Contracts.Assert(mat.Size == dst.Size * srcValues.Size); + Contracts.Assert(mat.Length == dst.Length * srcValues.Length); if (iposMin >= iposLim) { - dst.ZeroItems(); + dst.Clear(); return; } + Contracts.AssertNonEmpty(rgposSrc); + unsafe { - fixed (float* pdst = &dst.Items[0]) - fixed (float* pmat = &mat.Items[0]) - fixed (float* psrc = &srcValues.Items[0]) + fixed 
(float* pdst = &dst[0]) + fixed (float* pmat = &mat[0]) + fixed (float* psrc = &srcValues[0]) fixed (int* ppossrc = &rgposSrc[0]) { - Contracts.Assert(0 <= crun && crun <= dst.Size); - Thunk.MatMulP(Ptr(mat, pmat), ppossrc, Ptr(srcValues, psrc), posMin, iposMin, iposLim, Ptr(dst, pdst), crun, srcValues.Size); + Contracts.Assert(0 <= crun && crun <= dst.Length); + Thunk.MatMulP(pmat, ppossrc, psrc, posMin, iposMin, iposLim, pdst, crun, srcValues.Length); } } } @@ -366,23 +345,6 @@ public static float L2DistSquared(ReadOnlySpan a, ReadOnlySpan b, } } - public static void ZeroMatrixItems(AlignedArray dst, int ccol, int cfltRow, int[] indices) - { - Contracts.Assert(0 < ccol && ccol <= cfltRow); - - unsafe - { - fixed (float* pdst = &dst.Items[0]) - fixed (int* pi = &indices[0]) - { - if (ccol == cfltRow) - Thunk.ZeroItemsU(Ptr(dst, pdst), dst.Size, pi, indices.Length); - else - Thunk.ZeroMatrixItemsCore(Ptr(dst, pdst), dst.Size, ccol, cfltRow, pi, indices.Length); - } - } - } - public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan src, float threshold, Span v, Span w) { Contracts.AssertNonEmpty(src); diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index b83fd6bbc6..98394f5d7d 100644 --- a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -116,11 +116,6 @@ internal static Vector128 GetNewDst128(in Vector128 xDst1, in Vect } // Multiply matrix times vector into vector. 
- public static unsafe void MatMul(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) - { - MatMul(mat.Items, src.Items, dst.Items, crow, ccol); - } - public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -133,8 +128,9 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; float* pMatCurrent = pmat; + int numRows = crow; - while (pDstCurrent < pDstEnd) + while (pDstCurrent + 4 <= pDstEnd) { Vector128 res0 = Sse.SetZeroVector128(); Vector128 res1 = Sse.SetZeroVector128(); @@ -148,10 +144,11 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr int misalignment = (int)(address % 16); int remainder = 0; - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (ccol % 4 != 0)) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) + remainder = length % 4; + while (pSrcCurrent + 4 <= pSrcEnd) { Vector128 vector = Sse.LoadVector128(pSrcCurrent); @@ -233,32 +230,32 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr // unaligned loads where we mask the input each time. 
remainder = length; } + } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); + pMatCurrent -= (4 - remainder); + pSrcCurrent -= (4 - remainder); - Vector128 mask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); + Vector128 mask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); - Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); + float* pMatTemp = pMatCurrent; + Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); + Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); - res0 = Sse.Add(res0, Sse.Multiply(x01, vector)); - res1 = Sse.Add(res1, Sse.Multiply(x11, vector)); - res2 = Sse.Add(res2, Sse.Multiply(x21, vector)); - res3 = Sse.Add(res3, Sse.Multiply(x31, vector)); + res0 = Sse.Add(res0, Sse.Multiply(x01, vector)); + res1 = Sse.Add(res1, Sse.Multiply(x11, vector)); + res2 = Sse.Add(res2, Sse.Multiply(x21, vector)); + res3 = Sse.Add(res3, Sse.Multiply(x31, vector)); - pMatCurrent += 4; - pSrcCurrent += 
4; - } + pMatCurrent += 4; + pSrcCurrent += 4; } // Add up the entries of each, with the 4 results in res0 @@ -269,17 +266,56 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr Sse.Store(pDstCurrent, res0); pDstCurrent += 4; pMatCurrent += 3 * ccol; + numRows -= 4; + } + + // falling through the case statements + switch (numRows) + { + case 3: + *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); + goto case 2; + case 2: + *(pDstCurrent + 1) = RowMultiply(pMatCurrent + ccol, psrc, pSrcEnd, ccol); + goto case 1; + case 1: + *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); + break; } } } - // Partial sparse source vector. - public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, - int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) + private static unsafe float RowMultiply(float* pMatCurrent, float* pSrcCurrent, float* pSrcEnd, int ccol) { - MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); + Vector128 res0 = Sse.SetZeroVector128(); + int remainder = ccol % 4; + while (pSrcCurrent + 4 <= pSrcEnd) + { + Vector128 vector = Sse.LoadVector128(pSrcCurrent); + + float* pMatTemp = pMatCurrent; + Vector128 x01 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp)); + res0 = Sse.Add(res0, x01); + + pSrcCurrent += 4; + pMatCurrent += 4; + } + + // Add up the entries of each, with the 4 results in res0 + res0 = VectorSum128(in res0); + float sum = Sse.ConvertToSingle(res0); + + // falling through the case statements + switch (remainder) + { + case 3: sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); goto case 2; + case 2: sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); goto case 1; + case 1: sum += *(pSrcCurrent) * *(pMatCurrent); break; + } + return sum; } + // Partial sparse source vector. 
public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { @@ -436,11 +472,6 @@ Vector128 SparseMultiplicationAcrossRow() } } - public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) - { - MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); - } - public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -453,16 +484,14 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each 32-bit slot of x01 (ABCD) into its own register. - Vector128 x11 = Sse.Shuffle(x01, x01, 0x55); // B - Vector128 x21 = Sse.Shuffle(x01, x01, 0xAA); // C - Vector128 x31 = Sse.Shuffle(x01, x01, 0xFF); // D x01 = Sse.Shuffle(x01, x01, 0x00); // A int length = crow; @@ -471,20 +500,14 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); Sse.Store(pDstCurrent, x02); pDstCurrent += 4; @@ -493,7 +516,6 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan x02 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, 
x32); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); + x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); Sse.Store(pDstCurrent, x02); pMatCurrent += misalignment; @@ -538,15 +549,7 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadAlignedVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadAlignedVector128(pMatTemp += crow)); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); Sse.Store(pDstCurrent, x02); pDstCurrent += 4; @@ -560,47 +563,36 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); + Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - x02 = Sse.Add(x02, x12); - x22 = 
Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); + float* pMatTemp = pMatCurrent; + Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); + x02 = Sse.Multiply(x01, x02); - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); + Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); + Vector128 x3 = Sse.LoadVector128(pDstCurrent); + x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } + Sse.Store(pDstCurrent, x02); + pDstCurrent += 4; + pMatCurrent += 4; } - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + numCol -= 1; + pSrcCurrent += 1; } // We do 4-way unrolling - while (pSrcCurrent < pSrcEnd) + while (pSrcCurrent + 4 <= pSrcEnd) { Vector128 x01 = Sse.LoadVector128(pSrcCurrent); // Replicate each 32-bit slot of x01 (ABCD) into its own register. 
@@ -615,9 +607,10 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); @@ -638,7 +631,6 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); + x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); @@ -701,43 +693,84 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); + if (remainder != 0) + { + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); + Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + float* pMatTemp = pMatCurrent; + Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); + Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, x32); - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); 
- Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); + Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); + Vector128 x3 = Sse.LoadVector128(pDstCurrent); + x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); - x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } + x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); + Sse.Store(pDstCurrent, x02); + pDstCurrent += 4; + pMatCurrent += 4; } pMatCurrent += 3 * crow; pSrcCurrent += 4; + numCol -= 4; } + + // falling through the case statements + switch (numCol) + { + case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); goto case 2; + case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); goto case 1; + case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; + } + } + } + + private static unsafe void ColumnMultiply(float* pMatCurrent, float* pSrcCurrent, float* pdst, float* pDstEnd, int crow) + { + Vector128 x01 = Sse.LoadVector128(pSrcCurrent); + // Replicate each slot of h01 (ABCD) into its own register. + x01 = Sse.Shuffle(x01, x01, 0x00); // A + int remainder = crow % 4; + float* pDstCurrent = pdst; + + while (pDstCurrent + 4 <= pDstEnd) + { + // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads + // (due to semantics of the legacy encoding). + // We don't need an assert, since the instruction will throw for unaligned inputs. 
+ float* pMatTemp = pMatCurrent; + Vector128 x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); + x02 = Sse.Add(x02, Sse.LoadVector128(pDstCurrent)); + + Sse.Store(pDstCurrent, x02); + pDstCurrent += 4; + pMatCurrent += 4; + } + + // falling through the case statements + switch (remainder) + { + case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); goto case 2; + case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); goto case 1; + case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; } + return; } // dst[i] += scale diff --git a/src/Microsoft.ML.CpuMath/Thunk.cs b/src/Microsoft.ML.CpuMath/Thunk.cs index 8ff725b54a..9505db8766 100644 --- a/src/Microsoft.ML.CpuMath/Thunk.cs +++ b/src/Microsoft.ML.CpuMath/Thunk.cs @@ -86,12 +86,6 @@ public static extern void MatMulP(/*const*/ float* pmat, /*const*/ int* pposSrc, [DllImport(NativePath), SuppressUnmanagedCodeSecurity] public static extern float Dist2(/*const*/ float* px, /*const*/ float* py, int c); - [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - public static extern void ZeroItemsU(float* pd, int c, /*const*/ int* pindices, int cindices); - - [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - public static extern void ZeroMatrixItemsCore(float* pd, int c, int ccol, int cfltRow, /*const*/ int* pindices, int cindices); - [DllImport(NativePath), SuppressUnmanagedCodeSecurity] public static extern void SdcaL1UpdateU(float primalUpdate, /*const*/ float* ps, float threshold, float* pd1, float* pd2, int c); diff --git a/src/Microsoft.ML.CpuMath/AlignedArray.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs similarity index 53% rename from src/Microsoft.ML.CpuMath/AlignedArray.cs rename to src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs index 0a631be0e9..f01da9fe28 100644 --- a/src/Microsoft.ML.CpuMath/AlignedArray.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs @@ -5,7 +5,7 @@ using 
Microsoft.ML.Runtime.Internal.CpuMath.Core; using System; -namespace Microsoft.ML.Runtime.Internal.CpuMath +namespace Microsoft.ML.Runtime.FactorizationMachine { using Float = System.Single; @@ -17,7 +17,6 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath /// /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). ///
- [BestFriend] internal sealed class AlignedArray { // Items includes "head" items filled with NaN, followed by _size entries, followed by "tail" @@ -110,60 +109,6 @@ public Float this[int index] } } - public void CopyTo(Span dst, int index, int count) - { - Contracts.Assert(0 <= count && count <= _size); - Contracts.Assert(dst != null); - Contracts.Assert(0 <= index && index <= dst.Length - count); - Items.AsSpan(_base, count).CopyTo(dst.Slice(index)); - } - - public void CopyTo(int start, Span dst, int index, int count) - { - Contracts.Assert(0 <= count); - Contracts.Assert(0 <= start && start <= _size - count); - Contracts.Assert(dst != null); - Contracts.Assert(0 <= index && index <= dst.Length - count); - Items.AsSpan(start + _base, count).CopyTo(dst.Slice(index)); - } - - public void CopyFrom(ReadOnlySpan src) - { - Contracts.Assert(src.Length <= _size); - src.CopyTo(Items.AsSpan(_base)); - } - - public void CopyFrom(int start, ReadOnlySpan src) - { - Contracts.Assert(0 <= start && start <= _size - src.Length); - src.CopyTo(Items.AsSpan(start + _base)); - } - - // Copies values from a sparse vector. - // valuesSrc contains only the non-zero entries. Those are copied into their logical positions in the dense array. - // rgposSrc contains the logical positions + offset of the non-zero entries in the dense array. - // rgposSrc runs parallel to the valuesSrc array. - public void CopyFrom(ReadOnlySpan rgposSrc, ReadOnlySpan valuesSrc, int posMin, int iposMin, int iposLim, bool zeroItems) - { - Contracts.Assert(rgposSrc != null); - Contracts.Assert(valuesSrc != null); - Contracts.Assert(rgposSrc.Length <= valuesSrc.Length); - Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); - - // Zeroing-out and setting the values in one-pass does not seem to give any perf benefit. - // So explicitly zeroing and then setting the values. 
- if (zeroItems) - ZeroItems(); - - for (int ipos = iposMin; ipos < iposLim; ++ipos) - { - Contracts.Assert(posMin <= rgposSrc[ipos]); - int iv = _base + rgposSrc[ipos] - posMin; - Contracts.Assert(iv < _size + _base); - Items[iv] = valuesSrc[ipos]; - } - } - public void CopyFrom(AlignedArray src) { Contracts.Assert(src != null); @@ -171,41 +116,5 @@ public void CopyFrom(AlignedArray src) Contracts.Assert(src._cbAlign == _cbAlign); Array.Copy(src.Items, src._base, Items, _base, _size); } - - public void ZeroItems() - { - Array.Clear(Items, _base, _size); - } - - public void ZeroItems(int[] rgposSrc, int posMin, int iposMin, int iposLim) - { - Contracts.Assert(rgposSrc != null); - Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); - Contracts.Assert(iposLim - iposMin <= _size); - - int ivCur = 0; - for (int ipos = iposMin; ipos < iposLim; ++ipos) - { - int ivNextNonZero = rgposSrc[ipos] - posMin; - Contracts.Assert(ivCur <= ivNextNonZero && ivNextNonZero < _size); - while (ivCur < ivNextNonZero) - Items[_base + ivCur++] = 0; - Contracts.Assert(ivCur == ivNextNonZero); - // Skip the non-zero element at ivNextNonZero. - ivCur++; - } - - while (ivCur < _size) - Items[_base + ivCur++] = 0; - } - - // REVIEW: This is hackish and slightly dangerous. Perhaps we should wrap this in an - // IDisposable that "locks" this, prohibiting GetBase from being called, while the buffer - // is "checked out". 
- public void GetRawBuffer(out Float[] items, out int offset) - { - items = Items; - offset = _base; - } } } \ No newline at end of file diff --git a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs index a473ccec29..88cbc536dd 100644 --- a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs +++ b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs @@ -174,8 +174,8 @@ public ModelInfo Info private Single[] _alpha; private Single[] _state; private readonly FixedSizeQueue _buffer; - private CpuAlignedVector _x; - private CpuAlignedVector _xSmooth; + private float[] _x; + private float[] _xSmooth; private int _windowSize; private readonly int _seriesLength; private readonly RankSelectionMethod _rankSelectionMethod; @@ -188,14 +188,14 @@ public ModelInfo Info private readonly IHost _host; - private CpuAlignedMatrixRow _wTrans; + private float[] _wTrans; private Single _observationNoiseVariance; private Single _observationNoiseMean; private Single _autoregressionNoiseVariance; private Single _autoregressionNoiseMean; private int _rank; - private CpuAlignedVector _y; + private float[] _y; private Single _nextPrediction; /// @@ -290,8 +290,8 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, int trainSi _alpha = new Single[windowSize - 1]; _state = new Single[windowSize - 1]; - _x = new CpuAlignedVector(windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(windowSize, SseUtils.CbAlign); + _x = new float[windowSize]; + _xSmooth = new float[windowSize]; ShouldComputeForecastIntervals = shouldComputeForecastIntervals; _observationNoiseVariance = 0; @@ -345,14 +345,14 @@ private AdaptiveSingularSpectrumSequenceModeler(AdaptiveSingularSpectrumSequence _state = new Single[_windowSize - 1]; Array.Copy(model._state, _state, _windowSize - 1); - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new 
CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _x = new float[_windowSize]; + _xSmooth = new float[_windowSize]; if (model._wTrans != null) { - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); - _wTrans.CopyFrom(model._wTrans); + _y = new float[_rank]; + _wTrans = new float[_rank * _windowSize]; + Array.Copy(model._wTrans, _wTrans, _rank * _windowSize); } } @@ -452,18 +452,16 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, ModelLoadCo { var tempArray = ctx.Reader.ReadFloatArray(); _host.CheckDecode(Utils.Size(tempArray) == _rank * _windowSize); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); - int i = 0; - _wTrans.CopyFrom(tempArray, ref i); + _wTrans = new float[_rank * _windowSize]; + Array.Copy(tempArray, _wTrans, tempArray.Length); tempArray = ctx.Reader.ReadFloatArray(); - i = 0; - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); - _y.CopyFrom(tempArray, ref i); + _y = new float[_rank]; + Array.Copy(tempArray, _y, tempArray.Length); } _buffer = TimeSeriesUtils.DeserializeFixedSizeQueueSingle(ctx.Reader, _host); - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _x = new float[_windowSize]; + _xSmooth = new float[_windowSize]; } public override void Save(ModelSaveContext ctx) @@ -527,14 +525,11 @@ public override void Save(ModelSaveContext ctx) if (_wTrans != null) { - // REVIEW: this may not be the most efficient way for serializing an aligned matrix. 
var tempArray = new Single[_rank * _windowSize]; - int iv = 0; - _wTrans.CopyTo(tempArray, ref iv); + Array.Copy(_wTrans, tempArray, _wTrans.Length); ctx.Writer.WriteSingleArray(tempArray); tempArray = new float[_rank]; - iv = 0; - _y.CopyTo(tempArray, ref iv); + Array.Copy(_y, tempArray, tempArray.Length); ctx.Writer.WriteSingleArray(tempArray); } @@ -1130,15 +1125,14 @@ internal override void Consume(ref Single input, bool updateModel = false) if (_wTrans == null) { - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _y = new float[_rank]; + _wTrans = new float[_rank * _windowSize]; Single[] vecs = new Single[_rank * _windowSize]; for (i = 0; i < _rank; ++i) vecs[(_windowSize + 1) * i] = 1; - i = 0; - _wTrans.CopyFrom(vecs, ref i); + Array.Copy(_wTrans, vecs, _rank * _windowSize); } // Forming vector x @@ -1157,10 +1151,10 @@ internal override void Consume(ref Single input, bool updateModel = false) _x[_windowSize - 1] = input; // Computing y: Eq. 
(11) in https://hal-institut-mines-telecom.archives-ouvertes.fr/hal-00479772/file/twocolumns.pdf - CpuAligenedMathUtils.MatTimesSrc(_wTrans, _x, _y); + CpuMathUtils.MatrixTimesSource(transpose: false, _wTrans, _x, _y, _y.Length); // Updating the state vector - CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); + CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); _nextPrediction = _autoregressionNoiseMean + _observationNoiseMean; for (i = 0; i < _windowSize - 2; ++i) @@ -1311,8 +1305,8 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) _maxRank = _windowSize / 2; _alpha = new Single[_windowSize - 1]; _state = new Single[_windowSize - 1]; - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _x = new float[_windowSize]; + _xSmooth = new float[_windowSize]; TrainCore(dataArray, originalSeriesLength); return; @@ -1349,12 +1343,11 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) } // Setting the the y vector - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _y = new float[_rank]; // Setting the weight matrix - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); - i = 0; - _wTrans.CopyFrom(leftSingularVecs, ref i); + _wTrans = new float[_rank * _windowSize]; + Array.Copy(leftSingularVecs, _wTrans, _wTrans.Length); // Setting alpha Single nu = 0; @@ -1364,7 +1357,7 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) nu += _y[i] * _y[i]; } - CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); + CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); for (i = 0; i < _windowSize - 1; ++i) _alpha[i] = _xSmooth[i] / (1 - nu); @@ -1409,8 +1402,8 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) _x[i - originalSeriesLength + _windowSize] = dataArray[i]; } - CpuAligenedMathUtils.MatTimesSrc(_wTrans, _x, _y); - 
CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); + CpuMathUtils.MatrixTimesSource(transpose: false, _wTrans, _x, _y, _y.Length); + CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); for (i = 1; i < _windowSize; ++i) { diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index 0984f40f97..f629201dfa 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -95,10 +95,10 @@ private sealed class TransformInfo public readonly int SrcDim; // the matrix containing the random fourier vectors - public readonly AlignedArray RndFourierVectors; + public readonly float[] RndFourierVectors; // the random rotations - public readonly AlignedArray RotationTerms; + public readonly float[] RotationTerms; private readonly IFourierDistributionSampler _matrixGenerator; private readonly bool _useSin; @@ -120,10 +120,10 @@ public TransformInfo(IHost host, ColumnInfo column, int d, float avgDist) var generator = column.Generator; _matrixGenerator = generator.CreateComponent(host, avgDist); - int roundedUpD = RoundUp(NewDim, _cfltAlign); - int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign); - RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment()); - RotationTerms = _useSin ? null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment()); + int roundedUpD = RoundToMultipleOf4(NewDim); + int roundedUpNumFeatures = RoundToMultipleOf4(SrcDim); + RndFourierVectors = new float[roundedUpD * roundedUpNumFeatures]; + RotationTerms = _useSin ? 
null : new float[roundedUpD]; InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD); } @@ -154,10 +154,10 @@ public TransformInfo(IHostEnvironment env, ModelLoadContext ctx, string director ctx.LoadModelOrNull(env, out _matrixGenerator, directoryName)); // initialize the transform matrix - int roundedUpD = RoundUp(NewDim, _cfltAlign); - int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign); - RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment()); - RotationTerms = _useSin ? null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment()); + int roundedUpD = RoundToMultipleOf4(NewDim); + int roundedUpNumFeatures = RoundToMultipleOf4(SrcDim); + RndFourierVectors = new float[roundedUpD * roundedUpNumFeatures]; + RotationTerms = _useSin ? null : new float[roundedUpD]; InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD); } @@ -225,8 +225,6 @@ private static VersionInfo GetVersionInfo() private readonly TransformInfo[] _transformInfos; - private static readonly int _cfltAlign = CpuMathUtils.GetVectorAlignment() / sizeof(float); - private static string TestColumnType(ColumnType type) { if (type.ItemType == NumberType.Float && type.IsKnownSizeVector) @@ -295,16 +293,11 @@ public RandomFourierFeaturizingTransformer(IHostEnvironment env, IDataView input } } - // Round cflt up to a multiple of cfltAlign. - private static int RoundUp(int cflt, int cfltAlign) + private static int RoundToMultipleOf4(int number) { - Contracts.Assert(0 < cflt); - // cfltAlign should be a power of two. - Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); - - // Determine the number of "blobs" of size cfltAlign. 
- int cblob = (cflt + cfltAlign - 1) / cfltAlign; - return cblob * cfltAlign; + Contracts.Assert(0 < number); + int multipleOf4 = (number + 3) / 4; + return multipleOf4 * 4; } private float[] GetAvgDistances(ColumnInfo[] columns, IDataView input) @@ -555,14 +548,14 @@ private ValueGetter> GetterFromVectorType(IRow input, int iinfo) var getSrc = input.GetGetter>(_srcCols[iinfo]); var src = default(VBuffer); - var featuresAligned = new AlignedArray(RoundUp(_srcTypes[iinfo].ValueCount, _cfltAlign), CpuMathUtils.GetVectorAlignment()); - var productAligned = new AlignedArray(RoundUp(_parent._transformInfos[iinfo].NewDim, _cfltAlign), CpuMathUtils.GetVectorAlignment()); + var features = new float[RoundToMultipleOf4(_srcTypes[iinfo].ValueCount)]; + var product = new float[RoundToMultipleOf4(_parent._transformInfos[iinfo].NewDim)]; return (ref VBuffer dst) => { getSrc(ref src); - TransformFeatures(in src, ref dst, _parent._transformInfos[iinfo], featuresAligned, productAligned); + TransformFeatures(in src, ref dst, _parent._transformInfos[iinfo], features, product); }; } @@ -572,8 +565,8 @@ private ValueGetter> GetterFromFloatType(IRow input, int iinfo) var getSrc = input.GetGetter(_srcCols[iinfo]); var src = default(float); - var featuresAligned = new AlignedArray(RoundUp(1, _cfltAlign), CpuMathUtils.GetVectorAlignment()); - var productAligned = new AlignedArray(RoundUp(_parent._transformInfos[iinfo].NewDim, _cfltAlign), CpuMathUtils.GetVectorAlignment()); + var featuresAligned = new float[4]; + var productAligned = new float[RoundToMultipleOf4(_parent._transformInfos[iinfo].NewDim)]; var oneDimensionalVector = new VBuffer(1, new float[] { 0 }); @@ -587,7 +580,7 @@ private ValueGetter> GetterFromFloatType(IRow input, int iinfo) } private void TransformFeatures(in VBuffer src, ref VBuffer dst, TransformInfo transformInfo, - AlignedArray featuresAligned, AlignedArray productAligned) + float[] features, float[] product) { Host.Check(src.Length == transformInfo.SrcDim, 
"column does not have the expected dimensionality."); @@ -606,9 +599,9 @@ private void TransformFeatures(in VBuffer src, ref VBuffer dst, Tr if (src.IsDense) { - featuresAligned.CopyFrom(src.GetValues()); - CpuMathUtils.MatrixTimesSource(false, transformInfo.RndFourierVectors, featuresAligned, productAligned, - transformInfo.NewDim); + src.GetValues().CopyTo(features); + CpuMathUtils.MatrixTimesSource(transpose: false, transformInfo.RndFourierVectors, features, product, + RoundToMultipleOf4(transformInfo.NewDim)); } else { @@ -616,15 +609,21 @@ private void TransformFeatures(in VBuffer src, ref VBuffer dst, Tr // no need to zero them out. var srcValues = src.GetValues(); var srcIndices = src.GetIndices(); - featuresAligned.CopyFrom(srcIndices, srcValues, 0, 0, srcValues.Length, zeroItems: false); - CpuMathUtils.MatrixTimesSource(transformInfo.RndFourierVectors, srcIndices, featuresAligned, 0, 0, - srcValues.Length, productAligned, transformInfo.NewDim); + + for (int i = 0; i < srcValues.Length; i++) + { + int iv = srcIndices[i]; + features[iv] = srcValues[i]; + } + + CpuMathUtils.MatrixTimesSource(transformInfo.RndFourierVectors, srcIndices, features, 0, 0, + srcValues.Length, product, RoundToMultipleOf4(transformInfo.NewDim)); } var dstEditor = VBufferEditor.Create(ref dst, newDstLength); for (int i = 0; i < transformInfo.NewDim; i++) { - var dotProduct = productAligned[i]; + var dotProduct = product[i]; if (transformInfo.RotationTerms != null) dstEditor.Values[i] = (float)MathUtils.Cos(dotProduct + transformInfo.RotationTerms[i]) * scale; else diff --git a/src/Native/CpuMathNative/Sse.cpp b/src/Native/CpuMathNative/Sse.cpp index 607af332d1..2240d630eb 100644 --- a/src/Native/CpuMathNative/Sse.cpp +++ b/src/Native/CpuMathNative/Sse.cpp @@ -57,15 +57,69 @@ const unsigned int TrailingAlignmentMask[16] = 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, }; +float RowMultiply(const float* pMatCurrent, const float* pSrcCurrent, const float* pSrcEnd, int ccol) +{ + __m128 
res0 = _mm_setzero_ps(); + int remainder = ccol % 4; + + while (pSrcCurrent + 4 <= pSrcEnd) + { + __m128 vector = _mm_loadu_ps(pSrcCurrent); + const float* pMatTemp = pMatCurrent; + __m128 x01 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp)); + res0 = _mm_add_ps(res0, x01); + + pSrcCurrent += 4; + pMatCurrent += 4; + } + + res0 = _mm_hadd_ps(res0, res0); + res0 = _mm_hadd_ps(res0, res0); + + float sum = _mm_cvtss_f32(res0); + + // falling through the case statements + switch (remainder) + { + case 3: + sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); + case 2: + sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); + case 1: + sum += *(pSrcCurrent) * *(pMatCurrent); + } + return sum; +} + // Multiply matrix times vector into vector. EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { + if (ccol < 4) + { + for (int i = 0 ; i < crow; i++) + { + float dotProduct = 0; + switch (ccol) + { + case 3: + dotProduct += pmat[i * ccol + 2] * psrc[2]; + case 2: + dotProduct += pmat[i * ccol + 1] * psrc[1]; + case 1: + dotProduct += pmat[i * ccol + 0] * psrc[0]; + } + pdst[i] = dotProduct; + } + return; + } + const float * pSrcEnd = psrc + ccol; const float * pDstEnd = pdst + crow; float* pDstCurrent = pdst; const float* pMatCurrent = pmat; + int numRows = crow; - while (pDstCurrent < pDstEnd) + while (pDstCurrent + 4 <= pDstEnd) { __m128 res0 = _mm_setzero_ps(); __m128 res1 = res0; @@ -79,10 +133,11 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (ccol % 4 != 0)) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) + remainder = length % 4; + while (pSrcCurrent + 4 <= pSrcEnd) { __m128 vector = _mm_loadu_ps(pSrcCurrent); @@ -161,32 +216,32 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, 
_In_ const float * psrc, _Inout // unaligned loads where we mask the input each time. remainder = length; } + } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); + pMatCurrent -= (4 - remainder); + pSrcCurrent -= (4 - remainder); - __m128 mask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + __m128 mask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); - __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); + const float* pMatTemp = pMatCurrent; + __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); + __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); - res0 = _mm_add_ps(res0, _mm_mul_ps(x01, vector)); - res1 = _mm_add_ps(res1, _mm_mul_ps(x11, vector)); - res2 = _mm_add_ps(res2, _mm_mul_ps(x21, vector)); - res3 = _mm_add_ps(res3, _mm_mul_ps(x31, vector)); + res0 = _mm_add_ps(res0, _mm_mul_ps(x01, vector)); + res1 = _mm_add_ps(res1, _mm_mul_ps(x11, vector)); + res2 = _mm_add_ps(res2, _mm_mul_ps(x21, vector)); + res3 = _mm_add_ps(res3, _mm_mul_ps(x31, 
vector)); - pMatCurrent += 4; - pSrcCurrent += 4; - } + pMatCurrent += 4; + pSrcCurrent += 4; } // Add up the entries of each, with the 4 results in res0 @@ -198,6 +253,19 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout pDstCurrent += 4; pMatCurrent += 3 * ccol; + numRows -= 4; + } + + // falling through the case statements + switch(numRows) + { + case 3: + *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); + case 2: + *(pDstCurrent + 1) = RowMultiply(pMatCurrent + 1 * ccol, psrc, pSrcEnd, ccol); + case 1: + *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); + break; } } @@ -356,21 +424,62 @@ EXPORT_API(void) MatMulP(_In_ const float * pmat, _In_ const int * pposSrc, _In_ } } +void ColumnMultiply(const float* pMatCurrent, const float* pSrcCurrent, float* pdst, const float* pDstEnd, int crow) +{ + __m128 x01 = _mm_loadu_ps(pSrcCurrent); + x01 = _mm_shuffle_ps(x01, x01, 0x00); + float* pDstCurrent = pdst; + int remainder = crow % 4; + + while (pDstCurrent + 4 <= pDstEnd) + { + const float* pMatTemp = pMatCurrent; + __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); + x02 = _mm_add_ps(x02, _mm_loadu_ps(pDstCurrent)); + + _mm_storeu_ps(pDstCurrent, x02); + + pDstCurrent += 4; + pMatCurrent += 4; + } + + // falling through the case statements + switch (remainder) + { + case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); + case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); + case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; + } + return; +} + EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { + if (crow < 4) + { + for (int i = 0 ; i < crow; i++) + { + float dotProduct = 0; + for (int j = 0; j < ccol; j++) + { + dotProduct += pmat[j * crow + i] * psrc[j]; + } + pdst[i] = dotProduct; + } + return; + } + const float * pSrcEnd = psrc + ccol; const float * pDstEnd = pdst + crow; const 
float* pMatCurrent = pmat; const float* pSrcCurrent = psrc; + int remainder = 0; + int numCol = ccol; if (pSrcCurrent < pSrcEnd) { __m128 x01 = _mm_loadu_ps(pSrcCurrent); - // Replicate each slot of x01 into its own register. - __m128 x11 = _mm_shuffle_ps(x01, x01, 0x55); - __m128 x21 = _mm_shuffle_ps(x01, x01, 0xAA); - __m128 x31 = _mm_shuffle_ps(x01, x01, 0xFF); x01 = _mm_shuffle_ps(x01, x01, 0x00); int length = crow; @@ -380,20 +489,14 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (crow % 4 != 0)) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) + remainder = crow % 4; + while (pDstCurrent + 4 <= pDstEnd) { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); _mm_storeu_ps(pDstCurrent, x02); pDstCurrent += 4; @@ -402,7 +505,6 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I } else { - int remainder = 0; if (misalignment != 0) { // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then @@ -415,22 +517,11 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I // We only align pMat since it has significantly more reads. 
const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); + x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); _mm_storeu_ps(pDstCurrent, x02); pMatCurrent += misalignment; @@ -447,13 +538,6 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_load_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_load_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_load_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_load_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); _mm_storeu_ps(pDstCurrent, x02); @@ -468,47 +552,36 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I // unaligned loads where we mask the input each time. 
remainder = length; } + } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); + __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); + const float* pMatTemp = pMatCurrent; + __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); + x02 = _mm_mul_ps(x01, x02); - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); + __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); + __m128 x3 = _mm_loadu_ps(pDstCurrent); + x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; - } + _mm_storeu_ps(pDstCurrent, x02); + pMatCurrent += 4; + 
pDstCurrent += 4; } - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + numCol -= 1; + pSrcCurrent += 1; } - - while (pSrcCurrent < pSrcEnd) + + while (pSrcCurrent + 4 <= pSrcEnd) { __m128 x01 = _mm_loadu_ps(pSrcCurrent); // Replicate each slot of x01 into its own register. @@ -524,9 +597,10 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0) + if ((misalignment & 3) != 0 || (crow % 4 != 0)) { - while (pDstCurrent < pDstEnd) + remainder = length % 4; + while (pDstCurrent + 4 <= pDstEnd) { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); @@ -547,7 +621,6 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I } else { - int remainder = 0; if (misalignment != 0) { misalignment >>= 2; @@ -573,7 +646,7 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); + x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); _mm_storeu_ps(pDstCurrent, x02); @@ -609,43 +682,52 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I { remainder = length; } + } - if (remainder != 0) - { - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); + if (remainder != 0) + { + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - 
__m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + const float* pMatTemp = pMatCurrent; + __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); + __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); + __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); + __m128 x3 = _mm_loadu_ps(pDstCurrent); + x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; - } + x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); + _mm_storeu_ps(pDstCurrent, x02); + pMatCurrent += 4; + pDstCurrent += 4; } + numCol -= 4; pMatCurrent += 3 * crow; pSrcCurrent += 4; } + + // falling through the case statements + switch (numCol) + { + case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); + case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); + case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; + } } // pd[i] += a @@ -1238,43 +1320,6 @@ EXPORT_API(float) Dist2(const float * px, 
const float * py, int c) return norm2; } -EXPORT_API(void) ZeroItemsU(_Inout_ float * pd, int c, _In_ const int * pindices, int cindices) -{ - DEBUG_ONLY(c); - for (int i = 0; i < cindices; ++i) - { - int iv = pindices[i]; - assert(0 <= iv && iv < c); - pd[iv] = 0; - } -} - -EXPORT_API(void) ZeroMatrixItemsCore(_Inout_ float * pd, int c, int ccol, int cfltRow, _In_ const int * pindices, int cindices) -{ - DEBUG_ONLY(c); - int ivLogMin = 0; - int ivLogLim = ccol; - int ivPhyMin = 0; - for (int i = 0; i < cindices; ++i) - { - int iv = pindices[i]; - assert(0 <= iv && iv < c); - - int col = iv - ivLogMin; - if ((unsigned int)col >= (unsigned int)ccol) - { - assert(ivLogMin > iv || iv >= ivLogLim); - int row = iv / ccol; - ivLogMin = row * ccol; - ivLogLim = ivLogMin + ccol; - ivPhyMin = row * cfltRow; - assert(ivLogMin <= iv && iv < ivLogLim); - col = iv - ivLogMin; - } - pd[ivPhyMin + col] = 0; - } -} - EXPORT_API(void) SdcaL1UpdateU(float primalUpdate, _In_ const float * ps, float threshold, _Inout_ float *pd1, _Inout_ float * pd2, int c) { const float * psLim = ps + c; diff --git a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs index 25996ec42c..5adb641b92 100644 --- a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs +++ b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs @@ -14,14 +14,14 @@ public class CpuMathUtilsUnitTests { private readonly float[][] _testArrays; private readonly int[] _testIndexArray; - private readonly AlignedArray[] _testMatrices; - private readonly AlignedArray[] _testSrcVectors; - private readonly AlignedArray[] _testDstVectors; - private readonly int _vectorAlignment = CpuMathUtils.GetVectorAlignment(); + private readonly float[][] _testMatrices; + private readonly float[][] _testSrcVectors; + private readonly float[][] _testDstVectors; private readonly FloatEqualityComparer _comparer; private readonly FloatEqualityComparerForMatMul 
_matMulComparer; private const float DefaultScale = 1.7f; + private const int DefaultSeed = 253421; public CpuMathUtilsUnitTests() { @@ -50,34 +50,19 @@ public CpuMathUtilsUnitTests() testMatrix2[i] = i + 1; } - AlignedArray testMatrixAligned1 = new AlignedArray(8 * 8, _vectorAlignment); - AlignedArray testMatrixAligned2 = new AlignedArray(8 * 16, _vectorAlignment); - testMatrixAligned1.CopyFrom(testMatrix1); - testMatrixAligned2.CopyFrom(testMatrix2); - - _testMatrices = new AlignedArray[] { testMatrixAligned1, testMatrixAligned2 }; + _testMatrices = new float[][] { testMatrix1, testMatrix2 }; // Padded source vectors whose dimensions are multiples of 8 float[] testSrcVector1 = new float[8] { 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f }; float[] testSrcVector2 = new float[16] { 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f, 16f }; - AlignedArray testSrcVectorAligned1 = new AlignedArray(8, _vectorAlignment); - AlignedArray testSrcVectorAligned2 = new AlignedArray(16, _vectorAlignment); - testSrcVectorAligned1.CopyFrom(testSrcVector1); - testSrcVectorAligned2.CopyFrom(testSrcVector2); - - _testSrcVectors = new AlignedArray[] { testSrcVectorAligned1, testSrcVectorAligned2 }; + _testSrcVectors = new float[][] { testSrcVector1, testSrcVector2 }; // Padded destination vectors whose dimensions are multiples of 8 float[] testDstVector1 = new float[8] { 0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f }; float[] testDstVector2 = new float[16] { 0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f }; - AlignedArray testDstVectorAligned1 = new AlignedArray(8, _vectorAlignment); - AlignedArray testDstVectorAligned2 = new AlignedArray(16, _vectorAlignment); - testDstVectorAligned1.CopyFrom(testDstVector1); - testDstVectorAligned2.CopyFrom(testDstVector2); - - _testDstVectors = new AlignedArray[] { testDstVectorAligned1, testDstVectorAligned2 }; + _testDstVectors = new float[][] { testDstVector1, testDstVector2 }; } [Theory] @@ -86,29 +71,125 @@ public 
CpuMathUtilsUnitTests() [InlineData(1, 0, 1, new float[] { 204f, 492f, 780f, 1068f, 1356f, 1644f, 1932f, 2220f, 2508f, 2796f, 3084f, 3372f, 3660f, 3948f, 4236f, 4524f })] public void MatMulTest(int matTest, int srcTest, int dstTest, float[] expected) { - AlignedArray mat = _testMatrices[matTest]; - AlignedArray src = _testSrcVectors[srcTest]; - AlignedArray dst = _testDstVectors[dstTest]; + float[] mat = _testMatrices[matTest]; + float[] src = _testSrcVectors[srcTest]; + float[] dst = _testDstVectors[dstTest]; - CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Size); - float[] actual = new float[dst.Size]; - dst.CopyTo(actual, 0, dst.Size); + CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Length); + float[] actual = new float[dst.Length]; + Array.Copy(dst, actual, dst.Length); Assert.Equal(expected, actual, _matMulComparer); } + [Theory] + [InlineData(10, 5)] + [InlineData(10, 8)] + [InlineData(10, 11)] + [InlineData(11, 8)] + [InlineData(8, 23)] + [InlineData(2, 8)] + [InlineData(2, 9)] + [InlineData(2, 3)] + [InlineData(2, 5)] + [InlineData(4, 5)] + [InlineData(4, 7)] + [InlineData(4, 9)] + [InlineData(5, 7)] + [InlineData(5, 9)] + private void MatMulAnyDimensionTest(int col, int row) + { + Random rand = new Random(DefaultSeed); + float[] mat = new float[col * row]; + for (int i = 0; i < col * row; i++) + { + mat[i] = rand.Next(-10, 10); + } + + float[] src = new float[col]; + for (int i = 0; i < col; i++) + { + src[i] = rand.Next(-10, 10); + } + + float[] dst = new float[row]; + float[] expected = new float[row]; + + for (int i = 0; i < row; i++) + { + float dotProduct = 0; + for (int j = 0; j < src.Length; j++) + { + dotProduct += mat[i * src.Length + j] * src[j]; + } + + expected[i] = dotProduct; + } + + CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Length); + Assert.Equal(expected, dst, _matMulComparer); + } + + [Theory] + [InlineData(10, 5)] + [InlineData(10, 8)] + [InlineData(10, 11)] + [InlineData(11, 8)] + [InlineData(8, 23)] 
+ [InlineData(2, 8)] + [InlineData(2, 9)] + [InlineData(2, 3)] + [InlineData(2, 5)] + [InlineData(4, 5)] + [InlineData(4, 7)] + [InlineData(4, 9)] + [InlineData(5, 7)] + [InlineData(5, 9)] + private void MatMulTranAnyDimensionTest(int col, int row) + { + float[] mat = new float[col * row]; + Random rand = new Random(DefaultSeed); + for (int i = 0; i < col * row; i++) + { + mat[i] = rand.Next(0, 10); + } + + float[] src = new float[row]; + for (int i = 0; i < row; i++) + { + src[i] = rand.Next(0, 10); + } + + float[] dst = new float[col]; + float[] expected = new float[col]; + + for (int i = 0; i < dst.Length; i++) + { + float dotProduct = 0; + for (int j = 0; j < row; j++) + { + dotProduct += mat[j * dst.Length + i] * src[j]; + } + + expected[i] = dotProduct; + } + + CpuMathUtils.MatrixTimesSource(true, mat, src, dst, row); + Assert.Equal(expected, dst, _matMulComparer); + } + [Theory] [InlineData(0, 0, 0, new float[] { 70.56001f, -85.68f, -351.36f, 498.24f, -3829.32f, -969.48f, 1168.2f, 118.44f })] [InlineData(1, 0, 1, new float[] { 2724f, 2760f, 2796f, 2832f, 2868f, 2904f, 2940f, 2976f, 3012f, 3048f, 3084f, 3120f, 3156f, 3192f, 3228f, 3264f })] [InlineData(1, 1, 0, new float[] { 11016f, 11152f, 11288f, 11424f, 11560f, 11696f, 11832f, 11968f })] public void MatMulTranTest(int matTest, int srcTest, int dstTest, float[] expected) { - AlignedArray mat = _testMatrices[matTest]; - AlignedArray src = _testSrcVectors[srcTest]; - AlignedArray dst = _testDstVectors[dstTest]; + float[] mat = _testMatrices[matTest]; + float[] src = _testSrcVectors[srcTest]; + float[] dst = _testDstVectors[dstTest]; - CpuMathUtils.MatrixTimesSource(true, mat, src, dst, src.Size); - float[] actual = new float[dst.Size]; - dst.CopyTo(actual, 0, dst.Size); + CpuMathUtils.MatrixTimesSource(true, mat, src, dst, src.Length); + float[] actual = new float[dst.Length]; + Array.Copy(dst, actual, dst.Length); Assert.Equal(expected, actual, _matMulComparer); } @@ -118,14 +199,14 @@ public void 
MatMulTranTest(int matTest, int srcTest, int dstTest, float[] expect [InlineData(1, 0, 1, new float[] { 95f, 231f, 367f, 503f, 639f, 775f, 911f, 1047f, 1183f, 1319f, 1455f, 1591f, 1727f, 1863f, 1999f, 2135f })] public void MatTimesSrcSparseTest(int matTest, int srcTest, int dstTest, float[] expected) { - AlignedArray mat = _testMatrices[matTest]; - AlignedArray src = _testSrcVectors[srcTest]; - AlignedArray dst = _testDstVectors[dstTest]; + float[] mat = _testMatrices[matTest]; + float[] src = _testSrcVectors[srcTest]; + float[] dst = _testDstVectors[dstTest]; int[] idx = _testIndexArray; - CpuMathUtils.MatrixTimesSource(mat, idx, src, 0, 0, (srcTest == 0) ? 4 : 9, dst, dst.Size); - float[] actual = new float[dst.Size]; - dst.CopyTo(actual, 0, dst.Size); + CpuMathUtils.MatrixTimesSource(mat, idx, src, 0, 0, (srcTest == 0) ? 4 : 9, dst, dst.Length); + float[] actual = new float[dst.Length]; + Array.Copy(dst, actual, dst.Length); Assert.Equal(expected, actual, _matMulComparer); } @@ -467,34 +548,6 @@ public void Dist2Test(int test, float expected) Assert.Equal(expected, actual, 0); } - [Theory] - [InlineData(0, new int[] { 0, 2, 5, 6 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f })] - [InlineData(1, new int[] { 0, 2, 5, 6, 8, 11, 12, 13, 14 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f, 0f, 10f, 11f, 0f, 0f, 0f, 0f, 16f })] - public void ZeroItemsUTest(int test, int[] idx, float[] expected) - { - AlignedArray src = new AlignedArray(8 + 8 * test, _vectorAlignment); - src.CopyFrom(_testSrcVectors[test]); - - CpuMathUtils.ZeroMatrixItems(src, src.Size, src.Size, idx); - float[] actual = new float[src.Size]; - src.CopyTo(actual, 0, src.Size); - Assert.Equal(expected, actual, _comparer); - } - - [Theory] - [InlineData(0, new int[] { 0, 2, 5 }, new float[] { 0f, 2f, 0f, 4f, 5f, 6f, 0f, 8f })] - [InlineData(1, new int[] { 0, 2, 5, 6, 8, 11, 12, 13 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f, 9f, 0f, 11f, 12f, 0f, 0f, 0f, 16f })] - public void 
ZeroMatrixItemsCoreTest(int test, int[] idx, float[] expected) - { - AlignedArray src = new AlignedArray(8 + 8 * test, _vectorAlignment); - src.CopyFrom(_testSrcVectors[test]); - - CpuMathUtils.ZeroMatrixItems(src, src.Size / 2 - 1, src.Size / 2, idx); - float[] actual = new float[src.Size]; - src.CopyTo(actual, 0, src.Size); - Assert.Equal(expected, actual, _comparer); - } - [Theory] [InlineData(0)] [InlineData(1)] diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index e2bc679830..b0aa994f27 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -17,6 +17,7 @@ using Microsoft.ML.Runtime.Model; using Microsoft.ML.TestFramework; using Xunit; +using Xunit.Abstractions; namespace Microsoft.ML.Runtime.RunTests { @@ -774,7 +775,6 @@ protected bool SaveLoadTransposed(IDataView view, IHostEnvironment env, string s public abstract partial class TestDataViewBase : BaseTestBaseline { - public class SentimentData { [ColumnName("Label")] @@ -934,7 +934,7 @@ protected bool CheckSameValues(IRowCursor curs1, IRowCursor curs2, bool exactTyp var comp = comps[col]; if (comp != null && !comp()) { - Fail("Different values in column {0} of row {1}", col, curs1.Position); + Fail($"Different values in column {col} of row {curs1.Position}"); return Failed(); } if (idComp != null && !idComp()) @@ -1158,12 +1158,10 @@ protected Func GetColumnComparer(IRow r1, IRow r2, int col, ColumnType typ throw Contracts.Except("Unknown type in GetColumnComparer: '{0}'", type); } - private const Double DoubleEps = 1e-9; - - private static bool EqualWithEpsDouble(Double x, Double y) + private bool EqualWithEpsDouble(Double x, Double y) { // bitwise comparison is needed because Abs(Inf-Inf) and Abs(NaN-NaN) are not 0s. 
- return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || Math.Abs(x - y) < DoubleEps; + return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || CompareNumbersWithTolerance(x, y, null, 3); } private const float SingleEps = 1e-6f; diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs index 39340d225b..90428934ba 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -72,7 +72,7 @@ public void SavePipeSsaSpike() Done(); } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] public void SavePipeSsaSpikeNoData() { string pathData = DeleteOutputPath("SavePipe", "SsaSpikeNoData.txt"); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs index 0bd4604daa..274a229630 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs @@ -87,7 +87,7 @@ public void ChangeDetection() } } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] public void ChangePointDetectionWithSeasonality() { var env = new MLContext(conc: 1); @@ -130,14 +130,14 @@ public void ChangePointDetectionWithSeasonality() while (enumerator.MoveNext() && index < expectedValues.Count) { row = enumerator.Current; - Assert.Equal(expectedValues[index++], row.Change[0], precision: 7); // Alert - Assert.Equal(expectedValues[index++], row.Change[1], precision: 7); // Raw score - Assert.Equal(expectedValues[index++], row.Change[2], precision: 7); // P-Value score - Assert.Equal(expectedValues[index++], row.Change[3], precision: 7); // Martingale score + Assert.Equal(expectedValues[index++], row.Change[0], precision: 5); // Alert + 
Assert.Equal(expectedValues[index++], row.Change[1], precision: 5); // Raw score + Assert.Equal(expectedValues[index++], row.Change[2], precision: 5); // P-Value score + Assert.Equal(expectedValues[index++], row.Change[3], precision: 5); // Martingale score } } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] + [Fact] public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() { const int ChangeHistorySize = 10; @@ -190,10 +190,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() var engine2 = model2.CreateTimeSeriesPredictionFunction(ml); var prediction2 = engine2.Predict(new Data(1)); //Raw score after first input. - Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 5); // Raw score + Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 4); // Raw score prediction2 = engine2.Predict(new Data(1)); //Raw score after second input. - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 4); // Raw score //Even though time series column is not requested it will // pass the observation through time series transform and update the state with the first input. @@ -210,10 +210,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() //and raw score should match the raw score obtained by passing the two input in the first model. 
var engine3 = model3.CreateTimeSeriesPredictionFunction(ml); var prediction3 = engine3.Predict(new Data(1)); - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 4); // Raw score } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] + [Fact] public void ChangePointDetectionWithSeasonalityPredictionEngine() { const int ChangeHistorySize = 10; @@ -264,7 +264,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() //Model 1: Prediction #2 prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 4); // Raw score Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 7); // Martingale score @@ -277,7 +277,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() engine = model2.CreateTimeSeriesPredictionFunction(ml); prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 4); // Raw score Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 5); // Martingale score } diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index a7892701d7..0e6a5063bd 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -41,7 +41,7 
@@ public TimeSeriesEstimatorTests(ITestOutputHelper output) : base(output) { } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // 32bit and netcore3.0 output differs from Baseline void TestSsaChangePointEstimator() { int Confidence = 95; @@ -75,7 +75,7 @@ void TestSsaChangePointEstimator() Done(); } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] void TestSsaSpikeEstimator() { int Confidence = 95; From 521acad830408b43175821575f5f774629aeafe7 Mon Sep 17 00:00:00 2001 From: Tom Finley Date: Tue, 4 Dec 2018 13:08:39 -0800 Subject: [PATCH 023/100] Cleanup of IRowCursor and related interfaces. IRowCursor/IRow are now classes. (#1814) * Slot cursor now an abstract class and not an ICursor. * Members of ICounted moved to IRow. * Members of ICursor moved to IRowCursor. * Remove ICounted and ICursor. * Remove IRowCursor from places that previously had to accomodate different sorts of cursors. * Add benchmark for cache data view cursor/seeker access. * Rename private RowCursor implementations Cursor. * IRowCursor is now an abstract class. * Rename IRowCursor to RowCursor, as well as a few other similar classes. * IRow is now an abstract class. * Rename private IRow implementations named Row, RowImpl. * Rename IRow to Row, and IStatefulRow to StatefulRow. 
--- .../CustomMappingTransformer.cs | 4 +- .../DataViewConstructionUtils.cs | 80 ++--- .../StatefulFilterTransform.cs | 18 +- src/Microsoft.ML.Api/TypedCursor.cs | 123 ++++--- src/Microsoft.ML.Core/Data/ICursor.cs | 106 ------ src/Microsoft.ML.Core/Data/IDataView.cs | 109 +++++- .../Data/ISchemaBindableMapper.cs | 18 +- .../Data/LinkedRootCursorBase.cs | 19 +- .../Data/LinkedRowFilterCursorBase.cs | 2 +- .../Data/LinkedRowRootCursorBase.cs | 14 +- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 18 +- src/Microsoft.ML.Core/Data/RootCursorBase.cs | 41 ++- src/Microsoft.ML.Core/Data/Schema.cs | 2 +- .../Data/SynchronizedCursorBase.cs | 39 +-- src/Microsoft.ML.Data/Data/DataViewUtils.cs | 69 ++-- src/Microsoft.ML.Data/Data/IRowSeekable.cs | 12 +- .../Data/ITransposeDataView.cs | 31 +- src/Microsoft.ML.Data/Data/RowCursorUtils.cs | 76 ++--- src/Microsoft.ML.Data/Data/SlotCursor.cs | 142 ++++++++ src/Microsoft.ML.Data/DataDebuggerPreview.cs | 2 +- .../DataLoadSave/Binary/BinaryLoader.cs | 16 +- .../DataLoadSave/Binary/BinarySaver.cs | 12 +- .../DataLoadSave/CompositeDataLoader.cs | 6 +- .../DataLoadSave/PartitionedFileLoader.cs | 16 +- .../DataLoadSave/Text/TextLoader.cs | 4 +- .../DataLoadSave/Text/TextLoaderCursor.cs | 18 +- .../DataLoadSave/Text/TextSaver.cs | 8 +- .../DataLoadSave/Transpose/TransposeLoader.cs | 85 ++--- .../DataView/AppendRowsDataView.cs | 22 +- .../DataView/ArrayDataViewBuilder.cs | 18 +- .../DataView/CacheDataView.cs | 170 ++++++---- .../DataView/CompositeRowToRowMapper.cs | 22 +- .../DataView/EmptyDataView.cs | 12 +- .../DataView/LambdaColumnMapper.cs | 2 +- .../DataView/LambdaFilter.cs | 14 +- .../DataView/OpaqueDataView.cs | 4 +- .../DataView/RowToRowMapperTransform.cs | 44 +-- src/Microsoft.ML.Data/DataView/SimpleRow.cs | 25 +- src/Microsoft.ML.Data/DataView/Transposer.cs | 309 ++++++------------ src/Microsoft.ML.Data/DataView/ZipDataView.cs | 22 +- .../Dirty/ChooseColumnsByIndexTransform.cs | 20 +- .../Dirty/PredictorInterfaces.cs | 4 +- 
.../EntryPoints/TransformModel.cs | 2 +- .../Evaluators/AnomalyDetectionEvaluator.cs | 2 +- .../Evaluators/BinaryClassifierEvaluator.cs | 4 +- .../Evaluators/ClusteringEvaluator.cs | 4 +- .../Evaluators/EvaluatorBase.cs | 10 +- .../Evaluators/EvaluatorUtils.cs | 2 +- .../Metrics/BinaryClassificationMetrics.cs | 4 +- .../CalibratedBinaryClassificationMetrics.cs | 2 +- .../Evaluators/Metrics/ClusteringMetrics.cs | 2 +- .../Metrics/MultiClassClassifierMetrics.cs | 2 +- .../Evaluators/Metrics/RankerMetrics.cs | 4 +- .../Evaluators/Metrics/RegressionMetrics.cs | 2 +- .../MultiClassClassifierEvaluator.cs | 4 +- .../MultiOutputRegressionEvaluator.cs | 4 +- .../Evaluators/QuantileRegressionEvaluator.cs | 2 +- .../Evaluators/RankerEvaluator.cs | 12 +- .../Evaluators/RegressionEvaluator.cs | 2 +- .../Evaluators/RegressionEvaluatorBase.cs | 2 +- .../Prediction/Calibrator.cs | 6 +- .../Scorers/BinaryClassifierScorer.cs | 2 +- .../Scorers/ClusteringScorer.cs | 2 +- ...FeatureContributionCalculationTransform.cs | 16 +- .../Scorers/GenericScorer.cs | 2 +- .../Scorers/MultiClassClassifierScorer.cs | 22 +- .../Scorers/PredictedLabelScorerBase.cs | 6 +- .../Scorers/RowToRowScorerBase.cs | 32 +- .../Scorers/SchemaBindablePredictorWrapper.cs | 16 +- .../Training/TrainerUtils.cs | 48 +-- .../Transforms/BindingsWrappedRowCursor.cs | 10 +- .../ColumnConcatenatingTransformer.cs | 8 +- .../Transforms/ColumnCopying.cs | 4 +- .../Transforms/ColumnSelecting.cs | 44 +-- .../Transforms/DropSlotsTransform.cs | 18 +- .../Transforms/GenerateNumberTransform.cs | 24 +- src/Microsoft.ML.Data/Transforms/Hashing.cs | 30 +- .../Transforms/KeyToValue.cs | 6 +- .../Transforms/KeyToVector.cs | 8 +- .../Transforms/LabelConvertTransform.cs | 18 +- .../Transforms/LabelIndicatorTransform.cs | 4 +- src/Microsoft.ML.Data/Transforms/NAFilter.cs | 34 +- .../Transforms/NopTransform.cs | 6 +- .../Transforms/NormalizeColumn.cs | 32 +- .../Transforms/NormalizeColumnDbl.cs | 20 +- .../Transforms/NormalizeColumnSng.cs | 
20 +- .../Transforms/NormalizeUtils.cs | 2 +- .../Transforms/Normalizer.cs | 12 +- .../Transforms/PerGroupTransformBase.cs | 36 +- .../Transforms/RangeFilter.cs | 24 +- .../Transforms/RowShufflingTransformer.cs | 31 +- .../Transforms/RowToRowTransformerBase.cs | 4 +- .../Transforms/SkipTakeFilter.cs | 12 +- .../Transforms/TransformBase.cs | 66 ++-- .../Transforms/TypeConverting.cs | 2 +- .../ValueToKeyMappingTransformer.cs | 4 +- .../ValueToKeyMappingTransformerImpl.cs | 10 +- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 10 +- src/Microsoft.ML.FastTree/FastTree.cs | 18 +- .../TreeEnsembleFeaturizer.cs | 8 +- .../VectorWhitening.cs | 4 +- .../ImageGrayscaleTransform.cs | 2 +- .../ImageLoaderTransform.cs | 2 +- .../ImagePixelExtractorTransform.cs | 4 +- .../ImageResizerTransform.cs | 2 +- .../VectorToImageTransform.cs | 4 +- .../Models/ConfusionMatrix.cs | 2 +- .../OnnxTransform.cs | 14 +- src/Microsoft.ML.PCA/PcaTransform.cs | 2 +- src/Microsoft.ML.Parquet/ParquetLoader.cs | 14 +- .../MatrixFactorizationPredictor.cs | 4 +- .../SafeTrainingAndModelBuffer.cs | 8 +- .../FieldAwareFactorizationMachineUtils.cs | 2 +- .../Standard/LinearPredictor.cs | 6 +- .../MulticlassLogisticRegression.cs | 4 +- .../TensorflowTransform.cs | 14 +- .../PredictionFunction.cs | 40 +-- ...SequentialAnomalyDetectionTransformBase.cs | 8 +- .../SequentialTransformBase.cs | 16 +- .../SequentialTransformerBase.cs | 82 ++--- .../BootstrapSamplingTransformer.cs | 20 +- .../CountFeatureSelection.cs | 12 +- src/Microsoft.ML.Transforms/GcnTransform.cs | 2 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 32 +- .../HashJoiningTransform.cs | 14 +- .../KeyToVectorMapping.cs | 6 +- .../MissingValueDroppingTransformer.cs | 6 +- .../MissingValueIndicatorTransform.cs | 2 +- .../MissingValueIndicatorTransformer.cs | 10 +- .../MissingValueReplacing.cs | 10 +- .../MissingValueReplacingUtils.cs | 38 +-- .../OptionalColumnTransform.cs | 30 +- .../ProduceIdTransform.cs | 20 +- .../RandomFourierFeaturizing.cs 
| 6 +- .../TermLookupTransformer.cs | 6 +- .../Text/LdaTransform.cs | 4 +- .../Text/NgramHashingTransformer.cs | 36 +- .../Text/NgramTransform.cs | 2 +- .../Text/StopWordsRemovingTransformer.cs | 4 +- .../Text/TextNormalizing.cs | 6 +- .../Text/TokenizingByCharacters.cs | 6 +- .../Text/WordEmbeddingsExtractor.cs | 4 +- .../Text/WordTokenizing.cs | 6 +- .../UngroupTransform.cs | 16 +- .../CacheDataViewBench.cs | 97 ++++++ test/Microsoft.ML.Benchmarks/HashBench.cs | 30 +- .../UnitTests/CoreBaseTestClass.cs | 14 +- .../TestPredictors.cs | 4 +- .../DataPipe/TestDataPipeBase.cs | 14 +- 149 files changed, 1665 insertions(+), 1547 deletions(-) delete mode 100644 src/Microsoft.ML.Core/Data/ICursor.cs create mode 100644 src/Microsoft.ML.Data/Data/SlotCursor.cs create mode 100644 test/Microsoft.ML.Benchmarks/CacheDataViewBench.cs diff --git a/src/Microsoft.ML.Api/CustomMappingTransformer.cs b/src/Microsoft.ML.Api/CustomMappingTransformer.cs index 3e4102aad3..963996d099 100644 --- a/src/Microsoft.ML.Api/CustomMappingTransformer.cs +++ b/src/Microsoft.ML.Api/CustomMappingTransformer.cs @@ -111,7 +111,7 @@ public Mapper(CustomMappingTransformer parent, Schema inputSchema) _typedSrc = TypedCursorable.Create(_host, emptyDataView, false, _parent.InputSchemaDefinition); } - public Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer) + public Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) { disposer = null; // If no outputs are active, we short-circuit to empty array of getters. 
@@ -147,7 +147,7 @@ public Delegate[] CreateGetters(IRow input, Func activeOutput, out Ac return result; } - private Delegate GetDstGetter(IRow input, int colIndex, Action refreshAction) + private Delegate GetDstGetter(Row input, int colIndex, Action refreshAction) { var getter = input.GetGetter(colIndex); ValueGetter combinedGetter = (ref T dst) => diff --git a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs index 1d05d67f3b..d68882f805 100644 --- a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs @@ -125,22 +125,20 @@ protected override TRow GetCurrentRowObject() } /// - /// A row that consumes items of type , and provides an . This + /// A row that consumes items of type , and provides an . This /// is in contrast to which consumes a data view row and publishes them as the output type. /// /// The input data type. - public abstract class InputRowBase : IRow + public abstract class InputRowBase : Row where TRow : class { private readonly int _colCount; private readonly Delegate[] _getters; protected readonly IHost Host; - public long Batch => 0; + public override long Batch => 0; - public Schema Schema { get; } - - public abstract long Position { get; } + public override Schema Schema { get; } public InputRowBase(IHostEnvironment env, Schema schema, InternalSchemaDefinition schemaDef, Delegate[] peeks, Func predicate) { @@ -332,7 +330,7 @@ private Delegate CreateKeyGetterDelegate(Delegate peekDel, ColumnType colT protected abstract TRow GetCurrentRowObject(); - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { CheckColumnInRange(col); return _getters[col] != null; @@ -344,7 +342,7 @@ private void CheckColumnInRange(int columnIndex) throw Host.Except("Column index must be between 0 and {0}", _colCount); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { if (!IsColumnActive(col)) 
throw Host.Except("Column {0} is not active in the cursor", col); @@ -355,8 +353,6 @@ public ValueGetter GetGetter(int col) throw Host.Except("Invalid TValue in GetGetter for column #{0}: '{1}'", col, typeof(TValue)); return fn; } - - public abstract ValueGetter GetIdGetter(); } /// @@ -400,16 +396,37 @@ protected DataViewBase(IHostEnvironment env, string name, InternalSchemaDefiniti public abstract long? GetRowCount(); - public abstract IRowCursor GetRowCursor(Func predicate, Random rand = null); + public abstract RowCursor GetRowCursor(Func predicate, Random rand = null); - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; return new[] { GetRowCursor(predicate, rand) }; } - public abstract class DataViewCursorBase : InputRowBase, IRowCursor + public sealed class WrappedCursor : RowCursor + { + private readonly DataViewCursorBase _toWrap; + + public WrappedCursor(DataViewCursorBase toWrap) => _toWrap = toWrap; + + public override CursorState State => _toWrap.State; + public override long Position => _toWrap.Position; + public override long Batch => _toWrap.Batch; + public override Schema Schema => _toWrap.Schema; + + public override void Dispose() => _toWrap.Dispose(); + public override ValueGetter GetGetter(int col) + => _toWrap.GetGetter(col); + public override ValueGetter GetIdGetter() => _toWrap.GetIdGetter(); + public override RowCursor GetRootCursor() => this; + public override bool IsColumnActive(int col) => _toWrap.IsColumnActive(col); + public override bool MoveMany(long count) => _toWrap.MoveMany(count); + public override bool MoveNext() => _toWrap.MoveNext(); + } + + public abstract class DataViewCursorBase : InputRowBase { // There is no real concept of multiple inheritance and for various reasons it was better to // descend from the row class as opposed to wrapping 
it, so much of this class is regrettably @@ -524,14 +541,6 @@ protected virtual bool MoveManyCore(long count) /// . /// protected abstract bool MoveNextCore(); - - /// - /// Returns a cursor that can be used for invoking , , - /// , and , with results identical to calling - /// those on this cursor. Generally, if the root cursor is not the same as this cursor, using - /// the root cursor will be faster. - /// - public ICursor GetRootCursor() => this; } } @@ -561,10 +570,10 @@ public override bool CanShuffle return _data.Count; } - public override IRowCursor GetRowCursor(Func predicate, Random rand = null) + public override RowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); - return new Cursor(Host, "ListDataView", this, predicate, rand); + return new WrappedCursor(new Cursor(Host, "ListDataView", this, predicate, rand)); } private sealed class Cursor : DataViewCursorBase @@ -660,9 +669,9 @@ public override bool CanShuffle return (_data as ICollection)?.Count; } - public override IRowCursor GetRowCursor(Func predicate, Random rand = null) + public override RowCursor GetRowCursor(Func predicate, Random rand = null) { - return new Cursor(Host, this, predicate); + return new WrappedCursor (new Cursor(Host, this, predicate)); } /// @@ -677,7 +686,7 @@ public void SetData(IEnumerable data) _data = data; } - private class Cursor : DataViewCursorBase + private sealed class Cursor : DataViewCursorBase { private readonly IEnumerator _enumerator; private TRow _currentRow; @@ -731,15 +740,9 @@ public SingleRowLoopDataView(IHostEnvironment env, InternalSchemaDefinition sche { } - public override bool CanShuffle - { - get { return false; } - } + public override bool CanShuffle => false; - public override long? GetRowCount() - { - return null; - } + public override long? 
GetRowCount() => null; public void SetCurrentRowObject(TRow value) { @@ -747,10 +750,10 @@ public void SetCurrentRowObject(TRow value) _current = value; } - public override IRowCursor GetRowCursor(Func predicate, Random rand = null) + public override RowCursor GetRowCursor(Func predicate, Random rand = null) { Contracts.Assert(_current != null, "The current object must be set prior to cursoring"); - return new Cursor(Host, this, predicate); + return new WrappedCursor (new Cursor(Host, this, predicate)); } private sealed class Cursor : DataViewCursorBase @@ -773,10 +776,7 @@ public override ValueGetter GetIdGetter() }; } - protected override TRow GetCurrentRowObject() - { - return _currentRow; - } + protected override TRow GetCurrentRowObject() => _currentRow; protected override bool MoveNextCore() { diff --git a/src/Microsoft.ML.Api/StatefulFilterTransform.cs b/src/Microsoft.ML.Api/StatefulFilterTransform.cs index a8341639ff..f07ef7daad 100644 --- a/src/Microsoft.ML.Api/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Api/StatefulFilterTransform.cs @@ -108,7 +108,7 @@ private StatefulFilterTransform(IHostEnvironment env, StatefulFilterTransform predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -120,7 +120,7 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) return new Cursor(this, input, predicate); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Contracts.CheckValue(predicate, nameof(predicate)); Contracts.CheckParam(n >= 0, nameof(n)); @@ -144,13 +144,13 @@ public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) return new StatefulFilterTransform(env, this, newSource); } - private sealed class Cursor 
: RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly StatefulFilterTransform _parent; - private readonly IRowCursor _input; + private readonly RowCursor _input; // This is used to serve getters for the columns we produce. - private readonly IRow _appendedRow; + private readonly Row _appendedRow; private readonly TSrc _src; private readonly TDst _dst; @@ -163,7 +163,7 @@ public override long Batch get { return _input.Batch; } } - public Cursor(StatefulFilterTransform parent, IRowCursor input, Func predicate) + public Cursor(StatefulFilterTransform parent, RowCursor input, Func predicate) : base(parent.Host) { Ch.AssertValue(input); @@ -240,9 +240,9 @@ private void RunLambda(out bool isRowAccepted) isRowAccepted = _parent._filterFunc(_src, _dst, _state); } - public Schema Schema => _parent._bindings.Schema; + public override Schema Schema => _parent._bindings.Schema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Contracts.CheckParam(0 <= col && col < Schema.ColumnCount, nameof(col)); bool isSrc; @@ -252,7 +252,7 @@ public bool IsColumnActive(int col) return _appendedRow.IsColumnActive(iCol); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Contracts.CheckParam(0 <= col && col < Schema.ColumnCount, nameof(col)); bool isSrc; diff --git a/src/Microsoft.ML.Api/TypedCursor.cs b/src/Microsoft.ML.Api/TypedCursor.cs index c13d277305..4607697662 100644 --- a/src/Microsoft.ML.Api/TypedCursor.cs +++ b/src/Microsoft.ML.Api/TypedCursor.cs @@ -13,11 +13,12 @@ namespace Microsoft.ML.Runtime.Api { /// - /// This interface is an with 'strongly typed' binding. + /// This interface is an with 'strongly typed' binding. /// It can populate the user-supplied object's fields with the values of the current row. /// /// The user-defined type that is being populated while cursoring. 
- public interface IRowReadableAs : IRow + [BestFriend] + internal interface IRowReadableAs where TRow : class { /// @@ -28,11 +29,11 @@ public interface IRowReadableAs : IRow } /// - /// This interface is an with 'strongly typed' binding. + /// This interface is an with 'strongly typed' binding. /// It can accept values of type and present the value as a row. /// /// The user-defined type that provides the values while cursoring. - public interface IRowBackedBy : IRow + internal interface IRowBackedBy where TRow : class { /// @@ -48,9 +49,10 @@ public interface IRowBackedBy : IRow /// It can populate the user-supplied object's fields with the values of the current row. /// /// The user-defined type that is being populated while cursoring. - public interface IRowCursor : IRowReadableAs, ICursor + public abstract class RowCursor : RowCursor, IRowReadableAs where TRow : class { + public abstract void FillValues(TRow row); } /// @@ -63,13 +65,13 @@ public interface ICursorable /// /// Get a new cursor. /// - IRowCursor GetCursor(); + RowCursor GetCursor(); /// /// Get a new randomized cursor. /// /// The random seed to use. - IRowCursor GetRandomizedCursor(int randomSeed); + RowCursor GetRandomizedCursor(int randomSeed); } /// @@ -163,7 +165,7 @@ private static bool IsCompatibleType(ColumnType colType, MemberInfo memberInfo) /// /// Create and return a new cursor. /// - public IRowCursor GetCursor() + public RowCursor GetCursor() { return GetCursor(x => false); } @@ -172,14 +174,14 @@ public IRowCursor GetCursor() /// Create and return a new randomized cursor. /// /// The random seed to use. 
- public IRowCursor GetRandomizedCursor(int randomSeed) + public RowCursor GetRandomizedCursor(int randomSeed) { return GetCursor(x => false, randomSeed); } - public IRowReadableAs GetRow(IRow input) + public IRowReadableAs GetRow(Row input) { - return new TypedRow(this, input); + return new RowImplementation(new TypedRow(this, input)); } /// @@ -188,14 +190,14 @@ public IRowReadableAs GetRow(IRow input) /// Predicate that denotes which additional columns to include in the cursor, /// in addition to the columns that are needed for populating the object. /// The random seed to use. If null, the cursor will be non-randomized. - public IRowCursor GetCursor(Func additionalColumnsPredicate, int? randomSeed = null) + public RowCursor GetCursor(Func additionalColumnsPredicate, int? randomSeed = null) { _host.CheckValue(additionalColumnsPredicate, nameof(additionalColumnsPredicate)); Random rand = randomSeed.HasValue ? RandomUtils.Create(randomSeed.Value) : null; var cursor = _data.GetRowCursor(GetDependencies(additionalColumnsPredicate), rand); - return new TypedCursor(this, cursor); + return new RowCursorImplementation(new TypedCursor(this, cursor)); } public Func GetDependencies(Func additionalColumnsPredicate) @@ -211,7 +213,7 @@ public Func GetDependencies(Func additionalColumnsPredicat /// in addition to the columns that are needed for populating the object. 
/// Number of cursors to create /// Random generator to use - public IRowCursor[] GetCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetCursorSet(out IRowCursorConsolidator consolidator, Func additionalColumnsPredicate, int n, Random rand) { _host.CheckValue(additionalColumnsPredicate, nameof(additionalColumnsPredicate)); @@ -226,8 +228,8 @@ public IRowCursor[] GetCursorSet(out IRowCursorConsolidator consolidator, _host.AssertNonEmpty(inputs); return inputs - .Select(rc => (IRowCursor)(new TypedCursor(this, rc))) - .ToArray(); + .Select(rc => (RowCursor)(new RowCursorImplementation(new TypedCursor(this, rc)))) + .ToArray(); } /// @@ -251,10 +253,10 @@ public static TypedCursorable Create(IHostEnvironment env, IDataView data, return new TypedCursorable(env, data, ignoreMissingColumns, outSchema); } - private abstract class TypedRowBase : IRowReadableAs + private abstract class TypedRowBase { protected readonly IChannel Ch; - private readonly IRow _input; + private readonly Row _input; private readonly Action[] _setters; public long Batch => _input.Batch; @@ -263,7 +265,7 @@ private abstract class TypedRowBase : IRowReadableAs public Schema Schema => _input.Schema; - public TypedRowBase(TypedCursorable parent, IRow input, string channelMessage) + public TypedRowBase(TypedCursorable parent, Row input, string channelMessage) { Contracts.AssertValue(parent); Contracts.AssertValue(parent._host); @@ -280,17 +282,14 @@ public TypedRowBase(TypedCursorable parent, IRow input, string channelMess _setters[i] = GenerateSetter(_input, parent._columnIndices[i], parent._columns[i], parent._pokes[i], parent._peeks[i]); } - public ValueGetter GetIdGetter() - { - return _input.GetIdGetter(); - } + public ValueGetter GetIdGetter() => _input.GetIdGetter(); - private Action GenerateSetter(IRow input, int index, InternalSchemaDefinition.Column column, Delegate poke, Delegate peek) + private Action GenerateSetter(Row input, int index, 
InternalSchemaDefinition.Column column, Delegate poke, Delegate peek) { var colType = input.Schema.GetColumnType(index); var fieldType = column.OutputType; var genericType = fieldType; - Func> del; + Func> del; if (fieldType.IsArray) { Ch.Assert(colType.IsVector); @@ -349,7 +348,7 @@ private Action GenerateSetter(IRow input, int index, InternalSchemaDefinit // REVIEW: The converting getter invokes a type conversion delegate on every call, so it's inherently slower // than the 'direct' getter. We don't have good indication of this to the user, and the selection // of affected types is pretty arbitrary (signed integers and bools, but not uints and floats). - private Action CreateConvertingVBufferSetter(IRow input, int col, Delegate poke, Delegate peek, Func convert) + private Action CreateConvertingVBufferSetter(Row input, int col, Delegate poke, Delegate peek, Func convert) { var getter = input.GetGetter>(col); var typedPoke = poke as Poke; @@ -371,7 +370,7 @@ private Action CreateConvertingVBufferSetter(IRow input, int c }; } - private Action CreateDirectVBufferSetter(IRow input, int col, Delegate poke, Delegate peek) + private Action CreateDirectVBufferSetter(Row input, int col, Delegate poke, Delegate peek) { var getter = input.GetGetter>(col); var typedPoke = poke as Poke; @@ -409,7 +408,7 @@ private Action CreateDirectVBufferSetter(IRow input, int col, Delega }; } - private static Action CreateConvertingActionSetter(IRow input, int col, Delegate poke, Func convert) + private static Action CreateConvertingActionSetter(Row input, int col, Delegate poke, Func convert) { var getter = input.GetGetter(col); var typedPoke = poke as Poke; @@ -423,7 +422,7 @@ private static Action CreateConvertingActionSetter(IRow input, }; } - private static Action CreateDirectSetter(IRow input, int col, Delegate poke, Delegate peek) + private static Action CreateDirectSetter(Row input, int col, Delegate poke, Delegate peek) { // Awkward to have a parameter that's always null, but 
slightly more convenient for generalizing the setter. Contracts.Assert(peek == null); @@ -438,7 +437,7 @@ private static Action CreateDirectSetter(IRow input, int col, Delega }; } - private Action CreateVBufferToVBufferSetter(IRow input, int col, Delegate poke, Delegate peek) + private Action CreateVBufferToVBufferSetter(Row input, int col, Delegate poke, Delegate peek) { var getter = input.GetGetter>(col); var typedPoke = poke as Poke>; @@ -473,18 +472,55 @@ public ValueGetter GetGetter(int col) private sealed class TypedRow : TypedRowBase { - public TypedRow(TypedCursorable parent, IRow input) + public TypedRow(TypedCursorable parent, Row input) : base(parent, input, "Row") { } } - private sealed class TypedCursor : TypedRowBase, IRowCursor + private sealed class RowImplementation : IRowReadableAs + { + private readonly TypedRow _row; + + public RowImplementation(TypedRow row) => _row = row; + + public long Position => _row.Position; + public long Batch => _row.Batch; + public Schema Schema => _row.Schema; + public void FillValues(TRow row) => _row.FillValues(row); + public ValueGetter GetGetter(int col) => _row.GetGetter(col); + public ValueGetter GetIdGetter() => _row.GetIdGetter(); + public bool IsColumnActive(int col) => _row.IsColumnActive(col); + } + + private sealed class RowCursorImplementation : RowCursor { - private readonly IRowCursor _input; + private readonly TypedCursor _cursor; + + public RowCursorImplementation(TypedCursor cursor) => _cursor = cursor; + + public override CursorState State => _cursor.State; + public override long Position => _cursor.Position; + public override long Batch => _cursor.Batch; + public override Schema Schema => _cursor.Schema; + + public override void Dispose() { } + + public override void FillValues(TRow row) => _cursor.FillValues(row); + public override ValueGetter GetGetter(int col) => _cursor.GetGetter(col); + public override ValueGetter GetIdGetter() => _cursor.GetIdGetter(); + public override RowCursor 
GetRootCursor() => _cursor.GetRootCursor(); + public override bool IsColumnActive(int col) => _cursor.IsColumnActive(col); + public override bool MoveMany(long count) => _cursor.MoveMany(count); + public override bool MoveNext() => _cursor.MoveNext(); + } + + private sealed class TypedCursor : TypedRowBase + { + private readonly RowCursor _input; private bool _disposed; - public TypedCursor(TypedCursorable parent, IRowCursor input) + public TypedCursor(TypedCursorable parent, RowCursor input) : base(parent, input, "Cursor") { _input = input; @@ -496,7 +532,7 @@ public override void FillValues(TRow row) base.FillValues(row); } - public CursorState State { get { return _input.State; } } + public CursorState State => _input.State; public void Dispose() { @@ -508,20 +544,9 @@ public void Dispose() } } - public bool MoveNext() - { - return _input.MoveNext(); - } - - public bool MoveMany(long count) - { - return _input.MoveMany(count); - } - - public ICursor GetRootCursor() - { - return _input.GetRootCursor(); - } + public bool MoveNext() => _input.MoveNext(); + public bool MoveMany(long count) => _input.MoveMany(count); + public RowCursor GetRootCursor() => _input.GetRootCursor(); } } diff --git a/src/Microsoft.ML.Core/Data/ICursor.cs b/src/Microsoft.ML.Core/Data/ICursor.cs deleted file mode 100644 index 476f07245c..0000000000 --- a/src/Microsoft.ML.Core/Data/ICursor.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using Float = System.Single; - -using System; - -namespace Microsoft.ML.Runtime.Data -{ - /// - /// This is a base interface for an and . It contains only the - /// positional properties, no behavioral methods, and no data. 
- /// - public interface ICounted - { - /// - /// This is incremented for ICursor when the underlying contents changes, giving clients a way to detect change. - /// Generally it's -1 when the object is in an invalid state. In particular, for an , this is -1 - /// when the is or . - /// - /// Note that this position is not position within the underlying data, but position of this cursor only. - /// If one, for example, opened a set of parallel streaming cursors, or a shuffled cursor, each such cursor's - /// first valid entry would always have position 0. - /// - long Position { get; } - - /// - /// This provides a means for reconciling multiple streams of counted things. Generally, in each stream, - /// batch numbers should be non-decreasing. Furthermore, any given batch number should only appear in one - /// of the streams. Order is determined by batch number. The reconciler ensures that each stream (that is - /// still active) has at least one item available, then takes the item with the smallest batch number. - /// - /// Note that there is no suggestion that the batches for a particular entry will be consistent from - /// cursoring to cursoring, except for the consistency in resulting in the same overall ordering. The same - /// entry could have different batch numbers from one cursoring to another. There is also no requirement - /// that any given batch number must appear, at all. - /// - long Batch { get; } - - /// - /// A getter for a 128-bit ID value. It is common for objects to serve multiple - /// instances to iterate over what is supposed to be the same data, for example, in a - /// a cursor set will produce the same data as a serial cursor, just partitioned, and a shuffled cursor - /// will produce the same data as a serial cursor or any other shuffled cursor, only shuffled. The ID - /// exists for applications that need to reconcile which entry is actually which. 
Ideally this ID should - /// be unique, but for practical reasons, it suffices if collisions are simply extremely improbable. - /// - /// Note that this ID, while it must be consistent for multiple streams according to the semantics - /// above, is not considered part of the data per se. So, to take the example of a data view specifically, - /// a single data view must render consistent IDs across all cursorings, but there is no suggestion at - /// all that if the "same" data were presented in a different data view (as by, say, being transformed, - /// cached, saved, or whatever), that the IDs between the two different data views would have any - /// discernable relationship. - ValueGetter GetIdGetter(); - } - - /// - /// Defines the possible states of a cursor. - /// - public enum CursorState - { - NotStarted, - Good, - Done - } - - /// - /// The basic cursor interface. is incremented by - /// and . When the cursor state is or - /// , is -1. Otherwise, - /// >= 0. - /// - public interface ICursor : ICounted, IDisposable - { - /// - /// Returns the state of the cursor. Before the first call to or - /// this should be . After - /// any call those move functions that returns true, this should return - /// , - /// - CursorState State { get; } - - /// - /// Advance to the next row. When the cursor is first created, this method should be called to - /// move to the first row. Returns false if there are no more rows. - /// - bool MoveNext(); - - /// - /// Logically equivalent to calling the given number of times. The - /// parameter must be positive. Note that cursor implementations may be - /// able to optimize this. - /// - bool MoveMany(long count); - - /// - /// Returns a cursor that can be used for invoking , , - /// , and , with results identical to calling those - /// on this cursor. Generally, if the root cursor is not the same as this cursor, using the - /// root cursor will be faster. 
As an aside, note that this is not necessarily the case of - /// values from . - /// - ICursor GetRootCursor(); - } -} \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/IDataView.cs b/src/Microsoft.ML.Core/Data/IDataView.cs index 7d5e0c1790..c67bba60aa 100644 --- a/src/Microsoft.ML.Core/Data/IDataView.cs +++ b/src/Microsoft.ML.Core/Data/IDataView.cs @@ -88,7 +88,7 @@ public interface IDataView /// a getter for an inactive columns will throw. The predicate must be /// non-null. To activate all columns, pass "col => true". /// - IRowCursor GetRowCursor(Func needCol, Random rand = null); + RowCursor GetRowCursor(Func needCol, Random rand = null); /// /// This constructs a set of parallel batch cursors. The value n is a recommended limit @@ -102,7 +102,7 @@ public interface IDataView /// should return the "same" row as would have been returned through the regular serial cursor, /// but all rows should be returned by exactly one of the cursors returned from this cursor. /// The cursors can have their values reconciled downstream through the use of the - /// property. + /// property. /// /// This is an object that can be used to reconcile the /// returned array of cursors. When the array of cursors is of length 1, it is legal, @@ -111,7 +111,7 @@ public interface IDataView /// The suggested degree of parallelism. /// An instance /// - IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null); /// @@ -129,7 +129,7 @@ public interface IRowCursorConsolidator /// /// Create a consolidated cursor from the given parallel cursor set. /// - IRowCursor CreateCursor(IChannelProvider provider, IRowCursor[] inputs); + RowCursor CreateCursor(IChannelProvider provider, RowCursor[] inputs); } /// @@ -139,36 +139,119 @@ public interface IRowCursorConsolidator public delegate void ValueGetter(ref TValue value); /// - /// A logical row. 
May be a row of an IDataView or a stand-alone row. If/when its contents - /// change, its ICounted.Counter value is incremented. + /// A logical row. May be a row of an or a stand-alone row. If/when its contents + /// change, its value is changed. /// - public interface IRow : ICounted + public abstract class Row { + /// + /// This is incremented when the underlying contents changes, giving clients a way to detect change. + /// Generally it's -1 when the object is in an invalid state. In particular, for an , this is -1 + /// when the is or . + /// + /// Note that this position is not position within the underlying data, but position of this cursor only. + /// If one, for example, opened a set of parallel streaming cursors, or a shuffled cursor, each such cursor's + /// first valid entry would always have position 0. + /// + public abstract long Position { get; } + + /// + /// This provides a means for reconciling multiple streams of counted things. Generally, in each stream, + /// batch numbers should be non-decreasing. Furthermore, any given batch number should only appear in one + /// of the streams. Order is determined by batch number. The reconciler ensures that each stream (that is + /// still active) has at least one item available, then takes the item with the smallest batch number. + /// + /// Note that there is no suggestion that the batches for a particular entry will be consistent from + /// cursoring to cursoring, except for the consistency in resulting in the same overall ordering. The same + /// entry could have different batch numbers from one cursoring to another. There is also no requirement + /// that any given batch number must appear, at all. + /// + public abstract long Batch { get; } + + /// + /// A getter for a 128-bit ID value. 
It is common for objects to serve multiple + /// instances to iterate over what is supposed to be the same data, for example, in a + /// a cursor set will produce the same data as a serial cursor, just partitioned, and a shuffled cursor + /// will produce the same data as a serial cursor or any other shuffled cursor, only shuffled. The ID + /// exists for applications that need to reconcile which entry is actually which. Ideally this ID should + /// be unique, but for practical reasons, it suffices if collisions are simply extremely improbable. + /// + /// Note that this ID, while it must be consistent for multiple streams according to the semantics + /// above, is not considered part of the data per se. So, to take the example of a data view specifically, + /// a single data view must render consistent IDs across all cursorings, but there is no suggestion at + /// all that if the "same" data were presented in a different data view (as by, say, being transformed, + /// cached, saved, or whatever), that the IDs between the two different data views would have any + /// discernable relationship. + public abstract ValueGetter GetIdGetter(); + /// /// Returns whether the given column is active in this row. /// - bool IsColumnActive(int col); + public abstract bool IsColumnActive(int col); /// /// Returns a value getter delegate to fetch the given column value from the row. /// This throws if the column is not active in this row, or if the type /// differs from this column's type. /// - ValueGetter GetGetter(int col); + public abstract ValueGetter GetGetter(int col); /// /// Gets a , which provides name and type information for variables /// (i.e., columns in ML.NET's type system) stored in this row. /// - Schema Schema { get; } + public abstract Schema Schema { get; } + + } + /// + /// Defines the possible states of a cursor. + /// + public enum CursorState + { + NotStarted, + Good, + Done } /// - /// A cursor through rows of an . 
Note that this includes/is an - /// , as well as an . + /// The basic cursor base class to cursor through rows of an . Note that + /// this is also an . The is incremented by + /// and . When the cursor state is or + /// , is -1. Otherwise, + /// >= 0. /// - public interface IRowCursor : ICursor, IRow + public abstract class RowCursor : Row, IDisposable { + /// + /// Returns the state of the cursor. Before the first call to or + /// this should be . After + /// any call those move functions that returns , this should return + /// , + /// + public abstract CursorState State { get; } + + /// + /// Advance to the next row. When the cursor is first created, this method should be called to + /// move to the first row. Returns false if there are no more rows. + /// + public abstract bool MoveNext(); + + /// + /// Logically equivalent to calling the given number of times. The + /// parameter must be positive. Note that cursor implementations may be + /// able to optimize this. + /// + public abstract bool MoveMany(long count); + + /// + /// Returns a cursor that can be used for invoking , , + /// , and , with results identical to calling those + /// on this cursor. Generally, if the root cursor is not the same as this cursor, using the + /// root cursor will be faster. As an aside, note that this is not necessarily the case of + /// values from . + /// + public abstract RowCursor GetRootCursor(); + public abstract void Dispose(); } } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs index c4b3147d2c..f0e3f45adb 100644 --- a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs +++ b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs @@ -66,7 +66,7 @@ public interface ISchemaBoundRowMapper : ISchemaBoundMapper, IRowToRowMapper } /// - /// This interface maps an input to an output . Typically, the output contains + /// This interface maps an input to an output . 
Typically, the output contains /// both the input columns and new columns added by the implementing class, although some implementations may /// return a subset of the input columns. /// This interface is similar to , except it does not have any input role mappings, @@ -93,26 +93,26 @@ public interface IRowToRowMapper Func GetDependencies(Func predicate); /// - /// Get an with the indicated active columns, based on the input . + /// Get an with the indicated active columns, based on the input . /// The active columns are those for which returns true. Getting values on inactive /// columns of the returned row will throw. Null predicates are disallowed. /// - /// The of should be the same object as + /// The of should be the same object as /// . Implementors of this method should throw if that is not the case. Conversely, /// the returned value must have the same schema as . /// - /// This method creates a live connection between the input and the output . In particular, when the getters of the output are invoked, they invoke the - /// getters of the input row and base the output values on the current values of the input . - /// The output values are re-computed when requested through the getters. + /// This method creates a live connection between the input and the output . In particular, when the getters of the output are invoked, they invoke the + /// getters of the input row and base the output values on the current values of the input . + /// The output values are re-computed when requested through the getters. /// /// The optional should be invoked by any user of this row mapping, once it no - /// longer needs the . If no action is needed when the cursor is Disposed, the implementation + /// longer needs the . If no action is needed when the cursor is Disposed, the implementation /// should set to null, otherwise it should be set to a delegate to be /// invoked by the code calling this object. (For example, a wrapping cursor's /// method. 
It's best for this action to be idempotent - calling it multiple times should be equivalent to /// calling it once. /// - IRow GetRow(IRow input, Func active, out Action disposer); + Row GetRow(Row input, Func active, out Action disposer); } } diff --git a/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs b/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs index ecec0b1a0d..1f95d941cb 100644 --- a/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs @@ -6,31 +6,30 @@ namespace Microsoft.ML.Runtime.Data { /// /// Base class for a cursor has an input cursor, but still needs to do work on - /// MoveNext/MoveMany. + /// / . /// [BestFriend] - internal abstract class LinkedRootCursorBase : RootCursorBase - where TInput : class, ICursor + internal abstract class LinkedRootCursorBase : RootCursorBase { - private readonly ICursor _root; /// Gets the input cursor. - protected TInput Input { get; } + protected RowCursor Input { get; } /// /// Returns the root cursor of the input. It should be used to perform MoveNext or MoveMany operations. - /// Note that GetRootCursor() returns "this", NOT Root. Root is used to advance our input, not for - /// clients of this cursor. That's why it is protected, not public. + /// Note that returns , not . + /// is used to advance our input, not for clients of this cursor. That is why it is + /// protected, not public. 
/// - protected ICursor Root { get { return _root; } } + protected RowCursor Root { get; } - protected LinkedRootCursorBase(IChannelProvider provider, TInput input) + protected LinkedRootCursorBase(IChannelProvider provider, RowCursor input) : base(provider) { Ch.AssertValue(input, nameof(input)); Input = input; - _root = Input.GetRootCursor(); + Root = Input.GetRootCursor(); } public override void Dispose() diff --git a/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs b/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs index 0ed4dd19f9..78b4fd142b 100644 --- a/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs @@ -14,7 +14,7 @@ internal abstract class LinkedRowFilterCursorBase : LinkedRowRootCursorBase { public override long Batch => Input.Batch; - protected LinkedRowFilterCursorBase(IChannelProvider provider, IRowCursor input, Schema schema, bool[] active) + protected LinkedRowFilterCursorBase(IChannelProvider provider, RowCursor input, Schema schema, bool[] active) : base(provider, input, schema, active) { } diff --git a/src/Microsoft.ML.Core/Data/LinkedRowRootCursorBase.cs b/src/Microsoft.ML.Core/Data/LinkedRowRootCursorBase.cs index 188522ad3d..fb045ec6e4 100644 --- a/src/Microsoft.ML.Core/Data/LinkedRowRootCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/LinkedRowRootCursorBase.cs @@ -7,20 +7,20 @@ namespace Microsoft.ML.Runtime.Data { /// - /// A base class for a that has an input cursor, but still needs - /// to do work on /. Note + /// A base class for a that has an input cursor, but still needs + /// to do work on /. Note /// that the default assumes /// that each input column is exposed as an output column with the same column index. /// [BestFriend] - internal abstract class LinkedRowRootCursorBase : LinkedRootCursorBase, IRowCursor + internal abstract class LinkedRowRootCursorBase : LinkedRootCursorBase { private readonly bool[] _active; /// Gets row's schema. 
- public Schema Schema { get; } + public sealed override Schema Schema { get; } - protected LinkedRowRootCursorBase(IChannelProvider provider, IRowCursor input, Schema schema, bool[] active) + protected LinkedRowRootCursorBase(IChannelProvider provider, RowCursor input, Schema schema, bool[] active) : base(provider, input) { Ch.CheckValue(schema, nameof(schema)); @@ -29,13 +29,13 @@ protected LinkedRowRootCursorBase(IChannelProvider provider, IRowCursor input, S Schema = schema; } - public bool IsColumnActive(int col) + public sealed override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.Count); return _active == null || _active[col]; } - public virtual ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { return Input.GetGetter(col); } diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index 6e2bd5230f..d5cbe6928b 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -495,7 +495,7 @@ public static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, return cols; } - private sealed class MetadataRow : IRow + private sealed class MetadataRow : Row { private readonly Schema.Metadata _metadata; @@ -505,20 +505,20 @@ public MetadataRow(Schema.Metadata metadata) _metadata = metadata; } - public Schema Schema => _metadata.Schema; - public long Position => 0; - public long Batch => 0; - public ValueGetter GetGetter(int col) => _metadata.GetGetter(col); - public ValueGetter GetIdGetter() => (ref UInt128 dst) => dst = default; - public bool IsColumnActive(int col) => true; + public override Schema Schema => _metadata.Schema; + public override long Position => 0; + public override long Batch => 0; + public override ValueGetter GetGetter(int col) => _metadata.GetGetter(col); + public override ValueGetter GetIdGetter() => (ref UInt128 dst) => dst = default; + public override bool IsColumnActive(int col) => true; 
} /// - /// Presents a as a an . + /// Presents a as a an . /// /// The metadata to wrap. /// A row that wraps an input metadata. - public static IRow MetadataAsRow(Schema.Metadata metadata) + public static Row MetadataAsRow(Schema.Metadata metadata) { Contracts.CheckValue(metadata, nameof(metadata)); return new MetadataRow(metadata); diff --git a/src/Microsoft.ML.Core/Data/RootCursorBase.cs b/src/Microsoft.ML.Core/Data/RootCursorBase.cs index 5b64a40d6a..75d58d98bb 100644 --- a/src/Microsoft.ML.Core/Data/RootCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/RootCursorBase.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. using System; +using Microsoft.ML.Data; namespace Microsoft.ML.Runtime.Data { @@ -15,23 +16,21 @@ namespace Microsoft.ML.Runtime.Data /// This cursor base class returns "this" from . That is, all /// / calls will be seen by this cursor. For a cursor /// that has an input cursor and does NOT need notification on /, - /// use . + /// use . /// [BestFriend] - internal abstract class RootCursorBase : ICursor + internal abstract class RootCursorBase : RowCursor { protected readonly IChannel Ch; + private CursorState _state; + private long _position; /// /// Zero-based position of the cursor. /// - public long Position { get; private set; } + public sealed override long Position => _position; - public abstract long Batch { get; } - - public abstract ValueGetter GetIdGetter(); - - public CursorState State { get; private set; } + public sealed override CursorState State => _state; /// /// Convenience property for checking whether the current state of the cursor is . 
@@ -39,7 +38,7 @@ internal abstract class RootCursorBase : ICursor protected bool IsGood => State == CursorState.Good; /// - /// Creates an instance of the RootCursorBase class + /// Creates an instance of the class /// /// Channel provider protected RootCursorBase(IChannelProvider provider) @@ -47,21 +46,21 @@ protected RootCursorBase(IChannelProvider provider) Contracts.CheckValue(provider, nameof(provider)); Ch = provider.Start("Cursor"); - Position = -1; - State = CursorState.NotStarted; + _position = -1; + _state = CursorState.NotStarted; } - public virtual void Dispose() + public override void Dispose() { if (State != CursorState.Done) { Ch.Dispose(); - Position = -1; - State = CursorState.Done; + _position = -1; + _state = CursorState.Done; } } - public bool MoveNext() + public sealed override bool MoveNext() { if (State == CursorState.Done) return false; @@ -71,8 +70,8 @@ public bool MoveNext() { Ch.Assert(State == CursorState.NotStarted || State == CursorState.Good); - Position++; - State = CursorState.Good; + _position++; + _state = CursorState.Good; return true; } @@ -80,7 +79,7 @@ public bool MoveNext() return false; } - public bool MoveMany(long count) + public sealed override bool MoveMany(long count) { // Note: If we decide to allow count == 0, then we need to special case // that MoveNext() has never been called. It's not entirely clear what the return @@ -95,8 +94,8 @@ public bool MoveMany(long count) { Ch.Assert(State == CursorState.NotStarted || State == CursorState.Good); - Position += count; - State = CursorState.Good; + _position += count; + _state = CursorState.Good; return true; } @@ -137,6 +136,6 @@ protected virtual bool MoveManyCore(long count) /// those on this cursor. Generally, if the root cursor is not the same as this cursor, using /// the root cursor will be faster. 
/// - public ICursor GetRootCursor() => this; + public override RowCursor GetRootCursor() => this; } } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/Schema.cs b/src/Microsoft.ML.Core/Data/Schema.cs index ec45753331..90c87fc54a 100644 --- a/src/Microsoft.ML.Core/Data/Schema.cs +++ b/src/Microsoft.ML.Core/Data/Schema.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Data { /// - /// This class represents the of an object like, for interstance, an or an . + /// This class represents the of an object like, for interstance, an or an . /// On the high level, the schema is a collection of 'columns'. Each column has the following properties: /// - Column name. /// - Column type. diff --git a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs index da60c84ccf..b2641d985e 100644 --- a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs @@ -2,6 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; + namespace Microsoft.ML.Runtime.Data { /// @@ -11,28 +13,27 @@ namespace Microsoft.ML.Runtime.Data /// Dispose is virtual with the default implementation delegating to the input cursor. 
/// [BestFriend] - internal abstract class SynchronizedCursorBase : ICursor - where TBase : class, ICursor + internal abstract class SynchronizedCursorBase : RowCursor { protected readonly IChannel Ch; - private readonly ICursor _root; + private readonly RowCursor _root; private bool _disposed; - protected TBase Input { get; } + protected RowCursor Input { get; } - public long Position => _root.Position; + public sealed override long Position => _root.Position; - public long Batch => _root.Batch; + public sealed override long Batch => _root.Batch; - public CursorState State => _root.State; + public sealed override CursorState State => _root.State; /// /// Convenience property for checking whether the current state is CursorState.Good. /// protected bool IsGood => _root.State == CursorState.Good; - protected SynchronizedCursorBase(IChannelProvider provider, TBase input) + protected SynchronizedCursorBase(IChannelProvider provider, RowCursor input) { Contracts.AssertValue(provider, "provider"); Ch = provider.Start("Cursor"); @@ -42,7 +43,7 @@ protected SynchronizedCursorBase(IChannelProvider provider, TBase input) _root = Input.GetRootCursor(); } - public virtual void Dispose() + public override void Dispose() { if (!_disposed) { @@ -52,24 +53,12 @@ public virtual void Dispose() } } - public bool MoveNext() - { - return _root.MoveNext(); - } + public sealed override bool MoveNext() => _root.MoveNext(); - public bool MoveMany(long count) - { - return _root.MoveMany(count); - } + public sealed override bool MoveMany(long count) => _root.MoveMany(count); - public ICursor GetRootCursor() - { - return _root; - } + public sealed override RowCursor GetRootCursor() => _root; - public ValueGetter GetIdGetter() - { - return Input.GetIdGetter(); - } + public sealed override ValueGetter GetIdGetter() => Input.GetIdGetter(); } } diff --git a/src/Microsoft.ML.Data/Data/DataViewUtils.cs b/src/Microsoft.ML.Data/Data/DataViewUtils.cs index 4d8752a6fe..f81672008b 100644 --- 
a/src/Microsoft.ML.Data/Data/DataViewUtils.cs +++ b/src/Microsoft.ML.Data/Data/DataViewUtils.cs @@ -114,7 +114,7 @@ public static int GetThreadCount(IHost host, int num = 0, bool preferOne = false /// Try to create a cursor set from upstream and consolidate it here. The host determines /// the target cardinality of the cursor set. /// - public static bool TryCreateConsolidatingCursor(out IRowCursor curs, + public static bool TryCreateConsolidatingCursor(out RowCursor curs, IDataView view, Func predicate, IHost host, Random rand) { Contracts.CheckValue(host, nameof(host)); @@ -146,19 +146,19 @@ public static bool TryCreateConsolidatingCursor(out IRowCursor curs, /// cardinality. If not all the active columns are cachable, this will only /// produce the given input cursor. /// - public static IRowCursor[] CreateSplitCursors(out IRowCursorConsolidator consolidator, - IChannelProvider provider, IRowCursor input, int num) + public static RowCursor[] CreateSplitCursors(out IRowCursorConsolidator consolidator, + IChannelProvider provider, RowCursor input, int num) { Contracts.CheckValue(provider, nameof(provider)); provider.CheckValue(input, nameof(input)); consolidator = null; if (num <= 1) - return new IRowCursor[1] { input }; + return new RowCursor[1] { input }; // If any active columns are not cachable, we can't split. if (!AllCachable(input.Schema, input.IsColumnActive)) - return new IRowCursor[1] { input }; + return new RowCursor[1] { input }; // REVIEW: Should we limit the cardinality to some reasonable size? @@ -205,7 +205,7 @@ public static bool IsCachable(this ColumnType type) /// that is, they all are non-null, have the same schemas, and the same /// set of columns are active. /// - public static bool SameSchemaAndActivity(IRowCursor[] cursors) + public static bool SameSchemaAndActivity(RowCursor[] cursors) { // There must be something to actually consolidate. 
if (Utils.Size(cursors) == 0) @@ -239,7 +239,7 @@ public static bool SameSchemaAndActivity(IRowCursor[] cursors) /// Given a parallel cursor set, this consolidates them into a single cursor. The batchSize /// is a hint used for efficiency. /// - public static IRowCursor ConsolidateGeneric(IChannelProvider provider, IRowCursor[] inputs, int batchSize) + public static RowCursor ConsolidateGeneric(IChannelProvider provider, RowCursor[] inputs, int batchSize) { Contracts.CheckValue(provider, nameof(provider)); provider.CheckNonEmpty(inputs, nameof(inputs)); @@ -309,12 +309,12 @@ public Consolidator(Splitter splitter) _splitter = splitter; } - public IRowCursor CreateCursor(IChannelProvider provider, IRowCursor[] inputs) + public RowCursor CreateCursor(IChannelProvider provider, RowCursor[] inputs) { return Consolidate(provider, inputs, 128, ref _splitter._consolidateCachePools); } - public static IRowCursor Consolidate(IChannelProvider provider, IRowCursor[] inputs, int batchSize, ref object[] ourPools) + public static RowCursor Consolidate(IChannelProvider provider, RowCursor[] inputs, int batchSize, ref object[] ourPools) { Contracts.AssertValue(provider); using (var ch = provider.Start("Consolidate")) @@ -323,14 +323,14 @@ public static IRowCursor Consolidate(IChannelProvider provider, IRowCursor[] inp } } - private static IRowCursor ConsolidateCore(IChannelProvider provider, IRowCursor[] inputs, ref object[] ourPools, IChannel ch) + private static RowCursor ConsolidateCore(IChannelProvider provider, RowCursor[] inputs, ref object[] ourPools, IChannel ch) { ch.CheckNonEmpty(inputs, nameof(inputs)); if (inputs.Length == 1) return inputs[0]; ch.CheckParam(SameSchemaAndActivity(inputs), nameof(inputs), "Inputs not compatible for consolidation"); - IRowCursor cursor = inputs[0]; + RowCursor cursor = inputs[0]; var schema = cursor.Schema; ch.CheckParam(AllCachable(schema, cursor.IsColumnActive), nameof(inputs), "Inputs had some uncachable input columns"); @@ -494,7 
+494,7 @@ private static MadeObjectPool GetPoolCore(object[] pools, int poolIdx) } } - public static IRowCursor[] Split(out IRowCursorConsolidator consolidator, IChannelProvider provider, Schema schema, IRowCursor input, int cthd) + public static RowCursor[] Split(out IRowCursorConsolidator consolidator, IChannelProvider provider, Schema schema, RowCursor input, int cthd) { Contracts.AssertValue(provider, "provider"); @@ -506,7 +506,7 @@ public static IRowCursor[] Split(out IRowCursorConsolidator consolidator, IChann } } - private IRowCursor[] SplitCore(out IRowCursorConsolidator consolidator, IChannelProvider ch, IRowCursor input, int cthd) + private RowCursor[] SplitCore(out IRowCursorConsolidator consolidator, IChannelProvider ch, RowCursor input, int cthd) { Contracts.AssertValue(ch); ch.AssertValue(input); @@ -524,7 +524,7 @@ private IRowCursor[] SplitCore(out IRowCursorConsolidator consolidator, IChannel int[] colToActive; Utils.BuildSubsetMaps(_schema.ColumnCount, input.IsColumnActive, out activeToCol, out colToActive); - Func createFunc = CreateInPipe; + Func createFunc = CreateInPipe; var inGenMethod = createFunc.GetMethodInfo().GetGenericMethodDefinition(); object[] arguments = new object[] { input, 0 }; // Only one set of in-pipes, one per column, as well as for extra side information. @@ -644,7 +644,7 @@ private IRowCursor[] SplitCore(out IRowCursorConsolidator consolidator, IChannel /// /// An in pipe creator intended to be used from the splitter only. /// - private InPipe CreateInPipe(IRow input, int col) + private InPipe CreateInPipe(Row input, int col) { Contracts.AssertValue(input); Contracts.Assert(0 <= col && col < _schema.ColumnCount); @@ -654,7 +654,7 @@ private InPipe CreateInPipe(IRow input, int col) /// /// An in pipe creator intended to be used from the splitter only. 
/// - private InPipe CreateIdInPipe(IRow input) + private InPipe CreateIdInPipe(Row input) { Contracts.AssertValue(input); return CreateInPipeCore(_schema.ColumnCount + (int)ExtraIndex.Id, input.GetIdGetter()); @@ -849,7 +849,7 @@ public void SetAll(OutPipe[] pipes) /// /// This helps a cursor present the results of a . Practically its role - /// really is to just provide a stable delegate for the . + /// really is to just provide a stable delegate for the . /// There is one of these created per column, per output cursor, i.e., in splitting /// there are n of these created per column, and when consolidating only one of these /// is created per column. @@ -999,7 +999,7 @@ protected override void Getter(ref T value) /// objects from the input blocking collection, and yields the /// values stored therein through the help of objects. /// - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly Schema _schema; private readonly int[] _activeToCol; @@ -1014,12 +1014,9 @@ private sealed class Cursor : RootCursorBase, IRowCursor private long _batch; private bool _disposed; - public Schema Schema => _schema; + public override Schema Schema => _schema; - public override long Batch - { - get { return _batch; } - } + public override long Batch => _batch; /// /// Constructs one of the split cursors. 
@@ -1114,13 +1111,13 @@ protected override bool MoveNextCore() return true; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActive.Length, nameof(col)); return _colToActive[col] >= 0; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(IsColumnActive(col), nameof(col), "requested column not active"); var getter = _getters[_colToActive[col]] as ValueGetter; @@ -1136,9 +1133,9 @@ public ValueGetter GetGetter(int col) /// at the cost of being totally synchronous, that is, there is no parallel benefit from /// having split the input cursors. /// - internal sealed class SynchronousConsolidatingCursor : RootCursorBase, IRowCursor + internal sealed class SynchronousConsolidatingCursor : RootCursorBase { - private readonly IRowCursor[] _cursors; + private readonly RowCursor[] _cursors; private readonly Delegate[] _getters; private readonly Schema _schema; @@ -1152,7 +1149,7 @@ internal sealed class SynchronousConsolidatingCursor : RootCursorBase, IRowCurso // Index into _cursors array pointing to the current cursor, or -1 if this cursor is not in Good state. private int _icursor; // If this cursor is in Good state then this should equal _cursors[_icursor], else null. - private IRowCursor _currentCursor; + private RowCursor _currentCursor; private bool _disposed; private readonly struct CursorStats @@ -1171,9 +1168,9 @@ public CursorStats(long batch, int idx) // input batch as our own batch. Should we suppress it? 
public override long Batch { get { return _batch; } } - public Schema Schema => _schema; + public override Schema Schema => _schema; - public SynchronousConsolidatingCursor(IChannelProvider provider, IRowCursor[] cursors) + public SynchronousConsolidatingCursor(IChannelProvider provider, RowCursor[] cursors) : base(provider) { Ch.CheckNonEmpty(cursors, nameof(cursors)); @@ -1199,7 +1196,7 @@ private void InitHeap() { for (int i = 0; i < _cursors.Length; ++i) { - IRowCursor cursor = _cursors[i]; + RowCursor cursor = _cursors[i]; Ch.Assert(cursor.State == CursorState.NotStarted); if (cursor.MoveNext()) _mins.Add(new CursorStats(cursor.Batch, i)); @@ -1291,13 +1288,13 @@ protected override bool MoveNextCore() return true; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActive.Length, nameof(col)); return _colToActive[col] >= 0; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(IsColumnActive(col), nameof(col), "requested column not active"); var getter = _getters[_colToActive[col]] as ValueGetter; @@ -1307,7 +1304,7 @@ public ValueGetter GetGetter(int col) } } - public static ValueGetter>[] PopulateGetterArray(IRowCursor cursor, List colIndices) + public static ValueGetter>[] PopulateGetterArray(RowCursor cursor, List colIndices) { var n = colIndices.Count; var getters = new ValueGetter>[n]; @@ -1335,7 +1332,7 @@ public static ValueGetter>[] PopulateGetterArray(IRowCursor return getters; } - public static ValueGetter> GetSingleValueGetter(IRow cursor, int i, ColumnType colType) + public static ValueGetter> GetSingleValueGetter(Row cursor, int i, ColumnType colType) { var floatGetter = cursor.GetGetter(i); T v = default(T); @@ -1365,7 +1362,7 @@ public static ValueGetter> GetSingleValueGetter(IRow cur return getter; } - public static ValueGetter> GetVectorFlatteningGetter(IRow cursor, int colIndex, ColumnType colType) + public static 
ValueGetter> GetVectorFlatteningGetter(Row cursor, int colIndex, ColumnType colType) { var vecGetter = cursor.GetGetter>(colIndex); var vbuf = default(VBuffer); diff --git a/src/Microsoft.ML.Data/Data/IRowSeekable.cs b/src/Microsoft.ML.Data/Data/IRowSeekable.cs index 9270ddc7f3..17514612af 100644 --- a/src/Microsoft.ML.Data/Data/IRowSeekable.cs +++ b/src/Microsoft.ML.Data/Data/IRowSeekable.cs @@ -14,18 +14,20 @@ namespace Microsoft.ML.Runtime.Data /// public interface IRowSeekable { - IRowSeeker GetSeeker(Func predicate); + RowSeeker GetSeeker(Func predicate); Schema Schema { get; } } /// /// Represents a row seeker with random access that can retrieve a specific row by the row index. - /// For IRowSeeker, when the state is valid (that is when MoveTo() returns true), it returns the - /// current row index. Otherwise it's -1. + /// For , when the state is valid (that is when + /// returns ), it returns the current row index. Otherwise it's -1. /// - public interface IRowSeeker : IRow, IDisposable + public abstract class RowSeeker : Row, IDisposable { + public abstract void Dispose(); + /// /// Moves the seeker to a row at a specific row index. /// If the row index specified is out of range (less than zero or not less than the @@ -33,6 +35,6 @@ public interface IRowSeeker : IRow, IDisposable /// /// The row index to move to. /// True if a row with specified index is found; false otherwise. - bool MoveTo(long rowIndex); + public abstract bool MoveTo(long rowIndex); } } diff --git a/src/Microsoft.ML.Data/Data/ITransposeDataView.cs b/src/Microsoft.ML.Data/Data/ITransposeDataView.cs index 4c68dcbdd1..c090b185c6 100644 --- a/src/Microsoft.ML.Data/Data/ITransposeDataView.cs +++ b/src/Microsoft.ML.Data/Data/ITransposeDataView.cs @@ -2,6 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using System; + namespace Microsoft.ML.Runtime.Data { // REVIEW: There are a couple problems. 
Firstly, what to do about cases where @@ -14,8 +16,8 @@ namespace Microsoft.ML.Runtime.Data /// /// A view of data where columns can optionally be accessed slot by slot, as opposed to row /// by row in a typical dataview. A slot-accessible column can be accessed with a slot-by-slot - /// cursor via an (naturally, as opposed to row-by-row through an - /// ). This interface is intended to be implemented by classes that + /// cursor via an (naturally, as opposed to row-by-row through an + /// ). This interface is intended to be implemented by classes that /// want to provide an option for an alternate way of accessing the data stored in a /// . /// @@ -36,26 +38,7 @@ public interface ITransposeDataView : IDataView /// Presents a cursor over the slots of a transposable column, or throws if the column /// is not transposable. /// - ISlotCursor GetSlotCursor(int col); - } - - /// - /// A cursor that allows slot-by-slot access of data. - /// - public interface ISlotCursor : ICursor - { - /// - /// The slot type for this cursor. Note that this should equal the - /// for the column from which this slot cursor - /// was created. - /// - VectorType GetSlotType(); - - /// - /// A getter delegate for the slot values. The type must correspond - /// to the item type from . - /// - ValueGetter> GetGetter(); + SlotCursor GetSlotCursor(int col); } /// @@ -65,8 +48,8 @@ public interface ITransposeSchema : ISchema { /// /// Analogous to , except instead of returning the type of value - /// accessible through the , returns the item type of value accessible - /// through the . This will return null iff this particular + /// accessible through the , returns the item type of value accessible + /// through the . This will return null iff this particular /// column is not transposable, that is, it cannot be viewed in a slotwise fashion. Observe from /// the return type that this will always be a vector type. This vector type should be of fixed /// size and one dimension. 
diff --git a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs index 57098d2895..5e79d05448 100644 --- a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs +++ b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs @@ -23,17 +23,17 @@ public static class RowCursorUtils /// The row to get the getter for /// The column index, which must be active on that row /// The getter as a delegate - public static Delegate GetGetterAsDelegate(IRow row, int col) + public static Delegate GetGetterAsDelegate(Row row, int col) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckParam(0 <= col && col < row.Schema.ColumnCount, nameof(col)); Contracts.CheckParam(row.IsColumnActive(col), nameof(col), "column was not active"); - Func getGetter = GetGetterAsDelegateCore; + Func getGetter = GetGetterAsDelegateCore; return Utils.MarshalInvoke(getGetter, row.Schema.GetColumnType(col).RawType, row, col); } - private static Delegate GetGetterAsDelegateCore(IRow row, int col) + private static Delegate GetGetterAsDelegateCore(Row row, int col) { return row.GetGetter(col); } @@ -44,7 +44,7 @@ private static Delegate GetGetterAsDelegateCore(IRow row, int col) /// . 
/// /// - public static Delegate GetGetterAs(ColumnType typeDst, IRow row, int col) + public static Delegate GetGetterAs(ColumnType typeDst, Row row, int col) { Contracts.CheckValue(typeDst, nameof(typeDst)); Contracts.CheckParam(typeDst.IsPrimitive, nameof(typeDst)); @@ -55,7 +55,7 @@ public static Delegate GetGetterAs(ColumnType typeDst, IRow row, int col) var typeSrc = row.Schema.GetColumnType(col); Contracts.Check(typeSrc.IsPrimitive, "Source column type must be primitive"); - Func> del = GetGetterAsCore; + Func> del = GetGetterAsCore; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeSrc.RawType, typeDst.RawType); return (Delegate)methodInfo.Invoke(null, new object[] { typeSrc, typeDst, row, col }); } @@ -64,7 +64,7 @@ public static Delegate GetGetterAs(ColumnType typeDst, IRow row, int col) /// Given a destination type, IRow, and column index, return a ValueGetter{TDst} for the column /// with a conversion to typeDst, if needed. /// - public static ValueGetter GetGetterAs(ColumnType typeDst, IRow row, int col) + public static ValueGetter GetGetterAs(ColumnType typeDst, Row row, int col) { Contracts.CheckValue(typeDst, nameof(typeDst)); Contracts.CheckParam(typeDst.IsPrimitive, nameof(typeDst)); @@ -76,12 +76,12 @@ public static ValueGetter GetGetterAs(ColumnType typeDst, IRow row, var typeSrc = row.Schema.GetColumnType(col); Contracts.Check(typeSrc.IsPrimitive, "Source column type must be primitive"); - Func> del = GetGetterAsCore; + Func> del = GetGetterAsCore; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeSrc.RawType, typeof(TDst)); return (ValueGetter)methodInfo.Invoke(null, new object[] { typeSrc, typeDst, row, col }); } - private static ValueGetter GetGetterAsCore(ColumnType typeSrc, ColumnType typeDst, IRow row, int col) + private static ValueGetter GetGetterAsCore(ColumnType typeSrc, ColumnType typeDst, Row row, int col) { Contracts.Assert(typeof(TSrc) == typeSrc.RawType); 
Contracts.Assert(typeof(TDst) == typeDst.RawType); @@ -112,7 +112,7 @@ private static ValueGetter GetGetterAsCore(ColumnType typeSrc, /// into the required type. This method can be useful if you want to output a value /// as a string in a generic way, but don't really care how you do it. /// - public static ValueGetter GetGetterAsStringBuilder(IRow row, int col) + public static ValueGetter GetGetterAsStringBuilder(Row row, int col) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckParam(0 <= col && col < row.Schema.ColumnCount, nameof(col)); @@ -123,7 +123,7 @@ public static ValueGetter GetGetterAsStringBuilder(IRow row, int return Utils.MarshalInvoke(GetGetterAsStringBuilderCore, typeSrc.RawType, typeSrc, row, col); } - private static ValueGetter GetGetterAsStringBuilderCore(ColumnType typeSrc, IRow row, int col) + private static ValueGetter GetGetterAsStringBuilderCore(ColumnType typeSrc, Row row, int col) { Contracts.Assert(typeof(TSrc) == typeSrc.RawType); @@ -142,9 +142,9 @@ private static ValueGetter GetGetterAsStringBuilderCore(Col /// /// Given the item type, typeDst, a row, and column index, return a ValueGetter for the vector-valued /// column with a conversion to a vector of typeDst, if needed. This is the weakly typed version of - /// . + /// . /// - public static Delegate GetVecGetterAs(PrimitiveType typeDst, IRow row, int col) + public static Delegate GetVecGetterAs(PrimitiveType typeDst, Row row, int col) { Contracts.CheckValue(typeDst, nameof(typeDst)); Contracts.CheckValue(row, nameof(row)); @@ -163,7 +163,7 @@ public static Delegate GetVecGetterAs(PrimitiveType typeDst, IRow row, int col) /// Given the item type, typeDst, a row, and column index, return a ValueGetter{VBuffer{TDst}} for the /// vector-valued column with a conversion to a vector of typeDst, if needed. 
/// - public static ValueGetter> GetVecGetterAs(PrimitiveType typeDst, IRow row, int col) + public static ValueGetter> GetVecGetterAs(PrimitiveType typeDst, Row row, int col) { Contracts.CheckValue(typeDst, nameof(typeDst)); Contracts.CheckParam(typeDst.RawType == typeof(TDst), nameof(typeDst)); @@ -183,7 +183,7 @@ public static ValueGetter> GetVecGetterAs(PrimitiveType type /// Given the item type, typeDst, and a slot cursor, return a ValueGetter{VBuffer{TDst}} for the /// vector-valued column with a conversion to a vector of typeDst, if needed. /// - public static ValueGetter> GetVecGetterAs(PrimitiveType typeDst, ISlotCursor cursor) + public static ValueGetter> GetVecGetterAs(PrimitiveType typeDst, SlotCursor cursor) { Contracts.CheckValue(typeDst, nameof(typeDst)); Contracts.CheckParam(typeDst.RawType == typeof(TDst), nameof(typeDst)); @@ -200,12 +200,12 @@ public static ValueGetter> GetVecGetterAs(PrimitiveType type ///
private abstract class GetterFactory { - public static GetterFactory Create(IRow row, int col) + public static GetterFactory Create(Row row, int col) { return new RowImpl(row, col); } - public static GetterFactory Create(ISlotCursor cursor) + public static GetterFactory Create(SlotCursor cursor) { return new SlotImpl(cursor); } @@ -214,10 +214,10 @@ public static GetterFactory Create(ISlotCursor cursor) private sealed class RowImpl : GetterFactory { - private readonly IRow _row; + private readonly Row _row; private readonly int _col; - public RowImpl(IRow row, int col) + public RowImpl(Row row, int col) { _row = row; _col = col; @@ -231,9 +231,9 @@ public override ValueGetter GetGetter() private sealed class SlotImpl : GetterFactory { - private readonly ISlotCursor _cursor; + private readonly SlotCursor _cursor; - public SlotImpl(ISlotCursor cursor) + public SlotImpl(SlotCursor cursor) { _cursor = cursor; } @@ -294,7 +294,7 @@ private static ValueGetter> GetVecGetterAsCore(VectorT /// is different than it was, in the last call. This is practically useful for determining /// group boundaries. Note that the delegate will return true on the first row. ///
- public static Func GetIsNewGroupDelegate(IRow cursor, int col) + public static Func GetIsNewGroupDelegate(Row cursor, int col) { Contracts.CheckValue(cursor, nameof(cursor)); Contracts.Check(0 <= col && col < cursor.Schema.ColumnCount); @@ -303,7 +303,7 @@ public static Func GetIsNewGroupDelegate(IRow cursor, int col) return Utils.MarshalInvoke(GetIsNewGroupDelegateCore, type.RawType, cursor, col); } - private static Func GetIsNewGroupDelegateCore(IRow cursor, int col) + private static Func GetIsNewGroupDelegateCore(Row cursor, int col) { var getter = cursor.GetGetter(col); bool first = true; @@ -329,7 +329,7 @@ private static Func GetIsNewGroupDelegateCore(IRow cursor, int col) [Obsolete("The usages of this appear to be based on a total misunderstanding of what Batch actually is. It is a mechanism " + "to enable sharding and recovery of parallelized data, and has nothing to do with actual data.")] [BestFriend] - internal static Func GetIsNewBatchDelegate(IRow cursor, int batchSize) + internal static Func GetIsNewBatchDelegate(Row cursor, int batchSize) { Contracts.CheckParam(batchSize > 0, nameof(batchSize), "Batch size must be > 0"); long lastNewBatchPosition = -1; @@ -366,7 +366,7 @@ public static string TestGetLabelGetter(ColumnType type, bool allowKeys) return allowKeys ? 
"Expected R4, R8, Bool or Key type" : "Expected R4, R8 or Bool type"; } - public static ValueGetter GetLabelGetter(IRow cursor, int labelIndex) + public static ValueGetter GetLabelGetter(Row cursor, int labelIndex) { var type = cursor.Schema.GetColumnType(labelIndex); @@ -388,7 +388,7 @@ public static ValueGetter GetLabelGetter(IRow cursor, int labelIndex) return GetLabelGetterNotFloat(cursor, labelIndex); } - private static ValueGetter GetLabelGetterNotFloat(IRow cursor, int labelIndex) + private static ValueGetter GetLabelGetterNotFloat(Row cursor, int labelIndex) { var type = cursor.Schema.GetColumnType(labelIndex); @@ -425,7 +425,7 @@ private static ValueGetter GetLabelGetterNotFloat(IRow cursor, int label }; } - public static ValueGetter> GetLabelGetter(ISlotCursor cursor) + public static ValueGetter> GetLabelGetter(SlotCursor cursor) { var type = cursor.GetSlotType().ItemType; if (type == NumberType.R4) @@ -461,7 +461,7 @@ public static ValueGetter> GetLabelGetter(ISlotCursor cursor) /// Fetches the value of the column by name, in the given row. /// Used by the evaluators to retrieve the metrics from the results IDataView. /// - public static T Fetch(IExceptionContext ectx, IRow row, string name) + public static T Fetch(IExceptionContext ectx, Row row, string name) { if (!row.Schema.TryGetColumnIndex(name, out int col)) throw ectx.Except($"Could not find column '{name}'"); @@ -472,7 +472,7 @@ public static T Fetch(IExceptionContext ectx, IRow row, string name) /// /// Given a row, returns a one-row data view. This is useful for cases where you have a row, and you - /// wish to use some facility normally only exposed to dataviews. (For example, you have an + /// wish to use some facility normally only exposed to dataviews. (For example, you have an /// but want to save it somewhere using a .) 
/// Note that it is not possible for this method to ensure that the input does not /// change, so users of this convenience must take care of what they do with the input row or the data @@ -481,7 +481,7 @@ public static T Fetch(IExceptionContext ectx, IRow row, string name) /// An environment used to create the host for the resulting data view /// A row, whose columns must all be active /// A single-row data view incorporating that row - public static IDataView RowAsDataView(IHostEnvironment env, IRow row) + public static IDataView RowAsDataView(IHostEnvironment env, Row row) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(row, nameof(row)); @@ -491,13 +491,13 @@ public static IDataView RowAsDataView(IHostEnvironment env, IRow row) private sealed class OneRowDataView : IDataView { - private readonly IRow _row; + private readonly Row _row; private readonly IHost _host; // A channel provider is required for creating the cursor. public Schema Schema => _row.Schema; public bool CanShuffle => true; // The shuffling is even uniformly IID!! 
:) - public OneRowDataView(IHostEnvironment env, IRow row) + public OneRowDataView(IHostEnvironment env, Row row) { Contracts.AssertValue(env); _host = env.Register("OneRowDataView"); @@ -507,7 +507,7 @@ public OneRowDataView(IHostEnvironment env, IRow row) _row = row; } - public IRowCursor GetRowCursor(Func needCol, Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); @@ -515,12 +515,12 @@ public IRowCursor GetRowCursor(Func needCol, Random rand = null) return new Cursor(_host, this, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursor(needCol, rand) }; + return new RowCursor[] { GetRowCursor(needCol, rand) }; } public long? 
GetRowCount() @@ -528,12 +528,12 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun return 1; } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly OneRowDataView _parent; private readonly bool[] _active; - public Schema Schema => _parent.Schema; + public override Schema Schema => _parent.Schema; public override long Batch { get { return 0; } } public Cursor(IHost host, OneRowDataView parent, bool[] active) @@ -551,7 +551,7 @@ protected override bool MoveNextCore() return State == CursorState.NotStarted; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col < Schema.ColumnCount, nameof(col)); Ch.CheckParam(IsColumnActive(col), nameof(col), "Requested column is not active"); @@ -564,7 +564,7 @@ public ValueGetter GetGetter(int col) }; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < Schema.ColumnCount, nameof(col)); // We present the "illusion" that this column is not active, even though it must be diff --git a/src/Microsoft.ML.Data/Data/SlotCursor.cs b/src/Microsoft.ML.Data/Data/SlotCursor.cs new file mode 100644 index 0000000000..7e151254e0 --- /dev/null +++ b/src/Microsoft.ML.Data/Data/SlotCursor.cs @@ -0,0 +1,142 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; + +namespace Microsoft.ML.Runtime.Data +{ + /// + /// A cursor that allows slot-by-slot access of data. This is to + /// what is to . 
+ /// + public abstract class SlotCursor : IDisposable + { + [BestFriend] + private protected readonly IChannel Ch; + private CursorState _state; + + /// + /// Whether the cursor is in a state where it can serve up data, that is, + /// has been called and returned . + /// + [BestFriend] + private protected bool IsGood => _state == CursorState.Good; + + [BestFriend] + private protected SlotCursor(IChannelProvider provider) + { + Contracts.AssertValue(provider); + Ch = provider.Start("Slot Cursor"); + _state = CursorState.NotStarted; + } + + /// + /// The slot index. Incremented by one when is called and returns . + /// When initially created, or after returns , this will be -1. + /// + public abstract int SlotIndex { get; } + + /// + /// Advance to the next slot. When the cursor is first created, this method should be called to + /// move to the first slot. Returns if there are no more slots. + /// + public abstract bool MoveNext(); + + /// + /// The slot type for this cursor. Note that this should equal the + /// for the column from which this slot cursor + /// was created. + /// + public abstract VectorType GetSlotType(); + + /// + /// A getter delegate for the slot values. The type must correspond + /// to the item type from . + /// + public abstract ValueGetter> GetGetter(); + + public virtual void Dispose() + { + if (_state != CursorState.Done) + { + Ch.Dispose(); + _state = CursorState.Done; + } + } + + /// + /// For wrapping another slot cursor from which we get and , + /// but not the data or type accesors. Somewhat analogous to the + /// for s. + /// + [BestFriend] + internal abstract class SynchronizedSlotCursor : SlotCursor + { + private readonly SlotCursor _root; + + public SynchronizedSlotCursor(IChannelProvider provider, SlotCursor cursor) + : base(provider) + { + Contracts.AssertValue(cursor); + // If the input is itself a sync-base, we can walk up the chain to get its root, + // thereby making things more efficient. 
+ _root = cursor is SynchronizedSlotCursor sync ? sync._root : cursor; + } + + public override bool MoveNext() + => _root.MoveNext(); + + public override int SlotIndex => _root.SlotIndex; + } + + /// + /// A useful base class for common implementations, somewhat + /// analogous to the for s. + /// + [BestFriend] + internal abstract class RootSlotCursor : SlotCursor + { + private int _slotIndex; + + public RootSlotCursor(IChannelProvider provider) + : base(provider) + { + _slotIndex = -1; + } + + public override int SlotIndex => _slotIndex; + + public override void Dispose() + { + base.Dispose(); + _slotIndex = -1; + } + + public override bool MoveNext() + { + if (_state == CursorState.Done) + return false; + + Ch.Assert(_state == CursorState.NotStarted || _state == CursorState.Good); + if (MoveNextCore()) + { + Ch.Assert(_state == CursorState.NotStarted || _state == CursorState.Good); + + _slotIndex++; + _state = CursorState.Good; + return true; + } + + Dispose(); + return false; + } + + /// + /// Core implementation of . Called only if this method + /// has not yet previously returned . 
+ /// + protected abstract bool MoveNextCore(); + } + } +} diff --git a/src/Microsoft.ML.Data/DataDebuggerPreview.cs b/src/Microsoft.ML.Data/DataDebuggerPreview.cs index ba1049eb2b..296495b88c 100644 --- a/src/Microsoft.ML.Data/DataDebuggerPreview.cs +++ b/src/Microsoft.ML.Data/DataDebuggerPreview.cs @@ -63,7 +63,7 @@ internal DataDebuggerPreview(IDataView data, int maxRows = Defaults.MaxRows) public override string ToString() => $"{Schema.Count} columns, {RowView.Length} rows"; - private Action> MakeSetter(IRow row, int col) + private Action> MakeSetter(Row row, int col) { var getter = row.GetGetter(col); string name = row.Schema[col].Name; diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs index 172a6d695e..aa33db97b8 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs @@ -1234,7 +1234,7 @@ private TableOfContentsEntry CreateRowIndexEntry(string rowIndexName) return entry; } - private IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + private RowCursor GetRowCursorCore(Func predicate, Random rand = null) { if (rand != null && _randomShufflePoolRows > 0) { @@ -1247,23 +1247,23 @@ private IRowCursor GetRowCursorCore(Func predicate, Random rand = nul return new Cursor(this, predicate, rand); } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); return GetRowCursorCore(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { 
GetRowCursorCore(predicate, rand) }; + return new RowCursor[] { GetRowCursorCore(predicate, rand) }; } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private const string _badCursorState = "cursor is either not started or is ended, and cannot get values"; @@ -1285,7 +1285,7 @@ private sealed class Cursor : RootCursorBase, IRowCursor private volatile bool _disposed; - public Schema Schema => _parent.Schema; + public override Schema Schema => _parent.Schema; public override long Batch { @@ -2009,7 +2009,7 @@ public override Delegate GetGetter() } } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); return _colToActivesIndex[col] >= 0; @@ -2070,7 +2070,7 @@ protected override bool MoveNextCore() return more; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); Ch.CheckParam(_colToActivesIndex[col] >= 0, nameof(col), "requested column not active"); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs index 9a47f932de..d8c060c031 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs @@ -88,7 +88,7 @@ protected WritePipe(BinarySaver parent) /// /// Returns an appropriate generic WritePipe{T} for the given column. 
/// - public static WritePipe Create(BinarySaver parent, IRowCursor cursor, ColumnCodec col) + public static WritePipe Create(BinarySaver parent, RowCursor cursor, ColumnCodec col) { Type writePipeType = typeof(WritePipe<>).MakeGenericType(col.Codec.Type.RawType); return (WritePipe)Activator.CreateInstance(writePipeType, parent, cursor, col); @@ -109,7 +109,7 @@ private sealed class WritePipe : WritePipe private MemoryStream _currentStream; private T _value; - public WritePipe(BinarySaver parent, IRowCursor cursor, ColumnCodec col) + public WritePipe(BinarySaver parent, RowCursor cursor, ColumnCodec col) : base(parent) { var codec = col.Codec as IValueCodec; @@ -581,7 +581,7 @@ private void FetchWorker(BlockingCollection toCompress, IDataView data, HashSet activeSet = new HashSet(activeColumns.Select(col => col.SourceIndex)); long blockIndex = 0; int remainingInBlock = rowsPerBlock; - using (IRowCursor cursor = data.GetRowCursor(activeSet.Contains)) + using (RowCursor cursor = data.GetRowCursor(activeSet.Contains)) { WritePipe[] pipes = new WritePipe[activeColumns.Length]; for (int c = 0; c < activeColumns.Length; ++c) @@ -746,7 +746,7 @@ private int RowsPerBlockHeuristic(IDataView data, ColumnCodec[] actives) EstimatorDelegate del = EstimatorCore; MethodInfo methInfo = del.GetMethodInfo().GetGenericMethodDefinition(); - using (IRowCursor cursor = data.GetRowCursor(active.Contains, rand)) + using (RowCursor cursor = data.GetRowCursor(active.Contains, rand)) { object[] args = new object[] { cursor, null, null, null }; var writers = new IValueWriter[actives.Length]; @@ -776,10 +776,10 @@ private int RowsPerBlockHeuristic(IDataView data, ColumnCodec[] actives) } } - private delegate void EstimatorDelegate(IRowCursor cursor, ColumnCodec col, + private delegate void EstimatorDelegate(RowCursor cursor, ColumnCodec col, out Func fetchWriteEstimator, out IValueWriter writer); - private void EstimatorCore(IRowCursor cursor, ColumnCodec col, + private void 
EstimatorCore(RowCursor cursor, ColumnCodec col, out Func fetchWriteEstimator, out IValueWriter writer) { ValueGetter getter = cursor.GetGetter(col.SourceIndex); diff --git a/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs index e281cd0876..2ed8081bad 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/CompositeDataLoader.cs @@ -568,14 +568,14 @@ private static string GenerateTag(int index) public ITransposeSchema TransposeSchema { get; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); return View.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); @@ -583,7 +583,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, return View.GetRowCursorSet(out consolidator, predicate, n, rand); } - public ISlotCursor GetSlotCursor(int col) + public SlotCursor GetSlotCursor(int col) { _host.CheckParam(0 <= col && col < Schema.ColumnCount, nameof(col)); if (TransposeSchema?.GetSlotType(col) == null) diff --git a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs index 4efb11f0fe..5ad393f2d2 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs @@ -293,16 +293,16 @@ public void Save(ModelSaveContext ctx) return null; } - public IRowCursor GetRowCursor(Func needCol, Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { return new Cursor(_host, 
this, _files, needCol, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { consolidator = null; var cursor = new Cursor(_host, this, _files, needCol, rand); - return new IRowCursor[] { cursor }; + return new RowCursor[] { cursor }; } /// @@ -362,7 +362,7 @@ private IDataLoader CreateLoaderFromBytes(byte[] loaderBytes, IMultiStreamSource } } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private PartitionedFileLoader _parent; @@ -372,7 +372,7 @@ private sealed class Cursor : RootCursorBase, IRowCursor private Delegate[] _subGetters; // Cached getters of the sub-cursor. private ReadOnlyMemory[] _colValues; // Column values cached from the file path. - private IRowCursor _subCursor; // Sub cursor of the current file. + private RowCursor _subCursor; // Sub cursor of the current file. 
private IEnumerator _fileOrder; @@ -397,9 +397,9 @@ public Cursor(IChannelProvider provider, PartitionedFileLoader parent, IMultiStr public override long Batch => 0; - public Schema Schema => _parent.Schema; + public override Schema Schema => _parent.Schema; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); @@ -423,7 +423,7 @@ public override ValueGetter GetIdGetter() }; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.Count); return _active[col]; diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs index 225dfe1e98..e748967714 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs @@ -1364,7 +1364,7 @@ public BoundLoader(TextLoader reader, IMultiStreamSource files) public Schema Schema => _reader._bindings.AsSchema; - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -1372,7 +1372,7 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) return Cursor.Create(_reader, _files, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs index ac70417f6b..1a2f94da3b 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Runtime.Data { public sealed 
partial class TextLoader { - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { // Lines are divided into batches and processed a batch at a time. This enables // parallel parsing. @@ -133,7 +133,7 @@ private Cursor(TextLoader parent, ParseStats stats, bool[] active, LineReader re } } - public static IRowCursor Create(TextLoader parent, IMultiStreamSource files, bool[] active) + public static RowCursor Create(TextLoader parent, IMultiStreamSource files, bool[] active) { // Note that files is allowed to be empty. Contracts.AssertValue(parent); @@ -150,7 +150,7 @@ public static IRowCursor Create(TextLoader parent, IMultiStreamSource files, boo return new Cursor(parent, stats, active, reader, srcNeeded, cthd); } - public static IRowCursor[] CreateSet(out IRowCursorConsolidator consolidator, + public static RowCursor[] CreateSet(out IRowCursorConsolidator consolidator, TextLoader parent, IMultiStreamSource files, bool[] active, int n) { // Note that files is allowed to be empty. 
@@ -168,11 +168,11 @@ public static IRowCursor[] CreateSet(out IRowCursorConsolidator consolidator, if (cthd <= 1) { consolidator = null; - return new IRowCursor[1] { new Cursor(parent, stats, active, reader, srcNeeded, 1) }; + return new RowCursor[1] { new Cursor(parent, stats, active, reader, srcNeeded, 1) }; } consolidator = new Consolidator(cthd); - var cursors = new IRowCursor[cthd]; + var cursors = new RowCursor[cthd]; try { for (int i = 0; i < cursors.Length; i++) @@ -273,7 +273,7 @@ public static string GetEmbeddedArgs(IMultiStreamSource files) return sb.ToString(); } - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; public override void Dispose() { @@ -301,13 +301,13 @@ protected override bool MoveNextCore() return false; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.Infos.Length); return _active == null || _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); var fn = _getters[col] as ValueGetter; @@ -834,7 +834,7 @@ public Consolidator(int cthd) _cthd = cthd; } - public IRowCursor CreateCursor(IChannelProvider provider, IRowCursor[] inputs) + public RowCursor CreateCursor(IChannelProvider provider, RowCursor[] inputs) { Contracts.AssertValue(provider); int cthd = Interlocked.Exchange(ref _cthd, 0); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs index 9474605079..e55db4df55 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs @@ -47,7 +47,7 @@ private abstract class ValueWriter { public readonly int Source; - public static ValueWriter Create(IRowCursor cursor, int col, char sep) + public static ValueWriter Create(RowCursor cursor, int col, char sep) { Contracts.AssertValue(cursor); @@ -148,7 +148,7 @@ 
private sealed class VecValueWriter : ValueWriterBase private readonly VBuffer> _slotNames; private readonly int _slotCount; - public VecValueWriter(IRowCursor cursor, VectorType type, int source, char sep) + public VecValueWriter(RowCursor cursor, VectorType type, int source, char sep) : base(type.ItemType, source, sep) { _getSrc = cursor.GetGetter>(source); @@ -213,7 +213,7 @@ private sealed class ValueWriter : ValueWriterBase private T _src; private string _columnName; - public ValueWriter(IRowCursor cursor, PrimitiveType type, int source, char sep) + public ValueWriter(RowCursor cursor, PrimitiveType type, int source, char sep) : base(type, source, sep) { _getSrc = cursor.GetGetter(source); @@ -573,7 +573,7 @@ public State(TextSaver parent, TextWriter writer, ValueWriter[] pipes, bool hasH _mpslotichLim = new int[128]; } - public void Run(IRowCursor cursor, ref long count, out int minLen, out int maxLen) + public void Run(RowCursor cursor, ref long count, out int minLen, out int maxLen) { minLen = int.MaxValue; maxLen = 0; diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index 0954bae382..f57e08b8c7 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -668,7 +668,7 @@ public VectorType GetSlotType(int col) return _header.RowCount; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -677,16 +677,16 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) return new Cursor(this, predicate); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func 
predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); if (HasRowData) return _schemaEntry.GetView().GetRowCursorSet(out consolidator, predicate, n, rand); consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } - public ISlotCursor GetSlotCursor(int col) + public SlotCursor GetSlotCursor(int col) { _host.CheckParam(0 <= col && col < _header.ColumnCount, nameof(col)); var view = _entries[col].GetViewOrNull(); @@ -699,7 +699,7 @@ public ISlotCursor GetSlotCursor(int col) // We don't want the type error, if there is one, to be handled by the get-getter, because // at the point we've gotten the interior cursor, but not yet constructed the slot cursor. ColumnType cursorType = TransposeSchema.GetSlotType(col).ItemType; - IRowCursor inputCursor = view.GetRowCursor(c => true); + RowCursor inputCursor = view.GetRowCursor(c => true); try { return Utils.MarshalInvoke(GetSlotCursorCore, cursorType.RawType, inputCursor); @@ -714,41 +714,56 @@ public ISlotCursor GetSlotCursor(int col) } } - private ISlotCursor GetSlotCursorCore(IRowCursor inputCursor) + private SlotCursor GetSlotCursorCore(RowCursor inputCursor) { return new SlotCursor(this, inputCursor); } - private sealed class SlotCursor : SynchronizedCursorBase, ISlotCursor + private sealed class SlotCursor : SlotCursor { private readonly TransposeLoader _parent; private readonly ValueGetter> _getter; + private readonly RowCursor _rowCursor; - private IHost Host { get { return _parent._host; } } - - public SlotCursor(TransposeLoader parent, IRowCursor cursor) - : base(parent._host, cursor) + public SlotCursor(TransposeLoader parent, RowCursor cursor) + : base(parent._host) { _parent = parent; - Ch.Assert(cursor.Schema.ColumnCount == 1); - Ch.Assert(cursor.Schema.GetColumnType(0).RawType == typeof(VBuffer)); - _getter = Input.GetGetter>(0); - } + Ch.AssertValue(cursor); + Ch.Assert(cursor.Schema.Count == 
1); + Ch.Assert(cursor.Schema[0].Type.RawType == typeof(VBuffer)); + Ch.Assert(cursor.Schema[0].Type is VectorType); + _rowCursor = cursor; - public VectorType GetSlotType() - { - var type = Input.Schema.GetColumnType(0).AsVector; - Ch.AssertValue(type); - return type; + _getter = _rowCursor.GetGetter>(0); } - public ValueGetter> GetGetter() + public override VectorType GetSlotType() + => (VectorType)_rowCursor.Schema[0].Type; + + public override ValueGetter> GetGetter() { ValueGetter> getter = _getter as ValueGetter>; if (getter == null) throw Ch.Except("Invalid TValue: '{0}'", typeof(TValue)); return getter; } + + public override bool MoveNext() + { + return _rowCursor.MoveNext(); + } + + public override int SlotIndex + { + get + { + long pos = _rowCursor.Position; + Contracts.Assert(pos <= int.MaxValue); + return (int)pos; + } + } + } private Transposer EnsureAndGetTransposer(int col) @@ -777,16 +792,16 @@ private Transposer EnsureAndGetTransposer(int col) return _colTransposers[col]; } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly TransposeLoader _parent; private readonly int[] _actives; private readonly int[] _colToActivesIndex; - private readonly ICursor[] _transCursors; + private readonly SlotCursor[] _transCursors; private readonly Delegate[] _getters; private bool _disposed; - public Schema Schema => _parent.Schema; + public override Schema Schema => _parent.Schema; public override long Batch { get { return 0; } } @@ -802,7 +817,7 @@ public Cursor(TransposeLoader parent, Func pred) Ch.Assert(!_parent.HasRowData); Utils.BuildSubsetMaps(_parent._header.ColumnCount, pred, out _actives, out _colToActivesIndex); - _transCursors = new ICursor[_actives.Length]; + _transCursors = new SlotCursor[_actives.Length]; _getters = new Delegate[_actives.Length]; // The following will fill in both the _transCursors and _getters arrays. 
for (int i = 0; i < _actives.Length; ++i) @@ -841,7 +856,7 @@ private void InitOne(int col) var type = Schema.GetColumnType(col); Ch.Assert(typeof(T) == type.RawType); var trans = _parent.EnsureAndGetTransposer(col); - ISlotCursor cursor = trans.GetSlotCursor(0); + SlotCursor cursor = trans.GetSlotCursor(0); ValueGetter> getter = cursor.GetGetter(); VBuffer buff = default(VBuffer); ValueGetter oneGetter = @@ -862,7 +877,7 @@ private void InitVec(int col) Ch.Assert(type.IsVector); Ch.Assert(typeof(T) == type.ItemType.RawType); var trans = _parent.EnsureAndGetTransposer(col); - ISlotCursor cursor = trans.GetSlotCursor(0); + SlotCursor cursor = trans.GetSlotCursor(0); ValueGetter> getter = cursor.GetGetter(); int i = _colToActivesIndex[col]; _getters[i] = getter; @@ -892,25 +907,13 @@ protected override bool MoveNextCore() return more; } - protected override bool MoveManyCore(long count) - { - Ch.Assert(State != CursorState.Done); - bool more = Position < _parent._header.RowCount - count; - for (int i = 0; i < _transCursors.Length; ++i) - { - bool cMore = _transCursors[i].MoveMany(count); - Ch.Assert(cMore == more); - } - return more; - } - - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col <= _colToActivesIndex.Length, nameof(col)); return _colToActivesIndex[col] >= 0; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col <= _colToActivesIndex.Length, nameof(col)); Ch.CheckParam(IsColumnActive(col), nameof(col), "requested column not active"); diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index d20d832e88..e9a100bbef 100644 --- a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -146,7 +146,7 @@ private void CheckSchemaConsistency() return sum; } - public IRowCursor GetRowCursor(Func needCol, 
Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); if (rand == null || !_canShuffle) @@ -154,20 +154,20 @@ public IRowCursor GetRowCursor(Func needCol, Random rand = null) return new RandCursor(this, needCol, rand, _counts); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } - private abstract class CursorBase : RootCursorBase, IRowCursor + private abstract class CursorBase : RootCursorBase { protected readonly IDataView[] Sources; protected readonly Delegate[] Getters; public override long Batch => 0; - public Schema Schema { get; } + public sealed override Schema Schema { get; } public CursorBase(AppendRowsDataView parent) : base(parent._host) @@ -189,7 +189,7 @@ protected Delegate CreateGetter(int col) protected abstract ValueGetter CreateTypedGetter(int col); - public ValueGetter GetGetter(int col) + public sealed override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col), "The column must be active against the defined predicate."); if (!(Getters[col] is ValueGetter)) @@ -197,7 +197,7 @@ public ValueGetter GetGetter(int col) return Getters[col] as ValueGetter; } - public bool IsColumnActive(int col) + public sealed override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.ColumnCount, "Column index is out of range"); return Getters[col] != null; @@ -209,7 +209,7 @@ public bool IsColumnActive(int col) /// private sealed class Cursor : CursorBase { - private IRowCursor _currentCursor; + private RowCursor _currentCursor; private ValueGetter _currentIdGetter; private int _currentSourceIndex; @@ -299,7 +299,7 @@ public 
override void Dispose() /// private sealed class RandCursor : CursorBase { - private readonly IRowCursor[] _cursorSet; + private readonly RowCursor[] _cursorSet; private readonly MultinomialWithoutReplacementSampler _sampler; private readonly Random _rand; private int _currentSourceIndex; @@ -313,7 +313,7 @@ public RandCursor(AppendRowsDataView parent, Func needCol, Random ran _rand = rand; Ch.AssertValue(counts); Ch.Assert(Sources.Length == counts.Length); - _cursorSet = new IRowCursor[counts.Length]; + _cursorSet = new RowCursor[counts.Length]; for (int i = 0; i < counts.Length; i++) { Ch.Assert(counts[i] >= 0); @@ -374,7 +374,7 @@ public override void Dispose() if (State != CursorState.Done) { Ch.Dispose(); - foreach (IRowCursor c in _cursorSet) + foreach (RowCursor c in _cursorSet) c.Dispose(); base.Dispose(); } diff --git a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs index c11d49923c..38e0e1b5bf 100644 --- a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs +++ b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs @@ -226,29 +226,29 @@ public DataView(IHostEnvironment env, ArrayDataViewBuilder builder, int rowCount _rowCount = rowCount; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); - return new RowCursor(_host, this, predicate, rand); + return new Cursor(_host, this, predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { new RowCursor(_host, this, predicate, rand) }; + return new RowCursor[] { new Cursor(_host, this, predicate, 
rand) }; } - private sealed class RowCursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly DataView _view; private readonly BitArray _active; private readonly int[] _indices; - public Schema Schema => _view.Schema; + public override Schema Schema => _view.Schema; public override long Batch { @@ -256,7 +256,7 @@ public override long Batch get { return 0; } } - public RowCursor(IChannelProvider provider, DataView view, Func predicate, Random rand) + public Cursor(IChannelProvider provider, DataView view, Func predicate, Random rand) : base(provider) { Ch.AssertValue(view); @@ -298,13 +298,13 @@ public override ValueGetter GetIdGetter() } } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col & col < Schema.ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(0 <= col & col < Schema.ColumnCount); Ch.Check(_active[col], "column is not active"); diff --git a/src/Microsoft.ML.Data/DataView/CacheDataView.cs b/src/Microsoft.ML.Data/DataView/CacheDataView.cs index 6d670db5ef..9f2e0ab447 100644 --- a/src/Microsoft.ML.Data/DataView/CacheDataView.cs +++ b/src/Microsoft.ML.Data/DataView/CacheDataView.cs @@ -203,7 +203,7 @@ public int MapInputToCacheColumnIndex(int inputIndex) return _rowCount; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -235,7 +235,7 @@ private int[] GetPermutationOrNull(Random rand) return Utils.GetRandomPermutation(rand, (int)_rowCount); } - private IRowCursor GetRowCursorWaiterCore(TWaiter waiter, Func predicate, Random rand) + private RowCursor GetRowCursorWaiterCore(TWaiter waiter, Func predicate, Random rand) where TWaiter : struct, IWaiter { _host.AssertValue(predicate); @@ -247,7 +247,7 @@ private 
IRowCursor GetRowCursorWaiterCore(TWaiter waiter, Func.Create(waiter, perm)); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); @@ -258,7 +258,7 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, if (n <= 1) { consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } consolidator = new Consolidator(); @@ -273,13 +273,13 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, /// private sealed class Consolidator : IRowCursorConsolidator { - public IRowCursor CreateCursor(IChannelProvider provider, IRowCursor[] inputs) + public RowCursor CreateCursor(IChannelProvider provider, RowCursor[] inputs) { return DataViewUtils.ConsolidateGeneric(provider, inputs, _batchSize); } } - private IRowCursor[] GetRowCursorSetWaiterCore(TWaiter waiter, Func predicate, int n, Random rand) + private RowCursor[] GetRowCursorSetWaiterCore(TWaiter waiter, Func predicate, int n, Random rand) where TWaiter : struct, IWaiter { _host.AssertValue(predicate); @@ -287,7 +287,7 @@ private IRowCursor[] GetRowCursorSetWaiterCore(TWaiter waiter, Func(TWaiter waiter, Func(Func predicate, TIndex index) + private RowCursor CreateCursor(Func predicate, TIndex index) where TIndex : struct, IIndex { Contracts.AssertValue(predicate); return new RowCursor(this, predicate, index); } - public IRowSeeker GetSeeker(Func predicate) + public RowSeeker GetSeeker(Func predicate) { _host.CheckValue(predicate, nameof(predicate)); // The seeker needs to know the row count when it validates the row index to move to. 
@@ -320,11 +320,11 @@ public IRowSeeker GetSeeker(Func predicate) return GetSeeker(predicate, waiter); } - private IRowSeeker GetSeeker(Func predicate, TWaiter waiter) + private RowSeeker GetSeeker(Func predicate, TWaiter waiter) where TWaiter : struct, IWaiter { _host.AssertValue(predicate); - return new RowSeeker(this, predicate, waiter); + return new RowSeeker(new RowSeekerCore(this, predicate, waiter)); } /// @@ -339,7 +339,7 @@ private void KickoffFiller(int[] columns) _host.AssertValue(columns); HashSet taskColumns = null; - IRowCursor cursor; + RowCursor cursor; ColumnCache[] caches; OrderedWaiter waiter; lock (_cacheLock) @@ -390,7 +390,7 @@ private void KickoffFiller(int[] columns) /// The caches we must fill and, at the end of the cursor, freeze /// The waiter to increment as we cache each additional row /// - private void Filler(IRowCursor cursor, ColumnCache[] caches, OrderedWaiter waiter) + private void Filler(RowCursor cursor, ColumnCache[] caches, OrderedWaiter waiter) { _host.AssertValue(cursor); _host.AssertValue(caches); @@ -464,15 +464,15 @@ internal void Wait() } } - private sealed class RowCursor : RowCursorSeekerBase, IRowCursor + private sealed class RowCursor : RowCursorSeekerBase where TIndex : struct, IIndex { private CursorState _state; private readonly TIndex _index; - public CursorState State { get { return _state; } } + public override CursorState State => _state; - public long Batch { get { return _index.Batch; } } + public override long Batch => _index.Batch; public RowCursor(CacheDataView parent, Func predicate, TIndex index) : base(parent, predicate) @@ -481,17 +481,11 @@ public RowCursor(CacheDataView parent, Func predicate, TIndex index) _index = index; } - public ValueGetter GetIdGetter() - { - return _index.GetIdGetter(); - } + public override ValueGetter GetIdGetter() => _index.GetIdGetter(); - public ICursor GetRootCursor() - { - return this; - } + public override RowCursor GetRootCursor() => this; - public bool MoveNext() + 
public override bool MoveNext() { if (_state == CursorState.Done) { @@ -502,7 +496,7 @@ public bool MoveNext() Ch.Assert(_state == CursorState.NotStarted || _state == CursorState.Good); if (_index.MoveNext()) { - Position++; + PositionCore++; Ch.Assert(Position >= 0); _state = CursorState.Good; return true; @@ -513,7 +507,7 @@ public bool MoveNext() return false; } - public bool MoveMany(long count) + public override bool MoveMany(long count) { // Note: If we decide to allow count == 0, then we need to special case // that MoveNext() has never been called. It's not entirely clear what the return @@ -529,7 +523,7 @@ public bool MoveMany(long count) Ch.Assert(_state == CursorState.NotStarted || _state == CursorState.Good); if (_index.MoveMany(count)) { - Position += count; + PositionCore += count; _state = CursorState.Good; Ch.Assert(Position >= 0); return true; @@ -556,14 +550,41 @@ protected override ValueGetter CreateGetterDelegateCore(ColumnCa } } - private sealed class RowSeeker : RowCursorSeekerBase, IRowSeeker - where TWaiter : struct, IWaiter + private sealed class RowSeeker : RowSeeker + where TWaiter : struct, IWaiter + { + private readonly RowSeekerCore _internal; + + public RowSeeker(RowSeekerCore toWrap) + { + Contracts.AssertValue(toWrap); + _internal = toWrap; + } + + public override long Position => _internal.Position; + public override long Batch => _internal.Batch; + public override Schema Schema => _internal.Schema; + + public override void Dispose() + { + } + + public override ValueGetter GetGetter(int col) => _internal.GetGetter(col); + public override ValueGetter GetIdGetter() => _internal.GetIdGetter(); + public override bool IsColumnActive(int col) => _internal.IsColumnActive(col); + public override bool MoveTo(long rowIndex) => _internal.MoveTo(rowIndex); + } + + private sealed class RowSeekerCore : RowCursorSeekerBase + where TWaiter : struct, IWaiter { private readonly TWaiter _waiter; - public long Batch { get { return 0; } } + public 
override long Batch => 0; - public ValueGetter GetIdGetter() + public override CursorState State => throw new NotImplementedException(); + + public override ValueGetter GetIdGetter() { return (ref UInt128 val) => @@ -573,7 +594,7 @@ public ValueGetter GetIdGetter() }; } - public RowSeeker(CacheDataView parent, Func predicate, TWaiter waiter) + public RowSeekerCore(CacheDataView parent, Func predicate, TWaiter waiter) : base(parent, predicate) { _waiter = waiter; @@ -585,11 +606,11 @@ public bool MoveTo(long rowIndex) { // If requested row index is out of range, the row seeker // returns false and sets its position to -1. - Position = -1; + PositionCore = -1; return false; } - Position = rowIndex; + PositionCore = rowIndex; return true; } @@ -601,6 +622,10 @@ protected override ValueGetter CreateGetterDelegateCore(ColumnCa { return (ref TValue value) => cache.Fetch((int)Position, ref value); } + + public override bool MoveNext() => throw Ch.ExceptNotSupp(); + public override bool MoveMany(long count) => throw Ch.ExceptNotSupp(); + public override RowCursor GetRootCursor() => throw Ch.ExceptNotSupp(); } private interface IWaiter @@ -675,7 +700,7 @@ private sealed class WaiterWaiter : IWaiter /// /// If this is true, then a could be used instead. /// - public bool IsTrivial { get { return _waiters.Length == 0; } } + public bool IsTrivial => _waiters.Length == 0; private WaiterWaiter(CacheDataView parent, Func pred) { @@ -722,7 +747,7 @@ public static Wrapper Create(CacheDataView parent, Func pred) { private readonly WaiterWaiter _waiter; - public bool IsTrivial { get { return _waiter.IsTrivial; } } + public bool IsTrivial => _waiter.IsTrivial; public Wrapper(WaiterWaiter waiter) { @@ -730,7 +755,7 @@ public Wrapper(WaiterWaiter waiter) _waiter = waiter; } - public bool Wait(long pos) { return _waiter.Wait(pos); } + public bool Wait(long pos) => _waiter.Wait(pos); } } @@ -758,7 +783,7 @@ private interface IIndex /// /// Moves to the next index. 
Once this or has returned /// false, it should never be called again. (This in constrast to public - /// objects, whose move methods are robust to that usage.) + /// objects, whose move methods are robust to that usage.) /// /// Whether the next index is available. bool MoveNext(); @@ -842,11 +867,11 @@ public Wrapper(SequenceIndex index) _index = index; } - public long Batch { get { return _index.Batch; } } - public long GetIndex() { return _index.GetIndex(); } - public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } - public bool MoveNext() { return _index.MoveNext(); } - public bool MoveMany(long count) { return _index.MoveMany(count); } + public long Batch => _index.Batch; + public long GetIndex() => _index.GetIndex(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public bool MoveNext() => _index.MoveNext(); + public bool MoveMany(long count) => _index.MoveMany(count); } } @@ -933,11 +958,11 @@ public Wrapper(RandomIndex index) _index = index; } - public long Batch { get { return _index.Batch; } } - public long GetIndex() { return _index.GetIndex(); } - public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } - public bool MoveNext() { return _index.MoveNext(); } - public bool MoveMany(long count) { return _index.MoveMany(count); } + public long Batch => _index.Batch; + public long GetIndex() => _index.GetIndex(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public bool MoveNext() => _index.MoveNext(); + public bool MoveMany(long count) => _index.MoveMany(count); } } @@ -1103,11 +1128,11 @@ public Wrapper(BlockSequenceIndex index) _index = index; } - public long Batch { get { return _index.Batch; } } - public long GetIndex() { return _index.GetIndex(); } - public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } - public bool MoveNext() { return _index.MoveNext(); } - public bool MoveMany(long count) { return _index.MoveMany(count); } + public long Batch => _index.Batch; + public long GetIndex() => 
_index.GetIndex(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public bool MoveNext() => _index.MoveNext(); + public bool MoveMany(long count) => _index.MoveMany(count); } } @@ -1211,34 +1236,35 @@ public Wrapper(BlockRandomIndex index) _index = index; } - public long Batch { get { return _index.Batch; } } - public long GetIndex() { return _index.GetIndex(); } - public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } - public bool MoveNext() { return _index.MoveNext(); } - public bool MoveMany(long count) { return _index.MoveMany(count); } + public long Batch => _index.Batch; + public long GetIndex() => _index.GetIndex(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public bool MoveNext() => _index.MoveNext(); + public bool MoveMany(long count) => _index.MoveMany(count); } } - private abstract class RowCursorSeekerBase : IDisposable + private abstract class RowCursorSeekerBase : RowCursor { protected readonly CacheDataView Parent; protected readonly IChannel Ch; + protected long PositionCore; private readonly int[] _colToActivesIndex; private readonly Delegate[] _getters; private bool _disposed; - public Schema Schema => Parent.Schema; + public sealed override Schema Schema => Parent.Schema; - public long Position { get; protected set; } + public sealed override long Position => PositionCore; protected RowCursorSeekerBase(CacheDataView parent, Func predicate) { Contracts.AssertValue(parent); Parent = parent; Ch = parent._host.Start("Cursor"); - Position = -1; + PositionCore = -1; // Set up the mapping from active columns. 
int colLim = Schema.ColumnCount; @@ -1259,24 +1285,24 @@ protected RowCursorSeekerBase(CacheDataView parent, Func predicate) } } - public bool IsColumnActive(int col) + public sealed override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); return _colToActivesIndex[col] >= 0; } - public void Dispose() + public sealed override void Dispose() { if (!_disposed) { DisposeCore(); - Position = -1; + PositionCore = -1; Ch.Dispose(); _disposed = true; } } - public ValueGetter GetGetter(int col) + public sealed override ValueGetter GetGetter(int col) { if (!IsColumnActive(col)) throw Ch.Except("Column #{0} is requested but not active in the cursor", col); @@ -1348,7 +1374,7 @@ protected ColumnCache(IExceptionContext ctx, OrderedWaiter waiter) /// The column of the cursor we are wrapping. /// The waiter for the filler associated with this column /// - public static ColumnCache Create(CacheDataView parent, IRowCursor input, int srcCol, OrderedWaiter waiter) + public static ColumnCache Create(CacheDataView parent, RowCursor input, int srcCol, OrderedWaiter waiter) { Contracts.AssertValue(parent); var host = parent._host; @@ -1368,7 +1394,7 @@ public static ColumnCache Create(CacheDataView parent, IRowCursor input, int src if (_pipeConstructorTypes == null) { Interlocked.CompareExchange(ref _pipeConstructorTypes, - new Type[] { typeof(CacheDataView), typeof(IRowCursor), typeof(int), typeof(OrderedWaiter) }, null); + new Type[] { typeof(CacheDataView), typeof(RowCursor), typeof(int), typeof(OrderedWaiter) }, null); } var constructor = pipeType.GetConstructor(_pipeConstructorTypes); return (ColumnCache)constructor.Invoke(new object[] { parent, input, srcCol, waiter }); @@ -1416,7 +1442,7 @@ private sealed class ImplVec : ColumnCache> // Temporary working reusable storage for caching the source data. 
private VBuffer _temp; - public ImplVec(CacheDataView parent, IRowCursor input, int srcCol, OrderedWaiter waiter) + public ImplVec(CacheDataView parent, RowCursor input, int srcCol, OrderedWaiter waiter) : base(parent, input, srcCol, waiter) { var type = input.Schema.GetColumnType(srcCol); @@ -1499,7 +1525,7 @@ private sealed class ImplOne : ColumnCache private T[] _values; private ValueGetter _getter; - public ImplOne(CacheDataView parent, IRowCursor input, int srcCol, OrderedWaiter waiter) + public ImplOne(CacheDataView parent, RowCursor input, int srcCol, OrderedWaiter waiter) : base(parent, input, srcCol, waiter) { _getter = input.GetGetter(srcCol); @@ -1534,7 +1560,7 @@ public override void Freeze() private abstract class ColumnCache : ColumnCache { - public ColumnCache(CacheDataView parent, IRowCursor input, int srcCol, OrderedWaiter waiter) + public ColumnCache(CacheDataView parent, RowCursor input, int srcCol, OrderedWaiter waiter) : base(parent._host, waiter) { Contracts.AssertValue(input); diff --git a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs index eb319cb7bc..326a750a26 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs @@ -43,7 +43,7 @@ public Func GetDependencies(Func predicate) return toReturn; } - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { Contracts.CheckValue(input, nameof(input)); Contracts.CheckValue(active, nameof(active)); @@ -73,7 +73,7 @@ public IRow GetRow(IRow input, Func active, out Action disposer) for (int i = deps.Length - 1; i >= 1; --i) deps[i - 1] = InnerMappers[i].GetDependencies(deps[i]); - IRow result = input; + Row result = input; for (int i = 0; i < InnerMappers.Length; ++i) { result = InnerMappers[i].GetRow(result, deps[i], out var localDisp); @@ -90,12 +90,12 @@ public IRow 
GetRow(IRow input, Func active, out Action disposer) return result; } - private sealed class SubsetActive : IRow + private sealed class SubsetActive : Row { - private readonly IRow _row; + private readonly Row _row; private Func _pred; - public SubsetActive(IRow row, Func pred) + public SubsetActive(Row row, Func pred) { Contracts.AssertValue(row); Contracts.AssertValue(pred); @@ -103,12 +103,12 @@ public SubsetActive(IRow row, Func pred) _pred = pred; } - public Schema Schema => _row.Schema; - public long Position => _row.Position; - public long Batch => _row.Batch; - public ValueGetter GetGetter(int col) => _row.GetGetter(col); - public ValueGetter GetIdGetter() => _row.GetIdGetter(); - public bool IsColumnActive(int col) => _pred(col); + public override Schema Schema => _row.Schema; + public override long Position => _row.Position; + public override long Batch => _row.Batch; + public override ValueGetter GetGetter(int col) => _row.GetGetter(col); + public override ValueGetter GetIdGetter() => _row.GetIdGetter(); + public override bool IsColumnActive(int col) => _pred(col); } } } diff --git a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs index 90aedca48a..466da6b098 100644 --- a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs +++ b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs @@ -28,14 +28,14 @@ public EmptyDataView(IHostEnvironment env, Schema schema) public long? 
GetRowCount() => 0; - public IRowCursor GetRowCursor(Func needCol, Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); return new Cursor(_host, Schema, needCol); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); @@ -43,11 +43,11 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun return new[] { new Cursor(_host, Schema, needCol) }; } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly bool[] _active; - public Schema Schema { get; } + public override Schema Schema { get; } public override long Batch => 0; public Cursor(IChannelProvider provider, Schema schema, Func needCol) @@ -71,9 +71,9 @@ public override ValueGetter GetIdGetter() protected override bool MoveNextCore() => false; - public bool IsColumnActive(int col) => 0 <= col && col < _active.Length && _active[col]; + public override bool IsColumnActive(int col) => 0 <= col && col < _active.Length && _active[col]; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col), "Can't get getter for inactive column"); return diff --git a/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs b/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs index 817ba98ed1..e5b5ae1654 100644 --- a/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs +++ b/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs @@ -150,7 +150,7 @@ protected override ColumnType GetColumnTypeCore(int iinfo) return _typeDst; } - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action 
disposer) + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValueOrNull(ch); Host.AssertValue(input); diff --git a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs index 0cec2a7a58..92e39caede 100644 --- a/src/Microsoft.ML.Data/DataView/LambdaFilter.cs +++ b/src/Microsoft.ML.Data/DataView/LambdaFilter.cs @@ -106,7 +106,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -114,10 +114,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random bool[] active; Func inputPred = GetActive(predicate, out active); var input = Source.GetRowCursor(inputPred, rand); - return new RowCursor(this, input, active); + return new Cursor(this, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -129,9 +129,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid Host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(this, inputs[i], active); + cursors[i] = new Cursor(this, inputs[i], active); return cursors; } @@ -147,13 +147,13 @@ private Func GetActive(Func predicate, out bool[] active) } // REVIEW: Should this cache the source value like MissingValueFilter does? 
- private sealed class RowCursor : LinkedRowFilterCursorBase + private sealed class Cursor : LinkedRowFilterCursorBase { private readonly ValueGetter _getSrc; private readonly InPredicate _pred; private T1 _src; - public RowCursor(Impl parent, IRowCursor input, bool[] active) + public Cursor(Impl parent, RowCursor input, bool[] active) : base(parent.Host, input, parent.OutputSchema, active) { _getSrc = Input.GetGetter(parent._colSrc); diff --git a/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs b/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs index 7d93533625..25613791d0 100644 --- a/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs +++ b/src/Microsoft.ML.Data/DataView/OpaqueDataView.cs @@ -27,12 +27,12 @@ public OpaqueDataView(IDataView source) return _source.GetRowCount(); } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { return _source.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { return _source.GetRowCursorSet(out consolidator, predicate, n, rand); diff --git a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs index d5ccb2d063..a4839d710b 100644 --- a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs +++ b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs @@ -38,7 +38,7 @@ public interface IRowMapper : ICanSaveModel /// array should be equal to the number of columns added by the IRowMapper. It should contain the getter for the /// i'th output column if activeOutput(i) is true, and null otherwise. 
/// - Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer); + Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer); /// /// Returns information about the output columns, including their name, type and any metadata information. @@ -178,14 +178,14 @@ private Func GetActiveOutputColumns(bool[] active) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Func predicateInput; var active = GetActive(predicate, out predicateInput); - return new RowCursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); + return new Cursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -200,9 +200,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); Host.AssertNonEmpty(inputs); - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, inputs[i], this, active); + cursors[i] = new Cursor(Host, inputs[i], this, active); return cursors; } @@ -235,7 +235,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => Source.Schema; - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); @@ -251,7 +251,7 @@ public IRow GetRow(IRow input, Func active, out Action 
disposer) var pred = GetActiveOutputColumns(activeArr); var getters = _mapper.CreateGetters(input, pred, out disp); disposer += disp; - return new Row(input, this, OutputSchema, getters); + return new RowImpl(input, this, OutputSchema, getters); } } @@ -285,20 +285,20 @@ public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) } } - private sealed class Row : IRow + private sealed class RowImpl : Row { - private readonly IRow _input; + private readonly Row _input; private readonly Delegate[] _getters; private readonly RowToRowMapperTransform _parent; - public long Batch { get { return _input.Batch; } } + public override long Batch => _input.Batch; - public long Position { get { return _input.Position; } } + public override long Position => _input.Position; - public Schema Schema { get; } + public override Schema Schema { get; } - public Row(IRow input, RowToRowMapperTransform parent, Schema schema, Delegate[] getters) + public RowImpl(Row input, RowToRowMapperTransform parent, Schema schema, Delegate[] getters) { _input = input; _parent = parent; @@ -306,7 +306,7 @@ public Row(IRow input, RowToRowMapperTransform parent, Schema schema, Delegate[] _getters = getters; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); @@ -320,9 +320,9 @@ public ValueGetter GetGetter(int col) return fn; } - public ValueGetter GetIdGetter() => _input.GetIdGetter(); + public override ValueGetter GetIdGetter() => _input.GetIdGetter(); - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); @@ -332,16 +332,16 @@ public bool IsColumnActive(int col) } } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Delegate[] _getters; private readonly bool[] _active; private 
readonly ColumnBindings _bindings; private readonly Action _disposer; - public Schema Schema => _bindings.Schema; + public override Schema Schema => _bindings.Schema; - public RowCursor(IChannelProvider provider, IRowCursor input, RowToRowMapperTransform parent, bool[] active) + public Cursor(IChannelProvider provider, RowCursor input, RowToRowMapperTransform parent, bool[] active) : base(provider, input) { var pred = parent.GetActiveOutputColumns(active); @@ -350,13 +350,13 @@ public RowCursor(IChannelProvider provider, IRowCursor input, RowToRowMapperTran _bindings = parent._bindings; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.Schema.ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Data/DataView/SimpleRow.cs b/src/Microsoft.ML.Data/DataView/SimpleRow.cs index 7cc8ec7059..55f67e6de2 100644 --- a/src/Microsoft.ML.Data/DataView/SimpleRow.cs +++ b/src/Microsoft.ML.Data/DataView/SimpleRow.cs @@ -11,41 +11,40 @@ namespace Microsoft.ML.Runtime.Data { /// - /// An implementation of that gets its , , - /// and from an input row. The constructor requires a schema and array of getter + /// An implementation of that gets its , , + /// and from an input row. The constructor requires a schema and array of getter /// delegates. A null delegate indicates an inactive column. The delegates are assumed to be of the appropriate type /// (this does not validate the type). /// REVIEW: Should this validate that the delegates are of the appropriate type? It wouldn't be difficult /// to do so. 
/// - public sealed class SimpleRow : IRow + public sealed class SimpleRow : Row { - private readonly Schema _schema; - private readonly IRow _input; + private readonly Row _input; private readonly Delegate[] _getters; - public Schema Schema { get { return _schema; } } + public override Schema Schema { get; } - public long Position { get { return _input.Position; } } + public override long Position => _input.Position; - public long Batch { get { return _input.Batch; } } + public override long Batch => _input.Batch; - public SimpleRow(Schema schema, IRow input, Delegate[] getters) + public SimpleRow(Schema schema, Row input, Delegate[] getters) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(input, nameof(input)); Contracts.Check(Utils.Size(getters) == schema.ColumnCount); - _schema = schema; + Schema = schema; _input = input; _getters = getters ?? new Delegate[0]; } - public ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _input.GetIdGetter(); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Contracts.CheckParam(0 <= col && col < _getters.Length, nameof(col), "Invalid col value in GetGetter"); Contracts.Check(IsColumnActive(col)); @@ -55,7 +54,7 @@ public ValueGetter GetGetter(int col) return fn; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Contracts.Check(0 <= col && col < _getters.Length); return _getters[col] != null; diff --git a/src/Microsoft.ML.Data/DataView/Transposer.cs b/src/Microsoft.ML.Data/DataView/Transposer.cs index 2befe3a726..e6ddcd57b5 100644 --- a/src/Microsoft.ML.Data/DataView/Transposer.cs +++ b/src/Microsoft.ML.Data/DataView/Transposer.cs @@ -229,7 +229,7 @@ private static int[] CheckIndices(IHost host, IDataView view, int[] columns) return columns; } - public ISlotCursor GetSlotCursor(int col) + public SlotCursor GetSlotCursor(int col) { _host.CheckParam(0 <= col && col < _tschema.ColumnCount, 
nameof(col)); if (_inputToTransposed[col] == -1) @@ -249,7 +249,7 @@ public ISlotCursor GetSlotCursor(int col) return Utils.MarshalInvoke(GetSlotCursorCore, type, col); } - private ISlotCursor GetSlotCursorCore(int col) + private SlotCursor GetSlotCursorCore(int col) { if (_tschema.GetColumnType(col).IsVector) return new SlotCursorVec(this, col); @@ -265,12 +265,12 @@ private ISlotCursor GetSlotCursorCore(int col) public bool CanShuffle { get { return _view.CanShuffle; } } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { return _view.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { return _view.GetRowCursorSet(out consolidator, predicate, n, rand); } @@ -357,14 +357,12 @@ public VectorType GetSlotType(int col) } } - private abstract class SlotCursor : RootCursorBase, ISlotCursor + private abstract class SlotCursor : SlotCursor.RootSlotCursor { private readonly Transposer _parent; private readonly int _col; private ValueGetter> _getter; - public override long Batch { get { return 0; } } - protected SlotCursor(Transposer parent, int col) : base(parent._host) { @@ -373,17 +371,7 @@ protected SlotCursor(Transposer parent, int col) _col = col; } - public override ValueGetter GetIdGetter() - { - return - (ref UInt128 val) => - { - Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); - }; - } - - public ValueGetter> GetGetter() + public override ValueGetter> GetGetter() { if (_getter == null) _getter = GetGetterCore(); @@ -393,7 +381,7 @@ public ValueGetter> GetGetter() return getter; } - public VectorType GetSlotType() + public override VectorType GetSlotType() { return 
_parent.TransposeSchema.GetSlotType(_col); } @@ -406,6 +394,7 @@ private sealed class SlotCursorOne : SlotCursor private readonly IDataView _view; private readonly int _col; private readonly int _len; + private bool _moved; public SlotCursorOne(Transposer parent, int col) : base(parent, col) @@ -435,7 +424,7 @@ public SlotCursorOne(Transposer parent, int col) protected override bool MoveNextCore() { // We only can move next on one slot, since this is a scalar column. - return State == CursorState.NotStarted; + return _moved = !_moved; } protected override ValueGetter> GetGetterCore() @@ -577,7 +566,7 @@ public SlotCursorVec(Transposer parent, int col) /// private void EnsureValid() { - Ch.Check(State == CursorState.Good, "Cursor is not in good state, cannot get values"); + Ch.Check(IsGood, "Cursor is not in good state, cannot get values"); Ch.Assert(_slotCurr >= 0); if (_colStored == _colCurr) return; @@ -867,7 +856,7 @@ private void OutputColumnToSplitterIndices(int col, out int splitInd, out int sp splitCol = _colToSplitCol[col]; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); bool[] activeSplitters; @@ -875,7 +864,7 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) return new Cursor(_host, this, _input.GetRowCursor(srcPred, rand), predicate, activeSplitters); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -1000,7 +989,7 @@ public void GetMetadata(string kind, int col, ref TValue value) /// There is one instance of these per column, implementing the possible splitting /// of one column from a into multiple columns. 
The instance /// describes the resulting split columns through its implementation of - /// , and then can be bound to an to provide + /// , and then can be bound to an to provide /// that splitting functionality. /// private abstract class Splitter : NoMetadataSchema @@ -1059,10 +1048,10 @@ public static Splitter Create(IDataView view, int col) } /// - /// Given an input , create the containing the split + /// Given an input , create the containing the split /// version of the columns. /// - public abstract IRow Bind(IRow row, Func pred); + public abstract Row Bind(Row row, Func pred); private static Splitter CreateCore(IDataView view, int col) { @@ -1097,17 +1086,17 @@ public override string GetColumnName(int col) } #endregion - private abstract class RowBase : IRow + private abstract class RowBase : Row where TSplitter : Splitter { protected readonly TSplitter Parent; - protected readonly IRow Input; + protected readonly Row Input; - public Schema Schema => Parent.AsSchema; - public long Position => Input.Position; - public long Batch => Input.Batch; + public sealed override Schema Schema => Parent.AsSchema; + public sealed override long Position => Input.Position; + public sealed override long Batch => Input.Batch; - public RowBase(TSplitter parent, IRow input) + public RowBase(TSplitter parent, Row input) { Contracts.AssertValue(parent); Contracts.AssertValue(input); @@ -1116,14 +1105,10 @@ public RowBase(TSplitter parent, IRow input) Input = input; } - public ValueGetter GetIdGetter() + public sealed override ValueGetter GetIdGetter() { return Input.GetIdGetter(); } - - public abstract bool IsColumnActive(int col); - - public abstract ValueGetter GetGetter(int col); } /// @@ -1150,20 +1135,20 @@ public override ColumnType GetColumnType(int col) return _view.Schema.GetColumnType(SrcCol); } - public override IRow Bind(IRow row, Func pred) + public override Row Bind(Row row, Func pred) { Contracts.AssertValue(row); Contracts.Assert(row.Schema == _view.Schema); 
Contracts.AssertValue(pred); Contracts.Assert(row.IsColumnActive(SrcCol)); - return new Row(this, row, pred(0)); + return new RowImpl(this, row, pred(0)); } - private sealed class Row : RowBase> + private sealed class RowImpl : RowBase> { private readonly bool _isActive; - public Row(NoSplitter parent, IRow input, bool isActive) + public RowImpl(NoSplitter parent, Row input, bool isActive) : base(parent, input) { Contracts.Assert(Parent.ColumnCount == 1); @@ -1236,16 +1221,16 @@ public override ColumnType GetColumnType(int col) return _types[col]; } - public override IRow Bind(IRow row, Func pred) + public override Row Bind(Row row, Func pred) { Contracts.AssertValue(row); Contracts.Assert(row.Schema == _view.Schema); Contracts.AssertValue(pred); Contracts.Assert(row.IsColumnActive(SrcCol)); - return new Row(this, row, pred); + return new RowImpl(this, row, pred); } - private sealed class Row : RowBase> + private sealed class RowImpl : RowBase> { // Counter of the last valid input, updated by EnsureValid. private long _lastValid; @@ -1260,7 +1245,7 @@ private sealed class Row : RowBase> // Getters. private readonly ValueGetter>[] _getters; - public Row(ColumnSplitter parent, IRow input, Func pred) + public RowImpl(ColumnSplitter parent, Row input, Func pred) : base(parent, input) { _inputGetter = input.GetGetter>(Parent.SrcCol); @@ -1367,17 +1352,17 @@ private void EnsureValid() } /// - /// The cursor implementation creates the s using , + /// The cursor implementation creates the s using , /// then collates the results from those rows as effectively one big row. 
/// - private sealed class Cursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly DataViewSlicer _slicer; - private readonly IRow[] _sliceRows; + private readonly Row[] _sliceRows; - public Schema Schema => _slicer.Schema; + public override Schema Schema => _slicer.Schema; - public Cursor(IChannelProvider provider, DataViewSlicer slicer, IRowCursor input, Func pred, bool[] activeSplitters) + public Cursor(IChannelProvider provider, DataViewSlicer slicer, RowCursor input, Func pred, bool[] activeSplitters) : base(provider, input) { Ch.AssertValue(slicer); @@ -1385,7 +1370,7 @@ public Cursor(IChannelProvider provider, DataViewSlicer slicer, IRowCursor input Ch.Assert(Utils.Size(activeSplitters) == slicer._splitters.Length); _slicer = slicer; - _sliceRows = new IRow[_slicer._splitters.Length]; + _sliceRows = new Row[_slicer._splitters.Length]; var activeSrc = new bool[slicer._splitters.Length]; var activeSrcSet = new HashSet(); int offset = 0; @@ -1403,7 +1388,7 @@ public Cursor(IChannelProvider provider, DataViewSlicer slicer, IRowCursor input } } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.ColumnCount, "col"); int splitInd; @@ -1412,7 +1397,7 @@ public bool IsColumnActive(int col) return _sliceRows[splitInd] != null && _sliceRows[splitInd].IsColumnActive(splitCol); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); int splitInd; @@ -1446,7 +1431,7 @@ public static void GetSingleSlotValue(this ITransposeDataView view, int col, } /// - /// The is parameterized by a type that becomes the + /// The is parameterized by a type that becomes the /// type parameter for a , and this is generally preferable and more /// sensible but for various reasons it's often a lot simpler to have a get-getter be over /// the actual type returned by the getter, that is, 
parameterize this by the actual @@ -1457,7 +1442,7 @@ public static void GetSingleSlotValue(this ITransposeDataView view, int col, /// The cursor to get the getter for /// The exception contxt /// The value getter - public static ValueGetter GetGetterWithVectorType(this ISlotCursor cursor, IExceptionContext ctx = null) + public static ValueGetter GetGetterWithVectorType(this SlotCursor cursor, IExceptionContext ctx = null) { Contracts.CheckValueOrNull(ctx); ctx.CheckValue(cursor, nameof(cursor)); @@ -1478,15 +1463,15 @@ public static ValueGetter GetGetterWithVectorType(this ISlotCurs /// /// Given a slot cursor, construct a single-column equivalent row cursor, with the single column /// active and having the same type. This is useful to exploit the many utility methods that exist - /// to handle and but that know nothing about - /// , without having to rewrite all of them. This is, however, rather + /// to handle and but that know nothing about + /// , without having to rewrite all of them. This is, however, rather /// something of a hack; whenever possible or reasonable the slot cursor should be used directly. /// The name of this column is always "Waffles". 
/// /// The channel provider used in creating the wrapping row cursor /// The slot cursor to wrap /// A row cursor with a single active column with the same type as the slot type - public static IRowCursor GetRowCursorShim(IChannelProvider provider, ISlotCursor cursor) + public static RowCursor GetRowCursorShim(IChannelProvider provider, SlotCursor cursor) { Contracts.CheckValue(provider, nameof(provider)); provider.CheckValue(cursor, nameof(cursor)); @@ -1494,7 +1479,7 @@ public static IRowCursor GetRowCursorShim(IChannelProvider provider, ISlotCursor return Utils.MarshalInvoke(GetRowCursorShimCore, cursor.GetSlotType().ItemType.RawType, provider, cursor); } - private static IRowCursor GetRowCursorShimCore(IChannelProvider provider, ISlotCursor cursor) + private static RowCursor GetRowCursorShimCore(IChannelProvider provider, SlotCursor cursor) { return new SlotRowCursorShim(provider, cursor); } @@ -1508,11 +1493,10 @@ public sealed class SlotDataView : IDataView private readonly ITransposeDataView _data; private readonly int _col; private readonly ColumnType _type; - private readonly SchemaImpl _schemaImpl; - public Schema Schema => _schemaImpl.AsSchema; + public Schema Schema { get; } - public bool CanShuffle { get { return false; } } + public bool CanShuffle => false; public SlotDataView(IHostEnvironment env, ITransposeDataView data, int col) { @@ -1525,7 +1509,10 @@ public SlotDataView(IHostEnvironment env, ITransposeDataView data, int col) _data = data; _col = col; - _schemaImpl = new SchemaImpl(this); + + var builder = new SchemaBuilder(); + builder.AddColumn(_data.Schema[_col].Name, _type, null); + Schema = builder.GetSchema(); } public long? 
GetRowCount() @@ -1536,113 +1523,50 @@ public SlotDataView(IHostEnvironment env, ITransposeDataView data, int col) return valueCount; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); return Utils.MarshalInvoke(GetRowCursor, _type.ItemType.RawType, predicate(0)); } - private IRowCursor GetRowCursor(bool active) + private RowCursor GetRowCursor(bool active) { return new Cursor(this, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } - private sealed class SchemaImpl : ISchema - { - private readonly SlotDataView _parent; - - private IHost Host { get { return _parent._host; } } - - public Schema AsSchema { get; } - - public int ColumnCount { get { return 1; } } - - public SchemaImpl(SlotDataView parent) - { - Contracts.AssertValue(parent); - _parent = parent; - AsSchema = Schema.Create(this); - } - - public ColumnType GetColumnType(int col) - { - Host.CheckParam(col == 0, nameof(col)); - return _parent._type; - } - - public string GetColumnName(int col) - { - Host.CheckParam(col == 0, nameof(col)); - // There is no real need for this to have the real name as the internal IDV - // substream does not have its name accessed, but we'll save it just the same. - // I am tempted though to just have this thing always claim its name is 'Pancakes'. 
- return _parent._data.Schema.GetColumnName(_parent._col); - } - - public bool TryGetColumnIndex(string name, out int col) - { - if (name == GetColumnName(0)) - { - col = 0; - return true; - } - col = -1; - return false; - } - - // No metadata. The top level IDV will hold the schema information, including metadata. - // This per-column dataview schema information is just minimally functional. - - public IEnumerable> GetMetadataTypes(int col) - { - Host.CheckParam(col == 0, nameof(col)); - return Enumerable.Empty>(); - } - - public ColumnType GetMetadataTypeOrNull(string kind, int col) - { - Host.CheckNonEmpty(kind, nameof(kind)); - Host.CheckParam(col == 0, nameof(col)); - return null; - } - - public void GetMetadata(string kind, int col, ref TValue value) - { - Host.CheckNonEmpty(kind, nameof(kind)); - Host.CheckParam(col == 0, nameof(col)); - throw MetadataUtils.ExceptGetMetadata(); - } - } - - private sealed class Cursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly SlotDataView _parent; + private readonly SlotCursor _slotCursor; private readonly Delegate _getter; - public Schema Schema => _parent.Schema; + public override Schema Schema => _parent.Schema; + + public override long Batch => 0; public Cursor(SlotDataView parent, bool active) - : base(parent._host, parent._data.GetSlotCursor(parent._col)) + : base(parent._host) { _parent = parent; + _slotCursor = _parent._data.GetSlotCursor(parent._col); if (active) - _getter = Input.GetGetter(); + _getter = _slotCursor.GetGetter(); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(col == 0, nameof(col)); return _getter != null; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(col == 0, nameof(col)); Ch.CheckParam(_getter != null, nameof(col), "requested column not active"); @@ -1652,98 +1576,61 @@ public ValueGetter GetGetter(int col) throw 
Ch.Except("Invalid TValue: '{0}'", typeof(TValue)); return getter; } - } - } - - // REVIEW: This shim class is very similar to the above shim class, except at the - // cursor level, not the cursorable level. Is there some non-horrifying way to unify both, somehow? - private sealed class SlotRowCursorShim : SynchronizedCursorBase, IRowCursor - { - private readonly SchemaImpl _schema; - - public Schema Schema => _schema.AsSchema; - - private sealed class SchemaImpl : ISchema - { - private readonly SlotRowCursorShim _parent; - private readonly VectorType _type; - private IChannel Ch { get { return _parent.Ch; } } + public override ValueGetter GetIdGetter() => GetId; - public Schema AsSchema { get; } - - public int ColumnCount { get { return 1; } } - - public SchemaImpl(SlotRowCursorShim parent, VectorType slotType) + private void GetId(ref UInt128 id) { - Contracts.AssertValue(parent); - _parent = parent; - Ch.AssertValue(slotType); - _type = slotType; - AsSchema = Schema.Create(this); + Ch.Check(_slotCursor.SlotIndex >= 0, "Cannot get ID with cursor in current state."); + id = new UInt128((ulong)_slotCursor.SlotIndex, 0); } - public ColumnType GetColumnType(int col) - { - Ch.CheckParam(col == 0, nameof(col)); - return _type; - } + protected override bool MoveNextCore() => _slotCursor.MoveNext(); + } + } - public string GetColumnName(int col) - { - Ch.CheckParam(col == 0, nameof(col)); - return "Waffles"; - } + // REVIEW: This shim class is very similar to the above shim class, except at the + // cursor level, not the cursorable level. Is there some non-horrifying way to unify both, somehow? 
+ private sealed class SlotRowCursorShim : RootCursorBase + { + private readonly SlotCursor _slotCursor; - public bool TryGetColumnIndex(string name, out int col) - { - if (name == GetColumnName(0)) - { - col = 0; - return true; - } - col = -1; - return false; - } + public override Schema Schema { get; } - public IEnumerable> GetMetadataTypes(int col) - { - Ch.CheckParam(col == 0, nameof(col)); - return Enumerable.Empty>(); - } + public override long Batch => 0; - public ColumnType GetMetadataTypeOrNull(string kind, int col) - { - Ch.CheckNonEmpty(kind, nameof(kind)); - Ch.CheckParam(col == 0, nameof(col)); - return null; - } + public SlotRowCursorShim(IChannelProvider provider, SlotCursor cursor) + : base(provider) + { + Contracts.AssertValue(cursor); - public void GetMetadata(string kind, int col, ref TValue value) - { - Ch.CheckNonEmpty(kind, nameof(kind)); - Ch.CheckParam(col == 0, nameof(col)); - throw MetadataUtils.ExceptGetMetadata(); - } + _slotCursor = cursor; + var builder = new SchemaBuilder(); + builder.AddColumn("Waffles", cursor.GetSlotType(), null); + Schema = builder.GetSchema(); } - public SlotRowCursorShim(IChannelProvider provider, ISlotCursor cursor) - : base(provider, cursor) + public override bool IsColumnActive(int col) { - _schema = new SchemaImpl(this, Input.GetSlotType()); + Ch.CheckParam(col == 0, nameof(col)); + return true; } - public bool IsColumnActive(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(col == 0, nameof(col)); - return true; + return _slotCursor.GetGetterWithVectorType(Ch); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetIdGetter() => GetId; + + private void GetId(ref UInt128 id) { - Ch.CheckParam(col == 0, nameof(col)); - return Input.GetGetterWithVectorType(Ch); + Ch.Check(_slotCursor.SlotIndex >= 0, "Cannot get ID with cursor in current state."); + id = new UInt128((ulong)_slotCursor.SlotIndex, 0); } + + protected override bool MoveNextCore() => 
_slotCursor.MoveNext(); } /// diff --git a/src/Microsoft.ML.Data/DataView/ZipDataView.cs b/src/Microsoft.ML.Data/DataView/ZipDataView.cs index 6bd6356b2a..7b344a02be 100644 --- a/src/Microsoft.ML.Data/DataView/ZipDataView.cs +++ b/src/Microsoft.ML.Data/DataView/ZipDataView.cs @@ -71,7 +71,7 @@ private ZipDataView(IHost host, IDataView[] sources) return min; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); @@ -89,31 +89,31 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) } /// - /// Create an with no requested columns on a data view. + /// Create an with no requested columns on a data view. /// Potentially, this can be optimized by calling GetRowCount(lazy:true) first, and if the count is not known, /// wrapping around GetCursor(). /// - private IRowCursor GetMinimumCursor(IDataView dv) + private RowCursor GetMinimumCursor(IDataView dv) { _host.AssertValue(dv); return dv.GetRowCursor(x => false); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { - private readonly IRowCursor[] _cursors; + private readonly RowCursor[] _cursors; private readonly CompositeSchema _compositeSchema; private readonly bool[] _isColumnActive; public override long Batch { get { return 0; } } - public Cursor(ZipDataView parent, IRowCursor[] srcCursors, Func predicate) + public Cursor(ZipDataView parent, RowCursor[] srcCursors, Func predicate) : base(parent._host) { 
Ch.AssertNonEmpty(srcCursors); @@ -167,15 +167,15 @@ protected override bool MoveManyCore(long count) return true; } - public Schema Schema => _compositeSchema.AsSchema; + public override Schema Schema => _compositeSchema.AsSchema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { _compositeSchema.CheckColumnInRange(col); return _isColumnActive[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { int dv; int srcCol; diff --git a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs index 14a23ab580..7a5b98da5b 100644 --- a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs +++ b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs @@ -254,7 +254,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -262,10 +262,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); var input = Source.GetRowCursor(inputPred, rand); - return new RowCursor(Host, _bindings, input, active); + return new Cursor(Host, _bindings, input, active); } - public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public sealed override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -277,18 +277,18 @@ public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator c Host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. 
- var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, _bindings, inputs[i], active); + cursors[i] = new Cursor(Host, _bindings, inputs[i], active); return cursors; } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool[] _active; - public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, bool[] active) + public Cursor(IChannelProvider provider, Bindings bindings, RowCursor input, bool[] active) : base(provider, input) { Ch.AssertValue(bindings); @@ -298,15 +298,15 @@ public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, _active = active; } - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active == null || _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs index 153f442d32..b04101f5b7 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs @@ -128,9 +128,9 @@ public interface ICanGetSummaryInKeyValuePairs public interface ICanGetSummaryAsIRow { - IRow GetSummaryIRowOrNull(RoleMappedSchema schema); + Row GetSummaryIRowOrNull(RoleMappedSchema schema); - IRow GetStatsIRowOrNull(RoleMappedSchema schema); + Row GetStatsIRowOrNull(RoleMappedSchema schema); } public interface ICanGetSummaryAsIDataView diff --git a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs 
index 8eae667c53..994414d8f4 100644 --- a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs @@ -236,7 +236,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => _rootSchema; - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { _ectx.Assert(IsCompositeRowToRowMapper(_chain)); _ectx.AssertValue(input); diff --git a/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs index 7b8bd9651e..361f580c71 100644 --- a/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs @@ -497,7 +497,7 @@ private void FinishOtherMetrics() } } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Host.Assert(!_streaming && PassNum < 2 || PassNum < 1); Host.AssertValue(schema.Label); diff --git a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs index 528f24efce..4dfe37f9f6 100644 --- a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs @@ -609,7 +609,7 @@ public Aggregator(IHostEnvironment env, ReadOnlyMemory[] classNames, bool } } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Host.AssertValue(schema.Label); Host.Assert(PassNum < 1); @@ -981,7 +981,7 @@ public override Func GetDependencies(Func activeOutput) return col => activeOutput(AssignedCol) && col == ScoreIndex; } - public override Delegate[] CreateGetters(IRow input, Func activeCols, out Action disposer) + public override Delegate[] CreateGetters(Row input, Func 
activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs index 7d015da046..797854dcbf 100644 --- a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs @@ -484,7 +484,7 @@ private void ProcessRowSecondPass() WeightedCounters.UpdateSecondPass(in _features, _indicesArr); } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { AssertValid(assertGetters: false); @@ -646,7 +646,7 @@ public override Func GetDependencies(Func activeOutput) (activeOutput(ClusterIdCol) || activeOutput(SortedClusterCol) || activeOutput(SortedClusterScoreCol)); } - public override Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer) + public override Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) { disposer = null; diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs index 14c8d58649..829dd0043a 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs @@ -253,7 +253,7 @@ public bool Start() /// /// This method should get the getters of the new IRow that are needed for the next pass. /// - public abstract void InitializeNextPass(IRow row, RoleMappedSchema schema); + public abstract void InitializeNextPass(Row row, RoleMappedSchema schema); /// /// Call the getters once, and process the input as necessary. @@ -324,7 +324,7 @@ protected virtual List GetWarningsCore() // When a new value is encountered, it uses a callback for creating a new aggregator. 
protected abstract class AggregatorDictionaryBase { - protected IRow Row; + protected Row Row; protected readonly Func CreateAgg; protected readonly RoleMappedSchema Schema; @@ -346,7 +346,7 @@ protected AggregatorDictionaryBase(RoleMappedSchema schema, string stratCol, Fun /// /// Gets the stratification column getter for the new IRow. /// - public abstract void Reset(IRow row); + public abstract void Reset(Row row); public static AggregatorDictionaryBase Create(RoleMappedSchema schema, string stratCol, ColumnType stratType, Func createAgg) @@ -397,7 +397,7 @@ public GenericAggregatorDictionary(RoleMappedSchema schema, string stratCol, Col _dict = new Dictionary(); } - public override void Reset(IRow row) + public override void Reset(Row row) { Row = row; int col; @@ -505,6 +505,6 @@ public virtual void Save(ModelSaveContext ctx) public abstract Schema.DetachedColumn[] GetOutputColumns(); - public abstract Delegate[] CreateGetters(IRow input, Func activeCols, out Action disposer); + public abstract Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer); } } diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs index 8889ccd6cf..205c24a8cf 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs @@ -978,7 +978,7 @@ private static IDataView AddVarLengthColumn(IHostEnvironment env, IDataVie (in VBuffer src, ref VBuffer dst) => src.CopyTo(ref dst)); } - private static List GetMetricNames(IChannel ch, Schema schema, IRow row, Func ignoreCol, + private static List GetMetricNames(IChannel ch, Schema schema, Row row, Func ignoreCol, ValueGetter[] getters, ValueGetter>[] vBufferGetters) { ch.AssertValue(schema); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs index 102e43a894..59a0c2c700 100644 --- 
a/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/BinaryClassificationMetrics.cs @@ -74,7 +74,7 @@ public class BinaryClassificationMetrics /// public double Auprc { get; } - protected private static T Fetch(IExceptionContext ectx, IRow row, string name) + protected private static T Fetch(IExceptionContext ectx, Row row, string name) { if (!row.Schema.TryGetColumnIndex(name, out int col)) throw ectx.Except($"Could not find column '{name}'"); @@ -83,7 +83,7 @@ protected private static T Fetch(IExceptionContext ectx, IRow row, string nam return val; } - internal BinaryClassificationMetrics(IExceptionContext ectx, IRow overallResult) + internal BinaryClassificationMetrics(IExceptionContext ectx, Row overallResult) { double Fetch(string name) => Fetch(ectx, overallResult, name); Auc = Fetch(BinaryClassifierEvaluator.Auc); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs index c4f0861224..7a8787eeff 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/CalibratedBinaryClassificationMetrics.cs @@ -42,7 +42,7 @@ public sealed class CalibratedBinaryClassificationMetrics : BinaryClassification /// public double Entropy { get; } - internal CalibratedBinaryClassificationMetrics(IExceptionContext ectx, IRow overallResult) + internal CalibratedBinaryClassificationMetrics(IExceptionContext ectx, Row overallResult) : base(ectx, overallResult) { double Fetch(string name) => Fetch(ectx, overallResult, name); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs index fc7e87e150..62b3352b63 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs +++ 
b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs @@ -35,7 +35,7 @@ public sealed class ClusteringMetrics /// public double Dbi { get; } - internal ClusteringMetrics(IExceptionContext ectx, IRow overallResult, bool calculateDbi) + internal ClusteringMetrics(IExceptionContext ectx, Row overallResult, bool calculateDbi) { double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs index 4eff184abc..7acdce7025 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs @@ -81,7 +81,7 @@ public sealed class MultiClassClassifierMetrics /// public double[] PerClassLogLoss { get; } - internal MultiClassClassifierMetrics(IExceptionContext ectx, IRow overallResult, int topK) + internal MultiClassClassifierMetrics(IExceptionContext ectx, Row overallResult, int topK) { double FetchDouble(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); AccuracyMicro = FetchDouble(MultiClassClassifierEvaluator.AccuracyMicro); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs index 975d4a494c..a3749f0ecc 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs @@ -24,7 +24,7 @@ public sealed class RankerMetrics /// public double[] Dcg { get; } - private static T Fetch(IExceptionContext ectx, IRow row, string name) + private static T Fetch(IExceptionContext ectx, Row row, string name) { if (!row.Schema.TryGetColumnIndex(name, out int col)) throw ectx.Except($"Could not find column '{name}'"); @@ -33,7 +33,7 @@ private static T Fetch(IExceptionContext ectx, IRow row, string name) return val; } - internal RankerMetrics(IExceptionContext 
ectx, IRow overallResult) + internal RankerMetrics(IExceptionContext ectx, Row overallResult) { VBuffer Fetch(string name) => Fetch>(ectx, overallResult, name); diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs index 8a9fd96b31..8bda753e21 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/RegressionMetrics.cs @@ -53,7 +53,7 @@ public sealed class RegressionMetrics /// public double RSquared { get; } - internal RegressionMetrics(IExceptionContext ectx, IRow overallResult) + internal RegressionMetrics(IExceptionContext ectx, Row overallResult) { double Fetch(string name) => RowCursorUtils.Fetch(ectx, overallResult, name); L1 = Fetch(RegressionEvaluator.L1); diff --git a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs index 273599f7f5..cb12bf92e4 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs @@ -387,7 +387,7 @@ public Aggregator(IHostEnvironment env, ReadOnlyMemory[] classNames, int s ClassNames = classNames; } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Host.Assert(PassNum < 1); Host.AssertValue(schema.Label); @@ -662,7 +662,7 @@ public override Func GetDependencies(Func activeOutput) activeOutput(SortedClassesCol) || activeOutput(LogLossCol)); } - public override Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer) + public override Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) { disposer = null; diff --git a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs index 
1b1f8f6f05..fb88a5bb60 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs @@ -299,7 +299,7 @@ public Aggregator(IHostEnvironment env, IRegressionLoss lossFunction, int size, WeightedCounters = Weighted ? new Counters(lossFunction, _size) : null; } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Contracts.Assert(PassNum < 1); Contracts.AssertValue(schema.Label); @@ -457,7 +457,7 @@ public override Schema.DetachedColumn[] GetOutputColumns() return infos; } - public override Delegate[] CreateGetters(IRow input, Func activeCols, out Action disposer) + public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs index ab05c1ee0c..36eabc8a71 100644 --- a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs @@ -380,7 +380,7 @@ private ValueGetter>> CreateSlotNamesGetter(string }; } - public override Delegate[] CreateGetters(IRow input, Func activeCols, out Action disposer) + public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs index fe750fce59..a2fea8415d 100644 --- a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs @@ -440,7 +440,7 @@ public Aggregator(IHostEnvironment env, Double[] labelGains, int truncationLevel GroupId = new List>(); } - public override void 
InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Contracts.Assert(PassNum < 1); Contracts.AssertValue(schema.Label); @@ -610,12 +610,12 @@ public void Save(ModelSaveContext ctx) return _transform.GetRowCount(); } - public IRowCursor GetRowCursor(Func needCol, Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { return _transform.GetRowCursor(needCol, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { return _transform.GetRowCursorSet(out consolidator, needCol, n, rand); } @@ -775,7 +775,7 @@ private void Copy(Double[] src, ref VBuffer dst) dst = editor.Commit(); } - protected override ValueGetter GetLabelGetter(IRow row) + protected override ValueGetter GetLabelGetter(Row row) { var lb = RowCursorUtils.GetLabelGetter(row, _bindings.LabelIndex); return @@ -787,12 +787,12 @@ protected override ValueGetter GetLabelGetter(IRow row) }; } - protected override ValueGetter GetScoreGetter(IRow row) + protected override ValueGetter GetScoreGetter(Row row) { return row.GetGetter(_bindings.ScoreIndex); } - protected override RowCursorState InitializeState(IRow input) + protected override RowCursorState InitializeState(Row input) { return new RowCursorState(_truncationLevel); } diff --git a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs index 729e0bffe0..b4fdf358a0 100644 --- a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs @@ -259,7 +259,7 @@ public override Schema.DetachedColumn[] GetOutputColumns() return infos; } - public override Delegate[] CreateGetters(IRow input, Func activeCols, out Action disposer) + public override 
Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluatorBase.cs b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluatorBase.cs index dcd2ce618e..27856f0bcb 100644 --- a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluatorBase.cs +++ b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluatorBase.cs @@ -191,7 +191,7 @@ protected RegressionAggregatorBase(IHostEnvironment env, IRegressionLoss lossFun Weighted = weighted; } - public override void InitializeNextPass(IRow row, RoleMappedSchema schema) + public override void InitializeNextPass(Row row, RoleMappedSchema schema) { Contracts.Assert(PassNum < 1); Contracts.AssertValue(schema.Label); diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index a44cf64753..90670d518f 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -559,7 +559,7 @@ public Func GetDependencies(Func predicate) return _predictor.GetInputColumnRoles(); } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { Func predictorPredicate = col => false; for (int i = 0; i < OutputSchema.ColumnCount; i++) @@ -584,12 +584,12 @@ public IRow GetRow(IRow input, Func predicate, out Action disposer) return new SimpleRow(OutputSchema, predictorRow, getters); } - private Delegate GetPredictorGetter(IRow input, int col) + private Delegate GetPredictorGetter(Row input, int col) { return input.GetGetter(col); } - private Delegate GetProbGetter(IRow input) + private Delegate GetProbGetter(Row input) { var scoreGetter = RowCursorUtils.GetGetterAs(NumberType.R4, input, _scoreCol); ValueGetter probGetter = diff --git a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs 
index 68fdb15ab7..84acb75898 100644 --- a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs @@ -220,7 +220,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo return new BinaryClassifierScorer(env, this, newSource); } - protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) + protected override Delegate GetPredictedLabelGetter(Row output, out Delegate scoreGetter) { Host.AssertValue(output); Host.Assert(output.Schema == Bindings.RowMapper.OutputSchema); diff --git a/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs b/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs index 1035735bc3..5b5796ca59 100644 --- a/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/ClusteringScorer.cs @@ -91,7 +91,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo return new ClusteringScorer(env, this, newSource); } - protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) + protected override Delegate GetPredictedLabelGetter(Row output, out Delegate scoreGetter) { Contracts.AssertValue(output); Contracts.Assert(output.Schema == Bindings.RowMapper.OutputSchema); diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 5062068a40..3a738887ec 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -214,24 +214,24 @@ public void Save(ModelSaveContext ctx) ctx.Writer.WriteBoolByte(Stringify); } - public Delegate GetTextContributionGetter(IRow input, int colSrc, VBuffer> slotNames) + public Delegate GetTextContributionGetter(Row input, int colSrc, VBuffer> slotNames) { Contracts.CheckValue(input, nameof(input)); Contracts.Check(0 <= 
colSrc && colSrc < input.Schema.ColumnCount); var typeSrc = input.Schema.GetColumnType(colSrc); - Func>, ValueGetter>> del = GetTextValueGetter; + Func>, ValueGetter>> del = GetTextValueGetter; var meth = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeSrc.RawType); return (Delegate)meth.Invoke(this, new object[] { input, colSrc, slotNames }); } - public Delegate GetContributionGetter(IRow input, int colSrc) + public Delegate GetContributionGetter(Row input, int colSrc) { Contracts.CheckValue(input, nameof(input)); Contracts.Check(0 <= colSrc && colSrc < input.Schema.ColumnCount); var typeSrc = input.Schema.GetColumnType(colSrc); - Func>> del = GetValueGetter; + Func>> del = GetValueGetter; // REVIEW: Assuming Feature contributions will be VBuffer. // For multiclass LR it needs to be(VBuffer[]. @@ -249,7 +249,7 @@ private ReadOnlyMemory GetSlotName(int index, VBuffer : slotName; } - private ValueGetter> GetTextValueGetter(IRow input, int colSrc, VBuffer> slotNames) + private ValueGetter> GetTextValueGetter(Row input, int colSrc, VBuffer> slotNames) { Contracts.AssertValue(input); Contracts.AssertValue(Predictor); @@ -292,7 +292,7 @@ private ValueGetter> GetTextValueGetter(IRow input, i }; } - private ValueGetter> GetValueGetter(IRow input, int colSrc) + private ValueGetter> GetValueGetter(Row input, int colSrc) { Contracts.AssertValue(input); Contracts.AssertValue(Predictor); @@ -385,7 +385,7 @@ public Func GetDependencies(Func predicate) return col => false; } - public IRow GetOutputRow(IRow input, Func predicate, out Action disposer) + public Row GetOutputRow(Row input, Func predicate, out Action disposer) { Contracts.AssertValue(input); Contracts.AssertValue(predicate); @@ -419,7 +419,7 @@ public Func GetGenericPredicate(Func predicate) yield return RoleMappedSchema.ColumnRole.Feature.Bind(InputRoleMappedSchema.Feature.Name); } - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, 
out Action disposer) { return GetOutputRow(input, active, out disposer); } diff --git a/src/Microsoft.ML.Data/Scorers/GenericScorer.cs b/src/Microsoft.ML.Data/Scorers/GenericScorer.cs index 99097dc12b..f301792603 100644 --- a/src/Microsoft.ML.Data/Scorers/GenericScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/GenericScorer.cs @@ -261,7 +261,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo return new GenericScorer(env, this, newSource); } - protected override Delegate[] GetGetters(IRow output, Func predicate) + protected override Delegate[] GetGetters(Row output, Func predicate) { Host.Assert(_bindings.DerivedColumnCount == 0); Host.AssertValue(output); diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index 3ee4377443..0324d950ce 100644 --- a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -307,7 +307,7 @@ public Func GetDependencies(Func predicate) return _mapper.GetInputColumnRoles(); } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { var innerRow = _mapper.GetRow(input, predicate, out disposer); return new RowImpl(innerRow, OutputSchema); @@ -386,17 +386,17 @@ public void GetMetadata(string kind, int col, ref TValue value) } } - private sealed class RowImpl : IRow + private sealed class RowImpl : Row { - private readonly IRow _row; + private readonly Row _row; private readonly Schema _schema; - public long Batch { get { return _row.Batch; } } - public long Position { get { return _row.Position; } } + public override long Batch => _row.Batch; + public override long Position => _row.Position; // The schema is of course the only difference from _row. 
- public Schema Schema => _schema; + public override Schema Schema => _schema; - public RowImpl(IRow row, Schema schema) + public RowImpl(Row row, Schema schema) { Contracts.AssertValue(row); Contracts.AssertValue(schema); @@ -405,17 +405,17 @@ public RowImpl(IRow row, Schema schema) _schema = schema; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { return _row.IsColumnActive(col); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { return _row.GetGetter(col); } - public ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _row.GetIdGetter(); } @@ -551,7 +551,7 @@ public override IDataTransform ApplyToData(IHostEnvironment env, IDataView newSo return new MultiClassClassifierScorer(env, this, newSource); } - protected override Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter) + protected override Delegate GetPredictedLabelGetter(Row output, out Delegate scoreGetter) { Host.AssertValue(output); Host.Assert(output.Schema == Bindings.RowMapper.OutputSchema); diff --git a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs index 8aff6ff69a..488f0101fe 100644 --- a/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/PredictedLabelScorerBase.cs @@ -400,7 +400,7 @@ protected override bool WantParallelCursors(Func predicate) return Bindings.AnyNewColumnsActive(predicate); } - protected override Delegate[] GetGetters(IRow output, Func predicate) + protected override Delegate[] GetGetters(Row output, Func predicate) { Host.Assert(Bindings.DerivedColumnCount == 1); Host.AssertValue(output); @@ -432,10 +432,10 @@ protected override Delegate[] GetGetters(IRow output, Func predicate) return getters; } - protected abstract Delegate GetPredictedLabelGetter(IRow output, out Delegate scoreGetter); + protected abstract Delegate 
GetPredictedLabelGetter(Row output, out Delegate scoreGetter); protected void EnsureCachedPosition(ref long cachedPosition, ref TScore score, - IRow boundRow, ValueGetter scoreGetter) + Row boundRow, ValueGetter scoreGetter) { if (cachedPosition != boundRow.Position) { diff --git a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs index 8cb39d2f82..34913c5d1b 100644 --- a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs @@ -114,7 +114,7 @@ private static bool[] GetActive(BindingsBase bindings, Func predicate /// protected abstract bool WantParallelCursors(Func predicate); - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Contracts.AssertValue(predicate); Contracts.AssertValueOrNull(rand); @@ -124,10 +124,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random Func predicateMapper; var active = GetActive(bindings, predicate, out predicateInput, out predicateMapper); var input = Source.GetRowCursor(predicateInput, rand); - return new RowCursor(Host, this, input, active, predicateMapper); + return new Cursor(Host, this, input, active, predicateMapper); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -143,13 +143,13 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); Contracts.AssertNonEmpty(inputs); - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < 
inputs.Length; i++) - cursors[i] = new RowCursor(Host, this, inputs[i], active, predicateMapper); + cursors[i] = new Cursor(Host, this, inputs[i], active, predicateMapper); return cursors; } - protected override Delegate[] CreateGetters(IRow input, Func active, out Action disp) + protected override Delegate[] CreateGetters(Row input, Func active, out Action disp) { var bindings = GetBindings(); Func predicateInput; @@ -173,9 +173,9 @@ protected override Func GetDependenciesCore(Func predicate /// Create and fill an array of getters of size InfoCount. The indices of the non-null entries in the /// result should be exactly those for which predicate(iinfo) is true. /// - protected abstract Delegate[] GetGetters(IRow output, Func predicate); + protected abstract Delegate[] GetGetters(Row output, Func predicate); - protected static Delegate[] GetGettersFromRow(IRow row, Func predicate) + protected static Delegate[] GetGettersFromRow(Row row, Func predicate) { Contracts.AssertValue(row); Contracts.AssertValue(predicate); @@ -189,19 +189,19 @@ protected static Delegate[] GetGettersFromRow(IRow row, Func predicat return getters; } - protected static Delegate GetGetterFromRow(IRow row, int col) + protected static Delegate GetGetterFromRow(Row row, int col) { Contracts.AssertValue(row); Contracts.Assert(0 <= col && col < row.Schema.ColumnCount); Contracts.Assert(row.IsColumnActive(col)); var type = row.Schema.GetColumnType(col); - Func> del = GetGetterFromRow; + Func> del = GetGetterFromRow; var meth = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(type.RawType); return (Delegate)meth.Invoke(null, new object[] { row, col }); } - protected static ValueGetter GetGetterFromRow(IRow output, int col) + protected static ValueGetter GetGetterFromRow(Row output, int col) { Contracts.AssertValue(output); Contracts.Assert(0 <= col && col < output.Schema.ColumnCount); @@ -215,16 +215,16 @@ protected override int MapColumnIndex(out bool isSrc, int col) return 
bindings.MapColumnIndex(out isSrc, col); } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly BindingsBase _bindings; private readonly bool[] _active; private readonly Delegate[] _getters; private readonly Action _disposer; - public Schema Schema { get; } + public override Schema Schema { get; } - public RowCursor(IChannelProvider provider, RowToRowScorerBase parent, IRowCursor input, bool[] active, Func predicateMapper) + public Cursor(IChannelProvider provider, RowToRowScorerBase parent, RowCursor input, bool[] active, Func predicateMapper) : base(provider, input) { Ch.AssertValue(parent); @@ -255,13 +255,13 @@ public override void Dispose() base.Dispose(); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs index 4d44d07543..c5f6301154 100644 --- a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs +++ b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs @@ -144,18 +144,18 @@ public ISchemaBoundMapper Bind(IHostEnvironment env, RoleMappedSchema schema) protected abstract ISchemaBoundMapper BindCore(IChannel ch, RoleMappedSchema schema); - protected virtual Delegate GetPredictionGetter(IRow input, int colSrc) + protected virtual Delegate GetPredictionGetter(Row input, int colSrc) { Contracts.AssertValue(input); Contracts.Assert(0 <= colSrc && colSrc < input.Schema.ColumnCount); var typeSrc = input.Schema.GetColumnType(colSrc); - Func> del = GetValueGetter; + Func> del = GetValueGetter; var meth = 
del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeSrc.RawType, ScoreType.RawType); return (Delegate)meth.Invoke(this, new object[] { input, colSrc }); } - private ValueGetter GetValueGetter(IRow input, int colSrc) + private ValueGetter GetValueGetter(Row input, int colSrc) { Contracts.AssertValue(input); Contracts.Assert(ValueMapper != null); @@ -223,7 +223,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => InputRoleMappedSchema.Schema; - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { Contracts.AssertValue(input); Contracts.AssertValue(predicate); @@ -510,7 +510,7 @@ public Func GetDependencies(Func predicate) yield return RoleMappedSchema.ColumnRole.Feature.Bind(InputRoleMappedSchema.Feature?.Name); } - private Delegate[] CreateGetters(IRow input, bool[] active) + private Delegate[] CreateGetters(Row input, bool[] active) { Contracts.Assert(Utils.Size(active) == 2); Contracts.Assert(_parent._distMapper != null); @@ -553,7 +553,7 @@ private Delegate[] CreateGetters(IRow input, bool[] active) private static void EnsureCachedResultValueMapper(ValueMapper, Float, Float> mapper, ref long cachedPosition, ValueGetter> featureGetter, ref VBuffer features, - ref Float score, ref Float prob, IRow input) + ref Float score, ref Float prob, Row input) { Contracts.AssertValue(mapper); if (cachedPosition != input.Position) @@ -566,7 +566,7 @@ private static void EnsureCachedResultValueMapper(ValueMapper, Fl } } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { Contracts.AssertValue(input); var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); @@ -657,7 +657,7 @@ protected override ISchemaBoundMapper BindCore(IChannel ch, RoleMappedSchema sch return new SingleValueRowMapper(schema, this, Schema.Create(new SchemaImpl(ScoreType, _quantiles))); 
} - protected override Delegate GetPredictionGetter(IRow input, int colSrc) + protected override Delegate GetPredictionGetter(Row input, int colSrc) { Contracts.AssertValue(input); Contracts.Assert(0 <= colSrc && colSrc < input.Schema.ColumnCount); diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index 41e984f522..b641744421 100644 --- a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -237,14 +237,14 @@ private static Func CreatePredicate(RoleMappedData data, CursOpt opt, /// Create a row cursor for the RoleMappedData with the indicated standard columns active. /// This does not verify that the columns exist, but merely activates the ones that do exist. /// - public static IRowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, Random rand, IEnumerable extraCols = null) + public static RowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, Random rand, IEnumerable extraCols = null) => data.Data.GetRowCursor(CreatePredicate(data, opt, extraCols), rand); /// /// Create a row cursor set for the RoleMappedData with the indicated standard columns active. /// This does not verify that the columns exist, but merely activates the ones that do exist. /// - public static IRowCursor[] CreateRowCursorSet(this RoleMappedData data, out IRowCursorConsolidator consolidator, + public static RowCursor[] CreateRowCursorSet(this RoleMappedData data, out IRowCursorConsolidator consolidator, CursOpt opt, int n, Random rand, IEnumerable extraCols = null) => data.Data.GetRowCursorSet(out consolidator, CreatePredicate(data, opt, extraCols), n, rand); @@ -258,7 +258,7 @@ private static void AddOpt(HashSet cols, ColumnInfo info) /// /// Get the getter for the feature column, assuming it is a vector of float. 
/// - public static ValueGetter> GetFeatureFloatVectorGetter(this IRow row, RoleMappedSchema schema) + public static ValueGetter> GetFeatureFloatVectorGetter(this Row row, RoleMappedSchema schema) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckValue(schema, nameof(schema)); @@ -271,7 +271,7 @@ public static ValueGetter> GetFeatureFloatVectorGetter(this IRow /// /// Get the getter for the feature column, assuming it is a vector of float. /// - public static ValueGetter> GetFeatureFloatVectorGetter(this IRow row, RoleMappedData data) + public static ValueGetter> GetFeatureFloatVectorGetter(this Row row, RoleMappedData data) { Contracts.CheckValue(data, nameof(data)); return GetFeatureFloatVectorGetter(row, data.Schema); @@ -281,7 +281,7 @@ public static ValueGetter> GetFeatureFloatVectorGetter(this IRow /// Get a getter for the label as a float. This assumes that the label column type /// has already been validated as appropriate for the kind of training being done. /// - public static ValueGetter GetLabelFloatGetter(this IRow row, RoleMappedSchema schema) + public static ValueGetter GetLabelFloatGetter(this Row row, RoleMappedSchema schema) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckValue(schema, nameof(schema)); @@ -295,7 +295,7 @@ public static ValueGetter GetLabelFloatGetter(this IRow row, RoleMappedSc /// Get a getter for the label as a float. This assumes that the label column type /// has already been validated as appropriate for the kind of training being done. /// - public static ValueGetter GetLabelFloatGetter(this IRow row, RoleMappedData data) + public static ValueGetter GetLabelFloatGetter(this Row row, RoleMappedData data) { Contracts.CheckValue(data, nameof(data)); return GetLabelFloatGetter(row, data.Schema); @@ -304,7 +304,7 @@ public static ValueGetter GetLabelFloatGetter(this IRow row, RoleMappedDa /// /// Get the getter for the weight column, or null if there is no weight column. 
/// - public static ValueGetter GetOptWeightFloatGetter(this IRow row, RoleMappedSchema schema) + public static ValueGetter GetOptWeightFloatGetter(this Row row, RoleMappedSchema schema) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckValue(schema, nameof(schema)); @@ -317,7 +317,7 @@ public static ValueGetter GetOptWeightFloatGetter(this IRow row, RoleMapp return RowCursorUtils.GetGetterAs(NumberType.Float, row, col.Index); } - public static ValueGetter GetOptWeightFloatGetter(this IRow row, RoleMappedData data) + public static ValueGetter GetOptWeightFloatGetter(this Row row, RoleMappedData data) { Contracts.CheckValue(data, nameof(data)); return GetOptWeightFloatGetter(row, data.Schema); @@ -326,7 +326,7 @@ public static ValueGetter GetOptWeightFloatGetter(this IRow row, RoleMapp /// /// Get the getter for the group column, or null if there is no group column. /// - public static ValueGetter GetOptGroupGetter(this IRow row, RoleMappedSchema schema) + public static ValueGetter GetOptGroupGetter(this Row row, RoleMappedSchema schema) { Contracts.CheckValue(row, nameof(row)); Contracts.CheckValue(schema, nameof(schema)); @@ -339,7 +339,7 @@ public static ValueGetter GetOptGroupGetter(this IRow row, RoleMappedSche return RowCursorUtils.GetGetterAs(NumberType.U8, row, col.Index); } - public static ValueGetter GetOptGroupGetter(this IRow row, RoleMappedData data) + public static ValueGetter GetOptGroupGetter(this Row row, RoleMappedData data) { Contracts.CheckValue(data, nameof(data)); return GetOptGroupGetter(row, data.Schema); @@ -393,7 +393,7 @@ public static SchemaShape.Column MakeR4ScalarWeightColumn(string weightColumn, b /// /// This is the base class for a data cursor. Data cursors are specially typed - /// "convenience" cursor-like objects, less general than a but + /// "convenience" cursor-like objects, less general than a but /// more convenient for common access patterns that occur in machine learning. 
For /// example, the common idiom of iterating over features/labels/weights while skipping /// "bad" features, labels, and weights. There will be two typical access patterns for @@ -404,9 +404,9 @@ public static SchemaShape.Column MakeR4ScalarWeightColumn(string weightColumn, b /// public abstract class TrainingCursorBase : IDisposable { - public IRow Row { get { return _cursor; } } + public Row Row { get { return _cursor; } } - private readonly IRowCursor _cursor; + private readonly RowCursor _cursor; private readonly Action _signal; private long _skipCount; @@ -420,7 +420,7 @@ public abstract class TrainingCursorBase : IDisposable /// /// /// This method is called - protected TrainingCursorBase(IRowCursor input, Action signal) + protected TrainingCursorBase(RowCursor input, Action signal) { Contracts.AssertValue(input); Contracts.AssertValueOrNull(signal); @@ -428,7 +428,7 @@ protected TrainingCursorBase(IRowCursor input, Action signal) _signal = signal; } - protected static IRowCursor CreateCursor(RoleMappedData data, CursOpt opt, Random rand, params int[] extraCols) + protected static RowCursor CreateCursor(RoleMappedData data, CursOpt opt, Random rand, params int[] extraCols) { Contracts.AssertValue(data); Contracts.AssertValueOrNull(rand); @@ -596,7 +596,7 @@ public TCurs[] CreateSet(int n, Random rand = null, params int[] extraCols) /// , whose return value is used to call /// this action. /// - protected abstract TCurs CreateCursorCore(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal); + protected abstract TCurs CreateCursorCore(RowCursor input, RoleMappedData data, CursOpt opt, Action signal); /// /// Accumulates signals from cursors, anding them together. 
Once it has @@ -658,7 +658,7 @@ public StandardScalarCursor(RoleMappedData data, CursOpt opt, Random rand = null { } - protected StandardScalarCursor(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) + protected StandardScalarCursor(RowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) : base(input, signal) { Contracts.AssertValue(data); @@ -723,7 +723,7 @@ public Factory(RoleMappedData data, CursOpt opt) { } - protected override StandardScalarCursor CreateCursorCore(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal) + protected override StandardScalarCursor CreateCursorCore(RowCursor input, RoleMappedData data, CursOpt opt, Action signal) => new StandardScalarCursor(input, data, opt, signal); } } @@ -748,7 +748,7 @@ public FeatureFloatVectorCursor(RoleMappedData data, CursOpt opt = CursOpt.Featu { } - protected FeatureFloatVectorCursor(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) + protected FeatureFloatVectorCursor(RowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) : base(input, data, opt, signal) { if ((opt & CursOpt.Features) != 0 && data.Schema.Feature != null) @@ -789,7 +789,7 @@ public Factory(RoleMappedData data, CursOpt opt = CursOpt.Features) { } - protected override FeatureFloatVectorCursor CreateCursorCore(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal) + protected override FeatureFloatVectorCursor CreateCursorCore(RowCursor input, RoleMappedData data, CursOpt opt, Action signal) { return new FeatureFloatVectorCursor(input, data, opt, signal); } @@ -816,7 +816,7 @@ public FloatLabelCursor(RoleMappedData data, CursOpt opt = CursOpt.Label, { } - protected FloatLabelCursor(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) + protected FloatLabelCursor(RowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) : base(input, data, opt, signal) { if ((opt & CursOpt.Label) != 0 && 
data.Schema.Label != null) @@ -856,7 +856,7 @@ public Factory(RoleMappedData data, CursOpt opt = CursOpt.Label) { } - protected override FloatLabelCursor CreateCursorCore(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal) + protected override FloatLabelCursor CreateCursorCore(RowCursor input, RoleMappedData data, CursOpt opt, Action signal) { return new FloatLabelCursor(input, data, opt, signal); } @@ -885,7 +885,7 @@ public MultiClassLabelCursor(int classCount, RoleMappedData data, CursOpt opt = { } - protected MultiClassLabelCursor(int classCount, IRowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) + protected MultiClassLabelCursor(int classCount, RowCursor input, RoleMappedData data, CursOpt opt, Action signal = null) : base(input, data, opt, signal) { Contracts.Assert(classCount >= 0); @@ -934,7 +934,7 @@ public Factory(int classCount, RoleMappedData data, CursOpt opt = CursOpt.Label) _classCount = classCount; } - protected override MultiClassLabelCursor CreateCursorCore(IRowCursor input, RoleMappedData data, CursOpt opt, Action signal) + protected override MultiClassLabelCursor CreateCursorCore(RowCursor input, RoleMappedData data, CursOpt opt, Action signal) { return new MultiClassLabelCursor(_classCount, input, data, opt, signal); } diff --git a/src/Microsoft.ML.Data/Transforms/BindingsWrappedRowCursor.cs b/src/Microsoft.ML.Data/Transforms/BindingsWrappedRowCursor.cs index fecac3d005..114bbc4fdd 100644 --- a/src/Microsoft.ML.Data/Transforms/BindingsWrappedRowCursor.cs +++ b/src/Microsoft.ML.Data/Transforms/BindingsWrappedRowCursor.cs @@ -13,11 +13,11 @@ namespace Microsoft.ML.Runtime.Data /// inconvenient or inefficient to handle the "no output selected" case in their /// own implementation. 
/// - internal sealed class BindingsWrappedRowCursor : SynchronizedCursorBase, IRowCursor + internal sealed class BindingsWrappedRowCursor : SynchronizedCursorBase { private readonly ColumnBindingsBase _bindings; - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; /// /// Creates a wrapped version of the cursor @@ -25,7 +25,7 @@ internal sealed class BindingsWrappedRowCursor : SynchronizedCursorBaseChannel provider /// The input cursor /// The bindings object, - public BindingsWrappedRowCursor(IChannelProvider provider, IRowCursor input, ColumnBindingsBase bindings) + public BindingsWrappedRowCursor(IChannelProvider provider, RowCursor input, ColumnBindingsBase bindings) : base(provider, input) { Ch.CheckValue(input, nameof(input)); @@ -34,7 +34,7 @@ public BindingsWrappedRowCursor(IChannelProvider provider, IRowCursor input, Col _bindings = bindings; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col & col < _bindings.ColumnCount, "col"); bool isSrc; @@ -42,7 +42,7 @@ public bool IsColumnActive(int col) return isSrc && Input.IsColumnActive(col); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col), "col"); bool isSrc; diff --git a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs index c33c2e55ff..327c029767 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs @@ -650,7 +650,7 @@ private void GetSlotNames(ref VBuffer> dst) bldr.GetResult(ref dst); } - public Delegate MakeGetter(IRow input) + public Delegate MakeGetter(Row input) { if (_isIdentity) return Utils.MarshalInvoke(MakeIdentityGetter, OutputType.RawType, input); @@ -658,13 +658,13 @@ public Delegate MakeGetter(IRow input) return 
Utils.MarshalInvoke(MakeGetter, OutputType.ItemType.RawType, input); } - private Delegate MakeIdentityGetter(IRow input) + private Delegate MakeIdentityGetter(Row input) { Contracts.Assert(SrcIndices.Length == 1); return input.GetGetter(SrcIndices[0]); } - private Delegate MakeGetter(IRow input) + private Delegate MakeGetter(Row input) { var srcGetterOnes = new ValueGetter[SrcIndices.Length]; var srcGetterVecs = new ValueGetter>[SrcIndices.Length]; @@ -847,7 +847,7 @@ public override Func GetDependencies(Func activeOutput) public override void Save(ModelSaveContext ctx) => _parent.Save(ctx); - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { disposer = null; return _columns[iinfo].MakeGetter(input); diff --git a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs index 6ab8fe9b7b..4479856cc0 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs @@ -175,13 +175,13 @@ internal Mapper(ColumnCopyingTransformer parent, Schema inputSchema, (string Sou _columns = columns; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _columns.Length); disposer = null; - Delegate MakeGetter(IRow row, int index) + Delegate MakeGetter(Row row, int index) => input.GetGetter(index); input.Schema.TryGetColumnIndex(_columns[iinfo].Source, out int colIndex); diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index afbbbe59b8..33d61e4560 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ 
b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -579,32 +579,32 @@ private static Schema GenerateOutputSchema(IEnumerable map, } } - private sealed class Row : IRow + private sealed class RowImpl : Row { private readonly Mapper _mapper; - private readonly IRow _input; - public Row(IRow input, Mapper mapper) + private readonly Row _input; + public RowImpl(Row input, Mapper mapper) { _mapper = mapper; _input = input; } - public long Position => _input.Position; + public override long Position => _input.Position; - public long Batch => _input.Batch; + public override long Batch => _input.Batch; - Schema IRow.Schema => _mapper.OutputSchema; + public override Schema Schema => _mapper.OutputSchema; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { int index = _mapper.GetInputIndex(col); return _input.GetGetter(index); } - public ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() => _input.GetIdGetter(); - public bool IsColumnActive(int col) => true; + public override bool IsColumnActive(int col) => true; } private sealed class SelectColumnsDataTransform : IDataTransform, IRowToRowMapper, ITransformTemplate @@ -633,7 +633,7 @@ public SelectColumnsDataTransform(IHostEnvironment env, ColumnSelectingTransform public long? 
GetRowCount() => Source.GetRowCount(); - public IRowCursor GetRowCursor(Func needCol, Random rand = null) + public RowCursor GetRowCursor(Func needCol, Random rand = null) { _host.AssertValue(needCol, nameof(needCol)); _host.AssertValueOrNull(rand); @@ -644,10 +644,10 @@ public IRowCursor GetRowCursor(Func needCol, Random rand = null) // Build the active state for the output var active = Utils.BuildArray(_mapper.OutputSchema.ColumnCount, needCol); - return new RowCursor(_host, _mapper, inputRowCursor, active); + return new Cursor(_host, _mapper, inputRowCursor, active); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func needCol, int n, Random rand = null) { _host.CheckValue(needCol, nameof(needCol)); _host.CheckValueOrNull(rand); @@ -661,10 +661,10 @@ public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Fun _host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. 
- var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) { - cursors[i] = new RowCursor(_host, _mapper, inputs[i], active); + cursors[i] = new Cursor(_host, _mapper, inputs[i], active); } return cursors; } @@ -684,22 +684,22 @@ public Func GetDependencies(Func activeOutput) return col => active[col]; } - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { disposer = null; - return new Row(input, _mapper); + return new RowImpl(input, _mapper); } public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) => new SelectColumnsDataTransform(env, _transform, new Mapper(_transform, newSource.Schema), newSource); } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Mapper _mapper; - private readonly IRowCursor _inputCursor; + private readonly RowCursor _inputCursor; private readonly bool[] _active; - public RowCursor(IChannelProvider provider, Mapper mapper, IRowCursor input, bool[] active) + public Cursor(IChannelProvider provider, Mapper mapper, RowCursor input, bool[] active) : base(provider, input) { _mapper = mapper; @@ -707,15 +707,15 @@ public RowCursor(IChannelProvider provider, Mapper mapper, IRowCursor input, boo _active = active; } - public Schema Schema => _mapper.OutputSchema; + public override Schema Schema => _mapper.OutputSchema; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { int index = _mapper.GetInputIndex(col); return _inputCursor.GetGetter(index); } - public bool IsColumnActive(int col) => _active[col]; + public override bool IsColumnActive(int col) => _active[col]; } } } diff --git a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs index d1f6c70bee..e8a75d4edd 100644 --- 
a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs @@ -692,7 +692,7 @@ private void CombineRanges( newRangeMax = maxRange2; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -711,7 +711,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return MakeVecGetter(input, iinfo); } - private Delegate MakeOneTrivialGetter(IRow input, int iinfo) + private Delegate MakeOneTrivialGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -734,7 +734,7 @@ private void OneTrivialGetter(ref TDst value) value = default(TDst); } - private Delegate MakeVecTrivialGetter(IRow input, int iinfo) + private Delegate MakeVecTrivialGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -757,19 +757,19 @@ private void VecTrivialGetter(ref VBuffer value) VBufferUtils.Resize(ref value, 1, 0); } - private Delegate MakeVecGetter(IRow input, int iinfo) + private Delegate MakeVecGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); Host.Assert(_srcTypes[iinfo].IsVector); Host.Assert(!_suppressed[iinfo]); - Func>> del = MakeVecGetter; + Func>> del = MakeVecGetter; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(_srcTypes[iinfo].ItemType.RawType); return (Delegate)methodInfo.Invoke(this, new object[] { input, iinfo }); } - private ValueGetter> MakeVecGetter(IRow input, int iinfo) + private ValueGetter> MakeVecGetter(Row input, int iinfo) { var srcGetter = GetSrcGetter>(input, iinfo); var typeDst = _dstTypes[iinfo]; @@ -786,7 +786,7 @@ 
private ValueGetter> MakeVecGetter(IRow input, int iinfo) }; } - private ValueGetter GetSrcGetter(IRow input, int iinfo) + private ValueGetter GetSrcGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -795,12 +795,12 @@ private ValueGetter GetSrcGetter(IRow input, int iinfo) return input.GetGetter(src); } - private Delegate GetSrcGetter(ColumnType typeDst, IRow row, int iinfo) + private Delegate GetSrcGetter(ColumnType typeDst, Row row, int iinfo) { Host.CheckValue(typeDst, nameof(typeDst)); Host.CheckValue(row, nameof(row)); - Func> del = GetSrcGetter; + Func> del = GetSrcGetter; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeDst.RawType); return (Delegate)methodInfo.Invoke(this, new object[] { row, iinfo }); } diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index 72231d5eb9..0bf2687c39 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -332,7 +332,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -340,10 +340,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); var input = Source.GetRowCursor(inputPred); - return new RowCursor(Host, _bindings, input, active); + return new Cursor(Host, _bindings, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, 
int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -351,7 +351,7 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); - IRowCursor input; + RowCursor input; if (n > 1 && ShouldUseParallelCursors(predicate) != false) { @@ -360,9 +360,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid if (inputs.Length != 1) { - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, _bindings, inputs[i], active); + cursors[i] = new Cursor(Host, _bindings, inputs[i], active); return cursors; } input = inputs[0]; @@ -371,10 +371,10 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid input = Source.GetRowCursor(inputPred); consolidator = null; - return new IRowCursor[] { new RowCursor(Host, _bindings, input, active) }; + return new RowCursor[] { new Cursor(Host, _bindings, input, active) }; } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool[] _active; @@ -383,7 +383,7 @@ private sealed class RowCursor : SynchronizedCursorBase, IRowCursor private readonly TauswortheHybrid[] _rngs; private readonly long[] _lastCounters; - public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, bool[] active) + public Cursor(IChannelProvider provider, Bindings bindings, RowCursor input, bool[] active) : base(provider, input) { Ch.CheckValue(bindings, nameof(bindings)); @@ -408,15 +408,15 @@ public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, } } - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public bool IsColumnActive(int col) + 
public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active == null || _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 60e799ddb6..1e9151f44a 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -273,7 +273,7 @@ internal HashingTransformer(IHostEnvironment env, IDataView input, ColumnInfo[] } if (Utils.Size(sourceColumnsForInvertHash) > 0) { - using (IRowCursor srcCursor = input.GetRowCursor(sourceColumnsForInvertHash.Contains)) + using (RowCursor srcCursor = input.GetRowCursor(sourceColumnsForInvertHash.Contains)) { using (var ch = Host.Start("Invert hash building")) { @@ -307,7 +307,7 @@ internal HashingTransformer(IHostEnvironment env, IDataView input, ColumnInfo[] } } - private Delegate GetGetterCore(IRow input, int iinfo, out Action disposer) + private Delegate GetGetterCore(Row input, int iinfo, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _columns.Length); @@ -394,7 +394,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV } #region Getters - private ValueGetter ComposeGetterOne(IRow input, int iinfo, int srcCol, ColumnType srcType) + private ValueGetter ComposeGetterOne(Row input, int iinfo, int srcCol, ColumnType srcType) { Host.Assert(HashingEstimator.IsColumnTypeValid(srcType)); @@ -452,7 +452,7 @@ private ValueGetter ComposeGetterOne(IRow input, int iinfo, int srcCol, Co } } - private ValueGetter> ComposeGetterVec(IRow input, int iinfo, int srcCol, ColumnType srcType) + private ValueGetter> ComposeGetterVec(Row input, int iinfo, int srcCol, ColumnType srcType) { Host.Assert(srcType.IsVector); Host.Assert(HashingEstimator.IsColumnTypeValid(srcType.ItemType)); @@ -505,7 +505,7 @@ 
private ValueGetter> ComposeGetterVec(IRow input, int iinfo, int s } } - private ValueGetter> ComposeGetterVecCore(IRow input, int iinfo, int srcCol, ColumnType srcType) + private ValueGetter> ComposeGetterVecCore(Row input, int iinfo, int srcCol, ColumnType srcType) where THash : struct, IHasher { Host.Assert(srcType.IsVector); @@ -709,7 +709,7 @@ public uint HashCore(uint seed, uint mask, in long value) } } - private static ValueGetter MakeScalarHashGetter(IRow input, int srcCol, uint seed, uint mask) + private static ValueGetter MakeScalarHashGetter(Row input, int srcCol, uint seed, uint mask) where THash : struct, IHasher { Contracts.Assert(Utils.IsPowerOfTwo(mask + 1)); @@ -914,18 +914,18 @@ private void AddMetaKeyValues(int i, MetadataBuilder builder) builder.AddKeyValues(_parent._kvTypes[i].VectorSize, _parent._kvTypes[i].ItemType.AsPrimitive, getter); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) => _parent.GetGetterCore(input, iinfo, out disposer); + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) => _parent.GetGetterCore(input, iinfo, out disposer); } private abstract class InvertHashHelper { - protected readonly IRow Row; + protected readonly Row Row; private readonly bool _includeSlot; private readonly ColumnInfo _ex; private readonly ColumnType _srcType; private readonly int _srcCol; - private InvertHashHelper(IRow row, ColumnInfo ex) + private InvertHashHelper(Row row, ColumnInfo ex) { Contracts.AssertValue(row); Row = row; @@ -946,13 +946,13 @@ private InvertHashHelper(IRow row, ColumnInfo ex) /// The extra column info /// The number of input hashed valuPres to accumulate per output hash value /// A hash getter, built on top of . 
- public static InvertHashHelper Create(IRow row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) + public static InvertHashHelper Create(Row row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) { row.Schema.TryGetColumnIndex(ex.Input, out int srcCol); ColumnType typeSrc = row.Schema.GetColumnType(srcCol); Type t = typeSrc.IsVector ? (ex.Ordered ? typeof(ImplVecOrdered<>) : typeof(ImplVec<>)) : typeof(ImplOne<>); t = t.MakeGenericType(typeSrc.ItemType.RawType); - var consTypes = new Type[] { typeof(IRow), typeof(ColumnInfo), typeof(int), typeof(Delegate) }; + var consTypes = new Type[] { typeof(Row), typeof(ColumnInfo), typeof(int), typeof(Delegate) }; var constructorInfo = t.GetConstructor(consTypes); return (InvertHashHelper)constructorInfo.Invoke(new object[] { row, ex, invertHashMaxCount, dstGetter }); } @@ -1029,7 +1029,7 @@ private abstract class Impl : InvertHashHelper { protected readonly InvertHashCollector Collector; - protected Impl(IRow row, ColumnInfo ex, int invertHashMaxCount) + protected Impl(Row row, ColumnInfo ex, int invertHashMaxCount) : base(row, ex) { Contracts.AssertValue(row); @@ -1062,7 +1062,7 @@ private sealed class ImplOne : Impl private T _value; private uint _hash; - public ImplOne(IRow row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) + public ImplOne(Row row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) : base(row, ex, invertHashMaxCount) { _srcGetter = Row.GetGetter(_srcCol); @@ -1096,7 +1096,7 @@ private sealed class ImplVec : Impl private VBuffer _value; private VBuffer _hash; - public ImplVec(IRow row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) + public ImplVec(Row row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) : base(row, ex, invertHashMaxCount) { _srcGetter = Row.GetGetter>(_srcCol); @@ -1130,7 +1130,7 @@ private sealed class ImplVecOrdered : Impl> private VBuffer _value; private VBuffer _hash; - public ImplVecOrdered(IRow row, ColumnInfo ex, int 
invertHashMaxCount, Delegate dstGetter) + public ImplVecOrdered(Row row, ColumnInfo ex, int invertHashMaxCount, Delegate dstGetter) : base(row, ex, invertHashMaxCount) { _srcGetter = Row.GetGetter>(_srcCol); diff --git a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs index fd6af73412..0fe412ee84 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs @@ -211,7 +211,7 @@ public void SaveAsPfa(BoundPfaContext ctx) ctx.DeclareVar(toDeclare.ToArray()); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _types.Length); @@ -294,7 +294,7 @@ protected KeyToValueMap(Mapper mapper, PrimitiveType typeVal, int iinfo) InfoIndex = iinfo; } - public abstract Delegate GetMappingGetter(IRow input); + public abstract Delegate GetMappingGetter(Row input); public abstract JToken SavePfa(BoundPfaContext ctx, JToken srcToken); } @@ -346,7 +346,7 @@ private void MapKey(in TKey src, ReadOnlySpan values, ref TValue dst) dst = _na; } - public override Delegate GetMappingGetter(IRow input) + public override Delegate GetMappingGetter(Row input) { // When constructing the getter, there are a few cases we have to consider: // If scalar then it's just a straightforward mapping. 
diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index 363bc4c83f..98193ad24e 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -438,7 +438,7 @@ private void GetCategoricalSlotRanges(int iinfo, ref VBuffer dst) dst = new VBuffer(ranges.Length, ranges); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _infos.Length); @@ -456,7 +456,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac /// This is for the singleton case. This should be equivalent to both Bag and Ord over /// a vector of size one. /// - private ValueGetter> MakeGetterOne(IRow input, int iinfo) + private ValueGetter> MakeGetterOne(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(_infos[iinfo].TypeSrc.IsKey); @@ -489,7 +489,7 @@ private ValueGetter> MakeGetterOne(IRow input, int iinfo) /// /// This is for the bagging case - vector input and outputs should be added. /// - private ValueGetter> MakeGetterBag(IRow input, int iinfo) + private ValueGetter> MakeGetterBag(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(_infos[iinfo].TypeSrc.IsVector); @@ -533,7 +533,7 @@ private ValueGetter> MakeGetterBag(IRow input, int iinfo) /// /// This is for the indicator (non-bagging) case - vector input and outputs should be concatenated. 
/// - private ValueGetter> MakeGetterInd(IRow input, int iinfo) + private ValueGetter> MakeGetterInd(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(_infos[iinfo].TypeSrc.IsVector); diff --git a/src/Microsoft.ML.Data/Transforms/LabelConvertTransform.cs b/src/Microsoft.ML.Data/Transforms/LabelConvertTransform.cs index f63b97333b..3832ab6f8a 100644 --- a/src/Microsoft.ML.Data/Transforms/LabelConvertTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/LabelConvertTransform.cs @@ -165,7 +165,7 @@ private bool PassThrough(string kind, int iinfo) return kind != MetadataUtils.Kinds.KeyValues; } - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer) + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Contracts.AssertValueOrNull(ch); Contracts.AssertValue(input); @@ -190,34 +190,34 @@ protected override VectorType GetSlotTypeCore(int iinfo) return _slotType; } - protected override ISlotCursor GetSlotCursorCore(int iinfo) + protected override SlotCursor GetSlotCursorCore(int iinfo) { Host.Assert(0 <= iinfo && iinfo < Infos.Length); Host.AssertValue(Infos[iinfo].SlotTypeSrc); - ISlotCursor cursor = InputTranspose.GetSlotCursor(Infos[iinfo].Source); - return new SlotCursor(Host, cursor, GetSlotTypeCore(iinfo)); + var cursor = InputTranspose.GetSlotCursor(Infos[iinfo].Source); + return new SlotCursorImpl(Host, cursor, GetSlotTypeCore(iinfo)); } - private sealed class SlotCursor : SynchronizedCursorBase, ISlotCursor + private sealed class SlotCursorImpl : SlotCursor.SynchronizedSlotCursor { private readonly Delegate _getter; private readonly VectorType _type; - public SlotCursor(IChannelProvider provider, ISlotCursor cursor, VectorType typeDst) + public SlotCursorImpl(IChannelProvider provider, SlotCursor cursor, VectorType typeDst) : base(provider, cursor) { Ch.AssertValue(typeDst); - _getter = RowCursorUtils.GetLabelGetter(Input); + _getter = 
RowCursorUtils.GetLabelGetter(cursor); _type = typeDst; } - public VectorType GetSlotType() + public override VectorType GetSlotType() { return _type; } - public ValueGetter> GetGetter() + public override ValueGetter> GetGetter() { ValueGetter> getter = _getter as ValueGetter>; if (getter == null) diff --git a/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs b/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs index 3734245258..2c5acafd1d 100644 --- a/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs @@ -163,7 +163,7 @@ protected override ColumnType GetColumnTypeCore(int iinfo) return BoolType.Instance; } - protected override Delegate GetGetterCore(IChannel ch, IRow input, + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValue(ch); @@ -175,7 +175,7 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, return GetGetter(ch, input, iinfo); } - private ValueGetter GetGetter(IChannel ch, IRow input, int iinfo) + private ValueGetter GetGetter(IChannel ch, Row input, int iinfo) { Host.AssertValue(ch); ch.AssertValue(input); diff --git a/src/Microsoft.ML.Data/Transforms/NAFilter.cs b/src/Microsoft.ML.Data/Transforms/NAFilter.cs index 36c9f43368..1f7a192627 100644 --- a/src/Microsoft.ML.Data/Transforms/NAFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/NAFilter.cs @@ -204,7 +204,7 @@ private static bool TestType(ColumnType type) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -212,10 +212,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random bool[] active; Func inputPred = GetActive(predicate, out active); var input = Source.GetRowCursor(inputPred, rand); - return new 
RowCursor(this, input, active); + return new Cursor(this, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -227,9 +227,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid Host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(this, inputs[i], active); + cursors[i] = new Cursor(this, inputs[i], active); return cursors; } @@ -245,13 +245,13 @@ private Func GetActive(Func predicate, out bool[] active) return col => activeInput[col]; } - private sealed class RowCursor : LinkedRowFilterCursorBase + private sealed class Cursor : LinkedRowFilterCursorBase { private abstract class Value { - protected readonly RowCursor Cursor; + protected readonly Cursor Cursor; - protected Value(RowCursor cursor) + protected Value(Cursor cursor) { Contracts.AssertValue(cursor); Cursor = cursor; @@ -261,7 +261,7 @@ protected Value(RowCursor cursor) public abstract Delegate GetGetter(); - public static Value Create(RowCursor cursor, ColInfo info) + public static Value Create(Cursor cursor, ColInfo info) { Contracts.AssertValue(cursor); Contracts.AssertValue(info); @@ -269,18 +269,18 @@ public static Value Create(RowCursor cursor, ColInfo info) MethodInfo meth; if (info.Type is VectorType vecType) { - Func> d = CreateVec; + Func> d = CreateVec; meth = d.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(vecType.ItemType.RawType); } else { - Func> d = CreateOne; + Func> d = CreateOne; meth = d.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(info.Type.RawType); } return (Value)meth.Invoke(null, new object[] { 
cursor, info }); } - private static ValueOne CreateOne(RowCursor cursor, ColInfo info) + private static ValueOne CreateOne(Cursor cursor, ColInfo info) { Contracts.AssertValue(cursor); Contracts.AssertValue(info); @@ -292,7 +292,7 @@ private static ValueOne CreateOne(RowCursor cursor, ColInfo info) return new ValueOne(cursor, getSrc, hasBad); } - private static ValueVec CreateVec(RowCursor cursor, ColInfo info) + private static ValueVec CreateVec(Cursor cursor, ColInfo info) { Contracts.AssertValue(cursor); Contracts.AssertValue(info); @@ -310,7 +310,7 @@ private abstract class TypedValue : Value private readonly InPredicate _hasBad; public T Src; - protected TypedValue(RowCursor cursor, ValueGetter getSrc, InPredicate hasBad) + protected TypedValue(Cursor cursor, ValueGetter getSrc, InPredicate hasBad) : base(cursor) { Contracts.AssertValue(getSrc); @@ -330,7 +330,7 @@ private sealed class ValueOne : TypedValue { private readonly ValueGetter _getter; - public ValueOne(RowCursor cursor, ValueGetter getSrc, InPredicate hasBad) + public ValueOne(Cursor cursor, ValueGetter getSrc, InPredicate hasBad) : base(cursor, getSrc, hasBad) { _getter = GetValue; @@ -352,7 +352,7 @@ private sealed class ValueVec : TypedValue> { private readonly ValueGetter> _getter; - public ValueVec(RowCursor cursor, ValueGetter> getSrc, InPredicate> hasBad) + public ValueVec(Cursor cursor, ValueGetter> getSrc, InPredicate> hasBad) : base(cursor, getSrc, hasBad) { _getter = GetValue; @@ -374,7 +374,7 @@ public override Delegate GetGetter() private readonly NAFilter _parent; private readonly Value[] _values; - public RowCursor(NAFilter parent, IRowCursor input, bool[] active) + public Cursor(NAFilter parent, RowCursor input, bool[] active) : base(parent.Host, input, parent.OutputSchema, active) { _parent = parent; diff --git a/src/Microsoft.ML.Data/Transforms/NopTransform.cs b/src/Microsoft.ML.Data/Transforms/NopTransform.cs index 0d0a619c1c..3d4219a60d 100644 --- 
a/src/Microsoft.ML.Data/Transforms/NopTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/NopTransform.cs @@ -118,12 +118,12 @@ public bool CanShuffle return Source.GetRowCount(); } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { return Source.GetRowCursor(predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { return Source.GetRowCursorSet(out consolidator, predicate, n, rand); } @@ -133,7 +133,7 @@ public Func GetDependencies(Func predicate) return predicate; } - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { Contracts.CheckValue(input, nameof(input)); Contracts.CheckValue(active, nameof(active)); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs index 7c3bdd363c..a57f6270a1 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs @@ -361,7 +361,7 @@ private AffineColumnFunction(IHost host) public bool CanSaveOnnx(OnnxContext ctx) => true; public abstract bool OnnxInfo(OnnxContext ctx, OnnxNode nodeProtoWrapper, int featureCount); - public abstract Delegate GetGetter(IRow input, int icol); + public abstract Delegate GetGetter(Row input, int icol); public abstract void AttachMetadata(MetadataDispatcher.Builder bldr, ColumnType typeSrc); @@ -480,7 +480,7 @@ private CdfColumnFunction(IHost host) public bool OnnxInfo(OnnxContext ctx, OnnxNode nodeProtoWrapper, int featureCount) => throw Host.ExceptNotSupp(); - public abstract Delegate GetGetter(IRow input, int icol); + public abstract Delegate GetGetter(Row input, int icol); public abstract void 
AttachMetadata(MetadataDispatcher.Builder bldr, ColumnType typeSrc); public abstract NormalizingTransformer.NormalizerModelParametersBase GetNormalizerModelParams(); @@ -609,7 +609,7 @@ protected BinColumnFunction(IHost host) public bool OnnxInfo(OnnxContext ctx, OnnxNode nodeProtoWrapper, int featureCount) => throw Host.ExceptNotSupp(); - public abstract Delegate GetGetter(IRow input, int icol); + public abstract Delegate GetGetter(Row input, int icol); public void AttachMetadata(MetadataDispatcher.Builder bldr, ColumnType typeSrc) { @@ -732,7 +732,7 @@ private abstract class SupervisedBinFunctionBuilderBase : IColumnFunctionBuilder protected readonly int LabelCardinality; private readonly ValueGetter _labelGetterSrc; - protected SupervisedBinFunctionBuilderBase(IHost host, long lim, int labelColId, IRow dataRow) + protected SupervisedBinFunctionBuilderBase(IHost host, long lim, int labelColId, Row dataRow) { Contracts.CheckValue(host, nameof(host)); Host = host; @@ -742,7 +742,7 @@ protected SupervisedBinFunctionBuilderBase(IHost host, long lim, int labelColId, _labelGetterSrc = GetLabelGetter(dataRow, labelColId, out LabelCardinality); } - private ValueGetter GetLabelGetter(IRow row, int col, out int labelCardinality) + private ValueGetter GetLabelGetter(Row row, int col, out int labelCardinality) { // The label column type is checked as part of args validation. 
var type = row.Schema.GetColumnType(col); @@ -816,7 +816,7 @@ private abstract class OneColumnSupervisedBinFunctionBuilderBase : Super protected readonly List ColValues; protected OneColumnSupervisedBinFunctionBuilderBase(IHost host, long lim, int valueColId, int labelColId, - IRow dataRow) + Row dataRow) : base(host, lim, labelColId, dataRow) { _colGetterSrc = dataRow.GetGetter(valueColId); @@ -844,7 +844,7 @@ private abstract class VecColumnSupervisedBinFunctionBuilderBase : Super protected readonly List[] ColValues; protected readonly int ColumnSlotCount; - protected VecColumnSupervisedBinFunctionBuilderBase(IHost host, long lim, int valueColId, int labelColId, IRow dataRow) + protected VecColumnSupervisedBinFunctionBuilderBase(IHost host, long lim, int valueColId, int labelColId, Row dataRow) : base(host, lim, labelColId, dataRow) { _colValueGetter = dataRow.GetGetter>(valueColId); @@ -899,7 +899,7 @@ protected override bool AcceptColumnValue() internal static partial class MinMaxUtils { public static IColumnFunctionBuilder CreateBuilder(MinMaxArguments args, IHost host, - int icol, int srcIndex, ColumnType srcType, IRowCursor cursor) + int icol, int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); host.AssertValue(args); @@ -912,7 +912,7 @@ public static IColumnFunctionBuilder CreateBuilder(MinMaxArguments args, IHost h } public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.MinMaxColumn column, IHost host, - int srcIndex, ColumnType srcType, IRowCursor cursor) + int srcIndex, ColumnType srcType, RowCursor cursor) { if (srcType.IsNumber) { @@ -935,7 +935,7 @@ public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.MinMaxCo internal static partial class MeanVarUtils { public static IColumnFunctionBuilder CreateBuilder(MeanVarArguments args, IHost host, - int icol, int srcIndex, ColumnType srcType, IRowCursor cursor) + int icol, int srcIndex, ColumnType srcType, RowCursor cursor) { 
Contracts.AssertValue(host); host.AssertValue(args); @@ -949,7 +949,7 @@ public static IColumnFunctionBuilder CreateBuilder(MeanVarArguments args, IHost } public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.MeanVarColumn column, IHost host, - int srcIndex, ColumnType srcType, IRowCursor cursor) + int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); @@ -975,7 +975,7 @@ public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.MeanVarC internal static partial class LogMeanVarUtils { public static IColumnFunctionBuilder CreateBuilder(LogMeanVarArguments args, IHost host, - int icol, int srcIndex, ColumnType srcType, IRowCursor cursor) + int icol, int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); host.AssertValue(args); @@ -988,7 +988,7 @@ public static IColumnFunctionBuilder CreateBuilder(LogMeanVarArguments args, IHo } public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.LogMeanVarColumn column, IHost host, - int srcIndex, ColumnType srcType, IRowCursor cursor) + int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); host.AssertValue(column); @@ -1014,7 +1014,7 @@ public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.LogMeanV internal static partial class BinUtils { public static IColumnFunctionBuilder CreateBuilder(BinArguments args, IHost host, - int icol, int srcIndex, ColumnType srcType, IRowCursor cursor) + int icol, int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); host.AssertValue(args); @@ -1028,7 +1028,7 @@ public static IColumnFunctionBuilder CreateBuilder(BinArguments args, IHost host } public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.BinningColumn column, IHost host, - int srcIndex, ColumnType srcType, IRowCursor cursor) + int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); @@ -1053,7 +1053,7 @@ public static 
IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.BinningC internal static class SupervisedBinUtils { public static IColumnFunctionBuilder CreateBuilder(SupervisedBinArguments args, IHost host, - int icol, int srcIndex, ColumnType srcType, IRowCursor cursor) + int icol, int srcIndex, ColumnType srcType, RowCursor cursor) { Contracts.AssertValue(host); host.AssertValue(args); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs index 5792486012..42a03e3eb6 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs @@ -583,7 +583,7 @@ public override bool OnnxInfo(OnnxContext ctx, OnnxNode nodeProtoWrapper, int fe return true; } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter(icol); ValueGetter del = @@ -659,7 +659,7 @@ public override bool OnnxInfo(OnnxContext ctx, OnnxNode node, int featureCount) return true; } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R8Adder.Instance); @@ -901,7 +901,7 @@ public override void Save(ModelSaveContext ctx) CdfNormSerializationUtils.SaveModel(ctx, UseLog, new[] { Mean }, new[] { Stddev }); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { if (Stddev <= TFloat.Epsilon) { @@ -956,7 +956,7 @@ public override void Save(ModelSaveContext ctx) CdfNormSerializationUtils.SaveModel(ctx, UseLog, Mean, Stddev); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R8Adder.Instance); @@ -1085,7 +1085,7 @@ public override void Save(ModelSaveContext ctx) c => 
BinNormSerializationUtils.SaveModel(c, new[] { _binUpperBounds }, saveText: true)); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter(icol); ValueGetter del = @@ -1170,7 +1170,7 @@ public override void Save(ModelSaveContext ctx) ctx.SaveSubModel("BinNormalizer", c => BinNormSerializationUtils.SaveModel(c, _binUpperBounds, saveText: true)); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R8Adder.Instance); @@ -1842,7 +1842,7 @@ public sealed class SupervisedBinOneColumnFunctionBuilder : OneColumnSupervisedB private readonly int _numBins; private readonly int _minBinSize; - private SupervisedBinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, IRow dataRow) + private SupervisedBinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, Row dataRow) : base(host, lim, valueColumnId, labelColumnId, dataRow) { _fix = fix; @@ -1862,7 +1862,7 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, IRow dataRow) + public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) { var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? 
args.MaxTrainingExamples; host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); @@ -1880,7 +1880,7 @@ public sealed class SupervisedBinVecColumnFunctionBuilder : VecColumnSupervisedB private readonly int _numBins; private readonly int _minBinSize; - private SupervisedBinVecColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, IRow dataRow) + private SupervisedBinVecColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, Row dataRow) : base(host, lim, valueColumnId, labelColumnId, dataRow) { _fix = fix; @@ -1902,7 +1902,7 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, IRow dataRow) + public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) { var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? 
args.MaxTrainingExamples; host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs index 54e6693f76..d310dbb2e4 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs @@ -585,7 +585,7 @@ public override bool OnnxInfo(OnnxContext ctx, OnnxNode node, int featureCount) return true; } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter(icol); ValueGetter del = @@ -660,7 +660,7 @@ public override bool OnnxInfo(OnnxContext ctx, OnnxNode node, int featureCount) return true; } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R4Adder.Instance); @@ -905,7 +905,7 @@ public override void Save(ModelSaveContext ctx) CdfNormSerializationUtils.SaveModel(ctx, UseLog, new[] { Mean }, new[] { Stddev }); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { if (Stddev <= TFloat.Epsilon) { @@ -960,7 +960,7 @@ public override void Save(ModelSaveContext ctx) CdfNormSerializationUtils.SaveModel(ctx, UseLog, Mean, Stddev); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R4Adder.Instance); @@ -1090,7 +1090,7 @@ public override void Save(ModelSaveContext ctx) c => BinNormSerializationUtils.SaveModel(c, new[] { _binUpperBounds }, saveText: true)); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter(icol); ValueGetter del = @@ -1175,7 +1175,7 
@@ public override void Save(ModelSaveContext ctx) ctx.SaveSubModel("BinNormalizer", c => BinNormSerializationUtils.SaveModel(c, _binUpperBounds, saveText: true)); } - public override Delegate GetGetter(IRow input, int icol) + public override Delegate GetGetter(Row input, int icol) { var getSrc = input.GetGetter>(icol); var bldr = new BufferBuilder(R4Adder.Instance); @@ -1850,7 +1850,7 @@ public sealed class SupervisedBinOneColumnFunctionBuilder : OneColumnSupervisedB private readonly int _numBins; private readonly int _minBinSize; - private SupervisedBinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, IRow dataRow) + private SupervisedBinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, Row dataRow) : base(host, lim, valueColumnId, labelColumnId, dataRow) { _fix = fix; @@ -1870,7 +1870,7 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, IRow dataRow) + public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) { var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? 
args.MaxTrainingExamples; host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); @@ -1888,7 +1888,7 @@ public sealed class SupervisedBinVecColumnFunctionBuilder : VecColumnSupervisedB private readonly int _numBins; private readonly int _minBinSize; - private SupervisedBinVecColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, IRow dataRow) + private SupervisedBinVecColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, int minBinSize, int valueColumnId, int labelColumnId, Row dataRow) : base(host, lim, valueColumnId, labelColumnId, dataRow) { _fix = fix; @@ -1910,7 +1910,7 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, IRow dataRow) + public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) { var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? 
args.MaxTrainingExamples; host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs b/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs index cc3f4e352f..e84d4eb8a4 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs @@ -55,7 +55,7 @@ public interface IColumnAggregator internal interface IColumnFunction : ICanSaveModel { - Delegate GetGetter(IRow input, int icol); + Delegate GetGetter(Row input, int icol); void AttachMetadata(MetadataDispatcher.Builder bldr, ColumnType typeSrc); diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index 1434b680a7..222fea019c 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -74,7 +74,7 @@ private protected ColumnBase(string input, string output, long maxTrainingExampl MaxTrainingExamples = maxTrainingExamples; } - internal abstract IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, IRowCursor cursor); + internal abstract IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor); internal static ColumnBase Create(string input, string output, NormalizerMode mode) { @@ -112,7 +112,7 @@ public MinMaxColumn(string input, string output = null, long maxTrainingExamples { } - internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, IRowCursor cursor) + internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor) => NormalizeTransform.MinMaxUtils.CreateBuilder(this, host, srcIndex, srcType, cursor); } @@ -127,7 +127,7 @@ public MeanVarColumn(string input, string output = null, UseCdf = useCdf; } - internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType 
srcType, IRowCursor cursor) + internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor) => NormalizeTransform.MeanVarUtils.CreateBuilder(this, host, srcIndex, srcType, cursor); } @@ -142,7 +142,7 @@ public LogMeanVarColumn(string input, string output = null, UseCdf = useCdf; } - internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, IRowCursor cursor) + internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor) => NormalizeTransform.LogMeanVarUtils.CreateBuilder(this, host, srcIndex, srcType, cursor); } @@ -157,7 +157,7 @@ public BinningColumn(string input, string output = null, NumBins = numBins; } - internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, IRowCursor cursor) + internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor) => NormalizeTransform.BinUtils.CreateBuilder(this, host, srcIndex, srcType, cursor); } @@ -516,7 +516,7 @@ private void IsNormalizedGetter(ref bool dst) dst = true; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { disposer = null; return _parent.Columns[iinfo].ColumnFunction.GetGetter(input, ColMapNewToOld[iinfo]); diff --git a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs index 38eea76459..625bd22e9e 100644 --- a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs @@ -152,15 +152,15 @@ public virtual void Save(ModelSaveContext ctx) return Source.GetRowCount(); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = 
null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -178,14 +178,14 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) return GetRowCursorCore(predicate); } - private IRowCursor GetRowCursorCore(Func predicate) + private RowCursor GetRowCursorCore(Func predicate) { var bindings = GetBindings(); var active = bindings.GetActive(predicate); Contracts.Assert(active.Length == bindings.ColumnCount); var predInput = bindings.GetDependencies(predicate); - return new RowCursor(this, Source.GetRowCursor(predInput, null), Source.GetRowCursor(predInput, null), active); + return new Cursor(this, Source.GetRowCursor(predInput, null), Source.GetRowCursor(predInput, null), active); } /// @@ -199,17 +199,17 @@ private IRowCursor GetRowCursorCore(Func predicate) /// /// Get the getter for the first input column. /// - protected abstract ValueGetter GetLabelGetter(IRow row); + protected abstract ValueGetter GetLabelGetter(Row row); /// /// Get the getter for the second input column. /// - protected abstract ValueGetter GetScoreGetter(IRow row); + protected abstract ValueGetter GetScoreGetter(Row row); /// /// Return a new state object. /// - protected abstract TState InitializeState(IRow input); + protected abstract TState InitializeState(Row input); /// /// Update the state object with one example. 
@@ -222,11 +222,11 @@ private IRowCursor GetRowCursorCore(Func predicate) /// protected abstract void UpdateState(TState state); - private sealed class RowCursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly PerGroupTransformBase _parent; - private readonly IRowCursor _groupCursor; - private readonly IRowCursor _input; + private readonly RowCursor _groupCursor; + private readonly RowCursor _input; private readonly bool[] _active; private readonly Delegate[] _getters; @@ -237,11 +237,11 @@ private sealed class RowCursor : RootCursorBase, IRowCursor private readonly ValueGetter _labelGetter; private readonly ValueGetter _scoreGetter; - public Schema Schema => _parent.OutputSchema; + public override Schema Schema => _parent.OutputSchema; - public override long Batch { get { return 0; } } + public override long Batch => 0; - public RowCursor(PerGroupTransformBase parent, IRowCursor input, IRowCursor groupCursor, bool[] active) + public Cursor(PerGroupTransformBase parent, RowCursor input, RowCursor groupCursor, bool[] active) : base(parent.Host) { Ch.AssertValue(parent); @@ -267,13 +267,13 @@ public RowCursor(PerGroupTransformBase parent, IRowCurso _scoreGetter = _parent.GetScoreGetter(_groupCursor); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _parent.GetBindings().ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Contracts.CheckParam(IsColumnActive(col), nameof(col), "requested column is not active"); @@ -324,8 +324,8 @@ protected override bool MoveNextCore() // Read the whole group from the auxiliary cursor. 
while (_groupCursor.State != CursorState.Done && !_newGroupInGroupCursorDel()) { - TLabel label = default(TLabel); - TScore score = default(TScore); + TLabel label = default; + TScore score = default; _labelGetter(ref label); _scoreGetter(ref score); _parent.ProcessExample(_state, label, score); diff --git a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs index f02a05dfd9..88413f9ef2 100644 --- a/src/Microsoft.ML.Data/Transforms/RangeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/RangeFilter.cs @@ -204,7 +204,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -215,7 +215,7 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random return CreateCursorCore(input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -227,13 +227,13 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid Host.AssertNonEmpty(inputs); // No need to split if this is given 1 input cursor. 
- var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) cursors[i] = CreateCursorCore(inputs[i], active); return cursors; } - private IRowCursor CreateCursorCore(IRowCursor input, bool[] active) + private RowCursor CreateCursorCore(RowCursor input, bool[] active) { if (_type == NumberType.R4) return new SingleRowCursor(this, input, active); @@ -268,7 +268,7 @@ private abstract class RowCursorBase : LinkedRowFilterCursorBase private readonly Double _min; private readonly Double _max; - protected RowCursorBase(RangeFilter parent, IRowCursor input, bool[] active) + protected RowCursorBase(RangeFilter parent, RowCursor input, bool[] active) : base(parent.Host, input, parent.OutputSchema, active) { Parent = parent; @@ -319,15 +319,15 @@ public override ValueGetter GetGetter(int col) return fn; } - public static IRowCursor CreateKeyRowCursor(RangeFilter filter, IRowCursor input, bool[] active) + public static RowCursor CreateKeyRowCursor(RangeFilter filter, RowCursor input, bool[] active) { Contracts.Assert(filter._type.IsKey); - Func del = CreateKeyRowCursor; + Func del = CreateKeyRowCursor; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(filter._type.RawType); - return (IRowCursor)methodInfo.Invoke(null, new object[] { filter, input, active }); + return (RowCursor)methodInfo.Invoke(null, new object[] { filter, input, active }); } - private static IRowCursor CreateKeyRowCursor(RangeFilter filter, IRowCursor input, bool[] active) + private static RowCursor CreateKeyRowCursor(RangeFilter filter, RowCursor input, bool[] active) { Contracts.Assert(filter._type.IsKey); return new KeyRowCursor(filter, input, active); @@ -340,7 +340,7 @@ private sealed class SingleRowCursor : RowCursorBase private readonly ValueGetter _getter; private Single _value; - public SingleRowCursor(RangeFilter parent, IRowCursor input, bool[] active) + public SingleRowCursor(RangeFilter 
parent, RowCursor input, bool[] active) : base(parent, input, active) { Ch.Assert(Parent._type == NumberType.R4); @@ -373,7 +373,7 @@ private sealed class DoubleRowCursor : RowCursorBase private readonly ValueGetter _getter; private Double _value; - public DoubleRowCursor(RangeFilter parent, IRowCursor input, bool[] active) + public DoubleRowCursor(RangeFilter parent, RowCursor input, bool[] active) : base(parent, input, active) { Ch.Assert(Parent._type == NumberType.R8); @@ -408,7 +408,7 @@ private sealed class KeyRowCursor : RowCursorBase private readonly ValueMapper _conv; private readonly int _count; - public KeyRowCursor(RangeFilter parent, IRowCursor input, bool[] active) + public KeyRowCursor(RangeFilter parent, RowCursor input, bool[] active) : base(parent, input, active) { Ch.Assert(Parent._type.KeyCount > 0); diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 121d9c43fb..76da06d846 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -223,7 +223,7 @@ internal static bool CanShuffleAll(ISchema schema) /// /// Utility to take a cursor, and get a shuffled version of this cursor. 
/// - public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRows, IRowCursor cursor, Random rand) + public static RowCursor GetShuffledCursor(IChannelProvider provider, int poolRows, RowCursor cursor, Random rand) { Contracts.CheckValue(provider, nameof(provider)); @@ -236,7 +236,7 @@ public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRo if (poolRows == 1) return cursor; - return new RowCursor(provider, poolRows, cursor, rand); + return new Cursor(provider, poolRows, cursor, rand); } public override bool CanShuffle { get { return true; } } @@ -249,7 +249,7 @@ public static IRowCursor GetShuffledCursor(IChannelProvider provider, int poolRo return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -286,16 +286,16 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random // source cursor. if (rand == null || _poolRows == 1) return input; - return new RowCursor(Host, _poolRows, input, rand); + return new Cursor(Host, _poolRows, input, rand); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursorCore(predicate, rand) }; + return new RowCursor[] { GetRowCursorCore(predicate, rand) }; } /// @@ -344,7 +344,7 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid /// The result is something functionally equivalent to but but considerably faster than the /// simple implementation described in the first paragraph. 
/// - private sealed class RowCursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { /// /// Pipes, in addition to column values, will also communicate extra information @@ -465,7 +465,7 @@ public void Fetch(int idx, ref T value) private const int _bufferDepth = 3; private readonly int _poolRows; - private readonly IRowCursor _input; + private readonly RowCursor _input; private readonly Random _rand; // This acts as mapping from the "circular" index to the actual index within the pipe. @@ -496,15 +496,12 @@ public void Fetch(int idx, ref T value) private readonly int[] _colToActivesIndex; - public Schema Schema { get { return _input.Schema; } } + public override Schema Schema => _input.Schema; - public override long Batch - { - // REVIEW: Implement cursor set support. - get { return 0; } - } + // REVIEW: Implement cursor set support. + public override long Batch => 0; - public RowCursor(IChannelProvider provider, int poolRows, IRowCursor input, Random rand) + public Cursor(IChannelProvider provider, int poolRows, RowCursor input, Random rand) : base(provider) { Ch.AssertValue(input); @@ -669,7 +666,7 @@ protected override bool MoveNextCore() return true; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); Ch.Assert((_colToActivesIndex[col] >= 0) == _input.IsColumnActive(col)); @@ -706,7 +703,7 @@ private ValueGetter CreateGetterDelegate(ShufflePipe pipe) return getter; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); Ch.CheckParam(_colToActivesIndex[col] >= 0, nameof(col), "requested column not active"); diff --git a/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs b/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs index cce93e439b..356a0cc546 100644 --- 
a/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs +++ b/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs @@ -69,7 +69,7 @@ protected MapperBase(IHost host, Schema inputSchema) public Schema.DetachedColumn[] GetOutputColumns() => _outputColumns.Value; - public Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer) + public Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) { // REVIEW: it used to be that the mapper's input schema in the constructor was required to be reference-equal to the schema // of the input row. @@ -98,7 +98,7 @@ public Delegate[] CreateGetters(IRow input, Func activeOutput, out Ac return result; } - protected abstract Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer); + protected abstract Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer); public abstract Func GetDependencies(Func activeOutput); diff --git a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs index 7f0399fc6d..12a8cf2138 100644 --- a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs @@ -188,26 +188,26 @@ public override void Save(ModelSaveContext ctx) return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate); Host.AssertValueOrNull(rand); var input = Source.GetRowCursor(predicate); var activeColumns = Utils.BuildArray(OutputSchema.ColumnCount, predicate); - return new RowCursor(Host, input, OutputSchema, activeColumns, _skip, _take); + return new Cursor(Host, input, OutputSchema, activeColumns, _skip, _take); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, 
Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursorCore(predicate) }; + return new RowCursor[] { GetRowCursorCore(predicate) }; } - private sealed class RowCursor : LinkedRowRootCursorBase + private sealed class Cursor : LinkedRowRootCursorBase { private readonly long _skip; private readonly long _take; @@ -219,7 +219,7 @@ public override long Batch { get { return 0; } } - public RowCursor(IChannelProvider provider, IRowCursor input, Schema schema, bool[] active, long skip, long take) + public Cursor(IChannelProvider provider, RowCursor input, Schema schema, bool[] active, long skip, long take) : base(provider, input, schema, active) { Ch.Assert(skip >= 0); diff --git a/src/Microsoft.ML.Data/Transforms/TransformBase.cs b/src/Microsoft.ML.Data/Transforms/TransformBase.cs index cbf60b6967..4618b756f7 100644 --- a/src/Microsoft.ML.Data/Transforms/TransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/TransformBase.cs @@ -60,7 +60,7 @@ protected TransformBase(IHost host, IDataView input) public abstract Schema OutputSchema { get; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -72,7 +72,7 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) // When the input wants to be split, this puts the consolidation after this transform // instead of before. This is likely to produce better performance, for example, when // this is RangeFilter. - IRowCursor curs; + RowCursor curs; if (useParallel != false && DataViewUtils.TryCreateConsolidatingCursor(out curs, this, predicate, Host, rng)) { @@ -93,9 +93,9 @@ public IRowCursor GetRowCursor(Func predicate, Random rand = null) /// /// Create a single (non-parallel) row cursor. 
/// - protected abstract IRowCursor GetRowCursorCore(Func predicate, Random rand = null); + protected abstract RowCursor GetRowCursorCore(Func predicate, Random rand = null); - public abstract IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public abstract RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null); } @@ -168,7 +168,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => Source.Schema; - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); @@ -180,29 +180,29 @@ public IRow GetRow(IRow input, Func active, out Action disposer) Action disp; var getters = CreateGetters(input, active, out disp); disposer += disp; - return new Row(input, this, OutputSchema, getters); + return new RowImpl(input, this, OutputSchema, getters); } } - protected abstract Delegate[] CreateGetters(IRow input, Func active, out Action disp); + protected abstract Delegate[] CreateGetters(Row input, Func active, out Action disp); protected abstract int MapColumnIndex(out bool isSrc, int col); - private sealed class Row : IRow + private sealed class RowImpl : Row { private readonly Schema _schema; - private readonly IRow _input; + private readonly Row _input; private readonly Delegate[] _getters; private readonly RowToRowMapperTransformBase _parent; - public long Batch { get { return _input.Batch; } } + public override long Batch => _input.Batch; - public long Position { get { return _input.Position; } } + public override long Position => _input.Position; - public Schema Schema { get { return _schema; } } + public override Schema Schema => _schema; - public Row(IRow input, RowToRowMapperTransformBase parent, Schema schema, Delegate[] getters) + public RowImpl(Row input, RowToRowMapperTransformBase parent, Schema schema, Delegate[] 
getters) { _input = input; _parent = parent; @@ -210,7 +210,7 @@ public Row(IRow input, RowToRowMapperTransformBase parent, Schema schema, Delega _getters = getters; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { bool isSrc; int index = _parent.MapColumnIndex(out isSrc, col); @@ -224,12 +224,12 @@ public ValueGetter GetGetter(int col) return fn; } - public ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _input.GetIdGetter(); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { bool isSrc; int index = _parent.MapColumnIndex(out isSrc, col); @@ -683,9 +683,9 @@ protected virtual void ActivateSourceColumns(int iinfo, bool[] active) /// otherwise it should be set to a delegate to be invoked by the cursor's Dispose method. It's best /// for this action to be idempotent - calling it multiple times should be equivalent to calling it once. /// - protected abstract Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer); + protected abstract Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer); - protected ValueGetter GetSrcGetter(IRow input, int iinfo) + protected ValueGetter GetSrcGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < Infos.Length); @@ -694,12 +694,12 @@ protected ValueGetter GetSrcGetter(IRow input, int iinfo) return input.GetGetter(src); } - protected Delegate GetSrcGetter(ColumnType typeDst, IRow row, int iinfo) + protected Delegate GetSrcGetter(ColumnType typeDst, Row row, int iinfo) { Host.CheckValue(typeDst, nameof(typeDst)); Host.CheckValue(row, nameof(row)); - Func> del = GetSrcGetter; + Func> del = GetSrcGetter; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(typeDst.RawType); return (Delegate)methodInfo.Invoke(this, new object[] { row, iinfo }); } @@ -727,7 +727,7 @@ protected virtual bool WantParallelCursors(Func 
predicate) return _bindings.AnyNewColumnsActive(predicate); } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -735,10 +735,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); var input = Source.GetRowCursor(inputPred, rand); - return new RowCursor(Host, this, input, active); + return new Cursor(Host, this, input, active); } - public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public sealed override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -753,9 +753,9 @@ public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator c inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); Host.AssertNonEmpty(inputs); - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, this, inputs[i], active); + cursors[i] = new Cursor(Host, this, inputs[i], active); return cursors; } @@ -770,7 +770,7 @@ protected Exception ExceptGetSlotCursor(int col) OutputSchema[col].Name); } - public ISlotCursor GetSlotCursor(int col) + public SlotCursor GetSlotCursor(int col) { Host.CheckParam(0 <= col && col < _bindings.ColumnCount, nameof(col)); @@ -795,7 +795,7 @@ public ISlotCursor GetSlotCursor(int col) /// null for all new columns, and so reaching this is only possible if there is a /// bug. 
/// - protected virtual ISlotCursor GetSlotCursorCore(int iinfo) + protected virtual SlotCursor GetSlotCursorCore(int iinfo) { Host.Assert(false); throw Host.ExceptNotImpl("Data view indicated it could transpose a column, but apparently it could not"); @@ -811,7 +811,7 @@ protected override Func GetDependenciesCore(Func predicate return _bindings.GetDependencies(predicate); } - protected override Delegate[] CreateGetters(IRow input, Func active, out Action disposer) + protected override Delegate[] CreateGetters(Row input, Func active, out Action disposer) { Func activeInfos = iinfo => @@ -836,7 +836,7 @@ protected override Delegate[] CreateGetters(IRow input, Func active, } } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool[] _active; @@ -844,7 +844,7 @@ private sealed class RowCursor : SynchronizedCursorBase, IRowCursor private readonly Delegate[] _getters; private readonly Action[] _disposers; - public RowCursor(IChannelProvider provider, OneToOneTransformBase parent, IRowCursor input, bool[] active) + public Cursor(IChannelProvider provider, OneToOneTransformBase parent, RowCursor input, bool[] active) : base(provider, input) { Ch.AssertValue(parent); @@ -880,9 +880,9 @@ public override void Dispose() base.Dispose(); } - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); @@ -898,7 +898,7 @@ public ValueGetter GetGetter(int col) return fn; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active == null || _active[col]; diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 331e5fff71..7c2d42168b 
100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -468,7 +468,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index 15b10c3e74..81d8e927b4 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -768,7 +768,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -777,7 +777,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return Utils.MarshalInvoke(MakeGetter, type.RawType, input, iinfo); } - private Delegate MakeGetter(IRow row, int src) => _termMap[src].GetMappingGetter(row); + private Delegate MakeGetter(Row row, int src) => _termMap[src].GetMappingGetter(row); private bool SaveAsOnnxCore(OnnxContext ctx, int iinfo, ColInfo info, string srcVariableName, string dstVariableName) { diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs index c136bda9c6..87c112b87d 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs +++ 
b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs @@ -280,7 +280,7 @@ private Trainer(Builder bldr, int max) /// the input type to the desired type /// The builder we add items to /// An associated training pipe - public static Trainer Create(IRow row, int col, bool autoConvert, int count, Builder bldr) + public static Trainer Create(Row row, int col, bool autoConvert, int count, Builder bldr) { Contracts.AssertValue(row); var schema = row.Schema; @@ -297,7 +297,7 @@ public static Trainer Create(IRow row, int col, bool autoConvert, int count, Bui return Utils.MarshalInvoke(CreateOne, bldr.ItemType.RawType, row, col, autoConvert, count, bldr); } - private static Trainer CreateOne(IRow row, int col, bool autoConvert, int count, Builder bldr) + private static Trainer CreateOne(Row row, int col, bool autoConvert, int count, Builder bldr) { Contracts.AssertValue(row); Contracts.AssertValue(bldr); @@ -313,7 +313,7 @@ private static Trainer CreateOne(IRow row, int col, bool autoConvert, int cou return new ImplOne(inputGetter, count, bldrT); } - private static Trainer CreateVec(IRow row, int col, int count, Builder bldr) + private static Trainer CreateVec(Row row, int col, int count, Builder bldr) { Contracts.AssertValue(row); Contracts.AssertValue(bldr); @@ -849,7 +849,7 @@ public static BoundTermMap CreateCore(IHostEnvironment env, Schema schema, Te return new Impl(env, schema, mapT, infos, textMetadata, iinfo); } - public abstract Delegate GetMappingGetter(IRow row); + public abstract Delegate GetMappingGetter(Row row); /// /// Allows us to optionally register metadata. It is also perfectly legal for @@ -890,7 +890,7 @@ private static uint MapDefault(ValueMapper map) return dst; } - public override Delegate GetMappingGetter(IRow input) + public override Delegate GetMappingGetter(Row input) { // When constructing the getter, there are a few cases we have to consider: // If scalar then it's just a straightforward mapping. 
diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index 067a4c37bb..dcce4f0018 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -103,12 +103,12 @@ public Func GetDependencies(Func predicate) yield break; } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { return new SimpleRow(OutputSchema, input, new[] { CreateScoreGetter(input, predicate, out disposer) }); } - public abstract Delegate CreateScoreGetter(IRow input, Func mapperPredicate, out Action disposer); + public abstract Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer); } // A generic base class for pipeline ensembles. This class contains the combiner. @@ -124,7 +124,7 @@ public Bound(SchemaBindablePipelineEnsemble parent, RoleMappedSchema schema) _combiner = parent.Combiner; } - public override Delegate CreateScoreGetter(IRow input, Func mapperPredicate, out Action disposer) + public override Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer) { disposer = null; @@ -158,7 +158,7 @@ public override Delegate CreateScoreGetter(IRow input, Func mapperPre return scoreGetter; } - public ValueGetter GetLabelGetter(IRow input, int i, out Action disposer) + public ValueGetter GetLabelGetter(Row input, int i, out Action disposer) { Parent.Host.Assert(0 <= i && i < Mappers.Length); Parent.Host.Check(Mappers[i].InputRoleMappedSchema.Label != null, "Mapper was not trained using a label column"); @@ -168,7 +168,7 @@ public ValueGetter GetLabelGetter(IRow input, int i, out Action disposer return RowCursorUtils.GetLabelGetter(pipelineRow, Mappers[i].InputRoleMappedSchema.Label.Index); } - public ValueGetter GetWeightGetter(IRow input, int i, out Action disposer) + public ValueGetter GetWeightGetter(Row input, int i, out Action disposer) { 
Parent.Host.Assert(0 <= i && i < Mappers.Length); diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 73d032dce5..997d14b281 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -1441,7 +1441,7 @@ private Dataset Construct(RoleMappedData examples, ref int numExamples, int maxB pch.SetHeader(new ProgressHeader("features"), e => e.SetProgress(0, iFeature, features.Length)); while (cursor.MoveNext()) { - iFeature = checked((int)cursor.Position); + iFeature = cursor.SlotIndex; if (!localConstructBinFeatures[iFeature]) continue; @@ -1670,19 +1670,19 @@ private Dataset Construct(RoleMappedData examples, ref int numExamples, int maxB return result; } - private void GetFeatureValues(ISlotCursor cursor, int iFeature, ValueGetter> getter, + private void GetFeatureValues(SlotCursor cursor, int iFeature, ValueGetter> getter, ref VBuffer temp, ref VBuffer doubleTemp, ValueMapper, VBuffer> copier) { while (cursor.MoveNext()) { - Contracts.Assert(iFeature >= checked((int)cursor.Position)); + Contracts.Assert(iFeature >= cursor.SlotIndex); - if (iFeature == checked((int)cursor.Position)) + if (iFeature == cursor.SlotIndex) break; } - Contracts.Assert(cursor.Position == iFeature); + Contracts.Assert(cursor.SlotIndex == iFeature); getter(ref temp); copier(in temp, ref doubleTemp); @@ -1700,13 +1700,13 @@ private static ValueGetter> SubsetGetter(ValueGetter> g /// Returns a slot dropper object that has ranges of slots to be dropped, /// based on an examination of the feature values. /// - private static SlotDropper ConstructDropSlotRanges(ISlotCursor cursor, + private static SlotDropper ConstructDropSlotRanges(SlotCursor cursor, ValueGetter> getter, ref VBuffer temp) { // The iteration here is slightly differently from a usual cursor iteration. 
Here, temp // already holds the value of the cursor's current position, and we don't really want // to re-fetch it, and the cursor is necessarily advanced. - Contracts.Assert(cursor.State == CursorState.Good); + Contracts.Assert(cursor.SlotIndex >= 0); BitArray rowHasMissing = new BitArray(temp.Length); for (; ; ) { @@ -3302,7 +3302,7 @@ public int GetLeaf(int treeId, in VBuffer features, ref List path) return TrainedEnsemble.GetTreeAt(treeId).GetLeaf(in features, ref path); } - public IRow GetSummaryIRowOrNull(RoleMappedSchema schema) + public Row GetSummaryIRowOrNull(RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names); @@ -3317,7 +3317,7 @@ public IRow GetSummaryIRowOrNull(RoleMappedSchema schema) return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } - public IRow GetStatsIRowOrNull(RoleMappedSchema schema) + public Row GetStatsIRowOrNull(RoleMappedSchema schema) { return null; } diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index 5f1e46b6dc..1e9cda953f 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -208,7 +208,7 @@ public BoundMapper(IExceptionContext ectx, TreeEnsembleFeaturizerBindableMapper OutputSchema = Schema.Create(new SchemaImpl(ectx, owner, treeValueType, leafIdType, pathIdType)); } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { _ectx.CheckValue(input, nameof(input)); _ectx.CheckValue(predicate, nameof(predicate)); @@ -216,7 +216,7 @@ public IRow GetRow(IRow input, Func predicate, out Action disposer) return new SimpleRow(OutputSchema, input, CreateGetters(input, predicate)); } - private Delegate[] CreateGetters(IRow input, Func predicate) + private Delegate[] CreateGetters(Row input, Func predicate) { 
_ectx.AssertValue(input); _ectx.AssertValue(predicate); @@ -259,7 +259,7 @@ private Delegate[] CreateGetters(IRow input, Func predicate) private sealed class State { private readonly IExceptionContext _ectx; - private readonly IRow _input; + private readonly Row _input; private readonly FastTreePredictionWrapper _ensemble; private readonly int _numTrees; private readonly int _numLeaves; @@ -276,7 +276,7 @@ private sealed class State private long _cachedLeafBuilderPosition; private long _cachedPathBuilderPosition; - public State(IExceptionContext ectx, IRow input, FastTreePredictionWrapper ensemble, int numLeaves, int featureIndex) + public State(IExceptionContext ectx, Row input, FastTreePredictionWrapper ensemble, int numLeaves, int featureIndex) { Contracts.AssertValue(ectx); _ectx = ectx; diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index 9c9cd820b4..0964154eb6 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -689,7 +689,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -714,7 +714,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return del; } - private ValueGetter GetSrcGetter(IRow input, int iinfo) + private ValueGetter GetSrcGetter(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs index 7b89bdb173..eb2f444a92 100644 --- 
a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs @@ -173,7 +173,7 @@ public Mapper(ImageGrayscaleTransform parent, Schema inputSchema) protected override Schema.DetachedColumn[] GetOutputColumnsCore() => _parent.ColumnPairs.Select((x, idx) => new Schema.DetachedColumn(x.output, InputSchema[ColMapNewToOld[idx]].Type, null)).ToArray(); - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs index 81b3d339e5..f996560011 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs @@ -161,7 +161,7 @@ public Mapper(ImageLoaderTransform parent, Schema inputSchema) _parent = parent; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs index ceecacb0f9..8b14382bcb 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs @@ -429,7 +429,7 @@ public Mapper(ImagePixelExtractorTransform parent, Schema inputSchema) protected override Schema.DetachedColumn[] GetOutputColumnsCore() => _parent._columns.Select((x, idx) => new Schema.DetachedColumn(x.Output, _types[idx], null)).ToArray(); 
- protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent._columns.Length); @@ -440,7 +440,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac } //REVIEW Rewrite it to where TValue : IConvertible - private ValueGetter> GetGetterCore(IRow input, int iinfo, out Action disposer) + private ValueGetter> GetGetterCore(Row input, int iinfo, out Action disposer) where TValue : struct { var type = _types[iinfo]; diff --git a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs index e5054b515f..8241b2ff12 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs @@ -306,7 +306,7 @@ public Mapper(ImageResizerTransform parent, Schema inputSchema) protected override Schema.DetachedColumn[] GetOutputColumnsCore() => _parent._columns.Select(x => new Schema.DetachedColumn(x.Output, x.Type, null)).ToArray(); - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent._columns.Length); diff --git a/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs b/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs index 54b9148e86..ffaa91aa51 100644 --- a/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/VectorToImageTransform.cs @@ -330,7 +330,7 @@ protected override ColumnType GetColumnTypeCore(int iinfo) return _types[iinfo]; } - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer) + 
protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValueOrNull(ch); Host.AssertValue(input); @@ -351,7 +351,7 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou } - private ValueGetter GetterFromType(IRow input, int iinfo, ColInfoEx ex, bool needScale) where TValue : IConvertible + private ValueGetter GetterFromType(Row input, int iinfo, ColInfoEx ex, bool needScale) where TValue : IConvertible { var getSrc = GetSrcGetter>(input, iinfo); var src = default(VBuffer); diff --git a/src/Microsoft.ML.Legacy/Models/ConfusionMatrix.cs b/src/Microsoft.ML.Legacy/Models/ConfusionMatrix.cs index 2fee41ee3a..99df7bd672 100644 --- a/src/Microsoft.ML.Legacy/Models/ConfusionMatrix.cs +++ b/src/Microsoft.ML.Legacy/Models/ConfusionMatrix.cs @@ -52,7 +52,7 @@ internal static List Create(IHostEnvironment env, IDataView con throw env.Except($"ConfusionMatrix data view did not contain a {nameof(MetricKinds.ColumnNames.Count)} column."); } - IRowCursor cursor = confusionMatrix.GetRowCursor(col => col == countColumn); + RowCursor cursor = confusionMatrix.GetRowCursor(col => col == countColumn); var slots = default(VBuffer>); confusionMatrix.Schema.GetMetadata(MetadataUtils.Kinds.SlotNames, countColumn, ref slots); var slotsValues = slots.GetValues(); diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index e414261336..4390ad31ab 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -353,7 +353,7 @@ private void UpdateCacheIfNeeded(long position, INamedOnnxValueGetter[] srcNamed } } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { disposer = null; Host.AssertValue(input); @@ -367,7 +367,7 @@ protected override Delegate 
MakeGetter(IRow input, int iinfo, Func ac return Utils.MarshalInvoke(MakeGetter, type, input, iinfo, srcNamedValueGetters, activeOutputColNames, outputCache); } - private Delegate MakeGetter(IRow input, int iinfo, INamedOnnxValueGetter[] srcNamedValueGetters, string[] activeOutputColNames, OutputCache outputCache) + private Delegate MakeGetter(Row input, int iinfo, INamedOnnxValueGetter[] srcNamedValueGetters, string[] activeOutputColNames, OutputCache outputCache) { Host.AssertValue(input); ValueGetter> valuegetter = (ref VBuffer dst) => @@ -384,7 +384,7 @@ private Delegate MakeGetter(IRow input, int iinfo, INamedOnnxValueGetter[] sr return valuegetter; } - private static INamedOnnxValueGetter[] GetNamedOnnxValueGetters(IRow input, + private static INamedOnnxValueGetter[] GetNamedOnnxValueGetters(Row input, string[] inputColNames, int[] inputColIndices, bool[] isInputVector, @@ -400,14 +400,14 @@ private static INamedOnnxValueGetter[] GetNamedOnnxValueGetters(IRow input, return srcNamedOnnxValueGetters; } - private static INamedOnnxValueGetter CreateNamedOnnxValueGetter(IRow input, System.Type onnxType, bool isVector, string colName, int colIndex, OnnxShape onnxShape) + private static INamedOnnxValueGetter CreateNamedOnnxValueGetter(Row input, System.Type onnxType, bool isVector, string colName, int colIndex, OnnxShape onnxShape) { var type = OnnxUtils.OnnxToMlNetType(onnxType).RawType; Contracts.AssertValue(type); return Utils.MarshalInvoke(CreateNameOnnxValueGetter, type, input, isVector, colName, colIndex, onnxShape); } - private static INamedOnnxValueGetter CreateNameOnnxValueGetter(IRow input, bool isVector, string colName, int colIndex, OnnxShape onnxShape) + private static INamedOnnxValueGetter CreateNameOnnxValueGetter(Row input, bool isVector, string colName, int colIndex, OnnxShape onnxShape) { if (isVector) return new NamedOnnxValueGetterVec(input, colName, colIndex, onnxShape); @@ -419,7 +419,7 @@ private class NameOnnxValueGetter : 
INamedOnnxValueGetter private readonly ValueGetter _srcgetter; private readonly string _colName; - public NameOnnxValueGetter(IRow input, string colName, int colIndex) + public NameOnnxValueGetter(Row input, string colName, int colIndex) { _colName = colName; _srcgetter = input.GetGetter(colIndex); @@ -439,7 +439,7 @@ private class NamedOnnxValueGetterVec : INamedOnnxValueGetter private readonly string _colName; private VBuffer _vBuffer; private VBuffer _vBufferDense; - public NamedOnnxValueGetterVec(IRow input, string colName, int colIndex, OnnxShape tensorShape) + public NamedOnnxValueGetterVec(Row input, string colName, int colIndex, OnnxShape tensorShape) { _srcgetter = input.GetGetter>(colIndex); _tensorShape = tensorShape; diff --git a/src/Microsoft.ML.PCA/PcaTransform.cs b/src/Microsoft.ML.PCA/PcaTransform.cs index 1a6a5c85f2..385b5d57a8 100644 --- a/src/Microsoft.ML.PCA/PcaTransform.cs +++ b/src/Microsoft.ML.PCA/PcaTransform.cs @@ -609,7 +609,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _numColumns); diff --git a/src/Microsoft.ML.Parquet/ParquetLoader.cs b/src/Microsoft.ML.Parquet/ParquetLoader.cs index 5627f9c0a1..e5fb2609d5 100644 --- a/src/Microsoft.ML.Parquet/ParquetLoader.cs +++ b/src/Microsoft.ML.Parquet/ParquetLoader.cs @@ -391,19 +391,19 @@ private static Stream OpenStream(string filename) return _rowCount; } - public IRowCursor GetRowCursor(Func predicate, Random rand = null) + public RowCursor GetRowCursor(Func predicate, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); return new Cursor(this, predicate, rand); } - public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator 
consolidator, Func predicate, int n, Random rand = null) + public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { _host.CheckValue(predicate, nameof(predicate)); _host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursor(predicate, rand) }; + return new RowCursor[] { GetRowCursor(predicate, rand) }; } public void Save(ModelSaveContext ctx) @@ -433,7 +433,7 @@ public void Save(ModelSaveContext ctx) } } - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { private readonly ParquetLoader _loader; private readonly Stream _fileStream; @@ -586,11 +586,11 @@ protected override bool MoveNextCore() return false; } - public ML.Data.Schema Schema => _loader.Schema; + public override ML.Data.Schema Schema => _loader.Schema; public override long Batch => 0; - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(IsColumnActive(col), nameof(col), "requested column not active"); @@ -612,7 +612,7 @@ public override ValueGetter GetIdGetter() }; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _colToActivesIndex.Length, nameof(col)); return _colToActivesIndex[col] >= 0; diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 465989a265..1993752f02 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -339,7 +339,7 @@ private void CheckInputSchema(ISchema schema, int matrixColumnIndexCol, int matr _env.CheckParam(type.Equals(_parent.MatrixRowIndexType), nameof(schema), msg); } - private Delegate[] CreateGetter(IRow input, bool[] active) + private Delegate[] CreateGetter(Row input, bool[] active) { _env.CheckValue(input, nameof(input)); 
_env.Assert(Utils.Size(active) == OutputSchema.ColumnCount); @@ -358,7 +358,7 @@ private Delegate[] CreateGetter(IRow input, bool[] active) return getters; } - public IRow GetRow(IRow input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate, out Action disposer) { var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); var getters = CreateGetter(input, active); diff --git a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs index 33bb90ae0b..b1305a5a90 100644 --- a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs +++ b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs @@ -249,7 +249,7 @@ private unsafe void Dispose(bool disposing) } } - private MFNode[] ConstructLabeledNodesFrom(IChannel ch, ICursor cursor, ValueGetter labGetter, + private MFNode[] ConstructLabeledNodesFrom(IChannel ch, RowCursor cursor, ValueGetter labGetter, ValueGetter rowGetter, ValueGetter colGetter, int rowCount, int colCount) { @@ -303,7 +303,7 @@ private MFNode[] ConstructLabeledNodesFrom(IChannel ch, ICursor cursor, ValueGet } public unsafe void Train(IChannel ch, int rowCount, int colCount, - ICursor cursor, ValueGetter labGetter, + RowCursor cursor, ValueGetter labGetter, ValueGetter rowGetter, ValueGetter colGetter) { if (_pMFModel != null) @@ -333,9 +333,9 @@ public unsafe void Train(IChannel ch, int rowCount, int colCount, } public unsafe void TrainWithValidation(IChannel ch, int rowCount, int colCount, - ICursor cursor, ValueGetter labGetter, + RowCursor cursor, ValueGetter labGetter, ValueGetter rowGetter, ValueGetter colGetter, - ICursor validCursor, ValueGetter validLabGetter, + RowCursor validCursor, ValueGetter validLabGetter, ValueGetter validRowGetter, ValueGetter validColGetter) { if (_pMFModel != null) diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs 
b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs index 11ce748cb2..df486453b8 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs @@ -96,7 +96,7 @@ public FieldAwareFactorizationMachineScalarRowMapper(IHostEnvironment env, RoleM } } - public IRow GetRow(IRow input, Func predicate, out Action action) + public Row GetRow(Row input, Func predicate, out Action action) { var latentSum = new AlignedArray(_pred.FieldCount * _pred.FieldCount * _pred.LatentDimAligned, 16); var featureBuffer = new VBuffer(); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs index 72692c4016..ad4fca42df 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs @@ -346,7 +346,7 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) public abstract void SaveSummary(TextWriter writer, RoleMappedSchema schema); - public virtual IRow GetSummaryIRowOrNull(RoleMappedSchema schema) + public virtual Row GetSummaryIRowOrNull(RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, Weight.Length, ref names); @@ -359,7 +359,7 @@ public virtual IRow GetSummaryIRowOrNull(RoleMappedSchema schema) return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } - public virtual IRow GetStatsIRowOrNull(RoleMappedSchema schema) => null; + public virtual Row GetStatsIRowOrNull(RoleMappedSchema schema) => null; public abstract void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null); @@ -502,7 +502,7 @@ public IList> GetSummaryInKeyValuePairs(RoleMappedS return results; } - public override IRow 
GetStatsIRowOrNull(RoleMappedSchema schema) + public override Row GetStatsIRowOrNull(RoleMappedSchema schema) { if (_stats == null) return null; diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index 6d8a411e7c..e2b280ca1e 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -982,12 +982,12 @@ public IDataView GetSummaryDataView(RoleMappedSchema schema) return bldr.GetDataView(); } - public IRow GetSummaryIRowOrNull(RoleMappedSchema schema) + public Row GetSummaryIRowOrNull(RoleMappedSchema schema) { return null; } - public IRow GetStatsIRowOrNull(RoleMappedSchema schema) + public Row GetStatsIRowOrNull(RoleMappedSchema schema) { if (_stats == null) return null; diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index b4ced664c1..e29435cc63 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -540,21 +540,21 @@ private void UpdateModelOnDisk(string modelDir, Arguments args) } } - private static ITensorValueGetter CreateTensorValueGetter(IRow input, bool isVector, int colIndex, TFShape tfShape) + private static ITensorValueGetter CreateTensorValueGetter(Row input, bool isVector, int colIndex, TFShape tfShape) { if (isVector) return new TensorValueGetterVec(input, colIndex, tfShape); return new TensorValueGetter(input, colIndex, tfShape); } - private static ITensorValueGetter CreateTensorValueGetter(IRow input, TFDataType tfType, bool isVector, int colIndex, TFShape tfShape) + private static ITensorValueGetter CreateTensorValueGetter(Row input, TFDataType tfType, bool isVector, int colIndex, TFShape tfShape) { var type = 
TFTensor.TypeFromTensorType(tfType); Contracts.AssertValue(type); return Utils.MarshalInvoke(CreateTensorValueGetter, type, input, isVector, colIndex, tfShape); } - private static ITensorValueGetter[] GetTensorValueGetters(IRow input, + private static ITensorValueGetter[] GetTensorValueGetters(Row input, int[] inputColIndices, bool[] isInputVector, TFDataType[] tfInputTypes, @@ -861,7 +861,7 @@ public OutputCache() } } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { disposer = null; Host.AssertValue(input); @@ -875,7 +875,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return Utils.MarshalInvoke(MakeGetter, type, input, iinfo, srcTensorGetters, activeOutputColNames, outputCache); } - private Delegate MakeGetter(IRow input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) + private Delegate MakeGetter(Row input, int iinfo, ITensorValueGetter[] srcTensorGetters, string[] activeOutputColNames, OutputCache outputCache) { Host.AssertValue(input); ValueGetter> valuegetter = (ref VBuffer dst) => @@ -964,7 +964,7 @@ private class TensorValueGetter : ITensorValueGetter private readonly TFShape _tfShape; private int _position; - public TensorValueGetter(IRow input, int colIndex, TFShape tfShape) + public TensorValueGetter(Row input, int colIndex, TFShape tfShape) { _srcgetter = input.GetGetter(colIndex); _tfShape = tfShape; @@ -1010,7 +1010,7 @@ private class TensorValueGetterVec : ITensorValueGetter private readonly T[] _bufferedData; private int _position; - public TensorValueGetterVec(IRow input, int colIndex, TFShape tfShape) + public TensorValueGetterVec(Row input, int colIndex, TFShape tfShape) { _srcgetter = input.GetGetter>(colIndex); _tfShape = tfShape; diff --git a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs 
b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs index 792b06d6b3..8dc4cc27b3 100644 --- a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs +++ b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs @@ -25,16 +25,16 @@ internal interface IStatefulTransformer : ITransformer IStatefulTransformer Clone(); } - internal interface IStatefulRow : IRow + internal abstract class StatefulRow : Row { - Action GetPinger(); + public abstract Action GetPinger(); } internal interface IStatefulRowMapper : IRowMapper { void CloneState(); - Action CreatePinger(IRow input, Func activeOutput, out Action disposer); + Action CreatePinger(Row input, Func activeOutput, out Action disposer); } /// @@ -98,8 +98,8 @@ public TimeSeriesPredictionFunction(IHostEnvironment env, ITransformer transform { } - internal IRow GetStatefulRows(IRow input, IRowToRowMapper mapper, Func active, - List rows, out Action disposer) + internal Row GetStatefulRows(Row input, IRowToRowMapper mapper, Func active, + List rows, out Action disposer) { Contracts.CheckValue(input, nameof(input)); Contracts.CheckValue(active, nameof(active)); @@ -123,8 +123,8 @@ internal IRow GetStatefulRows(IRow input, IRowToRowMapper mapper, Func= 1; --i) deps[i - 1] = innerMappers[i].GetDependencies(deps[i]); - IRow result = input; + Row result = input; for (int i = 0; i < innerMappers.Length; ++i) { Action localDisp; result = GetStatefulRows(result, innerMappers[i], deps[i], rows, out localDisp); - if (result is IStatefulRow) - rows.Add((IStatefulRow)result); + if (result is StatefulRow statefulResult) + rows.Add(statefulResult); if (localDisp != null) { @@ -158,25 +158,21 @@ internal IRow GetStatefulRows(IRow input, IRowToRowMapper mapper, Func CreatePinger(List rows) + private Action CreatePinger(List rows) { - Action[] pingers = new Action[rows.Count]; - int index = 0; + if (rows.Count == 0) + return position => { }; + Action pinger = null; foreach (var row in rows) - pingers[index++] = row.GetPinger(); - - return (long 
position) => - { - foreach (var ping in pingers) - ping(position); - }; + pinger += row.GetPinger(); + return pinger; } internal override void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, SchemaDefinition inputSchemaDefinition, SchemaDefinition outputSchemaDefinition, out Action disposer, out IRowReadableAs outputRow) { - List rows = new List(); - IRow outputRowLocal = outputRowLocal = GetStatefulRows(inputRow, mapper, col => true, rows, out disposer); + List rows = new List(); + Row outputRowLocal = outputRowLocal = GetStatefulRows(inputRow, mapper, col => true, rows, out disposer); var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.OutputSchema), ignoreMissingColumns, outputSchemaDefinition); _pinger = CreatePinger(rows); outputRow = cursorable.GetRow(outputRowLocal); diff --git a/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs index 1c867a5f19..8b41ee8edf 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs @@ -625,7 +625,7 @@ public Func GetDependencies(Func activeOutput) public void Save(ModelSaveContext ctx) => _parent.Save(ctx); - public Delegate[] CreateGetters(IRow input, Func activeOutput, out Action disposer) + public Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) { disposer = null; var getters = new Delegate[1]; @@ -637,7 +637,7 @@ public Delegate[] CreateGetters(IRow input, Func activeOutput, out Ac private delegate void ProcessData(ref TInput src, ref VBuffer dst); - private Delegate MakeGetter(IRow input, TState state) + private Delegate MakeGetter(Row input, TState state) { _host.AssertValue(input); var srcGetter = input.GetGetter(_inputColumnIndex); @@ -653,7 +653,7 @@ private Delegate MakeGetter(IRow 
input, TState state) return valueGetter; } - public Action CreatePinger(IRow input, Func activeOutput, out Action disposer) + public Action CreatePinger(Row input, Func activeOutput, out Action disposer) { disposer = null; Action pinger = null; @@ -663,7 +663,7 @@ public Action CreatePinger(IRow input, Func activeOutput, out A return pinger; } - private Action MakePinger(IRow input, TState state) + private Action MakePinger(Row input, TState state) { _host.AssertValue(input); var srcGetter = input.GetGetter(_inputColumnIndex); diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs index c6f38380f4..97bb3eb53a 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs @@ -352,7 +352,7 @@ private void InitFunction(TState state) return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { var srcCursor = _transform.GetRowCursor(predicate, rand); return new Cursor(this, srcCursor); @@ -365,35 +365,35 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random return _transform.GetRowCount(); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator = null; - return new IRowCursor[] { GetRowCursorCore(predicate, rand) }; + return new RowCursor[] { GetRowCursorCore(predicate, rand) }; } /// /// A wrapper around the cursor which replaces the schema. 
/// - private sealed class Cursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly SequentialTransformBase _parent; - public Cursor(SequentialTransformBase parent, IRowCursor input) + public Cursor(SequentialTransformBase parent, RowCursor input) : base(parent.Host, input) { Ch.Assert(input.Schema.ColumnCount == parent.OutputSchema.ColumnCount); _parent = parent; } - public Schema Schema { get { return _parent.OutputSchema; } } + public override Schema Schema { get { return _parent.OutputSchema; } } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.ColumnCount, "col"); return Input.IsColumnActive(col); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col), "col"); return Input.GetGetter(col); diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 8b6d74426c..7af0eb1cf1 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -429,7 +429,7 @@ private void InitFunction(TState state) public override bool CanShuffle { get { return false; } } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { var srcCursor = _transform.GetRowCursor(predicate, rand); var clone = (SequentialDataTransform)MemberwiseClone(); @@ -448,10 +448,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random return _transform.GetRowCount(); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { consolidator 
= null; - return new IRowCursor[] { GetRowCursorCore(predicate, rand) }; + return new RowCursor[] { GetRowCursorCore(predicate, rand) }; } public override void Save(ModelSaveContext ctx) @@ -478,26 +478,26 @@ public Func GetDependencies(Func predicate) return col => false; } - public IRow GetRow(IRow input, Func active, out Action disposer) => - new Row(_bindings.Schema, input, _mapper.CreateGetters(input, active, out disposer), + public Row GetRow(Row input, Func active, out Action disposer) => + new RowImpl(_bindings.Schema, input, _mapper.CreateGetters(input, active, out disposer), _mapper.CreatePinger(input, active, out disposer)); } - private sealed class Row : IStatefulRow + private sealed class RowImpl : StatefulRow { private readonly Schema _schema; - private readonly IRow _input; + private readonly Row _input; private readonly Delegate[] _getters; private readonly Action _pinger; - public Schema Schema { get { return _schema; } } + public override Schema Schema => _schema; - public long Position { get { return _input.Position; } } + public override long Position => _input.Position; - public long Batch { get { return _input.Batch; } } + public override long Batch => _input.Batch; - public Row(Schema schema, IRow input, Delegate[] getters, Action pinger) + public RowImpl(Schema schema, Row input, Delegate[] getters, Action pinger) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(input, nameof(input)); @@ -508,12 +508,12 @@ public Row(Schema schema, IRow input, Delegate[] getters, Action pinger) _pinger = pinger; } - public ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _input.GetIdGetter(); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Contracts.CheckParam(0 <= col && col < _getters.Length, nameof(col), "Invalid col value in GetGetter"); Contracts.Check(IsColumnActive(col)); @@ -523,10 +523,10 @@ public ValueGetter GetGetter(int col) return fn; } - public 
Action GetPinger() => + public override Action GetPinger() => _pinger as Action ?? throw Contracts.Except("Invalid TValue in GetPinger: '{0}'", typeof(long)); - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Contracts.Check(0 <= col && col < _getters.Length); return _getters[col] != null; @@ -536,26 +536,26 @@ public bool IsColumnActive(int col) /// /// A wrapper around the cursor which replaces the schema. /// - private sealed class Cursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly SequentialDataTransform _parent; - public Cursor(IHost host, SequentialDataTransform parent, IRowCursor input) + public Cursor(IHost host, SequentialDataTransform parent, RowCursor input) : base(host, input) { Ch.Assert(input.Schema.ColumnCount == parent.OutputSchema.ColumnCount); _parent = parent; } - public Schema Schema { get { return _parent.OutputSchema; } } + public override Schema Schema => _parent.OutputSchema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < Schema.ColumnCount, "col"); return Input.IsColumnActive(col); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col), "col"); return Input.GetGetter(col); @@ -688,14 +688,14 @@ private Func GetActiveOutputColumns(bool[] active) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Func predicateInput; var active = GetActive(predicate, out predicateInput); - return new RowCursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); + return new Cursor(Host, Source.GetRowCursor(predicateInput, rand), this, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + 
public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -710,9 +710,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); Host.AssertNonEmpty(inputs); - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, inputs[i], this, active); + cursors[i] = new Cursor(Host, inputs[i], this, active); return cursors; } @@ -745,7 +745,7 @@ public Func GetDependencies(Func predicate) Schema IRowToRowMapper.InputSchema => Source.Schema; - public IRow GetRow(IRow input, Func active, out Action disposer) + public Row GetRow(Row input, Func active, out Action disposer) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); @@ -766,21 +766,21 @@ public IRow GetRow(IRow input, Func active, out Action disposer) } } - private sealed class StatefulRow : IStatefulRow + private sealed class StatefulRow : TimeSeries.StatefulRow { - private readonly IRow _input; + private readonly Row _input; private readonly Delegate[] _getters; private readonly Action _pinger; private readonly TimeSeriesRowToRowMapperTransform _parent; - public long Batch { get { return _input.Batch; } } + public override long Batch => _input.Batch; - public long Position { get { return _input.Position; } } + public override long Position => _input.Position; - public Schema Schema { get; } + public override Schema Schema { get; } - public StatefulRow(IRow input, TimeSeriesRowToRowMapperTransform parent, + public StatefulRow(Row input, TimeSeriesRowToRowMapperTransform parent, Schema schema, Delegate[] getters, Action pinger) { _input = input; @@ -790,7 +790,7 @@ public StatefulRow(IRow input, TimeSeriesRowToRowMapperTransform 
parent, _pinger = pinger; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); @@ -804,12 +804,12 @@ public ValueGetter GetGetter(int col) return fn; } - public Action GetPinger() => + public override Action GetPinger() => _pinger as Action ?? throw Contracts.Except("Invalid TValue in GetPinger: '{0}'", typeof(long)); - public ValueGetter GetIdGetter() => _input.GetIdGetter(); + public override ValueGetter GetIdGetter() => _input.GetIdGetter(); - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); @@ -819,16 +819,16 @@ public bool IsColumnActive(int col) } } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Delegate[] _getters; private readonly bool[] _active; private readonly ColumnBindings _bindings; private readonly Action _disposer; - public Schema Schema => _bindings.Schema; + public override Schema Schema => _bindings.Schema; - public RowCursor(IChannelProvider provider, IRowCursor input, TimeSeriesRowToRowMapperTransform parent, bool[] active) + public Cursor(IChannelProvider provider, RowCursor input, TimeSeriesRowToRowMapperTransform parent, bool[] active) : base(provider, input) { var pred = parent.GetActiveOutputColumns(active); @@ -837,13 +837,13 @@ public RowCursor(IChannelProvider provider, IRowCursor input, TimeSeriesRowToRow _bindings = parent._bindings; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.Schema.ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs 
b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs index 7476920482..3418dcf64c 100644 --- a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs +++ b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs @@ -164,35 +164,35 @@ public static BootstrapSamplingTransformer Create(IHostEnvironment env, ModelLoa return false; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { // We do not use the input random because this cursor does not support shuffling. var rgen = new TauswortheHybrid(_state); var input = Source.GetRowCursor(predicate, _shuffleInput ? new TauswortheHybrid(rgen) : null); - IRowCursor cursor = new RowCursor(this, input, rgen); + RowCursor cursor = new Cursor(this, input, rgen); if (_poolSize > 1) cursor = RowShufflingTransformer.GetShuffledCursor(Host, _poolSize, cursor, new TauswortheHybrid(rgen)); return cursor; } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { var cursor = GetRowCursorCore(predicate, rand); consolidator = null; - return new IRowCursor[] { cursor }; + return new RowCursor[] { cursor }; } - private sealed class RowCursor : LinkedRootCursorBase, IRowCursor + private sealed class Cursor : LinkedRootCursorBase { private int _remaining; private readonly BootstrapSamplingTransformer _parent; private readonly Random _rgen; - public override long Batch { get { return 0; } } + public override long Batch => 0; - public Schema Schema { get { return Input.Schema; } } + public override Schema Schema => Input.Schema; - public RowCursor(BootstrapSamplingTransformer parent, IRowCursor input, Random rgen) + public Cursor(BootstrapSamplingTransformer parent, RowCursor input, Random rgen) : 
base(parent.Host, input) { Ch.AssertValue(rgen); @@ -211,12 +211,12 @@ public override ValueGetter GetIdGetter() }; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { return Input.GetGetter(col); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { return Input.IsColumnActive(col); } diff --git a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs index d597422854..d330a06f37 100644 --- a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs @@ -285,26 +285,26 @@ public static long[][] Train(IHostEnvironment env, IDataView input, string[] col public static bool IsValidColumnType(ColumnType type) => type == NumberType.R4 || type == NumberType.R8 || type.IsText; - private static CountAggregator GetOneAggregator(IRow row, ColumnType colType, int colSrc) + private static CountAggregator GetOneAggregator(Row row, ColumnType colType, int colSrc) { - Func del = GetOneAggregator; + Func del = GetOneAggregator; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(colType.RawType); return (CountAggregator)methodInfo.Invoke(null, new object[] { row, colType, colSrc }); } - private static CountAggregator GetOneAggregator(IRow row, ColumnType colType, int colSrc) + private static CountAggregator GetOneAggregator(Row row, ColumnType colType, int colSrc) { return new CountAggregator(colType, row.GetGetter(colSrc)); } - private static CountAggregator GetVecAggregator(IRow row, ColumnType colType, int colSrc) + private static CountAggregator GetVecAggregator(Row row, ColumnType colType, int colSrc) { - Func del = GetVecAggregator; + Func del = GetVecAggregator; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(colType.ItemType.RawType); return (CountAggregator)methodInfo.Invoke(null, new object[] { row, colType, colSrc }); } - 
private static CountAggregator GetVecAggregator(IRow row, ColumnType colType, int colSrc) + private static CountAggregator GetVecAggregator(Row row, ColumnType colType, int colSrc) { return new CountAggregator(colType, row.GetGetter>(colSrc)); } diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs index e35a41d5e3..e8d13b4944 100644 --- a/src/Microsoft.ML.Transforms/GcnTransform.cs +++ b/src/Microsoft.ML.Transforms/GcnTransform.cs @@ -460,7 +460,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index 1de6dea27b..7020eb6ede 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -156,7 +156,7 @@ public override void Save(ModelSaveContext ctx) public override Schema OutputSchema => _groupSchema.AsSchema; - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); @@ -173,12 +173,12 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random public override bool CanShuffle { get { return false; } } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); 
Host.CheckValueOrNull(rand); consolidator = null; - return new IRowCursor[] { GetRowCursorCore(predicate) }; + return new RowCursor[] { GetRowCursorCore(predicate) }; } /// @@ -430,7 +430,7 @@ public void GetMetadata(string kind, int col, ref TValue value) /// - The group column getters are taken directly from the trailing cursor. /// - The keep column getters are provided by the aggregators. /// - private sealed class Cursor : RootCursorBase, IRowCursor + private sealed class Cursor : RootCursorBase { /// /// This class keeps track of the previous group key and tests the current group key against the previous one. @@ -439,7 +439,7 @@ private sealed class GroupKeyColumnChecker { public readonly Func IsSameKey; - private static Func MakeSameChecker(IRow row, int col) + private static Func MakeSameChecker(Row row, int col) { T oldValue = default(T); T newValue = default(T); @@ -465,12 +465,12 @@ private static Func MakeSameChecker(IRow row, int col) }; } - public GroupKeyColumnChecker(IRow row, int col) + public GroupKeyColumnChecker(Row row, int col) { Contracts.AssertValue(row); var type = row.Schema.GetColumnType(col); - Func> del = MakeSameChecker; + Func> del = MakeSameChecker; var mi = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(type.RawType); IsSameKey = (Func)mi.Invoke(null, new object[] { row, col }); } @@ -480,7 +480,7 @@ public GroupKeyColumnChecker(IRow row, int col) // REVIEW: Currently, it always produces dense buffers. The anticipated use cases don't include many // default values at the moment. /// - /// This class handles the aggregation of one 'keep' column into a vector. It wraps around an 's + /// This class handles the aggregation of one 'keep' column into a vector. It wraps around an 's /// column, reads the data and aggregates. 
/// private abstract class KeepColumnAggregator @@ -489,7 +489,7 @@ private abstract class KeepColumnAggregator public abstract void SetSize(int size); public abstract void ReadValue(int position); - public static KeepColumnAggregator Create(IRow row, int col) + public static KeepColumnAggregator Create(Row row, int col) { Contracts.AssertValue(row); var colType = row.Schema.GetColumnType(col); @@ -497,7 +497,7 @@ public static KeepColumnAggregator Create(IRow row, int col) var type = typeof(ListAggregator<>); - var cons = type.MakeGenericType(colType.RawType).GetConstructor(new[] { typeof(IRow), typeof(int) }); + var cons = type.MakeGenericType(colType.RawType).GetConstructor(new[] { typeof(Row), typeof(int) }); return cons.Invoke(new object[] { row, col }) as KeepColumnAggregator; } @@ -508,7 +508,7 @@ private sealed class ListAggregator : KeepColumnAggregator private TValue[] _buffer; private int _size; - public ListAggregator(IRow row, int col) + public ListAggregator(Row row, int col) { Contracts.AssertValue(row); _srcGetter = row.GetGetter(col); @@ -546,15 +546,15 @@ public override void ReadValue(int position) private readonly bool[] _active; private readonly int _groupCount; - private readonly IRowCursor _leadingCursor; - private readonly IRowCursor _trailingCursor; + private readonly RowCursor _leadingCursor; + private readonly RowCursor _trailingCursor; private readonly GroupKeyColumnChecker[] _groupCheckers; private readonly KeepColumnAggregator[] _aggregators; public override long Batch { get { return 0; } } - public Schema Schema => _parent.OutputSchema; + public override Schema Schema => _parent.OutputSchema; public Cursor(GroupTransform parent, Func predicate) : base(parent.Host) @@ -601,7 +601,7 @@ public override ValueGetter GetIdGetter() return _trailingCursor.GetIdGetter(); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { _parent._groupSchema.CheckColumnInRange(col); return _active[col]; @@ -669,7 +669,7 @@ 
public override void Dispose() base.Dispose(); } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { _parent._groupSchema.CheckColumnInRange(col); if (!_active[col]) diff --git a/src/Microsoft.ML.Transforms/HashJoiningTransform.cs b/src/Microsoft.ML.Transforms/HashJoiningTransform.cs index bf674b8ed5..e8ce9c1c15 100644 --- a/src/Microsoft.ML.Transforms/HashJoiningTransform.cs +++ b/src/Microsoft.ML.Transforms/HashJoiningTransform.cs @@ -456,7 +456,7 @@ private void GetSlotNames(int iinfo, ref VBuffer> dst) private static MethodInfo _methGetterVecToVec; private static MethodInfo _methGetterVecToOne; - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer) + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValueOrNull(ch); Host.AssertValue(input); @@ -466,17 +466,17 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou // Construct MethodInfos templates that we need for the generic methods. if (_methGetterOneToOne == null) { - Func> del = ComposeGetterOneToOne; + Func> del = ComposeGetterOneToOne; Interlocked.CompareExchange(ref _methGetterOneToOne, del.GetMethodInfo().GetGenericMethodDefinition(), null); } if (_methGetterVecToVec == null) { - Func>> del = ComposeGetterVecToVec; + Func>> del = ComposeGetterVecToVec; Interlocked.CompareExchange(ref _methGetterVecToVec, del.GetMethodInfo().GetGenericMethodDefinition(), null); } if (_methGetterVecToOne == null) { - Func> del = ComposeGetterVecToOne; + Func> del = ComposeGetterVecToOne; Interlocked.CompareExchange(ref _methGetterVecToOne, del.GetMethodInfo().GetGenericMethodDefinition(), null); } @@ -502,7 +502,7 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou /// Input type. 
Must be a non-vector /// Row inout /// Index of the getter - private ValueGetter ComposeGetterOneToOne(IRow input, int iinfo) + private ValueGetter ComposeGetterOneToOne(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(!Infos[iinfo].TypeSrc.IsVector); @@ -526,7 +526,7 @@ private ValueGetter ComposeGetterOneToOne(IRow input, int iinfo) /// Input type. Must be a vector /// Row input /// Index of the getter - private ValueGetter> ComposeGetterVecToVec(IRow input, int iinfo) + private ValueGetter> ComposeGetterVecToVec(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(Infos[iinfo].TypeSrc.IsVector); @@ -590,7 +590,7 @@ private ValueGetter> ComposeGetterVecToVec(IRow input, int i /// Input type. Must be a vector /// Row input /// Index of the getter - private ValueGetter ComposeGetterVecToOne(IRow input, int iinfo) + private ValueGetter ComposeGetterVecToOne(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(Infos[iinfo].TypeSrc.IsVector); diff --git a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs index 818806083c..f883f29c45 100644 --- a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs +++ b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs @@ -352,7 +352,7 @@ private void GetSlotNames(int iinfo, ref VBuffer> dst) dst = editor.Commit(); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _infos.Length); @@ -367,7 +367,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac /// /// This is for the scalar case. 
/// - private ValueGetter> MakeGetterOne(IRow input, int iinfo) + private ValueGetter> MakeGetterOne(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(_infos[iinfo].TypeSrc.IsKey); @@ -397,7 +397,7 @@ private ValueGetter> MakeGetterOne(IRow input, int iinfo) /// /// This is for the indicator case - vector input and outputs should be concatenated. /// - private ValueGetter> MakeGetterInd(IRow input, int iinfo) + private ValueGetter> MakeGetterInd(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(_infos[iinfo].TypeSrc.IsVector); diff --git a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index 456ba0dc58..90191211dc 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -191,18 +191,18 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() } return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); disposer = null; - Func>> del = MakeVecGetter; + Func>> del = MakeVecGetter; var methodInfo = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(_srcTypes[iinfo].ItemType.RawType); return (Delegate)methodInfo.Invoke(this, new object[] { input, iinfo }); } - private ValueGetter> MakeVecGetter(IRow input, int iinfo) + private ValueGetter> MakeVecGetter(Row input, int iinfo) { var srcGetter = input.GetGetter>(_srcCols[iinfo]); var buffer = default(VBuffer); diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs index 72a52e5cdc..a58603c16e 100644 --- 
a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs @@ -237,7 +237,7 @@ private void GetSlotNames(int iinfo, ref VBuffer> dst) dst = editor.Commit(); } - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer) + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValueOrNull(ch); Host.AssertValue(input); diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs index 049911f48e..c54f6693bd 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs @@ -224,7 +224,7 @@ private static Delegate GetIsNADelegate(ColumnType type) return Runtime.Data.Conversion.Conversions.Instance.GetIsNAPredicate(type.ItemType); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _infos.Length); @@ -238,10 +238,10 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac /// /// Getter generator for single valued inputs. /// - private ValueGetter ComposeGetterOne(IRow input, int iinfo) + private ValueGetter ComposeGetterOne(Row input, int iinfo) => Utils.MarshalInvoke(ComposeGetterOne, _infos[iinfo].InputType.RawType, input, iinfo); - private ValueGetter ComposeGetterOne(IRow input, int iinfo) + private ValueGetter ComposeGetterOne(Row input, int iinfo) { var getSrc = input.GetGetter(ColMapNewToOld[iinfo]); var src = default(T); @@ -260,10 +260,10 @@ private ValueGetter ComposeGetterOne(IRow input, int iinfo) /// /// Getter generator for vector valued inputs. 
/// - private ValueGetter> ComposeGetterVec(IRow input, int iinfo) + private ValueGetter> ComposeGetterVec(Row input, int iinfo) => Utils.MarshalInvoke(ComposeGetterVec, _infos[iinfo].InputType.ItemType.RawType, input, iinfo); - private ValueGetter> ComposeGetterVec(IRow input, int iinfo) + private ValueGetter> ComposeGetterVec(Row input, int iinfo) { var getSrc = input.GetGetter>(ColMapNewToOld[iinfo]); var isNA = (InPredicate)_infos[iinfo].InputIsNA; diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index 41e7b5f857..b1b48f7864 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -645,7 +645,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _infos.Length); @@ -659,13 +659,13 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac /// /// Getter generator for single valued inputs. /// - private Delegate ComposeGetterOne(IRow input, int iinfo) + private Delegate ComposeGetterOne(Row input, int iinfo) => Utils.MarshalInvoke(ComposeGetterOne, _infos[iinfo].TypeSrc.RawType, input, iinfo); /// /// Replaces NA values for scalars. /// - private Delegate ComposeGetterOne(IRow input, int iinfo) + private Delegate ComposeGetterOne(Row input, int iinfo) { var getSrc = input.GetGetter(ColMapNewToOld[iinfo]); var src = default(T); @@ -685,13 +685,13 @@ private Delegate ComposeGetterOne(IRow input, int iinfo) /// /// Getter generator for vector valued inputs. 
/// - private Delegate ComposeGetterVec(IRow input, int iinfo) + private Delegate ComposeGetterVec(Row input, int iinfo) => Utils.MarshalInvoke(ComposeGetterVec, _infos[iinfo].TypeSrc.ItemType.RawType, input, iinfo); /// /// Replaces NA values for vectors. /// - private Delegate ComposeGetterVec(IRow input, int iinfo) + private Delegate ComposeGetterVec(Row input, int iinfo) { var getSrc = input.GetGetter>(ColMapNewToOld[iinfo]); var isNA = (InPredicate)_isNAs[iinfo]; diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs index 8466d1b5ef..f9cdde6ad6 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Transforms public sealed partial class MissingValueReplacingTransformer { - private static StatAggregator CreateStatAggregator(IChannel ch, ColumnType type, ReplacementKind? kind, bool bySlot, IRowCursor cursor, int col) + private static StatAggregator CreateStatAggregator(IChannel ch, ColumnType type, ReplacementKind? 
kind, bool bySlot, RowCursor cursor, int col) { ch.Assert(type.ItemType.IsNumber); if (!type.IsVector) @@ -150,7 +150,7 @@ private abstract class StatAggregator : StatAggregator /// public long RowCount { get { return _rowCount; } } - protected StatAggregator(IChannel ch, IRowCursor cursor, int col) + protected StatAggregator(IChannel ch, RowCursor cursor, int col) : base(ch) { Ch.AssertValue(cursor); @@ -178,7 +178,7 @@ private abstract class StatAggregatorAcrossSlots : StatAggregator< /// public UInt128 ValueCount { get { return _valueCount; } } - protected StatAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col) + protected StatAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) { } @@ -199,7 +199,7 @@ protected sealed override void ProcessRow(in VBuffer src) private abstract class StatAggregatorBySlot : StatAggregator, TStatItem[]> { - protected StatAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col) + protected StatAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col) : base(ch, cursor, col) { Ch.AssertValue(type); @@ -235,7 +235,7 @@ private abstract class MinMaxAggregatorOne : StatAggregator : StatAggregato /// public long ValuesProcessed { get { return _valuesProcessed; } } - protected MinMaxAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col, bool returnMax) + protected MinMaxAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col, bool returnMax) : base(ch, cursor, col) { ReturnMax = returnMax; @@ -300,7 +300,7 @@ private abstract class MinMaxAggregatorBySlot : StatAggregator // The count of the number of times ProcessValue has been called on a specific slot (used for tracking sparsity). 
private readonly long[] _valuesProcessed; - protected MinMaxAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col, bool returnMax) + protected MinMaxAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col, bool returnMax) : base(ch, type, cursor, col) { Ch.AssertValue(type); @@ -540,7 +540,7 @@ private static class R4 // mean of a set of Single values. Conversion to Single happens in GetStat. public sealed class MeanAggregatorOne : StatAggregator { - public MeanAggregatorOne(IChannel ch, IRowCursor cursor, int col) + public MeanAggregatorOne(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) { } @@ -560,7 +560,7 @@ public override object GetStat() public sealed class MeanAggregatorAcrossSlots : StatAggregatorAcrossSlots { - public MeanAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col) + public MeanAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) { } @@ -580,7 +580,7 @@ public override object GetStat() public sealed class MeanAggregatorBySlot : StatAggregatorBySlot { - public MeanAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col) + public MeanAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col) : base(ch, type, cursor, col) { } @@ -606,7 +606,7 @@ public override object GetStat() public sealed class MinMaxAggregatorOne : MinMaxAggregatorOne { - public MinMaxAggregatorOne(IChannel ch, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorOne(IChannel ch, RowCursor cursor, int col, bool returnMax) : base(ch, cursor, col, returnMax) { Stat = ReturnMax ? 
Single.NegativeInfinity : Single.PositiveInfinity; @@ -627,7 +627,7 @@ protected override void ProcessValueMax(in Single val) public sealed class MinMaxAggregatorAcrossSlots : MinMaxAggregatorAcrossSlots { - public MinMaxAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col, bool returnMax) : base(ch, cursor, col, returnMax) { Stat = ReturnMax ? Single.NegativeInfinity : Single.PositiveInfinity; @@ -659,7 +659,7 @@ public override object GetStat() public sealed class MinMaxAggregatorBySlot : MinMaxAggregatorBySlot { - public MinMaxAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col, bool returnMax) : base(ch, type, cursor, col, returnMax) { Single bound = ReturnMax ? Single.NegativeInfinity : Single.PositiveInfinity; @@ -701,7 +701,7 @@ private static class R8 { public sealed class MeanAggregatorOne : StatAggregator { - public MeanAggregatorOne(IChannel ch, IRowCursor cursor, int col) + public MeanAggregatorOne(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) { } @@ -719,7 +719,7 @@ public override object GetStat() public sealed class MeanAggregatorAcrossSlots : StatAggregatorAcrossSlots { - public MeanAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col) + public MeanAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) { } @@ -737,7 +737,7 @@ public override object GetStat() public sealed class MeanAggregatorBySlot : StatAggregatorBySlot { - public MeanAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col) + public MeanAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col) : base(ch, type, cursor, col) { } @@ -759,7 +759,7 @@ public override object GetStat() public sealed class MinMaxAggregatorOne : MinMaxAggregatorOne { - public 
MinMaxAggregatorOne(IChannel ch, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorOne(IChannel ch, RowCursor cursor, int col, bool returnMax) : base(ch, cursor, col, returnMax) { Stat = ReturnMax ? Double.NegativeInfinity : Double.PositiveInfinity; @@ -780,7 +780,7 @@ protected override void ProcessValueMax(in Double val) public sealed class MinMaxAggregatorAcrossSlots : MinMaxAggregatorAcrossSlots { - public MinMaxAggregatorAcrossSlots(IChannel ch, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col, bool returnMax) : base(ch, cursor, col, returnMax) { Stat = ReturnMax ? Double.NegativeInfinity : Double.PositiveInfinity; @@ -812,7 +812,7 @@ public override object GetStat() public sealed class MinMaxAggregatorBySlot : MinMaxAggregatorBySlot { - public MinMaxAggregatorBySlot(IChannel ch, ColumnType type, IRowCursor cursor, int col, bool returnMax) + public MinMaxAggregatorBySlot(IChannel ch, ColumnType type, RowCursor cursor, int col, bool returnMax) : base(ch, type, cursor, col, returnMax) { Double bound = ReturnMax ? 
Double.MinValue : Double.MaxValue; diff --git a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs index d2b3fe483e..0f24deb240 100644 --- a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs +++ b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs @@ -297,7 +297,7 @@ public override void Save(ModelSaveContext ctx) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -305,10 +305,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); var input = Source.GetRowCursor(inputPred); - return new RowCursor(Host, _bindings, input, active); + return new Cursor(Host, _bindings, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -316,7 +316,7 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); - IRowCursor input; + RowCursor input; if (n > 1 && ShouldUseParallelCursors(predicate) != false) { @@ -325,9 +325,9 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid if (inputs.Length != 1) { - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(Host, _bindings, inputs[i], active); + cursors[i] = new Cursor(Host, _bindings, inputs[i], active); return cursors; } input = inputs[0]; @@ -336,7 +336,7 
@@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid input = Source.GetRowCursor(inputPred); consolidator = null; - return new IRowCursor[] { new RowCursor(Host, _bindings, input, active) }; + return new RowCursor[] { new Cursor(Host, _bindings, input, active) }; } protected override Func GetDependenciesCore(Func predicate) @@ -349,7 +349,7 @@ protected override int MapColumnIndex(out bool isSrc, int col) return _bindings.MapColumnIndex(out isSrc, col); } - protected override Delegate[] CreateGetters(IRow input, Func active, out Action disposer) + protected override Delegate[] CreateGetters(Row input, Func active, out Action disposer) { Func activeInfos = iinfo => @@ -370,7 +370,7 @@ protected override Delegate[] CreateGetters(IRow input, Func active, getters[iinfo] = MakeGetter(iinfo); else { - Func> srcDel = GetSrcGetter; + Func> srcDel = GetSrcGetter; var meth = srcDel.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(_bindings.ColumnTypes[iinfo].ItemType.RawType); getters[iinfo] = (Delegate)meth.Invoke(this, new object[] { input, iinfo }); } @@ -379,7 +379,7 @@ protected override Delegate[] CreateGetters(IRow input, Func active, } } - private ValueGetter GetSrcGetter(IRow input, int iinfo) + private ValueGetter GetSrcGetter(Row input, int iinfo) { return input.GetGetter(_bindings.SrcCols[iinfo]); } @@ -403,13 +403,13 @@ private Delegate MakeGetterVec(int length) VBufferUtils.Resize(ref value, length, 0)); } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool[] _active; private readonly Delegate[] _getters; - public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, bool[] active) + public Cursor(IChannelProvider provider, Bindings bindings, RowCursor input, bool[] active) : base(provider, input) { Ch.CheckValue(bindings, nameof(bindings)); @@ -427,15 +427,15 @@ 
public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, } } - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active == null || _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); diff --git a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs index e2fc0359ad..c9f0f46d9b 100644 --- a/src/Microsoft.ML.Transforms/ProduceIdTransform.cs +++ b/src/Microsoft.ML.Transforms/ProduceIdTransform.cs @@ -136,7 +136,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(ctx); } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -145,19 +145,19 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var input = Source.GetRowCursor(inputPred, rand); bool active = predicate(_bindings.MapIinfoToCol(0)); - return new RowCursor(Host, _bindings, input, active); + return new Cursor(Host, _bindings, input, active); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); Host.CheckValueOrNull(rand); var inputPred = _bindings.GetDependencies(predicate); - IRowCursor[] cursors = Source.GetRowCursorSet(out consolidator, inputPred, n, rand); + RowCursor[] cursors = Source.GetRowCursorSet(out consolidator, inputPred, n, rand); bool active = 
predicate(_bindings.MapIinfoToCol(0)); for (int c = 0; c < cursors.Length; ++c) - cursors[c] = new RowCursor(Host, _bindings, cursors[c], active); + cursors[c] = new Cursor(Host, _bindings, cursors[c], active); return cursors; } @@ -167,14 +167,14 @@ public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolid return null; } - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool _active; - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, bool active) + public Cursor(IChannelProvider provider, Bindings bindings, RowCursor input, bool active) : base(provider, input) { Ch.CheckValue(bindings, nameof(bindings)); @@ -182,7 +182,7 @@ public RowCursor(IChannelProvider provider, Bindings bindings, IRowCursor input, _active = active; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.CheckParam(0 <= col && col < _bindings.ColumnCount, nameof(col)); bool isSrc; @@ -193,7 +193,7 @@ public bool IsColumnActive(int col) return _active; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col < _bindings.ColumnCount, nameof(col)); Ch.CheckParam(IsColumnActive(col), nameof(col)); diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index f629201dfa..5960ff1143 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -533,7 +533,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate 
MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -543,7 +543,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return GetterFromFloatType(input, iinfo); } - private ValueGetter> GetterFromVectorType(IRow input, int iinfo) + private ValueGetter> GetterFromVectorType(Row input, int iinfo) { var getSrc = input.GetGetter>(_srcCols[iinfo]); var src = default(VBuffer); @@ -560,7 +560,7 @@ private ValueGetter> GetterFromVectorType(IRow input, int iinfo) } - private ValueGetter> GetterFromFloatType(IRow input, int iinfo) + private ValueGetter> GetterFromFloatType(Row input, int iinfo) { var getSrc = input.GetGetter(_srcCols[iinfo]); var src = default(float); diff --git a/src/Microsoft.ML.Transforms/TermLookupTransformer.cs b/src/Microsoft.ML.Transforms/TermLookupTransformer.cs index 70c90a64d1..83a36c55af 100644 --- a/src/Microsoft.ML.Transforms/TermLookupTransformer.cs +++ b/src/Microsoft.ML.Transforms/TermLookupTransformer.cs @@ -114,7 +114,7 @@ public static VecValueMap CreateVector(VectorType type) return new VecValueMap(type); } - public abstract void Train(IExceptionContext ectx, IRowCursor cursor, int colTerm, int colValue); + public abstract void Train(IExceptionContext ectx, RowCursor cursor, int colTerm, int colValue); public abstract Delegate GetGetter(ValueGetter> getSrc); } @@ -136,7 +136,7 @@ protected ValueMap(ColumnType type) /// /// Bind this value map to the given cursor for "training". 
/// - public override void Train(IExceptionContext ectx, IRowCursor cursor, int colTerm, int colValue) + public override void Train(IExceptionContext ectx, RowCursor cursor, int colTerm, int colValue) { Contracts.AssertValue(ectx); ectx.Assert(_terms == null); @@ -694,7 +694,7 @@ private void SetMetadata() md.Seal(); } - protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, out Action disposer) + protected override Delegate GetGetterCore(IChannel ch, Row input, int iinfo, out Action disposer) { Host.AssertValueOrNull(ch); Host.AssertValue(input); diff --git a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs index 8978401c68..f05ab6161d 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs @@ -720,7 +720,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -729,7 +729,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return GetTopic(input, iinfo); } - private ValueGetter> GetTopic(IRow input, int iinfo) + private ValueGetter> GetTopic(Row input, int iinfo) { var getSrc = RowCursorUtils.GetVecGetterAs(NumberType.R8, input, _srcCols[iinfo]); var src = default(VBuffer); diff --git a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs index a84c7102d0..c57b8ffcb4 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs @@ -378,8 +378,8 @@ public NgramHashingTransformer(IHostEnvironment env, Arguments args, IDataView i 
string[][] friendlyNames = args.Column.Select(c => c.FriendlyNames).ToArray(); var helper = new InvertHashHelper(this, friendlyNames, inputPred, invertHashMaxCounts); - using (IRowCursor srcCursor = input.GetRowCursor(inputPred)) - using (var dstCursor = new RowCursor(this, srcCursor, active, helper.Decorate)) + using (RowCursor srcCursor = input.GetRowCursor(inputPred)) + using (var dstCursor = new Cursor(this, srcCursor, active, helper.Decorate)) { var allGetters = InvertHashHelper.CallAllGetters(dstCursor); while (dstCursor.MoveNext()) @@ -628,7 +628,7 @@ private void AssertValid(uint[] ngram, int ngramLength, int lim, int icol) return null; } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand = null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { Host.AssertValue(predicate, "predicate"); Host.AssertValueOrNull(rand); @@ -636,10 +636,10 @@ protected override IRowCursor GetRowCursorCore(Func predicate, Random var inputPred = _bindings.GetDependencies(predicate); var active = _bindings.GetActive(predicate); var input = Source.GetRowCursor(inputPred, rand); - return new RowCursor(this, input, active); + return new Cursor(this, input, active); } - public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, + public sealed override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { Host.CheckValue(predicate, nameof(predicate)); @@ -654,9 +654,9 @@ public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator c inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); Host.AssertNonEmpty(inputs); - var cursors = new IRowCursor[inputs.Length]; + var cursors = new RowCursor[inputs.Length]; for (int i = 0; i < inputs.Length; i++) - cursors[i] = new RowCursor(this, inputs[i], active); + cursors[i] = new Cursor(this, inputs[i], active); return cursors; } @@ -665,7 +665,7 @@ 
protected override Func GetDependenciesCore(Func predicate return _bindings.GetDependencies(predicate); } - protected override Delegate[] CreateGetters(IRow input, Func active, out Action disp) + protected override Delegate[] CreateGetters(Row input, Func active, out Action disp) { Func activeInfos = iinfo => @@ -693,7 +693,7 @@ protected override int MapColumnIndex(out bool isSrc, int col) return _bindings.MapColumnIndex(out isSrc, col); } - private Delegate MakeGetter(IChannel ch, IRow input, int iinfo, FinderDecorator decorator = null) + private Delegate MakeGetter(IChannel ch, Row input, int iinfo, FinderDecorator decorator = null) { ch.Assert(_bindings.Infos[iinfo].SrcTypes.All(t => t.IsVector && t.ItemType.IsKey)); @@ -728,15 +728,15 @@ private Delegate MakeGetter(IChannel ch, IRow input, int iinfo, FinderDecorator private delegate NgramIdFinder FinderDecorator(int iinfo, NgramIdFinder finder); - private sealed class RowCursor : SynchronizedCursorBase, IRowCursor + private sealed class Cursor : SynchronizedCursorBase { private readonly Bindings _bindings; private readonly bool[] _active; private readonly Delegate[] _getters; - public Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.AsSchema; - public RowCursor(NgramHashingTransformer parent, IRowCursor input, bool[] active, FinderDecorator decorator = null) + public Cursor(NgramHashingTransformer parent, RowCursor input, bool[] active, FinderDecorator decorator = null) : base(parent.Host, input) { Ch.AssertValue(parent); @@ -760,13 +760,13 @@ private bool IsIndexActive(int iinfo) return _active == null || _active[_bindings.MapIinfoToCol(iinfo)]; } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _bindings.ColumnCount); return _active == null || _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); @@ -827,7 +827,7 @@ public 
InvertHashHelper(NgramHashingTransformer parent, string[][] friendlyNames /// Construct an action that calls all the getters for a row, so as to easily force computation /// of lazily computed values. This will have the side effect of calling the decorator. /// - public static Action CallAllGetters(IRow row) + public static Action CallAllGetters(Row row) { var colCount = row.Schema.ColumnCount; List getters = new List(); @@ -845,14 +845,14 @@ public static Action CallAllGetters(IRow row) }; } - private static Action GetNoOpGetter(IRow row, int col) + private static Action GetNoOpGetter(Row row, int col) { - Func func = GetNoOpGetter; + Func func = GetNoOpGetter; var meth = func.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(row.Schema.GetColumnType(col).RawType); return (Action)meth.Invoke(null, new object[] { row, col }); } - private static Action GetNoOpGetter(IRow row, int col) + private static Action GetNoOpGetter(Row row, int col) { T value = default(T); var getter = row.GetGetter(col); diff --git a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs index 5f548c4632..ca9794cef6 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs @@ -665,7 +665,7 @@ private NgramIdFinder GetNgramIdFinder(int iinfo) }; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index 52ba66f0c6..eeffa7b8f7 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ 
-425,7 +425,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -1001,7 +1001,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs index 51bc1555bf..eb84af33a3 100644 --- a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs +++ b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs @@ -280,7 +280,7 @@ private static Dictionary CombinedDiacriticsMap } } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -299,7 +299,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return MakeGetterOne(input, iinfo); } - private ValueGetter> MakeGetterOne(IRow input, int iinfo) + private ValueGetter> MakeGetterOne(Row input, int iinfo) { var getSrc = input.GetGetter>(ColMapNewToOld[iinfo]); Host.AssertValue(getSrc); @@ -313,7 +313,7 @@ private ValueGetter> MakeGetterOne(IRow input, int iinfo) }; } - private ValueGetter>> MakeGetterVec(IRow input, int iinfo) + private ValueGetter>> MakeGetterVec(Row input, int iinfo) { var getSrc = 
input.GetGetter>>(ColMapNewToOld[iinfo]); Host.AssertValue(getSrc); diff --git a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs index 590b98af5e..8d4a46652d 100644 --- a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs +++ b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs @@ -398,7 +398,7 @@ private void AppendCharRepr(char c, StringBuilder bldr) } } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); @@ -409,7 +409,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return MakeGetterVec(input, iinfo); } - private ValueGetter> MakeGetterOne(IRow input, int iinfo) + private ValueGetter> MakeGetterOne(Row input, int iinfo) { Host.AssertValue(input); var getSrc = input.GetGetter>(ColMapNewToOld[iinfo]); @@ -438,7 +438,7 @@ private ValueGetter> MakeGetterOne(IRow input, int iinfo) }; } - private ValueGetter> MakeGetterVec(IRow input, int iinfo) + private ValueGetter> MakeGetterVec(Row input, int iinfo) { Host.AssertValue(input); diff --git a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs index a2056e8af5..585b435c70 100644 --- a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs +++ b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs @@ -559,7 +559,7 @@ private void SaveAsOnnxCore(OnnxContext ctx, string srcVariableName, string dstV nodeP.AddAttribute("axis", 1); } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && 
iinfo < _parent.ColumnPairs.Length); @@ -567,7 +567,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return GetGetterVec(input, iinfo); } - private ValueGetter> GetGetterVec(IRow input, int iinfo) + private ValueGetter> GetGetterVec(Row input, int iinfo) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); diff --git a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs index 4419e08888..307821d1cb 100644 --- a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs +++ b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs @@ -257,7 +257,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return result; } - protected override Delegate MakeGetter(IRow input, int iinfo, Func activeOutput, out Action disposer) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { Host.AssertValue(input); Host.Assert(0 <= iinfo && iinfo < _parent._columns.Length); @@ -272,7 +272,7 @@ protected override Delegate MakeGetter(IRow input, int iinfo, Func ac return MakeGetterVec(input, iinfo); } - private ValueGetter>> MakeGetterOne(IRow input, int iinfo) + private ValueGetter>> MakeGetterOne(Row input, int iinfo) { Host.AssertValue(input); var getSrc = input.GetGetter>(ColMapNewToOld[iinfo]); @@ -298,7 +298,7 @@ private ValueGetter>> MakeGetterOne(IRow input, int }; } - private ValueGetter>> MakeGetterVec(IRow input, int iinfo) + private ValueGetter>> MakeGetterVec(Row input, int iinfo) { Host.AssertValue(input); diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 6c58376f84..0467a5763d 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -179,19 +179,19 @@ public override bool CanShuffle get { return false; } } - protected override IRowCursor GetRowCursorCore(Func predicate, Random rand 
= null) + protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) { var activeInput = _schemaImpl.GetActiveInput(predicate); var inputCursor = Source.GetRowCursor(col => activeInput[col], null); return new Cursor(Host, inputCursor, _schemaImpl, predicate); } - public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, + public override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func predicate, int n, Random rand = null) { var activeInput = _schemaImpl.GetActiveInput(predicate); var inputCursors = Source.GetRowCursorSet(out consolidator, col => activeInput[col], n, null); - return Utils.BuildArray(inputCursors.Length, + return Utils.BuildArray(inputCursors.Length, x => new Cursor(Host, inputCursors[x], _schemaImpl, predicate)); } @@ -441,7 +441,7 @@ public void GetMetadata(string kind, int col, ref TValue value) } } - private sealed class Cursor : LinkedRootCursorBase, IRowCursor + private sealed class Cursor : LinkedRootCursorBase { private readonly SchemaImpl _schemaImpl; @@ -467,7 +467,7 @@ private sealed class Cursor : LinkedRootCursorBase, IRowCursor // Parallel to columns. 
private int[] _colSizes; - public Cursor(IChannelProvider provider, IRowCursor input, SchemaImpl schema, Func predicate) + public Cursor(IChannelProvider provider, RowCursor input, SchemaImpl schema, Func predicate) : base(provider, input) { _schemaImpl = schema; @@ -582,15 +582,15 @@ private Func MakeSizeGetter(int col) }; } - public Schema Schema => _schemaImpl.AsSchema; + public override Schema Schema => _schemaImpl.AsSchema; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { Ch.Check(0 <= col && col < _schemaImpl.ColumnCount); return _active[col]; } - public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { Ch.CheckParam(0 <= col && col < _schemaImpl.ColumnCount, nameof(col)); diff --git a/test/Microsoft.ML.Benchmarks/CacheDataViewBench.cs b/test/Microsoft.ML.Benchmarks/CacheDataViewBench.cs new file mode 100644 index 0000000000..d01491f36f --- /dev/null +++ b/test/Microsoft.ML.Benchmarks/CacheDataViewBench.cs @@ -0,0 +1,97 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using BenchmarkDotNet.Attributes; +using Microsoft.ML.Runtime.Data; +using System; + +namespace Microsoft.ML.Benchmarks +{ + public class CacheDataViewBench + { + private const int Length = 100000; + + // Global. + private IDataView _cacheDataView; + // Per iteration. 
+ private RowCursor _cursor; + private ValueGetter _getter; + + private RowSeeker _seeker; + private long[] _positions; + + [GlobalSetup(Targets = new[] { nameof(CacheWithCursor), nameof(CacheWithSeeker) })] + public void Setup() + { + var ctx = new MLContext(); + var builder = new ArrayDataViewBuilder(ctx); + int[] values = new int[Length]; + for (int i = 0; i < values.Length; ++i) + values[i] = i; + builder.AddColumn("A", NumberType.I4, values); + var dv = builder.GetDataView(); + var cacheDv = ctx.Data.Cache(dv); + + var col = cacheDv.Schema.GetColumnOrNull("A").Value; + // First do one pass through. + using (var cursor = cacheDv.GetRowCursor(colIndex => colIndex == col.Index)) + { + var getter = cursor.GetGetter(col.Index); + int val = 0; + int count = 0; + while (cursor.MoveNext()) + { + getter(ref val); + if (val != cursor.Position) + throw new Exception($"Unexpected value {val} at {cursor.Position}"); + count++; + } + if (count != Length) + throw new Exception($"Expected {Length} values in cache but only saw {count}"); + } + _cacheDataView = cacheDv; + + // Only needed for seeker, but may as well set it. 
+ _positions = new long[Length]; + var rand = new Random(0); + for (int i = 0; i < _positions.Length; ++i) + _positions[i] = rand.Next(Length); + } + + [IterationSetup(Target = nameof(CacheWithCursor))] + public void CacheWithCursorSetup() + { + var col = _cacheDataView.Schema.GetColumnOrNull("A").Value; + _cursor = _cacheDataView.GetRowCursor(colIndex => colIndex == col.Index); + _getter = _cursor.GetGetter(col.Index); + } + + [Benchmark] + public void CacheWithCursor() + { + int val = 0; + while (_cursor.MoveNext()) + _getter(ref val); + } + + [IterationSetup(Target = nameof(CacheWithSeeker))] + public void CacheWithSeekerSetup() + { + var col = _cacheDataView.Schema.GetColumnOrNull("A").Value; + _seeker = ((IRowSeekable)_cacheDataView).GetSeeker(colIndex => colIndex == col.Index); + _getter = _seeker.GetGetter(col.Index); + } + + [Benchmark] + public void CacheWithSeeker() + { + int val = 0; + foreach (long pos in _positions) + { + _seeker.MoveTo(pos); + _getter(ref val); + } + } + } +} diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index 6db0768287..4a518be564 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -17,26 +17,26 @@ namespace Microsoft.ML.Benchmarks { public class HashBench { - private sealed class Row : IRow + private sealed class RowImpl : Row { - public Schema Schema { get; } + public long PositionValue; - public long Position { get; set; } - - public long Batch => 0; - public ValueGetter GetIdGetter() + public override Schema Schema { get; } + public override long Position => PositionValue; + public override long Batch => 0; + public override ValueGetter GetIdGetter() => (ref UInt128 val) => val = new UInt128((ulong)Position, 0); private readonly Delegate _getter; - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { if (col != 0) throw new Exception(); return true; } - public ValueGetter GetGetter(int col) + 
public override ValueGetter GetGetter(int col) { if (col != 0) throw new Exception(); @@ -45,14 +45,14 @@ public ValueGetter GetGetter(int col) throw new Exception(); } - public static Row Create(ColumnType type, ValueGetter getter) + public static RowImpl Create(ColumnType type, ValueGetter getter) { if (type.RawType != typeof(T)) throw new Exception(); - return new Row(type, getter); + return new RowImpl(type, getter); } - private Row(ColumnType type, Delegate getter) + private RowImpl(ColumnType type, Delegate getter) { var builder = new SchemaBuilder(); builder.AddColumn("Foo", type, null); @@ -65,7 +65,7 @@ private Row(ColumnType type, Delegate getter) private readonly IHostEnvironment _env = new MLContext(); - private Row _inRow; + private RowImpl _inRow; private ValueGetter _getter; private ValueGetter> _vecGetter; @@ -73,7 +73,7 @@ private void InitMap(T val, ColumnType type, int hashBits = 20, ValueGetter dst = val; - _inRow = Row.Create(type, getter); + _inRow = RowImpl.Create(type, getter); // One million features is a nice, typical number. 
var info = new HashingTransformer.ColumnInfo("Foo", "Bar", hashBits: hashBits); var xf = new HashingTransformer(_env, new[] { info }); @@ -95,7 +95,7 @@ private void RunScalar() for (int i = 0; i < Count; ++i) { _getter(ref val); - ++_inRow.Position; + ++_inRow.PositionValue; } } @@ -114,7 +114,7 @@ private void RunVector() for (int i = 0; i < Count; ++i) { _vecGetter(ref val); - ++_inRow.Position; + ++_inRow.PositionValue; } } diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs index 1d102ca37d..08e8039b51 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs @@ -34,7 +34,7 @@ protected bool EqualTypes(ColumnType type1, ColumnType type2, bool exactTypes) return !exactTypes && type1 is VectorType vt1 && type2 is VectorType vt2 && vt1.ItemType.Equals(vt2.ItemType) && vt1.Size == vt2.Size; } - protected Func GetIdComparer(IRow r1, IRow r2, out ValueGetter idGetter) + protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) { var g1 = r1.GetIdGetter(); idGetter = g1; @@ -50,7 +50,7 @@ protected Func GetIdComparer(IRow r1, IRow r2, out ValueGetter id }; } - protected Func GetComparerOne(IRow r1, IRow r2, int col, Func fn) + protected Func GetComparerOne(Row r1, Row r2, int col, Func fn) { var g1 = r1.GetGetter(col); var g2 = r2.GetGetter(col); @@ -73,7 +73,7 @@ private static bool EqualWithEps(Double x, Double y) // bitwise comparison is needed because Abs(Inf-Inf) and Abs(NaN-NaN) are not 0s. 
return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || Math.Abs(x - y) < DoubleEps; } - protected Func GetComparerVec(IRow r1, IRow r2, int col, int size, Func fn) + protected Func GetComparerVec(Row r1, Row r2, int col, int size, Func fn) { var g1 = r1.GetGetter>(col); var g2 = r2.GetGetter>(col); @@ -153,7 +153,7 @@ protected bool CompareVec(in VBuffer v1, in VBuffer v2, int size, Func< return false; } } - protected Func GetColumnComparer(IRow r1, IRow r2, int col, ColumnType type, bool exactDoubles) + protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, bool exactDoubles) { if (type is VectorType vecType) { @@ -312,7 +312,7 @@ protected bool CheckSameValues(IDataView view1, IDataView view2, bool exactTypes return all; } - protected bool CheckSameValues(IRowCursor curs1, IRowCursor curs2, bool exactTypes, bool exactDoubles, bool checkId, bool checkIdCollisions = true) + protected bool CheckSameValues(RowCursor curs1, RowCursor curs2, bool exactTypes, bool exactDoubles, bool checkId, bool checkIdCollisions = true) { Contracts.Assert(curs1.Schema.ColumnCount == curs2.Schema.ColumnCount); @@ -392,13 +392,13 @@ protected bool CheckSameValues(IRowCursor curs1, IRowCursor curs2, bool exactTyp } } - protected bool CheckSameValues(IRowCursor curs1, IDataView view2, bool exactTypes = true, bool exactDoubles = true, bool checkId = true) + protected bool CheckSameValues(RowCursor curs1, IDataView view2, bool exactTypes = true, bool exactDoubles = true, bool checkId = true) { Contracts.Assert(curs1.Schema.ColumnCount == view2.Schema.ColumnCount); // Get a cursor for each column. 
int colLim = curs1.Schema.ColumnCount; - var cursors = new IRowCursor[colLim]; + var cursors = new RowCursor[colLim]; try { for (int col = 0; col < colLim; col++) diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index a08795ca4c..f96f6f205a 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -672,7 +672,7 @@ private void CombineAndTestTreeEnsembles(IDataView idv, IPredictorModel[] fastTr Assert.True(scoredArray[i].Schema.TryGetColumnIndex("PredictedLabel", out predColArray[i])); } - var cursors = new IRowCursor[predCount]; + var cursors = new RowCursor[predCount]; for (int i = 0; i < predCount; i++) cursors[i] = scoredArray[i].GetRowCursor(c => c == scoreColArray[i] || c == probColArray[i] || c == predColArray[i]); @@ -850,7 +850,7 @@ private void CombineAndTestEnsembles(IDataView idv, string name, string options, } } - var cursors = new IRowCursor[predCount]; + var cursors = new RowCursor[predCount]; for (int i = 0; i < predCount; i++) cursors[i] = scoredArray[i].GetRowCursor(c => c == scoreColArray[i] || c == probColArray[i] || c == predColArray[i]); diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index b0aa994f27..304ac6751f 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -865,7 +865,7 @@ protected bool CheckSameValues(IDataView view1, IDataView view2, bool exactTypes return all; } - protected bool CheckSameValues(IRowCursor curs1, IRowCursor curs2, bool exactTypes, bool exactDoubles, bool checkId, bool checkIdCollisions = true) + protected bool CheckSameValues(RowCursor curs1, RowCursor curs2, bool exactTypes, bool exactDoubles, bool checkId, bool checkIdCollisions = true) { Contracts.Assert(curs1.Schema.ColumnCount == 
curs2.Schema.ColumnCount); @@ -946,13 +946,13 @@ protected bool CheckSameValues(IRowCursor curs1, IRowCursor curs2, bool exactTyp } } - protected bool CheckSameValues(IRowCursor curs1, IDataView view2, bool exactTypes = true, bool exactDoubles = true, bool checkId = true) + protected bool CheckSameValues(RowCursor curs1, IDataView view2, bool exactTypes = true, bool exactDoubles = true, bool checkId = true) { Contracts.Assert(curs1.Schema.ColumnCount == view2.Schema.ColumnCount); // Get a cursor for each column. int colLim = curs1.Schema.ColumnCount; - var cursors = new IRowCursor[colLim]; + var cursors = new RowCursor[colLim]; try { for (int col = 0; col < colLim; col++) @@ -1029,7 +1029,7 @@ protected bool CheckSameValues(IRowCursor curs1, IDataView view2, bool exactType } } - protected Func GetIdComparer(IRow r1, IRow r2, out ValueGetter idGetter) + protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) { var g1 = r1.GetIdGetter(); idGetter = g1; @@ -1045,7 +1045,7 @@ protected Func GetIdComparer(IRow r1, IRow r2, out ValueGetter id }; } - protected Func GetColumnComparer(IRow r1, IRow r2, int col, ColumnType type, bool exactDoubles) + protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, bool exactDoubles) { if (!type.IsVector) { @@ -1172,7 +1172,7 @@ private static bool EqualWithEpsSingle(float x, float y) return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || Math.Abs(x - y) < SingleEps; } - protected Func GetComparerOne(IRow r1, IRow r2, int col, Func fn) + protected Func GetComparerOne(Row r1, Row r2, int col, Func fn) { var g1 = r1.GetGetter(col); var g2 = r2.GetGetter(col); @@ -1189,7 +1189,7 @@ protected Func GetComparerOne(IRow r1, IRow r2, int col, Func GetComparerVec(IRow r1, IRow r2, int col, int size, Func fn) + protected Func GetComparerVec(Row r1, Row r2, int col, int size, Func fn) { var g1 = r1.GetGetter>(col); var g2 = r2.GetGetter>(col); From 5f43d613f1afc3374bf02d9a4aa5480f32046ce1 Mon Sep 17 00:00:00 
2001 From: Shauheen Date: Tue, 4 Dec 2018 23:11:23 -0800 Subject: [PATCH 024/100] Update README.md for 0.8 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 15dd419948..38829cd751 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ Along with these ML capabilities, this first release of ML.NET also brings the f ML.NET runs on Windows, Linux, and macOS using [.NET Core](https://github.com/dotnet/core), or Windows using .NET Framework. 64 bit is supported on all platforms. 32 bit is supported on Windows, except for TensorFlow, LightGBM, and ONNX related functionality. -The current release is 0.7. Check out the [release notes](docs/release-notes/0.7/release-0.7.md) and [blog post](https://blogs.msdn.microsoft.com/dotnet/2018/11/08/announcing-ml-net-0-7-machine-learning-net/) to see what's new. +The current release is 0.8. Check out the [release notes](docs/release-notes/0.8/release-0.8.md) and [blog post](https://blogs.msdn.microsoft.com/dotnet/2018/12/02/announcing-ml-net-0-8-machine-learning-for-net/) to see what's new. First, ensure you have installed [.NET Core 2.1](https://www.microsoft.com/net/learn/get-started) or later. ML.NET also works on the .NET Framework 4.6.1 or later, but 4.7.2 or later is recommended. 
From a087ec5ae883d262f1622645bb0480eb6dd7c4d8 Mon Sep 17 00:00:00 2001 From: Najeeb Kazmi Date: Wed, 5 Dec 2018 00:11:52 -0800 Subject: [PATCH 025/100] Public API for KMeansPredictor (#1739) * Rename KMeansPredictor to KMeansModelParameters, rename namespace, and internalize interfaces except ICanSaveModel * Internalizing SaveCore * Adding sample * BestFriend attribute for the correct method overridden by LightGbmBinaryPredictor.SaveCore * Making FastTree best friends with LightGBM * Update core_ep-list.tsv * Reverting libmf commit id * Explicitly implementing IValueMapper and ICanSaveInTextFormat everywhere * Final cleanup * addressing some comments * reverting namespace change * update core_ep-list.tsv --- .../Microsoft.ML.Samples/Dynamic/KMeans.cs | 47 +++++++++++++++ .../Microsoft.ML.Samples.csproj | 1 + src/Microsoft.ML.Core/Data/IValueMapper.cs | 6 +- src/Microsoft.ML.Data/Dirty/PredictorBase.cs | 3 +- .../Dirty/PredictorInterfaces.cs | 3 +- .../Prediction/Calibrator.cs | 2 +- .../Scorers/SchemaBindablePredictorWrapper.cs | 2 +- .../Trainer/EnsembleDistributionPredictor.cs | 2 +- .../Trainer/EnsemblePredictor.cs | 2 +- .../Trainer/EnsemblePredictorBase.cs | 4 +- .../Multiclass/EnsembleMultiClassPredictor.cs | 2 +- src/Microsoft.ML.FastTree/FastTree.cs | 17 ++++-- .../FastTreeClassification.cs | 2 +- src/Microsoft.ML.FastTree/FastTreeRanking.cs | 2 +- .../FastTreeRegression.cs | 2 +- src/Microsoft.ML.FastTree/FastTreeTweedie.cs | 2 +- src/Microsoft.ML.FastTree/GamTrainer.cs | 16 ++--- .../Properties/AssemblyInfo.cs | 1 + .../RandomForestClassification.cs | 2 +- .../RandomForestRegression.cs | 2 +- .../OlsLinearRegression.cs | 4 +- ...sPredictor.cs => KMeansModelParameters.cs} | 58 +++++++++++-------- .../KMeansPlusPlusTrainer.cs | 12 ++-- .../KMeansStatic.cs | 4 +- .../AssemblyRegistration.cs | 6 +- .../LightGbmBinaryTrainer.cs | 2 +- .../LightGbmRankingTrainer.cs | 2 +- .../LightGbmRegressionTrainer.cs | 2 +- src/Microsoft.ML.PCA/PcaTrainer.cs | 12 ++-- 
.../MatrixFactorizationPredictor.cs | 2 +- ...FieldAwareFactorizationMachinePredictor.cs | 2 +- .../Standard/LinearPredictor.cs | 31 ++++++---- .../MulticlassLogisticRegression.cs | 15 +++-- .../MultiClass/MultiClassNaiveBayesTrainer.cs | 8 +-- .../Standard/MultiClass/Ova.cs | 24 +++++--- .../Standard/MultiClass/Pkpd.cs | 22 +++---- .../Standard/Simple/SimpleTrainers.cs | 4 +- .../Training.cs | 8 +-- .../EnvironmentExtensions.cs | 2 +- 39 files changed, 217 insertions(+), 123 deletions(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs rename src/Microsoft.ML.KMeansClustering/{KMeansPredictor.cs => KMeansModelParameters.cs} (89%) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs new file mode 100644 index 0000000000..c25e75b0e2 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs @@ -0,0 +1,47 @@ +using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; +using System; +using System.Collections.Generic; + +namespace Microsoft.ML.Samples.Dynamic +{ + public class KMeans_example + { + public static void KMeans() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Get a small dataset as an IEnumerable and convert it to an IDataView. + var data = SamplesUtils.DatasetUtils.GetInfertData(); + var trainData = ml.CreateStreamingDataView(data); + + // Preview of the data. + // + // Age Case Education Induced Parity PooledStratum RowNum ... + // 26 1 0-5yrs 1 6 3 1 ... + // 42 1 0-5yrs 1 1 1 2 ... + // 39 1 0-5yrs 2 6 4 3 ... + // 34 1 0-5yrs 2 4 2 4 ... + // 35 1 6-11yrs 1 3 32 5 ... + + // A pipeline for concatenating the age, parity and induced columns together in the Features column and training a KMeans model on them. 
+ string outputColumnName = "Features"; + var pipeline = ml.Transforms.Concatenate(outputColumnName, new[] { "Age", "Parity", "Induced" }) + .Append(ml.Clustering.Trainers.KMeans(outputColumnName, clustersCount: 2)); + + var model = pipeline.Fit(trainData); + + // Get cluster centroids and the number of clusters k from KMeansModelParameters. + VBuffer[] centroids = default; + int k; + + var modelParams = model.LastTransformer.Model; + modelParams.GetClusterCentroids(ref centroids, out k); + + var centroid = centroids[0].GetValues(); + Console.WriteLine("The coordinates of centroid 0 are: " + string.Join(", ", centroid.ToArray())); + } + } +} diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj index 63568930aa..135e502d9f 100644 --- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj +++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj @@ -7,6 +7,7 @@ + diff --git a/src/Microsoft.ML.Core/Data/IValueMapper.cs b/src/Microsoft.ML.Core/Data/IValueMapper.cs index c6abfbc02d..d0b8d4b755 100644 --- a/src/Microsoft.ML.Core/Data/IValueMapper.cs +++ b/src/Microsoft.ML.Core/Data/IValueMapper.cs @@ -24,7 +24,8 @@ namespace Microsoft.ML.Runtime.Data /// type arguments for GetMapper, but typically contain additional information like /// vector lengths. /// - public interface IValueMapper + [BestFriend] + internal interface IValueMapper { ColumnType InputType { get; } ColumnType OutputType { get; } @@ -43,7 +44,8 @@ public interface IValueMapper /// type arguments for GetMapper, but typically contain additional information like /// vector lengths. 
/// - public interface IValueMapperDist : IValueMapper + [BestFriend] + internal interface IValueMapperDist : IValueMapper { ColumnType DistType { get; } diff --git a/src/Microsoft.ML.Data/Dirty/PredictorBase.cs b/src/Microsoft.ML.Data/Dirty/PredictorBase.cs index 0d73db05fc..fe27705ff8 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorBase.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorBase.cs @@ -53,7 +53,8 @@ public virtual void Save(ModelSaveContext ctx) SaveCore(ctx); } - protected virtual void SaveCore(ModelSaveContext ctx) + [BestFriend] + private protected virtual void SaveCore(ModelSaveContext ctx) { Contracts.AssertValue(ctx); diff --git a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs index b04101f5b7..a1baf261c2 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs @@ -92,7 +92,8 @@ public interface ISampleableDistribution : IDistribution /// /// Predictors that can output themselves in a human-readable text format /// - public interface ICanSaveInTextFormat + [BestFriend] + internal interface ICanSaveInTextFormat { void SaveAsText(TextWriter writer, RoleMappedSchema schema); } diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index 90670d518f..45860d2e39 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -159,7 +159,7 @@ public void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator ca saver?.SaveAsIni(writer, schema, Calibrator); } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { // REVIEW: What about the calibrator? 
var saver = SubPredictor as ICanSaveInTextFormat; diff --git a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs index c5f6301154..279d2a4ad0 100644 --- a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs +++ b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs @@ -41,7 +41,7 @@ public abstract class SchemaBindablePredictorWrapperBase : ISchemaBindableMapper // ValueMapper or FloatPredictor is non-null (or both). With these guarantees, // the score value type (_scoreType) can be determined. protected readonly IPredictor Predictor; - protected readonly IValueMapper ValueMapper; + private protected readonly IValueMapper ValueMapper; protected readonly ColumnType ScoreType; bool ICanSavePfa.CanSavePfa => (ValueMapper as ICanSavePfa)?.CanSavePfa == true; diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs index 5a92512e6d..6346abfc80 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs @@ -109,7 +109,7 @@ public static EnsembleDistributionPredictor Create(IHostEnvironment env, ModelLo return new EnsembleDistributionPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs index b69b76f40e..30b2dce057 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs @@ -99,7 +99,7 @@ public static EnsemblePredictor Create(IHostEnvironment env, ModelLoadContext ct return new EnsemblePredictor(env, ctx); } - protected override void 
SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs index 032312acde..72a81b12e3 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs @@ -86,7 +86,7 @@ protected EnsemblePredictorBase(IHostEnvironment env, string name, ModelLoadCont ctx.LoadModel, SignatureLoadModel>(Host, out Combiner, @"Combiner"); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); @@ -128,7 +128,7 @@ protected override void SaveCore(ModelSaveContext ctx) /// /// Output the INI model to a given writer /// - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { using (var ch = Host.Start("SaveAsText")) { diff --git a/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs index 6f931f3e9c..a332d3ddb0 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs @@ -95,7 +95,7 @@ public static EnsembleMultiClassPredictor Create(IHostEnvironment env, ModelLoad return new EnsembleMultiClassPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 997d14b281..8676331bd2 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ 
b/src/Microsoft.ML.FastTree/FastTree.cs @@ -2830,8 +2830,12 @@ public abstract class FastTreePredictionWrapper : protected abstract uint VerCategoricalSplitSerialized { get; } - public ColumnType InputType { get; } - public ColumnType OutputType => NumberType.Float; + protected internal readonly ColumnType InputType; + ColumnType IValueMapper.InputType => InputType; + + protected readonly ColumnType OutputType; + ColumnType IValueMapper.OutputType => OutputType; + bool ICanSavePfa.CanSavePfa => true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => true; @@ -2853,6 +2857,7 @@ protected FastTreePredictionWrapper(IHostEnvironment env, string name, TreeEnsem Contracts.Assert(NumFeatures > MaxSplitFeatIdx); InputType = new VectorType(NumberType.Float, NumFeatures); + OutputType = NumberType.Float; } protected FastTreePredictionWrapper(IHostEnvironment env, string name, ModelLoadContext ctx, VersionInfo ver) @@ -2890,9 +2895,11 @@ protected FastTreePredictionWrapper(IHostEnvironment env, string name, ModelLoad // tricks. 
InputType = new VectorType(NumberType.Float, NumFeatures); + OutputType = NumberType.Float; } - protected override void SaveCore(ModelSaveContext ctx) + [BestFriend] + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); @@ -2907,7 +2914,7 @@ protected override void SaveCore(ModelSaveContext ctx) ctx.Writer.Write(NumFeatures); } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(Float)); @@ -2965,7 +2972,7 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) /// /// Output the INI model to a given writer /// - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValueOrNull(schema); diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs b/src/Microsoft.ML.FastTree/FastTreeClassification.cs index 7de0cd1ac5..3effa46c02 100644 --- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs +++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs @@ -79,7 +79,7 @@ private FastTreeBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index 9fe6023764..451f47482e 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -1140,7 +1140,7 @@ private FastTreeRankingPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git 
a/src/Microsoft.ML.FastTree/FastTreeRegression.cs b/src/Microsoft.ML.FastTree/FastTreeRegression.cs index 5b6aac44f7..a187accb26 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRegression.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRegression.cs @@ -477,7 +477,7 @@ private FastTreeRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs index 1a40239082..e49886884c 100644 --- a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs +++ b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs @@ -480,7 +480,7 @@ private FastTreeTweediePredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 05fc8eab4f..7943af8835 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -656,6 +656,7 @@ public abstract class GamPredictorBase : PredictorBase, public readonly double Intercept; private readonly int _numFeatures; private readonly ColumnType _inputType; + private readonly ColumnType _outputType; // These would be the bins for a totally sparse input. 
private readonly int[] _binsAtAllZero; // The output value for all zeros @@ -665,9 +666,8 @@ public abstract class GamPredictorBase : PredictorBase, private readonly int _inputLength; private readonly Dictionary _inputFeatureToDatasetFeatureMap; - public ColumnType InputType => _inputType; - - public ColumnType OutputType => NumberType.Float; + ColumnType IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => _outputType; private protected GamPredictorBase(IHostEnvironment env, string name, int inputLength, Dataset trainSet, double meanEffect, double[][] binEffects, int[] featureMap) @@ -683,6 +683,7 @@ private protected GamPredictorBase(IHostEnvironment env, string name, _numFeatures = binEffects.Length; _inputType = new VectorType(NumberType.Float, _inputLength); + _outputType = NumberType.Float; _featureMap = featureMap; Intercept = meanEffect; @@ -787,6 +788,7 @@ protected GamPredictorBase(IHostEnvironment env, string name, ModelLoadContext c } _inputType = new VectorType(NumberType.Float, _inputLength); + _outputType = NumberType.Float; } public override void Save(ModelSaveContext ctx) @@ -1000,7 +1002,7 @@ public double[] GetFeatureWeights(int featureIndex) return featureWeights; } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValueOrNull(schema); @@ -1043,7 +1045,7 @@ public void SaveAsText(TextWriter writer, RoleMappedSchema schema) public void SaveSummary(TextWriter writer, RoleMappedSchema schema) { - SaveAsText(writer, schema); + ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } /// @@ -1122,7 +1124,7 @@ private sealed class Context /// These are the number of input features, as opposed to the number of features used within GAM /// which may be lower. 
/// - public int NumFeatures => _pred.InputType.VectorSize; + public int NumFeatures => _pred._inputType.VectorSize; public Context(IChannel ch, GamPredictorBase pred, RoleMappedData data, IEvaluator eval) { @@ -1342,7 +1344,7 @@ private FeatureInfo(Context context, int index, int internalIndex, int[] catsMap public static FeatureInfo GetInfoForIndex(Context context, int index) { Contracts.AssertValue(context); - Contracts.Assert(0 <= index && index < context._pred.InputType.ValueCount); + Contracts.Assert(0 <= index && index < context._pred._inputType.ValueCount); lock (context._pred) { int internalIndex; diff --git a/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs b/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs index a03d7bdab6..cd27563c10 100644 --- a/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs @@ -6,5 +6,6 @@ using Microsoft.ML; [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Core.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.LightGBM" + PublicKey.Value)] [assembly: WantsToBeBestFriends] diff --git a/src/Microsoft.ML.FastTree/RandomForestClassification.cs b/src/Microsoft.ML.FastTree/RandomForestClassification.cs index 21e6f50722..eb4a773cf9 100644 --- a/src/Microsoft.ML.FastTree/RandomForestClassification.cs +++ b/src/Microsoft.ML.FastTree/RandomForestClassification.cs @@ -88,7 +88,7 @@ private FastForestClassificationPredictor(IHostEnvironment env, ModelLoadContext { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.FastTree/RandomForestRegression.cs b/src/Microsoft.ML.FastTree/RandomForestRegression.cs index 71daa1b169..1b70ab32f4 100644 --- a/src/Microsoft.ML.FastTree/RandomForestRegression.cs +++ b/src/Microsoft.ML.FastTree/RandomForestRegression.cs @@ -76,7 +76,7 
@@ private FastForestRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx _quantileSampleCount = ctx.Reader.ReadInt32(); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs index 5f69c3e83d..c4fdd85e22 100644 --- a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs +++ b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs @@ -299,7 +299,7 @@ private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Fac Double tss = 0; // total sum of squares using (var cursor = cursorFactory.Create()) { - var lrPredictor = new LinearRegressionPredictor(Host, in weights, bias); + IValueMapper lrPredictor = new LinearRegressionPredictor(Host, in weights, bias); var lrMap = lrPredictor.GetMapper, float>(); float yh = default; while (cursor.MoveNext()) @@ -685,7 +685,7 @@ private OlsLinearRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) ProbCheckDecode(_pValues[i]); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPredictor.cs b/src/Microsoft.ML.KMeansClustering/KMeansModelParameters.cs similarity index 89% rename from src/Microsoft.ML.KMeansClustering/KMeansPredictor.cs rename to src/Microsoft.ML.KMeansClustering/KMeansModelParameters.cs index 38b5116da4..e5862249bb 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansPredictor.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansModelParameters.cs @@ -4,31 +4,37 @@ using Float = System.Single; -using System; -using System.IO; -using Microsoft.ML.Runtime.Numeric; -using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime; using 
Microsoft.ML.Runtime.Data; -using Microsoft.ML.Trainers.KMeans; +using Microsoft.ML.Runtime.Internal.Internallearn; +using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.Model.Onnx; -using Microsoft.ML.Runtime.Internal.Internallearn; +using Microsoft.ML.Runtime.Numeric; +using Microsoft.ML.Trainers.KMeans; +using System; +using System.IO; using System.Collections.Generic; -[assembly: LoadableClass(typeof(KMeansPredictor), null, typeof(SignatureLoadModel), - "KMeans predictor", KMeansPredictor.LoaderSignature)] +[assembly: LoadableClass(typeof(KMeansModelParameters), null, typeof(SignatureLoadModel), + "KMeans predictor", KMeansModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.KMeans { - public sealed class KMeansPredictor : + /// + /// + /// + /// + public sealed class KMeansModelParameters : PredictorBase>, IValueMapper, ICanSaveInTextFormat, ICanSaveModel, ISingleCanSaveOnnx { - public const string LoaderSignature = "KMeansPredictor"; + internal const string LoaderSignature = "KMeansPredictor"; /// /// Version information to be saved in binary format @@ -42,12 +48,16 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010002, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(KMeansPredictor).Assembly.FullName); + loaderAssemblyName: typeof(KMeansModelParameters).Assembly.FullName); } + // REVIEW: Leaving this public for now until we figure out the correct way to remove it. 
public override PredictionKind PredictionKind => PredictionKind.Clustering; - public ColumnType InputType { get; } - public ColumnType OutputType { get; } + + private readonly ColumnType _inputType; + private readonly ColumnType _outputType; + ColumnType IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => _outputType; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => true; @@ -66,7 +76,7 @@ private static VersionInfo GetVersionInfo() /// a deep copy, if false then this constructor will take ownership of the passed in centroid vectors. /// If false then the caller must take care to not use or modify the input vectors once this object /// is constructed, and should probably remove all references. - public KMeansPredictor(IHostEnvironment env, int k, VBuffer[] centroids, bool copyIn) + public KMeansModelParameters(IHostEnvironment env, int k, VBuffer[] centroids, bool copyIn) : base(env, LoaderSignature) { Host.CheckParam(k > 0, nameof(k), "Need at least one cluster"); @@ -92,8 +102,8 @@ public KMeansPredictor(IHostEnvironment env, int k, VBuffer[] centroids, InitPredictor(); - InputType = new VectorType(NumberType.Float, _dimensionality); - OutputType = new VectorType(NumberType.Float, _k); + _inputType = new VectorType(NumberType.Float, _dimensionality); + _outputType = new VectorType(NumberType.Float, _k); } /// @@ -101,7 +111,7 @@ public KMeansPredictor(IHostEnvironment env, int k, VBuffer[] centroids, /// /// The load context /// The host environment - private KMeansPredictor(IHostEnvironment env, ModelLoadContext ctx) + private KMeansModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature, ctx) { // *** Binary format *** @@ -134,11 +144,11 @@ private KMeansPredictor(IHostEnvironment env, ModelLoadContext ctx) InitPredictor(); - InputType = new VectorType(NumberType.Float, _dimensionality); - OutputType = new VectorType(NumberType.Float, _k); + _inputType = new VectorType(NumberType.Float, 
_dimensionality); + _outputType = new VectorType(NumberType.Float, _k); } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(VBuffer)); @@ -169,7 +179,7 @@ private void Map(in VBuffer src, Span distances) } } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine("K: {0}", _k); writer.WriteLine("Dimensionality: {0}", _dimensionality); @@ -215,7 +225,7 @@ public void SaveAsText(TextWriter writer, RoleMappedSchema schema) /// Save the predictor in binary format. /// /// The context to save to - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -247,12 +257,12 @@ protected override void SaveCore(ModelSaveContext ctx) /// /// This method is called by reflection to instantiate a predictor. 
/// - public static KMeansPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static KMeansModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new KMeansPredictor(env, ctx); + return new KMeansModelParameters(env, ctx); } /// diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs index 389a8be9c2..72d4513627 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs @@ -29,7 +29,7 @@ namespace Microsoft.ML.Trainers.KMeans { /// - public class KMeansPlusPlusTrainer : TrainerEstimatorBase, KMeansPredictor> + public class KMeansPlusPlusTrainer : TrainerEstimatorBase, KMeansModelParameters> { public const string LoadNameValue = "KMeansPlusPlus"; internal const string UserNameValue = "KMeans++ Clustering"; @@ -151,7 +151,7 @@ private KMeansPlusPlusTrainer(IHostEnvironment env, Arguments args, Action MakeTransformer(KMeansPredictor model, Schema trainSchema) - => new ClusteringPredictionTransformer(Host, model, trainSchema, _featureColumn); + protected override ClusteringPredictionTransformer MakeTransformer(KMeansModelParameters model, Schema trainSchema) + => new ClusteringPredictionTransformer(Host, model, trainSchema, _featureColumn); } internal static class KMeansPlusPlusInit diff --git a/src/Microsoft.ML.KMeansClustering/KMeansStatic.cs b/src/Microsoft.ML.KMeansClustering/KMeansStatic.cs index f5fe8a91d9..1fb94afc8e 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansStatic.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansStatic.cs @@ -4,8 +4,8 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Trainers.KMeans; using Microsoft.ML.StaticPipe.Runtime; +using Microsoft.ML.Trainers.KMeans; using System; namespace 
Microsoft.ML.StaticPipe @@ -33,7 +33,7 @@ public static (Vector score, Key predictedLabel) KMeans(this Cluste Vector features, Scalar weights = null, int clustersCount = KMeansPlusPlusTrainer.Defaults.K, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { Contracts.CheckValue(features, nameof(features)); Contracts.CheckValueOrNull(weights); diff --git a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs index 022645b069..3766199c56 100644 --- a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs +++ b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs @@ -4,11 +4,11 @@ using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Sweeper; +using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Trainers.KMeans; using Microsoft.ML.Trainers.PCA; -using Microsoft.ML.Runtime.Sweeper; -using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Transforms.Categorical; using System; using System.Reflection; @@ -44,7 +44,7 @@ private static bool LoadStandardAssemblies() _ = typeof(TextLoader).Assembly; // ML.Data //_ = typeof(EnsemblePredictor).Assembly); // ML.Ensemble BUG https://github.com/dotnet/machinelearning/issues/1078 Ensemble isn't in a NuGet package _ = typeof(FastTreeBinaryPredictor).Assembly; // ML.FastTree - _ = typeof(KMeansPredictor).Assembly; // ML.KMeansClustering + _ = typeof(KMeansModelParameters).Assembly; // ML.KMeansClustering _ = typeof(Maml).Assembly; // ML.Maml _ = typeof(PcaPredictor).Assembly; // ML.PCA _ = typeof(SweepCommand).Assembly; // ML.Sweeper diff --git a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs index d279e6a0cf..4217ee0612 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs @@ -65,7 +65,7 @@ private LightGbmBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - 
protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs index 61b2c965b1..4f0d49b75e 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs @@ -61,7 +61,7 @@ private LightGbmRankingPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs index a831d46ee3..feeb227ba4 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs @@ -61,7 +61,7 @@ private LightGbmRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs index d37cf58851..7740d2f6e6 100644 --- a/src/Microsoft.ML.PCA/PcaTrainer.cs +++ b/src/Microsoft.ML.PCA/PcaTrainer.cs @@ -462,7 +462,7 @@ private PcaPredictor(IHostEnvironment env, ModelLoadContext ctx) _inputType = new VectorType(NumberType.Float, _dimension); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -500,10 +500,10 @@ public static PcaPredictor Create(IHostEnvironment env, ModelLoadContext ctx) public void SaveSummary(TextWriter writer, RoleMappedSchema schema) { 
- SaveAsText(writer, schema); + ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine("Dimension: {0}", _dimension); writer.WriteLine("Rank: {0}", _rank); @@ -550,17 +550,17 @@ public IDataView GetSummaryDataView(RoleMappedSchema schema) return bldr.GetDataView(); } - public ColumnType InputType + ColumnType IValueMapper.InputType { get { return _inputType; } } - public ColumnType OutputType + ColumnType IValueMapper.OutputType { get { return NumberType.Float; } } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(float)); diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 1993752f02..55e50c5e36 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -163,7 +163,7 @@ public void Save(ModelSaveContext ctx) /// /// Save the trained matrix factorization model (two factor matrices) in text format /// - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine("# Imputed matrix is P * Q'"); writer.WriteLine("# P in R^({0} x {1}), rows correpond to Y item", _numberOfRows, _approximationRank); diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs index 91e57b13a9..7c818102bc 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs +++ 
b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs @@ -120,7 +120,7 @@ public static FieldAwareFactorizationMachinePredictor Create(IHostEnvironment en return new FieldAwareFactorizationMachinePredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { Host.AssertValue(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs index ad4fca42df..538fb51c65 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs @@ -98,9 +98,7 @@ IEnumerator IEnumerable.GetEnumerator() /// The predictor's bias term. public Float Bias { get; protected set; } - public ColumnType InputType { get; } - - public ColumnType OutputType => NumberType.Float; + private readonly ColumnType _inputType; bool ICanSavePfa.CanSavePfa => true; @@ -122,7 +120,7 @@ internal LinearPredictor(IHostEnvironment env, string name, in VBuffer we Weight = weights; Bias = bias; - InputType = new VectorType(NumberType.Float, Weight.Length); + _inputType = new VectorType(NumberType.Float, Weight.Length); if (Weight.IsDense) _weightsDense = Weight; @@ -177,7 +175,7 @@ protected LinearPredictor(IHostEnvironment env, string name, ModelLoadContext ct else Weight = new VBuffer(len, Utils.Size(weights), weights, indices); - InputType = new VectorType(NumberType.Float, Weight.Length); + _inputType = new VectorType(NumberType.Float, Weight.Length); WarnOnOldNormalizer(ctx, GetType(), Host); if (Weight.IsDense) @@ -186,7 +184,8 @@ protected LinearPredictor(IHostEnvironment env, string name, ModelLoadContext ct _weightsDenseLock = new object(); } - protected override void SaveCore(ModelSaveContext ctx) + [BestFriend] + private protected override void SaveCore(ModelSaveContext ctx) { 
base.SaveCore(ctx); @@ -284,7 +283,17 @@ private void EnsureWeightsDense() } } - public ValueMapper GetMapper() + ColumnType IValueMapper.InputType + { + get { return _inputType; } + } + + ColumnType IValueMapper.OutputType + { + get { return NumberType.Float; } + } + + ValueMapper IValueMapper.GetMapper() { Contracts.Check(typeof(TIn) == typeof(VBuffer)); Contracts.Check(typeof(TOut) == typeof(Float)); @@ -327,7 +336,7 @@ protected void CombineParameters(IList> models, out VBuff bias /= models.Count; } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); @@ -451,7 +460,7 @@ public static IPredictorProducing Create(IHostEnvironment env, ModelLoadC return new SchemaBindableCalibratedPredictor(env, predictor, calibrator); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { // *** Binary format *** // (Base class) @@ -602,7 +611,7 @@ public static LinearRegressionPredictor Create(IHostEnvironment env, ModelLoadCo return new LinearRegressionPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -678,7 +687,7 @@ public static PoissonRegressionPredictor Create(IHostEnvironment env, ModelLoadC return new PoissonRegressionPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index e2b280ca1e..a114c2bb18 100644 --- 
a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -387,8 +387,11 @@ private static VersionInfo GetVersionInfo() private volatile VBuffer[] _weightsDense; public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification; - public ColumnType InputType { get; } - public ColumnType OutputType { get; } + internal readonly ColumnType InputType; + internal readonly ColumnType OutputType; + ColumnType IValueMapper.InputType => InputType; + ColumnType IValueMapper.OutputType => OutputType; + bool ICanSavePfa.CanSavePfa => true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => true; @@ -560,7 +563,7 @@ public static MulticlassLogisticRegressionPredictor Create(IHostEnvironment env, return new MulticlassLogisticRegressionPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -703,7 +706,7 @@ private static int NonZeroCount(in VBuffer vector) return count; } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TSrc) == typeof(VBuffer), "Invalid source type in GetMapper"); Host.Check(typeof(TDst) == typeof(VBuffer), "Invalid destination type in GetMapper"); @@ -774,7 +777,7 @@ private void Calibrate(Span dst) /// /// Output the text model to a given writer /// - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine(nameof(MulticlassLogisticRegression) + " bias and non-zero weights"); @@ -850,7 +853,7 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) public void SaveSummary(TextWriter writer, RoleMappedSchema schema) { - SaveAsText(writer, schema); + 
((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } JToken ISingleCanSavePfa.SaveAsPfa(BoundPfaContext ctx, JToken input) diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs index 751a62b7b6..89a5d33cf9 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs @@ -206,9 +206,9 @@ private static VersionInfo GetVersionInfo() public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification; - public ColumnType InputType => _inputType; + ColumnType IValueMapper.InputType => _inputType; - public ColumnType OutputType => _outputType; + ColumnType IValueMapper.OutputType => _outputType; /// /// Copies the label histogram into a buffer. @@ -303,7 +303,7 @@ public static MultiClassNaiveBayesPredictor Create(IHostEnvironment env, ModelLo return new MultiClassNaiveBayesPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -349,7 +349,7 @@ private static double[] CalculateAbsentFeatureLogProbabilities(int[] labelHistog return absentFeaturesLogProb; } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(VBuffer)); diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs index ce43edf2e7..36a05b7cc2 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs @@ -227,9 +227,8 @@ private static VersionInfo GetVersionInfo() private readonly ImplBase _impl; public override PredictionKind 
PredictionKind => PredictionKind.MultiClassClassification; - public ColumnType InputType => _impl.InputType; - public ColumnType OutputType { get; } - public ColumnType DistType => OutputType; + private readonly ColumnType _outputType; + public ColumnType DistType => _outputType; bool ICanSavePfa.CanSavePfa => _impl.CanSavePfa; [BestFriend] @@ -280,7 +279,7 @@ private OvaPredictor(IHostEnvironment env, ImplBase impl) Host.Assert(Utils.Size(impl.Predictors) > 0); _impl = impl; - OutputType = new VectorType(NumberType.Float, _impl.Predictors.Length); + _outputType = new VectorType(NumberType.Float, _impl.Predictors.Length); } private OvaPredictor(IHostEnvironment env, ModelLoadContext ctx) @@ -306,7 +305,7 @@ private OvaPredictor(IHostEnvironment env, ModelLoadContext ctx) _impl = new ImplRaw(predictors); } - OutputType = new VectorType(NumberType.Float, _impl.Predictors.Length); + _outputType = new VectorType(NumberType.Float, _impl.Predictors.Length); } public static OvaPredictor Create(IHostEnvironment env, ModelLoadContext ctx) @@ -324,7 +323,7 @@ private static void LoadPredictors(IHostEnvironment env, TPredictor[ ctx.LoadModel(env, out predictors[i], string.Format(SubPredictorFmt, i)); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -349,7 +348,16 @@ JToken ISingleCanSavePfa.SaveAsPfa(BoundPfaContext ctx, JToken input) return _impl.SaveAsPfa(ctx, input); } - public ValueMapper GetMapper() + ColumnType IValueMapper.InputType + { + get { return _impl.InputType; } + } + + ColumnType IValueMapper.OutputType + { + get { return _outputType; } + } + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(VBuffer)); @@ -377,7 +385,7 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) } } - public void SaveAsText(TextWriter writer, RoleMappedSchema schema) 
+ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs index ae0ffe8694..902906c5da 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs @@ -244,8 +244,10 @@ private static VersionInfo GetVersionInfo() private readonly IValueMapperDist[] _mappers; public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification; - public ColumnType InputType { get; } - public ColumnType OutputType { get; } + private readonly ColumnType _inputType; + private readonly ColumnType _outputType; + ColumnType IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => _outputType; internal PkpdPredictor(IHostEnvironment env, TDistPredictor[][] predictors) : base(env, RegistrationName) @@ -267,8 +269,8 @@ internal PkpdPredictor(IHostEnvironment env, TDistPredictor[][] predictors) : } Host.Assert(index == _predictors.Length); - InputType = InitializeMappers(out _mappers); - OutputType = new VectorType(NumberType.Float, _numClasses); + _inputType = InitializeMappers(out _mappers); + _outputType = new VectorType(NumberType.Float, _numClasses); } private PkpdPredictor(IHostEnvironment env, ModelLoadContext ctx) @@ -295,8 +297,8 @@ private PkpdPredictor(IHostEnvironment env, ModelLoadContext ctx) Host.Assert(index == GetIndex(i, i)); ctx.LoadModel(Host, out _predictors[index++], string.Format(SubPredictorFmt, i)); } - InputType = InitializeMappers(out _mappers); - OutputType = new VectorType(NumberType.Float, _numClasses); + _inputType = InitializeMappers(out _mappers); + _outputType = new VectorType(NumberType.Float, _numClasses); } private ColumnType InitializeMappers(out IValueMapperDist[] mappers) @@ -337,7 +339,7 
@@ public static PkpdPredictor Create(IHostEnvironment env, ModelLoadContext ctx) return new PkpdPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -442,7 +444,7 @@ private int GetIndex(int i, int j) return i * (i + 1) / 2 + j; } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(VBuffer)); @@ -455,8 +457,8 @@ public ValueMapper GetMapper() ValueMapper, VBuffer> del = (in VBuffer src, ref VBuffer dst) => { - if (InputType.VectorSize > 0) - Host.Check(src.Length == InputType.VectorSize); + if (_inputType.VectorSize > 0) + Host.Check(src.Length == _inputType.VectorSize); var tmp = src; Parallel.For(0, maps.Length, parallelOptions, i => diff --git a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs index 2e031433e1..049ce8b8d5 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs @@ -173,7 +173,7 @@ public static RandomPredictor Create(IHostEnvironment env, ModelLoadContext ctx) /// Save the predictor in the binary format. 
/// /// - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); @@ -396,7 +396,7 @@ public static PriorPredictor Create(IHostEnvironment env, ModelLoadContext ctx) return new PriorPredictor(env, ctx); } - protected override void SaveCore(ModelSaveContext ctx) + private protected override void SaveCore(ModelSaveContext ctx) { base.SaveCore(ctx); ctx.SetVersionInfo(GetVersionInfo()); diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index 83836447e9..9d4a2bc010 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -5,22 +5,22 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.FactorizationMachine; -using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Runtime.Internal.Calibration; using Microsoft.ML.Runtime.Internal.Internallearn; -using Microsoft.ML.Trainers.KMeans; using Microsoft.ML.Runtime.Learners; using Microsoft.ML.Runtime.LightGBM; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.StaticPipe; using Microsoft.ML.Trainers; +using Microsoft.ML.Trainers.FastTree; +using Microsoft.ML.Trainers.KMeans; +using Microsoft.ML.Trainers.Recommender; using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System; using System.Linq; using Xunit; using Xunit.Abstractions; -using Microsoft.ML.Trainers.Recommender; namespace Microsoft.ML.StaticPipelineTesting { @@ -713,7 +713,7 @@ public void KMeans() var reader = TextLoader.CreateReader(env, c => (label: c.LoadText(0), features: c.LoadFloat(1, 4))); - KMeansPredictor pred = null; + KMeansModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (label: r.label.ToKey(), r.features)) diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs 
b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index 92eb3c7aa9..ccd041f7a5 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -24,7 +24,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment env.ComponentCatalog.RegisterAssembly(typeof(OneHotEncodingTransformer).Assembly); // ML.Transforms env.ComponentCatalog.RegisterAssembly(typeof(FastTreeBinaryPredictor).Assembly); // ML.FastTree env.ComponentCatalog.RegisterAssembly(typeof(EnsemblePredictor).Assembly); // ML.Ensemble - env.ComponentCatalog.RegisterAssembly(typeof(KMeansPredictor).Assembly); // ML.KMeansClustering + env.ComponentCatalog.RegisterAssembly(typeof(KMeansModelParameters).Assembly); // ML.KMeansClustering env.ComponentCatalog.RegisterAssembly(typeof(PcaPredictor).Assembly); // ML.PCA #pragma warning disable 612 env.ComponentCatalog.RegisterAssembly(typeof(Experiment).Assembly); // ML.Legacy From 4c047bfc7ec7a1afc477c61700f1733fc9bf9975 Mon Sep 17 00:00:00 2001 From: Eric Erhardt Date: Wed, 5 Dec 2018 18:48:57 +0000 Subject: [PATCH 026/100] Allow ML.NET native binaries to work on Windows machines that don't have the VC runtime installed. (#1828) This allows ML.NET to run on Windows Nano containers. I also ported 2 Unix compile options we are using in core-setup and corefx that were missed when originally creating the ML.NET native build infrastructure. 
Fix #1823 --- src/Native/CMakeLists.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/Native/CMakeLists.txt b/src/Native/CMakeLists.txt index 9334b1c45c..0100cc353c 100644 --- a/src/Native/CMakeLists.txt +++ b/src/Native/CMakeLists.txt @@ -18,6 +18,9 @@ if(WIN32) add_compile_options($<$:-DNDEBUG>) add_compile_options($<$:-DNDEBUG>) add_compile_options($<$:/Od>) + add_compile_options($<$:/MTd>) # /MT will static link the VC runtime library, so it doesn't need to be installed on the target machine + add_compile_options($<$:/MT>) + add_compile_options($<$:/MT>) add_compile_options(/guard:cf) add_compile_options(/d2Zi+) # make optimized builds debugging easier add_compile_options(/nologo) # Suppress Startup Banner @@ -62,6 +65,8 @@ if(WIN32) list(APPEND RESOURCES ${CMAKE_SOURCE_DIR}/../../Tools/NativeVersion.rc) else() add_compile_options(-Wno-unused-local-typedef) + add_compile_options(-fPIC) + add_compile_options(-fvisibility=hidden) add_definitions(-Werror) # treat warnings as errors endif() From d7d4e99d6d366516a19739aa68e25ee4991d90d3 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Wed, 5 Dec 2018 15:56:47 -0800 Subject: [PATCH 027/100] Make SchemaShape.Column a struct instead of a class (#1822) * Make SchemaShape.Column a struct instead of a class * Make SchemaShape an IReadOnlyList and remove SchemaException * Make a (best) friend * Address comments * Make all member functions of SchemaShape.Column be best friends * Address comments * Change Columns' type from Column[] to ImmutableList * Fix build * Remove unnecessary checks because for some nullable columns * Address comments * SchemaShape is only a list not a class containing a list * Use array for immutable field and add a best friend function * Assert doesn't need variable name --- .../CustomMappingTransformer.cs | 4 +- src/Microsoft.ML.Core/Data/IEstimator.cs | 61 +++++++++++-------- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 4 +- src/Microsoft.ML.Core/Utilities/Contracts.cs | 4 ++ 
.../DataLoadSave/FakeSchema.cs | 18 +++--- .../StaticPipe/StaticSchemaShape.cs | 2 +- .../Training/TrainerEstimatorBase.cs | 41 +++++-------- .../Training/TrainerUtils.cs | 4 +- .../ColumnConcatenatingEstimator.cs | 4 +- .../Transforms/ColumnCopying.cs | 2 +- .../Transforms/ColumnSelecting.cs | 4 +- src/Microsoft.ML.Data/Transforms/Hashing.cs | 2 +- .../Transforms/KeyToValue.cs | 2 +- .../Transforms/KeyToVector.cs | 2 +- .../Transforms/Normalizer.cs | 2 +- .../Transforms/TypeConverting.cs | 2 +- .../Transforms/ValueToKeyMappingEstimator.cs | 4 +- src/Microsoft.ML.FastTree/FastTreeRanking.cs | 2 +- .../VectorWhitening.cs | 2 +- .../ImageGrayscaleTransform.cs | 2 +- .../ImageLoaderTransform.cs | 2 +- .../ImagePixelExtractorTransform.cs | 2 +- .../ImageResizerTransform.cs | 2 +- .../KMeansPlusPlusTrainer.cs | 2 +- .../LightGbmMulticlassTrainer.cs | 2 +- .../LightGbmRankingTrainer.cs | 2 +- .../OnnxTransform.cs | 4 +- src/Microsoft.ML.PCA/PcaTrainer.cs | 4 +- src/Microsoft.ML.PCA/PcaTransform.cs | 2 +- .../MatrixFactorizationTrainer.cs | 2 +- .../FactorizationMachineTrainer.cs | 8 +-- .../LogisticRegression/LbfgsPredictorBase.cs | 2 +- .../MulticlassLogisticRegression.cs | 2 +- .../MultiClass/MetaMulticlassTrainer.cs | 8 +-- .../MultiClass/MultiClassNaiveBayesTrainer.cs | 2 +- .../Standard/Online/AveragedPerceptron.cs | 2 +- .../Standard/SdcaBinary.cs | 8 +-- .../Standard/SdcaMultiClass.cs | 4 +- .../Standard/Simple/SimpleTrainers.cs | 4 +- .../Standard/StochasticTrainerBase.cs | 2 +- .../TensorflowTransform.cs | 4 +- .../IidChangePointDetector.cs | 2 +- .../IidSpikeDetector.cs | 2 +- .../SsaChangePointDetector.cs | 2 +- .../SsaSpikeDetector.cs | 2 +- .../CountFeatureSelection.cs | 2 +- src/Microsoft.ML.Transforms/GcnTransform.cs | 2 +- .../KeyToVectorMapping.cs | 2 +- .../MissingValueDroppingTransformer.cs | 2 +- .../MissingValueIndicatorTransformer.cs | 2 +- .../MissingValueReplacing.cs | 2 +- .../MutualInformationFeatureSelection.cs | 2 +- 
.../RandomFourierFeaturizing.cs | 2 +- .../Text/LdaTransform.cs | 2 +- .../Text/NgramTransform.cs | 2 +- .../Text/StopWordsRemovingTransformer.cs | 4 +- .../Text/TextFeaturizingEstimator.cs | 2 +- .../Text/TextNormalizing.cs | 2 +- .../Text/TokenizingByCharacters.cs | 2 +- .../Text/WordEmbeddingsExtractor.cs | 2 +- .../Text/WordTokenizing.cs | 2 +- .../DataPipe/TestDataPipeBase.cs | 6 +- 62 files changed, 145 insertions(+), 141 deletions(-) diff --git a/src/Microsoft.ML.Api/CustomMappingTransformer.cs b/src/Microsoft.ML.Api/CustomMappingTransformer.cs index 963996d099..d3081fc096 100644 --- a/src/Microsoft.ML.Api/CustomMappingTransformer.cs +++ b/src/Microsoft.ML.Api/CustomMappingTransformer.cs @@ -205,7 +205,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) var addedCols = DataViewConstructionUtils.GetSchemaColumns(Transformer.AddedSchema); var addedSchemaShape = SchemaShape.Create(SchemaBuilder.MakeSchema(addedCols)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); var inputDef = InternalSchemaDefinition.Create(typeof(TSrc), Transformer.InputSchemaDefinition); foreach (var col in inputDef.Columns) { @@ -223,7 +223,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) } } - foreach (var addedCol in addedSchemaShape.Columns) + foreach (var addedCol in addedSchemaShape) result[addedCol.Name] = addedCol; return new SchemaShape(result.Values); diff --git a/src/Microsoft.ML.Core/Data/IEstimator.cs b/src/Microsoft.ML.Core/Data/IEstimator.cs index b82ad4f5a7..5994a671bd 100644 --- a/src/Microsoft.ML.Core/Data/IEstimator.cs +++ b/src/Microsoft.ML.Core/Data/IEstimator.cs @@ -5,8 +5,9 @@ using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using System; +using System.Collections; using System.Collections.Generic; +using System.Collections.Immutable; using System.Linq; namespace Microsoft.ML.Core.Data @@ -16,13 +17,17 @@ namespace 
Microsoft.ML.Core.Data /// This is more relaxed than the proper , since it's only a subset of the columns, /// and also since it doesn't specify exact 's for vectors and keys. /// - public sealed class SchemaShape + public sealed class SchemaShape : IReadOnlyList { - public readonly Column[] Columns; + private readonly Column[] _columns; private static readonly SchemaShape _empty = new SchemaShape(Enumerable.Empty()); - public sealed class Column + public int Count => _columns.Count(); + + public Column this[int index] => _columns[index]; + + public struct Column { public enum VectorKind { @@ -55,13 +60,13 @@ public enum VectorKind /// public readonly SchemaShape Metadata; - public Column(string name, VectorKind vecKind, ColumnType itemType, bool isKey, SchemaShape metadata = null) + [BestFriend] + internal Column(string name, VectorKind vecKind, ColumnType itemType, bool isKey, SchemaShape metadata = null) { Contracts.CheckNonEmpty(name, nameof(name)); Contracts.CheckValueOrNull(metadata); Contracts.CheckParam(!itemType.IsKey, nameof(itemType), "Item type cannot be a key"); Contracts.CheckParam(!itemType.IsVector, nameof(itemType), "Item type cannot be a vector"); - Contracts.CheckParam(!isKey || KeyType.IsValidDataKind(itemType.RawKind), nameof(itemType), "The item type must be valid for a key"); Name = name; @@ -80,9 +85,10 @@ public Column(string name, VectorKind vecKind, ColumnType itemType, bool isKey, /// - The columns of of is a superset of our columns. /// - Each such metadata column is itself compatible with the input metadata column. 
/// - public bool IsCompatibleWith(Column inputColumn) + [BestFriend] + internal bool IsCompatibleWith(Column inputColumn) { - Contracts.CheckValue(inputColumn, nameof(inputColumn)); + Contracts.Check(inputColumn.IsValid, nameof(inputColumn)); if (Name != inputColumn.Name) return false; if (Kind != inputColumn.Kind) @@ -91,7 +97,7 @@ public bool IsCompatibleWith(Column inputColumn) return false; if (IsKey != inputColumn.IsKey) return false; - foreach (var metaCol in Metadata.Columns) + foreach (var metaCol in Metadata) { if (!inputColumn.Metadata.TryFindColumn(metaCol.Name, out var inputMetaCol)) return false; @@ -101,7 +107,8 @@ public bool IsCompatibleWith(Column inputColumn) return true; } - public string GetTypeString() + [BestFriend] + internal string GetTypeString() { string result = ItemType.ToString(); if (IsKey) @@ -112,13 +119,20 @@ public string GetTypeString() result = $"VarVector<{result}>"; return result; } + + /// + /// Return if this structure is not identical to the default value of . If true, + /// it means this structure is initialized properly and therefore considered as valid. + /// + [BestFriend] + internal bool IsValid => Name != null; } public SchemaShape(IEnumerable columns) { Contracts.CheckValue(columns, nameof(columns)); - Columns = columns.ToArray(); - Contracts.CheckParam(columns.All(c => c != null), nameof(columns), "No items should be null."); + _columns = columns.ToArray(); + Contracts.CheckParam(columns.All(c => c.IsValid), nameof(columns), "Some items are not initialized properly."); } /// @@ -151,7 +165,8 @@ internal static void GetColumnTypeShape(ColumnType type, /// /// Create a schema shape out of the fully defined schema. 
/// - public static SchemaShape Create(Schema schema) + [BestFriend] + internal static SchemaShape Create(Schema schema) { Contracts.CheckValue(schema, nameof(schema)); var cols = new List(); @@ -179,25 +194,23 @@ public static SchemaShape Create(Schema schema) /// /// Returns if there is a column with a specified and if so stores it in . /// - public bool TryFindColumn(string name, out Column column) + [BestFriend] + internal bool TryFindColumn(string name, out Column column) { Contracts.CheckValue(name, nameof(name)); - column = Columns.FirstOrDefault(x => x.Name == name); - return column != null; + column = _columns.FirstOrDefault(x => x.Name == name); + return column.IsValid; } + public IEnumerator GetEnumerator() => ((IEnumerable)_columns).GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + // REVIEW: I think we should have an IsCompatible method to check if it's OK to use one schema shape // as an input to another schema shape. I started writing, but realized that there's more than one way to check for // the 'compatibility': as in, 'CAN be compatible' vs. 'WILL be compatible'. } - /// - /// Exception class for schema validation errors. - /// - public class SchemaException : Exception - { - } - /// /// The 'data reader' takes a certain kind of input and turns it into an . /// @@ -246,7 +259,6 @@ public interface ITransformer /// /// Schema propagation for transformers. /// Returns the output schema of the data, if the input schema is like the one provided. - /// Throws if the input schema is not valid for the transformer. /// Schema GetOutputSchema(Schema inputSchema); @@ -288,7 +300,6 @@ public interface IEstimator /// /// Schema propagation for estimators. /// Returns the output schema shape of the estimator, if the input schema shape is like the one provided. - /// Throws iff the input schema is not valid for the estimator. 
/// SchemaShape GetOutputSchema(SchemaShape inputSchema); } diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index d5cbe6928b..024953f173 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -367,7 +367,7 @@ public static bool IsNormalized(this Schema schema, int col) /// of a scalar type, which we assume, if set, should be true. public static bool IsNormalized(this SchemaShape.Column col) { - Contracts.CheckValue(col, nameof(col)); + Contracts.CheckParam(col.IsValid, nameof(col), "struct not initialized properly"); return col.Metadata.TryFindColumn(Kinds.IsNormalized, out var metaCol) && metaCol.Kind == SchemaShape.Column.VectorKind.Scalar && !metaCol.IsKey && metaCol.ItemType == BoolType.Instance; @@ -382,7 +382,7 @@ public static bool IsNormalized(this SchemaShape.Column col) /// metadata of definite sized vectors of text. public static bool HasSlotNames(this SchemaShape.Column col) { - Contracts.CheckValue(col, nameof(col)); + Contracts.CheckParam(col.IsValid, nameof(col), "struct not initialized properly"); return col.Kind == SchemaShape.Column.VectorKind.Vector && col.Metadata.TryFindColumn(Kinds.SlotNames, out var metaCol) && metaCol.Kind == SchemaShape.Column.VectorKind.Vector && !metaCol.IsKey diff --git a/src/Microsoft.ML.Core/Utilities/Contracts.cs b/src/Microsoft.ML.Core/Utilities/Contracts.cs index d567d6e883..cda7cbc539 100644 --- a/src/Microsoft.ML.Core/Utilities/Contracts.cs +++ b/src/Microsoft.ML.Core/Utilities/Contracts.cs @@ -758,6 +758,10 @@ public static void CheckAlive(this IHostEnvironment env) public static void CheckValueOrNull(T val) where T : class { } + + /// + /// This documents that the parameter can legally be null. 
+ /// [Conditional("INVARIANT_CHECKS")] public static void CheckValueOrNull(this IExceptionContext ctx, T val) where T : class { diff --git a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs index d94219a453..c67c6ae733 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs @@ -30,22 +30,22 @@ public FakeSchema(IHostEnvironment env, SchemaShape inputShape) { _env = env; _shape = inputShape; - _colMap = Enumerable.Range(0, _shape.Columns.Length) - .ToDictionary(idx => _shape.Columns[idx].Name, idx => idx); + _colMap = Enumerable.Range(0, _shape.Count) + .ToDictionary(idx => _shape[idx].Name, idx => idx); } - public int ColumnCount => _shape.Columns.Length; + public int ColumnCount => _shape.Count; public string GetColumnName(int col) { _env.Check(0 <= col && col < ColumnCount); - return _shape.Columns[col].Name; + return _shape[col].Name; } public ColumnType GetColumnType(int col) { _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape.Columns[col]; + var inputCol = _shape[col]; return MakeColumnType(inputCol); } @@ -66,7 +66,7 @@ private static ColumnType MakeColumnType(SchemaShape.Column inputCol) public void GetMetadata(string kind, int col, ref TValue value) { _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape.Columns[col]; + var inputCol = _shape[col]; var metaShape = inputCol.Metadata; if (metaShape == null || !metaShape.TryFindColumn(kind, out var metaColumn)) throw _env.ExceptGetMetadata(); @@ -89,7 +89,7 @@ public void GetMetadata(string kind, int col, ref TValue value) public ColumnType GetMetadataTypeOrNull(string kind, int col) { _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape.Columns[col]; + var inputCol = _shape[col]; var metaShape = inputCol.Metadata; if (metaShape == null || !metaShape.TryFindColumn(kind, out var metaColumn)) return null; @@ -99,12 +99,12 @@ public ColumnType 
GetMetadataTypeOrNull(string kind, int col) public IEnumerable> GetMetadataTypes(int col) { _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape.Columns[col]; + var inputCol = _shape[col]; var metaShape = inputCol.Metadata; if (metaShape == null) return Enumerable.Empty>(); - return metaShape.Columns.Select(c => new KeyValuePair(c.Name, MakeColumnType(c))); + return metaShape.Select(c => new KeyValuePair(c.Name, MakeColumnType(c))); } } } diff --git a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs index 6ffd089f24..7b1edd3398 100644 --- a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs +++ b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs @@ -113,7 +113,7 @@ public void Check(IExceptionContext ectx, SchemaShape shape) private static Type GetTypeOrNull(SchemaShape.Column col) { - Contracts.AssertValue(col); + Contracts.Assert(col.IsValid); Type vecType = null; diff --git a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs index 1e49c32ed3..55d85201b7 100644 --- a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs +++ b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs @@ -55,13 +55,11 @@ public abstract class TrainerEstimatorBase : ITrainerEstim private protected TrainerEstimatorBase(IHost host, SchemaShape.Column feature, SchemaShape.Column label, - SchemaShape.Column weight = null) + SchemaShape.Column weight = default) { Contracts.CheckValue(host, nameof(host)); Host = host; - Host.CheckValue(feature, nameof(feature)); - Host.CheckValueOrNull(label); - Host.CheckValueOrNull(weight); + Host.CheckParam(feature.IsValid, nameof(feature), "not initialized properly"); FeatureColumn = feature; LabelColumn = label; @@ -76,7 +74,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) CheckInputSchema(inputSchema); - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = 
inputSchema.ToDictionary(x => x.Name); foreach (var col in GetOutputColumnsCore(inputSchema)) outColumns[col.Name] = col; @@ -102,7 +100,7 @@ private void CheckInputSchema(SchemaShape inputSchema) if (!FeatureColumn.IsCompatibleWith(featureCol)) throw Host.Except($"Feature column '{FeatureColumn.Name}' is not compatible"); - if (WeightColumn != null) + if (WeightColumn.IsValid) { if (!inputSchema.TryFindColumn(WeightColumn.Name, out var weightCol)) throw Host.Except($"Weight column '{WeightColumn.Name}' is not found"); @@ -112,7 +110,7 @@ private void CheckInputSchema(SchemaShape inputSchema) // Special treatment for label column: we allow different types of labels, so the trainers // may define their own requirements on the label column. - if (LabelColumn != null) + if (LabelColumn.IsValid) { if (!inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol)) throw Host.Except($"Label column '{LabelColumn.Name}' is not found"); @@ -122,8 +120,8 @@ private void CheckInputSchema(SchemaShape inputSchema) protected virtual void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.CheckValue(labelCol, nameof(labelCol)); - Contracts.AssertValue(LabelColumn); + Contracts.CheckParam(labelCol.IsValid, nameof(labelCol), "not initialized properly"); + Host.Assert(LabelColumn.IsValid); if (!LabelColumn.IsCompatibleWith(labelCol)) throw Host.Except($"Label column '{LabelColumn.Name}' is not compatible"); @@ -133,20 +131,12 @@ protected TTransformer TrainTransformer(IDataView trainSet, IDataView validationSet = null, IPredictor initPredictor = null) { var cachedTrain = Info.WantCaching ? new CacheDataView(Host, trainSet, prefetch: null) : trainSet; + var cachedValid = Info.WantCaching && validationSet != null ? new CacheDataView(Host, validationSet, prefetch: null) : validationSet; - var trainRoles = MakeRoles(cachedTrain); + var trainRoleMapped = MakeRoles(cachedTrain); + var validRoleMapped = validationSet == null ? 
null : MakeRoles(cachedValid); - RoleMappedData validRoles; - - if (validationSet == null) - validRoles = null; - else - { - var cachedValid = Info.WantCaching ? new CacheDataView(Host, validationSet, prefetch: null) : validationSet; - validRoles = MakeRoles(cachedValid); - } - - var pred = TrainModelCore(new TrainContext(trainRoles, validRoles, null, initPredictor)); + var pred = TrainModelCore(new TrainContext(trainRoleMapped, validRoleMapped, null, initPredictor)); return MakeTransformer(pred, trainSet.Schema); } @@ -156,7 +146,7 @@ protected TTransformer TrainTransformer(IDataView trainSet, protected abstract TTransformer MakeTransformer(TModel model, Schema trainSchema); protected virtual RoleMappedData MakeRoles(IDataView data) => - new RoleMappedData(data, label: LabelColumn?.Name, feature: FeatureColumn.Name, weight: WeightColumn?.Name); + new RoleMappedData(data, label: LabelColumn.Name, feature: FeatureColumn.Name, weight: WeightColumn.Name); IPredictor ITrainer.Train(TrainContext context) => ((ITrainer)this).Train(context); } @@ -178,16 +168,15 @@ public abstract class TrainerEstimatorBaseWithGroupId : Tr public TrainerEstimatorBaseWithGroupId(IHost host, SchemaShape.Column feature, SchemaShape.Column label, - SchemaShape.Column weight = null, - SchemaShape.Column groupId = null) + SchemaShape.Column weight = default, + SchemaShape.Column groupId = default) :base(host, feature, label, weight) { - Host.CheckValueOrNull(groupId); GroupIdColumn = groupId; } protected override RoleMappedData MakeRoles(IDataView data) => - new RoleMappedData(data, label: LabelColumn?.Name, feature: FeatureColumn.Name, group: GroupIdColumn?.Name, weight: WeightColumn?.Name); + new RoleMappedData(data, label: LabelColumn.Name, feature: FeatureColumn.Name, group: GroupIdColumn.Name, weight: WeightColumn.Name); } } diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index b641744421..83d7efaf90 100644 --- 
a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -366,7 +366,7 @@ public static SchemaShape.Column MakeR4ScalarLabel(string labelColumn) public static SchemaShape.Column MakeU4ScalarColumn(string columnName) { if (columnName == null) - return null; + return default; return new SchemaShape.Column(columnName, SchemaShape.Column.VectorKind.Scalar, NumberType.U4, true); } @@ -386,7 +386,7 @@ public static SchemaShape.Column MakeR4VecFeature(string featureColumn) public static SchemaShape.Column MakeR4ScalarWeightColumn(string weightColumn, bool isExplicit = true) { if (weightColumn == null || !isExplicit) - return null; + return default; return new SchemaShape.Column(weightColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); } } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingEstimator.cs b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingEstimator.cs index dc8ad9b389..81df252d58 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingEstimator.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingEstimator.cs @@ -49,7 +49,7 @@ public ITransformer Fit(IDataView input) private bool HasCategoricals(SchemaShape.Column col) { - _host.AssertValue(col); + _host.Assert(col.IsValid); if (!col.Metadata.TryFindColumn(MetadataUtils.Kinds.CategoricalSlotRanges, out var mcol)) return false; // The indices must be ints and of a definite size vector type. 
(Definite becuase @@ -116,7 +116,7 @@ private SchemaShape.Column CheckInputsAndMakeColumn( public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); result[_name] = CheckInputsAndMakeColumn(inputSchema, _name, _source); return new SchemaShape(result.Values); } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs index 4479856cc0..4c35b845a4 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs @@ -49,7 +49,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); foreach (var (Source, Name) in Transformer.Columns) { if (!inputSchema.TryFindColumn(Source, out var originalColumn)) diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index 33d61e4560..cd602bad1a 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -98,14 +98,14 @@ public static ColumnSelectingEstimator DropColumns(IHostEnvironment env, params public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - if (!Transformer.IgnoreMissing && !ColumnSelectingTransformer.IsSchemaValid(inputSchema.Columns.Select(x => x.Name), + if (!Transformer.IgnoreMissing && !ColumnSelectingTransformer.IsSchemaValid(inputSchema.Select(x => x.Name), Transformer.SelectColumns, out IEnumerable invalidColumns)) { throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", string.Join(",", invalidColumns)); } - var columns = 
inputSchema.Columns.Where(c => _selectPredicate(c.Name)); + var columns = inputSchema.Where(c => _selectPredicate(c.Name)); return new SchemaShape(columns); } } diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 1e9151f44a..960c033dad 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -1235,7 +1235,7 @@ public HashingEstimator(IHostEnvironment env, params HashingTransformer.ColumnIn public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs index 0fe412ee84..6553b27239 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs @@ -512,7 +512,7 @@ public KeyToValueMappingEstimator(IHostEnvironment env, params (string input, st public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.input, out var col)) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index 98193ad24e..da67c1a3b5 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -746,7 +746,7 @@ private KeyToVectorMappingEstimator(IHostEnvironment env, KeyToVectorMappingTran public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, 
nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index 222fea019c..0e20466a2e 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -213,7 +213,7 @@ public NormalizingTransformer Fit(IDataView input) public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 7c2d42168b..41be7c861f 100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -552,7 +552,7 @@ public TypeConvertingEstimator(IHostEnvironment env, params TypeConvertingTransf public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs index 4d01a62541..24ff1c6915 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs @@ -60,7 +60,7 @@ public ValueToKeyMappingEstimator(IHostEnvironment env, ValueToKeyMappingTransfo public SchemaShape 
GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) @@ -77,7 +77,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) kv = new SchemaShape.Column(MetadataUtils.Kinds.KeyValues, SchemaShape.Column.VectorKind.Vector, colInfo.TextKeyValues ? TextType.Instance : col.ItemType, col.IsKey); } - Contracts.AssertValue(kv); + Contracts.Assert(kv.IsValid); if (col.Metadata.TryFindColumn(MetadataUtils.Kinds.SlotNames, out var slotMeta)) metadata = new SchemaShape(new[] { slotMeta, kv }); diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index 451f47482e..dce1e7b398 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -97,7 +97,7 @@ internal FastTreeRankingTrainer(IHostEnvironment env, Arguments args) protected override void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.AssertValue(labelCol); + Contracts.Assert(labelCol.IsValid); Action error = () => throw Host.ExceptSchemaMismatch(nameof(labelCol), RoleMappedSchema.ColumnRole.Label.Value, labelCol.Name, "R4 or a Key", labelCol.GetTypeString()); diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index 0964154eb6..c18ef3f36a 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -806,7 +806,7 @@ public VectorWhiteningTransformer Fit(IDataView input) public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in _infos) { if 
(!inputSchema.TryFindColumn(colPair.Input, out var col)) diff --git a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs index eb2f444a92..2a117dfeda 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs @@ -227,7 +227,7 @@ public ImageGrayscalingEstimator(IHostEnvironment env, params (string input, str public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.input, out var col)) diff --git a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs index f996560011..fd7707ecf7 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs @@ -230,7 +230,7 @@ public ImageLoadingEstimator(IHostEnvironment env, ImageLoaderTransform transfor public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var (input, output) in Transformer.Columns) { if (!inputSchema.TryFindColumn(input, out var col)) diff --git a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs index 8b14382bcb..ef1a71c47f 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs @@ -655,7 +655,7 @@ public ImagePixelExtractingEstimator(IHostEnvironment env, params ImagePixelExtr public override SchemaShape 
GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs index 8241b2ff12..ef4318c547 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs @@ -447,7 +447,7 @@ public ImageResizingEstimator(IHostEnvironment env, ImageResizerTransform transf public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs index 72d4513627..d6654507fe 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs @@ -121,7 +121,7 @@ internal KMeansPlusPlusTrainer(IHostEnvironment env, Arguments args) } private KMeansPlusPlusTrainer(IHostEnvironment env, Arguments args, Action advancedSettings = null) - : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), null, TrainerUtils.MakeR4ScalarWeightColumn(args.WeightColumn)) + : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), default, TrainerUtils.MakeR4ScalarWeightColumn(args.WeightColumn)) { Host.CheckValue(args, nameof(args)); diff --git 
a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs index 62b476b40f..c787a606fa 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs @@ -213,7 +213,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc bool success = inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol); Contracts.Assert(success); - var metadata = new SchemaShape(labelCol.Metadata.Columns.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) + var metadata = new SchemaShape(labelCol.Metadata.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) .Concat(MetadataUtils.GetTrainerOutputMetadata())); return new[] { diff --git a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs index 4f0d49b75e..f9326f00ac 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs @@ -140,7 +140,7 @@ protected override void CheckDataValid(IChannel ch, RoleMappedData data) protected override void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.AssertValue(labelCol); + Contracts.Assert(labelCol.IsValid); Action error = () => throw Host.ExceptSchemaMismatch(nameof(labelCol), RoleMappedSchema.ColumnRole.Label.Value, labelCol.Name, "R4 or a Key", labelCol.GetTypeString()); diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index 4390ad31ab..dcffd150d5 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -480,8 +480,8 @@ public OnnxScoringEstimator(IHostEnvironment env, OnnxTransform transformer) public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); - var resultDic = 
inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); for (var i = 0; i < Transformer.Inputs.Length; i++) { diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs index 7740d2f6e6..914db596a0 100644 --- a/src/Microsoft.ML.PCA/PcaTrainer.cs +++ b/src/Microsoft.ML.PCA/PcaTrainer.cs @@ -111,7 +111,7 @@ internal RandomizedPcaTrainer(IHostEnvironment env, Arguments args) private RandomizedPcaTrainer(IHostEnvironment env, Arguments args, string featureColumn, string weightColumn, int rank = 20, int oversampling = 20, bool center = true, int? seed = null) - : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(featureColumn), null, TrainerUtils.MakeR4ScalarWeightColumn(weightColumn)) + : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(featureColumn), default, TrainerUtils.MakeR4ScalarWeightColumn(weightColumn)) { // if the args are not null, we got here from maml, and the internal ctor. 
if (args != null) @@ -152,7 +152,7 @@ private protected override PcaPredictor TrainModelCore(TrainContext context) private static SchemaShape.Column MakeWeightColumn(string weightColumn) { if (weightColumn == null) - return null; + return default; return new SchemaShape.Column(weightColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); } diff --git a/src/Microsoft.ML.PCA/PcaTransform.cs b/src/Microsoft.ML.PCA/PcaTransform.cs index 385b5d57a8..27ab5113d2 100644 --- a/src/Microsoft.ML.PCA/PcaTransform.cs +++ b/src/Microsoft.ML.PCA/PcaTransform.cs @@ -707,7 +707,7 @@ public PrincipalComponentAnalysisEstimator(IHostEnvironment env, params PcaTrans public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs index eb2db599a9..e42c6b002e 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs @@ -436,7 +436,7 @@ void CheckColumnsCompatible(SchemaShape.Column cachedColumn, string expectedColu CheckColumnsCompatible(matrixRowIndexColumn, MatrixRowIndexName); // Input columns just pass through so that output column dictionary contains all input columns. - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = inputSchema.ToDictionary(x => x.Name); // Add columns produced by this estimator. 
foreach (var col in GetOutputColumnsCore(inputSchema)) diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs index 81ef9d03cf..be73a2de30 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs @@ -151,7 +151,7 @@ public FieldAwareFactorizationMachineTrainer(IHostEnvironment env, FeatureColumns[i] = new SchemaShape.Column(featureColumns[i], SchemaShape.Column.VectorKind.Vector, NumberType.R4, false); LabelColumn = new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false); - WeightColumn = weights != null ? new SchemaShape.Column(weights, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false) : null; + WeightColumn = weights != null ? new SchemaShape.Column(weights, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false) : default; } /// @@ -461,7 +461,7 @@ public FieldAwareFactorizationMachinePredictionTransformer Train(IDataView train roles.Add(new KeyValuePair(RoleMappedSchema.ColumnRole.Label, LabelColumn.Name)); - if (WeightColumn != null) + if (WeightColumn.IsValid) roles.Add(new KeyValuePair(RoleMappedSchema.ColumnRole.Feature, WeightColumn.Name)); var trainingData = new RoleMappedData(trainData, roles); @@ -500,10 +500,10 @@ void CheckColumnsCompatible(SchemaShape.Column column, string defaultName) CheckColumnsCompatible(feat, DefaultColumnNames.Features); } - if (WeightColumn != null) + if (WeightColumn.IsValid) CheckColumnsCompatible(WeightColumn, DefaultColumnNames.Weight); - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = inputSchema.ToDictionary(x => x.Name); foreach (var col in GetOutputColumnsCore(inputSchema)) outColumns[col.Name] = col; diff --git 
a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs index 0d12aee904..2ec7021cd4 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs @@ -191,7 +191,7 @@ internal LbfgsTrainerBase(IHostEnvironment env, args.FeatureColumn = FeatureColumn.Name; args.LabelColumn = LabelColumn.Name; - args.WeightColumn = WeightColumn?.Name; + args.WeightColumn = WeightColumn.Name; Host.CheckUserArg(!Args.UseThreads || Args.NumThreads > 0 || Args.NumThreads == null, nameof(Args.NumThreads), "numThreads must be positive (or empty for default)"); Host.CheckUserArg(Args.L2Weight >= 0, nameof(Args.L2Weight), "Must be non-negative"); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index a114c2bb18..55f6a8b5e5 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -321,7 +321,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc bool success = inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol); Contracts.Assert(success); - var metadata = new SchemaShape(labelCol.Metadata.Columns.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) + var metadata = new SchemaShape(labelCol.Metadata.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) .Concat(MetadataUtils.GetTrainerOutputMetadata())); return new[] { diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs index 15de478efd..e7aeade4fe 
100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs @@ -149,7 +149,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - if (LabelColumn != null) + if (LabelColumn.IsValid) { if (!inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol)) throw Host.ExceptSchemaMismatch(nameof(labelCol), DefaultColumnNames.PredictedLabel, DefaultColumnNames.PredictedLabel); @@ -158,7 +158,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) throw Host.Except($"Label column '{LabelColumn.Name}' is not compatible"); } - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = inputSchema.ToDictionary(x => x.Name); foreach (var col in GetOutputColumnsCore(inputSchema)) outColumns[col.Name] = col; @@ -167,12 +167,12 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) private SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) { - if (LabelColumn != null) + if (LabelColumn.IsValid) { bool success = inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol); Contracts.Assert(success); - var metadata = new SchemaShape(labelCol.Metadata.Columns.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) + var metadata = new SchemaShape(labelCol.Metadata.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) .Concat(MetadataForScoreColumn())); return new[] { diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs index 89a5d33cf9..66294fd240 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs @@ -76,7 +76,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape 
inputSc bool success = inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol); Contracts.Assert(success); - var predLabelMetadata = new SchemaShape(labelCol.Metadata.Columns.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) + var predLabelMetadata = new SchemaShape(labelCol.Metadata.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) .Concat(MetadataUtils.GetTrainerOutputMetadata())); return new[] diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs index 91f68a85f6..c853aa7f79 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs @@ -163,7 +163,7 @@ protected override void CheckLabels(RoleMappedData data) protected override void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.AssertValue(labelCol); + Contracts.Assert(labelCol.IsValid); Action error = () => throw Host.ExceptSchemaMismatch(nameof(labelCol), RoleMappedSchema.ColumnRole.Label.Value, labelCol.Name, "BL, R8, R4 or a Key", labelCol.GetTypeString()); diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index 42e447004a..7016568bed 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -259,13 +259,13 @@ private static TArgs ArgsInit(string featureColumn, SchemaShape.Column labelColu } internal SdcaTrainerBase(IHostEnvironment env, string featureColumn, SchemaShape.Column labelColumn, - SchemaShape.Column weight = null, Action advancedSettings = null, float? l2Const = null, + SchemaShape.Column weight = default, Action advancedSettings = null, float? l2Const = null, float? l1Threshold = null, int? 
maxIterations = null) : this(env, ArgsInit(featureColumn, labelColumn, advancedSettings), labelColumn, weight, l2Const, l1Threshold, maxIterations) { } - internal SdcaTrainerBase(IHostEnvironment env, TArgs args, SchemaShape.Column label, SchemaShape.Column weight = null, + internal SdcaTrainerBase(IHostEnvironment env, TArgs args, SchemaShape.Column label, SchemaShape.Column weight = default, float? l2Const = null, float? l1Threshold = null, int? maxIterations = null) : base(Contracts.CheckRef(env, nameof(env)).Register(RegisterName), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), label, weight) { @@ -1520,7 +1520,7 @@ public SdcaBinaryTrainer(IHostEnvironment env, Arguments args) protected override void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.AssertValue(labelCol); + Contracts.Assert(labelCol.IsValid); Action error = () => throw Host.ExceptSchemaMismatch(nameof(labelCol), RoleMappedSchema.ColumnRole.Label.Value, labelCol.Name, "BL, R8, R4 or a Key", labelCol.GetTypeString()); @@ -1535,7 +1535,7 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) private static SchemaShape.Column MakeWeightColumn(string weightColumn) { if (weightColumn == null) - return null; + return default; return new SchemaShape.Column(weightColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs index f60b822827..af6617d862 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs @@ -101,7 +101,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc bool success = inputSchema.TryFindColumn(LabelColumn.Name, out var labelCol); Contracts.Assert(success); - var metadata = new SchemaShape(labelCol.Metadata.Columns.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) + var metadata = 
new SchemaShape(labelCol.Metadata.Where(x => x.Name == MetadataUtils.Kinds.KeyValues) .Concat(MetadataUtils.GetTrainerOutputMetadata())); return new[] { @@ -112,7 +112,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc protected override void CheckLabelCompatible(SchemaShape.Column labelCol) { - Contracts.AssertValue(labelCol); + Contracts.Assert(labelCol.IsValid); Action error = () => throw Host.ExceptSchemaMismatch(nameof(labelCol), RoleMappedSchema.ColumnRole.Label.Value, labelCol.Name, "R8, R4 or a Key", labelCol.GetTypeString()); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs index 049ce8b8d5..bae20cd10f 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs @@ -86,7 +86,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = inputSchema.ToDictionary(x => x.Name); var newColumns = new[] { @@ -327,7 +327,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var outColumns = inputSchema.Columns.ToDictionary(x => x.Name); + var outColumns = inputSchema.ToDictionary(x => x.Name); var newColumns = new[] { diff --git a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs index d419a3a8a1..792844ef26 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs @@ -15,7 +15,7 @@ public abstract class StochasticTrainerBase : TrainerEstim where TTransformer : ISingleFeaturePredictionTransformer where TModel : IPredictor { - public 
StochasticTrainerBase(IHost host, SchemaShape.Column feature, SchemaShape.Column label, SchemaShape.Column weight = null) + public StochasticTrainerBase(IHost host, SchemaShape.Column feature, SchemaShape.Column label, SchemaShape.Column weight = default) : base(host, feature, label, weight) { } diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index e29435cc63..016082d44d 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -1101,8 +1101,8 @@ private static TensorFlowTransform.Arguments CreateArguments(TensorFlowModelInfo public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); - var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); for (var i = 0; i < _args.InputColumns.Length; i++) { var input = _args.InputColumns[i]; diff --git a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs index 3a899f4339..6a42483130 100644 --- a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs @@ -251,7 +251,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) var metadata = new List() { new SchemaShape.Column(MetadataUtils.Kinds.SlotNames, SchemaShape.Column.VectorKind.Vector, TextType.Instance, false) }; - var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); resultDic[Transformer.OutputColumnName] = new SchemaShape.Column( Transformer.OutputColumnName, SchemaShape.Column.VectorKind.Vector, NumberType.R8, false, new SchemaShape(metadata)); diff --git a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs 
b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs index 3fdf968025..107b2873cf 100644 --- a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs @@ -226,7 +226,7 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) var metadata = new List() { new SchemaShape.Column(MetadataUtils.Kinds.SlotNames, SchemaShape.Column.VectorKind.Vector, TextType.Instance, false) }; - var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); resultDic[Transformer.OutputColumnName] = new SchemaShape.Column( Transformer.OutputColumnName, SchemaShape.Column.VectorKind.Vector, NumberType.R8, false, new SchemaShape(metadata)); diff --git a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs index 9171c34662..4565f853b6 100644 --- a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs @@ -282,7 +282,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) var metadata = new List() { new SchemaShape.Column(MetadataUtils.Kinds.SlotNames, SchemaShape.Column.VectorKind.Vector, TextType.Instance, false) }; - var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); resultDic[_args.Name] = new SchemaShape.Column( _args.Name, SchemaShape.Column.VectorKind.Vector, NumberType.R8, false, new SchemaShape(metadata)); diff --git a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs index f98c8bf34b..00eb719b46 100644 --- a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs @@ -260,7 +260,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) var metadata = new List() { new SchemaShape.Column(MetadataUtils.Kinds.SlotNames, SchemaShape.Column.VectorKind.Vector, TextType.Instance, false) }; 
- var resultDic = inputSchema.Columns.ToDictionary(x => x.Name); + var resultDic = inputSchema.ToDictionary(x => x.Name); resultDic[_args.Name] = new SchemaShape.Column( _args.Name, SchemaShape.Column.VectorKind.Vector, NumberType.R8, false, new SchemaShape(metadata)); diff --git a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs index d330a06f37..d37d6b76b4 100644 --- a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs @@ -106,7 +106,7 @@ public CountFeatureSelectingEstimator(IHostEnvironment env, string inputColumn, public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in _columns) { if (!inputSchema.TryFindColumn(colPair.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs index e8d13b4944..5112452c13 100644 --- a/src/Microsoft.ML.Transforms/GcnTransform.cs +++ b/src/Microsoft.ML.Transforms/GcnTransform.cs @@ -795,7 +795,7 @@ internal static bool IsSchemaColumnValid(SchemaShape.Column col) public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in Transformer.Columns) { if (!inputSchema.TryFindColumn(colPair.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs index f883f29c45..6159637da5 100644 --- a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs +++ b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs @@ -465,7 +465,7 @@ private KeyToBinaryVectorMappingEstimator(IHostEnvironment env, 
KeyToBinaryVecto public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index 90191211dc..c509881d31 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -371,7 +371,7 @@ public MissingValueDroppingEstimator(IHostEnvironment env, string input, string public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in Transformer.Columns) { if (!inputSchema.TryFindColumn(colPair.input, out var col) || !Runtime.Data.Conversion.Conversions.Instance.TryGetIsNAPredicate(col.ItemType, out Delegate del)) diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs index c54f6693bd..971aae846c 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs @@ -460,7 +460,7 @@ public MissingValueIndicatorEstimator(IHostEnvironment env, string input, string public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in Transformer.Columns) { if (!inputSchema.TryFindColumn(colPair.input, out var col) || 
!Runtime.Data.Conversion.Conversions.Instance.TryGetIsNAPredicate(col.ItemType, out Delegate del)) diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index b1b48f7864..7659d53c32 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -952,7 +952,7 @@ public MissingValueReplacingEstimator(IHostEnvironment env, params MissingValueR public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs b/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs index 3c496e711c..517628655d 100644 --- a/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs @@ -167,7 +167,7 @@ public ITransformer Fit(IDataView input) public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colPair in _columns) { if (!inputSchema.TryFindColumn(colPair.input, out var col)) diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index 5960ff1143..8f4e2b6221 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -677,7 +677,7 @@ public RandomFourierFeaturizingEstimator(IHostEnvironment env, params RandomFour public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, 
nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs index f05ab6161d..dd0738e50e 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs @@ -1145,7 +1145,7 @@ public LatentDirichletAllocationEstimator(IHostEnvironment env, params LatentDir public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs index ca9794cef6..5351e3f7f4 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs @@ -863,7 +863,7 @@ internal static bool IsSchemaColumnValid(SchemaShape.Column col) public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index eeffa7b8f7..a4e0cee7e6 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ -582,7 +582,7 @@ public StopWordsRemovingEstimator(IHostEnvironment env, 
params StopWordsRemoving public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) @@ -1076,7 +1076,7 @@ public CustomStopWordsRemovingEstimator(IHostEnvironment env, (string input, str public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs b/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs index bf4edae5de..1fa39930bd 100644 --- a/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs +++ b/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs @@ -472,7 +472,7 @@ private static string GenerateColumnName(ISchema schema, string srcName, string public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var srcName in _inputColumns) { if (!inputSchema.TryFindColumn(srcName, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs index eb84af33a3..2a7fa68e4a 100644 --- a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs +++ b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs @@ -500,7 +500,7 @@ public TextNormalizingEstimator(IHostEnvironment env, public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, 
nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs index 8d4a46652d..949c4474bb 100644 --- a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs +++ b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs @@ -587,7 +587,7 @@ public TokenizingByCharactersEstimator(IHostEnvironment env, bool useMarkerChara public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs index 585b435c70..b1c6c73fe5 100644 --- a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs +++ b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs @@ -846,7 +846,7 @@ public WordEmbeddingsExtractingEstimator(IHostEnvironment env, string customMode public SchemaShape GetOutputSchema(SchemaShape inputSchema) { _host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in _columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs index 307821d1cb..1805e9c317 100644 --- a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs +++ b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs @@ -468,7 
+468,7 @@ public WordTokenizingEstimator(IHostEnvironment env, params WordTokenizingTransf public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var result = inputSchema.Columns.ToDictionary(x => x.Name); + var result = inputSchema.ToDictionary(x => x.Name); foreach (var colInfo in Transformer.Columns) { if (!inputSchema.TryFindColumn(colInfo.Input, out var col)) diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 304ac6751f..d8141426ea 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -152,9 +152,9 @@ protected void TestEstimatorCore(IEstimator estimator, private void CheckSameSchemaShape(SchemaShape promised, SchemaShape delivered) { - Assert.True(promised.Columns.Length == delivered.Columns.Length); - var sortedCols1 = promised.Columns.OrderBy(x => x.Name); - var sortedCols2 = delivered.Columns.OrderBy(x => x.Name); + Assert.True(promised.Count == delivered.Count); + var sortedCols1 = promised.OrderBy(x => x.Name); + var sortedCols2 = delivered.OrderBy(x => x.Name); foreach (var (x, y) in sortedCols1.Zip(sortedCols2, (x, y) => (x, y))) { From 435a63b9db7464ec0400eb4be8ecc362f0d33dcc Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Thu, 6 Dec 2018 09:49:45 -0800 Subject: [PATCH 028/100] Remove auto-cache mechanism (#1780) * Remove auto-cache mechanism * Add caching usage into a sample and tests --- docs/code/MlNetCookBook.md | 67 +++- .../Microsoft.ML.Samples/Dynamic/SDCA.cs | 9 +- src/Microsoft.ML.Data/StaticPipe/DataView.cs | 14 + src/Microsoft.ML.Data/StaticPipe/Estimator.cs | 9 + .../Training/TrainerEstimatorBase.cs | 7 +- .../Common/OVA/OVA-CV-iris-out.txt | 32 +- .../Common/OVA/OVA-CV-iris-rp.txt | 2 +- .../BaselineOutput/Common/OVA/OVA-CV-iris.txt | 300 +++++++++--------- 
.../Common/OVA/OVA-TrainTest-iris-out.txt | 20 +- .../Common/OVA/OVA-TrainTest-iris-rp.txt | 2 +- .../Common/OVA/OVA-TrainTest-iris.txt | 300 +++++++++--------- .../Common/PKPD/PKPD-CV-iris-out.txt | 32 +- .../Common/PKPD/PKPD-CV-iris-rp.txt | 2 +- .../Common/PKPD/PKPD-CV-iris.txt | 300 +++++++++--------- .../Common/PKPD/PKPD-TrainTest-iris-out.txt | 22 +- .../Common/PKPD/PKPD-TrainTest-iris-rp.txt | 2 +- .../Common/PKPD/PKPD-TrainTest-iris.txt | 300 +++++++++--------- .../Training.cs | 5 +- test/Microsoft.ML.Tests/CachingTests.cs | 20 ++ .../FeatureContributionTests.cs | 22 +- .../Api/CookbookSamples/CookbookSamples.cs | 36 +++ .../CookbookSamplesDynamicApi.cs | 31 +- .../Api/Estimators/IntrospectiveTraining.cs | 1 + .../Api/Estimators/MultithreadedPrediction.cs | 1 + .../Estimators/ReconfigurablePrediction.cs | 2 +- .../Api/Estimators/SimpleTrainAndPredict.cs | 1 + .../Estimators/TrainSaveModelAndPredict.cs | 1 + .../Estimators/TrainWithInitialPredictor.cs | 5 +- .../Api/Estimators/TrainWithValidationSet.cs | 2 +- .../Scenarios/Api/TestApi.cs | 6 +- test/Microsoft.ML.Tests/Scenarios/OvaTest.cs | 6 +- .../Scenarios/TensorflowTests.cs | 1 + .../IrisPlantClassificationTests.cs | 1 + .../TensorflowTests.cs | 1 + .../MatrixFactorizationTests.cs | 5 +- .../TrainerEstimators/SdcaTests.cs | 3 +- .../Transformers/FeatureSelectionTests.cs | 3 +- 37 files changed, 885 insertions(+), 688 deletions(-) diff --git a/docs/code/MlNetCookBook.md b/docs/code/MlNetCookBook.md index 6ce3c2c67b..7ff7b1f8eb 100644 --- a/docs/code/MlNetCookBook.md +++ b/docs/code/MlNetCookBook.md @@ -443,10 +443,24 @@ var reader = mlContext.Data.TextReader(ctx => ( // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); +// Sometime, caching data in-memory after its first access can save some loading time when the data is going to be used +// several times somewhere. 
The caching mechanism is also lazy; it only caches things after being used. +// User can replace all the subsequently uses of "trainData" with "cachedTrainData". We still use "trainData" because +// a caching step, which provides the same caching function, will be inserted in the considered "learningPipeline." +var cachedTrainData = trainData.Cache(); + // Step two: define the learning pipeline. // We 'start' the pipeline with the output of the reader. var learningPipeline = reader.MakeNewEstimator() + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. + // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. This step can be removed if user doesn't have enough memory to store the whole + // data set. + .AppendCacheCheckpoint() // Now we can add any 'training steps' to it. In our case we want to 'normalize' the data (rescale to be // between -1 and 1 for all examples) .Append(r => ( @@ -486,6 +500,12 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); +// Sometime, caching data in-memory after its first access can save some loading time when the data is going to be used +// several times somewhere. The caching mechanism is also lazy; it only caches things after being used. +// User can replace all the subsequently uses of "trainData" with "cachedTrainData". 
We still use "trainData" because +// a caching step, which provides the same caching function, will be inserted in the considered "dynamicPipeline." +var cachedTrainData = mlContext.Data.Cache(trainData); + // Step two: define the learning pipeline. // We 'start' the pipeline with the output of the reader. @@ -493,6 +513,15 @@ var dynamicPipeline = // First 'normalize' the data (rescale to be // between -1 and 1 for all examples) mlContext.Transforms.Normalize("FeatureVector") + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. + // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. This step can be removed if user doesn't have enough memory to store the whole + // data set. Notice that in the upstream Transforms.Normalize step, we only scan through the data + // once so adding a caching step before it is not helpful. + .AppendCacheCheckpoint(mlContext) // Add the SDCA regression trainer. .Append(mlContext.Regression.Trainers.StochasticDualCoordinateAscent(label: "Target", features: "FeatureVector")); @@ -595,6 +624,13 @@ var learningPipeline = reader.MakeNewEstimator() r.Label, // Concatenate all the features together into one column 'Features'. Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth))) + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. 
+ // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. @@ -640,6 +676,8 @@ var dynamicPipeline = mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") // Note that the label is text, so it needs to be converted to key. .Append(mlContext.Transforms.Categorical.MapValueToKey("Label"), TransformerScope.TrainTest) + // Cache data in moemory for steps after the cache check point stage. + .AppendCacheCheckpoint(mlContext) // Use the multi-class SDCA model to predict the label using features. .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent()) // Apply the inverse conversion from 'PredictedLabel' column back to string value. @@ -741,6 +779,7 @@ var trainData = mlContext.CreateStreamingDataView(churnData); var dynamicLearningPipeline = mlContext.Transforms.Categorical.OneHotEncoding("DemographicCategory") .Append(mlContext.Transforms.Concatenate("Features", "DemographicCategory", "LastVisits")) + .AppendCacheCheckpoint(mlContext) // FastTree will benefit from caching data in memory. .Append(mlContext.BinaryClassification.Trainers.FastTree("HasChurned", "Features", numTrees: 20)); var dynamicModel = dynamicLearningPipeline.Fit(trainData); @@ -757,6 +796,7 @@ var staticLearningPipeline = staticData.MakeNewEstimator() .Append(r => ( r.HasChurned, Features: r.DemographicCategory.OneHotEncoding().ConcatWith(r.LastVisits))) + .AppendCacheCheckpoint() // FastTree will benefit from caching data in memory. 
.Append(r => mlContext.BinaryClassification.Trainers.FastTree(r.HasChurned, r.Features, numTrees: 20)); var staticModel = staticLearningPipeline.Fit(staticData); @@ -813,6 +853,8 @@ var learningPipeline = reader.MakeNewEstimator() // When the normalizer is trained, the below delegate is going to be called. // We use it to memorize the scales. onFit: (scales, offsets) => normScales = scales))) + // Cache data used in memory because the subsequently trainer needs to access the data multiple times. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. @@ -987,6 +1029,10 @@ var catColumns = data.GetColumn(r => r.CategoricalFeatures).Take(10).ToArray(); // Build several alternative featurization pipelines. var learningPipeline = reader.MakeNewEstimator() + // Cache data in memory in an on-demand manner. Columns used in any downstream step will be + // cached in memory at their first uses. This step can be removed if user's machine doesn't + // have enough memory. + .AppendCacheCheckpoint() .Append(r => ( r.Label, r.NumericalFeatures, @@ -1070,6 +1116,9 @@ var workclasses = transformedData.GetColumn(mlContext, "WorkclassOneHot var fullLearningPipeline = dynamicPipeline // Concatenate two of the 3 categorical pipelines, and the numeric features. .Append(mlContext.Transforms.Concatenate("Features", "NumericalFeatures", "CategoricalBag", "WorkclassOneHotTrimmed")) + // Cache data in memory so that the following trainer will be able to access training examples without + // reading them from disk multiple times. + .AppendCacheCheckpoint(mlContext) // Now we're ready to train. We chose our FastTree trainer for this classification task. .Append(mlContext.BinaryClassification.Trainers.FastTree(numTrees: 50)); @@ -1121,6 +1170,10 @@ var messageTexts = data.GetColumn(x => x.Message).Take(20).ToArray(); // Apply various kinds of text operations supported by ML.NET. 
var learningPipeline = reader.MakeNewEstimator() + // Cache data in memory in an on-demand manner. Columns used in any downstream step will be + // cached in memory at their first uses. This step can be removed if user's machine doesn't + // have enough memory. + .AppendCacheCheckpoint() .Append(r => ( // One-stop shop to run the full text featurization. TextFeatures: r.Message.FeaturizeText(), @@ -1243,6 +1296,9 @@ var learningPipeline = reader.MakeNewEstimator() Label: r.Label.ToKey(), // Concatenate all the features together into one column 'Features'. Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth))) + // Add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. @@ -1298,6 +1354,10 @@ var dynamicPipeline = mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") // Note that the label is text, so it needs to be converted to key. .Append(mlContext.Transforms.Conversions.MapValueToKey("Label"), TransformerScope.TrainTest) + // Cache data in memory so that SDCA trainer will be able to randomly access training examples without + // reading data from disk multiple times. Data will be cached at its first use in any downstream step. + // Notice that unused part in the data may not be cached. + .AppendCacheCheckpoint(mlContext) // Use the multi-class SDCA model to predict the label using features. .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent()); @@ -1439,6 +1499,7 @@ public static ITransformer TrainModel(MLContext mlContext, IDataView trainData) Action mapping = (input, output) => output.Label = input.Income > 50000; // Construct the learning pipeline. 
var estimator = mlContext.Transforms.CustomMapping(mapping, null) + .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.FastTree(label: "Label")); return estimator.Fit(trainData); @@ -1480,8 +1541,12 @@ public class CustomMappings var estimator = mlContext.Transforms.CustomMapping(CustomMappings.IncomeMapping, nameof(CustomMappings.IncomeMapping)) .Append(mlContext.BinaryClassification.Trainers.FastTree(label: "Label")); +// If memory is enough, we can cache the data in-memory to avoid reading them from file +// when it will be accessed multiple times. +var cachedTrainData = mlContext.Data.Cache(trainData); + // Train the model. -var model = estimator.Fit(trainData); +var model = estimator.Fit(cachedTrainData); // Save the model. using (var fs = File.Create(modelPath)) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs index 32a33cdc43..a6c1904f6a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs @@ -38,11 +38,18 @@ public static void SDCA_BinaryClassification() // Read the data var data = reader.Read(dataFile); + // ML.NET doesn't cache data set by default. Therefore, if one reads a data set from a file and accesses it many times, it can be slow due to + // expensive featurization and disk operations. When the considered data can fit into memory, a solution is to cache the data in memory. Caching is especially + // helpful when working with iterative algorithms which needs many data passes. Since SDCA is the case, we cache. Inserting a + // cache step in a pipeline is also possible, please see the construction of pipeline below. + data = mlContext.Data.Cache(data); + // Step 2: Pipeline // Featurize the text column through the FeaturizeText API. 
// Then append a binary classifier, setting the "Label" column as the label of the dataset, and - // the "Features" column produced by FeaturizeText as the features column. + // the "Features" column produced by FeaturizeText as the features column. var pipeline = mlContext.Transforms.Text.FeaturizeText("SentimentText", "Features") + .AppendCacheCheckpoint(mlContext) // Add a data-cache step within a pipeline. .Append(mlContext.BinaryClassification.Trainers.StochasticDualCoordinateAscent(labelColumn: "Sentiment", featureColumn: "Features", l2Const: 0.001f)); // Step 3: Run Cross-Validation on this pipeline. diff --git a/src/Microsoft.ML.Data/StaticPipe/DataView.cs b/src/Microsoft.ML.Data/StaticPipe/DataView.cs index 153623c5cc..28d1621380 100644 --- a/src/Microsoft.ML.Data/StaticPipe/DataView.cs +++ b/src/Microsoft.ML.Data/StaticPipe/DataView.cs @@ -8,6 +8,7 @@ using Microsoft.ML.StaticPipe.Runtime; using System.Collections.Generic; using System; +using System.Linq; namespace Microsoft.ML.StaticPipe { @@ -23,6 +24,19 @@ internal DataView(IHostEnvironment env, IDataView view, StaticSchemaShape shape) AsDynamic = view; Shape.Check(Env, AsDynamic.Schema); } + + /// + /// This function return a whose columns are all cached in memory. + /// This returned is almost the same to the source . + /// The only difference are cache-related properties. + /// + public DataView Cache() + { + // Generate all column indexes in the source data. + var prefetched = Enumerable.Range(0, AsDynamic.Schema.ColumnCount).ToArray(); + // Create a cached version of the source data by caching all columns. 
+ return new DataView(Env, new CacheDataView(Env, AsDynamic, prefetched), Shape); + } } public static class DataViewExtensions diff --git a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs index 1575a0a2f1..3a7eda5fd0 100644 --- a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs +++ b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs @@ -77,5 +77,14 @@ string NameMap(PipelineColumn col) return new Estimator(Env, est, _inShape, newOut); } } + + /// + /// Cache data produced in memory by this estimator. It may append an extra estimator to the this estimator + /// for caching. The newly added estimator would be returned. + /// + public Estimator AppendCacheCheckpoint() + { + return new Estimator(Env, AsDynamic.AppendCacheCheckpoint(Env), _inShape, Shape); + } } } diff --git a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs index 55d85201b7..9049d751e8 100644 --- a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs +++ b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs @@ -130,11 +130,8 @@ protected virtual void CheckLabelCompatible(SchemaShape.Column labelCol) protected TTransformer TrainTransformer(IDataView trainSet, IDataView validationSet = null, IPredictor initPredictor = null) { - var cachedTrain = Info.WantCaching ? new CacheDataView(Host, trainSet, prefetch: null) : trainSet; - var cachedValid = Info.WantCaching && validationSet != null ? new CacheDataView(Host, validationSet, prefetch: null) : validationSet; - - var trainRoleMapped = MakeRoles(cachedTrain); - var validRoleMapped = validationSet == null ? null : MakeRoles(cachedValid); + var trainRoleMapped = MakeRoles(trainSet); + var validRoleMapped = validationSet == null ? 
null : MakeRoles(validationSet); var pred = TrainModelCore(new TrainContext(trainRoleMapped, validRoleMapped, null, initPredictor)); return MakeTransformer(pred, trainSet.Schema); diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt index 21874465e6..e81b3a9f75 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt @@ -21,35 +21,35 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 21 | 0 | 0 | 1.0000 - 1 || 0 | 22 | 8 | 0.7333 + 1 || 0 | 20 | 10 | 0.6667 2 || 0 | 0 | 28 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.7778 | -Accuracy(micro-avg): 0.898734 -Accuracy(macro-avg): 0.911111 -Log-loss: 0.372620 -Log-loss reduction: 65.736556 +Precision ||1.0000 |1.0000 |0.7368 | +Accuracy(micro-avg): 0.873418 +Accuracy(macro-avg): 0.888889 +Log-loss: 0.393949 +Log-loss reduction: 63.775293 Confusion table ||======================== PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 29 | 0 | 0 | 1.0000 - 1 || 0 | 18 | 2 | 0.9000 + 1 || 0 | 19 | 1 | 0.9500 2 || 0 | 0 | 22 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9167 | -Accuracy(micro-avg): 0.971831 -Accuracy(macro-avg): 0.966667 -Log-loss: 0.357704 -Log-loss reduction: 67.051654 +Precision ||1.0000 |1.0000 |0.9565 | +Accuracy(micro-avg): 0.985915 +Accuracy(macro-avg): 0.983333 +Log-loss: 0.299620 +Log-loss reduction: 72.401815 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.935283 (0.0365) -Accuracy(macro-avg): 0.938889 (0.0278) -Log-loss: 0.365162 (0.0075) -Log-loss reduction: 66.394105 (0.6575) +Accuracy(micro-avg): 0.929667 (0.0562) +Accuracy(macro-avg): 0.936111 (0.0472) +Log-loss: 0.346785 (0.0472) +Log-loss reduction: 68.088554 (4.3133) --------------------------------------- Physical memory usage(MB): %Number% diff --git 
a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt index 7f9d1ab2be..23b93e5bc7 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt @@ -1,4 +1,4 @@ OVA Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.935283 0.938889 0.365162 66.3941 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} +0.929667 0.936111 0.346785 68.08855 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt index ea773d1bba..42e07aea08 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -5 0 0 0.043587643807703136 0.957348645 0.04264102 1.03425764E-05 0 1 2 -6 0 0 0.20844569128859777 0.8118451 0.188126311 2.8563165E-05 0 1 2 -8 0 0 0.44491771498326443 0.640877 0.359043151 7.987263E-05 0 1 2 -9 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 -10 0 0 0.064111239185181926 0.937900662 0.0620922744 7.04143076E-06 0 1 2 -11 0 0 0.19511668953898065 0.822738647 0.17722775 3.361244E-05 0 1 2 -18 0 0 0.040957067767296483 0.959870338 0.0401218459 7.82396E-06 0 1 2 -20 0 0 0.12310363986545093 0.884172 0.115805365 2.26346256E-05 0 1 2 -21 0 0 0.080695089616231355 0.9224749 0.07751174 1.33279436E-05 0 1 2 -25 0 0 0.30682306393325992 0.7357808 0.2641595 5.97413928E-05 0 1 2 -28 0 0 0.13141817305409223 0.876851 0.12313617 1.279574E-05 0 1 2 -31 0 0 0.10895984751128654 0.8967664 
0.103215657 1.78892569E-05 0 1 2 -32 0 0 0.035477802883361699 0.965144157 0.0348526426 3.21791072E-06 0 1 2 -35 0 0 0.20274726386806977 0.8164846 0.183501333 1.40994789E-05 0 1 2 -37 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 -40 0 0 0.12298365201239185 0.8842781 0.115710318 1.158336E-05 0 1 2 -41 0 0 0.63401679194266458 0.5304568 0.4693291 0.000214140542 0 1 2 -44 0 0 0.077454598775344219 0.925469041 0.07449977 3.11931653E-05 0 1 2 -45 0 0 0.3215830979624606 0.7250004 0.274949133 5.0463128E-05 0 1 2 -46 0 0 0.072538640149662562 0.9300298 0.06995974 1.0462416E-05 0 1 2 -48 0 0 0.070505947028000213 0.9319222 0.0680698752 7.905172E-06 0 1 2 -50 1 1 0.97585559443809855 0.376869768 0.358608037 0.264522225 1 2 0 -51 1 1 0.820648723050456 0.440146029 0.3583106 0.201543346 1 2 0 -52 1 2 1.0835275133336952 0.5653485 0.3383997 0.0962518156 2 1 0 -54 1 2 0.75898112148691677 0.472428739 0.468143165 0.0594281144 2 1 0 -56 1 2 1.0174162545586878 0.5111817 0.36152783 0.127290443 2 1 0 -60 1 1 0.30253484094402477 0.738942742 0.209481522 0.051575724 1 2 0 -63 1 2 0.77949402405350943 0.499299049 0.458638 0.04206293 2 1 0 -64 1 1 0.45505022537231249 0.6344161 0.288069278 0.0775146 1 0 2 -66 1 1 0.7154835565078782 0.488955617 0.46298942 0.04805498 1 2 0 -68 1 1 0.68322766519277334 0.504984438 0.482306838 0.0127087329 1 2 0 -69 1 1 0.31084089328775633 0.732830465 0.183353335 0.08381622 1 2 0 -70 1 2 1.1682944017762613 0.6530067 0.310896754 0.0360965841 2 1 0 -71 1 1 0.43377030209255479 0.6480611 0.185485169 0.166453749 1 0 2 -72 1 2 0.88766165771254424 0.578003168 0.41161713 0.0103796953 2 1 0 -73 1 1 0.66368933400718488 0.514948 0.4451455 0.03990646 1 2 0 -74 1 1 0.54404239638263385 0.5803973 0.245238408 0.174364269 1 2 0 -76 1 2 0.84677727980192752 0.5165659 0.4287946 0.0546395145 2 1 0 -77 1 2 1.2789386167391619 0.688494444 0.278332561 0.0331729874 2 1 0 -79 1 1 0.34033011681215469 0.7115354 0.2354252 0.0530394167 1 0 2 -82 1 1 0.35118632841443026 0.7038526 
0.15209128 0.144056112 1 2 0 -88 1 1 0.4571578145475656 0.6330804 0.217808262 0.149111286 1 2 0 -90 1 1 0.54303381152243435 0.580983 0.390706122 0.0283109024 1 2 0 -91 1 1 0.7255881783753686 0.484039783 0.444033325 0.0719268844 1 2 0 -92 1 1 0.35286862388238727 0.7026695 0.20336625 0.09396427 1 2 0 -93 1 1 0.24150358472221847 0.785446 0.122569308 0.0919846743 1 0 2 -95 1 1 0.45747345580807686 0.6328806 0.223053813 0.144065529 1 2 0 -96 1 1 0.46692162584127184 0.6269292 0.2688824 0.1041884 1 2 0 -97 1 1 0.52181706235134551 0.593441248 0.265519917 0.14103885 1 2 0 -98 1 1 0.33964763199167614 0.7120212 0.255649149 0.03232969 1 0 2 -99 1 1 0.42298578084071409 0.655087948 0.2430654 0.10184665 1 2 0 -100 2 2 0.13591733259440952 0.8729148 0.125132382 0.00195282849 2 1 0 -102 2 2 0.13809510857610402 0.871015847 0.125785753 0.00319840666 2 1 0 -104 2 2 0.19932133588014422 0.8192866 0.178226635 0.00248679356 2 1 0 -105 2 2 0.09978434131070596 0.9050326 0.09390649 0.00106095837 2 1 0 -106 2 2 0.65516062299283195 0.519358635 0.4732639 0.00737748668 2 1 0 -108 2 2 0.36038464423836569 0.697408 0.300992548 0.00159944966 2 1 0 -109 2 2 0.042800052177573163 0.958102942 0.03757144 0.00432561943 2 1 0 -111 2 2 0.33893424257144178 0.7125293 0.282670647 0.004800048 2 1 0 -112 2 2 0.17819193567707683 0.8367818 0.156975582 0.006242614 2 1 0 -113 2 2 0.49781014911918742 0.6078603 0.388630718 0.00350892986 2 1 0 -115 2 2 0.1683952699484349 0.845019758 0.146008313 0.008971939 2 1 0 -117 2 2 0.023365514010699712 0.976905346 0.02015102 0.00294363522 2 1 0 -120 2 2 0.11133724227002473 0.894637 0.100207157 0.005155826 2 1 0 -121 2 2 0.43666882240878063 0.6461854 0.346793234 0.007021348 2 1 0 -122 2 2 0.13629671282280101 0.8725837 0.126684025 0.000732263 2 1 0 -123 2 2 0.4310483662194341 0.6498275 0.338038325 0.0121342046 2 1 0 -125 2 2 0.11330052370098145 0.8928823 0.101871319 0.0052463985 2 1 0 -128 2 2 0.27949760674881013 0.756163538 0.2411889 0.00264759478 2 1 0 -129 2 2 0.17530740569786113 
0.839199 0.153847516 0.006953467 2 1 0 -131 2 2 0.031839393778411017 0.968662143 0.0223613773 0.008976495 2 1 0 -132 2 2 0.27137481365798816 0.7623307 0.235219285 0.00245000049 2 1 0 -133 2 2 0.43700297433440277 0.6459695 0.341389537 0.01264096 2 1 0 -137 2 2 0.23063259534491895 0.794031143 0.198468238 0.00750062242 2 1 0 -138 2 2 0.43845130281190237 0.6450346 0.3309319 0.0240335166 2 1 0 -141 2 2 0.17626166414917829 0.8383986 0.142890155 0.018711295 2 1 0 -144 2 2 0.099717233123952864 0.9050933 0.09041383 0.00449282629 2 1 0 -145 2 2 0.18787613378173548 0.828717351 0.161682889 0.009599784 2 1 0 -147 2 2 0.24798062433245444 0.780375063 0.20853655 0.01108838 2 1 0 -0 0 0 0.34881132522048625 0.705526233 0.294473559 1.92144441E-07 0 1 2 -1 0 0 0.36141580969752651 0.696689248 0.303309947 7.8389877E-07 0 1 2 -2 0 0 0.35568660847624228 0.7006922 0.299307227 5.929496E-07 0 1 2 -3 0 0 0.36470718348091719 0.694399953 0.30559817 1.84990029E-06 0 1 2 -4 0 0 0.34775770739677259 0.70627 0.293729782 2.00147142E-07 0 1 2 -7 0 0 0.35382023048081196 0.702001154 0.297998428 4.17606344E-07 0 1 2 -12 0 0 0.36098727532383801 0.696987867 0.303011417 7.32556146E-07 0 1 2 -13 0 0 0.35788558263546733 0.699153066 0.300846159 7.88259E-07 0 1 2 -14 0 0 0.33437356737542145 0.715786338 0.284213632 7.41558059E-09 0 1 2 -15 0 0 0.33259729807630167 0.7170589 0.2829411 2.302074E-08 0 1 2 -16 0 0 0.33963038748907248 0.712033451 0.2879665 5.7538923E-08 0 1 2 -17 0 0 0.34952968472792562 0.7050196 0.294980139 2.583559E-07 0 1 2 -19 0 0 0.34579385759256209 0.70765835 0.292341441 2.096021E-07 0 1 2 -22 0 0 0.34605819997965914 0.7074713 0.29252857 1.20912986E-07 0 1 2 -23 0 0 0.36811690986051288 0.6920363 0.307961673 2.02298725E-06 0 1 2 -24 0 0 0.37119922981165249 0.6899065 0.310090721 2.80658514E-06 0 1 2 -26 0 0 0.35941763729273518 0.698082745 0.301916152 1.08453048E-06 0 1 2 -27 0 0 0.35009366263991337 0.7046221 0.295377672 2.18394433E-07 0 1 2 -29 0 0 0.36473963008629695 0.6943774 0.305620819 
1.74348168E-06 0 1 2 -30 0 0 0.36694890288891646 0.692845047 0.307153255 1.67791779E-06 0 1 2 -33 0 0 0.33532989874849606 0.715102136 0.284897834 2.06016182E-08 0 1 2 -34 0 0 0.36074853902438908 0.6971543 0.302845 6.899852E-07 0 1 2 -36 0 0 0.3442039710581144 0.708784342 0.2912156 5.37456621E-08 0 1 2 -38 0 0 0.36249420192434484 0.695938349 0.304059923 1.72375007E-06 0 1 2 -39 0 0 0.35302378267720547 0.7025605 0.2974392 3.31980715E-07 0 1 2 -42 0 0 0.35802404250832931 0.699056268 0.30094254 1.18013247E-06 0 1 2 -43 0 0 0.35964093968844252 0.6979269 0.3020715 1.62606943E-06 0 1 2 -47 0 0 0.35894549345005311 0.6984124 0.301586539 1.06725963E-06 0 1 2 -49 0 0 0.35354270878931848 0.702196 0.2978036 3.52685419E-07 0 1 2 -53 1 1 0.43814530313713385 0.645232 0.30703035 0.0477376431 1 2 0 -55 1 1 0.52991693789558192 0.588653862 0.3812489 0.0300972275 1 2 0 -57 1 1 0.3144098829942828 0.730219662 0.222309768 0.047470592 1 0 2 -58 1 1 0.16338480577647163 0.8492643 0.10643021 0.0443054661 1 2 0 -59 1 1 0.43732800775193598 0.6457596 0.2802832 0.0739572 1 2 0 -61 1 1 0.26826825521335035 0.7647026 0.165891945 0.06940546 1 2 0 -62 1 1 0.16519114388964468 0.84773165 0.07620503 0.0760633051 1 2 0 -65 1 1 0.12274628026245138 0.884488046 0.0737581253 0.0417538173 1 0 2 -67 1 1 0.18131529311314101 0.8341723 0.09860581 0.0672218949 1 0 2 -75 1 1 0.13597202822637602 0.872867048 0.06550142 0.0616315342 1 0 2 -78 1 1 0.45281685214847861 0.6358346 0.332585216 0.0315802023 1 2 0 -80 1 1 0.23111914057709354 0.7936449 0.105801627 0.10055349 1 0 2 -81 1 1 0.21336965456040224 0.807857454 0.137252137 0.0548904277 1 0 2 -83 1 2 1.1418187241491273 0.6765539 0.3192379 0.00420819456 2 1 0 -84 1 2 0.79904921193122347 0.5272309 0.449756384 0.0230127368 2 1 0 -85 1 1 0.32376577391953759 0.723419666 0.223107859 0.05347247 1 2 0 -86 1 1 0.19858499419365563 0.8198901 0.142534047 0.037575867 1 2 0 -87 1 1 0.32077518585346926 0.725586355 0.24517718 0.0292364415 1 2 0 -89 1 1 0.36379979176298916 0.695030332 
0.242874637 0.06209502 1 2 0 -94 1 1 0.37955450575815275 0.684166133 0.262924 0.0529098734 1 2 0 -101 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 -103 2 2 0.33454245635946434 0.71566546 0.282688916 0.00164566189 2 1 0 -107 2 2 0.32176538137222749 0.724868238 0.27461502 0.000516714761 2 1 0 -110 2 2 0.4735643051120339 0.622778535 0.371121377 0.006100062 2 1 0 -114 2 2 0.3247388954422577 0.722716033 0.2752217 0.00206224318 2 1 0 -116 2 2 0.36482478597312679 0.6943183 0.3033064 0.00237530912 2 1 0 -118 2 2 0.30234231913723036 0.739085 0.260826528 8.847632E-05 2 1 0 -119 2 2 0.37792268388420569 0.6852835 0.3114479 0.0032686044 2 1 0 -124 2 2 0.33777190179266953 0.713358 0.284956157 0.00168584171 2 1 0 -126 2 2 0.51721400727433164 0.5961792 0.395325035 0.008495758 2 1 0 -127 2 2 0.48223827874761288 0.617399931 0.374503255 0.008096788 2 1 0 -130 2 2 0.33112825051245398 0.718113065 0.281247973 0.0006389589 2 1 0 -134 2 2 0.34240991810493487 0.7100571 0.288253874 0.00168902089 2 1 0 -135 2 2 0.3238549270121831 0.7233552 0.2760729 0.0005719304 2 1 0 -136 2 2 0.31869296062169472 0.727098763 0.271312952 0.00158829393 2 1 0 -139 2 2 0.39792518591800413 0.6717123 0.325529337 0.002758371 2 1 0 -140 2 2 0.32133155702629967 0.7251828 0.273566216 0.0012509838 2 1 0 -142 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 -143 2 2 0.31559105025409723 0.72935766 0.269767135 0.0008751799 2 1 0 -146 2 2 0.3760214987664387 0.6865876 0.310142934 0.00326948427 2 1 0 -148 2 2 0.3305544580259554 0.718525231 0.2789816 0.00249314215 2 1 0 -149 2 2 0.37408822283240173 0.6879162 0.307514042 0.00456974143 2 1 0 +5 0 0 0.14898499738372634 0.861582041 0.138407215 1.07481983E-05 0 1 2 +6 0 0 0.3065288836068612 0.73599726 0.2639699 3.28646565E-05 0 1 2 +8 0 0 0.42786795330515681 0.6518975 0.3480076 9.492641E-05 0 1 2 +9 0 0 0.32727882650425077 0.7208827 0.279081136 3.612931E-05 0 1 2 +10 0 0 0.17138787650099252 0.8424947 0.157498628 6.671493E-06 0 1 2 +11 0 0 
0.29096005205092529 0.74754554 0.2524177 3.67338434E-05 0 1 2 +18 0 0 0.13536917585154803 0.8733934 0.1265994 7.166915E-06 0 1 2 +20 0 0 0.21879048630641887 0.803490043 0.196489558 2.03802119E-05 0 1 2 +21 0 0 0.19863559351573729 0.8198486 0.180137366 1.40261882E-05 0 1 2 +25 0 0 0.33531381208991595 0.71511364 0.2848282 5.815608E-05 0 1 2 +28 0 0 0.23158758890843326 0.7932732 0.2067149 1.18948565E-05 0 1 2 +31 0 0 0.20718141254425143 0.8128722 0.187111884 1.593467E-05 0 1 2 +32 0 0 0.14581809307946758 0.8643149 0.135681465 3.6142344E-06 0 1 2 +35 0 0 0.28094446239802623 0.755070269 0.244916424 1.32786363E-05 0 1 2 +37 0 0 0.32727882650425077 0.7208827 0.279081136 3.612931E-05 0 1 2 +40 0 0 0.23484944167000599 0.7906899 0.209298462 1.16704177E-05 0 1 2 +41 0 0 0.52141117030361683 0.59368217 0.406092882 0.000224944655 0 1 2 +44 0 0 0.19869528365124761 0.819799662 0.180165187 3.516637E-05 0 1 2 +45 0 0 0.35016624430023463 0.704570949 0.295377225 5.18644047E-05 0 1 2 +46 0 0 0.19245268368525237 0.82493335 0.17505537 1.12744219E-05 0 1 2 +48 0 0 0.18156112383605871 0.833967268 0.166025028 7.721445E-06 0 1 2 +50 1 1 0.64624484712856134 0.5240098 0.262612373 0.2133778 1 2 0 +51 1 1 0.63276740252217267 0.531119943 0.307034731 0.161845312 1 2 0 +52 1 2 0.83214667389569419 0.478386253 0.435114235 0.08649951 2 1 0 +54 1 1 0.74737111472015905 0.473609984 0.46261856 0.0637714639 1 2 0 +56 1 2 0.80606426845587942 0.4562977 0.446612358 0.09708993 2 1 0 +60 1 1 0.40475436673945725 0.667140663 0.2567001 0.07615923 1 2 0 +63 1 2 0.83848982664755245 0.5245598 0.432362974 0.0430772379 2 1 0 +64 1 1 0.4724140828680039 0.6234953 0.2927696 0.08373512 1 0 2 +66 1 2 0.81331552350806924 0.510974467 0.443385571 0.045639988 2 1 0 +68 1 2 0.88581390317653119 0.569699466 0.4123784 0.017922163 2 1 0 +69 1 1 0.4096502018565843 0.663882434 0.2231506 0.112966977 1 2 0 +70 1 2 1.1272952904290068 0.646777868 0.32390815 0.0293139871 2 1 0 +71 1 1 0.44999656255507409 0.637630343 0.198594451 0.1637752 1 
0 2 +72 1 2 1.0945008493805846 0.6525861 0.334706634 0.0127072614 2 1 0 +73 1 2 0.75466565674392561 0.484873533 0.4701678 0.044958692 2 1 0 +74 1 1 0.50278888715808445 0.6048415 0.221343279 0.173815265 1 2 0 +76 1 2 0.78856604341887837 0.4854024 0.454496056 0.0601015352 2 1 0 +77 1 2 1.1512132035767342 0.6525395 0.316252857 0.0312076658 2 1 0 +79 1 1 0.42617249485652908 0.6530037 0.2899244 0.057071913 1 0 2 +82 1 1 0.41470700444302333 0.6605338 0.170520529 0.1689457 1 0 2 +88 1 1 0.49394371600700848 0.6102151 0.243849784 0.145935118 1 2 0 +90 1 1 0.71582408663720343 0.488789141 0.47624135 0.03496951 1 2 0 +91 1 1 0.73003131975183999 0.4818939 0.449290931 0.0688152 1 2 0 +92 1 1 0.43657621695425319 0.646245241 0.234681442 0.119073339 1 2 0 +93 1 1 0.34178431537548232 0.710501432 0.170813844 0.118684709 1 0 2 +95 1 1 0.49105836953030818 0.611978352 0.245221257 0.142800376 1 2 0 +96 1 1 0.52911389653841068 0.589126766 0.3021384 0.108734839 1 2 0 +97 1 1 0.51338818870184666 0.5984644 0.258210152 0.143325433 1 2 0 +98 1 1 0.44598737716555714 0.640191853 0.320325464 0.0394826569 1 0 2 +99 1 1 0.49633411615743322 0.6087582 0.278045028 0.113196738 1 2 0 +100 2 2 0.1657485118529694 0.8472593 0.15114595 0.00159478688 2 1 0 +102 2 2 0.15062702864772329 0.860168457 0.1369899 0.002841651 2 1 0 +104 2 2 0.19216183181956109 0.8251733 0.172580525 0.00224616844 2 1 0 +105 2 2 0.10783693379121159 0.897774 0.101297595 0.0009284109 2 1 0 +106 2 2 0.49803864682427001 0.607721448 0.38346687 0.008811677 2 1 0 +108 2 2 0.24869617414609116 0.779816866 0.218381524 0.00180161942 2 1 0 +109 2 2 0.088986239489588018 0.914858162 0.0819557458 0.00318608852 2 1 0 +111 2 2 0.27248498685253925 0.761484861 0.23352161 0.004993557 2 1 0 +112 2 2 0.19350236090832176 0.8240679 0.170360163 0.00557197351 2 1 0 +113 2 2 0.36301900972746426 0.6955732 0.3003853 0.0040415 2 1 0 +115 2 2 0.20394542591432366 0.8155069 0.177217782 0.00727535738 2 1 0 +117 2 2 0.060604416045993939 0.9411955 0.05663682 0.002167699 
2 1 0 +120 2 2 0.14770763531771011 0.8626833 0.133085579 0.004231106 2 1 0 +121 2 2 0.35116812164489036 0.7038654 0.288968384 0.007166216 2 1 0 +122 2 2 0.11938337402387501 0.8874675 0.111851335 0.0006811737 2 1 0 +123 2 2 0.36594708304609636 0.6935395 0.293548316 0.0129121933 2 1 0 +125 2 2 0.14890267602086421 0.861653 0.133782089 0.004564954 2 1 0 +128 2 2 0.23149637602267908 0.7933456 0.204050332 0.002604086 2 1 0 +129 2 2 0.19738168861304117 0.820877254 0.172454879 0.006667889 2 1 0 +131 2 2 0.082877993597994665 0.920463443 0.0726075843 0.006928972 2 1 0 +132 2 2 0.2261608752358937 0.7975898 0.200027317 0.002382908 2 1 0 +133 2 2 0.37668679350259709 0.686130941 0.300454319 0.01341475 2 1 0 +137 2 2 0.23730028214241819 0.7887544 0.2045337 0.0067119 2 1 0 +138 2 2 0.41466847394913298 0.660559237 0.317496151 0.0219445918 2 1 0 +141 2 2 0.2378343130973842 0.7883333 0.195801422 0.01586528 2 1 0 +144 2 2 0.14102681941995521 0.868466 0.1280038 0.00353016751 2 1 0 +145 2 2 0.21316426895669463 0.8080234 0.183620155 0.008356474 2 1 0 +147 2 2 0.25860642449489452 0.772126853 0.217881814 0.009991349 2 1 0 +0 0 0 0.14822243523905682 0.8622393 0.137760535 1.42600655E-07 0 1 2 +1 0 0 0.20081429998478928 0.818064332 0.181935042 6.369149E-07 0 1 2 +2 0 0 0.18386421756476942 0.8320488 0.167950869 3.90649234E-07 0 1 2 +3 0 0 0.21430091092093637 0.8071055 0.192893252 1.25093914E-06 0 1 2 +4 0 0 0.14464925459452174 0.865325749 0.134674087 1.351252E-07 0 1 2 +7 0 0 0.16671164417651543 0.846443653 0.153556049 3.12257328E-07 0 1 2 +12 0 0 0.20372786340069227 0.8156843 0.184315056 6.161207E-07 0 1 2 +13 0 0 0.2053889533214896 0.8143305 0.185669 4.731557E-07 0 1 2 +14 0 0 0.086467323769418583 0.9171655 0.0828345 6.221406E-09 0 1 2 +15 0 0 0.081957730046945412 0.9213109 0.07868907 1.38329819E-08 0 1 2 +16 0 0 0.10829029194802958 0.89736706 0.102632925 3.48168818E-08 0 1 2 +17 0 0 0.14958801772137612 0.861062646 0.13893716 1.75314966E-07 0 1 2 +19 0 0 0.13322714868487645 0.875266254 
0.124733619 1.28628983E-07 0 1 2 +22 0 0 0.13862518012010799 0.870554268 0.129445657 6.036393E-08 0 1 2 +23 0 0 0.19310245722698802 0.8243975 0.175601169 1.32743924E-06 0 1 2 +24 0 0 0.20865693931030993 0.811673641 0.188324288 2.048797E-06 0 1 2 +26 0 0 0.17749546972040989 0.8373648 0.1626345 6.926975E-07 0 1 2 +27 0 0 0.15019147117595094 0.8605432 0.13945666 1.74842683E-07 0 1 2 +29 0 0 0.20747930702213993 0.812630057 0.187368736 1.223238E-06 0 1 2 +30 0 0 0.21292293576370525 0.8082184 0.1917803 1.29386513E-06 0 1 2 +33 0 0 0.091640067349721938 0.9124335 0.0875665 1.41758205E-08 0 1 2 +34 0 0 0.19699061011964958 0.821198344 0.17880109 6.01980844E-07 0 1 2 +36 0 0 0.12779659882948383 0.880032361 0.119967587 4.84299072E-08 0 1 2 +38 0 0 0.21789921377726856 0.8042065 0.195792481 1.04123671E-06 0 1 2 +39 0 0 0.16240138202263071 0.8500999 0.149899811 2.621196E-07 0 1 2 +42 0 0 0.19811089534959545 0.8202789 0.179720446 6.594638E-07 0 1 2 +43 0 0 0.17222970502139762 0.8417858 0.158213392 8.341555E-07 0 1 2 +47 0 0 0.19599802886062165 0.822013855 0.17798546 6.796521E-07 0 1 2 +49 0 0 0.16848901720737497 0.844940543 0.1550592 2.68694436E-07 0 1 2 +53 1 1 0.33929501593555544 0.7122723 0.249259621 0.0384680778 1 2 0 +55 1 1 0.43740997493483258 0.645706654 0.327492118 0.026801208 1 2 0 +57 1 1 0.25341172081105845 0.77614826 0.19214052 0.0317112133 1 0 2 +58 1 1 0.20133568049613867 0.8176379 0.140906453 0.0414556079 1 2 0 +59 1 1 0.28494290112955128 0.7520572 0.176915661 0.07102712 1 2 0 +61 1 1 0.22031497886724014 0.802266061 0.126393482 0.07134045 1 2 0 +62 1 1 0.17283791289976866 0.841273963 0.100865625 0.05786044 1 2 0 +65 1 1 0.14734558889976437 0.8629957 0.08228925 0.054715097 1 0 2 +67 1 1 0.17615666464048946 0.8384866 0.0872618854 0.07425149 1 0 2 +75 1 1 0.15642215815585309 0.8551981 0.0761427358 0.0686592 1 2 0 +78 1 1 0.37642751820989312 0.686308861 0.28346312 0.030228015 1 2 0 +80 1 1 0.1944674886980301 0.823272943 0.08934537 0.08738167 1 2 0 +81 1 1 
0.18446994007360493 0.831544936 0.115473866 0.05298119 1 0 2 +83 1 2 0.92853824559840126 0.6011451 0.395130873 0.0037240074 2 1 0 +84 1 1 0.56465177772236885 0.5685581 0.4072805 0.0241614021 1 2 0 +85 1 1 0.25727484156807667 0.7731557 0.1642441 0.0626002 1 2 0 +86 1 1 0.22862622189799611 0.795625865 0.166149229 0.03822488 1 2 0 +87 1 1 0.35292282916697471 0.7026314 0.275890559 0.02147802 1 2 0 +89 1 1 0.27794501284082396 0.757338464 0.189463019 0.0531985424 1 2 0 +94 1 1 0.29989849188818812 0.7408934 0.2118927 0.0472138971 1 2 0 +101 2 2 0.4738038899565814 0.622629344 0.3747406 0.002630036 2 1 0 +103 2 2 0.44910078675473908 0.6382018 0.360287219 0.00151102315 2 1 0 +107 2 2 0.42756627142640746 0.6520942 0.347433776 0.000472046959 2 1 0 +110 2 2 0.57916486299376979 0.560366154 0.432973266 0.00666057365 2 1 0 +114 2 2 0.45214138527005782 0.6362642 0.361806333 0.00192946056 2 1 0 +116 2 2 0.45497497267329101 0.634463847 0.363294363 0.00224180124 2 1 0 +118 2 2 0.470631821054253 0.6246075 0.375314832 7.76593442E-05 2 1 0 +119 2 2 0.51596566495735996 0.5969239 0.400479466 0.00259662978 2 1 0 +124 2 2 0.41382397334687865 0.6611173 0.33714956 0.00173310027 2 1 0 +126 2 2 0.64706268251636467 0.523581445 0.468205124 0.00821345 2 1 0 +127 2 2 0.6163570913261589 0.5399077 0.4518157 0.008276599 2 1 0 +130 2 2 0.42555053982455915 0.653409958 0.3460102 0.0005798615 2 1 0 +134 2 2 0.47967559175010499 0.618984163 0.379600972 0.00141487492 2 1 0 +135 2 2 0.39549652187626555 0.6733456 0.326093346 0.000561050954 2 1 0 +136 2 2 0.40373690532217765 0.6678198 0.330517977 0.001662206 2 1 0 +139 2 2 0.46630393097913669 0.6273166 0.3698809 0.00280250143 2 1 0 +140 2 2 0.41184706306881064 0.6624256 0.336315721 0.00125870958 2 1 0 +142 2 2 0.4738038899565814 0.622629344 0.3747406 0.002630036 2 1 0 +143 2 2 0.40730363349873294 0.6654421 0.333683163 0.0008747191 2 1 0 +146 2 2 0.49646297645874904 0.6086798 0.388417184 0.00290302956 2 1 0 +148 2 2 0.41691013162708651 0.659080148 0.338237643 
0.00268219085 2 1 0 +149 2 2 0.49336894801249942 0.61056596 0.384958029 0.004475997 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt index 4d9f2f452b..f0bb6cd1b8 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt @@ -13,21 +13,21 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 50 | 0 | 0 | 1.0000 - 1 || 0 | 48 | 2 | 0.9600 + 1 || 0 | 46 | 4 | 0.9200 2 || 0 | 0 | 50 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9615 | -Accuracy(micro-avg): 0.986667 -Accuracy(macro-avg): 0.986667 -Log-loss: 0.246444 -Log-loss reduction: 77.567746 +Precision ||1.0000 |1.0000 |0.9259 | +Accuracy(micro-avg): 0.973333 +Accuracy(macro-avg): 0.973333 +Log-loss: 0.291849 +Log-loss reduction: 73.434718 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.986667 (0.0000) -Accuracy(macro-avg): 0.986667 (0.0000) -Log-loss: 0.246444 (0.0000) -Log-loss reduction: 77.567746 (0.0000) +Accuracy(micro-avg): 0.973333 (0.0000) +Accuracy(macro-avg): 0.973333 (0.0000) +Log-loss: 0.291849 (0.0000) +Log-loss reduction: 73.434718 (0.0000) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt index 971b18dd55..12797db1fb 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt @@ -1,4 +1,4 @@ OVA Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.986667 0.986667 0.246444 77.56775 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No 
dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} +0.973333 0.973333 0.291849 73.43472 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt index cbb0c14c4f..4e152462bd 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -0 0 0 0.064187757670407464 0.9378289 0.0621710941 8.6083265E-09 0 1 2 -1 0 0 0.1893002350721239 0.827538 0.172461912 5.35715046E-08 0 1 2 -2 0 0 0.13195967886821539 0.876376331 0.123623639 3.315678E-08 0 1 2 -3 0 0 0.18799826814680765 0.828616142 0.1713837 1.33305463E-07 0 1 2 -4 0 0 0.053931560367015983 0.947496951 0.0525030419 8.35157454E-09 0 1 2 -5 0 0 0.024550920036862604 0.975748 0.0242519863 8.78616E-09 0 1 2 -6 0 0 0.092104047221280363 0.912010252 0.08798967 5.086476E-08 0 1 2 -7 0 0 0.0890750453193484 0.9147769 0.08522307 2.25849668E-08 0 1 2 -8 0 0 0.2649651905225579 0.767232656 0.23276712 2.323326E-07 0 1 2 -9 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -10 0 0 0.038712934227392123 0.962026834 0.037973173 3.67671626E-09 0 1 2 -11 0 0 0.10398316683257577 0.901240468 0.09875945 5.7562815E-08 0 1 2 -12 0 0 0.20701490311777249 0.813007534 0.186992422 5.15469765E-08 0 1 2 -13 0 0 0.20677930042755727 0.8131991 0.186800852 4.620836E-08 0 1 2 -14 0 0 0.012900958726204939 0.9871819 0.0128180888 1.53412311E-10 0 1 2 -15 0 0 0.0056796126449842428 0.9943365 0.00566351926 4.0924758E-10 0 1 2 -16 0 0 0.018169258095637245 0.9819948 0.018005196 1.47014889E-09 0 1 2 -17 0 0 0.059814766253305368 0.941939 0.0580609739 1.13122178E-08 0 1 2 -18 0 0 0.028932621466359262 0.9714819 0.02851807 
4.23276969E-09 0 1 2 -19 0 0 0.032923391844126633 0.9676127 0.0323873 7.642096E-09 0 1 2 -20 0 0 0.08651372619399908 0.91712296 0.08287702 2.06279154E-08 0 1 2 -21 0 0 0.038368820295834986 0.962357938 0.03764207 1.32810882E-08 0 1 2 -22 0 0 0.04847884988937378 0.9526775 0.0473225228 3.781359E-09 0 1 2 -23 0 0 0.099972123278544239 0.904862642 0.09513722 1.30113463E-07 0 1 2 -24 0 0 0.12884486436940729 0.879110336 0.120889448 2.201515E-07 0 1 2 -25 0 0 0.20701321690255622 0.8130089 0.186990991 1.01994196E-07 0 1 2 -26 0 0 0.083707914672920755 0.919699848 0.08030011 6.121948E-08 0 1 2 -27 0 0 0.065971839514429781 0.9361572 0.063842766 1.05041069E-08 0 1 2 -28 0 0 0.076179360015026509 0.92665 0.0733500347 8.859791E-09 0 1 2 -29 0 0 0.16075048860491603 0.8515045 0.148495391 1.25408391E-07 0 1 2 -30 0 0 0.18626475341925278 0.8300538 0.169946045 1.27813465E-07 0 1 2 -31 0 0 0.065274392613973067 0.9368104 0.0631895959 1.46311239E-08 0 1 2 -32 0 0 0.018341836022086215 0.981825352 0.018174639 1.48765988E-09 0 1 2 -33 0 0 0.010831873835231624 0.9892266 0.0107734147 4.45536885E-10 0 1 2 -34 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -35 0 0 0.1094558082339183 0.8963218 0.103678234 1.02393658E-08 0 1 2 -36 0 0 0.049986936620370084 0.951241851 0.04875815 2.05979567E-09 0 1 2 -37 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -38 0 0 0.2126495148617171 0.808439434 0.191560462 1.15782285E-07 0 1 2 -39 0 0 0.085343924605329857 0.91819644 0.08180357 1.76697252E-08 0 1 2 -40 0 0 0.058200059976790904 0.9434612 0.05653884 9.271867E-09 0 1 2 -41 0 0 0.49728774260829139 0.60817796 0.391821355 7.12752467E-07 0 1 2 -42 0 0 0.14839409412278926 0.8620913 0.137908638 6.888182E-08 0 1 2 -43 0 0 0.058733854938205904 0.9429577 0.0570422448 8.043525E-08 0 1 2 -44 0 0 0.041432529938052375 0.959414065 0.0405859053 6.019737E-08 0 1 2 -45 0 0 0.18582725015020429 0.830417037 0.169582859 9.04475E-08 0 1 2 -46 0 0 0.038175474655158276 0.962544 0.03745597 
9.106087E-09 0 1 2 -47 0 0 0.14649366336893571 0.8637312 0.13626872 6.586965E-08 0 1 2 -48 0 0 0.040528072578562842 0.9602822 0.03971778 4.706963E-09 0 1 2 -49 0 0 0.10224582319060745 0.9028076 0.09719238 1.898811E-08 0 1 2 -50 1 1 0.13228530826110765 0.876091 0.07536332 0.04854567 1 0 2 -51 1 1 0.19869535635760471 0.8197996 0.101386108 0.0788142756 1 2 0 -52 1 1 0.22406365925144375 0.799264252 0.175007358 0.0257283952 1 2 0 -53 1 1 0.22846786324242829 0.79575187 0.190147012 0.0141011067 1 2 0 -54 1 1 0.2217374832444966 0.801125646 0.1814971 0.0173772536 1 2 0 -55 1 1 0.36194510206226777 0.6963206 0.290218145 0.0134612536 1 2 0 -56 1 1 0.38227740308279812 0.682305753 0.26903218 0.04866208 1 2 0 -57 1 1 0.11711832955706668 0.889479935 0.09261171 0.01790834 1 0 2 -58 1 1 0.11588080546894164 0.890581369 0.0796031356 0.0298154745 1 2 0 -59 1 1 0.22549808191140069 0.7981186 0.1629105 0.0389709137 1 2 0 -60 1 1 0.11452610106565105 0.891788661 0.08094173 0.027269613 1 2 0 -61 1 1 0.19606227523723149 0.821961045 0.1124982 0.0655407459 1 2 0 -62 1 1 0.065211469379248863 0.9368693 0.0428994 0.0202313047 1 2 0 -63 1 1 0.38674137023255867 0.679266751 0.3082701 0.0124631459 1 2 0 -64 1 1 0.23030100927747191 0.7942945 0.193664417 0.0120411161 1 0 2 -65 1 1 0.13921020030567513 0.8700451 0.0984156 0.0315392464 1 0 2 -66 1 1 0.52761682473815386 0.5900094 0.392468572 0.0175220035 1 2 0 -67 1 1 0.084603908782305962 0.9188762 0.0453156643 0.03580814 1 0 2 -68 1 1 0.4618931830241112 0.630089641 0.3666358 0.00327458978 1 2 0 -69 1 1 0.090485397720604446 0.9134877 0.04816054 0.03835176 1 2 0 -70 1 2 1.0140603005132554 0.6271839 0.362743139 0.0100729465 2 1 0 -71 1 1 0.11008255968443192 0.8957602 0.07861323 0.0256266128 1 0 2 -72 1 1 0.64771052952961239 0.523242354 0.474618584 0.00213904912 1 2 0 -73 1 1 0.27755598196422471 0.75763315 0.230862036 0.011504828 1 2 0 -74 1 1 0.10244027519493581 0.902632058 0.0589159 0.0384520665 1 0 2 -75 1 1 0.11841763901117131 0.888325 0.0673675239 
0.0443075225 1 0 2 -76 1 1 0.19025677181646344 0.8267468 0.160040483 0.0132126883 1 2 0 -77 1 1 0.61336152114875442 0.54152745 0.4505507 0.00792186148 1 2 0 -78 1 1 0.33449082060539098 0.7157024 0.264204919 0.0200926811 1 2 0 -79 1 1 0.1490904339680772 0.8614912 0.134173632 0.00433517434 1 0 2 -80 1 1 0.089822225657751095 0.9140937 0.04908715 0.0368192 1 2 0 -81 1 1 0.077764627112278675 0.925182164 0.0500315838 0.0247862767 1 0 2 -82 1 1 0.10062242034830922 0.9042744 0.06707825 0.028647339 1 0 2 -83 1 2 0.87242455033259836 0.580679059 0.417937 0.001383926 2 1 0 -84 1 1 0.64643118238444741 0.5239122 0.462312371 0.0137754688 1 2 0 -85 1 1 0.38078103791751566 0.6833275 0.219007626 0.09766489 1 2 0 -86 1 1 0.18654240234510375 0.8298234 0.129826292 0.0403503366 1 2 0 -87 1 1 0.19556197322229293 0.8223724 0.169585884 0.008041753 1 2 0 -88 1 1 0.16967653754303061 0.843937755 0.07920382 0.07685845 1 2 0 -89 1 1 0.17942883979161328 0.8357474 0.140764222 0.0234883726 1 2 0 -90 1 1 0.37073556760095389 0.690226436 0.2999358 0.009837814 1 2 0 -91 1 1 0.28153328535230204 0.7546258 0.221098259 0.02427597 1 2 0 -92 1 1 0.095651081105254609 0.908781052 0.0509764329 0.0402425081 1 2 0 -93 1 1 0.10201305873404877 0.903017759 0.07907509 0.0179071445 1 0 2 -94 1 1 0.21320461976679056 0.8079908 0.167838141 0.02417106 1 2 0 -95 1 1 0.14772366482527016 0.862669468 0.07083947 0.06649103 1 2 0 -96 1 1 0.1676055742511661 0.8456873 0.107580893 0.04673176 1 2 0 -97 1 1 0.11592859305433842 0.8905388 0.058125712 0.051335495 1 2 0 -98 1 1 0.24343470732109662 0.783930659 0.212896168 0.00317314919 1 0 2 -99 1 1 0.14288965000739814 0.8668497 0.0867102742 0.0464400165 1 2 0 -100 2 2 0.22003618725213686 0.802489758 0.197204947 0.000305285153 2 1 0 -101 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 -102 2 2 0.32248110542186914 0.7243496 0.2752483 0.000402083038 2 1 0 -103 2 2 0.4318088621640172 0.6493335 0.3500793 0.00058722886 2 1 0 -104 2 2 0.33701376028625557 0.713899 0.2857394 
0.000361559069 2 1 0 -105 2 2 0.36111778351144497 0.6968969 0.303009778 9.33285046E-05 2 1 0 -106 2 2 0.57324912019977703 0.56369096 0.434159666 0.00214935932 2 1 0 -107 2 2 0.43241433229369408 0.648940444 0.3508895 0.000170046318 2 1 0 -108 2 2 0.5524780213978463 0.5755219 0.424283057 0.000195083121 2 1 0 -109 2 2 0.10018846863192231 0.9046669 0.0947069 0.000626212 2 1 0 -110 2 2 0.34368591687548161 0.7091516 0.2858574 0.00499094324 2 1 0 -111 2 2 0.46723123458330745 0.626735151 0.3724764 0.000788469857 2 1 0 -112 2 2 0.31582627414124387 0.7291861 0.2698556 0.0009583179 2 1 0 -113 2 2 0.52239827082819301 0.593096435 0.406178564 0.00072502665 2 1 0 -114 2 2 0.36058132074789678 0.6972709 0.301918417 0.000810695637 2 1 0 -115 2 2 0.21578587862382262 0.805907845 0.192418143 0.00167402264 2 1 0 -116 2 2 0.38868733060861138 0.6779462 0.3210066 0.00104722043 2 1 0 -117 2 2 0.097428111747803359 0.907167554 0.0924843 0.000348151079 2 1 0 -118 2 2 0.51096864540412867 0.5999142 0.400067 1.88108879E-05 2 1 0 -119 2 2 0.64730667194394087 0.5234537 0.4758455 0.000700797769 2 1 0 -120 2 2 0.21506259321317989 0.806490958 0.192730322 0.0007787307 2 1 0 -121 2 2 0.42372860353635938 0.6546015 0.3437634 0.00163504237 2 1 0 -122 2 2 0.45634525198899917 0.633595049 0.366349727 5.52281235E-05 2 1 0 -123 2 2 0.56778894609208741 0.5667772 0.4307018 0.00252098124 2 1 0 -124 2 2 0.22282641548991947 0.800253749 0.198714435 0.00103183649 2 1 0 -125 2 2 0.31027244305630025 0.733247161 0.266095132 0.0006576972 2 1 0 -126 2 2 0.59157505509617891 0.5534549 0.442328155 0.004216949 2 1 0 -127 2 2 0.48357066476934346 0.616577864 0.3786479 0.00477422541 2 1 0 -128 2 2 0.41595305224046536 0.659711242 0.339890242 0.0003985048 2 1 0 -129 2 2 0.45550524433223816 0.6341275 0.364983171 0.0008893516 2 1 0 -130 2 2 0.43130825350797314 0.6496586 0.3501238 0.000217600667 2 1 0 -131 2 2 0.11700817008638008 0.8895779 0.109250523 0.00117157528 2 1 0 -132 2 2 0.40127211138702135 0.669467866 0.330161959 
0.000370198279 2 1 0 -133 2 2 0.63157763358593388 0.5317522 0.465779215 0.002468573 2 1 0 -134 2 2 0.58314815792662678 0.5581385 0.441456854 0.000404651684 2 1 0 -135 2 2 0.27204979854142897 0.7618163 0.2379036 0.000280094158 2 1 0 -136 2 2 0.1629980271721547 0.849592865 0.149376482 0.00103064778 2 1 0 -137 2 2 0.35495650373339338 0.701203942 0.297558725 0.00123733515 2 1 0 -138 2 2 0.51228059795956926 0.59912765 0.394762278 0.00611006049 2 1 0 -139 2 2 0.29479453684898038 0.7446846 0.253554732 0.00176069664 2 1 0 -140 2 2 0.23636956568495673 0.789488852 0.209835038 0.000676139141 2 1 0 -141 2 2 0.28994324565081614 0.748306036 0.247889653 0.0038042888 2 1 0 -142 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 -143 2 2 0.23597765808861054 0.7897983 0.209748372 0.0004533111 2 1 0 -144 2 2 0.16769889506754851 0.8456084 0.153658465 0.000733090041 2 1 0 -145 2 2 0.27656426046073113 0.7583849 0.239901781 0.001713358 2 1 0 -146 2 2 0.5356947512214818 0.585262537 0.413683951 0.00105350767 2 1 0 -147 2 2 0.35725414522256133 0.6995947 0.298375219 0.00203008 2 1 0 -148 2 2 0.16789042737345514 0.845446467 0.152793139 0.00176038919 2 1 0 -149 2 2 0.4005334114812254 0.6699626 0.327896535 0.002140856 2 1 0 +0 0 0 0.10523233415072977 0.9001154 0.0998846442 1.0279285E-08 0 1 2 +1 0 0 0.15045297688295883 0.8603182 0.1396817 1.00128133E-07 0 1 2 +2 0 0 0.13461746792169346 0.8740502 0.12594974 5.544896E-08 0 1 2 +3 0 0 0.16018456513825421 0.8519865 0.1480132 2.45973467E-07 0 1 2 +4 0 0 0.10163504747526553 0.9033592 0.09664083 9.512855E-09 0 1 2 +5 0 0 0.087135297305318909 0.9165531 0.08344689 7.194732E-09 0 1 2 +6 0 0 0.12881272716605582 0.8791386 0.120861337 7.34464E-08 0 1 2 +7 0 0 0.12020947959674978 0.886734664 0.1132653 2.94635036E-08 0 1 2 +8 0 0 0.18126320476886801 0.83421576 0.1657837 5.45412831E-07 0 1 2 +9 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +10 0 0 0.089318764342498622 0.914554 0.08544598 3.523288E-09 0 1 2 +11 0 0 
0.13264827155619371 0.8757731 0.124226868 7.822249E-08 0 1 2 +12 0 0 0.15328973362849263 0.8578811 0.142118782 1.00022937E-07 0 1 2 +13 0 0 0.15323095619900862 0.857931554 0.142068356 1.04181886E-07 0 1 2 +14 0 0 0.057481053232275779 0.9441398 0.0558602326 1.140168E-10 0 1 2 +15 0 0 0.052653845383986382 0.948708355 0.0512916744 2.335356E-10 0 1 2 +16 0 0 0.072733745980773104 0.9298484 0.0701516 1.26047239E-09 0 1 2 +17 0 0 0.10587466502102688 0.8995374 0.100462623 1.34591636E-08 0 1 2 +18 0 0 0.084824480551934117 0.9186735 0.0813265 3.45837914E-09 0 1 2 +19 0 0 0.091700952027628138 0.912377954 0.0876220241 7.326582E-09 0 1 2 +20 0 0 0.11812077534840727 0.8885887 0.111411221 2.396447E-08 0 1 2 +21 0 0 0.098283485173918703 0.9063919 0.09360805 1.358749E-08 0 1 2 +22 0 0 0.095385487148060916 0.90902245 0.09097755 4.93118124E-09 0 1 2 +23 0 0 0.13988193670435536 0.8694609 0.130538926 1.726634E-07 0 1 2 +24 0 0 0.15291427062106341 0.8582033 0.141796425 2.92172246E-07 0 1 2 +25 0 0 0.16063310681505052 0.851604462 0.148395374 1.83643166E-07 0 1 2 +26 0 0 0.12759397101970846 0.8802107 0.11978925 7.834799E-08 0 1 2 +27 0 0 0.1070308001238898 0.898498 0.101502016 1.21276811E-08 0 1 2 +28 0 0 0.10894994409847403 0.8967753 0.103224695 1.11078133E-08 0 1 2 +29 0 0 0.15385148940418955 0.857399344 0.142600432 2.05544708E-07 0 1 2 +30 0 0 0.15925761030987443 0.852776647 0.1472231 2.22335842E-07 0 1 2 +31 0 0 0.10928146263621834 0.896478057 0.103521936 1.71705246E-08 0 1 2 +32 0 0 0.072791118439489405 0.929795 0.07020497 1.14868859E-09 0 1 2 +33 0 0 0.060183242535912201 0.941592 0.058408048 3.05011655E-10 0 1 2 +34 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +35 0 0 0.11911838542872015 0.8877027 0.112297252 1.59854281E-08 0 1 2 +36 0 0 0.090204603153647908 0.9137442 0.0862557739 2.25922059E-09 0 1 2 +37 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +38 0 0 0.16364290511877541 0.849045157 0.150954619 2.48957946E-07 0 1 2 +39 0 0 
0.11701286032744909 0.889573753 0.110426195 2.24934382E-08 0 1 2 +40 0 0 0.1041189537265977 0.9011181 0.0988818854 1.141039E-08 0 1 2 +41 0 0 0.2409593240414564 0.7858736 0.214123532 2.847107E-06 0 1 2 +42 0 0 0.1455323565302056 0.8645619 0.13543798 1.24459433E-07 0 1 2 +43 0 0 0.12169677790176237 0.8854168 0.1145831 9.505505E-08 0 1 2 +44 0 0 0.11106771630833344 0.894878149 0.1051218 5.51358532E-08 0 1 2 +45 0 0 0.15554666072988901 0.855947137 0.144052714 1.71912419E-07 0 1 2 +46 0 0 0.095315460789796344 0.9090861 0.09091391 8.649329E-09 0 1 2 +47 0 0 0.14432694269533022 0.8656047 0.134395167 1.12255542E-07 0 1 2 +48 0 0 0.091843575178599535 0.912247837 0.087752156 4.616522E-09 0 1 2 +49 0 0 0.12227116687678849 0.8849084 0.115091577 2.69719624E-08 0 1 2 +50 1 1 0.078831361330132368 0.924195766 0.0509617627 0.02484247 1 0 2 +51 1 1 0.1196506498131379 0.887230337 0.06149472 0.0512749478 1 2 0 +52 1 1 0.14434030142402013 0.865593135 0.113934696 0.0204721466 1 2 0 +53 1 1 0.4934820006305104 0.610496938 0.371178329 0.0183247533 1 2 0 +54 1 1 0.24425490805116146 0.783287942 0.198487073 0.0182249676 1 2 0 +55 1 1 0.49207912028628681 0.611354 0.374479532 0.0141664632 1 2 0 +56 1 1 0.19891189934063311 0.8196221 0.151094437 0.02928347 1 2 0 +57 1 1 0.22857161004907667 0.7956693 0.147004321 0.05732636 1 0 2 +58 1 1 0.11986027517911223 0.88704437 0.08104583 0.0319098048 1 2 0 +59 1 1 0.36570630064133169 0.6937065 0.265697747 0.0405957177 1 2 0 +60 1 1 0.36092040289187055 0.6970345 0.259449363 0.0435161777 1 2 0 +61 1 1 0.17705837022933765 0.8377309 0.10888046 0.05338865 1 2 0 +62 1 1 0.16838200919468641 0.845030963 0.117188893 0.03778013 1 2 0 +63 1 1 0.42812390574384268 0.651730657 0.335861862 0.0124074444 1 2 0 +64 1 1 0.22844793909500513 0.7957677 0.188083753 0.0161485374 1 0 2 +65 1 1 0.099649536788089965 0.9051546 0.07398548 0.0208599083 1 0 2 +66 1 1 0.5453565395266291 0.5796351 0.405610561 0.0147543559 1 2 0 +67 1 1 0.13984594672930761 0.8694922 0.06589201 0.06461582 1 
2 0 +68 1 2 0.7572861919735806 0.526890635 0.4689373 0.0041720774 2 1 0 +69 1 1 0.183017695190754 0.8327534 0.109402813 0.05784374 1 2 0 +70 1 2 0.73640284624029584 0.5139542 0.478833258 0.007212497 2 1 0 +71 1 1 0.13047042795082142 0.877682447 0.0879009 0.0344166756 1 0 2 +72 1 2 0.85715197211787608 0.5731456 0.424368978 0.00248538121 2 1 0 +73 1 1 0.37062883826437742 0.6903001 0.296230942 0.0134689622 1 2 0 +74 1 1 0.10740222974602938 0.898164332 0.0608370751 0.0409985669 1 0 2 +75 1 1 0.099088584274589248 0.9056625 0.0582326576 0.03610486 1 0 2 +76 1 1 0.2052441115809647 0.8144485 0.170464963 0.0150865708 1 2 0 +77 1 1 0.47976004541463291 0.6189319 0.3741432 0.006924921 1 2 0 +78 1 1 0.36081958960905797 0.697104752 0.284223527 0.0186717287 1 2 0 +79 1 1 0.23303488425015825 0.79212594 0.198442191 0.009431858 1 0 2 +80 1 1 0.2009401381302133 0.8179614 0.124288775 0.0577498041 1 2 0 +81 1 1 0.16485110862941516 0.848019958 0.08404041 0.06793966 1 0 2 +82 1 1 0.14609328821283002 0.8640771 0.0861562 0.0497667044 1 0 2 +83 1 2 1.0026545840412577 0.6316748 0.366904169 0.00142097753 2 1 0 +84 1 1 0.68502526450725787 0.5040775 0.484458417 0.0114640659 1 2 0 +85 1 1 0.17462188818465721 0.8397745 0.110411562 0.0498139337 1 2 0 +86 1 1 0.12483819546905675 0.8826397 0.0866358355 0.0307244845 1 2 0 +87 1 1 0.38610223888203354 0.67970103 0.308468759 0.0118302088 1 2 0 +88 1 1 0.17645790131635949 0.838234067 0.09166675 0.07009915 1 2 0 +89 1 1 0.35301563815054371 0.7025662 0.267733276 0.0297005326 1 2 0 +90 1 1 0.60666196289210772 0.5451676 0.443510771 0.011321608 1 2 0 +91 1 1 0.27313941699903715 0.7609867 0.21682857 0.0221847668 1 2 0 +92 1 1 0.16583954882677754 0.847182155 0.0972737148 0.0555441566 1 2 0 +93 1 1 0.216110318178794 0.8056464 0.133047938 0.061305657 1 0 2 +94 1 1 0.33904533886610982 0.712450147 0.2600237 0.02752616 1 2 0 +95 1 1 0.1608852461683882 0.851389766 0.08360588 0.0650043562 1 2 0 +96 1 1 0.20448474944375503 0.8150672 0.137656 0.0472768024 1 2 0 +97 1 1 
0.1266300196897841 0.8810596 0.06579876 0.0531416424 1 2 0 +98 1 1 0.37085267211427492 0.6901456 0.301162064 0.008692351 1 0 2 +99 1 1 0.19570795638070573 0.822252333 0.12653698 0.0512106866 1 2 0 +100 2 2 0.40655867620709907 0.665938 0.333874941 0.000187000172 2 1 0 +101 2 2 0.43796444518642835 0.6453487 0.353739828 0.000911408337 2 1 0 +102 2 2 0.40727129871168982 0.6654636 0.334205151 0.000331235438 2 1 0 +103 2 2 0.42867974653227631 0.6513685 0.348098725 0.000532808946 2 1 0 +104 2 2 0.41635889462312808 0.659443557 0.34027496 0.000281510758 2 1 0 +105 2 2 0.42038138990784635 0.6567963 0.343124479 7.92395E-05 2 1 0 +106 2 2 0.46017554613877809 0.631172836 0.3666074 0.00221975357 2 1 0 +107 2 2 0.42771902078488977 0.6519946 0.3478424 0.000162985671 2 1 0 +108 2 2 0.45942242941974976 0.631648362 0.3681335 0.0002181388 2 1 0 +109 2 2 0.35632063155674082 0.700248063 0.2993861 0.000365806918 2 1 0 +110 2 2 0.59813038533862872 0.549838662 0.446502864 0.003658446 2 1 0 +111 2 2 0.43582388496630797 0.6467316 0.352501541 0.000766820565 2 1 0 +112 2 2 0.41992964480905842 0.657093048 0.342131168 0.0007757888 2 1 0 +113 2 2 0.45203899927824553 0.636329353 0.3629379 0.0007327517 2 1 0 +114 2 2 0.42251257719321916 0.655398 0.343965828 0.0006361924 2 1 0 +115 2 2 0.39888443163474774 0.671068251 0.327796131 0.00113561912 2 1 0 +116 2 2 0.43433821762281899 0.647693157 0.351395071 0.0009117467 2 1 0 +117 2 2 0.35882090036153141 0.698499441 0.3012908 0.000209733931 2 1 0 +118 2 2 0.48012712133830948 0.618704736 0.381276459 1.8805069E-05 2 1 0 +119 2 2 0.47851707234635865 0.6197017 0.379401237 0.000897047052 2 1 0 +120 2 2 0.39042472023472213 0.6767694 0.3226851 0.0005455372 2 1 0 +121 2 2 0.42800437996383239 0.65180856 0.3467855 0.00140595378 2 1 0 +122 2 2 0.443412170952244 0.6418426 0.3581035 5.38630848E-05 2 1 0 +123 2 2 0.51604974472334419 0.5968737 0.400675178 0.002451097 2 1 0 +124 2 2 0.39974298414777737 0.670492351 0.3287836 0.000724044454 2 1 0 +125 2 2 
0.43554116923472252 0.6469145 0.3525337 0.0005518172 2 1 0 +126 2 2 0.57668850951089956 0.561755538 0.434410423 0.00383405667 2 1 0 +127 2 2 0.56072400509281628 0.570795655 0.425277829 0.003926514 2 1 0 +128 2 2 0.42931537169521405 0.6509546 0.34869352 0.00035188318 2 1 0 +129 2 2 0.51307680059299199 0.5986508 0.4004598 0.0008893604 2 1 0 +130 2 2 0.42609399915540624 0.653054953 0.346731 0.000214045882 2 1 0 +131 2 2 0.58811551010728713 0.5553729 0.4437172 0.0009098785 2 1 0 +132 2 2 0.42874407787955016 0.6513266 0.348355353 0.000318029517 2 1 0 +133 2 2 0.55828940663943993 0.572187 0.425267071 0.00254591252 2 1 0 +134 2 2 0.4642649883906762 0.628596961 0.3709423 0.0004607632 2 1 0 +135 2 2 0.39742590785588855 0.672047734 0.327726841 0.000225416516 2 1 0 +136 2 2 0.37475292580837244 0.6874591 0.311920583 0.000620306 2 1 0 +137 2 2 0.43104167040457064 0.649831831 0.349148631 0.00101951673 2 1 0 +138 2 2 0.58947794559538014 0.554616749 0.440451324 0.004931943 2 1 0 +139 2 2 0.47411410502523832 0.6224362 0.376182139 0.00138162647 2 1 0 +140 2 2 0.39041001226649513 0.6767793 0.322745562 0.0004751122 2 1 0 +141 2 2 0.57104908497416329 0.564932466 0.432308227 0.0027593167 2 1 0 +142 2 2 0.43796444518642835 0.6453487 0.353739828 0.000911408337 2 1 0 +143 2 2 0.39119688403383229 0.676247 0.323438883 0.000314130477 2 1 0 +144 2 2 0.37472258028119243 0.68748 0.3120606 0.0004594017 2 1 0 +145 2 2 0.43064486292504078 0.650089741 0.348618239 0.001292042 2 1 0 +146 2 2 0.45142977120547095 0.636717141 0.3621718 0.00111102767 2 1 0 +147 2 2 0.46269994854161944 0.6295815 0.3687677 0.00165078847 2 1 0 +148 2 2 0.38241482614892125 0.682212 0.3167025 0.00108551537 2 1 0 +149 2 2 0.44044916021338609 0.6437472 0.354472637 0.00178015162 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt index f71990cd92..60bf133c7c 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt +++ 
b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt @@ -33,35 +33,35 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 21 | 0 | 0 | 1.0000 - 1 || 0 | 26 | 4 | 0.8667 + 1 || 0 | 27 | 3 | 0.9000 2 || 0 | 0 | 28 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.8750 | -Accuracy(micro-avg): 0.949367 -Accuracy(macro-avg): 0.955556 -Log-loss: 0.343967 -Log-loss reduction: 68.371359 +Precision ||1.0000 |1.0000 |0.9032 | +Accuracy(micro-avg): 0.962025 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.366057 +Log-loss reduction: 66.340104 Confusion table ||======================== PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 29 | 0 | 0 | 1.0000 - 1 || 0 | 19 | 1 | 0.9500 + 1 || 0 | 18 | 2 | 0.9000 2 || 0 | 0 | 22 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9565 | -Accuracy(micro-avg): 0.985915 -Accuracy(macro-avg): 0.983333 -Log-loss: 0.277101 -Log-loss reduction: 74.475991 +Precision ||1.0000 |1.0000 |0.9167 | +Accuracy(micro-avg): 0.971831 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.336734 +Log-loss reduction: 68.983181 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.967641 (0.0183) -Accuracy(macro-avg): 0.969444 (0.0139) -Log-loss: 0.310534 (0.0334) -Log-loss reduction: 71.423675 (3.0523) +Accuracy(micro-avg): 0.966928 (0.0049) +Accuracy(macro-avg): 0.966667 (0.0000) +Log-loss: 0.351395 (0.0147) +Log-loss reduction: 67.661643 (1.3215) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt index 4ee447e298..d2d984758b 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt @@ -1,4 +1,4 @@ PKPD Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical 
Memory Virtual Memory Command Line Settings -0.967641 0.969444 0.310534 71.42368 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} +0.966928 0.966667 0.351395 67.66164 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt index 100ad1843e..b436ddf13d 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -5 0 0 0.023753880830600982 0.976526 0.0234627537 1.12415537E-05 0 1 2 -6 0 0 0.1302157926188138 0.877905965 0.122053728 4.028206E-05 0 1 2 -8 0 0 0.38835109531985795 0.6781742 0.321734548 9.125436E-05 0 1 2 -9 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 -10 0 0 0.043651773791107358 0.957287252 0.0427056365 7.135807E-06 0 1 2 -11 0 0 0.13521420369658141 0.8735288 0.126428857 4.23762431E-05 0 1 2 -18 0 0 0.028430623462091707 0.9719697 0.02802308 7.212694E-06 0 1 2 -20 0 0 0.10458121712711159 0.900701642 0.0992766 2.17436227E-05 0 1 2 -21 0 0 0.045363134495307413 0.9556504 0.0443333276 1.62607685E-05 0 1 2 -25 0 0 0.28906012724616753 0.7489672 0.250976235 5.657253E-05 0 1 2 -28 0 0 0.10227830625980082 0.902778268 0.0972085 1.322162E-05 0 1 2 -31 0 0 0.08562861703311947 0.9179351 0.08204699 1.79389826E-05 0 1 2 -32 0 0 0.017136477098684464 0.9830095 0.0169868153 3.65674259E-06 0 1 2 -35 0 0 0.16731981579854963 0.845929 0.1540563 1.46633747E-05 0 1 2 -37 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 -40 0 0 0.079660704938754975 0.9234296 0.0765565857 1.37818333E-05 0 1 2 -41 0 0 0.64555225289552742 0.5243729 0.475452363 0.0001747969 0 1 2 -44 0 0 
0.040687480851420016 0.960129142 0.0398315266 3.935343E-05 0 1 2 -45 0 0 0.27950273039157197 0.756159663 0.243786544 5.377483E-05 0 1 2 -46 0 0 0.040918754924550584 0.9599071 0.0400804244 1.24371918E-05 0 1 2 -48 0 0 0.046121164137562148 0.954926252 0.04506536 8.38812048E-06 0 1 2 -50 1 1 0.72887025154137375 0.482453734 0.272260427 0.245285824 1 0 2 -51 1 1 0.8088987568127185 0.445348233 0.336972356 0.2176794 1 2 0 -52 1 1 0.78136433840766417 0.457781017 0.438075066 0.104143932 1 2 0 -54 1 1 0.54403068907517893 0.5804041 0.36454007 0.0550558232 1 2 0 -56 1 2 1.1200462355861258 0.531909943 0.3262647 0.141825333 2 1 0 -60 1 1 0.23491005150903682 0.790641963 0.1784061 0.0309519637 1 2 0 -63 1 1 0.68939365760121074 0.5018803 0.458269477 0.0398502052 1 2 0 -64 1 1 0.4797019767355476 0.618967831 0.291831881 0.0892002955 1 0 2 -66 1 2 0.86155959099850343 0.5307835 0.422502637 0.04671388 2 1 0 -68 1 1 0.49420884947307447 0.61005336 0.3815175 0.008429126 1 2 0 -69 1 1 0.23145347728671642 0.7933796 0.145455226 0.061165195 1 2 0 -70 1 2 1.4851413739165595 0.736525655 0.226470321 0.03700401 2 1 0 -71 1 1 0.3561851307666371 0.700342953 0.164127111 0.13552995 1 0 2 -72 1 1 0.67173548994464938 0.5108213 0.4814403 0.007738397 1 2 0 -73 1 1 0.50612015658391962 0.602829933 0.3625389 0.034631148 1 2 0 -74 1 1 0.41484498646830853 0.66044265 0.18213135 0.157426015 1 2 0 -76 1 1 0.50271064477753691 0.6048888 0.345186323 0.0499248542 1 2 0 -77 1 2 1.0467776624316247 0.6128986 0.3510672 0.0360342041 2 1 0 -79 1 1 0.25594672318320155 0.7741832 0.183944046 0.0418727621 1 0 2 -82 1 1 0.28889429116536303 0.7490914 0.130277559 0.120631076 1 2 0 -88 1 1 0.52394753213635203 0.5921783 0.2563273 0.151494384 1 2 0 -90 1 1 0.45035315262659448 0.637403 0.3411746 0.0214223787 1 2 0 -91 1 1 0.68738585386581597 0.502889 0.425832123 0.07127889 1 2 0 -92 1 1 0.2697499931611898 0.763570368 0.162871167 0.0735584348 1 2 0 -93 1 1 0.17823538741474004 0.836745441 0.0821188241 0.08113571 1 2 0 -95 1 1 
0.48391980144761892 0.616362631 0.241359085 0.1422783 1 2 0 -96 1 1 0.47873437290577431 0.619567037 0.281721354 0.09871157 1 2 0 -97 1 1 0.4309836113144338 0.649869561 0.220002741 0.130127683 1 2 0 -98 1 1 0.27001090521188814 0.76337117 0.202765256 0.0338635966 1 0 2 -99 1 1 0.40366256068064504 0.667869449 0.2403792 0.09175138 1 2 0 -100 2 2 0.098423959135663375 0.9062646 0.09183126 0.001904126 2 1 0 -102 2 2 0.18784362467414789 0.8287443 0.16809994 0.00315576326 2 1 0 -104 2 2 0.20305260298998334 0.8162353 0.181571469 0.00219323137 2 1 0 -105 2 2 0.16738689651485589 0.8458723 0.153207168 0.000920576451 2 1 0 -106 2 2 0.58800293393782621 0.5554354 0.439967334 0.00459726434 2 1 0 -108 2 2 0.4731297912724482 0.6230492 0.37598455 0.0009662311 2 1 0 -109 2 2 0.035500468049352432 0.9651223 0.0299819969 0.00489571551 2 1 0 -111 2 2 0.39672088491078095 0.6725217 0.3237172 0.00376107777 2 1 0 -112 2 2 0.20709995070378101 0.8129384 0.180399537 0.00666203769 2 1 0 -113 2 2 0.49689364308238115 0.6084177 0.3893122 0.002270126 2 1 0 -115 2 2 0.12645623913049378 0.8812127 0.109346546 0.009440767 2 1 0 -117 2 2 0.025087766630295084 0.9752243 0.02114348 0.00363221765 2 1 0 -120 2 2 0.11093517828288008 0.894996762 0.09939285 0.005610376 2 1 0 -121 2 2 0.37719650665097176 0.6857813 0.3084725 0.00574616855 2 1 0 -122 2 2 0.25512580405327051 0.774819 0.224642664 0.0005383256 2 1 0 -123 2 2 0.53470400837092702 0.585842669 0.4030093 0.0111480216 2 1 0 -125 2 2 0.15798263383297981 0.8538646 0.139957428 0.00617795158 2 1 0 -128 2 2 0.30543205932131445 0.736804962 0.2611207 0.00207435014 2 1 0 -129 2 2 0.31304921188329871 0.7312139 0.2608706 0.00791547 2 1 0 -131 2 2 0.050064073835890829 0.9511685 0.03442935 0.01440218 2 1 0 -132 2 2 0.29000752749814246 0.748257935 0.249804661 0.00193742709 2 1 0 -133 2 2 0.57582423964286222 0.562241256 0.4260323 0.0117264669 2 1 0 -137 2 2 0.23369102254431959 0.791606367 0.2005785 0.007815139 2 1 0 -138 2 2 0.38238249988924056 0.682234049 0.29368493 
0.0240810253 2 1 0 -141 2 2 0.18873995406873867 0.8280018 0.150254741 0.0217434336 2 1 0 -144 2 2 0.080216932325286969 0.9229161 0.0723539 0.00473001366 2 1 0 -145 2 2 0.18503665462933813 0.8310738 0.158534229 0.0103919385 2 1 0 -147 2 2 0.25370059016834895 0.7759241 0.212361827 0.0117140962 2 1 0 -0 0 0 0.10211748577393479 0.902923465 0.09707638 1.31251113E-07 0 1 2 -1 0 0 0.18414005419872645 0.8318193 0.1681801 6.10566246E-07 0 1 2 -2 0 0 0.14492385261258312 0.865088165 0.134911478 3.69817315E-07 0 1 2 -3 0 0 0.18018182361642154 0.835118353 0.164880455 1.1808728E-06 0 1 2 -4 0 0 0.091854748075686457 0.912237644 0.08776226 1.22251592E-07 0 1 2 -7 0 0 0.12233785238606552 0.88484937 0.115150325 2.89829131E-07 0 1 2 -12 0 0 0.18838937109230286 0.828292131 0.1717073 5.786816E-07 0 1 2 -13 0 0 0.17483938525704146 0.839591861 0.1604077 4.46853E-07 0 1 2 -14 0 0 0.045135070620344352 0.955868363 0.0441316478 5.3407363E-09 0 1 2 -15 0 0 0.030612994376980014 0.969850838 0.0301491246 1.0891493E-08 0 1 2 -16 0 0 0.055112338614887763 0.9463788 0.05362115 3.08450865E-08 0 1 2 -17 0 0 0.10062993461057677 0.9042676 0.09573224 1.63700875E-07 0 1 2 -19 0 0 0.073413324188812371 0.9292167 0.07078323 1.13846653E-07 0 1 2 -22 0 0 0.080101141800871592 0.923023 0.07697697 5.50810348E-08 0 1 2 -23 0 0 0.14576182189759379 0.864363551 0.135635108 1.32143555E-06 0 1 2 -24 0 0 0.15527358603878322 0.8561809 0.143817171 1.903017E-06 0 1 2 -26 0 0 0.12564253084337382 0.881930053 0.118069261 6.645889E-07 0 1 2 -27 0 0 0.10587439997529158 0.8995376 0.100462213 1.60874819E-07 0 1 2 -29 0 0 0.16779715917645741 0.8455253 0.154473513 1.14837974E-06 0 1 2 -30 0 0 0.18480739195523371 0.8312644 0.168734416 1.2252525E-06 0 1 2 -33 0 0 0.04032543480505283 0.9604768 0.03952316 1.14692389E-08 0 1 2 -34 0 0 0.17552640171742001 0.839015245 0.160984188 5.622917E-07 0 1 2 -36 0 0 0.09204810470455442 0.9120613 0.0879386961 4.5151662E-08 0 1 2 -38 0 0 0.18661279637066258 0.829764962 0.170234054 9.896429E-07 0 1 2 
-39 0 0 0.12071723979290006 0.88628453 0.113715246 2.43832517E-07 0 1 2 -42 0 0 0.15054831358788412 0.860236168 0.139763221 6.199213E-07 0 1 2 -43 0 0 0.10889597554551136 0.8968237 0.103175469 8.1666E-07 0 1 2 -47 0 0 0.15393456680529674 0.8573281 0.142671227 6.39328334E-07 0 1 2 -49 0 0 0.13066902060601812 0.877508163 0.122491583 2.52864339E-07 0 1 2 -53 1 1 0.2827592626915541 0.7537012 0.21389237 0.03240643 1 2 0 -55 1 1 0.40583815374571869 0.666418 0.308232874 0.0253491253 1 2 0 -57 1 1 0.22836674855401806 0.795832336 0.173427463 0.0307402182 1 0 2 -58 1 1 0.17705481273015428 0.837733865 0.126026779 0.0362393446 1 2 0 -59 1 1 0.28623948897938623 0.7510827 0.179181576 0.06973568 1 2 0 -61 1 1 0.23046176000020194 0.7941668 0.132671311 0.07316189 1 2 0 -62 1 1 0.13173560241334381 0.8765727 0.08075007 0.04267718 1 2 0 -65 1 1 0.14168260820038769 0.8678967 0.07845409 0.05364923 1 0 2 -67 1 1 0.15281502745602504 0.858288467 0.07580672 0.06590483 1 0 2 -75 1 1 0.14692167558386063 0.8633616 0.0731101856 0.06352824 1 2 0 -78 1 1 0.36772458144318781 0.69230783 0.278533429 0.0291587356 1 2 0 -80 1 1 0.16590807826044235 0.8471241 0.07986628 0.0730095953 1 2 0 -81 1 1 0.15418537033222618 0.8571131 0.09555078 0.0473361239 1 0 2 -83 1 2 0.87913156238340517 0.5822821 0.415143281 0.00257462286 2 1 0 -84 1 1 0.60329559653785991 0.547005951 0.428067416 0.0249266215 1 2 0 -85 1 1 0.30763923553748801 0.7351805 0.189976588 0.07484292 1 2 0 -86 1 1 0.22077802043432937 0.801894665 0.161515683 0.0365896225 1 2 0 -87 1 1 0.26521281231157245 0.7670427 0.216186985 0.0167703368 1 2 0 -89 1 1 0.24918952543906914 0.779432237 0.173652634 0.0469151475 1 2 0 -94 1 1 0.27882348553441511 0.756673455 0.19962126 0.04370527 1 2 0 -101 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 -103 2 2 0.46298590319892402 0.6294015 0.369625777 0.000972708163 2 1 0 -107 2 2 0.47875168972271287 0.6195563 0.380170017 0.000273669633 2 1 0 -110 2 2 0.50353121321209937 0.604392648 0.38902545 
0.00658191368 2 1 0 -114 2 2 0.42627427461954132 0.652937233 0.345770031 0.00129272381 2 1 0 -116 2 2 0.46330187402047324 0.629202664 0.369202226 0.00159509375 2 1 0 -118 2 2 0.54017556235004138 0.582645953 0.4173134 4.062326E-05 2 1 0 -119 2 2 0.59238374503353441 0.5530075 0.445467323 0.00152520277 2 1 0 -124 2 2 0.37578192391690202 0.6867521 0.311931521 0.0013163907 2 1 0 -126 2 2 0.65538868877846068 0.5192402 0.4731488 0.007610987 2 1 0 -127 2 2 0.58488203770280534 0.5571716 0.4347405 0.008087933 2 1 0 -130 2 2 0.48483716808745803 0.61579746 0.383864969 0.0003375506 2 1 0 -134 2 2 0.53564556246675443 0.5852913 0.4138755 0.0008331971 2 1 0 -135 2 2 0.4222335993197745 0.6555809 0.344069272 0.0003498588 2 1 0 -136 2 2 0.3225183821825246 0.7243226 0.2744004 0.00127698807 2 1 0 -139 2 2 0.45570650730743484 0.6339999 0.3636862 0.002313912 2 1 0 -140 2 2 0.38195300929629561 0.6825271 0.316598237 0.00087465957 2 1 0 -142 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 -143 2 2 0.37769091414905154 0.6854423 0.313964784 0.0005928655 2 1 0 -146 2 2 0.54054677541576512 0.5824297 0.415693641 0.00187665562 2 1 0 -148 2 2 0.33311820419531579 0.7166855 0.281080544 0.00223395228 2 1 0 -149 2 2 0.47519601475299389 0.62176317 0.374690771 0.00354602537 2 1 0 +5 0 0 0.042901834728573146 0.9580054 0.0419840477 1.05340787E-05 0 1 2 +6 0 0 0.083706488880306434 0.919701159 0.08026548 3.335983E-05 0 1 2 +8 0 0 0.15648858149565906 0.8551413 0.144762635 9.60744146E-05 0 1 2 +9 0 0 0.11740370169687092 0.889226139 0.110738449 3.540015E-05 0 1 2 +10 0 0 0.049963314146599096 0.9512643 0.04872921 6.479303E-06 0 1 2 +11 0 0 0.089614043688354766 0.914284 0.085680455 3.558085E-05 0 1 2 +18 0 0 0.045030317118332754 0.9559685 0.0440244265 7.0445376E-06 0 1 2 +20 0 0 0.080502881922273103 0.922652245 0.07732772 2.00166778E-05 0 1 2 +21 0 0 0.053251321578678124 0.9481417 0.0518441126 1.42192039E-05 0 1 2 +25 0 0 0.1345256835252219 0.8741304 0.125810727 5.882482E-05 0 1 2 +28 0 0 
0.071694176117578209 0.9308155 0.06917247 1.20015538E-05 0 1 2 +31 0 0 0.070443013528733611 0.931980848 0.06800209 1.70340463E-05 0 1 2 +32 0 0 0.032654422158073904 0.967873 0.0321238 3.193805E-06 0 1 2 +35 0 0 0.085718229139385371 0.9178528 0.08213324 1.393153E-05 0 1 2 +37 0 0 0.11740370169687092 0.889226139 0.110738449 3.540015E-05 0 1 2 +40 0 0 0.062928870664993405 0.939010262 0.06097769 1.20330114E-05 0 1 2 +41 0 0 0.26464229432497571 0.767480433 0.232282773 0.000236815 0 1 2 +44 0 0 0.059370648640626932 0.9423574 0.0576089844 3.36135163E-05 0 1 2 +45 0 0 0.12572539265519678 0.881857 0.118088 5.50402838E-05 0 1 2 +46 0 0 0.050943143901380036 0.9503327 0.04965677 1.0502069E-05 0 1 2 +48 0 0 0.051390686845431854 0.9499075 0.0500850454 7.45635452E-06 0 1 2 +50 1 1 0.3736753570058497 0.6882003 0.17439957 0.137400135 1 2 0 +51 1 1 0.46974941198892683 0.6251589 0.245875 0.1289661 1 2 0 +52 1 1 0.48996150579033221 0.61265 0.3297582 0.05759184 1 2 0 +54 1 1 0.5115137576811869 0.599587262 0.353570044 0.04684268 1 2 0 +56 1 1 0.60702971635293523 0.5449672 0.3745032 0.0805296 1 2 0 +60 1 1 0.36506630072943136 0.6941506 0.230054215 0.07579513 1 2 0 +63 1 1 0.61679568772755644 0.539670944 0.423799872 0.0365292132 1 2 0 +64 1 1 0.51079160996051487 0.6000204 0.311807573 0.08817205 1 0 2 +66 1 1 0.71014226716045414 0.491574258 0.463784844 0.04464092 1 2 0 +68 1 1 0.67782447270478297 0.507720351 0.479092419 0.0131872464 1 2 0 +69 1 1 0.35969209704682265 0.6978912 0.195766345 0.106342494 1 2 0 +70 1 2 0.94663324211722399 0.5851442 0.388045281 0.0268104747 2 1 0 +71 1 1 0.37710811805711025 0.6858419 0.171295851 0.142862245 1 0 2 +72 1 2 0.77788692984780239 0.531407952 0.459375679 0.0092163505 2 1 0 +73 1 1 0.52786345281280167 0.5898639 0.372436285 0.03769979 1 2 0 +74 1 1 0.36466649795555972 0.6944282 0.171437979 0.134133831 1 2 0 +76 1 1 0.4706543421646936 0.624593437 0.3352478 0.0401587524 1 2 0 +77 1 2 0.7535727724729181 0.5064204 0.4706819 0.0228977185 2 1 0 +79 1 1 
0.42207386769765121 0.6556856 0.289230019 0.055084385 1 0 2 +82 1 1 0.37196204518518855 0.6893804 0.158601031 0.152018562 1 0 2 +88 1 1 0.46796960461157472 0.626272559 0.226254344 0.147473127 1 2 0 +90 1 1 0.57981657513372886 0.5600011 0.406342536 0.033656422 1 2 0 +91 1 1 0.55044769976504981 0.576691568 0.364665627 0.0586428121 1 2 0 +92 1 1 0.37206623633705027 0.6893086 0.203397572 0.107293814 1 2 0 +93 1 1 0.36403179497279287 0.6948691 0.1855017 0.119629189 1 0 2 +95 1 1 0.43682545951919971 0.6460842 0.214487135 0.139428675 1 2 0 +96 1 1 0.46570722322654323 0.627691031 0.26823175 0.10407722 1 2 0 +97 1 1 0.39291303807962741 0.675087452 0.207266659 0.117645919 1 2 0 +98 1 1 0.54273254198757837 0.581158042 0.372920245 0.0459217168 1 0 2 +99 1 1 0.44123101916283403 0.6432441 0.249400109 0.107355788 1 2 0 +100 2 2 0.33617401944991809 0.714498758 0.2845078 0.0009934271 2 1 0 +102 2 2 0.39689975338292421 0.6724014 0.325897127 0.00170145172 2 1 0 +104 2 2 0.39234380670092406 0.675471842 0.323201925 0.00132622325 2 1 0 +105 2 2 0.41028614985925072 0.6634604 0.336062819 0.000476817338 2 1 0 +106 2 2 0.5132441832999014 0.5985506 0.395236254 0.00621313741 2 1 0 +108 2 2 0.49929150642931341 0.606960535 0.392131835 0.0009075964 2 1 0 +109 2 2 0.26646094488200939 0.7660859 0.231548309 0.002365779 2 1 0 +111 2 2 0.46694178172420903 0.6269166 0.37000373 0.003079707 2 1 0 +112 2 2 0.40654775668194076 0.6659453 0.3302957 0.00375901931 2 1 0 +113 2 2 0.47945423566250328 0.6191212 0.378457546 0.002421245 2 1 0 +115 2 2 0.35276463261077462 0.7027426 0.291719645 0.00553779537 2 1 0 +117 2 2 0.26515686478225903 0.7670856 0.231291756 0.00162261887 2 1 0 +120 2 2 0.34688731344682866 0.706885 0.290222138 0.00289288373 2 1 0 +121 2 2 0.43965283179672848 0.644260049 0.3507073 0.005032635 2 1 0 +122 2 2 0.45541567136355199 0.6341843 0.365496635 0.000319047016 2 1 0 +123 2 2 0.54552139173103675 0.579539537 0.410656959 0.009803501 2 1 0 +125 2 2 0.41249593491283987 0.6619959 0.334856242 
0.003147891 2 1 0 +128 2 2 0.42919597809006471 0.6510323 0.347469151 0.0014985021 2 1 0 +129 2 2 0.50220576404796058 0.6051943 0.390145928 0.00465982826 2 1 0 +131 2 2 0.29930157249559664 0.7413358 0.253664881 0.00499929441 2 1 0 +132 2 2 0.42241536257386447 0.6554617 0.343174279 0.00136402 2 1 0 +133 2 2 0.58981556575484373 0.554429531 0.434764743 0.0108057242 2 1 0 +137 2 2 0.42743776430605873 0.652178 0.342929065 0.00489296857 2 1 0 +138 2 2 0.53872416162362391 0.5834922 0.397239983 0.0192677956 2 1 0 +141 2 2 0.40652439645884153 0.665960848 0.32259953 0.0114396559 2 1 0 +144 2 2 0.31126961975707773 0.732516348 0.2650514 0.00243223668 2 1 0 +145 2 2 0.39667976209482653 0.672549367 0.3213434 0.00610723253 2 1 0 +147 2 2 0.44466626159250777 0.6410382 0.351262271 0.007699582 2 1 0 +0 0 0 0.18759581342652074 0.8289497 0.1710501 1.9547646E-07 0 1 2 +1 0 0 0.21987628671839457 0.8026181 0.197380453 1.43798934E-06 0 1 2 +2 0 0 0.2038049585744301 0.815621436 0.184377745 8.43562248E-07 0 1 2 +3 0 0 0.2236521678622588 0.7995932 0.200403824 2.96490521E-06 0 1 2 +4 0 0 0.18330310696789939 0.8325158 0.167484045 1.79656709E-07 0 1 2 +7 0 0 0.19967126091978882 0.818999946 0.180999577 4.76393581E-07 0 1 2 +12 0 0 0.21914588210495925 0.803204536 0.196794033 1.406126E-06 0 1 2 +13 0 0 0.20770898542680327 0.812443435 0.187555075 1.46583784E-06 0 1 2 +14 0 0 0.14854109570902546 0.8619646 0.138035417 4.3592463E-09 0 1 2 +15 0 0 0.14513911963444476 0.86490196 0.13509801 7.44877537E-09 0 1 2 +16 0 0 0.16241624648537462 0.8500873 0.149912685 3.33129044E-08 0 1 2 +17 0 0 0.18903730098766516 0.82775563 0.172244146 2.49378132E-07 0 1 2 +19 0 0 0.17785962794139024 0.8370599 0.162939966 1.41271613E-07 0 1 2 +22 0 0 0.1699063130036905 0.843743861 0.15625605 1.06977211E-07 0 1 2 +23 0 0 0.2245420938262514 0.798881948 0.201115757 2.27329838E-06 0 1 2 +24 0 0 0.229204736236894 0.7951657 0.204831034 3.225305E-06 0 1 2 +26 0 0 0.20940154601520181 0.8110695 0.188929364 1.11967393E-06 0 1 2 +27 0 0 
0.19162167579232847 0.825619161 0.17438063 2.234952E-07 0 1 2 +29 0 0 0.22241379486318399 0.800584 0.1994135 2.49890059E-06 0 1 2 +30 0 0 0.22817293653384596 0.7959866 0.20401068 2.7336057E-06 0 1 2 +33 0 0 0.1498761619987731 0.8608146 0.139185429 9.332131E-09 0 1 2 +34 0 0 0.21777818951761163 0.8043038 0.195694983 1.18367143E-06 0 1 2 +36 0 0 0.17861934062632651 0.836424232 0.1635757 5.6025E-08 0 1 2 +38 0 0 0.21971722850899228 0.802745759 0.1972512 3.06361426E-06 0 1 2 +39 0 0 0.19852240288511083 0.8199414 0.180058211 3.809114E-07 0 1 2 +42 0 0 0.20729030754644143 0.812783659 0.187214673 1.64959692E-06 0 1 2 +43 0 0 0.20694320482416118 0.8130658 0.186932817 1.34293532E-06 0 1 2 +47 0 0 0.21072198499042374 0.8099992 0.189999253 1.51110225E-06 0 1 2 +49 0 0 0.19999502697398505 0.8187348 0.181264728 4.53224345E-07 0 1 2 +53 1 1 0.66911039918186277 0.512164 0.455997 0.0318390019 1 2 0 +55 1 1 0.58505952859509458 0.5570727 0.417532742 0.0253945347 1 2 0 +57 1 1 0.37984692367562606 0.6839661 0.2027571 0.1132768 1 0 2 +58 1 1 0.1751224720380232 0.8393542 0.120186314 0.04045944 1 2 0 +59 1 1 0.52831240938040969 0.589599133 0.346245944 0.0641549453 1 2 0 +61 1 1 0.26188627198696379 0.769598544 0.160627723 0.0697737262 1 2 0 +62 1 1 0.28652910933785641 0.7508652 0.193148091 0.0559866764 1 2 0 +65 1 1 0.12089803023097732 0.8861243 0.07507464 0.0388010442 1 0 2 +67 1 1 0.22276578883221054 0.800302267 0.111182526 0.08851518 1 2 0 +75 1 1 0.13613517691306831 0.872724652 0.0644540042 0.0628213137 1 0 2 +78 1 1 0.45097416965240078 0.6370073 0.3337904 0.02920233 1 2 0 +80 1 1 0.3382419257011689 0.713022768 0.2016004 0.08537684 1 2 0 +81 1 1 0.27941823305216318 0.756223559 0.126084387 0.117692038 1 2 0 +83 1 2 1.022795857870648 0.6375606 0.359588176 0.002851213 2 1 0 +84 1 2 0.7625488488996448 0.511201739 0.466475934 0.0223223064 2 1 0 +85 1 1 0.23335036702118436 0.7918761 0.145803913 0.06232002 1 2 0 +86 1 1 0.17283040277952735 0.8412803 0.121625617 0.0370940939 1 2 0 +87 1 1 
0.50874982273809755 0.6012468 0.3793505 0.0194027033 1 2 0 +89 1 1 0.5094391481829712 0.600832462 0.351426542 0.04774102 1 2 0 +94 1 1 0.46648105509590337 0.6272055 0.3286032 0.04419127 1 2 0 +101 2 2 0.4153786841345754 0.660090268 0.338023067 0.00188670226 2 1 0 +103 2 2 0.42346376004261183 0.6547749 0.344130754 0.001094352 2 1 0 +107 2 2 0.43830355758261946 0.6451299 0.354542017 0.000328052876 2 1 0 +110 2 2 0.64043030720854799 0.5270656 0.46593073 0.00700371573 2 1 0 +114 2 2 0.40350630322747794 0.6679738 0.330664366 0.00136183063 2 1 0 +116 2 2 0.4558091754861342 0.6339348 0.364266962 0.00179821951 2 1 0 +118 2 2 0.46175810750647578 0.630174756 0.3697757 4.95243366E-05 2 1 0 +119 2 2 0.43781178515434294 0.645447254 0.35272187 0.00183087389 2 1 0 +124 2 2 0.44417506838660581 0.64135313 0.357245624 0.00140123651 2 1 0 +126 2 2 0.57398343636358973 0.5632772 0.4290593 0.007663534 2 1 0 +127 2 2 0.57237577260674821 0.5641835 0.427727461 0.008089082 2 1 0 +130 2 2 0.44368643858948603 0.6416666 0.357922435 0.000410959037 2 1 0 +134 2 2 0.43117898943914462 0.6497426 0.349251479 0.00100588414 2 1 0 +135 2 2 0.44319173399404627 0.6419841 0.3576087 0.000407162734 2 1 0 +136 2 2 0.40260894274567183 0.6685735 0.33020252 0.00122395344 2 1 0 +139 2 2 0.52407476588656432 0.592102945 0.405261934 0.002635112 2 1 0 +140 2 2 0.41465548039366562 0.6605678 0.338525534 0.000906673 2 1 0 +142 2 2 0.4153786841345754 0.660090268 0.338023067 0.00188670226 2 1 0 +143 2 2 0.41418493871989953 0.6608787 0.3385007 0.000620570441 2 1 0 +146 2 2 0.44112047883688321 0.6433152 0.354572773 0.00211200817 2 1 0 +148 2 2 0.41909754913633196 0.65764004 0.3402519 0.00210807845 2 1 0 +149 2 2 0.44896079794147509 0.6382911 0.358130246 0.0035786368 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt index c319ba3d49..e4ceccca5e 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt +++ 
b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt @@ -19,21 +19,21 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 50 | 0 | 0 | 1.0000 - 1 || 0 | 45 | 5 | 0.9000 - 2 || 0 | 1 | 49 | 0.9800 + 1 || 0 | 47 | 3 | 0.9400 + 2 || 0 | 0 | 50 | 1.0000 ||======================== -Precision ||1.0000 |0.9783 |0.9074 | -Accuracy(micro-avg): 0.960000 -Accuracy(macro-avg): 0.960000 -Log-loss: 0.293938 -Log-loss reduction: 73.244600 +Precision ||1.0000 |1.0000 |0.9434 | +Accuracy(micro-avg): 0.980000 +Accuracy(macro-avg): 0.980000 +Log-loss: 0.272667 +Log-loss reduction: 75.180769 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.960000 (0.0000) -Accuracy(macro-avg): 0.960000 (0.0000) -Log-loss: 0.293938 (0.0000) -Log-loss reduction: 73.244600 (0.0000) +Accuracy(micro-avg): 0.980000 (0.0000) +Accuracy(macro-avg): 0.980000 (0.0000) +Log-loss: 0.272667 (0.0000) +Log-loss reduction: 75.180769 (0.0000) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt index b8bbf72cd9..05d350bd96 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt @@ -1,4 +1,4 @@ PKPD Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.96 0.96 0.293938 73.2446 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} +0.98 0.98 0.272667 75.18077 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} diff --git 
a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt index abaafba303..0e29a4e2ff 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -0 0 0 0.10709083790126951 0.898444057 0.101555951 1.09862652E-08 0 1 2 -1 0 0 0.14972937943085041 0.860940933 0.139058977 1.03412368E-07 0 1 2 -2 0 0 0.13145025814375275 0.8768229 0.123177059 6.145917E-08 0 1 2 -3 0 0 0.15528019966440448 0.856175244 0.143824473 2.62822567E-07 0 1 2 -4 0 0 0.10252830274490825 0.9025526 0.09744736 1.04376809E-08 0 1 2 -5 0 0 0.090053147743181336 0.9138826 0.08611736 8.101242E-09 0 1 2 -6 0 0 0.12357314644074496 0.883757 0.116242908 8.74405E-08 0 1 2 -7 0 0 0.12105140488742108 0.8859884 0.114011578 3.08569952E-08 0 1 2 -8 0 0 0.17274984965939472 0.841348052 0.158651352 5.97416147E-07 0 1 2 -9 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -10 0 0 0.093967949932628358 0.910311937 0.08968807 3.63372E-09 0 1 2 -11 0 0 0.13108575869963462 0.877142549 0.122857377 8.249825E-08 0 1 2 -12 0 0 0.15257914743793716 0.858490944 0.141508967 9.911802E-08 0 1 2 -13 0 0 0.14535615625152426 0.864714265 0.1352856 1.173372E-07 0 1 2 -14 0 0 0.06340435568818982 0.9385639 0.0614361 1.239382E-10 0 1 2 -15 0 0 0.056683708917569194 0.9448929 0.0551071428 2.807663E-10 0 1 2 -16 0 0 0.075170002932813201 0.9275858 0.07241419 1.54777513E-09 0 1 2 -17 0 0 0.10666806288443777 0.898824 0.101176038 1.5170011E-08 0 1 2 -18 0 0 0.091241664716928128 0.9127971 0.0872029141 3.50579832E-09 0 1 2 -19 0 0 0.092742902606664304 0.9114278 0.08857219 8.39613445E-09 0 1 2 -20 0 0 0.12374770832324052 0.883602738 0.116397209 2.285497E-08 0 1 2 -21 0 0 0.098256721071890374 0.9064162 0.093583785 1.62108957E-08 0 1 2 -22 0 0 0.092301570249172765 0.9118301 0.08816987 6.23606855E-09 0 1 2 
-23 0 0 0.1380404335556189 0.8710635 0.12893635 2.00292092E-07 0 1 2 -24 0 0 0.15120025555722924 0.8596755 0.1403242 2.922401E-07 0 1 2 -25 0 0 0.16166208373075255 0.850728631 0.14927116 1.80764E-07 0 1 2 -26 0 0 0.12599234088657019 0.8816216 0.118378319 8.963828E-08 0 1 2 -27 0 0 0.11003318759164467 0.8958044 0.104195595 1.25205668E-08 0 1 2 -28 0 0 0.11184598225922286 0.894181967 0.105818 1.1564893E-08 0 1 2 -29 0 0 0.1505769302299601 0.860211551 0.139788255 2.15031562E-07 0 1 2 -30 0 0 0.15733441851850752 0.8544183 0.145581514 2.27139253E-07 0 1 2 -31 0 0 0.11219313224767015 0.8938716 0.1061284 1.89737328E-08 0 1 2 -32 0 0 0.076296562873109264 0.9265414 0.0734586 1.21595978E-09 0 1 2 -33 0 0 0.06454049206643403 0.937498152 0.06250186 3.385217E-10 0 1 2 -34 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -35 0 0 0.11957675391026261 0.8872959 0.112704061 1.73822077E-08 0 1 2 -36 0 0 0.095551393128538914 0.908871651 0.09112834 2.340219E-09 0 1 2 -37 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -38 0 0 0.15557611714722408 0.8559219 0.144077763 2.80571015E-07 0 1 2 -39 0 0 0.11898397051842637 0.887822032 0.112177923 2.32430128E-08 0 1 2 -40 0 0 0.10383152796029192 0.901377141 0.0986228138 1.3316324E-08 0 1 2 -41 0 0 0.23267843022808721 0.792408347 0.2075885 3.16824639E-06 0 1 2 -42 0 0 0.13825641388336071 0.870875359 0.129124492 1.43657843E-07 0 1 2 -43 0 0 0.11780844245896904 0.8888663 0.11113359 1.22518585E-07 0 1 2 -44 0 0 0.11106878201188827 0.8948772 0.105122752 6.136807E-08 0 1 2 -45 0 0 0.1519822314490463 0.859003544 0.140996248 1.90226359E-07 0 1 2 -46 0 0 0.097403275899291555 0.9071901 0.09280992 9.201566E-09 0 1 2 -47 0 0 0.13975224176627257 0.869573653 0.130426213 1.23595186E-07 0 1 2 -48 0 0 0.095640521579255233 0.908790648 0.09120932 4.824783E-09 0 1 2 -49 0 0 0.12297630490939666 0.8842846 0.115715332 2.84802919E-08 0 1 2 -50 1 1 0.072140726875920644 0.930399954 0.05064142 0.018958617 1 0 2 -51 1 1 
0.12052363925502865 0.886456132 0.0577918626 0.0557519831 1 2 0 -52 1 1 0.11754125629409608 0.88910383 0.0897584558 0.0211377256 1 2 0 -53 1 1 0.55234194460144326 0.5756002 0.407715857 0.01668393 1 2 0 -54 1 1 0.22033072960732095 0.8022534 0.179187492 0.018559115 1 2 0 -55 1 1 0.48471715245043789 0.61587137 0.369235158 0.0148934675 1 2 0 -56 1 1 0.19649325323392461 0.8216069 0.144622579 0.03377054 1 2 0 -57 1 1 0.23907513650017656 0.7873557 0.144207269 0.06843699 1 0 2 -58 1 1 0.10057641320176978 0.904316 0.06448696 0.03119705 1 2 0 -59 1 1 0.4382277992649205 0.6451788 0.3130982 0.0417229943 1 2 0 -60 1 1 0.40378849464830396 0.667785347 0.294686764 0.0375279263 1 2 0 -61 1 1 0.19328713066283934 0.8242453 0.117469594 0.058285147 1 2 0 -62 1 1 0.14347483184211016 0.8663426 0.101038948 0.0326184332 1 2 0 -63 1 1 0.39254042807911971 0.675339043 0.3112865 0.013374473 1 2 0 -64 1 1 0.24625037004233435 0.7817265 0.199262485 0.01901104 1 0 2 -65 1 1 0.096962049648401036 0.907590449 0.07445438 0.0179551709 1 0 2 -66 1 1 0.58127624794437671 0.559184253 0.424495757 0.01631998 1 2 0 -67 1 1 0.12717766992503393 0.8805772 0.06211802 0.0573047921 1 0 2 -68 1 2 0.76418847827461567 0.5308523 0.4657117 0.003435955 2 1 0 -69 1 1 0.17854309398479379 0.836488 0.108136833 0.05537514 1 2 0 -70 1 2 0.7782997543015937 0.532311857 0.459186077 0.008502028 2 1 0 -71 1 1 0.12963404144590399 0.878416836 0.08763818 0.0339449868 1 0 2 -72 1 2 0.82527364026390571 0.5597655 0.4381151 0.00211936235 2 1 0 -73 1 1 0.31236741131512197 0.731712639 0.2543256 0.0139617641 1 2 0 -74 1 1 0.10091901291460656 0.904006243 0.0600393079 0.03595447 1 0 2 -75 1 1 0.094227600597530603 0.9100756 0.0583470054 0.0315773822 1 0 2 -76 1 1 0.16139006589793164 0.8509601 0.13421455 0.0148253879 1 2 0 -77 1 1 0.42271312925322674 0.6552666 0.336988866 0.00774457539 1 2 0 -78 1 1 0.36102952185045717 0.6969584 0.28298 0.0200616084 1 2 0 -79 1 1 0.22191911287049176 0.800980151 0.189535379 0.009484478 1 0 2 -80 1 1 
0.20087047707276379 0.8180184 0.127419531 0.05456211 1 2 0 -81 1 1 0.15758181984651309 0.8542069 0.07838817 0.0674049258 1 0 2 -82 1 1 0.14551884398560022 0.8645736 0.08538146 0.0500449426 1 0 2 -83 1 2 1.0041035759831962 0.6325462 0.3663729 0.00108090381 2 1 0 -84 1 2 0.75476486164367673 0.517363548 0.470121145 0.0125153111 2 1 0 -85 1 1 0.1936571586064853 0.823940337 0.11649999 0.05955967 1 2 0 -86 1 1 0.1117176732744818 0.8942967 0.07367582 0.032027483 1 2 0 -87 1 1 0.35151004083460119 0.7036248 0.285515 0.0108601972 1 2 0 -88 1 1 0.18955293586722866 0.8273289 0.09654 0.0761311054 1 2 0 -89 1 1 0.38389360242414333 0.6812039 0.289862335 0.0289337616 1 2 0 -90 1 1 0.62618827467785798 0.534625769 0.454234719 0.0111395335 1 2 0 -91 1 1 0.25387959125212917 0.7757852 0.200217441 0.02399734 1 2 0 -92 1 1 0.16226915412183221 0.850212336 0.09567342 0.0541142225 1 2 0 -93 1 1 0.22196815331677711 0.8009409 0.127181739 0.07187738 1 0 2 -94 1 1 0.35124306791543597 0.703812659 0.268056452 0.02813091 1 2 0 -95 1 1 0.16237179398556273 0.8501251 0.08078788 0.06908702 1 2 0 -96 1 1 0.20996992406982243 0.8106086 0.139289573 0.05010183 1 2 0 -97 1 1 0.12058893069745849 0.886398256 0.0598800667 0.05372166 1 2 0 -98 1 1 0.37956731249976106 0.6841574 0.304465353 0.01137725 1 0 2 -99 1 1 0.20277814400339583 0.816459358 0.130446717 0.0530939251 1 2 0 -100 2 2 0.39358357371715191 0.674634933 0.32519117 0.00017387759 2 1 0 -101 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 -102 2 2 0.41936704076824455 0.657462835 0.342273951 0.00026323882 2 1 0 -103 2 2 0.42921904997620342 0.6510173 0.348579019 0.000403696467 2 1 0 -104 2 2 0.41208274684418961 0.6622695 0.337504864 0.00022567909 2 1 0 -105 2 2 0.44160572409172411 0.6430031 0.3569401 5.67683346E-05 2 1 0 -106 2 2 0.42934476448549436 0.6509355 0.347463518 0.00160101533 2 1 0 -107 2 2 0.45168489764494285 0.6365547 0.363331944 0.000113324531 2 1 0 -108 2 2 0.46645653711885576 0.627220869 0.372643322 0.000135785289 2 1 0 
-109 2 2 0.35953291146180011 0.6980023 0.301614881 0.000382813538 2 1 0 -110 2 2 0.60830855615290891 0.5442707 0.451300323 0.004428956 2 1 0 -111 2 2 0.43425567366017515 0.6477466 0.35168618 0.0005671874 2 1 0 -112 2 2 0.42224814643798159 0.655571342 0.343755931 0.000672725844 2 1 0 -113 2 2 0.43378520195992787 0.648051441 0.351433426 0.000515127 2 1 0 -114 2 2 0.39828542770724013 0.671470344 0.328003943 0.00052572944 2 1 0 -115 2 2 0.38044631650760474 0.683556259 0.3153522 0.00109154719 2 1 0 -116 2 2 0.44214870680472357 0.642654061 0.3565694 0.0007765472 2 1 0 -117 2 2 0.39204321311235024 0.6756749 0.3240996 0.000225500859 2 1 0 -118 2 2 0.49534872735975943 0.6093584 0.390629977 1.165714E-05 2 1 0 -119 2 2 0.47792706875518332 0.6200674 0.379401267 0.0005313261 2 1 0 -120 2 2 0.38996755329866611 0.677078843 0.322423726 0.0004974509 2 1 0 -121 2 2 0.40418317857084346 0.667521834 0.331344754 0.00113342493 2 1 0 -122 2 2 0.46674185713969735 0.627041936 0.372923285 3.47993519E-05 2 1 0 -123 2 2 0.50860202326236292 0.601335645 0.396473944 0.00219042762 2 1 0 -124 2 2 0.40115690949067428 0.669545 0.3297638 0.0006912321 2 1 0 -125 2 2 0.47796417410708669 0.6200444 0.379430741 0.000524864765 2 1 0 -126 2 2 0.55674263926678424 0.573072731 0.422982275 0.00394499162 2 1 0 -127 2 2 0.53745603926068541 0.5842326 0.4114353 0.004332072 2 1 0 -128 2 2 0.42435197720848 0.6541936 0.3455436 0.000262821937 2 1 0 -129 2 2 0.59539649422184571 0.5513439 0.447734416 0.000921661733 2 1 0 -130 2 2 0.45136920579127138 0.6367557 0.363094628 0.000149650616 2 1 0 -131 2 1 0.87799286296834833 0.58302784 0.415616274 0.00135587773 1 2 0 -132 2 2 0.4217459387360496 0.655900657 0.343858719 0.0002406203 2 1 0 -133 2 2 0.58858419496856473 0.55511266 0.442362428 0.00252491026 2 1 0 -134 2 2 0.46954434981246995 0.6252871 0.3744188 0.000294059661 2 1 0 -135 2 2 0.41926206364690544 0.657531857 0.3422878 0.000180329866 2 1 0 -136 2 2 0.35983106354881655 0.6977942 0.301589876 0.000615945552 2 1 0 -137 2 2 
0.43598868576509842 0.646625042 0.3524654 0.000909582246 2 1 0 -138 2 2 0.55146544914408413 0.576104939 0.418423772 0.00547132 2 1 0 -139 2 2 0.47595285901721079 0.62129277 0.377271771 0.00143543689 2 1 0 -140 2 2 0.38358127985275575 0.6814167 0.318166882 0.000416446419 2 1 0 -141 2 2 0.55824128131859518 0.572214544 0.4245338 0.00325164315 2 1 0 -142 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 -143 2 2 0.39101453828348787 0.6763703 0.32335642 0.000273291429 2 1 0 -144 2 2 0.36601283135925683 0.6934939 0.306069136 0.0004369896 2 1 0 -145 2 2 0.41354127957626624 0.661304235 0.337492228 0.00120350742 2 1 0 -146 2 2 0.44489957920965528 0.640888631 0.358320177 0.000791185652 2 1 0 -147 2 2 0.45420191711312169 0.6349545 0.363470852 0.00157464272 2 1 0 -148 2 2 0.36235400852316502 0.6960359 0.302856565 0.00110750983 2 1 0 -149 2 2 0.42624816689431871 0.6529543 0.3454746 0.00157109811 2 1 0 +0 0 0 0.10104000891558908 0.903896868 0.0961031 7.488518E-09 0 1 2 +1 0 0 0.14638353214498703 0.863826334 0.136173636 5.1798505E-08 0 1 2 +2 0 0 0.1283544437118587 0.8795416 0.120458394 3.08966719E-08 0 1 2 +3 0 0 0.15434957050199044 0.8569724 0.14302747 1.21021856E-07 0 1 2 +4 0 0 0.096795646835533725 0.9077415 0.09225848 7.11907022E-09 0 1 2 +5 0 0 0.082328633275609589 0.920969248 0.07903077 7.635004E-09 0 1 2 +6 0 0 0.12045209925290842 0.886519551 0.113480389 4.64870524E-08 0 1 2 +7 0 0 0.11605228917489584 0.890428662 0.109571308 1.93115284E-08 0 1 2 +8 0 0 0.17410254265902697 0.840210736 0.159789056 2.30003792E-07 0 1 2 +9 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +10 0 0 0.086479346599929008 0.9171545 0.08284551 3.07338577E-09 0 1 2 +11 0 0 0.12775338798466579 0.8800704 0.119929567 4.74386574E-08 0 1 2 +12 0 0 0.15036907978730446 0.860390365 0.139609575 4.71005E-08 0 1 2 +13 0 0 0.14562357098244386 0.864483058 0.135516867 4.49310065E-08 0 1 2 +14 0 0 0.054979518813470657 0.946504533 0.05349546 1.40412862E-10 0 1 2 +15 0 0 
0.048704298424504931 0.952462733 0.04753725 3.692506E-10 0 1 2 +16 0 0 0.067260639592286578 0.9349515 0.06504852 1.4465189E-09 0 1 2 +17 0 0 0.10037197724394951 0.9045009 0.0954990759 1.04363931E-08 0 1 2 +18 0 0 0.082536533834129983 0.9207778 0.07922219 3.5612675E-09 0 1 2 +19 0 0 0.086233524701821371 0.91738 0.08262004 6.584751E-09 0 1 2 +20 0 0 0.11691155601423082 0.8896639 0.110336132 1.701226E-08 0 1 2 +21 0 0 0.091647579743620436 0.912426651 0.08757332 1.2294044E-08 0 1 2 +22 0 0 0.087599273238671213 0.9161279 0.08387205 3.566754E-09 0 1 2 +23 0 0 0.13253155638908179 0.8758753 0.124124587 1.29247553E-07 0 1 2 +24 0 0 0.14941269327639764 0.8612136 0.138786182 1.68698662E-07 0 1 2 +25 0 0 0.1582994624330345 0.8535941 0.1464058 9.44424556E-08 0 1 2 +26 0 0 0.12078415803856805 0.8862252 0.113774747 5.72726E-08 0 1 2 +27 0 0 0.10368361459150026 0.9015105 0.0984895155 8.918391E-09 0 1 2 +28 0 0 0.10545352972030293 0.8999163 0.100083686 7.876811E-09 0 1 2 +29 0 0 0.14886573737670886 0.8616848 0.138315111 1.08243654E-07 0 1 2 +30 0 0 0.15525082157761474 0.8562004 0.143799469 1.14021653E-07 0 1 2 +31 0 0 0.10444827879709964 0.9008214 0.0991786 1.43583634E-08 0 1 2 +32 0 0 0.069636357350842421 0.93273294 0.06726707 1.110497E-09 0 1 2 +33 0 0 0.056951459880491904 0.9446399 0.05536007 3.6885342E-10 0 1 2 +34 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +35 0 0 0.11424442200129405 0.8920399 0.107960112 9.937101E-09 0 1 2 +36 0 0 0.087529269528966488 0.916192055 0.08380793 1.889867E-09 0 1 2 +37 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +38 0 0 0.15576561995295157 0.85575974 0.144240141 1.13207719E-07 0 1 2 +39 0 0 0.11339071420829476 0.892801762 0.107198223 1.5183268E-08 0 1 2 +40 0 0 0.097824714741614124 0.90680784 0.09319213 8.766067E-09 0 1 2 +41 0 0 0.23410689248439309 0.7912772 0.208721831 9.598655E-07 0 1 2 +42 0 0 0.13732206665130264 0.871689439 0.1283105 6.345321E-08 0 1 2 +43 0 0 0.11186897963522327 0.8941614 
0.105838537 8.346816E-08 0 1 2 +44 0 0 0.10529789300522154 0.900056362 0.09994357 4.90767036E-08 0 1 2 +45 0 0 0.14905521801941385 0.861521542 0.138478383 9.212065E-08 0 1 2 +46 0 0 0.091279342891190621 0.9127627 0.08723732 7.162675E-09 0 1 2 +47 0 0 0.1378151957858233 0.8712597 0.128740251 5.954E-08 0 1 2 +48 0 0 0.088571634321459605 0.915237546 0.08476245 3.910685E-09 0 1 2 +49 0 0 0.11794819886458666 0.8887421 0.111257881 1.70392749E-08 0 1 2 +50 1 1 0.081209289486757055 0.9220007 0.04854119 0.029458113 1 0 2 +51 1 1 0.12866724127240567 0.8792665 0.0700123459 0.05072113 1 2 0 +52 1 1 0.15317350216907719 0.857980847 0.122576281 0.01944289 1 2 0 +53 1 1 0.32451954100807251 0.7228746 0.255729884 0.0213955659 1 2 0 +54 1 1 0.22103486276558745 0.801688731 0.1801593 0.018151952 1 2 0 +55 1 1 0.37419956812496991 0.6878396 0.296539336 0.0156210242 1 2 0 +56 1 1 0.22857827717944601 0.795664 0.175655216 0.0286807753 1 2 0 +57 1 1 0.19916289482341393 0.8194164 0.152322426 0.02826115 1 0 2 +58 1 1 0.1063902456719211 0.8990737 0.0701745749 0.0307517052 1 2 0 +59 1 1 0.29092696307238658 0.7475703 0.2072915 0.04513823 1 2 0 +60 1 1 0.18803891098121719 0.828582466 0.120945193 0.05047233 1 2 0 +61 1 1 0.17555318460619629 0.8389928 0.106392682 0.05461454 1 2 0 +62 1 1 0.10109658863590253 0.9038457 0.05832019 0.0378341079 1 2 0 +63 1 1 0.35751522850321965 0.699412048 0.287616372 0.01297156 1 2 0 +64 1 1 0.23270731497449093 0.792385459 0.193651378 0.0139631759 1 0 2 +65 1 1 0.10042898048975613 0.904449344 0.07210191 0.0234487578 1 0 2 +66 1 1 0.50046914869790193 0.6062462 0.378073484 0.0156803057 1 2 0 +67 1 1 0.1089625726286635 0.896764 0.06373632 0.0394997 1 0 2 +68 1 1 0.63447197276801259 0.5302154 0.46525687 0.004527755 1 2 0 +69 1 1 0.13171479543296832 0.876590967 0.06407214 0.05933687 1 2 0 +70 1 2 0.83290977262594312 0.558413148 0.434782326 0.00680455053 2 1 0 +71 1 1 0.12403842013034277 0.8833459 0.08760595 0.029048143 1 0 2 +72 1 2 0.78071021520697825 0.539567947 
0.45808056 0.00235148682 2 1 0 +73 1 1 0.26216381029753316 0.769385 0.216382757 0.0142322574 1 2 0 +74 1 1 0.10090819981572396 0.904016 0.0594188645 0.03656511 1 0 2 +75 1 1 0.098780230652794412 0.9059418 0.05678719 0.037271034 1 0 2 +76 1 1 0.17661200292438084 0.8381049 0.14724794 0.014647142 1 2 0 +77 1 1 0.49709606267321277 0.608294547 0.385036558 0.006668892 1 2 0 +78 1 1 0.32465156022402802 0.722779155 0.257891983 0.0193288662 1 2 0 +79 1 1 0.22550988164835631 0.7981092 0.195976883 0.005913923 1 0 2 +80 1 1 0.1389190171826217 0.8702985 0.0696324259 0.06006905 1 2 0 +81 1 1 0.12784238539838114 0.879992068 0.0846114755 0.03539645 1 0 2 +82 1 1 0.13014830814989062 0.8779652 0.08652696 0.03550784 1 0 2 +83 1 2 0.97105425937888956 0.6202549 0.3786836 0.00106150063 2 1 0 +84 1 1 0.64076940168252139 0.5268869 0.460860282 0.0122528346 1 2 0 +85 1 1 0.20375190476499336 0.8156647 0.134265348 0.0500699468 1 2 0 +86 1 1 0.13263656539712049 0.8757833 0.09452618 0.0296905078 1 2 0 +87 1 1 0.24777251189508356 0.7805375 0.2066182 0.0128443064 1 2 0 +88 1 1 0.15989525404777322 0.852233052 0.0756609738 0.07210598 1 2 0 +89 1 1 0.24698619977615285 0.7811515 0.185964838 0.0328837 1 2 0 +90 1 1 0.41986134281607856 0.65713793 0.329622746 0.01323931 1 2 0 +91 1 1 0.24168899231074675 0.7853004 0.192192927 0.0225067027 1 2 0 +92 1 1 0.12954685205423969 0.8784934 0.06508461 0.056421984 1 2 0 +93 1 1 0.18237708602274647 0.83328706 0.137341574 0.0293713361 1 0 2 +94 1 1 0.25156542087626454 0.7775826 0.1925942 0.0298232343 1 2 0 +95 1 1 0.13971467998896822 0.8696063 0.06581085 0.06458283 1 0 2 +96 1 1 0.17168074450908882 0.842248 0.108960845 0.0487911142 1 2 0 +97 1 1 0.11489745057641973 0.891457558 0.05599328 0.05254919 1 2 0 +98 1 1 0.3779311207940162 0.6852777 0.309432238 0.00529004168 1 0 2 +99 1 1 0.16229109741496545 0.8501937 0.09691363 0.0528927 1 2 0 +100 2 2 0.39443484405146978 0.6740609 0.3258114 0.00012772638 2 1 0 +101 2 2 0.4352277688797892 0.647117257 0.3522631 0.0006196466 
2 1 0 +102 2 2 0.40761852687857175 0.6652326 0.334541261 0.000226154822 2 1 0 +103 2 2 0.43460641750346979 0.647519469 0.352121562 0.000358995982 2 1 0 +104 2 2 0.41317523057189731 0.66154635 0.33826533 0.000188354912 2 1 0 +105 2 2 0.43554144564522806 0.6469143 0.353034973 5.072583E-05 2 1 0 +106 2 2 0.45750820880118187 0.632858634 0.365612477 0.00152889371 2 1 0 +107 2 2 0.44622295891202246 0.640041053 0.359854043 0.000104909741 2 1 0 +108 2 2 0.47264277844659991 0.6233527 0.37651068 0.0001366027 2 1 0 +109 2 2 0.33415974887088012 0.7159394 0.283796668 0.000263924216 2 1 0 +110 2 2 0.48470805507816545 0.615877 0.3809107 0.00321232 2 1 0 +111 2 2 0.43818687368346548 0.6452052 0.3542619 0.000532875 2 1 0 +112 2 2 0.40582294900584642 0.666428149 0.3330102 0.0005616462 2 1 0 +113 2 2 0.44628805630319068 0.6399994 0.3595179 0.000482706848 2 1 0 +114 2 2 0.40388721797545668 0.6677194 0.331844 0.000436590431 2 1 0 +115 2 2 0.36892849159540098 0.691474855 0.3076841 0.0008410496 2 1 0 +116 2 2 0.43612474984448363 0.646537066 0.352793843 0.0006690618 2 1 0 +117 2 2 0.34956071258049848 0.7049977 0.294855028 0.000147259445 2 1 0 +118 2 2 0.49580602117459666 0.6090798 0.390908957 1.12794305E-05 2 1 0 +119 2 2 0.5010768375091309 0.6058779 0.393525 0.0005971396 2 1 0 +120 2 2 0.37305187680517504 0.6886295 0.310981363 0.000389128429 2 1 0 +121 2 2 0.41706976410191654 0.658974946 0.3400311 0.0009939764 2 1 0 +122 2 2 0.46398985361366846 0.628769934 0.371196777 3.33121025E-05 2 1 0 +123 2 2 0.51852277206219199 0.595399439 0.4024652 0.00213534082 2 1 0 +124 2 2 0.38329117846085392 0.6816144 0.3178601 0.0005255363 2 1 0 +125 2 2 0.43366656102208012 0.648128331 0.351467848 0.000403808546 2 1 0 +126 2 2 0.56925127061263225 0.565949 0.430288017 0.00376297347 2 1 0 +127 2 2 0.52548236233338275 0.5912701 0.4049958 0.00373412925 2 1 0 +128 2 2 0.42798362216044067 0.6518221 0.347944975 0.000232911741 2 1 0 +129 2 2 0.52354520155204987 0.5924166 0.4068511 0.000732291839 2 1 0 +130 2 2 
0.44040981017782477 0.643772542 0.3560869 0.000140576958 2 1 0 +131 2 2 0.44523128335715673 0.6406761 0.358631045 0.0006928492 2 1 0 +132 2 2 0.42486506730523199 0.653858 0.3459313 0.000210723374 2 1 0 +133 2 2 0.60184142333607371 0.547802 0.449737847 0.00246017659 2 1 0 +134 2 2 0.4849108299783475 0.6157521 0.383954018 0.000293882535 2 1 0 +135 2 2 0.39443908851825471 0.674058 0.3257882 0.000153778965 2 1 0 +136 2 2 0.35418947324267314 0.701742 0.297810316 0.000447696162 2 1 0 +137 2 2 0.42977843142555361 0.650653243 0.348588884 0.00075789704 2 1 0 +138 2 2 0.54197530692752183 0.5815983 0.4136512 0.004750538 2 1 0 +139 2 2 0.42892098754456459 0.6512114 0.347700328 0.001088271 2 1 0 +140 2 2 0.37551325171827621 0.6869366 0.312728733 0.000334628654 2 1 0 +141 2 2 0.42866501404653645 0.6513781 0.346361071 0.00226086657 2 1 0 +142 2 2 0.4352277688797892 0.647117257 0.3522631 0.0006196466 2 1 0 +143 2 2 0.38428882589639007 0.6809347 0.318848133 0.000217100533 2 1 0 +144 2 2 0.35562893581316218 0.7007326 0.298938572 0.000328857132 2 1 0 +145 2 2 0.39169244368937323 0.675911963 0.323115468 0.000972554262 2 1 0 +146 2 2 0.4544126826514176 0.6348207 0.364385724 0.0007935546 2 1 0 +147 2 2 0.4375766068676662 0.645599067 0.3530911 0.00130985165 2 1 0 +148 2 2 0.35398182110350701 0.7018877 0.297308266 0.0008040049 2 1 0 +149 2 2 0.43712837931120685 0.6458885 0.352742374 0.00136915036 2 1 0 diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index 9d4a2bc010..959f316111 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -716,8 +716,9 @@ public void KMeans() KMeansModelParameters pred = null; var est = reader.MakeNewEstimator() - .Append(r => (label: r.label.ToKey(), r.features)) - .Append(r => (r.label, r.features, preds: env.Clustering.Trainers.KMeans(r.features, clustersCount: 3, onFit: p => pred = p, advancedSettings: s => 
s.NumThreads = 1))); + .AppendCacheCheckpoint() + .Append(r => (label: r.label.ToKey(), r.features)) + .Append(r => (r.label, r.features, preds: env.Clustering.Trainers.KMeans(r.features, clustersCount: 3, onFit: p => pred = p, advancedSettings: s => s.NumThreads = 1))); var pipe = reader.Append(est); diff --git a/test/Microsoft.ML.Tests/CachingTests.cs b/test/Microsoft.ML.Tests/CachingTests.cs index ef77eab19f..42c85cce41 100644 --- a/test/Microsoft.ML.Tests/CachingTests.cs +++ b/test/Microsoft.ML.Tests/CachingTests.cs @@ -78,5 +78,25 @@ public void CacheTest() data.GetColumn(ML, "Features").ToArray(); Assert.True(src.All(x => x.AccessCount == 1)); } + + [Fact] + public void StaticDataCacheTest() + { + var env = new MLContext(seed: 0); + var dataPath = GetDataPath(TestDatasets.breastCancer.trainFilename); + var dataSource = new MultiFileSource(dataPath); + + var reader = TextLoader.CreateReader(env, + c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9))); + + var data = reader.Read(dataSource); + + var cachedData = data.Cache(); + + // Before caching, we are not able to shuffle the data. + Assert.True(data.AsDynamic.CanShuffle == false); + // After caching, we are able to shuffle the data! 
+ Assert.True(cachedData.AsDynamic.CanShuffle == true); + } } } diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs index 9229c55a08..7cbd452851 100644 --- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs +++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs @@ -71,9 +71,10 @@ public void TestFeatureImportance() var srcDV = bldr.GetDataView(); var pipeline = ML.Transforms.Concatenate("Features", "X1", "X2Important", "X3", "X4Rand") + .AppendCacheCheckpoint(ML) .Append(ML.Transforms.Normalize("Features")); var data = pipeline.Fit(srcDV).Transform(srcDV); - var model = ML.Regression.Trainers.OnlineGradientDescent().Fit(data); + var model = ML.Regression.Trainers.OrdinaryLeastSquares().Fit(data); var args = new FeatureContributionCalculationTransform.Arguments() { Bottom = 10, @@ -85,19 +86,20 @@ public void TestFeatureImportance() var enumerator = output.AsEnumerable(Env, true).GetEnumerator(); ScoreAndContribution row = null; var expectedValues = new List(); - expectedValues.Add(new float[4] { 0.15640761F, 1, 0.155862764F, 0.07276783F }); - expectedValues.Add(new float[4] { 0.09507586F, 1, 0.1835608F, 0.0437548943F }); - expectedValues.Add(new float[4] { 0.297142357F, 1, 0.2855884F, 0.193529665F }); - expectedValues.Add(new float[4] { 0.45465675F, 0.8805887F, 0.4031663F, 1 }); - expectedValues.Add(new float[4] { 0.0595234372F, 0.99999994F, 0.349647522F, 0.137912869F }); + expectedValues.Add(new float[4] { 0.06319684F, 1, 0.1386623F, 4.46209469E-06F }); + expectedValues.Add(new float[4] { 0.03841561F, 1, 0.1633037F, 2.68303256E-06F }); + expectedValues.Add(new float[4] { 0.12006103F, 1, 0.254072F, 1.18671605E-05F }); + expectedValues.Add(new float[4] { 0.20861618F, 0.99999994F, 0.407312155F, 6.963478E-05F }); + expectedValues.Add(new float[4] { 0.024050576F, 0.99999994F, 0.31106182F, 8.456762E-06F }); int index = 0; while (enumerator.MoveNext() && index < expectedValues.Count) { row = 
enumerator.Current; - Assert.True(row.FeatureContributions[0] == expectedValues[index][0]); - Assert.True(row.FeatureContributions[1] == expectedValues[index][1]); - Assert.True(row.FeatureContributions[2] == expectedValues[index][2]); - Assert.True(row.FeatureContributions[3] == expectedValues[index++][3]); + // We set predicion to 6 because the limit of floating-point numbers is 7. + Assert.Equal(expectedValues[index][0], row.FeatureContributions[0], 6); + Assert.Equal(expectedValues[index][1], row.FeatureContributions[1], 6); + Assert.Equal(expectedValues[index][2], row.FeatureContributions[2], 6); + Assert.Equal(expectedValues[index++][3], row.FeatureContributions[3], 6); } Done(); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs index 6c5f99591a..104855c2a9 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs @@ -114,10 +114,24 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); + // Sometime, caching data in-memory after its first access can save some loading time when the data is going to used + // several times somewhere. The caching mechanism is also lazy; it only caches things after being used. + // User can replace all the subsequently uses of "trainData" with "cachedTrainData". We still use "trainData" because + // a caching step, which provides the same caching function, will be inserted in the considered "learningPipeline." + var cachedTrainData = trainData.Cache(); + // Step two: define the learning pipeline. // We 'start' the pipeline with the output of the reader. 
var learningPipeline = reader.MakeNewEstimator() + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. + // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. This step can be removed if user doesn't have enough memory to store the whole + // data set. + .AppendCacheCheckpoint() // Now we can add any 'training steps' to it. In our case we want to 'normalize' the data (rescale to be // between -1 and 1 for all examples), and then train the model. .Append(r => ( @@ -185,6 +199,13 @@ private ITransformer TrainOnIris(string irisDataPath) r.Label, // Concatenate all the features together into one column 'Features'. Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth))) + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. + // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. @@ -267,6 +288,8 @@ private void TrainAndInspectWeights(string dataPath) // When the normalizer is trained, the below delegate is going to be called. // We use it to memorize the scales. 
onFit: (scales, offsets) => normScales = scales))) + // Cache data used in memory because the subsequently trainer needs to access the data multiple times. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. @@ -386,6 +409,7 @@ public void TrainOnAutoGeneratedData() var dynamicLearningPipeline = mlContext.Transforms.Categorical.OneHotEncoding("DemographicCategory") .Append(new ColumnConcatenatingEstimator (mlContext, "Features", "DemographicCategory", "LastVisits")) + .AppendCacheCheckpoint(mlContext) // FastTree will benefit from caching data in memory. .Append(mlContext.BinaryClassification.Trainers.FastTree("HasChurned", "Features", numTrees: 20)); var dynamicModel = dynamicLearningPipeline.Fit(trainData); @@ -402,6 +426,7 @@ public void TrainOnAutoGeneratedData() .Append(r => ( r.HasChurned, Features: r.DemographicCategory.OneHotEncoding().ConcatWith(r.LastVisits))) + .AppendCacheCheckpoint() // FastTree will benefit from caching data in memory. .Append(r => mlContext.BinaryClassification.Trainers.FastTree(r.HasChurned, r.Features, numTrees: 20)); var staticModel = staticLearningPipeline.Fit(staticData); @@ -432,6 +457,10 @@ private void TextFeaturizationOn(string dataPath) // Apply various kinds of text operations supported by ML.NET. var learningPipeline = reader.MakeNewEstimator() + // Cache data in memory in an on-demand manner. Columns used in any downstream step will be + // cached in memory at their first uses. This step can be removed if user's machine doesn't + // have enough memory. + .AppendCacheCheckpoint() .Append(r => ( // One-stop shop to run the full text featurization. TextFeatures: r.Message.FeaturizeText(), @@ -495,6 +524,10 @@ private void CategoricalFeaturizationOn(params string[] dataPath) // Build several alternative featurization pipelines. var learningPipeline = reader.MakeNewEstimator() + // Cache data in memory in an on-demand manner. 
Columns used in any downstream step will be + // cached in memory at their first uses. This step can be removed if user's machine doesn't + // have enough memory. + .AppendCacheCheckpoint() .Append(r => ( r.Label, r.NumericalFeatures, @@ -562,6 +595,9 @@ private void CrossValidationOn(string dataPath) Label: r.Label.ToKey(), // Concatenate all the features together into one column 'Features'. Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth))) + // Add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs index 6807faffc5..1c14a5b158 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs @@ -111,6 +111,12 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); + // Sometime, caching data in-memory after its first access can save some loading time when the data is going to be used + // several times somewhere. The caching mechanism is also lazy; it only caches things after being used. + // User can replace all the subsequently uses of "trainData" with "cachedTrainData". We still use "trainData" because + // a caching step, which provides the same caching function, will be inserted in the considered "dynamicPipeline." + var cachedTrainData = mlContext.Data.Cache(trainData); + // Step two: define the learning pipeline. 
// We 'start' the pipeline with the output of the reader. @@ -118,6 +124,15 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m // First 'normalize' the data (rescale to be // between -1 and 1 for all examples), and then train the model. mlContext.Transforms.Normalize("FeatureVector") + // We add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. Otherwise, the following + // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy. + // The data accessed in any downstream step will be cached since its first use. In general, you only + // need to add a caching step before trainable step, because caching is not helpful if the data is + // only scanned once. This step can be removed if user doesn't have enough memory to store the whole + // data set. Notice that in the upstream Transforms.Normalize step, we only scan through the data + // once so adding a caching step before it is not helpful. + .AppendCacheCheckpoint(mlContext) // Add the SDCA regression trainer. .Append(mlContext.Regression.Trainers.StochasticDualCoordinateAscent(labelColumn: "Target", featureColumn: "FeatureVector")); @@ -179,6 +194,8 @@ private ITransformer TrainOnIris(string irisDataPath) mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") // Note that the label is text, so it needs to be converted to key. .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest) + // Cache data in moemory for steps after the cache check point stage. + .AppendCacheCheckpoint(mlContext) // Use the multi-class SDCA model to predict the label using features. .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent()) // Apply the inverse conversion from 'PredictedLabel' column back to string value. 
@@ -397,6 +414,9 @@ private void CategoricalFeaturizationOn(params string[] dataPath) var fullLearningPipeline = dynamicPipeline // Concatenate two of the 3 categorical pipelines, and the numeric features. .Append(mlContext.Transforms.Concatenate("Features", "NumericalFeatures", "CategoricalBag", "WorkclassOneHotTrimmed")) + // Cache data in memory so that the following trainer will be able to access training examples without + // reading them from disk multiple times. + .AppendCacheCheckpoint(mlContext) // Now we're ready to train. We chose our FastTree trainer for this classification task. .Append(mlContext.BinaryClassification.Trainers.FastTree(numTrees: 50)); @@ -440,6 +460,10 @@ private void CrossValidationOn(string dataPath) mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") // Note that the label is text, so it needs to be converted to key. .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest) + // Cache data in memory so that SDCA trainer will be able to randomly access training examples without + // reading data from disk multiple times. Data will be cached at its first use in any downstream step. + // Notice that unused part in the data may not be cached. + .AppendCacheCheckpoint(mlContext) // Use the multi-class SDCA model to predict the label using features. .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent()); @@ -541,8 +565,12 @@ private static void RunEndToEnd(MLContext mlContext, IDataView trainData, string var estimator = mlContext.Transforms.CustomMapping(CustomMappings.IncomeMapping, nameof(CustomMappings.IncomeMapping)) .Append(mlContext.BinaryClassification.Trainers.FastTree(labelColumn: "Label")); + // If memory is enough, we can cache the data in-memory to avoid reading them from file + // when it will be accessed multiple times. + var cachedTrainData = mlContext.Data.Cache(trainData); + // Train the model. 
- var model = estimator.Fit(trainData); + var model = estimator.Fit(cachedTrainData); // Save the model. using (var fs = File.Create(modelPath)) @@ -575,6 +603,7 @@ public static ITransformer TrainModel(MLContext mlContext, IDataView trainData) Action mapping = (input, output) => output.Label = input.Income > 50000; // Construct the learning pipeline. var estimator = mlContext.Transforms.CustomMapping(mapping, null) + .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.FastTree(labelColumn: "Label")); return estimator.Fit(trainData); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs index d0ba9a82db..daf2777047 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs @@ -38,6 +38,7 @@ public void New_IntrospectiveTraining() .Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") + .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); // Train. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs index 4f52662d34..f2285b5200 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs @@ -31,6 +31,7 @@ void New_MultithreadedPrediction() // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") + .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); // Train. 
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs index 5755efe56e..ffda5b1ce1 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs @@ -32,7 +32,7 @@ public void New_ReconfigurablePrediction() .Fit(data); var trainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => s.NumThreads = 1); - var trainData = pipeline.Transform(data); + var trainData = ml.Data.Cache(pipeline.Transform(data)); // Cache the data right before the trainer to boost the training speed. var model = trainer.Fit(trainData); var scoredTest = model.Transform(pipeline.Transform(testData)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index ad1f37e161..016acd6220 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -26,6 +26,7 @@ public void New_SimpleTrainAndPredict() var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") + .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); // Train. 
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs index f917d84a1c..adab64dec1 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs @@ -31,6 +31,7 @@ public void New_TrainSaveModelAndPredict() // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") + .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); // Train. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs index 32486abb27..a117de429c 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs @@ -27,8 +27,9 @@ public void New_TrainWithInitialPredictor() // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features"); - // Train the pipeline, prepare train set. - var trainData = pipeline.Fit(data).Transform(data); + // Train the pipeline, prepare train set. Since it will be scanned multiple times in the subsequent trainer, we cache the + // transformed data in memory. + var trainData = ml.Data.Cache(pipeline.Fit(data).Transform(data)); // Train the first predictor. 
var trainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features",advancedSettings: s => s.NumThreads = 1); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs index 367192ff47..2a7030ea94 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs @@ -31,7 +31,7 @@ public void New_TrainWithValidationSet() // Train model with validation set. var trainer = ml.BinaryClassification.Trainers.FastTree("Label","Features"); - var model = trainer.Train(trainData, validData); + var model = trainer.Train(ml.Data.Cache(trainData), ml.Data.Cache(validData)); } } } diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs index b3086a68a1..a907f8dd70 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs @@ -177,7 +177,11 @@ public void TrainAveragedPerceptronWithCache() (i, s) => true, s => { globalCounter++; }); - new AveragedPerceptronTrainer(env, "Label", "Features", numIterations: 2).Fit(xf).Transform(xf); + // The baseline result of this was generated with everything cached in memory. As auto-cache is removed, + // an explicit step of caching is required to make this test ok. + var cached = env.Data.Cache(xf); + + new AveragedPerceptronTrainer(env, "Label", "Features", numIterations: 2).Fit(cached).Transform(cached); // Make sure there were 2 cursoring events. 
Assert.Equal(1, globalCounter); diff --git a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs index 0c7d436008..4fe2e95914 100644 --- a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs +++ b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs @@ -65,7 +65,7 @@ public void OvaAveragedPerceptron() }); // Data - var data = reader.Read(GetDataPath(dataPath)); + var data = mlContext.Data.Cache(reader.Read(GetDataPath(dataPath))); // Pipeline var pipeline = new Ova( @@ -133,10 +133,10 @@ public void OvaLinearSvm() }); // Data - var data = reader.Read(GetDataPath(dataPath)); + var data = mlContext.Data.Cache(reader.Read(GetDataPath(dataPath))); // Pipeline - var pipeline = new Ova(mlContext, new LinearSvm(mlContext), useProbabilities: false); + var pipeline = new Ova(mlContext, new LinearSvm(mlContext, numIterations: 100), useProbabilities: false); var model = pipeline.Fit(data); var predictions = model.Transform(data); diff --git a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs index b9fc1c21d2..f93f06912b 100644 --- a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs @@ -41,6 +41,7 @@ public void TensorFlowTransforCifarEndToEndTest() .Append(new TensorFlowEstimator(mlContext, model_location, new[] { "Input" }, new[] { "Output" })) .Append(new ColumnConcatenatingEstimator(mlContext, "Features", "Output")) .Append(new ValueToKeyMappingEstimator(mlContext, "Label")) + .AppendCacheCheckpoint(mlContext) .Append(new SdcaMultiClassTrainer(mlContext)); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 01b3d75d13..4752119236 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ 
b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -41,6 +41,7 @@ public void TrainAndPredictIrisModelUsingDirectInstantiationTest() var pipe = mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(mlContext.Transforms.Normalize("Features")) + .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); // Read training and test data sets diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs index 8a98764996..10d0db46a9 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs @@ -455,6 +455,7 @@ private void ExecuteTFTransformMNISTConvTrainingTest(bool shuffle, int? shuffleS ReTrain = true })) .Append(mlContext.Transforms.Concatenate("Features", "Prediction")) + .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.LightGbm("Label", "Features")); var trainedModel = pipe.Fit(preprocessedTrainData); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs index f94400c9fc..ce403b548a 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs @@ -379,7 +379,10 @@ public void OneClassMatrixFactorizationInMemoryDataZeroBaseIndex() // factorization problem, unspecified matrix elements are all a constant provided by user. If that constant is 0.15, // the following list means a 3-by-2 training matrix with elements: // (0, 0, 1), (1, 1, 1), (0, 2, 1), (0, 1, 0.15), (1, 0, 0.15), (1, 2, 0.15). 
- // because matrix elements at (0, 1), (1, 0), and (1, 2) are not specified. + // because matrix elements at (0, 1), (1, 0), and (1, 2) are not specified. Below is a visualization of the training matrix. + // [1, ?] + // |?, 1| where ? will be set to 0.15 by user when creating the trainer. + // [1, ?] var dataMatrix = new List(); dataMatrix.Add(new OneClassMatrixElementZeroBased() { MatrixColumnIndex = 0, MatrixRowIndex = 0, Value = 1 }); dataMatrix.Add(new OneClassMatrixElementZeroBased() { MatrixColumnIndex = 1, MatrixRowIndex = 1, Value = 1 }); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs index f8553d8218..691ea7447d 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs @@ -16,7 +16,8 @@ public void SdcaWorkout() var dataPath = GetDataPath("breast-cancer.txt"); var data = TextLoader.CreateReader(Env, ctx => (Label: ctx.LoadFloat(0), Features: ctx.LoadFloat(1, 10))) - .Read(dataPath); + .Read(dataPath).Cache(); + var binaryTrainer = new SdcaBinaryTrainer(Env, "Label", "Features", advancedSettings: (s) => s.ConvergenceTolerance = 1e-2f); TestEstimatorCore(binaryTrainer, data.AsDynamic); diff --git a/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs b/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs index f11fb666e3..ee2a20855d 100644 --- a/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs @@ -39,6 +39,7 @@ public void FeatureSelectionWorkout() .Read(sentimentDataPath); var est = new WordBagEstimator(ML, "text", "bag_of_words") + .AppendCacheCheckpoint(ML) .Append(ML.Transforms.FeatureSelection.SelectFeaturesBasedOnCount("bag_of_words", "bag_of_words_count", 10) .Append(ML.Transforms.FeatureSelection.SelectFeaturesBasedOnMutualInformation("bag_of_words", "bag_of_words_mi", labelColumn: "label"))); @@ -110,7 
+111,7 @@ public void CountFeatureSelectionWorkout() VectorDouble: ctx.LoadDouble(4, 8) )); - var data = reader.Read(new MultiFileSource(dataPath)).AsDynamic; + var data = ML.Data.Cache(reader.Read(new MultiFileSource(dataPath)).AsDynamic); var columns = new[] { new CountFeatureSelectingEstimator.ColumnInfo("VectorDouble", "FeatureSelectDouble", minCount: 1), From 91cee09b77fe4334c9ec08ffe83086f568c50970 Mon Sep 17 00:00:00 2001 From: Tom Finley Date: Thu, 6 Dec 2018 10:08:53 -0800 Subject: [PATCH 029/100] Revert "Removed AlignedArray (#1657)" (#1838) This reverts commit 72ec121afe1a889218c750f7bda7ee5093c140b7. A bug was detected that resulted in non-deterministic calculation, since the underlying C++ code was written in a way apparently that required alignment to produce consistent results, so of course just removing the alignment and calling an only slightly modified algorithm compromised determinism, resulting in test failure for RFF in particular. --- .../AlignedArray.cs | 93 ++- src/Microsoft.ML.CpuMath/AlignedMatrix.cs | 681 ++++++++++++++++++ src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 306 ++++---- .../CpuAligenedMathUtils.cs | 148 ++++ .../CpuMathUtils.netcoreapp.cs | 166 +++-- .../CpuMathUtils.netstandard.cs | 15 +- src/Microsoft.ML.CpuMath/Sse.cs | 78 +- src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 295 ++++---- src/Microsoft.ML.CpuMath/Thunk.cs | 6 + ...AdaptiveSingularSpectrumSequenceModeler.cs | 71 +- .../RandomFourierFeaturizing.cs | 67 +- src/Native/CpuMathNative/Sse.cpp | 353 ++++----- .../UnitTests.cs | 189 ++--- .../DataPipe/TestDataPipeBase.cs | 10 +- .../TimeSeries.cs | 2 +- .../TimeSeriesDirectApi.cs | 24 +- .../TimeSeriesEstimatorTests.cs | 4 +- 17 files changed, 1699 insertions(+), 809 deletions(-) rename src/{Microsoft.ML.StandardLearners/FactorizationMachine => Microsoft.ML.CpuMath}/AlignedArray.cs (53%) create mode 100644 src/Microsoft.ML.CpuMath/AlignedMatrix.cs create mode 100644 src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs diff --git 
a/src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs b/src/Microsoft.ML.CpuMath/AlignedArray.cs similarity index 53% rename from src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs rename to src/Microsoft.ML.CpuMath/AlignedArray.cs index f01da9fe28..0a631be0e9 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/AlignedArray.cs +++ b/src/Microsoft.ML.CpuMath/AlignedArray.cs @@ -5,7 +5,7 @@ using Microsoft.ML.Runtime.Internal.CpuMath.Core; using System; -namespace Microsoft.ML.Runtime.FactorizationMachine +namespace Microsoft.ML.Runtime.Internal.CpuMath { using Float = System.Single; @@ -17,6 +17,7 @@ namespace Microsoft.ML.Runtime.FactorizationMachine /// /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). /// + [BestFriend] internal sealed class AlignedArray { // Items includes "head" items filled with NaN, followed by _size entries, followed by "tail" @@ -109,6 +110,60 @@ public Float this[int index] } } + public void CopyTo(Span dst, int index, int count) + { + Contracts.Assert(0 <= count && count <= _size); + Contracts.Assert(dst != null); + Contracts.Assert(0 <= index && index <= dst.Length - count); + Items.AsSpan(_base, count).CopyTo(dst.Slice(index)); + } + + public void CopyTo(int start, Span dst, int index, int count) + { + Contracts.Assert(0 <= count); + Contracts.Assert(0 <= start && start <= _size - count); + Contracts.Assert(dst != null); + Contracts.Assert(0 <= index && index <= dst.Length - count); + Items.AsSpan(start + _base, count).CopyTo(dst.Slice(index)); + } + + public void CopyFrom(ReadOnlySpan src) + { + Contracts.Assert(src.Length <= _size); + src.CopyTo(Items.AsSpan(_base)); + } + + public void CopyFrom(int start, ReadOnlySpan src) + { + Contracts.Assert(0 <= start && start <= _size - src.Length); + src.CopyTo(Items.AsSpan(start + _base)); + } + + // Copies values from a sparse vector. + // valuesSrc contains only the non-zero entries. 
Those are copied into their logical positions in the dense array. + // rgposSrc contains the logical positions + offset of the non-zero entries in the dense array. + // rgposSrc runs parallel to the valuesSrc array. + public void CopyFrom(ReadOnlySpan rgposSrc, ReadOnlySpan valuesSrc, int posMin, int iposMin, int iposLim, bool zeroItems) + { + Contracts.Assert(rgposSrc != null); + Contracts.Assert(valuesSrc != null); + Contracts.Assert(rgposSrc.Length <= valuesSrc.Length); + Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); + + // Zeroing-out and setting the values in one-pass does not seem to give any perf benefit. + // So explicitly zeroing and then setting the values. + if (zeroItems) + ZeroItems(); + + for (int ipos = iposMin; ipos < iposLim; ++ipos) + { + Contracts.Assert(posMin <= rgposSrc[ipos]); + int iv = _base + rgposSrc[ipos] - posMin; + Contracts.Assert(iv < _size + _base); + Items[iv] = valuesSrc[ipos]; + } + } + public void CopyFrom(AlignedArray src) { Contracts.Assert(src != null); @@ -116,5 +171,41 @@ public void CopyFrom(AlignedArray src) Contracts.Assert(src._cbAlign == _cbAlign); Array.Copy(src.Items, src._base, Items, _base, _size); } + + public void ZeroItems() + { + Array.Clear(Items, _base, _size); + } + + public void ZeroItems(int[] rgposSrc, int posMin, int iposMin, int iposLim) + { + Contracts.Assert(rgposSrc != null); + Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); + Contracts.Assert(iposLim - iposMin <= _size); + + int ivCur = 0; + for (int ipos = iposMin; ipos < iposLim; ++ipos) + { + int ivNextNonZero = rgposSrc[ipos] - posMin; + Contracts.Assert(ivCur <= ivNextNonZero && ivNextNonZero < _size); + while (ivCur < ivNextNonZero) + Items[_base + ivCur++] = 0; + Contracts.Assert(ivCur == ivNextNonZero); + // Skip the non-zero element at ivNextNonZero. 
+ ivCur++; + } + + while (ivCur < _size) + Items[_base + ivCur++] = 0; + } + + // REVIEW: This is hackish and slightly dangerous. Perhaps we should wrap this in an + // IDisposable that "locks" this, prohibiting GetBase from being called, while the buffer + // is "checked out". + public void GetRawBuffer(out Float[] items, out int offset) + { + items = Items; + offset = _base; + } } } \ No newline at end of file diff --git a/src/Microsoft.ML.CpuMath/AlignedMatrix.cs b/src/Microsoft.ML.CpuMath/AlignedMatrix.cs new file mode 100644 index 0000000000..6d550fc3fc --- /dev/null +++ b/src/Microsoft.ML.CpuMath/AlignedMatrix.cs @@ -0,0 +1,681 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Float = System.Single; + +using Microsoft.ML.Runtime.Internal.CpuMath.Core; +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Microsoft.ML.Runtime.Internal.CpuMath +{ + using Conditional = System.Diagnostics.ConditionalAttribute; + + /// + /// This implements a logical array of Floats that is automatically aligned for SSE/AVX operations. + /// This is a thin wrapper around the AlignedArray type implemented in C++. This simply couples + /// the AlignedArray with a logical size, which does not include padding, while the AlignedArray + /// size does include padding. + /// + [BestFriend] + internal sealed class CpuAlignedVector : ICpuVector + { + private readonly AlignedArray _items; + private readonly int _size; // The logical size. + + /// + /// The value count. + /// + public int ValueCount { get { return _size; } } + + /// + /// The logical size of the vector. + /// + public int VectorSize { get { return _size; } } + + // Round cflt up to a multiple of cfltAlign. 
+ private static int RoundUp(int cflt, int cfltAlign) + { + Contracts.Assert(0 < cflt); + // cfltAlign should be a power of two. + Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); + + // Determine the number of "blobs" of size cfltAlign. + int cblob = (cflt + cfltAlign - 1) / cfltAlign; + return cblob * cfltAlign; + } + + /// + /// Allocate an aligned vector with the given alignment (in bytes). + /// The alignment must be a power of two and at least sizeof(Float). + /// + public CpuAlignedVector(int size, int cbAlign) + { + Contracts.Assert(0 < size); + // cbAlign should be a power of two. + Contracts.Assert(sizeof(Float) <= cbAlign); + Contracts.Assert((cbAlign & (cbAlign - 1)) == 0); + + int cfltAlign = cbAlign / sizeof(Float); + int cflt = RoundUp(size, cfltAlign); + _items = new AlignedArray(cflt, cbAlign); + _size = size; + AssertValid(); + } + + public void Dispose() + { + } + + [Conditional("DEBUG")] + private void AssertValid() + { +#if DEBUG + Contracts.Assert(0 < _size && _size <= _items.Size); + + // The padding, [_size, _items.Size), should contain zeros. + for (int i = _size; i < _items.Size; i++) + Contracts.Assert(_items[i] == 0); +#endif + } + + /// + /// The physical AligenedArray items. + /// + public AlignedArray Items { get { return _items; } } + + /// + /// The alignment. + /// + public int CbAlign + { + get { return _items.CbAlign; } + } + + /// + /// Set and get the value of the vector at the given index. + /// + /// The index + /// The value at the given index + public Float this[int index] + { + get + { + Contracts.Assert(0 <= index && index < _size); + return _items[index]; + } + set + { + Contracts.Assert(0 <= index && index < _size); + _items[index] = value; + } + } + + /// + /// Get the value of the vector at the given index. 
+ /// + /// The index + /// The value at the given index + public Float GetValue(int i) + { + Contracts.Assert(0 <= i && i < _size); + return _items[i]; + } + + /// + /// Assign randomized values to the vector elements via the input function. + /// + /// The input rand om function that takes no arguments and returns a float value + public void Randomize(Func rand) + { + Contracts.AssertValue(rand); + for (int i = 0; i < _size; i++) + _items[i] = rand(); + } + + /// + /// Assign zeros to the vector elements. + /// + public void Zero() + { + _items.ZeroItems(); + } + + /// + /// Copy the values into dst, starting at slot ivDst and advancing ivDst. + /// + /// The destination array + /// The starting index in the destination array + public void CopyTo(Float[] dst, ref int ivDst) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - _size); + _items.CopyTo(dst, ivDst, _size); + ivDst += _size; + } + + /// + /// Copy the values from this vector starting at slot ivSrc into dst, starting at slot ivDst. + /// The number of values that are copied is determined by count. + /// + /// The staring index in this vector + /// The destination array + /// The starting index in the destination array + /// The number of elements to be copied + public void CopyTo(int ivSrc, Float[] dst, int ivDst, int count) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= count && count <= dst.Length); + Contracts.Assert(0 <= ivSrc && ivSrc <= _size - count); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - count); + _items.CopyTo(ivSrc, dst, ivDst, count); + } + + /// + /// Copy the values from src, starting at slot index and advancing index, into this vector. 
+ /// + /// The source array + /// The starting index in the source array + public void CopyFrom(Float[] src, ref int index) + { + Contracts.AssertValue(src); + Contracts.Assert(0 <= index && index <= src.Length - _size); + _items.CopyFrom(src.AsSpan(index, _size)); + index += _size; + } + + /// + /// Copy the values from src, starting at slot index and advancing index, into this vector, starting at slot ivDst. + /// The number of values that are copied is determined by count. + /// + /// The staring index in this vector + /// The source array + /// The starting index in the source array + /// The number of elements to be copied + public void CopyFrom(int ivDst, Float[] src, int ivSrc, int count) + { + Contracts.AssertValue(src); + Contracts.Assert(0 <= count && count <= src.Length); + Contracts.Assert(0 <= ivDst && ivDst <= _size - count); + Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - count); + _items.CopyFrom(ivDst, src.AsSpan(ivSrc, _size)); + } + + /// + /// Copy the values of src vector into this vector. The src vector must have the same size as this vector. + /// + /// The source vector + public void CopyFrom(CpuAlignedVector src) + { + Contracts.AssertValue(src); + Contracts.Assert(src._size == _size); + _items.CopyFrom(src._items); + } + + /// + /// Get the underlying AlignedArray as IEnumerator<Float>. + /// + public IEnumerator GetEnumerator() + { + for (int i = 0; i < _size; i++) + yield return _items[i]; + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } + + /// + /// This implements a logical matrix of Floats that is automatically aligned for SSE/AVX operations. + /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). + /// + [BestFriend] + internal abstract class CpuAlignedMatrixBase + { + // _items includes "head" items filled with NaN, followed by RunLenPhy * RunCntPhy entries, followed by + // "tail" items, also filled with NaN. 
Note that RunLenPhy and RunCntPhy are divisible by the alignment + // specified in the ctor and are >= RunLen and RunCnt, respectively. It is illegal to access any slot + // outsize [_base, _base + RunLenPhy * RunCntPhy). The padding should all be zero (and maintained as such). + // The items are arranged in "runs" of length RunLen. There are RunCnt such runs. Each run ends with + // (RunLenPhy - RunLen) padding slots. There are an addition (RunCntPhy - RunCnt) padding runs of length + // RunLenPhy, which are entirely zero. Any native code should be able to assume and should maintain + // these invariants. + public AlignedArray Items { get; } + + protected readonly int FloatAlign; // The alignment. + + // Since FloatAlign is a power of two, shifting by Shift = log_2(FloatAlign) is the same as multiplying/dividing by FloatAlign. + protected readonly int Shift; + // Since FloatAlign is a power of two, bitwise and with Mask = FloatAlign - 1 will be the same as moding by FloatAlign. + protected readonly int Mask; + + // Logical length of runs (RunLen) and number of runs (RunCnt). + public readonly int RunLen; + public readonly int RunCnt; + + // Physical (padded) length and number of runs. + public readonly int RunLenPhy; + public readonly int RunCntPhy; + + /// + /// The logical number values in the matrix + /// + public int ValueCount => RunLen * RunCnt; + + /// + /// The logical number of rows + /// + public abstract int RowCount { get; } + + /// + /// The logical number of columns + /// + public abstract int ColCount { get; } + + /// + /// The physical number of rows + /// + public abstract int RowCountPhy { get; } + + /// + /// The pysical number of columns + /// + public abstract int ColCountPhy { get; } + + // Round cflt up to a multiple of cfltAlign. + protected static int RoundUp(int cflt, int cfltAlign) + { + Contracts.Assert(0 < cflt); + // cfltAlign should be a power of two. 
+ Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); + + // Determine the number of "blobs" of size cfltAlign. + int cblob = (cflt + cfltAlign - 1) / cfltAlign; + return cblob * cfltAlign; + } + + /// + /// Allocate an aligned matrix with the given alignment (in bytes). + /// + protected CpuAlignedMatrixBase(int runLen, int runCnt, int cbAlign) + { + Contracts.Assert(0 < runLen); + Contracts.Assert(0 < runCnt); + // cbAlign should be a power of two. + Contracts.Assert(sizeof(Float) <= cbAlign); + Contracts.Assert((cbAlign & (cbAlign - 1)) == 0); + + RunLen = runLen; + RunCnt = runCnt; + + FloatAlign = cbAlign / sizeof(Float); + Shift = GeneralUtils.CbitLowZero((uint)FloatAlign); + Mask = FloatAlign - 1; + + RunLenPhy = RoundUp(runLen, FloatAlign); + RunCntPhy = RoundUp(runCnt, FloatAlign); + Items = new AlignedArray(RunLenPhy * RunCntPhy, cbAlign); + + AssertValid(); + } + + [Conditional("DEBUG")] + protected void AssertValid() + { +#if DEBUG + Contracts.Assert(0 < RunLen && RunLen <= RunLenPhy); + Contracts.Assert(0 < RunCnt && RunCnt <= RunCntPhy); + Contracts.Assert(RunLenPhy * RunCntPhy == Items.Size); + + // Assert that the padding at the end of each run contains zeros. + for (int i = 0; i < RunCnt; i++) + { + for (int j = RunLen; j < RunLenPhy; j++) + Contracts.Assert(Items[i * RunLenPhy + j] == 0); + } + + // Assert that the padding runs contain zeros. + for (int i = RunCnt; i < RunCntPhy; i++) + { + for (int j = 0; j < RunLenPhy; j++) + Contracts.Assert(Items[i * RunLenPhy + j] == 0); + } +#endif + } + + public void Dispose() + { + } + + /// + /// Assign randomized values to the matrix elements via the input function. 
+ /// + /// The input rand om function that takes no arguments and returns a float value + public void Randomize(Func rand) + { + Contracts.AssertValue(rand); + for (int i = 0, k = 0; i < RunCnt; i++) + { + Contracts.Assert(k == i * RunLenPhy); + for (int j = 0; j < RunLen; j++) + Items[k + j] = rand(); + k += RunLenPhy; + } + } + + /// + /// Assign zeros to the matrix elements. + /// + public void Zero() + { + Items.ZeroItems(); + } + + /// + /// Copy the values of src matrix into this matrix. The src matrix must have the same physical and logical size as this matrix. + /// + /// The source matrix + public void CopyFrom(CpuAlignedMatrixBase src) + { + AssertValid(); + Contracts.AssertValue(src); + src.AssertValid(); + Contracts.Assert(src.RunLen == RunLen); + Contracts.Assert(src.RunCnt == RunCnt); + Contracts.Assert(src.RunLenPhy == RunLenPhy); + Contracts.Assert(src.RunCntPhy == RunCntPhy); + Items.CopyFrom(src.Items); + } + } + + /// + /// This implements a logical row-major matrix of Floats that is automatically aligned for SSE/AVX operations. + /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). + /// + [BestFriend] + internal abstract class CpuAlignedMatrixRowBase : CpuAlignedMatrixBase, ICpuBuffer + { + protected CpuAlignedMatrixRowBase(int crow, int ccol, int cbAlign) + : base(ccol, crow, cbAlign) + { + } + + /// + /// The logical number of rows + /// + public override int RowCount => RunCnt; + + /// + /// The logical number of columns + /// + public override int ColCount { get { return RunLen; } } + + /// + /// The physical number of rows + /// + public override int RowCountPhy { get { return RunCntPhy; } } + + /// + /// The physical number of columns + /// + public override int ColCountPhy { get { return RunLenPhy; } } + + /// + /// Copy the values into dst, starting at slot ivDst and advancing ivDst. 
+ /// + /// The destination array + /// The starting index in the destination array + public void CopyTo(Float[] dst, ref int ivDst) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ValueCount); + + if (ColCount == ColCountPhy) + { + // Can copy all at once. + Items.CopyTo(0, dst, ivDst, ValueCount); + ivDst += ValueCount; + } + else + { + // Copy each row. + int ivSrc = 0; + for (int row = 0; row < RowCount; row++) + { + Items.CopyTo(ivSrc, dst, ivDst, ColCount); + ivSrc += ColCountPhy; + ivDst += ColCount; + } + } + } + + /// + /// Copy the values from src, starting at slot ivSrc and advancing ivSrc. + /// + /// The source array + /// The starting index in the source array + public void CopyFrom(Float[] src, ref int ivSrc) + { + Contracts.AssertValue(src); + Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - ValueCount); + + if (ColCount == ColCountPhy) + { + Items.CopyFrom(src.AsSpan(ivSrc, ValueCount)); + ivSrc += ValueCount; + } + else + { + for (int row = 0; row < RowCount; row++) + { + Items.CopyFrom(row * ColCountPhy, src.AsSpan(ivSrc, ColCount)); + ivSrc += ColCount; + } + } + } + + /// + /// Get the underlying AlignedArray as IEnumerator<Float>. + /// + public IEnumerator GetEnumerator() + { + for (int row = 0; row < RowCount; row++) + { + int ivBase = row * ColCountPhy; + for (int col = 0; col < ColCount; col++) + yield return Items[ivBase + col]; + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } + + /// + /// This implements a row-major matrix of Floats that is automatically aligned for SSE/AVX operations. + /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). 
+ /// + [BestFriend] + internal sealed class CpuAlignedMatrixRow : CpuAlignedMatrixRowBase, ICpuFullMatrix + { + public CpuAlignedMatrixRow(int crow, int ccol, int cbAlign) + : base(crow, ccol, cbAlign) + { + } + + /// + /// The logical number of rows + /// + public override int RowCount { get { return RunCnt; } } + + /// + /// The logical number of columns + /// + public override int ColCount { get { return RunLen; } } + + /// + /// The physical number of rows + /// + public override int RowCountPhy { get { return RunCntPhy; } } + + /// + /// The physical number of columns + /// + public override int ColCountPhy { get { return RunLenPhy; } } + + /// + /// Copy the values from this matrix, starting from the row into dst, starting at slot ivDst and advancing ivDst. + /// + /// The starting row in this matrix + /// The destination array + /// The starting index in the destination array + public void CopyTo(int row, Float[] dst, ref int ivDst) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= row && row < RowCount); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ColCount); + + Items.CopyTo(row * ColCountPhy, dst, ivDst, ColCount); + ivDst += ColCount; + } + + /// + /// Assign zeros to the values at the indices + /// + /// The indices + public void ZeroItems(int[] indices) + { + Contracts.AssertValue(indices); + + // REVIEW: Ideally, we'd adjust the indices once so we wouldn't need to + // repeatedly deal with padding adjustments. + CpuMathUtils.ZeroMatrixItems(Items, ColCount, ColCountPhy, indices); + } + } + + /// + /// This implements a logical matrix of Floats that is automatically aligned for SSE/AVX operations. + /// The ctor takes an alignment value, which must be a power of two at least sizeof(Float). + /// + [BestFriend] + internal sealed class CpuAlignedMatrixCol : CpuAlignedMatrixBase, ICpuFullMatrix + { + /// + /// Allocate an aligned matrix with the given alignment (in bytes). 
+ /// + public CpuAlignedMatrixCol(int crow, int ccol, int cbAlign) + : base(crow, ccol, cbAlign) + { + } + + /// + /// The logical number of rows + /// + public override int RowCount { get { return RunCnt; } } + + /// + /// The logical number of columns + /// + public override int ColCount { get { return RunLen; } } + + /// + /// The physical number of rows + /// + public override int RowCountPhy { get { return RunCntPhy; } } + + /// + /// The physical number of columns + /// + public override int ColCountPhy { get { return RunLenPhy; } } + + /// + /// Copy the values into dst, starting at slot ivDst and advancing ivDst. + /// + /// The destination array + /// The starting index in the destination array + public void CopyTo(Float[] dst, ref int ivDst) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ValueCount); + + for (int row = 0; row < RowCount; row++) + { + for (int col = 0; col < ColCount; col++) + dst[ivDst++] = Items[row + col * RowCountPhy]; + } + } + + /// + /// Copy the values from this matrix, starting from the row into dst, starting at slot ivDst and advancing ivDst. + /// + /// The starting row in this matrix + /// The destination array + /// The starting index in the destination array + public void CopyTo(int row, Float[] dst, ref int ivDst) + { + Contracts.AssertValue(dst); + Contracts.Assert(0 <= row && row < RowCount); + Contracts.Assert(0 <= ivDst && ivDst <= dst.Length - ColCount); + + for (int col = 0; col < ColCount; col++) + dst[ivDst++] = Items[row + col * RowCountPhy]; + } + + /// + /// Copy the values from src, starting at slot ivSrc and advancing ivSrc. 
+ /// + /// The source array + /// The starting index in the source array + public void CopyFrom(Float[] src, ref int ivSrc) + { + Contracts.AssertValue(src); + Contracts.Assert(0 <= ivSrc && ivSrc <= src.Length - ValueCount); + for (int row = 0; row < RowCount; row++) + { + for (int col = 0; col < ColCount; col++) + Items[row + col * RowCountPhy] = src[ivSrc++]; + } + } + + /// + /// Assign zeros to the values at the indices + /// + /// The indices + public void ZeroItems(int[] indices) + { + Contracts.AssertValue(indices); + + // REVIEW: Ideally, we'd adjust the indices once so we wouldn't need to + // repeatedly deal with padding adjustments. + foreach (int iv in indices) + { + int row = iv / ColCount; + int col = iv % ColCount; + Items[row + col * ColCountPhy] = 0; + } + } + + /// + /// Get the underlying AlignedArray as IEnumerator<Float>. + /// + public IEnumerator GetEnumerator() + { + for (int row = 0; row < RowCount; row++) + { + for (int col = 0; col < ColCount; col++) + yield return Items[row + col * RowCountPhy]; + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } +} \ No newline at end of file diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index d6fb169094..2156ddf5fa 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -152,6 +152,11 @@ private static Vector256 MultiplyAdd(Vector256 src1, Vector256 mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -164,9 +169,8 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; float* pMatCurrent = pmat; - int numRows = crow; - while (pDstCurrent + 4 <= pDstEnd) + while (pDstCurrent < pDstEnd) { Vector256 res0 = Avx.SetZeroVector256(); Vector256 res1 = Avx.SetZeroVector256(); @@ -180,11 +184,10 @@ public static unsafe void 
MatMul(ReadOnlySpan mat, ReadOnlySpan sr int misalignment = (int)(address % 32); int remainder = 0; - if ((misalignment & 3) != 0 || (ccol % 8 != 0)) + if ((misalignment & 3) != 0) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - remainder = length % 8; - while (pSrcCurrent + 8 <= pSrcEnd) + while (pSrcCurrent < pSrcEnd) { Vector256 vector = Avx.LoadVector256(pSrcCurrent); @@ -256,32 +259,32 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr // unaligned loads where we mask the input each time. remainder = length; } - } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (8 - remainder); - pSrcCurrent -= (8 - remainder); + pMatCurrent -= (8 - remainder); + pSrcCurrent -= (8 - remainder); - Vector256 mask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); + Vector256 mask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - float* pMatTemp = pMatCurrent; - Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); - Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); + float* pMatTemp = pMatCurrent; + Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); + Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 
x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); + Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); - res0 = MultiplyAdd(x01, vector, res0); - res1 = MultiplyAdd(x11, vector, res1); - res2 = MultiplyAdd(x21, vector, res2); - res3 = MultiplyAdd(x31, vector, res3); + res0 = MultiplyAdd(x01, vector, res0); + res1 = MultiplyAdd(x11, vector, res1); + res2 = MultiplyAdd(x21, vector, res2); + res3 = MultiplyAdd(x31, vector, res3); - pMatCurrent += 8; - pSrcCurrent += 8; + pMatCurrent += 8; + pSrcCurrent += 8; + } } // Add up the entries of each, with the 4 results in res0 @@ -294,58 +297,17 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr pDstCurrent += 4; pMatCurrent += 3 * ccol; - numRows -= 4; - } - - // falling through the case statements - switch (numRows) - { - case 3: - *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); - goto case 2; - case 2: - *(pDstCurrent + 1) = RowMultiply(pMatCurrent + ccol, psrc, pSrcEnd, ccol); - goto case 1; - case 1: - *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); - break; } } } - private static unsafe float RowMultiply(float* pMatCurrent, float* pSrcCurrent, float* pSrcEnd, int ccol) + // Partial sparse source vector. 
+ public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, + int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) { - Vector256 res0 = Avx.SetZeroVector256(); - int remainder = ccol % 8; - while (pSrcCurrent + 8 <= pSrcEnd) - { - Vector256 vector = Avx.LoadVector256(pSrcCurrent); - - float* pMatTemp = pMatCurrent; - res0 = MultiplyAdd(pMatTemp, vector, res0); - - pSrcCurrent += 8; - pMatCurrent += 8; - } - - res0 = VectorSum256(in res0); - float sum = Sse.ConvertToSingle(Sse.AddScalar(Avx.GetLowerHalf(res0), GetHigh(res0))); - - // falling through the case statements - switch (remainder) - { - case 7: sum += *(pSrcCurrent + 6) * *(pMatCurrent + 6); goto case 6; - case 6: sum += *(pSrcCurrent + 5) * *(pMatCurrent + 5); goto case 5; - case 5: sum += *(pSrcCurrent + 4) * *(pMatCurrent + 4); goto case 4; - case 4: sum += *(pSrcCurrent + 3) * *(pMatCurrent + 3); goto case 3; - case 3: sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); goto case 2; - case 2: sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); goto case 1; - case 1: sum += *(pSrcCurrent) * *(pMatCurrent); break; - } - return sum; + MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); } - // Partial sparse source vector. 
public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { @@ -499,6 +461,11 @@ Vector256 SparseMultiplicationAcrossRow() } } + public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) + { + MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); + } + public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -511,31 +478,43 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan h01 = Sse.LoadVector128(pSrcCurrent); + // Replicate each slot of h01 (ABCD) into its own register. + Vector128 h11 = Avx.Permute(h01, 0x55); // B + Vector128 h21 = Avx.Permute(h01, 0xAA); // C + Vector128 h31 = Avx.Permute(h01, 0xFF); // D h01 = Avx.Permute(h01, 0x00); // A Vector256 x01 = Avx.SetHighLow(h01, h01); + Vector256 x11 = Avx.SetHighLow(h11, h11); + Vector256 x21 = Avx.SetHighLow(h21, h21); + Vector256 x31 = Avx.SetHighLow(h31, h31); + int length = crow; float* pDstCurrent = pdst; nuint address = (nuint)(pMatCurrent); int misalignment = (int)(address % 32); - if ((misalignment & 3) != 0 || (crow % 8 != 0)) + if ((misalignment & 3) != 0) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - remainder = crow % 8; - while (pDstCurrent + 8 <= pDstEnd) + while (pDstCurrent < pDstEnd) { float* pMatTemp = pMatCurrent; Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); + + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); Avx.Store(pDstCurrent, x02); pDstCurrent += 8; @@ -544,6 +523,7 @@ public static unsafe 
void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan x02 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); + x02 = Avx.Multiply(x01, x02); + x12 = Avx.Multiply(x11, x12); + x22 = Avx.Multiply(x21, x22); + x32 = Avx.Multiply(x31, x32); + + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); Avx.Store(pDstCurrent, x02); pMatCurrent += misalignment; @@ -578,7 +569,15 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); + + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); Avx.Store(pDstCurrent, x02); pDstCurrent += 8; @@ -592,36 +591,47 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); + pMatCurrent -= (8 - remainder); + pDstCurrent -= (8 - remainder); + Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - x02 = Avx.Multiply(x01, x02); + float* pMatTemp = pMatCurrent; + Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.And(trailingMask, 
Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); + x02 = Avx.Multiply(x01, x02); + x12 = Avx.Multiply(x11, x12); + x22 = Avx.Multiply(x21, x22); + x32 = Avx.Multiply(x31, x32); - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); + + Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); + Vector256 x3 = Avx.LoadVector256(pDstCurrent); + x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); + + Avx.Store(pDstCurrent, x02); + pDstCurrent += 8; + pMatCurrent += 8; + } } - pSrcCurrent += 1; - numCol -= 1; + pMatCurrent += 3 * crow; + pSrcCurrent += 4; } // We do 4-way unrolling - while (pSrcCurrent + 4 <= pSrcEnd) + while (pSrcCurrent < pSrcEnd) { Vector128 h01 = Sse.LoadVector128(pSrcCurrent); // Replicate each slot of h01 (ABCD) into its own register. 
@@ -641,10 +651,9 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); @@ -665,6 +674,7 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); @@ -728,92 +738,46 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); + if (remainder != 0) + { + pMatCurrent -= (8 - remainder); + pDstCurrent -= (8 - remainder); + Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + float* pMatTemp = pMatCurrent; + Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); + Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); + Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); + x02 = Avx.Multiply(x01, x02); + x12 = Avx.Multiply(x11, x12); + x22 = Avx.Multiply(x21, x22); + x32 = Avx.Multiply(x31, x32); - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); + x02 = Avx.Add(x02, x12); + x22 = Avx.Add(x22, x32); + x02 = Avx.Add(x02, x22); 
- Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); + Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); + Vector256 x3 = Avx.LoadVector256(pDstCurrent); + x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); - x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); + x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; + Avx.Store(pDstCurrent, x02); + pDstCurrent += 8; + pMatCurrent += 8; + } } pMatCurrent += 3 * crow; pSrcCurrent += 4; - numCol -= 4; - } - - // falling through the case statements - switch (numCol) - { - case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); goto case 2; - case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); goto case 1; - case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; } } } - private static unsafe void ColumnMultiply(float* pMatCurrent, float* pSrcCurrent, float* pdst, float* pDstEnd, int crow) - { - Vector128 h01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each slot of h01 (ABCD) into its own register. - h01 = Avx.Permute(h01, 0x00); // A - Vector256 x01 = Avx.SetHighLow(h01, h01); - int remainder = crow % 8; - float* pDstCurrent = pdst; - - while (pDstCurrent + 8 <= pDstEnd) - { - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. 
- float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - x02 = Avx.Add(x02, Avx.LoadVector256(pDstCurrent)); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - - // falling through the case statements - switch (remainder) - { - case 7: *(pDstCurrent + 6) += *(pSrcCurrent) * *(pMatCurrent + 6); goto case 6; - case 6: *(pDstCurrent + 5) += *(pSrcCurrent) * *(pMatCurrent + 5); goto case 5; - case 5: *(pDstCurrent + 4) += *(pSrcCurrent) * *(pMatCurrent + 4); goto case 4; - case 4: *(pDstCurrent + 3) += *(pSrcCurrent) * *(pMatCurrent + 3); goto case 3; - case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); goto case 2; - case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); goto case 1; - case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; - } - return; - } - // dst[i] += scale public static unsafe void AddScalarU(float scalar, Span dst) { diff --git a/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs b/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs new file mode 100644 index 0000000000..33690055b2 --- /dev/null +++ b/src/Microsoft.ML.CpuMath/CpuAligenedMathUtils.cs @@ -0,0 +1,148 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime.Internal.CpuMath.Core; + +namespace Microsoft.ML.Runtime.Internal.CpuMath +{ + [BestFriend] + internal static class CpuAligenedMathUtils + where TMatrix : CpuAlignedMatrixBase, ICpuFullMatrix + { + /// + /// Assert the compatibility of the underlying AlignedArray for the input matrix in terms of alignment amount. 
+ /// + /// The input matrix + public static void AssertCompatible(ICpuFullMatrix values) + { +#if DEBUG + var mat = values as TMatrix; + Contracts.AssertValue(mat); + Contracts.Assert((mat.Items.CbAlign % CpuMathUtils.GetVectorAlignment()) == 0); +#endif + } + + /// + /// Assert the compatibility of the underlying AlignedArray for the input vector in terms of alignment amount. + /// + /// The input vector + public static void AssertCompatible(ICpuVector values) + { +#if DEBUG + CpuAlignedVector vec = values as CpuAlignedVector; + Contracts.AssertValue(vec); + Contracts.Assert((vec.Items.CbAlign % CpuMathUtils.GetVectorAlignment()) == 0); +#endif + } + + private static TMatrix A(ICpuFullMatrix x) + { + AssertCompatible(x); + return (TMatrix)x; + } + + private static CpuAlignedVector A(ICpuVector x) + { + AssertCompatible(x); + return (CpuAlignedVector)x; + } + + private static void AssertCompatibleCore(ICpuMatrix mat, ICpuVector src, ICpuVector dst) + { + AssertCompatible(src); + AssertCompatible(dst); + Contracts.Assert(mat.ColCount == src.VectorSize); + Contracts.Assert(mat.RowCount == dst.VectorSize); + } + + /// + /// Asserts the following: + /// 1. The compatibility of the underlying AlignedArray for mat in terms of alignment amount. + /// 2. The compatibility of the underlying AlignedArray for src in terms of alignment amount. + /// 3. The compatibility of the underlying AlignedArray for dst in terms of alignment amount. + /// 4. The compatibility of the matrix-vector multiplication mat * src = dst. + /// + /// + /// + /// + public static void AssertCompatible(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) + { + // Also check the physical sizes. 
+ AssertCompatible(mat); + AssertCompatibleCore(mat, src, dst); + var m = A(mat); + Contracts.Assert(m.ColCountPhy == A(src).Items.Size); + Contracts.Assert(m.RowCountPhy == A(dst).Items.Size); + } + + /// + /// Matrix multiplication: + /// dst = mat * src + /// + /// The multiplier matrix + /// The source vector + /// The destination vector + public static void MatTimesSrc(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) + { + bool colMajor = typeof(TMatrix) == typeof(CpuAlignedMatrixCol); + AssertCompatible(mat, src, dst); + var m = A(mat); + CpuMathUtils.MatrixTimesSource(colMajor, m.Items, A(src).Items, A(dst).Items, m.RunCnt); + } + + /// + /// Matrix transpose multiplication: + /// dst = mat' * src + /// + /// The multiplier matrix + /// The source vector + /// The destination vector + public static void MatTranTimesSrc(ICpuFullMatrix mat, ICpuVector src, ICpuVector dst) + { + bool colMajor = typeof(TMatrix) == typeof(CpuAlignedMatrixCol); + AssertCompatible(mat, dst, src); + var m = A(mat); + CpuMathUtils.MatrixTimesSource(!colMajor, m.Items, A(src).Items, A(dst).Items, m.RunCnt); + } + } + + public static class GeneralUtils + { + /// + /// Count the number of zero bits in the lonest string of zero's from the lowest significant bit of the input integer. 
+ /// + /// The input integer + /// + public static int CbitLowZero(uint u) + { + if (u == 0) + return 32; + + int cbit = 0; + if ((u & 0x0000FFFF) == 0) + { + cbit += 16; + u >>= 16; + } + if ((u & 0x000000FF) == 0) + { + cbit += 8; + u >>= 8; + } + if ((u & 0x0000000F) == 0) + { + cbit += 4; + u >>= 4; + } + if ((u & 0x00000003) == 0) + { + cbit += 2; + u >>= 2; + } + if ((u & 0x00000001) == 0) + cbit += 1; + return cbit; + } + } +} diff --git a/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs b/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs index 6546eeef31..d895e590a9 100644 --- a/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs +++ b/src/Microsoft.ML.CpuMath/CpuMathUtils.netcoreapp.cs @@ -11,62 +11,76 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath { internal static partial class CpuMathUtils { - public static void MatrixTimesSource(bool transpose, ReadOnlySpan matrix, ReadOnlySpan source, Span destination, int stride) + // The count of bytes in Vector128, corresponding to _cbAlign in AlignedArray + private const int Vector128Alignment = 16; + + // The count of bytes in Vector256, corresponding to _cbAlign in AlignedArray + private const int Vector256Alignment = 32; + + // The count of bytes in a 32-bit float, corresponding to _cbAlign in AlignedArray + private const int FloatAlignment = 4; + + // If neither AVX nor SSE is supported, return basic alignment for a 4-byte float. + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + public static int GetVectorAlignment() + => Avx.IsSupported ? Vector256Alignment : (Sse.IsSupported ? 
Vector128Alignment : FloatAlignment); + + public static void MatrixTimesSource(bool transpose, AlignedArray matrix, AlignedArray source, AlignedArray destination, int stride) { - Contracts.AssertNonEmpty(matrix); - Contracts.AssertNonEmpty(source); - Contracts.AssertNonEmpty(destination); - Contracts.Assert(matrix.Length == destination.Length * source.Length); + Contracts.Assert(matrix.Size == destination.Size * source.Size); Contracts.Assert(stride >= 0); - if (!transpose) + if (Avx.IsSupported) { - if (Avx.IsSupported && source.Length >= 8) + if (!transpose) { - Contracts.Assert(stride <= destination.Length); - AvxIntrinsics.MatMul(matrix, source, destination, stride, source.Length); + Contracts.Assert(stride <= destination.Size); + AvxIntrinsics.MatMul(matrix, source, destination, stride, source.Size); } - else if (Sse.IsSupported && source.Length >= 4) + else { - Contracts.Assert(stride <= destination.Length); - SseIntrinsics.MatMul(matrix, source, destination, stride, source.Length); + Contracts.Assert(stride <= source.Size); + AvxIntrinsics.MatMulTran(matrix, source, destination, destination.Size, stride); + } + } + else if (Sse.IsSupported) + { + if (!transpose) + { + Contracts.Assert(stride <= destination.Size); + SseIntrinsics.MatMul(matrix, source, destination, stride, source.Size); } else { - Contracts.Assert(stride <= destination.Length); + Contracts.Assert(stride <= source.Size); + SseIntrinsics.MatMulTran(matrix, source, destination, destination.Size, stride); + } + } + else + { + if (!transpose) + { + Contracts.Assert(stride <= destination.Size); for (int i = 0; i < stride; i++) { float dotProduct = 0; - for (int j = 0; j < source.Length; j++) + for (int j = 0; j < source.Size; j++) { - dotProduct += matrix[i * source.Length + j] * source[j]; + dotProduct += matrix[i * source.Size + j] * source[j]; } destination[i] = dotProduct; } } - } - else - { - if (Avx.IsSupported && destination.Length >= 8) - { - Contracts.Assert(stride <= source.Length); - 
AvxIntrinsics.MatMulTran(matrix, source, destination, destination.Length, stride); - } - else if (Sse.IsSupported && destination.Length >=4) - { - Contracts.Assert(stride <= source.Length); - SseIntrinsics.MatMulTran(matrix, source, destination, destination.Length, stride); - } else { - Contracts.Assert(stride <= source.Length); - for (int i = 0; i < destination.Length; i++) + Contracts.Assert(stride <= source.Size); + for (int i = 0; i < destination.Size; i++) { float dotProduct = 0; for (int j = 0; j < stride; j++) { - dotProduct += matrix[j * destination.Length + i] * source[j]; + dotProduct += matrix[j * source.Size + i] * source[j]; } destination[i] = dotProduct; @@ -75,22 +89,17 @@ public static void MatrixTimesSource(bool transpose, ReadOnlySpan matrix, } } - public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan rgposSrc, ReadOnlySpan sourceValues, - int posMin, int iposMin, int iposLimit, Span destination, int stride) + public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgposSrc, AlignedArray sourceValues, + int posMin, int iposMin, int iposLimit, AlignedArray destination, int stride) { Contracts.Assert(iposMin >= 0); Contracts.Assert(iposMin <= iposLimit); Contracts.Assert(iposLimit <= rgposSrc.Length); - Contracts.AssertNonEmpty(matrix); - Contracts.AssertNonEmpty(sourceValues); - Contracts.AssertNonEmpty(destination); - Contracts.AssertNonEmpty(rgposSrc); - Contracts.Assert(stride > 0); - Contracts.Assert(matrix.Length == destination.Length * sourceValues.Length); + Contracts.Assert(matrix.Size == destination.Size * sourceValues.Size); if (iposMin >= iposLimit) { - destination.Clear(); + destination.ZeroItems(); return; } @@ -99,24 +108,24 @@ public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan left, ReadOnlySpan } } + public static void ZeroMatrixItems(AlignedArray destination, int ccol, int cfltRow, int[] indices) + { + Contracts.Assert(ccol > 0); + Contracts.Assert(ccol <= cfltRow); + + if (ccol 
== cfltRow) + { + ZeroItemsU(destination, destination.Size, indices, indices.Length); + } + else + { + ZeroMatrixItemsCore(destination, destination.Size, ccol, cfltRow, indices, indices.Length); + } + } + + private static unsafe void ZeroItemsU(AlignedArray destination, int c, int[] indices, int cindices) + { + fixed (float* pdst = &destination.Items[0]) + fixed (int* pidx = &indices[0]) + { + for (int i = 0; i < cindices; ++i) + { + int index = pidx[i]; + Contracts.Assert(index >= 0); + Contracts.Assert(index < c); + pdst[index] = 0; + } + } + } + + private static unsafe void ZeroMatrixItemsCore(AlignedArray destination, int c, int ccol, int cfltRow, int[] indices, int cindices) + { + fixed (float* pdst = &destination.Items[0]) + fixed (int* pidx = &indices[0]) + { + int ivLogMin = 0; + int ivLogLim = ccol; + int ivPhyMin = 0; + + for (int i = 0; i < cindices; ++i) + { + int index = pidx[i]; + Contracts.Assert(index >= 0); + Contracts.Assert(index < c); + + int col = index - ivLogMin; + if ((uint)col >= (uint)ccol) + { + Contracts.Assert(ivLogMin > index || index >= ivLogLim); + + int row = index / ccol; + ivLogMin = row * ccol; + ivLogLim = ivLogMin + ccol; + ivPhyMin = row * cfltRow; + + Contracts.Assert(index >= ivLogMin); + Contracts.Assert(index < ivLogLim); + col = index - ivLogMin; + } + + pdst[ivPhyMin + col] = 0; + } + } + } + public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan source, float threshold, Span v, Span w) { Contracts.AssertNonEmpty(source); diff --git a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs index 400b70d651..5ecbc62be1 100644 --- a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs +++ b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs @@ -11,10 +11,17 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath [BestFriend] internal static partial class CpuMathUtils { - public static void MatrixTimesSource(bool transpose, ReadOnlySpan matrix, 
ReadOnlySpan source, Span destination, int stride) => SseUtils.MatTimesSrc(transpose, matrix, source, destination, stride); + // The count of bytes in Vector128, corresponding to _cbAlign in AlignedArray + private const int Vector128Alignment = 16; - public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan rgposSrc, ReadOnlySpan sourceValues, - int posMin, int iposMin, int iposLimit, Span destination, int stride) => SseUtils.MatTimesSrc(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride); + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + public static int GetVectorAlignment() + => Vector128Alignment; + + public static void MatrixTimesSource(bool transpose, AlignedArray matrix, AlignedArray source, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(transpose, matrix, source, destination, stride); + + public static void MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgposSrc, AlignedArray sourceValues, + int posMin, int iposMin, int iposLimit, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride); public static void Add(float value, Span destination) => SseUtils.Add(value, destination); @@ -56,6 +63,8 @@ public static void MatrixTimesSource(ReadOnlySpan matrix, ReadOnlySpan left, ReadOnlySpan right, int count) => SseUtils.L2DistSquared(left, right, count); + public static void ZeroMatrixItems(AlignedArray destination, int ccol, int cfltRow, int[] indices) => SseUtils.ZeroMatrixItems(destination, ccol, cfltRow, indices); + public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan source, float threshold, Span v, Span w) => SseUtils.SdcaL1UpdateDense(primalUpdate, count, source, threshold, v, w); diff --git a/src/Microsoft.ML.CpuMath/Sse.cs b/src/Microsoft.ML.CpuMath/Sse.cs index d57b400e9c..8b1c4da70f 100644 --- a/src/Microsoft.ML.CpuMath/Sse.cs +++ b/src/Microsoft.ML.CpuMath/Sse.cs @@ 
-15,53 +15,74 @@ namespace Microsoft.ML.Runtime.Internal.CpuMath [BestFriend] internal static class SseUtils { - public static void MatTimesSrc(bool tran, ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crun) + public const int CbAlign = 16; + + private static bool Compat(AlignedArray a) + { + Contracts.AssertValue(a); + Contracts.Assert(a.Size > 0); + return a.CbAlign == CbAlign; + } + + private static unsafe float* Ptr(AlignedArray a, float* p) + { + Contracts.AssertValue(a); + float* q = p + a.GetBase((long)p); + Contracts.Assert(((long)q & (CbAlign - 1)) == 0); + return q; + } + + public static void MatTimesSrc(bool tran, AlignedArray mat, AlignedArray src, AlignedArray dst, int crun) { - Contracts.Assert(mat.Length == dst.Length * src.Length); + Contracts.Assert(Compat(mat)); + Contracts.Assert(Compat(src)); + Contracts.Assert(Compat(dst)); + Contracts.Assert(mat.Size == dst.Size * src.Size); unsafe { - fixed (float* pmat = &mat[0]) - fixed (float* psrc = &src[0]) - fixed (float* pdst = &dst[0]) + fixed (float* pmat = &mat.Items[0]) + fixed (float* psrc = &src.Items[0]) + fixed (float* pdst = &dst.Items[0]) { if (!tran) { - Contracts.Assert(0 <= crun && crun <= dst.Length); - Thunk.MatMul(pmat, psrc, pdst, crun, src.Length); + Contracts.Assert(0 <= crun && crun <= dst.Size); + Thunk.MatMul(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), crun, src.Size); } else { - Contracts.Assert(0 <= crun && crun <= src.Length); - Thunk.MatMulTran(pmat, psrc, pdst, dst.Length, crun); + Contracts.Assert(0 <= crun && crun <= src.Size); + Thunk.MatMulTran(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), dst.Size, crun); } } } } - public static void MatTimesSrc(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan srcValues, - int posMin, int iposMin, int iposLim, Span dst, int crun) + public static void MatTimesSrc(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray srcValues, + int posMin, int iposMin, int iposLim, AlignedArray dst, int crun) { + 
Contracts.Assert(Compat(mat)); + Contracts.Assert(Compat(srcValues)); + Contracts.Assert(Compat(dst)); Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); - Contracts.Assert(mat.Length == dst.Length * srcValues.Length); + Contracts.Assert(mat.Size == dst.Size * srcValues.Size); if (iposMin >= iposLim) { - dst.Clear(); + dst.ZeroItems(); return; } - Contracts.AssertNonEmpty(rgposSrc); - unsafe { - fixed (float* pdst = &dst[0]) - fixed (float* pmat = &mat[0]) - fixed (float* psrc = &srcValues[0]) + fixed (float* pdst = &dst.Items[0]) + fixed (float* pmat = &mat.Items[0]) + fixed (float* psrc = &srcValues.Items[0]) fixed (int* ppossrc = &rgposSrc[0]) { - Contracts.Assert(0 <= crun && crun <= dst.Length); - Thunk.MatMulP(pmat, ppossrc, psrc, posMin, iposMin, iposLim, pdst, crun, srcValues.Length); + Contracts.Assert(0 <= crun && crun <= dst.Size); + Thunk.MatMulP(Ptr(mat, pmat), ppossrc, Ptr(srcValues, psrc), posMin, iposMin, iposLim, Ptr(dst, pdst), crun, srcValues.Size); } } } @@ -345,6 +366,23 @@ public static float L2DistSquared(ReadOnlySpan a, ReadOnlySpan b, } } + public static void ZeroMatrixItems(AlignedArray dst, int ccol, int cfltRow, int[] indices) + { + Contracts.Assert(0 < ccol && ccol <= cfltRow); + + unsafe + { + fixed (float* pdst = &dst.Items[0]) + fixed (int* pi = &indices[0]) + { + if (ccol == cfltRow) + Thunk.ZeroItemsU(Ptr(dst, pdst), dst.Size, pi, indices.Length); + else + Thunk.ZeroMatrixItemsCore(Ptr(dst, pdst), dst.Size, ccol, cfltRow, pi, indices.Length); + } + } + } + public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan src, float threshold, Span v, Span w) { Contracts.AssertNonEmpty(src); diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index 98394f5d7d..b83fd6bbc6 100644 --- a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -116,6 +116,11 @@ internal static Vector128 GetNewDst128(in 
Vector128 xDst1, in Vect } // Multiply matrix times vector into vector. + public static unsafe void MatMul(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) + { + MatMul(mat.Items, src.Items, dst.Items, crow, ccol); + } + public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -128,9 +133,8 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; float* pMatCurrent = pmat; - int numRows = crow; - while (pDstCurrent + 4 <= pDstEnd) + while (pDstCurrent < pDstEnd) { Vector128 res0 = Sse.SetZeroVector128(); Vector128 res1 = Sse.SetZeroVector128(); @@ -144,11 +148,10 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr int misalignment = (int)(address % 16); int remainder = 0; - if ((misalignment & 3) != 0 || (ccol % 4 != 0)) + if ((misalignment & 3) != 0) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - remainder = length % 4; - while (pSrcCurrent + 4 <= pSrcEnd) + while (pSrcCurrent < pSrcEnd) { Vector128 vector = Sse.LoadVector128(pSrcCurrent); @@ -230,32 +233,32 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr // unaligned loads where we mask the input each time. 
remainder = length; } - } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); + pMatCurrent -= (4 - remainder); + pSrcCurrent -= (4 - remainder); - Vector128 mask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); + Vector128 mask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); - Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); + float* pMatTemp = pMatCurrent; + Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); + Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); - res0 = Sse.Add(res0, Sse.Multiply(x01, vector)); - res1 = Sse.Add(res1, Sse.Multiply(x11, vector)); - res2 = Sse.Add(res2, Sse.Multiply(x21, vector)); - res3 = Sse.Add(res3, Sse.Multiply(x31, vector)); + res0 = Sse.Add(res0, Sse.Multiply(x01, vector)); + res1 = Sse.Add(res1, Sse.Multiply(x11, vector)); + res2 = Sse.Add(res2, Sse.Multiply(x21, vector)); + res3 = Sse.Add(res3, Sse.Multiply(x31, vector)); - pMatCurrent += 4; - pSrcCurrent += 
4; + pMatCurrent += 4; + pSrcCurrent += 4; + } } // Add up the entries of each, with the 4 results in res0 @@ -266,56 +269,17 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr Sse.Store(pDstCurrent, res0); pDstCurrent += 4; pMatCurrent += 3 * ccol; - numRows -= 4; - } - - // falling through the case statements - switch (numRows) - { - case 3: - *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); - goto case 2; - case 2: - *(pDstCurrent + 1) = RowMultiply(pMatCurrent + ccol, psrc, pSrcEnd, ccol); - goto case 1; - case 1: - *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); - break; } } } - private static unsafe float RowMultiply(float* pMatCurrent, float* pSrcCurrent, float* pSrcEnd, int ccol) + // Partial sparse source vector. + public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, + int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) { - Vector128 res0 = Sse.SetZeroVector128(); - int remainder = ccol % 4; - while (pSrcCurrent + 4 <= pSrcEnd) - { - Vector128 vector = Sse.LoadVector128(pSrcCurrent); - - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp)); - res0 = Sse.Add(res0, x01); - - pSrcCurrent += 4; - pMatCurrent += 4; - } - - // Add up the entries of each, with the 4 results in res0 - res0 = VectorSum128(in res0); - float sum = Sse.ConvertToSingle(res0); - - // falling through the case statements - switch (remainder) - { - case 3: sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); goto case 2; - case 2: sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); goto case 1; - case 1: sum += *(pSrcCurrent) * *(pMatCurrent); break; - } - return sum; + MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); } - // Partial sparse source vector. 
public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { @@ -472,6 +436,11 @@ Vector128 SparseMultiplicationAcrossRow() } } + public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) + { + MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); + } + public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) { fixed (float* psrc = &MemoryMarshal.GetReference(src)) @@ -484,14 +453,16 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x01 = Sse.LoadVector128(pSrcCurrent); + // Replicate each 32-bit slot of x01 (ABCD) into its own register. + Vector128 x11 = Sse.Shuffle(x01, x01, 0x55); // B + Vector128 x21 = Sse.Shuffle(x01, x01, 0xAA); // C + Vector128 x31 = Sse.Shuffle(x01, x01, 0xFF); // D x01 = Sse.Shuffle(x01, x01, 0x00); // A int length = crow; @@ -500,14 +471,20 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); + Vector128 x12 = Sse.Multiply(x11, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x22 = Sse.Multiply(x21, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x32 = Sse.Multiply(x31, Sse.LoadVector128(pMatTemp += crow)); + + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); Sse.Store(pDstCurrent, x02); pDstCurrent += 4; @@ -516,6 +493,7 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan x02 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp)); + Vector128 x12 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x22 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x32 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, 
x32); + + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); + x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); Sse.Store(pDstCurrent, x02); pMatCurrent += misalignment; @@ -549,7 +538,15 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadAlignedVector128(pMatTemp)); + Vector128 x12 = Sse.Multiply(x11, Sse.LoadAlignedVector128(pMatTemp += crow)); + Vector128 x22 = Sse.Multiply(x21, Sse.LoadAlignedVector128(pMatTemp += crow)); + Vector128 x32 = Sse.Multiply(x31, Sse.LoadAlignedVector128(pMatTemp += crow)); + + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); Sse.Store(pDstCurrent, x02); pDstCurrent += 4; @@ -563,36 +560,47 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); + Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - x02 = Sse.Multiply(x01, x02); + float* pMatTemp = pMatCurrent; + Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); + Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, x32); 
- Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); + + Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); + Vector128 x3 = Sse.LoadVector128(pDstCurrent); + x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); + + Sse.Store(pDstCurrent, x02); + pDstCurrent += 4; + pMatCurrent += 4; + } } - numCol -= 1; - pSrcCurrent += 1; + pMatCurrent += 3 * crow; + pSrcCurrent += 4; } // We do 4-way unrolling - while (pSrcCurrent + 4 <= pSrcEnd) + while (pSrcCurrent < pSrcEnd) { Vector128 x01 = Sse.LoadVector128(pSrcCurrent); // Replicate each 32-bit slot of x01 (ABCD) into its own register. @@ -607,10 +615,9 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); @@ -631,6 +638,7 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); + x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); @@ -693,84 +701,43 @@ public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); + if (remainder != 0) + { + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); + Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + float* pMatTemp = 
pMatCurrent; + Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); + Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); + Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, x32); - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); + Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); + Vector128 x3 = Sse.LoadVector128(pDstCurrent); + x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); - x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; + x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); + Sse.Store(pDstCurrent, x02); + pDstCurrent += 4; + pMatCurrent += 4; + } } pMatCurrent += 3 * crow; pSrcCurrent += 4; - numCol -= 4; } - - // falling through the case statements - switch (numCol) - { - case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); goto case 2; - case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); goto case 1; - case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; - } - } - } - - private static unsafe void ColumnMultiply(float* pMatCurrent, float* pSrcCurrent, float* pdst, float* pDstEnd, int crow) - { - Vector128 x01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each 
slot of h01 (ABCD) into its own register. - x01 = Sse.Shuffle(x01, x01, 0x00); // A - int remainder = crow % 4; - float* pDstCurrent = pdst; - - while (pDstCurrent + 4 <= pDstEnd) - { - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); - x02 = Sse.Add(x02, Sse.LoadVector128(pDstCurrent)); - - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - - // falling through the case statements - switch (remainder) - { - case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); goto case 2; - case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); goto case 1; - case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; } - return; } // dst[i] += scale diff --git a/src/Microsoft.ML.CpuMath/Thunk.cs b/src/Microsoft.ML.CpuMath/Thunk.cs index 9505db8766..8ff725b54a 100644 --- a/src/Microsoft.ML.CpuMath/Thunk.cs +++ b/src/Microsoft.ML.CpuMath/Thunk.cs @@ -86,6 +86,12 @@ public static extern void MatMulP(/*const*/ float* pmat, /*const*/ int* pposSrc, [DllImport(NativePath), SuppressUnmanagedCodeSecurity] public static extern float Dist2(/*const*/ float* px, /*const*/ float* py, int c); + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] + public static extern void ZeroItemsU(float* pd, int c, /*const*/ int* pindices, int cindices); + + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] + public static extern void ZeroMatrixItemsCore(float* pd, int c, int ccol, int cfltRow, /*const*/ int* pindices, int cindices); + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] public static extern void SdcaL1UpdateU(float primalUpdate, /*const*/ float* ps, float threshold, float* pd1, float* pd2, int c); diff --git 
a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs index 88cbc536dd..a473ccec29 100644 --- a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs +++ b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs @@ -174,8 +174,8 @@ public ModelInfo Info private Single[] _alpha; private Single[] _state; private readonly FixedSizeQueue _buffer; - private float[] _x; - private float[] _xSmooth; + private CpuAlignedVector _x; + private CpuAlignedVector _xSmooth; private int _windowSize; private readonly int _seriesLength; private readonly RankSelectionMethod _rankSelectionMethod; @@ -188,14 +188,14 @@ public ModelInfo Info private readonly IHost _host; - private float[] _wTrans; + private CpuAlignedMatrixRow _wTrans; private Single _observationNoiseVariance; private Single _observationNoiseMean; private Single _autoregressionNoiseVariance; private Single _autoregressionNoiseMean; private int _rank; - private float[] _y; + private CpuAlignedVector _y; private Single _nextPrediction; /// @@ -290,8 +290,8 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, int trainSi _alpha = new Single[windowSize - 1]; _state = new Single[windowSize - 1]; - _x = new float[windowSize]; - _xSmooth = new float[windowSize]; + _x = new CpuAlignedVector(windowSize, SseUtils.CbAlign); + _xSmooth = new CpuAlignedVector(windowSize, SseUtils.CbAlign); ShouldComputeForecastIntervals = shouldComputeForecastIntervals; _observationNoiseVariance = 0; @@ -345,14 +345,14 @@ private AdaptiveSingularSpectrumSequenceModeler(AdaptiveSingularSpectrumSequence _state = new Single[_windowSize - 1]; Array.Copy(model._state, _state, _windowSize - 1); - _x = new float[_windowSize]; - _xSmooth = new float[_windowSize]; + _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); if (model._wTrans != null) { - 
_y = new float[_rank]; - _wTrans = new float[_rank * _windowSize]; - Array.Copy(model._wTrans, _wTrans, _rank * _windowSize); + _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _wTrans.CopyFrom(model._wTrans); } } @@ -452,16 +452,18 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, ModelLoadCo { var tempArray = ctx.Reader.ReadFloatArray(); _host.CheckDecode(Utils.Size(tempArray) == _rank * _windowSize); - _wTrans = new float[_rank * _windowSize]; - Array.Copy(tempArray, _wTrans, tempArray.Length); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + int i = 0; + _wTrans.CopyFrom(tempArray, ref i); tempArray = ctx.Reader.ReadFloatArray(); - _y = new float[_rank]; - Array.Copy(tempArray, _y, tempArray.Length); + i = 0; + _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _y.CopyFrom(tempArray, ref i); } _buffer = TimeSeriesUtils.DeserializeFixedSizeQueueSingle(ctx.Reader, _host); - _x = new float[_windowSize]; - _xSmooth = new float[_windowSize]; + _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); } public override void Save(ModelSaveContext ctx) @@ -525,11 +527,14 @@ public override void Save(ModelSaveContext ctx) if (_wTrans != null) { + // REVIEW: this may not be the most efficient way for serializing an aligned matrix. 
var tempArray = new Single[_rank * _windowSize]; - Array.Copy(_wTrans, tempArray, _wTrans.Length); + int iv = 0; + _wTrans.CopyTo(tempArray, ref iv); ctx.Writer.WriteSingleArray(tempArray); tempArray = new float[_rank]; - Array.Copy(_y, tempArray, tempArray.Length); + iv = 0; + _y.CopyTo(tempArray, ref iv); ctx.Writer.WriteSingleArray(tempArray); } @@ -1125,14 +1130,15 @@ internal override void Consume(ref Single input, bool updateModel = false) if (_wTrans == null) { - _y = new float[_rank]; - _wTrans = new float[_rank * _windowSize]; + _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); Single[] vecs = new Single[_rank * _windowSize]; for (i = 0; i < _rank; ++i) vecs[(_windowSize + 1) * i] = 1; - Array.Copy(_wTrans, vecs, _rank * _windowSize); + i = 0; + _wTrans.CopyFrom(vecs, ref i); } // Forming vector x @@ -1151,10 +1157,10 @@ internal override void Consume(ref Single input, bool updateModel = false) _x[_windowSize - 1] = input; // Computing y: Eq. 
(11) in https://hal-institut-mines-telecom.archives-ouvertes.fr/hal-00479772/file/twocolumns.pdf - CpuMathUtils.MatrixTimesSource(transpose: false, _wTrans, _x, _y, _y.Length); + CpuAligenedMathUtils.MatTimesSrc(_wTrans, _x, _y); // Updating the state vector - CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); + CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); _nextPrediction = _autoregressionNoiseMean + _observationNoiseMean; for (i = 0; i < _windowSize - 2; ++i) @@ -1305,8 +1311,8 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) _maxRank = _windowSize / 2; _alpha = new Single[_windowSize - 1]; _state = new Single[_windowSize - 1]; - _x = new float[_windowSize]; - _xSmooth = new float[_windowSize]; + _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); TrainCore(dataArray, originalSeriesLength); return; @@ -1343,11 +1349,12 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) } // Setting the the y vector - _y = new float[_rank]; + _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); // Setting the weight matrix - _wTrans = new float[_rank * _windowSize]; - Array.Copy(leftSingularVecs, _wTrans, _wTrans.Length); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + i = 0; + _wTrans.CopyFrom(leftSingularVecs, ref i); // Setting alpha Single nu = 0; @@ -1357,7 +1364,7 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) nu += _y[i] * _y[i]; } - CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); + CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); for (i = 0; i < _windowSize - 1; ++i) _alpha[i] = _xSmooth[i] / (1 - nu); @@ -1402,8 +1409,8 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) _x[i - originalSeriesLength + _windowSize] = dataArray[i]; } - CpuMathUtils.MatrixTimesSource(transpose: false, _wTrans, _x, 
_y, _y.Length); - CpuMathUtils.MatrixTimesSource(transpose: true, _wTrans, _y, _xSmooth, _y.Length); + CpuAligenedMathUtils.MatTimesSrc(_wTrans, _x, _y); + CpuAligenedMathUtils.MatTranTimesSrc(_wTrans, _y, _xSmooth); for (i = 1; i < _windowSize; ++i) { diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index 8f4e2b6221..f1b7d49895 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -95,10 +95,10 @@ private sealed class TransformInfo public readonly int SrcDim; // the matrix containing the random fourier vectors - public readonly float[] RndFourierVectors; + public readonly AlignedArray RndFourierVectors; // the random rotations - public readonly float[] RotationTerms; + public readonly AlignedArray RotationTerms; private readonly IFourierDistributionSampler _matrixGenerator; private readonly bool _useSin; @@ -120,10 +120,10 @@ public TransformInfo(IHost host, ColumnInfo column, int d, float avgDist) var generator = column.Generator; _matrixGenerator = generator.CreateComponent(host, avgDist); - int roundedUpD = RoundToMultipleOf4(NewDim); - int roundedUpNumFeatures = RoundToMultipleOf4(SrcDim); - RndFourierVectors = new float[roundedUpD * roundedUpNumFeatures]; - RotationTerms = _useSin ? null : new float[roundedUpD]; + int roundedUpD = RoundUp(NewDim, _cfltAlign); + int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign); + RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment()); + RotationTerms = _useSin ? 
null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment()); InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD); } @@ -154,10 +154,10 @@ public TransformInfo(IHostEnvironment env, ModelLoadContext ctx, string director ctx.LoadModelOrNull(env, out _matrixGenerator, directoryName)); // initialize the transform matrix - int roundedUpD = RoundToMultipleOf4(NewDim); - int roundedUpNumFeatures = RoundToMultipleOf4(SrcDim); - RndFourierVectors = new float[roundedUpD * roundedUpNumFeatures]; - RotationTerms = _useSin ? null : new float[roundedUpD]; + int roundedUpD = RoundUp(NewDim, _cfltAlign); + int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign); + RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment()); + RotationTerms = _useSin ? null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment()); InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD); } @@ -225,6 +225,8 @@ private static VersionInfo GetVersionInfo() private readonly TransformInfo[] _transformInfos; + private static readonly int _cfltAlign = CpuMathUtils.GetVectorAlignment() / sizeof(float); + private static string TestColumnType(ColumnType type) { if (type.ItemType == NumberType.Float && type.IsKnownSizeVector) @@ -293,11 +295,16 @@ public RandomFourierFeaturizingTransformer(IHostEnvironment env, IDataView input } } - private static int RoundToMultipleOf4(int number) + // Round cflt up to a multiple of cfltAlign. + private static int RoundUp(int cflt, int cfltAlign) { - Contracts.Assert(0 < number); - int multipleOf4 = (number + 3) / 4; - return multipleOf4 * 4; + Contracts.Assert(0 < cflt); + // cfltAlign should be a power of two. + Contracts.Assert(0 < cfltAlign && (cfltAlign & (cfltAlign - 1)) == 0); + + // Determine the number of "blobs" of size cfltAlign. 
+ int cblob = (cflt + cfltAlign - 1) / cfltAlign; + return cblob * cfltAlign; } private float[] GetAvgDistances(ColumnInfo[] columns, IDataView input) @@ -548,14 +555,14 @@ private ValueGetter> GetterFromVectorType(Row input, int iinfo) var getSrc = input.GetGetter>(_srcCols[iinfo]); var src = default(VBuffer); - var features = new float[RoundToMultipleOf4(_srcTypes[iinfo].ValueCount)]; - var product = new float[RoundToMultipleOf4(_parent._transformInfos[iinfo].NewDim)]; + var featuresAligned = new AlignedArray(RoundUp(_srcTypes[iinfo].ValueCount, _cfltAlign), CpuMathUtils.GetVectorAlignment()); + var productAligned = new AlignedArray(RoundUp(_parent._transformInfos[iinfo].NewDim, _cfltAlign), CpuMathUtils.GetVectorAlignment()); return (ref VBuffer dst) => { getSrc(ref src); - TransformFeatures(in src, ref dst, _parent._transformInfos[iinfo], features, product); + TransformFeatures(in src, ref dst, _parent._transformInfos[iinfo], featuresAligned, productAligned); }; } @@ -565,8 +572,8 @@ private ValueGetter> GetterFromFloatType(Row input, int iinfo) var getSrc = input.GetGetter(_srcCols[iinfo]); var src = default(float); - var featuresAligned = new float[4]; - var productAligned = new float[RoundToMultipleOf4(_parent._transformInfos[iinfo].NewDim)]; + var featuresAligned = new AlignedArray(RoundUp(1, _cfltAlign), CpuMathUtils.GetVectorAlignment()); + var productAligned = new AlignedArray(RoundUp(_parent._transformInfos[iinfo].NewDim, _cfltAlign), CpuMathUtils.GetVectorAlignment()); var oneDimensionalVector = new VBuffer(1, new float[] { 0 }); @@ -580,7 +587,7 @@ private ValueGetter> GetterFromFloatType(Row input, int iinfo) } private void TransformFeatures(in VBuffer src, ref VBuffer dst, TransformInfo transformInfo, - float[] features, float[] product) + AlignedArray featuresAligned, AlignedArray productAligned) { Host.Check(src.Length == transformInfo.SrcDim, "column does not have the expected dimensionality."); @@ -599,9 +606,9 @@ private void 
TransformFeatures(in VBuffer src, ref VBuffer dst, Tr if (src.IsDense) { - src.GetValues().CopyTo(features); - CpuMathUtils.MatrixTimesSource(transpose: false, transformInfo.RndFourierVectors, features, product, - RoundToMultipleOf4(transformInfo.NewDim)); + featuresAligned.CopyFrom(src.GetValues()); + CpuMathUtils.MatrixTimesSource(false, transformInfo.RndFourierVectors, featuresAligned, productAligned, + transformInfo.NewDim); } else { @@ -609,21 +616,15 @@ private void TransformFeatures(in VBuffer src, ref VBuffer dst, Tr // no need to zero them out. var srcValues = src.GetValues(); var srcIndices = src.GetIndices(); - - for (int i = 0; i < srcValues.Length; i++) - { - int iv = srcIndices[i]; - features[iv] = srcValues[i]; - } - - CpuMathUtils.MatrixTimesSource(transformInfo.RndFourierVectors, srcIndices, features, 0, 0, - srcValues.Length, product, RoundToMultipleOf4(transformInfo.NewDim)); + featuresAligned.CopyFrom(srcIndices, srcValues, 0, 0, srcValues.Length, zeroItems: false); + CpuMathUtils.MatrixTimesSource(transformInfo.RndFourierVectors, srcIndices, featuresAligned, 0, 0, + srcValues.Length, productAligned, transformInfo.NewDim); } var dstEditor = VBufferEditor.Create(ref dst, newDstLength); for (int i = 0; i < transformInfo.NewDim; i++) { - var dotProduct = product[i]; + var dotProduct = productAligned[i]; if (transformInfo.RotationTerms != null) dstEditor.Values[i] = (float)MathUtils.Cos(dotProduct + transformInfo.RotationTerms[i]) * scale; else diff --git a/src/Native/CpuMathNative/Sse.cpp b/src/Native/CpuMathNative/Sse.cpp index 2240d630eb..607af332d1 100644 --- a/src/Native/CpuMathNative/Sse.cpp +++ b/src/Native/CpuMathNative/Sse.cpp @@ -57,69 +57,15 @@ const unsigned int TrailingAlignmentMask[16] = 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, }; -float RowMultiply(const float* pMatCurrent, const float* pSrcCurrent, const float* pSrcEnd, int ccol) -{ - __m128 res0 = _mm_setzero_ps(); - int remainder = ccol % 4; - - while (pSrcCurrent + 4 <= 
pSrcEnd) - { - __m128 vector = _mm_loadu_ps(pSrcCurrent); - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp)); - res0 = _mm_add_ps(res0, x01); - - pSrcCurrent += 4; - pMatCurrent += 4; - } - - res0 = _mm_hadd_ps(res0, res0); - res0 = _mm_hadd_ps(res0, res0); - - float sum = _mm_cvtss_f32(res0); - - // falling through the case statements - switch (remainder) - { - case 3: - sum += *(pSrcCurrent + 2) * *(pMatCurrent + 2); - case 2: - sum += *(pSrcCurrent + 1) * *(pMatCurrent + 1); - case 1: - sum += *(pSrcCurrent) * *(pMatCurrent); - } - return sum; -} - // Multiply matrix times vector into vector. EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { - if (ccol < 4) - { - for (int i = 0 ; i < crow; i++) - { - float dotProduct = 0; - switch (ccol) - { - case 3: - dotProduct += pmat[i * ccol + 2] * psrc[2]; - case 2: - dotProduct += pmat[i * ccol + 1] * psrc[1]; - case 1: - dotProduct += pmat[i * ccol + 0] * psrc[0]; - } - pdst[i] = dotProduct; - } - return; - } - const float * pSrcEnd = psrc + ccol; const float * pDstEnd = pdst + crow; float* pDstCurrent = pdst; const float* pMatCurrent = pmat; - int numRows = crow; - while (pDstCurrent + 4 <= pDstEnd) + while (pDstCurrent < pDstEnd) { __m128 res0 = _mm_setzero_ps(); __m128 res1 = res0; @@ -133,11 +79,10 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0 || (ccol % 4 != 0)) + if ((misalignment & 3) != 0) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - remainder = length % 4; - while (pSrcCurrent + 4 <= pSrcEnd) + while (pSrcCurrent < pSrcEnd) { __m128 vector = _mm_loadu_ps(pSrcCurrent); @@ -216,32 +161,32 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout // unaligned loads where we mask the input each time. 
remainder = length; } - } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); + pMatCurrent -= (4 - remainder); + pSrcCurrent -= (4 - remainder); - __m128 mask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + __m128 mask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); - __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); + const float* pMatTemp = pMatCurrent; + __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); + __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); + __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); - res0 = _mm_add_ps(res0, _mm_mul_ps(x01, vector)); - res1 = _mm_add_ps(res1, _mm_mul_ps(x11, vector)); - res2 = _mm_add_ps(res2, _mm_mul_ps(x21, vector)); - res3 = _mm_add_ps(res3, _mm_mul_ps(x31, vector)); + res0 = _mm_add_ps(res0, _mm_mul_ps(x01, vector)); + res1 = _mm_add_ps(res1, _mm_mul_ps(x11, vector)); + res2 = _mm_add_ps(res2, _mm_mul_ps(x21, vector)); + res3 = _mm_add_ps(res3, _mm_mul_ps(x31, vector)); - pMatCurrent += 4; - pSrcCurrent += 4; + pMatCurrent += 4; + pSrcCurrent += 4; + } 
} // Add up the entries of each, with the 4 results in res0 @@ -253,19 +198,6 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout pDstCurrent += 4; pMatCurrent += 3 * ccol; - numRows -= 4; - } - - // falling through the case statements - switch(numRows) - { - case 3: - *(pDstCurrent + 2) = RowMultiply(pMatCurrent + 2 * ccol, psrc, pSrcEnd, ccol); - case 2: - *(pDstCurrent + 1) = RowMultiply(pMatCurrent + 1 * ccol, psrc, pSrcEnd, ccol); - case 1: - *pDstCurrent = RowMultiply(pMatCurrent, psrc, pSrcEnd, ccol); - break; } } @@ -424,62 +356,21 @@ EXPORT_API(void) MatMulP(_In_ const float * pmat, _In_ const int * pposSrc, _In_ } } -void ColumnMultiply(const float* pMatCurrent, const float* pSrcCurrent, float* pdst, const float* pDstEnd, int crow) -{ - __m128 x01 = _mm_loadu_ps(pSrcCurrent); - x01 = _mm_shuffle_ps(x01, x01, 0x00); - float* pDstCurrent = pdst; - int remainder = crow % 4; - - while (pDstCurrent + 4 <= pDstEnd) - { - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); - x02 = _mm_add_ps(x02, _mm_loadu_ps(pDstCurrent)); - - _mm_storeu_ps(pDstCurrent, x02); - - pDstCurrent += 4; - pMatCurrent += 4; - } - - // falling through the case statements - switch (remainder) - { - case 3: *(pDstCurrent + 2) += *(pSrcCurrent) * *(pMatCurrent + 2); - case 2: *(pDstCurrent + 1) += *(pSrcCurrent) * *(pMatCurrent + 1); - case 1: *pDstCurrent += *(pSrcCurrent) * *(pMatCurrent); break; - } - return; -} - EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { - if (crow < 4) - { - for (int i = 0 ; i < crow; i++) - { - float dotProduct = 0; - for (int j = 0; j < ccol; j++) - { - dotProduct += pmat[j * crow + i] * psrc[j]; - } - pdst[i] = dotProduct; - } - return; - } - const float * pSrcEnd = psrc + ccol; const float * pDstEnd = pdst + crow; const float* pMatCurrent = pmat; const float* pSrcCurrent = psrc; - int remainder = 0; - int numCol 
= ccol; if (pSrcCurrent < pSrcEnd) { __m128 x01 = _mm_loadu_ps(pSrcCurrent); + // Replicate each slot of x01 into its own register. + __m128 x11 = _mm_shuffle_ps(x01, x01, 0x55); + __m128 x21 = _mm_shuffle_ps(x01, x01, 0xAA); + __m128 x31 = _mm_shuffle_ps(x01, x01, 0xFF); x01 = _mm_shuffle_ps(x01, x01, 0x00); int length = crow; @@ -489,14 +380,20 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0 || (crow % 4 != 0)) + if ((misalignment & 3) != 0) { // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - remainder = crow % 4; - while (pDstCurrent + 4 <= pDstEnd) + while (pDstCurrent < pDstEnd) { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); + __m128 x12 = _mm_mul_ps(x11, _mm_loadu_ps(pMatTemp += crow)); + __m128 x22 = _mm_mul_ps(x21, _mm_loadu_ps(pMatTemp += crow)); + __m128 x32 = _mm_mul_ps(x31, _mm_loadu_ps(pMatTemp += crow)); + + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); _mm_storeu_ps(pDstCurrent, x02); pDstCurrent += 4; @@ -505,6 +402,7 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I } else { + int remainder = 0; if (misalignment != 0) { // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then @@ -517,11 +415,22 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I // We only align pMat since it has significantly more reads. 
const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp)); + __m128 x12 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x22 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x32 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); + + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); + x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); _mm_storeu_ps(pDstCurrent, x02); pMatCurrent += misalignment; @@ -538,6 +447,13 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_load_ps(pMatTemp)); + __m128 x12 = _mm_mul_ps(x11, _mm_load_ps(pMatTemp += crow)); + __m128 x22 = _mm_mul_ps(x21, _mm_load_ps(pMatTemp += crow)); + __m128 x32 = _mm_mul_ps(x31, _mm_load_ps(pMatTemp += crow)); + + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); _mm_storeu_ps(pDstCurrent, x02); @@ -552,36 +468,47 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I // unaligned loads where we mask the input each time. 
remainder = length; } - } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed + if (remainder != 0) + { + // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next + // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + + const float* pMatTemp = pMatCurrent; + __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); + __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - x02 = _mm_mul_ps(x01, x02); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; + __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); + __m128 x3 = _mm_loadu_ps(pDstCurrent); + x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); + + _mm_storeu_ps(pDstCurrent, x02); + pMatCurrent += 4; + 
pDstCurrent += 4; + } } - numCol -= 1; - pSrcCurrent += 1; + pMatCurrent += 3 * crow; + pSrcCurrent += 4; } - - while (pSrcCurrent + 4 <= pSrcEnd) + + while (pSrcCurrent < pSrcEnd) { __m128 x01 = _mm_loadu_ps(pSrcCurrent); // Replicate each slot of x01 into its own register. @@ -597,10 +524,9 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I uintptr_t misalignment = address % 16; int remainder = 0; - if ((misalignment & 3) != 0 || (crow % 4 != 0)) + if ((misalignment & 3) != 0) { - remainder = length % 4; - while (pDstCurrent + 4 <= pDstEnd) + while (pDstCurrent < pDstEnd) { const float* pMatTemp = pMatCurrent; __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); @@ -621,6 +547,7 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I } else { + int remainder = 0; if (misalignment != 0) { misalignment >>= 2; @@ -646,7 +573,7 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); + x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); _mm_storeu_ps(pDstCurrent, x02); @@ -682,52 +609,43 @@ EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _I { remainder = length; } - } - if (remainder != 0) - { - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); + if (remainder != 0) + { + pMatCurrent -= (4 - remainder); + pDstCurrent -= (4 - remainder); - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); + __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - 
__m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + const float* pMatTemp = pMatCurrent; + __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); + __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); + __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); + __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); + __m128 x3 = _mm_loadu_ps(pDstCurrent); + x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; + x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); + _mm_storeu_ps(pDstCurrent, x02); + pMatCurrent += 4; + pDstCurrent += 4; + } } - numCol -= 4; pMatCurrent += 3 * crow; pSrcCurrent += 4; } - - // falling through the case statements - switch (numCol) - { - case 3: ColumnMultiply(pMatCurrent + 2 * crow, pSrcCurrent + 2, pdst, pDstEnd, crow); - case 2: ColumnMultiply(pMatCurrent + crow, pSrcCurrent + 1, pdst, pDstEnd, crow); - case 1: ColumnMultiply(pMatCurrent, pSrcCurrent, pdst, pDstEnd, crow); break; - } } // pd[i] += a @@ -1320,6 +1238,43 @@ EXPORT_API(float) Dist2(const float * px, 
const float * py, int c) return norm2; } +EXPORT_API(void) ZeroItemsU(_Inout_ float * pd, int c, _In_ const int * pindices, int cindices) +{ + DEBUG_ONLY(c); + for (int i = 0; i < cindices; ++i) + { + int iv = pindices[i]; + assert(0 <= iv && iv < c); + pd[iv] = 0; + } +} + +EXPORT_API(void) ZeroMatrixItemsCore(_Inout_ float * pd, int c, int ccol, int cfltRow, _In_ const int * pindices, int cindices) +{ + DEBUG_ONLY(c); + int ivLogMin = 0; + int ivLogLim = ccol; + int ivPhyMin = 0; + for (int i = 0; i < cindices; ++i) + { + int iv = pindices[i]; + assert(0 <= iv && iv < c); + + int col = iv - ivLogMin; + if ((unsigned int)col >= (unsigned int)ccol) + { + assert(ivLogMin > iv || iv >= ivLogLim); + int row = iv / ccol; + ivLogMin = row * ccol; + ivLogLim = ivLogMin + ccol; + ivPhyMin = row * cfltRow; + assert(ivLogMin <= iv && iv < ivLogLim); + col = iv - ivLogMin; + } + pd[ivPhyMin + col] = 0; + } +} + EXPORT_API(void) SdcaL1UpdateU(float primalUpdate, _In_ const float * ps, float threshold, _Inout_ float *pd1, _Inout_ float * pd2, int c) { const float * psLim = ps + c; diff --git a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs index 5adb641b92..25996ec42c 100644 --- a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs +++ b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs @@ -14,14 +14,14 @@ public class CpuMathUtilsUnitTests { private readonly float[][] _testArrays; private readonly int[] _testIndexArray; - private readonly float[][] _testMatrices; - private readonly float[][] _testSrcVectors; - private readonly float[][] _testDstVectors; + private readonly AlignedArray[] _testMatrices; + private readonly AlignedArray[] _testSrcVectors; + private readonly AlignedArray[] _testDstVectors; + private readonly int _vectorAlignment = CpuMathUtils.GetVectorAlignment(); private readonly FloatEqualityComparer _comparer; private readonly FloatEqualityComparerForMatMul 
_matMulComparer; private const float DefaultScale = 1.7f; - private const int DefaultSeed = 253421; public CpuMathUtilsUnitTests() { @@ -50,19 +50,34 @@ public CpuMathUtilsUnitTests() testMatrix2[i] = i + 1; } - _testMatrices = new float[][] { testMatrix1, testMatrix2 }; + AlignedArray testMatrixAligned1 = new AlignedArray(8 * 8, _vectorAlignment); + AlignedArray testMatrixAligned2 = new AlignedArray(8 * 16, _vectorAlignment); + testMatrixAligned1.CopyFrom(testMatrix1); + testMatrixAligned2.CopyFrom(testMatrix2); + + _testMatrices = new AlignedArray[] { testMatrixAligned1, testMatrixAligned2 }; // Padded source vectors whose dimensions are multiples of 8 float[] testSrcVector1 = new float[8] { 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f }; float[] testSrcVector2 = new float[16] { 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f, 16f }; - _testSrcVectors = new float[][] { testSrcVector1, testSrcVector2 }; + AlignedArray testSrcVectorAligned1 = new AlignedArray(8, _vectorAlignment); + AlignedArray testSrcVectorAligned2 = new AlignedArray(16, _vectorAlignment); + testSrcVectorAligned1.CopyFrom(testSrcVector1); + testSrcVectorAligned2.CopyFrom(testSrcVector2); + + _testSrcVectors = new AlignedArray[] { testSrcVectorAligned1, testSrcVectorAligned2 }; // Padded destination vectors whose dimensions are multiples of 8 float[] testDstVector1 = new float[8] { 0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f }; float[] testDstVector2 = new float[16] { 0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f }; - _testDstVectors = new float[][] { testDstVector1, testDstVector2 }; + AlignedArray testDstVectorAligned1 = new AlignedArray(8, _vectorAlignment); + AlignedArray testDstVectorAligned2 = new AlignedArray(16, _vectorAlignment); + testDstVectorAligned1.CopyFrom(testDstVector1); + testDstVectorAligned2.CopyFrom(testDstVector2); + + _testDstVectors = new AlignedArray[] { testDstVectorAligned1, testDstVectorAligned2 }; } [Theory] @@ -71,125 +86,29 @@ public 
CpuMathUtilsUnitTests() [InlineData(1, 0, 1, new float[] { 204f, 492f, 780f, 1068f, 1356f, 1644f, 1932f, 2220f, 2508f, 2796f, 3084f, 3372f, 3660f, 3948f, 4236f, 4524f })] public void MatMulTest(int matTest, int srcTest, int dstTest, float[] expected) { - float[] mat = _testMatrices[matTest]; - float[] src = _testSrcVectors[srcTest]; - float[] dst = _testDstVectors[dstTest]; + AlignedArray mat = _testMatrices[matTest]; + AlignedArray src = _testSrcVectors[srcTest]; + AlignedArray dst = _testDstVectors[dstTest]; - CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Length); - float[] actual = new float[dst.Length]; - Array.Copy(dst, actual, dst.Length); + CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Size); + float[] actual = new float[dst.Size]; + dst.CopyTo(actual, 0, dst.Size); Assert.Equal(expected, actual, _matMulComparer); } - [Theory] - [InlineData(10, 5)] - [InlineData(10, 8)] - [InlineData(10, 11)] - [InlineData(11, 8)] - [InlineData(8, 23)] - [InlineData(2, 8)] - [InlineData(2, 9)] - [InlineData(2, 3)] - [InlineData(2, 5)] - [InlineData(4, 5)] - [InlineData(4, 7)] - [InlineData(4, 9)] - [InlineData(5, 7)] - [InlineData(5, 9)] - private void MatMulAnyDimensionTest(int col, int row) - { - Random rand = new Random(DefaultSeed); - float[] mat = new float[col * row]; - for (int i = 0; i < col * row; i++) - { - mat[i] = rand.Next(-10, 10); - } - - float[] src = new float[col]; - for (int i = 0; i < col; i++) - { - src[i] = rand.Next(-10, 10); - } - - float[] dst = new float[row]; - float[] expected = new float[row]; - - for (int i = 0; i < row; i++) - { - float dotProduct = 0; - for (int j = 0; j < src.Length; j++) - { - dotProduct += mat[i * src.Length + j] * src[j]; - } - - expected[i] = dotProduct; - } - - CpuMathUtils.MatrixTimesSource(false, mat, src, dst, dst.Length); - Assert.Equal(expected, dst, _matMulComparer); - } - - [Theory] - [InlineData(10, 5)] - [InlineData(10, 8)] - [InlineData(10, 11)] - [InlineData(11, 8)] - [InlineData(8, 23)] 
- [InlineData(2, 8)] - [InlineData(2, 9)] - [InlineData(2, 3)] - [InlineData(2, 5)] - [InlineData(4, 5)] - [InlineData(4, 7)] - [InlineData(4, 9)] - [InlineData(5, 7)] - [InlineData(5, 9)] - private void MatMulTranAnyDimensionTest(int col, int row) - { - float[] mat = new float[col * row]; - Random rand = new Random(DefaultSeed); - for (int i = 0; i < col * row; i++) - { - mat[i] = rand.Next(0, 10); - } - - float[] src = new float[row]; - for (int i = 0; i < row; i++) - { - src[i] = rand.Next(0, 10); - } - - float[] dst = new float[col]; - float[] expected = new float[col]; - - for (int i = 0; i < dst.Length; i++) - { - float dotProduct = 0; - for (int j = 0; j < row; j++) - { - dotProduct += mat[j * dst.Length + i] * src[j]; - } - - expected[i] = dotProduct; - } - - CpuMathUtils.MatrixTimesSource(true, mat, src, dst, row); - Assert.Equal(expected, dst, _matMulComparer); - } - [Theory] [InlineData(0, 0, 0, new float[] { 70.56001f, -85.68f, -351.36f, 498.24f, -3829.32f, -969.48f, 1168.2f, 118.44f })] [InlineData(1, 0, 1, new float[] { 2724f, 2760f, 2796f, 2832f, 2868f, 2904f, 2940f, 2976f, 3012f, 3048f, 3084f, 3120f, 3156f, 3192f, 3228f, 3264f })] [InlineData(1, 1, 0, new float[] { 11016f, 11152f, 11288f, 11424f, 11560f, 11696f, 11832f, 11968f })] public void MatMulTranTest(int matTest, int srcTest, int dstTest, float[] expected) { - float[] mat = _testMatrices[matTest]; - float[] src = _testSrcVectors[srcTest]; - float[] dst = _testDstVectors[dstTest]; + AlignedArray mat = _testMatrices[matTest]; + AlignedArray src = _testSrcVectors[srcTest]; + AlignedArray dst = _testDstVectors[dstTest]; - CpuMathUtils.MatrixTimesSource(true, mat, src, dst, src.Length); - float[] actual = new float[dst.Length]; - Array.Copy(dst, actual, dst.Length); + CpuMathUtils.MatrixTimesSource(true, mat, src, dst, src.Size); + float[] actual = new float[dst.Size]; + dst.CopyTo(actual, 0, dst.Size); Assert.Equal(expected, actual, _matMulComparer); } @@ -199,14 +118,14 @@ public void 
MatMulTranTest(int matTest, int srcTest, int dstTest, float[] expect [InlineData(1, 0, 1, new float[] { 95f, 231f, 367f, 503f, 639f, 775f, 911f, 1047f, 1183f, 1319f, 1455f, 1591f, 1727f, 1863f, 1999f, 2135f })] public void MatTimesSrcSparseTest(int matTest, int srcTest, int dstTest, float[] expected) { - float[] mat = _testMatrices[matTest]; - float[] src = _testSrcVectors[srcTest]; - float[] dst = _testDstVectors[dstTest]; + AlignedArray mat = _testMatrices[matTest]; + AlignedArray src = _testSrcVectors[srcTest]; + AlignedArray dst = _testDstVectors[dstTest]; int[] idx = _testIndexArray; - CpuMathUtils.MatrixTimesSource(mat, idx, src, 0, 0, (srcTest == 0) ? 4 : 9, dst, dst.Length); - float[] actual = new float[dst.Length]; - Array.Copy(dst, actual, dst.Length); + CpuMathUtils.MatrixTimesSource(mat, idx, src, 0, 0, (srcTest == 0) ? 4 : 9, dst, dst.Size); + float[] actual = new float[dst.Size]; + dst.CopyTo(actual, 0, dst.Size); Assert.Equal(expected, actual, _matMulComparer); } @@ -548,6 +467,34 @@ public void Dist2Test(int test, float expected) Assert.Equal(expected, actual, 0); } + [Theory] + [InlineData(0, new int[] { 0, 2, 5, 6 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f })] + [InlineData(1, new int[] { 0, 2, 5, 6, 8, 11, 12, 13, 14 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f, 0f, 10f, 11f, 0f, 0f, 0f, 0f, 16f })] + public void ZeroItemsUTest(int test, int[] idx, float[] expected) + { + AlignedArray src = new AlignedArray(8 + 8 * test, _vectorAlignment); + src.CopyFrom(_testSrcVectors[test]); + + CpuMathUtils.ZeroMatrixItems(src, src.Size, src.Size, idx); + float[] actual = new float[src.Size]; + src.CopyTo(actual, 0, src.Size); + Assert.Equal(expected, actual, _comparer); + } + + [Theory] + [InlineData(0, new int[] { 0, 2, 5 }, new float[] { 0f, 2f, 0f, 4f, 5f, 6f, 0f, 8f })] + [InlineData(1, new int[] { 0, 2, 5, 6, 8, 11, 12, 13 }, new float[] { 0f, 2f, 0f, 4f, 5f, 0f, 0f, 8f, 9f, 0f, 11f, 12f, 0f, 0f, 0f, 16f })] + public void 
ZeroMatrixItemsCoreTest(int test, int[] idx, float[] expected) + { + AlignedArray src = new AlignedArray(8 + 8 * test, _vectorAlignment); + src.CopyFrom(_testSrcVectors[test]); + + CpuMathUtils.ZeroMatrixItems(src, src.Size / 2 - 1, src.Size / 2, idx); + float[] actual = new float[src.Size]; + src.CopyTo(actual, 0, src.Size); + Assert.Equal(expected, actual, _comparer); + } + [Theory] [InlineData(0)] [InlineData(1)] diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index d8141426ea..16ceae22d3 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -17,7 +17,6 @@ using Microsoft.ML.Runtime.Model; using Microsoft.ML.TestFramework; using Xunit; -using Xunit.Abstractions; namespace Microsoft.ML.Runtime.RunTests { @@ -775,6 +774,7 @@ protected bool SaveLoadTransposed(IDataView view, IHostEnvironment env, string s public abstract partial class TestDataViewBase : BaseTestBaseline { + public class SentimentData { [ColumnName("Label")] @@ -934,7 +934,7 @@ protected bool CheckSameValues(RowCursor curs1, RowCursor curs2, bool exactTypes var comp = comps[col]; if (comp != null && !comp()) { - Fail($"Different values in column {col} of row {curs1.Position}"); + Fail("Different values in column {0} of row {1}", col, curs1.Position); return Failed(); } if (idComp != null && !idComp()) @@ -1158,10 +1158,12 @@ protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, throw Contracts.Except("Unknown type in GetColumnComparer: '{0}'", type); } - private bool EqualWithEpsDouble(Double x, Double y) + private const Double DoubleEps = 1e-9; + + private static bool EqualWithEpsDouble(Double x, Double y) { // bitwise comparison is needed because Abs(Inf-Inf) and Abs(NaN-NaN) are not 0s. 
- return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || CompareNumbersWithTolerance(x, y, null, 3); + return FloatUtils.GetBits(x) == FloatUtils.GetBits(y) || Math.Abs(x - y) < DoubleEps; } private const float SingleEps = 1e-6f; diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs index 90428934ba..39340d225b 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -72,7 +72,7 @@ public void SavePipeSsaSpike() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void SavePipeSsaSpikeNoData() { string pathData = DeleteOutputPath("SavePipe", "SsaSpikeNoData.txt"); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs index 274a229630..0bd4604daa 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs @@ -87,7 +87,7 @@ public void ChangeDetection() } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline public void ChangePointDetectionWithSeasonality() { var env = new MLContext(conc: 1); @@ -130,14 +130,14 @@ public void ChangePointDetectionWithSeasonality() while (enumerator.MoveNext() && index < expectedValues.Count) { row = enumerator.Current; - Assert.Equal(expectedValues[index++], row.Change[0], precision: 5); // Alert - Assert.Equal(expectedValues[index++], row.Change[1], precision: 5); // Raw score - Assert.Equal(expectedValues[index++], row.Change[2], precision: 5); // P-Value score - Assert.Equal(expectedValues[index++], row.Change[3], precision: 5); // Martingale score + Assert.Equal(expectedValues[index++], row.Change[0], precision: 7); // Alert + 
Assert.Equal(expectedValues[index++], row.Change[1], precision: 7); // Raw score + Assert.Equal(expectedValues[index++], row.Change[2], precision: 7); // P-Value score + Assert.Equal(expectedValues[index++], row.Change[3], precision: 7); // Martingale score } } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() { const int ChangeHistorySize = 10; @@ -190,10 +190,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() var engine2 = model2.CreateTimeSeriesPredictionFunction(ml); var prediction2 = engine2.Predict(new Data(1)); //Raw score after first input. - Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 4); // Raw score + Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 5); // Raw score prediction2 = engine2.Predict(new Data(1)); //Raw score after second input. - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 4); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score //Even though time series column is not requested it will // pass the observation through time series transform and update the state with the first input. @@ -210,10 +210,10 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() //and raw score should match the raw score obtained by passing the two input in the first model. 
var engine3 = model3.CreateTimeSeriesPredictionFunction(ml); var prediction3 = engine3.Predict(new Data(1)); - Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 4); // Raw score + Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] public void ChangePointDetectionWithSeasonalityPredictionEngine() { const int ChangeHistorySize = 10; @@ -264,7 +264,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() //Model 1: Prediction #2 prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 4); // Raw score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 7); // Martingale score @@ -277,7 +277,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngine() engine = model2.CreateTimeSeriesPredictionFunction(ml); prediction = engine.Predict(new Data(1)); Assert.Equal(0, prediction.Change[0], precision: 7); // Alert - Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 4); // Raw score + Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 5); // Martingale score } diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index 0e6a5063bd..a7892701d7 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -41,7 +41,7 
@@ public TimeSeriesEstimatorTests(ITestOutputHelper output) : base(output) { } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCoreAnd64BitProcess))] // 32bit and netcore3.0 output differs from Baseline + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline void TestSsaChangePointEstimator() { int Confidence = 95; @@ -75,7 +75,7 @@ void TestSsaChangePointEstimator() Done(); } - [Fact] + [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline void TestSsaSpikeEstimator() { int Confidence = 95; From 1f73f720e132e920fab9fe5af49dcf595c2276c1 Mon Sep 17 00:00:00 2001 From: Tom Finley Date: Thu, 6 Dec 2018 12:34:48 -0800 Subject: [PATCH 030/100] Row now disposable (#1835) * Row now disposable, and implements recommended disposable pattern. * Internalize IRowMapper and its implementing methods. * Have IRowToRowMapper return disposable Rows and no longer have disposer delegate. 
--- .../CustomMappingTransformer.cs | 6 +- .../DataViewConstructionUtils.cs | 25 +++-- src/Microsoft.ML.Api/PredictionEngine.cs | 25 ++++- .../StatefulFilterTransform.cs | 32 ++---- src/Microsoft.ML.Api/TypedCursor.cs | 79 ++++++------- src/Microsoft.ML.Core/Data/IDataView.cs | 24 +++- .../Data/ISchemaBindableMapper.cs | 12 +- .../Data/LinkedRootCursorBase.cs | 9 +- src/Microsoft.ML.Core/Data/RootCursorBase.cs | 12 +- src/Microsoft.ML.Core/Data/SchemaBuilder.cs | 2 +- .../Data/SynchronizedCursorBase.cs | 9 +- src/Microsoft.ML.Core/Data/WrappingRow.cs | 64 +++++++++++ src/Microsoft.ML.Data/Data/DataViewUtils.cs | 30 ++--- src/Microsoft.ML.Data/Data/IRowSeekable.cs | 4 +- .../DataLoadSave/Binary/BinaryLoader.cs | 93 +++++++-------- .../DataLoadSave/Text/TextLoaderCursor.cs | 14 ++- .../DataLoadSave/Transpose/TransposeLoader.cs | 10 +- .../DataView/AppendRowsDataView.cs | 19 ++-- .../DataView/CacheDataView.cs | 13 +-- .../DataView/CompositeRowToRowMapper.cs | 15 +-- .../DataView/RowToRowMapperTransform.cs | 59 +++++----- src/Microsoft.ML.Data/DataView/SimpleRow.cs | 106 +++++------------- src/Microsoft.ML.Data/DataView/Transposer.cs | 16 +-- src/Microsoft.ML.Data/DataView/ZipDataView.cs | 15 ++- .../EntryPoints/TransformModel.cs | 9 +- .../Evaluators/BinaryClassifierEvaluator.cs | 8 +- .../Evaluators/ClusteringEvaluator.cs | 16 +-- .../Evaluators/EvaluatorBase.cs | 21 +++- .../MultiClassClassifierEvaluator.cs | 20 ++-- .../MultiOutputRegressionEvaluator.cs | 8 +- .../Evaluators/QuantileRegressionEvaluator.cs | 8 +- .../Evaluators/RegressionEvaluator.cs | 8 +- .../Prediction/Calibrator.cs | 4 +- ...FeatureContributionCalculationTransform.cs | 20 ++-- .../Scorers/MultiClassClassifierScorer.cs | 20 +--- .../Scorers/RowToRowScorerBase.cs | 24 ++-- .../Scorers/SchemaBindablePredictorWrapper.cs | 6 +- .../ColumnConcatenatingTransformer.cs | 8 +- .../Transforms/ColumnCopying.cs | 2 +- .../Transforms/ColumnSelecting.cs | 17 +-- .../Transforms/DropSlotsTransform.cs | 2 +- 
src/Microsoft.ML.Data/Transforms/Hashing.cs | 2 +- .../Transforms/KeyToValue.cs | 2 +- .../Transforms/KeyToVector.cs | 2 +- .../Transforms/NopTransform.cs | 4 +- .../Transforms/Normalizer.cs | 2 +- .../Transforms/OneToOneTransformerBase.cs | 2 +- .../Transforms/RowShufflingTransformer.cs | 10 +- .../Transforms/RowToRowTransformerBase.cs | 13 ++- .../Transforms/TransformBase.cs | 65 +++++------ .../Transforms/TypeConverting.cs | 2 +- .../ValueToKeyMappingTransformer.cs | 2 +- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 27 +++-- .../TreeEnsembleFeaturizer.cs | 3 +- .../VectorWhitening.cs | 2 +- .../ImageGrayscaleTransform.cs | 2 +- .../ImageLoaderTransform.cs | 2 +- .../ImagePixelExtractorTransform.cs | 2 +- .../ImageResizerTransform.cs | 2 +- .../LearningPipelineDebugProxy.cs | 5 +- .../OnnxTransform.cs | 4 +- src/Microsoft.ML.PCA/PcaTransform.cs | 2 +- .../MatrixFactorizationPredictor.cs | 9 +- .../FieldAwareFactorizationMachineUtils.cs | 3 +- .../TensorflowTransform.cs | 4 +- .../PredictionFunction.cs | 29 ++--- .../SequentialTransformerBase.cs | 75 ++++++++----- src/Microsoft.ML.Transforms/GcnTransform.cs | 2 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 18 ++- .../KeyToVectorMapping.cs | 2 +- .../MissingValueDroppingTransformer.cs | 2 +- .../MissingValueIndicatorTransformer.cs | 2 +- .../MissingValueReplacing.cs | 8 +- .../RandomFourierFeaturizing.cs | 2 +- .../Text/LdaTransform.cs | 6 +- .../Text/NgramTransform.cs | 2 +- .../Text/StopWordsRemovingTransformer.cs | 6 +- .../Text/TextNormalizing.cs | 2 +- .../Text/TokenizingByCharacters.cs | 2 +- .../Text/WordEmbeddingsExtractor.cs | 2 +- .../Text/WordTokenizing.cs | 2 +- test/Microsoft.ML.Benchmarks/HashBench.cs | 2 +- .../Transformers/HashTests.cs | 12 +- 83 files changed, 642 insertions(+), 601 deletions(-) create mode 100644 src/Microsoft.ML.Core/Data/WrappingRow.cs diff --git a/src/Microsoft.ML.Api/CustomMappingTransformer.cs b/src/Microsoft.ML.Api/CustomMappingTransformer.cs index 
d3081fc096..7c81412d78 100644 --- a/src/Microsoft.ML.Api/CustomMappingTransformer.cs +++ b/src/Microsoft.ML.Api/CustomMappingTransformer.cs @@ -111,7 +111,7 @@ public Mapper(CustomMappingTransformer parent, Schema inputSchema) _typedSrc = TypedCursorable.Create(_host, emptyDataView, false, _parent.InputSchemaDefinition); } - public Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) + Delegate[] IRowMapper.CreateGetters(Row input, Func activeOutput, out Action disposer) { disposer = null; // If no outputs are active, we short-circuit to empty array of getters. @@ -158,7 +158,7 @@ private Delegate GetDstGetter(Row input, int colIndex, Action refreshAction) return combinedGetter; } - public Func GetDependencies(Func activeOutput) + Func IRowMapper.GetDependencies(Func activeOutput) { if (Enumerable.Range(0, _parent.AddedSchema.Columns.Length).Any(activeOutput)) { @@ -169,7 +169,7 @@ public Func GetDependencies(Func activeOutput) return col => false; } - public Schema.DetachedColumn[] GetOutputColumns() + Schema.DetachedColumn[] IRowMapper.GetOutputColumns() { var dstRow = new DataViewConstructionUtils.InputRow(_host, _parent.AddedSchema); // All the output columns of dstRow are our outputs. 
diff --git a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs index d68882f805..9c75f7189f 100644 --- a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs @@ -76,7 +76,7 @@ public static IDataView LoadPipeWithPredictor(IHostEnvironment env, Stream model return pipe; } - public sealed class InputRow : InputRowBase, IRowBackedBy + public sealed class InputRow : InputRowBase where TRow : class { private TRow _value; @@ -416,7 +416,12 @@ public sealed class WrappedCursor : RowCursor public override long Batch => _toWrap.Batch; public override Schema Schema => _toWrap.Schema; - public override void Dispose() => _toWrap.Dispose(); + protected override void Dispose(bool disposing) + { + if (disposing) + _toWrap.Dispose(); + } + public override ValueGetter GetGetter(int col) => _toWrap.GetGetter(col); public override ValueGetter GetIdGetter() => _toWrap.GetIdGetter(); @@ -434,8 +439,8 @@ public abstract class DataViewCursorBase : InputRowBase protected readonly DataViewBase DataView; protected readonly IChannel Ch; - private long _position; + /// /// Zero-based position of the cursor. 
/// @@ -462,14 +467,14 @@ protected DataViewCursorBase(IHostEnvironment env, DataViewBase dataView, /// protected bool IsGood => State == CursorState.Good; - public virtual void Dispose() + protected sealed override void Dispose(bool disposing) { - if (State != CursorState.Done) - { - Ch.Dispose(); - _position = -1; - State = CursorState.Done; - } + if (State == CursorState.Done) + return; + Ch.Dispose(); + _position = -1; + base.Dispose(disposing); + State = CursorState.Done; } public bool MoveNext() diff --git a/src/Microsoft.ML.Api/PredictionEngine.cs b/src/Microsoft.ML.Api/PredictionEngine.cs index 78d4810568..b6080b4fc3 100644 --- a/src/Microsoft.ML.Api/PredictionEngine.cs +++ b/src/Microsoft.ML.Api/PredictionEngine.cs @@ -157,13 +157,15 @@ public override void Predict(TSrc example, ref TDst prediction) /// /// The user-defined type that holds the example. /// The user-defined type that holds the prediction. - public abstract class PredictionEngineBase + public abstract class PredictionEngineBase : IDisposable where TSrc : class where TDst : class, new() { private readonly DataViewConstructionUtils.InputRow _inputRow; private readonly IRowReadableAs _outputRow; private readonly Action _disposer; + private bool _disposed; + [BestFriend] private protected ITransformer Transformer { get; } @@ -193,12 +195,14 @@ private protected PredictionEngineBase(IHostEnvironment env, ITransformer transf PredictionEngineCore(env, _inputRow, makeMapper(_inputRow.Schema), ignoreMissingColumns, inputSchemaDefinition, outputSchemaDefinition, out _disposer, out _outputRow); } - internal virtual void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, + [BestFriend] + private protected virtual void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, SchemaDefinition inputSchemaDefinition, SchemaDefinition 
outputSchemaDefinition, out Action disposer, out IRowReadableAs outputRow) { var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.OutputSchema), ignoreMissingColumns, outputSchemaDefinition); - var outputRowLocal = mapper.GetRow(_inputRow, col => true, out disposer); + var outputRowLocal = mapper.GetRow(inputRow, col => true); outputRow = cursorable.GetRow(outputRowLocal); + disposer = inputRow.Dispose; } protected virtual Func TransformerChecker(IExceptionContext ectx, ITransformer transformer) @@ -208,9 +212,20 @@ protected virtual Func TransformerChecker(IExceptionCon return transformer.GetRowToRowMapper; } - ~PredictionEngineBase() + public void Dispose() + { + Disposing(true); + GC.SuppressFinalize(this); + } + + [BestFriend] + private protected void Disposing(bool disposing) { - _disposer?.Invoke(); + if (_disposed) + return; + if (disposing) + _disposer?.Invoke(); + _disposed = true; } /// diff --git a/src/Microsoft.ML.Api/StatefulFilterTransform.cs b/src/Microsoft.ML.Api/StatefulFilterTransform.cs index f07ef7daad..6b63c37349 100644 --- a/src/Microsoft.ML.Api/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Api/StatefulFilterTransform.cs @@ -96,7 +96,7 @@ private StatefulFilterTransform(IHostEnvironment env, StatefulFilterTransform false; Schema IDataView.Schema => OutputSchema; @@ -132,10 +132,7 @@ public RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func return new[] { GetRowCursor(predicate, rand) }; } - public IDataView Source - { - get { return _source; } - } + public IDataView Source => _source; public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) { @@ -158,10 +155,7 @@ private sealed class Cursor : RootCursorBase private bool _disposed; - public override long Batch - { - get { return _input.Batch; } - } + public override long Batch => _input.Batch; public Cursor(StatefulFilterTransform parent, RowCursor input, Func predicate) : base(parent.Host) @@ -196,24 +190,22 @@ public 
Cursor(StatefulFilterTransform parent, RowCursor GetIdGetter() diff --git a/src/Microsoft.ML.Api/TypedCursor.cs b/src/Microsoft.ML.Api/TypedCursor.cs index 4607697662..e73dc88b10 100644 --- a/src/Microsoft.ML.Api/TypedCursor.cs +++ b/src/Microsoft.ML.Api/TypedCursor.cs @@ -18,7 +18,7 @@ namespace Microsoft.ML.Runtime.Api /// /// The user-defined type that is being populated while cursoring. [BestFriend] - internal interface IRowReadableAs + internal interface IRowReadableAs : IDisposable where TRow : class { /// @@ -28,22 +28,6 @@ internal interface IRowReadableAs void FillValues(TRow row); } - /// - /// This interface is an with 'strongly typed' binding. - /// It can accept values of type and present the value as a row. - /// - /// The user-defined type that provides the values while cursoring. - internal interface IRowBackedBy - where TRow : class - { - /// - /// Accepts the fields of the user-supplied object and publishes the instance as a row. - /// If the row is accessed prior to any object being set, then the data accessors on the row should throw. - /// - /// The row object. Cannot be null. - void ExtractValues(TRow row); - } - /// /// This interface provides cursoring through a via a 'strongly typed' binding. /// It can populate the user-supplied object's fields with the values of the current row. 
@@ -253,36 +237,34 @@ public static TypedCursorable Create(IHostEnvironment env, IDataView data, return new TypedCursorable(env, data, ignoreMissingColumns, outSchema); } - private abstract class TypedRowBase + private abstract class TypedRowBase : WrappingRow { protected readonly IChannel Ch; - private readonly Row _input; private readonly Action[] _setters; - public long Batch => _input.Batch; - - public long Position => _input.Position; - - public Schema Schema => _input.Schema; + public override Schema Schema => base.Input.Schema; public TypedRowBase(TypedCursorable parent, Row input, string channelMessage) + : base(input) { Contracts.AssertValue(parent); Contracts.AssertValue(parent._host); Ch = parent._host.Start(channelMessage); Ch.AssertValue(input); - _input = input; - int n = parent._pokes.Length; Ch.Assert(n == parent._columns.Length); Ch.Assert(n == parent._columnIndices.Length); _setters = new Action[n]; for (int i = 0; i < n; i++) - _setters[i] = GenerateSetter(_input, parent._columnIndices[i], parent._columns[i], parent._pokes[i], parent._peeks[i]); + _setters[i] = GenerateSetter(Input, parent._columnIndices[i], parent._columns[i], parent._pokes[i], parent._peeks[i]); } - public ValueGetter GetIdGetter() => _input.GetIdGetter(); + protected override void DisposeCore(bool disposing) + { + if (disposing) + Ch.Dispose(); + } private Action GenerateSetter(Row input, int index, InternalSchemaDefinition.Column column, Delegate poke, Delegate peek) { @@ -292,7 +274,7 @@ private Action GenerateSetter(Row input, int index, InternalSchemaDefiniti Func> del; if (fieldType.IsArray) { - Ch.Assert(colType.IsVector); + Ch.Assert(colType is VectorType); // VBuffer> -> String[] if (fieldType.GetElementType() == typeof(string)) { @@ -459,14 +441,14 @@ public virtual void FillValues(TRow row) setter(row); } - public bool IsColumnActive(int col) + public override bool IsColumnActive(int col) { - return _input.IsColumnActive(col); + return Input.IsColumnActive(col); } - 
public ValueGetter GetGetter(int col) + public override ValueGetter GetGetter(int col) { - return _input.GetGetter(col); + return Input.GetGetter(col); } } @@ -481,6 +463,15 @@ public TypedRow(TypedCursorable parent, Row input) private sealed class RowImplementation : IRowReadableAs { private readonly TypedRow _row; + private bool _disposed; + + public void Dispose() + { + if (_disposed) + return; + _row.Dispose(); + _disposed = true; + } public RowImplementation(TypedRow row) => _row = row; @@ -496,6 +487,7 @@ private sealed class RowImplementation : IRowReadableAs private sealed class RowCursorImplementation : RowCursor { private readonly TypedCursor _cursor; + private bool _disposed; public RowCursorImplementation(TypedCursor cursor) => _cursor = cursor; @@ -504,7 +496,15 @@ private sealed class RowCursorImplementation : RowCursor public override long Batch => _cursor.Batch; public override Schema Schema => _cursor.Schema; - public override void Dispose() { } + protected override void Dispose(bool disposing) + { + if (_disposed) + return; + if (disposing) + _cursor.Dispose(); + _disposed = true; + base.Dispose(disposing); + } public override void FillValues(TRow row) => _cursor.FillValues(row); public override ValueGetter GetGetter(int col) => _cursor.GetGetter(col); @@ -518,7 +518,6 @@ public override void Dispose() { } private sealed class TypedCursor : TypedRowBase { private readonly RowCursor _input; - private bool _disposed; public TypedCursor(TypedCursorable parent, RowCursor input) : base(parent, input, "Cursor") @@ -534,16 +533,6 @@ public override void FillValues(TRow row) public CursorState State => _input.State; - public void Dispose() - { - if (!_disposed) - { - _input.Dispose(); - Ch.Dispose(); - _disposed = true; - } - } - public bool MoveNext() => _input.MoveNext(); public bool MoveMany(long count) => _input.MoveMany(count); public RowCursor GetRootCursor() => _input.GetRootCursor(); diff --git a/src/Microsoft.ML.Core/Data/IDataView.cs 
b/src/Microsoft.ML.Core/Data/IDataView.cs index c67bba60aa..64da333c3a 100644 --- a/src/Microsoft.ML.Core/Data/IDataView.cs +++ b/src/Microsoft.ML.Core/Data/IDataView.cs @@ -142,7 +142,7 @@ public interface IRowCursorConsolidator /// A logical row. May be a row of an or a stand-alone row. If/when its contents /// change, its value is changed. /// - public abstract class Row + public abstract class Row : IDisposable { /// /// This is incremented when the underlying contents changes, giving clients a way to detect change. @@ -202,6 +202,25 @@ public abstract class Row /// public abstract Schema Schema { get; } + /// + /// Implementation of dispose. Calls with . + /// + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// The disposable method for the disposable pattern. This default implementation does nothing. + /// + /// Whether this was called from . + /// Subclasses that implement should call this method with + /// , but I hasten to add that implementing finalizers should be + /// avoided if at all possible.. + protected virtual void Dispose(bool disposing) + { + } } /// @@ -221,7 +240,7 @@ public enum CursorState /// , is -1. Otherwise, /// >= 0. /// - public abstract class RowCursor : Row, IDisposable + public abstract class RowCursor : Row { /// /// Returns the state of the cursor. Before the first call to or @@ -252,6 +271,5 @@ public abstract class RowCursor : Row, IDisposable /// values from . /// public abstract RowCursor GetRootCursor(); - public abstract void Dispose(); } } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs index f0e3f45adb..ab6a0223a1 100644 --- a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs +++ b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs @@ -104,15 +104,9 @@ public interface IRowToRowMapper /// This method creates a live connection between the input and the output . 
In particular, when the getters of the output are invoked, they invoke the /// getters of the input row and base the output values on the current values of the input . - /// The output values are re-computed when requested through the getters. - /// - /// The optional should be invoked by any user of this row mapping, once it no - /// longer needs the . If no action is needed when the cursor is Disposed, the implementation - /// should set to null, otherwise it should be set to a delegate to be - /// invoked by the code calling this object. (For example, a wrapping cursor's - /// method. It's best for this action to be idempotent - calling it multiple times should be equivalent to - /// calling it once. + /// The output values are re-computed when requested through the getters. Also, the returned + /// will dispose when it is disposed. /// - Row GetRow(Row input, Func active, out Action disposer); + Row GetRow(Row input, Func active); } } diff --git a/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs b/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs index 1f95d941cb..9025e27af4 100644 --- a/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/LinkedRootCursorBase.cs @@ -32,12 +32,15 @@ protected LinkedRootCursorBase(IChannelProvider provider, RowCursor input) Root = Input.GetRootCursor(); } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (State != CursorState.Done) + if (State == CursorState.Done) + return; + if (disposing) { Input.Dispose(); - base.Dispose(); + // The base class should set the state to done under these circumstances. 
+ base.Dispose(true); } } } diff --git a/src/Microsoft.ML.Core/Data/RootCursorBase.cs b/src/Microsoft.ML.Core/Data/RootCursorBase.cs index 75d58d98bb..15c0b501ff 100644 --- a/src/Microsoft.ML.Core/Data/RootCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/RootCursorBase.cs @@ -50,14 +50,14 @@ protected RootCursorBase(IChannelProvider provider) _state = CursorState.NotStarted; } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (State != CursorState.Done) - { + if (State == CursorState.Done) + return; + if (disposing) Ch.Dispose(); - _position = -1; - _state = CursorState.Done; - } + _position = -1; + _state = CursorState.Done; } public sealed override bool MoveNext() diff --git a/src/Microsoft.ML.Core/Data/SchemaBuilder.cs b/src/Microsoft.ML.Core/Data/SchemaBuilder.cs index 28017f1e7d..711360ac79 100644 --- a/src/Microsoft.ML.Core/Data/SchemaBuilder.cs +++ b/src/Microsoft.ML.Core/Data/SchemaBuilder.cs @@ -31,7 +31,7 @@ public SchemaBuilder() /// The column name. /// The column type. /// The column metadata. 
- public void AddColumn(string name, ColumnType type, Schema.Metadata metadata) + public void AddColumn(string name, ColumnType type, Schema.Metadata metadata = null) { Contracts.CheckNonEmpty(name, nameof(name)); Contracts.CheckValue(type, nameof(type)); diff --git a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs index b2641d985e..83bd53a3e3 100644 --- a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs @@ -43,14 +43,17 @@ protected SynchronizedCursorBase(IChannelProvider provider, RowCursor input) _root = Input.GetRootCursor(); } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (!_disposed) + if (_disposed) + return; + if (disposing) { Input.Dispose(); Ch.Dispose(); - _disposed = true; } + base.Dispose(disposing); + _disposed = true; } public sealed override bool MoveNext() => _root.MoveNext(); diff --git a/src/Microsoft.ML.Core/Data/WrappingRow.cs b/src/Microsoft.ML.Core/Data/WrappingRow.cs new file mode 100644 index 0000000000..6f855ad225 --- /dev/null +++ b/src/Microsoft.ML.Core/Data/WrappingRow.cs @@ -0,0 +1,64 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; + +namespace Microsoft.ML.Runtime.Data +{ + /// + /// Convenient base class for implementors that wrap a single + /// as their input. The , , and + /// are taken from this . + /// + [BestFriend] + internal abstract class WrappingRow : Row + { + private bool _disposed; + + /// + /// The wrapped input row. 
+ /// + protected Row Input { get; } + + public sealed override long Batch => Input.Batch; + public sealed override long Position => Input.Position; + public override ValueGetter GetIdGetter() => Input.GetIdGetter(); + + [BestFriend] + private protected WrappingRow(Row input) + { + Contracts.AssertValue(input); + Input = input; + } + + /// + /// This override of the dispose method by default only calls 's + /// method, but subclasses can enable additional functionality + /// via the functionality. + /// + /// + protected sealed override void Dispose(bool disposing) + { + if (_disposed) + return; + // Since the input was created first, and this instance may depend on it, we should + // dispose local resources first before potentially disposing the input row resources. + DisposeCore(disposing); + if (disposing) + Input.Dispose(); + _disposed = true; + } + + /// + /// Called from with in the case where + /// that method has never been called before, and right after has been + /// disposed. The default implementation does nothing. + /// + /// Whether this was called through the dispose path, as opposed + /// to the finalizer path. + protected virtual void DisposeCore(bool disposing) + { + } + } +} diff --git a/src/Microsoft.ML.Data/Data/DataViewUtils.cs b/src/Microsoft.ML.Data/Data/DataViewUtils.cs index f81672008b..0b0cef5343 100644 --- a/src/Microsoft.ML.Data/Data/DataViewUtils.cs +++ b/src/Microsoft.ML.Data/Data/DataViewUtils.cs @@ -55,7 +55,7 @@ public static string[] GetTempColumnNames(this ISchema schema, int n, string tag int j = 0; for (int i = 0; i < n; i++) { - for (;;) + for (; ; ) { string name = string.IsNullOrWhiteSpace(tag) ? 
string.Format("temp_{0:000}", j) : @@ -1056,23 +1056,21 @@ public Cursor(IChannelProvider provider, Schema schema, int[] activeToCol, int[] _quitAction = quitAction; } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (!_disposed) + if (_disposed) + return; + if (disposing) { foreach (var pipe in _pipes) pipe.Unset(); - _disposed = true; - if (_quitAction != null) - _quitAction(); + _quitAction?.Invoke(); } - base.Dispose(); + _disposed = true; + base.Dispose(disposing); } - public override ValueGetter GetIdGetter() - { - return _idGetter; - } + public override ValueGetter GetIdGetter() => _idGetter; protected override bool MoveNextCore() { @@ -1203,11 +1201,12 @@ private void InitHeap() } } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (!_disposed) + if (_disposed) + return; + if (disposing) { - _disposed = true; _batch = -1; _icursor = -1; _currentCursor = null; @@ -1215,7 +1214,8 @@ public override void Dispose() foreach (var cursor in _cursors) cursor.Dispose(); } - base.Dispose(); + _disposed = true; + base.Dispose(disposing); } public override ValueGetter GetIdGetter() diff --git a/src/Microsoft.ML.Data/Data/IRowSeekable.cs b/src/Microsoft.ML.Data/Data/IRowSeekable.cs index 17514612af..d1ae1ebb7e 100644 --- a/src/Microsoft.ML.Data/Data/IRowSeekable.cs +++ b/src/Microsoft.ML.Data/Data/IRowSeekable.cs @@ -24,10 +24,8 @@ public interface IRowSeekable /// For , when the state is valid (that is when /// returns ), it returns the current row index. Otherwise it's -1. /// - public abstract class RowSeeker : Row, IDisposable + public abstract class RowSeeker : Row { - public abstract void Dispose(); - /// /// Moves the seeker to a row at a specific row index. 
/// If the row index specified is out of range (less than zero or not less than the diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs index aa33db97b8..abbcc163a7 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs @@ -1363,60 +1363,65 @@ public Cursor(BinaryLoader parent, Func predicate, Random rand) _pipeTask = SetupDecompressTask(); } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (!_disposed && _readerThread != null) + if (_disposed) + return; + if (disposing) { - // We should reach this block only in the event of a dispose - // before all rows have been iterated upon. + if (_readerThread != null) + { + // We should reach this block only in the event of a dispose + // before all rows have been iterated upon. - // First set the flag on the cursor. The stream-reader and the - // pipe-decompressor workers will detect this, stop their work, - // and do whatever "cleanup" is natural for them to perform. - _disposed = true; + // First set the flag on the cursor. The stream-reader and the + // pipe-decompressor workers will detect this, stop their work, + // and do whatever "cleanup" is natural for them to perform. + _disposed = true; - // In the disk read -> decompress -> codec read pipeline, we - // clean up in reverse order. - // 1. First we clear out any pending codec readers, for each pipe. - // 2. Then we join the pipe worker threads, which in turn should - // have cleared out all of the pending blocks to decompress. - // 3. Then finally we join against the reader thread. + // In the disk read -> decompress -> codec read pipeline, we + // clean up in reverse order. + // 1. First we clear out any pending codec readers, for each pipe. + // 2. 
Then we join the pipe worker threads, which in turn should + // have cleared out all of the pending blocks to decompress. + // 3. Then finally we join against the reader thread. - // This code is analogous to the stuff in MoveNextCore, except - // nothing is actually done with the resulting blocks. + // This code is analogous to the stuff in MoveNextCore, except + // nothing is actually done with the resulting blocks. - try - { - for (; ; ) + try { - // This cross-block-index access pattern is deliberate, as - // by having a consistent access pattern everywhere we can - // have much greater confidence this will never deadlock. - bool anyTrue = false; - for (int c = 0; c < _pipes.Length; ++c) - anyTrue |= _pipes[c].MoveNextCleanup(); - if (!anyTrue) - break; + for (; ; ) + { + // This cross-block-index access pattern is deliberate, as + // by having a consistent access pattern everywhere we can + // have much greater confidence this will never deadlock. + bool anyTrue = false; + for (int c = 0; c < _pipes.Length; ++c) + anyTrue |= _pipes[c].MoveNextCleanup(); + if (!anyTrue) + break; + } + } + catch (OperationCanceledException ex) + { + // REVIEW: Encountering this here means that we did not encounter + // the exception during normal cursoring, but at some later point. I feel + // we should not be tolerant of this, and should throw, though it might be + // an ambiguous point. + Contracts.Assert(ex.CancellationToken == _exMarshaller.Token); + _exMarshaller.ThrowIfSet(Ch); + Contracts.Assert(false); + } + finally + { + _pipeTask.Wait(); + _readerThread.Join(); } - } - catch (OperationCanceledException ex) - { - // REVIEW: Encountering this here means that we did not encounter - // the exception during normal cursoring, but at some later point. I feel - // we should not be tolerant of this, and should throw, though it might be - // an ambiguous point. 
- Contracts.Assert(ex.CancellationToken == _exMarshaller.Token); - _exMarshaller.ThrowIfSet(Ch); - Contracts.Assert(false); - } - finally - { - _pipeTask.Wait(); - _readerThread.Join(); } } - - base.Dispose(); + _disposed = true; + base.Dispose(disposing); } private Task SetupDecompressTask() diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs index 1a2f94da3b..dbeb1c891d 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs @@ -275,16 +275,18 @@ public static string GetEmbeddedArgs(IMultiStreamSource files) public override Schema Schema => _bindings.AsSchema; - public override void Dispose() + protected override void Dispose(bool disposing) { if (_disposed) return; - + if (disposing) + { + _ator.Dispose(); + _reader.Release(); + _stats.Release(); + } _disposed = true; - _ator.Dispose(); - _reader.Release(); - _stats.Release(); - base.Dispose(); + base.Dispose(disposing); } protected override bool MoveNextCore() diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index f57e08b8c7..0b42c26ce0 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -824,15 +824,17 @@ public Cursor(TransposeLoader parent, Func pred) Init(_actives[i]); } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (!_disposed) + if (_disposed) + return; + if (disposing) { - _disposed = true; for (int i = 0; i < _transCursors.Length; ++i) _transCursors[i].Dispose(); - base.Dispose(); } + _disposed = true; + base.Dispose(disposing); } /// diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index e9a100bbef..c0d15d87f2 100644 --- 
a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -280,15 +280,16 @@ protected override bool MoveNextCore() return true; } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (State != CursorState.Done) + if (State == CursorState.Done) + return; + if (disposing) { Ch.Dispose(); - if (_currentCursor != null) - _currentCursor.Dispose(); - base.Dispose(); + _currentCursor?.Dispose(); } + base.Dispose(disposing); } } @@ -369,15 +370,17 @@ protected override bool MoveNextCore() return true; } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (State != CursorState.Done) + if (State == CursorState.Done) + return; + if (disposing) { Ch.Dispose(); foreach (RowCursor c in _cursorSet) c.Dispose(); - base.Dispose(); } + base.Dispose(disposing); } } diff --git a/src/Microsoft.ML.Data/DataView/CacheDataView.cs b/src/Microsoft.ML.Data/DataView/CacheDataView.cs index 9f2e0ab447..0e9308dc22 100644 --- a/src/Microsoft.ML.Data/DataView/CacheDataView.cs +++ b/src/Microsoft.ML.Data/DataView/CacheDataView.cs @@ -565,10 +565,6 @@ public RowSeeker(RowSeekerCore toWrap) public override long Batch => _internal.Batch; public override Schema Schema => _internal.Schema; - public override void Dispose() - { - } - public override ValueGetter GetGetter(int col) => _internal.GetGetter(col); public override ValueGetter GetIdGetter() => _internal.GetIdGetter(); public override bool IsColumnActive(int col) => _internal.IsColumnActive(col); @@ -1291,15 +1287,18 @@ public sealed override bool IsColumnActive(int col) return _colToActivesIndex[col] >= 0; } - public sealed override void Dispose() + protected sealed override void Dispose(bool disposing) { - if (!_disposed) + if (_disposed) + return; + if (disposing) { DisposeCore(); PositionCore = -1; Ch.Dispose(); - _disposed = true; } + base.Dispose(disposing); + _disposed = true; } public sealed override 
ValueGetter GetGetter(int col) diff --git a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs index 326a750a26..aaf2e9f49e 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs @@ -43,13 +43,12 @@ public Func GetDependencies(Func predicate) return toReturn; } - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { Contracts.CheckValue(input, nameof(input)); Contracts.CheckValue(active, nameof(active)); Contracts.CheckParam(input.Schema == InputSchema, nameof(input), "Schema did not match original schema"); - disposer = null; if (InnerMappers.Length == 0) { bool differentActive = false; @@ -75,17 +74,7 @@ public Row GetRow(Row input, Func active, out Action disposer) Row result = input; for (int i = 0; i < InnerMappers.Length; ++i) - { - result = InnerMappers[i].GetRow(result, deps[i], out var localDisp); - if (localDisp != null) - { - if (disposer == null) - disposer = localDisp; - else - disposer = localDisp + disposer; - // We want the last disposer to be called first, so the order of the addition here is important. - } - } + result = InnerMappers[i].GetRow(result, deps[i]); return result; } diff --git a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs index a4839d710b..57382f890b 100644 --- a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs +++ b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs @@ -26,7 +26,8 @@ namespace Microsoft.ML.Runtime.Data /// ctor or Create method with , along with a corresponding /// . /// - public interface IRowMapper : ICanSaveModel + [BestFriend] + internal interface IRowMapper : ICanSaveModel { /// /// Returns the input columns needed for the requested output columns. 
@@ -36,7 +37,9 @@ public interface IRowMapper : ICanSaveModel /// /// Returns the getters for the output columns given an active set of output columns. The length of the getters /// array should be equal to the number of columns added by the IRowMapper. It should contain the getter for the - /// i'th output column if activeOutput(i) is true, and null otherwise. + /// i'th output column if activeOutput(i) is true, and null otherwise. If creating a or + /// out of this, the delegate (if non-null) should be called + /// from the dispose of either of those instances. /// Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer); @@ -81,7 +84,8 @@ private static VersionInfo GetVersionInfo() bool ICanSavePfa.CanSavePfa => _mapper is ICanSavePfa pfaMapper ? pfaMapper.CanSavePfa : false; - public RowToRowMapperTransform(IHostEnvironment env, IDataView input, IRowMapper mapper, Func mapperFactory) + [BestFriend] + internal RowToRowMapperTransform(IHostEnvironment env, IDataView input, IRowMapper mapper, Func mapperFactory) : base(env, RegistrationName, input) { Contracts.CheckValue(mapper, nameof(mapper)); @@ -91,7 +95,8 @@ public RowToRowMapperTransform(IHostEnvironment env, IDataView input, IRowMapper _bindings = new ColumnBindings(Schema.Create(input.Schema), mapper.GetOutputColumns()); } - public static Schema GetOutputSchema(ISchema inputSchema, IRowMapper mapper) + [BestFriend] + internal static Schema GetOutputSchema(ISchema inputSchema, IRowMapper mapper) { Contracts.CheckValue(inputSchema, nameof(inputSchema)); Contracts.CheckValue(mapper, nameof(mapper)); @@ -235,23 +240,20 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => Source.Schema; - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); Host.Check(input.Schema == Source.Schema, "Schema of input row must be the same as the schema the 
mapper is bound to"); - disposer = null; using (var ch = Host.Start("GetEntireRow")) { - Action disp; var activeArr = new bool[OutputSchema.ColumnCount]; for (int i = 0; i < OutputSchema.ColumnCount; i++) activeArr[i] = active(i); var pred = GetActiveOutputColumns(activeArr); - var getters = _mapper.CreateGetters(input, pred, out disp); - disposer += disp; - return new RowImpl(input, this, OutputSchema, getters); + var getters = _mapper.CreateGetters(input, pred, out Action disp); + return new RowImpl(input, this, OutputSchema, getters, disp); } } @@ -285,25 +287,27 @@ public IDataTransform ApplyToData(IHostEnvironment env, IDataView newSource) } } - private sealed class RowImpl : Row + private sealed class RowImpl : WrappingRow { - private readonly Row _input; private readonly Delegate[] _getters; - private readonly RowToRowMapperTransform _parent; - - public override long Batch => _input.Batch; - - public override long Position => _input.Position; + private readonly Action _disposer; public override Schema Schema { get; } - public RowImpl(Row input, RowToRowMapperTransform parent, Schema schema, Delegate[] getters) + public RowImpl(Row input, RowToRowMapperTransform parent, Schema schema, Delegate[] getters, Action disposer) + : base(input) { - _input = input; _parent = parent; Schema = schema; _getters = getters; + _disposer = disposer; + } + + protected override void DisposeCore(bool disposing) + { + if (disposing) + _disposer?.Invoke(); } public override ValueGetter GetGetter(int col) @@ -311,7 +315,7 @@ public override ValueGetter GetGetter(int col) bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); if (isSrc) - return _input.GetGetter(index); + return Input.GetGetter(index); Contracts.Assert(_getters[index] != null); var fn = _getters[index] as ValueGetter; @@ -320,14 +324,12 @@ public override ValueGetter GetGetter(int col) return fn; } - public override ValueGetter GetIdGetter() => _input.GetIdGetter(); - public override bool 
IsColumnActive(int col) { bool isSrc; int index = _parent._bindings.MapColumnIndex(out isSrc, col); if (isSrc) - return _input.IsColumnActive((index)); + return Input.IsColumnActive((index)); return _getters[index] != null; } } @@ -338,6 +340,7 @@ private sealed class Cursor : SynchronizedCursorBase private readonly bool[] _active; private readonly ColumnBindings _bindings; private readonly Action _disposer; + private bool _disposed; public override Schema Schema => _bindings.Schema; @@ -374,10 +377,14 @@ public override ValueGetter GetGetter(int col) return fn; } - public override void Dispose() + protected override void Dispose(bool disposing) { - _disposer?.Invoke(); - base.Dispose(); + if (_disposed) + return; + if (disposing) + _disposer?.Invoke(); + _disposed = true; + base.Dispose(disposing); } } } diff --git a/src/Microsoft.ML.Data/DataView/SimpleRow.cs b/src/Microsoft.ML.Data/DataView/SimpleRow.cs index 55f67e6de2..01a2b719b6 100644 --- a/src/Microsoft.ML.Data/DataView/SimpleRow.cs +++ b/src/Microsoft.ML.Data/DataView/SimpleRow.cs @@ -13,45 +13,54 @@ namespace Microsoft.ML.Runtime.Data /// /// An implementation of that gets its , , /// and from an input row. The constructor requires a schema and array of getter - /// delegates. A null delegate indicates an inactive column. The delegates are assumed to be of the appropriate type - /// (this does not validate the type). + /// delegates. A delegate indicates an inactive column. The delegates are assumed to be + /// of the appropriate type (this does not validate the type). /// REVIEW: Should this validate that the delegates are of the appropriate type? It wouldn't be difficult /// to do so. 
/// - public sealed class SimpleRow : Row + [BestFriend] + internal sealed class SimpleRow : WrappingRow { - private readonly Row _input; private readonly Delegate[] _getters; + private readonly Action _disposer; public override Schema Schema { get; } - public override long Position => _input.Position; - - public override long Batch => _input.Batch; - - public SimpleRow(Schema schema, Row input, Delegate[] getters) + /// + /// Constructor. + /// + /// The schema for the row. + /// The row that is being wrapped by this row, where our , + /// , . + /// The collection of getter delegates, whose types should map those in a schema. + /// If one of these is , the corresponding column is considered inactive. + /// A method that, if non-null, will be called exactly once during + /// , prior to disposing . + public SimpleRow(Schema schema, Row input, Delegate[] getters, Action disposer = null) + : base(input) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(input, nameof(input)); - Contracts.Check(Utils.Size(getters) == schema.ColumnCount); + Contracts.Check(Utils.Size(getters) == schema.Count); + Contracts.CheckValueOrNull(disposer); Schema = schema; - _input = input; _getters = getters ?? 
new Delegate[0]; + _disposer = disposer; } - public override ValueGetter GetIdGetter() + protected override void DisposeCore(bool disposing) { - return _input.GetIdGetter(); + if (disposing) + _disposer?.Invoke(); } public override ValueGetter GetGetter(int col) { Contracts.CheckParam(0 <= col && col < _getters.Length, nameof(col), "Invalid col value in GetGetter"); Contracts.Check(IsColumnActive(col)); - var fn = _getters[col] as ValueGetter; - if (fn == null) - throw Contracts.Except("Unexpected TValue in GetGetter"); - return fn; + if (_getters[col] is ValueGetter fn) + return fn; + throw Contracts.Except("Unexpected TValue in GetGetter"); } public override bool IsColumnActive(int col) @@ -135,68 +144,6 @@ public void GetMetadata(string kind, int col, ref TValue value) protected abstract void GetMetadataCore(string kind, int col, ref TValue value); } - /// - /// An that takes all column names and types as constructor parameters. - /// The columns can optionally have text metadata. - /// - public sealed class SimpleSchema : SimpleSchemaBase - { - private readonly MetadataUtils.MetadataGetter>>[] _keyValueGetters; - - public SimpleSchema(IExceptionContext ectx, params KeyValuePair[] columns) - : base(ectx, columns) - { - _keyValueGetters = new MetadataUtils.MetadataGetter>>[ColumnCount]; - } - - public SimpleSchema(IExceptionContext ectx, KeyValuePair[] columns, - Dictionary>>> keyValues) - : this(ectx, columns) - { - foreach (var kvp in keyValues) - { - var name = kvp.Key; - var getter = kvp.Value; - if (!ColumnNameMap.TryGetValue(name, out int col)) - throw Ectx.ExceptParam(nameof(keyValues), $"Output schema does not contain column '{name}'"); - if (!Types[col].ItemType.IsKey) - throw Ectx.ExceptParam(nameof(keyValues), $"Column '{name}' is not a key column, so it cannot have key value metadata"); - _keyValueGetters[col] = getter; - } - } - - protected override IEnumerable> GetMetadataTypesCore(int col) - { - Ectx.Assert(0 <= col && col < ColumnCount); - if 
(_keyValueGetters[col] != null) - { - Ectx.Assert(Types[col].ItemType.IsKey); - yield return new KeyValuePair(MetadataUtils.Kinds.KeyValues, - new VectorType(TextType.Instance, Types[col].ItemType.KeyCount)); - } - } - - protected override ColumnType GetMetadataTypeOrNullCore(string kind, int col) - { - Ectx.Assert(0 <= col && col < ColumnCount); - if (kind == MetadataUtils.Kinds.KeyValues && _keyValueGetters[col] != null) - { - Ectx.Assert(Types[col].ItemType.IsKey); - return new VectorType(TextType.Instance, Types[col].ItemType.KeyCount); - } - return null; - } - - protected override void GetMetadataCore(string kind, int col, ref TValue value) - { - Ectx.Assert(0 <= col && col < ColumnCount); - if (kind == MetadataUtils.Kinds.KeyValues && _keyValueGetters[col] != null) - _keyValueGetters[col].Marshal(col, ref value); - else - throw Ectx.ExceptGetMetadata(); - } - } - public static class SimpleSchemaUtils { public static Schema Create(IExceptionContext ectx, params KeyValuePair[] columns) @@ -209,5 +156,4 @@ public static Schema Create(IExceptionContext ectx, params KeyValuePair : Row + private abstract class RowBase : WrappingRow where TSplitter : Splitter { protected readonly TSplitter Parent; - protected readonly Row Input; public sealed override Schema Schema => Parent.AsSchema; - public sealed override long Position => Input.Position; - public sealed override long Batch => Input.Batch; public RowBase(TSplitter parent, Row input) + : base(input) { Contracts.AssertValue(parent); Contracts.AssertValue(input); Contracts.Assert(input.IsColumnActive(parent.SrcCol)); Parent = parent; - Input = input; - } - - public sealed override ValueGetter GetIdGetter() - { - return Input.GetIdGetter(); } } @@ -1511,7 +1503,7 @@ public SlotDataView(IHostEnvironment env, ITransposeDataView data, int col) _col = col; var builder = new SchemaBuilder(); - builder.AddColumn(_data.Schema[_col].Name, _type, null); + builder.AddColumn(_data.Schema[_col].Name, _type); Schema = 
builder.GetSchema(); } @@ -1606,7 +1598,7 @@ public SlotRowCursorShim(IChannelProvider provider, SlotCursor cursor) _slotCursor = cursor; var builder = new SchemaBuilder(); - builder.AddColumn("Waffles", cursor.GetSlotType(), null); + builder.AddColumn("Waffles", cursor.GetSlotType()); Schema = builder.GetSchema(); } diff --git a/src/Microsoft.ML.Data/DataView/ZipDataView.cs b/src/Microsoft.ML.Data/DataView/ZipDataView.cs index 7b344a02be..9de68b9d55 100644 --- a/src/Microsoft.ML.Data/DataView/ZipDataView.cs +++ b/src/Microsoft.ML.Data/DataView/ZipDataView.cs @@ -110,6 +110,7 @@ private sealed class Cursor : RootCursorBase private readonly RowCursor[] _cursors; private readonly CompositeSchema _compositeSchema; private readonly bool[] _isColumnActive; + private bool _disposed; public override long Batch { get { return 0; } } @@ -124,11 +125,17 @@ public Cursor(ZipDataView parent, RowCursor[] srcCursors, Func predic _isColumnActive = Utils.BuildArray(_compositeSchema.ColumnCount, predicate); } - public override void Dispose() + protected override void Dispose(bool disposing) { - for (int i = _cursors.Length - 1; i >= 0; i--) - _cursors[i].Dispose(); - base.Dispose(); + if (_disposed) + return; + if (disposing) + { + for (int i = _cursors.Length - 1; i >= 0; i--) + _cursors[i].Dispose(); + } + _disposed = true; + base.Dispose(disposing); } public override ValueGetter GetIdGetter() diff --git a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs index 994414d8f4..5af6213603 100644 --- a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs @@ -236,7 +236,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => _rootSchema; - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { _ectx.Assert(IsCompositeRowToRowMapper(_chain)); _ectx.AssertValue(input); @@ -244,7 +244,6 @@ public Row 
GetRow(Row input, Func active, out Action disposer) _ectx.Check(input.Schema == InputSchema, "Schema of input row must be the same as the schema the mapper is bound to"); - disposer = null; var mappers = new List(); var actives = new List>(); var transform = _chain as IDataTransform; @@ -262,11 +261,7 @@ public Row GetRow(Row input, Func active, out Action disposer) actives.Reverse(); var row = input; for (int i = 0; i < mappers.Count; i++) - { - Action disp; - row = mappers[i].GetRow(row, actives[i], out disp); - disposer += disp; - } + row = mappers[i].GetRow(row, actives[i]); return row; } diff --git a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs index 4dfe37f9f6..3dd2ca8f5c 100644 --- a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs @@ -188,7 +188,7 @@ private ReadOnlyMemory[] GetClassNames(RoleMappedSchema schema) return names; } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckParam(schema.Label != null, nameof(schema), "Could not find the label column"); @@ -968,7 +968,7 @@ public override void Save(ModelSaveContext ctx) ctx.Writer.WriteBoolByte(_useRaw); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { if (_probIndex >= 0) { @@ -981,7 +981,7 @@ public override Func GetDependencies(Func activeOutput) return col => activeOutput(AssignedCol) && col == ScoreIndex; } - public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); @@ 
-1079,7 +1079,7 @@ private bool GetPredictedLabel(Single val) return Single.IsNaN(val) ? false : val > _threshold; } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { if (_probIndex >= 0) { diff --git a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs index 797854dcbf..ca2d36c3c4 100644 --- a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs @@ -142,7 +142,7 @@ protected override Aggregator GetAggregatorCore(RoleMappedSchema schema, string return new Aggregator(Host, schema.Feature, numClusters, _calculateDbi, schema.Weight != null, stratName); } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { var scoreInfo = schema.GetUniqueColumn(MetadataUtils.Const.ScoreValueKind.Score); int numClusters = scoreInfo.Type.VectorSize; @@ -638,7 +638,7 @@ public override void Save(ModelSaveContext ctx) ctx.Writer.Write(_numClusters); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => @@ -646,13 +646,13 @@ public override Func GetDependencies(Func activeOutput) (activeOutput(ClusterIdCol) || activeOutput(SortedClusterCol) || activeOutput(SortedClusterScoreCol)); } - public override Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { disposer = null; var getters = new Delegate[3]; - if (!activeOutput(ClusterIdCol) && !activeOutput(SortedClusterCol) && !activeOutput(SortedClusterScoreCol)) + if (!activeCols(ClusterIdCol) && !activeCols(SortedClusterCol) && !activeCols(SortedClusterScoreCol)) return getters; long 
cachedPosition = -1; @@ -675,7 +675,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput } }; - if (activeOutput(ClusterIdCol)) + if (activeCols(ClusterIdCol)) { ValueGetter assignedFn = (ref uint dst) => @@ -686,7 +686,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput getters[ClusterIdCol] = assignedFn; } - if (activeOutput(SortedClusterScoreCol)) + if (activeCols(SortedClusterScoreCol)) { ValueGetter> topKScoresFn = (ref VBuffer dst) => @@ -700,7 +700,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput getters[SortedClusterScoreCol] = topKScoresFn; } - if (activeOutput(SortedClusterCol)) + if (activeCols(SortedClusterCol)) { ValueGetter> topKClassesFn = (ref VBuffer dst) => @@ -716,7 +716,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput return getters; } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var infos = new Schema.DetachedColumn[3]; infos[ClusterIdCol] = new Schema.DetachedColumn(ClusterId, _types[ClusterIdCol], null); diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs index 829dd0043a..906bc020b8 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs @@ -446,7 +446,8 @@ public override IDataTransform GetPerInstanceMetrics(RoleMappedData data) return new RowToRowMapperTransform(Host, data.Data, mapper, null); } - protected abstract IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema); + [BestFriend] + private protected abstract IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema); } /// @@ -501,10 +502,22 @@ public virtual void Save(ModelSaveContext ctx) ctx.SaveStringOrNull(LabelCol); } - public abstract Func GetDependencies(Func activeOutput); + Func IRowMapper.GetDependencies(Func activeOutput) + => 
GetDependenciesCore(activeOutput); - public abstract Schema.DetachedColumn[] GetOutputColumns(); + [BestFriend] + private protected abstract Func GetDependenciesCore(Func activeOutput); - public abstract Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer); + Schema.DetachedColumn[] IRowMapper.GetOutputColumns() + => GetOutputColumnsCore(); + + [BestFriend] + private protected abstract Schema.DetachedColumn[] GetOutputColumnsCore(); + + Delegate[] IRowMapper.CreateGetters(Row input, Func activeCols, out Action disposer) + => CreateGettersCore(input, activeCols, out disposer); + + [BestFriend] + private protected abstract Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer); } } diff --git a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs index cb12bf92e4..04316e3ed6 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs @@ -117,7 +117,7 @@ private ReadOnlyMemory[] GetClassNames(RoleMappedSchema schema) return names; } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { Host.CheckParam(schema.Label != null, nameof(schema), "Schema must contain a label column"); var scoreInfo = schema.GetUniqueColumn(MetadataUtils.Const.ScoreValueKind.Score); @@ -648,7 +648,7 @@ public override void Save(ModelSaveContext ctx) ctx.SaveNonEmptyString(_classNames[i].ToString()); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { Host.Assert(ScoreIndex >= 0); Host.Assert(LabelIndex >= 0); @@ -662,13 +662,13 @@ public override Func GetDependencies(Func activeOutput) activeOutput(SortedClassesCol) || activeOutput(LogLossCol)); } - public override Delegate[] 
CreateGetters(Row input, Func activeOutput, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { disposer = null; var getters = new Delegate[4]; - if (!activeOutput(AssignedCol) && !activeOutput(SortedClassesCol) && !activeOutput(SortedScoresCol) && !activeOutput(LogLossCol)) + if (!activeCols(AssignedCol) && !activeCols(SortedClassesCol) && !activeCols(SortedScoresCol) && !activeCols(LogLossCol)) return getters; long cachedPosition = -1; @@ -677,7 +677,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput var scoresArr = new float[_numClasses]; int[] sortedIndices = new int[_numClasses]; - var labelGetter = activeOutput(LogLossCol) ? RowCursorUtils.GetLabelGetter(input, LabelIndex) : + var labelGetter = activeCols(LogLossCol) ? RowCursorUtils.GetLabelGetter(input, LabelIndex) : (ref float dst) => dst = float.NaN; var scoreGetter = input.GetGetter>(ScoreIndex); Action updateCacheIfNeeded = @@ -695,7 +695,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput } }; - if (activeOutput(AssignedCol)) + if (activeCols(AssignedCol)) { ValueGetter assignedFn = (ref uint dst) => @@ -706,7 +706,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput getters[AssignedCol] = assignedFn; } - if (activeOutput(SortedScoresCol)) + if (activeCols(SortedScoresCol)) { ValueGetter> topKScoresFn = (ref VBuffer dst) => @@ -720,7 +720,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput getters[SortedScoresCol] = topKScoresFn; } - if (activeOutput(SortedClassesCol)) + if (activeCols(SortedClassesCol)) { ValueGetter> topKClassesFn = (ref VBuffer dst) => @@ -734,7 +734,7 @@ public override Delegate[] CreateGetters(Row input, Func activeOutput getters[SortedClassesCol] = topKClassesFn; } - if (activeOutput(LogLossCol)) + if (activeCols(LogLossCol)) { ValueGetter logLossFn = (ref double dst) => @@ -761,7 +761,7 @@ public override 
Delegate[] CreateGetters(Row input, Func activeOutput return getters; } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var infos = new Schema.DetachedColumn[4]; diff --git a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs index fb88a5bb60..daa71a0cb1 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs @@ -47,7 +47,7 @@ public MultiOutputRegressionEvaluator(IHostEnvironment env, Arguments args) { } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { Host.CheckParam(schema.Label != null, nameof(schema), "Could not find the label column"); var scoreInfo = schema.GetUniqueColumn(MetadataUtils.Const.ScoreValueKind.Score); @@ -436,7 +436,7 @@ public override void Save(ModelSaveContext ctx) base.Save(ctx); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => @@ -446,7 +446,7 @@ public override Func GetDependencies(Func activeOutput) (col == ScoreIndex || col == LabelIndex); } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var infos = new Schema.DetachedColumn[5]; infos[LabelOutput] = new Schema.DetachedColumn(LabelCol, _labelType, _labelMetadata); @@ -457,7 +457,7 @@ public override Schema.DetachedColumn[] GetOutputColumns() return infos; } - public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); 
Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs index 36eabc8a71..eb2f9f677d 100644 --- a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs @@ -39,7 +39,7 @@ public QuantileRegressionEvaluator(IHostEnvironment env, Arguments args) { } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { Host.CheckParam(schema.Label != null, nameof(schema), "Schema must contain a label column"); var scoreInfo = schema.GetUniqueColumn(MetadataUtils.Const.ScoreValueKind.Score); @@ -345,13 +345,13 @@ public override void Save(ModelSaveContext ctx) ctx.SaveNonEmptyString(quantiles[i].ToString()); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => (activeOutput(L1Col) || activeOutput(L2Col)) && (col == ScoreIndex || col == LabelIndex); } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var infos = new Schema.DetachedColumn[2]; @@ -380,7 +380,7 @@ private ValueGetter>> CreateSlotNamesGetter(string }; } - public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs index b4fdf358a0..92a0765c88 100644 --- a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs @@ -69,7 +69,7 @@ protected override 
Aggregator GetAggregatorCore(RoleMappedSchema schema, string return new Aggregator(Host, LossFunction, schema.Weight != null, stratName); } - protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) + private protected override IRowMapper CreatePerInstanceRowMapper(RoleMappedSchema schema) { Contracts.CheckParam(schema.Label != null, nameof(schema), "Could not find the label column"); var scoreInfo = schema.GetUniqueColumn(MetadataUtils.Const.ScoreValueKind.Score); @@ -245,13 +245,13 @@ public override void Save(ModelSaveContext ctx) base.Save(ctx); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => (activeOutput(L1Col) || activeOutput(L2Col)) && (col == ScoreIndex || col == LabelIndex); } - public override Schema.DetachedColumn[] GetOutputColumns() + private protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var infos = new Schema.DetachedColumn[2]; infos[L1Col] = new Schema.DetachedColumn(L1, NumberType.R8, null); @@ -259,7 +259,7 @@ public override Schema.DetachedColumn[] GetOutputColumns() return infos; } - public override Delegate[] CreateGetters(Row input, Func activeCols, out Action disposer) + private protected override Delegate[] CreateGettersCore(Row input, Func activeCols, out Action disposer) { Host.Assert(LabelIndex >= 0); Host.Assert(ScoreIndex >= 0); diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index 45860d2e39..17be128709 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -559,7 +559,7 @@ public Func GetDependencies(Func predicate) return _predictor.GetInputColumnRoles(); } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { Func predictorPredicate = col => false; for (int i = 0; i < OutputSchema.ColumnCount; i++) @@ 
-570,7 +570,7 @@ public Row GetRow(Row input, Func predicate, out Action disposer) break; } } - var predictorRow = _predictor.GetRow(input, predictorPredicate, out disposer); + var predictorRow = _predictor.GetRow(input, predictorPredicate); var getters = new Delegate[OutputSchema.ColumnCount]; for (int i = 0; i < OutputSchema.ColumnCount - 1; i++) { diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 3a738887ec..5eb1b91e49 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -353,8 +353,9 @@ public RowMapper(IHostEnvironment env, BindableMapper parent, RoleMappedSchema s if (parent.Stringify) { - _outputSchema = new SimpleSchema(_env, - new KeyValuePair(DefaultColumnNames.FeatureContributions, TextType.Instance)); + var builder = new SchemaBuilder(); + builder.AddColumn(DefaultColumnNames.FeatureContributions, TextType.Instance, null); + _outputSchema = builder.GetSchema(); if (InputSchema.HasSlotNames(InputRoleMappedSchema.Feature.Index, InputRoleMappedSchema.Feature.Type.VectorSize)) InputSchema.GetMetadata(MetadataUtils.Kinds.SlotNames, InputRoleMappedSchema.Feature.Index, ref _slotNames); @@ -385,28 +386,28 @@ public Func GetDependencies(Func predicate) return col => false; } - public Row GetOutputRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func active) { Contracts.AssertValue(input); - Contracts.AssertValue(predicate); + Contracts.AssertValue(active); var totalColumnsCount = 1 + _outputGenericSchema.ColumnCount; var getters = new Delegate[totalColumnsCount]; - if (predicate(totalColumnsCount - 1)) + if (active(totalColumnsCount - 1)) { getters[totalColumnsCount - 1] = _parent.Stringify ? 
_parent.GetTextContributionGetter(input, InputRoleMappedSchema.Feature.Index, _slotNames) : _parent.GetContributionGetter(input, InputRoleMappedSchema.Feature.Index); } - var genericRow = _genericRowMapper.GetRow(input, GetGenericPredicate(predicate), out disposer); + var genericRow = _genericRowMapper.GetRow(input, GetGenericPredicate(active)); for (var i = 0; i < _outputGenericSchema.ColumnCount; i++) { if (genericRow.IsColumnActive(i)) getters[i] = RowCursorUtils.GetGetterAsDelegate(genericRow, i); } - return new SimpleRow(OutputSchema, input, getters); + return new SimpleRow(OutputSchema, genericRow, getters); } public Func GetGenericPredicate(Func predicate) @@ -418,11 +419,6 @@ public Func GetGenericPredicate(Func predicate) { yield return RoleMappedSchema.ColumnRole.Feature.Bind(InputRoleMappedSchema.Feature.Name); } - - public Row GetRow(Row input, Func active, out Action disposer) - { - return GetOutputRow(input, active, out disposer); - } } private sealed class FeatureContributionSchema : ISchema diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index 0324d950ce..41dff06100 100644 --- a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -307,9 +307,9 @@ public Func GetDependencies(Func predicate) return _mapper.GetInputColumnRoles(); } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { - var innerRow = _mapper.GetRow(input, predicate, out disposer); + var innerRow = _mapper.GetRow(input, predicate); return new RowImpl(innerRow, OutputSchema); } @@ -386,38 +386,30 @@ public void GetMetadata(string kind, int col, ref TValue value) } } - private sealed class RowImpl : Row + private sealed class RowImpl : WrappingRow { - private readonly Row _row; private readonly Schema _schema; - public override long Batch => _row.Batch; - public 
override long Position => _row.Position; // The schema is of course the only difference from _row. public override Schema Schema => _schema; public RowImpl(Row row, Schema schema) + : base(row) { Contracts.AssertValue(row); Contracts.AssertValue(schema); - _row = row; _schema = schema; } public override bool IsColumnActive(int col) { - return _row.IsColumnActive(col); + return Input.IsColumnActive(col); } public override ValueGetter GetGetter(int col) { - return _row.GetGetter(col); - } - - public override ValueGetter GetIdGetter() - { - return _row.GetIdGetter(); + return Input.GetGetter(col); } } } diff --git a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs index 34913c5d1b..4a490f04f2 100644 --- a/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs +++ b/src/Microsoft.ML.Data/Scorers/RowToRowScorerBase.cs @@ -155,8 +155,9 @@ protected override Delegate[] CreateGetters(Row input, Func active, o Func predicateInput; Func predicateMapper; GetActive(bindings, active, out predicateInput, out predicateMapper); - var output = bindings.RowMapper.GetRow(input, predicateMapper, out disp); + var output = bindings.RowMapper.GetRow(input, predicateMapper); Func activeInfos = iinfo => active(bindings.MapIinfoToCol(iinfo)); + disp = output.Dispose; return GetGetters(output, activeInfos); } @@ -220,7 +221,8 @@ private sealed class Cursor : SynchronizedCursorBase private readonly BindingsBase _bindings; private readonly bool[] _active; private readonly Delegate[] _getters; - private readonly Action _disposer; + private readonly Row _output; + private bool _disposed; public override Schema Schema { get; } @@ -236,23 +238,27 @@ public Cursor(IChannelProvider provider, RowToRowScorerBase parent, RowCursor in Ch.Assert(active.Length == _bindings.ColumnCount); _active = active; - var output = _bindings.RowMapper.GetRow(input, predicateMapper, out _disposer); + _output = _bindings.RowMapper.GetRow(input, predicateMapper); try 
{ - Ch.Assert(output.Schema == _bindings.RowMapper.OutputSchema); - _getters = parent.GetGetters(output, iinfo => active[_bindings.MapIinfoToCol(iinfo)]); + Ch.Assert(_output.Schema == _bindings.RowMapper.OutputSchema); + _getters = parent.GetGetters(_output, iinfo => active[_bindings.MapIinfoToCol(iinfo)]); } catch (Exception) { - _disposer?.Invoke(); + _output.Dispose(); throw; } } - public override void Dispose() + protected override void Dispose(bool disposing) { - _disposer?.Invoke(); - base.Dispose(); + if (_disposed) + return; + if (disposing) + _output.Dispose(); + _disposed = true; + base.Dispose(disposing); } public override bool IsColumnActive(int col) diff --git a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs index 279d2a4ad0..e0244fb3a7 100644 --- a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs +++ b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs @@ -223,7 +223,7 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => InputRoleMappedSchema.Schema; - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { Contracts.AssertValue(input); Contracts.AssertValue(predicate); @@ -231,7 +231,6 @@ public Row GetRow(Row input, Func predicate, out Action disposer) var getters = new Delegate[1]; if (predicate(0)) getters[0] = _parent.GetPredictionGetter(input, InputRoleMappedSchema.Feature.Index); - disposer = null; return new SimpleRow(OutputSchema, input, getters); } } @@ -566,12 +565,11 @@ private static void EnsureCachedResultValueMapper(ValueMapper, Fl } } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { Contracts.AssertValue(input); var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); var getters = CreateGetters(input, active); - disposer = null; return new SimpleRow(OutputSchema, 
input, getters); } } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs index 327c029767..e3c47fa401 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs @@ -393,18 +393,18 @@ public static IDataTransform Create(IHostEnvironment env, TaggedArguments args, return transformer.MakeDataTransform(input); } - protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); + private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); /// /// Factory method for SignatureLoadDataTransform. /// - public static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) + private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) => new ColumnConcatenatingTransformer(env, ctx).MakeDataTransform(input); /// /// Factory method for SignatureLoadRowMapper. 
/// - public static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => new ColumnConcatenatingTransformer(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); private sealed class Mapper : MapperBase, ISaveAsOnnx, ISaveAsPfa @@ -829,7 +829,7 @@ public KeyValuePair SavePfaInfo(BoundPfaContext ctx) } } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { var active = new bool[InputSchema.ColumnCount]; for (int i = 0; i < _columns.Length; i++) diff --git a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs index 4c35b845a4..9cfd65bb38 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs @@ -158,7 +158,7 @@ public override void Save(ModelSaveContext ctx) SaveColumns(ctx); } - protected override IRowMapper MakeRowMapper(Schema inputSchema) + private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema, ColumnPairs); private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index cd602bad1a..1193f391e9 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -579,31 +579,23 @@ private static Schema GenerateOutputSchema(IEnumerable map, } } - private sealed class RowImpl : Row + private sealed class RowImpl : WrappingRow { private readonly Mapper _mapper; - private readonly Row _input; public RowImpl(Row input, Mapper mapper) + : base(input) { _mapper = mapper; - _input = input; } - public override long Position => _input.Position; - - public override long Batch => _input.Batch; - public override Schema Schema => 
_mapper.OutputSchema; public override ValueGetter GetGetter(int col) { int index = _mapper.GetInputIndex(col); - return _input.GetGetter(index); + return Input.GetGetter(index); } - public override ValueGetter GetIdGetter() - => _input.GetIdGetter(); - public override bool IsColumnActive(int col) => true; } @@ -684,9 +676,8 @@ public Func GetDependencies(Func activeOutput) return col => active[col]; } - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { - disposer = null; return new RowImpl(input, _mapper); } diff --git a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs index e8a75d4edd..80e7ac13fd 100644 --- a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs @@ -434,7 +434,7 @@ private static bool AreRangesValid(int[][] slotsMin, int[][] slotsMax) return true; } - protected override IRowMapper MakeRowMapper(Schema schema) + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 960c033dad..71c4fa3147 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -319,7 +319,7 @@ private Delegate GetGetterCore(Row input, int iinfo, out Action disposer) return ComposeGetterVec(input, iinfo, srcCol, srcType); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); // Factory method for SignatureLoadModel. 
private static HashingTransformer Create(IHostEnvironment env, ModelLoadContext ctx) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs index 6553b27239..60b692ad50 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs @@ -153,7 +153,7 @@ public override void Save(ModelSaveContext ctx) SaveColumns(ctx); } - protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); + private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); private sealed class Mapper : OneToOneMapperBase, ISaveAsPfa { diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index da67c1a3b5..cd4142bbad 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -229,7 +229,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx, ISaveAsPfa { diff --git a/src/Microsoft.ML.Data/Transforms/NopTransform.cs b/src/Microsoft.ML.Data/Transforms/NopTransform.cs index 3d4219a60d..c57a60d387 100644 --- a/src/Microsoft.ML.Data/Transforms/NopTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/NopTransform.cs @@ -133,13 +133,11 @@ public Func GetDependencies(Func predicate) return predicate; } - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { Contracts.CheckValue(input, nameof(input)); 
Contracts.CheckValue(active, nameof(active)); Contracts.CheckParam(input.Schema == Source.Schema, nameof(input), "Schema of input row must be the same as the schema the mapper is bound to"); - - disposer = null; return input; } diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index 0e20466a2e..afc597cf50 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -478,7 +478,7 @@ protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCo public new IDataTransform MakeDataTransform(IDataView input) => base.MakeDataTransform(input); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx, ISaveAsPfa { diff --git a/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs b/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs index 0a74c8a39c..47b5ef4670 100644 --- a/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs +++ b/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs @@ -101,7 +101,7 @@ protected OneToOneMapperBase(IHost host, OneToOneTransformerBase parent, Schema } } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { var active = new bool[InputSchema.ColumnCount]; foreach (var pair in ColMapNewToOld) diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 76da06d846..56adc40906 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -495,6 +495,7 @@ public void Fetch(int idx, ref T value) private Exception _producerTaskException; private readonly int[] 
_colToActivesIndex; + private bool _disposed; public override Schema Schema => _input.Schema; @@ -554,14 +555,17 @@ public Cursor(IChannelProvider provider, int poolRows, RowCursor input, Random r _producerTask = LoopProducerWorker(); } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (_producerTask.Status == TaskStatus.Running) + if (_disposed) + return; + if (disposing && _producerTask.Status == TaskStatus.Running) { _toProduce.Post(0); _producerTask.Wait(); } - base.Dispose(); + _disposed = true; + base.Dispose(disposing); } public static void PostAssert(ITargetBlock target, T item) diff --git a/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs b/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs index 356a0cc546..6796736da0 100644 --- a/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs +++ b/src/Microsoft.ML.Data/Transforms/RowToRowTransformerBase.cs @@ -33,7 +33,8 @@ public IRowToRowMapper GetRowToRowMapper(Schema inputSchema) return new RowToRowMapperTransform(Host, new EmptyDataView(Host, inputSchema), MakeRowMapper(inputSchema), MakeRowMapper); } - protected abstract IRowMapper MakeRowMapper(Schema schema); + [BestFriend] + private protected abstract IRowMapper MakeRowMapper(Schema schema); public Schema GetOutputSchema(Schema inputSchema) { @@ -67,9 +68,9 @@ protected MapperBase(IHost host, Schema inputSchema) protected abstract Schema.DetachedColumn[] GetOutputColumnsCore(); - public Schema.DetachedColumn[] GetOutputColumns() => _outputColumns.Value; + Schema.DetachedColumn[] IRowMapper.GetOutputColumns() => _outputColumns.Value; - public Delegate[] CreateGetters(Row input, Func activeOutput, out Action disposer) + Delegate[] IRowMapper.CreateGetters(Row input, Func activeOutput, out Action disposer) { // REVIEW: it used to be that the mapper's input schema in the constructor was required to be reference-equal to the schema // of the input row. 
@@ -100,7 +101,11 @@ public Delegate[] CreateGetters(Row input, Func activeOutput, out Act protected abstract Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer); - public abstract Func GetDependencies(Func activeOutput); + Func IRowMapper.GetDependencies(Func activeOutput) + => GetDependenciesCore(activeOutput); + + [BestFriend] + private protected abstract Func GetDependenciesCore(Func activeOutput); public abstract void Save(ModelSaveContext ctx); } diff --git a/src/Microsoft.ML.Data/Transforms/TransformBase.cs b/src/Microsoft.ML.Data/Transforms/TransformBase.cs index 4618b756f7..41ddf322e5 100644 --- a/src/Microsoft.ML.Data/Transforms/TransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/TransformBase.cs @@ -168,19 +168,16 @@ public Func GetDependencies(Func predicate) public Schema InputSchema => Source.Schema; - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); Host.Check(input.Schema == Source.Schema, "Schema of input row must be the same as the schema the mapper is bound to"); - disposer = null; using (var ch = Host.Start("GetEntireRow")) { - Action disp; - var getters = CreateGetters(input, active, out disp); - disposer += disp; - return new RowImpl(input, this, OutputSchema, getters); + var getters = CreateGetters(input, active, out Action disp); + return new RowImpl(input, this, OutputSchema, getters, disp); } } @@ -188,26 +185,29 @@ public Row GetRow(Row input, Func active, out Action disposer) protected abstract int MapColumnIndex(out bool isSrc, int col); - private sealed class RowImpl : Row + private sealed class RowImpl : WrappingRow { private readonly Schema _schema; - private readonly Row _input; private readonly Delegate[] _getters; + private readonly Action _disposer; private readonly RowToRowMapperTransformBase _parent; - public override long Batch => _input.Batch; - - public 
override long Position => _input.Position; - public override Schema Schema => _schema; - public RowImpl(Row input, RowToRowMapperTransformBase parent, Schema schema, Delegate[] getters) + public RowImpl(Row input, RowToRowMapperTransformBase parent, Schema schema, Delegate[] getters, Action disposer) + : base(input) { - _input = input; _parent = parent; _schema = schema; _getters = getters; + _disposer = disposer; + } + + protected override void DisposeCore(bool disposing) + { + if (disposing) + _disposer?.Invoke(); } public override ValueGetter GetGetter(int col) @@ -215,7 +215,7 @@ public override ValueGetter GetGetter(int col) bool isSrc; int index = _parent.MapColumnIndex(out isSrc, col); if (isSrc) - return _input.GetGetter(index); + return Input.GetGetter(index); Contracts.Assert(_getters[index] != null); var fn = _getters[index] as ValueGetter; @@ -224,17 +224,12 @@ public override ValueGetter GetGetter(int col) return fn; } - public override ValueGetter GetIdGetter() - { - return _input.GetIdGetter(); - } - public override bool IsColumnActive(int col) { bool isSrc; int index = _parent.MapColumnIndex(out isSrc, col); if (isSrc) - return _input.IsColumnActive((index)); + return Input.IsColumnActive((index)); return _getters[index] != null; } } @@ -842,7 +837,8 @@ private sealed class Cursor : SynchronizedCursorBase private readonly bool[] _active; private readonly Delegate[] _getters; - private readonly Action[] _disposers; + private readonly Action _disposer; + private bool _disposed; public Cursor(IChannelProvider provider, OneToOneTransformBase parent, RowCursor input, bool[] active) : base(provider, input) @@ -854,30 +850,29 @@ public Cursor(IChannelProvider provider, OneToOneTransformBase parent, RowCursor _active = active; _getters = new Delegate[parent.Infos.Length]; - // Build the delegates. - List disposers = null; + // Build the disposing delegate. 
+ Action masterDisposer = null; for (int iinfo = 0; iinfo < _getters.Length; iinfo++) { if (!IsColumnActive(parent._bindings.MapIinfoToCol(iinfo))) continue; - Action disposer; - _getters[iinfo] = parent.GetGetterCore(Ch, Input, iinfo, out disposer); + _getters[iinfo] = parent.GetGetterCore(Ch, Input, iinfo, out Action disposer); if (disposer != null) - Utils.Add(ref disposers, disposer); + masterDisposer += disposer; } - - if (Utils.Size(disposers) > 0) - _disposers = disposers.ToArray(); + _disposer = masterDisposer; } - public override void Dispose() + protected override void Dispose(bool disposing) { - if (_disposers != null) + if (_disposed) + return; + if (disposing) { - foreach (var act in _disposers) - act(); + _disposer?.Invoke(); } - base.Dispose(); + _disposed = true; + base.Dispose(disposing); } public override Schema Schema => _bindings.AsSchema; diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 41be7c861f..3388f15c35 100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -360,7 +360,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); internal static bool GetNewType(IExceptionContext ectx, ColumnType srcType, DataKind kind, KeyRange range, out PrimitiveType itemType) { diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index 81d8e927b4..7a5bf18995 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ 
b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -713,7 +713,7 @@ public TermMap GetTermMap(int iinfo) return _unboundMaps[iinfo]; } - protected override IRowMapper MakeRowMapper(Schema schema) + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx, ISaveAsPfa diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index dcce4f0018..3fd5ef95e8 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -103,12 +103,13 @@ public Func GetDependencies(Func predicate) yield break; } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { - return new SimpleRow(OutputSchema, input, new[] { CreateScoreGetter(input, predicate, out disposer) }); + var scoreGetter = CreateScoreGetter(input, predicate, out Action disposer); + return new SimpleRow(OutputSchema, input, new[] { scoreGetter }, disposer); } - public abstract Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer); + internal abstract Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer); } // A generic base class for pipeline ensembles. This class contains the combiner. @@ -124,7 +125,7 @@ public Bound(SchemaBindablePipelineEnsemble parent, RoleMappedSchema schema) _combiner = parent.Combiner; } - public override Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer) + internal override Delegate CreateScoreGetter(Row input, Func mapperPredicate, out Action disposer) { disposer = null; @@ -137,13 +138,12 @@ public override Delegate CreateScoreGetter(Row input, Func mapperPred // First get the output row from the pipelines. The input predicate of the predictor // is the output predicate of the pipeline. 
var inputPredicate = Mappers[i].GetDependencies(mapperPredicate); - var pipelineRow = BoundPipelines[i].GetRow(input, inputPredicate, out Action disp); - disposer += disp; + var pipelineRow = BoundPipelines[i].GetRow(input, inputPredicate); // Next we get the output row from the predictors. We activate the score column as output predicate. - var predictorRow = Mappers[i].GetRow(pipelineRow, col => col == ScoreCols[i], out disp); - disposer += disp; + var predictorRow = Mappers[i].GetRow(pipelineRow, col => col == ScoreCols[i]); getters[i] = predictorRow.GetGetter(ScoreCols[i]); + disposer += predictorRow.Dispose; } var comb = _combiner.GetCombiner(); @@ -164,7 +164,8 @@ public ValueGetter GetLabelGetter(Row input, int i, out Action disposer) Parent.Host.Check(Mappers[i].InputRoleMappedSchema.Label != null, "Mapper was not trained using a label column"); // The label should be in the output row of the i'th pipeline - var pipelineRow = BoundPipelines[i].GetRow(input, col => col == Mappers[i].InputRoleMappedSchema.Label.Index, out disposer); + var pipelineRow = BoundPipelines[i].GetRow(input, col => col == Mappers[i].InputRoleMappedSchema.Label.Index); + disposer = pipelineRow.Dispose; return RowCursorUtils.GetLabelGetter(pipelineRow, Mappers[i].InputRoleMappedSchema.Label.Index); } @@ -174,14 +175,16 @@ public ValueGetter GetWeightGetter(Row input, int i, out Action disposer if (Mappers[i].InputRoleMappedSchema.Weight == null) { - ValueGetter weight = (ref Single dst) => dst = 1; + ValueGetter weight = (ref float dst) => dst = 1; disposer = null; return weight; } // The weight should be in the output row of the i'th pipeline if it exists. 
var inputPredicate = Mappers[i].GetDependencies(col => col == Mappers[i].InputRoleMappedSchema.Weight.Index); - var pipelineRow = BoundPipelines[i].GetRow(input, inputPredicate, out disposer); - return pipelineRow.GetGetter(Mappers[i].InputRoleMappedSchema.Weight.Index); + var pipelineRow = BoundPipelines[i].GetRow(input, inputPredicate); + disposer = pipelineRow.Dispose; + return pipelineRow.GetGetter(Mappers[i].InputRoleMappedSchema.Weight.Index); + } } diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index 1e9cda953f..a6b5e4ebb7 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -208,11 +208,10 @@ public BoundMapper(IExceptionContext ectx, TreeEnsembleFeaturizerBindableMapper OutputSchema = Schema.Create(new SchemaImpl(ectx, owner, treeValueType, leafIdType, pathIdType)); } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func predicate) { _ectx.CheckValue(input, nameof(input)); _ectx.CheckValue(predicate, nameof(predicate)); - disposer = null; return new SimpleRow(OutputSchema, input, CreateGetters(input, predicate)); } diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index c18ef3f36a..da47e4e599 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -639,7 +639,7 @@ public static extern int Svd(Layout layout, SvdJob jobu, SvdJob jobvt, int m, int n, float[] a, int lda, float[] s, float[] u, int ldu, float[] vt, int ldvt, float[] superb); } - protected override IRowMapper MakeRowMapper(Schema schema) + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase diff --git a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs 
b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs index 2a117dfeda..d7b8a96c54 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs @@ -152,7 +152,7 @@ public override void Save(ModelSaveContext ctx) new float[] {0, 0, 0, 0, 1} }); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) { diff --git a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs index fd7707ecf7..9165a7e06d 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs @@ -147,7 +147,7 @@ private static VersionInfo GetVersionInfo() loaderAssemblyName: typeof(ImageLoaderTransform).Assembly.FullName); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs index ef1a71c47f..6b631c8065 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs @@ -400,7 +400,7 @@ public override void Save(ModelSaveContext ctx) info.Save(ctx); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) { diff --git 
a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs index ef4318c547..8a1ba5829d 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs @@ -285,7 +285,7 @@ public override void Save(ModelSaveContext ctx) } } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) { diff --git a/src/Microsoft.ML.Legacy/LearningPipelineDebugProxy.cs b/src/Microsoft.ML.Legacy/LearningPipelineDebugProxy.cs index 5b56bc28a0..a627657f62 100644 --- a/src/Microsoft.ML.Legacy/LearningPipelineDebugProxy.cs +++ b/src/Microsoft.ML.Legacy/LearningPipelineDebugProxy.cs @@ -147,8 +147,9 @@ private IDataView ExecutePipeline() catch (Exception e) { _pipelineExecutionException = e; - var fakeColumn = new KeyValuePair("Blank", TextType.Instance); - _preview = new EmptyDataView(_environment, Schema.Create(new SimpleSchema(_environment, fakeColumn))); + var builder = new SchemaBuilder(); + builder.AddColumn("Blank", TextType.Instance); + _preview = new EmptyDataView(_environment, builder.GetSchema()); } } } diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index dcffd150d5..1e20ebe350 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -221,7 +221,7 @@ public override void Save(ModelSaveContext ctx) foreach (var colName in Outputs) ctx.SaveNonEmptyString(colName); } - protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); + private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); private static int[] AdjustDimensions(OnnxShape shape) { 
@@ -309,7 +309,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() return info; } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => Enumerable.Range(0, _parent.Outputs.Length).Any(i => activeOutput(i)) && _inputColIndices.Any(i => i == col); } diff --git a/src/Microsoft.ML.PCA/PcaTransform.cs b/src/Microsoft.ML.PCA/PcaTransform.cs index 27ab5113d2..e3fee2639f 100644 --- a/src/Microsoft.ML.PCA/PcaTransform.cs +++ b/src/Microsoft.ML.PCA/PcaTransform.cs @@ -540,7 +540,7 @@ private float[][] PostProcess(float[][] y, float[] sigma, float[] z, int d, int return y; } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) { diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 55e50c5e36..4af405b5d5 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -358,15 +358,14 @@ private Delegate[] CreateGetter(Row input, bool[] active) return getters; } - public Row GetRow(Row input, Func predicate, out Action disposer) + public Row GetRow(Row input, Func active) { - var active = Utils.BuildArray(OutputSchema.ColumnCount, predicate); - var getters = CreateGetter(input, active); - disposer = null; + var activeArray = Utils.BuildArray(OutputSchema.ColumnCount, active); + var getters = CreateGetter(input, activeArray); return new SimpleRow(OutputSchema, input, getters); } - public ISchemaBindableMapper Bindable { get { return _parent; } } + public ISchemaBindableMapper Bindable => _parent; } } diff --git 
a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs index df486453b8..1fa2a323b5 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs @@ -96,7 +96,7 @@ public FieldAwareFactorizationMachineScalarRowMapper(IHostEnvironment env, RoleM } } - public Row GetRow(Row input, Func predicate, out Action action) + public Row GetRow(Row input, Func predicate) { var latentSum = new AlignedArray(_pred.FieldCount * _pred.FieldCount * _pred.LatentDimAligned, 16); var featureBuffer = new VBuffer(); @@ -111,7 +111,6 @@ public Row GetRow(Row input, Func predicate, out Action action) inputGetters[f] = input.GetGetter>(_inputColumnIndexes[f]); } - action = null; var getters = new Delegate[2]; if (predicate(0)) { diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index 016082d44d..05234bffe0 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -679,7 +679,7 @@ internal static (TFDataType[] tfOutputTypes, ColumnType[] outputTypes) GetOutput return (tfOutputTypes, outputTypes); } - protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); + private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); public override void Save(ModelSaveContext ctx) { @@ -913,7 +913,7 @@ private void UpdateCacheIfNeeded(long position, ITensorValueGetter[] srcTensorGe } } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { return col => Enumerable.Range(0, _parent.Outputs.Length).Any(i => activeOutput(i)) && 
_inputColIndices.Any(i => i == col); } diff --git a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs index 8dc4cc27b3..11195e8874 100644 --- a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs +++ b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs @@ -98,16 +98,14 @@ public TimeSeriesPredictionFunction(IHostEnvironment env, ITransformer transform { } - internal Row GetStatefulRows(Row input, IRowToRowMapper mapper, Func active, - List rows, out Action disposer) + internal Row GetStatefulRows(Row input, IRowToRowMapper mapper, Func active, List rows) { Contracts.CheckValue(input, nameof(input)); Contracts.CheckValue(active, nameof(active)); - disposer = null; IRowToRowMapper[] innerMappers = new IRowToRowMapper[0]; - if (mapper is CompositeRowToRowMapper) - innerMappers = ((CompositeRowToRowMapper)mapper).InnerMappers; + if (mapper is CompositeRowToRowMapper compositeMapper) + innerMappers = compositeMapper.InnerMappers; if (innerMappers.Length == 0) { @@ -122,10 +120,9 @@ internal Row GetStatefulRows(Row input, IRowToRowMapper mapper, Func throw Contracts.ExceptParam(nameof(input), $"Mapper required column '{input.Schema.GetColumnName(c)}' active but it was not."); } - var row = mapper.GetRow(input, active, out disposer); + var row = mapper.GetRow(input, active); if (row is StatefulRow statefulRow) rows.Add(statefulRow); - return row; } @@ -140,21 +137,10 @@ internal Row GetStatefulRows(Row input, IRowToRowMapper mapper, Func Row result = input; for (int i = 0; i < innerMappers.Length; ++i) { - Action localDisp; - result = GetStatefulRows(result, innerMappers[i], deps[i], rows, out localDisp); + result = GetStatefulRows(result, innerMappers[i], deps[i], rows); if (result is StatefulRow statefulResult) rows.Add(statefulResult); - - if (localDisp != null) - { - if (disposer == null) - disposer = localDisp; - else - disposer = localDisp + disposer; - // We want the last disposer to be called first, so the order of 
the addition here is important. - } } - return result; } @@ -168,13 +154,14 @@ private Action CreatePinger(List rows) return pinger; } - internal override void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, + private protected override void PredictionEngineCore(IHostEnvironment env, DataViewConstructionUtils.InputRow inputRow, IRowToRowMapper mapper, bool ignoreMissingColumns, SchemaDefinition inputSchemaDefinition, SchemaDefinition outputSchemaDefinition, out Action disposer, out IRowReadableAs outputRow) { List rows = new List(); - Row outputRowLocal = outputRowLocal = GetStatefulRows(inputRow, mapper, col => true, rows, out disposer); + Row outputRowLocal = outputRowLocal = GetStatefulRows(inputRow, mapper, col => true, rows); var cursorable = TypedCursorable.Create(env, new EmptyDataView(env, mapper.OutputSchema), ignoreMissingColumns, outputSchemaDefinition); _pinger = CreatePinger(rows); + disposer = outputRowLocal.Dispose; outputRow = cursorable.GetRow(outputRowLocal); } diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 7af0eb1cf1..bef7a167e7 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -259,7 +259,7 @@ private protected virtual void CloneCore(StateBase state) public bool IsRowToRowMapper => false; - public TState StateRef{ get; set; } + public TState StateRef { get; set; } public int StateRefCount; @@ -478,10 +478,12 @@ public Func GetDependencies(Func predicate) return col => false; } - public Row GetRow(Row input, Func active, out Action disposer) => - new RowImpl(_bindings.Schema, input, _mapper.CreateGetters(input, active, out disposer), - _mapper.CreatePinger(input, active, out disposer)); - + public Row GetRow(Row input, Func active) + { + var getters = _mapper.CreateGetters(input, 
active, out Action disposer); + var pingers = _mapper.CreatePinger(input, active, out Action pingerDisposer); + return new RowImpl(_bindings.Schema, input, getters, pingers, disposer + pingerDisposer); + } } private sealed class RowImpl : StatefulRow @@ -490,6 +492,8 @@ private sealed class RowImpl : StatefulRow private readonly Row _input; private readonly Delegate[] _getters; private readonly Action _pinger; + private readonly Action _disposer; + private bool _disposed; public override Schema Schema => _schema; @@ -497,7 +501,7 @@ private sealed class RowImpl : StatefulRow public override long Batch => _input.Batch; - public RowImpl(Schema schema, Row input, Delegate[] getters, Action pinger) + public RowImpl(Schema schema, Row input, Delegate[] getters, Action pinger, Action disposer) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(input, nameof(input)); @@ -506,13 +510,22 @@ public RowImpl(Schema schema, Row input, Delegate[] getters, Action pinger _input = input; _getters = getters ?? 
new Delegate[0]; _pinger = pinger; + _disposer = disposer; } - public override ValueGetter GetIdGetter() + protected override void Dispose(bool disposing) { - return _input.GetIdGetter(); + if (_disposed) + return; + if (disposing) + _disposer?.Invoke(); + _disposed = true; + base.Dispose(disposing); } + public override ValueGetter GetIdGetter() + => _input.GetIdGetter(); + public override ValueGetter GetGetter(int col) { Contracts.CheckParam(0 <= col && col < _getters.Length, nameof(col), "Invalid col value in GetGetter"); @@ -745,32 +758,30 @@ public Func GetDependencies(Func predicate) Schema IRowToRowMapper.InputSchema => Source.Schema; - public Row GetRow(Row input, Func active, out Action disposer) + public Row GetRow(Row input, Func active) { Host.CheckValue(input, nameof(input)); Host.CheckValue(active, nameof(active)); Host.Check(input.Schema == Source.Schema, "Schema of input row must be the same as the schema the mapper is bound to"); - disposer = null; using (var ch = Host.Start("GetEntireRow")) { - Action disp; var activeArr = new bool[OutputSchema.ColumnCount]; for (int i = 0; i < OutputSchema.ColumnCount; i++) activeArr[i] = active(i); var pred = GetActiveOutputColumns(activeArr); - var getters = _mapper.CreateGetters(input, pred, out disp); - disposer += disp; - return new StatefulRow(input, this, OutputSchema, getters, - _mapper.CreatePinger(input, pred, out disp)); + var getters = _mapper.CreateGetters(input, pred, out Action disp); + var pingers = _mapper.CreatePinger(input, pred, out Action pingerDisp); + return new StatefulRowImpl(input, this, OutputSchema, getters, pingers, disp + pingerDisp); } } - private sealed class StatefulRow : TimeSeries.StatefulRow + private sealed class StatefulRowImpl : StatefulRow { private readonly Row _input; private readonly Delegate[] _getters; private readonly Action _pinger; + private readonly Action _disposer; private readonly TimeSeriesRowToRowMapperTransform _parent; @@ -780,14 +791,21 @@ private sealed 
class StatefulRow : TimeSeries.StatefulRow public override Schema Schema { get; } - public StatefulRow(Row input, TimeSeriesRowToRowMapperTransform parent, - Schema schema, Delegate[] getters, Action pinger) + public StatefulRowImpl(Row input, TimeSeriesRowToRowMapperTransform parent, + Schema schema, Delegate[] getters, Action pinger, Action disposer) { _input = input; _parent = parent; Schema = schema; _getters = getters; _pinger = pinger; + _disposer = disposer; + } + + protected override void Dispose(bool disposing) + { + if (disposing) + _disposer?.Invoke(); } public override ValueGetter GetGetter(int col) @@ -825,6 +843,7 @@ private sealed class Cursor : SynchronizedCursorBase private readonly bool[] _active; private readonly ColumnBindings _bindings; private readonly Action _disposer; + private bool _disposed; public override Schema Schema => _bindings.Schema; @@ -854,19 +873,21 @@ public override ValueGetter GetGetter(int col) Ch.AssertValue(_getters); var getter = _getters[index]; - Ch.Assert(getter != null); - var fn = getter as ValueGetter; - if (fn == null) - throw Ch.Except("Invalid TValue in GetGetter: '{0}'", typeof(TValue)); - return fn; + Ch.AssertValue(getter); + if (getter is ValueGetter fn) + return fn; + throw Ch.Except("Invalid TValue in GetGetter: '{0}'", typeof(TValue)); } - public override void Dispose() + protected override void Dispose(bool disposing) { - _disposer?.Invoke(); - base.Dispose(); + if (_disposed) + return; + if (disposing) + _disposer?.Invoke(); + _disposed = true; + base.Dispose(disposing); } } } - } diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs index 5112452c13..21ad23b9b5 100644 --- a/src/Microsoft.ML.Transforms/GcnTransform.cs +++ b/src/Microsoft.ML.Transforms/GcnTransform.cs @@ -421,7 +421,7 @@ public override void Save(ModelSaveContext ctx) col.Save(ctx); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); + 
private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index 7020eb6ede..f700b35590 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -552,7 +552,7 @@ public override void ReadValue(int position) private readonly GroupKeyColumnChecker[] _groupCheckers; private readonly KeepColumnAggregator[] _aggregators; - public override long Batch { get { return 0; } } + public override long Batch => 0; public override Schema Schema => _parent.OutputSchema; @@ -662,11 +662,19 @@ private bool IsSameGroup() return result; } - public override void Dispose() + private bool _disposed; + + protected override void Dispose(bool disposing) { - _leadingCursor.Dispose(); - _trailingCursor.Dispose(); - base.Dispose(); + if (_disposed) + return; + if (disposing) + { + _leadingCursor.Dispose(); + _trailingCursor.Dispose(); + } + _disposed = true; + base.Dispose(disposing); } public override ValueGetter GetGetter(int col) diff --git a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs index 6159637da5..27456da815 100644 --- a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs +++ b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs @@ -162,7 +162,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git 
a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index c509881d31..64dae82ef7 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -140,7 +140,7 @@ public override void Save(ModelSaveContext ctx) SaveColumns(ctx); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs index 971aae846c..8273467e7e 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs @@ -139,7 +139,7 @@ public override void Save(ModelSaveContext ctx) SaveColumns(ctx); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index 7659d53c32..2f6ebef311 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -493,7 +493,7 @@ public static IDataTransform Create(IHostEnvironment env, IDataView input, param } // Factory method for SignatureLoadModel. 
- public static MissingValueReplacingTransformer Create(IHostEnvironment env, ModelLoadContext ctx) + private static MissingValueReplacingTransformer Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); var host = env.Register(LoadName); @@ -505,11 +505,11 @@ public static MissingValueReplacingTransformer Create(IHostEnvironment env, Mode } // Factory method for SignatureLoadDataTransform. - public static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) + private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - public static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); private VBuffer CreateVBuffer(T[] array) @@ -558,7 +558,7 @@ public override void Save(ModelSaveContext ctx) } } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx { diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index f1b7d49895..d5fa0defa1 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -505,7 +505,7 @@ public override void Save(ModelSaveContext ctx) _transformInfos[i].Save(ctx, string.Format("MatrixGenerator{0}", i)); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : 
OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs index dd0738e50e..08ff810c2b 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs @@ -1068,10 +1068,8 @@ private static List>> Train(IHostEnvironment env, I return columnMappings; } - protected override IRowMapper MakeRowMapper(Schema schema) - { - return new Mapper(this, schema); - } + private protected override IRowMapper MakeRowMapper(Schema schema) + => new Mapper(this, schema); } /// diff --git a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs index 5351e3f7f4..170141c823 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs @@ -540,7 +540,7 @@ public override void Save(ModelSaveContext ctx) } } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index a4e0cee7e6..a9459df027 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ -307,7 +307,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); private void 
CheckResources() { @@ -491,7 +491,7 @@ private bool ResourceExists(StopWordsRemovingEstimator.Language lang) (_resourcesExist[langVal] ?? (_resourcesExist[langVal] = GetResourceFileStreamOrNull(lang) != null).Value); } - public override Func GetDependencies(Func activeOutput) + private protected override Func GetDependenciesCore(Func activeOutput) { var active = new bool[InputSchema.ColumnCount]; foreach (var pair in _colMapNewToOld) @@ -966,7 +966,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs index 2a7fa68e4a..52ab910488 100644 --- a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs +++ b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs @@ -193,7 +193,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs index 949c4474bb..d267c6706a 100644 --- a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs +++ 
b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs @@ -183,7 +183,7 @@ internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDat private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : OneToOneMapperBase { diff --git a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs index b1c6c73fe5..f83c0c26d4 100644 --- a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs +++ b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs @@ -318,7 +318,7 @@ public override void Save(ModelSaveContext ctx) ctx.Writer.Write((uint)_modelKind); } - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) { diff --git a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs index 1805e9c317..18f652ad6d 100644 --- a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs +++ b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs @@ -221,7 +221,7 @@ internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDat private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); - protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); private sealed class Mapper : 
OneToOneMapperBase, ISaveAsPfa { diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index 4a518be564..fe823a220c 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -79,7 +79,7 @@ private void InitMap(T val, ColumnType type, int hashBits = 20, ValueGetter c == outCol, out var _); + var outRow = mapper.GetRow(_inRow, c => c == outCol); if (type is VectorType) _vecGetter = outRow.GetGetter>(outCol); else diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index e5845db090..b328cb60fc 100644 --- a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -147,7 +147,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe var xf = new HashingTransformer(Env, new[] { info }); var mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out int outCol); - var outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + var outRow = mapper.GetRow(inRow, c => c == outCol); var getter = outRow.GetGetter(outCol); uint result = 0; @@ -159,7 +159,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); - outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + outRow = mapper.GetRow(inRow, c => c == outCol); getter = outRow.GetGetter(outCol); getter(ref result); @@ -177,7 +177,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); - outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + outRow = mapper.GetRow(inRow, c => c == outCol); var 
vecGetter = outRow.GetGetter>(outCol); VBuffer vecResult = default; @@ -192,7 +192,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); - outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + outRow = mapper.GetRow(inRow, c => c == outCol); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); @@ -211,7 +211,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); - outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + outRow = mapper.GetRow(inRow, c => c == outCol); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); @@ -224,7 +224,7 @@ private void HashTestCore(T val, PrimitiveType type, uint expected, uint expe xf = new HashingTransformer(Env, new[] { info }); mapper = xf.GetRowToRowMapper(inRow.Schema); mapper.OutputSchema.TryGetColumnIndex("Bar", out outCol); - outRow = mapper.GetRow(inRow, c => c == outCol, out var _); + outRow = mapper.GetRow(inRow, c => c == outCol); vecGetter = outRow.GetGetter>(outCol); vecGetter(ref vecResult); From 33c188461eaf9916296002d156294bd255b4f85d Mon Sep 17 00:00:00 2001 From: Abhishek Goswami Date: Thu, 6 Dec 2018 13:45:10 -0800 Subject: [PATCH 031/100] TensorFlowTransform example in Microsoft.ML.Samples (#1812) * TF example * example cleanup; link from catalog * cleanup of comments in example * review comments --- .../Dynamic/TensorFlowTransform.cs | 96 +++++++++++++++++++ .../Microsoft.ML.Samples.csproj | 2 + docs/samples/Microsoft.ML.Samples/Program.cs | 2 +- .../TensorflowCatalog.cs | 7 ++ 4 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs diff 
--git a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs new file mode 100644 index 0000000000..2f526f54da --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs @@ -0,0 +1,96 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; +using System; +using System.Linq; + +namespace Microsoft.ML.Samples.Dynamic +{ + class TensorFlowTransformExample + { + /// + /// Example use of the TensorFlowEstimator in a ML.NET pipeline. + /// + public static void TensorFlowScoringSample() + { + // Download the ResNet 101 model from the location below. + // https://storage.googleapis.com/download.tensorflow.org/models/tflite_11_05_08/resnet_v2_101.tgz + var modelLocation = @"resnet_v2_101/resnet_v2_101_299_frozen.pb"; + + var mlContext = new MLContext(); + var data = GetTensorData(); + var idv = mlContext.CreateStreamingDataView(data); + + // Create a ML pipeline. + var pipeline = mlContext.Transforms.ScoreTensorFlowModel( + modelLocation, + new[] { nameof(TensorData.input) }, + new[] { nameof(OutputScores.output) }); + + // Run the pipeline and get the transformed values. + var estimator = pipeline.Fit(idv); + var transformedValues = estimator.Transform(idv); + + // Retrieve model scores. + var outScores = transformedValues.AsEnumerable(mlContext, reuseRowObject: false); + + // Display scores. (for the sake of brevity we display scores of the first 3 classes) + foreach (var prediction in outScores) + { + int numClasses = 0; + foreach (var classScore in prediction.output.Take(3)) + { + Console.WriteLine($"Class #{numClasses++} score = {classScore}"); + } + Console.WriteLine(new string('-', 10)); + } + + // Results look like below... 
+ //Class #0 score = -0.8092947 + //Class #1 score = -0.3310375 + //Class #2 score = 0.1119193 + //---------- + //Class #0 score = -0.7807726 + //Class #1 score = -0.2158062 + //Class #2 score = 0.1153686 + //---------- + } + + private const int imageHeight = 224; + private const int imageWidth = 224; + private const int numChannels = 3; + private const int inputSize = imageHeight * imageWidth * numChannels; + + /// + /// A class to hold sample tensor data. + /// Member name should match the inputs that the model expects (in this case, input). + /// + public class TensorData + { + [VectorType(imageHeight, imageWidth, numChannels)] + public float[] input { get; set; } + } + + /// + /// Method to generate sample test data. Returns 2 sample rows. + /// + public static TensorData[] GetTensorData() + { + // This can be any numerical data. Assume image pixel values. + var image1 = Enumerable.Range(0, inputSize).Select(x => (float)x / inputSize).ToArray(); + var image2 = Enumerable.Range(0, inputSize).Select(x => (float)(x + 10000) / inputSize).ToArray(); + return new TensorData[] { new TensorData() { input = image1 }, new TensorData() { input = image2 } }; + } + + /// + /// Class to contain the output values from the transformation. 
+ /// + class OutputScores + { + public float[] output { get; set; } + } + } +} \ No newline at end of file diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj index 135e502d9f..414e271b86 100644 --- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj +++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj @@ -13,6 +13,7 @@ + @@ -20,6 +21,7 @@ + false diff --git a/docs/samples/Microsoft.ML.Samples/Program.cs b/docs/samples/Microsoft.ML.Samples/Program.cs index 83a7e8c2e8..d64bdb55a1 100644 --- a/docs/samples/Microsoft.ML.Samples/Program.cs +++ b/docs/samples/Microsoft.ML.Samples/Program.cs @@ -6,7 +6,7 @@ internal static class Program { static void Main(string[] args) { - LdaTransformExample.LdaTransform(); + TensorFlowTransformExample.TensorFlowScoringSample(); } } } diff --git a/src/Microsoft.ML.TensorFlow/TensorflowCatalog.cs b/src/Microsoft.ML.TensorFlow/TensorflowCatalog.cs index 638a4287f0..5d4c73664c 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowCatalog.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowCatalog.cs @@ -18,6 +18,13 @@ public static class TensorflowCatalog /// Location of the TensorFlow model. /// The names of the model inputs. /// The names of the requested model outputs. 
+ /// + /// + /// + /// + /// public static TensorFlowEstimator ScoreTensorFlowModel(this TransformsCatalog catalog, string modelLocation, string[] inputs, From 284e02cadf5342aa0c36f31d62fc6fa15bc06885 Mon Sep 17 00:00:00 2001 From: Yael Dekel Date: Fri, 7 Dec 2018 10:05:34 -0800 Subject: [PATCH 032/100] ImagePixelExtractorTransform support for images with no alpha channel (#1821) --- .../ImagePixelExtractorTransform.cs | 26 ++---------------- .../ImageResizerTransform.cs | 2 +- .../DnnImageFeaturizerTest.cs | 2 +- .../OnnxTransformTests.cs | 2 +- .../Scenarios/TensorflowTests.cs | 8 +++--- .../TensorflowTests.cs | 4 +-- .../TensorFlowEstimatorTests.cs | 4 +-- test/data/images/images.tsv | 1 + test/data/images/tomato.bmp | Bin 0 -> 1281654 bytes 9 files changed, 15 insertions(+), 34 deletions(-) create mode 100644 test/data/images/tomato.bmp diff --git a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs index 6b631c8065..ddc43e9468 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs @@ -483,7 +483,9 @@ private ValueGetter> GetGetterCore(Row input, int iinfo, return; } - Host.Check(src.PixelFormat == System.Drawing.Imaging.PixelFormat.Format32bppArgb); + Host.Check(src.PixelFormat == System.Drawing.Imaging.PixelFormat.Format32bppArgb + || src.PixelFormat == System.Drawing.Imaging.PixelFormat.Format24bppRgb, + "Transform only supports pixel formats Format24bppRgb and Format32bppArgb"); Host.Check(src.Height == height && src.Width == width); var editor = VBufferEditor.Create(ref dst, size); @@ -542,28 +544,6 @@ private ValueGetter> GetGetterCore(Row input, int iinfo, else { int idstMin = 0; - if (ex.Alpha) - { - // The image only has rgb but we need to supply alpha as well, so fake it up, - // assuming that it is 0xFF. 
- if (!vf.IsEmpty) - { - Single v = (0xFF - offset) * scale; - for (int i = 0; i < cpix; i++) - vf[i] = v; - } - else - { - for (int i = 0; i < cpix; i++) - vb[i] = 0xFF; - } - idstMin = cpix; - - // We've preprocessed alpha, avoid it in the - // scan operation below. - a = false; - } - for (int y = 0; y < h; ++y) { int idstBase = idstMin + y * w; diff --git a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs index 8a1ba5829d..0beb7202fd 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs @@ -411,7 +411,7 @@ protected override Delegate MakeGetter(Row input, int iinfo, Func act destWidth = (int)(sourceWidth * aspect); destHeight = (int)(sourceHeight * aspect); } - dst = new Bitmap(info.Width, info.Height); + dst = new Bitmap(info.Width, info.Height, src.PixelFormat); var srcRectangle = new Rectangle(sourceX, sourceY, sourceWidth, sourceHeight); var destRectangle = new Rectangle(destX, destY, destWidth, destHeight); using (var g = Graphics.FromImage(dst)) diff --git a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs index fbd8b36b19..b9dd06cd93 100644 --- a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs +++ b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs @@ -134,7 +134,7 @@ public void OnnxStatic() Assert.Equal(512, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); + Assert.Equal(4, numRows); } } diff --git a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs index cf60bf28d3..b420001c26 100644 --- a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs @@ -210,7 +210,7 @@ public void OnnxStatic() Assert.Equal(1000, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); 
+ Assert.Equal(4, numRows); } } diff --git a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs index f93f06912b..5b7d9a59e3 100644 --- a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs @@ -56,8 +56,8 @@ public void TensorFlowTransforCifarEndToEndTest() { ImagePath = GetDataPath("images/banana.jpg") }); - Assert.Equal(1, prediction.PredictedScores[0], 2); - Assert.Equal(0, prediction.PredictedScores[1], 2); + Assert.Equal(0, prediction.PredictedScores[0], 2); + Assert.Equal(1, prediction.PredictedScores[1], 2); Assert.Equal(0, prediction.PredictedScores[2], 2); prediction = predictFunction.Predict(new CifarData() @@ -65,8 +65,8 @@ public void TensorFlowTransforCifarEndToEndTest() ImagePath = GetDataPath("images/hotdog.jpg") }); Assert.Equal(0, prediction.PredictedScores[0], 2); - Assert.Equal(1, prediction.PredictedScores[1], 2); - Assert.Equal(0, prediction.PredictedScores[2], 2); + Assert.Equal(0, prediction.PredictedScores[1], 2); + Assert.Equal(1, prediction.PredictedScores[2], 2); } } diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs index 10d0db46a9..047bde9f39 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs @@ -654,7 +654,7 @@ public void TensorFlowTransformCifar() Assert.Equal(10, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); + Assert.Equal(4, numRows); } } @@ -718,7 +718,7 @@ public void TensorFlowTransformCifarSavedModel() Assert.Equal(10, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); + Assert.Equal(4, numRows); } } diff --git a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs index 3a80bb8d56..a8b32b96a8 100644 --- 
a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs @@ -175,7 +175,7 @@ public void TestTensorFlowStatic() Assert.Equal(10, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); + Assert.Equal(4, numRows); } } @@ -222,7 +222,7 @@ public void TestTensorFlowStaticWithSchema() Assert.Equal(10, buffer.Length); numRows += 1; } - Assert.Equal(3, numRows); + Assert.Equal(4, numRows); } } diff --git a/test/data/images/images.tsv b/test/data/images/images.tsv index 0315f7865b..248db1636e 100644 --- a/test/data/images/images.tsv +++ b/test/data/images/images.tsv @@ -1,3 +1,4 @@ +tomato.bmp tomato banana.jpg banana hotdog.jpg hotdog tomato.jpg tomato \ No newline at end of file diff --git a/test/data/images/tomato.bmp b/test/data/images/tomato.bmp new file mode 100644 index 0000000000000000000000000000000000000000..d4c67b501a997663344c01a8ec894ce6f7b019aa GIT binary patch literal 1281654 zcmeFa*_Itwk|masK0#~s(xYCqmcBsGT6)*-=mR>F1ThbRKwzHd83aiH1P5?V=0N7i ztg2RZNmZp%OX_M#Ewx(RQdjr(yCXI>&5rqXzXK46xV}6c`r!PrBv5!vFqD{O>>iPk!=~fAk;! 
zTpozLu;}*d<<;dCm{(kXXTID_;$n5szn$6Pa~`<7h<&|he!V=vt1p_{`p!2ODX$J* zd0^#%H_8KjT(F3JuZus2R}B`vs{fp2apuF{bSd+1mb|^K6G>tNa>kpPw13Gc214`_KPI6^+%+t~{{v zz#tFwXZ`$pD>hrUeF$%R-KqL}pI3>!wr*$q{}#)V<<-C0&OY&bniyP3~0h^$wgRvuV+;EnP?MF`bHRQ)qqy&B6{ey;j2s*ZR~Tn4Um zOLeWP|4y}6eyHq~9~RZWI!|R^R2(kqJY(}^QT?m)jI}SDg(^N=l)d8Nm3`%bl?S$u z2Nsp@)p?f9V`F)BQT3%dPgNhs+N=6L)?QtA?0JgmsBLE+`&_T9e>EtP{cJq{-l{&1wJ)mfSJxeT z{%t3uiW?gHT(7HtH7=EX{>a>&XQ+5!QT#LZJY)H@>R<8s%D(cz$^+ZS16BSm%08BV z#`drHv$8L$%2ai5QTEqW_p9p`a%S6^tNY(>_OWrYsJLDBd8+4H_I0cCS3FSJ$MVnE z{)^((?NdQkce3)p$^(na>*_pXd8X=L@!nW_#kXVaRlOK%Uv_=3gidwcs{c;4SAMAM zl^?49i?WZ6tDV|^Y@UwoKbB9b{uPgnwO714*1oEAD-WzZ@ajD8O4WjP-MMwz20K z+rNspH^p8(@7ViaRQ`@VPsJydz2dR4_C@jVs?x1Iu=2pG_rRj^w>r;QKB@XwJT}%o zmRGC(Rb3ccN2>mdvR8Uw+4g^3EQ`8sar%Ef_WWc0T%CXHdB*my;%&Rx$39opzk04J zAD3+(%QF>^jkQ<2I@bQ`E6eJND-WzZFg8!C{$qKg>OYpp7R9R-pI7#(E-cDk)$!M5 z|9>Wns~X;kTedmQuEStuKR@|9+n3VZRglt-6-?9D;cs@5%GK#c_4r zvG&TJm3=H0cbW9?)4w(9TPJ`a91 z)qMC>7NwD>$MUv1f9^@kqWbeZ!~O8OdMe%q29^2;F!1ng+YOHs?u{c7j8mJzEmNCg z`K0PUmftF#9BZ%mxw0>>vaHU$^1#XiRo;!YkL7`}{8RDRSo>6howLJK!Veu@R~@ge zTj`mx_GRm(vFF$QR}a2T>>)g+Hmh)*df=PYL8G&%+phdJmQSkwWBF|?e^&j+^7g6% zt~{{v!16pWHXp10RlZmDvHVl<*jRhTtChXt^QylJ!JI&PU>gG0Hr4HUpI3>i`*Hi6 zL$lPwt9aXPc6Y*%d-MLwz|}oe*Q)wgd0pAZ@=wKk6;F<}SNvSrmseRZL_pEBIE!5$p?Z>O!MHh&lvY zu)OxBKaSsw+qX0Jdv;vq^;r8@9;^7UvXABAu{^(fD&y)&S00$(17q{6>OVHGD}Jc# zV|i&T?^XSsV;xsD!uvR0RDoWUt{8isp8Fqrs%HOxRV>ZUFc#haRn@<;hv(9;2K&OB zy3kmlpklZP&$tNYB) z#gWbS#ad>FA05J&WF%Md`MT`7$$?;guku3Gc?RCb)NW(_IhJQC9;^7UvXABAu{^(f zD&y)&S00$(17q{6>OVHGD}Jc#75`NBvHVu`uXuGTA%_kX|5x@(54jJ5aNQu9=Lx_+QB@@_Kr0PnwS><`#c#5O_&@gB{Ht za+_h7bLl8NkmP$x#;9Fmb;$1QKis7Qhi_^(Tru}SAZ4RlMn&2->FoDE90&h!fDoKI zLm09uZbP@JO`U%>Ftg7q3d%f2i->L%aHe(KPuiLA$65v!qgA(+2X>eTX2_I$NPl|v z@AWhX9z_@ET=7q^SM)WreKsN;PTEro!N3k<>rT~w$J+<`bNC4M_r&FL2Lx3%2Fq-L z_gan$cRs4g;ou>}jKJnF^#o_L{|v_AH$R)JD?6tzpknMy`(rIb>d~st$^&nr2gc??)gRBEcU39bV-HDxN8F*cH#j}8+MIl`2POOt}p#p zU*6v8xyITD-eDM{r8aYDAqcavX{o|>W_$B0a~g+uZ|b<YQWySLa!_y}Hk__Tl$A 
za0KV?mO}_Zn2nXCirCrg)m3J*4|mS+ap8#nUt7!A0GPA?)cYLx$dx~vR6Er z?M<46(V2O;#L7jXvb>4ab9K)v5A=JWiuSQ~T%~!4Yo{UDv$)E1w`YE!g?)(0IbQGgN0=3V`zYPEOefj7vck|9N#Mptw*By_4}uQi+0_hA3r+bpgNm%gb?l&K2BJ-WZ>?B2 zleTmFN@A^a#cuP!k|fjc(}o1h89LSC$u2gS3Nn+prQ)TgJ=0pY1WHs>6gm(LVQKH< z3_lO{a}qbtrf$u>Jw_ckC#c*w_O!ayo&%fL(#XCVkR$E2Sf)NK?<#j|`cTFm=VO<~ zs0p%mqO#&P?_I2gw})%}ACmUyAUm*WsbZi>UotuY3ilAH6R%@u>_G1; ziM87V#x8%};X-H`J#+SXZVt!HZczcqLAVLT9!(&^nF=Z%%l0M>_80@Ns_7&=1UF01 zD==2~HssRPu|w=~Q~-Xg--Fv759t#wp=QspE__#xv{!GL;i2TK3iwz6YdC0EGlQl~ za%I)T&7+1G^9{&?!=bJI_sIf)rgl{zJhhtHO9Opg1w#+BH7QZXOsB4_90&FeRKJXB zvdc^P>hZUs2bK}%L(Un}4b6~`q3N0H7~Rxl?u!Zr0#c1S2j-~=D%c0o-SBRiQE3t7 z22e5fSo@Hg;(f-Re@OrEXrr1#{5IqsLZ9Y6RM&!i_(0FuGsNM-o7L^0rXeyke<;Mv^L#QDK#k!J|9KPyWZ z2sJf=0)hIxDSC9^l8Ppx3?0-ALz6?rc~5*;Q7G)rx{d>52TFJTcRaOlOzrRn71`CTt~?Mt zFg3gfcJQ1Esn=|VY*o`U+HnsdXqVUn07h>TXYz4NU@A00V%gYl31vl1@nHb!5P;}+ zPEgT43xw~@7wX@e`@4mY;})JRJm%D?b3c&^c3B0K^OI^h+$3CU9bR{&zgEe5f0a6&46JYt>wN z;7##>VmN^6{yldp9P-Ya#7IwokgaHjj8?vfWOsWOQBEMlVB&Ji?ZXoI8_*tU z)x)BHwJSKY+4SM7h934TPH1sGBPk{M^e=3Y5x~s@qvadX_ojp*+b7k+z z7wyV|$}Pd<$6+2~n55-|uv&y(peKLj4V{xwD4tSntS?aMq z76JE-|IDL-@6*sMmd8hzM9EQuruoLU{b!~)J7PKH+r7znB=Gu*zN zYwPOXRvs80$?Q2Kaa>IWoD-9lkiTp;6Woo!fgijXn54hi!$`iXA~e}WgB3te?h5=l zXH&F1C#Y^?>}jj>Y!iEPbTo)l0Yd^2<*~64+#jXG_f|JE_0f7dG@S;ck7L3dN zBg$#B5?Yy}oy?u*htvr58C!rt|5XKFd0;zxAcX7Grt+c(c&<72d~}>Mx6)TZT=nRy zA(bdl&D5qist5FNAa~(d0@3C|K&Iu{EY;17JzI62*JX##!=+)-SZHW^M%H(!HcgG0 zu^ooQ?T|jzb^R%ZWBMm=gGDgP8vR&R#g-0 z^}N~AFSO)92XecEYRK}4hmc@1#H4d6uoEw_SeihT9>~90AhNNi;`Hy@%@ut0?Az7@ z{XyHaH>XCngcM^_2^c`gQ*Y*^mL;TrZU(ww6HEfoq?f72OjAuu#d5OaC}QWv;n-<* z9vteOnq$v7{LcG(%0B`0A2svda1}P21KKpIBux#QW*jciIQ0fXhvwrbCYx@NvGTUZ z4Ewl+SOEqgu>l~x;j#qcBW1)4d@qcV$lo zOEoAOwPtLGsPLu-XXaHgFo1Klv%2$tt`1F0PsQoqwQVcx>c)1h2Nbagb>V@anH|CC z`8Bh+Lh^bpJI-03EzT0o9<$-9DrI}1wcJ!KtBA4R9@ur}rU-l>s4mohx5M{lN6|9; z3nU;|9Jx#I% zslzG7$;A7(UDyvK?y1F1Hj`0@!&32KzdiHnY!>J0A-K9q|J~-ja>YQ9&oG+>_m2a^ zMS~>|bY)>|OpP3LQ=M88P?Qb?)e{BJcr!bk&debZ0!eBC0K2?N3{laPB@kClF%$HX 
zG*SS5Y}nOo4(acG97_iV(0s07?2%Z(lg(!KKP`E@vPI+T@!%ok;Ti}w(=tgr^E?WH zib4R*T~n)1?yzl(?CQpLn+I}mBIdJ^mXHpkny#;M(q)TlstS8jhkRGlIl-~>Mn878 zAqBa`k9E6a5mygzx~sZXMz`z`Ks$KhIP z_MeS={XDRElNbR2;9)@Q!8RvWFa!9hWT#R0fTplOxJiEy^g%OtgB^lVP5ID)2Y|RO z04OjBRQIQfD_0$kU6}|=(ln$}VftStOR;mhSyv?Bodl!r(ycauKVT)Lg18C}lw+G9 zj6xVAHS?H;Fi@y?`~Z6>5+fkV&(7Y&f$CKbypE_6xv>YU|3I)Es_p6?cAEzV1}D1p zgnIujWfMWjwcHHpS!pWSRcHdp3^274IfsZ?0$`tDL^JS`nqij+O&w(E2cpC|K_Ff= z=RB=$zB-ROR6q`y4dfB&w@kfC|KZr{(c-uYyxHuE>To#iGWrcT)>FtR3jzG#7PCKERVIqNr2TqXah7|JY=x}OH-C)Ju_7pQC3aePCbu0js=}5|+>h z&D_nOH+UoXDmQ~~y(#|_gwat`!zU0%Txp^RAqXMIuyhgzU5mpVIPK3Pu?Chcb=xM@ zc6A%O&;ts>9?&DAn!8@TvTBwd7y}ik85m<=6NiVMp+bedXo{oAU8J(WW6hc+((3{F zJbsmYps00DpfiUXkUfCVt!m0wuM6biW5BpwArwFrfs3{CqB>`|2Q~eMo7g?94lp){ zCXA+RNG6qrqGx!;0I*_6Wx!f>FO zhm01DDM1JWMT^(a`>>?u{FcJ0D)zk1}?;(`9K zRA3JTA#s#Y6$l9!(la;xT?tsJ9e}65R0G|4>G8N^G@S70!20ta$iUUMM3j=cdr zPIaI<^aHtTEST#5fhP>-QOwY;W&k1Tme&l|?P-Pw3C$jFP2)MF{van|>&0bC&v zySNIfLu$me3b8AnsrY~1TBi049e7+YqM7e!lts@)5S|)M_X)6^M!T8<+0Rpj7Kh9S z5jc-E;~@u(A}RzSgJ1ki>JIcSi51EOnyN3XM`{MpU#*7keJ!PSbsP&{zNl4TmvBx zMoLW$B|#W^g+~Y;MOO$&S6N_h(mM#>d0zaFVgam@!Q!&gSG!Pz?D$8nRMu2M6*?$c zm^(_P3V7aSHI*%|U0;P-9B4;P@lp?PDyzDAAIE_eL(_Zm%%Ovt-g5}nfdo?z>3Oca zOWqp}#Pb0RKLdOE;h|*Yri&lPJ(}OUjm7x}nihWqVmz7~-6yc48HRwCJf;|H7{kz` zsTxh7QV!tdK8OL=3B=G6L2t0>`b0tn0OH4+j)N{@^Y8xd@0#`=V|9qjqek&0|%T3nien6UbsVj_oie3m4jrJl$8_(1V&aSR4J7lf);uO zOLg^ryXz}r>8k)Lxkb}iV@E9gk0!ph9q)9S&2NV&yvC}C5ZRgHUH_Nn?RJW=H(_laZg@3{dL2jw(%1Su9Q644;{zp?%stGubVQ|s( zs9>BhiX3~4DJGE=mAnjCz&V{CBthjPP zC7kP*vRJ@#d>-sI2v}t(*mDy-#YHGgIxqnB83>v+?ll@KeN{b<92NXfHPt_WYD`17 zsmK z2@-*RNh3wH5b(Eu`?t;Wz}{S%o!wa7yq+!qmtMd^`= zD6SM~3{hr;I6Ti{HC0wFBC0t3U}{|p9d>Z7Up?9@_CN@&{^kPp)%@Zrm#t7=l+*)( z*kA~78mTbP!@c648GuM^HlV^c+u_5H8mq4=zg6}j<1$a5fpOy9Ldc<+!*Z#Xd;?93 z-!m~BB@mqE{%?k>LPDYCK;<~pYfYPYS@)u^85jX62LkGfAU4#@D#bNS4{nO09^>Eq z&EMp`I6v&orRUONSAGOq^o#+x&0UQR2W4bS_(p? 
zNsw4UXyqC)o5bobYI*M~F2SqI?Nkrs(CW7YeTB~9#UaOk(-IOVdaHDqY!=`%1| zyjzaFW{57yn6F-0{GN%MjtZXl@p8-o%@HMKMIe18Wkpcws~`znQw3$!uR4Ng$y#Vm z%U}QXUpIABW$eFPxLY-cYb{q<#Bv=$*ivgrTm|5x#C zFDO+J$#%CW_d{{X&8nxDo_@@8l8%ON#bW_f*V38QKk%XKvjLqy0GusY(8Nv3s2ipq zLNhx&Smgfzd^8=0Zd03n>_F_{N36(v1t4F&vgF6f-U!}CQv(zL29%K#1eKbs#X(k~ z>9rd;RQ4bP&}_=8N_faliPEoXd~cI_-}1 zKovzj_8xtun1^-}Zh|1L(DeB$msP-eugTRamy%FKLOO@NYOPC zTr?1f#0CPLHWa~uhfnRM^9O(fg9T0a#O-tuUFYtSGkbsrGyn}{0HNE=&2U<8<{-?0 zx+F`!bdSaFnK*bOyWK~}F^8BzB0=RqRakYIKqXs6JQRALdvS3UbcN)HYZVy*+z#BM zeEN4~5k<@Y_~S+0`p=g4^1y7TB|L?i{-nh6k0e$TxXJ>~YF5Lc0pgH_aYR!?OqGSv zchYbY@O)>CG4K$CAzYazAbgbouM1u^a#^XA741D0ZPczEDFapywG0pBxI!pqqg!|z zw0PVSGv=8EIsRqwG$HmR0#MON1uc(}Up3~C0@yj2+e>4@DwuB>Qxi(`3PfWyJj zp~TXt`A`PT5mx{JAU4DdiWvnMqmB`%_#{At&o+gH`oc2Q+EG?l~^K!0%wd5UHzC|*hiVi5uWSwcXh0RSe^3H^{} zXC5f>nG;l6$hmF^_L}IpI=|m`?vwWhcX+sU1S}?KG_}wOcqGN z@>hTLSCmqfpvwJmh>i+-XK02S))QqX1WTCLk|*7<(pqx6SIlw$>E`7 z7YQ)H7>5i_1ufITB^E0J-@!xSVAE@jOfT5VA=K%PTtHBh(M(gpc%a$$+O{n(aPetN?iFWJ3kLo9&Sd6*ICCZ z!~j87v2d%PtHcAS?n@O&Uj-E*y+I%~f~K%QkT))zDon8?y;OCR{hv;j;YWpA>S^jJ z1pL|Ime7PnP6Lo91CXoT|HK*^0I94Xv3w#+<1W%rv?%=*Q3y8ISBOldjAxC$WvEQ3 za0qyWTNdv& z57r4Y$f1Z>^j6C2{bWYc0q0 zv)mqlhDKV81E&Eckc!sOdu%Xfl<5ErErgsxQGtpuW`dCURS^JGEOIGPxe37QR1?o! 
z0e6ffSUt$1JTP-Cg#$yNhLA>6`YI=aTa+(j#g*!pYKqb`0Oc=1E+Umh#YI%Wo^H>K z!M$#;u-B;}*!gM*_IfuDo_ek7=K0{-oBfb|CEr$q0z3L>FUK^DcxZ+In7J8F>rDlr zfXC-T)Kor+uGItlCLL!d$fW`r3k{rx5}=9-Mq2ev&@~n7waT~3x?~lAWI&jlaV-IX z;d+BOYn3c@q=E>bi+YyOxu?mKgbuiRu&9}DEL-y9z!IKXLJM|cpUwgfa!XA*+1=h$ zTpB43oOH4aEvhU^EOCxPMe)OkVt^^$JeLvD3izP1R5HAd5a%*POQ=?%8A{uz=A!EI z>O4EZ1EV6Rr*nu;G;?6*;D?2&p0lQYfKd(+&|ZM-!!TZI(!*?Oh&AXLsLz}r zpIMe1kcik|3^6>s8O}4jsoTrIT_MK`D24&@V1;p4c)(69apjeFs?y^cUAZRMd ziKg~|M3Ftn6$u*xX{!IeSkR%lM+f$UaJk$Za=0Er4syM5aA32Dr5tJ_-!B;e3^*Ys96JT!S%c#!;bu;|&FKrV6++!$1rrn5K- z8WaJIm*g>qQp#Cc3qUGMfuT?V5KAb`6o(8>#;vj@ORQW|LeW9f3sbXl!8oMq_4io` zj9u=593Hd9RSr=EFpM6?!XE7*7rYt7RsYAqW@TX}ddU!0&JuX}AXau~kUOx@cW?#wQ$`PCVh?g)&jrUy76;5Df3I(rFs~8B zo}OMc0%j5M5-;o;U{Tl$Q-y;*@n)`HAS>^HbE*#rjq?WnA}OLH%Zc;?a1M0LZnF5L zGuF*$7atkU1jsIh=iR7*5a3{81ikc!0Ywu5kd3tDIPqxd?^7M?TE%fQP_xlvn;0>l zjkJtCzxNL~KaM*s=Y@ubgO~?bQ6a!+@gP*qb#dkTiiKNsT=hLDD_08=6#`TszU|RV zE{BMrcHAiOGXsE>`cqnd_~8fG`6&z^g`o$zZCgMx&`R?MY3n zYSIrx4hsZQBTc~KrCq9J8MtK1duj1LLvVE-+I3eI2wv*{^+zYahHB2E&a(P32~Y;T+_Oe+Lp&4zjpHPI*C5 z#tHOP4X?T?&_{nLme7sOqVG#{KFTjG?03lf_a_qeXAk~q%B`Mx zEe?dwadA2eLnZ*Mc`bc9gv#={ELT}#A;b@ZiZRBJL*h$fAy65&3}eKC25CVxSOY@pxrd*;BQ@zM8F+&iP&r3-(4gG$OMduDzM^m@Q8`g6< za1Y2!JwnUpvgB|AO>IeHX{20VQDSj$U1Bv=79s?pg5W_2F|H71A_{~bB^Fhd;+FBu zI7d>bYN)KJ&;W#Dq-L&Mq09{|j#InsY_YKmpKj`ikL?fwFGpsuD253<+JTh?$_oK$ zDuJq4DAxc~CRiG@4K><}g<}83ilTVkO>|-Gu#?EP0<4e}n=o5H63xV+1a@ znx77X=;Cl-T*64X%F-;B3oU&vL?nbSh6o{qlR^uC@rJL8%H z0HD$!;58!PP%_jc@MK>qR-FnA!;AXfCU$KjOcm6lg>WkG<5(vN!2383-O%KH;{N@H z`8dJJY=&_mscHlXILa;qU@3cci(JzXWa>b-(-+mdQw+k z?jm715Fp4cpC<&S%97TS%JRXY;tE2G5(`_5yGty_27&|ugZL3|h&>Q>3_=E^B9;NH z;Ac{(nn+@K-BB?Lz>8O|bhA~=eALd?8@ui4^7zhd$sy+fnQ@3-G(E_B^py&S11}=X zrd;okbfFNV>7^lH0M(a)pgNTw9{Tt$;vi$~=A+TK?rP#NOnFSDuQ1|{#kvHDkYG_W2M;ak08sU*>_NJyS!o`(qq8@O z5&xG?=qnc7N?)m7 z`?M8nJ!e{65>O`uL8YdD*9#&F623q%$e}IWH09s#ww5XVwstq=7L4J> zbXVbV&FQP1|%Yr zvC3pr@T+7nUuM=5hKa4d{|T4J6y-;CcmA5%V*+o`O4Xo&NGdVsmC3Nu&w9gdWh#1(%X&F_FxRsbm0 
zEaI-LG;>9X6~tBlJeA~!1POt}(w45-{FiEo4d7q@`q!a@c5UgV_%9A=F0tQM8=be{ z%YShdXewPAJA@rjj8)Q$PwI1A@>LdHL2kL$qQ2$0V5=W!Q3-P^iRH6c{Cyn;9gAx% z&S*(u1(k((3$(ykWdbs`J@T0yQdufG0eF!L^()sqRL@E;jV-J@RdVc}=NdcW?4N_G zvM?I8Hw350ZID%paOo?(o2z7arU0uns3Nc&3@-|~nl~*j00!b*Xn0^{uTIk*nmA(q%3gs0=mIaq3SmH_(AOQR(I8aHhGEo60d)SNfRQAwQ&3tLzVYbtv0|Q_=%iGXx=H}S@ zsru)q^ARETB2Imk)l{}%=%orAgg8+%=!(!(w*W#nHPKC2KvdrYJjj9BQ^rbG$?o|g zBMt}FNd)q3=vE2zuyP<1N~7RIqk;3WW`HpcU0m^ZHQ4eS5cO4_xnkMHmimgbAKW&@ zl|Bplv=x+q6oT}FW~eluBh^7Eg{{<-BnkjE41ud17fQbvyc9C(+n-Ncssmy7i~C&x z)_pU3M@@0N}MC6atk10Ivc84^bU1U6|>z_~9}FxV;C5n_;u3e{+{# z*KxD+494b7!Z`L+91%a8j}fsKaSdn?RG~?lRU`;_oH$k>Rlqwt?&+U*dFVE=lur{# zFZFGLG?he%&j*>ZB@?=F(SZPd&LQ@y56;u9R(}4m*stZdI*<2p ztQ!lUI!|Ti(D+F-SOQ+hRotCOS@b#XzI9;$tWXI+v9v%Ch@EFv^k$$47hwE{rYA)c z&#bS)v%8DvMo+6|a$&qQjv8*o4AO)(kuz7UmDpnDiq*46S@EY<)KnxKoeynE!0bS`(?3w zv9*N#JOgi_N7La}U2*Y=IIpEgmgs^jB)148y21n(os6wdZ2?ndaa=f~=qig63!{mV z)y!m2S(wpsm4#?Q%t&G(s1RTXHUu8&f?R?4WVkBp72Zrerj;@%NG!ebhvJo0R-MANEJ$W0xN*dp0&LxlT2^Wup!ZAorZj5>7j{smdDw zS<#KO_;!AMQ z2#Mt%1UYR*afSMeDy*+d6jz+HlDNWz31?AI*;Vl|*%GFzpsbp*N&-WgN`lHY3DuOV zBVlMBDW#x)wq((~|9&=EK2HnJE0smy_oH1wgYj|nfO=#(L2dz{_=MJy(1J6A)(W#* z91#qSR2D}K!Wi;c1B$`rf`P_}W8oJ=Y$>r28_?m5nMz`TMi7mROiC<8FSDL0iR@Fl zF~6j*P=-{V2tpwXWlrTUfEg+*57{MZWQRTG%)_k%L;Qqhh&nVwScagLnsUG(4+TPr zppwKKQI%8%k^h-o;(Sm&NgfO@diS?xu|19Ad?j+oWtUzVvyS+decUn}t3369>TErS zEOnP^YXGjG9BZUxq{X)?B2zEx}}S^~%tXi*kiD4$9(oPpwe9&v?Y5z>d{ zgXi>15bHMbt;$jKp}Z=$d!k{mG<+I&g&9Ls*~g4yeqoEE*)wycX$=%#Omt9IHNOc` zTxrgVRo;~qC4nYnC>5}!exRJ7x{%lap!~p=lR}hI*g~#Ay-?LqN@eBbB8a+Z_|}hg z{OQ*6A}u&Mdh#{^cr8EH@0RyW(S@y^3y-Nsc0UlqKX8-_EuYtdhdC>VZBSX9%YxRT z%EDGZ&7#oSoRqY=ELT}Bv8b{@%VK5(7=n#~N8Vs?Qeq)=5yOgShB_l(xu(*idgKMm zYgw*zp%kjA(m4w3FyXNq9w(14Vjvs)Eee|qa>#i=0(-0iD~1IS-oOfS76oj*A}}LV z2$U~@|B+ctF%Uj74tYm<2dgtBLFwRg%sKN-nLLl2#o|0q3?n-5Kv!Z+ZCbEJ?I6yI ztFq^wZmCq${B~8n0ID;GZd02ouK}q32B5)`ofhAcEwxpN7f|&NzChb*nhV#Z^5O}28?*rP_OcX|MM4XJ&rXK`6TneYSrS_Cs)Q?D zO?~Ami{rvChMdmQ7efpslrtrkX0bT*n#gjA#o!?`lYS9n3^~LhV-oS{`bt#5jAjBN 
z3siIh@R}9MS*~}ei7@8M*k4kcdv}8%dDEYdFc=S2I^qR+t%7)< zoS=@Pg7U2th$IR@Q%7ZS^l@4~>RnRzO+$51=&u`qotWy2LQ6_aY7jsVNPuVO>{e58 zNsdx(NpS_CrRgllT?`E*mJ}Cfv?#PNl#p1Q$ihHlf!4yHYxa^Vi{wn+c1C4@G2R%B zVhKeoVwhCG+(&q00Y(Zi7nmu`DZK1A^%cq-idrszD2p)Wa*3Mj#vvsb{d2cBU6Aas z`_BV3o5f?paSkvBp6e@*Ph!nhHK(dn2oUXFiJb6}5#oFTWDPRNt1$BmJdbok-htI! z5rNXN_&gP}m6=P1$2RLL9EFU=GC4uHVISYJt1Mn$vQc{pTt_+fZfn!j1|CBfk3=%=mIWC8ux0Vk_F0FG!n_rZecO}u46&D_avs-*XJ};agg-NZ2Ega?ZS`=FJ zCnp2xZ3-=p0=F0!PAXDpNnc4~VOSxtxbUZ0R9O^SL1K|S#gn2eL?8oF?7^T#43i2F z@FD_WWPvB0GRq~F*EQm11#C(zubV@v@Je58e=#;>9H;j2z;N6QtsH^US1zkuSs{`G z`Kw5Y^OXo_Ba|djXY+_o8;ay*e&%x>xSK3lpVe8^b`U>hxJpuCJ+Do0rrfLTd zVKZ!-yLg|Z4&Q=Cyg3AudGZ#=W}}NPVAVtb-{tlSSL%!7lDDePlvUcoE{3GhH5IYS zJ%A#_0pf?Gsz4$j$12PO5P`x|fx>k*(9}FIW_3i1n7;{hl+sETfP5|8b9hWSN^Vrf zxwoaSG<*O`J&0ZR6(k6vWbsoaUn9lGc*kqSm6{D<&^2)Ju6g>E3O^uIfO)mSfjrZYTD;oek%6`k7Vc1S-+xc2>!}t+bZgHIXF( z6b)kRxNs;npxz{AS?AWvJ+Sluf#L^EVF8E=;2oVkLs z3gQZ-S>mdivx2gsrlOj{R%(hOLGv>Z4_KtEf|`QxfE3{@kTew)3)G7wjbj?6fgqzk zXf5v-%X`JrI!9;YX|y+}Hr$48QUG`BqKk@)&zL8;!n_uIMc2`v*wVw2TkyQ5v#7Nw zw|p*(6ImEU4kd*a#4{HRHzXEgfD>6Ru@F2WG>{mgi;+fQPm)1oB1%OXh-5{zXhFGv z98m@#v!t&87A3J(`fB@Yry+MZWE`tLuBM2yrlO@%R*Xi*XD9(^iu}d-if{rFex`bf zt4#0Ub|e%Qe``j%WlGPg!iZLHG2zRTe3tplRD{%$?Yg{m1F0Pn{8hxfA zB8O3=kW+jj{*ft2AId6jrLXi^kkcBdyr{yitT1z>2`f%pY0gT@3e}%#N|RKar|J~d z7oDbZSw&^#6FZa~kRK9M(ovL8LCmNrwF8jl(&NeUZn1Fh3|n2ug&S@_cO?)+m&+|q z$_1?jkqPHu3G-SMTJSBkmV_4k&zVDRU1f2kINyj7^7w6ull?)5BPYE#UU`N_)(ZC@C2I5N^wg-!kIr}*&)>hL63biNB!KUA`#jhkk}>_Jf+(OuNg!r( z^`(JOofVMQlEeZ~ffk_3@*+-s#VsR2TQaA_fbw}@>dh|&tFgasuPwG}fMD6&|f ztoRj);tECCzXswX6l7JUtg@O4f(n{}pT;yvB{c<21>u1O(>_5>Q9e<|K-CCH;SgjU z^~jcoi(YVUN;cPKy1l08;)6hU@c}u(#i!N;7qu3)n%c^83t|E(*X)=dCL=VRloKKu;0u{Y|n{*wRbddif)XWdTNJ3on>l-2@6QgLbP63fZcwU&n#F+xt%DuDrjnDl1G3fieaaoz)z6ak3a15k1W5W0o4 zp{aYwx6NnFE2U=Qe1EX#(C`-LqtnpyU|vIBj*#^QPOmEsT}lf3gh z!kF=_@Mg4g%M>7`2PQ}HRcU6WMk+CGJk$BPmdevi<$dxcJm z_5x_<)(JNMrJW6_wXD9#8@eS2yjvb3JB=(!GCF8p0!AGu5d1?gVJoc_0Hqh1jhH@f 
z3M~$ewye1vI1->nGYG9pUx_Luu?Qpp2*jdUIW(0O;}9{afD9lj6Ii5MW&_O}3EbY~ zc~3N8?*rUCtuoFZgwL2=`%C}OpAuBOh&B-g~X4Y2-7zUMM2|Q1rT9=Y%|cP9(3qKmrL!rg|&#UY-K&qx5w2d5LH zwZiNc)hU%KxAHI)7kmz3t9dO95Eo5nVfe6M;t>*yb6K30)N~dG9Sg#M97~CXI1y_h zkPuGPSJ3bXHn0r>kMx4Dl*B^NIyo@N#Rkj^CNeWfX{B`I970oK!I-BbyqGqH)4=Nd z%9O+ELOmYHgEeqWLm1>Z@!(-}G17d?AXH2eFi;t^PVB^@_aLsE&yf^*O{TbVS%qxk zdoi}iGSM}XPEsnX4kW45I5HQ@Pw1Db;M|=ygJ7gpb`eg%FIMMB?@UypLl)r{53kYU)`+0@5e&k~@%& z4a&;1lEdV=iIJ7EQhG8H7!}wuazc_K2$=%XS4bSB8#0m8Rw%n%D6UYKv$#Sv-?QD6}xpK8pqNL{xys5Fw;=2y(s#l6k~1L1KYV5S$_qMzUA}(N2k#(^;vc{9+EO z>ai4Ji;Br@D4n6O<^nyNrAGs-b}i2Xd9Y^CS0O?eISL*{lwyoP8 z+ezt8<}96oy;+D^l#xdyZEV5V$TG<)z78V|$=D=qYFQ zVX877l&Y?;7+p+NL@6T{34%015~0*0^*Fg9afPCc(i~>4Ag(x7h2oAyvP$0pAqhAM zqi=sw5uBldj-sHVh=Q7uCZV8$Xpx|jl!Bz8_|euil1Zl<*IUcAt>x-uX?5vrIFEMm zoV4h(Gh)n4#aRpg|oFIwiX_EC7iW(}NYM-j1K(3xHv2uZ)&9XcRu{v{)2l8MI(N`V- zz#&3Ba)cFN08&-}uBjB9;&YO@ZxPi9Z^pU5ATt+OPLUc|syAX5an|&psidaJ)zmi9 zQKV~UY?3x!ovn_HY5i>AI^q~T7uNQnXPK7H$Z$KtIkbpP^LKQl3v zUbW5g%q1B$m`IOW%c&0oL=L3NVv&{B5+8~ZG2B`h08OnGI8$$?dO1aCxu$YVEupQ4 zWfSYYAOI)iQCJBQKzKv({Bj(+sp)5H-jD8IkI6KIbLb7FB)rG>9c(>4scGSSW~hcn zc}jhSVa=0QOjv9uRdMrg2E%(0LJRT6P?WfGWyRb?IYv=Nb;c(r6l>0npmw8_2U+DG z0ig#tVMUz)%>d;<6#+RxW#xhjf&>am`h*ywg=$JN27td7X2t^I#VC_A!Ioex61XH5=pD3!$N}4sYFuI=@Pfobtcou5E+&3J z7CZn`(Ct^W=oI6O3A@rB3mB+V^JgS-Li@P(4BQ(#*fjMao&-Zu9R*)|1U zkCEr}Yw?QQN}B~?#_~tHgMvj(!~*AQ;e3}SxWs`J!`KF`MY$EU7RH@Iiz*9SRdW`1 zr%i^C$kSDph*T2GLy=f|qX{HS0#KZf0!otrMB_69KVkEhbpLuxB_sVHeU-(P@|2|t z&R3pdXqcxISIk!GE9ET36$1eAfCa@-;);=jC_*?Px|q3$I%Y1@mRW-&LN+1ekbwLt zJBl^G38Hp$!pdi>@F`FeR?q`bR+Lp#R*(&p5S*>z#1v(f)Ku0{Qd5vR&@fa`5JOw~ z=;gl`C(DIm@xA9K%el!?bnnbK1ZT&!D#=Fm#Vxz>CZ)3UIAL~6a*K)!LW}cS+WOQM z<(AKDQETY~BEP_LMvE$oLW?t6oXg@w7U!}sn&4OrEBKRaiQxuuVd(i{2qs5VfO9&o znXnDs=_Ij4DP$7F9)b`HIRt@9#z5$TD8v$E21Gv6L8$@2+)*|uAE)Xo)Jm6Fxx(f$ zJ=U^339&kJj|cKFVQ{mNmOSnr6~L~kgtNF(;3*Wv>I_%XIH_AqP7ddLu`4TyE7nsn zHqr$7g2chphgcfoiu_E920tU|Tvi2gMyeuVNzRf|Qc!%8;vW|?>-9zq_8ztHKyFDL zI61`_MHW<3_x5j_x=v53%Q?^?JhQG#psQ1!L5Wd@xfpZmBm24N 
z!k}<=OEgGHj4k<))s|XI#Mo3>l2`<+?Bq;rS@HWN);uc9x!19xeIQVp2q=RBAd?5< z)TT}gkZ(zME^!5*tYnfZH-o-Xo&qROF$x+NW@}f{CJP3UIm@(V=3=<>I}sv;$qQwb z#TAtmVlU(`QVD5=EJJQWR$*=g3u-q?IX_Y9cUC@Ur3ov_11KxXD$ZF^Tv1kWh6)OV zb5r~n2Z=&8l?4@K55x~OlC~5}*q&`I+N%y{wie$NFitL&1))WS_EOJHq*s{s%Claa z=Hf$9Zn@UtG?(VJAhh5a&1=C|F0?eG1>fq^p{uN(kA~z`4B~K!1?|l^0o&^(4ut@5 z#nRx@;|M|oBP150loBiJtER+K?kJ&{dYV;GDO7QE5Q=H8wpCpoV(-xat6j_UKpvXm z`YOaj)>jHA0L7Tto#y+~>aC%jKNOOvblE{xYMEqpJ|TXn!8Tqbo#83_g^`g9d|RQT3?1 zIN!zDLI^F;pbIU%Z}OEBatqTJEVWIEh0=^g^r(p}1O^tDSO^#_6j~xry`M@XY{gyt zVkqTL;KIYPlt};{^Fy%cnm`#8fKsdip}(4b{(L{Wd4IHieFaZ)Y3eHsghm8_$_iWM zEW{P{6>6W7morzM!Uzu(O#}=U1P`K!6Q>kc3^QyIa!h23D?~4H3dx0xV*(;EQO!AX z!f7jg4RmEiSw&?<@c=D>1xUm;at_EvL4Yuyf0ND%jP;d8yPW zx+LTXpu4zGbYUyq6%-d|w=}QCsV&GYpW33zf=}oiY@xEU(BkYL^eTU;1wUiq=U2>V z@w*|$0^@=OqL<`}AtN_Vc(Ef!jHdVp<14DcxRYV{D#w>Nsw}>~fl?4l2&&8^h(jz0 zNHGTA#SqMhYLWw(0ksKPprk;WD0h@lOflu73Zly@;apFH#LC5bxMg_~Vs+*o599$F zsIPJ`C{6?vP6{x_o5D{4Ne)-6ioI3f7}W^x@FI*VNxDWcB}F4OP-Ml=D98GyKxIWv zMy`pIIkkd(bT)=IkOG0OKrVxmsehcA`N9hl2>i@dvHg8=ex4<>Z-a)Xm&A-7Bw zW1S$IEF>Qmku6)}Up)(@ogNAo;uU#y-2jZb2M0WyXCwP@i%a4jB+tZzph5T+Oh~Q8 zItb@u!9b7?A$KLZP(7RPp43{P$M6PWm$>3=DwP%D4N(Wp?}^MfM|d-Nm9R)s zehu_rSaGU~vI=U!XR9cyI7!7{E=f>Hs6fC#%w#nM2}E^tGA$b$8?*svIWbw(BuIxw z1BDhY3zTk1FxO{3W$Srt-lUmq9Yhi(}uuyJIq}IzxEr?YJ zEvhUEEq*%Wj21@(Bq^aq3IgGfZNM<37jO#}&^5UQ!;Nt#u^=806$lToJr;xt$YiR- zB7q=2v4AdIUr}OZeT4+^+)+*;%RD7j6ICT01`Zcvle7bGQ(v(@vRonsB(A8eB(6|=$;{wnEaYUQ7!u718Ph3P zkdJ!L<%A597t80Lb;^qJgTjLXNUyr!0SYJ30v5c8@t2(1lBlaKyWuD-JcDG|(`S=c zxevQvfl9wgeo6nh1mYc%h@RucJgw@$!*w>EpOO#TtPE9vGT>3VL!O!UfGQ?FeDT?* zpMCz~^DjR8;>*v!`ufYSzxnE$pMQ<@+h24qp7^tQ`2YJ~VI;o)&9A@z?RUTV-FLtL z{r7+P<8OcX;dg)f(;t5L;g6lr!XT0!InU3RP--m+ElDiChc+b^RaSEfi}HYK0pW+` zrLNjSj8blc_E1xS9f0%j5G=0V4*Cj@j0eP)o}dSqszdY@+&-yVUCt_VDXyCOitk6v zVoYFSK_Vjn5C#z5$Ym@{XDD;mSBNDnh%1RJlwhPYav9YbF^Tv@pi*2h;FZNlNn|VP zKIg0`t~4P7wE)?mNgFCF$P6qL9?&6>B~&S#Bid9}XHTCz1C?{~#HkH#H%^{7zOfFO zw0^QvPQcnul+@;CeQP<6Wo>=^_zA8j);G9roH&WK>y0yT-8vrD$u&=&K6&!g#s*dZ 
zSi1G(>9eQKa6NbWEY|a9&SAZ9wv&PvC$}!G1#t1w=1ScNt>r?C+7jOAJ|JH2+|voI z+c)prdie`^`H=HkcPBsAx{v>O1wOe>KE9G9;eV1LM@003!NQR7<&LZ(iN&dP&Sg<$ zVe}=j5E~?XgbLr+5I-OggcAq^G1i>LVxW>J^a>XA71Mz!k2F_0D`#2tB(YQ$Q6+(M z-3*1*YwoCG9Xc;hLaff*^g!^xH__jl#6Hl#Aq*5JiXTN3K)AF793<1UwBbu0i>)%$SBREVtoq4tBqt8l94=&4CPBM=3ZWW`q`(Sy!iN|7au+U z80+&7KYI4`!>3Q4G3&tqFM0UghmYQU_+YZ$fAj$By@&U)-hFTn>z(^|wbFv^{Rj8J z+mCF0_u<3GkAAei_xQnk?>@rz{m1WO#pT|6{65zA--C>Mg7wJ-~aI0d(WS~ z|IxE<{qX5~&z?Mf`oa58KltF;6KtOVJbC}g`|n}>;JwHAe>=k!xK60`pJhc zK7Ib#XU{+X{G%^E|K!UrKJC`eKl|qMFTVcbt6zNi?JvIm<#*qF_x(4&{ms|E|Lr$_ z_}#ZZ{Qj3e{NYzW{NXo${NeY1?1a`2-Dku<{l%aC*+2PntbdMri=z8aH~(*j$d-r! zsw}>WQfQ%$2dzb+&i7M9mw(U7Dr+LOHaA;3Yx7?QZ6I4VU0i{FLA)YifV@}COn}^R zi&6=|Q>?fSs_@mzX*`J^eXwo>-xpAEXc>L#rwiz_RP8mFz0!W37ifXrek zE95d3gh0sXps$qCP*yLKdh0*GKyl^D>Sa1lK40-uAf{X(2B@hz!Laq)D#(fJSGx7u z<*QdOUD?!B7tddSF1c_H>$&r1v7SGB=G>XnXHTDopxQWdV*SjC6Q|dYpI%=-b^O@L zwc{t(uw6TG{OHE9W9w^2P8`)1;MnowlXd;r@oqi3b{wX)V<&KAw;nx>b?wNh&GqQU z8oI9^J;C)j&f2Y~)^U-S>*=) zx-gMjcn&-e)=L-8U%Ggf>*b3VxL&z*5$n~#aL?xZb>X_s+e0w;$ZU^Y8&y2rW%#VJs-I@XATP#cLe~ z3omheCB*PSAxmXJXo0a^WnuKe>X^m^$%E=4u{bME_CWmbH4UK!^^PU*hF;AOu~zjQcroRLMfFMN-CS7?5ddy^l(d453Gy}^uX}3U{4LJfCq!b z6@bDq1zWt?;WLA;_wU}tI#Fbs+m3ba-+T1%?nJVIe!4fk&6mAL zkKcWSckE6PQFMhZ%nv$!_HxA=BHk0Ra>2=<=pgWL?_BTSd(f?u<=*|f_wL`OFq}LV zo(v-K?)^LWy7l&h2Y2zrRFU0wA{6iAgb-sm+vJ9!H@izfmAyAnZ127Q7>W*)a?d{e z0PBYzeenF_Cm((MjEd{UCm(8y)KL6^oufO@?=fC*k7r*@CyI+6t{dZsg7Fz4O zU;GA>TEF`qCA9nR+x-3yzxzGL^G|=oiWz_Wbr9GR^%n~XlWa+`?LvzM9V$ygi{vT( zbd`m`6Ol?t2p~|kbPPLLf(}75D=5Xk0Q)FG1eiN&0w|jRlwv)=`&Zb{uUi7}JNM_( zQv=8wJ^G57i!GcCZ*y7z{+GCNeZ`lfAg+|l7-gn&C$RtY@@H34R>LhVOBl zJHo6H7EDq>R$-C~a);`t6H)7HM~@%t*0l*(TRXCL^vJPe*vj5DC+jgRN7p9n`mv+y zM~@ueTn~5n@UbI@*A5+9JF0bZVq9Sj!j0?3vEx`z9b2EMtW#%>pE|vE^3>YNu6g3* z*^O>e>(q&pr@INZ$z&TI;q2+|iO!y;V!UwfES|JekryspnoMtX&p-Lq)}})!w60#g ze)S608xUL9!GGP)v^E9m?K?Lo>-F0ZT6b^Wy?3i)6mUAE ze@@6G^0ZRwd8(>bvVeL?6;{CwU~C2V{tFZmt5anUxT}ZYFtTWB^a-S|6fhn|3@rtl z2wdVy@k!nWV@p|aJ^@iJaz>m>T#1$?uJpo-tdX)J4{t{Ydc`+D=* 
zja%1m;w<>~u(jg7@7Cni@Alovj~Tep?-@AFPDPZrooc%GKr0jx)zQ6M_qpD`dGG#> z`}eNj!+Ph&T@YZW18-l$dhOQLYqzf4xQSc5GFh+Tp}OVTjqBHLUB7x8{qPllkBkFv z-st{j3ZDW%p?E>^stcJHWn?_<4t@6o;c?>>0=-otkvLmPIg3pMS@yHB3J zhxO^R_aVA4!S&I{&p-L}<4<3F!jFkxe)YxVOX8P5|Msh2{PLS`fBo}ceE01yzx&nq zzv*5}JI?&wAO5)Y^$1))Ob8P`Y&;-~ZMjr!bAQj`hT`jT88~ zhZ&}=d8QLb*0HW1KHjZ|Cg9Ne@k49J4<0{u_~_ANhmIUO5Z1#-_j5gTWS_4Gj_lue z_|V=%hq3NEd}P0^2aX&)^m0AYop`cfzU$c9VXTm5YsZg3fvrJ%X;uwVkW+1#b31bi z>q)4^)925eK9B9$GZ#*uxq!(zd{>0vf(M|wI&a{^=%q`T-nw}C^2N(nE?&BJ35K5(~fa;DegMW26fRdvXQhkW*NE9YEZQ z9KZ=!Le`XtU;+231h|sndNgKmC`CCv@@#GCJ zZZ2QAq!n+$Sn;6&E50~jzPF+&*4CG1q_T>g8^|cJazpU$_3_ zIhU{ETL4zvBv;<2wzqF!a;}qz6LE;iyoorxb?4^I`#0}Aymj}{?R$^zJa~xp?xTnI zCSM;h$@TEP2an!=2)Xs{llQwXn9o0W`sveWpFMg0#j}sT{P5E+pMUn%r(b;Y*_S{6 z;wy~Xx8IO9fBmcPzW?qw_?gxpe!Kaf6GYj?&?5j42B1wWn*7fyizR-l#BxSOaBj(p z?q7)wMl!aG(pMs6*IF_HD9Zw<`ncV3NKfS{K_#y6;PL}&4bv9B(rc0DZ%AS4DMJHPbJ^3G$|@u^>LaJ4k=l^iR8}aXSd`lQCJ1Gv)P}g)OznxdLdoVE6Z0CC-9H5) zrSz_Zw;`l6a+&E2O@*1N)A;gt?#!vluc|gqZE7mWs^gfYnymOPcYJLFU-U57gmsNN zYO?J_)Sq9x>fqtM2aoJIaQJPk`wzXf|M1Ty z>reL|{^{OBZ|ym-A6D$%bKv0C_TZj_2lhgD9XH zS`QvScxe65;q@c&H%=Si|qcPMyR@#S)J>v2hOT$y1Z{ zG*o3LEFmx9jWg%Zo}GMC#An4b=P#eVa24x?3)e1QyngA@jmwvBUV+e>yu5XaI#2cVNkGeF3YF0^u-WE39iPNQe{CrW1unIlvv@nSO^RR2|}ft{hs{! 
z42eRoV;~AhECik;7Jphma?mRnLK@*MLSPDz1eEwlnNj)*byGDol-FE;SNdvET4>?t zsYFy|4}%+q9!*6BKqaoga0)NI^N zu2*jG-gx5*=$k7srS;am8+YzH2g{_k|LFe|0~!De=UcF)JfK=2qhd=fA!*Sg zv8c5e@Z2H`kPD(;CIyWYIoi7Z$v6nA(DeSk%k7SJ8v?pp0%fbjWRJKC`ijX5uj2g( zOUU9Nu8_-EFj81TMq@Nl3i-z5d5wyx_a@X(PF`W|0`E(PA&xgObn@Q~I62M+Gve{lc4ZawhM-hFTH-Tzj% z9{6du9{k7Mdf=yf_U?Ua?>?+=@89?KK4`58*uVFkgZr`WIkdN155M!yk-dA5?!&sz z*8Ru!cZ%!S!Gp(+K!9OAbo4NAch}tq$u-Py9b3nDMSfcBe#UhiN^-JJChJZ@VYyPG7iq_Tr^;;EV1JviqP4uVR67!3p?{7Btq~?q?J?x~cEUB8i2GEG+zF z5F{2xkh54pWnm%moUedSyo7~89r z>nkT`zVIU2C9cTEc;g@y^9>gy%$X~YFy2#q=88Ye2;vIwJN$6q;tI)&1q_U-X=FD@ z7-{~JfiX2R5m!`JSMX|kI~lQxPpVU>W_DR^d~&0FPz_6 zFPyo^wfkWyMB3Ie`PIX;&BgPbR=afW3g~U?MCUJ@!8)1RKYi-L=6Z4haLo(nu3R{K z?b6w6066Z#`OBRw#4`E4E|10b?8R;cz!Ueaoj^Q)wG)Wl3jcKC>&gwhz48OWHOSMO z-4{{(7z01U=zjbPxpnv9o%`?Jeef7U>;C)iK79J<(bLEHWj=nN|H1PQ@$K-VPd@$R z(=R@K@#PnvfBDsyU;q58Z!i&wISbTQkmu$X!^y8KNS;Au`THsOldq_xP`;u<19Tc| zvIL3c@2%q1`Sq3Pm%x=z6FWIKu^;6f-0oPiihy~ltx`3-4Jl_SIGMIop>ULDt~`a| zVJx1=uCF|uo8n4&?K4*tSNhErvm4?HlUJOS)_W5QF)A|hTJKF1S7FXdp8}z*I9o+! 
zg$XPCrwpL1@T;nmCr{$XRQPH7(No7T&vW#|@gwV#AD189n9Tc}z#I_gtf;IGA3l8O z5N4weV1-n|dgws6QZgOdyYJAxy}mv8&R(qhF(I`F!U)>~omAShw_D$NXU{utPu8F9 zdFN+)_WtypeLv&c8TagYXYc0v_TD{Q-#M@c>)TilzKwOyp*>jNfzawMxfi01cXQzI z!2`I-!-o$ZJczr+g4^x{){$dayB~cW?;exu*3ani@w5Bpi7i(AB_uopq1CzQ^y!VW zXHK5O3ZZqj`+7L}l|0_;$d-5k#EQ2tyno%EsH~1t!K@RNg{dr8Sr{o!WaV##cpb*b zV&PmCXR$E!`XLs?HlhNvnwrAG>o!UQ3Iyajf(nHL3tz+_>_riQ9}u@B2gbJam8c-- zD`XH8N(ru{^Q_dogd~>N*-(Pj^fEmbujJ4&JTP_?G?W59?g|D11&K!v!->&_NJG4l ztFd*i4n)oOU(#~-#yeR+%T!kSlmIqHB4BZGg=E3PnJX7ppkIHt#niFB9Y|d1^;Qgw zT*gZiNf^KP#!syv1n}ug(^gbglNlcTzO(zwq3&m&=kPnwvzRHz9}V&Ep!sLf+~Uur zwVwF#f?vRboj_0cQ)_IwN{n^#49gk(LG|Ppr$e^gDG+xNfqZJBfMy^>Zgq zoZry8e(~hSg_9@GV}if?IR*Z1NZan$68KE_0FHaiutiC7jIp;bOS4X`pUmH(vP@0YVD*JbQXTIg8iJcA2}a%tw|=+ERwb;`R?jYRC4EINu?8WFvA~ncVu`De&X_J>Mr-EE zr=u}^SU7nlaTO+|Q9$+TyYgA$UZb!-r#hPzSsN;w_eqBc#^7}sLhr73>3&`ef zfwDsK;XfgT1Vz@a<5yJph1B|y!za2QFX6Z7kRhFvI&>WC(SyehA3BEFDog_58($|| z4({E5a1S)oo`dgnI_JQixAwn{_3eFcVSVeJx8K?eu>{rBZQtJW)3^7$_4eCuzw@)b z@BC~Z);&Lc=jHmdx8LIW*5-nqU1yx*?SH)Qt;xFQr#MC{&VqGv@}KdFd-lJ*XWu&z zYy0-@-?tYk?*OzOR?e6mJ#Yx?+JVEz4<1=R+^r{$bU&s)x_0pJ@%@L_4jwsv2fzHzD-By5MLAZb!r3u6`Qbv$MTA|$o0wJ1|8ctee}dew{D!kieFP4J-L1qe;AIx z5XX9YZSBl)toY0U1$gEJR{R-J_m@Q7-w;h!Os`EQ-|$%7uUqlER;(wno;!cy{J9h7 zyY2e9v+L*2aJzBw+>h2vXErXKIn_;AnvkM7@j@8P`<9^ZfR-h-zfJo@nIPx0T)_ym8S{Q|RDpS<|; z)6c*D?C0NhFRB=5NGyH`;14Yj5?IKhF0oM4u>^_5w^Zl>=_@D%GAfk=B^GssWQHV` zb1blEmb!&xE6gSmM9Go?c-W0j%R{gqTVdRW7Tz1T%2oh97l3jWk54}+dHHe#7tv!A zdDYfeNN3Mz3`EdZ7%9vuFt2lJLtH^c^JiB~Z45nTD){OYX0DLeoVnsRL5eF>ZHg-t zbd+^05?B5sE4*}I=_X|G(zDSeW0$sT_#?gUkM*$PJKB*WM-ETE{T;!NrVboEy8p;w z%mf`c(9KXCc;`Q`cOGt0W?Q=dAMVWbIh~9MNRCBORpbm+Mb4<0vu%z@&N(ANQ4E;H z1Y%YMBVi>ghgn@7y!z>F-(3=dO}aRPD9)Z@+8pz2oB} z<7A1vi1>(b8CHeGMuf$L!$PAXU?Gv=urQg0Mug2`Re-A}=SK2_Jb#8|ETj!p@#L4Lg^A2J-fW)925h!CFIRK@d|%AfqgBFD@hu ziI9Z}7d|pdS@;m~X#%kbU(sA4A~`OqVv$54ItE275**l5FpL@$v1ly;Miv?w&{5fZ z#SRE;zJgy6HzAI*(9PMN&UW5E`09T;o>6e34LQDI0~Nl8{gAZTd?lw@$%$7qjH5f@ 
zBB5I@Vv~MI0yPQIBr+o`(+Mxgiq3Y?!!eaWho^BE6AfH&2O5OoCeSMYS8_Q)Xc}a0 zOCm7I9l-A>U%?+S_aSga#4C~(#4ZEaD1mIbfC5bHnRo62v*C?wO*Sf|&Y*G$OJ*Q_ zWBCjdI2cWdK^?MkC^IWfw%1=816bIc25Yg?l4RisYj&`i4HK7e$Md8uTg!uIu(*i% zVubO(x-*ZNi$Sjp2E)ijVm@s%945OkeZ(T{&yYjp_monG3~^ObRZfN9U)jDWa0;ApFD+%3%p|7(FS}$#X^}YQMr&2 zz#LVkg5jAwvZ4y_4Dtd@v1@(~Hr6S~&pCTK_k6+0!gKjhxuAGmL|~QGaa}B6Dqpd< z2+N94mVe{&)$5n8V1;AR3S zo?+Wk)Md%i0dkZ@asf1o63POiV@xMeR4x}lsGd+34X3p61;nBpg1$}pgdJ%q$&d!| z5qhV-LmTWk{CnHvhkWvf(9Vddb8%~u4m2!c{CCbJ}RhcDBMOqbXm&4w%Z zHH9noyb0b-#47?riX{6MMR`TMJ7i_CC5ak#K$Hl#HGZ=C&$1N z5@WGG5jFR+^@*gQ3^_y)rffe68D*uVVGUwV1}d~NV5s#+8xuD~3amWD+zj(IrZ6iP zn`srC&OMi(d!Zn&5Xv@Fn*e>%+k1HS%+?>s&l_LHCc?8x)%PZ{k`@)ZsKwEvLq#17h|*RjVS z8lov*5#0dYnDW)ac`lKyXdK44i3=J9iNK%ZD@0&Ay#-v+!dLhs_zFQ7Qz7sbo#c`Y zLcv#92nQ!WUswpZ!U{W7X;Qc%OS1_}FzG;MaW<4D2;~`AkIhI@nW;b~LRKHAdrYk; zmy5#!j4Tv^BqpCoPLdfG z1z`umlq@W0nqSS7oSF`rOG(456DzaRm=avf#?xU*%yv52K(dt7yp*()Nh!H8iCOW< zxryodNg1aTv(6-Bo{mk53-IfeQzOj<7#6hhR(G#02uHmtaOz7U4>k5>zsRggEYULkh- z`n9_^i|&z)Oi@a6|85C3d<9}*7g&_h%~!sE39-mrm6Thc7a@>R35y~Y!2l9VNgyS$ z^pkvrxQafEiyd7Fhx}f?qK-ow)TjQw2KhOZpy-c9gFeY_uw4~=MPEM}Y@a26Dfz4M z=i`F&694@qUr}EsZ>*daO=e7NzQPzs`AX*5zm~N_kfd-0@ruF~(WT{*{$$>?5Uz+K z&4w$vgdn@lit!|wi^1>UfQ(f2@j~hkM&K$bHa;;XE-5+=a1|R79s`REjfx6^g+~R3 zMFoUK9!J&>GzDEMB#gNzPz42t2L(mUTVPaBU{v7o$bg{mBgaDz1%@2Low4u(0TBm| zMIJa7eJCJq|FQV}M-%oPiQji9Y45>A*q(z)d-f;q-kRzpINv06!UOf?xyW|n|J1I*_pFtSN67DS=)AJ{ID-` z_kr|%htdxmO*3={MHa~z0+k73A!6UXJ1=A*k|Ul|2@7$a@D(CId*+4C!m{*KinM<_Uwsm;fLK(vLW6b{+IdI} zXGd%ru1Ty$Z$yYDJyRB*nPuG=!pLlw#AFhR316|rD~yaZ2qOYx(4=rhBQQLOOm87& zK)Iq37&96aF!}4TV z8V%dsWFhkhU5ME;FzwfU0+~V#EXGMnN?@unlF(*~Mp3hojQJm%`DbOuU+qa0Ql}axFdMCM+$zC@uYZX4cK@+}oMi*E92Oq-9-BO1qGn zc_}UHQhMga%FXiN1$U9kh>h#4k=PqFpH@1I3E!O4B=U~^bGIrzI`CCO7 z@7}(`*xegnG_Il#FXSt9a@yhL!mM1prG&%w9nz+5_4hT%5BcQJr%%!beO2Vu z;!a=xLbxK%OZ=BQFB`7#Mo~wme1&(FTy(}ni*Cxsp@ou6^!?iJW!mvnP zn`Hdu=1mYTAYXg|u8?w~aD}0qWCNjDeOa(#oDVLXkPuHUR4c_IkCqtEEYFLLM|D+H 
zG-D_~4-X@Tj9<7+stS}TQ6Zthu&@y33JnSgW-Kf?Fg)mZSWrMjz%iK}jSM&%dGu)H zk)zRj4#w>|n6Ui-W83y6Zrhu*b$80<-Km>)rf=Gjv2lCm`fVBOwq&i{oaw(Y(|1Fr z&&F)ejX9nh@;uh%d8|F@;eXnF^%>VyXPte{Ir$Vid0!HHUlDs<6?tBV2|ca}JuVA9 zFNwS^ihV9R`(AYRyWq0=g3Fq-9_tD`H=g#{blP{z>DAj#ui0_he^>s-AM$tZDcHT| z?4EsR_Z=)abfn-&K*8~#Gsi?9|S7j@SzaU!UqHii>+c2$|718qBIhP zNH;7*EVhJ&fBBno&VDKOwtNug0N7$BB7Yfb4eT~nL!Fya1#=NF~gNB zyJBYs!B-fCNm)Ay!bH3xnL+p|Ne)7P#(td0{gccf5wB!$g|y(AlliA9TtTry)e`m+ zWOfjg9be zCo&};#0YfB-2qo9w9QD3O;3(XO^QuPj7>?v3ZaCgxa5?CwB*=~gqVzk=&Z!(?D&}M zc!bB8+<=(uLoqprWAcv1o;eb8;b_d|BhfdGM%~>Lc?LYeLNYKmRi0Xilr{OUb;j!gm zQO}~{%i@xr$0k3COL>@(TAGwul9F{NCA%miuP7_OD7)ZB&e^N^=PyIh!X8~%#eMP8 z`O8-?+$bu%dFv9a==SAX$QRxzx_$TVt>Th9r4NfsAKib*?C1Pt#6k~2jtSuuKu%*R zM)rlV6b!&dEVhtE5R3F%GC?4n86BIV2pW`62(eIiAPpL9X+w^${ywIn-^KoE^Kcl!G2A2mOI$o^sbFCi=3@y_5Pa3#lAAKB`!3MtuqMMP=2c@yz&xHdjD zs!OxyO$4qmda^VARFS4~GzMab9vGSdSI|Q+UPBwf*v&3tCl%}@6-YEIk_seb1rG&W z#mTN%T(ZjW7!<5W#l=KM%L>&a8QCfVrA|<)AXG($hDHa4#2|lnG${H=VARoo@Z$jy z#{(jd1w;k}M8l39jXioa_Ryi20|#Rd9E{t)KYri7xIKI0VLNvxZQ6Na?Y6wtTXR=! 
z$@ANk=d(V~b6uXNf47Oo{0E~RG956qk&GUoKy)bSAv4GX7-mM#x1UGH1D-M4lxwQ|2} z>jS%O?|ai>Rgq}*4e^>H@tW%{Yp(gOy}oMwRsZ$Z)^578e(U8;+b?eU;legR*TJ*< zj-J_n{M4bKyknuc$0KusVsbG%3y(Pg;)3)YmvSOLEff2GC1<5GdunB;l7fZwoTT*Z zYRCj*}HOptd2FNh~DB#wWl4U~!1#FtUm}FT4F-VLc`i6 zU{z{LR7z45mM>vRIDT?;Y-&_YdQ?<)ctlP}=&9h)Gr^(f149Y}gD)NqDclozVOvDu zmavPPg05@`C|ZBGczrD7hpw^KNu*aa>+;V)mWXleaTZ-^w{tbn@J_ zvxQgBU%rG~?UfssuNPe|x_Rx^?HkP8^=|R);s?dW4<9fw^&uiE^htcmh^Pps>=`S` z2mrC@jFlo5Iu*JWhIN~W-_N>pf=@<852nKQA4=PQ zI0g5;2h;W*NdIAf+OEAB+xMhz*_F0&M;fC?ZOw!pwPtfB3^e8Hcfry7nn(SyJ6#c)5i6-jmsSy=VB|yO01nrEuBk^o$m=;?isl}V9fQAk;@ZfmuCW(=Z2!^ zJaIXUE2`j%D!IZcn1Ov2PgD;R27v>$>`uoBwss^;bMMT=dy|e)YC<{=3eu-&3%0-`OpP&hI!@ za478j(a3^;sDk6Mr-S29g(c-iq+~^;q{U}tB%VlRwzkMlO3O|JWo6|iX9KaAO&yrC zc;qMxLKZGi7O8tDlm!Kn2w9j2BV3b2A!4{3Vi6UKWC_`_Hu?`Jok&sA=+d~zgo}+>WKhGM#sT6)#HWatvHgy?E%9#3S7@*ii#-G{RIbP~ z3kUZvKl2a5B>zk=W8oYATl6dThl@7-+-YYZeS;m|Y5zfYBK-oREdF(6~4!^IB8h{Nn43@w7d6~;}u+6)qeiFn11!*DmMUxBY^D8^U~KPAhH zpg&`q0Bp?!>Ey5A3d&X9X}}c@iD2L=gIRi)mX@8Cnu}tm#H8$m#1ruevYG^xCCDr$ zHvN+p9hD+mwu5^LN=dOF4cU^0!7UkRVcB#{TwF|iEMswT%xa{#q%T`sY~n|YkBy6g zVHI{nbV7J!a#&8b;dy!StL z`{9Z6_9w!PWp-=It$nJk+#78?daT_B%^XLpU1#jwe=;x~Te`A!ky6_dwc$Un=wGTn zzDRXssfJ{w!SoW1krlcV3OeIUHAj|djVf^_lntcIb%z&g3@+2`S5R-$(C^gJ@6h5l zb4^-}t=lX`EkciGp?978`a0k3HLHIp_uv0)OF-G~u!{XrFG3P4qSML~PP|CXeU^FZ z!O3&?&J^A`d#MO}lR{U-F^4#TT6PFy@!i{{#dq$P-eXEH*bx;YFPX7oEGFTUAQp+J z=sAd}2&d>yn6V<9QXz{*RPX@0qAZ{?5fvBVD|TG{FY*=JebD|KO)Y$0?i+vCL+Yn& zzLIlTw%fva<=_fl{5ifNZxo1>yl=##312}6z=gLL9!;E@K`YrsMQMB@m^b00!3Ts3 z9~N1c0B6vu~->d zrS05zV*8$)tv}>#-hOJsw*2*5Pp#Q>a`nbjKI=|;(k$NU0_Rod#lGi7J{Mp@@AGz^ zmo1&{nu+h5i65FfK7yG!J~4BAYU23R%;|-xW0|S=g|WE8NL0?4utFfJ;4%2bn4!3m z>sSRdbgVUWtkZX>y|HVZiCevyd#$Nyj6RnFMDFR~WDmmPt$|aD|?XP>f!U zzKu>!lq>cmmj+=uzGBaC5rpAQ1g_v*h{JOF6_jMTvUU=X!B-Hkz*i8jP=bxUG4oHK zgK`D9%FbrCi^5S6fGdWq(l8Utfh8nl$H$+Di_3z=#%5BBj!vf*6_o~yj7)_^gr-D> zrDA)^1|<00wC zg0c=B&)IkE^zOrjI}TpmwD;D!-S^h)eB!&K%ya8Yx6Rd#>#FQm*Bd!D3q;+J!KPYo10ZuC1X$9ReaFmwm 
z+zS0sRsJM^YN^h^a=jrX?g(s|UjI^^{^fcDYQ{4vf+~->c?Xs$U^WN6oN9zv<*9Anpj7)fuob^2W)T5IHB?T95 z7GAz_qR$ATU1CQDp9fIk^*eRA}SUpt>{G*vCzBZ5Q`mA z2{ur~Lf6GbvxMl?=-wo#l0HxGp9`??7wS8-LEY+aHt4tf9S!QLXb^9u6C(Pu(V%0* zC*f)#Utz?-MID*VS9m97Y&GxEPzG>Or)K;0Cv|CnF|LJh1&_u>W=sUGFtB2Tg_Q9* zxPlmhtP_PRf>j`^g*ib|ZACOIfE9yPvI}YzR{RB`W<4|_AS5&(Bsd@>=vZ(d>}XIh zYSH%}jY7@&&b=vHc4eW4eC?)_e(TOU`COzjl#R=EE9V;)&Nt1R?wC6jn>m%3INmoB zKQt0Q;)@=`c*19VVL9KSLLjW<2`l-cm(Z7Be8*azsMbJK!x*T^p%(b0XI}%uLj#9; zY78mC92yOTjB!Oxj0sy{1`f@5h9_>CH^)Yv%nZd1j5*f9cuw^&Bc}#q7sia->ILp~ z0*_k8Jgbd7s*OCWOgyX1JS)vT%FR7rSbCLPdzIOEKDYIHZs%Pl^nE5=b>Gpy#BD=~ z@0Mc!AMR}4e{1KF8wUa}9}Yer7*-G-eI_<8KOyc^YC>K{Vs1uqHkRJtBftj%%7T_C zM_EW0;)A1z1x|*LMGycwq}^$c~Ex3AjWHq+Su9h@|7ExF5M`+aO3Rx zYiA0s=I39@J9P# zo^5=Wc70KsmQ9y|t<=Ewt)cyohIa2cHt)2}=hOsKO5Dk>lzNnSGm8A#?{&t$*OPp$ zHT-AIAtj?(CBuouTKy||{W4+pL`1 z#6FE4YnyyG*KOKUw?C*MB(^RxxgtHcBK!2SGv^n4x;n9&X$*5!; zMk4Cp>mgmp70rndcn1O&ao}mx2@E(tJb%x<=EHh+cQEFB_%vVOy2=wR2|eO zw&^%D=nETR1e~~FCx>R-IrdEk_D#&4JR>73HebmUtvaYxTp3o8y8$wW&dq%1X6EkH zM2+X%VCdRl;M%}-Yvj7u^WB>So{h#Hi~+rj+@W#RnSC@%_j)UjdMnSDHeMC>J}Fhs%vUoo0Cv-YLFi8+&S{=^)jVv&HbkgqTvvGps$R}`^`NBqH8f0IW3nZ7`d zqdxbL%~x!=qM8-ODjM+RqB@D#1g&mTW~^7PrGXHOqKdwl=tgZq!~l|I2nN_R?1ZWiCgKH){h*RI{Z zc;!anrJIEpiq91k=byZtmv<*OulPh>Y5Iu=Ng0n5(#s#<+b?h5f7@%rW#OvZ zHog$>pV)Z6u=c7kcds{bY2l08%-s5oT!supgF3cDdLpU5a9Z1LM%VtWf#Z*QqB%|5 zNpSj}mb^8_c z2Ney5)s3f=4JE1qDNM^`W+hjm%#o<^CRgZ=DCi8Y&>jD?;;XL|dK7ggzEggUyMmVF zd!>F&!?~qugBpg@nugL9ny=NlqZ)=|nu1|n^L|a!4r9j=Gq(|A=T|1K?RI``n~sj` z2<+Jx(Hx!CmVKf1WMTQa>rb!Td3?F(EF#wU(I${w%d{y&;Aqe zhfw`W&V})}Q!i#aGTtKQ%^LqT*H6VO>e1{;6ZLCm-qgWtv_jzuV;@njC|n`u3l#$7 zyikiq2f)}$swhcn5NUoKIDX+gb6CLXGv`r1iG!c8Hwbem6%zcJSvWs12j>{4reJj~ z7Nvl!uzPt@Od_fk6Cz{cBj)$*j0$BoRtOD_48i^hvg-7M$0PR!MD7lV-gPv7$D!2C z`_tF%%<|ci?c$#+^vk#PI%nxtXy$y`)TzkS>5hqGDNpzWD1}hKwJqb>mm4~~qLzWU*-*?pL>qSlQG>pyfh%r=89Fw>cuvhSYvQ{!3S656ZjEU0 zT^o#C8;#u?sF}Jqn0Yjqdp25lHCcE!T6x#o_|(B{ed_FdYwi4M99F#)t$E?N=860I z2fkZM*6zFuEq&+VoBM-qACI^e8g)80Aull{KPmkrvvFU>32YshfnD>#!OZsWc|fd! 
z(>NIQEOvAuC)6|Rx(l%(@zqP0uS3wj&K&n}x9HYwQtW=OwB$ZEBYt@A$&=EjPaeE@ z{tO9>$`=(?6)&r0N0-*tG}L11b>0YOn8XlkrtkbzKwqs;0WWx{8_WR=lh%d-mDdP_)D z{hlKg8+JYS@P91!d1~eSOkn?9&*GVe(KBW4Qw9C!D%?gTPO}Q9RhiSSpxdUP->Jmy zQ84IU%IRF9-?4<-xx)CBf@#lkldffEor;!S1|DMu?!!EfAydCmEB{fOH3Qau-B!NM zP?lUaG$3u~zWIgk_7|&nmig{@v}$Lm$JT2e+pc(RyW-@3)^%Osn(ene{I4PD;Ou?X z#rKx8&mAY9d)BUZO&xEWJC_*P-7#{l<~cR-99s;6vt_n<2<7$p4N7ddhGh3FPhSIkSZC!Hg^5d zz+qNdFraTcqh&R%EEv-;p8{^F3dUFR#xzVORrnJSvXmi4@uyS`r7GMB*b3dTl?Kw4 zdgChkQXQkWy2kI8tB)w_OsI3_l(eKOdXtM+4&c93Ig?5{V=5fUa?OE7>b*;~`xa}z zf(WJn5scHXBN%dg$8D7y3H`t66e z?moKr0O#l6h(cxv#zYkt(gB#OVx(?tYN%&~kUA!wwzhY)GrKo~uX?**^}p(S-P=Fh zH#F2gG6=*P7#kdv3=B&K$0kO`ClFX=$S^Ntq0^%4E)1%OsBAbP?cc~(h`Kc7exdPY znj+uG{+fkYwGggI@W<;UFBo?+G>|)+ugLrecQTq#z9L?XJDDYs_ewSv%`a4un=!GI zgBZw2Q6)YLIle;K6;ZArKjK4#LxZoNLV&M`az)FopjUvehDv#aZQh;Xza!IUb1t?-aP&Q6=W*V~{W_NHnK+dQL=Skv zM}`hhc@AY<`*MzbB|$0N4eV>UNbP~C>}!zYV~mJYq)LiaE47fXAW41FbnTjm5r~p| z2)-f))-n*a!SIkHls9mfo_!M$#Be7J1^|Nru^?(0xwIGou>e{wjc6FVHG#4i3CpdX zfR>48gQ-^|42Z=TSj(%)(z``w&DK5*wth8st1HE8pF6LA=(hQu&-Oe1yYFs3aAW`R ziveNhLL$#&Lzsy8)6q$%64FkkWSvUS&dbcr!8wN5Y!K@akSJsj>nw;FpDWOrIl&YM zJ|N?8?fR{2MRzD)6_=Kj-Y*7UJ$}US)$^xM5DqZS#&o-?yt<~c7DGf`ZR20@6^j1A zSFbv{dOHbUp_l%5@)ZRw!de)A*~9T4eD#k*R^*j@4yf4Av*C)(S8TXKn|$;%>@N)9 z2+=f3%LQu0=TFXj7s3^p^Zre~>h3{Wr?scErK_#Av!$)0v7^1Ay}7=%sjiu+f32&k zsl~a!l@(Q(uT(yN{<7?O1!6Cf2+yBZJbzsM_$n+<>Gr6?>@SC zr{esLrc;Goxn~BmPE7|!Np>9@S-rIjl9Y*ExxR6k8mCNIr$R}$PEn_Msb<>}t&Szy zJxqO(KFW;-6mK-S7*!TXR81yS%qEmgM^~DUuCyA} zbDq_8n$~lf;(5#(d(WDB&zO2kE&L?bt4Bo}$HW_l95?rS>>Tji+3UKk(|Jpq)#_T9 zgp9=9IB0+now(@V~ylOn}N8C=iDoB>J>P?<_iaT4uf2~ zQ3LyNuD#U2VN%yl!gre$xX&55PH{cvc%DCUT;Jfu$lyw<=Qw5HGHdAehU+$iwzhCW z-FjHZVVvtcqir{?V?Uv0F`{lhY`^-0j?Ju!iA2q8Le+F!-E5L!EmJ60;^O4~`Cuj1P}aj7Y}E7V;H3u-v4T014%j-^*8j z6S3HS_sKTfLn&FoF=_h?a3$xq3*m~+j|g8;xRP^ZI#D8XC6XM3a)pbOQ?kV?3}m=S z(jS>YIq?d7g^vt847Ua+({w*_J>V-8+QF$A#qvJ0MKHuGq=&#(NDiVb2dnszLOOfl z+?jI)IFTj)bRJH@V@|HZAyZ^WmE?@f#EkSfY-Ey=9GwbFj7*M?h>wkkV$y-3A)#SG 
z*itRq|}Bc{VkMwsnR!4FCmOm7%Z>Mwp7Jm=e~JLMf<5a)u-zw)Jph@@7yAAX+`#s8Y5@P z6fKBaFeCRSBPd!P3}2ag0kj&-yqjR=-YqZ-Us$uvnkM@-wf1XXiu|8Bu6yLV;lAgV z(pB3_HtfE&ZGX`Zhi~o+xPB=3>ap-k!EuFANf%<%&ctV&N=9#d__A0?H*{*xJ}`Tj49-f-t{@SLZ3v>BokhAR65}WTrI5D zkkhYTbz_Fn*4F_MtF^18wF4t%Qw!V#zEV|R_p+|0vhHPhO-0$uiWimT&@m8=G4*{> zR{r?Gi~ILpJSeR#y^j`;?pI^sP^rRuzTBXE zxo*W$?dqjk%`0?XDI&?GJ-%F1x>RF&k^1yvjhXK?r@z;pUZgX#SZ8X5wsfV=)JL0E z(3CFMm{_4bwOngLnKPws{8q>EM_t>W^z7doh=1m~{DtT7?*<;f7`p$$^Zc3T^%LLg zy@~I8vsFKVwJcY^u~W>8+-HL8IAG{Js$t!?(xO*KJg#OxqUkWou^$0e8QP2)SW7sT6Z%$CJ)236-K4-} z&cgRc6Q5ZF_i>)rq+r!7-+M~meL~AoqA8N-h@^T>QjW`%zBAL%b)4Y2&WJXB(6AZe zxy~7T{s_b}5KVC$ru6J*HLav-<`QC{EIpf94YLU~<8c+in3mZzwBCA|p^{@ilSG4R!5)uF{|g9_S63%=1dc?+h}G@4Q4PM|JJlQ*lV zJHAqXO55lsb;Gwx+T$7qld5`yDmq=N+Kt+rdSi=LXSaUe^{@5?zlzN0&Av2z?)Jcy z;`XAFn&Q&RhYw!7cwANeytbjdsj;%DzOufqy0)>tt_ew}<`!g@n>*T}g0^?Jb-rrv z>h0+5>+bD)#ZcBj|G;41;BepQSg&NFZ$dgC86Tl}0d!aNTUlhCm$1MaB%%@~LFcCp z5?x7vmE#YH*6ybSh**G4Wm&n15X8798$#cW4jm>h&V z6S!K)R}`+0^e5sKJ2MElg5pTwii%foXDE|&6)=G-xymcDGw`{>^948@0C05%IVK$C zl%11}!$5JgLOKpFNX2o?@fk@m>50*4@lna~(TTB<@i=?VC5TwBhcZ;yX-9W@*Vi+2(i&5V z;4vgDnkB4ySzS|ID@zd~8pvv}2w%zag8+WWs#L$iyny5fK8_D60Q-YX^3VAXEN<2cu(#WuGoAxj z{q0?SZJph%ZJjMBz-n%6X{v2%sBUa{SyvCMsI4ilepz0HboC?0E%F z0>1yWyyV%-yU&|%J?*`Ee zp6jf>+Z@OJ4bS_Xq0cPWd)m-<+5q_EGhwu5+SGr>ap!-y@B84f|HHP(4_iV%nEOp? 
z*mkN|wX56o@Z6;u!U09AS6WWv%JxIb)&rUr!@3sZ91E$Q#S~1(VoKL)Qr}i;q?mH zp~0M*!CNJrX*K;h4TCqD1~cj$sXA6H>JO;vzt-lzvT^QnUE8(kz^lNd*C#FxUAaGS z`$@;0M>VAntDiorsVZ-3sA_GhYH6-+YN~E%LJ?L&XG3!b^2;sVEv>Jb+WMM0`dhpE zI1fYjUc0Vq*9Ud`06bIE3{4{|aBhzu4}^MyAhU6@6X~ zr4~M9lNH(@*^2p>I4}G4%fS_KWFlVSPbOYWI1|i?3vfmFYN0P zfo&_-t_^n>$EHQ!rqRHjS`Gr?MZ{V>p%n(c3{Da+?^wChl<5Tv4LZ|p%c^K zIyIwV2;y>WxLSS%`d&Sm9O6QY{R~XTMpgb6LkAv#MQu24sdsd<}QFB=uIdr^`uLf9O~L_*C}W`HJe0a>^C`&}qP@eS?H<8m>v8W`}4J zoIeRyl&@q_nF-4zGl;PK$!vnWe%00Us=en`TQ{@^Y?0H{+Fak#RMX7xRaHHMSd}%k z<<(U$tIA(iJ*}*IR9=Y%B$W>y*OfeKEP2{<_vz@_8zX^nZL79cam?ONnhT(wxS zb@|HI3MymERVS9JNtS4jey26`wd&wk$^&014*y9J>)j?7sZ9TAndHyQ$G=-4U8FR* zL}e0KrJyyfs6Dq-eNIW|X9dk4mutRN)Or8?%BepsA6cS4rD^<&uK9l$*#4)1?SJUo z|DrE^&k?=X7b7J%qw72gLyAz>Wl9Has77vcT+erWpC1isR#yk~iyNL-F-iH9^rgK9Rt`c7kpZc=^6aTUuxeer~r-H4ILJC1mo=ki9=W(?I? z8dhW4wiA4pcX|#}swTtQRud@G(l8&>Fq3c{-e_A&RgK3uHg7adrRrE($Qwi6P*HCb zMOkXRX`mDk3mHRD7I15k>Ht7%spim1gJJNMqTT?~tg0L->a~Da%DOYl)hD%hzvvnN zryBPiw8U?f2ftZ4gk++w(E!J+Q()U@=h?7!cWX#;f9_?;mHU$0PX_Kj=_q~F_~dc@ z%NLD6toE9g)|$5F`mUzNE~cmz0_Z$nwRQHPW4(UWGyJ-D6jfNG1A}8DAggYvw0B~G zenqFQazT~NC&V3yYYA92FZF9(Oc2 zVgIqDT?f;)?akV_JI`lRp67-Fr!^NHe6M4Bb2FDxQ}JUXhi4`Zm8QaKb5WhCsKG?o zC~#=t**Ei<+Vc4&dt~7rcnLLBSP}@Dn%^^l?3^IGW6LQDRA|hzmaHraJo6E;euXP& zP4hMC9}|Bx1IUIe0#4)^02Pel7I{dY(Y0f^i`-d1;aB34(H9=J>7hto#AsO@N8H>x6D_yTdnG_ z@@u#DZL{@hv-WMZ_3yM@+h)J6Rk*&%aeckZ=4#Jv|qZS%)I89gn>n znp_x}UYKy=LVE7GoRjBI;e7bBg;>9Uefh6nx?FVi%B}0y?%XKC-e&-;J9kU5`S6{R zCph1M@KxEfr-ZM->gZHJ_u85|NLYY*bUaex{yDzFd>@EK^ec>07_%^Vks$C%z9JG9 zyy*|V`o|}wzwZ_PM!uqOg$A3iC|sc-7w6@|^%vlZ@)aR`awl_MB3_}R;#$a8WVZWB z{i?gGr@f=6tsT-sXJbo8Lvwq5b4zU_WQhhWy2a+a@Eq*U@Up71iYdaXD66Qx`>6gw z#c1)PcNy7|4V#*cjmi`ipMLvoxsqz{66O9yN`s43Mi;4JY1_m%iWA=`OntL-^4n!o zj4ek)`pr@a?7O9s#R}3TD<&0`rdKG<5mQuptElo0x|V|O+a;PaOMqLPcZ;>8-)W4g z8okrDVl*pYm7c>pj&ROEJZs=MqbC{{cusNMNA;aYU>xT$0~ZP3V-m?h5RRb-{%aC< zzSoSQ=OoN*)tueBch&YOPn+x@>ixBp_dYQn~Q)NAL@K0o}$W9QHAJAd@p{j%koH;22P%cz!kP}^~c=P{va{~G_xbDsognftxhb{NsL8$$LF 
zm0Ieyqo%&U=sQiT*^Gd=^n{a0Ai}ULQPWz2Ung*WYapD~vjy7Ca2?+1SWT;&jw=}s zD;tiYObazyOSK1=Y7H;b9#J)%QPi8j#-!@}IV2F5X%7IgmTUJZ>yJRiLUL9`e`2}D zkUDQx&2R=^_O0sRVr|KH+GBrGANpQlN?h8$esp&Va* z8Lp@=6Y&avIB{g$@t(;1c;1mQ43UdWmndAZ-T8Ck6;ZATTp>9Kz9N}HEE^=5L2SqX z@d{;E;48otDZ2t(5%CJ!PNL))y2k0kbEnQSHSPJir}MKCx#u5i#Qq?dnhz{Uts#qgISw@IOV_PtlzpT?yHI%J?}WU-?wpnV&VMU zMEsI3tl`_&8$x=rt3lgH)F5zdH11ZT&&%P5z+PG`mw6RUcRz_2H z9cVCSBl}9*uK9=Le?Jlz;jWLrrAHW=T#U4~eotTusfyNPdysb815Z)4-!6;Q9Tsca zEZ4SLuWhwo*DPG$=(wTIWn+c!&X;TVmTf-xWc!f^dybbJ2)lDA^3L(tqL`HH$yry@ zbFSr{x|VbbL5uM{#zZWLX)dh41@!ot}OxJvHbz4x%>!9(VR(kBm|J%3#G;%Pa` zMqXB6fWT^E5EdAo@D(OoSY83thoB1bNB@M-O7$zsSLoP8%4YKw8?o3a!oSH^Y)@wo zxPRzZe;@PwH-FdX5R083{C~<<=xn%XK&E_!;RzQzEEB|{0U4o~TlN+!NVFPsE+KQ^W^75v#vbM6P-47oQU%oXR9M|jQ{&1!0 zwIz!mDX-{UsU%&teCC_QlYd@1^|j*kcPi6RsJ>E={M$0gpO#O3y=Z3MmP~%TbaK&( z$z{q@3Mx~J6vmb)jxS$1t)Mcaq%x4xy3+Wq0&hx@ zH)SCFcT`&$I{(CVev3tUNU`zUrn#;Y`c9(;ZX*z#3|+?r9w?@g^4%s3T_jv5iIMvh zjOQ{TaGNysLLs`8>pW)UK4I=HHS-t~uYDt0J7eiJV&*YmPX- zV_+@oH9vy)IBpUc&wB!OSy zTEbEBwhyL$@6~LFbcGU*qZF0%(75!)liKzYXk{q(;)|u|2Cw-8N((I_)0ayaaI1HvY=z7ah2ny?)@<%XKuc~ z|MHizYH3AfPhC|TRztL->$J3Wwsl~|8ZuMpQA||rWvbg>_cKcf8U3nHGS)vnF(8!= zOR)zQ399I&=(M=ddlv>(k`bVk^4s+*;Lt)|B8au{`A>ea+?Nr)qCka*v`rfnuBd)R zUpX3oQ@o;lCFjU^qbOWacc#-Nc5;wp1~H=H6F_A`kyPM%83J(-$wA{hs?WF#fxu&U(n`1qjs*nrsR zLs1F4gR^!X%inU~?7E$Wz8kK)``&SMyKm?8#9CZ#C8{zL)m@tkWIbE;#$QU+Jhte9FR$9A5$%@C#I*sf}R z9pC)QK3TmJvnw5wq>~wiD>Oc)^W@=*LKGpTFFa(Z$+k<|x&v?}XZS9HXY!C{->wB1 zLy}S0sVnNx6Lsl{JM|sA44k_-PTd@*PR5)&09stvE|{Tvr?Gn%V;-FXXjxvJ3}VSB z3w#AL@@~hI_+Jx`ZcDFz3*TPT)orG0n$3V%{_QqvJM7oC3D>rY);79tY4X`tziL;_ zx;>Q}_m^!w{A@?Slik4&0%PxoC*6%pFHSyz&2#Ra%D;W~?9EH(i>_U|apUTB?9Orb z_N}|ccS`ORm)yTsI=>Ot18mMi`3igYV3S19JNOD^epJ5t@8K)93)7j@Z|5rv>tqD~ z-1rX6VE@;DGj;s){W|uW`MEZmsc4(P6TS#mkd(4W57Z*rf@|9GL%>vkU?1#vB;bk`EiuCcl33&clR_jv^CatbTng2tj-oD zpYReJG$2d(w6gt4+35WTZ_l5bjEtNxHmz3IYEW2Fw_LGJNp%oOz(tDEua=De%i{6> zwPXVJuS>`Ob?MljmXG~eLGtyA@$Z#p=WXTG_bLjXSR9{?Z_nbim 
zyQ%LaYOd@y%o%wNoA{3K-1>N)eFE=(=O5nKY?Qd{ed}}d-+hk#Wkb{lhn+K~YbFdm zNBN#oUGa#q`<#yLn2Kq?uKlRK!v*w-Dk+3X6QPF3VFtyW{ln50=Jm*c;O_ZEhG>%&Bu{C)V7$$_F1Th$5y4L&L5b4 z;PGbaPp;6J#L@tiX`%iKRamNe<12Ns_2?9cOHog<0>+UlaHbfh)t+3gHm$1tZiUt) zwv<=oPbiy=V@EF?J1NI*)Y@aj(YGrwdg@l`hnl7jPb*(Hw)G*Lc6YQegL-pES6feS z&p==A01{O_uR8k&dWKNk4$SHw9(z4C(f4kw?~P=1lJFJPuh5Ngk@*68wQL$OU*%3I zYaw4zwjoYLp@{8h>^A*^-`JqO$#zftQn`opWe~(7uam$P#VR!5!g5|r9GU#>@|Z>DM)4za7B;8 z!Li_|Y(GD5P1_%wy7Oqxx;>YC zx8C$vcgNAE*um|gol}{uxW-!Cgj1Z%99m5rngCaPa)=BziNdDnGSLZl=uSj&B6Z+Q zUQbp`#mHIMBVBgP9|SJGxCKB(cKE~{XVValLdtLc+eMv}rkH#jqglz4cOP{sJe+@q zs!v3NlF3k-HXSf+yDlC3ZrrtOJ2kA^)vem-Lp%e+*WgJt%T_guHVvx|Y%Hv8+l{-X zbtlu7eIxnGzrMFK|K-pL(-y+IbVOac;%;5XE(l|$bMDl4VYo}+(rx70 zW9-%gf->>!0!@Lbpk*0(6Jz)a&%;bTdrZB%AYNIlZ8l%iY_+D%W=#jug%18*;&ojv z8@gOKwtH-8@!rT5A2N3R3h zQ~e5Y;B)#F`ZLk5XdGBL&q64G*APE~Pb~}r)Jcg`6Mv_0wQwvZV>BJIY2$MPJ$?S4 zk8}Qi{YCUG{X&C&F6_o{}crO}Jn@tKLsbIc)SxDb;`Sf=6?g)0)1316`T zGUY1-WEzuyO(^tqwRN>Nw=qXYb+&eatXk_^n(OMDW%{~OigQW=?clWONYN(G4`GEU&|)`W%=a4EuZ>IVfxQ2roLGr{a#VJ zWTg~^Rf|+6kt;+&`!_4czf+r7q&cxjYy3N{@kP4h-|LPq)|W0dm?E|eYurrUDws^E zSkEzLGpptBR$KH&M>NNAd5a_(mgAwQ30T5~K!idmaEtGx@v8R%-=7WKm_jU``&$^- zX%@9i7T!M_xy%k;9@t2}r3mH!+bpp?6Bd-}4lr&SXH%9I=T&D@V z9;j+SK3(kjU^A|1Awi`UQioW9Xyo`8?C`KyXFyee19v8sKvo=3)&yBjsKK28Vlj%A z;q*$*%3Mpf`ll=e)5Lavj(QUJc}x^ z*NfCsvyW;%ylnZUv2D1ixgE-9XUFUAj@Q^s4cm=ki-v*Wo}sZ~U-K2)zkW-b`rsED)IC4hV3QS_ufFWX#F4+iSB%UjyQn)OvBKsnszqZ6Bb8TF zyaHcANuZfQB3^xRPX=TLX<0kjlR>uM)%>0eIG^t9xdLn^nvXSuxmmd<(y}vCveJ_? 
zuseG~T1-qvL}X4-NdBSY1$z#k-Ldc7`dwFD*WMBOJ+k+FX6ssRGp0G_IY(-I)KvtK9*5Pv4r^YC{d*kO z^)QsRxx;;HtH+LJ&t1)ayBpRXsNQs_V(ZbeU4hSzgg=c)e43K=Jnz)gf^!cqUc7hh z>RlYhfeVNQzIynunAyl3O9-D~2Snyrhl)y+=2jv__y=G8|00(ES-Oj(*vrD@Ek@tg*28mJMO3ABF!u1HRS@D<>S@D)2CgRF4Tm`vb`#AGtzWkiO~ z?*6X6p4P68Ml7#y>27UkZ>ec%sl$r6i+vlEIM0`8)UV(Se4{ad zg?@il9sOEO`nBrxzbZ}t+luLbQ<(W`#q6IIXTMRL{Z4TPNx;R5Q^*4@Rgo@HlR&Nd zR%PTn70Dv?sij)8OLgADmgv4&tp8>y_sw$t+%jzCWICg0@kZ6|ovQ7ertlqOqBr`^ z?{yvD>N?JHT;E_XDMR;JY;tAfGlx1W01qmfI37}M_ZfAUSuNK&9rw5T?(eu>KVb=> znw>=8`6ISLfRJVG^Ntahd}hr&ri?w2p&JwUjaaOowB9sjzkSwbt5o>IjPw4tPWxwF z4!&6%`N3iDq{a3@@xdw6E&Uc-#|%7QqrS_+4<+dRYLACm>j8oNDza^rc747ff@PY9hfljoq>D@ zNevBC-#HYUnRvgoz?KxA3|n#?=dcbDhDC_jR!hrrR>^1_Tg2lKLo7d3;g4zvaEdfj z^sdU40I?uyDd|cTbtVD<4MMpZlS<~dzxu2FMgOLLMP{$WL8!lw2K6KAS--16zx{I!xnIYANL~-yZ3$Dc z?{bbTn<35LXj<@-`qeyLeO#zQe3>Tt*~vk|SAZ*sS2Q^&2Uo}pqU=g;*&tiI0$j1h zt4o(I0j_WmFqEq^r}IvoI+2@~mYsv`z>`wb6O&R>V&c=oqjLj-&mK5(`G-BXHg7HQ zUt8+o_sGuunT30WrF*r7Tb-qAleufNnM)InJTP@_HgREg)Z{xi7-NG?=9tcALm?wZ zQA54=JY|V+XmSSbA4RZ(sYGea-&+YuD|s+;aHE-r(oQqn}47Kgq~_dh*Ppa~B_6xpogHXQ9fy zw50e!>AeT{A7V@CM^A7PVHu9|z%I4mc3dd%Lzg4^6(m$t3y=dHewTg)3F`|w_8*fdHL{YoyUK*TF5TYWhoW5UazmF%K?g@8U#bdPusHzoNB^oa z_OF`5|DrMUZyJ(6YfS!0Wrpw-$m(n5*>9DnvD|NwGLtb}q9|ReG`U!Le2K;g>aLe- zOCXOR54b|-jiUa0WrLrW>%UuW@J^BcPTBmOs`WcntdDbir|*g=soVm8=Ie zgk!ocQmo)J_MSo2lbPR?vG)YubJ%Xvtm*0z;r3aNgFmkc`XJmfCEPjbw09cjdSG^a z^aqzia}K*D?nmEPZy&PXIjQB?4XsSL@!wI+r6(NayG}W*|4-4T|1j~IF!33)+c+ce zer@4D2DA2;Alqo+FXefSn68#$(*`5IF+JBl`~)N!P1jEHd`AtuhBX{}9Jc=@>?{^1 z4j6ck^H)s?R!q`6o#JA7xx9h}BjTA|-cLFRR2@(WYK#*YX zy-TDD)ht<-dyi8c$1P6mxEFOxwz_1=YAomk3wp0?lJLEEE;WKF$#J}!WWQbGInVfF z2$3)_cjh@`0Aj7I-^COy(3AJhluFWGz4uY(vCMMB{ zaC&M+Z_w$@@E4sinU|Vk;En)y2Q(@01NjP51PkAumIj)R#hc)V;P z-;1yCInc}?-Lh~+#Vb0Osd&Zc&`ZfdR)|J&kir#B4x;#qExSTt3@Wd11q5F~aDeO( zyljvauh0z#d=)HSq4_Vm)FCm5(yQOS`s?5P?v+=51L^AJr(b^g@#lWE>xox4?|N%s z)B7F0pEWigsjNOODLXAJKAS7Km|b=$OLCc4b|ph{6#+wX*phS6Wse!8qSG<>N^~u? 
zU=&qYsfEM|iugfMzXDhm5UZ%sWFe`}0!@LhNbtuHsrj;myfJtwP!lTFj`0B1$@;_$h_h1$KqhP*b;R*vPUa^`LJD}nfOI8@LCv*0s&GHpa z&dct3gENDatgywQh{F<(VC4#@W%h&&vchB8IeD3WrIM+YS2T)C6W1=vE?<(5$fqtV ztox47yz;iHZ|Bvz{)yajZ3G7mR;KQYvi#pj=l_YYfvo-|O#gr2SQTRZw>7qZU2Xlh zRp$R#ZTeXl$uWVZ9>5m13~Mb9hC1N70N((ZHNmL~c9&}t9H9xW@MLckXKo$WAD-cj z%=X3=_)xW;DDk9Kc(}E*oSFqha)lr6RcTdzSfj_4+LEL${1hoOSRZhz+$gx>)%l^@ zpkOJk!WL0xj;V6QRXM`NhA4>%hF396x{b&|i|W{-yPLc$UVMsMD)W8TFR9(uwS52M{Z#)e&Dd5nn!))$GV< zv}ZLtql&eqgMoOdAxb=zQe#c2v!&MCNz~ett1Un*Y=KzVA_1COZ9#c_2AHbe#jUgF zv^euR-1wc01n0oh%m!;_y$ykcX-v^%VOJo%%FU~t!xF_1cCZbRuDMhSCjN#eg_5kNnRKFt$7@BRmFH!Yi%Jh9v+MlhRgsz1WyO0RS%20Dy zv?GYvI)^@N?E1X2@vx-& zL~+?UQOPAi$(7vlOF7buXcCG@N3+h-i)dS%%0!CzBHHbduE&KVX(9!uK$%h?M?Dp5 zV-6cAL_*OBf_N8@dUcWkBmw0rZ!}rJ@KxSeeEv8aOKR1vb$=*3hfWm~OXX5vDw_CX znFj+(K~#65+c2JdZI%oXnmLXwo`z>D5O_+1?xZino`@VJe#-tU43;8$JdBBOXktu6 zQ#A`W0zE_}A;N*s}qh1@}4+U_u3-0hUdCI@9TO zmc8sQ%O&9{y;uidzUi2~vfpd)KL&69>Hj15`h?_n4l|IZ5JzpgN&&g#CEW)x!mY_i$SlwGMZMhoamoV%=*JEGPtu&4P23FFMl`mEn%f_9o`}Q;QZ;OXfI|+03eWz*1JN zH>;kgG`!~d%%*uxotInh%WRxYt8ph)*s+DzL29J~2{NR~fC?~SKrs?_rc^s(%MGv( zdLIOz-yG(WCvU!3c7vZtIQ@lB&@^=U3~d;ZkwM|Yg?H+meW3)Tx(6L zu_RQP;wlVr<$6F@Qk5Cd1uIley#p^xsUrUuX*E`qZvoHv?G8b=E3ebR@30s2xDnWs z+hzxJrB>+)LsnZd>zt76@|qXYtDUJ8X6#9?v8PB)OwpGCh4fJN(6tAR5#j$3ljlIm zR%C{C9oHJcHOJ;VR&ul}lC=-VX|Wm+o?0s-Oe?}H(Mbyct0sEWjVAsV47l~3j;b1$-wUz~L5O@f&6NEi!^wZE{hEtfXTcd{x>#Cg~k3>l>}yFj~9mTJzQ`ZQH-; z+jVy1V<&bz_0>}^ef`R-C*FML+y@_?-}m_$bnf`_$mwH8&wO?4?1>XNwut%FkKilZ zc0myMiLd_Y%=3?Y1Hl}1ukrWr)iSuk(;p&U;T+Dw70XwUt?&pwC(|ntJt5OmF)mUV z;3&ndEy(X?P&o@<5FJ!8rFriT)MIv+!65=XqoJvoQsJ^g)g<_N7RCNcQpS7qS}s z*-f+jwmE+LY<7z`tI3_&IIFu9{t*y4|z2JH6H0Jic&GQZ7@jyx!yFWYdFQ)>WO@!MTZj|Sq( zwK?rB*u1CJn$l{`F{Rr4?!}BIH{dF<+8keLjFsvUpe!O=)!M*U2n+~rRl^_N0=UX( zMESji-)%?a_d1J*W=l6OU|Tfk&ulSs8;yb9rYptYl0QYGZk)b4!ozczpcmi3PneFg4|#nw&=c6-wMq zrfIar!HLvjw&HNbgWej@Bl&OQD@>DRs6+XOrK4bW`XOWXA1)gN^VPDyh^N716+CmP zYz3@hA5_1(b1vVDukhipd_~s`2J;o2#q308Z5imYL99}tD+eK7ErTo6$$Wou5ODSJ 
z$E4ffd+)(x^3SNe0$jcE#v8A`_J?2p?)4Xc@zzt%{b|?U4>s@ktY_#zOYhOD*5l&F z(}i{C@@o+n`E?i3t0k}QlA!7uKr63&lwT&x7ArGLl%mOn~q3W604JiS$6 zMVN{fmQ(EroI`Phk(m5{a8Wneq zmUNDkbX`M5R$2c@<>1xE;gMEG$XdVq)YiSn_da*xrC*)>{p+Vcc<;o%&yF8Cbo#4f z=fGEJO8evZ3Mp^8gzzW6`bXl;KmBjri>ZS5|CjR>d&Z`4g_HB`go54ausiLxTTL|V zR;$f|KphLcPGK9&S1`s!vCGV~4gu)^F^ysk7+-184-UpeYACjHg$CWt;VgP(Tyu5W zy8oj7g||oAHlHb~y_Ab(Zbh&tcdg6vhGqIzbLl~SKfKz!BE-BZ%oG~q2#IrqB-mFaI#wp3YTdJn<6q77MdbR3$AZ9vMmEXies0ZN zMuR`2$pZ~1r_ICf@Cv$Th268c?Vju=C-^G6)s@xe!G}r@L#m2_0H@4~G0sA8$Aa{fQl`&s za+VD(H0-(Fuy?L%m#g9NS?Nw|wG*Q?DQf9iAb+8q}Ay2 zJG>c9s9m3~-VsQvGbYy>Q00y)EbtW&D6(V{I#pt&34DbCge;OPWCB=){mC_i#3)(u zTAcWQPj9qjwiR8eoz*OE2|a4moc@*nBV2cKg?_Mf-3n< zUSa#Zpnaa-x{%))$mL6EHr%e_eBrMCc&8Bmj0a=u<%n)I12#3`Us}UUH2$#d|W>^HrzuV(r zf-KOc+(Joa8SVVz`RcB46--$at|(dk7xERmL}gi)pDr6jw*tzGEjobEj zZ#dG>bxhiPvb_0%xbd>6;WE6H1dV8fcQL2_LQc&$`IT4mrDK4rT(LZ>NWm2;kOV|t zFRN@aqg2fmD>&kDLRKXs=)MPQ(9}X%YM~-YBu9X(5X2h_EB;_yfPp|B3%C-fV);r$ zT)q;lh4p{{D$o>crT|oAz#tX_S4(OYm92nSU>G14%U8f28sB9qqDLW^4B!%0g=CBe z2a8*jufnsH6tS2s{w+xhVkg5_;I8G#MC?b<pjgIL^31wkyh zj}pXEg^zk5O;>V>R0#jXCOTYm+_U=Y_Xbg zU8CwQR0VoVLe2xs180EX>3`Q*ADka#(@DXDpYntM6ut^(rynx@fqZpmW{}<^v$w|V zm15b+83)zU4X@qlvU!+rJ8f{hvO4TI2u%pH-DI+wjTVyu1;b1jjQSaaZbCPs)fs1| zt$KvUIIW)2O=%3qiHT{MYHEBO9i4$$TGQ3B$xBzYSCsR=`^?e(l)9wvYC*Fqy;vKS zF&U9+TAO5Bm*x#i^@JolRwYVTV;e%(&|Xp%60Y?ky!q3 zU7O?#NwBR+vW21wI>Q~tb4T+1(5#ZnW=Vm%*q&PD&8+ih)OoOO4h7!1EpFfkme-+3 zDZkZ&r{EtTR!*Cn+hFE*dPD=W)jIcv2*DU?sM4w@tGC^(7@lu=JRsfb5e^tC zcDSKdwLNpa@o``MUU%Emvu)4Kbv?h>`pjI@6aMh5e_WGluu%3LWL0pSE#ch)$Em|>^u@zI$@ay9L3i<%*}_e}l1;OP8+@QFeixyvoDK)xKvs*L*K9+A zGPBVl>YpzeyivH}dUl69zke>L)05rdNpEyv0VB%nldJ5=!^VqYnhM`AJDj!R@~x3M z7WAG@ESTj<1MxYtNbkZ>3w(vD_ynFeG*K6xYK_eB;Y(kgY>LTtrU=~0d}AU{#>qdu z<%yZ2mjb4RfW~Tpb-M)_LZ!(&ELNkPbB$IAwH{n4^t82q=`aqyhIQ??35 zEd0N)S$+(tauuvavx)v-@d`pTa}`=z0t_LV@)cuwb=&hwj(&s0+I4J{xQY_KLVtF2 zDnXzn92ziubN2kX)5s2@`@$(S9Q^XDqX&;2f>9)?Y~OcaKic 
z(tN3~@d^;Dp#HJ|E%0hC=T}|JkzV6TN3+Th<5?0}MyWhgqC#*=IswG=!>NTv)C3VA9^z9wTFLH7jjUkm*zII@)FY0x%2M}-K>X7=BDC4Cyg zJPiem;RSrvI=&`6PYv86V+``tx3~E~FIfcY-hov`YHVo$v0_9T?7?c~1hE!mg|bf? zM8y)T5vb8h5-Sm?(L#Y13g8g{t*mMluST6+tI4TT1F^DeuL_#36}8JsIu(*`5~z5u z?3Y&#jMfg0G;JPfAHLST{aXL7tAl&4YeR_HS(W^&KUO92* z0zBNmVf?SIUAc@y5U4L3m&0fn^(SO4l}4q;Eddl4uPivGjs4g~4qriAltr^wM$d%f0#iZujG4+{mCr4ECXTJ44^V`v?O=CGNn)KSq zlv4d#uJ(aQHA-%x(q`AjyF=rh@TCMht%^mTJIksV+v*qxx&%HLW_>W+vNFoCCe|4e z?+i(DhNd{zraHsZ9Z)mEvm6oGj_5phV&Pm`*+Qz+1FM0|dLP6SLFZy##~i=a1Evym z`XE~sK)-7Dy8PHr1gkcLwP?d!LBCtL-ksZR<@eYSymm7L9Q-u5(+N?F zbXBf1Wwts~YR##21XQ4m@f!WnMKeh-Y42XF*c3>qngRUewi#g=TD5&%x^1p(*k8WI z+wj<8<#re{_?sX1wLdf0{>*I0vvUZ57SXaE_tow7R6XJr57|)hF6c7nwVQ-pc7BT` zx7kwMGn?CFD<52pFIDo}tOeb`7#)@(W;9F*dhHckZvX}Jy4~C+{9ky|8f@HVdq%4> zt;v$z2YxQGEAa%roDHdC21sxC2nAK>DmuNvpCF>W{>kUa2TJTkV zzg@V|mD_JA7;*x!3O9SeSJ^!l@D)C&ydFn(hb5=Op4aWlZntN)Sn_*3f?hAOig-)- zFrje?*1L;0&xwZoqQSYmUT;Q=J+;AtH;FQPuvSK$7v=FVHH{V+&=oN%*NWDMsiOIH z={8iqqvK%|*R?Lq5y^4F7Jf~U|gGKUzhF;<2Yaev5un%O*vH7e&*f7 zew{ZkX`HuN{Vs#op?5+}#VSHDlf&sX>g-mN+YP)jnY|9X*I~uhV|Ne<%Y`Rqo6q9% zQdA)l7R9RIGcBD#!E@?fh_wu>zUS$kzm1)z?0~)pY$;h`yYw!D{)1UeA3u(-*yJFa zAN(G^8be9(ExtmVT?$vIin(y^oAYPR0j|!RJbm(OvU2dySBJq@2e8uqz=4AdU+w$k zGw>Cf&4I5zc<=qU|M>13Z~f)9H$V8zAKridl}~p(c68(R(|sFHwGEtV>ONiDdA_3U zVp-d_fUBbBD}=A=uL$d|7Svo7R*ncNN3$!&vMR5{SZ((!b$j3`$? 
ztR=oua*9=)B6S+6zq&hLEv5YOWmo`6$%+Lka1?csB0$9idCFH%oeEpP6)k+imKCgS z4{nJ|tjfe{Q)G}O59TO320O8cMY)TGtI$l8zTYLncq#;li~T8~uPo&nFk4W7wGdz} zxrKwZ}DK;fulg4AL$ z=_+?;_~iizj^8x=U;i_n>XJ-|KWl2M~+`SbKw#?eIP3h?G%q| zS4YOjWJ<7=lB7#0UujUFf+HBW2|E3h9@i!G$|7bh|4F`LudP4v)qh(myB9~@b$>8i zVNCgo6(R5BEA{}Or*!u0z3f~ad<%@zGS125v`q9XHv(|w1l8NDb}LB^B5386$!MlQ zj#S(a;I3d&Z=5mdbq3rlsHY|;rf0A|9(NB~jT*NO*TxhVu1#H4-Td^}?B*v%%R4V( ze#MJ%F9xH$3G^>c=3ifz;#(bU2THAuCGOwUniAR+480*zJsfRa5o2E&i!On-Rq-}T zR=Bo?WjG^qQMKiZ$@jzvTqx;DmdvMBA}tjVbld=3WjFhf6D;VN7k16%wR=Q8b0CM@ zHm9)1pVMR)b@>Tvb@~thCnzxKO~(8lw`je$bjv*8p#bKe{m$Gj8~B0W=H+)#?WCaq3~;3TFu?mJ*Q0Jw?c|x=;fj!85N)nRz&23guX!tnf5D zy6{A%yU_X&ZKmO@g~941a~Rhik%>}yOH8INr^FhOcwA6?_OYi7W6HpcC7`o;jSiz{ z7MTF#t*4RRwm6&ys|8A|&+mqW<+XYc7`WY5r`u|CdTmah%d-Sne-mF}mVFOFeb0~B z>B$b*c}fQ?SuGm`ffX6vVk`1rGLeHGch#?;T+s(|{WRTwSH5B^uOJK15=!V-r1K-g zS9ca)(c~a1+gV!%BnJ&1gZ@sIX#GP%)lU zInIO2CBs*MD{kp1r*sTCL6)zO5KJjjk^o$Zl$5WaTrr^_$`upfD>Rn@SyAzd=vR3p zfKn`0Ee)7;c~q(bM1iSj5!6!3k0u1~!c;Vc_njgu%2!md0!IO&C{00kV)+UKx^w9T zcY`Z16{RT_u5J&qm6WEIQP%2A8TQ~uYjacxN?FjdA_b~-`ATeWBNix2q#?M)h+04_ zDs6$du!pQMWCEq{=u82=N-fhM(#ljz3k+`|7AOlsR$hZLr*1q4W~6oF!X`ygn^N4a zlyu0-yJSGD>b|ks4P%W%V=Y@`?c3!&kBo2Hb8UFnpM zCoi2ocjX)SQePRp3cR{<4L)pWl||r6rBG{B7%f21T zSL~M<{LQlZ6?*{ut$amK$%L$y;L5hdS4>$jDHS%^OhzjWI5(PfW;6IoJ#CpW;AeW1 zK|M1$KA};lCsh+OO4+1bZkU>$yF7mLH}A|eY`BnHeF-KiC|W=%cvQB2ZK^&x-5t%D z3yE>Bigbj=yNNcHV23(^2u-qur9hf+u1T_oqhy`qAz7hBEA$G;R%{qV*n@N-xC<}9D}h*vmDBOk$)bVlKrF_-z#mfrOLjX-;!(T= z<0)Z}SJ>+Tp@FQRTuHYsh&Op8n>-cUyj43qO;5~qJ$Ie*Rr|BPmZ#j!Pr6#3@pinh z(Eh?g>$3|LTiuc&cTvBssNYfC?*d{K_j=1V%p)xbSuDM7M%3$sIM(@eK+tW0{d?J# zIT)4_4s1sW`Ya@@j22Xsc{1BvDGer0vo*ER%xSW5n;e3eGiFL zs8u~~7^voVlFt*L7^gv>)n>v!$3H<3&>Hd=4lWe*&-1(eSWSTU!)%iJDdx2)R`3-DtKnIiY(OhR93kMY7@iC6@v%Hh zRO(D7-q)xgX`KxVOP4Ko(A9%oLw6L4j=xGi?S)nqduD~LR}2Z-fx zVMLh3Ht(=5dfa{%U4oG&7<*V2S~mC|j=JlQmi==)4gM>1Oz(q&-w7SFZ*m!2eGgyJ z^8VoDAQ0e-{N>@pM-D-}0$lA! 
zN1V?-Ls|PrAAR)R`yagX?uW0x^~rDl@cAphJ^1uX2X{U8#n9G$y@SV~T($O|Y3li= zw)0X|`_=N+5pnaiqQ+~54I`rZQDNPPuzIYZYD`!$o?kwmQz6T$7{AR|VggqvxT1W; zR9nfwRNyPXRg#d1S2Q!oBnOw^3W&ATxsS;VQp5`8t8lcZVS*smtqLo^RY>LtZWU1q zMT@D(_HXAaRwSqq6m0$126Qc~iID^f4>b3mdH3k*{c66I~ms!&46O0QBv!a_w>HtMpf z<$3iAVWYCJMOEA?mvqR1Ski7FmJC8x{gAA6i>h;*a$x89#@!>^pS-f?`HN4za^~gV zpL*lnGar0*{^;?Mv*$-IUr}Bekza$B3gk!TS}8{>hhV2k_r-pR7|?H){i2s;2I(O|&*AKOn>|sp`V~DPv-iBT?w3?vkpSRZQNdz2J8UMq z)o8aEY-YWci6y>r8%#E&xRL8ddQdkb(`rXm6Jttb2W>L>;_;IU&%Ug!X#EzBbBSWp z!(8p^6m7D=my*8_j`lRs_J<=#gvR;SCU`;<9iS<2)S6^VNQyNy)drm^BHb08GaHjT z7oG{9aC;PL*ZHn6p1H6+z^Ph*5h$1{r{yNEX(6j|KBslDbVEScyCCSE6ZI`1@}XIE zxRE;(_4*3BJ@6JN?DG@Pfi5?+th^3FR>)`J1&~}oDZ&8{^qKrV2PBuAE(`WR(GvFi zKwr?f@XDwE63A#US8ol7y653wz-#s-N)1K*0cd3?VM6`9u-{)iIA64BRKqG$PczX<86G>)$pXF^$se6vS9 z=qMhr3!!55*a1|CtVToS@b%&iSmcOzqK6YH$cooK!|yioI*my+5VHIzXwU8PB~+n! z%#qS)O>VFx)aVjx^>LLmsr96konb0l0{q)6tnpF{p2ka!5VB$D_)O?gdD7A;M+Fk?*$7dO%i|>mKX8+vQ`= zu~fpsl)-?VK^WZ0MeH8-Jp5M&>_1sHL)kw>rz)MN!CShYeUq5K%O31prguzKzoKx3 zZ7^Tam4nOltMBJ4tQ>?^fQlEu6)C=AoEcEuj+SufT7ups=wNd4)CshT`|9gsM~{8U z;MJjn`}YH`_A#FA9{{dC{OFzcKYQa(pa1st{V)Fd;NIuH9Del3;P96{8;*8tIMdR9 zzPa~8efO1`&XKD2vC`(z;-;~}`cVQ`wPOV}V}h!&{EBgbRFPLM=fU5yTt+j4M7d(b zE2>{1Ifznr1d*+n0AD4*;^|h9b%H2g7swGy6;^x&`uef0{fKoHU%}Q^(NrK!3xcVT z?}K3im@1gBuqP;Pm#i=41T*BJ-wo(al$WUDv?g7Kz?cs1hEjO_U1Ci-{)uAQkMK)Y zWh{@?nSd)K4Ur{;a-|Lqs94~u2)>FG!}C=b6SxvgK+eJ-Ml=yysAX!gCYbCYDarz0 zAz+`i|U&xvllf`Nw z`oUfKDtNxr=}D(E3s-lFSHah1map!-h~MjS&+h+Uf1h|D z^*L|X%kw?2EOx(q12zuQVYg(X3y4)XU@IDMiZ^(R`dz4&N5NI;rrDAqydQUJodzun zp^RmB8S?sU;K2MoUuwOnaPVektDDnkPiuB$c6d;QmEPh=sxiS$6o&2TjqYUR2Pj;Oc;NEb>r&GS3KrFO6xK3?cr_{`zz0KUS?0w5L_ZYg3S!AIr8PmJt(_R^ET`1TjSyYRa=F1+{Qm4ioQC(kM` zTvlF{sex89wMwPY0Islr9ef4t3I{X0a*&Ew3|!ezB|!NKB{5XI!WEF6^~?C`ufi3s z#q4#Ny;3i`g43t;iVwQD*QNjO3LgJUe)+vV+u;4d-wykgVZeTmKSaF3fdTwxFAeNT zo1VAnDVp-t(ycC0tjtVUvEYa*R}{D$Cc7Pt1(3Wp8foB!Y`|ipNi|jv!ZKSssZ(je zSC-34$BE1SSN~#e9l9!Rm=HJ2$MYQz#_3mQc;W 
zpqk_NyZ8f6{3_^Rg+rdoN3K`yoUea$v2G74+ix~MISa&UebxuELbSc$>H5WN&oAeD zesLWDCf(*P+u|xiCHV$>*=8^5oKb;Vi;S@0c=lg^-29?XM$XmaHwkP_usC@q7tD7Q|}shi=q+R&&PeSffwF~7qu z8Jy2(n9gd_Rc;R8N1|>I_NUfNq2D4jE-Z3Hpi&E;IX**@lkISmX!r2i-B@6VqO80g zBp=P_CT_R$!_m5A-V>HW8_w@Kh{zfHS=7 z{#bPw$Aqe_hZCk(Ch69s8t;#~AZ=0~Jr~gXVfOAfdz>zh&1^K_$hSBh7MIiH^jMr8 zWC(pu_kwj6;dd?B?cy~*g0C>${;G0CXC=KaX1DAl#UOZ|vSSvm=(J@g?=rXwp3Zmb zSIfk!d+`-6On6Z9EBM#pL7lINGsEck=m;rohj@kLAaTC>_5y{gbLY>VJag*!sS{rv zKYsM9uMR`II&$dCL;H>#{Nmu}UwrY&zK=gT@c#P;-v0BU*WdZ_mERwF@)w7OpE$VT zk)z#PPP7f4Z(V=6b;D@Gz|}f50`9shZNDaI851|lOBxkL4GK}6te{RNs2$I*9?z?i z@hfByuX4dxSTiV*aV6u>td^ogj>`6wVmV0#3YUUGELDbpwRcLXS)5rRrjbbKd1 z{nb5p@)dp|`BzyADp&=R72t|KrIbbCit-hTPD%U5)IF5@d!&|oEwJNYX3@*O<>iLd?vj2FyN_ZkO(DLaI*W_%~urhE7k6MN#LDvUZtrTBfy)Y8MZj z^}O_!zIl_Zti#AHMh_HQc)BkveLg&MHX$z%o-&UvHY?Vld5sUCg%lyKm7&S5kYqc_ z38q=Y(yf52s2s8+5Q0@qkt0rQN81|sEo9WsBM%5aD^8soRs*@6eqsN06!Q=VfL<(T zn=2TYg+chif|c3UI>jb!Tg_HzU&@*@$fuQt!T(6-Em#K#aH|2V&mTHUC#$lg$2H9e}2B>g}L?@{2ed(x?h>= z{q=nBFRwR0wNSak3rhzeR?&J}=|-1y*k8EbB^q$@yUgN^K2Vluy*;nTRI=GC8gxpw z`3u(Dd0m#GjSJCIEx&s%p~jNY=F94s1!AQ%+G4BpqV<8adS`NtZC&vU$cl})GNNKd zml`5Vbmg`|opY~i?-S~*!h zbUm-##%nYQyIrsu%x*T7^v_~HO7XE1R0atvv&o1YqIhr~iWa^U@D*9D31|COBnO@8 z&9*G;>2Tn~2gKrI$5)WqZiTIcXy8Uf;S{ldFCwm_@HAVSVT;X~MamdQ3+k`10s=W@ z)av9AAT`aaO$@o%+2@$7GFSbb-BG>a0OO! 
zSREcKim@DCoLYTu`-0Os?{fQ5jm13ZnU+o&_IyjH5(`BbU{CNXXQ%TI=PQyx(KD0zR*S3e=PQ~VqNvvmEBhWSEVf@NDCG>$%-2lg^fzU)e>LTj8VSIW%w$y zOb$;5ZmEpI74c+XY#C^sRbqjhA{ON0YSUi(6)UPtAgPQ89+Rx+y%Z` z%^jh91*W1l3mB|PM`mz*b^6$Mg3+TON!^RDz(uP#G6W?k%2!Kh#OP7W$cnz;u3Nm4 zLRx61iVm=6b%run!eUu#W%@YwQy~kKbz9ELf~OYnaw3wi#g+(JxoQepKr9lBs0G9# zd?lJfjTWiSVtPi?Ry&|9w2CH0Sqx&K9E%pcr>Y@8OUTmj8N`BV z56ZDn^j_RS5KG*ptnAlT4ouVyO|)&*_CBIs|EOZqW8*uY9DCx$(dU14?KgiIdF#)k zpYD@?eQNT;RozvYPNCMRCZ;u0(`cB5#e?80T&GON8C^Z{dvR3o{@^bnc>JH_ zD|(Qy=WzCtfny2B75gA{8bmb3GAlF0D>yOIw4mK+v!SbiL2t3>EY@j*MQgN7!NqaL zhy{blbL-U8X8DwUY|4IlV*cafuC32Q;~mYbh85)0Ly0qR4UNmY9>MjXYCS9orl4Rd ztO~SYN#JT+8I$aAk__iMQDF0Mk`CFxq@o3y3QQ5tBNd2NfXV1nQ{zT6n#?9Y90qu; zK8QtFg9gQ;YG;7kY{um{zuyb>333w()DfU5SdJqhMG|MdPCOO#dKk6J0pdcgint2k zr?}jAI#cTPNUrfZ?aB2s;3!<^IW2lz>kHQV^ZGm)Z59B40Ge38AA_7$8>!lAgMj8m z1`VE3q>c#ISfDIuXnm-*x*^;!pVtrV%Udw$!&`zo0T2s?Sk=20t9Q*eKY63=nVT?V zXnmGQSR_CAyu0^T3w^&?=zM7oO9-KHNq2ZlHrtCg+Do^15XD3I|8nN_S)@A_iZ;5d zcHO|ZeEWR$?wjIaFH(e%+6bKVFH~#~fUJ^g?OE+}%})iw#gmAV&4K*B08lEq%1M|? zVp)eSfoNAPv8>8fp)pX1KogR$jxRUEmMTVqk}4nSvep$(63Rk5!fI1$r2$=}5h#6! 
zjRS1N(7`7~It8b%Wa&hHn=8NFk=0-XSz(1E_9vBT(bXD~7qmV|Ug-^bd}!cOkQLIb zKr95zvcOjvtyX;UIo-~j9(zuYExpZvT6$zxfxF^O0We-}`(kvl8J48cB1dGt6J8t< z+4i+rrqyZFtCDpDwBp^XqFiB#=;G~K5pTFZcKW_(-OnN|;H!sY=8$gwS=5CGBfr@F zqHoe2FwX^4I-|quc3RyYi+{m=1L3y1V1zY0XLS3gyY>`G` zof(i-VZ|$~9K-|q6?!_e`W5A?%U3Ra3)d@JdG*aXh*w{uoAZeiUw(c3z>zQa9XfpQ z@R5Uu4xkmz2k(CYxcbZcr{8|(+)KYa{peFCHtsyuzwKnt@VU0l-!=_hty@1@)jM9^ zH74!4THbl3tZh`>I$F{^4!$aCkc%4Sc{L-sH6#4mQGU%>Zj}sW?b&E6Tn4u*MTSI$ zNH0~=v>=o#Qq4|%7?vupuyRn9RH#P8!kAs4f^lecE^)G2Dq_!(Mf24NC{)a1z$J4k zVw_HGqo7tXMhbK(9}3QqyaO_^**%)n!ycN{qiH^7yOg#AK~X3oyu?+m<|q)W(qMR{ zUY)L5i7{7+F#_XRi7h`E5>F||Yt|m_JQYE)a1H^)L1B$YEHA>i!Vs#;G-V0jfqILxktGG)o z?oyZaYN|JA8#Za1w`w|fDhD3bY}u>Y@uYn3^YUk3k-hTj=pWu5eeYxW!LKIIUesTc z>&H~n3eB`;n%G{EHSX|f)J;-Z0Q!K#u^oj8=NC|t3#kDcteUAh-sv1COh ztl%p(rK#ZSc<{CU`^SGL2>BoUYrp^Vq(2S*E`rCw-&*h&itm)gX?g(AEqnXSp6cnL z!X7yo;9PwdzOup4){0<^8BmAiL|HqN7Bo8HgiYXz^mLmsPiXWit$tE(v1)bpYbx_q zt^2%c{?i$anHNh@2BGVI*HJ6YdNX%c1N{4loepU1g92kgTmEuG;5Ld%B zDP{;(;Tbl_4u}NNT!LseJlhgq=*gVnNY@W|dMq4Y)#}vIxkO+F(d)G(tj2 zZ-Me-0jdG3h?dn0V@xmdeFy*;#4Ef2#ahJ!b7(_Tx-ozvA~<3(&YTdQ${ijVQN9TeYSqjLemp)86~z?EzkIypZ1XYD<(gPD);)m ze(+W0BmS~&ZppB_d>bmk{6H*Z15qarxXS4^La;(k5c(C;j+q_PU@er>15xnT)0^FE zgzA)fXIA?H2F*_fFkXc_bg6l5!DLdUGpfY6wqOd|m{Rk@JOzCkS*(vL)`to-;iAb{ zi7~0t7FjZd0Vz1IvU1Ujum)x4mXr!(VwoSJ>^uM-4XSwi@yWT?DPpmcP$VzQmG@Rme zp`#yt_W4Jj?*H(^qj2GV{q?gi{^HE;$4|rIs(;J5?u{2)hpsdYjnuCnuk0U#2@*P| zly!|rIdRaCAD0g;OG6`T0On!Y8MPA2z9m-UhKAH!EvmBN_v6&+BxTE!V- znQFP*bPKM)SD+(GL@Y9apdL(y2u7d_J1Jt(Ex3z)J-eU%=fOyZm*FY-oeW&9;K*pu zrz~7iz5-&=0A;Pso?s&^SHs3F*1C;Y04?Aat;<3=)@{UMT(cOyf{=yYg-cuT6@;v$ zGWAj?9jTI2sb&$2DSB7I<~_erC1_FzTNR=mMMeMhHHUzq-ObY@g-R%k8CNh7hlMqRrFPown?3JV#=bK!HPkf zVZvY-pU{u0EF%*O$43`m`14f5j%%eu)38B^5?IjUBvlwl%w2^4_UgE)kc8=V9AjiU zJgf}RvZ8pNkTlbZq^S^&0gC|PfAbO(?@vn3U*%sq%Ve z!BixAJfNMz^>a(MAl)(<^&;Ef3?fZsdJ$@^al zz%VMYS`Vy(o>tJei1vjbK)hQNL1lHga(jFQ>lZ*-BKSTH`tS}bwk?)#ovqt-z3K5A z4SVJZVl5S7Ay+udEFrvJyT@Do=xpUKU-=F<5UYH~M^{*YRq<7}2(d1@3|$4?iBeZWxihxZ 
z79lc(=1)TxTAibgE;feXUbSF4tY9*fuMQVz5piNeRN+i0Uxmht31x-^iHU5>3~(h) ztF(Z)z*^Z&ZmcrMYOrV3S^3Sbv{icZt0OhZZ$4WN-pL>3k0YOoF?K4UFJ zw!$(2qF=Q_KXd|d@dd%{0T_&L5Q{g#$yuFdWN9Hw;w!*ckNP@P=3~ucQl$eDT!Pew zx~y=4eoe+Sj-n{8KPfv97UvI*^}wPQNkdd*0pKB!-XCRtDABSi)$#KvEB5@$iZhwT zmrsob%)Wrtws^xof5YmVH#rymKD*nhbepeq+UN7(xjOd>jFZ)w?S#ood!8?e)G z*Q}*;L3*WN`RZP9wM@TaYTLh4+z!5?&Z008Lgf_| zuc$c#77xls$H%UXE)`$FSoGTq=PsN-bN(#g>iEf%$4;C)`t_;9M^EfK@YQEu9Q%CV zSAY5V;9ovI_NVtx{qFTM&%AJI+wKz^ww&%7{HAm0O3V6@#(~kg{_&cAIlPa`y2na8 zM<9JmIz}K~6}OB6u29)dGlO|`W8kaono(Z$NLJ+tF=RkxJ1Vcl3T}xKfr54_TT!M$ zW{?R*B320IlTJ4@AXvrZtFWc1K*CojwIZ!#09O#Nm^L)t!2!nu*8op=nICZ zyQc8KQ8d|y?aH)q1bLb!CpuM-X0<9+$;OH_P|#95oC?7T;*}h+imS$c`ZRbyzzJjp z&;o-YFs5J@JiwS`GnTB_0Yxm3)m6=f|9pk=MgpTrh?6_0G_r&!#{yzyRB9k$F*fhYoO)GmBT=#P zo0NhUrLbLD2oYvu3W znt1Dd&1Z+TN6$=M7_p419OD|BYSKJqAnUPEcSRbJ*o`C@=XRuLP!AK#S9EeMo9URL zn8H8lSO3j1?4QL~?72GlL`{XzVEu~T?Skiteue)KW6>r3it-iGf<(C@F+FKEXbi{+ zTF2DR@yWock-*chP1bC>Qo7z4Uq1B!R}~|iMcbO#tl6Y2Uv#o1JborTNf(x8Sj#c2 zN-{=dIAe0%>v%Tw>J85_hUb_gaxMJ&Ks2&HIo4$HY>IR?soc$}@gf_T)9D59pth=L zXc1c|O8^OMQDlW!(xwJnl{=9FBc=*ymIuCK-9f=%NCKkV9lQjE3T0Xe)l+-k4ImGQ z0e&-WJ_v9^Cl@5708zj#Qp(;z>aKxRfGZ5J6R%_`3$_pOmD4Ew67MGvi1YaQ!h!AdYppFZM>abD=UtuuxS|GdEgq&JVw>udITrK$P zH&g4qX^nI7)y^2WcQ*J!gp*OFW-wK_P#;-jiZ64l;ZLs2)~pj5ex5cKA)Hy2t%?XSR}PuweUf;pv^QI=zvZMU$qmncj}6dlp37E3UyMXXFR|3{?m{y5LSN4Or2nY}M`ERu8NolgP|Pr&U8cr5NY zr_1m4n4GpLr^7Vsw=eY`B-Ut2^Ne7mFXxSFH0DyLb?3K?+xQsJ&1Bb~mNe2mtS0$}uMNMOL!62z=uNfn0 z!D<$+Mu}UyM3#;|EVmlp1oPD`xB_C49?j6K5HWl?2?nlMz9N*BqXb_e709&nBYXw8 z3V}%igIF{P2)?4K6r277Us0OEmX=5b!_~d`in7+q6a|7J79eUR2T~OY3RIvemaHgi z;VB!;9s*ZDEChush+QmSfvjlYM|3AkR?8$Tnkx*}vVzlv%Rr0570etkpom2?hDZ^T zxW!i#u^2gvse4E1`>g@O*>f1m(-buegMIKs0wtdbt3q)%Bspsd-bY1}f=v3;ulk(rH;O)-e2+54jMxnFC3`MT=u4>cbg&>T9c zKX=73s`jd9d|JJ0N{^1Dq`4c~lS0U%eC2Rj(X|0nhR!2;O$?s$nDzAbhw>GhDxw2g zd=;EdVvDb6zUim@;6D<{{xN*T-Ym2C#q0q?;T&6f^GgSm-sx!>H@u`wgHcWICEXhFp)+IStQuHsFL_`iUG zRr%)GimkKN+vjR_`k`VqKe5pKH;L$=Bi~_6JhOk|gNZXnTkwdoXQ$UBUGI$yczLD7H`^ 
zA(#q*LykZjS*VXLF~k%Z7|@!GD3}b(pMbQ57G{a1#w3X;wrD0nYyfv<)w^I-kx;Hr zsxZQ~lr)U4B}**PfYSNZn$E0*DI&Bcf~@ z%E@dv08qUT`4Po(pj`aIPtPFZLh*{EHKxetPnQkIuaQ=Zi1B`pu)yp4hzW>-EEDdNzIAF?6kE zP}a0wUf-{Pas{}m?on2CD=Io=F9$~ zCL?Ji6EL)x-x{z zdL57=t6O|^dzBw49fw-QXj7=9qVm+rTf!08CMfPgPz0h`6y24xy z>y$+i3*}e{z!mALb1NurLD34*vWS@j)wM{69n`%O15#r4PFohHNz~}41IkLTP-j(Y zc-0!%ytCz4!e&iTtGc8^T|y{JA?{O3*K4XbYMY0r+jma)?VcXmJ3R~)>)DCDFKM6o zwdPlEYJUHo<}Y994t#ArbJ;zj^sA@+Ge*yh!GTt8q{llhaIkHMa^-SRh2c)VVwGen zUj4*Z|435$aewQd&R4ituon)>>h$Ph5$iTzeYgG!`jyG7M?t&UZPZy!@+qBi+N(7L zu8!aQ{7Zk|BUhR>PZrdx6Gf`zB4={(Vtl?oF3+1+8Q=&!F>r=V(uF6R;R%V3HW4{q zbPNPvh4XB%UQZVL;A9mou%X%trOWZfzLfGgI50rD!ty|JEp9YI%|P+CWXodZj(~LA zO%zrUxazipukuk_1^>@}Hz*5FvHTyzMa(ceNclRgxq?LlunGbcoD zgRju#d9Lw^x$>RP((NvUWCy`2z*QNR6F%ZE+2*NubV0IXUb^eLXrouWbv|#vfwnbh zzMR!<$ml?kKUP7aLTnbwRYJ8ltk}e9TD+g9iV#~en{Pykv1BihQ07`EGy<;HF7X!uaJ$!7X<59 z*pbodcXZh%GFJ<}Ls z+H?mRQ^M;5dVuS@r>NU}d1?+Q)6xgu)*fe^>)b=oy?2lu>U8ACbNjey>L z(`1{o+itj>*WoY9PLuyH_==ssf0eITa|X7!{Z41md+ApwZbxzudZw23&pl)yad0PwYQ%eBZv)`}Upv^pjJ6e(&51zdE<;ne#(CPxTL<=^OsGd&_9c z&={5q)~%P<4k#dARrSaKSJj=0%63^rn~b`N0)FFua*!iUq$Rg8MwkWf+vSyh0Gw!R|K&#R6r~c6tfC%sfeA?uZUDd zm8&JSis36tS=fFLUjc_G0;4n&Xa3x93R$T+tkyuVQoUu{kajT!nFbYty`uyjdU?(mLpF1G7-1c;bq@2_^nm zkvC0xom07(EcIZ)V0x1S39rJ9vnW{?3^=g<4#_nH%s=yc;Dlfk^f;hgiO?~y#|`r( zIzVHZ;*H475g!JWuR{re3lF$W$?2JmET2qo1G#wE_w$ZS!@z5NJ(ER4hya0GOpOVB3&6-VEFg=D-WiO#uu5fmp&>%34-?FU@F!PgYgY2q*sjUdg2ck}C-DM+R$}S@WA8oS<2uW< z@9et^fh45bPU6JfR_|q`QP+%m@4Yu$#Z~Slw&NsDZxH&z0!xR5gpdRRBqWgHUS!qj z8cpwgMv~8WJ?9x6Pb3=xgx!7LFWUQe_L<|ctypO0uj{_<>ox&+5-asFrOJpRNk)qW z3mM_mqJAxmBBr$HvFH#vqTnkmNrX8KmJX+XviIyT7G`DT~Y2&W(Q*uV*WVEJvpTp_1nq0O|#(#xx zy}$K?dtnd!ZpAt9dzPNL_l1usUO~YMOm3%>gB)BfHMcM74|3)!%2y*7FQYkl?i^_j zg0CQ6p*aXoJH)HkfAj9Eul@1YzkdH$ue|q@AN}F$-+b#B-0a8B?KpIH=)n2G{Ue+A zjdkz70=c_=c)Vqp?3-)aI@&xi4!COSnP}`DukISH>KLtT8wXsKG>sQGO(04XX;(S* zqbL&9j<#UURC@JPT7@{ZoUCj{LAeSLuSieOA8*CluKfMM{KP|#@VpS?=R$_%5UBaYtF|FhZ3BOXB 
z%2ryHEWJjSRV&M`mr}}tmX+5cDQcCJcStL{iHe1xLV{S^rhr&=!=k1=;;w^|Ef0ye zKP(=8Onl%e$&qKpkGvp#>Z^(`e`n^KKTyDq_44bAKYggXJZ6?n+f-_+!C+$%%R&S! zvxTh2Vm~Nc;hXD5e8u$yxh~#@8Y(uaBLAgSu)H^d;UC3b|@5P;J_;B*KnXtVM-YUGeR_*m*8z3|*6vQh;Dpl`tV=G_X z9$)?5*~WcWv1l0SQuPmAg~o-qW|A+88i1(fggFXZ`OrZtA7SyJ#~LF@vX!9-NCYZm$pKJA*_uDZSM=EJjX$7WlO zy4z2X(1xy)vrR|s^+%nk#^dgmM|@2Wdmv#|AF@_IWG~)hAc%EhzWT5m&MSylx!Vlc z1DdLX9z>xE`^iF_%pM1Nf*~cUs2WR1sXn689AD?yn6HefG)0!_!wS?vxr)d_<)-YZ zqCO_2&H_dX&zFbg$zn=WP@<5HgP%_!{^Puwe_?(42*$LbZiN-Ve5D4M;Tl;o1NjTM zh5JA+kQESXW7cG8KZ9#z;WVTze!U@)_@fZlU;_Lx!w(y^lRoyb^=%ya?cL(?sYQwvJftx1|$o1{iYR&0thnr979BrC#K#jDn0 zm7!3xEc&hTzL5(uvPNe{Y4SSAc4xvc?Dq)$ej(pC4p_9@DEbZMaywD}^WREQH-5hV zPh9ZQ|Wk#fLxo@#$}T{hh}i|HHncXLcMO*?MTCfB#7Do-2T> zj^RmX7%~|ek;%|cl&i+>iMp*)uObB`ih<*+NTCE3$XCEB z3RKju4zi-k6{RVfv4HbNO!l#SMahbqqc{->2(&H*Q-R}wyu^VMfND+B809P6q#Q+w zi-HzzQaKC1MCdDpz}Q;;82QySwEsjf!=or@QE7{w7j9D6^G7|PrN|l0CxeSyA z#LBGZ5KAg-l;t%`3!5dStZOn>Xg(?5Mh`RZ?{fA>el=%igFvrnrW8l6LHwCRjiYxCz<6#k&!$r&Gbfer@_Tce+%2)pg{QOgY?_VXWz$gZKhCvPoE|y`D2uaC#X*k1D z{ffL|leY#xTv?qKS{8>HWwrs}$=+Kt(m>356u~QlQ>QT@hmFj!8q_Ae%B&{~F{f2} z(Tw?$!aky7zVtIw?bfpmgN8(Sq%$|vx`bCbjAxU$pzlTJSQ@=Nmyus z4UtyK<-{$MSY|^*5S3JE<=5DftF0-u_KZe%T7x~K$&ub-PiwJalS_6FoHhu#vcPqN zhyy55G$s_2fLJ1o)XIvTF6FCLz)*uwysSec6?abYP>@z(jGp}FiXXL_3zLGHPI-t=ZOs&PM4GB}4u zAw~;Hqi~C(e8((S!vH5Lce!i!daC!j+8<_mpI{mv@>K0JBZ{H!A&d=r>W_h}yfug6 zzd|3-27i0^Q%w0j6K-}q2{*f@^>L@`z_gXAoZ_jBsXkV+S;eb4So9<}m?Rh!n$i z9@yaF&VqvueO@doD%oYhVj#>f;ERssM+lb3(^6YBfY_`y8_Z*fYk-T7L}O(eFDSrVXk3pdOWqPFf&$0ZDOnz zuNNWi7K77ic6!VXmxZ8|!{f5dx~+3=2Oc?`E*pw{B_{h3KG_dSS>&gog%7Y5UjO}X z0qhxAg@=m}3qL4vEy6(a6@{xC=~r0GhV^VH>=98QCkUC>@@bS*B9n-vQjtVFDG~v& z#>rym(XlHRuUt4ca{eqP2hX0paQe*I4?aBg=RdvA?i>8=?|%R5H{N^Y^$S1z$>@t; zJb&!inLP*3Z##IY|Ip}`1EYO=N4thckaLAJhK}J$(j442fn{@b{o}QpC+m79Yr9EQ zdr2#n433qyj$;o_N%I7hE98hGih<;`*C0xCDtp0mm02l4QNH4429t`%!B;4Li8z_f zxq_!XE{_z%tAv6n;=IZgMdwUK=ZO4ntAK@oY(tQQt5cl{S87!XRHf+b9#o*fn-r_q zJN`Z$2Uk>a0vb^|0wB>^mpn;|KY`+I5`0>6hyGrAUBjPP3%!d!g~RHkiPg#DwAMl0 
z;tDvoP!zQ2=Rnbm!WdPtI4z5lumD#Z1XDo*QLs>qpP{(=$Sh&5G%8OLl}CsRe8m-M z7cO)R5kt!5z57)xc7q{Zuaj6Nlh?LejB%L={`4)kCtHAyM59QS-34bDy~HkYwnnZ2Jkto|B3rPb-c;CqMa;{ONDV z&?x-(A5Z`A71b|)GxPde>JQFaCgpQVJu|KMOzUmf-C%LLj9BiD?<70kBwcog$7*%k zt!{_a=W>v8yL=w^tjptd<5j`u@y&Wsb3T8~&-ze{-mK}v8@;FF{$J$*?g{@?Ect)t z59j{5ZgjZO*W$W-#lh7gzT)5t2kr&`8eb9R3Mtj#D>JTyuk5h*1@e`V@)ca{7K;nO zrBhFv^(u!&Z!zdKTH}=5FsAgqKgm4({pp%*XOgSWAQ>hO%WHs7nTF_0125mkFLEXq zIwCXhcBYERFe7I>vA`WIG=~ceSaXxzG?!RzPOWpK)jLtZs?>U0dZPnm1%Gu~qa_nj zS7`clSWv(cBzDr}DbS#xL}8X6g#(N_SRjbhN@OeLW0i29(GOSs7ThPY)edXkR%^j_ zd*OB`_)0KfOYdbxsy<6*pEa%9l+a^2A*_VA`Ea+ikinWl)5~>X(@S#+Z)#XfWwM3Sa1C}0=hwVlf-GRRR7CCyj z2p>b25Fib{f;A3bak`oiGlst-m~GnGIXhU(gYPJ-&uyCpXp!$J7Z8j7qxt1dezN!|nd?n7T5@%G9qWwWm zzhcepB$FYjXo|3W8BY_)}))Kywfv-SW=o@l;g#*V|SXs#N6%OPaz*ktTgI*za7jlS|QbqU*UM$ij ztdVC_Nz=j4HPT$tB@|gTVkfBgsTyQeLer#v$4yvjT`V|Gm&%w~tv zYGh=PW9auRNdf)&D*$yAf7#_y0b=;u^u!eaGz8No1z8k%!=8x;@R|)xmbafm+wj~o5zCYNP!h@c3!Cq z2@G*%+N>5Awg+an`?A`+@L8ocI>@=*0~Zay&Y0e8$!fD_wwX~`9cB!jLC3&OJ-FOq zc&3H9eo9%aKgUEV3j=7STOHV3LxL*@Oj(=tX?@zP0b}JsCUc81bF(?4&y=ynn%Qql z>$PNTw#7GV6IyiXy$)zr=?IMKb7XFIWo>b1_Ic8}Tqz){PG?4!3qexgE67~g*xC&9 zRX@@%E$H;Y#06^>h6js=W?^C{dGbAWESN?Cv{G6O=n}$wmCh#sO}LFXuZniKat56lea^&ITV$;@y3P|;;d&rb8&qhIuer*rVPc9LAp%{P zK!dGA>v{5sEIq(DI$w=g8>m%@)e3&YbaeS>OvPAa(MV*`6@HB}yTu5;iY-yVQiY#k zVJ7F-TA)oqxdK6$V|g!kUHuCE$ym-l{6-vkTfS>5WF%?9)N-)z0J!7Yu$_R)~>`pbW#tR6W)N zgeRFJc&5;JGFrGk!MHlsvNpjToIJlON*kFfkIg*C&wcavA22#QBiGESbn|vQW49u@ z!N4$Xr_JbcS@5l7x6NAZv-nXLXUJNFyFTd;`faS3=HD292PIE-rG=@>aSFPhk zEfd7kPC`UsyBhcBwAW0~fkEnK4<4D~`1&Zc+UTS4via*FbH`f96m5;c7v@;v_68UU3hX{$7r( z)}@GOtxpr7KEhX&wOGpX^A(*{49J3@S{A2YQ6m<|SBu0eyaELB6%8Pze6>iz@*@^v z-V4Q4!lH0RS&K9Z+4az@*Ihchf!m3e-~HMjF^*_s897o;g@XzHd7wjWeS|BS@cDu!fpUNFpv(sX7pv*?Q$!IlW zyaMe(hi)4Gr`95A(P=Pi6zZ87z1qxNo?#yT%2Zj``Q~lTy3O;#I%840McAOtX*MR8 zt7CFzlF$Sc*kZG7aRPfr`PFEliC1FFZDS(yH1JuK^f4Hy&u*JTuodDLP&k0K1Rb+@ 
zFUD@c~^!Wcn+AxQ#wpQtXu@MBj$7)u~fl%g@UJxrZIr81U*D~qb_%7R$x13)=7;TSSfp@da3XygpvNF;N?lX-ci|mh><++nI{3SBv@`Fq!A{>QWnJ36)dW zeUnlEj!7#1W;_*uN7~quGcpW|eBhL?9qEz1;@UFS<(7a*RQIYX-iieFl&ft6ves zLT3;ogy10yhjW;9-&!nPwz>qgMy0uGu}0C5bs**4$2~pw*Gp z=uEG7lZn&<6S~qddA4vQy7LKKMI;!)KOC=5!O}FSfDc(!ZRl48be|~ z$SivIwRah%g^_9)myem5)-v;RK8JDEYj$}Ib~nChycXLW5DP+<*WpAk=_ig@i}(r; z7a`VPlAr=Sn6}CH%Ecma$SEoKa{obEH z{NvlFe)qd`Z~W%UkAHmei(mc_@$83>pE+=RWcQJ=q5Y#Uv_rY-7#@M`s&jZ8;uVFf zrojo=t^ik6-Q(q*H0o-!xaA59S7d{8J_ZKsM}>7`=nQhrL7F3)UNMEDoxy;pD}UM* z?GMsq2544j3j(oXbH`(`pc#dU+Jzj3YjB0YtISE7$v``Uev?%?_6hp+t3`ap0Ttz| z1;Z7RsR&}h97T&fWcz&|xl-jSK)>REihJ53vf}P@e1(G_u>zH=!1DrMaYnC2Di$7c z|522$$aWr9zoPv?jj>cj-G8pXNIC=n(Ku?d<@S;WVIW~q-_FP3E82>pZ}d09 zfTj2UXSwD7mcM=JzuTfae+OSJ60aa(QT>YhVVb^5lP(6R96M`Kbne{dutNy7xa=l3 z3O|=y4Ni;Rfpu(@ue28Zv_WC9>+o1DR~fZVgN3;yVorQfR@!;4q)VIEY)>w>q?KCI ziWEgn#^lnOxB_Kju|7WEkW}CVU&RX@vANa=fjX{4HTWdcbdU+k8Hf6jQ07WTdP9vJ z5#qEEC!W$|2VB9TjKC{6uP}Fp>3&pprzNM0=vR2RMMI4M$$%YizM0+Ry%=%_TxJH`f~Vqww{O9rWB9sMk%i6UGSlZikq zXjugVKB!nUpdhEqCFp=a=g95$pzsiKmayA~eT6_QB%LC$frK_3m~T13)E${G+v9{; z9kX&p!=|c3E{qC-vXCZBAEB z=oPL{w5{X0?vJpeklU~}LX(tj+Yo&^F5|bq`GA>rGBV@5%gwmVE{j&hFfgXgme>h^_Hpn!O7%!y~mi(5|B57-Mkz0ji)3T?UdS4$c^B@M3h zGApmhoKosY$hTxwUQH?XB^Uc5vkl4R)+mgp6-`AWP_984St?q=JH0_Dimx>#)R}qg zqL##3Q+%}nUY*1mBUa%cmI0&vaE3#_LVpm71a_}L!h#hCa0NjFE*%=3ge%TSwIY$B z?3h31{ZR-D&E4t*UjeI#X0_RzGvr9$Y=L+M6MJ%}A!~~Z=BxD0uDBL`&bE0_7T^lD zD}INK*J{R286sY_+9707zQUH|oB^aOcwxc9%pmkDL}LN5N_NiT3I~4CHgx)YaO1-; zhIS!TEPk^d4MFq=;k(Z1g8SMo>~QCG`|^6c!cHf2EyOn!4IsPei-$p*ai*A}F=#*wyCaO!WabWG>`RAgq90W{r$f6PlGN-5SDI+w2(w z_Vj*FTHkC!vp2lb{a}{vfi&a2NxGGZx>bq#)d{-$A{EOvP292e%I&MhzVP4}5bK_e z6Q8;FgPU%9{r{|Zb4A3N{C02CZf5gwrspWru={G!W|Oc(g_X?_1y^8j+9()B!LPL; zYm9g^a-?h1r-E{(H)P2-WzVcjSMaLbD`F=$X3eb45a)F>EBPY_zQzO>Dnd&Xkrk?t zqG<#~1Q$-n)tDm7HHmeG_*xBiHzzgE5L#yW3OVigYU0aE5esm|YL#TY5#r8}7K=i- z8Tg9y3irC;cyB$%z?4DNS=uz5T=&mJe3+K}+GS3h7>PV(AoHo51$ zuw)ovbhmn}7PkYQSfXFqy*{Voe}u0%yZd$h?SCs@&8RgTVsRsca@eoX8Vum8$?-Ap 
z)i^qYmqsp~zj*e{g;S?4y!XN7KfH70)iK#^AiF>6?Ifh4szl9TVUy*xCVCXbZyAPS*=k@hYo& z9AuSQMWWg{b zMfr-NNT5p`=Rlz19tJ8`^bDY)a7D)h$r&K@QJ`A%kkZt3Z~8$iMMQxLOhpk3hgH1s z>%tXoE`}>0RzMm&JwwH!`qg#$in3N9Tz%vWIcqUueT1(7TF|nn9gE{DXyYUut5{4C z3qyr~E4bWY!2(~gQHqij#AH>7V8O~FxmZ#`1DPnyX_n@<62yYt9f*a{hMInK2}MAx zwq0T%)|LaZt%nsmkvn=)ao|bW(dXqSzoa0E4d2#$^ZVNG{Y3kNpR0cI^7Jcjs^5Cw zeCo18Wnhd}#$;y+HF)`DeeRE^j>DG+?V4WRT*>t8nGDGjX|^>@rVw zi7&NqHX+v+C-fl2F+SIokWa!zVl+_jqo9cHXnNo`SMhjLb@0mEh4y}&BG1(O(d zWF{bu3V{jP?7lzD_n~k`egc>Z2RPD!Rj^%U^*dm@%I^#H3#FPA?)u%XZHpp_Fjl z7GyYJ3y%wwg_NhndUfg0JQ{+4D?x`dr^6>~_vUxc=l9G4v~V+Tvky~;h;Bf85jpUP z9tCJ&OcJ1ljQ574Ou=@uU&|V{lfD?@RS){G#u`?3JO{QV{H6FbigwN-Su3?qFWlkG z-RToec>0f>TDhRnQc!n{f{wKJ8itW@u;G)q?#Ed zw8yhsa*%elDQ7CI5ac`)oHu=c@|6uj#p(=6Vy$z7a5}Qo7@DtIl{SH2U6(x>UaAT! znJF7)!ppVcW%{*3SyZ_pszOI}%VtYLoi?S-fUhGWUIED@yLExDpi)vr3qC7ISh%7^ zNEUW)Hy7`)J~GhId&Q1OB*v#S7%(-7 zWdU(QTYRG(FnqUVAp<+#pT)BMV5Q_poT^k*UFbR{&T22PV3Q$2xaS&@@q+#sIjgMNT{5s%L_OE3Pe=M-5l3=L%6* zta3&8D!pQY^aRT%S*ulmcvT`!DiH&#II@Clg|;9iD?eWm&lOpGLwDa$pyIC7bA@yT z8n8e!tXO;1N7e#zh(&=41QkeGIO7g9tJVCmH7Vm5?xRJA`&K25`3LhmI=Xb>9Q8JH`oLoPO-f)6afG{iXjPhy@kv2fxt#?APjFy{UWSZOx@ImrOOU zGlQ=fy>-s)oU?mo?L@vJ1B2um$XD)pk9QtjLR#M0In>AL4}Qv5|2$6PIO;}+rF=#2 z(55Y}f%y!J#s&#t->5mrI@|sIL5ITzn>&V2EncU^>#(?-W{1OU#T+4muZX+dXh2~x z6NU=0E<~m^>OIW3ih1HI6E*GUI>WyigUMz5dki~Cy z@EdKhl{)BG1hHy$$#pv9CXp6fJK2Pi(XIzyWp)^VSnx#)Fay|S!T=g#sYuHDW*d5T zxB{y%7?^=KYeZUM%?<46;I1Jj(X9F%xB{+#SZD|Wu|QVwtp*gY-5Oo1g<1t$J17fq z#cw0#cHkDm8PdBDhfd%Me|`px@$4TCwgC))tV)Mx0XYP9AZP9HVS`>$lZp*;x99hg zhz3j%rZ-vhy5K|IF_D>$~pfIyJM1*%N%G4nh0Sp)XuUR_$hiNDzb{VK7;9b9hOP+(b| zZC;+BU7n!7H%7N2Qhk4vW@V)2o{iGat-Sb!wO2mB>cZz&p1*bF$n7h~Z@X{w*5xC& zEWh~q;Q|Yz3t|QErN14j)u7q-gk&1HKoeAYL=n_K03dt2A%N-G; zp@oVK0&(tUW<#!YRptZ;YGvv;iP$R92IXRAXv8Z)EY{_Y<{;K>lT|od(0(Owg>^YlvEXb6Vv)^~NTt|if+4Nx zsCW3QOvN5^%>jGuK}W-3Ps1U6A;{DKEb-VJ4ellMUTTWTx5NpdVp(I;NX2AW!ZLIl zQk3gblxz6Yyo%X%DVmT>Go0{iljM=vs^lDHMAGZOJIiQ1j8yM7yS$j)CaF0109?7P 
zh;9I4x!m@Dup7AaxBrFy;6H<}G-|CMu~aGqVUZ@GQb9r*6cUtFCSnn53Oz!KSP-(1 z-+u1w*ashq-+o8_)0ahntHY1I_s|oU4?jM+=fu>uBNIc1#s>~g_8*w+*-Z>rm=^39 z7Pak|f~TFr6>RO-;Hqr`a8=wo2~T@|(*)J5U}#_Pv||YlX$yj^C|m{d6~wDRvLZQG z?5H3T+R+(A(LFeTE96{-3C77*!2r#Qo+)3^04s`E)Vm(2^Kfv5oAmrw`3l+*hgje% zS{z(a_37jA)gmox5nsWA1+v1Q_Hp>i?}7h|d;=UWUseLKzWN>A zw|=NYy8Cy3P6FQl>kaiAZ_iw~;+WAg20LSNG8PwV)`V$$ADhWQ_zLE$IU-)U826`q z_0Qv^zx_|X5l{u*|Cn&a+3Gm)iePoX2lqn#k=JL)R;=?1KY-&z$ja<-nGoiKj)w_y zr@^2ynr0kMgVC(D+KmQ_PG`~27|ePXb6LS0ermF!>umcjtDr$8Y;Xgd;!AW%6{e(8 z8#pRTU`j6WMCO>Vlo@Vk43{NT=-_)vt#!ip0#7^A7?SX?$qI17JMxOeb8E9EVz3>r z(FC}{4wu9xTSB81y~5-sdwQ!YquoiKT&GW{QbGTz-pQo57;{1y!YDlIV^-!5irPrS_OXSvVtyffn*YVP!`YLvn{Xt;dd;!K8LmY?C3r z#gy1)NoX|#t_rs^;43Oy;Q;Xp*Q6FxN;}^A?f5H@mlfYM({!9cix4fyvfaMQee*DS z74LMzPG7py*M5S*szGF;LcbFBI?)G2)`XzlRnR*pZ1-iidNP|`1g<)~!Y;3%)05TV z%-=GLJko-}S!BPXFpY?P(1_|ntVG#fH?{&H*s5r!t#F4G@n6`%2iPD=*LFAf3QU5} zDu3%N{(Jd-`s{7??CoBDuPeII7FuappKpF3!*EZW8v4~Lp5?wM{j!jmWxIULJPIwMMfZ2NZw3znKrIc7f~WlNAzfi0VA0ZvWS)iX2uR^ zv~+uP`9n6SSUAAqUb5YRRgyxiK-vwm1!5V6{WHL-&d09S9I)0MvNs)dlK`wQo^ zSmHAwsOk86O|Nbot_iOP9}`7r*jF`y?%qZr_3U+4*r0;|vv z%&Z!NawVu0q2Or;SwXzw$chS9G&_`6Bt|8m)5I=r_5)S!B!bzYRv|kSMS|IdQo>V#VJ?xz_slt zSwU!`WW{=@(CH%uCF({+pAh&o@D+CxXSRqPi~jjIkXlgBaG)M|?$ZEbLCe~ZF6P85 z9H3tzcLfu{!2%gtgmAfs3M5>CtYEdGb}X`*26)7NIQAd_v$SRi8(Lx{=RIFHh`S3+7^%q!+HNDHZ=usdROz(uU8f1cId2Wjg zh*i`nh1VV1bE>vT>jtDvTV)-)r@Qvbdk)I_k4m>bEEzs2-T$=w@U!w0FUlW(Y5JM3 zE5Gy~s;~b*_3fXi{__`FAl55y%>3qU_32A?shZI{7=sg_H3!79xjj&^$P|mmi?kN_ z-aT$+*2j3gjN5mWmTzvJD^3ida@LLB)&uVcVgNk@F_zxse(7%qkpIVj$Bq820`CX@ zg9o0uuNM`s{!YGvYz44F_!amHok7Z1W`z0Jy-o)bL>*4O(~H?bvR+VY#DXT1%4Cx1 zy`$62k!LVNEos`}3Cg-aR?C%{;tRFOW!l7IbwYtL0fC~hwUEJo4HPPxD#@!dAjt}m zp&%=a@PoCoJKWLbO2jW@wa!Ktnqy0?@%7fYCNm|gxO#JJo!^d?-RVnhc3|Bwzrl<~ zAv!x~t|irKku{0{a5nwhi~&8kn_{Y`^ZLEyeYxEXi3y?QsW^8cLDUYE0(d}cLWn4a z{!nRMWGf#{U;tbJvB=C|yEU%aNGU6>!GLK&s912e1F;BMwW8JN%^#Xe@3z8u1@Vfi zWSG%I=MjX1-OxD$*!65H-!qGXwI0F?AQ1S6`9b81!p)wALAF*$cB@<1HkaMv%WU#w 
zwZMzz1!AEx+nhp}-to-MzFY)5^uvoaiz4Rv9j?j)zQXNxbOW*17(4jj&O-OFWE3o9pYT`_v=vMaaWH+Jj27eBw^%;)ZX|1&Gz z{?~hdck8-8u891wc8JMu^QF~Vi+ZkN2ox`;t5Pmv*b{nVSdlceNE}uoMZ6WS*@(fw zB5 zbQFz36f{Z1Utmon{!8(%3JHr&7b4371B2izNLaAA!{?9wAZ-7jsp|dM=&fox=BmRM z>_gW2BlhNFo|Z>un;)LT^kvbo75&qwMrBlu1{)Y7^Nk1$iOF_FrrN^!=HMhvP?9n< zLmi%_rMscAHwPP{!%~ryWeww*5;Al#X>TL*O*PAiGzQEH!u;wo+b}t(HR#-o&*ZiK z9efqo$`7EbYsbHvuL3EH+S=(A+_iLs(7%OHttAqcT1~d(sFWy#jIueSGPtlvmyiu= z5KW0E#zj|P!Wy5rI3oJf`=VdGGXAx1Tzvf5i^rcBJ^GY*-^t1CC&mYlUD@)`XzxK1 zb=A3jqJ8^h+jeowRuQ7E09W<>lSsR&?wzdcngm>xvC0)>tHP#<{Dmk6#Iu8};I@Kn zg)~tFG9ZnCDpy>#6>SC5RCd4>8iE{LQNBW3kZx_JJwak+U#O4_Ix2__&QsUm3IXgX z6R4#SMO7?HT=Z81=?iDvq@6+V70TZST!{U|LqF{-AQ0s%7KE-(R_OiF_8)~Vj-~=1 za%UE|2weHeil$=)bP2f`!$8F1x`gNzeoVfiT|!(bU*QVA!b1+L=zt;jFamR$ELl;? ziYbr*tH4)i6yl0ADYj8a_^JqZSiUMD>Fy}-6*0YIuCPo>5UWBCA&aCp)W|XsJ<6(B z*GH@!vbJGa*M5255!v8z*^WnLyC0YCdx{vb9{Hl;$*(G(|CZ{d@2bA>L-n_Qs{P)7 zsek^O=H=gLe*2E@!%GhF3_&cji!nOpt!~mN#7|W&@4U-<)#JGezH)g0SD*6L{}yZu znVzoH3)~rgMcG_Uk*cq9LdHNg5 zx-FNBI@IhoI(u<1gLH0Tv$e35eN2fFeM5`~@+v&ZRUYiS z;Z<9azz|=Fxie7`-0XGwD8yIdot@tXU%EL3PW4tj z-l-8{N$3m7p9_UyKs*7Jig@-e2R;n|703$Hf{@fAs#L%#a2I|FUU$S{74KlcFfg|x znE{#@h6iEjD%#~kXAqnIfLI7GE#2jTAqx5x_zHuBBonpQ340az3RWvY8jugx*6NLJm?H!O?S-4m@| z7NtVn6E43qL~=*4_zUZ&Kv{QglH466T^1z0d;P>6t440U|H9pCuH1V6*_)Stc*}if zK7aqIf4%2VH{J5)O*jAQre(j`m@iV;3$iR~4!Y1EgmP6ntOZ%sVPoS% zw#H+Q))U^g$L3leokQGx^+WEQU1r{9ZG4A1u1O0O3u_8vbG$K`zNi#W1m6zIicGUa zq!~k!^r1Y&IFLJ`DJE<%TotM3=Q2StqL74Nj;R^LRYvDAphM@idpu4zwjQ{y&Kr@P za1Dz-a=NZ^#oheKQzgdIyhEtV)Ir=(+J;>%aWSEdxFFGyc~ee|WTefap3ryhOs%84hXhaQ*g zespTv!(&^HTT*1LkH7n|{B5RuSC$LYDYXwrc!m=AwGy=wR#SpBZTzzEU z&C-p`sgsMzY9V4BAqjzG6-ZwkSplwSQ59=7ZxjVX`?cbYui=do>lACXB3Itjx+D>< zYZ8f#iYrQ1fme>BI0*{}+~fd^KExe=e=s0=)K69mFh}wsEUOv%0Eb$BO?vOh3E!CSwiSnGALINvZ$5?#6nT%qKHN1EEF2+#eCc~LU;B~z-+!w5{x7vZepU0c*Ho|m zUi1D%y9A9wC)t*TMxoVx)#jOZdat@?8P6;O%5u2pz*k6aq52hWqBsc)2l}o}-^UT{ zf(m?3UvyrI1fS|xe-pYb{RdonXVJeo_wW z0Jw^&C2+;@6(-A)n;dbKhPWy{99cjt$XO6c5^MB8tb}TqqK&C7wuD+8zsZ7YLaiPi 
zXswXz(EW3p;=^09xo3wjz4emD%CS?(zsgV!a;0W^YbE z{xlzO4lAe8c7%l<{Dwaq$`uODK@3a6)gM;+r zblMycD>B6qm12uXwuB~{L*wk>2~I4~iA=SwjWsJVBze_;1Y()5!UT(; zhFK&K;*}PrEF&zAAEhIXp#JLNqrdMXkFLvCfhkyjp+&e>2Ti zr>74*0=^Or9v$sFc(MDy#g4sK2w!cTY8j$%B_?pyHwERYs(T7>Ro)J_6{ZD=a>aHA zuWi5))=H4j4!EL*_64|NBiV~5Xlf|z>nO@s)MUlUR#2`eUvY2+<%&xUCHrmy_=ZB`_uRcOq0el6}LV>LOaFqbNJK?K9rlRKu6DLrC z97R!z11kJ3oVkZozgkLJxKD2q+)AASV*LfaLYFX*uh1x@y+V$!=qw>9i^3JhR|KwD zzTyxI|K>=?qI?yO{_KLBMOllgSkwUTPsf50t60YI73~!g6{|!7#7ZiYp#l*Ljl#?Z z35;0qy8A=1x}+p`v`<_;C~n#*ZQCd9IU?PBOfq;vyyKCnz4$4BHDbZ*?nkWes=oR| z)wh1E{{Am@KYi5*A?vlbbRS%Dpiv0EqFyX$S)eS3XC8 z6I+cDelz4Oj27a}7tlo@s=*qJ6%3`Sm~xbcwRPc5Tty@@V)4=;L@TjQn^32MZb@uW!zxc!Id69Z zs|16V!d>nHEIr(X=A9cV6ND^a4a8z!fZKXod;~Jp670q9^jVU(Mah@LD|Ub2Te7RBN(LD^m?Cc-mzNYFwA| z)XU-(%M#@GC8_QVm)#XUeRr7bt`HGIv6hETtOy%j5pwzV)u+F(>eOwk&fK!{^k-xTpS|z=P0QX2N|U3ON1WS`0ySU;pcR#;3KJ-Bz8%7 zWdX4ejg{5`3l_FOLbk%dpcC_w;4Ab=(J&>40W-WZZ~P%x+~+hu};1-L5Pt*hK; zB$S0l;SmR_4o0k_-iG6IH4k}<_nI>YRmq(yf>C1dn4W}nAun==?ZHpd)Bxe=Aa zvC&^;szp!We&eD;AYb83ZS8@4#i>}FeigWUFi^tc99e;K7NsnzWzhxh*x}A17BRb1 z$^u-WW2l%`OC(CMNF@VDuOwkv z(vWN^zO`|shcKZ$L@0+l3j!ClY2jfIdz0!{e!gNUi{4q_D>*0&UyFzwvac{YU(SIR zXMp#23>S9jkXYkl5lL?-CFxiRMbgA#8Lw1|qD@1L7N#H?t4@}ViNZ!HBrKBH&><@7 zl9cvHs28h#tGIcWv;*6p4~qtli+7xmV3v@Cjy^Me;)Q8=-O-?V;oIt$zN7xy4|L!D zss0Bqn}7VO@u#n8utVq6WhW4e@Rb{Ig~L^gYu@g?>YBak^!fRUQWn*(0#&R}^{am# zO)dSCm(tYIJBzT2d-%!l6*5H~7J^tdNLEh36~|Y0r&(uKX-r!5Jj63Lt(mbnnV-L5 zLkwW$fQDZ?9$h?+t#j*Ao+VpxRvD_P1WrcVZS7$+iucE8W5#pwdhgz@_nvG-yA+f=eH9F;ck?yX~ z+vZB^G30EuAqqYlnc+YzvPs#F!sH;-6-Z1aJG_W!KsZBYrv-z7;0i3Nrpgt++77b1 zMp=z+4zY52<|$tZ`g{bgSiZ{Licvx0!YbVb)^g?#I>A?DX0YFeYzj=n!2w>ik0}|x z3cdma^1F;kq{n9qEh}%3;kVCjD!>q*@_}^Sy~)}YNt)$}8eGvXT%M%7H%0kins!;d z@~&vb9pU0T!Y1wv9lJYtbb08NdqYR=4;@_*bouTLm+n||;noK)-15MMoA1By*_CI% zu>SPDk(X9PT*NgvT@jtD+rXDB55MqW+!zk)lg0dUM^@b|^vvw~xr)sU);&iR%3#pq zRqGO~h>0C}q?-it4H=4{ELBjZc75v1##Ci!wmK?D8mb{g^9+7YpkM3y1WWA1vQ(sMuo!UsWG41F^6X z8-=|>NM~(4JllGFzUHv4U|5qq2)nx)tR7eD1Y$*Hxg*l8=ns;DBa7z>O>jWG3QEwS 
zIT)JeU6-t1$Dax1tAwT2O%cD@e`H!|Wz2Jo!N!;^K8MYNIc=BI&sQ8f(V-LW;a@#0 zf-5{+#8=lvtUw7X(BdAbUvY31cnAb7oMFrYVo}P{V~;yTEOZJXWD!4>S}s>hC2E;O zCzlu}MpdUjRDJt)tKlb$HXTCK3_;bob=)WAr*x)2q;gB>2Hl!WR!RDb! zcEcd6T=kIM&44T7X=mXIvK2g6Tw733GsfE5;k**VaK-UeN(oukOtV9|@xVa7iiDLt zfUk(BowZuw{hdoC9EzMNF5$4K>wtL6LlPL>cReZtVjX@` zar8OGBQGkS{L0L;-N@Ll2@ax7-C>)Mw)0!nJ*F9Ryod;0v|8eiB-<2B`#jR zHL2K=QtwJ{aihJ2-DL%Ni1Mg2p}&aW_Qgcl)7VAWu3nIWQ3gYfpaYJ_+r z_KFlZ3W8NwsUDO2(N&hBCRgq16<3h<^*tiydn ze2o^Bh_rS#{EFs_f>MB3(1oA~)g57IPcVN6vfmA*duNfCUbb&e*pGL7*v_5MuOM5& zCIt9FL<9PR2p1(G?HCV)NCiWClP|r_L-i}P2vKQ`ZWNqpP_eS$a_?|q3y*N~tgwGR zd-H5I5DN=*w$Ia9LRhUT_b_<_j_Q33{tAd@NHE|v&usf5v*kG^wqY7rh0hb-d$1Ok z`v^AA^IN>oukKBjuS`+j%O|xQh?T6mH&u;wgb!qC&^o+7O|>jhdS}$coe^Vqgp7V+ z{lz=hUAk+-$jY#(`$8s`Z5jb$-L>)ZJ;9gn4jH*C1Y~vT=2h?96@2#o=qtCZeebq) zr&h&G+#hk}mIvSe%)Rg2bjz=A`s}Yr{oBjXzM=(sL9;iyNY1O&AjttXEubbmc)>Z- zSe>&mL%AtUwJ~*OL$Z7WUkX|a6RIN%*@?me!quSYm`ZhQwJNDui-|&hyBfwe@D;`< zu~rfi7C@_PryI+X(%O{>9W6rO>VURtx20%EUA4zpy_f95MqyWD`Cd!;9xKvE8}@m+ zk1=g0+*L;mr3Vbyi=EV_BR?ccXb8*DhNS9(c*ac$<}jXPZKN8j$yX;z zLo@Y3X&MAOWE9&I5kL|9%9p^(NN{qT7I-tpeyiLM=!o!chS8EhUHZ|EOmHw;cz zcTd64j)6hsh;kvKG(i;8f}C;%Pdf@^MHdSqp&hanfvbQ`%`jY{&<&&wK}3cEs32R> zo*;Ewg=SBLWQ_x^NF+Nu8%TSCG2B81Y&90=Wnuc&y%!WD>% zj1y8X7D229%JL^-(P*q<(IUR05*CrOFjrV9Nve{7uhQxyXc1;LiG(ep9CkG!B&@Pd zMMbxwX0xnrP=f8x9eYK6ha`g!Nq3%53_l`2__+KC60x3BJo=^Sr@l7x%r|FV{EqI$ z|Ik3lg5&*rKPO%+*xg@yYv$|~OcY)v{Xt09?yF{mV$G6*ipB92X&SN?EY5}XsebkU zKfYoo2$70t!RQl-y~5U7tIec0>x>TbjM1Rbxy?@I>?P*V$q&2tj1+VlBMP+1wT{#} zM{JQMxy*!M_V6?<_$prDipz3gwmmA-kXG(VuXLr?5L;1tnfHsz~>3e~}dT2R(np**xi zn^kpu zaweuyi2`4ta(cY5*@Lg(+JYY*iWVl<(2asG3$t%LIK*0DpJr0JO{oYA-R8m8!rX0k z09D^}3}h?lR|UH;kLE$_6`FUXIoNL{Z9xQ2VVTQjE0U~o`q+gyZO-&Yds@8{e1(Gl ziWc}PwZVm=h?UmjOl!4gbh>l4%;gPS1zfRRLSU+|bSFV9^vGI|Gf4VsdzefPX7s?| zZAtCWB{nJJ>!%TZgS>rtlu(Lw|SJLG59@K z{*ZPmUa{+M$kt}S72SP9;EMIMqag^g;x^!buXu%%sJQ%bRCFFGe`+X{tDFgruc)CN zZg#*G!q@}hiu$amQ=N)ez$$9H!u2|QMVo{0isKNl;|6Ei3Iu3zok7A>h!15w?Th#- zkg_OQt%~gEGW!DRs%RnPFd?l 
zd`0&aQpy5cQNF^CLipYZT$RXC2`DrQD`ZJkq(zuoE6pH?B@#4>h1gEmCLy7t9rChH zS#>WYELqbwN$aqtV&d$EFWFp+J`qp`$0iJoDt&)u5~w{zLoa@9Vzu zeKovTKX_U9%iri<{;le@-z(3LInma!dKj~Zv3TbISB^PISl8z(f>>Aw?Q{;&AEZuD=3`&6w0?p{~6LMFps)vuPeA?QmqeQ~Cq z4ZH}m>z!Q=lhy;5H2DAu~8id_~$Y$qPxl*c~c!x@=Heq`pjLnGCtlWOJbjV$> z#hlw~z~bHH26=9;Bc@CNuT*laJ+|DGSY-!Lu?<1=1WARJ7{ZJ7VMRKO?XSz0(|jb{ zK`J;*(aH6W$YRynj0tQOMnfv1L__FHU-G7xOG;uA(pJ%;0i158AP<^EK~stnEjyqT3rs#U3) zRjH~qS=vp7mJKE5)rAJs%53eusWW#aNN$UmxHV|xb89bvuReS4>6`EU;I@@#?p$;J zu65_`TzmHRwWsggaOSQ}7nX&LK;wc<>-LQoZd-ru&ftqTum0e+4QD}FpIh_czbt?I zrn`QB(>-r}=Dzno_rR&oul(@l`~Do3J&o-B)HWqP^Nda_8oS^tyb(m_7=!uBwegdA ztzNVbFpUU%@cN|DptN!53<+f#bbc|gz-!QB>NBB5x2bFbv9U0}VcStu%_IgY@ufi` z76QSLUm+X-VIqc<#5I)dCTbNlD;!GqTFdv_u?4$gzY7@-=pMEl_qIPeTYt<^eb`cd zz=92rnEK?^PbXBVgu zGhN{+riApF@VMXq_FZP$#F%GiF(Ja(XGvTRV|OqnqtB+XTjv?ZW(Idb;X?ki-R=W- zAtcL3h|5Lfu0>>phl_4rm$kU>XsTaP=hdPs2UoO9xb#Y8E_%hU8Z9P+MXxfbHF_;@ z3mY7W3rjm=GOG-h8JSX|(&<%N_lKvoKmYmExBq?O#pg#MUOoDp`0+1JKm3e*{|PaP zYTrKz<*Iwv1fs5>Tr~|&Hn6*Is(U7&TtT)f?U)eyJg@X` zZ{j32m@29e2-JJ;y^Q*(_bws2=)GfOuyMi0#=Rv@oX=UOmrebW{F68~F&!aknr1W_ zmFc~zz2E)3kMuGKJG=YeBzx_C;X2owXGVie1T*uS``qU~sWZ`_b4LHTsX*#ir)G6x zYoFb2g9Cty-sG@~f)=GO_qN~$j0(=yjF!cEu0U2OYFVdrG>56Un}92>xI1x}`CYM+ zwQ&RY3ihoyqG-{+A-Tfz{=k)@7l_LzRpygAfbva~l1d-&%aGE7`D#u9rTkdK6}hs8 zD|!W1aYgkjFc|CCnhDNAmyq}{^W&`D3s7mIy8n9 z=U7C(BlsO*4zppT)acIc(~+f-Swc!#(yYb-c(IDx2eCS*453(9owIGAan}Gs-H|xD z@W9MESedaC23-2jjdVlzpZw|lmE%}=t4k5JRqkmKV z_?N0*{C@2EUE7#}v9J=B5uc2h84DJ28<5@rJC<{bNLa)Gk9Hx_u_l;_31)JNnVe?G zJ3Gr)6I0U<)|~f!iVA;)uiRPUe+2ye1OAZiKa)9U`s4ja@D;n&gG>(UUD%0ZGs26d zH&``li)z%QAJIEC7Uta#ChE5IqsbCcBT1~#qU#r%Z$PgnGE*CqV?aYNxxkgcvxlUr zL(<03E=(x1h8L+LO4OjH%w`)h$@4b43O0`yZy7JyVlUZdDcNRDtd%sMU=nHv<7yPJ zd}C%mwb2<YArJUVjGa6SnlF&Gk)}f}{0*xqly8*L%`~xK9v;6RQ*&$cSAt&GpReaC^xMKNg z9Boiw6~rrS4@7~lkSbrgdmM`evpY;!JBT#S^aev(oiVM!f;6zqmI;s*8mTzMR~b! 
zjxX}x-DX7ZGP~P~r9TC`(KK`{PaXtTtxHv}O&aq~ndPezlV7RPt5~z9SnHLqTb-p^ zmNdLLYT&V8!F=DIdFyUJ>Um?qn(K?#-SY6cv&`?dhwrVWzBfDqZbGeEVsr3?ln4+W&i!gCcEQ(6&uHMV4AS@0h!y2h)vPA8U+WHp=6 z$qg+XOy6ilt2D5DAbW=mh!x#11lwDBmxkKi^LH9ybSKHH`w&ew#_QG;?K6}dF!2u& zxGFnhE>JujTuNoe=*PpSKAJLZ{Fd!8leMTTLvQi_8#$roMu|+m; z)cORKPuf^O7R*5pCEm!0sEsRaK|BvIA|wdvG;fmcMNFD4m>7XDhuF^>4g8Nh;PLaOKWd6tU)z6*o4xKE9u;b@BbA*tQ_YS7b2I&BV^3)Po22hq?2WSFD8A zJ<4Jci^y4DP@GwdidxXM=I|AiD+*U|VbKCpxfjP*99iK2$2->_qai0Z>uCdXTK>J^$pZ|2lxpVJi2V|1iEH;a3)}4jKi#J;fHk*sK7>l-ILGxI}UPnrkBDF;cszJmk#Hi2` zHP)kqmFq%Fbws%;)&Z;hbCo`1FR&uK#28Vmhao4BH{zW(fC69PS&#M~5`$?zJcKMf zsfm7-GX(uAs!+K;xi_k0D6x7hs%#*(axk?)1r@9L2t%~2is7UhWk{hUp+<>6Bns%2 zP&=B^q5)YUZk;5qvwQ_H3RWNZ)iDnUvf>}Kl^(Je>@gM}wDEQu3in$oj!%&O;6X?6 zJ{$BCywTGt+-1S;>HMwchGR@g_c(qCNS-gks=+q&2d&hI#p`6y6Qs>SB!i*QGtFpo z6l|O3ZJx;PvS)3?_@N!nEAUm>z9}#j;0gywSQtyhjzVZwbd65KNd_+WjEx%fC2@r# zD{ZqHTXL}RhqseS>ta@<%2vh`#NzmBU5?t1Z}u%Sttpw|E6;58^5l`lF|viBqJ;r{ z3w?SQdiN||cV~(BZSWNfSGSh>-CXKVT7(M%uP+I|y*^EjCC=F3JkRgim)87wzTXYF zvpja#2=*n+mV7S72P(V6- z9iuQit&S9XCG`a4^ey)Pd2zG4W~(zIzi(~AmGELQ79{!Q3DCpckl&Zos)H9Rp-EYH zjKNO%F4rJkx=Gl6?QU~9<){+bERz>D%`6FWr3`KRbVaL#JTpgtL%uW6ijtg zS9#J@d&XXIoB&wesVOL6$h&|m4=HW2d}DMWj96x*j|OFFgR=BtX_kmoTUe4gC|={A zG!~Ge3C^@F4U~ta+GDfSL2AoVjFe4VmYMGqGwz3Jk#-g=PIk8p)tDmRrQxLDv zAG9F_+Uc-NU?a4N;M0GZued2f&VPl2J73Kqs0VM-2h;oYCIjG#ydW6haM$S6TI?Z3 z!w}T7qNd%o||<>BXEmOuUS&=b!O9zHADbF6>+A{93eii0K3O-wr{iFifzD=J~J5H&S1jc(!Rh*!AtpM3SdhOdx+MbI6ICq?D!QA{ zpbRS+iYQek*W1BYfUDpV4X0fB<&1%^He?P5@KoNZ$OIk>$yW#D4sXbitxXX_!Hz6e z`eX?GusAG#&@W4b&=nw7WWgAG0*GTG2`xNvL@{J5WoqMSXrVB=RE`2)q1Ty$9EQq~ z@DdpU;XzhdxRczdOl#Mqw5l-i$L=xIf>qGzLk<9$2i$K5SrzZM5Wd=Ls5&`SbkI_C zz=}iO9wWIExV6WKcW0Q*aE1FCcA(;&HvTSW?j~bKt3Id0n&0WjZo%ZB5iwAZupn5W zEtpboi>oy7w=<1L7`&%vwj;kC6PJ?(+b0WmOcn2%;_sb?gaxLe;uRJe0X~F{7Wr9 z6=qMqc6G6K6;HDwQ@J#0cu}+ze6=XBf3aWx5}&@M8+tuR^#HM!LcaoE`Q2O?aBX4a zwbkkU7&=I(vq4&%A9(E{&yOEje{G)kjRh=LE#3exR^L)z@=ajM@(RDTCgl1kf3%**La5u7W6nwQZc+e|y7&gE4DL1yCW)45YBoy7rsUPBR 
zGN;y#@H(tP`J%Gj3^KJs`LgJ$5e$K*v|#=BX!b@u871h(UTiyAP`F(O6$^hIc{^d5 z(--W~74OI2k%i-{x-;X&2XvJu%wSEj*DdgAPg_h0EP5cGN%BXl(U{Sa6u+4i2UmHu{eY{S zYVg$zT&0&0xWb%Za)}_hcxGCVdahy%g(yl^(Rl(Cfh%_R4c6bVB?esG*BN9z?cghF zU+4IWG8K1qrzY-9;fiA_?xy>J6R#*}J;+zyaeXMNR=Hm(Yk{vkV+6D)Ypuq#NR;qF z#B#@~`_CL);XwGxUBz> z)T{-*q84{n&YE$#-CvE2EJ1uddq!HAW}A{HBm zHDkm=;wZcQS(@K6P}nBncMJfrDmP2eB5c?xfziEXpQ!Voc-sl-?#E>?xL{NCv6?~ehozW$xD4}Yls;m?&n`}N2lK2-~3PK_B}0LJPf z-9QpLN+hfqzQWU=%o4KVl?&n(t6#x~H9j@*pW-Wb1?I1u|LbJ(zvDanl^@YRP`|Q~ z&CXV{(Q43IjB2Z1ZPTEP6V4GMbMjdQuf8vTqamqApH*(oDTQVQL%TMHr-{r`M`nPm z)bY87lp+h{tms^I6i<~>Y00RwCsZ0xm<~&6R70UkYgM5#Th*Cus?2s}X8UM-oj9#^ zh`+~{(o8yW$=EqqXMn#wyi`YJE3^WkT%o{MzS*M)_YTMz!!=D20AqZLU z2E>)?pkLt$9*~3lmR|4`I)-8Jx2KEo><%xG2lGT|4mKcBp>Y)By;;rLl-kjfEsppq zMQnvUs}(IO9UhQaJDfuzs<0|rm)5Gxfi1jU3Ah3yK@R~eL6|B%Xe&SLC_7{;J7VV_ zva~+MRGn~@9JEv(cjfKY;m(r%W)zkK;*rP_$7(zzUY757R_>oF$Fk<`@xran!Y&so zufu`nDT1)b62?ZDy$Oz%?POX{FiCYL>;s|^?%*r_Zl-iEgZ((*D+pE<2LW16TtQO^ zFGYc=kODy?&9VC!bdlIR49v*v(!!Ml3ls{`UtW3T;Z;|^w(8ovH8&S~gUU!q>4J5CS-AH1 zX(jrvF8t)7uUvWPE1&(#yqjNH((};bPw~mxGvPL*NQ{j?yyALXKC?PvAS_4TabPm1 zw$CH*qoA}u)pXl~vv1e$VM2LW+i@4|VBbt3g5oifiD}mCRulfQLEMAnShjx>{aHl2 z<8KY51h^_h;wXz)P_ZChbM*;HEtzVc$TvM<9r3y;_U9!g5_?+nnPLc zp!Fb7xj)z)v8d4;=bx86 z{@lQcr$h(N^miZW-FiSk#H(Gx=Iz49t^M#ARBsjluIO&T(iRb_q)AxZNY)GH*9ibu zyz2gJ>=vvPvL(tW7pGCUDi$Uc2@(s5oBdu61M9hp&J&^B$tpZYL@O*?2)H8RmD?u4 z8Rd%Y1A?zOrt$$)!1bLZnp5s63hs_{5?6&p1={0_@5lMUN4h@%BO9bxpVjPDxHfjC z(BLC*6)mDb#WB_DDCk#Wt{_kS^_o?#W+-cI97aV*-Opfdx|W-wHH%ne#*oB~rbw`h zkgg*n7A%&s;JKo8PtjtfE$;p~U*!f2K~}Wp%o5(CEH;3Y<0}raC}=^v0$jm`g)2ad z)3P92(V{{Y+J(gB4#b+3u#&5!DK%2WVP!N7k~pj;3FLyJ_5s#~MI@|xtbX1lY27F3 zJUFoBC{o%6fLMrYIP%Q!iRXr&cwzWF60u$#d+v?Vi*Js-^tSTyd&<|pp?d3EWAA^Y z{@zb?KmMiq=f73{@v2TJpGNK|$!~Wt;45NuM`FX|gkxe7OhvOAaG-p}hPqGwldt}+ zAot(TSEM`WK$$F7y~V7vm^C(o)~QuFEZTAA8$VF;+iw)KsiVskaTS`ZQd?rSHnqf@ z%s0j6X+zQ!At~}Wo+h*0&a0V7E3+jQ8xr~Yj2deu=HP0KDbUrv&rwSudB3)oA3^JoG@)=PJ76i^ZA8{(`F;m~w*wMZ 
zKHjgfg|OL>(`wN6KQIN*0!pzd5Fom7P*(P4eZ|441FtjKeGJbSwjyGgHAV0%1$^aMreDR^t_)UOTYcvV?AG8(D&$)zDJk$U_KG?4qgd;9+96d3B4MW zrc5fDUf|W|7bET5&m{A&)_3b`w;R{R{64Bsus-QVbddu5jAi1nWm+tsWg|F-6Kgb> zQ%G!7L3^w|%;1NU(l&;0dxSQmbqzyT1YhOv(&g>a@OG+krNuh3V9hc>!m2rKt2$|^ zIA*Ci<*Yq50Yb(QC`KEwWeHoEkhB)T*TfVX6H6S4yz$5+OL)94Dn*M=8Nq3a)iILv zS|+Z9Srw)VOw!>4NI>HC%FTEC49xI23FdVObiHhY8SM{5sSLq>GeTy`Xab;e>@%oX$!v>WQ8Cs5;}^ZVwD+i zrPneyu4;bt1Hrf6?|t_@@ukcCFT65v={3dK=foHoJova^*YUn>hx@wr3IJD4+XS_n z`)W4{Xj`zXy&oYA{FZ)xvxwg$DQ*xI)eG}$g?TjsUbP^%sz0kjkXbIsst{(Ci__t` z0=*Ns5&^E_*+N6mU9$pQMe&3f6{MO~c#aTo1+qe6Su;HC_rVq7+1bt@byeMiD;U&S zvH~Ny^A$xV%1*e_;y8-p69p%L7X7{4Z=nz70E|8ixo~bJ@QRtK8QJRd_{trz9)v3n zz39RH0q(5D-Y;GgGq;GkH5*|{h7D&*I1}K2AIt2Rrcv$U13`H+~9Y~d2Ra8{JzY3U2=&wHdmceY)mb)0I`UQm8S+%rB~RpsxeNe zNiNkVmFW^nRY~QV)LH}b5MoLd9gj0LhZx>w6I85}=CO=+b@oPG?q+Olmcf|Rc7}ml z8bd;>WIbzsz3WtlNcKEU8T+>(l;{RISKtQuEpk z`2XdTe!Zf5qTwJDS0&5o&=qgDLSD<;OzdG8a>STF%+f@*Isnbe4%rISgt;^58&xOl z)hF#WryO`YE_2ZA3kn}52xp&Jpxx#a$(6x}>fB;gAsYVqc_Fo4B8_*Q_ zu~%36NhRC?4< zdD2#S!VWbOjbI3XSO|_te6l{NL62TxJRd5qD=ym+oob0pF@&Wn!!ng2*~l7I`KDOo z3YZOXCa>@z%zF4F{_)&5YyuNA3jGQu41BmX+u?=96gRL6G-aPM;^4G0b}O0-R?C#p z=Cb}@@D-wYydxeO=axZ+6MWIvRUfcr$v*ok`vIb{rHsc*Gj+x4p zqWgf)#h@%?jnX)Gnv2EZ780>$;;?2DvD{TGDs54|Vz2OD4JVY#6UwBCtP_Z9cy{PR04Eg)dDw{>fK=Ph5KttGIjS*xR_9Tr2~3Kvn~OjnDpa&Hj4uldt-?rPhK3QJAk_!9oGC{4)^w(hKn_tzKWS(HT)92`lc8Di;-RvBZ{3 zB8x?_Wzz6MK?9->x{TSaYIwu)HW{)yRB<)p{4Kitty)x3w+`Js=ol48uvOUsxI#yX z44oaf!lhGv3S)yBxTA4EU#a!!X<4ma30A>P}2lAD%$2c)@ma z$xcUU_jti3TmD8nW-3XXJGKqBIr275(dEv$8z-<=5pV@tdr|i^CI@-j>}C5AC2B`< zDF)?G)yEj5Ky*Dr_zDL=6k?$2PcdLBWJeTZDIvz+HmP|#j758`_!2_qF>05w)s@(0 zUzsLamMErtwKPt?Jaueskr8~gy21dyT9Q4wBx7`8;^6#9@gsqK;H$@cdKdW;{}pKw z`uBOjukLdjX$(s+AsBPVvs4(hU7x&rJZYt9}R^((8c1Fq(+ zy|&Qz+OqK5YhdwB?pYE4$ya=S^`+H+dT8;Vzr5ti!gV)Sgk4)5dBY><_9I@OBCT{y z^sNQ{|K=U>*_ySYfPmrXjJ}ZckNpz<6rOcEqf!}`CH7B64f?0b12e`pB+0Q=$0tJ; zTBJloErLD(>e!&2wb6_<0f|kcnO(YO=$0Fm>0M)aJ9Y44VVn?EywAixU@6*b%-%i* zH433$d|x)W=Uh=;9R9oLA|S#?yY7Iyde0w+m(OVS1< 
zjf7=@tcHD(Ro-zrEY0x@(*z_~{o*va4UTn5Klr_X8FMmflXVJHMmAz~9d}ruToJy4 zeuXEhh2^Vp8?-HgTkJ3TJf>${-6gCC^{WT@%DqeI&Q9)3<$iX549*;18FX5^!v+U| zN~6{3EG9D}5!-(KtHE!5z2}2($X|PP;Dt-lmtP-!_9gkT^F7DT_wPD!7s?fZtKEY3 zZgJCAaor}8APUWj%2q|qeI%Q`UI?-(s1xVcit=kDdDY^aDlvhpG7;bkJ;CH6L2{u8 zMaha>^Cj{55=vI|ItQ*GTY;|tSCp@UGK4`HBEXe9Uy)#|bb?s!e8s`lJ-$km5X8cg zWj6VfFjaz>@D=jap<-c$4SPRs-dhwn8@-M@Ii{kAS+eS16Wxc8689BruL4u8jObq# zDWGjZN>G%VmPZIsn9n7Y70Gf}B#T&467qoAdkBW%F>Xb^Z_K99}FoiQS|1gm0Z zYIU3dFR-7BSZEXycFK?vtV$o?t{5gFe8pbzRr(kl@OV%e0WYf2@6~0 z#T`TZje}*IWtCfGh;y&mCaB*jY}qS@gthhfz>ZT=%o6T`M4qt(@W(jLH%8-3=rZ3U-b(mi93Jlv7Z0-1)g-F@;T*;`V7sRV-p@Qp-$n*fUtB%c{4eR_U{_QLf&U+iWT9bQE;h zbDB+AjRx>lb|<2oMybUe!SC7amQCjwL`%iij>OlGA#4gRWdvklXbzDM0Xf8H<)1wY zrh;Zgm8&)J1K!EqKYn62IdY#l@4R~ zicf~XH%+iM;Z9(-I3!OJUoi@Y6*_>CrJ}^@K_FJ;uJPpBq4-K^PMfA??-b6Ew(_?a zig#E_cUee*oEyhPKj!-Pyv~#z z)?&0DSKQq73e$OEvi5`t)%N5>*9E5S$*H>I_S&OPSo+(~Fh~G{4I4%*cD?|B-MPoE^E6!9WMQS({N#}NE0ke6s<{7ERB&ZPejz! z5Cp4$x`{PaCLoqazS<*eY;p3?ylBz9kpB4rz3^XwuK-so{DoA%TIx@-7ZwKIUKn+2 z6<-py!<>7BiRpB%NSAyq;O1Aok;!lq{lWQbZ#?34<MP2cXxdynxfT3@k;xVs32B%*QNxtgicgx?u*C*;PKJlLf zrQS&`SH~2LM&*qKqz!FI8uCpZS)VlQpP|G*mGxt-n`;fqI1Sg|QSbfG?uwMlstLl`! 
z>NFNWyYlxL)3&Hk2-=En9E+*Z#g<#*_|E7&XK0!+G*ugxt_@64`6sIpKpL2016=vW zk^qOuES+ES?ewbO%dL#o&NxlejEk`t)RWU@%cKD_g9NVZljF8&T&?3S>%@KCL3gJ5 zqO&_|aa}^{b)SPw?$qQ?R@|9GG7hfDZ?TxLBL`p7XpLH=wP=}-|D^cv+dc2SCwdE> zD_DOoDV}<6@YDs-k#jx!&fM8@=uX%EzK&h}E!+F+Hwmgcg$QA&=n%6*g8hX}{g@C0 zTp@uWuSS?#DFjw!BZPs~tkU^{loCNou@IV7Jg+Z~*B_rJh|3cv6bzu^^9SPcBvHBW zR-pq(3KN16x#AGyv1j&EtYWXkZAF9G5gZD^iUo3Vb!RG^VO+;+2rZD+?3wh)kQRa~ z2|&OO<#@6v8P}QQ1oj?$m20cbSf}pyrE_<#j_zL-)rZ0XwI)i)6{}gna7BuC_b5}j zW74uP!SZn7@^CSGr-ZvM3lq`%aa|EX?q3->fMUy)1PsvU^BR_R<5UX>}ULHp7D7;vg-ql=sSM|y_l<$0~`TFpmmiLF+mVk#jxs3J;Kp#@5Iz3&*{3W&8nRROqK z86$#hh37lSYGt%=Wpw|Fs6GS^hw_wo=HexB4N|_-q~I&a%j?tn0&+!gJEt`o{jvrA znXnM_`lR*wX7q;UOJH9IUqzQopjMS`w*#?K>J+K#nKS>&v_CW6cEMG9 z#@KMy(*Crw^}Mb5No(U1=EldZ4X3U3C#`iSY&A#hl?Sc$N2bg7z=dTl+F{S%Vnc>` z;r7Y&Rx=X7k{b-*D}q~_T!^&?S)mZikiFTGvjz4l=8J;+THfumu;43gy4X zn?X~#JM?)wjrqGw^a`?qa)rWHK8jnpJ5A-snA}~H>+%Lxq=;4}4thjOJz^A#W97@z zM}4XszV!|u*773t(wwmc$%BNiLVF(z>|GSlx5QsS_$r_ue6<++aRPf51>IQ`+_NaA z*S|p>x6_rplkuz4Er{v+Dq=;~-+FW{78H`StNH8yvLN7!NA$Io$=Cdg`ja;+bB=1V z4{IVD6wBjo&hz_tMZ}GD3Aa3>5D0(sv9)&=di5+_f4#KX8XWiW%C$c!C>k>!Sr;K#TWVy!4orw^x_dtX?3YB6Gmvqz$&M0dfYyN5koW(t*&u* z_th6MmHYjFg|Em5sDJMlWh(BG^uX~I4luXFgEca$Hd-ch2Ik{W6+ir8|F=HqfBj9- zORvZPSLdD^I{mct=vm?3)3>{i-QKvrr(;iFTQ>~tf*Pz8L;^#rnBOWP4MA46f{z_= zm0Kg^Rf!;2WmibD%1FU#MY@5oS``SPR;3gZrUG2W@<>JV#4$Nyz}4(v;LLa+G%N5G zT7k2CB?)GiHB-KFhbwo!0$F_#UxAuvhyejAHiwGdL(j0Fz<;;{U-oj zM8e`&i)|ND&;nusvKQ@YH%J0Ed z#%zC(Z4}OcRu-|%Q)d?t3vw3pt9z9@5IkFyuK-lEC|`lK2v*Jd-sx5y)`B$z$|8JK zIt0X;86;%08q#ZI7$oF14FRzVT4lwZG9VVRupnX8?UXd_9$<~`Vq`UJJ1*OKN_yZ4 z`N1b;$DWp*cys-utfV+dtNP|Cj1t z|3P{6mO(Z=MMhfS!kT0iYYJEYpy!z-g zk{^g4?4Yye_;lkLrsD88ZX$~S$_x$F#Fgd};2wO5idv9g^7c4#b~ycu2Un#ESH%x3 zi;*pk99$edgfxbACFZcU=?%3uuL{%30_~zS`TQ8s{II?SL4At?dzbk4E%WO|a}W{} z@~kLdEe^T6B(6WC#T>E85z=f~o+7BWP47U(l=GLAxY_*qV|eccqPbJ#7b5tNYPVXnPMUyNHILiso^V#4b`&2m^7d*ncd4^?851|^qU$wL6^0nTJ*vRv zH8&_S8^~I%Lb7Hi1l?PKtdCtn5*Uz{2_(3GJdk>>)+RvZ7R)MFLUMD~?9K5N1u9(0 
zd|!-!A{K{L)G_{GeuDcXAnp3$9z2+G)P2_o(W(fMM`%B!Dq0*MQG$99uJ8bNxB^z8 zmWBvW9w8#GmW7IEiilnJxNFWgWLZl>)h(`v2wiJd&JueiNC{#k4iL&pk^;26lV$jv zyFO)*yKYF8)ANS3LA-!t-4(7r>G$(hrX0jYi!)_$iWXXjbNMOK}IwB=;4W|)0I&kpZz+rrdcy{2#3$inpt)8DX&b;mNKVoWZ^3>OxO!(zp&Ief)xS^w0pK9`IBR(Jll z^OeD+G&qNyHl59CH%&0#`l*uNb~}|nlu|TWRPQLRvzNC_r}GU7Jat^gP(scq@)^*h zi7(M$eF+kra$2p0o19RnFeeC~6)fT;{w@m$6|i-K_oDGx)`C9do@)vD|=XJ^3bxY%~ddByxj}r!_D3Z z@EcmWVbE)X$jkTtEDwD7{a-Nie15S!tS=(Z;2jUZ9wcy;Jhm=jczyDiXRO>aPUaCM zT!HpVqHJ}%$SXn~HRfLU}k)=vM^pZ-4;_60|Ta?u6j>R_^1x@advp*t|i&b^R zQF$226@;RJSa8rG4vT+8TY1V@amolStM06$_MEfwjIHR14v2*~+05;xRG7OO&C#W% zuzX`kjuD6j2@Ct7BGO2OCTjxY$3l`-QCX(obYpPJNKn*g2Tp2+t<0!neAu9MOd*8< zy+G0#9LLq^AmfB2zMcFJM^GGG;Q)@}?&l8faP{D)CwFS%bSw%^6r#90alpqe4y)+> zc#PhNz+wA{(unCN`N-7Ie<}X(UxjafUHI}F;)|E1B=p!Ek!*G-S4|R_*m<=g2v#7g+3W=oWGlcGunMg}U{z8fDZo`MPZX0YjLs1z z7RqAtq=2i?Oj7XOg=b45arg|=XZFfd&Jlhr-VigW{KvaKLk1*8$OdJwMQ zt)dOV2l#4s%M7brQ4NYiDNc-{h(&RWBP+JqN3yDD!Au29)tqAQ#QpbIxNv1S0I8oA zi&*z?XbuZeOyX22N>=XIrNKfJhgdjJ#G0Y6P$7OV{U-D~yPt8-Ob-z)%UMhOY{zim z9%Z3jEmx`b@6kn$C-z$%JZgs)(RCq8-79Gt%4$C$j9q2wG+jVMS?-SDf1hEE=JwI^r#lh1r528zW;f;}tZ=q2* z_S)B#Z+%%%kT$wh|jVpDPs z_Bd-|3RGC@oK|Z>NM>%Q9ZjyxP9vhu$sQS4#MuxANJB`?g1ZTU?Pw8VDGb{j)WuY2 zQX1{Rs>ljmC|`vgf&n~50K%*?hGDY8b9r^F5Lu4VB_?zOd2I}Ytfe8h@L3WsZmcvd z#fMGL=pGm;k%tVQ>bm59IIj@fz9FMGt;vwvX+s7(whH=X_Tgy=mph(@_?Q!zBZx)H zMCDLIjRH)SSUZF;F8m?Wn#SVm5yTBdB$XUA@DJ!Uh?+^D?vJ<;iU?PHcVAwd18JT(?~s-|~WU)ARPNFN}A;JiYzW z6qGJB3bEv_=BTyd#6&HO?mKOT+pT$9%-EO%k5F2hzHBd(*=fq#;wst6U>A1oCP)5u z;=BT1p$fV|S+3$euHwB;1js|V0$+it@aL>N>H;dDz*p=6$`w5AU@bE`Ul@4g?Zlto zRd&o-e8^gS#8I%%h0Jr`JegP0z?y_%5Aao#d_kmSN$OBYgDbGf5!5>FU2R#OH}Y5_ z`0DPXLASwIV5(*QK&(56Wq`ANLBOr~0XG(f-CdO`-N09^evCiBjBG9J~Csl6#Jl|Vij*^QkzWPSv^rza)h!5m-ba1nQA}JWNe1>N{gtC z>}^VjwNF@U&)OQ!JDV>|G(9x|EvxjX36fx5 zH_64y=&(SBjjb@n@U1b0)~H-_RJJiH(-4`V4^C40#VP{gMnlsyafQ}^#8JNx`Kt9l z{ny_z3KyfFV2tpvgRCfAp*e_OE+Cc-#jz6(e-pj}Ww~?IgJ+6iU&L2Bqf=*`)}S-^ ziRxS5?)~Nm(pTSray9VOCF$eOOOKr&*mqjAP7Z`0PEhAn+aV}R`n*`W}x 
zN*W}vuS2lPtrllhi)d;nQrQt~HIuziBuOX~vwc7USELV^CjnVS=Sl!nz$&m4uFVH|${nIsg!iNHF#)wAOfaW7SekS22olv!v{#gxCIkelz3&dgx0DU35qOJu47wJt>lIqP1fOEHQ<3!j5XWyw%JnNmDqaRq2` zLx>2@a;GdZTF4_VEQ~BsEemi(6s`OL(j^3C%@$ZIrg$i}1o8W^$uaNX2D zG^MdI)+q+j?%*qeSZvWn&or^&pM3SXT=ciP^S_&~%yyfJ_)lTM_cVq>vpQKjG zvTCg*os;>^mZDZ?Qkfcqf$8N&e(fZ$)QUicgi=FN1+vG{3bf=vX4zyx)H&b^dDlD= zd9CMl=}FF2n+8e|FeZPK35_r;Q^pdU*eYFgg$_AF;H#h_#9Ixn&p`iu$TwYqRv`U| z6keze&C|^D`>gc{15?)Gpc_Ce&)9xn*qAfr=m};wy4EH3VnhW`z_rPDNv3F?WPL`@ zhRmMe0&!rTFr&o;%_@?lB*~+SMxu&`(XUUeQl>ZRf(!ZpR|z$8bOuS@DXK#QCQNA= z0bFHo!nVONfC`k2+-?=EUMMu|jw)Uq@ozGY{ zUvzX|p4#==)YeOmZ7(^uzwFxn(gb0xi!MCA{i*Th$DLJ&&5b7~8;+CRc0eq+u=t>+ zof8=y#=NZ#B#?uz&?tnEg`X<;3b71qXArTX*1Wu6Y<{xn(TF?qLWo+0WjFWu${#y%Zax}tV{ugPy6oY=DuZ{) z=<+nt!bn!s^1q36cN##7VZha~J*!fB{7Pk+yT%JnFlqZ7VeKQU^Lri%`FKh2O+>*j z@$PwK#g(tE`ShXXzyBAn&*p6q&+`&5Ufcf_kDs1=iFxrO=AqR;4dt0VV+H|NerZad z)RA?Ga#-^41_!KK7ccXS9qjh7h#rd0F@&Y*)&(kK z(nnG%el9XF+6hK$c7U%YtqcnK70XxHq+v$Ud{IhS?tJCWUG8Uh35#p}<3Op&{YuG- zyT|>(^zoc*g#(qj3`Q%GTae`Qmus5ueP8&|cSl}%P4esulIJfgo_Jn*;sV?$!tUe! zn-267L;KeL#x4Oo?Ien!wI3l2C1?xQiF2zZaIj}piOD9xasl$#QJ4^f+ln*<3#FI} z1X&@&3ic}S72qnSK*o|4rvFG0c?wGd)5IwDo*Cte<15x@MPw_$6~|Y!AxPm0R|;1c z_v84A!j(H;!O;!A0$g#b6n6$n0j_A_%;DC3oj%q>Mahc0pWfv7YI$h?oTBQ}9Js`YwJuo>#6saP(?divF;#|%Z>}g``DNcjEXrD3Ko>Beh`rBSkIK4_5AHf8-~IT&z9%IIo|d3X2*i5) z1v#SKpLu2U`Bz6Sy)p9oyUMrUSHANtoi$7^T>oX7O8Os!QkTUiu zgf`5W+ev;q8#GF?M*m~`aola#lo|fY^=}0y{|mm)Kc27f0?WOeT1^&g?=b7f$85~6 zdzi8H=FZXZE@xBep}NUR2em@^m1iZsUfq}LXsIO%*mA|@KtuZ6<`2! 
z3vauHe5l`U$la#T+os1LUUsKCvt=x!c{Htgl+6@1VR#VnM2U5#$TCe>sTvhnIO3Zl z+mI;*UqQd}Ma~u5AH=7~Rnfh$N6z!PvNE!FMPv_ZVc=EF46cc}i-@GgeGC@gV7Y31 znL43L=b3N|&a0q&v43tq7V+R3Ri;R)R!5hN#FUJoVoS&3E5;Hkm6=V(=n8pK!&rRn za7LRhzHTT98=PBJsraLA)WH9Yf;lI5hX&4^tZk#bUCN3hSpI9ls372~^&GKXZG3hd ze6{`3B-AR9)s`3SJ71mL^9Dnh>Sd@{uI|gO?pMdVUzvck1r=+{^OKvOoos#5Rgb)^ zBleclOzqL>qHZhVMvJ>0g*#j@2<2~c68&nc1Hl5gvaW1`SlBs;nz9KEZpgUOB488(2k(M9JSCFcf z#R>h2wIS7pHN3&a@jZ(oZZ8VH`IztZ#Xi@U2i!)j3ca%?soy(W98@u!yxoz!#~IS1 z^+**h4DWkn!&S15(Ch9pueNbM_&}KX!w_?GKblW4Q3#;L|UR zoO)V%PfQLahQ&A@M1ppNLn90>W3Z5{&Y>&-Bro?vfPFoK*E7 zO;NRqUO5$u^3}4?-en!!gCfP_a-n3yT=BN@b{cEG4WOO01G$FJU@bg!Mz1CCo!yLz`?i3u}P32;stN z+dt5GK)UtF(6*yPJ5I{GPs=D`p-Xt=8OfRFht6J7TzGl(xyz$3zBcmOJF2(7u72x% zHO2|w`@Z%&KiB-|x2oS=)!deu$FPKSni$;?ipAx(Q^cBGl=C?&2sxmdbNDM*#eM02 z0T=zP{x5JplR1aa{i)JB-G>MNoqPt;bbw!$HFemDg~MjYG8_}(D|Y3e)o8Qhwcf=@ zjLfd5dtyueT-<5lH5rf`l2)sUEgR0;Xer)oPb(kIt2ZYXYI3Ta$>oOhI&)fs85$99 ziyfQqkad|y7W2U!WI>iAvcwS_%G;#N#b)GAHPYI_FnOD7mHU|h3@ zfIBXa1FnElRI}Rrf^F|x%%|r=-2AH?i&fh%^)+*UGg*`Y>u4wVPu}aUHw+o*3iK?U1FjLi> znArFX)AR&GQ@PLwgcO4-$v$O6x@dqru84TzAF|e*n#2SgOxooq?C25}?s7y{j3C?w z=B3q9ibcWVh2fGf2i~1dD?Q7W(&+On9&U<=*{EHr$x!edS-){0ZLphrN5|2MQ67aw?+hV?y)oDoBY@rAGg9CkEA@_*LAmmZk}nUItO_GtLbRAM zE`hDt{yBq){)i}3fUn}~M>p_pRUIao_VA>|H_IRcW_2lxcj~JTp&P5p=p5Ym6jOJ~ zQM6Bqzj2zj0Tqj@>N94*cGVtx?cT}ieNK|?zZFX!^+D5dmogK& zjwWD8mMjP({47)9CTD2(NfH3D=wKjSb3;==KbNmSRwQ77<103Z0C0t31*;hsI?YU= zD86r#;(u6 z{w2_$;Gv?TQ?P_XCXRw|02D0>!0Vxq;uFVI_|hy;iE&sIfWpJbb%9?$3hS;=^f%Jq zwKx!uBrQeMz5fJE@+@fKN)U^sEL=fZEN;Q^NaUKQ?R*KdkUQAfN8UkNo>V%%X zlZl^}^atscOcs6^>yE4$3EI$SYX{ z{b~c^e|ba1?VKXQ{2(LxgT#Jsa_veIuu!(q(th=>-Wp)188BQFtkV-Q!%nC;VQztgG-(owr?60{_xpV_~{#i@>` zCxBS3=cX}=*zyDeqUH`$v_@beVzBy)oFnzJ3D0j|8A_WWIr!u{h#`yF|E zZAAy{<;TXsOO?l+O;0ko)}5I~0A}?`+-boY#L^>XWXmHe6CtSlL&h@PJZ2^95s#Qj zCiy8dX5z|^ThK-X)cNI-7NJL^Xns&Xa;_c@zWueR9=}RMaI+1yzJdr%kA&WsAAAGD zn;s!OOM>p;{bEsI4=CuXfw$*H^m%5EMb=KNOdWbGvS(5F9glEW->)tXxVAj>_R`Rs z%cJhDNa$Oh*ze67il{XBi2Q!HtenO88{YZvivf<8S8*a_> 
zfvxMVM~rwqZ#c9{9a23KSSnwiHRzQh^GqP&Jgl~Wq#zO&1WPd9&Q?c?JtL*7qh+fi zWh)}2u#R{q4WXNiR&rda0kaPkI~fds z`nuzj7#Gd!R^{(h!suRq8cAZrh*f_E>%6squExj5$(X}YYrzf!cJ4sogh+@cGGgH| znOM4qi7p+BD1h6_5}j*}$+iV1sH3y2=nSG=2*ip=)^CU$4a=IE=l8pv=lVu$jM;#k z1_pbnEaMI{qT{Kqhh&FXZRCs2rtuv@JTJKXGQ z*Ccs>4taQ%35^yD5t-lrS^eD~$>05^@U^!^7cWbmx;Sv=nSrBcMSD*PcN`UL+}GRM z-P^pizrIUY+b*tbkrdVmXmBWvx1wuqvMQz6af8)?Sa5^YHfRMB!3tA>uvf9+>hPlb zCIgU2QJV5e>fewenAu{JD#XDzjYNauilSi)peed-nFd?Yo7_FPNwzH~3P~l1IsFP} z1cXx8I-M3IJ;B-F1sb-=ron(B1j{4)mxYs}$q6*=lNXx zTfi!UNwfe|3;je3{e|3>+=H8+SBw0FsKx#wQnR21)*}Bc^c=&)OT#5>k%LEsWLf0E zvM4dG%cCXBqogZh23ADN=#{$%gEO89a?g0h>i9w8)Jhs49mCYY^=WcaDTCfAQd;ZN z2583+d_`UE+`2>DBq3T^QkvV~?!^LAp+$(o=W80~K-^jIVzFUaH0B*z7N!VM(Iv8& zVi}dNVoC-8S1IL#DHT%KU^1%*v+Lx%1{ox*;`TvoBt)D$86?~#YuY>5ynnFmz+lHg z88Nyak#0FIBZ*j#%l4j?5yX0C@EDue@WhMq^BAOkRe?|}j1yjdSMl!uqx$CeH85iR z@YmX3eysh=U9(s*Jt-dR45|KhoY+$2ru=^?^_i8 z2TbTE)~drx27?PE*q$6zBnc~*;j8dGM^p}guTt8Sc#BMK8G@RTvqhP|LtBoGHjsn% zn<`3 zZFjrfw!0nrX}ayP-IgUW0H_?0W96KH`}TLgLa{_jYFqZaKc=O1*43?B1uy`_{nbAE z>|+IAHC>%Ui;#GGx>U7i8H}x=%?BX`t8LI-MRq9S*%5MuWBMUe>H&QQrUl!H9X+ed zoYig0>@w!`nevYlz5+5)zA8Ow#l}O#jDxJO0})4%6|&_~*!F_L36>=09wlT|azDNU=B3N)&9Jt-h)d$2A8@IEZ;eZ%{vgXNbPuxD|mKeH1w`XkD{3! z1*4lYBwG`QHb#oqA%+yN9gM~e>l{YnqBno7k6@jzaJ?TJrAU$$CB!u1k0JNR2y%qG zCLntX8?-}h6Or9BLcf~e?*aX7LNa{)(I@lkX2Ste=UD0UlR zt^q(*hL-MeeG!Q3xUry9iET$kK-?a4<`Gp^rvht|Fa&|vQm9yw<+A}<)7}YF!D%|g zG9aA6Eo@?E=(ua}NMNkO#e2jjLhF%e!)*J8;CKG_0Heb~9HmunF>A~^H5NY`^w62n zumAspuMBF!S6Z`CZ3AB+P#An=FsKY>l|nnCHfj`FgF-W>H!xhj=^y@Ss zXp9y8C}+N+=Uc-$_C_IVZl`1gxI)A_`ngRdxAk?$_x)bBE>3l@|9$Tiwkgt6Yv#5EDsT(ET~mpeA?GrP@JeLMa8QpSOq!) zM=f_3EprnRvT~|tuQ;Bid}T*0CoRj)az0`p7A=;q#FVlqXo0T?Vu7!Mr0ar*=n0PN zLxzD^IIa(slD;A9gtwcrKv_t2p!H=_7NWEOR1UtvIW0S4InNcca77S{Xjwz-!f4`t z_e&qav^dqTKv~eQzLKw^@}&;GN-P^ntr#LL!dj?U5^N-dgjIS#THZEP)h?}OU0AHq zUE16y-G6)tx$Z!$wlkux3)1dO(qmVKPhA_PUU!l^dUO2sJL53A-~ay1`#+rh`=6

-tOi z%4C@{Ss-DNaU`n->&*06xuLgW?ptrr8B7|rMx_Q^>E`e=R<5^ASecJL(WO`aBE3?W zSz}2qW#URqAz4$2<+Ev3ve?}5ggkj#i5WWw0auCDI#`%8nss@H3?L}%*DE_^#{C+t zDDrmxfD$WUpb2E`!<}mqww9bj%J^OK_+4sjdkH9>az|7GZ0*@ITN6gSvt$S?@=Tk6 zezh@TFtOHH*}-hbyVUp*@&+dYtC`2}lMse0uLS8DpHCZmn9J`lTf-iCB=AxG=|kxE zZ3zDy1{=@JVRRnD%4KfpLa!W2a0ys$3W(*F!NpGJh_Y!aUg3zC2#-u*RLv}6y+g{z z!mGwZD@LQL$KxBPW9r9}nx?^5Df^}o+n#+$meVFL>{gba&{UpQlX$C3tcm>%>%kjV zkkz5LENwukTh_z3ZAWO`nmcl9?(i*wSa=rbWw&6x2B#JS<5oS1d`F^!whUTRryAxc z_@)ZF=dkq#9l-PhdVm&=gsj?35U(;1o3dH)D!oIW(P_x+)@SwTseV;?hUixytAe9C zESLsFWp`t@3l<|1vcgtGII*#~2fGomN3Za>t_(R&XSJm-$jZ;CYF@Ndon}(^X?^oX zH%1GV1P=TU&(Hpw>u;Y6dgxoN58Q3?Yt-&2oLU*fea`Q5%2z88#lWVJE%Sc-oY%v@ z@p-T|VZ^OayR$&EB24tWcmER4hbufELA*k(_I^P4%I6EDH+bbLJ+kF1L$J8HAA%(K z3JKLKeaY>P*x4Sa=McjSph)D2^(5PvPXc&E^z0AisP2nh?h9FQDyj>s!cnn78}>nH|a z!SIGH-aYD?a}17?Md(HW#YKDN#Ggs<2>pa3ZK4K(GzD#yvPodOgdk`Dx)*%6BfVnoIA7CCB# zk9d{89k(cJtquTR0j@w+L+gTvQO@V=Ln-=(KrCmr zu?`zUiX>8p1#VW8zxo|wm;~fE^*|JLc$8kBUxDJ4?@2p5*Djp#g<6p z%Z8FFq{)!5YQ!1!qU=U-UNehW2T4{#WjkD0(uOWFOSr37+0dAFF=!3+2y$qx}72jbO?;V*p>VUU!=n zdNue8`jtbx!ilrj{X6~YYf(W1R0Cnh67DYr#0$S zR)c;XZ2^sq;m$I^4Exz~=(=g2!jX`2 zSy+V%o_4piu`LPG?MY)UNn@L1B;c!^Nod(~(Hw*k9hNHO{$c(L8X7{b@Ke`6eFDFk zxKb5%UhItJgy$n(auA8%FflIo`Sl`?Ph69Q-r1vpMU&XQi=8sAXa(jnxtg;=CuEd6Yp#^hjJ<2d*V!-I*<Jhoz}vm^To%R{61@Tk2&`!TfGL10iq`e2@6aG zz5-m8y2PR`H^u=DeZi ztgZYg6J9mFC4u+6{~!M!&)@xbpFaSod}}lzdrkhkv^$E%SHuh~3iy19|D%%j_-&sTVVvBLM^nozD=`gCB4 z!8Lt)MPNVWE8Nb|XTgn$-gj^$l!coSaLb3YI!LfCLb4%tXhXvAh9tn1culNuZ76?r z0C%O&Ak;IWp?MM;7|dX++y_xB*aEHLQ{hgByB_Q=uxWwieA6aV>gG0vKgw-kqRQpD zt@GOx9tIRh*l~s_>={QW2Yh!a`&2+I2zp2rL)R~M&nO_O3~Ci<>d0(PhphSxBC`}V zXXc<^<#!!i}ldLhF{PW;8}U#4C8(6>8=`en0);N00BnFMj=|=<0R;g{%Bi zFYC*< zBTBCmW;KX%_6Q62ii`J)VZI1koJK-lxespvh}jqmM=qt51jBe5JicT!CPP0rv79Q)w3>o~9zJB&B^!SdG#% zZ`3^pey1tVoN3pzanCGZj}eHqJx;PUhAe1Cj1@ZR;45gtKB*H?#X2Gm#|hC`-yHIQ z%!6c%AXIRBrSG+3D~ezKa8U6$EH^0M zYzcZp=nuktg`vWLykSh|#nj0ns-_^|z}XHd1DO(V<2d*#g>1%|$vQZj-=U~FWhygz_fk4JkOUPzS9{)EcgcvfuDVM~6ofdk$LBl)_g6^8`0D5{ 
z{)gYMmxO<@En6D0TNio27_?X8S~{^ResEFHgXKX3s{(l|kdzoK09-8#>|YkrkJh1g zrPjaRygqGYk^dn2gFr86Tri(6^Y|Qvmg4q=k-#FIPmXGBxNt?lz?vWq%?=`SSZtso zQlo)G%e@Dec=r=R3*fB^6Rn9FS{psQCR(yOO1vskv?^S%64#*~uv1qgH{R_L_=@OP zo&&2q`mqL)7VCR~#Rb0#AAE)WAa=$fiZ--BjzQNo!GFMbDPSAZqn?@E*jjwuN0ILu zS3jNDB#)>aN!<^2GGQK*j9zxAFgFUOLfmM|{&Dnk^STtEENaaHVioo3Awt5V1$7Gz zMD&rfTh++XO|3Ttq>YDWC_>ZK2x{<-nQ;%D@(Pgoh3VG%jA5HkaQb9O!f0rW%EjZ; zJ#FIQX-27HOh$w0NrF2bd*q9<||i)Taua~IjmL%5VJ zjHLjvNFwo9)k^;%)T)4CN39MVSxuxZyE|)Lhy?Az4dJlC6Zbn2wb*zE;?9a3VgpM_ zLMaJ{2Vx=20XgvO79O%{2Q~w=gIU&MRV-wX+RGCQqfxAiMG=d}IQXVYh=i3ngaw7f z=#DI`Jd)LbjnCovM8!hTC@ffzuoAE_r(BX+B~GgmW!8&wc8jnsr>I3#+$t)=g7P+T z%@H9%tS&L)u&9K!|D@>93nFw0JI;%`E{SPk!wattBN6N38$(3Gx<7sEefc{dD?a$4 z@`pcD{Ngv7U;k0X8?h)LVPS6Aia0DXONcluXZ^|nS2&@3^(%!jwJ$L>ae)jyU{p}{3RmWHj_}Xa=bBYDNB8WvYqV-0D+5)~(s0~Vk8AGFjNoG&a z?~}?tZSG-W^QS}7XA?_o-tp4NoT;eHvD6Y}ZiN~7?HSeP_;MBCDrvVoW3L)~mD{Q= zJgh0~)E0JX5bIQS6h2{jKHl16?5=eN%2mpqv5c0<;!cxq(Xe0flwa|TTh^#Y_N05( z@hrMAU&ooT~3DqqXC${EbOt79nGEQ{4N_FW*#x*bejNIdA+9G9wP!m3y)g> zSEZ+|B`;X=kLgQJn@e9XRh_ezoi>)fU_kmMGB7J&G$B<0a8---J(tW)*O1Lqi=`N)~a66*3(AZ3z-@MqE{p7*tGXE^Xtiv<-A|lBF+*Q(pw-O3u)n@EAg}}I&F&&(y z@Qs`Ch#ZH0+0pKbvPn49yPskR;XYpe*xKZi6 z@KjzX8^#VRyI;J}3*qWf0$>&FPi%pFh}GOqRtxzFz$EY$g(W*mvFie}8Lhd7}$$D>?(nwgv5eb!Wt~+tz*Zr5etZgMj=uf9Eb&@J9W7yl!;ls5`(YMA_QMi2@8k?39G72NcKK= zh!KZHm*!9j3y5|2tPqHW#0DT1wiCkZe(qJt%WsWczcc;Tec9~~6!*U`|L8}GpZ&j@ zPkyWVf@>O`Wt1kek$=d#V~Uyr~jNS?ha1)hLiuo81=P(bjM%U zU&>chzao64GFWu1euW>Lz$pU)WHkz6$5I-!m=Kqnn2&xsomleAf;}@;txQb59E+P% zD{TIW!y)OTDaFc+5_Nv1t*V*HXZ5ZBeJPxJy;o zp{VFH7LoVzv#CwvQB{JxgYu&{neZC9chNM&EASQct1Wonkv?782{*eDJtHKyd!sU&m$~Gy!3_%G_%~kUO$Qx5YieEfu+=-kBo6Rbau;&a_7%rP7QR zB%>;j+#c5eAxoatBFE$)_$qndWNPaa%vYHQk?cKNc1%Nt2VYcyuXewzYkF1R@}?1d zb?Byv@YQ`w*Sogv_vU&~@6LC>Gl!x=76dM~MM(In`4ux(=QLh2)xKz~I%BLmZ-X9E z*rNeoA+ZYK>hSaAcbf})t+|~>6rn66v>%&Cm7QS9PcfJiEIVbZI6V(rJKzc%hY_|= zac&NLRej!8an?-8>YTCioWAP3q57f$>w8d*S1e7h%pZ7_*>}yl|5Z!tbyLfA&Hgvk z`(M{KT~i|;wGOGL=NPQc*;z2)TF4D*m`Z3jL>^Rz?vuM$jIK@?cs}?M<*VfZyw#z? 
zm0`T)5rgZJ1l|>j$QGMNm2zp^;3B_=D+9PIdg61~nc8wq|77pCAb8jmbaO99ylxws9a< zbk&Scj-c`cgO%AxZ;PxSLwXxccgJ8Lge-WmFu?)yRrX;SunN6GGD6s^&g+on9+@rZ zlA*Bn2&q}QZK})`HCAw9U6fz?SV)Eph~*wJ7M*DeiqP%w!(z=@xO{g84SNMlc!wx= z_>C{!@cZ&T5XvHw9jGip$BjSAwGOf!e5 z_D{c1fAA6Kop*&d-V$DZg?IKc@ANs*@zdgtK4E*eWN*8qYB#sMkyBL5$*CAjFBv3l z!8}1ywjd#s7ndObSw*C9!;^R+iM-%MzJDCwCzkISE!-I)*bye!f=N9x0ZL*#$(JdF z56W_hl%Swr0f)AP@ElM?G06c-h-Ak(edypQ$3xH*UhtH(g-->zawaSK>w>8ORFtV8 zrLOTK&1EN76&VbqMS%)Z)f2LE5~h|yl49eeaIyq^1xukjCIm^+l=BtGYjBYio0IU= zk;Nz8lATYA6{+}3JoxC);kekH_q-c-u^S(>gCi}RgMSw96wtzl+d*q#S`b$jyCR(7 z$)|mC4r$9pNQFJ#mv~E;`3)`i z8(Ho@yv%nP#})pgD+5OfZo#G%V7F$`)**;%U7!SrMa#K|xUhvs3Pp?5f}BOQEaHJD z`=i;>Le_&vwi(jJI!P3+aI`1A&w#JWdyJ*sy8L!UZre-=-r9Fzao%L^Az4W$_7o3ix6B^9#dzfp z1(Yj%3uj!?M!a%ocBGDMjuk~$*`mtKYk~&BSCOT9pESfmauLbieu;^x&>(dItrig7 zmhjKKQ+b|=gCw`4U^uaQCca`iAOqpOJg;P)XA;*x11ku{*fxmX9~wYt0!CF3!xfu< zH5^hjj)tK9ewsTJTsRh5GKR)b^>OC#YYgJ25)jYTJR4m%lCWnyv1tO%tAys!l>L*) z@kZJt5UaRXNzCo%?R>QlX$)_h+isZ;yrnyQ*L38bx$_+>_^S8)c_0?x3PKj;t0Q;j zSP9EU`3lw#rAg)lnm?R{6(uP_BwWR+r|XT-98(0j_pkWA?nl)L*gex;D4(Ifb_P}g%r#7Zd z9@#4MsUBOO%v~1xczFnSb%HcGG8zw?9JeDBSFjs?Za3AeMuyo+LH| ziXdSv_jrgTQZ_)6i^)D1u`-+HbN4e)v0x*_H-l@^pl7zA>I4&7H5y(s4i{Y9u9>Vh zgt;kc#%%Kb=@RsCyEH&YK`^VFI^eD z^s?yG1%BTt?%|%neQkq#4+$Ff3rp(gO@Pk`ksidcB~Jb1~CPmgxWT7awP!AlE# zWp5V(#=e2C09^}wMN}*&zIxsj%!U@>0$%~K*jnTva@6ylViaL5FUeA$A%a!lE58w( z&~qTx3SZJUT;o5oHqc(H{fFS%S|2#H9+QTS0&C$sn8@1L&cIR@mP*_AN`tbHxUx{l z!XgDXBl(K6xaEeOh6z$M3LWEwM8YC`HB4PtG|nAw%xO>qk+6VR`QWSJSfmn_NYElo zsuU5Idz}P94amYGh_z2xa)7w7st$>&+eI`HtD#%CyGOW>HDbZ&P7sUrV)a~*9=j}k zk;F9&UAR8<(v7hJ7vmgDV?5F>#{^f5Je;m*c&61^rY8$c|tl0Mc9bbJ* zVER@c<{&GFob?rGwQ$LSSTz3%NAQ)+G=~%{qgAh;(-PeZKbpxXp~b46vuUwbRL69*R0?i^apF9FJZ$DJ4J}@5}fDF*D!#l13MM;8S{G$c|Dj)(?ih$ zQOX%j#37^{Ao_ zPDLM>4c$GvJx2n*!gL{QQ7c2iSKJj*gKHAG9%a*UZA{2s^ZM+e#nFSS!UiE&VOkLN zy!Yoz{lC~6FTqwm*Mw30R9q7x*^_{Oy*9-`o7)dz(LBwDrLfmq$>sV6{S|Jo-2| z!tM^W3Ozws-$7hR#M%@n1!zIY0?Ta;lOXRMKbAcbv49DC$|l^BMA79czwBY}Ofe`c 
zu3jBjJOUG1+%9EI!)*L+1z0O>zXl0c2yH;GF!jJJW@D3ECh#o*Aq(FUfUCTY85H0O z`W5&J{c*Gi;XF)hR>alJgco6Dz%;V3{8MB;2{Wz{BOw{u^*#dF-Q7cH17Z!EJ;%Hw z4cmg1D_wt=TJY~vGRB0NZ5=G`W*L}={Rc}AU&mBmciow_aQQ%N=jJ za8zm4gzL~ayL=$Mq(8Z!KR$N=R`%FTPE-mfG;siA6`a5gisJ^x@dIM`zEMKY2*J)!!47PP z3>Iz<6e6+=#qJRnlMRxPutLS#!lfkdE>r-|xnsBwC*(Yg5A_LgptK-JK3f=yp<8bp zN5}3Pn$V8prcfR}IgEMK!4AG6ZY#TN1uG!Cj|Rhl(5c|6qU&SW4Kh!o(U;XHM{Xyz zNTHoaQxu%FDf$#W(vVM_(-$~Bd?ud?Jv-2mhf_hSdaBq)p0j@Slz8R*&>>qnE)p1H znF?^Vc!yw-ivU;!*$PGZYSB&+k*(Y%OFX1YJ%><>J%_&MA$iVIy4YuAiSOw1-oq%I zEcF}16?zd5X{`(xTkbPN5X)}_M@JFjA`L_d1`=r@P>bf**|jVLma+xbVoONnEX)N% z$f8Bp(PGU9fh(4;fLPG7NV_l@poKe&6cdHmk_P)O3}FrIQxdhD0U_sA_rsK08ex@asuqsu+1PV1E? zcPL|8l%B=ItD^_tW{1SIEPQY!l&ciMw)~;6CVj?HCi=h}`0BZc$4dhrt_kj688iU0 zS|0dtQ;ZP#W&s6C!1MYLE<#6P$^u{EriQ?0+9KQ%got(F+F z)5kZYjKPAnB#6J*o4dr5#7!^W!CAER5!J80x8<`%+rD_-^}$jPvJw#v8B7!+@|1=) zP`(0I0l<)mwJmITD;7Wp5V681Wje0hf}x4P?3u_CH4+=bie}L(Osvznrwd(DI3C%8 zyu(a-E7_)vJ=(}agB>e*pFE*?8lhOQW1(xDb3{oZHIB&3kL%GjENmM&hd8^6af{UN2$rti z@%K0H&m+PefpuCe$T1nTUskFdwDomN<$V25;VU?=XpuBLqaJXjHW-z93p#^J1M`4m z{K1chZoMPCenWimCE=M1!oD-Su2Y=0Uf$kzPQ(7e%H0EK3qrX{EqatvIFMKXxEhSf z7>r082um6aN#cei@a<$3E%1pHc!rC1h6=ZXbpwS!Ym#v8D?}P6DR#phFc=|JAYo7z zSPT3^5sL^-3#{eLSB~w0Pf?cRIz6E)o3UHanXl;Q#3U@f8=vAU$X19hmXHOh_ zMX-wMS}r`|ymA#e^A%((%2z-v@Rc)SfwG?WkUIDZG_}ZUi1L+#wJ2rbB2Je3j*-5h zw-l6x!pSmk;@tvht%gSneyxRRL<}UZSy(WQlz8AbzCh&11>`Ikw8ACSnuWJZ1hJxt z^&KZbtetVg3nc|*+4+h^EZQg}h-D9L0BB(;A)DpS?tPBP8;LF&jVT!tRrWoo#!QemxL#;3eUVM zJ@>}&OK*?8a%bYkdsBBlp85DE@*n?F_Q~%Qf8=PS(^iF<#*JE^PJU8psgb%d|1EMLvd8|Sq~wTdxLG0eNa8ci(xRc`fkPNOlUMjw(j z7F{@-QlY_4=ct^q=-kQd8dFkoYq-%594Y^$~xvE%O-rXhY=O-2JtFw1aP%Eiig^YWfrlL9nqqI z%o*^NSIQ`+LcmwS`O|5QM!!r+e6^w!O9pfK;U$u!y2*gt{`g94JRhp)oGWfK`J_LF zmIcZREs(k=JqpSdg%u8?!iz?dYUNo?`slKWu;NivMCo{PqcWyyDzsRd)F2Bi5WqQ> zylWz%eiTPUOdvsmDpy$Yi?PAH!^)CgEj2KMuZoUhcu-SCG7VIXS6~j;x4vnBeg)eV zsrM{h@0(FQ?^(fDkgy^Y5ux!|>Q-8rweNJC<&IG=K5v$}R zl0x+er$V#>v?&r)b&NCt(G5g!D9q=jV5$>FB#g0Tg5Fj2q6HIzmzE5ru)u(&mCL$ZFBD(6Rob>#=~!E58XgY 
zs;=d_vFSBSBkWjLj8zv6MW?md$5g3D)Zx2iJ9CFNCJNRj@K(hRu1 z@!xL!;%~No_P5*qxXAT$!dDnq^c`5`&w(t4f|i8<2kO5kEOL*V z49Zl;l$b+uRrqFei4(S8VK#(4z*_L2!bu=jX6u}1w%9vYnz7%Oveyv5TZK6N^i~}x z3(*b8fW2B`?3uxpYlzvtoW$IRiMay_xt!>%!SK}npoIS5L=FsB0r3K#7@=3Bz%85)*$QN} zEl{}GPk`FsD?}tL%D&5(-M4~~2Yij7X9BJO!*s|LC-fmjELiEFZjmA?7T-}%A(rz) zyk-Hhhyq4u2JwwYqOPd(DnQ6iuwoI6ovg^19>Nn?>(rN+>MOib&RP?}Bow8oXOHw* zdIU{50hNQV$RyyGaLCzhzF?#J(hERGUo#c>lvCaDkc^ zcmOIYTY;~ha}}Z-d_|grJ0+w%q>JFl@{lg^8eZx>0yztX6URBOqwoS;#|b@>7uflV zvK9fYWtc|v5z~AvGMR|-g=bPgv+O8kZNMN*u*5-G6tuQS3=z1B9LCW#W|;O39gRY2 z$AX)es#p-RNHkXZ5Q!em89|GX=DG*x4#CqKSu`4rjn9P=1T{S6!ot$$?A;=`unPB! zkvK{kg$G4Q#KLMqc(E`}*w{l5Yfm3~g+g|sP;&T;r1PAx_oCo95bHI`xi^Nc+!}rT z{^ZRMC*S+Q%n$!r_Va(2|N1ipB&-QFS(2rhXPo&8UUw>C5yWEkt7j?eJN@b};VuVX z;jhv8-1%?SasCzkLFATMjfAhvX1&?s;4Ab8k<4eEU7}9T&+v1*JbX}Wi@LcNtN~J%X+o2wHJ42 zOFQ)CT}JQ~T7(7d3bY7Qn?`dF%ClOqd~i0datckl;G!A#RH;|`7$)U|a%O>2q!k#! z^}zarSW!^cv`_jN5_;DKKg1pcn6EsN`B0+J>`TCYz>cEFDg1fV4H@k^I;qf56;4Nm3KrAzQP?7@!hC`UU|bs6O!A}AB4Z1 zEW~+34|6-l1|59W{eh+TeH(?VWxC%Xh;{hZ9B60%8&u*F-4GwCh!K^1U&{{t@C?~S|7;4eGN;>HiU{Wlnv86 z;A&ljXiMVg_VlS88PnU-rq{;}BdQcB4a@vUgNBS{xp7u|@_|?w%UZniF_bI76|7tc zSi*bBs$o zKd5jzsv5CuSgSM=QabIIKbpE1>$Xi{l~b7qj6f`GrG)1YOK>phiR7!y!wO72pmGn( zpmSn)7FY$iqCpL%Coq_RZAXgeT3JMeDmWkQ=?Sv^IcsXGmjt%RVd_-%#g(&b9)vrJ}8%c|h z6of3+nng0Ha00}l1<}dDSLEc&hy{*v#wy1-WiYl&_$9s~h=s*@K&$|0X!huJ8jub& zvSX45m*BmlzLBrk2|f~@@O9-(R^Px^&f=0YJ3WO}j)#udIC$xsPiU??O~;}~`g#Xn zv6MwTS5&{EWJSd*>b#xqg*x`a;}g=9w|Nq4vJ z%>iYF=8i-Zj6uS3xUeuth^*1{TC(w(R3nMQf(r|ASTqr76f2{uTc1L3V*xV zfPiQ{GOv^t=I=i-#25d#WWOr4))1LLom`=e$Q=t!mjC_i@=!y=j0av*PXVD^rxjlLJSZwuh>TYFV z?hq#XB1;uM86yy;5-N4!1#+L%5e(Jth~Qw+7~W#wJTz|>vF)CzQnwVz-?;oMHZr3# z7+pH$m&w}^`yZ)|Gp*;Dgc?b1%WPKjRD88Kt7#^qX(ny=baMSva324_1tz{~!X@t0 zz#LuzP^x?ax>jJ0FttJ1{t^>ZAWEp8j;o!FtQ^j6)yC9}gq8A>c8w-Bj-+E+aQ7Hg zxa>o6s$9|gCTaIzxlE4=i|4T97rE;Q7bSdk)qp+D`(D!>AT2`TY^VLf-Vf&>TcH3~ zl&|{UBYf3$pE+`S-oaOx*CSg9U$R2KsyuA~VpW_qBBH9~1s&2A;HN?%$qIQ^NUf|n zH(z_6!I7TSn|JAwH*X9nrVZ&-^tmy$(5y9%- 
zeDB>kkX6@RGYW9kch7e0?)=d^bA2~0oi~if?=iU2dDDFG4TO5>pvz&AA`JYEujwl< zA&JybaNLx5Kowj+=}{)xmOtpfYbf@xEajLs`}ADq3C4eyd`%jEbv$=d9Ct%x|C;dr zb#dICxl_KSDz}`;jWNh`z>=Cr8v`C~^zYwD_=*P>S|27}6DB}ga7{RWbHa#wmfSN( zxg&LIL(It9NGXCKmiuv5`H?(!01^sX$4YnmMEo+Zfu)|0mjbSWIP1dsTjPed#Sd?Z z8N#Q4e)3I%y1K5=OaR)R|w+5O`Oebkh&*~!i$A>xM76~e9HyoO$8K8 zdFPG?7thAk8N9MUSu-)U>X$^-RbGb% z?sud&$(RW_qtcTo;HUVCQqxlxzw+Uq!dE1dL1&=(S6YKbp*AB*RAXd> zL(Jd*L~{3C;nml9#M6FW)Q5q=F7g=@S7wD9!>V$oYC;VaxaiP0T= zMYbg4h@!YfK?@B;bPq{|k)_PESkSVYvT%;LGv}hu;+6LPAe(vRtY1;_%D#Gr4VVJD zfPW}u0g5OKv2qhpt0@0C@CGM@Qk>jSPkMH27tdjoqSs+Zw-*Kg-Fb8v5KmE?I$msl zmX(sIphS<(zoPxzS>0MVC%>7UVp0nMR$qBSnTkGzb3odY-xz*}P_%Xm7P*nKGu48v z9TqG~Rurxr_ym-qG)4K!!CKBQz*7{la6-;~hERxlAQBc^OT9(3?0ki#(!L^EXd*i( ziwr~riqJIN5-Qmm2E-a7poMHwXju!sET~x2i-mvVc*9QZ?ljt+rn{3h&^aSW_#uf6 zIm6-kqmc!}m?dOg?j#EfF02%|un%M9iUws`T{Uz5OOhr#{bOx%Y&Y$Hgn6I$f$O@h^TXe|4 za_|)#9*ovtltXw#s$tiu!L;%(iVs;*cbOvcCsNCm(YfO&L|~?2LIqAo2)2n*WP`3IGVVyHb~P1!S2(Q8WGGZj%W z6kR!q$SEwn^-3e}AmYk3S&g>P>}g;X;A&IQLvP%7!UpjDj}TGc%+c-f{H%RUWQ7Vj zMIOn#$l{6ks%eN+d9CWU%S=M8IHrs_p{Q zaB;r&9C2;I8^7l&v-=9O`|2DDaE0a3`(Gtnq}yO(f7=SH72vAx-u%(~bHJy*`_`^| zMpW-T3oahJ%N)hCZ`*oqn0szo`)=F%?pQkBhVNb9eiMrnO|3Wd``$9tzoMxz@rTT1M7mg03l?JE)U_r(6v5B;*vJyou~H9R&Pm|z=$H?Y6X-l zAA(7co-il4){94#C^FmS%LPFpENgWzcT=<&$+UjCO3#cLq^NC(Abf?zh)dlcFLCRK z4Sty`qDOfI%vc3%qrY@($Owciuf%D1qHyPh9oi#l7>z=&j1kvlaoRp+YaGu#Z78B# z?V2KjV=%SZ5?L)zYSzXz${=B(ONeFMNO#X@Q(&tmtX3$bye;435Ee;KS=J%c6AauxG$ zznQ*y?*Y<8FTTt<_mcS7Sy9&sLHjY$zE1w`gWRe;gT*xidF7mpLQYZ^FFKVQlg38^ z1BL{tY!w(Q@QdPmhH)WVZ4KgW3FL3~=hL1b$ZDOJV3h|g9{JLEljSmyj{rFYu5i<2 znTkYFu|*%UMluR|hB6*~^p$rqm zTJU1wT|C~@1F7|<^*Y1qHet-P_$Fu+Q z$MS#tx%^kZmVer>6HhR5J%gc=@A&F3L8d?JWB(d_WyStM{P48%6|uOR%ocP7l}4M~ zWMh8yZ^nYM-{sfNAYT#lQ*mXp`Sr%EDouW!xnws}+Qh^csbF^pUm>D19Z{l(G{{@1 z?lo0*=&Jklh27vQ;zA+Kkp;fWJs_(&Zb5SpiCEeD6*1N0p+zHM#nYjMvt*7VNt{rw z$*i{orjNMAaJ>^nIBpF5tZ*N5<`%Ovj*qFX9SMT8raAAdk;pPxYJ+}z^cP;K1GT*j zAS!+Lct+EBbOkT5PMlOP0b(UJjG$u(3w_}sT};Jr#vXY}{ft-Y!|1XRxZk%#{x&F= 
zA5k(ClqZNNl_u0qCNxaN){bH;V&&2KjD6FQ<$Q2p#=a?RNQ|x)hL#OvADAKSo-X25 z#vhHqRkyOBS5bUSh2_pAC#K6^kTqP^(umHM*LC14BKzFVDAJn{Rz>{?O}8+nY?sTg1oSb8EiuE(2}q#5?n(-ks}vA8s*I z_dN^btfRLX)Ulh)(VKIYNufc^B1bXJ%7QDpHQ>gPUi!&@2GiJjRp=vUj4rq+cc5t;1Yz3#94J?SE3Dn|FI0t=8@#gR+u}ynhhsY-57O2Ww?}km z^fFftEL(8JFLfD&U;-5jIvDs0t8;Lt#+qr2tgZ`sRNc+ECQAHrr`GDvia6CWVgS@C+aSfcw_1N3Xul#U7lK z=lESGx$Q@}=nMj`>h=zn*7oO@Kh7@Vr04Mxk#LpFk4WH!#`A(OBpA&DS@}lry+e5( zA^fd@+>L1P`E%D}o1_mBtkC3L>Fy{l%S zDPldvSI-Jrl&qeHt1v#hHJ*?K=@5fbS7q7lvzjEd)e27EBa{R^| zzj?}6M8aY-uOMWBsVH0#Si*E6J4;BR%RW@d4jAGJo?7NLjKT>DShdJq`kaToU1$e0 zHqrqh4*)G^z5-$qzVak{53y+xMG=b#S@vEbX&0hsyKr+bW+JHXolPaB)^|H%*;OpK zP>}Ra)7>d$d8bPJGNphkGzzH~3ofj%{GrIgq39ARELd@6;slJASBg?<#OV#GX8+-5ihuc6`M>{%vR_~wm0?#73Bqz9meDqcP8K0(<$YQEzF`OP#I9asYoLj3;Em6i~PsZmf zq6-vhbw=!Q#`+j+5`?x>+M}sHsw?kMmUpWvPiV067%f7`K5TPPU)ZM3ZI#E@Ny@s7 zxd&85J&Ct7{_&Q`{|GG*=CvqN8%N`7MDaDkti2OxL_)txsvk?N z8;h+P%4t!?RS%c66I0gah~Gq&LjL7?B!A(P*`M4nAh9b>DM1b}w5w}kHs^W|c`R(WrBK&;c zbuA*W&?4+WM8kaxlCLl}2>%teV38ComajVQ&clC&Ere*gHIv4o6)9LW`4dxw)aVY6 zdd)>kHCqTY($D-7U@<*T_CR=hfJ-FEPGYv)a- z>-Kyv2nuj@pE>z1^TNB#$@k_@Av@xIThDzfEbe``31XeROX|cO=7o2dz)@}MtP`22B=-{>P+(H0CKa0T^=(iBZ-U+>FX=fhd<@d(fbxB@`|t`M}fEpZfZ6%D3%Z-6~GVjdEs7k$0_M}(lIQYsXNfcLW^vj#TM$5Q*ZDh@COoKeJNr7~%gyvZU zyr;LyFhZMkNSV{7%{{Df=vM_tRk^*Y+#WUfDzn`jzegKct&OTOg%%jSlH`7A+MSWp zTm6QM8yUdWX1^(jS4bJ%?lZZ~cVt@-Z@d3Le%jCIOo$tWdr}WTG8ss09lp#Ag}Wqa_bk8_KFVHxTx4!M%U3`w zYE!3J<$R>wK<6W#b=cbRQ3zWj;uTG3C##v+IYF|~hIp>na?Xk3f~*`xbuAo$BA^st z4-tRZHXI2?cy^|bz2e{&N>GlM(T88bSM>AHfkEfvv;KDWH$bfE4l#<&Sa9?Q>F@1X zA#2UMPZCkh2JJdGXhh=|VDg%C-y9 zD3ojnlEV6q?00+odjuMV!|-Ck@eTLMWg?Y^)xUk9(NPt-7t$ZL>O}hYy zRewa#fQiCR0fel*-NKeW(IF%@oDp=L7xp3HvL}dCFFLN{ppE`M}KS&?aE+LJ;nzLHx z@xBy>D>_1GnuCfZpVKJkRaTqEVqt#!kjbk1G`2{T(P&6%&?eT%Qmd7f2br=Krlfg3 zvsxdSI|;`-4DBiP3gr1bSO|M_JM>FYQ&9^{HTShtVDWD@a)Iisv0v zqb*o;#FXD=gyku$XgsW7IwW_7jB=!ot`Ea_{zKo?F|-A@hW5K-lT@St&h~9l{n)qb zmh#XiRe;4eVTB@e^k9R>NIM1$U1Ip|@f@F2j$is?Ob((y7*hDKqF0x`ciboIv&_Bn 
zf`f+iJ#wg6aW#^LlT26%$2IYHan(bBu8=}5D!yhU24=-Q)0xe)=mvty5X@E5slkvV zJ_({E;1)wND(EqpoV_6pCm09_FmJq zy=6H;+l`v@Y5(Cs_Z|9D0IHFEfy>U=4%%|~yWPTaGeyl*>p$J}?zi0MLPWC0rSN5du{f`zZoH(aw6zNoIcW-Gp8N;@gb zJgtuJoQgX*9?~EPZyFBXJrucrI{Anx?eJVgoqBtkcx~9@)j$ z&u6>~-hka4p;8pxeYPWZ%r8Uk5-$xaRtF;Bz1oC944=mENYrdP1AtVnf`D4yS~>2uiU$M*jch|8NCQBN?7eVqo(a z3>v*bW-==g;ezI%M#C74%pbqdz4sC4?K_-nH~8nS@{hjA?L5wF?dBqoVHa*9^@GKg zgSo~18F>SVncTQkenheWAq@U8T%TyJM+DbBjO!A@*&fK*3@E~qZV~`11WnQP#Ev7j zy)1`{g{Qm)AYR-kLA=CK<8Sv|p)*K>%RpH;p$$YB(SgK3Bh)6CuOe6r7VE;IG{p*5 zj!{99#sHYNV;IcvVF(GwU+`BO_NU;aBLr4kMBQitBU*yWg z3F^5W_7nQ>d#>bJfCqV{8~G3v;gf};d_s?oi#U05oi$vMj8L|o1TV0`>@+(RNAkN` zNMoSzLq8j#J7Cog#uKcSS!X1S9tC)+oq z3K)*m+)n-N^hkml*rG;v0#R&p&>?5B?LWv<1fp;xYLy$DT7;~YdW|ph8C&8tLWC^Z zWL%gibO>2Ap>&C-7;r_8_#=?j(!f7Y@D-&jJ8R)f2K2%|WHb#CmbETGib5(WBD3hf zYr1M@Tja=&m{FJ5vF*`AJ7Ul)L_pRE0j)Txchaa&$|&CH2c(0q*yvG=6JqabF4;&J zfg4aU1_{}9gq6~?s-g61adx95t5HB|7e8;00C8B@`;4pxnmAh1E&^f^$~r9E)hTK2 zmIAI?j|mQ*6ttb@A2~1Ry(}WDpRbcRtZTQ&-*{)@-p5n_^QZEk{4eDvpXwe6EaOUo zScZ8V=aJB2LGl&DU_8x!ueZ%(ZI+$2=55w_a<_jL1D@i9uY33{P~`v05Ba*kpJ)GK zapft#Vk}k$30PJmA{vZF!dJ369S{qzRBM>4H}(FhzlHrfZI3CnPMcUh8(TP@Ri{a- zP-WEWvg%AR#WS&Gvn1tUuNvS~-fO5iX{vj{Q2TV&%DsH*HJ;VY!IV--%`Aw_np zEMqSTWJumU9bZ3*&LCnu<0^4a84t^od#6mgB~2{xe7MBp0Xjq*0w0HEDO|%bJ3qM6 z>yN7g|F}KAA21f0H;O*^i?^A;YylZ0OddrCXM423Ego+@xZcTJ-*iqOGNS7+_oql~ z7z!wOgmeXDSS8kvM^_4w2!RG=R?Bp7VSjqlBoHg4kW+fZSa*!cX;DDwf-N7{BP)0y ztHQ&|jD2J2dqU^79s<#;J<3Vrbl7X zOy_MP7Qso5G*Pqy;kg200j|2=oj-DazT+L{2%3X;=MUYo+4+i1X@Coh@YNLq5DT^Y zCCi>G)~XkE)fbF)mn|qDR>e6Z_^R&mJc{tuB`XT9ctT=0UN$#gHtc@c0h2Ik33I({b^VK~fWMN``|6S9gHY)C{)al*4m;gK)jlmJ8f zzzRRI7G$QFDLH4$A9fD-lbP~Cogc2cGcd;-`l}!Y~{dqRZ=}zP{zqD7)Z_hY>SjA_D=NPd+(L#jUWNA0<2)~y@^eg6jfLxiWJpl|8{a? 
z$Bvz5J8>*Gi4uuAGZ^&Vff@92?(>~*FoKGmmFwSGD{EP6ucL#(U@$WPG0(jF-R};o zbPlex55#a)VdYz~!mE6ldj*y`Nmg@strw1KEj4wnnd6`peC1Ll_{tqSjo>TvE3;H3 z(y`PoAt%zEW#%gcD0+MXu>y1gpVZJ%;A%Qwseufi%U047&n<{jt9=pXrevq7NGmgInNOxe7q=|G>2$t= zroPHopck;jia;5vAf+U+F>s2n#CKQkDljR`V^d{Gur_TL}kE0~G` zu|zr6KjG9s&2nyh(n<6RC13HiW+Rq~(9|?|*6x_gEUZmKEGcJ+BYgFx0<;8TO|3Ot z9#Cr;SPx=ZhcGEzXB}E^8(z;I2Ygvj7XQ*cVjDfC5DOJctbR_dr9Y@Heb$F&>&bFw ziZE_XO(J|%P@S{^6-$@AsVb#Nm%dq-iG;OXx2l*#cNMY7b>GJhXtAKM@LBh}lF zX?LE~>^`H}d%p6(B`w>ZPhG8H_4Cs=8?WAJe&x;PJ0CQ^|AUUd{paqV{>t#1(qRS( zJMlnHDC~VUj*qK+HA%{n>9u|uUyV$TQ-9&BKOOo0B)%G(P(;Wmf0##Qm#}|qxNm%P zcv5V>SiiHxGwuOl=9znx*n+-*l={&0#+cj=zogoL&^+d+Hk?J{;B?gvps80^}<)V$NK20p>CyNMA_Lb0vWcnV{-~#(;%EzSuH*(tpRD> zynkHmS^bz*$v5Wx$|j)MF{IVjuinzLX1-I2eQ3=bm)~Qqa13KYwu|-67T%vYhA?RM zv3<0b5J=aA26SD*c#}6x`X)EgE00>+j-LwU3J(?g4LvhH4a%#GEvk>(+z`F74pSEC z4Uy~Wf^xMS*Pm4KkB-4toWoa1+uP!|G^X$E#3Yumt$xi3?vCk&YfJ1Jx< zLL;eF&?9xS2lEwt#qc2F6~lvDUmV{2LjRVlhCMe%556wMBAn!suY~97?a3o=O|a|v z@Y@P?=>FI96;p&5-345|Fudie*h{$fic#zeq!afnYXpg8A4RZIBiR+=tqPx;peM+J zX5ebu3u0Pu<1^Uo$!1qXl<4)bJvS!z-b9!iDYF~(X?{?}EcZ{JaQ+1LrVlvB{s*o=;49jNUfCd4SK!+2 z&`mw?RZMX|5its13GwPTzwLSdgUY*a>0Y|2MZ7w3vHZa4ianTh_iHz7D_^~#G&8RxIlD9= z^-)A(NoZ`De^iB6ScQ9VIirG3ft7XvXTZWi)&7mxV~D`uVB7#`UyhWx2Mof-XA2a$GM-@<+RjqL7K8EZH=aN6BCynWQp~+yaFWi(5lqV)>v+dQwxcRPoBh zRFB!yK;*MR$U?Ica+YKLOoux7ibkPH1e+#*nH!Fr$j{0nk3u{UwFfGX+)1HmsrUJd zM`6?AXDYNT+J(NgQlMVpUn^EctJ|V+ze^QM3R&=#YgCvH1rY7^JhkToh=gqx~}b5Gk!=`yxy(zjKpdkJ;|x&1^%@u`a4=QOkk4?j~SRzJU3bNSVJ;znP(+j#rkrg#3f?YsZjj)e8g z2gcHxf##kGB&-27tAUM#W7Oo7NM+(HAz}T6ul{thlDF`u^3@0-4HNh);41-IW3&{M zzES0SKkW@o`k8lpMPxxw;^y(N>@M%P+Ev?>wfouNJQ}g8J7L{m;)a2kbv+4%yWlvcBhM9fZg=n@Ik6ofVRs?>QcR=(g zZG*}kBFY?MH5MTy3%ou-(ItVIHo9#%LmW!nMx%3LS>(E=gsp91>-0X^j}o`HWbWg; zw?j}-8*0~`m`vZ*>y`Eh2`g=9Pgs66-9y40R&Hxe-qFtRBRLZA6*;K11rv4zH>gG6nMKS~CcHzHbJn)_(`AVu-$LyCbB&i_nG}W7}^j8(tn? 
z{rvEz8%ov{W8#U{+{?yu-&3yqNJ&0#OuI6fc0mc--S3!Fw<2D{MB$1E?b6T+$jTy8 zw>(O-BvQL9wwm+?kKBImyg|E^w#8w(+1@3yTpm2`@X0Lu2k_Ng2PCXVfH(ejN*LYM zu|eR9noFcJD^#LeY!lj6L^Uj+)fl8(;IEnMQ%UzwjUv=9ao1yIXEtq)v)G{{D9fSb zaeDz*P}VHRQe4fvpp&A5VS|@lM#OE3|d0&;pmEbAz(dulV{m3L;RXI8sc zcDv8&w!n2Ap__Z*tHfP~gyL=%g5z&bCA0lN_sYE;r2VftJ(7B;KXES~_9g8Zh%fFZ zXf$j?SH#*r|BP<0#2&YpF8kmP8~-+2zg8>HR%`cmJC7b)mwr2!ZY!r6kD!L7F8`6R z=2y)|v7W5ATN&(`97n|(CZ!7AGWL%epeYky^(lg{_|#Y*d^J5F>ksjj3Rm!zKrFtb z&%iAyot?@Lf26;6zwGrl^)I}vz4)Bw`1wl2tDT3mg}b%+MdjHAr75c(C1jMuB$b56 zmjsc}9wFe$Ij~&7m07tWKbCnwXORc{q@CzhQOGQ^ra-Hb79o6PZV}3fFsWZn=PM*E zGhbnVmwd&N6vlUval34oOOpa*(7WGcTF>3S5!(Rlbr}<|wW_#aHsyzQk8%(2|*B^SI{(KT){weE2>o{81M<(^hci_kA!FA*yu2aCHN zzM@4KSDUI?^*NBrIXX znw%U}Cdb)@BmesXUkSwei+=T|Q`Y})z8V`H7e9mHNd}SF@;p8=HZeNR4~)O1)m_T| zOP_hi{K7jy?~~dTl+_)X)fczQ;1aHh%IS&EGlpb#5Hz}S(@^}z{-mvaEc?hiJ+%4h z@y*YS6h1vz^sI5ymA&Dmcd8%0$~oLmnkYe1BD8Z;b97-_^u{iq^tzB$ zT{H*hI(=*%Smheq`i&(&`r;@2B(!pc078$FX-*eB%>Tj!qKVi{f$xxpBi*Y1&_^~%urW@@6=H;3j` z$82oMKQ^-Ml9IW%7ylJlm9u|!fl{cC9(vx9;NL#w&zPji497SD4%B#Nltpg0=rPN$Dep zUKdCsVmu{ZF)(OCtjWW)2r;*_g7CF5{Ov?wQM+%5U4+FijqbQUvi+JY_79F>qvA;8 z4;5?kwefA&m8~x-FxAFqM+9!YFfPeTz|}QjXy3!W=9edSzcRV^rWhMUxnjMb5U*Yv zKl(atL6j>oKPb#s_r@qRD~@0laCPjpLC(P{+*W8-QnpgtfD%)oenuQRx5QD@l_Tvx)ssdFvBo~5#eGeW@46w6oL=kL`nkSkPkMaD zO6VsXA4tApWi!rp8GAK{y>HItVh0?sgcaFKy9nP@NWEoblT}pnve3HuewFil1zgSZ zQb!9tmqZQhv$yGU3MII;OM_MM$~(N$C8)x|zrxnHQp#40AuMt&U*K3V-@bCr z6kM74O2U;?w7!A3L5aAzp!4 zU*Ic|S<3C3;7Z*vDB+6N>**qunXjOf$85FJ%N%2F_9jhloIxPMtYu#=>kE7}tpg}q zesYtbU@Fw12X8MV|4si8VzA~w5NLXsgJ1=nv5RzKcbiR@*7P1uu#c>CC__Dsl zS6{jVd^p=lPpQr*AzrcIMh*;q^$17F5}5RLMLa1CCNT`9YkECyU#o_filc~_u-D6# zJ!0jwI_iii3+OV*S+t&2IZL{;%#JMK#qt&smMFh!fmkYEsf%?)MixRAT|)Q@ue+Qm z6w&SpHDa6)O+e+VfOH+Z2=OOJuBxU*7@uDgzgAsGxIrhfMpX%m7GdTNP0kMOs-2oO zyR^dSt|Hdjy;U3bYYPunB4O<~p&U`7^}PPv)#@w4g;jt3jk-79Z+h$d z&ENfT$4`IR^Q%w0DryEr7M5AUnw$W!#wNa0ys-a;ul{tl61VWD@D<7xy+Gg!@rqr~ z%E+VwUkxfh`%GC`_#fd}73frqQP74CPw!1B81rA*%zDA(jT6zUjie~RSBXXasm1+S 
zhf%ABwmvt3V6ydvp`zyu8?W>fobSj#*PVB!n=T9hch;@kRlniF5SrDhQ{7bV>7KlE{ls3ej(NkC0sKy?zr7d#)m9?hUmBu6 zNFBI63Sz-mQo@3=BwsN+NSXKJfLJU6Ja9*me6{oCF@abw${O48A|rv~Tj0eaeH8Ed zrsqe{vT$1o!<7hOnAlDT!%NEUSCl<3D|=pMOL+R7$+MWs-lEZ| z?7TC%=bln@YjW#r%DU?#t1kBv^Ugk<f1wL9)w1hli zEisQT#H%SQHQ|K3URwuLJB2s#+U*hFsm0c<%g(*aGsxiV+rHHHpDtf( z?Ce(>nv{_dg&5F*!LGrH-rnIZtX8Hc7%99s#M^5RB59rqM_I!OAa3#`2(@P?gOG4sH+1*Uk z6;TY10hKmB6&7CQ^aRB)0(lw^gzXEsnr&keuOwfY;R?P&%i<_ogp6~VE$(9C9%s8c z*#}n1pA;wJhvjJs8a4A3&B5t>1yfDuEAgRpXQ}WET%mmlxZ-!-|xEfD7na`b z7FFw?)E1o46_(STxM?7zcxct((VS!bUB&6MLy8m)JJBE9+QY&@-*g6Jt89byOFYU6z?qLl!Sc~ei;ot%{?<15lfPc} zefL-&yStstG%)sd@>tCS9f}+G_s2)D81D+M&lG!5uDQ*y?$s(&?EfPPHFWH zY1N)QrV-ic)))KLyI(b)H%;q`<1~FVAVbK=MXD;$of5xHK{By?q^9E5YbQaZHaG`JQ z#eU$Y@cDs_&-TmwEBb@QFNqf6{#&%qm>(2;g$qm6eGp5))z|VBjXZ%^uZ+W2_@<^1 zOPp_cUO3)G`smehcIYr&2vgBFcz?3FHP=!MLF=QBBHKt2-!*&%GJJC#}3{Y zJ9JZsS7yFqJW$r@yAx;b3pwlP?E%Cq+JfNL@z)2@ucWj6{M&f0Mo_Leo_lMQ{@{r> z2T!~;eEKb6bO*6cfmrV-2cWFCm7VvL4X+Mwc}>}UPuX!>S^xaN=9ecjo)&x+bKDTJ zw};SC=QRy>*)_JAwKf?wc3E|fs~TMk+I@=*{@aFpiUvIM`>bMXmj-CCwM)KwlGz8x z&j@EAU17FUnS?9Et0&p9OZ=k>SBrxhSA@2JSg2U@eRU8Nkq#6()gsaQ(Fp&QC+F;p z*3a@Pf0Dh@0aXhE^$Yz)EuI=L;w>$i0KA}+oFnrI?Lr%!B3tPaF7hsSj%y*Y!8)QU zAjjaA)aH`d;+EXXB1ILknmluw{noXIZSFGh)z1E?t?j5-;aeJ60G)Mwh+bjZ!M@b} zeaUda6)KJLwy4yU4*P7UJzk8H zf)YynV@kXu$~;0Uh+?qusbo}8xY?1doXY3emqAu@Y&ElOw6mzGwxBq%(LRX;VXK=h zHV6y!GV>K?IV|q#ti5RgiV&JqN-*%|{rd|2B zZAF=HoLi9JaR#`^%{u93I_o40nGuT<$V$SMS;FFZFqc`#g0;*&N=~@4_$qaajrmC2 zIx3bFvc$4xHR?(tmg)S9h^6us&{a=8;ZP@ON&uBZ-M4M4McLF)Gt6aM{kUxvHPc2X z%9e>vQ%&iIrm;=a6NNR)MK2#9>va~oi-|%}-olY3##sDot(Zqa6L*Jc%KWtUJO z)^3bgRY+Jx2di*lZ8@slMxPzTIH9zFU|r5HKkh2k4>vPNII0+!B^(n$4ItK+C`;ulG0ryCJN*3re@Kw|(&hiR_~d{4 z?R@EJF@N~^6FTiobY#cJWj+J_K@ra|I>!7U)>wqB@j+#BaH6AIDLk&XkN*UHDRYMs zmT3%0?T^YbtSlH!UavTY={@6`qE;JO^qjbPFkwqCi}{GzTz$5C?d6`Ls|G@!Hec;4 ze5R-HnLf0vO;-lio;NT^2w!FF?Hz6w}f?Uk-|OVZ@-S35)g|o^V~j8r z6NSE673gD;>l^%YG;mkk=9W~Zy>@nxl1g)MWpN9`kJzq2th|#(y!zP52=zj_T79NB 
z@4N}C)?G3nUeO!`u5gl*u0ZX(&9c9d9oGkF5gxcTF7&H6gau0?)}c2|h&8=GxcBy` zx@~#V%vVCfy8d~cd`lsKofU^T-chUMP$6(7d|6k=w>&qp<@uo@S5G>XQ7Biy)hss_5o%|9RRLGaLR;XgC4mj|eX8gc zGEO+#y$soEI$zBLvAlKA)SN)=+)(}e@R|jowete?bA5I5P_fifTZiJ+9MBTUIevQU zpgJN7X%{lZ&y-)-no(vR7yFmlMAm>{5=m^==6}!Ek#L?L8 zz2v%MWkj2(;6QG4%hC+~hMN6oCc_y6UW!sXu7*9ToO1Z)`WG76Ks%2$J? z79sX2!B^uwC{j|hl3hac@oV@>#u5`dZywuy$sUY znKa)~v3g@g`kJzY%tuklk3tek{9;SJB1+st%3J~~?R`jPuf)o}*i|#%xdONnfnzo( zSK3+DI#DWO$pJTUK80AH^Od7cTCgNvnNk{9V5Fiia3vHiRlFj4l%Emel?bQxt(1sm z0ciPFauk$h^0%vqrTX51EArlD!|_XeC8F9@TRTuBbSL$NTk_SnY|1GROW2&I;EEFg zS9W^ISKqd(c+485OMD?;HPt25A@2wdp@Bu~4I4SAgQBGt=K@rmO$A@kK;#m0NyL(^ zz$vm4-g2pxaa8$A`?Y)}Z-iS=JzAQ_9ksyKAK@!;PgG>$-ZDW+sjX5qh<<98(0s?_ zH3hl|pc0D$W&M|YB~`313Rz#~D@<4l zM~};0AQo9z$22=mR_;23%f0I0rK*$9)I9ZK-KCdmp1oQ3(%t%-Z#Ufiu<5)1(Dsi% z>-x8kIzKKOtZgR&T-c zyc1eNSM^1&HO3baW;K*`bRh5ipcrj?*0Axp-c8SS7e3#^=-l=fhk3c*c-feHvJ2TK z@8p2wD>f#Fu5HNNJL;KIow`klS~EPy;QCA5Lq1s^{Haw~x#)>U(=q?e;TM(LKT@1merzA}@qFK3E(?2TpH%OV)MTI1 z?3&%?y{6qdu{5->6<~1B_-x113L}B`@gG4}XjYJwN9v=L;-1u(Dz2d{uONaP#fq9j{MNJ6~5eT_4VT zdLZ@Uz{)em*kk=jSialaJU6#{ZRzmZ(&@L8O9lh?3UzXczd`K~|QL9UK=2)X(>+p68*XkRxdMiQERa zazJaYCyhc8;J}id1;I7*W9sL|)Nx!K+q5LQd0`l9qXlKn@#Oo82rPOBOMLYrGsl;R zLcMKpBa}s8jzd%vE-aV$*7!n2EVYSi@ks7+N$PM&Xhg#D>+g=nC1`8@iE=IX$u4 z`eL@xE9}C}PBAu^u&Z6@SEyJA`twdttUfV8q;1NsJ{FymGRMNHko11P!~xGJgJW=y zZ9uzAL^qCihmb~_fI5$eZd>mb>{y-wy_QzLUbXgXG0kJd? 
z5lUFx`A0rHOn#MdykGd+)s;Gs6#|vHkge!0iiY6eP_F?<8yy^(>@|$_4~T7p4<7Zr z_ubFldbb>B`_nI!pSfId@+m!IgGIY5^0$;_uP;x|Esalq6qQmM7+>NYRR&x+1(ezO zR9Javmbq3eaw_MaHmRW$hIYYG*19LGszlkS)^UzD+Q+Rm=E8|hOH1~ocyTvPezEt2 zLU^1eUU4oM*hg;)=$2~-19aAbI#h8>}=9;|cH@JrIJdFH)9BEkK)^K~9$wh6+sLrv(;QhySaVq7Ot~}wT2sxzI@MlPOB2wnUx^cytmJWqb*-svYGzvN zO{Gd$Gpx0;q^>22Yf8%!=@S&71?5766dMc|xK+zup;WQNKX!I7F*58c8-;qCaFL5; z8KS3%zq607v&V>%AXF^k8vN7hSeHZGXjpcQ*qF0g&$^u00=-nRK&+G^oy@|*=$^Gz zlf7LdR4kRU@^|Uh?$)l~t1CR9-F!r&N?4V9RSE0Z<*KvK*Is_9{@EM#S8vz7aJ#<()A~ief~*+iB=?yS?BP-9YGQa;889l>ZfIN+|HFF~t6@4K*K~(v_68+& z#N-;{^9I8+ja~`u-YeVStEjbov6!#+4rU)8VB6h>X9w_gqhE>U;L}~}FL$rI*j?~c z*Tzc&h*ud0+md&;uw^E7R}U}~TF{ubYb0l{*mvU;QE%Z>^^Ms-+j2w+PM-Lig}+^3 zSF*yhEGVTVAf;}m)j#61vX3Hp@;CfeJ4V%6`05t9ee$@&k6l*&j(NbSO|?!dAGoDe z#%&$3Ppoyz=yXhLwOd)o3!GbaP0E4cRVODrSCwqJq6FlXc%**{-9>F^kcH0S>-D(t*Pa~*u1FJIeNsfVvk-?ghP<uwD~V;9Y^AhS8q%v8_Bn=j z*#@?;F~=>o6GB+-Q{fccWbM^p=}LdFW4UETVE8YqdX*l9b%cYo_zivV(>QPiFVW#6 z0&9F|7+s5aQA!9_U&mK$#p3!RlwoMN&q$i+C?TaiePf0prLj%<`F}RO{ox~GuU>t< z>e6%Nr!VV{oUST9Sh;R{SGklV&*H9D|2U1Rl~$y6#-YSS}E5EzM4|5q$Kh!>q-j5GVzs-=9>_g zohn;R@fG?Nd?jGasth*c1zk!ga1_M2z_nKTu{mNE!iJ3*n@tT5x#}ffO_7zbqa!-; z8n(-GHqctQPvk)gyOq6*P2Uq38#CpWM`1(Acbs&cqPls zSE4_t^3`KjHPj3%{S2!rSpu|Vqj1_$B9@d8C1^<@OIq=H2&h;fmQ=B1uTUnvOT=0c zSZx_xjTcK~)duOU!)gR>MN|`o&BFKTh!vVC5KGnEB zDvzXUrlmaN+X#xBZS&|Gvp@Q)IS=OAR4#FSxWMUm9$~HgiT>*)fA11k;+MfMc8R6G z#yPyzEv(Ziv@UE-!_we?vIzTGXhF4Kj@~szLwt}$WT{<3wdblf|NPFdq8{(Ode^K< zJV-QRtmA&k_CVTyFjeMWBeIo$w$3j4cR{)I7Dteyhzq*R^Y7{hrulC*; zr$2a*E}`Trx`cO?LwALKbrh=-lY_61!dI{sh=s)+zCyf$uY~+1jaaXas;x$oezle6 zpc>jB`3n8Yq+(qkm--d`LHLT=_0l*lEa2+UEm2sokg)dM7(IMz0>t8Y@aD*&TcfgF zDB|4jOfp)?h5KF|*!zlcKZtc}_|R*^M{W-veSL`NQ3Bq@8l87(2u_@QbND>X!4D=c zaC~o^3nzB#h<-q^0Y<2&z+?-MSp3GD7n6lPpBCY|k%KiMC7 zq&MtP9~FGCKl1o+^og<1LqmQ$dfhg3*yS{=SXnLftAH|+Mdx{!VNjp#UWx_9s;VW_o`D1GpT5TKI zu*kQx`Y_9Vs?l)cUI>R zibbQ4*^pJo1}P-0w4KJ4oBN~I^@Qej2d4KB)Zi4>NeUKxWgpgrAOkj7`&HTbH@F1$ 
zu5f6!a%i2m{D(jM^<<}_boY*q4k`mZqr-#n)qtR+p@9)1+=oTqkhf7l6mUhUI+se7N6;Ilf&$1E` zOd=@2vSOx1<;)ejnHFePUy!Y)Qr$R?VSa!<6NO47IZ1$JSf zfO>Q$OQdqHdYrAU=^^4?JUNb{e>cxbq*Osxl+={?)|7NiaUv1ObOjf|SCH8BUfx_+ zld1!C$Tp?auOw~qjks^(8_8vVu0s5gg_lW|=|lJGIquag2L!a{xYY{h)mLh|3k%BP zLzyXx*Bv2CmQ2KgugvCE8$8UEvqO=S!OMZrlEhH|u!UZJZp?8|YW?1-jR(Y1!mY=(+tr{31_=*c)SY;` z`ux@UD=#-Zf201zJM}m3H;F;Q@3wyL#~nZU_ugNAYACN8?@+TwK`i*{Yb30RKPX^* z6%zjVC4U}N`CoafUwxwfpnf$mHp-41Wt6S~*BKZa?H*Qs@(U#{=NH*~`ojvk0<+qq z^9(VodqdLNBC>j7*9=9i8V*S94an$?C@@BEG$igYq#qv2K4oM<-uh<^=vQzBM>00^ z&UWP-Z(DPkhz0}VRno2|^4nRFlfJt@aFrg{mV2@`Jg*xk6$T07jJ~<(ce5-?XDlh1 zXIHtv`4gMKGKT=2jbCZpx{i4s|IGR)hB)}knO3Mg>G%t$=trsBdOVXpb5Aa1V~#`I zBg=>fE-Q7;NqV1E3~=e4(<+xm{njm8W1I9KdG8<Y_P_;VQnUgYAb7 zu^)%#5jWaQuMoZ>I|2hde1&+Gx{q+k4)`klKo9#9S?CO6F($eG;_&*X1_fMQ8W8=# z=LYEyA}FCO@$kuAN52BGfGY;~5U&{AV`>k?GV>KiEWuaz6ro?;odB_5Ey-8=Unieo z6#WXsl5jX2#NxQ` z6--%UdtM$nEQBnxH&{YS;Sj zt#Jwo3&fIq1!7(LP`N;h@B`(@+v7(*P_zD$^XsmVDM{}PZ1hG^V zt3Uc!f8;SkN472wuE*BC&|7T|x@o{Frq4vVX=b~XKj~3E+ouxG z)tqqsqPV)nsZDcIYL?}-*lp~!Ti0uq-L@>LVNq1oyuk8B!Ievcb<0?TsK$Wv9+p3< zOl{kTHB$0J#4CYVVYSxbb+!>r_E9aiQLT2-tq$=mwz2j0E1SGlb@3)8cx_kMM#xG? 
zSP?}Xk()bc6k=;tayPK{H+ibg^^ELWlANb``u`nr$WEO7hiA zR2|7z7TOsrXbApczQT)TQgsAh0UIJ5(o1j7Pmp{iEm)Y_RlWkTgl5GXF{=Qjq*S%Ut6>4#^w`Iwx|=wPxGj8NPC~Ha-2ts_g56}cn>zF=##*_n z6vPE+nSQqg^N3wS+ya&@I)Hp@V2Kh;rM4PnI}euN5=1Jn>c8YG30EA&9h%J2EL*=ndebA*Ig8#B};y2Qm$0TyW}fk+bPBd&4?xSD{SrGve8TZ zO7fM2t8ZIYQ&Vt-ie(bABxQX`(fR^k*;ha5P$Nxia^R6CiXV!IZcuxLCd5+b3NZ%I zk4DEP>77EyX1WkL%O<48HcY%Esfbk#Vg-O$nKhwV)g-#hz0Y{vS^A8GB@#!~jX7yW zS`aH+hPv;lT3xKm+gY`4k8b@w&BlY3n~zp*JyEgqbR}7%2QO%4i|`UbSg+R6B7F5; z!>zZQ-gvL&{r|V)@BgLiUq9-8P&wF$%bh90NwJaeFMRbsgKPgSzT%xN64uzr2wrzq z8np~6=U(avO8 z&DkrVV}`|V=h~KoSTn6Ze$wH=0;dNnd_P&@_fP+i)!oD`4el#TXb9Q|)jCJiScLv+ z+cm@XYrTP)B_WxOuCZFHurkZAM{Y?qtb2yC!Zvg|rj%O7{r1#bO2Y0Q*UU28gohrP z)!x~So>{fY#ca{7k11+T*<~OgV%6bs*7SI1Y2YqK8e=xMB=71;+SM7er75AfHT^*E z%3bX%_jK|Kh>*ojXUSLSSL@CXz*mJ=hA>~DCZQS?t0@YQ73M3B6k+KU;?=>|6!=Q$ zSFcTy?= zgy&q;t&x3*X*Y*X+*eM#Imy!Jqi>8HyF1E)!qaanXE0^GBie%S)rEKI8;)Q6U=lft zj^WX_M~{D~?0~Qp{p~$0y!Tb9BwsyO&ZM8|SB@1hDxE9n|5LiB!<%52e^z|@D&GysH_N|)b!-;-T zXro0;hh=Q%vdGp&Aq{hVt5B#!1JSiizVYX!hgTx85XY^2mS@G2{@U4LRdZu%7AG|> z&1zY)rqQOb-DB5az}``hqCV@)x}|YM$7`%2HN<^5M2n#zdI6MOgx)GxEB>yA)B{=| z7JNmsz%rDnqISCo;@8@2Birnv+w5Zq9jzy}%O$NXD4&fvok&>l6%tl>Q3n!M{LUWu z3QaX_e^2J2{e zT9de zw&AQPDoM>(#jCW^(8SV!s7K5U(iyb!DqH3PTs@j^TRz{Wa*mZ|mQ|&!>H5_ZE41iW zGp#f;tw_I=&MPTr;l(txhd9XTvl+Xz5pF<*zsM6^I32 z*#=aDSQMY7q<)3c1z*t?RN+c&sRJ8;2NM*TGRcKsT22L;_zJ|b5*94_gD6*&RIZ3b zki{Be6|uym-VD1kDfn;%-j@2*QS_mfc-8Y#&i6tm;c`-KrO?V-0E~iSd6Qe&9M@8g zB1^=*obA<%Ju4~Fz2w$K)8a)pEqTkLTy=6gHdB_|GDO5 zzFQr@X|8D>*3WYlHP@w%nlq(hnOMuIme?zj8R)=DFP4d~tgF9mr8n`FRgE~|L$$tT zStaW+>*~j#G8_F2+iF=d)d6!C{5IRMcD7TES|U)wB;1K@(JEqLdYAhOy~Q#eA(ndS z7yH&yym8|ai=cYKviJ+YTp>&>-8Yk zop)Q_{eI`){-o>2zv%q+qketsq{tfm3t#=upq2Vv{u_KXK2Bc%zG7(xgM`S8rA+yJqBDqi6}EaHZqrD@_K}o*L)j;WSDiJA zjl<6xHxf-iJv~SsRKZyTvF!vJaLhQ+NiqY@D`cXm^)11+GM_$xFuL zmY=X>ZKmZz=CkIzJ)CL%k$+nAKb9#!uU1apQWkps+#y&Okkm8F?mt`-Klyo+lCbOJ 
zMP7d!mD}VJhwZR}{Ww19t@bgMHW3e<6Usf(Dm>E5_gq()$qL)l>c6JRGpoTbx7{~y_6)>r=?q(ko83U(DHXYiCT33>sYJ!?$$L6e)C5RcIlKpx^PP{j9^1I5R_a^tfGrs5k#O^mn56ft*+v8ha z9ock!WZl)#+-FDAK&%S`BsL_TAC5XTK%+2Vcen4BE_ao$md95ujjSTPePM`(B8Gil zkWNhb1y;}D_avxpaaa>%Ws}fj71zc2yufNfP#)#L)ohni+Jd~zm+$cJVR2?@(*RcFAOj?ncz z5rw^xn|mXQx-hyEhs9Dtmgew#CDEPkGX}pbmWl5J>3gtdMQv^gUDx7AU};<@E1s=< z+MPoU4neK+97_EYj27;-c3y1`etk)$AY1Sxd)PYzF@U77f5#DIRK z0@WCC4V2)kL5`wDCf#BPhGA$ezv-JpN4HXWL_n&E{V%5i%KpJiYfIDd*mKmYU5q9!mVtPV+kWF^Q_B( ztJ#*CCoI0eSC0c!5Em9K%Bm8+nnEnWSHRU5_)3+qOr0tC!A;FiP?bse3f7XL4N|`n ze5JarASS`ZiaA3al~s(Mr{cGt-QH7x_17kSr9xPps#L@XFu>`}`l z5|bng!B^B1ZYC}3t9%7qnN=xyYvP$#w=quVD>N%vTp7Mv=v7CV`D(tG3RmuetmeAZ zOfL{itp>?gX2haVD5WjYF?6nz3!p^=)>M}e#1iZTVoCiZDPtZiqG`mh2VsvlBlxuj+-~sN$mdIWG6F$DM^Wi}VKGcDY1VyT@01 zuN14F1JdKt~HXEn02ZTzFPO(z@`@labc0PNfs7Z#o9qO z<`8Q@tSHUFj6H_LEnP7i+k#frV!m1)_%Y%t8v~cJ63(go8*@LJvHa2FmL=<5QA70K9^Z73Sxy1d~E>WMvtS^1(?-V*I%R@`ZZ+A;< z^hj(Zl8Y73&M_4cc^z)amG0>pubk?rqF!FzJTmK@QnW59WuO;=6?_F*aYVmD(E_ew zidsX~)__>?+gstQ>?1~)it+jQ;`Wuy(F@$_q(z9!o#i;_SMU{K^+Y0Ue_;fr2>oh0 zUqM+!rlVhpNY7i7hq071LU>2;m56J=?#@O+3|9zNZ-eRq?ek?whGpzLJQA>0O3m ziT0(6SVGIX!S)<+pJ6cgiXwTQWjfrMov#e+zB#!6*6_jCM_A>2_>Hk6_ePJuIV$<; zGP8v5E0^D!xb*%ce1+Hj^am4X{*Q9x-N^&*jPHAMV$a<%_-g;%33gcTygk1C=H!-F zCf8k?$bM#o#L?u-!y**xlp*{`pYXbG?sUy>w$H4$POe)KTfH>8dP!9EqKK+REJzBk zWyB8)`ohrqB~dMw30=0yefG(HmT?_ig0A(XSEXnIVr6$_V@~CKH_bv9-qZ^()@+o4 z0R4if`Xx!t%d$Ew^ZV?J`doMScpvHsIc5wz(C50L!6LQPa^)lEG_6loT|j0-U`8XG z#Qjqne3R=u;;TR`=cqcnusXI3*sJ1|V@wl#MRU+9qyfIN3T?GOT#67zEX&9SBrMmo zChy!Gt&zqd8IxH)uF zlYefTXHu_gl+n(w&oy+IxKUfb7T=fwOV@e}hepSMUIYjCn070he|_#^-(bJe*V{ij zNQ`^`;BddtuSQU$7zG>~G7@7Y_CJ3OUyT?C*rziz(9f4)ZXX=!#}v=bo&Irso$()j zTJz!eYTtaj`jy*NPd~3Cmf^rr-IiUNyiMg9`68`7CapXovD`nR)IIPsC!a?aZV#8R zx5)OF^bBWp-Xk{}qTOC~L}dB@oL&`%Qa|iLabQ=m7>N5KF?zGT-_Y{*AmMi-mVSs3KyW zr8z2OzGwLYuL@!93#!4wx5QsVKYDqPZbgXJGOWrrs+P_5G@)3%YEPCp!YM{;{P|>d zd24temcIHG-n!*9M%7w@1I@d}GN1{4Y!M2Kd(A>O{9Pu#;zP(ml-j~W!lG#5K~~a^ 
z&QZ1$x#tqD04K-_Lgy3&oG(^7)>k=DO=RN&=bdsh{If#nH@HLc*F-u_R?l zzWNeeAz?`plO01=>ZogRYD9RuYIHZv52}czHUv#7mSnAOS?EQn5|$*cuca(i!V+5! zr*{b@U(rQ1Z-tgrDY+3*PCjqYBlx<=re4Pt2`6*-bvLYjs~iG z2}81ULc+?gid|b3yH1m^L6f+#5($f0LL@ASSd0^j7U3=pL8I#qXwa`VpQzYzM)U{G zd?kY1U#cagffk|28hxwnjrUsL{$9)Xf7<%bAGQCkyt}qzM5HvZE9a{g_rLJf{|H|} zT+=OB)AJbwUyWjHpBNigItG=e@6`pYd6-tLB>c#GL^F2Osj?*sl`DCuc zM@!v5wGY;?XV5OPly0DFbgfTxFFlCu7nRd@6_=!v<)IJlqqY879moro{v`paElv@Y zXpST#OVd7)}2*$T~pwzm~Aa_+gc=Ir62A?XUIKiK*HK^d05?~JW8tPu9snwNt$;I z`axC%Q*g5XwzBsYyJ=8<2xm|ZzaeTmU(pi;rC_SPw;32DlY#Jd!B&HQ{xqb;v9sT}#Lc(87)-)XTW9ybg*HDY1>!<}0 zwF{$~mM3)BrS>^y7#&jjSH!f<4`%L_I8Cq6ga1xZJ_es=y zCE&l(yT&x|cXdTbC46NS(r6LfVinqEAJuIW-o7HZ0rAQ%rrA2O!7{SiF{zHM(ZIaU z!2FKjb(pUNU&R;q(F9b(MLTi#Aztwtm2sdW^+3DWo3pPoaesT> zI8GVbWpmIhWX)vVjI&-1p;G&d&L+bcEkCeIL{}x6U&ceA8tdbHI%)l>!$MZaFezc}0Gd5$%U13z_3)h_Y--7>#V!8ttemcfsf1peM3 zrYs<<1($DNTAyRgBmZ1|Oi{0UdM%-&Zpm!GX~i(_7+G!~Uh27$SH?!`&}uu?T+5KQqZO z_vmQBDJApJ5Goc89g??mPmbiA7+iB^c*7Oq(be{{hOm|ZEtRh%S;17M z(9zpsv=E@e=tbfdvx->jIc|N?Q2fdeVO-o9?h@>W;3i81dA%`w<~?dZaaN4i@2;T{+s{VINU2a5{>^ILqg z+ez874QasQ?hx3%+_l=mt;W%}(cYupEwIDFz1cmq&(XCuB>Gpnex*y9G>(ptf;A$v zE8-aj+45{;Ko|$W5STUfpGd|M#YiE1C3tMOU(|?5zA`YBG_2G%PyF;}b$|PV#B#D1BC{T7~UOl$5i8D;96zJ-~Lwm1Psov{H}wPXoxGDQHP8?2E~53QVmESg8$6f%dibArEbWN}So@5>d~CU5xpS+zBrh zEQ*CMpdHc~dJ9m5xWxTIW#ULK97Mz=(qtuAOLeE4_{yE&xGE`3E%4&{C{$H^iRi|v zvz9#V6d%foPVp_&9;no>zI5cfqxnE4ln{b|D>O8khHm0W!7ajv@1zH})R+hn=`cMh ziysONS)N!GjjHj$b0R70TUI)=`3krKtG>CSikfNNFw>^abmW9cQuQe32hCW8Vn&Nl zAeQ<&#w;fTo(y#g#BxFL6HQepOX_$c!@;B0BrWENY7C=K;tF~!Z@6@pLxj>b$=n~4H zhP~(15yEHmXJ61?e6i}vE7kB7DGe{*uY2X~+MDmxvh*1j*1!Iz;nT{|#vY|_Off3T zkTNkeIWmT!nncSnwmvHpoG8lpV%|!zl=I~X~)5gu02R1&{UjSbn?aV&VmbSY& z`#{I)!@b!DdvgxY`(g7{fw{ukMJm=C`wht|Q>%O`C!93gAC7yNGktK6{ z{>>pq=f~b>*N@4(TH^odq3cTQ=03-Sa)L+rO?FMv+r^dyd%0T9*H+ zYUC^`mQbu6-Lcy`B8ytm_8a52cYs^OYf~g|%PqYbM+b9Hk7XVoAeAET!f^hj;f>FT z^~yx4vwWFwb*cY|$wAERwm+azTHOoU9Wq<((%LMOo0;ren%uZNwb>%G%`&UqG0)(X zZ**L3u*>XSkAA9c^ 
z)#jOXe`n@-Qa!O_$8j%Udhfl1=)L!9Y-}*txcA^IH@4u{F1LoPtYY#)dv2!2s5Dpb<4x1E0nh1u9~+W|t0+l9de~Z6^9&&k7@0RopejMKaw> zihy@?-)8uBO!wwt$P^+K#wDyHcyj}rk&tW^)Px^^7&@~F{>eG|7RJpIDkYgq`VydaYuYrTjo-6a#d$kUQ1LiHzuE#T;82f(!nVd#20tRmr291 zRVum#qP-iZ(>j=I9n?C84Fnvrecc_rlr^}8$~!E7X3|eG0Ioy&0(6-ktT7m zsafNavPRPz0IqgWp9o^ndUzh@I@{0_B$UNEvL2n=iF#~a*ZpEWT4d_bYE-ZSP>p3_ zwc`R|?+@@5K`h61QY0X2l&=U`(PFhbJbNT|m<@24jifdB1j0K=C>FHGK4zjS3=)Vimg@}fA zd-#y(A34)`^!d(Xmjp+z2u@w@IRCQfmA@)J_+0bdErUSJD2;@#bnKdUGc!as zK7+3yVX=ILFHsb+{>xYYci{SCz8a5M_&0$03Yd!^!K;5C@<{(XmdHfssZ;Z`iP;M9 zRdTtesE*01GDl`A3zsoDwM<-@0T`1|YfM}|7~iN#-2k{6M1K(5_aI)C@77fA8USDA zZBS;cloqVjCM*`_uI^vB)4XUev-}v7xTrs(NY2KFNU_!#P6_z09-sT&qqDxB>cpMq zat|NwX8HesG|}nqx7?!@^L;uT!+K!T@=6ifMBVaA=iA4@L&cA)(YU1%pGdq3rDm|pHI67Roe<*E> zp0ipLvP|h)E%zvwz~%0gBezeNTc`HUOBK)0=(WvN*cYhmiqx)U2FF63O_pk2l6*## z=;@%2r+oR)ub%ecVfPKz;!MQb3T6zk0VlX?N~Cyhs=_8uV_T?lEa~@Nq6=DX2x!px zEmd1*@a86RXGb;7jo{iw3$XdZBca_hf$N=gCph~q_$sb~pHwBvtm(%G4bN!2iQkzY zaN9peh>rkhOTh~?KSmH#VVDupJ}pQvH=@HPPH4v=d}SZmgEo+R3YO;dxTlMKa-`s^ z@Jcm4m>>uX43B&Uq_{({Vsq70*k}n~U;0{A&L&;jI!)>tEv8?C%isxA_+}}*l9WD4 z1J>SM5$TMz8_};kf@IdNUG`pz*><9NcAfK`KUsD7CxMQUX^do}=y<*|>OomV)IveQ zLTo!^D@a(suD#pcIY`{wyXP|cJLKzTqlCn_ySy^KSjBV zB9>*Y4RA%&DqD1*NQMKHmS;R9v;1VpNdT5ug$S`d(#EE+)bi#v*+-AuBn102S>vSLD zoQI1ZD+w%og^OukWa7_)I#ja)4gr%adL9B6nMIBk>sYdITH2BDK4!A|*?@=rMXt5)N zqv;JSWl`dyw@@z@{cl+$9^Dj4MntW#Lx(hliGayUSX9LVWsNzqU_}F5vEmhLNP}L7 zX8;lwp4AyXZ8QCNbC69JDzJ|hILGqwGX#x764#K)E+x!u1r>9OxIiojS@ET931xg# zQh6&$bgyZ_M#6$+ZG^8H`IN8fw(uLb^C@5LIMTNJMB8Cvaql>CUU2v#|M-RW=dN~L zdqeilha_wCHs2tF=FBi;yB&j0)bJ=@p)obWP`I-26{W2I>R10q%6b5?#_Lz~dQ`yvS-l{%*_~HL6lKYIC+DSJPOsd!T5CvS?d>$yQa-W@YwTdD;p|_DV%!jVNc8vSOPlZ>_dy zgDEhl-7UVwJE_|l6(|hbWciq!k920KZ#Qj7PgqHTZrnI>v3Q(SD-5yDuU?-OtL^R)kUu%Zvcd3gQ zNDcFYZv(wx_p*=Ty2f>q22h&VJ5_{MV`#o2vREBas`kzhrPK{2E-^wP3n`X?9%B~+ zv}C}om>Lz5w>h<{wADlDYs|?j^%SwPHW`6f*;@w-cZ07?rF%`4hlWb`lUQ~13YVOO zDT`1RBrNpTkZJ|QBEbt|1?38GMfEERU6ijVVy!~ckoC8tKM3G~vmJr(WrqeJVSz@V 
zU!gxph6fKrp#ntRuixVRF`oizcjAYtKzenlFE zr%jc|OiN`PhEui-MmMShm&(1Xq@Lwc*8;gy9-L8qHo0=Ue1$`i%CS`IT&{O1 z)!7$l=BLYN#7PjkJ|(!59V+BM||Mtlt-P1YJc_~V#LZe5;5U9+l zSnXP_aVzh)&hDC#&^$Hj=b4dr=0);sqC1>8-7c}nWNvYbzZrnc%)*wq@{Yvvp5*er zgp&T?6tQhUGs&wA;M>J;XNCOygwypu*?eY|(p0dA@hwr=BnlVAVuqsEmLr6O6t2eMvi8%{QTVNw05*R37rL0t8H#*jL7H-7?CSA^EZQ*uPHeOeo zjL9ptP`W}(Rem`N_jtJ{r#~QJaHbP4IKed2t|c_i;2ow};3l18FLw6o^9a6^zVPot z4E+sY1%tt?8A3s*!csylmJ-5ughOvQbv?0!U`22EB8bm z+i#kv?Ad{Fi^Vf?j%_lcT9$>cs8~wR@rcDHy?3ynMT9JhSa4xMAp~Nf z3p_WFZxhaUh-!m4iGO9EQ~_xbW(mTwd62N8^SPWt-Y*fWEv>3GgIKWIAYozMbIA&# zUm<9;c5@q2MiGIv=|J1I!@OO``3Fw7A3ocD_<24M>)3h0vzI$Azbe5-!oPpr_x)Y9 zNXe*R!6Jw?WTbpGMEZmO<*WZANBzNn{s3a(%io|ZtRFNFn@5-)6;rw9hG+6WBe6TC zSdm#`ipx;P7pW7<)rqB=l(ND2VtsV3CNxVCp0D6k3~*}j`C6T_PM^QiT(HML`Kow_ zdX%p=sj}Dffv-@xYt>mR)tM{Qp+y3}EUtUu|6{S16!F z*+*f4P7`cbcwq-%q@B|qnxn+L;Y7Qyt%KmA>Ge$Ro*Q)6JxPSm7amEyK52c3bnr|; zQiL+JKogLs@XL`$R}I3nA6}-#J6dAh2-3a6%KMQCiHxtf+JX3bT|)g}Or18iL7TST zoV-S#w!x6LaWHM8E_b^zZ-*go2NAL`M_G2j0O4rS@nI}_UIMT9nPE08lmst~MqOQG z)?64_eSR246|K>B+o=C))P+U)YUv5GX9vs!z9RY+%U7($o$wWWRu<(77nmg^g-|TE zIfxUI8ued+tm;l;T@E%9A|Q(dHK2&Oon4^AdfmyYMEZL{yaH$euFxV}itWs&43O7o z6O&LDYsJT(dG&=ML^QyHwG|QWuQ5AbXNdFam7&cqnYJLs{l#JM)rt#d5>OAOV*OXO z=gbzq0$eRQV_x*E84^~>p~2!qW=L2WJ54E@jj`)=VawG4HA?4ViE~MxYiYkrnaa6T zzWeY@?`MsNx?U#huxkN(L~yLQQc0_oh}@nd(u6hj9ZZ<+{9{8 z>LLYXE0C3Y1m7{J*$y93y~Nf&GMf|$jw zw>xmeE=fWM_~Fx}&M7_4NkX?Y5hN^71RsyboScN+b zMZ2*O!H~Djgya(>Wkrlbt*Fwyxd8U9qaIy0$I1q$M%)UUV`yD5lvXq{YFvdA@7&blZDR z&$~ByZqr0oxuRqRTRZA8wuqH|PRn?{dUP)D(RoO|LIl=mL<7A5Wf4=Bbqhr-kQFUR z3MBBHT{n!(0n55)#1uf7LLtw}f>$;e1#o@SsH?Hg;5fb_wsumMnL(movCb=S zl%*Aj3w-%Id?mmNLQD(7ZG}`%oMaZy1<{=?Q@wbz{M#HjLZ|rdfLvr6>q3hp7@dx* z>W?gzgcfwiS4zY3cu5O|rK^>N%X`XJi>f#EF5lDNxKCBLL$YL>aM`Y&75jU)oK!B~ z-LdANc-sl(j+6b{jw?1D>D_jsZ}%DXx`X1CyM=4^%a`wwFI+D!YG^N5CIqKtEtSNU zc7*1(`e*Rq=?X8Dh2RrPv;edZjRhS;s4irT5yOnouh`}tWh%--V~P_|t1QaZC|uEX zoGe^nHRt%UaFm6t9=O0ajQX#}I)l{uMY@Dze1%3K;OaN|3IvOylm)9Blq=|0U?h5t 
zY%3tvs1b{H3CE>3JdTw*Y~kBTR4LLP*jO$6=Xe|xWi8+qi&*1NV#l&*S$O%4Ar_ge zBomZypkbQ;W>LL)m;=SaEb9<>-FcY9f&thoX>^t_G>aFR+e#3t04i2nTroesM1Vpl zR!TV+QY2YQxU`i@SQV@I)oa_9Y(PW<4-r@!_k*w6b{=cne@bxhO#7j;{3Fk|AA7$2 z^u^9guSj0~tL%d>WdFFK;E9cWgM_bO!Me{^5U>8LU;P0g{lB>D0m^y+v0!}z%Pq=SWj34N2ulZdY;&wBfmm*m7-ZtvRW|kho$X zW3#bn&rsnWL*cH067&amYRZth3DVfC%~`8RZ~ z7FDQpj=Srb%5_O>PO8%{Im~!uwb(}ACOKLeJ?Q-fXUY6h#SRgzj*&bv&B5uk32ydD z>2-_}IK&8?O;LVolT%OC6&&mkuPlgV;Vaxu{q2;mh-}63RsCs#STKo=4id6(MO7>~uTW5~@Y@l% z!d{+dP3RAzaDmPsZ4MF(7Pk4EA7WW?WW%))3t#PgW0bEpy9Xoy@hO*S%7Kb%Y+(YWIa& zlv}mVy?W50L<2L`yma}jM9B<}czUGpneZ+k*0c~IPLLIr3C@b`wMtRg=jhyuhrn09 zm5g;--=rY!Bc3-OalbLa`}Q-z+}WY6R$+YfP;5dw?L*sKVz|C(%^?Ncs1klmNoQD= z&^NBbHI%==w`qajJ;dAE`1Q>9mCf_(ogQ@ez!yx#`H`q~vZ5nQ`p%)?I)zKN#6D4C z7t`w$D|Jm0J12L+-|mzoaZVJ$h2@Qfo-AL5mdm3T_s1_oyorLWcwVVOm#}cF0dSSG zRhPG&EJVoKq)%Nv01H-FX`gq7I3!z(u@2u@O+dT>eB~C>?-nGt_v*2B6WhA=&bIAv z^x!@{^U7=QGD^6obb9lEc}O?LSC|ik>z(DRQ47{D^($g=2U=nEvvC-FC1IO`Z~sm3 z@|#`Ho^L;NtbOZF-s<%&%a*s5E#hSsw8W;enG6vvE&(kz?#;99?@gV5@2NS>lV-O} zn01dTSH#&qtBLZ}qqrEI7R12dsQ>D*d2JTHqEm!)kZ?R-S>Os6P!jO&#`bH7FMXOD zF#@4oMfhqAu`GNA6^rr}yD^7EiCXxIc(cdSL@j(p0~u&@kOGwjvG678clv{)X`*DS zvMm?M3{bA7z(elA!yE1_{}vdoFnSYGAO%_ZWbguV+I%vaLkrr1bDATHctP1W^OyHj zZS2cj(p0xyN|E525;2@9A5zQC!;CS-DOEMJs2SD5%JNW_2gEJ(VaN9e z(UiPzt!Md)wg`WZuRvC45z>9o4Pn`B1hMjXF@v`E09@&z! 
zzisOw9xPbuMw+<5#+98Mg|dR zi9$F7bH6$G-~QnL3z+(i$9RCR9#FC1yfO_j{X@*FANPf2evR1n40QcUrO~PVsd>YZ zS&E1pc}k^`M7U=uAYnxnlKr!ZHDu#Ve1j%wtrlF7xkZ(?T@CT7bceQL=RnD}fr5?d z>@^BR`e6BTa$O%%8iI=NW;FIC*UE!(I^o9jP7wMh%e-T|AD#8lWZSQwd!5N%$V{=h zKFjT=z$Cd>RPSuZTLE#sb3Ff5uyOzq?%{bou!B0sHYF|6h2^V*^Oas%?N9rDX%+S} zavPjDJXB~-AE!i*C`bZVG2QMQp=+$r1x+7Lw_Ow;F{Jh}U63j8InE(Q|nv{ zkj{rD{Ouw8FLDlzWFOb09#F(>=?hrg`p4(c57Oc2hEuuszUqQmcL~6k{W9m93(&Um?E0b6ECoWeYWi+%@>X$7+wwrf?!Xr}Q z8L6JSpxNHH*ELu<->qwbo6yd)$I7|eJwUv`{_BlfWPN%dmY#G2v2IYWC;HXD$5&_$ z!n&c;Gkn3|7yl5y`PcSquXP@Oj=%q~V9O33CI@SlwH6?NI;SNnsU;w?#XYFS&Z}vj zbMv$X_a@J|GjV3qq}i?5Z#9ZlGYMazISAPbdlp?c)V>L=&EQ5i^$kZEhG7&{i(%MPR6lGmx;TFQ!tIS}RXjC=ATG6_R%o_Nt1FeHH7(wcBM4d!)P03~o88*mg>`^`vCW z@vdFZiT9nC?l{$X@T}S3Y1gCj-(V|~I?McX!L}Rh52}ujp zMSu)0oQQ@%3>Tg~6cw-N)@e);k_%S3qM@Usr(-uotcea?G$MocYAqS0h{<}ys{O&T z7C^iLUm@pV0-Fp^$(jC(Q@*mC)J~5bI^%nVR6(VPg_j~3EoAwM4QfE#D9PWL7tn^p zQRkR07*T*&?s08M#0ttlwL!&#*BxHF7+Pk6DY&*gP1K*OgVuBxaf7@&}TuOq?QJXqGarKpUOkA6tq6z&_3*bxI>~ zdc)kFyF-uVI0$TqepS3pUA$SHw;st?5U1!x-b5AO&3Ez0WgE;gzoRNRtQV5BTLExt$3u8}!Y?T3)1k4TY2q zgjQ;!YX(tai?oqT2BT{Yk#&aXdPC$gUBa57)b+zatjtYA=^Ksdn+)08z*oaStinB} z;{9eIR@L#LMd%Wqg5ZNp2GSD*L{a5x!$mVv(8(kZkg&${6$S>8EDGzFrD%VUp1>*;S*CN;Y~d?33Q3C)Mt69z#uDA3Vv&3a zcGuzP`K*abSm+O0#4D20z|Ie{DGft_sP)&(8(uVTeq{u3MfnPF1@3~mebwcmm6wLu ze1-0be1>VNSC2iGZeuJ#IZ@|HH#hw?N{G?vLB1jo#T6v%NcR zyEM37;#$z%&d5?h>QZrh6)(Q3DR+5SugPC~Pmk2@=dX!hd`t22+p?>#i9Y^%=x<*dUwvPF{uSxD zmlWq;(Vf1e*>${U=PB97qdg6~I+pDgE!if{U(udX+Zk8Y0oO-JUJpjw@WT)`A^Zu^ zBBVe?5$l0gAcZS{6D^8ZxRGjB1gltoyM?dDR4*b`QJ|vdG@ovi1+)lavD*o0^wzKW zijo!1QMh9H3baGX%F-W1TM%#s%3=dZ*>o&>Sm4MeXo^)diS!pI%vWrGkn$BIEGlGy zwMeo06+J0j;RM=&uc(MhRV>mXgoNdcU@_7wgoK40FgyuZO*k)5h$ZjfE8DOR7qm!X zg|MlxQ=*8&>R>}h+k#odip~=Nu_$H57BeMEKCt1rD5qN-try9 z;tmTI_zD)RF}~^sUqQk;cTJ3-(O2J>y!%YDT zHO3MD{omhy27ZsP%%%~&nd#Is_51Gxr{0Mx)@Ideq6sZuDk_TlYzVTk4vq8o@06n4>FPH|%Eh<2OEE@Trq#`d^Ezrt)+vdk^D&m%+O zmDTT=)#sg~^vhTI6>0p7)q$njkP3a&VnfUlQ)G=Xe6cQk(O~$J!N^)85G!igAP_66 
zL6^RnsXoqRY#B~jJD9vym$!2`Ynvf|r?K?Fh(*GJ^t6)YD-aZb3bGXt3tN)GS0F3Y zI_w|B1$+DI=S?UoWP!5aze2KT^^pOL5Mr}ob8fr1(Pgx<0YFSH9GDOiLaD^#CV#>nVaE5_+g#urZ3rlnG49Vky zGoM68oE`)a0a}uJtJQHxc|LBx%@Q2 ztLZVP1ogdOM&8Kaezu-wP*tc%@FIL8GGK8*?hOU)_tQ3beD1FOi z;48<3UPq49J5B4B(hoVyDX|w4mRGg{uP(UU1Bzt9C2}%FSVKZTz*otQM6F6%(FfTo zp-~>YtcO$E6I&<68lC9MKF?GvkQD}`C<77)Z2d&`{t|0Xk)4m!#!GDL+3gdln!4bg zTcBdz{HB1QZ@^bt2AIU|Z8jlalq}0Zgec;!kb{K)_h0z0usjEzD;**lOu#VAboVKk ze|;-`@9)Al{@Qc;Jn!HM-j02|b=z8(t>Tqdw`3G@W7As$V_Q5zn(VxZtsQBiljq+h zwyT*<;H$@`-FtL;)8jK+P>;>#qL6bn0j)q>&uN9{3Rnfr3Z216=C(XMrc|Dr zxz`oXzutG|6*K`mUi(P%-sgjFec1or$Er{NVf^x6rZ2A>zPhga_#etIzZ*Dp>4$ed zm4ADW`S9!Eb1zA*yr#MMy8haG%-(adb%#1P9`9YWiMMcLXThrO^xE$DYH>ofGNxP$ zkHnn7*11qx0)=z@gtPqc3A7u3=3h%#7N(*AM!#W}PZ!w^1Mb3NKGyUO=pvGxPdCb< zS>fV0`3g6ZPH>Kvp#Vod6hXzM^^;RU`4=;z98E04nemPW(nF)>GF?{a2Qg*xlLahOsW; zXp0cAN@oe_-e>G3#Kuw=PA3+QcqWq1Yw)@UqzkY#Cp3p2kt>MI6L1PTusVm6-wX*0 zgM^41MOFh43ob11RmIAdnhji7+}G_9Y(6O1euTg4m|)LIJ}g*B8O1KblNY+5yCS^! zity!kd*1q}=d2g4 ze{W@= z$L6b2EA?>|Do#~@{8CNIs)4k1y7Y}>xVLy&`uVPHom|72qmiX>aZt zJ!fHWd^N^rB=Ob#VFjYkxlGwwX13Q2hv1H>_BX7(`SZP7{&&`gn@%!0%a}=yUpR)h zB5vz1^S+$v{o&sChu&>xrg^>>kRpQP-6i#A-3cah?GT9-O6`Mf*e$KgIjIBXn$QiO zm3>r)dpulN9gfl6@L9RUiqR0nrU?fmZ$}fjLJ|9Mf|S5jvJAok_{t}@A8_T9i*39b zkX2CGKybM(v~n=A+7Ml11YbogHiRuUL@qT)*A2(i4<)Q(;u?p5SmAXVGzw!I5!b+E zZyCwmF#;8vtb&9E#KK9!uh@Ks>SH6cC|vy}UjeizUjeQ_ zSroKLIO-T;5!=-nGDS!R3E5-@8qomr75Iv^U=0JVkRwX@YV>@O+4usp{zYcpHD=9K zX4T~pB)6khTqao6aA|}V);dRJ zlcSoS(KjWk;}P$>k9*vG!o6vNYZIayX8DO``iN&j|M8WOcVw@YS)R=Z37P5EI^Bsk z#inKAf}fvu{^_BquTS>+qA=gtegWfexIgvgLPf=mPN%}u?{*`SHrYzSE? 
z4__w_Zj^=95BOH7T{7j4$-VaRcxB2x()-;~6)wq0Y3TFJBrh!JKSw>!y~8O$K6ifG`~~mh4Oa$>J2ABn44Tx48Kry$tTG!0evhvRb`1_2 z42)1b{Pn+j-~CYh(wpKF=lJ`M^S116UA?(=Nn>k4Wpi>aH#()&2a%WoO;|OEz^lm% zZcmzf_et2UX55rArev9c9THmaS3Rg!pi zGCVlOSK}}6#!dGN|N5NjA%7CFq7GP=M+`#K@uCV4qffLTM1YTbaG6FF3Ss2SSSrb1 zF0I_CtlA`9yiKxfmvq%Z#rmU)ttVv%FRRbIZa93g=gPaflh;IVd}4U_3*(FL^}X?l z=B-crKl!Kj^Xr;_{cOC^H2nP?!;g0jclpCDT?|jeeDibP^*aMUG>?4O#C-7e;7jkw zFTLJ(?OnsOFRAxFFWG!T*sz=5ctBdSO`1{Hj!doag3hplUZe-ug|nS3|L(c|qS^i; zAQoZ5afT}jPL!|cT@z7P?xk+4@qG2`>rq_`v8$flP_d|xMcRcJ z3UDTydRWQ=X#KKS@*!9?Hckkvq6Pg5Xk;lONwI5m$Z69xidd{{MQ^8?6_u@Ug0)ak zz{n%8WJM8cRK;>2o02SKHQt3qt$_Digm_&c8O#fbqx`wlZxGxL6$_a{KrA#0iPt^3 z!-800BovDuo!gEUVN5}5Tv1D6DH7emSA6gl1__bUP}az;UdKhs=;}TEjRyr=kMMUM z6QD(iDMGXekDVpE2+v&ZMheyoZ%f|zNb>h@q~G0Dcl9xluyn(4xsw*5fepfX0I?nj z8vQr*E6NZ5g{j|r2FCLht}TdVHk%Rs`gto8T=a!kTuVrr0wIbhPPRTWW6&>28kwgH z$QTIA(ZlPWRb$|k_r+8x5R8D~@$~h2n6L77>WX&j5YAA#OIx~4jaX4M2Vu2JLts1B zBG+jm%fyLG<&dy~3IyP**?!+obN_CZ=PkEr5%xPgWq*CD(~mRVZ$0h!!$Xt)`q1@%DYnOvBZln?`@ASwLA-KKkbtb9UxCo5cm=qE zenqzN=BolqG?cHPUqvh&j9O%jTx<%jHiT9iB9@qrJsM%s{O8Mq~0? 
z>|AC__c53#Oy8t0*fmsskSRL=${L20wB*?l*u^nLh`_7$*O>KJn6;NiHeO-2y~M1) zOlAoYbbtb1StP8Lh`=I2Sfo)%Ebb=;;fz8ieDzVdutBz*j`VIxvv4UyC#Mpt0Z>leB9vcB3YGgC=xkzkhunabzu3xh_3-caeT8#; zdS-iyu^wfr+ifgkn(TD*DZ6`KLze1&N_4K7{m!W}`=oZ4bfHJO z(lu2D#Bxtl;FX5g7+hFnkdWP#0|^Tu?HD9XSgyk6K_FIKy#ix{*zfFL!rOL@DcCsV zo!;u3-jPy06q==R2o~DIH)zE$ zqr^f%J=Qg=z*jV<9Sy=^lMd$t`W5a4fI+-6pgE{vT6qIseAWBT`#o1)>OS_IVAqkh z4Le(xZ)jOq&&?@qj>{s=L65LzI}#{*=V{xUljh%=IOon2Gw)8Ae)owP&5uuOer$Tn zqcgdW&1$9k6|jn?iQ>YdUjeQjVG#>z6~+cFTXLwmoz4%^WKm0Z25)L)G~Su4)f>-O zv_D8)STnr@Y+3Zh~P`8P-QGpMHjS&=iW@JYOUNLUA9xcaHF7f)!o`{ zZJUqx?mMeIa!GOYlI+Y&vR6MeoPV|F%})mY`i1`8FSMV0H}vs$`kQS`Qzz3YXT&N- z(93XzMy^QTA{yvYj&vwTeio>{ZC2gxWWH@;Ui(1&_NUsnJ{^AkH8mb^_t~DE&-JZ3 z*i*Arl)thoV@bDP+8zJQb_4*zLx{+6tj|MN3+tR_`6Io#V>%q~m@D)qA0JQ_cCzpl z1u(P=Es_`R!k44pFy^*;0ISCPvZi|pEoBLXAn9A|Y90&^qG)H(qFe!ZDDzBq5#s5k z;?iVi(G(Xk5DO|6(X}9RQS@SUttaifX=nqRrH|P_Od595uq=!UvT%idA}PvIR2hR> z1>MZjkH*V`tjoc^9=4AR%CZ=-C~@IcguBS9Yq~H9I;v+IF=CNtRRGFDTt5&CANG+r z>Vi4qgm$P{*#11OQOJ+V<#P(!VhUS0MJ);Ct%!4{8=p&72uP6o+7@cT+IXM?Tb_3w zYe$O^Q-p_~Z9jUp{rK~pr>_vcy7rdjwZHZJ?epGmZuIj+L$F^$zZx7N{lVWOs|O@3 z3k*B}3BUQ}?*Ybt`0c-buE?{BNLOaK-^5({jB!l*)H6X4m7{fwy63~;`Nzvb(zLKh z1*G=*r|Ltq1|zbhDHY1ZYE^8F3clQ=Ms*sJ+qdcpb`O^99jw@^hq=9Ei>7#!CI@Q= zm&=oCC5yLTbSs|9{03c0l9pyv{ocvIfe_oVr4Gj;^{88AQU{~{L^HIpJsYA zO>@2TwCgRrI3J(;^Ca6FF0rjodB2{t_V%$47{8>hx!$+))*2S>W3Z#yFJDI1x)m$j z^TaWACchG;b!@96esm-W=Y?^hGuTFTBK3umm2;c~`W05D!kO$G-;08HH40bYt8}Gz zwhD#!J{0NTl>-W0QzLM>gHlFE2ocM0&PXJzOCuY=T1dgdAmQ^Sz}32oqQeg)YIiwNtlj4Zui0$-s~2n!bFD!7CJj%ane#D$m9J?u%5e)oQmz1FnnoF4a2cg#-4L zT3aAivBs`gXIHF8yAbJEPlXC51-2m_>q&Pm;L0jQil82Jh$h-XA-VY%r|+J0{_aVK z8-KC6J7Hey6LSPlT6MsgGs*RaP5j+iv0wbz>%|$-pU;oKX&u>ylxF)Vsa0s_^uVUS z_*c^%Uv@ij;R9IOrcYX%qdanlB94==_8%! 
zOu2WKobpvzr6OV>*_8tc3y6g{EQ}LIES7{W6k!>YS6*|?0R{`3ApB<44<}X{Lo(Gi z{sQ{|(jRmRR$6;YoC5niLKJr1-M0P`JAb*8r+oH;j}D$|?=yj~hIIolxNFGnn?W=7 z58~nv_zImv%(=+r=AUlLKlwuP=3jfCzXJ1B$Ie4-YqxXj*K*62a?=Z&qEedzVp^O6 zo2)$UOmn#Xl=ZENb8kSTFK^Cz9RDZx%R5ZW| z%EF0vw5d+*Q%RmS@n6v-E6P{s64FK?`z7^dk(0Hux1ue`=DlO@39((_(a;&Bd^KSK z-@;e8_&vTFk65E(h+P-mh=;o%j6lD_8$BWj<_2_I1qo*R-ofG? z%jh$Tcb^d+xTwJ#;?nJ+s?EJctK@-M_kwe~0yD|1#B5*gY_i*>6MtF@UjeZ!l=XXj zh1;nki{h4rzNj`e9-7E z=5}H$jax!1#{IlgdKMr@6mwgCSY+@6+A&yX|AfuA6P<4S@BDB7;?RUw^%GWiADQ*9 zhbDjc(4@aTW&PtEXZ{r19~+M`>z-%qV%mcX216=To<&0M(q5PR9;YmkYfdlT)U2Z0 zW{0)d$96A>?68l)4mh&2z%^cu4Q|$$!H*DPaNd!_j?X8`V8O!tAVAAA9jbzw?GN^2 zY!DWAOcA2OD+kB(6%Y$5Rv1`ou_1byIbx|HVZ{h03KLfiMb>K4){bx*)D;JqqTLMo zgBHGm532e&ScO@37Kmk9{=9j#xyE|2HeO}TSFH02a0TL`h}CdPx16*H;dM7RoEd`4 z9mxq;M2K*BAQsH(uwW5~)p2wo310!^sE|ebgAlK<>lugzxPr%i;fY}s_{xGrXg-`-I(%N33r>JohfzRY;pecoAMmv!Z^qP5;?G_oiL?9s4vsm}-7lw{5r-FC_a^ z?lUo8C2dvZ9MzScHx`~7%s4v0*`|nCBlE2hx)*o4iS}rDdMn~uwfvfP6gk4Tn)xUdXj21i+3;y z6^6KyAldFyFajj$8L^*RHwFosQnum#+P%Oiyg;dNc#| zNEAi-!VtM0)DD?6L%-IqunUg0l7T(t=Us3%Ed}Z;sV^I!8EcP5lFLX)<&mn zL73Pn8u~fUHtc3dc3V)^t>RU(yoUDr-RkJ1F-usrQNt-| zk16R5%<4hYYF=Sid@X97{etyX6plXVQ9hCTkGM0L7?roxED`DU5_LIFogR ztXf58PHC}-MfmC|N3ua4g|%ELEDgsikRTS~u>9H(>uMcNEGuXhlhDz4zE5%+)|djZ zLfFs-xZI=ix!8P!ba%M0$VS2%KDA&`zCwyS<*VH%I`^FD*n5%(zB+t{Kgw4x2`|6i z^TIpgcRrSW`p^EKn)IDAMvI_PxUh!)|Kcl9#2+xmfB0R$KrF&ngVI4}>qXPFkgqxA zy5NkyyhTjm5+)$I5B)pv6`WULxr}GLI<8PvwA>W8P=+7=*uj^z(U1cp_bx;6ZfrW% zL%%B8s)G5daJ@QjohE%ne{793y1FN%s3U$6);jk@7R$X-1c($y)Tcuv-yyuyGhQ{p z>LxZjKkdN%^Q`NCvH$6Rt^W1DHa|Ty_oqM2`kDMBw7ECSrDM8H*W_7klV&$fbGox2 z;?@MW59dYRbkFT{Eog&);9Vti$`@ggig#H*5X&y9(=MsoArYH&I^kwV-V4lE;473v z4CJg{oG21cl+fpvsBmMa2r)wFou!6^<(H=oD%ORR4u+KKL(2@|6~?d%JuFzzuV_Wp zn4^{squ{~{tI>0onNrp;Ia?SY7W6CRj+X9a5V(pe-UkV57(F{;H9rBv64P)FWCCY6 zS&IWWfpP`7!U=Obn2N#`99a~xEd4>!B0Mt$3l_vH5}t5^#Fm1HSS5<7IyQpe0%Qe` z{la4mu2B@OaANE9ulb517CMZ01i0L3qY!89DW>)`vA82}9&lyZoJjc!{Xy^*{9}-? 
zXg&k_gCHni)dPG5*^2TNzJcmjtKh-;d~tLl@3h#0Lp zVX8WAMi5r{33KT&W6@D#@i7w+D{sG{@X&DKVFuBoaT|1;b^53^gFy`gSa9QCulKGu zcr7=&*AKeZ>Rp!V?5Z?2q8WkW*+D&sReap~=EHNp zf86@UV>UlNZ2RrQc0W93_w%E6w;p%62Y1$-(5?ju-7Z<8Iq`Sw(pzS6Zg>{-IA@5g zIPJu>9xV&V(PXV3iD>AmIAhK^p)9?qE5Ak%EA_B4ewQR@mB3?Rn`>b^UWyKhQoxmU zRPX!M*P=xjyHo?jg3%p2E;&oo@pS_cRlU)RWGVF; z@RfUVvv+EH{&EJ344gtqe{g}X*v1#Lw>?h5{ejUYpD>+cu-3|3inxZLIBrVOTfMNk z>KMZSgM0=I1{(E@UOP0X*O-lJ%2!yF16w=UkHZ#e4q`v7o@wK&zWhhmdmr|^^hVFg zvt0*{cW&9!-ng-?rlGB%ipxoF@{eh93vRadXqw@4@2LfM30%#-GXe2dV{r8VUp+pX z*sjnY9PJFU{wrXWrGAgE5H1SD!U?$mLtB=njD@d|e?|2xid9s_8au}p295PmxS|*J|MQDpfiZ(5h*PHUC>D>3mi4RIfyTR?YXk>6)v7|?0UkfbCPSA2i7vC6u_jBg* z8`{@DFn;hQ^Zpm6cRwBY>IU=ab<=0x8NR*C+!ipcVy3x^Y3>|p?i%8W&D}~ys%3iA z%$?4@yPe8biK(e;_`7?C@9r`md^PaGThf>Qs(tnaIU*f)9PikELbCBlZ{vPBbhEOx z3asFXF6r@1YIlm{TLv!k=izB{`xl zWK<9@9t&mRf{IswD^gA^q@0^jQ=GZ%S^?g4EDP}FA#rqGaGP~F-!`(%F@}eE2ag1v zcM{J(wKX`6BsL%sD>84Cui(N;u4pAK!n!sDxmT?w`V~e9*J8`_k3+>6~8Ulm<>OZ@r=;49Vjn**)g!x|H#AO5%a3PKjuuYN%V|Hc>p;Slx5 zfBpNXfxLvb2gjM9L(LRgSj1>oe_S8;lbRs@|;zDnT^WS1_k&ks!EbltHwf{&_Yo_ zPDfm&+AEomJjVLw`4cnhU#xH8&2BRL;(XHK_Mhi}_t5mO|GePl|IEMrgx%cDIn^Q@*d%4`btj{sG$0n`IF+=Q_ zDqg^8x914yv>>ePjvy;A6=W+;FWm~~%#nE{_IswNJyTTPX&S#Qtxty9H%slGhgi`; zRAA9Sa4{q-V<@slVZo|4q2R)bUNRJ2JA#T?HUbF?aSgEz#?18$r%s35(U^Kw_GUAY z^|l&{k%G04XiDHK^aqhLx)h?&8HAq?K?|a_8f1lHnF?4%Yr}T_n8uh1Wa z{|YHs2>&F?5{auCA=7`P*~jwLF{TP|b%LB#Fkc;Ipj@G_))|OJCJo2*tLl>@D5_#X zzrr7E>1i@)2$>5`t=f|q9%RrTBxLp6$nvuc3KzuO4wpL%S2V&M&URWLsC6$gYhN4! 
zWfA=fwszRAt}<({Fa)l~_-gevh889V!B=&c2wVZNkmZi5dDaBJT0EYwjt`X{HI*DO z;w(EhT(IAezt@<%*OazJpSW=#dFxQhb|!44A-K^H)M)T)H2O3ey&4Uk%Z+Xg21r;A z3w6$w2Ky4dZO*{_6!pA#l})@Bd}SXyFfUa4Kl6TiX!Mul_63Pu&N)Jl(jLzevHwDqL%PT#S8bmxofF;uRKU$=0&ck`N?a2=y$krRML(qO z5#>K8FTbX)dRbfgf-2`+Z`Nr^?9T3>aD_tTf&rE*Mz0x$-5nbS ze~+(B!;GTebf-!7`IlYqyf3=?vhet`oqG;0-hSC3kEK59+!y)9*dtMNefi)*{~3C`fFb^=%K9Y|)F?#qYO(3EB(*D(0}Q5&D$Rjzx%22&5zWt zy(b4Iz4M9klk56#ZZg+z4}blG?)uOAo6W;59SmQ{bV(VZjNyyS(gCKumk}zM)^6rT z6Z6dv%-=pWU3sPNjStMHu1djIcyQ=fYxj$5cZi{1A;^?dDhSQ%bmBBQNAqn$+U5rc z;KQf-6%fl(_u&e1)|kEO*ZS2T@D(L1dRoLQTv+rgT#WLShwvHqZj^)Qh6bntr6M0h};&cU&R)46U$lilb>ORrze?Yr%G|8Z-=P6t6>RCx+t`=08aoGjQ|o0gU&St#M^*N6%H3n)UtvSl#?zs~;!1 z+4o7cn>~fx+N+-l2t$~!dIC(-%L$FjxMOs z5L#jmDKP@Ea7I*`NtbY;5pWg37WAvgB|~tzbLyGcF*3??wB=~*t!5)`>1k-l!gtL?{@}?mIcTASrhzM z=nv9rIBi;X(ol0$5C1s{2tCaJv7k+nCC{tb2(x z2MJ#xmVqJ`fa)>{zoN}Sh*uyhAlAxjOv7dKimSsAvIt*2Pj(Sv3nB8amOMKI39I_# zaP{et%2UJT$A?grCr8Q-n+pyMmK+``Im+bjF=IPn!e&G4Mq|VpL+I+k;8h0y)dt^H zM&C6ipVdasl}6_!11?Jpt~Exdib1;q-GWT5b+X1fL2VnSwTV%ok%5=l!}Gs?*y_8- zY`=fnz4<8*?lbOeP2)2RZZE|zRH*Ts$T0Wc|lToMV@_97PnOzR3~-G>4D4LEkkLa z&^s@pdv<6CBrL}ynP)~nBrKo2KC+8&p*k83_qu_EdM#GrpfI=*U#Ek`5sSs4VsWY! 
z;U&_b9I;Qb81sVxN!kVe-EI*o&nTT|jKLvLVecUWV!4G5&U2G2@D}>TwEku0j<3Ea z{XyNph;cxVwx0<H63;NX{Tad7D&k)liHT-l-`pM_QH{TUrxF&@8 zYS+Pz4LdsOR`bi2@G^^d;R#rX(`4t>LNc#xTArMH54I}=iXxEVk!PA7d#35pX)P44 z9^|XJJah&vhy}g^T#**x+?HRPuK-u5xjs?^4N;3bhzP6&(8Bu~%vXe$AYLKA-Mt+W z7U>m|lm^lhL>LjamH?~hdO^GcQk}ul)4JU*z9P{{zv3$j zRA>dlWcBOwNmtZ2F?wj64C^>0Jh-crB%p62|^-EV)aefwk08z1&xzr%d+5B=Z2 z8T|C0`j5ZTeD<~GtLxhDZkq1$8GzPJuIUzU=+l2n|MAnn=ill-`KRfXcl%#?Tl>a) z=8LatPG3`=xYT#@g@H{+#oJD4Rv}q#eNSFPPjsmuC>v9aE$;C`Sg+XzR>$0cu2})3 zMTnvo;47$bM7(18YCK}08%Ve6;DQ$JrTxM2Iaiddri=w)Su9u<^A)`yxb-`HMG*_0 zD>^m^YQhChGzuYS!FfgOSPosdMzfEmk`eu4ussAxUxH#Y2kw zwxfdGC)o)1XWPc})$^~2FT5_g`gYIXzEppHUH#)dolHwC?z*7|E$$ZE6)q4lit~YA zfT9O(v3yC)+Q0hspA1nC{M~=`+>jdpw+PDM%9xn48+hXg%uofVYJ54eurxJMuCfQ> zvyD;dy5LMLHpqk)_rZ=EQ7ubo&`|v|yJH)355s-V(Y*_TgpXVOFvS@%!tKXxf12oW@1f~m z{g3U>56$`Uq4_^Qyx{iZ4tJ(_w7^jeyZcNZgyM>yc5AYR4iM4$u*cu+bDL5&jQB0= zb1fD-7YflLbSv!jDDHF5lR2getzre3j(1Fu*vE)$BSlUzQfE$|LzEO{9V)VolDfqA zxhD0yC96;#X==}OjZe1LKTi+73NA9@M1>Ta!ph9SWrm;%Ls*q5Y>_E^F@Y--MhF2{ zVM}y5+nC~gOxRKlQW`QgGSTo~9c2plGTGaW=$w}9CzE|7gz5x#%@8?g#n}<~sZeXr z4{x}{Y`Vnkc!Ak=b!5xsq0LuDHeF>fR0z0Qc@8mI;3#5l$D(I^|I(u>$XQ1}AP)jm zLl72RSYRaDC4>tLSVfemQ_!`@H6V)UW@80{!gpdk5MV~w>=c{d{@nk^-g`i|d0tt& zxp)3#;v%cE_uhL28`yiN#7=dwiDIWjsyADbRV-P(+A6l(632GzIK4Qr(vys=nwi>bWM?6|zR<&t*uMa|}ywWKcSHeX=M z&S}YN!qd79Cv=%;5$>Cg-aQ$;XEvf`HmrFzboYE{tCq8OF|<>H^1Y) zs`_nJ`J2N#9uAk?87{u6%s)SuxOdWLqr@(a@17%qEzmJZY#%4Jj*{5NDg1KgF;N&$ zfPghs#I~`R3RP_7SRCN0asrLQq{^xI9plJtK*tc=1^Pl#R?vpgz$~SIn#vzbrm$!p`%Z8xQ6uuq9`53h^uRQ`6`VPN6rb z#{QdWHGEiLt65mz=gBQ(`5y&_Rs$M)8UVPJpAT&WY=C5oxaq6=*8aMhX$*f z`ir*l5^_3&VmjS}x^3Khj2ybx+H|k7>PGApcH%sXJ%grXZ15@59!w7YyE%`PMUTFS zxt)!@qOlBySLBXj1tBymS`@BurJJ28R#C+XT|%lGfS`=MmedC9*d-%@Fj-;c4V%hN zUnQ4cB8j601+Ox-E|XVbHc1lBD-wmoj`PzFAjB&axn+5BpXPhhlq6i)pd^yRfV3-2 z^b_2|bQ@Y!q|G#dOecaOfxQmg!Qfm)RMALQ^>|5>5;KFaT{WDLHJ%V+VDQjI`SaIQ z=kCtmewVrR_5xVw+H0z-_eUOnfBu!XmG{4+eDBAad+#WKTd%)A`P%o!-uYhlI>iXFE6wZ9$xA(H0@aCn{UviVvw^ 
z1)4&kAxL_H>>2$*(jr_AY<~n=ej?;B82O1%IG||q6+J(8#itM*wgxUNkD+y*LKF^Q zAvmLeLpV?_!iVht$9}lbR|2|R`mmFTEfV|=+@31t#IsyYYR=ol`h=?EwpA#ym(}% zKyWYY?v4??XwG`X3A_`D7t1f156TM9g3%o=cMKBplQ#BbY~$xu3i7LZN@}~&BCOua zZ#c5VSI=J@ID1oc>ayU4i`}QL@-E!&yK;Z%+C$0Bx23OOitq>Wk3O0BtbGo`CNh`s z$`13Tp?+m}He@Zs^AkDYuip3=Z2T|$@QL3dEPtHTF`srbA=$t7Nfw8uPlTk5hoz3k ze0CXa{ZD8q_Xc)J~4G0EMbOs-q%4;HX|RRqL3Fi~`HGN*-zSJ>uU z&^njbG?P}XiYyVvZyAm(lyKIM`ld_mVmlq;dfXDlE?mhv=WcUPyc_*|h0|}0eE;;6 z^KYJU{rnpipZ*{FKY!Ed%cou2pY!T6!Va&H0cP%xlgdz8n0j@-;Pp%3td8IkcqW3&XG8I&`3#5sD@kt=hG9Ywr0 z_=+MH;L1N|+CO(PXv0iM!E9*Z94&xWSlL225Nq>7IQVM27JS8J^(#ycCN*Gka51A5 zfecK1!#pZ=k2ZO?I{Sb&WB+W{{+abh=ZOXDG@0_N1l7Du98Gms2`qt>(Ei*13UlBt zbMRH&!B-dd-_h)QRf}7|SJ1VhBZ8LZLrc zdt1$lSL(_eEMnc#Ro`N&ZVMVWr-MabGXC5UQpp!B=Vojm|GlP7<0@FU-!V)iZ!9tZ^pW25};MH8;;pO)*{FQ%Gib z|0CJ`Hzb#D4V}Co*#80_U#h&WfBhz2Y-U$LRF|`VkA(}*$d0$hx@VO|_jBeQNWOx( z{V5ZOS6u|I%zD0s>tk>QzGBlDo^T}+77?uUd_@m`iLanXk)(Bz!k1|;lq_> zDzZZdwWKrPmDH6=93)bSyac=S(s?wRee^hA!RyY3Ezr*r0COVi>R95Ud`0F2QTog! z>TE|p3xohO5`JG{pa#F|IP1mf70Tj91<7PMMmmG_&kyc7ErKDc<*yXz;ci$Pm|IYYp-<^2)y(vOm z?@io)f8xOhlc*bSjNW^H=GNQeS6)+Ie`EZmJF=JWjbT&a@ym*1m&f+LG}>}n4zw$7 z8rs-8R@g8RQzF86kt2-v(L>hZB$feOL_{iwOb)`_ZirZp7{}-aU%|CS#*^MB)JWcJ*v+;=_Vf zdzja9f`9Ow;ONEPGuK6@uJxU`)N}G`*Go4Al&`M8A-(h6k%vD}eDKSOU;b$tDGlRl z!dLUUC*+G#$}(gs!}Ak4;jiBKzYwS%`zJr~TLjYu;$FZ1>5N~>FM_j>d^H}BD38b( zkH}QP=pLD^ib_(2Cn>}7RFP$q(c7jHs%G#zAIVp_ZR+(0<_izZ6&{=}+&`JWPnFpq z&25$=Sv0R{9Om}S+VPZ1W$adQLgipYXa$G79K155}sjUWlt0#DI#$Mmu&H1$LWpde#Jg zzBcr8nf0n5lP(k-Ko^uCWT&IJs+r z(mh#8`N}(e!Y_L&FmD!o6}WyHhy@i3E-Zi+(Xz@GA~tCs<0~K*CI=JinbZ~rDee%j zlA1Nxm6On@&fL$Wx6NlBnA>nvQ*=TH{i^gu1c7SHU)CYI3K|jRtCkyD@YUW|2x5V+ z_P?SAU+sNG133#8EGSo$tVnJH%;Xp$yfTk!ygb)*m5lO%uaNeDqSo}~IYDSvOMHdC zAS6R#Cpfo$0b^;$XkbpbyBl8d=#2Sj85hELO!LaCMDoyulV856f4q=Sl3s*Mb>MMMu&%$bbeGz>_JzoK_09Un0ZojKTixBgJ)i=}?*VT})c4FN#f*M#A z3zS6*tc9h|K&-77NUsnQR^f{a>rZH~moW2~CjFR}yJtGGZ6>O1E^O}{5G#DYHu4}7 zb%=>Lzyvnw{Oh$LElhA7L4q`quE6F*~RrbrF4gF6UNpL 
zR^Qeg_>gJ--fYcVqva39Rc|OZ-IkYLp3FTp8CpFUuvN&}CPz>MR4?$AUG#`UjKY>X z?3X(eR5TU3X#z_vDPKXwioq5YgrAg8fUmGR2SPETvEnMmqqdF(7s>;3l|E?-_n1+q zaG48d)HRI2m3@E=Ml5?@sYig^&2PlgrO!2Jz|rUDB|Cnvgl082k6Z?_C5PoJ2v!UD zcpg2$CB7nXMLr<0q6#_lWxMLfKb3s%`;vPP#TRc4Qocem1I$-BCEZae?LM4#JI@YN z$F6m@J=o@qg@ecxMFFl5D~cB3w@kZ9eO1DOctw-1NPp0PvXC-Lf<~>nmnAI05?R>` zRxFpHeg)GMUgN-5c$uSJ8;V%C;`I)nGSC^8r@atd>9Q17sn<93d_}-%IgXM@SZEPa zH#-|UN#P1272Tf#@yc+tABaT)+tHqyp)nizN52=bp)vCv? zpe)K%hSx`e6&ry?KJ*tG;*(y#(!&)FhIgR1FR5FeLcoM`YNoliflwcdRQ<3h1sEV98F&nR3eUo8>4zD4LPz^W!aUxBP> zJ!9YbtV0*s4&czu9=cb%634rdN4K$8x0zqJRdA03_O)?|-Q6Re?~}j}Od*ZJ@b!Fd z0WZF^CwX%ZX%SZOmiVfT4+~b)^F0U7@{hbMIDJic`o_SER|O}obe_G=yL@jD`qkCf z#e}au8vEYQ#(w(y@y>p=0^==w_I-zO~MiC4T=Iv7# z9$Y9sq%Jx%1Mv#Y!Sp(DUJEwkOv2orRwGBCXu|f9w^8S!wo?nIxmU8TjKopgC zip)7eXlmDI?=Ex;0qwtO7x%FB24fTS`)6%_yVCv3Ro?A@DAUj$i%^~=hmVD%KrC1> z(Iv!8F_x1)=l&B~?7JRIpH+~p>|=-QVuqX(pkEC`u!3v_ zzH*JmN;nmPtE4663UK9{IR){G@)cTy6tRLfOoOkWUq!I`6=#zg`c=d>4b`uRgjJ=X z5$=(dQylDlZq}vj))Bti$7Jo-O%%aE_|sDN0QQlxwZxN5qAR4%q|5!)3ygK%D9 z^C0D`dL(oqtqbv1$ho?wBhD*QIFLf3d)q?>h}H6dK>@BB?kytu3gHYm0I{gQ9dK2B zOIvdrrYr`OweuFPY~5t4Z!%R^bXc0R<1$lznb~@gbO{Sz)B>?`PiQk=&~X}-Va*c} zt<&MHvtey&@Ky8?ChjA2WMCnA!D~qUqh4ov+VS+)<;H&8EM8%4Sc*W{hkg#yzZc{~VnTRNv z4#-w{#ErOeM?9jG?vV;-&ahp8I50u&=s!&LD?1O7b3mW7@2|ar?D~I)7ng+qu?;2k~kJ0^3d6|1Q2l?3Es|C|?1tSp5q9K><~<^l*iPp08+& zaG9WpABGkME$|gPFHf$9uY>3eqLD;pE6P_Cu5ew_rk44N$X51bdmX7K@fF}o&sTc5 zTIDImd9}9)`W5!PfUm5>#R!3czdayhD1P&3ex0hMMZRPI$j+m~yN*h#4h_^C6}Frl z+5O_k-ZLo4p_fNaURND@NqFpn=)`5&$t%OBZpcqvS3G}t_}I(Cr>>2?cxCMD^@%gr z#!p^SoVX}Ic~yDx%GmQ4&cPUQ=`o<$Qw>5YmX|o z?v|{t?29dGkH~G0Ef$944q#D>OY9)bs92K+jsjeftX+0ykg8aQjY5zW?hmU$M%Sp05a8q0k3h7OwzT zgsfG?`8hZ;V|RypxeOU*&ZR$wXzjJtL|ZPx+Bdd9wsEy7y34?!%q z9wW~#Q{OJwi6LQOgBzB`qfzJ;%kxhpUMx-yFKPoXrnmwPCKcb~r6ed+E1;Of%-q1)dXe(n2;@BT#bqtBFo z;!O{a|BJ8w`IHC!3gMzJKNtwj`j6=1g^&zoRQ5!AsWvK05tuBB%vL7m&BW);L>5eN zipIE`rc-M)sdY11yJz$FFBBY{FFH6&^ee(wicLr73fjhz+zxYlPQ!TCu5s>0QQVf{ 
z=q*zJ^?jb{LmsIzvXv%j7|UxMIRa;2p?grDspq>!fiHa|W+=^_hhX(pKgxzPWbEH%7Tk@%M)$N~hxj3^+_j4t1X=lJ zj3IRa@e7cx+~B#29d(G5Ig!qw+$Dzam0RMNN6HwnU1dxHuKcp5Npmn~Iv{rjlm-2Y zN?4(V#E1pH;*_dk!HU?Tj@qV)-l2iN9V3Ks)mrdXT&)KB6?Q#?uTpn2se2aFT6LNG zG+750)*sdsvJqID&$2sfSiS<)==lmF)t;NgYDM@8{;RtT{8tEP0C9n@&=Uk-p*aY+ z!lC854%Kj(c-?C*YIa`Kq0n%oJDRbqxdsq&ZV{kG6)nnCpecl9Vb~B2K@<+icEBH` z$`u|7T7gSy73;I2e1$9ZTrI&B5R2t28p{B_LU8a*cNAGz#GS=%d`75y%?$>7a}XC+b(NUjfmkT4el9w#*>H+TSQ$qb;`UC( z?4OH0xDb7CA!5H86?Iq_bCiLK6}(%AxCZx1gpSU-Zkly2n6l5FfPQ74Hs+YD0$R#iGM2xe$!uU;lKU+q z`Svkl`)G+*#w1+s0fpniWhx9uL{_MPSWvOJEMK7^2)=?!j5jg}#e#|zwGEUt9Z@nF zRxlHqGZm0F;TbJ=;z*psq@LU{Xjxd4;}xXz4j!}dAcAlpw=0PIwqlCGPxtK946(3- zuSiY<99Q^Yc9wPqahSzoLhK)8X6F#wuKLYq(vN;1dh5HQn|FmTT^V@(jG*-}ucE#O z=Bwn~&XCwIo&3HqarpBptM=7aypQt@D$v^TC;96hIep zijjDw4*RAJMHWjkDn{~ZWhG6LZF^-E2c+Bg4ptqK>}c!X-rCoAVicfNe_V9nCB>0T zs@4}phc1jhe|7SO>(eK$&mOukw)c!2O4RX7Gkaf@x1Jn6e106Y`=o65iQ&T+CU+ei zXgxg!n%aAIy5)HlygC(a;ytJ5x3>=O*sI)(RzkHnskARRy*((sCoE5bmvZd8cZ?ju zTU2Ce3$d|pmqlQgb!ZRPe>+C?xX19k(IreJ ziCB?&{OBScf<{v}ce5!Cgs-;m;aBhLsXx-a=VbT6bNmxm2F~0Np1nSB@=6c*>f)>Y zh-iR{b?Yt3YwwSK=f{eVf35g^=Y&w9L(1p^*^={>5kmckhMJY(`H4ErU%m04k23!w z9|01ZmGjK8OWomlol%AJh!Tb8Dx*}LSU44tGZLFWk+fkZHg}S%9I9zFO=1g=ui7MQAq) zQFu03`0mStB_1h)Yuxb`04ctIe%k4GP{SO#BIIc!#tofB8xG75B6I^OS-HoLLAJ7w zkRlTjeC5Q23rj)8E2>-(xXPXZT+tR`(E2$FS0M#+oFX;xUzN;Tih?Uf=&f2eqgoPA$vWMne!&fv{SUhTzM9C~^nf4Sx!y=PQa>6#0NvXbwWZ0$fqPLURy^ zMOuVxf3WTvS)Qm*X21cIg%)88ELe{$?vL~sffox|4Mpd4`KNT5$JL1kXA%$1Cmhno9nx_3 z6U2%805f&`)l~3)w^VSLHO%s?R^ej`mnlL@2We9t1ArDmcW6Fo=qk&6sC*iXL`}NA?0ga_ex| zc0u0Z5%f9_z0Vx^h&l8Dv-e%H11R&LEUKnAs#28QqA5GX1g;-(iXXskam(<2Jl_!N z?z=%AP^<{uq6kMTutF8JLlubyo!Iyc7uJq3B)XHt(dseoj*-Z1*aSTuwQ-75Fd3XZ z4wt(px`Z6DOSsf0P8FCq=io1O^_98$4m&WUjeK1^`G+e{S!i~b}lfT4Q6t0%8j>4t!K&-J_wq4=C!WCI^rzb1g83bRg z@s`4WwboYxz9JqTKgc@+R^fxLi4wm|Nyr9q;^yJ3>e2PPB#6M;zHekptEBS4=#IUz zZEd1m$CS0l6g5YO+s;nzzcAf)ZesW8vDPya4JQ@NC&ucYmp7l9Y&kvAa9rMUQq_D? 
zS#wN=J42g7ktI8A!q#*1b;l?0Wp^G@HJq5+eM-BjX}F+HR@9)(+%8Eh8w$@w5>C5U zf*>GO>K-e!4<~~{*pY`0AcEk`Lc}J^fmf8W=yl2E&Wcxh%EE#63PEMyt0lzxFY(pa zaf|9#R8WF`Ma3)XzcRG7>pO$=@C5zpFY*=HZ}=Eot?}tM^2Lg3ET=|)u#YZSH1Qdr z1-_!ggVg0tyU%!nQNALG^(4Mxf$A~7qFPqxO6M+uSneG_EK|Qu^MFpPkZxN}w;NaB z8N)-D&_A^Y5*Av738j3pD`y)&znYH}tZgm)ingwrLtV`$diI^=!{YwpwL$RJi`V#P zuJv5F(+dd;g(<=Z-y41BNAeGUDgW#XRsZn9#5}QJsY#dc35aFLREFojic(L!!~aOS z`R6|<{07FlqUPhDM;8smmgxMGWl=d}u{mR@MKkdwQ;6J&$yG&UDZ)0$ImODj&2u^R zOnT#N?!MW=L+ZlAOMF#)XaZHRSC!wYDrlQXsgh>Zjpf!)aZ81%wGe|SWOpSXA z5`jAHKjo$QiZ|725<4S_k#xMvDNRxgQ_P2ylxPxp4sK1)0k(OzavXcaI$d zQ`v6lhEG$;P((_eJxt8iz zvDNDM8V&dgD+rSsb%~9dgoXvQ2y+fFxrfM>=lr9((i4bHAc0hyPa}Pu@D*vyu`YL@ z%N|&;?l60AFQVYQBIYY}3~w!hrce;CKv_+&mtWPuD9cL5{C?FQ)D_E_dRvGpONhsEn_-f~!MN|zdVG%pla=LrVeUd?nJclM2>Dg*z zSqojlD%ST7zS?wtvE)2c@Zw^|vH94&6S4cIWB03L4ydE{sl)fpqd0q!(lC$yAQDGi zcT6~K97li9t$5aH{iJiwH2BIXO=X)f0`ba{D?)RSRLqch%mB(Px*ug8+ix95 ze`GAKPMBCX6178$CC>S+OnAvS){%RpAj@6kmottnIpC|nQaSi4qFfbOF;1@A$5^Rq zocP--l+op*Sek>kGs(5nq;ao8h91_vW3D=1~!*tr|AqCN;}@Rpdn z4mbsh1EUAML%;jkADP(&fRh?)1*hjUGwKD6mPEM2(@sDOs73G!mUm`mmKhsokj(J2 zUrFBkQ1Zq*((89bXD$sLIoa2|uXj^5KclcGBC*|@^M$SFm#b~QfT4W_;L41LwDu+a z3X_A&e6{3nrz3>R`W4-b1MzA}zru=Vn_h@lBqV{o(gPK>Nb;v+FUhB34>)7u4}}g9 z3VcO4%4NxLMPKjm@`p|$1uz_z&IqS+0AKYzf-6?JTGFrB&LAbLXPjBGVudW~w!(E8 zt6*g(1-JrM={;A(*3R-3_TnK-0QsV;eTPhg#NaEl;C_I#PsXrM#t^qynpUpJs+0n; zN?S%iS(}?jwzn#`waUx)jcsim-qI?qK0@-G4VGM(8LcthgdU zQW-ye3m+}QlDb}4+{;_LcOL9)eu1~|i~tc0Cod12y*_mIM*q1Ry|B1ricrs2Z-1#3)a8*>Ys>N2yGsjVk%mH7h5V$+QwjJFm}BvB1Mu>I-jz69()y< zDG$#YjVd0C+B_b=Wj>=um$iEi;hCk!HN{8giw;kt%8pKNIzC&tZ+!i3Wp?8zcT;a> zEmGXo5k&)>4Z^7WSp)?`fP!-6l-ywx^M!4&(AHOG=Yu5bFMTt9!72Wwee_4GT)$`J z`k7M*ma`8S`|^>@W*UfrK|Y>@<``M>7a)w((!bxzf5^hG&(gQoCa}*osNd3$Zxb+p z8%YHW*aV93#jV+LiX4HV-9AEQ8zzBxzHv<(bwssCKtn^k5^sDuU2w#=FfaNuW zuQr|0AUUDt%2GNy#hBd)a=%TO3S_na6>baO)oAq!^|RIDZa3UI|5+LsWE{c!i|i@RTAP|&Yn!Gd6gRvP82 zov-LnxP|f+$ck=)rg`rz7^1{t9d>IWdWF?DNNxiX-H|v7{c8Qmg_J|nWaIO`>9~FK 
z;4ALldAP7bTW5lH&jvKj1vYB}8Z;i2)8H%Hf-$??F`F!^z00&V8=Iqh28bJCBFo|KaT9 z4D>5Z5NRo2X>r7IoQ3%%zGB5I%x@#TT`Rw2 zhaf4hJ2d7Cx1cYq+&fm=v?Hw@eD#b84@~upF{@uuzB1;2+qCyvrX-x<+ZOz#lm<4a z;fZ_&rZU7T3Rk#=0@YvQE7}SqFL12W7_t=#XFXrh!9${AIg$|3rxA6vT$HSy$X9yA zLiU1Tt#A|4qI`vJAjpaq?GLW?k^lpcFKQepvk6yPhRMu1LWfw1Te8F{UVu=Vgw2ZN zEwZ%j;tjh-Htdq+RY}vf^k;1EFKZqvs2zrxIjdqIzfM-#tjMk!LY@PH6!L3F^LFAE z?1mN=HA}N=`*L>m<~K_=G>J27`ZB8favQ`sb&`Z_{Nx?|IkmFPYH50M6T!AQrAD!dKWtfV?{9) zKe|8w2@Bnv+;TxlZQtf5K2or%4|X*?&j()}xzPXorM@%Q24A{0c>Wd;i+}!B@1<7< zu^IgSdy2Pzp#1KSM}GF((XPRHSlqEC=U;sF&*iJ(NoGs)AG{L349%Ov%wST{bXMtn zTJdaR>0I(gbxPTMT;X^`F*e;yrBy8^mCwO^Rd`gr@daJk^P0^kG#ii2lpLDC=E0)< z6R9<#x^qm`31;(Qrnqe(Y5QpS21#(5(kE8#l>q0J(4N}|vpN(2u#lyHw@vVuYutbS zl;ejML4URgXoq|aZ4*)m|_$HTe-B(@oQ@bPCILPUToYd^jP zCSLspt$l~EF~lxVWa&F(8zgoJm09=>*l}d8(PMxsGzWcBracnIonqxK@d~$OIX3NL zTF^Ip9GVq43g@um1;3mL?xsZ~Gr-dYb9-c&7FVpxL6AGPG;G>^m} zK$yCrL*&At2V|BIaD_t05aJcAR`?Y0x5Eu}1t|^0ZH_+$Fx!b9AC#-}S|Ap*sb$0> zCM%+1VQi3X5#mZx*;#1|aD~rSonP{6(cU8M5<*f#8xcj>k3Kg1MYNXY2iYk?h*uP} zs2z*;3L!T&-qkkU*EB!SwjgEnp|0(X#eHuv2i_s(EASO=Y-5dzXD%DzaoQ#mllZy>zuCiJd^!`CTah4 z{DIko0}GU|BHG9lAxUXy)q<~lYv|bru=^ZL^^8Vo3ouTPD zW@19Opdo{Vle2RKuVA;*zK4^)gqhwKNzT7_xdsn4-yJpgN$Wyn;N*ZT3wy3%J{LCFA#Z7Z@ zWdo_(rCBw~_>F_y(!QwTK5j{WOsO!oOccLKlDJipRxL@b5vNuO6Dx#Cm7>&YSyF{G zcIyy#;{fKe!-{(&O8en-g&BiW*cX)Bo3vw+Q#j%r&$El>*+%vf*92yQf`@FvrBJyWLWDHl3Y0}?ifsteGqOXmSP*3tD53}4f+*3o z{vyL#7a(5eFEI*`uJIM&JA=Z9D0Z)xFOKYSmxlD|Yt{ zi&$h}kjhqTeFsea#U$MnVe-r36|f40ZxNzaQ8<7_A@rI1_glc^?>B^wp+2;hYy=?d z2;m9D<|ZB_Ecmae^9mL$j14|z+d=$SEL>sx0Lbbon@$w?ii{RIb|8Zkh=oQW5X&-< zZxh6K4CA|^OBg30aab9=h+G~v5+-jHWNt^wsGy{thed=t5Aqsc5bQq1KX|qm64sfk zLoeMNI)8Hjhz0%XCNaYwd4w&WP{6_&N=3;Ck@`oHN9 z>T^f`i5T-g_OC_<4T?F>#MlL5in$P|s; z%HWK3gt!CxgV;idU4+4f6WlGxYM7~bkxAOcV9RsFwwc%}O$rQYO-xcFgsg?QT6KKA zI;~|sYp)u7Rs6yuzQS@qgePF24^xD7R~cwT)b)gBTq`oKULh(LQLe6QsD9OWWdSaC zVsu~TEBZ&ET0krxWY0*nE%D3XybA=x&#NQ)vC zr7S!=P!=8@=^g?CuM;~KS(`|b+qJ-|dIWRbT&%yT#Y7=Qr{>pDi!Bcs)SfpMQEhKA 
zd*5aNSGc|DH603kRd-)khY`YiI>1%cU4mF(E!=`?ysK+?m4v;6uiAi`uQRwpziPOp ztHJi1tGdc-BnxZXWd{Ahf-@Q{B1}6n8xQdc{`URq=zVIy6{mF`7239dAb0G_@v5G6 z-981rg1_CpWX`2v&Ng%0B5}klMn(`TLSzvwwu>LOiG_)sg)4|!5&fp&OJx$)N60F? z-;C355<P)fMK28s!x6+ z`{*aKx89ZBc_6uXU3&cV;GRRgoz1-)HulA5_WDP5I{363Id(j2*^V61uk+Q@W;~d$ zz*ql{E6SihNYDRGeAV}?BmY?^9sw=3U5G0+Ss5~w;rXj%MOXbI-1|{OP)}Kqu!xGa z9LS*OD;BOqk2MEDR?x3dkMWgBu*8%jGUW(OL;El>>X|A5T?4Tq3x{LNMw7OVWt2~* zZ6A*>8;L1Kp!x`kTPh1L6a{1p{4z0T%@54(3&|BCJ}W$5ik%m+n`IagirOfNC>2F* zltphIi7FcoE0lO=3DBNK`DPE`>Ydr=nbzx`-0Pby@yrm}$M79u2kg1%1NI>@0+uR6 zxLOz7hXP-b3hXmN(~#{F8a4_kUs28KF}|Yl?D~*)z}0Hs0YkXL`ETc|#}Mo9je;(}EZh#Dnxjb3{nRE4+&*GmY#`?R&62Q;%5WOw=vnb%%1Iz;@2w^b0_1nCnIwvgVIJZLYPoC6}4$JVxuf&`^@@!UCy4VqNB5btKy?mfGeyW z1Yebbvi6PVHH~K14d*vcBy1ICRLkMNiYXoTj^n#T@@#{+`q(zChJfUu6TBl zCQASjY1jG-)&zI04F416;#UXoVZ5{m6G1dU!nQf57gscB)_G$=IV^%5tDL`F=h|-W z%>!TA1@s%a^f-i!Lb<{+XcLcKn*gy@kjNo?#LTbHK1>Sp703!>gWf3Sxjk~HRP*J(67!h;47qzZa zOF@jf$<(vD5KQdU&<>hHTM&i*AZ-!iO!$ffQnBI{ntE)r53N8PsQHS(6&tnyazhcU zV#$ik64IiJo0p|6d={b>9tL`dBz^R&g=qJ>>r5@=wX4i7cK;x*Sd>$XhkA?Ibwj%g zBaJt-fU8=pAiPDmsPT2C>5ZjoeyBsWys^0ZO@^L^S1qd%gYXSwU{RPkY^H=)vR{HW+`}D5y zB8A_#roMyb{)1)^T8Rm;8%=N&Y=KsReduE2*+||C1a(7kiZ0XOr54X6jCcfW7<11M z1r{nIwoD|}EX41eh3$%T26s-OQfjA@s#OU!V5*6X-4m%zW62HUDfNg(nL>{~qG&uk ze>yNt=^iaXe=vGIW9vKQ6|S;!6FB)wqO%y6;Bg!G0b92p);Iq~HA#%?aInv7b+eiU zxUaPHSdKGG5DWj2um;_nno&(KfB5t8&p(lV_#+vT+i%>Jp1UkQdTOv`Kd+*WuEh~} zaXM|h+Sl15O0*ra75adN`qfir-AjCh<{;&(uTj>wEU_ZzEB>oT;?=T>^+>#07P2TQ z>3fC5Oy$5s5z2Dp|3$KL=)>-2T2CP|)E*KR+2ai53S{-1iwLF1D*b`-)w0`)bO{Z_ ztH=3@!WHHRm-q^4VXw z<3TwR@6`UtBBgJp$Rnj6cW_Vcb4%>=NFMM`8Cx+OKYB>M-c*tJYn2-PRK`b%jHEMEZ{>Aq?9%I3im*-8j!8u$ut z=dG}ik-Ya`Dytw?&H`cq#t>qUjv<`z><%VuWm@C56y6RYi)M`?0*hu|LC8Wq=fGP5 ztg?Z?)kO}rod|GP;n=alseOgh7kDj0A{JiAEd6`zf_ogpc&^c1SWSp5tf=*Z#L~XB zt-X1bf}&mgjg8&q`?_}>;ng391*`AqOUM^RXK?W4+e4Rb4_>@A@bb<6%XdY$-jso_ z-u`I#2cO74{X*3%(@kp_L@??7?RxPF2fcWO!@u;ae-ML#ul~emQaAm|D~%tVI~$QL zkIPoXrK_UTC%MQgNF5E$98D;j#K1V-Q)g7p=hbWS_stX^pD8^)Q*>mq^ynn`3gXqK 
zV>9bpuyb%IqehZeCC#lq00I^I11fVRC|7zDSO+CA>L%hO1QsNvd^GYVGobjfTQ!<0bAKElX z(DKY2h94%ZbQWY4y;TdKLN+PjDrPGKzQXN=txQP4ctq(8ic>lnQatXNEl#XnOvL`d znnkn-<905*MgcTK4Dq2p0DUo zA#Q)vH@rZ$=fDp}^Rh?`=>_b4M*KU;Bi)Jg(W|6gNQ+Pw9_LMF*Y(An*A`H_ZfNR| z&u~ZA@XBJ{tJ+#L{vK%RUeng2Ue}-+-_SL^xd^_(q4D8j1L}3Ao*))e1HO8V*?FI- zxkn12?hR}}7G?|)_Ksyb@Lz$fFhaQV`XcrcLb=-dvIcOq<$?wgSS4q5$Y;nqsl$q# zxVG8oz4L&psJ+_oHceP7=?sSK)`sj^3~FBV+okoaocGu^@4k7#wPfCT!<=Qxm`VJI zMeHcboGY`4ky*!%Sa7A#vP{B-;474A*bsbK6tM_gg$ULL^{xsOAoYD^VDBnOtikcfK-KRl-Mh7Bs^=u+?5|3Kkwovat~Fka~?2rmw0g|%77?pztZ;y_2w(R zeg(ur1lAf{(d`NluMHBdMf3qwqX>~x;s`7UKACd3Ps8#jLh{DEQzdqhJ$@O(Zb?F5 z5>!p=NWN42kb8>MB~j#@AOtUw>C~73M{FA>nQ20Ym_cO1+eY>yxfJu!w6H16fh)!+ zE4Ha3GsGr*kiA7@8$IX{C$x_0x8V+0MGTk*3oN15vWR6GGKeD0LE0Y->Bnzc;Fh6m zrSBCQ@|7O3a4>8O0<_o`A!&>f!3zI&+9mv2L-2`w1*ZBsU+L!u^*eE%$X5U@idbJ! zu9idEK~}7HYdPv_8Lse;GRMzmka%e>5;_%Cu2`%BTrE{FnU*30Q*65s9YdmMEmIZ{ z3n~`YU#{~YDeg34lwOzOu#nuo6y31IS5I3bz@ZzIWr$c%u`mmdw_rdl8-Jc%NVii& zhev!@KpGD&tk~k-)XjacU=`HxN*cSiwe{>gEC65aJ>7fwtl;^Vd(T|!2VY&jLlBGd z)y;>pd*2y(>%)=n|6=r$KaKH(y2%A*?q7WM4}zz^;mb}hFrRiXvBf|0N$WG}FxWc*Qt+Rlu#A-$EF7<{+b;17W(i5{~&(9PeC45DS<*O}6=L%a@ zY1N|4nvuNv@$6k=q4_*cexG-8Ps1rDXD4Ig|GAm(ppm->&1kq!jorj1?jz=&%9YN8 z*c$?sz&Lcom?JZd8Z?UPMzkYxvn>+^rm@{fdNhgTtqtim3F#%hLhl}A;hK5_sQ4xx z-6oy_2v*pqiR^dFKq0WoCPZWrGyvHOKP=XqA^h6$OjdfQj$yc-lRxK^t#nBd1{Y2S z=Bt7VCa}I4%XZNLOsZlw9bpQgO>I)ca}{1Thbtw=vVQ`XV|8WEnmpn z!yuv|sg_A?B!yjsh{J-{J*i2XzFVEMUsG`G5no}15c>fsy&x2H=M^2~pnAer(6Sb& zhaC2G654QGM?$e~GE~AMlm#o)@=ieT6_hJttbzu0k@%*7K)3>~%Fi#5jf5|2NMJjg zGKzL&`FXM{2Ssz;^|#|tev#RMO*+^R4e)z;5f2YB>04h~#9|%t2U!3EcR}5%!e4QX zfeEVO@mNM_UO%^o6n!f8*ZOsGSuGh7@Uf0yVwm_>6l=Vtd+P_S!Q%^YvP}HW5t#i*gWBpt| zmha6EjuYyImqVOTT<*R%}`hb9|M|@=aXZfH^DHjJiLO12$la_h#!i+V#h^kSo^W$^FKgZ_w^ zP|*IZUzgYMPjNH*IAXxR3-Jk!DqC76Tgwr4aD0z zBfn1H^l`t83FQ7|*D^1?$AlJ7!C;ITE~lhH@D;r4{_B+<8B*MWGYaGL05I+r4frZ{ z%R*8)6IMJ0#0oA@hLvC?oMy)@B1hXoZ25e?aUb-4$$(65&KSM2;C z&;@!?CyEO@Av;OZh-!~-rO1O>a-OQc9&@})&k7Ue6F?v77UvbsRPZ|iwx%S(%!&*^Aw 
zIma%fW`R)8S5T8GuV|{TW2wzT<+ZtrtFu)%W~*<`RoZHevF;5W$V!h` zfUBmrn5H)w+@byrrVg7EA28MT7c1|QBL1rf4El!k4@iHo;Z+T>a-pDKy~>bX&)3Mz zAXTog9|tQ4iFkEVi*V806NIl~+vcNM7b16S!uIG;AS)Dh;)LvGu-`eLZqd6+1HN+I zsCLYswa%QfNt?7xR@o#^*d~oz#mFt9hoNE_g$Yf=B_?5FQrO5AE{5hn5eqYhgs(V# zt2n(YLi(QN^snFyt_T}=E`+~2M6fP&&FYRA%=Ci3>rq99>q?3rA@Uzc1n6JI{dEu8{i1*MJqCXBg8$Q%NPFfiyEFzn(d zwDB5n4wTx<4mZo&8EDrN?Y1+gJVt0AJ*^c2J^-NGzRdcK1Bir+u+$!Fq^ zeuUvc>8lT=7q5#?pO+kZLDaa1zp=bKA*&}Ky2r`C!_=)~m0j1j%(}j50*6)aw~YB; z;VX+SqF?>v`AUyfUlFWehF@+7K7Mc-0#g|x7S6ckG1*FwSbEtChh^mo=JqAG6*MbD zxPquf`3mCIl78hcSsfq-Vo?>#IAqWwQf$T{sN)nP#rjmt?0Bb50hZPV@UaII;u4y7 zkd-iXNUkA+Xy@U*A%1-!WCqLV@eUEc9#IydL&TK|EhR)~#Sv4)!U1Gu6CtsUB!w?) z6$W^d0;p^vg#a@wDZuTBjDU@h6zgVZ-ByH`NCgc%Rus1=O@Xf{VlCY{5MB<$BA#}t zTrFLf9qfLRK?Qs)e{)ANL^&KKM4Jb zc+%L;AimWoyi!00B)Kc$B!U%X7cR1o5ZiD>=pHUxvxxN_Gls^#0>Bk{GlfwX52S@) z3kl&XiLC26)vDoNQf>>;Skmy(7DO=h6qr5#Q`1?=ukDc#*@k&4Z?GUekSg5P_q&Hq4 ze&sFc>+cW0{}cI7eyjMRcOFX|{>4}SAa?p&zAX6ahrefnGC%Up=tu92o2!b;R759? z#Ux=x&UARrTymKfwkz&7NqqJ2`iA-9Rt*;7Y&;qi4#fZ>BQ6 zma$I|z)2OeeIclDEOIkmS*UkHbo5qPJ>rrR9?|>XJC%n>C!cV==Xg2`y5k zR5Q`r=h5E*U%`SEQ@Mb`y64y$4K@;DgfMqMlXp;u{HvlDm`!L7f^b-*se;q#@?ym$ zUBx90lF93DlP(z5ui#y8h9e8(gsk}rWJTeM@)c$VX%qt-Q;&*`52Yq?-cph*SlXI=hI%0a(S772t~5d~R{$xy7=xx=pClT7p;*zR*Ct z%v4-qDz9SmvaaHqw)&<~GOCnVa_ZYd*Vc~hF$J4Y5!fR?l>q(~9D2~Rxeq7w-f<7qpmGis-Dn$^j> zl!>(pbOsBLXx1N?%W9j>*{epPJ0h^c3nqfHRQ}0h-f;@I$YJLYsdM0PNbH=GZ@*K3 z#34}iZ$=$9ZgOj<-{uzna74-S)tnak75IvD2IpoMvCMgHYGDD+c37|&c;G+p9R2xk zgdhA+_|E&XS6-7|xGFq(R(SAvVSP(a@s^Ib%pN~(kG)^JiEH~x+fMM+|1m~LyWsEO zD^|t&s(#h`lno{a`%!xF3WsI=ia4gIenqt`{S{gk#j2<6`T8Pl#K+*ue&E^Vdd6Xh z*3?*eJ}6+4Q@j!l zJ7`B15mJ<8q|78#hyw->5lcnbDU3XGpu^(|`JL!m;e+5MC{g$d04m!^2x1a^2z(+O z#T8;57&?PE;JJs56;QhH^rOTDMGGpHMd%=k8nF=Dj;JnVBOtVj$W#BRe)Sk%=|=^> zf>ltl0;H6k^x#AfkKIo1LvO?#49N;-)L-SR#}JG52O(ZjxT28`NYGm2HAD*HmCuk~ zyh48yWQ7rR6bn~G5(LEJdj+Y`Are2Wq;RH1&XE$_0dLsxjb*0`VSPu8L$9eX_=-># zb_+tn0$Jfo%DEduSZiH+fLLn~kmb;`(yr$@+wN8NycITGD{MPgI&`ga>|W*Axze$H zl~V`q36%ipjuw7hHbEW8EAx!&^iSyw&*_ORpZZ 
zGqu3{_&=DS?2m)<#9?`J359b>c~dbdrf9Fa zPt6j^oWp} zIu1T((*f($H!MH+ch~P{)~ipx#n{IF!8H6c>&Py14%zayHvG@lnVms9hhz86hV35n zuagDTje1wf97+alG6&4!dQHLw7U4o7iHArm0z{U9;%A-OGb$jmF?jNu1@z(74f+*+ zC?HXK>s(c?b;}IH~9r2mkz0=m|h3?zTBj85YASqqfXp zR1mm@nZYA>8BXCuas?B=T^CU{70rGM=47N`MVAw}g3BFn6<@1ECDtv#f|YkjOZVd- zoB>o*j!qn6snE!~yjXs5p%TGU;4Ab8v1{)t$zEt?vqnja@G`Nu1FPB)jm1J&Bdkyt zHAqyz+#k&6G?*QfMXc=D84We;(&93>ZbMNKoH?S-t)8T;+|0>f0KSRrPIk^F!vyhs=qe5Q3^l zv3yncrl$6d1#*2$TmQ}?s^L27zm7QBaVYzd`X&uC?^uyY;!%S53Tuj?yWQ#VWUK3EO_OBrgLEjp!_fCy_ z#hmMo8K*5%4rP;0$hIw2yB29&HY_;jsvXj2>{Dm06DKWVlql=?ajQ6$W$YMm3uPUv zu#S_HiXE|xmD;DsZBj<9Qbx^_N6b=2Op-^9Q2 z1xqYYL1-=@VhC&6__)&DOz1(-4h~)-@gfMbAN=k92yZWdMk*)$!HU`BopV_Y3mNsZ z@fC`MDtRh?YMLk0TgFkaxT8Oq+BlWbph>Dy#~_Vu{d7Qz$^**?B8EL81-$@tmFG{w4A|GdF)X=YhQgd?)^BOf7Aw;X4mxFI^Tse@3+b=umY-&xW!NZW_-!l5gkJY2woHoOS0nO*{Y1 zsOOu;gs(`Wka*n<{8vxZul`&4ilk-%u9mNWs}+v@sIMT_GF%b!)iPG;58x|=Gw1~? z!^5+%zPn-N4H2v!D?`~z&ssPjyd4Gcid>=I1W4Bg!tH=5!qHX!gYdeq4H!h^ly#)k zIG`6olVzA>l{cA^HV>DWhe}LB#O8?13X_?!f$AV1L?{qd3RzX5gs)|pq;UeegPwXL&qe@D}C{=QQJYH>e%weP}>e(=@xds6V#)%%j`4}^E$ zkszW07WY4N%}U4q;e4f!bN>e+%Kxvw?8E}|)+bER`X9%ajz(lna&snwlSiU5RNTyo z5KJd!&nA~Fq-~y0-uC~o_ZHA`-q+giU3Z-}oH!=QVrFI@F*CDd%aWOyotPaahpAz3 zoEXw1ZBjaIPuo*SV&RB2q!DX08lL}s_WK`=6(?y=zVGW@>#m!wz23e9lI1sJ&2OIl z>}StqubVC2r9x&qnuDt!(qggQ+J|+3tLkIpRfor`4^P$~RU@aR=D>W-0d3xf@%&A& zV5w>kPyB9zz4#jz5Yyr6E_L=A`oC9vvf&&nKQt9o(owRL*~hiJruDhz_O3|&dTDH@ zU3#B=;dS3NeZ>zm(W&|P)3X5wrGfk8u50@ps(S76du>xFo(R7m=r=o|Ia;44_Lc0Y@J z2I>Zqigm%vl`C$(BEABW3EA%mRX}GDMXW^wgC?Bg*!qYL;+2)J(AV1toC6&}u!4T| z2*DDH>snm4!VzQzltMK=U`92ZH`SjrVqs$ALxf&s~Xi zf8D(4O==N>vKI3d_U3FwY&(VupNDK^T=z8Ffac&6l*Um1kO8sn^$%-H(2RbR2_M(0MEO#5Ou`3g;x&p2-V6v%P%AXjY)A7t{k&*tw^ zc#LN@GIV$u)7)S1#kBlrX_i?cNd;PWbs9J`&XJ zm@f1l!O30zlDh+>D4g8sH-e+(okl$APQYA7>A($ zy8#ALq}1dR1Q{>`_r23SiNoM4LbW*j^6bZvi)nx~^ef5Ky`Zh3I{<}^epCxfk*8-=n0Duyw^tNh3b zmNP99@@cq0*!+m#1>8j8gl`z+X$<_hAYqa7YI*Po-*vcHyaK+WEl7Vy;X30=-ry_v z)ezOFHzX`ruwZcqUm^dBnuD*5pg9P>dj3txYab}c;{L0# 
zKYgVglv9Mek(p2?X6EyQe4KFcEFnL~6Hq_&fB&E6pMUoc559Wuk1VL@50Q^FM4pZad9V z+InvDyLPAF@1b4a;s1|>wK}-jSBZv}C3L%&_6D_$MC?|kJzx+$s*66aa^E@VyPFUbBqjuT8JJcQx*vb5>OUYET6P7zsxawxI>y~Y74`( zAcg@UNpabV_XL4VP_95zfU7mS;=sLII_KSWv5w#<24#({6r(BaICj`Bx!_fi@2cox#O?#r;>DuV@%h zuwao~Mtmi>)))ASQxvB)&RtBDi{x;aGRxRCk_^^&qZCOyDalRwkeM(}uPS z^BbPkZg@eD40ia^k-&hdzzr|yaNP4A@zw4R*p3g(J1&{Fe`MVDp<(kS{nii7+dg7j zK46>PHg9;tyb+Uw0$%~KXpr!Aw(T`4z}1$Q*`^oh4+75NPh9&r*w0k=fC09v+6U*W z9#EH^(G{OGmY!mnhgjlHa|G<`t(u@VL&$nlaGNQx#T2l{6wu59TA0^rljmxKXQSS` z(GbvN3~T^j>HR9Sex+KUA`K)g*PMB$ERB7}ylvXNP1>Ao+9cr0C0~UGA~Y_~QjJ^j ztV`h(Qs3Pwr`>B*Zgq1mRdaTwGs_F6SL95sOjE8(Q`)2`md8t$MGvir!K9e*UKuS# z>%lcnfm#vV1D^&~G9iA=EpZH?*{&j`d+HcWhQ2wIQMLNuikZmTSwxIR*N?}o9!EF> zlCLs1&*p4X(UBkWo zUdPV6!BIBiJ684*U#$>R;FXQ|_EkM7&R2^O>n6Su@)_W4Cqp|YqW@LCvM?1VD}G$e zS5_p$LqBz)vLY7X3amxt`JH?v{XV|3vKA4Sz*jil6C~w)#SzQe{Nj`ac?a?)6e`P5 zUNp7mtbD~Oiyt{z(J^M|CceV+Zz@m$M=Y=bP>LfKAPU-xS2E2Kf~N4%@+Q9G7J3r9 z5(jb4i*CmBlcQTOtnw}x{{i@q1h}F?sMi8talz^aRtaN+bW?~AwJ3NCe1)49vf?KT zOl5^DoDg3H%dyFklNB!Mjmx_bl@HJYaaqG%!B-T|?l-&?5oAcz=c{mp%qu=Nh|VCj z1w)3(b`^eOtq=Y~;DU2-1+sDwMVkXo5)M}wJpf->;3{nx!<+tTBmU{b0qGJ%WaH?U zGUAmu;2zuW64mPz(SrpdnEvAh$^v4c(+4q%Knq%y&?V%X3gK^G;42EkvSeYQQRvfk zm-kn9`+T)D=~No#Q*aQo4e&d!vfPqKaNfSjDYfHQTzI_w^v9VcUH@drygv z2p0GA7kY`WULJV*)uHF#8h!e;k>_5Qyn0do&XrMEu>SL}+MeMb`Rd=zQ2*@v&*+G+ zd~>cw=ZZ6{^x&(Q{0ZtCxkvimNu zpSXPF5c}sP0e`d!!zOWPtpoQ4eu*I(e;8wuhU~Mt{F{a&wvHzpRad^u63^)a_Ko>( z9rIkHaI26zWl8KrBk(Gaq8_hsOjP=$Od*2^darl#q-WANa@s@l=Aj<=WlZ>Ijbm~L z`{RNNCy;H0?1j*>33O*LEeKmXlqkRwAPST6fu+OMM_J(>7E(nL7F30jJuG9r8Bfb- zGlrKdF+7;jYQ`64uBQP*gpT4NbPpk6AY`jr%mv;qd!Qaf=|*C98lJl$Mv`gzJkA@M2b*k-2hDITYSArOI!)^5t_Vq9T6(EWORs933_5s{vHRmB2 zAqIgSg?uFFSLU_IPJdhr|J8=)jGJF%o3TCcCG*A?j2lod8W3rP@jysesGXPCu8-Kx z%WTI-4B{2lCDN~8fQSET%bPH0k*%G`iWlGtgM@&q7fl#V=ng{hY53syrhN=fx>V6i!VLohw9()zhW}s>{`n4E+R~!A;5MTMMHhMQ3yqjop z(8^c7RXU%tdG8X9cj3Hy!JKQZ$^ogf=~ED|oHM6fvZmZ}X1xmLeTs;|JPTCr1(R-A zsZ%!PSvl)brE)8uvo9jJTAnenG*M=gBwrRca!>fcl88YY^jYHM_*@LQ;(P_*2j|1x 
zf$TM$K-5A@0(|9~JnEXFz`%ZB(Ud<{pZ}^x` z#JG3(L{PXYAoA<@+_xk%#8*b>SMw$#Tq!`7cMo zS8u!}f93_+JV@aTM+ciX_vTb~MWpt4W6`H?`*OFA#e8+!%3c%&wj&E`RSyb?h2!_} zmDPU*zJjKN0ue#NTFglQJLjCQ@Exph#ZS0$#lYIE8!~$Pg z5$mR&Ecl9VDdOUlm9L1k7Sh^<7NNy?1-=4Yk@!ac2}Y>nDJ%*4HeU&d1%#V1!RaL@_UlXd(&&X^44@z zZt7~>)7^Hkd&_Y#T7=x!vKMAN8w$224NjG4Rz}zh%CeSE94~^Xgbq(Jc5Xrf@W~L0#I) zikeMWJ{Vi9C~PwpZP90M9WOdKRefT%;oKa|SA#< z*}6lfyp2;am3@IZ9ijQqR|oL2M00sX;9tE&ecs8v%L4u@wZQ9Q{!0+U%}kYbtl^Q_Y7hgj5zYpPQcaH3zX9i z#NzrD;0k25^$7!xd?yayWy1-Le;>|Q6cZ{i)wlTya0SW&U!g(C`O50Hx(TkpRH({x zrs}iisp9myE~` z-Te;R^*%{hyFO&Q1-`OS7WfLf77XwhGsOC11U0-t1!FuVVqq!as|0MIH8cmY2Zy}$ z7w9Jk4K_VO1?N@mL#*m7D>=!E?q?Aj)WK`jfoRF}QJ1jU zAan+)IT%o{_p8`8|-iW_y!oODhbb3i*!H0qeFbRsX7 z0_pDdsS*e5CCrl9XUl9e70XlQOA@7b!#oo;4Bs*phSu?Wn;a|@r?VoYpWZqnNANnt zK5-NzESRsnQl>nT$D9)t&Pj5ZoZZvq0fmz>b=v4Ub$s)@fAKK-(eX`V(e*MwRMzI% zysac*p(mJ$%&W~4SzBgux2X$u=yNw|lF`mC84Jr*Lcfa3*FSD(KCTzv*4?+l$ZL)h_B3uxY9AS2oZrbqg8Xhf_|kj8K*Snj{d1%{(9uvr=uTT znRxw8`Gx0(&ObJIpNUHkITF|d<;wxxYixCSE`iuDr zl*N06R=(o?D-y4QhPi&Vn6CuH8sywa7j}_;g}*yms~oO4UxBQAG9-v^CAi8S4$c_~ z&6S}5T2QonL__Xzy;j76iiMRT7Rm}06JjAdNIv9R&k6EUQn zSegUAy2tnHWq~ANIUtKWw%s?SJ1j?>P|_>x%4siI-%-D#2Q9))M|yVL-+l0G&wY=G z&pz4n@UwjgXMlty@RjWOH>4OLeB&ee2cIcF|C8#=?s*N+N@)BU%toEr1jPET&ftx> z(I5Hh2Qt(@dph{)^V!GtcJWMeba8%a)_00X3($7 z4{9+G*K}Uja8`|c22}Iuc|emAy_;wo6^KRh70{QNG)h(lYCn z)9szp>zMHMJpq5dC*UuEIg(pEKU;l_?S71{@c+x5&VRVo{)?2lv7h~k!Jf1<=8Gk< ze_5XTch8z3pEZ)e4P$<5$2{v4ZskgsT#7Bis28vbe1*w+ctl;|N1dW%d`lFP9s@Jy zLUJ@{&3LAayNMKjS<`-5Q@&Z_VI_0Hg)>2glU&O}ggZh+QGg}%{t#)E+^SAkGaX(l zg`TkfA%=vaysZokME^WVMEOMN4!GP^iS?S8s#*BBKv_AP7)B1!F~l)_1IyURvbQj7 ze9qfrDm-L>#l7l;x&Ac9@yu%3YbE}_J!|{%YQAk)cE%?6KB4K@K-g8CJvMw=L+<{)Z@F;>B@U*w6vB>!f=Ju@! 
zYIxDK>3IWJ3aaH>1W1BBjhz8U)4EQaKr$_O4y?Fs2UAv(ZO&P)M^A=`L-Cn zSL?l+btuSIP_dv{p~!ZHYB2fM8N93YK2bIurbPFJ|5 zj)8pe-p4=W0+t{(tcaimBFGnA8ZAZ|!@bdccSrW!6+Uoh=)jWjp}Rx+VO_?iITU_@ zW1SO*2O(QIMoFNf24$%sTS2_SR`IZW4Hj@forowlpS?$ zUi=d~B)5xsQGd{dY*8d(VSe!cQ1NeQJzCxD=nPtmcLq6eeIH-l=}RrboA?UmEAGF7 zc*Xh3+QZ@}Ty;T{3#~4W5S%Ln0s&l6myPok8OvkH*)9}ymQhyv;vOy7w5$a`JlWBO zdpH$fY>*}g5h)- zDtt3!oU**gcO-`&9>EU$PeC%_f*)Q&dw*hR!6G$1h2oS&W;5iA3brdy77%NRrKC$C zPc2xm=!A=Vi)>dG+!FYTw+Q)_)`K|!|Sa1}I+Y&!hg zL}5k}%9RyyxswflJ76Siy5LEOeklCiEtg18t~gxzWsiWYf^&zU5`+~<(47y-kp^aB z`P#5&Lch=@6hp;w3h#xGg#u{7l!a{$KrG^`g)X60#o}ajL&b8pY|Md+7&}ew4Yp6J4!q_WP8+UuAi#G;(ldwMn=?%sM|*S@pe#~f6e~Ab55>LDKX-!7_K3JUG{_XRBh6_JF_5-iLjF!Fr z3-Hyasik9ubu2z_27DD?s*Ej>rB+Qv2mid>rZRIS50R$ z;48|%IzC%>R7G_}UAB9wa<8g%=M4A?Ey9ShuH4P)gqBGg(ck=vhL%PBw_lb5FJs5J zuGp$Em*hV>CH>wm?7y(+FdezpC7s9KK$yVTiMLo}?f8ab!p5X(DD4$8t#XDrGIEty8s5GoegvFav3RuB_P_OZO}ETmk4(1seg zeH!K?$|fuKFpLn!)y&1$szFwW#)A9>6$>$0Fulh&YZKR?S7?UUy>LG(Iclc;&L_-u zr;PPy4XYn8wmxitHJvv4JxjIq83T$I`C(}?yzWn%IA0;90f+^-LIJK&*c*tQgWxMN zUkO+R%HsLz;43UNcT>qJ zQ^^^9WD__-~Ggh89 zmYrZZ2lb`*v(V-l@0zLbRuY_l9=e6WWF4 zgLj9D?+G2aJ9rRvyKm2(*nJZwv5k;nmls)$l~Z0TfhmOQVK!G9Gj8g~yHhIr){JK+%|a}Sid2g`jTCcOj3 zL*u%lGJZBVVewy~KWNezH85eJS*X{-e?>-j#+1{BzkI3q{8#cTACJC&arEWaB;c#l zkM{37Azr_?x4N}Et*AXLx!XOw+s>z*^ee%Cb*F7F7q1rUS7;0JvWizYvG}h9m%Ej( zC|`6DUs;LhrjsAaT{nHXC5pl7w&I>E!R5|-h2Pg7{64bi%{?gaS{Mu@e|nH zt$c+Rp-sf-$_TljU&+v-0$*|cs>7fsZ5OEB|C+MXcqKGSrGlh1J=9ZcYz4RiVo^~b1I-MQauqa;&LFVL!dJrBAm>|LRe2qtjJ~CmW8d zTTUCGUlnW}O#Pc@jgUgnX157h!{O#sy|~~^PmZ# z>j0z$zA|D_GVMx!o^~bg!XiS!;=c1aw&!_4!V<`eb}L^nZ+X%TzJhp#g@Akl5pH&z zz`hQq;(P_c3I(YOf)$!;ICA|8Mkaoan_R+zr3#2uPemrH>IcnL=gp;O4aKK*MW^&7 zXN=|NjrEV}o1W0FxnO9;hMMQ88@L7a65IL$!@`=@3wjj#gOHoro-wvPt4D-8HVl%* z{UY1@0o!+(9k|90e9ZP;VS6u|sV?RfrpH(ySZZLb+n>^}_ZuLeS zaqiW?yz5y&oe33K$NXxUcMWr|GPzZnTq}%DMLMT^jbrYtL)J8TuF@x6(#J`>g6D)n zt7J&80-`&FlNO4_f><}UBD8l|FcuE>EDh24#=FrH$;mNMt9)7PsSvLw#7@_Qb)Zq$DET#Ld#TGGM?0| 
zr=irw>F`Qf!46a2HXWAW>ho@>Rxq42s|TtuSy<&Zt&D2eY=l!x9;qzYUxfb?g~ln_K4{D24C6q-C;X;+sgi1 zY=q{ZZSQTX`cSvq^`maJ>$BE3_{yOdH~+(-=O+-d1PO~1)JPycLAtw`dZwB zQ{?pD~?sxBOfQkMB(xf#bUk!V$r0tg|CK* zPoM(v5`Kem{c15&A%ezQ2p~l{CQ8s;#%BS_a$hKNPl-l|4WK)v(cl}(H&f1qsl_@K z*6#Ra%Ry6UctMi^Iba(mui!ixDj)}lg%>IT7!b=RLk2>#QWk}GB}!0&mL&rs^1lT& z4UtbmD;Fi4vX`; zNkWT8!b*n2OGhF~WvH-XX;9uUwm^EN_IZd92ZWDQJ=i+x6h(_i9U^*QddJFU6tpbc zV6waOMj=8Q5XC@z1)vg+;H%r6uj5E@qev+AZbypa^59OJ&`#UXuN|YVV_i;QdPh`V zM{-36g0M>0^;B-|XxT4bf27M|!FsIw+=ZS;pY7%O?cl2y-x+=JJsBbz-nlCKBwZ6G0gE+%_2EK4S;o{TM% z$CZpGS5Kzb&mdp4V4WeOZ5ICa>iczdC$-o=h~>^Gh*vO<3&`=<)GE2{RZN_I@v z9@JIu*FnEZTO*IHMJT$`BSYbtrod>-9iE-{cy-%{4f`a^;t?}NOX2&1i~bf-+Mc(5 zvUuYx_7&!BRL9lH@qry1L&^`ZqCLze@yk^r@sjY)yMnsz@Dt-b4%$0O)6m%C;K@YM5 zV#U>IQQXIcH%f?LNo&aKp1;dfe9%yNlvUry>Q0)Q&zjdg!Zr!bL39QoUV*82ZNnnJ z=M1}FBnbzWB7Z_^ht%0b}LEhK46KO;63Oc}BAy z8wOuzyWV6w;F5ZYA-sLVGqfV74O?;qqdV^0{IY4sYX&TI-glAhzr+rF#136!M?Yal ze!&i0F{AchHtqY!wELoQ*L(V1?;3W!W3=)WEq8v|Y~?HD!*4-m1uf!1I)qs$N{T2y zZ7w)wsy@ejYh_Vw`m6(>CY@WA!4vFMV|EqFqn3HrF|Ru2UB`TDSU?TIl}`=xsAjH} zCYN$E%DzBnpQo|UQ9ER+oYQBWQ>UF%CY_U&ZX!88r^Dp}BRbaH*ai=yKutEm1IvT^ zmId|FLg(Oqz}4LW19$oN-{#SMt9uug3orE_!1M9ivkM&pSs`iABLS2(jX73P4TF9K z{}ox>g`qI_ya;~PidV(iaS>1z=DNALR9t^ z6^IM>i`Gnv8fLQA8ZsJs^S7s&|)BpBQlFvRLy>xjL=623kCm!kBajdIlXLtE( zacWU_cxt~_RIi<1C;aU^p8;co1Xn`cYD*R@D_kw+tK01RZxNV^Qx-qxeDza7!uo%Q zuPog_LCE6D6}MpB#8+@(3FdYwVU^G#wDQ#rxLTMTV89N+6$u)8v(%O>Qy zeb{x^nCpJYlr@@I+JPxT?8+%w-@bZ(@A@O%+fVcyI4?f&Xb&vzuwc>t!B-^DzOH!j zov|0+RlM}x$m^F!uKarP4`0qG5ix3H2D89d0%$>}`cC}{eDxz@{Xl^F7f&a?`VDi7 zxs;SEi_e%w=2c!Z%WRxas8oWlQfp>XYo~KsG-(YpIUDB-cd06mYpU;4H=Nctp3+qv zoh?5wQF&;p`rr)ss(7ceWamW1p4prYihTHT*D2E1$|B1KLkg%*hsZm)Q|@x_zQaj; zulw*4H?e=}SWMYyN{dvqx;M6_C#g=BT&a#JoQNqK2`+Ars~arZt}fi6iL4oMNo}`J z>|7bqjdwb{!QF+ZUi^cC<}A$Kf~M?3rMAH%D4P%o_Q5$tkAbh8Vw8klF-q6ODR>~|I@3;NabMp&>&&U(hM<2ia1UMvJ&fvmt+v>6B6meCM= zlEf?0uaKtx6a|X%Q9;O77)}&AfV4Ugh=t}F{7Y~#p^Mi>xe6wl$b+bL0Zw%j7Wp+j zWfp8ij4qDC`VEvOU%vLW1LJ8`rPESFp9`9AMEKG`@8+zBL-K 
z6C^2@GLw51b1X617wLnVnOiw?uVB8At*V(%C6#9-3ut1#4a}}k>sVq~nX6urrE);N zXpYt?L+zYC=a#B=5luR$DBRNk(s<*L+egY*29MZ<%WXrYHt=s?XIaPq;0kq55W&?Q zK7F^k^}y@_zPi`1-#$zV62s>O+V2dDdtkRi#2^qWG)EnhJ%?!v7_ne+PeM4GNa>NH zToESrNEr>zp9?RUM~51G9#cINQK1yAo=IPa3Bg%}?xwbmrL~R0c}0k|buMwWGH0_k zy-l4^JB4u3bi_@h$igxvlS@rLF;chCA?R0_AM^@Q0I_@{C%nSOeF7%}qpri>-YrA8 zsNlcC6yZFrc-EM8I{iF?MUgnFH?iK4x!?RjdgYq*gAbK&zCHTFt1=`)AA6vC`;qSE ztzAWRoua~?5Rup|tjpGi<_DKJblqXojj_QyR}FE#y2Z8+#q}#rSy-LJ^(*2l`~IKW z^?|R*f924N`U!~3v5)f=mzx&rQP8z+Dm)WmRs4Qy6oVy%VWA;t zrK}q#Lgv-tE+I!OYxor`SXQ#SVRR1|u}WB0rNyef@Do_DR)mgP`h($Q1&4kGzTy%V zEDe;?9xufZv|~IKi9)bjC&~z-gh+N|Rq^&8S<{iUATX81pq`42gcA6vtYQlx0TudL za)1F>sBjZH6;;kKrzxC(3V>3fg;IzxD52p+$M8Z)SiuMyVH~#tb4Gl#hw!x^G?KzH zNEyShAqOqUK9FRg0s&f3_`3kKKv}SDATVcTv;vd`@rolB8iicBLYMHK;9(R6Ss;it z6euOZ5$6JKQIZ&nUs7CQE5Q=PE_4R1UU!SWld%v+f{_cR0+7k zj|wcr)y5A`G9bh&AQoa71m_jhKkM}bkpiy3pTULl&|-N=(O_t4e^}{Icp0%4k!Vc(2Fk7Th+e793f7ZA%euGcNT&pE1#nugKc04=l*L0KyUJD_4M_itb3+kUTC zJCfTWVcqH0j^-frE3D1|V!`DO3F}_JF67%>e6AlY!Y5xI;SuideKP;tTLW)hlU@8w@rQrRjwp3n zY7SadES<>=xDv1cM@#X}pp~z_uXXsH_x}sR`2Y9ADlPl)cPuF5)6^n)Z2EX&fvRF7 z%Uz>~gq2V_ky16A-K5E0t;uQCVSEYwLFiZ2$LAqo)t}HHBeZni82AdwR|pqfeZo+& zccyUb7$hw4Rqn*a^4984r=lJxS#un)6Cgd#28KrcJviPq`*d zpgdBiQX5(2E|v;pYBE>sCbGi5dlZ)G#8qoD*RYg&Rha0T&-`h%@3cPpzn%)nQ9yHzCzwT-7(%LA+p{`N=6ppO3FxA_VdERwUHHG;3; z!rJ_#e$(T6@Rg-Mh&0hBkkc-NwrmlT#uJ%!N&13V`kE&61XbV24L(BqXsQn*eb)a$E8`_<3>UO-Y-}cJ< zo;OUqcD-TR{iYe1wEtap=mWO@qWR#5ro)%n(QEAZ&)Kn0*|E>4j(lQ1^fB?(fe&>D zJ~SNu(0KSm)1ixOA1qkdJSgzhj+f1-?JrUx(;m|e;43gBa!_H&M?t?TJB?%pma&(` zZh~K3gD`flD&|&db}nH~#muXkxs)=80+VMs^Q~rHK&(pUUSa}VLBDb-H`o@cP>y9< zN>{+lAXdMjGG|>fRPLE`kgyz5q>gD)+qfZ0{D>K~4VT+RDpw)}Q5ZqOevKu9cy(9M zz@7eZUiD*w=?*XP-Cmf19fWEM%bzo{plM;#Aew`BdwqrdgZAMA*qvc7Y~sKXE-bj* zF>v9VF^$j8(6WN^XF`gnBPwRY%f>~`SjIGt2={`my37qzg*)bxR?Bj?&tYs3I|mWl z9$Tlx8lChtny^AeTg%kcv{xsaQa@4o^%6g9#uB_nqq&>y^`kf(TA zJyejSsO6U<0Tl%0n`eXO%N(xjCP@GY_~^3uUlM?YfKMj41rjyLOWr{0%ZZQmik-=V$ptQj13C@E6!JU 
zx_91%JB1b@vK&_gcdiQSz_;>5)@WLLR6$o_MQ2)FdqHbw_14a{2Ya^N-?!&X5BTcL z1q)w2fh{>maerO*?#J^lzbAe5(#U(Cj{ffNGeb%(A{wx%L2Dv;3VgMIJU{Z)!j<^9 z{l7}bKKnZhOurnJ-jkR!l~%67Lg&=lxr}B_O7&c7jXJNz1UV~zohh?TU9xw+7*f^I zx!U`+_4jKl56_nGpR7DM&G~BWX>;YiS-{nr`&rqpx#I0J8LjfvW_e&=h z)2<0KE^)JN33HywYPXbG&vbQQwlN^f;F>TEb<0CE8D6B%X=C1*qfuqh7N)^bklkX+ zG`{JIfXwm0tcl1Xby_2ft<)sd8z5f6h=o}~*xjL7A*F#v2-{$K*B9@m5Us-fy0XIt z_}iP2&miy>=~sfko%jl?1|d}m1ucs#?p)4V;47|Qk$8p8gBTlp!o;Jlpji=L34sj! z#A>gC`D%lZY(NoGf-Pzt7BRm-g*Z@j@lZIytRE7qZq&=B^)DIMKBryt%>3FH_1F&x z3;Lm}BtspzY})(24t4MY!-4mW``*^>e_Max9mBzQjr)Z<_`dn@2kgit25LI;k?HtV zf~)&Jp*sEvJN9$)v5(o|D@Ksj!4LFDJ~AKs(0t?rGroN9o8~{O- zdnt3uH~ZnFkU8ZVyvvzq8S|(h^Oakr#;r!{R;yl;Gi+C^TvZ^q&sVIY#YQ}>ZZvc&Is2+ui=*7$E_ALALcvn}L=E_d8}?A!MOZ=#@4WQUs-! zMOI8EG|VQinGx3C%%-oKs6B$s<^npme; z^{d}1E?-r?cX8rpZ;U?klJvo+`j4FN-h8O5Zev$oHTX)5U0jnTb@yHHRgpfTY^#0(d;>8<(5RQ1)kNbiIsfw)EBvLIX zR1i{N2f`7$6SxS{3S=m&NCjSsD3$=62&>8@psBcug(}DTu;IjNX+n)8p-K{us+7e5 zC(5OGeq=FujqrUySwXp^hT#wO&ZJ%>>|XFJ(GQ%&*C0~IkX&Sf^__dMAY^eZi%kAP zrag5_znOnUEmYwiTsPiS#R=yt&aqaYf^ua&$H`*8S{5{dBH0SI_Fy6_44zo{N{GGU zd_@wLFf728E8ZXEV}qnzVS-X{{zJU-7sM-puVi7Ra$r?#l_a*xa%Cb)hQkVnL-U6N zvikfp`~1@TeN+39IOLhsIU>rB5)GtVex z^qYKj1FoQA{it94n+Oa02Y=he{L?OmrGA-Kq)IBD$!aiWG(fX5rB-WGsx+DPhV=UR z>=u1i>m2%nh5Kg855sw-Z8~KDVpSiW1z%Mifd6V5<}2hgRPCEv4L9i}11BO;^}o1g(#>FZ#3*W_$v z+1s_?E10hu&#=|V7kz-ie}#>K=<%W1hde7F7S#e@p+5+|vN+y_tWoZ~63p$SdZBpQ z6%6fGxB_3Hz*iixz*o?(s1-=ga|4n;H@$4y@-q{X6Zj5gwE5sd-~NUXg_9j`oA!Ue z4q(;~$y8X!{DvOZ^rIiMUw`C% z)8UH-6u1j@=#ug1Wp?Ze?IOJY=Y&|Oqu0!+!&l4)KQi*Cfvix6-ZdV0%WwdB?*d-| zu@>{yw&!VK5G?MSp1#3Xh>s{c3FS)XQ#7cduV6*8ARvrjqmC}bW5 zX75twoU8K&TveI8s&vk!lMW?vkD5`Zib0pk0hg+NkLn?hiXo@MUgx}i$Lzip$(^e* z`t34@RwVS>BuEezx>VRyOB$)L77bX%`3i`2XW$?tEPU?1)2|P@1ooQIfKuqds_=en zaCV3sunFmLjm28bfspL^gmShjWY8&6x+1VIu@XIZhUkW+fxTFl6O^k?tYy)ay0}`M zZ_aRHqY5jYW9nrIO;R}93%0AXHcln3QRHo(Pg^$w=T+)j75FNxSrb({fp7-!RaDMw zWTpz?q5<&~&fpw0jOL(cupIM)=nsOgpkIZjzC18a`3&&4V}20+D@r8SzN{XPQJ1t 
zmNlHg+8?y?l?AR4F}h&5vf8dVUr{(iFfjwj!8U4?Dq68B2C@~`uZTmeWQEZ@mt;BS zNjX`e`$qYe!hROCt6(+vNe4%PrfA9!G(|eq0#i}{4*kBIk)T`&fC`Pjz&xyZ9j0No zA_i5qW@J;LUP7!;F z#Gov&mT&4H5DVp(HUz|iki}IjAl8b24!rkqzM|F7G)Rc0IbD?7;MIYwQQDOQz6$7o z3kz;8m)K5kQF~}kXG}?FVr54LT7(vwl;JSN_8st0^^>Iw0K0$)A$!XR3N;H$T; zs4jl0y6}4Ud!H&l|I1X5M6HH+WoGl-g+)C<{O?D;`ZpofH@+?S>c9F~Ov#nxB5`*4 zTv)oaa4oCa%8(HqSE5QNQzw+prqyWE8Z?EQjK#a>E01UdzS1?HGBw<<5>B|@vJ6!OzE(@sM|5NZ$)@NpXB1r!KDHH z#8*NAuAnEZz*+=+iV@(7>sQXPGaxI+s9DFDIoHH_x1@Qy=yCh#DLRi=hvYCfP*(Ds zZTN5;96OoQUMZv5YgkB*$}35k)x^T{G?9hch(ct-O9QjV@#`+Ecq(Hp#bJq>w1~!v zub;)H!sNBug6-J*Ghe*dSh#P#XrH?5@O;B5Br~uLkFiaUvu#h&vKuR3!O+gNtlclM z9WNl!oe~=$XK}kb_g@LagUD|u=M|-0J!a-n@FZjjW$g*J;?bwf$d6k0oB&s^P$m^@ zR6E}>?|IJ*h&q6-AApL=46hVAfXC3z`|e#uUL z#!i01PF`atuA1+^!cZr!vQxql=f^Lbk3qJ&Y9RVAVx-7)sZ> z=0oq84!>nS_$C8i?ZartF@K-YD~~?p>MDu`;7?X@Yop;^4|S+43kv z*G}5SPQu^^hF{EAcLxpK6*zc{cMp1nXoB1oAjbP9W(=`a92we>^z9;gU1Nq<1&duG zB?%?WK6ubQb{suKEFypVN`|FnzpMH-1;MCjij=lIZ`4i6% zoq4>M^HoJ#dscZ@LT^KwnI3>!0oofsM}Tzp*cwXK?kAi2XD6@F#hteZtw+9j@FxArnlDP?6(X>NVK>fH;BDUO!jY#raq)`t6*|tG zuPptkI4LwMkQL0;xg1yM2P}YRd^v(CV5l|6%;4(QwdTP&?vTIG@(k7SdF6`l~f}W)#4~ku91prMv`kK zsr54aUd?P)WHrk&nq`@*OxN zMVA^~)V!hK+`*un0U(xNMz3$W*eA6Y6_7R%m_7(m3l)$tgluiJ`JhC>Ph}UK&bJhsChCA3EQSErbs| z-G>ycN1q>ngoXa#YnP@z{DtP3H^py%EdRxSO?D2c=8XR{zA{_s^4bauLCamo!KcucW zGFNk-w(*3a?zpz<;4JtG$qWt0G%#P4@0l*!g`l3H;vLiI4}!0f8t{8y3^8sLe-}Rl z12DpDF=>T2qou)rT^9Bis95*-{@p&b&mkP!1KZt19f7&xh>~7W?O=Y3ymY;?WbJ5n zqb#{v=9AXrlGtw(HLw&CRtVMd;QnPneaix=EoeckZ}Jsdg!Yjzx=%Yrs~jR{9HQo& zV%07Qb11hYjc1D9GfD50Vsek0^GKNWOVhX~O`y?(opC8OEFxd)pEeVgs}0T3#Fw$y zQX}*$bPN-!b&+NGdub8fP`-ynS5L;(O@~z~Q(ARJyI9s1L+%cB@c~WMaRb5`n$DP8 z9%LIIMTXTPzCw1W5Xb=5qM1Q~ub^LjgRh=4@Z>A{LRj3f@)&7XkDAEP{)Av|e}WK; zG%KN?Vj=k)$)6jMb?}NAf$9WT?_z4t2qzV2>c|yK0jSWqBcbUNcH&bDRz3J@_VDl6 z`CqY|=45b{X=&V` zQ|^pg(VS<+j7Nn$ev_)`esjrbb={-dbuXKF0~8tZZ7-RcpEbgGmUC<-ymcb5UgcJx zbj-qP0OhI#rL$<(DRCBwJwPnp8N_i(=rB2T!$xlN?_DMgq~9LU4I|CHA>CM#Yai9` 
z7(Eb}HRU53#f#Z7YzV6{oWi7zp^{a?G$O`YksJcPvX7FwCMc2EfWamE_#RQCE~!}+ z->5{$Rl{+XzFt{fWlyuZ+ArM z9KspUB1FC@BCs5M2JO9v;J-q?DEwE$Q)YwU$r1Cbqc?*2y*UkCrPWxVjGYD`c@D)ca-W(KI z>wnHy)ExY-;<&{h0$gzz<9Fhwm7Tuta}g5(+^ zw>B@HpGkZrG{ry{P;$^UBiSlm26+bqq=IH;AuGHF5QD(y*9cdHO$5nPohB1=ahPr;E6=7}y{(5cGgkef(!0$f!>gBne$5vo=WvVt&`*)*2X zG>Tq8MuQZU*(A+gEzemaL*=fO<+jMP*UGcjC_r2pP4e_+B~eyAeFq>`d?kHzp=l_O zC>RaN8^zBSm?Z~gA)C}EO^#Gj1a-n#h%g-dWhubH!dKE8a3yr!7b6xw$3?qIzvB3c zBXA33g(E+)Qq~Q=3dG(7>0;-VO}Lah+c{qWuD;DzgjfsCcHSJMxl5rl2)GI?kfEzW zd{rU~FPFzxkH*(1;;Iz5s+_L?SCFtU6o4)v5DO|6x--@aNbe8I81hRSKq31bM>GwA zSdI}L_Mx3Hy3-&b-%BXySC;-DrC_yNEm+X6phegt=OMD)J^t&U?5@b7?%2|f9hHl--Z}%;cI5AxfPPhVY#!nj_^SGl3Vc<2c&_<=JAK(xNq^#VyL*2AQa01bpS1)Mpzrusm{LS@<9d8%Z!h56x;Z zU#-MWPoV`5xI%w$Rrr`g)TDjXtX<@cUBs+?q{=ByC)VoRlEoE!*p`PeMTq90 zE@Pb@mA%oFzula+akBh~0kTyaOho5d^LfMihvB4R+b=BWSKuoln4S0vjY6cj1F?9E z5TT=(B@|rlXeJ_7bi<<8wmZ8#KgjdjdA_KXuMmcV=!W0_Np|fM$+eGXUwcytzIyDLA^f`Ld{x$R zJ-rP5!6A!&#revm|4vxhZ3pjIHFB%%(5>ha3Kn;=U@eqY!m|3ZpkG0`vItoYy^yf1 z{wq)u&aHA5;ge8+D}E=AKM_U_Id0LDze}(JQwdU)McP`7Sc~;5tAqv6vhvlp5DR;s zhq;P{h=#lTWLC1`aK-g2@D(iCf-9SL?eVe%GAt-IXbw8Wf;Z4Lv%nR0P;$+R$cmBy zpzWjAra;FHpaN|i&;oP`%~kMiEI1NYp;09i)G9$olHko9g*SQx6~>fFP=c{a1`vv? 
zz)_Y|tpHYWvP!F00IRZ_lsT(MbJr*fT7+7wzz5j8HM0D*vix=Of_0<$Z3<8p5G%U{ z0=^P3mfA3yVnHl9|IWa%+sLMRc-%2&9b!_^J4;w~)8ydqmW z8Wt-a&D3s7B{E@HtW%)mSoHMYVX;!-FH&Ho%&aYP&wU~?6vgo|=sNAX8d{sp5bbN^h zUBdiUmcQOuvRhZUd%9@=V>!0Sf z&gqLS8%e2_>5|mUsP(>7}f^x@pJ<19;vB>f<(ds#`?5>RU+QjDR)K+!* zUWSy&g5BE6;|3%%V7W8G8Q{NK|ESO(1YePIMJsX^^A!@YV8Mclg%b=9qR<@N^rQiq zSAeT^59?uWhdqAnBSw(bCL}Egy|Wv9^)$l@n|05aH(+_q%TTTeu3)Cx{}G&26hd|6 zstIl=(yTrqxIzc;d=(22iTeI%3IOcSm$B3GYMb2i_5Q;xzh$ zh-E-64JEjuUQgT@q%F$%i5f(lHGXMhHykD-Y(m9h1#0vM$%TdfV2osWKu$0ltFys$lzk`i3d^uaJC| zwQ)XoGt%dDqB?a{$y8YOL|lvcQ-LzsNDyK)IujBrIMN z81;2d7v-_tX;@w!0;gW>UV#TUi(D)@h7vdy`><&dV2Weqx}cY z_OILDUBdY)e;_ci&o#UU`B(5?Ed^iMQ#iw&_9M63p@u_cufR)- zbt;Ze{GOZc=b&Zn58lLAKq+4QelBW#ldk|>)K%oBcP?T5@9-7&<5EoYhlqI zSOUrwUM-*tV1~dO6oHP;AbNtpDyTg|k#Ys|k6$Lmr<219vK0wd6cWxWG!Nkkl%)V4 z6pC7b!t@`GT&)6DakwI;s!=A_jo~DL>{Yla71fff3JxnsQkji12v&t_$4b_Zmu{FS z*)UeJZnUUXUfd=xTCXVHFj|OeQ{=VE^VTYI+eSHMWi~6*8lh#4is}?#HppVprK5yc zpuPe*fFBi@qXcF7q>mD1rHuix+>%CVp}A<>DRJC6e!?Ms9632;MT<~?wNMSpAGdcT9T$u?VhUZpWf$zK9SOcVUVU z7I)|mt3tnYiTTPutt%w2Goq+HvGRIaPl=y> zZ4@oSx311!`sLj7@AUsa?7an)T<4Xxd++)O0%i-E)XdCW&CJXwW@g!vELoD-GDFO< z9gf3s?3j~G?m!FzTWaR+f@Uf_|MQ)%RqC;4X3hMwW@g=c?X}lgr%spJa=W_fIq!b= zyWjfL~M5f#t+(A1J z)6}g2R~W5=+(i?11MdMYw6wELwO_RwjhV-?Dl(l+3AG0^jm+V%GcUwvh7%T2rs6S|54zZz*Th zjv+lkAzS_2dgmL$=zjdCmJ>f!PQD>pfe2QIURCygSK0rHvj1ge-%H8?fa(>|6O=;M zk=K?CS4eiE*u0~bXp(+M@D(u0y2f=6D)d#!5KCG&9b9Gd$hRO~xnx>br&=H@BrL}? 
zWlg5yo~!s2*u3+NeuZ=4RpyYg8RwL~(6WW78dFf|L`a1;sckZC^JKxnnRWcQXrZ#A z`bA~)i^_)Ql;$Uu(tD@V4^DXG^}42wtd1v{r|V|g%@Kp#Xv;h+L zu3Qqv-4Ygm$?V*8~hjrp$*W>%GGIf|KUGBPT=R$CGoe zZrb?|`e|j(s4Q9*7EPuFg0B`AcwaS}%|^2YbGv>@`RI?Mzx|yE7ya({hX@xv|Kz}_ z`vwl39@u!GvwYp<^vcUgMO{J3saC5d4qGu4URoGxb#W$m4GX0zH$-y z?ZVSeB9>FnH=KJZ5q_mwu)evb_pkZVnqKN}T!c1d*P|p6Njmy#H{ut%|H`$CT7E@d z{}Nd(?+*%~a_^`9#;wm@^2#?n26(2p=-$KAzUkFZaq=zi0a-khjx3Huk1h)u9lj}u zNzxG{EQBokW@o#(T~d}1Uf701;1!C)$qEpn zAR-!kSI(8;RauC3fUTE2Bu9$~HG%>uNR zI)+%YT7?H5%a+P)y5u^2LX{RRi+&}I%TU^dg=0Z^W2`?5%+dN~Ga)O|-k>b(Se_b| zn`@bz#rn=IHrPWN7|97Jd;EMHV@6mqB}-~tGJVv(GMk6;mysW`Sh6Fg1(_3+X$&Ip z3bGQVW-?sYuVRp`gm@*#2F2u{h-noO^aSG*lq$kn(}z7WhUm}w<_!Dg623YrB&?<3 z!NKs-Ve~5szLG_FJK>^o9upNydeS6c;l-jN3VOryd*m@J4;2du3()F1F zMaz%GQ4-y8VG-wkGqRP}wVGA?Dp99D*0K)+&yFfikbklfFsi$#kt zz3#KJjh9>Yb#A_+WAEuJ#~wCX6y#90Tuiw|R zf>1Z3@|BGgceu)CwQ>-6rJ|I@s>oWJBK(@K{zX#yKf4HD4b3SB?!D}%c{jFXIJ9#z zi*~TkndO6%<-4b-;%(#k8z;Hn@lNk$e<*hin1OE&xq3^`)s>;0w*`LT8q?zy*U#oA zm#EHLgD(7EjvvKU%%{|h`(<2+EbbTO}4tO&ozZa6wD zVzYDdh;#aYV^$yA3D*=3dz6iORA}AGMqNrq9r6ZOW%hwsj-+#^=tRnB(i9O5kd?D4 zUb!Wa06FiSGVhx*>yt7am_FwbuM0?@I{lo&G`(jsny|trU_#Ao(N-m^U5P9mPpq6v ztuxi^QBoSG$E6Yn-KLT~*2)7)>0VpKQFGOC<_FCzcW%2+S^JRSD?}&Z zNGHtdT46L2>vGucr|Met3K6xKF@(W@E8(_!$c$#SgjEk)I2Q?`=Y_UIk-$kL`?@D> zY_Zw+ylwN#wym#Pw}0QX`=^!zzp@FkB7y4n%8B1Ar`}eb>Kxz3xGt0{;OgA_wsY@W z&wXII=R+%X--lKKSAS3*`jhg|--!aW9{7V`EfDM6I~L@uv;0{g?YB1gN|@V4-hu`3 z>iAD=@YQjwRzFnvN`))PO7a!i3z*pH2|`v!UbEqBUkYS+UQvT71XjsHzN0@VlsXlz z*0LD!uoBxe8CY&|1+Fqw{VLT2VoAPo$xz&K6i+Pfd8WW(b6lg6+^R%YEU@+{xk*WE zvLrW~*uEWArwuIcjBFZA+dh%Ldop#GAz|BC?52^ZjYB~#U7lrEJqc<^)eEP3q=;Og zr%Y#AyqR+=9o$Dy!%ZRm(%H^vp_q(_7cV@S7K?63>{ZeGc8 zLqR#StQri>n@5Lg<+==BqJ$%l!gDxKI|2Lhqz;kHpcalKH5zso0u{oXv|7 z)jb4WHQuEZ?O4p-IF-3!T=G@^_GxmlQkusznr8{W!V(#gJsFf@2uRX<$7ll+r~D!& zgW{$Fq6Wzt1+F~9$9*FQy@G$gXaC2BNriyBdF%Xw&Aey@supY}quGRz1#~I83FV_d zXn*^r_KjbTzViKnXJ6`j_=*0z?io6GVtCVmo{IICGOFP#kN;GxbbZ@Jq%{1EV>f)YRI0gMI@|w6zOwtNzC=@YzT*0q;EF5D$x5Ev`ARa? 
z@}uA@mAg1ftdfXjhbuc@$z1n;jjw16in=yAXzvfM3LmB5D;2KB1Ycc)tEJAMMvT{? zTtOWuJb_u-!0fT$Y~f$x1{SzdH`ENtlqvyKrAulRf|ck1s=Yti{<{WOOY?vNsA}ol z4U1?6rUKEdFtO7XY!V*z46IhIW5U6{Ry6(~E5NCItG05x7NDx$F;=~8v})@}&Gxa{ z9om|m;!5SVk&3OuWm|_~tx2hhy5BKnX&Q96lCaQ&SopyNWtFnvRToyQ z4=vOO=aKU!5Q|QzCkqQGDxyba z$$3NMLHJ5qt(KT7Ts)*LxSX#3VMe&uXT|zrwfmL?ITFzJE|4!+U5M95;a zkhwzP!ivAjNFYiAiSB3%Oc5>xVO>Kk_(}wgf>?f+ZV$Y;BKR|>$j>|zF8QZj4$8VL zrU=WvNUOP6y6%gXyO1CZ@0gW*ReyAjhM=%u?VifpG>U#zymdCQS`T0OrZY3eZCxLHb)*01t63u$Bo+ra zhh4qh|5J|yHi`7z7JRX8AES%&9tofNr+pEc-%C3?ys+Ol{R?-^MTh9Wtd9E2ZQ=jm zm(}kWd&xBs8~n6;TCZ2$uvgKjf4L#3b~?CmCa`hFuWr)2YQn8l=ae(DI(5h?Wpqud z!98UP+Z9aZoG?vE7nVrG(#-g!O#7uw1!v9jw+hIZeC<7@@qpr!I>tvNqIXK8k#1mo z^-OA$Ik|d)5Y)6rW5Fg{(@`aT?KnC^(+P!;BIf7`*@my!j8l7;1$_^`YQJAu2VBwZ zI)9z7Agk?9(do19Pz%8d#KOT&@g;i{gCr~dph$vJOTg75?2Zw^o9iC6t$*CM?kUU0 zXKgsQw|v*K^M|(GKe6urh3&Aqp60}R%E|YYQ|~LMF;gw8v+s(w;JxqJ?)yN9R6vw0 zfC{*J`0teS|BqUK77ri5l=XoPzB>DRD|~hLo3>NGwn?};j$lRP!jIr9ntS391v@Y7)RkK`*v zzxI2SlKqzO8f`%Ff=i~+DQ(d)&9pMbDEZ1ERdLEtC9E8qSFRDhN@`K^wkixAuzDt? 
z#YVb*);e3kR{kHcyxWknemcHIAKRpjs2TPv>Gdh>@h<8^xmuIi8onysr6e>M$iGTz z8;)-tNNLrU>@uNZrL>RGC@kK!$d%lU3t6r6NtHA3RU~e}^vU3q39sle?`U0U{DNd;L$p(DTsaeJ9Ti3cfnnRk`7EX3gixB^~6q`^R>=gtu~R{$8a%y{>scNUnV!>tsWJ0>cR7r-d@ungr9fS_ttDp|>_D%k-< zvr_$30-}}`c?&cF0jlMd(l`uRVP&T$2wY{hj^(!Na8ea*&=qYMtJtcq+^$Et0UNDa?9p+o+%{CcWq@Pl4oGaYbgP!8p|ECc7?akltoG52R@#O7)CPJJ zW4ONKs>foh#-b~=QRVumvT<%g!-@d0(BQ_G2NElPs2}u{0ov&ajJ7P(`BEp@LMl6CFWG%G|d05m7VqtL!&Fds1 zi_t=c3W;tY7mHkX*SHSE1jq2J@D(n1k;yAB?-;qoJj8 zAuRXvnBq$*)fbA^ecrsMbJNid`0CCFI_`e7n-na`SC2eD@Wd;_FaAjP`p-cu-ETjb z{HKd^W0PO=)&C68kvXQDSDt%g2K_3j(vVa!m|Hhq(W+!s;Buc&sF-DtkgNvyiruob z2rKs4@(`wu%vT?`RNc8)ab%|a@HB9Rgw=4z!p3vTrgKW!ZbQYM>9Spuc^k*juaaxW z{n_b|I>3MlW|i9`y5TGID?W7KEBELgM}pTvFFHhi@eQwkxHaT6b`5~~u5p*-2nOUI zUNFc@6hnef+UF40TbGoQEz0W1Kf5P?f`02$(C=3~5>#afZ(DVmEqF(#2e#8p z0ur{M>Q{(Y6ud+&=PTe!^40l2e}%8!7asWQ;uQ%DKeGwAVn|Rhl~u@AKTr;T&wBWE z+u_%ivaR;KWFbBjz7k}mmPliG$-4gq!B?^;DEUgl)%GXNSkBn4DEX?Lxqx#@;T~gP z>5v~w15+2AG>eWYrq!Ay{Yvmvy5K9fY^z(=A}zw0dLqb_+^veHRje*bZk%T^cUqf~ zZASzkMCoj3$s8-c{jv-Me)9e=?ynO^*Tj!FMGx~HPKw&i zkd=QIh;>sy7nxTKfb%6BRyX>6P_}EK+UjMQlD#F-?w&%HD?HK)ui&?1~&ur5*-laaes(H~@9+oFsaly^TEk$m;i z>wV9>*#F>Tz3|n6<3sRO)uzi?wU;%eol%*ce(@b{p3Y&Vt23_$+ZOLXWZNZ9dV*;*rYC$Qgez&%Mk2q=At8Lt?=N#6m z+A&nUW29#1Xw~*nlrE}lo33<=PEuC>dTn;QF0)OS*`hR<53wC}3%)EVL}KD=Lkqp+GEDEGSD%0jTXlD2rsdCAdRTQc z=@P0E*2NorE=ay|2>WzR>}Q^dpZTVK8j^h}y69qZ<)?*fFE{P(+IXaM&s|rKJ*5eBKqNUsCHFe{R8~dd9dyrAA;qzeq>Wb17A2@=#=FGelBIP_8*|o-J0^}h#!pbLfL7wf z8cgr;de#(@M?_CBIBPnxVBR-%%v&=On57TPpGc{t1=^VZkQyMAO9J;C2AC*Kofb?PJKu8(Y|KD3;A-*Qf!2LzC)bMM%ICCOJb{sdxu zWVa)sgSp5Uj|v!gGZ@wg0;!m|e{dweEh! 
zLa_vghlgHM4&u6hNrYUH!$4b57OpcKXf#RT%Z5edzIF+O2O_$ zzoLHMf_bNudB@}h_{t&0>;ze<{&vwNB&8wChPvgNH6K`NjcZmk?Mh0U;46INAXZ8< zlr3HVsk{kQH5&y(ORFhQSTI?DY?ZD6IaC)Dqbl< zYj5`Jy2T&6U=OUtbOzeg|GoO1efP6aX?9iQ8<`Em!6~1|<%|}#C|(KOo(cUf;eCNA zQ@)9YH6epxNk+fu@qiSAXVOqwo$_t3PhFx1qYBNu-e?Z`rfJ=hdNbSE%Cs0+Ib6P9 zA)*03uQ`O~W+>P;pSN`e|5fb~h1VD_Hu{4J+g&F)K zrs6`po+eEnN zZ^nN7^U>#D>wW6Q{s$iGm-^M_L*3PzKhLedoKo5mlieMV*yRy^)j7ChwO{vbp1n7& z>HD@z-?!Ebeq+tx->&KV8z*qvR|}c`4{<0 z+O8y1$)miooUg#JuS!^7;w!L9)vpGZh6k}=$;@_QuT*FIGPn|ZUKoO-*r|ka+t9mxM)J%2s?d+x_a8sjhjegl3goGfW()ng}W~ ztgx)hqqMK953gC-8wFh9NEbc9c41I2T(1SLWM{AnxY{9FfdEzGZf(;ZZS!7T^FDEG z+B?R>`aPo{7RS2XqqVz7t9K4VS+%=#)jJWsv@{mWHVaMz*mBovAvZD{7%)UJ!ymiU zF09tZR_LLuh!Q>R!q7r3logUcj>{r2+u+a70TC}vC|ZnrSt{@NHe;P_fc6W!$;*~!LdELgM%+4U?fSn$;~#JX^U_h+~GUqruxuUum2?|kZ|`IJRD5rvnM z%0DY;ztpg+bN!LdohPpxy}uKS`{_qJkg(vZho0|$_`Db${@`6|{)FAOh=87nzm&h^YVECPP;Bu%=WSn?D|rH=R^w%xg5|@KMz;r)i$g zUvDnnW@6`T(Sh0W!;4jSm}>82%kv`oRmCAb2xe;z&mdkk99^hCY(&CJZSAYsXHBV} z;7-jyeZVuh-!EN{-El=QyW6fhhKiTj%7BY3W&k936mOcS-91~sf3|qjKt^j%Ty?*E{4+mEp zyo<-&a)%vL2UaHytxgGSjo*-J?Iq)vtA%oD{BUuI(`Z)D*dIakRw zHvYW{Rf~KUV?f<@k3zm5_ho6#W=-9Ea@`Dk)p$hNaJN#Ye)bjam?x@8IMWU&UJ4EYT%=-zt3Us-ay_)*r18{>jFX;v)1V;0nnL z+toF=q9J(X`xd0CL$6sSRv}N(0Yt45g4HXc_I%e&?S93?V(5b^T;avq`yvfN5h5y< zEs706qTnkWXi~@9eBK0KZG6Z^=skOd+E_cd+Z0eZ=$F6XoV?(axZsp*a!9r~rr4ZQ zl~u{i4~i*5=X5J`g-+=Uo_VIA3MIT&iEFSWHk(sgtm$i&%(d3cc1v1|F|BEV@TkP9 z*`z8|phdD2!0FJO*`Umc;GA)~z`XNI#ByV6ZFIk5L?4TrSFm(Rg{ze;2ng(56*TA^ zKI9qQm^)H7yqb--1xh*6)YG2$BB z6Z-=)^kKO(?8T^#Ri7hkpLBpB=T(zutj;qtrS&pdgMhGQT4gXdC);V5Rd8CZ) zHKS#vwoIfq%}D(UzM?q@Uj=KXgOg|cqDU5<@(t60SYeu3))59q4)}+?_3L+S<8y+q z7#kEJ?kZm)VPSA51BZQNBRcEv-yN2G_1vqyPrWd(#8)SWwjSxJ-S$O(aAkieDT_b29I>R3#Zfx1q<#fosfhJuz7lX1CIT7ItX4#6 zDY-|F2{`89Bw5+viVP^Q0fdmsm4A*dAW!HyOds=R2w71CFUWLINmfWz?53gdC->AW zbpva~<{C|%nDi0DfNJ8pJTB)eNm;Z7RlX92E8r@B!zg_{bSKqzH40q8OG38VBWOx6 zm0B(P1!alzeIt#W92nyWW!3H)t`oSWtKVaQvOp~4FGMi3tfDOf+xY%a99>AfUcyx0 
zpvCu|RHKip(#4ePBFe_XibwIiqh*B_j0fk5jv>Ej;$BA7KU1`1FsbYqB4jalDRWuv ze1)im2i`7KaUw18_7m|?T{tMLTycZ+9RIccAmSATU&(^9;4367p<-nZ5t$-`8bB;L zNGSO#qGCwGmEXEAiHQ>U6uO589_nFs*UisM)U;XLh$zj9SeD&XoRrY@we1*lm zTc@N|f9x9jm!h`uVrJqh^@+uk(FOCGa!YBOwWxhDrD*{-OUY&>XWcA2XAAeumL8lh zKeAALYyp2ed{uR5f*>pqtMS+(#s0yZjU!2oT_xM*H1+&`4f&<@lYdSFjyv~T{J+5I zTT_|99SzhJoIw#d(&6S`ML6WHF3loXgb5|G}PTs59li6L{b zV%t>Hfw}tqhWdSD)w{wrV!5KJR*G4?Sg(+ImD{2uRhYx`#t9USEuTxQ zo#%E8|5ak`TvF|H&RR=W`(j4hY~fZjJFjy$&mb?516j1&$Ou@|T}suRmb%jxmgCSI zRP`(C`iDjCI()U^Q6W`jn6OXrLP-t&9gz3*D@dtb~6E+LjAt3O!o|APg`yXtJWkvMwl zH&%5;jvBx4J?p_&Ej0V~)9h1|7Ix#LV)~CF9pWV+TkU<-vhQ`v-q*}*g5Jkoo|nZ} zS%0$&x7D)>k{zBauu9ZZ!rqSqP3%~F*vuB}4G-D~WktMNdrpL11Qn0?=FK}L%{#=; zIVKq$6U~m0RkE@&!Ms|dtVl9DVs}roIHWB&XDoUaSbdAF(X~oUtu3M6me62Ls5d4y zE~Ye_xIvJmsh@?A(IGJw#*|D(mP~{djN>s(?Y~kl`C$K8eoN7px29)tcRkV`QgC(y*ADsIa;x4m!nj@qr7d!&msP%J%XSGSOwu zST}*oF>lLk?v`mn8?rY|B4?HCu%TbEd@#9Uk^W$iW;`TiIv{Z(C}}z{cG@dM9~M39 z6FL@@FyR*39jnoW#lHRb7nEsoZ`eP`B(29cw#zlF(;=Yy7O(D`+ze-x(875o_)5)VxAWEVJgempB{@o7`A_=W|H`fJ zukp8g3~>A_nvW0~kq%g}dYzQswrvNR1Tcy3C%6j0d z$cjFS6CQFgthg_&W4fh;wKDL^Eq)@I z;Tm5F%KE1Jg>QO%iUsSIz|U5Oe#Qu)TkK~@SOMw6;vQS_Np|zanjIZ&hdM+=!@Zp+ z9_~DG{tAe7_KB_spX-CKo_=*u@)iE8KmKLnOMGRsT(`3S=l$(p&l}ol>Z>RGaAr-) z=!{ank-&t1tlB(ZwP8NLakj8ki7#4AE;E<5Ti`1!?s*%OvaL!=!+6%_>B7D9l}F7; zSfvMM$`4E-Td^po^58g>g~h#yft14wH3t{y52iLT+k&f;jI~}q!r&{!0Rj-ju8`<{ z0NyKcU`6P~6;YR0#C)+Tsb@t}*NVhWhm>BI)PC1grfa)m%I6C=D!eSia{5a)@|T%z zJ7`;f)Lge)yYAR@#n!>fEtBP&XNxvWWwq*gcVgW+&6K33DSkWnP4UguVqo^pFnFg= z`e!Z#=UT!F6t+CCiP4cnL@H5e_F`<2l2E4Pwjb!M)^BuwJD|B z72t~gAbgcrL!)pSz6vYpDcWu>+hav-fUIhdDNq*k^_9qacUkaXiTo>k+7A+4C0c&C zs?e__Uu}KT%sOMqS393&uFz!Xs|}B1yRtAF*!B>8Kr0Qwh6e;(Q4RMgjSmRKYGLv5 zqe}DR7Rgt;exe+DQ^9$4=R39&WUzmvoFm5SkIFrNQ0`*}5ChfkZ3HY(g0kLL&b_6a zdDFIpTkzEfONb>UEZ|CtS0WwjZ5soFs-aztU-*Fm3|+cKK}l@Sy8R> z)y@~qyIwZKSNp$brQj=Cggalb?s!&6b~{zc4z)@&Mwy*d;fhF#ZI9ZvKBDr~L$>uO zes_!J;JR}RV=5sfI-i^cm!x^exLK#fMW+OlQ=-)&QMoo{vPwgM)jSzl 
zt~o~kVrx*TIikuIU8h9XSfQ*`_C>asN%AChKe=%>v3@$PW+JwFJho~AY>p@%$8zkS zG3t*=CUwjsad1sc-HsaMI0`CwNm9Pr=VdhRcuvGEG07ZPn!=bp5d1R z;`@VBM~R8UXUMm5jJXnBK9SR|pu4)oce*9?^+zX|{6lr_1TFf0npHfPp8MxMofyhqT(r&4Ge5WhU)ii8qQPvr&R1{! zeu=NX^Ge?n&-LAVzUTOv{v)Rbci!38vgc}9`<1-TgMD@EH{4D-i47!&ht^#@ue_t5q>t3d$0E z6+b4)3b?{h1YEI<54a-gDj-_~7X;^PLyL6bB?jT-EFHsYC3DwtF40*_ZlF|4Rl|_9 zQPFf$`)*=OAaEs7NxKkhZ5AGN;0nJwu60^|b-PCd zHSHgzTKA8%>>C!KbzrRRu$BU`nh%V$9vWMFR1aS@f?fv=RKq?U@3*qVn4wyHTgVNB zqYJszE^MV^Xh>}`a8I3BXMnQeDh)IW!x$l&~5l7&Dl$yb!Ifc;j1TK`&ao&EFt`wuf9q`|20?ODjAd;x%xYBf|=;owv7_VlU>@KC|PFwj=kuQpV1z(YW#rz=7_Vx#i z%k?V}R8E%=%6ig_umhr?T%ljFpJ$x{=XMSTj z{Yx`-=GWGH-cs&;Tf~Q=ZV5-$yL1wTY(*Rv%9V({dPfWllE%PtXWZqVQ?uMn^9d_&p)x^4UO#_i8pc0FfPl`F(61s^+ZD`YFd zS1Md>Id9!W7{z_2b@$oY&M1v1mAX3>!Y(q_Eryj&xF%0H#mzXzQge>+##OPDd1agh z%5q4ytx7VlNL*N%wBV3xq*3UcX>!jtd*xaDi){YI=HN0*M71@h&JtH=PHZqG;lFA! z;>%)pWK7jWRHZ(mj5}*>aE?9za|p}el850dBrL+&)4^3z(ZiS?E=fPAOos`&!S^3ZgXOp;P(V<8k>La++u z%1bjsbC9J`3DrjOuds^6*BSD*nu>N>6PiY|H_VpqwiIq(pg&l1L`i5Iq+?jNOG&L4 z{lVCx>A(~%{wu<-(60jGrAH1Rc&Z`5DJ=c5Bqe8zDe098M?Y^s(>#yWCbflDa zM`U*TCw6$p^tgxjt@P`qMR<$P5QqgY$(A3KMJ;JoYVy@muh6ycTVR#iC6wV;C|6&u zW%~r}5*8m*r`h)!Imw5+39srzUIEP+XswCFkFZ&bVE2&@QtsR7~keJ}BvMnQ}TU6Kjt`X7o+da~_S6~%<)plUC zao><+s`ewgwMVt>M_{c{iid)@4(X7vK&<8iqMG*WYj@IW9HA<n1Q8Kw}gP_d+Jb?qn; zv81BKAfcDaS4dcXc|*Q={eJlae);`KSl6mQ1jH&C2t~*$8=z4rwXEnGu|_AfOvqW$ zm4kMyf~G95)zhMY!8Tq1DU9+8@w)j!|gM*F8r;lIFb4l ze1!#TW#}gk5g$86|HV7`VsOso*wPDG&6g^-b+jJr+;aTt-m@LY9_ToEzVq(KI^nDP zp6NUP;^3374!`iDu^;|=oO}lSS3`QyA6&HiuWU9MFDgF$zmm@&QX2jv|L{%zL-il| z-v3{CEP`m0mb2k zOMG>EVAskJGOq?0p<;2HLu|*2xU07%blj@xzCE+YDZkgLpnFY0pKI>Gnyh{|7DHzZ zis6oo{_xy^gtF1>dVO*Gc;&{4s!jUJjk@x6`ic#cW$UL3Nl0kYW;AFso5r(SW;2@R zV=D|1g*wpy$~5?A7|340aJ43G!X;+HJ$A-BY0fuk4#e_^8TW|Qd&lWl_;)~A9x=lS z#kPp-dH1-HknAZaUat4Ec!4~ir!BrHbcqyCvy zHu#EGAk|1cBnrd=x5V+h((;6D{R_5j-xGXwMCGfK@2GI~fdX6!_Y|1)j^*CBAuG$- z-&)W9#wK#t-!Pv1srl5;tn3h!iv>}%#CYJ_CbHb2EP-V2!dJ@Kw}kDAX+b?&S0*hwq?n`^ix>xzNZoQQ9yunWvHS~-LB*z!l7;YcQ)GpafUJm$ 
zxv;XCkdmp8qKV+bacm(0S$h9;o$$BAR|!M#6~nFK^%=>s=6;8e{?);v9D;`&f=2z~ z%x>X2qHKr_bPBoX5!tiC_w%UyIkxoj-FV*)C@_SU>QT&CTpn3GMeqoFb6BqAoiZ%> zDkvM&H@L+0q%|qg6;qI2#@dB~?G_?xNo&vFvdG*7S-x}zNogqBxj+m^#cq~ZO~)2b zB$u1w3KycX=Vfz{X~CexIgb#XcYqeY3XIo!M0^nzf2ncpB|YXVo7H48k=iaIMs1{u zigRKGC0}(9F1(}i)%Slo_MKM;AAhd@-p52f1AMjXMDMx-SF1N&Eo$z}sO-gpMT)y` zLbnHNa|3#B^PvWB@*4P-kgz0T3BI!1uIxs4;L6TdOC}WP6%J|kc*;Gm_8`88YE(|b|j!stb4x3R|2$D#G+9s7U;;Lb%?YTTqJse%z=g! z^r0sN<@N>U^iop1^2-$dcAvCv@04y33l|oM#Z(vTp2>kEkU^TS?D`cg9`vi5{64)c z@RJoGAFmGk*eU8`77+&JT#hRJEWPPc#n#T2{ar+i?m2T6i#shsT<**H>iHjz{@^!) zum1e$^w)g#KL~0Ti)Ct2`N3O+^ZY8ltUJHmSlVvNuANS=TF9<5XEjZy)acX7XLIUp z*=;sH2v~?#uye6w&qC!vbNwBbx;qwODpusQpE09i(JO?nO7@^-@zKBv!U;Gev|x~p z@NkaMucG?M2*N+?5Z>z;)$I_|wKC@FZShxEq;##$86+3ft!l`vYS6WOz_obLwP46C zZ^%>R>x$pUz^p-5lSkzBCYBDTSC8g4=<=Gh1uc4E8*87)Z<)wyG-T8nQfm!_lqOeC zMVA_gTaPH53SxRCP4AncCqb0(ZI`$i_joK=^Pcf@9@vdLo_|HJ5IdHbD1<3jv?K{yWcY3{g#muX`=KH z`G&s|Ay>fF0gUS}TEqb0l1F{Xnl6&No)AtdBsjrW&zg6?U_HRj!5@oVg!^Apc0Om` z{)~zKAbx(HOf7lZuPIk7XWaZCaHXt0YiT%PtT`;^F;m(m8IcxCS92#llEtRTRpH~S zqgZdW=n`*siL*M#+Z^Jps}pSS)k^p(!RU}|a!Rv;SgX^_ju{p)SD0m9lVyUk2*~oz zUi8Uc^vhoK&03&*GUvTB=6up-ebQ%q(#Dw<_hFp}u{=q`n9ms@q~uG~0lo_BUmYsS zA!NWIXn2+Xm}3|w8KYa$CC~V7O{L-*Ip7>6`h6=SuZpRqxZc~sFS+uj9o-#WydWBm zvAt>aihIHU-=BX`MByCCSKP3pU(p|=YnjwInX!I0Z>uqP%VI8+wRx8LLEwrJLfn0H z4AEn=S{7r9##xRNS6~cJpYw^;g=kprJQb3>;2Sx~&OyJ>vCu@lUrdL8#Gkk9=`_r< z)!A%eFOGS^h_BsjhOSh;GMlYdFl&jgemnm9Pqoi}cR=vfqx~oE=|6gUaQDgH4Tn1# zwsw@ZcW2l3CKPsuXLJQ7b$dniItF&H@a?(Pr~hW30fAT^eYE@}M1fVZmc%PuSSnvF zeN{ELf0eSngi?0gl0YTDYVQx)zsiZ7ujEC$l^wW}B~`5D5BVeZh=yh1SHj;e&FyMF zgX(M#8kBIwQD$CUv$Ze5mEfy5Q39^WaUy3xU4?`8f_UXaNIUWDs&WNc;klwMi0d0Y z2g4O`MWI{~t&r9{$`l@O#W)`LM3FItqlqw2v5Oh#Y8gsvUed4ZG$r|phjtiSVzbua ztk$92b;B&UDcCd$Vv!LFU(wO~DqqPip)_B?SEyCA1*w+9+NOh}jR!_3?Cz8bSEyLy zR4a}wVSpd26^KQEgC56w$tGRlMt$x&Lsq-!AfR7?SenL(r26rsx(PfL3Dpyf7Dkq9 z1!5JCs+2XxRvj7nP8XG8#|K=>j}FA5X{gFsLv&gxbS;;pK|5=a$}Y04L_)i?vJdck zC!*Sg(k0GU$MljcD#JzjgW}I73zSkz*sfIBO2jqDi;}q9(guWzl|AI0JLHo$?3+L0 
zS1`hejlGswiyhDe?W2e?Y_ z@lEfs^Od)zk77oib%f+VVsXEL{B|)vcnQSfDB|3`E_~bPlbZuRy*2og+k^kID&k}J z#EXI1mm`ZWq%>YE-O|y#uXEF}tGn;Ma_GK}J021e*6GK)?tQvf>Q|Dl@L&Cld}WUk z{m<)H|AXlA|B8P#ceS%dXg=6;q~GgZFPR=-6lU903YEv7e2W;V_g)|zvx z&E#rkt~b)iz=g|)pyaFiJB=hWG#;OCyW3QIY)T>)rmWJvQ-wPwayA->p60WX8x|~% z_!ymHkOhX;#0-hgujrmtv0b;uT@kBovihBhwQDL39*tU$`eBcnVYiAQ*Ww`%%RN^h zR!{*GC|VL50yFwUvj!sb2IC5c9u-6R@DZa@QLMym|{bCJ`FxY zXx=2#TP!P)eC3fi%Zd~D%8eB{G1G3blV0)DzKJuAA;X?=hBc9+?lC&=gmHh(v}^PT z+fNe8t>FceDGla~R%=}4cy_zFd?(`7ME1JHw3eAv7VkD0aLyF$6g|_@gA4fE+3(DL z93-s9GZv-@HxUQ=pp6K3*4z-xE+QJ#6nDr3GZK6y5R0r)2t-O)>b_cRJw2iV>GwsP;UE$4n= zzWe89;OfK=&3C+RJoc*b=xdgfKUPltl*NL|+21Jl{7(3?go?!ioj1**erp4+PX1hY z+8GtZ%`V~Ux;0(pCH@mYCin_3nMADJ&)Y<-=&Qokj-0jaY2((XjF`>P=tLC56YL2x zi5B5wLcfx-)w+8uz#e5_e5Dn_oM}n}vU5z&LybZn*twA$d7Up)A zy5hi~Szza@*M2hg%qv5WJUejDBYh{%^&L4iu;*0IrlVcWJG-hk^yb#~Xi9oW!3s(4 zrbXx+(!G+D2A^IK>n8918&tmfwwpjHsb5`(tN#RF0YY}}k|*|~9kG^VE495SzN$_R zO4&-PSe$>$YiK!N+2M+r!R6=wB46DYG)UR`O3n-}7q0|g#g92-4Z@GD(XL@Qg3N(* z0at!Gf*hoB6<#V5&dE6!a8;#M;R^j~DYQ!Pl`38lS0(w16ZndiG{6-nlCS6h$|j(c zs_YFx$yY+=QoD*A0W(^J%jXB}{&o}h&Ye(q))CsH-O;b?iCCUlgI+mDRVA&L`8R(@FnxLLf@dnuocKA#Lu#AY}8gQAi;NNK>0 z%v?N1ET>5LN(8nGxWd-%e(@V_;wa=S&(FT){qc>yAlApX1^v^?@V~H}*Dv$)h>}Yw z4VOzcbu{ej*l_IXuG3fHt78vd#e#L}(e8Vn?tA#f!6&{uWalgLucW_yd4KS0zWVQD zGn;kZtaRy=(v2Uwr~Z9h)m-g1rG1xDvr)-wT@Wfw^Fn#OQc`QnY&B(WGPBM&Z`%S9 zh}DN}bw@3=M+INe9Be!>S9ixWMt5PsIy_svdqVORO|N?_*hVy(Je-|D!AWQGexljg`92n#PP8hQ8+_$Gr4|| zLCC!Iw*8MNp@sd0n-v;`nQaS|dz8!#v%>bjXAZt9IW&(2tKqZ_3l=K~vAAy#3-z$L z+xbd_w2MGdm9Io}Ir`NT7MY7Bj9AZzVz=Ph$8BwoTACjb1!aL)vRWQf*7AfGlDVi_J6>0g|3EqUQ|0t8t~Ux*AxpWNJ;+9_tiDSFW<*60{* za*VamD^wj>Mu!BGbF$Sr)w(ie(ILZ#c!d|sIo;@*VRFwfxuu!C(`}w9R`+DHTcX7^ zUY5-*-n1rRnmjbZI>ZuiYR85F~Vy;H*ieIlPm#?6nNdn?c2j zESg8ZimzHssGX0l98YgsNNJr;Y|>-F%Gx^PqFq&)_-O;ct zR`Yef8k(m+_>13;OTIe)O#iv_y~oe?!dH9m>fU;+vwe4W?dHDxhMu&Fp7`9Zh_o*M z_#XF&UZ-m*_G^WJ5DQ1Vs&IvxKZ>CIXh)PTQT77`YS#WA5)*s

Yg8=RH^iOA7G^h1Ka|C8HP@s8SW9#a>GY{O6-U`RWt2MgAKidYh~#1VHE<&)Lt z4}Ikfd1v%_v7iL3%Ix<@>2XWyaZT)AgNZ${8~deaYPY+l(>3WTJ4c-3KX*&G0$(vm z=$k6uJy=J$BKQj|?&w$W6>ueMiLbmqy}{?>oBaQBOTa(f9`a8vahH5EE=QJL)--)y zyt$)cZ|8<%9lP)DfUk}|aOLHwb z9vuF;whHHK{p!COw78`eTXJTU?WZoTj{hjS%2=^MDXgEU-DE1?Y@#8ksh=sUG*>q& z+3Rdc?Q>b17V@_*GD42|w*HtEzN$YqkA4MQfmochoHR?m!he;u$&kH%4!#O2(fQ|U zdBLxw#Sk^*$d)L)95KBsV!LimxO!V!pL2=My>7;%Y1(hYjL+I}?-ql5z1FQtyQXx^ zC4bl@XJk#ruxFOeH^<zx`wEtHrkd$H`b#;i6I=Y^Y;(j5xTos5l>`MYLH4$K!HoGU%NP`8}s}hz#EV;@VVv&3$5epTIMxp2uJ|z%~oU7JH%q@>v31nz_ zT%kl~@JS(FZ9*XWzHR>-HkLS_Vr=kz%e^1j9{fmo=tJAXA6Os#KzZyv<%&Su79Mcz^GmYequE|(*&oIJQ?x~AjX%>$Z zvumQ!C4Rvr&Pefn9AjpkW2c?tCPc*>oZ@vde_ISFv51xi1XHe&z2dbR*~g34F>?6U zz(H6GpZuES3qXEY_EdD9ktKs+`7^PV780(!H9CA;%%9>y_h4mVXis#J3Bx=Yws3xM z_6%L}q-sU#S4s7YvDK6CRa*OOa*J58M1PRnkL-=pC|87EWvm;|*)+qvDtwhzYmCmH z4oRCxEmJ~MXT74t{z1=ZLs;s3M8Z^1#At{J7rpB2_oFvIQYJ}efUj)Rif!I(ROaRt z_#tMktcoPn>-^dCOeyYE!@wmUo5?dxvbHc-^kmtE7Z zDejHQ>J3ir_lX^F3m|G z$c!D6d*3fGb&QRD!Ur- zoguqT&+44ydMznf3=byM7${s=Oan>2646){W0=q=*(k*JPv9yuCe#vQsirI_3j@3- zD*{=>oFaDD0#|mvvSXFhvX*xSS4H>G7R0{wCAfmh1Yf1l%ggZ;q3t|brQX7T7xr5id?gO0rc|IsPtf)68vC0`^rd|te%t8R&}I``k(ap?XlNLZ4u9(sPr z&R4(tXyzX-&dGdH^sBG=>c5(%zWV(IUzrr`obu3Xlefoy7*sTuQ$L?v&|AA{zHIY+ z%DTCP+Uc?~Q+1s}RAYSGZ2J0HJ_2ZA)ErhC?^GI(TM?|9PcAl{oNqmCY)}{FR2-bf zf`uHBy?H#Xb&^g>XtCBen-Ho2x+fwjE_&d$ux>^?R>b$fS2t_A9g4MHO$+`T%mEwc z1!A>Nc{fkE*BM+Z^e#oCY!Yi*j7)t zA6VtVnr02F<};5+=M7Sd=pvhFvGoz!H=2 zT$5XZE~eNToH;}EC?>ANngurFFh5wd$(GtMl~^^N-D(7}xHDzgEqB{o<`zTdR$bK{ z*0LjumB);-MJV~I{eH`O7Cozc1!9p%uvCv(QFSPpu_<#$k+8O4bbnge2uD3>V-HR{ zp!I~(`lRr*bFz+@tQTkw+IIcGeBc+BJKwS~GkAvSzduviRhm#v3iupN0(IsB4x_+{JSm#xQM7dr$`{X)6>*MhQ; zu$bTs$314k|(6sIzQ`;FM;#JE@OV$24y2-Is17hd~?@HFVuV&IU zR__u!zA{3Keg$H!j+k_aoN|hqb3ww2pizhnWpkoY7`*^uImMgFAYHAQbIzP!lfB5^ z9M{YRkE}({%mq(EWm9L}+3uY%#s7B-pmK~ctd7#Jikes*J;|#=jrTV=C+LYv!>-I+ zk{C^j;f{EzXhX1?@Mgau7J~D>ie=Y3xj!V`pea@Sl69ayR{7Q8SFy@_Cic3=cC_qL z;`8SNHF^LnFnx+%A!~8)chMgV%o>lYFp&t9+H7H-F|}nbYr`T+Z*r3k{i=MgP1B+k 
zTXJ?SvMz@i3hBS1Kgd>v;M8%JSGj#TmK9SFGx^MsKp6$oG zHXP_}-Z@a#)}L2DlvXwnpF0qdHV}|FQ=-8sNzd+hlq-)6y=SK0TigKZ0`v9349XR2!%D_*96=N) zR|2kTb+8AvE6GiXRs)D7D2qg_{o2L@I*LTW`u!sf z3>6+6<6+Z5{c^r)-ap=QV4`K8p?R;qVYgv9UlnaKz*jl#2J#ui_GkF2T930`AXcqd z22J!Rg@grniA_4CV|daCS;dH@mY6G)Di-ZRX?<4*3|ag!WTz~IEMcQkOUP5#+*Yz7 zD4K@xeXC=8FhKG_Bh6Qw@Jvd=6?_Fyp<+Q+D2*VNP_zihieD14Rws5lCijpI#9Wno z)}VX#fM<3;WydPdj6V0Y9=FsUR^PfLbvwrij-pmaeX%O?^HmX7(6LvO9U9R=z#aLG zDs)|58Sxp@E~r?L6??rY2I>*ATw<=Q4(pJ7CG{)j2cazCzoNW9yTSXD8~r|puWk+c z2gm5oyi+epzAD<-S+iS2xbHgMvG1O%2k-j=3Cqq`kH0dAe)YrOOn%K*Uj?53_A9nU zvq-^u^Y0Z0%}4Gz`qZkKtO{+#y6Nm@T@1G#b@Qdw77CWg+-lLZPN%J%rbSqNNNGB$ zw4GL3@3vsRqSl?WqGDmeD&04M|ElJQ5r2Eu`kBP~Ntz%SsCbc!fH?NMMGSygV)>M2 z__p}Y8xyXs$Q|^kn+sfT4cxfszhTa|ea5SK+OvMrt~7j9k>Y25314!%>DgmJn*Zi27$Jbz_)_c#1Lxjmwj zE8eLy6cdFom|Nl)yLa8<2r@NbNL~?onOJX}Gg!7lvS*@;78991D-_$V@u$U>>T+7m z8I49(QIm*;=zZpOrS7PbvR0qHb+Yl4Qhvll{wn$QAQnOH8y-~J@3X;IY7o|vgoTPF z5lb}M$c9o2@roj-VGGNSfhgh{P_dp?)<3PRdrBDEK`i)c{j&-?owvVg-StD$p~nA>Ae@y3uzES8#)Z31Q^QD3B3~t5Yn8uNwQ?gYPMWttA6+S_I88^m~-!0|5^9k z^IwzouFbY0JC>29-+tcbeO~h5C**-MCMt$>yhR zo1eArc*(x|HL~wbvi~irT+zd$*Z=GS8p+Aw(78H0@Dyt*;>pa z_{C|_v2}@7&x;tG8>XBWuAU#Mb&k>lvD{*epe#?m8J#|8Sqo!~E)b)HCO463p?K6i zWgKLM@=BTXN;P^WPk1F`UEsJ^;;6f5*iEGO#EvA9-d&(|jnU5M=oWDGF0pzh-d~S* zOxFapt61qACq*FJf&>W`!a53b-}qdtLW?XIM$|KODv%YLfWZkm-?)CLrhZ~YM1}@& zZ2<|r@r9%Q0!dtv1yLl}KM3(Ew$O_G1!xh5WDZ5=8{of!euXi~f)%7_4btXE@yk-U z%9h$3t()NHXvC$KxO_{v*Z{ubrI{gNA;R5{ql^&hu^$J0 zLvPNH@oDeL)r2l}wi>}#h!Do&LGaZC_zHiR`9X)j9sR)kCP$|6UlseGIy+EgcU7B?i3`ah~-a(tl2(P z!h&Fh0$fe!D~MM#{Azl6OJIygIr!=~Y85!@M$eutl#hztX4p& z>B<%RAsf{2+v7AL>-GT3S8%qo%|Y z0$)+YDyE1UET($|+T{etu9TTQuFhMvtZ;UNF%xMSMLF3qK_q zTTzIZEEq^AVj(LG)-}e6rG$#bn%;p}uwy~Ra_|*Sz*i2>6;9xO$NLLV3S`Bu9d!8H zac2i#v6G3o#bLq1-B?izeZz&KK9>ZETcXq>x!*HI?vWz(PLukiQ^5-5lOhFKc>usf zz3%aS&b&^i$ji>rohY2p_3oT5Hy({1!VzJ2Uhy(?2^aD&IdRXq#$WJ*i7{^g_A?+Q z{H5p{g0HYB2U~LPfOv(729IBEcK>CD`=wjG=pw?I0hl5@55Afe^gZRP*q{9pFLDa5 
zh^jkt+pblt?xHJl_H?g1bamt5E8C7<+j*kr(8K-EuTK6|{wTv&?|n7;Wm;SB%RS>=D;KZs!>{Lt}(l?;-I(z9MU>clEjVu?Ph zZn&_+BB~j~s8njnSXm3H*+6nvkmQ!}jAdqIYnN{$wR=eIZfni1@rDD&hP`8r`^M{b zkCkrH=B-o}tkRWjGS+S*73)c6n+YL5zNs=i$+5oxdpVsVWoWg)g@rE60-@9?u5W&d z+^=*js+|P3j0Lrg1+|O?HjD+Zwn9Q5SN-T6|# z3@zRSF@)$Y?4g}*EO?4lz#)Ta+=U{AbIcI!2F5E80zWV4Y+RB0p4UidK@TpA0+VQH z1M-Nd5S=I!mROJg3E7rgAaCA5iWXZ_%1z=z6QYgy1rzAY#N>})wMty6QCKpL=x=OV z;T7mo>dgqng8mR&p%yn9(BEm;OY+xER_q`ZciBs~nqb7L-EU!nM(KX%W#FX~*wk#o zEFmmd=n^{Y?sV_-^}^i{gax>w64ryH2}Z03?0_p2SgQlw!^dsQpEj?1$#U1b1h)1= zpW0~*JL9~%=VSXxC|92m=vUyY`#&J}zenzS8+>Ko^Sov2laretGi`j#vgrx3@o_sA zE^m6qzV$h*6r>5F5VCf@N_M?QcD+jB3fluQ4~Xf%rObpQh98$;1o9+7L4|{X3ZR9= zR5-^Fzd*CC9wf|wBt4mWXzcdt`>AF%RVVFB@3kyGW?pjK(s9Jvv~LocC-|y(%}7?; zP(r1em#>OM{9vLAK^g8GB}9_>Aggfoyik>M*x-UFohx@3wUCPy$)j!pBklLaO?ryV z?ie8y8Qnx9KB-2Zlu7qQBep`%PcSYNV?m)EMl7!slSdNL;Hh$joxE;54Pww-qG+L0 zJU4Q{8;qScN_{?h!+Yu+XO3$r>s3;GsjU0s;g>6X#!lB!rB?stywMV5|d zoYFH^6__yOFVyh!%n|AOfCNQs!6=>^XbRAsP+}>BI6PMoMS{sey4f~O3mbey0rF`L zNcT!yWEE6lWha&l5=?@?+@8P6p1EWaQ@ZIL#*D@AU)d1}0*uFQaXj}h-->*O0D&r@ zfW)SmLj?NRWMg#vh;PJTc;ZAvk<}yq`?ij=qiSNF!0)utj`hyA38TqAVFGU9SKWr+ zckFjITS>plcIIRGJ7=^s|LWO+dmoh_I4Rw6xNp+|>AHOqL^LehDrw&!t63^7Ymw%` zg;mmvpoYlA-T*Lt-wf#L&AcJ#ZuNRZz+lfynE9C2d3n=c+;AJymzJ$Ov?mLe+zkPDwDxiKEqdj3mEKru^-+R zs6a6b2hkZ+va6hDhRIQPF!~kUi4#p1;;?Xq&LBMPu5qwBP+L372mf1BK~^+N1QrO$ zFKBq7?S*kUFh*RtCZS3Pz@Rzpm1<}^$+c>huaJif@rpHHWwxq;SS)3+TGp>eFcOp~ zrWm-QbA_nXI%Qftb#Q^HussJ03b9ECC93SlIm}&D9K$X`eC0NE-eNVz387-ah=pP& z3Nchzx^kdmjj9r6^>ymXwd%4}%90fdggKz-8t66JTDtdHL(>?hyz$z~)w=T4LuD&z zVU-59m4OfOKp@AVIhe@6l>iDAx}xXQn<-py^B;Mt9!2{;|3} zhN>M%+8jbk1AWI_KbpIIIHPSSsc|T=Vaz|hA09z9jpG5$qk)Tz{q^{C497A*W-ac=-f!vksz5$_*sKj!!xWdBAGvEz0soEM>I+k2( z#?J@ueYs0X-ZH|k&=#yB=}X2^77rC}FxTuRjR#2MK~lTV2E?j6XhBv($4Sz5A4M$e z!2xc8vi^{-mOez5JZJ}Ev3y01?hn|}90Xr=K-WU#JHuD&F%O8Q-iHVXwZk`c^kdtx z5ADZ4qRw_87Ry%;eQ0~=1M&bQtT${2Ub5~wW!m|SWyjO@?awg!)l&p~wdEASn&$1N z>9imUpmo>FWZMg5!?R@d6V_#qSg4+cfBK|t@d+EXkX`4iDTspX_9(TD0jns0(HY4p 
z!~%e^@$6XN%ofP1{m3M$riwq0mhN)D=)46h}IOY5UURm(jJy4-TRXNx;s)B6{Qr7mIxmI8RkYrO^P(B#oT zk=l*ZgMsUkr3A|>7w|5|l#FDz5f7on6Z3;v2E>XYZXzIEhxg*}d;T&Q@ z)?!Li2>QVMAjIM1)=?l9I);dw%UMD)nyqP#Rxn#craCrfjGt?aP94Q^=b%`vf3zx+ zuZtEAhsBP?Re1#X-DZa9h0Tns(iR|4o zn$xdiKaTPQ`h)k&wjYsfJ|tPcuYb+1{^i>w9h;;L%cYea(xQgGjEY_i62@R`N-Xgc z$UM0Umq@vD=yeGTe8tpkUxu$3$2-ecEL`2VZt5?HkHH4~JqladNo?zYjp|Vx_l2t>+D~5`e z4HqsM#@JvMf(NN`rA@A5ntNv9_9Ll9R}qdW8~gkr%{fmd4A>yDol zw@3%XV$(Y z;w$LYbWLHRg56FCy7Ntu!wH7>FIRyC95pYxYgWjWI}i;TbO}fBl~WYuE7zDlPoB&N ztr@P=J+{v?PP$OoIhX&lbNtUPsTV!6E(a8J2VdtaFHyH^%r&>Dt~>oM&I`CYH}L8$ z9zWdSdmfF#8)m(?Q1J82@Xv1Y{u0d;q`1!sJ-d)|E->vP_)1)JIj_B|d}UY5rmod{ zde%Ik}%8iS7!A~3vUf_i;y8bG(h0T%#bdnuVHCf}0JzF&BY-PmIXq^@^fqKyD)F|M)H;;0d^CI2 zSowC+u%FcIA?Omae1%=lbX^V;)PRkIKrGZ$92S$<06t+0FarsT#<{co!H22m3NwSP zk22!bW8f<*^sDu!t=rzT?|qM8OCVMXB1-h=8QVQ)>?c2_=BtN3r}gm1)<-_F{RLgZ zw`@mVweCM{fr_>3lzk`6?N8e`J!Reaq-E1nmQ8=PZ8>Gz`n+xXivTU#win5k=jhgE zOag+hmON+$@6i?2_gX1p!AQo0f&#RtRP``f{xG)u5Im^1d#CQR?0zb9Q4#}jov>nk zGu_~PuL-i%;$v38Rr?Wh^MQ%_o#WNpM{BlWA-EA)i;=m#Vd;|4l>UJDesA=lqTml# z&JP)w<1d}MzBBQ6+=q4U>Niw*lkIhfj z&5T#vChWge(0@y;d}i#x988JE>S4Q@7pZj$8$<@08%Gbub$v9HU!rw($|xaDot?t5Hw!56op^bkY5U#jEE#1jLEYI3#>junU7E! zlA?5r?ZWa(QI!=T?eXRI1)T1%jA4kSF8n@Wg*~>yieYeG>3B*r;g#x=nv9u?&A_VM zWhTg0aJi?pGj^;cmW+0DYV#zfDpKpM_?6`34Dqr?g?Z-0B7**4glHr(ek?X&0u{j@ z@y3*8EKuRiCr{~Y;|yOR^2$0nVKvhXEX)qVeudw2=vOwd7E!9nhoAO4`076N2OsF) zc~rXPZs~^o^0m9=E4E7)Z4*pDGnHqdW)tf2}#4C0i4)_W~b_n4_Hw`^COb!CBayoQbZE6&970}Uh z@D)y2xMJ+?Q&WT#uCDVHx`EiCKb&vts!QjY67>1tS(4 z+JG)$HZ6o*z`KiRtl#KYw8sZ;)ij;T!Bl7$I?jK`R}5TT=PTH;m`0%l&UWw>TWkg? 
zZb#vY9_1mDxCteYt>#8vofUfN_P~p``u~C>;A(bgC-@4tgRk6T(H87R@B&5!QSNcq zV0WL-!;TU<1qn_I$wm?>;vM>t+ki`cEMjwk|0?M6+<Vb`#z*HdsIGWTL!)y16Ft7R*$fv;BW?qT_A`_W$T)xn4Q zk3Ti=z^Q>J{-y$7z5k8jm!47GsQur3_1{8fKw8WcnC&LK?v5DA^B)n&5bLtXn=5kB!Iy2(RD|eY|_fEEe91cyi95%Kd zHnkiwAEwZv*`PXg@ID3ot4|9DbGu*yedq16bb5H%cY#R}^4+NH8ifz!w z)Q<5gO_3#I9Y+bLN`p2HHZ3BZyLc(7TubOhS0Yuce49!`<}Ng2x&k3zUA2a z))OCEPkw9zS^ecJ`=ejkpZePN)R*=rJ|%xSV*_Fxea&+4Wy^t=?0cWL?|jy_^=a$o zC(X2;F>ig=y7`m^h=l@cv9S!`E5x6IuYiJWCoJvv&@If1?meN2`8xX4<=BvFEwL2hnj?uLLmDugrADoW1T2ELi zvR)NFu~Lkx!Ak8M42Mr2_{u3z;S#3xh#L0bj<`e&FN_*;;~9_^>m?fV6jO(lFOnHV zMtAX~J0z^EvA{C3U&S~^C!DeswIcp&c#4#d`?&Dy5O&G9k9ognd2v zD=1&(EF(F~Y}h|owAzlz+?3YQoE26)mvWZcaXEv9iB!C*z>{zsYsayCFe+svCc_*g z&~p+eBjQFwqO?eE=Os+IhxdELAt?2`i`~RLNo@F?He2wA!j;uL3A)176g9Y8F-Hi# zQfbJipDW(`Pz(L)iRV<)`D*h4g@dn})<~ZUa*3tEb4W~P$4r>c)O1rh&82OP2(%Tm4mOqOTWP+makZ3V$Y{D)%5e} zpU8e402Ru?SMXM`tDL8k70XvR-X5r$8LXrgf>;I$SF}GEu7G$oCrUM)uYg!|6%I2q z2zwr#8Dwnjzs?LIItID@dD`eA3Rkcmi7+}>p`qO|hAyc4h;e%V%2!QO%|R+&E!G2m zP!z6OreYabJ61Nrtgy0qDiMoXsvP}6v20(kh?T?@WKVWJR-13l_v?m2_M;VZbX&?0nrs3>AhpGiBnM9xlp8xKHORAQsy ziTq%AhB6|1APfPdNm5^d1cJc4aHP0PkSU6tgHDmZ%n1BwM(Aq+#h=`n@t6Fam%UOy zyV>`{I|DB{1$NB|{3Sr#6OnsKSa~6<`AX^Xt61?2zFN1xXZ?Xr@D&yj9(Yhj^BJB~ zKJhBmuRi#8sJ7fslMTV3yS>z@*f3XS-@9Rzp{@_l5<@oq5NzA zwC>0v7>&l#YD}3m*4#RKPPMsmDS=!QS2v#2Y)#JxbK@loM2h+G0}JAmQ)+y?lBW2I`Z)!LtX7h_h(u+p{Y5>nONGFE zm?S|;6=)5Kpa>(v&ghVou9?9vAAz0wA=19ha^PN4Tt9qPNW(*usk^FfbBbI)Yp~eu*izzyz%zeUY(XsV%3)&M(r%l^8_jBmClFVVMCg z_r#hB-^8nnwv)rpW5tOQ=Bv6LWW$3*P@_t28!lL9&0b|J+e#K4Al17_?Oxgw}+BvU=h>`_tdspZ}4(@B?}7Tk_Nwz^^A}iMqemA+ z>Rn=ny+ot1onsRm*2B5Rjk&~+yNHL}vxbAKjlm6LUe#)s3dQ_l#hgs(Euy}gczw5W z=3T5gJ}lw_?n=d8DeR>-)GL#1~S zZvA+4jWM)%#5DKQKci&u+EE~rN6Qc)ZH7Kj%$+o4>c zMM$$o8NOou?G`Ih>&a(dDBk->{n8uiC!Zg@?+JxNzuNSxekE<&D6d;7D{t$|UnI$@ zktLVOV{_!;Dg9^>(iM$Sa;y!Sj#w;TA=({?1-?SDe08gj!ePN;buC!1XlTQAz5*2C z+@VBG=P34^r6%@n(?5&71xFUHe#2K&dz=I0)M~{jSHLRNKjSO31=#{(!Fh$|Ak6La zW3}Kbc4iQsc27*iP+PkOwss`O0GPvbhcNO0j{&cA2r0yfD^IA>Q^!%Ic8afRsK1@< 
z4?@36YtkUKMM7(yyjd)^At~WQD|06w6mES>cElAs`ACtoRBUzgQAm)E`@- z5SAz@VpU+jA+v?2PLqVVEm)+miwCYP)&Rd~5c_ms&|(u!fpAnVYocjd$+dJ`5$E~s znxZB8;-y2ts-h)Ynj^Y|iE0N~;S-xFBI2)f26t{Yu`yvg;ufRsz~QvP}aXFsRvn;C&`%nQ5d6x4}hK;pH?yvu^}i)oFQ!B@>& zx>xM!S-Y=$-To`!s~yL?_dg&(zUV_Q3_SUo>g6-TXU-Zf_l{G(vNQd`DgV{K`Re}? z%`q)m%2$XTM2dUAnk?VZJf2iGnOSAdshrFx9V z=a$gt#Jh$$Rutwe@Fc`L5mJWcaU@O%8e*45VVRUri7+A$p%z!=2-fzBLkC9&XA2}F zh)d$C3E#LbkC@9of^IKC5A0arG`Co#r$FJ8APG+HbdGrY{a?uQpONs?zo!*Q(u!q? zHQ#2hxFTv+#r7PZ~>s@;~#-4iwYO$~>wi;h|mhXop8%0jz@4@@o10bGG(@L|XO*5B||8^c%a z(61h~F9G&EL)JiOdX?;ci|l(FLx0vI$YJ<^b_StbQQ7J{`*S~%7th;YId6OUXY1*6 zwo~8P{`w_>koDNdv_x0=gdH?T`v$I7PPF5Putc%VM8(l zRwbgxM{W7X5l2rE3*f{SC<}ar+zI+uM`_`q(I>+ZOWRQdf8lO+w%U$aTak7}+k#ex zudFSHriKTrw;8Lqm@?Y&dW*eXUBRi{i0DUnte;To$;DRXZUly7tuO+Yz*o)zgDxR@ zkH}FU?t~Z5=)*T6Cl)&;5nTm0RCu1%Lp0_t8e1UJxu$9T$_xRu!^o=jsvYoaRJ&IU z%*~eFE|T2BlieDnoE5DHVj=QqZaB=SnmfZ0O*4osP8e9=6!phcNU;ZsO0P>s5W53Vk@OWLs&YKK%!r-Qp}7X9=&Qm~D>f+6-GVVayxdVx5M1uKV#NkSEMxMGL;gSE zaxg}%h48nO8UvK1k(I*|~y=*hF(6XTUx3QbFToWZ9W9 z5M-fkf2*0md1VA&;U)GT`AVlJUwox}|6|Q7udAPWLCx@$Y}auK`h!%z+9N}MaLE=~ z+s6Ke)w0UPeT6N3IrY+%ihh1xe`LBmKqU1Mpi8K5jiN?(idZ27kg(u#M}e ze-LoBfUle4D|TiuQRkhkLnZ^uSHamjte%W4)N_jTQ+!pa17E>$gr*p64pymXMgjP$ zc97*9maicGfKq5%uw9eWfi59>g%E`pyVUigL%CuTv8I}X4e+!pP}BJ;u3Q>Z)WDHSkr`yH#+)*IGG|V_}j0;721LfU$MJ#{+O?j ziv_n8_Bc3)_stHxip3R3=ff~Pn!bL&iZC)qVq<%*?O$SscIp)Ri?~Z3%_hEv6*1vDW%n#fkn@2 z?A6G>+6xP}t?f9W@1Dm@?Z+lDcTZi-_mSp2p|w%$SEc{&aWt8jz0Xrl#F zgq3&EE^5WjNw!Pabd1I|AkiI(4L~eb!lIOQKaCzmK`BB()apPn7I(lE+am0E94$iY zQYv2A*PXU+f1T`ohwOg`h-EwW0XgvzdFV6p*jMBkz|~Lo(--WoUa`M*&GzOM+Zz|H zfBV_`()YIKzOg;?1$pWd^2qz-!MDl1uahHxvmJcdw*Rzk-;37WFIe|rknlya?UZff zGuHJ_&|r2dsoh8SQ?n2Sf?|usr;Zb(>6i_=T-_lv1Ud?kcUv1EU?P?MZfnzF8-1`S zpck6Rht1%wmczEDgH}}i9#hp0BUV)wuNzBiRf}o{qVoEJQo4eYdtt!}K~5~TiG_AK z1zo$-@0yc;-#ov5CqL!+3kGwKloo>3y@Ok7PWHxOUU049yNAZ7tl zQo(Rg)o4(iKBQS6-k}d^(fQU6F3ca8EtcKJ>%Tp6a8{&dVXOgsgJ7IHKBdUYRzraTMk&@Kw%o zGcr%mAIw>4$y{p6TTaj%Ol~yAS3&z46P1~`*<)eJL!4A&bczwNS5b+R(Fv2`g3-Vz 
z4No{36K6`ym<;Csc=!>U2BU&_LB$#ziw(&PNMB+3iu$jp1Ivp1E95xqhsl@U47~q| z`jt0SPoEw<`Q*Ue5BBfAN4oWhbju<6y1o6YcF2}*leTQ=YgjF*TH0IE+MCxPNvo8` z705VQ{UKudNWEhe7$*T@Q56e`Scq!~polfwpDuu&>8qgnl}|s4)vs7Bi{&dES-!f( zS8}T#x`fcOki#%_&ffC-BkNbkR3J`RZxv4P;Zz@x0V?+P>9??yHN7*)I@=w5H7iU- z;VNPfe8n~g=Tcic%vaj~gs+f!g=q!oS7;7KU?Eqbj$5L~T6thYdaRXb+Jk zu4?oXp@1pk%H{FpayaC1OsK@mof311a9B~F7{1$hK!{M%x;JhO|QDPm1ql*5Ww4*d$L3$)^-a9g39zkt_!XK>dY z{+-CAy2<_L8{K}o+2iM1y)V#kD!PLC(w(85P7$5XQP%)hFasb3%Q@!C0>PDqvFHzW zdMEUF$6xb_yBe5)*R<}Cv>upCk$mN!)Q?&9g`B=Q;k~zq_5L~N{H@%xw?&`*lgAIY zxp&QW>$}0}?Hj@m1UEh!xAHAcCPX@BsczLU=mJpS_Fvwv5=_VLiC=SDlF$M!r2qqVm28%tQFw2q$wWz1Ub(W6Q3X$ZaICrK2&GMo3uVMU%^H42SC2m(P#NSM6p&hJI+t#iZ$ zKatdf+vUS$=3!%$bh(LG<1W@Ch}(m!_2UfP=K4)Y%6CO8bb;clw+DYSi~r@K;}CfW zze$?VuI4ulmTf1+n+Xrb%VtwjlP$7Phw&d_)!0J*`JIoF&G!?(gljt=CeW|2b|f9_UvIRpT&Pq%@h*Tk&3Lzz7&BzD|!H3tEIl>q+q@JDnojLw`h-JFWEx zNb50z6!%3(K%`cjcOcOnMyBgVcb2c96alWNlN`f(5VEfK2hk#g#U064EMKiYW#8}; z+3^P1_by$~eDEDR;0m7hN5M|tl2bp}UOI1kwbS-?m+dn-`J|tm=_PMpC2w4?zjDt0 z;y3nFUy?^Zusrmx_5QbP_r75}_PYJZYXo&SGO}JFyHDF8WNm($@)dGH5h4l?6?%NY zDwda8juMDe6q&$U_fTJ1&4J0Py;Pv1PX2v()RVP)O*m;dU|n>`)^fKUOvM(ks$s7g z)ws{nu$S%!FW)>-y1|&gVl=bOfVZLWtX^p8;puWZMTnQp$ZJjkm+$btG~1`s$xrI+ z4+%@XAV}*LHtZ6ncaPHh3WmLe*c&c$=5)hK>X)L0wt(?@SFt7_XFRmr9Nl1!U1H%a z8;@N%#$7rR+@N#IQ{E|(&EzO%MXL}a$?}yOuiuL=DQG68OGrwMzI=rhZ(eZ~vZ#V? 
znCrM@gOMeAY$b~<#_s813~NW`4k3fxJ6^Id`tpWjgkLo36#g?O+Yp&CgaDj{ky0$R zt}>R4CAaCbmKoER7!l!% zreQ{#MODbbKAXa>}^7nj|-ZDt56_;|zQW(r^Q(~`8MvDqX`-3!h6w&Uq zm@IcnSx}tVf_NpaMkYf);7U{_$8#0t_VDcPi0nQ`MdnBVTAX}PmI5<tWu$XSPWlj9DKz}SU@b~vZ6(Z5kfjem_sdCtiS!A@fB>4 z2vG!OIrs{JKZrwg5SIhGaDrl25VA*R(K8jVrt=lZ3Wh7d6<$n`mw3DHi!f89?2*R9tJIuR zYRaoNi%Uk(nB$fi<7y^x=?znE)-rS68gu1N8qv^loOX!d&wbq8PeJOOLxX}dM7JzA&)(n!O4<<#BT5p-clnnWzo6v$V^FiYHv_N zZ-B7RmoN2=g%4){t}0jr(MhD~ZRltty)A}%uDk@WZ%NQzq{H>hi=lVo1kBH_A@ayL zX>NrsnER<)@Jr!I@2x*Vf)jr77XDnk)>gRQme_0%))_b@!?9(Pk$Iz>LSxNFf?x`M z>1bruU|8B(2Xj$!fzA_FK38LD_g zOlzIQ@K@nFdlptSZX`9kNXZr}rYva}^#Ex&NH-E<>GKqI93cRemV523_tEYfTui?f zLs=lMcDTwPup^(L4MG+|-5)255tsmH6z2O7RE6f?yX5W<2*j(C@K$|Jp85uGMNa>0 zd-am_oo@SwGV-~C)@S|X6A3xfMc%w%{oD8E7r(JS`L zkVCK94j{|@6#^A&`*Ze;yqvTXP)fELctO$Lxx?=n?vpD5cpUbc0jbn^suh3Bm_ z=C3p%?LRVK8kU7lhmM=43t}WJFJ9klpI>fs`+2Tk@B9G8e1DaD_^?Ze&Lvdi8mYm0 zqu`{`h|Do;L_!BSEK36q6`bct5OqREQ@k#wng{{&%gmzH6Y*SrBnGp~zST!+E1IyM8FQWf-IMFbv~c3eS^cT`%54k>-KPajc`| z78oM4AzlqeW@@o0Corj>ldBI;(}rSgq6pvBpuf-n#6k)^GDTt2g0C+kM~~2kn3AEG zGA*7h8675UR>Ja1R=mOrOHfw!3UgYAv2;Bt!W!|K32Y5WC^sXdG_jO~C5|Q)5n&dA zcoiZX2`6pU6f`CiPVR4uT;1H)Ndd|R)R25D-2jxtaYaP6rL15E&*BXp4|p{X83?o&NHfe*J8=C#zyj zX<2O0=v2PwubP#EuYgr7VzKDO=Bwj~8*d5thq^VepRF1GQk<|!R<{Mo9k=70-WWuq z7@%UYSOrfzZo!dh3qrXXq{;(r=G`{uh`*11h%^+=%~M4tY+X! 
z2Y);KSKuokH=Y~l4|0k%(67K(@VCR%O^4>LJFfs&;ySe$UMVCeAebG*0#<3$PzkG5 zo!L$clr;rfU@cmVa)l(ThQSOb?443gquSBb6C?SGDM(oKq*?{@Rd`lUP-+6;S=X%Bg)80 zC3x$PFQ-$4iFKyr2C6e8HBNxCQkso;b1Ym(vR6!k;wrWi+9KRxg9{5n7Jg8mEOZGM z9fI74jw-;G zbXVZrX9zN{kkCe6A#4t7v_tBwxtwpY+<^>$JRi-u&th zmKVPau@5Z|zmK8;Sts5mNB?fS`!(DCzghR3w%+xe4Lb_gKTc={JJ=7qn2*u# zndK|M5(@v{tM{6!_F5|Um{EW$z&16T(WwVJ4W`&_t=(e*Wi2{L)4 z?HM)f89n3{t#*wDe$zQVez6&m2nd1>$rwO?IXKzqFE(MT4l>&#N=ylh?P+W6*;`23 zdb6-?6mt!3Ndt3ZWb-%!;4ApA+<7n&T?>luN9b8`vpu0`KvXIh6w6>3jww<^Hx7OCf62}FcO$B5}rO5kfKFr z5PBD{*ubwCLF>6~KlbnTq6totF)_y;9;*%tSH#3$c=)d)1Ea)XBO_)L z!&erTuZ)zhX!sSAfP>y3dWV#|zCWjV??c7k-_t$&(%{3-D33onu-kl#Ng@ol4OoOmy`OE&l@D(@; z3jtZ6!Z}XZb1G+D2P*c&fm`?xD8)_!;>2u?3KLOE0Om4+Q2q=D^}uLdzZ$ck6qse@F%%F#1?H3Yswe~|jyS-z^Gh{f_1^C9DJ48s?UOG*{(%7_=;@~UO%d-a)lUDtbx9cRgkTqtWMdbSgfiU0AImUh0Y^z zh+ig;&ZmvSi0t0bjP9_^p2%z&VtZ*7C{ZzmD)5hkucnv^-9zYF*9(|LyNaa)Xz)=w zYS1RuYfzM_n4)fZIy0!jzeo!td7ZCVp$vQlFP6i9h5LZ4pj_cdg)GWfY6o9A^sC?V z6(X?ykgsTqFpObcFbi=%J|1tju%2%G}XZl?LT+Isn1r0k2R9tw8X;LUsS&(OibI9N@y&!5B z#$YeuH3(UrqN^U_%OUwP?~JYmiI-hcyXJ~IX9=#&j_LjnpC51Z`{9m=AH1`_3vGJy zhJ@9EZI8G$Y~!tcv~=J38|FQBi|3cK{V&Y&{~3{bZo;4Z(=TvKE+^G><}be1v_-ZO zQFZ%z*Y54ycC;JAgU26NK)-tCHT6sHY2W>F z1LrfA_TEGJYRNt3j-w`Q@5QE=^i>n-t8B52M!(!a_hcm;?8p>_)PQ%Ts7#r#K%P{o z%37q$Yac3T*A=vC@^GP4u0+yKXi~4ApxYZ3!5F%608e|s)qKiV(uG_(-VogorNx)J z#0_A<99{z7|QL;>wZ8oIbqTq|^?VFDJshv4jG1Lg^$YTb)pb>G&~GIdvK*R7}8f z4Aw$cL*Y_F$LDLtvf=-4v%!;`vDjR^f#4?tzC!=BWRtaW2X(nu?XrP4Sj1{N1P>A| zfYzd;cKDZ&i$y7m0WH|Y*-bhSwIFSwT5vpRZ+(Ekh(-H@Pg~Z$Xrp|Eb%AHdv5(1# zPpn8|c;?;UbDb8c||MXfx$*CdE5R zKD5dfb3nG1ZW!cJzH;TNv5UtwQtA@k6`i8Vt0&22!--`YVevpbF6T=IIe7}$@bDx- zQy6cc^a)cq99uXNQ(%C;1b-B$JEIY?{{#|NND`Lj4EqY8Wg#%ljEM;-SBMsguQCcM zh6|UIO%D)+?!wRxH8p?bB#iW^f;HCCjg-t0@sYFKlGb7Zy@RhrrIy4p5|KR4Ni##a zibMws)-Se38;C+g_j`pKJxN)z1)m_r1Tlx@z)v%_I<*VZM zzPu)BR&9S`sYIAdgBpSpq)gT*O@d|l3UOEtzM2)F_(T2bHXkXw9EaJD^9P12Fcr4d zU}p_WS=0Fnw@klrI)mYMd=~qO?5E%e&|)Plpcl(jRJID4N@lpOUs1k7bC9OBGs+c9 
zR=+y0ggOil0@bqTy6@zK@CL!pj{xgVX?z90>%0meV*;w!l}ovVZ_Z8=Z`MN>rz& zYSs0^ji4qxKx(pLj%U?lljSRfGcXZYl&=Kk8WiMQHlhJ34O4uT zKM1*))vwr;QHWO#mpk3!!C0_Z7Zy_4F)jETPdht5$ObYXO%(Tm(H%R~7RF0abT;sM zp&`h+t5|5;-zL^r&HxxzOH`d1O0nn8O~lb_9#tb zRKcdEcKYY{`V}UM8J9cz_y0crOMe0^U)jdNS2(pJkdR0Wq;$pEptPUk%dP2^B(2n% zR%uDCu^`n`Tw?=Y;TkQu30a6FXQiollNBkN&Cp>W$k1Ui8|5oxNa7v&@B{|%OSVmB zuQO+?u|?O7cxMgzWa_ zDv@(CdjmvWm}7N|=|!6xZbMvLU_&%^7b2n5m9KJ-Qw8RXE{GdIx-#|*y2mST4?Byg z#_~ zV3Kk*EaPf;R;Rc^7oDc%rVT^ON+=(T$g5DStHnq}y(!M_C4JA|xu z>?mXkzIRX-L@k7owt=ip+S~3Yl&=ss@u&rSwdMuume(m?-Tg7S=X3l0Us@mk)^_Sg z+utsdcNn-rL-0!#`BqJS(2?(Tw(o&hD%;m`+vgI?r#;pWuiD?aXnXZ1+v)G@&wg!x z`b+Zo=j4%3$b%n}`#&Tn&~SW<9DJ4Re4cE268Eh|VJ_KbVhi9@zTHA=2b#!MR0X&TcR)jMt7+=5wp&ZLSc^AW3fEZ*R+$P` zne&%hv)U5kYms94ecSpLFE*CClCyTO_PWwKHqj|jwEIX4Fl6kT%dSaQ7)mOS)`!_o)vDvE`J zXc5970S@FBj$w*WP&kP=cPw;9VU=@K!AM+%32(mCPgQHcvgVqtB)M5vxY}H{0h@8m z)A=f6*;wu>bMZO?J73Bov$)!ds+ENE zUmi3NgUK{*BSvf*L*A9uPWj485akZOqUI|)Lpa4(m>Iru&G^x0>UTcSJ@>c4N1h$L z_i@#s`xQHn$+sU;pg+j!S2SXDO<(=0zSR#(r1WQb3i-)oUedbY_T1{={_q~uwvnAI!fVohnD5QC&vl5{n}gshcO+jWVRDcT52Avzhrm~1jD!V! 
zGPc}6`Knw`t=-I07#2LH_zJzY#-Wr&`qZYO)Fxdz4+Y9F~$}7GzApMe8(vP0;7d_&x z`6kE_!-#!_Zi!t%`7)1;3$E!uxMhFiRrq;W!#934?*+G?PCoFoQ$yIDNi{FQ~*{?QG=Pu}2k3Yk}bp8q|XgHE9r5krcXLEq#{;47>kM8s&_`krOGCD5-n z9_+sBp1!;PqPXuV{Oy(;*vK14eRBu>a}0jz;{hq7 zbYeYuke4waDpX|E4VJWP%a>`&mMSZkDJz$%OOa_@r^0JEyjb3GT?oaU&+D2W+lvbj z$X_UER@B3sFz6I7b5GL-W|}5sTKn*Fo!E+C6Mxs`(_{McDp`ZOM~XgfoDz z_P%e&&cTyk+5YmK<>?zpA+-y*wTBuICsdztS;k`-xZ zn}dg_kqq7{z*Xfg6PT*>E-R{dySaGlWYOly;w`4qZI;rlHte}B-D)n|W(G~+hyq#R zKDYy0h+r+;XewN9DnQKC$_Wh6egb<`b_9Ox8r_4o76^(DUVFPr{ewi^a!@V_twL0o^`^*JM~Y zFz^+UGaQ_u$JTETzS>VT>c-VNhY$GhhkW?jh2ee9p;x0b^yxL0{1%c?Hz6+53kp=+ z?0#O3Jgdo=+in$?qo+I&k}UHT_V_2r0>uNa9LiS!t-wTibQb<~8mEBegNZc+s|MpL zC(s$hlLWz`i*}KQome%cDOhPNT8rVq33LVv)>?|z*&%r5uA$)`1*`3dXh3#uQmq|N zQGTu^GQ}t=AQ1@@3|~!hlFi5$4HfF+Gt9ARgHg%nYL|YEL2UH;bw;xh|FIA(X29?D zgdKbZ{faTS(^pq!mI&ckX3`_Ee)6Seimy(oPCPLPzS?z|e1}x_Fb^ z!B=JNlA@OWyhcSvl{~SCY2qhJk>rCGp<9#$eB~U9q8p$8YkcJ|xh+8IsM`Z&D16AC z{N}^yhy|KrfeK6o&~khTtePF7WD9-6UlEH5VnM~CF7YNX6}4lbC~hJ5 z0Az(7&5M}0s(KvZX$NS*e8tS`)zdjdz!l3^Ft=00st1*6vpO_|%MC>?PS5zEh^egJ3XBIC4!03IL@OHozuSDVCE5u|`zADzUd`123 zj0I~tUjecHQNO}3vI4)$DBu$-PyJrPVj~)GJ9{7S6>{AvUkRlMU}Ot#D3GmCm|ll+ z1-QC14s+K(+VJ%I z;=T7rZr9&u*-~4w;$6w?jWgp+*efkFXw{PgosoD+EUj(`i0 zu;xYlsKjbS0vHpPz{{6wf>?eS$T1;-zoE*F=<8|0zA|IS3en}sCB}&B=B&mX+ zi6$r>5m%a1YVBxPBsW;&YsOMqCO{EI8_@$I=n$b*1RaKUk&aoB(Y^eHeaR6Fh>ex) z9M9ij%G*W+ZR36gTAyr#hjlu2?!yyFyvFQeC}F zSJa|QsZirhEdVRa<9b{K-MH?+Qe79Z9IM}u=nkRYEopE8GTxI^ZgJYs)Dh?4pMr#4 z8Kq`^vLQ8(L??^|2&D_7e-26Rj)pB+B+INJh$O|9c5dEybjBELEy&!CNYzB8>p)Y1 z@dF`=O5he~3h(KnViVM@_|lQox`})QGE|NLs48fvJNi%)2x`EQE+6b5u*x99J*&fp z_e8W!G5?jj5=I*AbF`Ffw4+5>wv{3lZJF(~(?;PgTRn0u;9J{I9q({40kIJ9&YH5| zae|E;MYSyS3Qv&M`)IW=5*932$Y-G8S8v$&pRphRf;@QE_SiYgsb4HFU$MT`Lv2@I zC>XfXP`EnB@YRn1Ewyb5v1GO{r1p=y$-A9&qVVOP$f@t?)Zr6fkw?C;Klmwm7?cHB z>>aZAG+m&x_F>y9EN*7_3T;8yr~p?rd!bpGAYMVOD%xf$L;(ABxPqm>Y?~d&;>{NH z{_wz2u;HPoa<#)&hMwS7Gf1pp!(`rC(8VOJRWzU=Z<(cVrM+M|$!MKSsvk|L9-%F{ zLb?PK?s1r%5QYF~K^SsiqSlA5S5{#5!CR=pk{Xn6yc(3n&7VlELqZw>Zh0ptgHng* 
zN8=h(j}15;m@P%zhe+ow)*@65Q-^ayCEmheggiKf3}8`?ufX6Eshk_!wII9;OTDui zNzNjBX8mMxId;>?g2k6oYlreX%*oYQNvVOV8J($xU=>$v!aW6r6ZlODPgiquhsEW# z;3QR82Byg-u^bk%1_lPPz7sj^km*udwYkd>*lsLXW6WPY4!8n}vzdl zZj*Iv>|eB6Qnyl4vs_ZqE-7x66*Va`YvhoZV{>J&U`)YP64(>c*?gkCJY#Svr$#KMhkT=10y%c)%xE_gD%a-}$2bvwiBlhkK7+=&x@xvE}_=`HHs&`Iiu{ zuGOEjga2(vb^U*cjv!>qer+Tyk>zgU@1jmEWi7orG4vdB<#A9ABox zeUygLy}T!SPIvZtGbse{72X?R+kV(0XCj$bKOwDqT3Yj%#IBseJ9{&C^=9lahA-`P zE$DDa?_!PDSf2*lpk~*IX8-ucsLaN+vc`f1EhWoaN>((&SF_f%mal9tT+)#?t0OXB z#Wn%wgjze+m$TD6rNu6#MXY;HYMGJPigGnQxocvS$|<(T#`je2>X!J5^LAb*!{Yj5 za!|CcxFt8(gq|Mb_p5KF$}P6pKegK{NlmmDPE_ZFYA;lg=s1Y za*biorYC;3CNQrtw!AxeuC`>Il(xVSRot1kLW*O2a;b!D6+avCN|!|rB%J^2B_bNIE5ldxzc9KNZr8(Cb8xxeDb2bQgSXJH;42*OOcW|b(+$(OGMy** z>QMn#M0G7;uOBgWi~GvgE%@7aeQLRHujTQBmY0s3Uq5Ahx6<^{CF56Bmhb2c zHp`vCcImjxS0EOAB@nCCa-_*Zm+(NH`THt)obbHmqf@xzMbi6g2Q4rEBE57#%ppFr zPkMy4JMXcb(6IhFJ^IxuB(;Ym;kgnMg2KUGvRhYln-)DPZ@WGhSlY&kF@FbsDzj3^ zQy>;v7JOBJuC)!mG5}ZH!tLCfyIHUBRn|J5F(Wo_d`DSpOmxsPmKi{-l!dzRlAh3_ z?(mY{z+5%Pb-&Dx;M`u{^iEe+>qa*_MYp=fGG^H68q-PAluNYQHiVWR7)D04nYGa){74gz23kj4sher|AAE!7ZabuTJ)FvWN_V|rlcRDarq zjiX&FouV6oZFZe!Eih%$10!A5My%x75_y26m6AXkH_se1Ta&WX7(J&mby-i@te&_G1*WPJ+ z=JnPGpKZSL@kT3Op_6=uFr`-SzW>190^p@fAP?M2+%jfUlHu!Bn!MrQEJy)gO=Jyj%Z>SUil`K&9X) z1+Emn;zk9otb7GrktV9l3}S0nk{MV9J~g6aTBMq*Limd0cHqj&SI%tfP9eWtO@6zG zXvh%}SX>eOCwwIYA2|#P7ZWZlfmjPg$tsqBE4kX3(No%j3S7ZgN|Dp902Mf-bOs?S zf_d3}Kt;`}Ph3!&vZOYBS#$bQLLP(yhUuM+SJwVuB)h|u4UE^(7n^Y? z#d(EzMVzmqTrru#if0xKDToDMd1W+HivLPUX<$Vfh^6oqa795@e}pT@3hv_e>k-S! 
zS7M@&J_rd6p;b17Fa%yfq^VFlm?M{ty zJWaO_@roVp(}OSCgvz9W6dFKQdw&DZf2FMm3NeB%EbzT*BrfBpA91Mrm+ z<1U;TXo>U|q>g^+wx>)}!hU6qGCDx+GAVVgl(j&L$m^w1SiC`^a}hhQk3}A2mGfCy zey0fP6jOxcNj_*;{-}A)lPDe*B1X&Z(`DV(m$pq0UwIXGx@GrHiE10?S3lmPik^Z; zWPNB#Q%aF4Z+?66vJN_f6{{M{S2n;}`HR}p=XAuBsDiSaJ<}T8)0`%x1=V1 z7Hrb&5b;gzCTPVm;R3H{EbjDiJn2;CtH@(VD&Zs2H@Pzc5sWPlWzB5KNu94}w>3w0 zwkFQ*PoA&OSb~n%$D2dad`-gKKEx}6A{idcTPMxAT}oY{Q}_yI;G+|B5vF>KMmW0h zs!<>o1;kqLfE;I;EY=aOeofl+ zj)l!Qd%ib6aoGIwN#kG68Q!lnd~(J3b+vI{y_spjBRH?xgm`sKZ8_1Qlypob;leuD zWd2!^u&!FZx@i97jOE=ErZ;~zy>{62@GknF!WX>iXh{Xx^JhUw2y8>8itAP{I5Tv#k zsNBsw(tvaWT@bCAu}aT^Y&vLche7H|U8GN5WK5cGjx6iPRTW&&8IaRKUI9&N=oc`i zUFaIG!gl2r-vMQLBy{YSoG-4lDUQjinqp4sY<#GZ@lX%QsZcFhRxo$B8^&0pgX zqJ_0=LpyBz8+=j>F46rng1V*!s3!%qkcBnYvvHz#%UIW{iQZK#2t(UUpC#qamy*i5 zqZmBTtxKEVA5)?Z&TI+E>f~MmuR^o6UJ308P2Ndu@fD_kOd@dG2omJajO{9NmNssl zHe-c3Wl1k?E82p&Yx@c}>VT`fb=tzsrjlDFBrK}pHfh0q60s*J!x;IPBEj((Ss?i) z>B2H3&*;9yLMbB63}1Oi^!P{j`b2aEg!iPCbY!pCdqt?gGW2Q`Ck3*r>ZyJYk&3))kDv<-1T_#E?K`KpMm*7_-f9& z+F7e>DpuCgA1qkhP_VEmV|G&#-kuy`arYuwG_nSNZ9`Gw5jX7?qj`LxXY?#@uh-WN@9aDSE#tz z86F((-+momH7oj+!dC;%b~3NR)mFHoIcOUv^ee$v37t+vjU~2G@Rf@kfyLej-)zny}juGW`3G*8g7uF{)Xi8q#k~B}?6VwD>MUrt^+zeb@k68RJ@C>*@ z!s0&*xxrU)GZ_rzg(molIYjg;Mta#5NJmdtw&e76>k;Bouu8FP(I5Q1zx_A9x^`DH zaK+OCU-ABFHo4jzB> zrU}OejXOkhaG34MQBJ4ExSbsDb#ijR*%_f1P^)m83(%4mI@?EHw2waL7$H}BOET?Y=6)u;M6#;qfWz=uKKMrckNw?09Mzn>q-#_Q!33O= z`-2K!p;%!G62EvK5XH^bQttTk_}}sjh-X#q5AysAwB>k+=maa-K>nWJuU%aHNbahMd zin{zIjkyb3GUljg5ylj^#gui0mUM)cclngHxfVA&m9|XFtDTT}#V)_uF0sKeuKEW1 zFC60zC(Nyy7W7Z2fRn3lmBLa~VOfpQ6?K6Hm!}7x;Z@+B+K86x64m4$*W#H{NBVln zdWqBp??jSIs~L4c7*AiM56x>2&+UybH>b|SUECR6+6h@D&eg(Hv;yf^MU{2&C4mgG z?W!kXzAkfxg!w9Ip@Eq%o)h+)K%V5=3kw#$nk^ReE7s*e8YR2f_-rPzL73JEZMaj+ z6=KR#Kx;l;EV*bJiWt&|ge15a3)bTj1BMGRy+0+bdO=$InzWVttG^rW|H1s+De0v% zhPN*n|9;i`%aCG+YhS;uTyEW=gVZtVbE;i}qh z02-A(pu$T46-978;3UVZiW9yp+{TYDaip6}EEvxT2tmX>$MiLtv{f2H%W0>Iy~fM* z^qmtI>5>)`+-edAq&bG@3QgiXQ$%Sm^LL?zJx+1_W$An~+C7q55V8WYdH^k!)Ufg| 
zqEHITH8J&I8{OiT(oG5$4lC!l4*sQ04P+NUk4uzxvaiZDQez+1xN9^dboq7(jh z->%64?F8UJWmEiH9Rf9D9P6fdw@mYCoa$NQ9IT4UHY8Q(VoN(C^4Tih8Ise^5^A5M zmhhbZm;zHowjnad=n>Q6mn1rdOiYC3x49-<;7TO2tdE^3bdU=-OKHn{3N{%FHfZPW zmdbffHk$I+8Y*^3MVl=^IFe@8dJ&#hc&n7Kv_EONK6a5Vps+76Q}R!gyrNBb+JjSi z!_)f$WBWtmjjn#JK~X(r719jXe?0y{bG;;W8TBGol($b_$tKYv6nup=Ep%yvm>(2~ zWj2y9D)SYQcgN3leY;=v*|!~k{ix%mw>lnuvF+|BT6aIvy6yg!4R^#x^7@YOK4I%+tg7To2DcYUDvQn`gS z&6ILGx4==t*)DHzmXD1HR&)bls)4}vYspvi2d&$1luQO>E69q@AZ4G>ZJ*ew6fHt~ z{Ou`fr!+NdC|$(}VJ9gKlu)tqx-nuQx) zPn4`>SekrjG9HFf6 z(mLU%n#f{IE$Dx#;ixWka62g>>VcgC+R7Oez#kH!iQx>%jRH5h$vZXPsl#N1dBDp=c zh4RgA@{tP*7JNm-DEgJMg^-nXoL>CctXIBS{L!Z)=jJ(Wn;rSsy406;ZutJg^`CyX{;kug%MT22-WVcHb94?KuXEOQ%HQfH(FgtefoE~a_h8kvndW7?dQEn&4d7Whe^z`VnuIU#7 z^Uuf4x=_6O^3vOCHt(t3{Y35k&(}ZoX7ls!Hox(4%Lm_deEE~$tK*fLj$V?5*)mMn zF#8mP6sZw1%?`u$he-}C4B|Lrp%Ki?==yf5>&zzKg9ydBqfNOLz_aESiZCB8W% zO>Mi7%F-ODt^bz~fCaPs$Q=!a{8#aZlT%^iA3VEZj7_9vsNRVvvwUIYfNVyvI^{ zx0JQb6eS|uyL?J}T(i4v6Vx_AjT8LpC;HUj-0+NU3eRXuF6zvj*`BkYIem6jN_iDA zJ1G^7iDk_RVhemb*Zyg%bPeR!htb!dJ0#`s3zm z;^*p!z)D|fB6*b*cbI`GLOv^L6ao@5W|&KMN+6cv{RXtq{RT`~w_8xOpe!ZF0P8ru zcQIP{;DCgsWH>NRxawJH?Mu?;x20_#>+jode*U!d@>%2Gt{6Y5GJjJ`TD!<$IMOPV zEBNZT+IXtdKtWmX)zNmc8pORI*1<;0fjZ+4RmN{F89%#V{_w2jtrM2N95cOn)cEQ_ z)3bXm4}D_U{ibF8v&Q9*m=@k|Ubx3XXRw%cHoG)9sq(jK3pUHvbdItZCF@aoT7Cke zwptXv67tk015p)}0#rO3gjdMse{p04l_?2I#MQ2q$SqG>ZbI!rx{05sPhBFVER=vN zc3%=!5I@IAo;j^vy4Cb{$^Q>8)XE{Bd<;n;xmwzSUa7sF>3!I%oZ`Chu21r>cZ%wA zjqRNg(mu(rX?j4bU$SYMe~UaVC>HC$`68bopcmO{del|tq(&@Q)4aPJ12yEA+PKsa z8b95m#W|=QGagtKkk;mv&=i&O+(H3vC6mGI)tkIBA0d*q<^DF_PX=^3m zDssNjE2lRwOA1J#KPUw!nL<u+A*|8?6nTTNdYE)dW za*<@Qm>?_o%B1j>P6RaY{>tT(!dJw+8>F+By1)Nf_4z;B{`#@%<-ZEPy5~vN?uXl~ ze6@6I!@Ldk6|1VtSJaj)ZOC8PkTth1V|GJIg~$gB#)6g7fW6<2E$smf@YQ4=x`eeh zUNz%9tH-+6f>`jCj95a&x}L9wJ2%QJ0tecE|8>6lBV0jKR@_qf%6f#R;44ZNtN^V> z?t-icZ2t|hh`sv#HfM#e1{OMlSh4Es`HBf>F$DdueANYCF_a2l1>_^Q5(2}F28n01 z3p+?`079Y=x@9-G;ztq~BMVua2uIdjwXA6U7B&T9p;NJ{2Fil3IN{6WdGJ+R<`Ncv 
zcagph1))mu#)T1G^z!l>IY*>YJn9@3%}SP)M5L8m&G1r05pkgMxL=XTxe@?Y(n!=i zoCdJ*8-;$AT`RKRb4jyNF+9t~W^_d-KZQb8iboxkQfTUW#Nw_$^3?!k6}GX{8Rd$n zMIJZn;U?6;DM+6pxF9%`4 zS1=VvuxeshwNj{6)RfSxHX#>n!p=_&zre{@|H{!m7e~2Yq!Ty9mfe4+Zk}>%u+8CH zY>o_{a2&WAIp)OBu_uO1JVC>bSccJVXW^?!!KWsM{5sC>XS;~XDSl^p;p5u1k2r51 zf9NKkS4KrYZIk-o*z`?_cR#uQ>rcFQ&UfEYmh#NDx$nMo$4^JzxGF7p>g?2nw}v}@ zICSFolc&{8n$gTV%ovwTBONb{aKQ8>3cea@dxmccS&4EuIl}oA{`PU6=f?YG)v=C zq;X2?VIePkHOBoaT^QE*x<}RoB-KY|HOJ*NMrKq+q+g9KsO3F8eO_zU(zcv6>dbX& zT2U$M`y!Y3dCgMW=c}BG`yH|vv9FsFbUI;nH@ml(vdLR)bnw3DRB5lEXTQvBt9b91I61*={kO>KBNZ1Y65br!b<8lKQ0xlmN2w)d-_6S z(%e35S5OxH!IXKr*qOp{ox0EnWg-4!zJjtKD@wtwifvNT61K>h@;5;d=vQWD?IDW7 zt-@DB%Zx0nvfaP&RfVzunsh9Ax=&!3ZCK5u^Ks^zmf%XbZ8Z17Nv^s7n|JwdhkScl=HT2EQ|3JHsQQLz*w z782ICS6HJXHWj{ugmv7k@D=^R`#(1AeBHSADZ|nSjq&L&OH z20f~az^5%Hg{HDM>ND4CIdTg?g-*pN9K{LPpkNhDMe(#y3T4p?6F9m?$f4;g%&fLd zT4W>uBw@aRLcfy9N`!4PxGMI=6!oHFVN41u>?b}wtUwc4Y(&U%Pb8(P-!(z)nbz-< zrt?fD+05t?+dVzF)jqs~4TCN*JyZP~9m6|j1htTM#R*6M43l$oH@eoez&5)G)r_!~ z>Hh6A{JNckwbOk%2qb5%-W2!d$sR-y)Hp>n;MTJZu6B=Y4b9TKMzxN2yD-kRa)R4s z0-$^o`$$cB+ly>%3FC)+H!NkqMqzE*v$G1;j0ark}c+}m3=dJN@d$< z4jM!htvq@Oe&=qH;+GiMMHMq!3d}QyXDq zH9hx0wMMEl=-9%*o2L=QN?az<6SQ38E6!#8N?f;+;SPC;9wEcRmumZeJk<92x2m`Q z-UeSi{!;sWPyCLrl>N@^A1qs5Q@o@;Zvo;}ed?^nm}7O;Ox95SArcdkq+( zly!k(V;~*6GSbw$>9|2tl!#0(BK)=~Fdqjy3GKqluFMIFyC5!k1)==lCiEzIig19h zWDsfzg{#Ez7~ybYjPqGS5~c>7ogRK_ zdf2gP!Kds(&N@d{P7V3lujtSq-}_wWJiGPtb9a4zV)o0gr#y5|=F@k@J-mMQdryV! 
zx;=i|8(EuAj`RO)(2&=LPTw~z;N)cQlT*DejCDLW*12+&Q>B7c3SSBFYQ||RU(vO^ zp0B1yR=T8K3dpUDDZfy(>I!_dWlt^UcKGVqcUoTkQ1$i~?Vo<%_02CmKb_E?y`t~d zkajn~S0L7(eDz<%qW`vs3Ap0dHNKMn0%*^ww9>H#DWvF-OVZWUSrYpP`9x#PTzGfq z2nR1>g82Abti?@KahH^_L5GAj^KOYQVZ}YHN72r^Pe12AebMf|k~@t>cSxCA&B<#` zvCAw$6$baLK8J+fNkOXdUgE8InqRejz*X1qD!;gfpty#>*xIm^hPc9(v^iqKY2LcF zl5L&EySiuZF*B-`zQq{1MDJDJ?^SBCO%cg6_@*WX{>*stSf6hkBTj}Sb_c|${o`sF z`0`I?1?@$0t{fS^^}Wb&$MW5zueahk zCJH8TPH*;79R9sYvV_H3C2unL+ZD=+n%M(cr7x3EuE-ZH-NHIK=HQG4s5I<)UTaQX z&UBbmxAM!`C$uS*1&9)S)oD7`p;Pz@ zA?tXDiJ%4~Ecgl|7PEwh8cYXjP2X3WzP>7ba!Gpktof~z!ie?C&!(roG2i=vVf!nF zHBT6qp`q=ul-;H)+DWv6reK>k?^aFjCS4v%%dNDu^dOevq!QTz8+1afy9QU-^Ax_K z?FDC22v!PT5k$s0aph1b8;#NZc~1X}wDHrMpA`v!snu~4h{$L5_# z-hyP~T|LQ%T!n7mGy};VQvzDxt7!opw!vNO+;Ix+o9^9dAJFX((lO1i8NQ;Xc{fe- zsi%D6`>4PaJ>#gUAVVKz`MY|JM9`g^nSk-t#7<8 zC{7AbkOE_kK2bXFaARm}-{fhZJ^7rercY|MnEL4MUwZ=;OK`BY%Pm4q#PAAmCG!=C z#V--zZj$O-O$SeOeED7Ld!M$y`cB6aFSqS^N_G3ADyx14U*W&{7rsiF*^pS?NVt7q zX6-e;swedqvYJBQ{~BM7mRp2Em~ySR^3_l)T>bCsSHg=WHv|U6u6R)o&OnM<#Y=PD_5)| zR9b`zUx8TYJc<{~I#CE^iMqA{d4Q~B;v(r?iF_CKDrG}7eL%%SB`1Fh@?n)TL1cL% zj~5E^%W5Fuil{3@C*Ik}QCHv!zEX<227<5%YA7HJ6xVv22&5`)QVOyfKrG}e5md$6 zWr0|H3ykQB?(`ckU58lQ2wA~b!FY`ANLPl&sQRGCANwFAmSCJ@RiacoD$P8DN1Nr+^Fn;CXfNcl}%{%_#m-Z zd8EsgTWl}gGVS6`lg`~Z{v>Xz!Q&3yIPParV}H4E^e=_79rAdyq{<*t+L49(XC~zCA%(S0~-SebwLJ-0;Om z%RhL3=U2b3fAdo0f**%F|6_)CwVhY{B&VjS2)w=*hPwPb%Ht?$@CsZZTPY{-)%E%n zd^OzZB>L4@kF)4k(;_Z7C13QVbK)wOqp-%gZI{XdWYub%;)d8>W}<|^^7y!UoX=6Zv)_JR88;rj`x zjwmuQV@@nXT)C0iC=iRFhU`^R{6a&@3R40MIcZBoj5DQvEheI;(`x)~8&ll)u7N1J~m(b={E)vjUn-VrT+#4I&W>JJxm!{v~j$M0S8P zWJiZc2w8VXII_fY8D)jv9YQ1_@87EXP_*hntK)HyHQVnZB)(KD%Uo@2v6d zlScekFa2nK{2%5!-_~z_PPgI_-NO5f^Y4*ZWCK}|#-F!UE6@Gu(WBrc3{*;iQi`ie zK`BL{QjR&T*C2Jj{)Wo@?Ryjw> z1BM|jtkm&M(T(%C0uR`Qik(Vy45xUi0}>^t&@N)(ZT&i(L;9T~)GRNyaT80Wr+9P# zT~obP)BV~exYas_bS5I|7D;ZAeRhHE{4TqoHpj3wx7ZH*$l8QCX3`bF@0i)TtTp0? 
z5(3?7EeTh&0@K&@lx!DRB?c!2o$7NoYfE?IW7Oqr)u*o0$IUmz&&C`m1*A$oWV4q^ zQF*ioO##u8f28Cat@n)7dWTEi!75wFw=Y$TOyF+j1m)N(Ud|>H_I3go48nY6LX?K9 zcx`h4v3SK>xS|z&WtLRk(($w1U;ohZ(HCuhd0+j^YwZs{-FDZbZQCDc-FR0c{ww-} zbJy3Euc#?oR$IKdF=u`Q`Rz3B6UrJQiyA_58pr`7WYs>RZhA-^L9Ys5O_22~g|9@g z6?+3+>VGTeMks59D;sb`fYpDc6yrL_|B$b&Sf!jO%9V&@7d|U%2m>d->ksk(0apR- zLbeKO1FpmlQzi#P+kq>_1{Juni|e+F?^5`RQ5w0r9Fw~g{R#_~bGrJPe$|D3HP9l= z73&DG-h~vag?E%roXl5NdlFif+%cqSh%akk7LOBQ9hdjWuu4WLrflfnF~mo%0#c{A!HQVH3>%VL#lD;JcJUoqSy;fILGTr#6GdO|kEI|J)WnG*PjOUe%F0x~ zJ%-HQ0;DPyI=8U2k-gYKh1l9#(64-QRo<+47S-mJr4r67dGnyWd~iU&!hD5&9lipt zXc3ApW1AG6YH~CgK8@32n}( zS2`tKR``nk;IiGd+aIdG@3}_I?JvI9_Loo8AO5rR9|wB&9_~JPrvGBSv7b@mYkZ|p zmU8@m%@u$C_myYh+OsXoSbQOppGB9jPtvO_(%*lUd{e*T_3NG0OPhm6SYkP$)>2%V zi9y1gl|(;TVixqJtkkn}q=2CEyQB*E>RvN(SR{^$Sf@uMmjBWvEW1l8-YuoBw}j2t z`IqQCvUK)w%z3G&2DS6i24c{ts}tW1~iNonD; z((>0WE8o^`|H}B_Vd>GMx>wHY|K1?|vsL=31HLl-+F<~$PIT#ycMj-Rz?H&RxUf#P zn@_Zvj#9Du{)6 zMWTh!{TP=Zv{wE{MfW*jK0RnN%|XN~_==zgnuADK0NC7RXz5BUBr#LL<%8mqD%XD2h}7O^=P~Lk+xMceATYunn@nb6Wr?U z0@aQo-AF0XfC#Mk!4qqAlK_c0iCq?wjSksS>UHlu^DG_z0 z81u5X=u2*wvTik`ZP3Rq(L~QSM3hLOS(0C(#WSHRy3ib!W%f@n!dC%Nl7GC}JIdf4 zCe3g@pPjo`rB-%2%Pl`G_A1dH)Js~uq$B)F@Rbm*WVjNDg;H&zMTprFqgIzzI&-Oi z?}0Y9JY#Wx;f;=mpKZVU@wQzLs5al-wEDLCrCS^EU%^);%W5&VXV0ron^~7w(GXo+ zkA8(1!bdra+U-~}!pvq!!xVqfAGGli!D521+^dz~P_Yp38e$D|X`=oQ_=*X^f9JEZ zA{G#34&7}lX=GSC@Byn?TU`AXKWfGekz9syU$ zT~4W;z}5A9CA-|a;47b;ZY2&22}>YWkuVx5%{6o$1+f&qQV?qZtK`uM;yCx#L>Z{j@~f((4bNK2aWyi#wq)5arkkl{r++8$0oR+9`Ag5vU{at z&{f;OqvL!&v`u=Y?7qEopZ{p}2OkFCx@uBIs>71pxvxF;)X9^NAFO=itZwBq7wlr+ zAMf#{XJn0AXe&-FOkAXwO!QLSG_7igL)9?v%E2zjhC7~xss30v>i@`B-nGj z^FQ+p@PzYDtJtoTzX>jEF>-WEdbLri>5`V+cHATDY9QvTnHqXzx$Ie+EwOVgVW!-y zSt2DaF=THLrjdf}5}y%BSTbT6QLg4ZV4VA)q2k{D(mVT0ZrA7SGN!CIu;@9kLhF&; z@08SU8`(2Gq-$Dm2Lakp){MX=;TVjl_e*RE%V`IASo)m3yt{0RcHSM9x$;urkpS##YNc32K|7=KmkAgfbddQpnMUFItgi!R~G4%4YN<4Kj_ zm`Z=7#rSihX>XnBA6E_Uo!7l}Tu;#Gb9<%xKQeB8xqs<{eRJ;UE8D3Nqk`MaN?cVY zaJ9jhzSfw!%0yu^lZ{mZpA@IMas;Tj3yz|$SeV)$=0`Rny>gr 
z=oZ}tGT8@p;?ZJ9vqzNKG0-sCy=$B!DpuEIge>2#iS8{k{MAXNQrawOsxKD^t)B7y zLUGDu)KqdysTa*b)ZHbd&Pwb60^`} zk$Q|e$V#grn4Lit;;%GXNh>zvC{06!uMn?@!V-MNdK~epXOqsQ8twig?O5FZ_DTDT zZ?-@7g0NuS_K@JKwK88R`jryio-wCBxuQP4tS-E;mi2Zlw)0A>C9T>%k_7DPX~DIy zD~M&|TZ@D>#>-owhqmx3#a?kvYul(|agvB;7 z*aN;o#R`{m34|A`tV7g*goSuDFpei9jo5u`WuCBd71??u!1(3W2cSU}s{C@BFoAod zH@GI(5e(>(SV#DbXL@^Zo(6xcZ)S&An#wDU6<{h@%RfhjX`J5zl2M#ca92n5cwyrM0nv?aKtIT*MqX$@vsRbdN1f*AKJMA2#wVT2aMDkM*} zU8I8oRPvD|E9IQqMXFW4Fj@Up+$dv}LRJ73X$*?tiW9&5c3;G+oHq7Yxn(xHv$HHq zMZ}70TB|dgab&(4*o>nL55@`!%bpgYd?bWH?DdFhfUIo7YbS)&P~$^t#|77jh9HJ3 z-%G>2E(~=&dyC7NTO5uLo__e|DF=s4Iyl7U(9L5G-ZbjajU#>;G~(w$BYzq+cK;xo zA8wjKQ|}mxw`~8D#od?-W9hMru z?(fgNcID*VU!O17wJ)IX>uG`iu=PLU9bJoZHPyG0-r^+Rt0SGyk94cF38=fpwQ|t( zv%_4ghB{OZ$J6d`ei*iPVz2DaaX!NF!Z3%k!yL|tay&K6>EuY)Gh;k4*q@smddWHQ z3Vf9~>r%zKD)tZVdaUum7n`1bQ}xRG?eNt{-*$faQ|EU_)W2Tntx=h{wz4wS^(X%` zbn)kR;Zj#z$;!@H(MGTcmpg+oU*;U(3%%Z({~(#`Bl zoYRxJ#F)Q|GAT-GXyy>bZ)`)CoSEE6BB zS!;}$t3^OJ4jd4R5{<%JE$|g5lz=XxqPB}92iXHpl7mSZEo5yUg^ps-&-fO<^fabYVx({lluUafWc9;&S4M#e(g0H%@qOQ#kDiW3gSEt$y z)X6qID%Qak)4m4N7gfgh&TId2T>r{J>8XE8_kCdA^nzyT1De^lYbw|wxI@A`MkgyB zsS3;b2218TOZpm7*opuuMVsOTWP+v;skns`P>OR(VJZcyxXaol;I@ctz_ZjP6{pbJ-qVq`{7bkQJ2I z>zS;Euaf3U#Iw7`W6J8b3vFS~a>{IJqE9t#L6+$_gp1&J+fdevYn;Mby<&S}3up_H z+W=hEaPPE$dY6c98h^qJAIP3)LqLM$9wklmY`1Y$!B=(xz0RQ;yTGo=-YwGuRL&9I zekq3FEDK8n=%feccGKJpE9*qVp#{vBV5R^WB5!|VCVida^x zvYsf)6JqK0>(~Y=7z|yOV>b9l2rB&x39D51#PD;Bor~kJ>kM{DB)s{4i+f-a#Y2$K?)s z4YfZ!()s8(mtV)b9vu{k*&i&iXq<()@< zIQsC9Kd*T8vy5B6o)G@lMBi`SqpCf_Tc>$lW&!$?fb)|BF0r_f2-A`7=Weh&cC*7- zE*1t)yM)2s3Rkz-%E_V_vHrkU#JMYcH6if4L!9tmCC;jxwXu5reGPX$)%fVEtR#}mX8uF^S$p*5ce4(d4-|9kQiE4vmuGIjaA8GvO$ch864E{`OwFYH zj1U!888+=Y2DjSCkECU%A9H7w+# zZLt>OiZ1LfT_Y8+rg@dNbfYDXaJs$|&HfO?$dscd}i3v{m~$8gAqZ7-gYX?uoDfdVkFTH?N1g=W3eaK}fXAcq(ML;O zg2mlPb1<@^H+GIbZmuB>ffU@DtpT(YFBU=;p;F{Fz*?Eh2$<>zu_)5r*_cU0gI}gL zFvsAX*5{W-h-jmT9*yro#X`S=og5=tSkmg1)CFHTN4GkLHn~T1OmMp58m96|YYNGy 
zS12-V_!mg3r$=0$OJpx`Sa$v_ZkAZ1;}F#A5Y!J@IfwUovSc~Q$Z(%Wd@tW3ah4QT zU`U)Rg_nwSxVZU!JVhBREk&D=F?IYi;CV}3qfJ@e4`0Dlx!cXfcSu}uP-S;Y1v{mb z)#msmQo;f$pr9+PNE6BMV6w#{SRa=uB`}$oVRVaBvw4s&!{Uu2CjT^hNj>5flYqpB z3KKiJl~KH-GA*c+`sEg(g?j-j;jt7J0RB&ZkSi*T@D`oPj6qA?Fa3JDleaM8l0b=mXll4sV&mDUo(6qHlLayswyTDN4O zUpd6oA|BErgs&9+YP@$f;+4Wz5D{=SltRCfu}WF8%+b0ZkU_uU?hO=)pb(Lg3;M?+ z{ww}#3Suc@6i1qd3Sxm($_YnkO7XWVaHa5-f><0Wg|EiTYX=p$vg%jD+%AV-(I2#r z?-9+x#GdQ@?F0S6)K0NbHKWTZy_3!L&gnwM^33k>%I(I8<-;IhL04d57cD~kMZ$=tFDj%CLVv5nL5 zYzJ3O46d0RS~n%MfwGHio*rI@E6F3ZgH(C6EFwC|l2US|pe!<`C`2u!Eh496RZ9FZ z;3&SA0IgD$a?T01F~}+)AH*6M@ngZF{Nr?jMijb}k_d_qmb;W&tam|8NLZqTRAnvk zTlkLz6slpWz@pBeqHfABuLIAOXLcK z7x|ApH?BC(ZvNc#J@>wG_2TQ7FF$?w>`RARHavT3`2$sR@9Oo=K07VsIH4t6yG`+~ znd*0SO3#mXhWm=18%#NhdHQEiuO~>`z;5%-L+F&4JH~{^0C~Eb!Ir2X#vxHLQBlvXTui zDw z3`GTYny1NMF0s#--;%Y|n6^-xwoq5NMq;#(6MBLoV0?i#W0{f4Ty88_CuOY=Vg;K` zvQ}F}K^kk#6embnEbUboL*Xl;-6=YWx$E>yla(`LxK5k5Q9JiesdT%g0^j>KoLV~3 zo7<{mu?~F2Xkppi=F&Zec~43UUNo)wo3!TS+IQir6Z+>4NOyl^S^Hei zq6fR@+|^&YojF0mKh0@tbt$Vg6eqBB@(P^-5A^lGCuquAz>?_q$&|&pQrrS!p<*Fw zfmI4$DXJHDL1n@OzD8nx5DANh8WL8@Vnzrpz*WLLy;ATMaD^8Oz9P|`_?5gb_a->kHv4-}tVK znNmQUX@X1Tw1{J?AC!ong|A#9)i$10w!y8wNv5$*jkf+>(?gq4#=tF?aE(Wd-WHoy zn94Ju*E6v@G|$8isnC4AZ>BoDq@TGD3|IU(JP9%LI}zx}>)?pK$FoOwHG8vwE2?m} zBu?lN-zH^mkzyBXBWIbz%FID|J<+A+;8YC@2QggvVY^B-_^0=K#dZ3}_9bUZArYN6 zcAtLpv((i;V3|a_;?;VMuh6kHSloFni;2SD@fBZO<14cXzUt9Qr!J`XA8h~h>(;m4 zQ$7EN>XGMEd!Fpr{b1|5+v=8WuUGgA{YvmvMSW~>t(Y9ls`1IJLA-*moZ@Q)UxnAg zS4E6Un$`XN^HBLU*W$(yrPs< zIFmzbP_a;}CWh8QS-=(8Ns%Br)cN$yw#ROm!q%JPH%~f+?TXpJo5t-QGVc2!}m}*Nn2O9_2{v)p=+N#G;h8pmG9kjc}4TA<|EDIg5BToCqvDU_R$3 z2UntB1?67MSbSyS&bloR)!qGE!xL||zVv>_+n;uS@MYI$dpq|X>N;?u<4jdgyGD2b zteVyJC;t`*{=DPA?-}3;mowYNm92bXtMHY)KZEyRc?PM|ARV|Wt=v-^Q`GE}*yWer z9aCZEB0YINvt4Eo3#$x)+i9!K*_+JtJ4nHT0cPHB2C?QoY+U)IwDuWMoGfSd?GXWB zSzAo_Ov2`9{EPYni;RKAQb3X9nXPBBms4_wG8jVmwnIddb7Z4iObfljk z(7v3pv@3m44_U`d3(_KFelTsWp2y%J$psY__T(%vA#){EbY(8mk?T%7GJU>2jm)cZ 
zcGh>YRvsrjh{b3j5*F>kqO}r7$f^)LSZYXG&<9+_&+A0LQp79xifwv|zg>8nvAD0d z!-zyH4p30$0a71VjnMLbg(y&U6~!t1}(? z(;eEAZT;LuK-P~)Ql<1Nl;d&SDe->#ysA=Hl|F9C^k+DX!1!k2BewT#lwF&tiTwOr{nghQnOzc zeUolvH`XMPLJwH-M8z%WWWWJ<=4aak!Ob#z7ntT0n*~ZJWy|8%usOHrRu()kF@>c+ZM6tS#A;D|8(0O z54Uc*s}a7Mx2X>O3ckvmR~KJa8(CP3gHd)~)c{w{i8YR~wf509g0DjACI!|ge8ngJ z7@yivp0&T@tLrFBvDy`QwqsJ|?SLjz+J+4_jZ2ofN$a!Wr zMl6M|kg$C71w5eD03ic>g^*ROz}3Lfidza|S)mK=3Mgpx&2Pc@?VZ~!i&rfS)43$J z@+LVoykUZWEju(vd0b_VfGAgSbgv#u_DE2JT}-R8yT>!54ZFDF8wb+lY9;+Zac5yK z4;15m=vRnloe?uT!^^vZOOVbw_!7a5+r_=Y#4f!0|BkP$K&3oDdGPf&a=TJI6+*en zS0h;Y=C)Hl`5oSQ?VdR*g|FN)+bP$KHkWjjb6TrYYO7;Pivm}QkKHz=QSen+h6yMAi)N!7* z6Me-*EsQk5_uP1&vlBee5tldC>GBwd%Su{1$)bv8rH~brrPN603u4o>^Z8M(=P+MU z@D=_mn}D-UiI+kPFJ~^fvUGRtjwc)UyxjEkTP?4C(DAp=yFdE6=Zo*V_Z{y1<+Qr8 zvA-K3`%k_S?|c7$e#v|#e``Tlyf<^fOJt5bW5i#KNz8b6>!pT%>G_Y;(}TWbHy(b^{0&B0geA91RPn7Ag|FE8Te_V`YB|ol z4HVG`xZ0-Y$Q)nMEUFv)NT;FrY4EKC#+VGlY!9yLh?(Qkv*`Kk#7og&A z7R^f38ZAYTJ_Sjl)QM0~IkFx=g0FxIuu8y^9RJ0aNH?-xCp_&Gb90cDLRr8S-grbU zir<35gBk{6(IQm%isW-TgGmdFbO{x{;!7kfyjb$yXR(C@xI)5;nW+P`vX@K2`MrK* zJZ5$WWcB%`cL!!`!V67~Q7!IqYVV{T8}G}$DRlY_P?ld>9|57F!c=Y{J+}TejtQsS z)6d(7)wo7j$5;z5Z_dQ?IBVd|Gw) zqpi2z*R*P9!~D&4Wvc{VWy}@L!SK8qhD3bRYq`1+a1~dLcx4||XB#de8u&zmuMkoc zzCywp;aMjT%cFjnd&5AvvF3*Q&-hB|75?#Pjk@AQ$r@E!fgBY@i=$Q1;zXGoM8cA5 zK)kY2mJM(f*db!C_5y2r*zd&$E!n}2p^~#?^`!HxDmv1O>Ote=paQ(U`#IFcuJ(BQ!-zPFi$*P zG8D<;5}BWc;+w)(qMIr9*NEfzM^nm zAzu9fu3CUAism2%UkRp)Zs4e#fK^J-9GntXPl25jt>YITkAnvNc+2o3Lq`2LYWlYmy*{>!e#fie zO^3XDH@vy;+2beHfAaE-MaiD)3%7mu&U@{REB^N3`uF#~bxE4@$WiaYPu){~w(;EW z7<|?~^zw|T>T$uBCInVZ46GkV&XtcygtPIvf^sDS*xk>Mbv--A`8>&@V;s(nayUoq z6+oq^SOa9`R5{9-KlTMi3&nIH0a-4mDf)xtUrh)&<&ttSvg}IE$}21Gs=e)*riWi^ ze&%hGuiD@Jy!-F}?EC76o*xc(9zNSy)vE0k&VfJmE17WqKOK1Y6~AP@!o@)vnYe}j z8^mBI!`y`AkXv}FR!W`!t8aDP9xs#loaHlxdGn&3 z#>(krwJY#KV_#Gq08$kAm}5A71qxIV9V^;q|xh-CK& zZSx3G2jaF$ZgIm{7*_*U`6M((W$Q9#Ns+ng@Z6S!vhL)WeMuF)Ni)0C=JjSS?oD6B zU|>ha;?7xHrHrNRd8>Q!R`w^)#ZKQ_xKheqWXM>c0lF6Lmc)Xdc|AZ^>byQi2(y-& 
zXcQ*Q?!ojviy^^<{lrA9dsHgiC~=av)Q_A~{NBY|P3%%B-zm-7&AAcQD%&aAg(8}An~^|QjP8rsmGh>V zMLCZhkdU4}xTOEON%wyMeoeYS2WkKg=el5Di&LZ;42F8 zigOAh7E5O^UlEOtmPOMLElY`UARP+@DIllIKf5b9zmE|-w5;%aBZ%&q*hx2#aHud~SgzR?Ig7Se&>4)} zGeYWolQpneU{+s5k&YaO@Zvs38WGd@Bk{)|ZUh!AFNla7n zRAj6-&>zg$tRpXu{vdL0A>$Zp2(g!<=jdZAr08NPJXar`Yr)y>7i|nlmVDxA4r=+P z!STkRP<3j&Dk1jUi%n7|@1Fvn#BiWctK?F!N{(T;_TDPL!Us@GJf0UhS5c4``jrL0 zR!5I?{IvSpA6v0tz4>ALGq1Hh^la-rk2i0-zj^hp#sypI%U9RsEvZeLQy*PY6OvO) ztR-WEu1U3uas^!3N7l~>trzC@ka`dcU1hv)EpR1TgkJTdycvWQL)IsvhtN|amVE@`h(7Le-OTM%;=&pVj*V<{VJ;mzVgf&;49zU4p;)B2)bro z!i)7E6ivG83gL(Xb^P;%c;%TTHt}J*BC#A(H_tOoSk$Va|BJo%fU4?F+xAhh_by-; z1O=p5>AiOl0l^A_V%J1XjOoTi)9a*6GMPyxnIw~#9upH2V@xrLT>(Kl9N++_H}?Pf zosEo>N!~T{U*G$#_gi0*XYIAm<_L$Q9IkUe_j5ni-=0F$fmQb{;fmBw_O2}6I zG89IA_LXOM@$7qh&2X!DI`QY-}laG{z^zeO?u_kio5FfJ=JvRrRFDCJNQ=Dt4EYaj%q&oQuX;Mh6k1PT}G94 zvok;O|61ZD;rIXk2c7}Q>UxPcv*Pc~4RizG(9kR9;k@ar;Tmzm{y1$Tz*;dNP2V+Zj}Bc1qCBjnK^r(a#Rl&h}R?2+}PK(labiKK8;uW#$?Q zZDDb^hVd@vrRv!}^#oq+eo%733FFu46Q`sbNX!CLhn~^x;8tLwc1eU{X{tIRM-x+^ z%_x;XEGso>;#POWuj$Oev%OZ4wxMV9gHlLQRa8k+E?J;!4e6zQX$-REcd}xYg{m|V z=`CW-N?vP3yaKD>t0J80xQ0rN*xG@Ov`vf<>Y2(1uG0DSx=qSKsM*|?ve87*2ZXQE zHX9MF;49G(M6km0y}geODPYxd_7c(@++tXJFU~6qEkX)!IdH`>d6Q@na>9K;EF#~T zC0qx){!!ZTSHr>2EzkXE`RgV9d-b|wZQ7IVI$DH3G9gG?@ShRuY`5WDw~qR`OM}LB zLZSPpQS*A0>iKiM4}RZq_vbx3Ue}gAqc6CxH|w_E#2u>W3U$PK+JffDa($GLt+eD^ zF$f7R@~t7+S=*rf7A(k0F3~8o&JRLa^u;*JaD`e$oI6%|xw3DTSobVvkP0&2W;MfK zm$2f?M7{z3E8q&OqBDqiB~upX=vVL+Ml4RC8qPT~#}ravKu6+T0RC{?P~NE|#~NLu zm2NS;&f%R4g4-R17pn~|3#?igtbk`7qeRR^+(WdCU#~9MBc*N9 zq;J+2qC;9oDPdHSOwg}#_6xp3ze3T9Ue9O%UVU9qrnEF&3e7YJBB*=8x&O6Du^ zb1nEvZV{>s(uFGZ@ozfc`AG5SH#?ttQSrc2?RP!gviD$1<-VrXyPB3))~Bzl7krg{ z%_sTVl7xE3Ob7=MZNb30*@5I=H4uJ<`AYuP#soU1AmB>02wfY;xmn@L%2yzkOj-Rs z!5jDrxRPtY{r<}n7xG2>bt>>lZt&fRSl5}V-{o#)t$zQNtX~bZ1?3YVUU_%P`c=P* zd*vWvv_eN}eBF5Z66FP5_k*S99Z)@D=qBzDgE@gtQ2)`jt}fRaTGO zIQv()k`3@OU&&UUm|R1j&6D-UQwBqU*M)PkDfbE}8WtQK%(e|50^ z)uD5)+RnQ=0+Wwx-BjPEnV~Io)^LftrI5xZqC5YUulgw~t&UALq6^Oew` 
z(#Ts7_%wi011I2?%uxeZIN_pCPLB*@Hy&j-yG&LBu4oRXc0yJ{yh>_!OcHzrT+NH` z?+gN0bE8}4Mz!6@S0I*Hh(kQPRlkxE3!gv2(Ioe33|F+(hR;4d)b4xRsi%fcJ~eXM z_tdcQUyT@dY}lxygNJ`Sh*7~2#|Di$W;6Ozn+YEboALI<#cvLGIy@`p!R7b7bMUht z51;(Hs~0DjeA1=l!cb1 zqD$zAmpEMI8f|coW@m2KT%XoiUX2MWB*)-p_t>6!K}{}EoeRUSMU}L#y;X7xYx0Wh zatp6_jk=P%g*?d`kEphwWUYT<2U(-xnfk~aT}-|zEW4fD(crAsl=ZzyWu4J0n=>j^ zF{@gZ?b5Bgmlj@29O8=cH|aG2HX9`KeQem(Qr*u2Fy1 zqWwl;IHk~^Rv1q!4Dc0ht9}(rsUP4g^sDbXwO_UA-mUBT)A^3ae`vq=>(>3BD!09+ zD}UBpd|01xn<{a;I%bnDdV?{l+yq}mZ(tX*p0xJFN&-~z^IOtH?u4*T@s}>Q1v&P2 z39Tk}tI0|%VHC2}Hk|FOgy!FdNPA!nG+`7ZK|x$%^|NfmA`6SYEh`|`==rzA7DDo` z=nO(xGGEEKg@lz@COU@bR|)F`+t4wj*rXGjkGiGzNa^&;)SzO)SKi5*C2_rh8GTNn zB3zx!EAKc1`vSLk%IwDl1ya2Vsa33d*WtGRGEQT55G)i2Bm;RDF(NEf-)%H5*RPJhM8Q$ z&F<`Xj+K0tN+~Jk#KiBeHA*UGPc#Z)xMFOOqmZ!Z7Siq$H^e(7k#xn+v-sJT`AQb8 z#K*B;S@}w5Hj>f+mR+qkp8TQv-A_7RdArlPh_L;@BW<_b-@18!%bJ^-3pO=mls3Ru z!C7@U+g;=9onsoXwa*T&wGXIeV9mk5(ZNR~Uk&gT64nIwdg@>K>K_so&a3Nug*^3d z^3{MyCAaykCaYH3g1{A5xXB&l7*d(9WDC~y7Gb~bihGjW4qQ$1QBL!{p+$H-|4Q%` z7OYU=zoG|7D0HXa<|~=022cvP^2<~86IX`7OSw?4C=6GS73MUQEBK0s=R_1kLTkTx z6|a~V*FHD4Z4Pk7%wSa8Y{mv7MNYfi7DT*~`HIY|X#v7;1zfRJkd@5<$?*9<4??z@ z`VDY3jGBCE^tA7(5#x^!8FggH@FRn4KOSuR#h@W4Y=(VdGyZ)WhgWPDJv=?`pl9iQ z_kQuiV?TaZ^7P@N$L@{VUGd_jQ!iXNdG{x8X5Vw5^4SkIJ$^R4=&Px&@67W(>)?KA zQAGQ^rA?DOE`fP70$L|{FlbOs5ZnykOB39Fn&5gC@oLJV`pFBMCM;l&^HunYhbJuS z*w^P?89hhrK$Mj$m}=kz#2W2*iKHwLYaEl0PG?6to*BF7+!U|NbHb|J(`%zvU(3I_ zVe6sB+n=dF^ryz>-fI8z5%jB0`0Ci#x)VS2p0CwmZrAkxLgy#`m-RdR{0}_?*ZGQ< zVX+@ad{x0$;<8u^DWV#TS`9YZ8jW<{>wVLLzj99}$((QlDL7LKUq;{1y7zg7w3PSn ztIhb6nYG7fg2n=0tvx7}Jd2$ zWG6^erO|hV(k)NvmfP!=rN$$=Fa{%yo+}lT|&HCc9R$(%-T+FgSq^m#5wmt3CY}~Poa&72Y$DXy@cgY zNn2miW14#OOWpIQd*7;3(eL|OVfePg@SQ^cL%SFi`~_c~MZfCN|Jb3z{eGfV`}eA@ zCx2|c=j+CO$D8(krri3vmY!qDBgU-#+Jqgt_${W`N;CQu^Pyk>6hwkkT)75MQM&cJ z6Qhw<+Z9Kdvgi_uPG6--M3mnwJXb=}w0f?TB+(SF7p+M`jaWJfQ#r@g3J)ItHYy}GC8hHu zY!MWlu)b&A-BMzCFMZ+I(w?II5_l=XuMlrIKg&Ezy(>5)qM0(aLXMn>vlJ<*wT$B_E4c&65sso{tl}ne7Q6&CaYbe@AWA-=OUNCpy+RSo 
zz&$C%E8t3a+O4*$>zi@9>;jecLEU10Fr){*VpHn8NVS+Bw2lzU{XyB^j{gd*!r$(g zta3_GV{ylbMM?vq?#>B4@Re(-dP$mUNqX;+e!l93F)(8Fd$InRue@^vXnAEL8FhMQ zi9GWq8G^4ws=3&7EH?+6;VU2(ePgVX5U)f}@H$_CSVJ6Y2iaE>4^F6;T|hg0#r8e) zpG8T6VXP=6R?nbiiH>1v2h=Ar(i2+2C*K^EPj)X%g^`}4m8q=Il}UI4c63^t0ug0` zlEF!qt>pgTz;!MTT;asc+ABn=qDeX+TMh7)j93GFg{>XLaZ( z73XFrp3B}KrIbkIONZu48D-+v9Fgq4g_f|D5*-ta=sDX2UlkHLexG^GVavLQEM*T_ z;H!;~^dV%ez0a`bfVuR3iCCxHeHO76WSc2$qtCxs$0-jD}hZPB51EGHIehas0D+d~*v&;O|VTb1^sChhSy-Pi5v z?-W}6SN&-W*Smy2cj<`6`cbJl+o3(vu07pqI8xnv-*=5WPtQBRTEZt~kO-6kR;jDop#xRw#9Zdg0!eM^$$(_@T;G;OOMAUzOmlskyy7&ziF)58E&s;Y1H~xBqUeUe?4GcEy8!q6-WG6sjTBH)R5YdT;5lJMo0V?#8{z)qERf9E42l;(jsISQ=PJ1 zO;&%xCUwbO!q^X7C9ju=qh~~sg@a2XrLat~ZO|)93+ye4l@PDo!=;5jof!pv(TSg{ zyCk!Yz;^NG2ltc^rzooni#JKK|4JxVR+Jj}`ToW0s(e#Eil1(+^z-@FufJ=5|ES`R ze{Fs8#V%4tZ+}p^rtri@-d z7sX8g$-1vuvur&tpGM zEY2$x;?+FzMH74G#C6dkbWUSJN9UN(OVd!;-4lD9lY2=~#)Y+*DZ&huM}`0`@2r8k z-aE_ggsD6-6-%;&i^)Bs6X9qvjl2|vYnlSv)xxAkmcxK3V9iEChS4}LJT zEd1nrEj;r9%e10(n8K_gV~=edfk1RExp=@nOnLM8twJ28DG$1V))EgDl)`hBrQ|4= zd~x8&H|3Vq!O=BS+<9?Ehg?f?y4|w67NvE#q$(EUZ_iY6vLKT&a{MmSs;HvP;f@NwErtF&R+l{YaOodksCfR(C%;mb-1C!5^!xMtzM1KMak@**Oi!Y8 zuiE=xu?xCv7f{Vx4ED&|d9{faxzANQA2P41r+L%}XZxa>iOw~XT&{8y;a84VsIl{_ z#?G%EBS&B@r~<1*iJ3uS8Lav3)|62Y3kw$EU1JuXAM190TEMx*X;=J~pHJOTUAn(< z-yvK=g_XR|BsA|IW#cFAM+9`M>iy zf4|Sa`3!XGrDr~qW<fjhp$d5G~X)JKUwn`Xbui|-SM=4rxb{Fu2XleUHe^=_N^<*{omGCe0g=l z7fl;KZQt~6cg3I8#gAyS@6eKgNiqZODeDFt;7Y`^iwgi_CK=y39nuS1g*r^o}))kyVk7RcxU@2=EZ(&bns?39(3pEbk^-AK+moi)cg! 
zwaAqA%NC9--wX}u)^wI#V|v}MB*cP19M$M zxu)C_OaTAoI1f*zy05@{anlK!hh-)@H&i7zNW70yPw*Q z9#_2jcf}uG?t0*@3E}r-D)c7S0{ECOKMlZgGjsy$go&to%SP=`j8f}%TWVn)*EBRbLj$7O& zBNiXxoFf@lxVHE_Rxzptb{bHzWFPx*N3ymWIL8SOYZ2fecPGcYH$z$DttBIte4Og5 zU>go_HQirfM+J6H6aB$%JM^oiy$&JWGedi334c3laWo(na5a||&#^u8<9b0U^s5C) zs@buflw-2mB}FR~Evqk!=mzIx6?_F{5jV;kMi7gmXQmofK?t%|zgJxFRdy#30)e3R z&^V*8B*9eNbV_J)0z490=SDQq6oap@w~bqLRhX}4T^Z_dd8l1AWjnok$h4YK_6_jW z@L6^2BpT~>ZJg(|DS?f%Xk$gOE4jy=1PS6gV_W(B%o8n4X$7>9mB=gww9u?5unMyl zYL(!tG_gq0ExC&$ar9iVn$kM~KOPv5c7Uk{_-f#aj99?cK!I2T1zb@)2C&MRWQLUX zg~_5nNMO4hD{9>~DENv%QQCs=6$MuHla+r1Ay;e_oZ?$G#rxt!*9&7E&yAXScEt3b zhfg^@V$!!`7!@4z#gGx73?FmMcBC*|4TY};ePA;L3G0Z>n5S*V?VB8UN8as!d;Z6o zW7^KUj{bRUqO-?_#5K>~cJJ{w-c;0-{^^CaFZ}hR4(Ybn8uD*F6O{X1SWesGrOo5! zoMSC1|Jw0f%?P|`7ktqnpo*TLgLkuoR|_hx*b?PkH^ZxT+LEhN-D@T<76oNtaTjET zcYS`<7!iI|MT<~Gj9U3hh*vl8)u=gQicnl!cuD4~37!`nLN6}KzPz;LeD;>=jdwNP z@mR}4&$ayFFYSM_^3~ry?SAis>Z4O#KU`5acA8|q`n6Yp|LWSm!&Eok=fC<~zklpM zdp@M4J+^sF-cS1jR`g7Qs4%kaSHVpbTkN~OF_VqH$kEk^RjiysgY z7S4A3SCx-Sn{j17Y*}|uzxrOoiaQOE zADCytU=vXwA-y_m2xU1ulSCIA@$o1NTkZMg_A_MWgDawTP4S!s`-JJ zaJi?fk}}Jrg&`Nia+J#|rGT_%TvZ9HROuC{Qo5Y2dO!*(YYO>j+6Ha*7E|sv3!f@`i!pt(7WE0l z!X=fl*_eZs-AY!na@CJmR`F`tO%lcD0j@Yw%XXSq??==!rd9T$UuAC56z-O8c}Daf zp)dF7gl{#Q~7!4uA}Jl21R^nY(E<)$-Yc@-3o*TGSx^XH5iR1CqV~d+m7sEq z>%sIMk|{Zdwt2$colqjCIZA zi!wjx8dK*K(Rc%2F|KtDxS~0Tt$jee8s}aQV!>DAmk4u|m4#Y_LEVa2z||!33@#iteVN`O{C7jT7yHNaQE)x0PT;?kVXLd6o5*&|9gk@~eD&ulat9a9XP2la!=(+W;S!-)zR~-2l&c%M%9pMRfDHq8ST)3kcHzNi~FcW zSI08HX638-(H)L4tbXqCBOoqY%cpfrY;j6%VGWH_e2eUW0_Wr}Eg!wIRbE*d{9__w z!b*nnV}`62rSQPoK`9|_QlZ2 zpAQ}Xg|!>_38|sO#(ros^v%J;j@Svub@ zt^UI!9vfDs9oY5a#gCu)>BzoM|C)0DeKETqyZ^oJq>58RXFWM~!RrhBPTRYlpY48) z9STzx*Vy~k&IqvbRnVpDd{yt@C0vMtue`2J_o$ubUdK+ZDXyX>FRDeq8n>X@s$3yn zUDvOmtOb-6rDVRc#*L1geNpDCQI2Az&X`4KCV8Hp8*$k$zb0zkmHeI8wj5}^>xs5U zUTAyvRrsp?m3KSd`n2c06WyQxr2e^9t58e7^VP4t`u(rIhEIdY{p05)=ftnP@;X?? 
zms%=P8X`(0Hoip{nqmqJ3B|^=b(YLZi5x{j6qnuESA0-}x34=yf6%h&F=^}L(xyi& zWrqw_#4;7#X(_l}!i&QGwa9W^$XWxrqroeUKFc-ysPa0%yQi=sVNHoBlv9dlAyZyretXjU6_@YTv&h0ug=D(@z<@KfzD;ip=Dv$Sj%nud`^p^R7q%|YNQ zpB2wr&HULUc36|;j^}EZrD!jIM05rbuX48=GdF7iFjgY+MKND-g^wa(WpCFM-!82^ z(6{ADsq!i1-S23h{7U!wMe|4XrY~A`C)>2&wF$mD%@UjrF-|B#N3G{4yR;{j>K{5Z zKPt2*TXk>L_Ut`Xmv`icyuY6*KianaLtVw6HOmk6rta-Y+M>r*C5%;Kt6)E0vFAnR zE1Xwy*AJ|cOC~GM(WzjnY`*breJtIJS>Fp*5mparz#EvdV5)?Tnhg(%Z9($FYj!dw zZvn2v`a<~1+J&T7C{!%Egz!~d8HFGT2q4v_tY>kd4ktO&h3*O6-if`KR6XOneUsJhF`eE?#CW!c z zQmXpSTsEA!Rd;6~9F z5pBUn#^HtY%3CxCab6)_;cSPmtcZm#MKtNGwy1ybm8@UM>?9w@ySB?GkQJK>Im$wo zd>z&TuCS?FKaWtuWN#HWsR6RWQYGh(5*p9fWpoko3b;bBy0JNE7uYl1UkOdk2vj0r zS!1s{gr_~UM^uPPKA}H2CqgwhQZxr=NA$o~vE72dkLBFaRKBGq5D z=)uj3sdr?Pvi0)^6`H(r)Q)k@AQo0A9vEnc^BlMW^C(UzKI)m>BLfvo#R+^RHwOp! zNvsspj6`qxhKsb=vFrt#6Ut8DE~+f4s*$n>K_rhQ~P@gv)@pV*8#I%vcv zwxd6?8UFWSquv@l;hmvl-yAyOk2b@0+SsiO+x6hC58nJs!^yk9c*%3aviU`sn_hYT z&F0HT)irsCZW$R9SNh!14Ug3<2!C_j+#~asoSVPo${e@q1>VimoU0}*ygVnc+0MV# z4trHVHHCNuQ?W08fUkrR(W4f)vKEw8H`VnTh&93KDpQ2li}R~&$+7a4K&*cK%F0)x z=U#@daB7MEp!4~0u4kwEUR)4!C9vpfLPd4)-o~Bxw;y;)@z{%s=U?l1`7Om?-|u?o z^PVGLcAmVTJ72HId<9?eLh+yeccV7-@0|aqpa1v!{};~yhIgG-YFE*;LKQ!TZ^S#x zfS5p*dt8Z>xS}tu(2`tah+U&6b~1IFl(k!0cAK=~0B|LhJt(m#XVc@-md7oXj~F*U zX5LJE(<4$TOLOiLW-U@3SRE3%-WXnH4qIpODd_cGrp7am0l+c3*)?9l_THs=eN#No zyT&S5<~uvMibXljq4kTx8>x^i@jGZqbQ2Yr))l!-7h7lw%TX=O>_)`Ad4wiVG_F;h02h()v}DP)_No6_Uw&)%eP4@NW!cz#;)p$U(+oU6lxWB zMor=-niZz0)Cx6Ma7LjlC9m)0eAVqz+GaHpR^eU=(-g<-?Ply)vfD}+u6FiewW8oF z3asLj-1L|PVxd;0RP>-?iALcz14n2I!HQ3t&eUFMHyYR4+ojxX`n(;c?5+BuTl$vm zR+rvy*!6<>t~U)&9yk5%V&6yCEXSI3U$<($Zx<5Q09?s@#qoqvOR+=e+jb4}gRfpy zZvX0X=9}N-zV&n2$1RnASC&5CoqwPw_2wRt++JxBsa8aT60Km>uWsZkP)ZC)Ztg>| z8JLvB&(Cxr$7MY1EheJ9NG~UGf=Nk^Bzn;)qri{S``Acm;!4tH4NXIlX-|Ry$q&}9 zqSeGjbFB}uq7o~_;vtSQWyM*gB#hITN2mP!s}&S5ebcvE3v-o z@MWQ_(mlkc6}h6%Iku58L;oBVV}>EQhWSftIpSSMumakHvXM8XAW|1rnUJkmxf{Di z7q?C;N11_Ggm3W85?lsHu{e_5MocIqiRa1yLndv~!Bp{;Ju*<`6Cb);&nQREt>#q+ 
zrM%nvQg>Awv$!xYQ}T#b`^BgO6EuEt-CnUhehHSqL=n;8AFByX zt3CLHR;!UrIs(_FUdI09m%ra2EBW%T{Ve|`Uy&?aU90}~bkB#!dj9lA=O6y8I`CM> z?GJVCx~F~PEzPTTHszK##}-=oDz=uLgT!9Z8JzChK-3RpMO-#DjyLRH&BE5cxK_wk zOPU}YI0wk#q`ylzV8L<)jgXs=wnWK;ew@%lq!oyP3YwCybA^`Qb-wziKO*xLpBd!} zieng%#4Sz;HRU)KzFN{IlhuvRD#EOzQ|FgFI235<*UURy^GRm6pAjPsEqD^RHhO;#9}7t znC3&dO6^{hqI5~7y5JwwB%wGZDQAZ^JJ2BWX&Se%dgQ#zn6E}STp4b6W!Q`=QPZm_ zn<*Da-r}ZNj!iRN>KFR9JGfuPoiabTS?qyg)h6#Oe4FikT4)Q3&Y)+ry;qZicauEm zA)15#!dDX)3dFizoYx{kg31(pCC?AiF{Bt?80%C8aWQN-$?YO*ab_>AUX)Z5y0SKH zbIsb@T5dYre%~{S$N!{w;dRBI-&VZwk@CnFJ@23B`0kSaQj>|{Vg_C4`~Ro^{X0x` z<9+_q&;R@V|Ep(!*JiWStCgx%Qr!CEeuZrjxL=3>%GSpfT2cxv>B}vN%MDA5)uHPQ zNtI%Linhp#yQOsxNb3$u>mQOfKGs+Hn7Q&%!>(tgJugbTpBIR=>TaoUKVxMgd6F$5 zVWq~PRT|$ymEUqz*s8whHBxYaF)&B(mD)o(w@0!vFq;6bE`%m#mS+ERMO`%xm^b`=680rr;~Es+gALMipd*)e4AOb({DQ z{3eT6Og=8(Wr4WpMdE}n;`U%^+n+)s6A zzv&RiAFSucyEN3PF71gn)#ojm7cRDzesntPt#3*`zKs8B%R8#_KWYjNs?%;(!B=F5 z3JGfy-XUWO=6@ko^~pOHt$P(skd<5@2LWGXP%u7;og4N5Jbh`<#^R#jAqg@JeM@nVH^p_aesnBu;S z3NZ#7!9TNXR#mF}SrV72z{G_J2CAV-C2tfvjL5S>q{`mTU$L}epTwp}z7dv1@{L57 zb)(TOzm-O(F?G9+y+XOSn8|h{N;h+_mMXcAQ4WDvD_OzGDCoAn@KtJzWP!O_-Zcbd z^!X%NykhkMv4+fIiD3=DB$aQx)-zh~pCI|ib*$LXaK6eq5-6z+l1bZdUN`?&-9c+q z`@j4+3%=t28($+DGeK2SdkN$ zQRfy{$Li17OB=AY6O>I%VhfVz2cG9t-la3CV@ZsRG?+qIH?%-kX*baMV*vNNmY+tk) z{gBO+ebWN=7vB4~H=62xlzN{z@nQ6)H49eeE&szqcYpCd%GJvkzRy2+Fm%`b+g`rB zwD73?lJ}eg8>TI2n73FtXOUut^R;>2&9nV#=LB7w8BjZM$wf3<2cI?v?+ypAu9=>l zGrbfJp4zSsCTW(G*FsCe&qL-O{ zi}e1h^au-?n?y6H@D4F2xcZ=I5mr2GseIhD;W6E|KlJT=N!s&AX#-0M@3zokAqEOV zPW&cA=sGovZX?#2;49Q-zg&%Pj+y`&ie06E7I{;GvU2pOSU8Xa(|i0OgPBYiB!SmUOTo4n2`$R+o_kp`|$ZgnS z$lPkA)yP)_)kvz$*rdTK2F|S_mlD3Zsc-!OsrYtn=>xrc|D=8N6Z7k5r4MVR&l(J0 zwP;SYslRJieb=EussgTLz5=(ty3SW$w)7B2`rP^Ek|QUUz57GuiH6FfExSH2ZG6eF z{D3-Trz&xyUf!aJQ=QDK)U75l6SvWV!HJny-yp#maJHD9P|FIxNyAmGz-q3AkD# zm)CZr~Y0~x>8^$!gGhKluTFDJjjS0<5(rR%IZcUusjp$$iJE!CWZ&?{F|oxw-9AH*;`~XKvq+| zNlvJRudLSbE}5+Q`N|DJ3D)X|D?n?!dpk$j@y)w_3I)EKx8~ 
z2wzS1=$hiGl&1yJuY?iH9c*hPiG8F?Er}3QeALEV&nXpfMs!n@I_pefN1jOgJ+Hn(%65V9QObOR;d1iA*UfLqkB z^Ho}JKWL>1Ez2dXn{rMi@lQxq@CIZha~CHNjl6|ppomeD;xh65M5BG=-~?t$W4 z?k~Q5YvE5@m?YxvnFV0tl4uL~YF13ALsYw6WQ$#R(+qloVRh3(Yo`TOBVNrQiouKc z(92KVxE1Oa8+YEot#`gV@!#*Bl^V+bnf3z9B&&F}f z($XjQzxCZIt#ry+y?yxWBYBV9>$zcPVEN-Ew_fmxJK?bS z>jl1S#j2a`dUe|38dhkrlXRNb`DwmqXNT3y@UNO1)@JA1YUkbM;MqOXt7o=X_bl&@ zS-!0zV%M4;kH$!l`*g1cc^Bst_iGbr4!T^uULv~;fOv(rT6U1#l(mJ@_^r-b=r^?jDo2hAHEGi`j#2x9GeUfT5> z=~(8HI}L03=pA&C%#4z;i;Y0b;1Z2*elPvO$W;;p#Yk9y7F*s}fP#?a7|}#}q(`DM z7%eNVH8S7$=cAH$QhRWAPbLxvVe$n+yy8fUkdxpHWmtBvZ(=K+{Dfjl%Gy3MLfI`C zyRwIoKtdRp5F~y9zQPR!V&NfQc9S9hW?@sOyO+)C>di*TA$^k}d4n1R0fx)z z@@~k1hG5Q4W5!l3OvMQ?3!HNlpjD|x#gh37Sfcpj+=M;&Byb4BhTH*^qIHPI#aHAa zxCMuB6ZGPbSjQ5Dtt)er2Jvdyc8lPvUDBF63C`-=@UU*zbG?T@G`x0J`k+oaYUQhM zT2&`odj|MQ#5E{{geCLUH=UZ}&E3a44Cq&@j+`j@=;vLhS~t93zw13i*|XZL{mO)` zO8AOBFEj+PwI^)U;+{gcp-tAm{fBer0T~qCDn=u-wg6gY;wgBRc;rbHJ$&yx8`(Q` zF{Q2WRsJ3`MchQiLBUr%V>~|`6F2rSShwX-u?Yyo%HJb)20?C2CNgq}ensLak0F`4 zJa!#c7@5}-TeCNci=2oJkL5u);iGfmBu&`+%RpHbkM+@N# z0~I?W!&Y`C3QGBQ%WVa`>oinX=;`9#4(`wi@qQ~ ziG9XC){BV*!QWsjarxI?vwoegOqM==wDl(G!j;~yzwi0%gzn|PwLbq!_u*$c?tHkN zDZ-7nwXWLHlD@7XEVs@(rH<8~JTStqh<2RfLkAWbzr{t8wmv zsVEt%22Nzwf}r3l0^ZqFDBpo|`97S8D=sY)+!XNDME4F^*TUIOk`*mT!WP_;rG9V{ zmMXzf6TMm?tH}sfzO9(pDK1X)Z)0GP!q85C5Wb=uLe;Y)HNX`blyjmHMuMznMRai# zhAZ+J;)DxJ_}gPNLbFOR2$YJ~_aD)<5)3ZM2InMk1-@F8tpA68m8KE;)pfp-&j;WN zz5=C)e&Psa;e^MV1nmGeKDnCt(ap0$>)9me6xo4bIMKC6gip@9!t~j2hfB88FASM- ze(Ts ziIsx$&ku9>iDk`0rhGGG^5?@QeKvILXABA24E@k%$a{lqj|>_1md)UoZH7KLWzi$k z0&ekHdHeqNjy-+m$49Rp&JCcwpS%j)u1p1%G4zixi<@%)DmheJ)P*K09mam1+LxXNFWy^1jSsM?2p(dmkkm+-7nLh@x-kD>}IXu{^{kBlt=#Quik<=H@jSu{c^!V5_6`R-&+6KqxpaA-aPZ#zCQlq$cpBr_;f3Y?q9lO zwRq)9@ddo;HX$_muaNvmpUyS2jtxsp`W7kAx3L#LfR~auG9t>p%fwMNS^7v{&`xASb>Za=IcdhWVYTZyp6EO zm?9}O&*&N7wm7n3NnCSOp&_+Y3d-t=EYNZc&+84%=|;su&Z2L~c@|Uu>$K1m#wwB) z5U+5pm)|McqqX{(O2#TaIfw;WaUx4t9IYgFgINpJ zYnuV*7#=WrsBrmiWAT37n){SHpY3_zecj7H^u1ftcdXfbyhZa>OAiqZasgM66$67H 
z*4LdH5bH#HFBuM>wy0meq9}X+)Y^}K+Ht&g@3D^UZ)wV()o1TlX5OsFbA?z0x1if# zG@{u@`wt^`)@Gqr0aQFWq<--nBK`3+G6Ddd;!=}mA+xq>v$yFA_LyNs9%tSz9lXlL z6;|xzIpGPBk3#FaxsS?2_<~&tBi&-c#6_17pprdcnBD;@uJrRBh*fFk=|Wr**00Td zaitolY|Xt=*a&f6#je#PtQUFpB*Y?Nk-#8QWVe_K_es&E9gtNfo0>PP z<2Lk=HxgUXUC6M;ZcEXf(z5-1%kQ!*yW5m|k1_8qWA5!{Rt@IvlajYtB1^S|riOz3 zc_!9R`=wfZlKOlS`q(oVoFMr`nKM^OKJhC5v_8KK{WRZGcReGNB_u1601O}Tb0`{o zs8~o?taSe6f7WE+-+FE8=PU6UZs-#7S$N-sG~M1M{cuKg^tkfK$2~8-*7?x0?FSxL z-1qU;7ivqn!!D5`y>d~S(IMrK8 zrl=fmHN#&?G$;zy1dm2I3VZ^wC^90531!XAG+z<9$wPGrR!sFFK&hP#I5^m6ELBbm z>YN(fWgpfJKDlRFJhCJ_>JDKYvm!f1D-ioSxE0gwmTqxL5%@GGn)a5F6fDO$_3Q`& ztVkmvqgYy)pe2A5xWa=7}9vc*fVzir7?3akDPgN$h5PAC;mKm z;+Y|n&JLb@4m;MM8JBFPp0jbdGGbxFX!oWGUQKo(?GE9>E-pK)aAC<#CjmBboy12h z$z+&F16j$kl`N)lq`2sjrJ(>n$M|-bkOxe0B1>>We7k-wpTJRM7|I{zgqw27SD2=x z7A7m5aI+^U2MXAm7vD)+aAr)qLv-8ph=yq)b-%Ocd!o|(6*B`A_5sZzVwXFFT%HwL&Ep&x_W-U2 ziaE!D0jsJH0M`P4n^{4Q2c5 zcRtX%|MB+wpKE{eRmIEibiV#!$46gvAN{7|+w(p33aQ5+%j`{ z->>rw@b{&bEWD;0^?JGq?MCU%Z>5=0pD!hAG?U%OQdp_vp05cc-MxUpb~9cVVT`F1 z`HC!g;dfuzA&KzzlKa$U51Yy!G^~3-zxF=e+I!7w@96`vR`XlwRw;F-*jPv2QD})+ z8iLIRxC$%oBLUBcd6Q&?Xc1OQKB>B(Y)e4824`|WT6b8UHY7(Kl+_)cuMWxW3C-(G zSx1tC!Y`#YJXf8vT4J0qZiUE-OkAr;T&rd(5SKehK$aI922;ySc-oWJ7&u~dXB$q! 
z8WjK*UDA1cA~V#7qSRp&G@m>)86RwDSo;O$b&XR@M$JElz*|ppxP07knjH z%i1Lb%&wpG(Fc@i3aN@;`t%1m(kKMs&gH`8MekDPD;S3UV8I?!$sIcSgS(#Z zLBIObk9}{~n2$D@zHHTg-KP4cLw%}UB@nA!ObZeyDrN>d)L(Y0@n6v&{IFj6;-!|I zU!2``qH5pCroBhoZhA{w{;V-)e-C^GrJxQ0R46tmOL&b~=)+%Sf7S%EliU07r-4{J znUED_9fvey+UIc=v%Hx0*$yqEj=&Y)6nupg@qHo;1;1@ z@rBT!;3d57L^KfEfEye|3wIWXMeGA`l|}y8PQiXO?`SDcE2#hv+sxaHZg zh-Al;z}MmnrBoP1$ki6{Y_J*<3l^)lX>r0-<%flYwfr^_zp!jyU;Z7I5`y>dF&Ex1 zl{_Hj-zBB*HYaX1kk&48Q&(dNGX>{ba9(*Qn!OV(eo2ydoa7b5vL{XQa`HvhkwsE) zzQrm0i;sVlSP=+R@dJp;Bzl7)JdnjW{1kHgS4>#H{d)E5e8o$pkg%*R!tit7Ti>sUd^ejJwLh#7fr{N&ewD;cpcw@+|u z9Jjb0xS~S`S>e9|vBdE@FAbbPQS+EKkjd}UC|ko4e-E~5U-ZB<7r32 z0w@j{Geai-OxaF8J7ns)p))RG!Lpft z5xyF_pl+CR{Wy;Xhfu|W7(lU=2|*F16;Ez+$HI8ADiQI@GaH{+w>1@^4c`|)MSqYZ zaD|@66`%`6OU{=V04KTACpS0cbKq*=CU+aC8~I8gmX)t)P4W!LB|s~&bG|jReRfRi z%-B}@=w`ccAzlGjEE$~UT{GSD>X-#*N6h?j`1I3*$DbN9{<|ULzO@;CY}EMAhK_m9 zX3RS_Bi|S@@=Y6?X9f*FY%^ic;EzjW-i zt0!MN^YOO7JQlQTTkgZJta$Ldz`~aetGdeG%rA+@uD z>%l7fzy?stA)w8{AF{F*=fvX&H*lYsAyqR&uZUw1yP}#vEYT?RX_8Cy3OySUvS<`e za;=@X_$u-HGFiz-8L>t$tP*AAD-k!k@Y1Bkm+ZYS%?;DOk`=UPfxOVO_S4RzCU z)ez7!m;tSJy|nHA7VnIU;U#^(1?HeNdawNMpyguqGjR$LMOayMN$V|WP^5Vm?2}gB zBdt2vx8i_q^&zo~kX<>e4~QVFHTPP|?vvIZl#1_Uv`S3((+`SXZHZnbp@W4NS$MM_ zw!+9Uc8w&w?kVb}xx$moSUrR75z9>>dD_riO?bYR04~HU@04c$jP{`H&d_B&{;6$Y zxjl3V=?um!*W-96s3EglB85J+6wS)Qz+l04shHkexru;xobyB?B$jrR-6NzJlrDDr zW^OXjH{^&Ti!>@M=->@03Z#HS5ggMtb)(h546qZ-AeW2~{N9&^8f;e-kS1dlOeJjX zR@HH7&-@|=KPQWWAHc*!fj=~bg zIc6%%>PhSQH_3V(Rq}cR=!Mma_;ebAXjv2k3-kx!E^J*)3-ZTh&lM7uwI3=}JHA2w zmN8#pXlL&>P)A-UWW_Okry>7#spNo!p?%#$QvTh2MF%7*hkZ=jjIrXOni;tjaFu5Y z&bF}7*(=`Y9dGuF@AHnguye{IP8*tH_KDJZr%4zNlUJUq>z3#RnuK5_1b-F{Qh=8H z`uTr^uLQhVEPPI_PO5D%egBi{(=WT;dcTvPhDV<7xc9MkdDnCC*2d&j^&xq6K530} zL#t^KlKMC=LP;zG$`yz;*`sCBl4is!_zEA?ICmjlQNRx+M*Q$f-9TB~l!K~-24&p| z2rpq)=bY=DaO7P6gVu|}(@t@>>Au~hF@RXCW=6bXau9Eo)n0{iMXFW%R6iPmjdp>M zRV$l+?E{-!;#Bi7&ji#jj_YxZ>Ld)@ZfPqz)y&8q$|0hghM+^ZVs=yq@uAMinuSSf z3Y3yl**SO1GvEh^)bN$Htm0LM 
z+C59_W|go257rq2FsMWGDi!GpE{V;~i4A04A&!ap#H1b`3Cx2Yh20%GVlf%T6*`!*$^-&&g4)KzUXrbo zWw!xF0+XObS&ozc2v_(h=kmq=58WVP4Zs*YCKq29r#%mVozAmjnrB8g*+(|mh1COB zVgrs>^>~*HkceX2^#&Hm?sK_ra*)Zx0^y!r<|b z+Kk&e%6Df_`Js}7PrrHP@;S-;xAt=df4pOEX?)n8qF3s^c;Wm9kAC}B?CqPqHg4MV z@|We0pU>G*GbiBKf}qRu0vhH9HnZ8*X=xq*w(Y{JriWGAqtFJm&J1ivs+#HFC70l< zz!rxfu>)~tNCVy5nIYF4me%uVXS2H}uz6;{KT5_e2fr35YpRE^&ry@yuF8}(j?hu( zs_`y-kJV$Gt7X0#B`jD~?E0CyeEx4Axv1#?q*S6i;Ky&c!XB3D2 z*!J}69k0BneEXR4vs21%&U9Bb>btaJa*(B>n6G&AC0;K6JL`WVru<*~nE0FGFAh&F zh^563+ox9PqzjEw?xu6h68hpNS;f#_XE@lCX^dGRMdbH|E!V}ZA!I;|v?A{pro;ela(aFv9W3%FuAmtT%Dyx0`I zQX;D%beZIlNXyTX41ndE{nEN4^7RR;(4eH~6*}g^uvf(t>qGLo%LsoE5e=sJ6=qym zk;_!!x!o)xOj&ENt{T+F6|30kjI;%1u^%V7w3l$vr1grsUKDuG*c#IyRj$ga|1!!6I87K&ZkYz6ULnvUfPQ|Zj_>I{>;@PeJ zKJm#xJ30S>2nabHiy1=-rjpaK@P;vFD9D6-hFi47clVY(+P(YLURLD%<)VcZIUh8r zKX2E5*{=GcwfjqjsIS|5L9ElQ>L1$FUn$g|E7Zr^dX6@AebTCW;X=dh-&gPW^3s8C zTkbus-1e$+-Q((_dvr{6BG=FyM7p6lh)o?#0*!dgGEm7u6SPe!t`PHO#VSl?;y$>} zF#OZ_!?Q#emB9IxcZlQ)o&oLveOXTky4Rrol`v(w%bc+twaN%$$x4xIDQ9RArXsq5 z*@x@dq!yg^Fco*e3=dO@HQ5`q{3$NRl&L^0WG?thEJ?h+TpPnY)+{=G{5y$B15^w5u<@Ep_N5m9VqZlSg5)!j;TdRxL_? 
zIr&A8@V8%Mgx)1YuYc+fa@&lll20zT2+v*896#Ch!6#iWztQ>3OUgq}wcmEAdHWr$ zYj!oU=sBvOJ|MHnDYANAD2+mebBqdqyWAOsuV@aUUr{*BMYyQ7IVj)?t2uB*(CGEX z9|{#WWt18KDw(NdxZ->OvABa=a_p7NS3g8!_uxwmvc$)a!%`XNf+gu*5Q;wUPPNibiIAlH8L6O zv0agKWreFz^M1CSc5?W%Z%0l!MRuso@Q-bVeK2U~2R6fw3>x*e&9Fb&IlN^vWS@=0 zmay_?vkyP_=+EDM*jfA1`7bi>+ZKHD%0-*9^PjupZN=Gl+t1$o)?=yn+>~_3p{;*D zz3Hi@O%Ga@=G57H{eCV;^2)A6Yj;EGcLOsAdIr&I(Y9 z;v}#G<%)L}teF-S+$3(oV1ezkgIZ??wOEgwh=R31ERHy`{y+BK0z9g7UHhLxOR3SK z1&X_qAOYeEad&q&2nmve07-y^;O_3lp}0$NFB11mGVVU_cfYeJySu07yY~5i-}gWJ zx=wrEz0S(aFv(0NYyH;!+|T_mrgz5o-VI5-m~%9;0t<2$R;Q6w6tRX@p-Kqo>N2YQ zN8=9zOv^{wzMtq`ZXI6do!%IiSDRB>U$}`|x`V%RPutd?c}Fe^&)yc^cr1AOUed_X zOVuAc2T}3=o3H-!!{EQ;JvjX03RkAqp*< zl;wf^@Cc4wcpJ?FPgMG(>yg!fLKv6_j%X?+A&`8Q3Q`h66%e-$#7bF?(=oM9eOe$Z z2Cf*s0%GC3j0M2JIT}m?{RezCJ(dH$04wmbWRY}h^h_1ECZnlcAweYWV1Xy>d+e!ks z0%hH3lU(IUuD6P>Gz%`Zh_}D4UHG_S){R%i_v=e;3)1$rg_d#r7m9r5Dya#|Ft-{t zN{Ckh^E7xN(4FuzcEM$a!!|Qc?vhRf%QjYNT0SP~PCU!&~7sTN=BRHg34j7DnKu3GeDH{;^ZMT?YkgwzV!<-7p*ZP;P~mavb1$lQo8nOm59v4vE{pc-yx_VEn31z$2L5U_hOjxXHOdm6uwIjV#e z=sO#3&qb*07)Rbvn5dyEDJ zUl3?&gu8gGk7AG`*Em2k#DzBueSaQ&L`Wfre2kBHq^Dr44>eXn@EPU)sU}GI3Sb7Z z8sP;zqp$%E0eb+nXbKi`(Y?h}f|XN(6=;{9=+B)Pz%h@KxZzWpD7T6c*d@!X6U1gw z0V~-~tx;@4MXtnG1`19T`KJW9UL;nwm zaPK(0@`nkv{cKwi2Wk?`w~P{*M)E;xW4+m!TXys|G+a3owbt;4XvZjJnR~Lg~=3MgXfk1z33N zhGF!NJ7=hS8`dxn&QK3FcKSIq^tEs3Yu(V(tOipBJB@h#bwBztzvz0eLzlZ>{(x%W zrO&@P*RJEqb{&p=@zr5g`|YguOIht>hkC8_TC`*1!zWLO{0!%<$C}yQ1IJRHV-jp51q!Pvu`G$ z{hw&nRsRY(6%3Y={xQkgp zFFcrTry!NIiRPJ1YCv9G=W$ivkE!@!bUD`dqpPsMS2Q`+pke{Kx{a;qHSxnhi}$0a zznkp+!9J?aH={18pk`iK!;;OM<-2&R_i?u!=N`L)u{gq;PkC=@WNo64&UQ6sxc|*p z|M@}i-}D|_^P({U-noz7T57M=s0dd^iub9FgI?Ij3DGZa8Z2?f+#E0UG*?*+>N2s! 
zCPYRHE=ej(OTozqxj?9%;2eYkO43@AxRxSTe3>@7R2{KY3B(Fsh^R>wn!-HLCYFS@ zFO7Yy$|78Z0-|ZCz$!wCe8`C&Qd9}2d8(`f=s*)2jJ^%kTLmjo9|T{`h*lv-)IABq zmsM5)BJfo(`uC&NC<6MVYcN|7hIYS9olCsLDyS92Wfj142$dpv9-xJMcxYL8SO^SJ zX*x{I!5nAogRkJ?1_&Vn3xVob5R~B7Mw&XU66PqQ7ySXQfKmp&Vi1cd7lJxM07xi- zWoap2W$6(d3UG>BOoOmo5+yWfD_sM}I6}AZ?Lb_B8EmP3g_;_|E)Y5jN`Z4eR};8E z9aJcdT_efZBPqP7*!*03uu^lRT70HKbfr~voh<-gUEv6>a)mcI;`@dw;jeAdYaGdC zj^rj^ewi&iS;N`#x@Ph7n)$z$7u~B{dXqQ%upqd!)jN;pHcJdq2z-UEK&mxmD+1=L zL(tWT3DnT&vQcp}M9mxc3X}!J`lMe0pTJZ=5xiLRSE82?r9kvrqBaV=#DfhImKKbJ z2znUvC|PA|=mPp8w-aVIL-sqAD-4nGLnP^3JCut3|yq~El`FmA;HDk zw5=p;xhArVidQ~(HW;_4jSc;bTFYS6vb9jIrXurdhSJ=VMzvdcVcMYH+*51nuCfgw zCO%}eW2;5YQjk;o;MoF|vqA_@`^b8dJ39ddJF&N1D^yPop`D?y16dS2k9`J^- zJB_XS=dVP!?+ecSBHDjUxM>%+bVKWcGH&`p4pJIC;#y3->y4Znk?{*h5GLc`?87t< zLbhVyiV9f<%?kc2@YN@{qB}J3lUm%tSAZf0rTz$1pY}meSRkcO>&MAdumXc&%iP7V z8N*cgj?6d37EjV+B6=0eM>+{DLx{156ved>4$VVt>PI>>PjDB|Fg>`89C7rf;HV_y*+KIq*B@gMNxiK$z& zXc(YFio|iIe0sXvIzeO_#hn()1!9eHZW?G=)qCQ*ABMm9s?Xyudp-K9=aVnHgRdTa z(c^Ku9uM31dIH4yV!&@-4XgNibY-_Gb%<5~xmm?YFd_kS)otQLQ^RX%(Im)FZNK+mLVR^1D%=%*f&DF!eBwb)fasqx9@)ci*64( zcDvuc(^Xc7)9_h+)#+4+Zw|9M?E2#SE#LNB$@(&CQp~38llQVu9KRx|Jy?F*e0IR( zxq)u$X3xF2ch~!e8}D8ZE?H`nnB=&4S;EeEVKT2v3R zs57WoygpNfXp{gr(PH24n1mgJ9@T>KUtd%Pr!~@B%-SeWP2tkI;S@=KGbz2r4ARy= z_zL?5v8x3;6vn1BgRi<8)gXMn)9CUpV=J)$TG;P|FpUvdl(Ntz+QX>gCzJ9amhZ

JwoF$7gNy0dL0TEdai`j2r-qf&LpI6dMaiSirnLfQz)6{Dv3p3 z9tw7$N;q3=V=$CW8;HyQg(R?$gf1bG%So(3!irym08$O?SP{!)V6E6?G%g8^{}@Vw z@(qT&!8~IT!8ecKSValZBae=IgvKM7(ac-p5Q@<}#4RUQ=y+q26DqI$A3_+a$Wg7G4EkwF)nB1($ikYaH=yt^{y( zzg2#XE4|2-T;WPDu!Tn|TDHAwSo)^1@Kw{YM-657h4YR}seZLU;F%>s7zKhC@S_3? zVWaY&r-H!@FbTc_SmO2Kg@dV}AThov&<+>|TL2H(2229&2q1)g?BGjUXT*bN=^!-W z7Dk4`<1tMJB9&nrA!g`kQU0T4!I?Tx7MKcgQplb~;{(RcU?04M1yBby!3>WlVH7h^ zt6i)BjD%EDxZ>f_!nbos7Nf8|0TYH zU}ac(#Eaqyas8yh1(cTXMC9#z`L9nT7q7~XotEy{$6vLTTeO-#yNDl?-R7OxYT<`L zgpI@O>IYgjqaXs`Z@2LcoyXJ}#H;2m<4`4R{sGQ*L#!wei{UF;A~ehaL?#vE!uk_m z0hy?c$^cZ@2Tft&7A$6;iC@56X2xelBeo3AD*zSqZ<$9TtAUc3aWl??!yVe_fQe`_ z(L*@cx@NFNm602Fv}4OyXN){0BlIY=;gR?!<9wBv*uMAEc70!V z81nAh(dE6&>fx5Rh?WAbkoN37U*j@U;gBk~j+2>2O2-8Vje=yx!3wbA_y9Rw{>riV z`Dv>obT%2@2aqQg9A#Me5?BSef}n>Tyg%5YX8MV*K9LpVEJ#~SRdf{W?%qaieSuibgWa3@I@JAWR`G+;Z(k38(Z0_kR=2yXPPae%{#N_% zuCu;6&uV|@vo8;R+35hQ!$#H@YrpPWW}C3jEq_bS;mhkDJzsJEy5;;tmf65Q!R8?w z=cgWAIsfebImZwBmzJ6&X9kq6&)I)x)|M*U==;_o6(emv3^HpPZYvsW%jsiL+sC>b ztFLwS0E?ypW+?Na2*^XXX@85h0T#_z{mq-Opw`l5+SK2yxxXp9uW4(4b54It-T*6p zKMQUjGy3>`=76haEbK7r)0aRpuojIpnNWj;-jMD_)m_I?zWQ#|hp$JLe>18ADi*Y? 
zuH&kDPN?iVt$dixdn32^R-skyDUIQC8?%b*7p-Y5-Ns$Dhrj8FaPK+EshhGZ4<$%u zsBP7u3x=Anbd2qao~wWV1OI>J!usc9&);ADpS=U%E2gGPkMQ_4B9TX_(n|0<6)4H` zTCh;1ZH&ML;q6g6Ow2L$Q(5@RC%JNMgVi31)D4B%aE?e-LeJd-;=7mxEg|T!id{vc z-4j=7<6*^2lr`0>wF@q2i|EY}o#?N`W8j-YhaeWv2oJ*oVxjiO3?eju79CcI;vn{YF?casgR&s3KT=-7SQetxY+~pXdo)W+)~$*&?0p-z<8kq*9ZZN z63pC1ihKkb$qzPxzlA*8ld8{GaqI z=J1P4B&`llNcl>w)!{oy6{Mx67(@)fWO zzH|yylc{V)^FJ|d84;jUxHME@8W+Bof#$-2=Axgb^6#0T?;200pU`HVef&dKQ0k$9nP{%lbu~CqCln>WAiPxspQxCf?c*ZE$%AMUpAavyn{pZ9#&uGiCcRKj}m^{96k+cz;#I3rEsJ6|4F zs0vx62*{KA%~jw}1|R+zsaoqqEvCLsiI7eVl_45>Vz3fFClCuvIMPco(i2pv9O0oH z<|ZHFLd7eCW+jFeH`-4=E>JZR=6obMpe*R6pj%oTr1BkrYMh_cC_rWuD8(`kmKX&~ zj6)?8LPZn8gwtZgW-%hOSb=E_-y~9C6v!Rn)jGtL-P64C`|)qS81(3~-gjBu?yx%D zV10d~-M80SpP#{)W>$wo?Y`ap*>~$$pU;7ny~m_%pJFIiN4GzJoU&{4*tp=&t;bEv z4old(Wc`CvD{mc7+_8E_-n_6iJLez0Uv#1(IRCzB(0$W@>amX1<6XD|%#qj8h+wh4 zmUaCd>iXJM!PDN`tQv#M`=hsXDi0qyL|uJ?EASQe2U@TPS#SoLQ>zv9sDYNeq4tuY z4wzHf*2}aJ(WCt>*bHJ}i+yJOj$5?l5T8jP7K(;=a!?ljcEDHPjwr{1as|HnVN5l+ ztLNmZ{^k`U>^@BNEVqlS_04FB&1=qH*06Y8QeEyE&#$(Bg3Plsw z5GzW{{D1S+|Mn30Z+s^X$T+NH;W&>i4)vHxh=QS9uLELTeLy18D;-0cp;=iaz?7v% zE+{H@*8W;sKP^rr?(sA_(rvcVDM#j(ul6Y>p(Qlh9VfxGGLo@En}UkrN)lg6qKinV zA?|=y(+u5YI4@&@uywT9HdcyQEpQj=hMv%!VsyYKhe%|E5|?-#hQ~R^sIddt$}Lfk zFP-BwSlBm!mJD|xToeF?a-mxyK_elyxCLqkh--SZ9Cy)tr*w>@Q30{wH3Ch+8HEMZ zfoz2mAu4)k^nSTQ~r%D+1XWw%|Nha-^Pf ztVy)}L(7`iO=ZvOwpR$Zztb08)TC|`1uSj|EZ_vp7b908B$t9Nk`tihAW;)|!{|tE zSt|D|71gYMm*Wg%s0S7@k{7KHf_5@xsBGFsx6(o<%;89NK)Dky6K^TD2FVN040Q_& zR6}z+4cZsz3)&Z|a=0CM1_M0u5U`Lp<&h;zTTO!z5IhxKjQoEjHprQrPs|IzXg|D- zz%yi`VvD}Q8Ho~vY|uA9MTQzHltAAsW!736;)-&l+bk{YUd|b`t0H(Ig)6)?DwNI9 zz_0F?ug3ra?_4$V*nRUgRC+~Cvrr#cs0}PsL#@L2@@QNRl%Ok?j@ras@V9%-S7Pj- z=RAdLj>vVE9OA7_fEJ3bLm)BpMt+EDlDlf`4AmHWt%*As?4huuqo*X>_VbbDzF@T=eK9X&CfhT<#V)vcf^+Rqiz*Ba={vOrFoQgVZ|K;; z281L4qCi&PV=UlkbQHF9AII))+}h2Ejk+Q2F@)91&=^R4?8a@~>1qWp^_<++a}pQP z3ZSE2rWn2q4;2IfL_e_+Da@E;2Aj4GKy=k)n(_D}3W(I=Et zF=~J@tcP*!5DU%_vnE$GzeH-hlQexN-x*=|e7tix%$Z}ITVXR8>m;4(PewV)MmnQr 
zr?g8U6M|K!YE1~#qINdMk8c#hcAle8SW9eSF^$$D0~yNCv}mzkYCB4l-Y$@=lhw{;#PF)8MoKNB#pcmM^H{M} zvdlJ9Z8uYGn<2MLStNy;W<;@u_cG6n z*#7*=p|>}8J^dwW^?d)O^8%KYCTuvDvFW{Y;>#gc_Xb+M?r&LvXpuqI=zDD)WYad# zhCR@x8LPh)3|9^CT!Et?d!n3&4<7U=W?>(-Ke|jY`t}bCniV80!^7~H0cNeBFD&dp z_TxfsV>1H1Pn=b_M3&g<>LeK2$6ONXig!@xK zP77WFVgb5lL@Pl}xR-f=W2_RCg?oWy=q_}O*1DjM8KZ+o3k5_l7y!&IiI(IbA0b8( zoB%4o85w*9hdGtA48aKCD^M1U?zo)>Qe_(|gV|af03q+7c!ve42XF=4!U9~OQ4X&H zwsz16;38r#O6=A{ikK!q8 z`ADz-DSZ1Aq!I(_^l}H&+8j3dRJVg_^J>)Vm&3Oah>NE!O!LYLOa>6|jUZc&_Lw zB0+dCv{}QmVa}j^x?o1C+#*2X97>!5=(6;Zn|mt}%P`rUOmM{@X)?i014YZyPcc1Q zKHl}6UsmJ3o1_wh)D4wEdi_F$V#-krU!hQFaHN04qyKoQ{8#yk{Nq173I5d|pGvM& zHZ%#JJ(pd-tvLOQWY5pS)w}pb8wEL~g4o*?%2lmbV$fUmk>IN+FOtnTAmP_XN2)C9hwto6@)1-PP_*x)Ne?m7}dF{-AI z1%IFof0zS*j3XByG|rJd+73y=ZNsfNLoK-j&3LHeK;!xmCBn(VA5BEvCkZImjc-Q2 zD2h8+JtjA!hy#nucf($odyB{0S2%<;AP;|<#|zt_Dz_MsrB54%mV$WEl03mfVdSa6 z2xdr3BfNMpSwVn;$}~QJGh>#*yMP#nH%y5Zm`15axU-@80K-5d23S(sBxt6mAz)T* zo1mDUsffi$vM5m$m4otnsniulEP%%gXX7f$x98*268>mhf+Az$=; z*slBC&$`@VeShPN&UZfRbQf_A9eTX!Jhc9+L2tVlRSdDK#brz82D0cNDLG2kohIvk zCS|)x=~l969myy~_MkGNK#p(foh@_Dl-egr%wr{(0BaJVm=vZm4pkY2DCr7TVL^@? 
z>nDYK3h&QIKmNF2$=Fco*buo4uV3`;FiA+rjFbJ-~W##M1ryoZ=rch1Vj!mUlq_q47%!r6CTeCDR?#v}RD_e#Wy3KW#DaESQCR|d-Zrz@{N{(=9u z?EmA4^Y`9={tn<*!yM|d!*H?&Q_=LKSwI>kWcwL3?mfb3)GAhsj$_LRtyQQ7u3h^8 zV(mjg%Pp2>FF0Yy$b6L_Bx{HOOSI_5LIhUKGHv8iP2>_SoWt-B!{~yb0F-8MW~WuB zEPaR}f0Ej4l9W{B$MJ zBV;oo6et&ZCK6xt;3pA}1T+}xklc>R&hClY=}}_mI2i&NpjM#>=$0bJsAcNt2Iir_ zha{+(N|z)dWAla*0^1e3;=WY-L_{jI^y$o+`BWC5pQu*&VU`jzn>+rs-^} z@I({;WSeyR`^FvR?A_J;CHLPSZ6@1ZljI#0IfsRF4ya;Q2$NQkNN9`&)QC=3p)Pch z#(%yXhhn^(SWH($ZUg-GfFBh5W`ZzPD0ZLCcp>yFWeDXfJ@^X2R>8~kAtfYai7u!}gQ#{YYN3FePp$aiWcbHm zyRu5q+QtzR50y_0neI&-y@{QR*3409;(+mjgvPeJk%{hF%Rpk{rkmp4=9yZt^9m`K zl4dm#YKaQ}M;CM|dYq&OPv*J?Y=zsU73Z%?4xJHh z*v~83#?N0POkK#_Fh<$7Csjo-Rc$2t@%vwfrSCBcyy7(+fc#6!%w+Rz}MmSZPhM|K`I^IWu zdKM~WX3@kEmu$%Kh?7kUZ=Mv@Y!uOInW}coA{Mb)(+H7Wq7w7yflL4r+eEozh8ipl zK7kU2kX!6KrYW2-=?N(!nHtATz2i(RipAi650t!f)F=YtQU#5=sMduQs3R7uBNiz` z3#8!-#ZiT_m|_Gk%hK2C(${IyRx4wdh*Fm;la{DFQ`yr3%KMq!A87INn;|zp>vgGJ z_si|OUSfT95|d&&{P3V1f;Ydu$?AOfn_+J{je9oMqdGWGIcF^?-Agu~BKt0pooC7B zpUJipWYvDMbT`T0O6IKBWvtXBF2xUkkY2R$Dj`kdM|H~3Ii~6CQ#E!eYP)1DmTLyF ziP4#bs>~y_7LhvhNVQG8)+LL86{lxPoMwrfvqY}hLa#Xz93?^)$ng?VmP<2Nt8zA~ z=WdbbZWk}yBU-diUa(U&dyOtSAJ?`L%(Coe{NamX&sjb1ectZ{>zf;&eRrv2r?c(a z?fty{uCKp6_}SN6SzoXCtY`5khgI(BTjp#(dcLX>*;bcYtF}JAFg4YWWjw$oe`Yz+ zT<2A7cyO-h^1gW|wkB`iTzvdd%8KVUkx$X1J=*C#C=T(|!)NeO69iLX4YB78wdVog z8K%N^fE61xKE^kN(jNAilApm?^-;nCu~Wn{=b$(UxEgFN7-+=>U*TUda7D?=stwU4 zz$)lh@F>F8h3L_~rVWNK;a9Bhg@|})StbqLCShb}T~~}T8dr;GEc6w29$yaH>~CE) zbb7^juX3x1YPY1CulCP-#r_zHiy zAIXZ^um1WF@uwrk|8HO7=lT1;f8`xuek1gezyEF|5|vm@xEfL}B6%AdJd$cK&d)kZ zVIHnTVuMpWK{%CtAc3LR3MZNx(dZVI{5{B1vCO_V8LNUBM#t{o~flF@H3IEq;gBa z^MJSvr9dzMOw^FVD3a09hY)=5%rp`qpE4DwCcKcoCqU4d>qyQnGUteP_9=Pl8D83z zwt0_u%ir<#u*DZd@*4ul2ST0+bq`uq&xGWql7O#XVy+x+R1#Vu&4Ml{ow|(_zJuDvLx@gB4v{}V>6+4xYZC{7npo0H#swLjj0-*qz(6ut{+3Xj3(0Xqy|A=?v=`8a3UV1xGs ziX#o)kY@|Z0=Obw0Xe9kJJ1xc3O|q6OmR{vW*Z@iONJP+5SZtYEk93%t^vH;D9ywn zbREhnUfn`9K9+$6D&&el!h%ADBzE`)Lr@LGGnA0frG^_Ri9z*3#3;4sqmKj8_YUz| 
z%P3+VMoRgxil&S8d@Y!R+ONxLi&^I(tS~;}p;OvD51ZSvDZFv-_k5gr%cCh*(kTX&-|$ z(KbfKg8zzQl@SuL8lf*Cr3xiNnt5f+?P6#K1YbeC0$kAr-Dyf&Pg71GOAea*(B((X zmO&Eh5V1wD$TUbeH9&|oJY!EDA~zAi3I{t*VK{REw}#jXaMq)ZS|)77ivD0!h1ky? zruE-Xc-zOSa)9Nl(GE{NVrr9e+SY6*g)3BxSLs62;nlxy7E(DSq;Zme>jYmOHAjUg zfLK$)b_D7eDWPlGWvS$DS8EbUpw1 zccQx7&n~qC&T~O`3 zsNQ&5gWip#BDrG}Z=`K4EeQ7dmDT$WT|eAleR-k%*XKKQJle6twy(cl%j#If`g|7a zv(VwrrE_;(S$W~o;a9h6i0pj*hs7rj*<^(FcAnr~knvbAe4yfFAKC1(I3s!Yx|ofd zmz;UI^oU@%{V6C{!yGF6SiBnMQa9RFfPPXS7Rc%&V%c$lSdgtCUNL+%(CQx^b<_~) zvT8GcmO-IHHPD;`#6qnQY!9+Bz|}_#8)C&{bSjV))vT;3Twx8hrTdt0gq<3hc*K}kLcNk(BsKx#Ak_-Idhgv=^ZZXKqv3n#XLWSWcC zIfS@IQs+7C(we98D^U9uQcWOeu{O9+7Y5c^qGJ~JLvaggIt%qUaN%T%G83bOKrKPq zHdLnQ1<0ZSfGu99@F@oL=SlSY^Y`#n$!`=Bh`>lUvCbm0N0 z7&^r$9Al(NyGp}(6Y58*VkRO_7rv0GIVnG#w z+sYtV(VX^wG!o9G)^r+8iI%`wP;v}%%1j!B?hBVY!Z}fAgozzat$FI;`DoKqMHJ}6 z^68NcniUizWL89CI(C^hYn^`n4wAEvWE>``r{y!RwiP~Y*-|aIEF-7trI*>NtId*o ze8np{sUhScTXCB$z1+Y({ed_@g)S+wSh&<12l!JNjFQ-L># zfPhSgqA=YS(CITc69K3ITKE=#uAl|_U@+}MeKc}A^U#Y*-y9$oC9zyIR3hF}1IU4U z3YK`B`yh4!aiiVuwEmSItD1CZAbN+pdr*6={X1 z-Qc5Ou!@RS$n!PyT!XX>K@EBwNWoWh8Lm|SbO`-FoA?I`{er06aK+hOwc+g3yGkVri|Gb z1&DDiosc^UgBET<@)aG9L&Fx}X=ijNC{YlvdYN&1TX6c>aFCxc(MyQ!*p6AD^m^SC}K4JNM7dX2j@!01lrewV-PcD}xWaU&KD_PeSh`GDQ_qE{T4F6rFe^e-$`=uI=S;Jx%n%({*c^!tv~xnapI=#*cEc{ zBH43}Y(J&ne4MO1LRK9lrTZvxE!{&To{ zB)g2vDI@b%lLec};_YO~9&O11^~%HAwa4|)%66Qi&wTO*Iek-q>YD7AJBsU%$=N$( z`)M*~g<^U{O>dJ&Uk$t0zW-fT&zr37S6E#xwC{4h{kNxBpB-eeHh=l``Y$^zVtoh9CaJk~R}Y~}TfC+Z&``fw$4cTvINEi*D=UFXGbx_5HR-IFDk4<_wjZIPQY^VsgV zt^2dKKh57QA8d1dh|Mb_m%72W6=+XD<0r;ggV^b`AUo~=n>MWe)~#6mtXlh6wESe= z45<@Nb!_n^w#;LGe&Ge8-3_3Hf?$`i6`jYv?`rg+`=oamWjJVh`AD~SCP5#Z zl4=99>*E)&vzBufuIH5Q9cSrjTLWg%P$D{>h@ z$6Vk7Exq)Erh;5E2&3e~^_7p#)0m zhRVa}Kvj5w9^+kTv8jjvu}~bu&j!rE790p_0t#X!LkEK_!*oQZYfv0yK#Q8#XW^0% zkA&ET=hQ^xt0N24F^lxLc#K-C14zLCABDM%=(60PpR+?Ze;=8(hol|U&buOC@kaJD zM|Dq2?u*DHF1hqxbmDpaeV+W8On19Ua`s)ziQgLU@YIjR<%W$u0`( zU^=67qKjg(v&6!)B_>zA^)e|3Tv3=(sSz>_5zsUGH1EIMMfSev{|pHiwzU#`zsp`W=z 
zJ8P9TdyNj|x!g_Kyp8IGTQr5+wM%yCmh94CE#0jx*-JSG+p@i+bdPTK@|NrsJp0g_ z{Y-Wbn0mPPxC8CGukJBq@0VTHcKl&o=iXbt?7E8e-CEWU$5`K8VD)<3dE8smu!f+y zO&gAoi+9Mw$K?4l^7;*V^`1PhQr&wmx${DM^C`LZkX-(i{BoC^xkYj4_%-tLb#n9? zIdYX8yi5*UB6}{9-4~#Dk=>Wb-m7Hq70OQAPm$ec$nMi*Cw5Mgz30iH%jDQKa_Sa2 zdyiarK(0L|cb=19Uyw(y$cwk+w@UKhCE0aUKYKCPA>>2fDZhR`;BveE*W2~E%Ibce z)#()L+oP;6ce2`TV6|V*`l5{0aW0GH%KFB}Ix6?Xn-8z`l5^~ryWd}NEspmtP7W{5 z&e~LR=;e)dw~iH_*`Bmxx$Dw-*{4qjtv-;n`N^Eks@_vBVs1N9&c?W~hdZ|nbV7cq z2vekoP8VSPiLwUTqKlCIiLXFSSOaW0hD9;#_kGR)YaXx)h{eE_fv?#8Pz1E4Lmp8R z1lvPOg-$ihj)M%_5z{$iT?L-F=1N8~wr;dD`VCu#Ik16Sp#9#abw5t4{n4ZbDY#w6 zeV}kP0mZ@h{jAG}IaZAGdS@P9?w(c~I=3-tF?;SxUg1XGs@;NZN5w}jNiY7Yy!Tr1 zTfMeTMx-!6)8hnnW*YeFqYU!j4pe{o0se>*fA9a*JAjOakKPZ%z05hlAW5ol2Q}9L zwFDZ%Q<7^>NXEhz-(-$oraWXmhNO^bXrBpDPV-kFp4tJF6-XRI;6ylGNo6y&X1Y_ zpcL4Nw&7E1(qbRWGnr<#N1%T{k>*?>ryV=Ua(7Em;I428P5C4V-x^^}$efH9gWsT! zF~vcU6_hJb5Ckj8R=}#T1v-!w7OqmMl0$`b`h)#}uc(0vIN_QqL-`EQ1wsQ&!K4J% zf{;ZAxXeO&C!Gon({)(BI%=T?%6tsSsz?g}h$}_7JxvLVUr&;^k<9)2tV8Nq`xW`8 zZ`JeiMfYEd7l81G}aRLkx8{&skEd z$R_WZa&!tnv%(t!%KDh|kVh}=QDjVCL(%18{@rbx{IHrVF(W%wA- z4}&HIbXTYgS**p`4H$-69xPHIbLgK)`^1Nb$y7)81oCY0Y@RvNDCDabBCS-6VrwwS zFi#qkFTw1~%=L&|(}ymUqc9z~K zaa*A#2)KfGi)KP1bPFG2$W}4%1ZpjURi^&9NJluE#vxp787Q_4kl05OBRBa35B+#& zy@?l@@*Pdnf%xsud1DPO!1-hRSAdy{|g9Dnm+UdeV|!5Tg!tjO6M z@5DB%;Fd8?XvnJRYtaY^tIGuHu!6sx=7>_h;(iCjLZIjbtkzCYu23AD0OF#2)ny`| zZV}9Gn8@615}L@`2G|NQG7l5|22XDt(b)Y>Gn zozukTQ7tyKMnwZ| qO!NXNnia0JFG9QSbhCjy}emoJ*K*?vGqOs z)0j&?40`&*@H*CykH@+;_|IYu(Lk6x1pugI;Zx*JdRSl6FWOuG6=fBAv-;zRAVXXNU0a`~D5!ejL>kJMM5 z=&n2=mmcb`Jt8+jQ_twXefWkveMerFlQ&hQvW}EjYu;B9e3yHV$;O?^(D=#;Hcz__ zzulqljn96%((b1VtS-k`U+@3CiT28E=ul zF%?sXkZ6i*Seh^|8;oH_n8WBK9_}cj!TQSPDVv=9EbyTI#%=*arjcH5R`D^)0 zHw)J7741GLJ#}4q{h8|V2YESLBht|0go4h0|HxN=qMCo7FaL@t{=V(gJHVjT$HThe z+8Mu4TwtpV*@v`Bs8!RWKa6%fxtgnjGE8PKZ41h5i~u;zCDT&~f?}qIsI0^E$Zde* z9d>tz5aJL`9FlYnX|R0~)LC%O3P!Hf5`sBy;U)U;Qe8-q5-EIMnhs!!e2>}X-g>+CXNL4Pl_)XDzrD`1_c@B}>D3p-WKwp|5 
z*q)KYfEg(962DAsECLW_(!h3Xv6!|&U>k%NJUQxjxMDR>5Wc$_MJ4fO?Wudtm7%V=?(Hvrf7Hnx;ZX1P0}bMx7{l zM29~pgRxyn;YuZE#49cMN{RVYM1g8N<9jz;uKq<8;L~61zqo@VFs_U~Nm$GQ8D&C^ zmb7v;Z!3h4UJ0-LDmZafNF}U2yyDG*IVJprTux91&nc2K(W7CwL+xPOrXEx3x=w0B zBOwrr;j2#LxZjWGemj=k$%x%~VjEUhOktkH>57RqNLioA|K9i`UjeT0Z+oG`-Co3q zR~Sib?A2x#*lHiy<`LiKm)ho&-V!*wEjo{%S}e(4F3nvjTd+b>uu{Bem2BZk<(wtb zlzd^_Tv23>)GvkS9NJ`R_j=HXTZ6|x?KA#OKa+a2Yk{)RK0n;5Y3P&}4qhJ?m6GLa zguC~P?>!*T-;e{x)yea!+!IX2gvfh`m#ORH3xJXj*^W>$%doaEkEnFpCp@)>Yxj4J+9n+R=($gXzSUg z6OR=(F-wgD=2*ln9^sk#{WMQj*O9DYQ&~NRu~1-p&O3HZ`(^hPT?gJ`b-K&y{p!0h z&n?2LH=ZN+DhTlD)k_t-nQ+?l4b9SewyddD+t5PloAq@~`kDq(U9YdKr%?0(tCqaO zs-x>|t^Q4o?sc^u$n>&`yr|Z^tX03PQ@*HGyr@#Vs8GEu*S;<%;HdH%QeI1{>PdBz zzP3fv#MNLHKy?#&_=2q1Ah378+h_QNZiA88b(7WgI(}YOr(>+Ic7O5pwvHV)w`*U* zYM;&OlF?yM5^GSPReage*KNNMQcm>6=Xbc~N7>DZoOy6_+~(CwFP&U`a;HarLekbH zDZAG$zJ76cdGq>vBKMpJBi$a3c4?gAqrm+1vCeFWSH}M8-Zrg>NW~a#2ChDl6`+Wz z{TX>_5Y6!?prRawywE=t1b;6JLh?fM1r`Ho5X8W2!|i!!yBO^#9PcI`7u_5G~sQ6lVR+CW`c#^v3{y$4_QHmLw#4RNR#>sdJ^ zsKOz(Dj=&qCa*DLY0JWO{N-DPn+}K$oRyuwqrCe@g*U&BtEMrO^l*TSF#O5lj7+bv zn2d&hAF^V8l7I1V^gs1sIOwBHfkl6ps8FD1(7?((N2X63sIkm#4K2bxOH6J+XrdjPN89;(NyE|$U-tU^@1`%LpV_)=AF^6 zAX%VF7_vYIl3^?ja6kpkgD#{-lUgFs3xP$mvaCiG@2-dE9F;5_ zVCgXye(ZcY3NR26NiawwupN&?SS9VM!_P@A+-OV#dr;??!O@LdutOW};Aw~78-8rW zX69*WwW~k_%WEVRuV_ty&b(etQa4htN!UPAcj_|^Di)lRthwFx9L9Al5qZcE*JW-ekg%fHYU9?E zq)l|f5o)*wznmWDolw}%@SYp8tpJ(WN6I=-3UGzsbP%Qo!rN!5;LD=#A%Z&RtMN_( zl|fdh28J!xq8Nb)1btHO~K4OI`yiAF0)GAHfS`xKV z7hbA~TA@e$6e2xgk725)_!2ipmf>Dq=rRrX9GRaODUNA?sPthTQ)sa&Wj*L!9bHD} zj7OGg5P1<+qQ--<(9IfDtY*Tj@WBfLYb~c%cDMNwC|A@dn6F_BR|sLiwSZSPlDIHw zQRkjU9ODT>L~X+fLfSDF(<(q`?XPhPhsjFe5UjHDlOY9bikk$jaJG>=G{rC8qdjs)q@$`y?J7@ZC5lT5P61*u$i?#}qdB3iF6@2E_E? 
zaUNWh>hO2!nZyfB=Y(XoM(1$i^EsJCg1nWIB^%|dcc?b+Rc$+ft$fQK^@bhlk`1!_ z<)WM=qO2mxtU~du#k|neDvz)lOV^5tHgzM+YB5=6gniRc>*_J~m1C`6x(8RUT~8L} z*Kb@|cjcV!)l0JXs4g(2+9rx^pGl@ADQq*OUh^PTX=9;L7s!M2ctHz!;Y;MSg1ktV zR7O&v=$2{|N;Ft7sHd+a$;(ygWty3*^f_zMnrB$pVX$hwK5v5#n$^P1+C`A9wrQ5_ z)G=hW@d(*wXzSZ~lI%G{4xA?kf1yV7BNufCevzHIqdoUPdEzer(r-%l^yNQHbn7(9 zmG#p&R-bXK-Xll2g?uq`;!u0H-#DVjmAWCuD_HGH+xNW1>i&?`_fbEKXLB~mua}b# zEu^Y}@P&lK(KR(FKv`Tq;Rpy#b__TUh7i^tvm z@Mh}nl@qdpjAsQV?^?6y_`x;5oZEcsa>mgO9&6`B?kOv|cVpqXdj%&NeHXAhPrfz9 zSB5|{BR4*j&tV?yQJ!ta-U3WV8tJGQY%haOHFO3af)&UL+dnR*2nfUiI6<(2oHfc> z0k|4yEf{1Y#6ncaAX~v;JK+#}5gvo5Vt5Su!yUz=UFCQNlm+pc@aB6@We1$d1 zS7IKj8Rf#p!bcjEHN?JQlymD~`<6cD4c$#@VZQ2SRPm#6MXyPf2<#nbSvl09e3D9+=^(2poozfZ50%-5D-r5GJye6}KlfPTm5khmkBH@i97uo)tTpg8ErZBF z)d^@6P)h(I1{X9s3iaXWtpnZ|5DQvX2+r&|;372@ow8|46)*`lb(HxGVWDc=hzFnn z5UwiEbQz}GKvxMv&2F{^>Iy_Ea2G&}$rJ^KL3D!4i&31wS8!q*fQoJbSHJ`8pk7Ir zA@&MP#dNx%qYdl#eZz1)G_~buKLrkexR@{juUtLc(X=EA2N686z_wyqaexgQ0ugr7 zH;}XqBz?1H&OYtz{oLYn?7Mt&wN`lXWnGJcysi|!Ymim)Wz9PBPDJk2k##4^%Z~hZ zyM^3rB^RnRfU75Ba-)H4zh1fe%!{KB*|+P-tw#N|2F(Krxxppdp4RMo+jP23wfj}$ zrE=*r9?FRFJ3R3VLay@2s%zYN``86XNX8a@(ncM~DtQZuTuuCoRpH=Xbk7x1&v_6U z03pOdorB*DUVYlrr$N6S4S}Dl3qz+N7!2)oS+YQcN@Yn<6T~kO$`yQ8Aq!!(lF?Z+ zi{((0!j~vQi|Cl7kR`G-G%#)<$(u>yMp_|_Tc^ju4rnuWtsc-7vzj{708|WU;TCWg z7TqyW7C;Nc6~C6&Sffhyu`3W`rA%6faW;z3WvZ|eZCI%;e7PPA8KJ=?I&4ErNd%-b zv`ns`t%6}Cbh+m%{EGCnAc%ekDqF!qugA3j4DDXCh)tvlQ^;*2^iZyB!-#o+hQbvP z%TH|=r1Hdg@gS*@&AVyNAKasw9m0M~E8&+NQJ;EDp0yATj*$4n!7}{syfP2vxFzyKW(iRNOkE(zDiY6MCSJTo zwqlE7>pt~?pS8!&=uV&4p7}+0{H*%$apj)F3gFhJJ*u)Tvc+q7WxEymYk9dPlB@z* z&O$O1(RsnmqimiIG<$3t${jlO;ixIM78R3YhslQJ?2>{fzr7$8)dYf=S89`8s>n7| zrzUTX!=VnRi&>`&)G*H%4BfyLXYb-Z*pS#f}@M+NX0*>;@lJWckUA$v~i_MX!2 zJEJ*pR&(gA>gajZ`5WZoUH$FnAEdk1LLyC85m|WJ`B#_5JKGD-4h}Z0>RyF zaCdiif`kM@NQk?8y3gLemHGw(g5^02oqF>jn;m`!hgu$AdMk9jw#&w$uLwzHfU zZ{Pc}?L`X5>mdv3?Qp609dl?O7P%sYEL;qrmhhZiGGA6xmT 
z_((pnODXx*u40^@VVI)|1ARkX1w-Ate(~rW;@S0^XU}L?@lbp9&(*@3JOe`Ate;W(hwNDs{~4ibC_nu$YQ=t*hV+SDCk2dW;SffSA{oc9*Dbl!J z`i*Hqm^A`%fUgEkZu-%xahOxnlpwBM9M>b66S;&pYdwGIj^53O>2-JdhV1S$O;(Av zv{B9J)k$d_Fdnq94!n9a`$yOEU*Y(FctQG~`9hpTfGah&IH43=u{0OW2(fS+M(%i~ zHi9dPukc5!HfSKdYZsIBQj)Nude$a`OT64BT5B6a=L!*I;~1lXA(c&#+%Z%SBFid* zwTWdN6KTT*9-FY&4g$miVLKhFOgj@<*Dd?sE zvC#0sCr3*QV`$)Nz`bCH?t^DT;4gZV#t#Q-G8J$X(3Hu~3Op62*w8q`4z^&fAW{Mv zp#yHP7r>0$GQHOl4Xi978N&Z`#xAO_p1p_6J3vx)lUaM&xra21kE>Umm#@Fjo>otC zyUG2k?(7ChS&O2r+ki)yPMv{6$aAjhNi(^Yr+!*XY6K*=MO7{$Ssg%A>q?{Gc2n=u7I|q8dBKrqcgU-Bw6S=mShV#X zZ_eSC)Fa{rCn=K#pc1>6gzqAeyJ>XY#k*iSuz9i3UNmUQw zYN&}QB0NAm z{G+uO7d!|wXu+D=RfT2ZoML7A3)Q8&;^UX4JC94&?~%>lD2-Vl@=5Hm3F{o^$u&_} z{l)=efxqJZZ3GXULEx+RhIf82s?&t8jDU(dUm@Zdx_6_>o$9Y(2MR|8%xxhMaIj;~ zFjxLqpDswDJQKPj<_Q6==C2oT*sb2SUk#q>#CiSY8-}|N$dgR+EX$Dnf@EbIGN0-m zX6WyylbiST=dY>`o{=rz-HDO*O@~D5_wcst;~zO|*t}PnFu!f8Th6ajo=k8mwQ?_a z@OZgqo%q}t{qF76OXgile=Gxq6`Wi>(Z9rIW)IB916JxW(~AOE<4Ye*`d4Sw4CQmBKvod8~xw@=^6qk~U22$RDXEajVMw)n}jZfMEx5y`}RRRf-D2Z4>Q5G;*kBlH$m@CxsgrrAGxILtiLt40`nMbZY zAc+gxhD^Hs{x|1fCBb}rn|bdR^Ts)b+55qVN8fyV6Z6(Q=A9VkEl=iS=MN?Y?=2}y zQ}FNcn$K4iF1dGV#?5`>*F<_8T(rOR!HJ5j3w1^Q+ty88F*oe=!Iha0_m$Noo_^-E z;)(O(_E7;XpoYfzYLRU;I?w>`eWWgq@!^m0?m>d~2sil%N7Zn9^>Ake@YN6(DOfBN zszYxjwTwh{Z|W*bT)aNaG_Hr68{!=dpQ}Y<;I* zpd@mmGV>2`g*Cx+Et{5+e{@~{-#Y%ES(E+;zV;7%1&F1?Qm=>d7JXj;Euzy>e1!t? z`V(U5^(vKKEi({~oII~4vo|%)T(1e3qj8Q^JH{HUBIJ(KSy&?2hD$y0j31`62v$!+ zaB8I1E)Lmz@D!zCF;FN4LC}u-va!JP&>2HxVR(m5oUPCYz^W9pV@8~V`#A3DkeXvu z4f|+LVeAN=R3@K$YSg_4vCwR!E*s$SBjBh^y!qC=P6MRJ7XM6PPdX-@K}|! 
zh~A6brt{w*#ONSU@mW_}|h0FS?(wQb#xrh9ZHGSJIywX7|uY^_(XL zvhrRifq(4wJCvyde}!1IsEzt)NUb(P%C0ctD>>pwLN}`ckU&OJ>|~^^0IN*M3R{Y@ zUgImEDS#^!fR>5Z!V&BMS)l;2BDNU7VgXa301?qoi~#?J*}zRCa3d`frUFTYf=@dN zDdc`|wF0JEMKE~*rqO#5!JMFbDgpj?nqjby*W+QqGF$_DH_I>`I)j#>dW%5TCe(nk z3S}+Ar1nw0p|ez5kCLhm!jX|S8R033P)kHQA~7HV2NCIr`%@q_3@annJrlTswjHz; zrUi|Hc4)5fL}LV3cn2P0Pyky0e0+s94&N6NpSydZj+VVFlVugCZatQqzbQU=PO|l| zbmb1&%$2<%b9-E4d1n6YgY8>DVSPQZ71_0jr@MSIBF|*dXjc2TYzmu#vsNQx$z3sGW;|bM@Bl?wa&^l@Wc3OW-4?b$+apAB=tA;~T! zd1d5fA*louhgQ6Slr`upK|r;!Q@j(H??%5r%A5hd8u8mF7TEb|Sg_YKesg+h!L!ox zyZKdXPh^d7JjA@Sgz0yl>38qbpL1NJyVsxAUwTPy<olMTWq9^67>svZtOa!71@$j{_5Um~1HPgi0}7(BzW;GE zFbc1X)E4lS`qhaxzQlOC4S4j%LpoqUQ7c)sw{8CKvbd$Xkhz!~)HuYkb}=ejxK7Qc zUJcU%)#gF$v|z1OxE^y@U<6TM!u(5?HB{&oKCgJt2bC3-uhT%ip9^3h zgtAi9Ithb*@cl-Y5ZDP67SI}a7=SA}Gf2%v>G5FWR3Lhb@Uw#10t%Td*t*ZsJ11#i zHSdu^Q>wAW5n3q-XQ74sL3}t+ACQUfQXL9h790kGR%)rT{dX;F`c@oMuQ@4Nd9-Es z_3nFk-V~5F6)A2qJbB4$6A}SScv`~Ik&HUwiHG^+e07VIRP&T=y`-^|RJ4)YMqNo4 zDdm&Ac9PE{r2-i0tjXf2;x@Ik@lqeooH9o20S9Oy* zA*t;pH40KHC6y|2{F!jU!RCbDx$qA}%Q9}aCStn^JvfZl1+LclFW18l0R~W%qgz0I zK`~$z49*Hn;|dl~3NV-k9z(tT(QH2%7NIJ?#84(xY6F?V`68eKE?0}v^;akGXur(bt9Z%Rxlb0dE zqCzb6UU$&xiGVd)*esv{gs5>;z?xTp%6qvk5VAVs?-+Jqs0FDN{&%p$b(=$JD~Cj# zLxSEpkvPRu!4;4dm@5bfL0C--&{&0Nk-G$Kp;d^&I;baNffyE!MI6#9C%t$Ap|({9 zB@5mcmx@MV)T?761P5^h3JUB1L!n;tSC|)h%?#reKvuZI{tx0SQ{uB!4N^i?P$79< zD1DG6z5Y;k`Wo$4Y&;}IVov-DK|o5UZ6s&3JLhMc_HU-Nd^Mrvi?Qu6yn@vg6zmXL zVGDdkCGK#(8r6k-oG*T((abQv8Zcgn1ZV09I;jH&+JhYgqda;i_z5h-d)yKQVe@)t zuaT|Yt=V%_f9NE_rWIFjYaczKR!(_Eq^MM1Ql>$UPkE)Ps#;fG&K4E31qIr?eBJYx z01xQ6DbHM0+_dDP?<&lemJtriwvtQ0%UbE-0=)CKFs~0FS+v^yZ5s0z*YT$Yudxtwa0E5pj1D3M|bL;4yIdY?z89aYc4!cU3sLw z@kDj;p77=q$>SXL^CBHa@Jedw{2dGp8d{-i0vDuj?IKNGIsll;PTdrr_&2^E_t9@Q z%-{)MTDkw?8_EnHov{DF^B#Uib>-c>oB|%FRH(BF+rzvc$9%Hw?XTcLn(@QL66eIm zrN`LacgfzHv5Ax5@E) z?i^CvOsYY4HIce@($GQbJ4r3yP}5Dx*b)wT*+9~=$)=qOZ{O09!ykXsKkegB9(?rq zqjx{M%6xEy`Cwn44|gzcu4R~1hKc>)tHgH(`Y~@0`N1u8-J{0^gk0&WztUPb=kl&` 
zE27^EGh;l5dK_7Nq$cxZ&C8IzTPMv;88$N}{?gf1PjcgqK3;i4GCm?}Qb_C6FdA_< z)R*&5iTueAza6VPMEt)ydv z#?dm|i2<#QTsH8P5n>tF6Wp&~!L7f@&i}K>=l{?*j??t#ADmJYo}-PY*EqQV=w({^ zjmM2Q3`~71Sp^6#nO@&1CuMD9$EmKRJLK^z>EIsll~t6|GD;4EtciZzQv)^T0eV<3 zS|AlVRBs(YQ5M2!B2x>GI zpipYb>zP4dFrz^P2rk$xgrMJt4&ZF0>=946K+K{$E{S?)B=W$*6yb&mI#c0I*BLsL zOOn<#NrRkd=)S;!!Px+X-V=x*m>&B{6vu&bg)sOgg9y3;ju%1Y|hmmf6VdBM$VX3Kc={$AcqDg~qf z{8fvlxQo^rEZ{Myr!ZOwlInNkKDG#A#y0?Ge#G3sR&xLCoBAExOqF%>=B1<_gFP%#~G` z0jUI*!K{50+-}L#K$&%j&@Z9$&=peHMxc(9Q4R`n2|Q|;t`YjE0%V0AI|?)}n(4-e z7upA8QAU)dr5|NNR0evj;X_flvgm^P57*B>FVg?m-alG?@g-QjmNjYuz+;rwOS6l` z8Ts;iPvz(m9=$Bxby~J^k977%Vbo&2TVlr~Ki*IWB;+>XmVquI^}-s(`*0-h)3H4! ze+CQ>Ld|Z_%$8s@D=vxHQnBe z%DopAhpuZ(XzJ8m^{Kn6^J(nmbp7RY?UfAewM^Z$O!oFu_HMT3VGjE^PxB;K`Mg9^ zRHZMiGgLGhs$1yzUVW<`!*|>+7S)C5G0GGf%Gwp9976s!((3Ove$0>K-kV_C&%uKk zGk(?W+jrZV?^l=Qwl&q!=3bsAvz*+@CR>h5J;PsqJM7Zu1F!TO@c7d&p1l3;-S>12d&`Up>$Fi1zb$jAEijtvnF z^%so@RuA)*BQ+KUCm0;U@3DIMk|8}0!ix(IKM_hV5b+@xTiz(fID z{eiE3u@{<97IxrXHPTfw(oHJvdsh&9uZ@=DO()R^oQx34wA>rdFuEJYGiy9jQwg}Xj zP?pO~YKsk*Av9=w5UPSc8k{HL-3`S%X3&7R093#^pn12J%HXk&!aiDW=qh3Yh`zws zQiL@nibkKTgxL{`@>#5)-9~s(8bd1}83O$R9)|i1pqDrI6}~d{0WlL73^oIaIn*js zkM5z*9O4U?ty#n)i8#Y9HHo+bP{kV@<8;UXa>c;}J+>~12JB#qeN!R0Cm8r`mO2uy zS)jjw;*2(!k?RQ5v#Gna^L7bVALU(suC44QEizIINmZi?%u<_@+|8FS+;TUwTvgkJ zu^l1*#c4^)eNg5FR0oQo53(|JWiV-7Kl;(dTnib!t*BCnuii=j`l=S=o!GN!(U~ zAwJkcL4^eh%X=~FwMc_4AglKxIe=5r9x@Y?kNZgi+IGN88}z_YuL18VlX5K zSOrD;Mg#0X16Hg3SE;b|S*8Tr3EC5|+;zSjtyl=&;ERQppQ*eT>%A7~kZ$Asy6`(- z;w_=R!-J7_B)D{!smfx2k{S}<4W2_nkbK3Z=Z$&T^hc_avq zTp>+8q#F%fxe`zcN$5JY?A23=QN9T9MCo99;h4aa5jo+)0VfR9MLUMLWWB8tM5sN`#b!hUdB-y3%^c=;tN76I5WCt(GHyxEP z-X)p2t~Y3&z&27i+MN$Ctbvo82TbICF`oCyZyg_v;C?<{2z+IfTD_*NMhdWRY9RI1q@vVN{F3D7lB_IgUXe1tMEN{d z{IWp(9L9{fdbANAJQCl#-+SXm&)E~)qkG#9>~6YyPjcsOEACrt56QaUwQ5DadwAZ| zkZR}oy|C%ndWoD#BRA8??FZ!gBlgN8&6(@c%XeAS)dz-a56Q**hKp(B)J^r}N95`g z>eYEY({Lk0cO#R%^GtU?+weG_q!-eHkreuIS?B;(ko;-`Wvg0A1BcYZ9)V+M?Vy@- 
zRF8mkcIkMX+AehB_`22}Qrsf_X{smi)vq4mZ$jVx({N_!u+a%Ir|U~jROY0$S3KoZ zJ+80FtZ6!ay`b;MPRyI}Z@jy)&nG9|{`$V!;rf#!0cARx^61P49G`-RreL z3{Tz2)ynww^5o5m1v?bW_R2RMS8hM6+<#eh_?q(Q4cXBf(&M*fr|wG5-xFSbAiQ+H z7h|Ls?y3>BbNMbTL&?H55(n?Xfxlk<=D5HIJ^LA^5S=Z@)yjC7s)qN2GfyGdC7K zh`g|8?D9Cror~w(K7Fdb;aUTmu)WxSVeKz=Pc6dvW4-xi0otiS)q zCBvOXkg3CCfgW-0MfT++f5qfL1wxv}`}N``Wfm$O?ZvZ@J!0=NQaHNt}rd^N_m7u#PP+o27E3Jc&0IPE7J-Vat>%ou)a$>~42@yp3|09OOe z>c2B@_}R8$lv~@xK#pBpr&~&A@S@J?>$?{2>RESKu;-lg>^*hb3pTS@QPF1TkkOO} zEGXcq=)Kl-Up6fcbkV?ZT(8ln@%HrMoyiPaPyi??StTSZ4+d$384^cnjbOq z#SM#ChCn=QfVYEvJnJw+<1(9dU#J7hf+_|!-Qb!`b9^ByjFc4ap+=Y#2#FN*Gs~gp zq4~VHQv(ASO}}+WmjkSdGP+HIW&vMB9R{e=0!3DURW37CXx>3e1bl_KK)-n;XaOB9 z#P}g*4`JZonn>;9Vg3NWcVz1zzR-QT0sT$;NEW>z+8&BzQBHA)oF?|sI`<@G@esFo z+5@GQ^$GAVrRhALGt_SJk`T&$X))mlF3WeG0=}KIwrl3?mTbAGK~JzsNb1F;zK1kH zmo6rSJ-F{*S$V1^qnT7mNKOOkQqgA_j8P;15%!d=y)0i#S|tWZeQJ2*es25CXAMuv zyKvCU9H|a`h!ybx=KHgFIn}E^-UZrqnP!2wtP{UquC<+ASzmi3vUkNM?B^xrDeV1wh`~bGmes!87rRh0~A@m~H zw@ws#%-4J1hYv5ZdBkS4tI4Iy&IA$p%2E=Kyv#{JFNMN<5DLEh@1W75O3#P|?` zCkwvrU;Oh3&jhJ8GmyZmRTZK2Ky@QG^a(6#Vb zZnwUx_myWtH}H9vp^IYybXJ-9MOU+h?@dyJpjvVZVC)zWPi<@vhwa z7b>6Mt1CF2_2hXIXU~O)Oy3bqUw29wF{y8Sx~I?A7e5}E`K^8F-(4%d^Qjvg&^$S= zcWSJ_Jce%($G1)7yU*_NTi6@ELNtA?D0RDR^-;st3uOCc087K>3+$Hj%57&wdoPIh zoR#f5soZr;zj})@D5`eM#K-;qe);2mmp<%w`MrJ@nNN>V5BU!^Gw&>8-bZv`G|WQ# zz8l8$bz_*xKTVsy@aX*$h4lwZDwjV>9~2QbVd1QCsj;gco=-lvJ5w*ZDdrxmcoDFB zoy+E>!6$Y{UOBR^==tfkjw2<)mFK%{qw;LRI?ViqW`Roc5O$KEa!Qa6B-MC-GTKKq zDa0@)kR21C1-=^Rt3l^(jGuIbuXMPNbeN}nxQA?%r)*N78a2uTqA9?5`B-lS;1)72 zQP`gdROYMt%~OVV!3TR%fOJwo@6-^fRitWKxNJ(O$SFx5vYd?f=VO0jV9$gA>VJze z3lWd^7Yuf29pTYs9-#)MHq^Caq-Qtq72p=gE1K7A$HfagU%+;+Ql?o1|4j(#y$#o9e}Tm0^p-u8Cr#2g0igzgXE-`xeP9WdVIfl4RC$5n0 zN9tT-h?H@X$xxr&OHWj7zFf2UY+hP%@7;V3+Ij6N(rq9u za@~u1J}9rJ)td4iQh_EQCZ-7~>Qde>;oN-DdMk_bFt_J+M(fK;aZ#fPRu8RG4aV8{ ztX`x=4hy|_MQR;a$TspdwVf)wE3=YQBP4AaQX!>Eg#ic(h#DZuZJr!lrN;s#I5dy>8Zc2VDQcuG zI|C2HvI8mG^9-Kz^!7xM0eOJSqpas$nueo>5)QgWA#o+#1@*sc-C=-)^@ta 
zDn{uD*dL~LP9Rnx#4QTv8ukltZ_`f>Wg)|u=BIKEC$9d)#mC?uL2P~H&T-l)k-b*) z4XMY-rbA_UB|I610P(PCd3nbanVJJGjgjC=cc zQ|d=LNu~sl_XgL0IIJD9&iyC$(q!k6ysyRzz8KB#H-;iBY>_3$3Q2VdDSAQj^QbNP z%VLrZO6moho~r?x$}G@k7P8qT+QJG$DQL7>Qddu!8c7Qj<9vdtlg2L6z@y)PGl$@- zn~Kvxczgm_1+yvYHzhWpAuE6tRX5>tNe_>7cff>-R!)rKo5yxp#&_B#@hoB+on~`A7j=bg zmiw+11g;T8uIiyC6-&hd32jpxat95$@!^-J-}>m-d!L^AbigI%y*=-Levo;4%^UA7 zXPA_}?nn0<4&8AH7>Z+Z5C@AW0>Ht#DE?; zaj40m8nbZSlxmopaD;~l`*2i%G<}@6#4K1fHAHP5DsxIy<5}A&N#U3%N7*IH?50a? z;^gLG0<$pTl%Q_hfXsq~4sn#(#tmqkU-w9l4y#!97pE5MIIVT8euxX#h_!f7wqtbb z7h7r*_p?>|peYUCnYAG3`J2gYFm)efUi*tf{p0{H#s=+UxLz|lBNp~1t?pg0rFY#y z(ay7ylXp}YLjZN$$kp~Jh}0N32#F?0DiGnXAl4u2gb`x>bzs#W-^o8~h5DcRc}+i9 z(@n#8n*bPl1+hS8wZmP!kz9I;(HnO9ieB^r+>_*vaZ39b)*_Uh&6k*Y zu+w~5>maRlh{ilbIW1f@Ekd=FGDYcTzjttI^1(%rH-f?x0z_<(Jr1-yv`|B3;DfMoW?m`Z4)l?NMK!} zl+KX~*9fIsq{@Ffi9+8Ht-vUQbC?e15>9ZW_fvYu6F0>DMCxGK9WaY{&mbNN#3|Zf z9jbJnPW+MAgP7?A;uWv+Pb9v{#AOx>Iu6qJfK(DPkA%%v&0MR3tMk=dlFcQXFLC2G zJq=y)X!|wkg1wxSovo=S_!}OQHFq={(=>ZBbu;%CZ@AW$xz}vi%P~Mk`U#JF_>Dbt_ZX63)Cpn= zW*TdV-y#E4?%3iKU~Mol ztkh_fLrP_|U(gufc>uujdNq9g@9WFtue4a;do%q(s}K*u2G}BLYMMm_)zYU$vimQT zSJPFeZYqvkQtUk`UAs>__jgtJVu@Qkf3k1u59XB^HXmXq{(5rvXb%E#^|ujlY~WHr zHBRv9h>p)kcl00I_0{+;I5P~KEEs48Cn7QY8!3O~)&+kmuLMEVd;!ctH||sHJE1y# zMe!h0ot>`&XsHJ};!yi0%(78?Ot@l7i5Nx!*oRAifPnC6=jvKGx`tLwW2>gAjcsUD z*4D`yniM$R)S_x`Rd?_Rs*6waVj7z?ZLOp@Uz?vTyLt6Rb3;#4eK)6toITSqcX4g# zOzsq~l4-%Evo=evJ*8w+Nj|BsAZ?(Y>S^u_o$_m-E(ai^prI&j`#WONZ(J6 z_WAHopZE4MZ*G3`eYme9F);qE4`woqAH%pXj1BXV`MclSyg$l?`OR+nnQQaz-S^qE z?f1f`R|JhiQ$xqCOAft$V8)|!*JSOf7mqBydBOkC-Z9G-EqRuv$nZQ}_L(UdUOF2OJ*TskdEHI~^3`~FgY~A*YP5XC~8@`{~I%skmYOo!5s8joB&z32{9IJ?S zujKBaIsEu#qSOtNmAk~-PDl@3Ra|_eO)tLQOy2u5t?Z0wGO7}%RC$}&{{?6agU>kB++zI-abx; z0627K920d;Gqf(r7$Kwqf4HThKwr3}>QFE*GMPhS(B35l%^)3K?+~xEpH4pm)HROr z1g$+BuU&Go^lYK>W*yo6guC%pLq;bl5EB?CTrU@=H|r{-Bpc!rcoS(zVFyK495uCY ze^IR}X(eT?B%@4vw^VwyP_*xE)x%oVjS|7}%(_RlvI+_PkbDk#(QK%InoC1k0iRT) z1%~kWoK58TL(S&%EoYyKz*Av95rdx`skTL|>13%GzPy8NM9Y$=u4>a`V(4lXS-e9O 
zvrIg98-ZLuFqMT#KAgvbk+zIx>r#Xu)7i}MrO1OuPf!xRSxtL?Xc|%poIV8Z2a72T zLj@Zp<*>UCTuD2&Af|klYN)bnB}>_=m3kb9*A}*(3)OJkGLcjm>qFto0Vlhqs6FPe zu*I;Cm!iFkX-(580fq~_=OA`jfzeJ3bzLkI^PWgD%sj1c4aw1^)2=?Vzy zaMoJyHJC5pdR@G*n&iz5=e)Wjfd^5ReXPWD z7I)Ep(UlxSBW!|LYQ7+m85D>UG)mS@Ex><=uS}O3E(_FO><@^m*WVG{!F7!P4%{QD z&>bqQJk^~n_QE~Y@vHK~=jA(&Ddzs(JAJh{aCWz4P}6U&O@pmlf3m`i6&dWP`D&8j z^Ko5YjOTwfp$BePpNwe#a!lt}6FOnb008#2ncy4q-fyjF{4N}XCU^@RqD6tT#PN%J z7j2T7@YR{C%8cji%VG@}Arrm=dDSVP_^PW%BbE@kg2?1V1QT*VtZrjZ4Xs1aQ$S)c z$KV=TT3Hl!0IPthKn-z?$8n^!O)C(PrY6$TMDm~W@}9Qs+Hkd-L-GsAvQ5>9ezuJ$ z8{_fPF0%ah6KsAXX{CZFeLF|h!PBC1htJanS_~Fa@JR0<&7G90;tENpl)$7&h(^3d z+oRG`VoE>>H+W1zWc5eU=4Q8nnjzpr0Km z{OmvdyTF*f(;VKiwjJo}wK4rp{DCdeJJ&6|bmDNq^D|`yN1kV7b+oPa)9>)#zB~5rSG$-G*E4UgWZqxPe6p#}hpVZF6%+OT z`w0x=!1Q%w-m&`d8<&A&0tZa+WBQL_{{9;?)p5dxZC=NY&b)DX)`jC=``D)5IUap^ z&(w9v(=Y7alyPnGg~RJ_o(tQ(-g)QBIq6qdy?E@fZquemrE9O(C2TE^Sl!~D%yXTg zbf1N%Bw`+=u#A`5rATd(#r88yVGJc7O^^%c{FPlskV+aSVn89g>^LU5gOb_ z@X+Kolc3o*DL_0qP%2*F<$Kx0=sPERpSG?&BJA5effi( zTZg-M{^lcu3l=(q-&wYR978P5&(@rutlGbw+VG|rvICx? 
z{&NMP^Z6jJ7H$!*KOo+BL4M|*^5!%4S(&bmqwSFxAhiV_j=(_^z5;*sC!+e}dNp3R zf82QgB0H}alYjmD-|!PKol~59Jg&cLB+@tNXNKBDsfD)>)eFJF2?Ff0c5)+|2HAux z(<1H5Ek)uyQ-NPy%MdBrg@7@({(5U~jiry;CO`+p3lznZd{p2TpoBqLM(Du4T7+v* zlyivGSVpK#EQDRG78jSPfW9E7M>!-I&8%5Bj zL|4cO;0ls3q$LIuYd93!;0p-S5b zg-g8FIgtg`?3kc(ovB6t5qEI+nOd;Q7)En}bpvd`7s6$tCvpRC?M3y~GP3)j_+~Z9 z?Il+#l)E1{oG9#k%p-RiWd%I#qjGKmN5$2VW(6taXi7Q^IStAO#of=U6`7UthsC}3 z^Mxpkcb(^sfxZ6sKffE8h7?=fHNzDOGY@(@kirG^;Hb*?goGE<`l>XHEu z4X{io&r~g))cl=)JvF!&KYY6wwS9!vF-q?YP#vQJ z^tFo-JH^Uf;xvwtER<9(;S?%cdFxywh=nH${EXigvk>hxe$PBkLOKFUj4~mnJzUfjQ@UZ{BO7d z|I44^HNJZF&EeVw5hqfBs8QrqNHPl)7=OF@ScO2elUL;j&MVd*R?gol1sClR&#?~W zjdkY^GHZlO_NQYphu$^FLfU__5GV@}>x;3Jw(38T2hS^H;tVwJL8dcIu;D2LN?}T% z&^b;LG`lx`iE!a2(T06e@K^s9Up2zGz{p?m_=e6NeXmH5pEJarDm9VIiBw7?64DDI zh)=-@z!euHEfnKiLpz6JA?#z@&e2!bNJ`6kii*4P^0}2&qK+=oEg+?Ny=_%w@5Xyw z&6EvWy1qPoZnZ^l(HM`?A$D0#Q3WUNQ3kl7hcvXydIT&@oPiJ>7U*d338?cq5EQ3} zbciUv0u|h?HgqaA9a0v26a`DrSwT6*Md_({tc$er^`>g+(t+ASa$g%yi^Q21IbDTC zvWhw#)bDw9%Dfi!(%^>Un$9ZP+u9ce4*c>d7n)j|K9LX%#VYaPX;jWe$Fr-G0ayCGmK#-Fh53( z4n8s_=F;bOyGF;|9};qBNWlFm(JzO(rj2xdIMgQXo3R&|zpa1gt4+)YbNYaudN+aT z8^iR8X8J}jOaQ~U8vB4_-v88b_~ba|tBK6#Lzxc-emvD-c;w8tY+c?* zmmTZumL<$ah~Qa7bx#T5{pQ<>@L7jM z^~B(At62F+ueM=st&@U8zxfC-X$a*N(r}PM2t$USZ95^p`q8otEkfX{pRHSm+PD2; z-8j;0vni3a^*jqlwXlW4F!Y$_5QhXUIyitpc9913Vkou=Aoj+x4KP>)X~1P!hH0^# z7NVIJ!kPzb%mP(5VPG3rjC7g%tKs)(6|A!fr9sk;5yU=>fRsQvgwr>jBM4;4E+DBQ zXzRxz)L@UV60D;{nV)W|rw*_RIn*}jRQlp3tH)QT`~G_8FamIemLC(i3Eyw2%){8>Yrh7NbU(($)9^% zv;(GyjigXbjnl7`$@Zi*JZ^#!D9Pz2cWX2mjpT8GIJaC~P@~N*S3WJ47B_=UBITW= zq?6>fkr%Dnj0WYy8YxJs%X#hT4f301Jts4p58i7`D@< zr6Scx*pZSQR|{-H*CnjGb1YMSqnhLhXx*-6FBR~gwW(Y5grg^QGE&;5E9)c~6`~u@ z+jDC*4MujW9lb>zX+sK9KAFFxF=SyQW|6`$o4Hi$3&XKFN{j}C!gm5(7L8oQ_y9E) zgQ1y`QPfjEhea%$#{i|^0SkvQ_>5s>4+SO)&?^R}{jdd_r!jF{Xgk6)0SpzK@NMJ7 zM*CI*GzDY@1nZPQg%Iw^22fShdq0JCjZq%Sbcz!zs&#}C)15XE8ra)H7w4A<5gpE> z4osA}pU5su;W6ES#;#q6$}Wg?4kiwP#L`{s6iB8y8LYjDU6|e)06s*A!6kElWZwWq 
zV}3!8=T`ffT``-umvX7k6^1eKYpzr(F&GGK1l<}f7XYphy=nqi{|;ZhS`PobcK#xJ zf8Z-zt;X-1vT*2lb_nzp^@^Ml`O|z&+H(yc)+Hm0weO;4)jr9>ZL)}ke7CsHX~CU8 z*)$HA()6uO&w#1j{mgnk9^3Kh_|9+4#r-D=Oo2H4r*wWjl|OJ=7vkG~vKPWx6oDql zbasvvAagKwQP=#9f{puSh`hOQlg-G|VF0kaQeRz%AR9fper;S$J5MbZ6RC`nR}ypy z#k6{R36$aBsXzfWw;E88KQ*`M>3*BOtsR6F*e58XlvS0zMTNX)PwTT^wB#4_s_SKK zjXJHAJh6+VF zsOu@j>LR=@!i6vl5DwZfQ(9jSY35Tj)z}O@82qCQHI3TpW^Glgwz^GM&DEl6INF+a zeQm40zQuqty|Su?t!XeIXSuwEWLK~+YIHAZltZk7J{@QMnU&qgE_RINc$dw~kCwkk zYpJ~3Sa!1PdFq8jv(D^aaPjEUYo}JV{h&ZDRk zCw?!?-Cb1}eg4QVGeT`Q%-vp)wms+8mh|&+``665wEsM>!hP2o-@ThIG*=xgd||V4 zanRoV7wSZNo^-7~*|B)1VBG<-WGhKrNn-Js29@jrwf|DB_Y$?oLIviOU1w{YXR(em zS=%JFb-c=Iy3#UEg|eEiwoB5PM@Ugt(TZu2Vyj4rMYzZ&LWWiX28*y`A0vmt)-_(` zF++`>e!x67WWFkFu@X2c1_PCgg>lP8i?`_$mkZ`@)+Dc2C9l^dtky&?SB5Ua+M=|M z<|c2$>Mb7^*gig}V_YC_O1J>uiA9tI_-eElWv&3OfUE$kFpBt-b?5gM-0v+p-%o4# z!4fvS4KU*!Y+XCbt#N8_r&&n5MRdFSY=QT@p5Uc?qb-AQ{b4#yaQ=Y;jrix~nyOZv z2oDjMlQBw}jiX~!Y4Ga7?DZOA#8Fse&@uei{NP_?9sMVL8#uq1KcXW@^tS*f-UP8A z!or*MFK#>deNo~TgpQ$%B~b2FbI7esviXQ|=2~&kJfZt6F~|)Nu#g{n!1n-(mjHv6 zx8By5-us{~2E;-Sk+u;-^cax01;rGo2lBxVP!L!qz$73dU?gBO2O}r~&@vTX24)Op z7i7Tv7p)*I-4D{*2I%myRz6Tw5eqNfG*2z?mAShbUldH5m9NGwP;ce0whmN75(cIV zYIdhcokOS=-9v==I)>>S!dVF2?IJXwM~nl5)R_U`3i~e8^_U{`nt^dbnQy8nc4P1Q ztJ+IdNVsg97H?Ga~^rgCt0mp7>X8hsRh%cLXuHR?&OktMcS+u!}StQ zMoVwChBRr(OD+x0sFjjpF?rre9yVz1lnL%t2p-i+9@ohM*dA1gvzpYfDlO#bpVrDB zSITeXi_Sf5f8HQzh71^=1b$pBDe55GE=t3fHH0nh3Y;enov#UpG3;!e7g8V>66d)F zAWTe&!fh2O3lIx~dmgE3Kr9ekaDPQQAs`lFpdtGQzJlcxpcEAHfySXupfX@804j*( zAb#+iqqmEbqp1gy3a|>#w#Wj2Y69Xt`&bR26fhMqt&yivS%zcYlURj=OGG}P7Cm3U zZ%iD3Z?yK&iK6FvC>4;$6ep$*o zp|vu~l7G zC9JKKR1}H|vxE@8W)^)w6uIEls|1WJC7-s8jpq+_|_uH+}Jg8(UvKop>z{0_nGZks?CY=J`2=*<+YIwA^Bxe_->6=T8OSjG80LHCogsAp z?V-%)6PS00Fnxbwm_ZEl_dd+GZ_pjQ3_ZtzZw{L9wYeAb*^eW`llEj6Uuopt>*(3` zJbO{vwXl=BM$Zb5IJN~@%nQ>_&AoNR`}esE&g@)&>*T$j`Y8(&CoP+^x2D*A!|I;{ zgCqAGJ6G9#{DolOUCE~7;+?0-`u$|d7BXv{HhB|?UC)NDP=v0~(CE@dEKQA{!-B!a 
z^onze#&M?Fc7_r_%Tx}@T0He&-rQ*h3vvlYKz1<*Q;LOEkC8@)sjv51$vPP%v-8^5$GVQKfQHBu^wLQK*Y;y+v9n#i|_=Gew{ zhpZr$vBKb$WSB<-oQJ1HieTh8&L5ubZKK@zU}7rXb#L!Dbjxz*c63&8e!q(~f-c%G?zzlHqp75t=)-Anfix1ANAxFx;uRF+YwEU9N9 z+ry)cNyBNfWiYxR(Oc=O)q`H9Cc+ALUix5XTy6eB6xM%j75djl|1UoQz$x5qq^~?2 zaP|SQ0IqP>apvhCyn!(_s(6Q|H7p#U%3_sNt@@&z9K9}_zr8tn1t)GTi|KgyP(ipru0tW@Mi!xnCoMs)W1LXxi3Si0HQ)J;KHFN8A3S=Gp)i&O8YcH7_h%eZm`YN14 zwJza$3vZF7kJvU?X&1sGs2a*EsN5aG^w4WTmqq6aF+Uh<@SRCucohoUhgmv|BKjr^ zBNlTuoK{`NKzAd_XvbEQRx5o{r^>7{T+0xp=VQd1WE7H{S*oXHCA5_3RL2|oTmDxa^H4xnKpVb<&s<1?pJDKDO#>p$l zlXCL7N|V*9sg{!_18LTf=S^_9A|)LJ2nswEG*{gy*k19;?U#a`m&%UZZNC0ec&|iw zzl8s&N|eRbJ!~@^dDeC{hkGfr;d!kbCKA+Jo+V8(ay^Tru51g8YY9q}MkC&69)ae~ zX}$r|Nv?AZSO%egL&5AFGktgh^_p)0aDsG-#tF?~5%>tdDY%WH0L&;hn@zp$U6VB~ zGt~f8C^%+BVCoZYpYUrn`iVJC*V;vETw>{{Cj2p+V)S0XadX|uD}EY2&>87 zV$XOAvvFR`Jyni@23ud&(o^LXMW(u`fTpaxiGx2lD`MwE8B0qaV&zZFeU-pbHX$-J zd|jdqQ@v!P9E5NVK`YrIS{j(#dhs5e`4UQAiLB@vs5bynnU=d(TjMW2e)m}ed=>t9 zx&-3-a{>HcuAjfan{>&3jR^@L7A|QjB-7ET-?}CRLJ6PcNnnR{|C!>_J<*xlk`vdJ zJ5S11?2=AjCh$z;TZMIxa_{`vp|jtVw*FIj{mpy&n+v~~)cxrg-he4WbOu3QePh8N zWZjJzxFOC$IA2W;5IDt3{F5c&a|O7yE!iyEyid0KsQlPj+3hst;|#^~98F%4zPLZ23b?H8ZHeS*q*l$cwyC`xmH8LSVffzh1VND1ZG)k(_A5l zEWDW0u0t!Zt-~NukeXUyaRIk9ulwQ6_VXvYmM?4e534nEs`+(l&46*`Uyd&Rd{o}o z6Y?kfl`q;YIGs)&l#%obl8NV|GB&eBR#>NbSs^Q|kQY@bi_5iem@h1%hVZETVnbf3 z7Vf5)E`&K}MzKD<$nY4mhehNG9)Ak(+(fc+$@3iYJewfR=V7ky#&g+~EYZC}X?nS$ z65)VS46!cxWSAxVubAH^*{xo^^wF)9YsV5#{l4VZ!Pw*L6E5wv+Bn;C+ajkUn^UvX zoDc1cy?$|dM*95gH#R&>-}X3bqCZlpXu|lv4FVVW%|4i==CNuz=*y+W0=pGK3_8L4Pd_h_05q}EN9K%{P4+{ z%Ie$g+`~D!yB?*d9zL|->~Xu*DZizL`tDm9dw$!3wBzyTwk^JO^kP%q)#kz#H%?j2 zpOJFr=-GzaZ@j(xIl1hAUiz3r&gIAs-s|0UL3HRc*?*pFI-uWzEbP-{=|S!6opKo0 zBy2WB!_^Se;#gZK6HcWC9&<6KWI*c>2@5VMS}Zd5NlG9qJj5X>17b_>+0b99eCNPj zM+144X~#2J#QxU^LDgAiebCKz+O|??sM$zhTW>hRP##ToO$|g?+vHH=Rw0hvBPoM z?A&s0eVY`?q*RkdCIzgbE(>aZ1y_H-SJ?M&eDxaip#Q={P~fW}Oz7nxN8gxE)-fLbCEfiJ6@OA?D^P$GTcD>Z#2IBYhgc7kd)!XL8xGz2^eq#fa^ zg3}3%tW~Na7SY*l7lAK=qAU$s8;%N=${`koRhpuu>H>f(6y#Od 
z8f{vf7rLfP)dWjbI69{=!VYfVE8D2f*EmA7kCXMgxUdbo_$vF!7A?_cP2mn*snQ-1 zYzs_Jf^5On>by;RWuWdTA0$e9$d3I)^&m|))H*3vH()6rY1?hscFKxp?Lqw0NJ0fc zIp(->X-ow>zLK3#F`QJ&OUxIe23b-zNy;J7X)@>#LXa>tgCCelg0e|q7V%3bk;TL} zRqCBA4oH=G#_R8epmWTN?jNxZ<+p{P8OUmVvjS{#`|{2~ce1xywM zWex{q_cQ`QaiG{h5-~KCk(6?XIfzm!hf+%VQwsajiiR`FBsn!CuaRWek=zzi-h-ip z9hlpd*dz$8;rJK#r#BJ*)OMfL7b!Koyf#5`r=$@XQbV+M#w}aHZv7UOBP>cxHIl)9 zwfYzr3jA9Pp_w3tasnovfLNeTAyBCBN`a9MQ$b=T>JTBMC}&cEv9`?s^E(+lRA5pe z!4rlAPdKRnsNk)F!r1!#;?;ZjyU;2isIDb~I}yqlJCA`!CxY{Ey)J*v9*zo_SNo*v zbR;X(gp0TH;g-}s3Fj3LI2kBwyDnM0g}X(Ytb;0u5?P@_)~Zuskfpn%5E1=VrGMRi z!Ah;cmFlbwI{Xcqysdl1n{@?Cm23B%X^YCDg>KoHM$lF*6ADC%fFSJz#ZtURo-i3` zhvJvXKCkfA@8Igyek1$84*&lNU*QPxtSZh9jQ!4TQAN#AWekm%87?W~6qmBGEG_53N=o@9B?45>ffW|>^NTpJf?_VLtV&p2D{gFl zsosDnVOcq=uAbY}CN3`TZERwOoOd}zwBqRkIq$Dtr1ja+Nlmx}df+9tMVf=t#wpZ|gLr9N9G&3lN zm(@&4dq^z`jJA_$tF;(oW`4YM^XSbx?P_YyMh2V=bTo-_(tdpTa+ITas<*D!JtaGf z?@W%I%7`+_PgAykY#0=DJ}}5CI^HHe`D$Q@iGQF?MrL4Zt7loUbwu#<%w*+@=NIhJ znY&A!@%b3Wo9{8+e4p{g`;7NKWxVk&P0?k#fkC-y*D ze&n``;HI1KrknJNz3hyQ<0`(3Bxx%}BWY1Y7CY(4NcW2t<1>$+V2Q$1KQk3+aK2gU7G3FWw`U z?#a&FpdK3LQs0F@p1YU;|aNlV(MuIiYuxMBL5)^AtW%~ooitK7OktrmM)M-lIT>iKb$nFK<{<}}MuT4VA%DVGuSV71 z{Z|x={q4Vc9s1CJ*)b^GQxG79lL{R(#aFZ;$d~fL2!EM&;J+JI0lVepY~c$X|Hr!(Tjaq!*DD~r1)oKeQ9rOe$M~RZ2Z0jD`4sEg@^%J#tN*aC2 z`?~fRbeuBpx#7rmj3wcvB(;&GR8oghYAH!66Q`AnGAbomu zTPTgp;|FE4eKUBT8GP?_AT1OvDVLKmGxyJ(0=`$tOyB4vT$vVbtDf0)=UWY{%q z$SsuT5-4=`7rF$9+(Sg3(E^_YZXi1U3e?IWNma-JLmp9F zC{%|hNn+Aum{y}4MpiXRt0wVfB(6f3UN6jRla(?_W-Cc<;HNhUvzw(E)q;dvR%mih zQ6r5-UVF;gxUYDnRtv<$P%{V--VI0Kq@utH(LH3}K*JNBtKHD#HMCgIB7+>lL5{RyHaXIk#Z*pI z-md@Hyy-f&0$eGaLVp5Rcr;$W7d>Fb(NbhJ;%dWrU=qaVgTFLTT+YfZ_ZRnY>qI2A`?g`8K$1fY4u#^RSHECu4 z6v}Mx2S;Vz_Wp%xy&H4~cODwhIW=(9gk@|#aOF13`Z4(R~a?^7;8M(ZiLJ=HCd4jm+{jPZTO2YFW6W zamv!lu?wq4{Zc%3Y2~;j<)h|ijQuTjiB|2wYn*G2P^+VpCZ8nYoj`nIq#og7w_qV^ z=s5a_9K88X-h;0GLykVf_TF44KZ%`>_=%h7iHGEY6UP+=0X;d-y@s)aPbkMbgq2)G zsgfBTm`TYT!e2ZMYFfSb_Vg_Vj9Kd$GZ!=F{xbjKnG>O2YEP|{?2Qmdm}ZHfC1IQO>1> 
zVI8eO9c>B(z*Sy{>H+{Rj^#>FmT;DQlYs;@{FBa@>-Js7@ zIoYdvvTK)Nx7NA7Jr{aGYd?0Q*W~fwbw`$C7%w#m%-;bNTwYlxw)0KN_K!T^@msfpLC$*BGVkd-P~r%cwXkj<)OGfxl5@P=hTwiY81$zbyRaJXt4|+tBexLuu1{GDkKOW#1={-3xr|$+~7Q3 zKn{Y5LjNqGU#0-y$}dIanN~F1lNk_a9!?*yT1yv#XetuNSaEk1Wy{TBy;#<{)1Q zJ}y1+5_KLZTZrvy8OYR+_9^St8Q=TxIf{jFIMWjZs~irWk{Vue-e6jZC^ViK9Lw?w8FcXFK6aOw+lr1{9#S_P1jJgb z-aBhc$2V)*K((5j~W5Xls! zW=k@2W!d?XtUOUht|&cMke(yNGBaNU%PA7)mq|-2W#zRp5Xixgf(&VGy|Ao?Rny3W z@n^zzRNdxz`yoHc->BV(}CQyCpYZK_2)9nCt{eX4cGYQ@OdlV z`P)Kj)&l*D?R!pF9lBI~_Eyso(;V}s_2D@ry`Ffb4ColUu2;V?dxgQ5voslF*L=Ke z?-;ccj794h+ji`7e0tEw@wktjZEnK#xInk^OuOu;JF#AdZuhT6d)-ctFbZ}(>i$gM z`O*I8Hd+s_pYpTUbGO;$d~=I~*&4gczgeGBerBcPWUK3GzxuX0W5Lhk^$$)zX*9?9 z@*Km9R#~M7?S0msziHwhyZgHBp4-oi-6PDx({H5|yH~%k%_#6Fua0VKeV7y--c%mf zQ)?dOvBl!#Vc+|!ZX5n+vTuXEnHRUtG}iTuhmBRNXJB*TwFs}d2Q+uzwy;W!dz780 zZ)<-iG`WUN^^i+i#kqCdj7nZMocRcq=1~p&h;+Gg4=^>FqAd3?;uK7MSr0vlwG*+h zlbJq|89yMWZOHLkGdOeV6!q&a*X*hqO((nwQvE z?z<}9b493a!o^32&BY+JA4Z8ua~^_#=d9=*!3{cf4f1Pka8T=>eW7)-aldiL$!naW zro;Qs59~Y7(!Ibz!`^bJXWjnpwYp5y>she74WgrqH2Z#4>p@KX`wcI^W2bEPl`Sxu zEx5UD_LipK)LyLE+qGtY=k}BR6kMI^&@}4Ydx3f2GV{o_9^0tG&CLO8F` z%~m%{U-Zxs5KLtS`3hYNg;=x>ynNJrHO9(UHbdSO4c8O$LIkV2$3?pK`$5^V1 zEVIi5OB9+>J4Hae0gV}w3#)a-%l8SF>=nS4?xWHTR5esruug}wMr(MjCTE=%7o;iV z+OOHo2k1h+n4FL!h7iOWW$_v{G13Xwf{_L1n3ek&iPmw6>Y;w9J!_w8KWNf&!+{f&NfJLX|A0LK=mcT{%Glpn`T4nkfrTBLS(z zFNydi5}$aAg*+l7pri$Y-WvMwN zC7;9~eJocHmnBKeqS4I6d=ghm;%Z0?pjQXU>>=eSo5&>zHN?zrNd07k$`PjOVUGGq zkq*=njEKf5vKvzNa)T!d5@Dde17#sx2u=ljSxQGmTMtQ~h_1*8plJe7a6L@60a+RtLI$|UV!o!IjR;mhstX8TB;p$wg zDTE@NuN3k)kiFGNdK~;KZ`+^n6+JH*;S6#y{Q>JNv#2L3 ztvxEUFD0KJnlKQSB!DaRu`B!5Gp_kV>9H$pC@rkt*EN4<>#VKqKWy&%cHO|FRRdoy zV@+8#Gv> zPt6x+V2)5M$tw{SmI>j=s%an%&9c%eRzs_(rIpmyLb^d%UB|0xW>>Zf0}@)V-7i|J z7P@##`XZ&e?^e`IUS2hNUh&62mW`fMKkk=?3BNW>`mJvAuN9wvpZ@mvkk6(^kDU`g zb#ca=^@R(RE0$~3uio3dM)$=p$_)@o0`X?%=DO)?swOTi`23fo4`+scG%J8H;Q?dv zEyfHB#*de0tGj4eW-QQp^wl)=QD3b6^wVXGH)k>4o&nDJyUSJ9-m?2x`_L4Fvscry 
z13KCtq{Q3~^YYA#dy?Q6*Iph}mwVa!*|EnrE_vJSy?0IH_7%&-pv_hnc01kv?Z&AE z_l#!TIK)t%{l>{n%e>5gzHhwn;ibBrj2EIPs8()`We3*Q)N zPdTl9IwkN{S(;g5u(8J@uae}DrqX$PRaPF=)3vsGlA57qb#LKOV~?^I$XqUMlUCLV zYw9F*?WC-U}esdAdBai&xAOvk?SU5Bsq8Cdii-|n|~%6jC9$e_EtK*#;}o<=@kqC3KX3HyC`m2X!Jr~l8E z{}#R8zs*77rlU20V5Mji^65P8GMWeOy8>LvmoK~SKj14Vyr9$(DwWdqL4St=F|0yY zjV-1{@hX}~OfGrgBeQTK=kN25Sq|*K+^ci0Ps3dNnkeyxLz^`cKSH2L{w9^zMNZgb+QY&pDdG(T<261+yFs(t9 zR3}WR5yw|bl%l6L=(Y!*WRkP*WlGvmp+MQ-C3W z7+1uPFXYALK~j@?)> zIL+RAd>Due(LjJJ(5SGaD+gtp^dx{)@Ku6mwHf)OIueCJmI|zOBy|ErLH~0lXmUCd z+(ck4+()bfcWp<`pf*kX1Pt2@r4DV%b;oT5JOg-J3s=})xIJkk5kY6w!$bo`0S@q# z_V(`7>(xH~A}oP4Ln4Vq+3Y}8m{YQZP`Xki$x=EyS~?63Ro=|e^c6m=KXiZpHcu}O z6o>s1Un!nVFt6mN2AVt64QcN#f|Rrvjh0)~C@XIxl`rT7AvBTg88!%Ss*69l@rZND znv045?vu<_T5StdUO*IR`bOr|^{nX|xl>lNrmkU4U)zUby9-qYS85Dw)f?Dj$UR^@ zXn1wV><;hNW9~yc&Qm88$AJyQT>J#iUR(!{ArF6!n-9y?o9P`m5RS=yyeKMBqOiyW zF}A=$VmKi&?9f*JsA5?d!VU* zU}u3I5B2q$W_EGiAoSQ{GDPRi%eHA{Y}i$?U_<4M<>lWlsrq(V-PrjRU(P8WJ*#-s z_k|x#&Hr?2;pp#6KA4jE{^ay`CnvozDTeWN7-K>ZW1>G@j*nn`6~P!AL086xGR6lo zCiyX@c#Qhl_nUPg6W6%CHR}pv=IM8SJ^j@>6UIa(24n6gAIyLEjUQ;%-aAv?`);ZB zJ?ELHE{@Ybx%H`|M|=0PqM~~-p^nKBp+!mdNq&#hLa&6m+Ga#pgt-~JKe^y*551{% z=0*qo?RI%TSoZYFq-*+&gX=%LqBHfO;WXRRlWrdS>FH&q2>WrSM;IEL-r29V&CTv% zQ=Nu`{Tj1t4{{2PJp6PYJP&PST0|rmxO-fR4!jWUt>bX(OoX#dajdqRm0rNZ_cfNz zxu(C<^VXs}XVyJ3on?IBSg?z>{XOf>j(1t z;pi44yx#SKG*!vkT1jp`yP}eE>T-%jnWf^KD$xD~QJL(-d`Wz^G(KGxog@v5lLkeL zy}|@8Azaj+e&j2D;3=_j6WwwW-gMww+Vd^#gqC)qTh8Q$gT(T=1nY1iUbh!te3@#R5-=zQGWsdT(?j6SmP?~nl{yxaLFVSMnSLvONNyNsM z@76a#oCH+`r>&~~etj*~VbFYm>QUPa`gWb|2ir=+xO=Zj&;CmQSG^}K`_J8GncEIN zaOJoJ@w_8hVaff_;b~^ly6PD6pv(aN74-Dr#X@((mxv)CDF&nSCA9hrzQV&vXMBI- ztAD5N_J8$Q(LKNHmw)$P;Wy-6G+n~*`759`(o`scRb*G_eNmrB%$TkDH_`@lx8?X~gXnr1}b(z82QPU@?`=mSf2vu~2st0&G^;kO& zFjWqBYo6@VyV!Nyy!*_ZZgYphCt<`Zl>~y0UQIILx~V3mos<@yQzOZ&9L}j^HT6Qz zS=`Xht$Q)l&_2}M#c5;m+xvNq{eAWQ1NBUHO$WE4QBYDRD6SD9al52mTu{f)spVwX 
z3}@C3r&SLpR}3bUuo8PMrvv_(|dhKkRmxQCQ>%c^@sHJyTrHf~`(Kdn-bTEfrDV}sU`mMe=zIV#7SNw2DsxY^TR$i+cBC!?Zfgk70t=0p^E!xLAbPf;g zJ3>_UiZqY{aGdNuPE<}vkkq4kh69~s>Wu^{M(yl_GyxNa^gWu4sYe}CG>Kpz&C`TY z)TGV_hyw1VZ*?TAcJrW~u}ozcSPQTU;%3-_j85cuLOXQTZZ5oD%Xd**w@I@PPhBo}oGS${n87WBxrm&J)LBd$%EH&q0{I$AGIJ z$JuAt(TjzuDjxoW?!JA#!Gpn3+>mHqSge4;s#sB2j4&jM7K8SW;K2N_jO6-9^Mm4r zA&KI!6frz(5o!GB3_*N0KfOSJSYctgq^LqzTE%Z}qvD@6&7!6klEzkPbv;xd#B~iM zGglHEDKNFH-J+Jfcw^@LwPioAubH?g?W_55qkm2q^K1H7^RhqrF75qq(?6V=^Zqwk z?@rEmds6C~6BFK?9P`$w}#SuI;wJ!`QUw>*HEKTsblJ^xm=O z^k5$zSO4+$ImUqyg>Ea95>Z=P6eVC!IOI$jahkT8edHfm>T!_~5WXV9-*t zD;w|LnRMjP3M+FpySv*TTk7~ex>g!znBl+b@x@tJ530CZ-7Jk(wYz#L-t%<8v%}7J z%%VKQdK>Jjb5~q38na_VbW=l7|DdH;fJ;_c^b1ZQO3b%P%NwLk4WzD2^rD`0HIb$g zVSh7et|2wmq@YZkUnPNiKdTC&<=g_0D9XifHf9${({p7>nUa`vDg0VNiDJJvsZX@T zGeY7XCUOfEIR}fKgC&kZQr9rz5=xvys9^;LdYe-adF(?TcuMcO3U4`bt?bx0pATC* zu9(xL&`|v@N13eWK*6<_}lS-n~X*wt3m1xN^86-G?*!%DueaU@i$<_Ou)7J;} zFAnT7=utY#+!l0q2OFs7kd(V-18L$W?XPOXRR*vL$H&QfyGN@8|jcH5rVdXwjY1YRJxdJ;gFsv~)rd^MA_ z8gWSnkd-96cCe_KQ`|6I-Zb3QEkey!NFa7Gg?%h38+B5^d8a2pJ9iy#B(!vzC_DWlrlDZCYMT?-USys!cQ(1CKrp6OKA2tR>R4u_wytH0gSj8`?6_wRv zU0Bu#4WqKx0FT zw6;N9P|Z&-;KdDI3zr16gT3;sVsy16y>KO ze-v>(GI^sF0a|x|zhs};!|0ft;ru!bMuYA_-2ssTh=M+G7#b~rw+i42t}4*6LSy(qSy%#GA#~^;!S{_2_=NMl z!uiOY^^O+$#YqAa#DNK-&}3m$hA&ety8ociOk#Az#%#xDlGiuz=B^r+d1pUp~pe_HAr6B6E< znDWMiq&LRLzA+){orw|eObUDJYyY>$d44qB^@A^-j2de@dc4g?pI?6OlM{@0H5rU$ z48~Fh10@1ci+KX$?XMYcf5mwBE5rwr=VDLalobmG0a; z9PGaNq4_%N^Oxeh?&pMuw-lTBJu^%4yIB->DDb)R^Xn^b8_&LQXw=@VZ>gu7ua22;%L3>`R$$Fjw+sx8z0@@`taVZ zqI{!>u&;IZd}nw<$fu~<%b(aIGzo2 zxEk$oImF32Imop#<6&;x3|*C_Ca2x&s&DxP8d%;5$*wBs7RJ;KMU@ZaRPd@QB^~vo zqn5PRl8#2wj*DDPnreZsWTh3Nk{T((g>Z@%Hb^R3NhyTIY9$rb(z0q9s(a&prI*N( z3uQ^U(!^{j=nAnJQlw-e)f!ouQE4PRjYOuCxGa*8L*g;%gQg8qH4}+nH1P-}Zh=yd z5E;xpMB*JL35Y@DQ5cpWiAs^cx0#$RL4C=hDpF8^RzWI2-hkvXsIciI0>1#hy|?iC zqe0VqoI@r(+Gl!q8uV{C*t_;13-xhO>E_1`?VwssT~+_h%Brbrs^@HLSft(tD*M*s zOt`IfpJnd3)OX-&zupz*v74+TH+oOp>NB$LzjSZl=F`Ea9&GP0UPK}*r!q04x!z!eVW@7MpqD}ccT 
zlX@}L4yCbR`51&H?UNN?iM9(x3&0lo!biYQFnTL!n&AjM!v-0x6OJ#)U-$^Uz1(2{ z)hv7prxv2nD2@hj0#wk_Ln>QIb|s0;Bf;qs?_{ATv`Y$6Q@vOA1rkUk+~wE zA4@#rD1QmTHE@}r-2`SSWaJ^voYO$^S}1i2rH~7Opg@;uCdF;Eln)40C7@B&ll&?? z>@q-?!b(wo`A}Z@KxHGR4)tsLr10qWv#BB+g61@^GIB1s7Jyhn9I!m;4EmD_!}%#P zEC2LR$XUcv5s?Up04`!N^%cX0MbvN1;!_*o(T5J$YykCb<;H@LHHWUFDBA!Otx*VX zn5tW&h6s`cVEEG*{wdOv&wmt$gj@A0mOjd$tobxsr-;6Rz${dbPhWvix-{6kI53n71C71 zjCx8=3ojtgLWtQjj{cQS4Z|)i!{I%rW!myW*gL>!KLlhY*?K?>?F+*z#!$?mZ|kA9ptE=8qzoIH?VB6v}=vXmA*i4hC0_cO(DVg%DV_O!dH!1 zEW?e&)>j+kU7dUbje!60R?=qw7kky3fB)d+rq|;?94^iWjbH>{$)7vL~ljx}d1bD?VgdZY~=W$isXblQ?{cAtO8UhvFQjEqN*bFXnMJA^gf(p<1o2VAM!|J&gZz7osJ>NzI6l%koI-ll z4dQ35@tM3RZruFzvA?8`o|FFJj08jj-~2lEoyiGrPl$bEZ1@{vg5Dh)_|b%bk0yA% z`-S71pFVtNl=a)6n1Aq@>Bpmu$4oGq{mZ2}KcD#Q%XM$OJ(Ka~l(#;d#&{E|o}V(_ z`HJ!8XvXK0M$KEy`1+e4c4=CKMdk8D5$&D#Q&OEPO3hQ^Hrw9Z>UwX*bMws})`t>Z zjgsBst1c|cvUT=uu5FrKx!(aUCpGWoXtcB!M^J^M})z#d} zYGFm4w6u@eeo%G^ z$tm>86-WALaA~nyVg@pSKRle zcGS3(LiD4mS_-Y#{2~#AZ9(J%o_Fy@3~}I^?eMWHJ$ueERSf$fG`dWeIaj%T#=2HH zo4s-Rn#LbCHvP1tZ7D>tz{owxQaLlAV=}n+a_=Dv)^V$$BNn|Utp`rr={L0Lzwm%{ z_3`=8fg{q^Y9{)-%i9A5!k0bgJukC{FUqu#W0kp{3fZzmTOhj?R7f{h0BsIOH0&%W-`pg#B z(x(rGC$wDtsODFbqB>H7k5Hb5lFVgPPPs{zSvQzfKbTt8n^4-7R^Fe7WTI+zav`&z zn$}+{EFY|>;np<@AyL@XN%{w9Iu*_q#urlP!h;x`3`hop6n&w@2qD=zf2w+ z1(=~DF@0fATp}o1C`2vj(DriphVMY`KT&w|0N7rc;!0qN9~t=_`H$fe;=#k8lf&FU zp;>$+ff^GYQ2IT3G!(321>hEcf!++dpmza3O4k6hfVhCl0LHL8jtVC-47ZzHLl&hZ z;cUcH4^-KOY9OryREi2{!b7tfy!{$cRwXZ~h@Fzd&n%E-7Ryo!g^5Mn)Cv(|sR*j3 z)>4$^l|&rF$OCtB9Zpwt#!pf53Cl`>G-0g<`^Q3q2UvVy*0LjstbQ?4cnY#`(!EN%ymu3*4>~#y5Fu`4iNl_@1^8dgFlwTCwAhcv~O63y) zTnhQEp-X%shokskVfex;lV4B8RUKK%@6yN{`9*%Y(Th)*d{W&-a_ab~+?1F*6rah7Oy&kgalJx#cHZ3ku7WGJyu()q;49vA zxNo^`AIihc-pQP^qX%M)OSO8p9c62u* zZ;tV0eD3z=66}06&gY2F;}a1ss;;*WhS_U*-dlOoaQU@k zJMNo&vVAdQ#g8AUte9(}KU#0=gp(?(o|(+Padg>J(`C=hmOi<*)%(%hyO-BF-&YTE z(eib&EX+LM?Xu>E*}A(H+8%bB?p@z-+w4|e>ZO<QD~>E2+KB#ma}Nz*c98JQ$2hh*oG+&rKrD85p0 zkFrVvyob0j>Tcr)Q6Lg&4mAzj`UU}Lcz}NuNINQ$g+})UMGZT8_uD!Tncp8YxYDO> 
z$lQ9ExlE_y2c?<`D@&)XX`HyU=IbTphz!nCZb!ZNjYs;GPBK-E`!p{y4_FKwwPG1R z;GMS>oVwL-czfW~y+I?}!AnmDZ#oP;^Wpf#3gD3|s1%phaWQdbgKR7#{d`(KuBy3b zr{^(10$RW9 zt6%>1t9P$f{`7(T&8v^F@*h4$n^B;E-Qa4YQ=KudJcJx@oAk zb-3XLr?H*e!4xw4C2$Hs0*Lx~fb!%p=B1NLKFwD+qh21~$O+K@M~?GPD=&BWkDra~ zj}ykv{$ad+wf@K7{_FaG{5{2%zwY3avT6qgaTZ=LKF&dZ@>{`@bh@I z(r)j7w6llb+Qn&X<5$&5%c=+ji1JJL*+ty!Vm=17tWqf?zo6I;VTx4v;d3eT0nU8y zXmZDeTzMigyft*ll&O2JQ|C;V=II`krBgl1+;NDx^&oSL9&_`7{w)UwV4DuG*6(Aj zMU^p~0oaDU1DkXQknF!*A9_kC*TT{=8rXHRbK8;n?MG|Wjam+0ZMy!f-p;=XBWyt( z>F6hY9EuMIP~!$5Sso6PpBIIgimNTZOo~ecT!j_+;?BXnlYy~!sLOU-3 z3Of4)b0T7dsp%vun_zH+UoF^lTvs|se(8#*yDMj zvE1Moen_k!AcF57#)lWnHJW4cNLqU(t%IZ+LGvL}Qcj*e8#r(z{g;_UQ3eAANP^m!bKjjdbK>$<8;>5n;qB+1mHRj?`QX$0+s!XsPYB;} z_u5w)o8Q{B_@LkOD`}xiElzEGZn5IY70n3yL&=`IJ?@^0c0Cp5c&8}-M3DW~JC`@y zJU>o-;{=UOj334@ex5vDc^PBgM8=HI8DrmkbIJGfk7<6gb@7*)>poUr{g&#ouMKuD zx@n~9b^n`-`Ww7$XFRyH$o0;k*+3T&QFUHgIh3#mANFw-Ei{_u3@b;d^l$HiCvdN&O9o= z<=yR;AOoi&p@^iG$nr|W#l`%RVt#3f2)GMasHj>B_Cjfcq_|E{-XwwYVJ#AWnx%kG zb1jpW+W%In>^k&}?!(P?>Ju6f?X0ERvv%GlJqP)+4%Nf6-FI~;(~kx&*|8q@a=qe(v3b($ z3Q=(#7vTVK4lrqDi$y#!4J-@|^Rn_QwrU=qv3RRzbWXpNQZ<0PFslXEUEbBa?0sJK zM*oSG*RvD)`>0;$D}eZ+;svi4Mg$4~&@2_W3oa%R#a(Qn+yrEpPlkB}H8Lm_3UN2Q z#XJImZ>TBCNz96v8c5E-4}ev4pg}uUH0{^9 z2$-wS{i%=Zjx&`NGJht!_Ec*2L~`Y^*wj{He4BsPiVxvsq*}uba{dnYk`4FD1I}#+ zv5l+bk&pCQpv)zL_@|(h3DwHz2AUEO5E~G3rL;bIBvcHa>&rX(vdh9-#hLzVuHtL_ ztMUtgJ{%BBaY=A#U?PN{5iFLGtj@mgkDgp{(Y&$&0*KUg+&3(3?BJEva`MXfX$4|v zl|!Z0HAL{-Px!z?Y~>)ne2)i}VEywlmaUrr za=8BCG#nEhD@=mtDp{J8EKJVeA`FyW2wepNVXLI%q4W%1d;*Jtt7LvcG7m1S@K_F< z41N)Ve&NFbkz5L~!g+vLU}5=;#0tHlgmM#jMss{)IRS}0{{)U-{6I`L7rN^)nM2T{ zE2<{IS~V@CwvE8A(u%6hJ*1^e)Y2t@GzU$tY2&_lLAts~K@ka#7}7rAyK;-&>K#6F zSGvqx>NatgNwRzX2IL(Cc?Y9OSB$UMsPA7v#(bT(MV1+FwTIPCjm+oP)|qFs+i zxomlCw%+!#ZqUY5TT)xMS%@sw>~yv2?ty(&#;#7)pyiI<{*~ z(9_RN4~;k0n{i%W$=2e_J=HOT$i}Pn}hIo6^3f-O!hx;JH05-eEd8Y2w-lJB% z1^`#LJ5JneJAJ#;)OOh7iSWuJ4kCjl51F@JS*|gXuxuI9vxwr(#w+#a1c_EPkCrBLLXN}Po;QH|5^Y1&$hqDJ<;EX 
z_WE9d5rp+Ju!x3Ar{oH(C3kZHHDP$Asjo13fbZo}u|>{1qTf<@#zx#zwACwGDvhJb z?e)C||B@s7XV2-MwvSvc1-Qkaj4&b>9SFjULc#**;u9uE)`hx<2)?kStpl=G*3u(w z?vgch${O0~Fjw776B`SwFy4`}2Dw}qg1NOMPhsdkTL=b3>{i_Z`UI(m1UKr?b&!@$ z+CO*ok=|kIo}&3U3X@aFa3tVXDQ+%~9IZvsQ;jU?rBj^IzvLpm&OeGPp?Fwukn*dd z=(iP@V^~0J0zyC8rhGm$dI&CDHgy`qQH=4fu9XYXQ4Dh_d7`jnzHbcQB~19#Pkhf^ zjFkBccX=nT4IaG2(l|e?a&}O~h^2Xsb-G;UYlyt)Bgr}J@Wei!h;}f4 zTKh=*FiFT0ZQEo0#WZEchf5i6u3)^gobmSejE^QUep>L}!bOv}Za?Sb9@o&;Mo2p$ zby9X}Z=H8_=AD#avk0%4?gp>w+>nlHn~ZQ{f4k%E_a9ZJTuk?0WNoAwV7uzhS&Qrt zySj{vaUREG-NqeJX8iaiWA@ll+m=t(-17da`HZPwjo!GJG38UntnrN5-|m>KzG-3{?SDBnYPZvaJ7t-d5(4!d?mCyCz!%rAI{SHEf=^9Bcynzb zo9mU9`_9yb@6FfwLFd7k?Y9=33Z1meYnE>O$BWz<(2z zG~?@)fqxE%72H+|$r0=$-%}_>b>3H~3YuIX+{g(@*hV-1_j>)$JhlpQ09}HLMmbzy zePIcs{7lf8E=GlmbWDVlV~U4aX`+2=Y5bO9AqQA~+|(a{x}jw*dcWNLLDioP5Iga&LU+ zCAj);cXvn59G3$(;uXH4*MVYX383;Ji<^uk%bu51f{QEV(W?!rC0l^;OIp{$FRkTg zmkDSxX)4b@A7u1KIVb5-{7J zdJ3HU`N--DPeAPeL0AksJQ`~49I%n&Ge`=Go5zWx6J=PsdUpB;55^@C5VW9=6q_oI zOOwH3Qp8b7BH$~CGmL;Mj(aHEHI(BT!gUK3x`&B9LZz-jBG*8%M=gA%zx2}23l;`mH%bYfo#P<=h+`7|`k8k)rbT2=Lfm970PgZ#E35#lxd64Jt= z*$~LLt?41LMeX~~Sx;HGnKAAM#`|L!6J{{Jn2vzpr$4TnzEMrx!cy7XByONRZMZL` zw;Kuu_aZ{H%#5|J86A0KvBl)b42>;}?AAl46RYd={0{RHI|jNhgLdcC)9-rFl?f4*b+drFHK3%+?nW8G$# zJ9}r$}y!h6IlaU_lZeKJ`3i2Fix}F#RygK(`c3gZQm!7B_yE29GbpXi!pH2+> z@wXIZz3NLgtf!%}fHabrLGtrR5wc@w2A>$oGS#ifZss79uDM%+DH5DlKu5^PMus(3 z>YK%-)tut0p~@y3BhCkdQaHT9yhYF=L1U{SRoqkt2azzYoEI6LxeZTXkXhxVN4T%}(B z!`cc6%#NB<_T{{aiHob}ZEHgI>vsK~UB`O03%rTOvb)YQvxof`tvf9rblKVWMugJvcX~c8;ZfI33B!Zv$K+8zF_N0-llKxeab*Os z>bS+dZ+~6gBN`#T%1f?~bWJb!#M}Rz;mQ8{_4gI{-@5{I#Qf_E1M6Sf4d-i91MK6+shwN}5TiMpbN50#)HxIQH zfUhtb$e$Wqd7xiFtePfnQ7yNyR+>=256|Fx#q#Wfc?yVS`BY?bPXM;nk*k9zt`45L z!M%J}Y+)AiZ9sst>16<#y6{{Z8*%Byp-|o6vm`^jPK@;U9YtAl;H}4W4ib5Y&SJI z<7)4ipX8Vq>sy^_;_-0T)icMR+?~Hq?=M` zjXWNj$9i2)^t+zq>sXa)ndD`a74*0|$vDYF&B=1waW#W*2aV^J`y8x18}g4h-F2zV zUVL&t)3#avo#;RK|j-AFi46!O9=rUp9T>wuK)p`(gZs1)r~;`_aldjGx9c z=1*q)Jn^k1-@m_V&X=ky8S}p0`pn`|PB>%1bjH~C7*js`diTaPw=NzCb3Pj5{@t0q 
z(+&6DDNA)}ERJ9{MzvO@Fq@qUQi5O9#&Y_cYAY`VN4gi*G2R)+_;~i)&y5i?422zpZ-wt(e$i$r$&7?FKp(H)J;cAPFr+8_aw1d zREQ5sJK5Ecz#=KdJn!(zv;rmsWGEc4o8b<WF(^f6<+_S@BRPZsQB*R%fGL{fAR{@!Qmg-OQeKfrR@`=1od~( z_K1Iq5f)3@$`v-!m=&<<4dy69rQiF4TYexPnk^H0t(K zyyO>rm_wS|*%ggLLsB{JksQcw+xm#^IEw(W%pXfGTJwx7S!XR-mv3{f z+X`(QBu_k~Pu;mL0i2LHQDP3wwNJ~J#wM}+L%JN?I-ETR?mup^ckT0osBfUeCs^j{ zFLdzYIC^tjeA!UP0+l@=QWzX93W^p5MoayoWWEtH-*Dm;Mm$H%HAw0X`6J+~P?}We z9xgyqA^ci?;Zna4F)TC&p(_a_O#P#Ufw4lbNVb0h*EeoBIE@S0mf}tllr?zUE$ir2 zyPr4jV|+D}F=h_qor&+xUdQ-;#Yc;_Ej@8r<;KJFfe|_nZMR;#e8};U+HF(K`_~LS zAD;Dne9_JRA7zeDE`?L|>q+D{21AEDd(K-Z~xYZWQXc!ua5-vj-USre5%K z+I8EM@$nn;PwKwCdj1#6E5B4;&R8<_%bklD3jw7je7NF!#;mcU*8Ify;S0u+8Sk(C z>D`qx8NW??Z^f)nHvP)L`mf)7h_C!Q{hd`mzqexUdu!&O4EH!0=d;7%=FO^9Z6Dip z)+P{VxSJM}HqhmqlX}I^eV5g>;~uW_Po4Tn|HNhg*l|mg8Sl&(HCgGc&;JK|ZvhbFNdh5C9EdA%cXxMpcXu~J+z4^EnapJRzt0Th=5l}fs@_*$ zy_>17P9L4jJ}v9)-o4h{%||&mo)uVhlpi}Pjvdj$4%=c3J{Wx@wljSZQ}LQp6&o&B zZn$2w^+wj7>#4`@r(I_+DyGV-2@p>OJh9+!>>Gmh7o`62@xIZC-r+RGfO;{(xujmFUPj@qTWpzIT${ybAroDzF}>mq($UPq9T*vlAo*Rc$8? z+>oIMMa6PJLaSLyw^`S5)FFI2I3LB75prtD03AmL5e58Kw2lVv;%KimIKIL?>ePO0 zh=S-g?=K`GKVGT32ZT{>zsl6iJ2&oPCzs`TQ-?=|?7Wf}pV0JyEm1kol z*h3r5|6vXo03WL%{TkfyJJ9yvLki%pzuD(MX~p1Qf&}CSFwKXL%m2wWa1IV+amYhU zFei(Xa=-(i_CUW1_}XOZ9N^D+T`JZBxJC0?;T|b?AaDl~aHYZ1!yngAUK8`XIQ&NI9k(CWa?JN)^yU#I_{|4aSZG@H|Y>(7@H0<4<_hauW>>O4mwPkIgQzQ;=Brx$;5OB$bqoBMg+@Fv-64mLEYZ4I^MR)#;L>3rQ6)D z)6RX^+-bnVdC<~j$l7hl)?);)maG4Sckq;NDA@-<6!@t^C_pLc9*k(oH5lPq=OHBL zV4_p-gmdTwjG@k9qt3xYoBy>v zxj7Xl`_$xSPj)yJC3se*N48aXR%E2~HAlABSm(qUrv{tng{sFpndbT1mPXi>#@H7p z1~-)%B?P+`Bo>c#Sj78lM7XKOc*;k+t0egV)_Q3q$7!b}9qslx%9YJopWn+|Jt6R! 
zwjAC0gC87CuF424zjx)Lq9}Iz40d7fya%VToBOdd+vnXmh#lFm;QIbm%qL+U5IeMC z8Otf`I@6*%N3c`$s~?_Sbo)5{`v=RPT%_k_SAkbqv7MV<-90AFw~_1dr)YPf zFsG~fa!>5_-?~|ghk6G#)mWz|MmK)b4i3B_E^+WZ&tom)eH?->v}~~LXEAKYnk_fz z)?eCqfOW}U*7aw^R-IK`a$IZS5vzFzJTRt^dHdq$?N48DC~x7>q9rGaR-G$c53BIb zW-dP-xBg80VV1l{LQOnhdL;Dw)oq5ILs3dGs)9hNUR2kOzICCjs^Q|sv5Gcg<#%Fv zE1{xg61bO3YbU<~=&A>iBlN8fqN-498$JmL%*ov_0SA$C9VCdL>+B|d?}C`FNsvIQ ztQ#+>o&YgQpoq`NL5XQ7GHx0Gyj=iNGN0s@?!C>{u=i5w`or0459Mq+mb3d(!O8oT z58k!1i}mu!j0h==h$xN-D~^C-4xh>}w{kbDn}P>ZQHcT0I5 z7jD0N(>qP+pC%8JaFMDBGQ2NR`?WuEyrt6mtN-ns)+fy6a~7ak0HSNCzE&zm!bfjf z&BUD-w4s}t)5ca>P8(aOIbO|YF2OIP`M7?0UH*={y{6QGpipE%@{zu7d|*bU)wtejfQVM#3KtbY8UoX?BN1eQr29 z`1yQiKYiMHe{@!VyJt*&5WnH8U#*4$3&ZcSy}BvF*GMG_8v)QoYf$(*2VMl2J2XOW zXdA3;9|td1el;;ZZvrI3y<>>Z!BbYAL~}Q?o*hBcVnV}wT-TClU_&x>BwD!Llh$6N7GA@)zGHTQ6Tm)X>oaKS z+Ue-sYva*n=iTiVHWHeS!gG;BWUr!a0keSZv8P`)U;42A-22^kMeltybIxlI{XSq@ zUS?lbtQs96;b{+;$|*m_INrlJ*3Bs=IHEZ_xIW9XA}OJ_CZI9jw>DSW%i@EDx>|_4 zR)mkBjqzE5_s4nOJkyl9CdDPMfx8uQjlRVCU>+}oBz?@MrXzD-Jm zQM|uGu#-xhk9KCbYP_#oU7l4*l7NR92m_O$HFIP@I7}AuYGlGBl|7v z=8;Y8cMg7H$8H_PZXU(J$8};Cc60}}d(}xLVHOK@W@BZp04M$s54ofWem92?Hdc~O zj+%k~K4qom+1W1)jW5b5-BQ+ltYvAPR>{mQk6~M|^`|gwFSh>1ssrz_jj#9IFkXAo zV8v0Z#RuK!nEdANjhe@pyl`*s0;U3tDG%G9v+zja%2Q=)PM57aRk-#9>u~M(t1<{L|FZ(rUzAYpvv#4vK>_d`5x8H1)eT&aKQ=} zP&tJ8K}f(-_`Mai0;y>W&d~?_8^vDC{yR6Bh4FSavvN+vVJY%tg%a>o_HTNN5a8YYvNl=@V7x5mOuzQy%A49C;)2 z@=L z^`QT|g}+^YIQN79oduxcL4Cw?Y9*xRP(NW0UQ1`^f3CFnIlq5s`)23ApE0-nXA9sq z{+0fGcZ(pk7q8aokmZ6~#@C$ualb*&~$90)cZQ{Z$8O`J~4$61W( zn$Y*}sILzV4WJHK4GJ?sRVX8y5EeDy7uM$)=OtUzlI?5O}9^NzIA@t zx#Le1rPV|IzXW^pSn5jn+IZCFXeImEl_weHL_o*v$EX`I4 z@o+3ow<}J4pdocc^etd20Z(iFtQeITf91G%;@HFe0#yH2zPNirEbf>#&Ta^GF}^5UXbm$1jD zFqYHvAD^eD3+vuLqGP?WOPo_E!B?>${+Pn3V@B>3-|Xvrl;*_0M}R#q8CM5T0e z%DB6I^m2b`ZOLq8_&GZInTZv*wJW!Yy-8Bm_PZ}IhV$66quBhD*z)_>dX8oEpB8NX zyqI2jC8P0*J@(6Xdo11=uwoZbvnDUtojRW(YawIaBE~{G#-as#3Kuh#t~gS;_GI~n z(r9wV}!m^tA)$ItKD9APWVqEgQXW4lh^6{q}(ec+s7|I 
z$S(Qgm1Gl#a*dQjhlXF5T{6isim2x?D5w8JLA&>hcE5=&ER3B3wwd@+LSZwZwQp(w z=+_a5alrB&{8f=efn_~_tY8!91BtnHawhv9aa0^I_ty22Q+^o)|fHTD6Q zZ&f1&7!MM1$kAyO*T7K_)iw730ce7r{e+Iqq^>P41`VJHWN{$4JtdC<>L;*vzNZHb z_M_oGL>R|WR#zw0msLKUT|i1m#}^6uhEKZqjoW!lSh-A?*pD085sd6fdNva(CY{zk zWE)?Ksq?sz!>G9{!PsE};^#rxP}5>S-)>CJwAT>!xDr78*CqgoXq3G89Ch%X9=7`} zwwDc~JN7`*(kiXy+9$z?4<(31YLZ&t=!bYQ!TSfk_QK+{5_a86!o$bjdPMA-bvEO z2>=zly|F@K=sOn+PDfLxuZ0{ohRdJcS-*;`p5NLf$gxxS?KvHVwE}Mza6UX? zB74$QcE^|Z>pnlDmw&UI=k_}Odn?$lZTj$Vi`dhpTsPNzc(7gkCHCUna$e?*!q2d0 z7qO?8HVM7h_W3Q7s^}`=mn#Hc9QYz~NJ(thC!VYNDleUE%ZZ_Yi#dLY{3Z(JBO`&i7kACE$3gj zQFiH8ExL^c^VeG}-s}b{LQA%UFW;TIa7PNZIT_oKf^Er}PhT{Tz7*SDuz;~_!QSEp z2MZS+EnIdofBEU$H5Uu%ZYa@XUjP^DKE+pXJ|SFM7gL_WVQHGrl5L!6J6C zVlK&IeuWYd_43bp719RZG>p47^m4_$2S}jjY{g z(>5JQT)!`E-M*}4+tM~L7H?q8*>otC@k-it&Ju3bHU*nuZTAUn$9_HMK~1Y38Qm6f zjaFD2$S?Qfl~^6OLNl*QgQ7#fS@={y0rJW~Ho*jC(>6KPMtzfEJ3F$kKT60%dDW<> zd8~DWM3dYCl@EYP!$c50BLcDF7@FuuBa!*Z7hl|;&#-0rQKqNz;#mWotyC{DAgkZN z)l8F|-5brc+5gqT>~EYa&shK}+pl@0e^B_XD*iOrKO2qz;r=-j|4XYuSMV!=Tl@F) zE9z+m=YWZSAsw2ve|ti|Jy% zh6gA;U8J5aN^cL&x!ur&%BvtQiwv|^5YP_M>7F6T*%#S*A`3SJJP(c`$k=_t%yY`j zm1Jl)Wo(bk9FVI1n4Tp`+kC>_4><%QGoLYwfDul)ghhKfv8^``J(oE6{Nr^|%@=AG zoJJ0BO|9QqSR51N;;_&b^)S~<3v{fA*G=*K;HV<$Z}P%eK`kW&TQ`6Ht>dSqxK}^B zd{~t2nAkhjgdm;72(ye7kJ1Y5peR+3P(A;6*X;5K3QBU`KFp$0uO!uVTq1OwBjc)j zqG|`tQ)`SfTRE)~@2dvgk@9~e9dejOX93fTOJefI1v&Q&v?UWGWfLMfoh;SkLY=De z-5V=EhI&1>F}!W4`p8Oe73=+5I!asLvo3uO0rp!@Doe09n$WScoHJG3{Y7|%=<5Ue zpEk;|ul@XNKKHc+?=COnxv`w{#v-=M>x3R2(%`>nCcR3CWj@y(?D@H+AHdCZA7j0W zJv@V5+`C6zY`rY+8VSy`21;86*zOsr@8;vUpd!!f?;{oxY>=DpP+X=F5#^X)pcWDK z*2dz!o1;*OpJ-UHLVUbTbTqq%!y_w`+nRct9&v8E&W&wj!RFsuv`q+G^ATGmkFC_i zRvBY!?B}m@r(5T@U~K^1`Y^in(R3T*=r$!U-I2XwcOl)5JZxj?{9U>8cIIHav*+!} z#&%~yTELLAbYK3OBgN~E6m2+~zu{Qkh9g;P4yLXVzI)(z&0a~AloEdZ76w=sEU^S65V^VYw{h?&9S-xU7q|LFhf1^?J^zl=PC zvi?EgkAWEa{J z{}#nxzkSBiv|l}=TnYXB2%6q_aPQr>uRBQ-pw&1|!Bs6GGz*W^g%B*D#(=0dDicTl z5T&MJxT=8&l#d`n2h>s^xCRi*F_2*8HEHTHW#~jSb|G1KkRSxzFKQ|#m5^IJRo6g) 
zu;eiqd&bbvAhEA^d~lEib0;t^!K5L?Ew{9y%5S)mczhZPjwZSKPvW6g-qV&|BweR| z9p`>E>mEg;PGe^Zgm{@dA>T;k=!ee(DqFO(OT`>yR>gKbhDpFz%J#b9DXKwX*F-;I zhYqj3efzGGtc0JdTU}`ysXw6Ot6fDJhqXHWt6S?Hp54cJfB*ZZ`?)!{zIt_6U0ux2 z_mZs4aZwRw4GmsLXBq!cY41?2&@|iRYWuhv&(yZ4;_<|qsgTP4pt7#yT0%Lng%6^r zoZf_riIhfUo8D)YMc}olxb`vj&SzH4twJ1hk8bQ0<6;!&#h%=F?PLGM%@RAwcu-vmdv#~A z!1GOV@772>-y-{RG4IU{A0KTJW!WI`fbR9hRs75=c<*ff^ny{Ddp-{{#>9}+wDz?{M$7?9y3=yZ={UfJ&#>Gja@(Y%*OPdi6OtA?`s=dE-PDZb4!hg zNY$ut@en_5Pbaa6AhpaCVPD@Dmga(Ce($`U`Mtb^JpwO_s2+SFwecK~S8y%dB{grW z@`CNgOLsXh+UkL=b;VYDEZP{jWK+=6O(9D+MJ(PJy>LV9!Y#>5=`)t?&ZeVJr=w3@ zxHE0huFOR{vlh|kEZbSIY*z{0uHpp@MavJAuRZp4-LcALOofYf=PlWlznDIM>5jr> zJBpWXFIv1Uf6>-Fy6xGEwr8znELgWEe+@(78iu0PyGvK?E{C*YclrFyIdt^7OZVn2 z+n>F3f6~e$$(zn+GF;2L{w!Zaw$0v)0&!U&5}i?qq7#vuKhe-;M8#x4#-vZmq+39{ zj!U`Zqfxz{_n=QYu2~OZdsdz!+9qADp2*T_LSOH@qg{Vk=ww+rs&A%(t3jyCXmAQx z{or+vJW3i1Nsg8?)!udE6t;gm)2q7|gx|i_RmdCdYDR<(^5g`yCxoVlzr6eda0M;x z_s>j&{jUnY-(_xl&I1391^z{4`~5bwk%cq`pZEa3;nZ% z_b2$9-K_KzBmMGy{RYpdPYVz;6>H(h>@Q#U#~#{I{D;6vKR>YV=V$hXzxMqd|NgSg zOv9ON1X$St#)h})yqd}lhna#hQwCwMK;*tDpvS^#c_s)1e7cZoOn_JbwLqZoYvXW5 z-DE{QIW2!II(?Gr#hP#kfN1k6GdH5C+q9)8*)eDuXcHomi5dA*f-Z8~CzQGn;z!>fwdn0~~Q}nL95}Us6Ls{{gVti6=wvlbM7Fh{NLqFuh zyl;Jxz|J1RP9MM+w_TADyQeI>hmCbVC)-mEEq+V)M+zpw*1<<#NgR6o=@zfHwpU?P zDak#)(>1QgJ-**5rq45V+&OW?IAIK$_i`@EP<@Vu%1VVTbdKMP;h`f^DA<1sbq$F3IF^p#F4 zihKxk(JV^3ZKb_OmLGd`bv5^MjOEG-&L=m_G;dkyE#i3c+|OaP;G2z~I576>i@0ws z;<~ni@8-g{7uWLN!5*K$?jOdUp4rI%V4LW(oli#$DOEP>;zM`zdaJzL53Xp1z@ z0)ZE+qE)K_KK0UTG z;|uf_@bh%4s{Z8VEg2N3pOe9Aq%R*5Vp~weZe+9S+Uo@;Io4hO!t~tj@QbMR=l$m~ zxX|tKUbZ)A=}!M;^j^!hc`o1Tw{m;vifxgLH$^Vq61|K*VbRu@CEMec(#Nf!PguPp zW!=t>s-kG~}cfrz~#Y=V+FWFYOczf>ho!Kk)WGrKdU$!S~!_n}g_cEV}v`FX; zz#@pa3{(bDTaBo+5&>yLLwx{cxX{x{?k=>__A=M4rGqK)jcOh0Y+91I>`;5TLck*^O~l8E7=wdsbW2>)hyvW5YW*}tgTQ%A)` z>igt{u^UHMaNk?Wb7uwD^(E{Vma?5+`R?*!)-$WwudaA?8PXb#8#_L}U{V)c@sSm~ zao~WO@H(C++eF#0SGTr*5xVPdyIh!!L6i%-aOCjk_ZL+^t$p+Gg|(4HNPvi^hkQ_w zj9(4XT 
zL`oHmI$a`gYEw{`24*S{+X6CqkcUpqK*1ptN2f`B(;;Qu4mte>J(nLoDJZ!TC6}S_ zEM)7~t7qG+Vccl$Fl=EvqHFxa%%Lkh8C85o?F7_Bz$K7^@^j9 z)bz*sGdLlx%$N@fGxnJ&{KY=APp6iD&h7!k0z=Lm{lloVT{Aa?0_>sXGY<#~czD$> zA}ks-pq)d#5nKUxhy2gT3eKC|`TYF$Kfe0!SE3o4`n!MPE7~3)%9tch16rb53^=i2 zWs|0dvX)7ND$p~2g;gf1;j6YkX7|g zHViHZ$m!0cqN%tdQbsij$sqb9jC#e5SonVTP9WF?4)`Wa1E%r?Uej3Q6-Mw2pYjWt z42>X%#lTXv>A*xvILMR-bbE!iS9c(<)K=Gw@0@y((%y|nx&5)@>MO6BuDfXnaY{!7 zP4_CO@6lG?p(?XNjQ4#)=phZ6%Vz3ak?u-mN!-z{OW)sL!NYPy>*H$S7Z}HFjP?3_ z);mktST^#paff*B`p8Q!#zimkZsGImOW#~y!G3k?2j-*D++S z+m*g{XWAP2)K%M3R&PmLwIy@q*6dYVb60N9UA;38<^GFyx&wI|59jPS znYaIP-l@9@ZUcRad*?b@W~?_N(H@1zj>rinml1+9hJ;D9%fG(Is# zoQ4)d9wz~RGPI^yUE;5n_?J%dj{~!Nv;U+_{oc83&I1243qZw#5n(3LHvg!NKkM$y z4t(tR4O!7faN0;cbD!A-xcG1WnK2)&Fk`FV_Rnn2G8c8^`bK!z4lO>=Mo-RMuo(O)p^j~mR~{vQ_R@Dpoc=rm$b zED1Dg90MtK{>VN6+4&P3{D(awhN9BPb1NvoXw(GEkOScU0s&C`KbZo;S}KZ$0SGoh znH{91N2k!&Hc)LYeE-py@gnDnL##)i%iR{yVb`+!=;)-M6rvpEDC(uj@2;ek;qu%} z{ByANdq?A=g1pz{6d-O&Av9Fl&yQJA@s^C-6E&UprndGul}@?eAOt<`+jwRpDr`k@ zd6SW8V`1?lp|K;O31eOneL;yt{{&)SDzXb1f*}>aRY(f*jv|J{QDCuHN+!z4MEO8O zpNoJlH7*AMuNMgJge45cWKWd#Ae-cFUd!y)MtO@asjt0ly84PXw)qXV^5$Vd?Xx=i z%SE|&s7YV-wA(EAk=etRFFsf@H;ykkV9AGPSDdsi*{Nadw-)l;UnTYuN1p< z-aEPU$;CB%k5}+N-Ymw3J-&pU*p1yfvxxI4y`t#LaF3Th_SmsK0$x^k45Su6Kfgop z`7uQy@wgy~s9=StFbV$v+tM=K%uIT&_bjF+>N%MTiSZA0v~MaXKG8N%iHz}YZZ<8* zP)iAOZLg5e2;&TJ5{wGs2@HG+C`(9w>D8ATuD#_{cJs;T1#1*`pr`_5>^$^Ip`Me! 
z^1-8=lV_q19t~$Y8o_WVY#US17N(G`2P3u|jNY_Ais5KF)A5{r$1`^wOr~c_-gO|A z{y^IHeW^S5XYXXn-NBT*eShvYrmXD;vgi-x&>zcYIFr5iT;_pGnP=|g+Z02;yHv~G}AfwYUfZBUVgCnS`YofPj3V=PVdyUVXbPn#)wP{i^tTA)# zjLbs$^(eUn`Na0P26wp!kGlnu^lf{ryvHp4`s~7b-blOO;L%{X{dV`A*PE`~mvQp4 zPE3oatyl1HzAG+xMev=Ji=}5tb~B>1BNDV7h{(bl3lV5VX?}QUF?i=m9fQc@Q&7q0 z@YU=NX|DYLLks*d7E}K@*!(vi7!m&Rp?=$Y;H)ZcX;T1`;_UZ79>hp~K zkybTb(nx9gP8u0UV-SS}{EBF56cGn->Mc?yB2^6yX}SAcc*DKp%Dt;xV%mPuma*xY z-hmoEKB``Bnt^V*QSJ@}5hkggFAc=_oYgMLao$${d`n&aqMRI$o8NOY7Zy#cx0=?< z&S8!Tr3qgLl4^#FyHI&Is_a4~t*EF0ovosl=4SAt5aQ3>!em6JhiE+rSa1r31UwN(`Q z?AXPukdK|xSaVTt)oEqA{el>F7Tb02hN|T$HHD*^5?8Er-$eNDkeAvl`T4fH6KkaZ zZVl-tQ63LM?DreVZdVq>xbLqNd$WZ9`5i~&^TwL%1bB}qD`K~=?fAsAj+bS@(-ZSp z_Aj`%ciYR$o1R=;eEHCRw)@9YEqE^OcFmDkwVbbSNLM(ZX}`zX!`+TVkkYfw=-%1uI1A>$5?9fBVd zI0R!}3dY@i6LIEdDB}@t#$$e4_Ihk(^x1hRY!hSfMuyOp+XL3@42FE|uHaqA5*Sa$ z?>P~7=wj@(rz!VeC%xv+5|t^^Fs?DSsZPqCtZzph{itsY4T7K~V5(utz$5^wp6)?% zN5}Z!0II1Ut8GC6(XDP_9i|@59tq^MI^>&!9FxXGjMA0eD#T3F&#)VBxgx?U@38F# z8@6-@w(a1i>-VpG5aKs7bji+4Xsk5~_7~CDQM9!Z($>&+b8eoR?1CkMRHjfG-vYcN z{PXsOBddRQ^#6FLGxzi6EHGz*ISc&lEdT?=AY}@O8idu9&OUu0ZDyeEY#WfB1d+Nu zHJO ze~WD<7j>b6CY+11xEU4Jp@jUQ%(97`DndDgH-kQ16Z+NyT&bwE3Kf)2GL*6c)N+Gl6Wm=}3gQXh z3lTA4yfcR~>QJ7Sk0#BsGfoMJEj$NPW`~v@zs&mSi%VoiZ6~pF2%i)lg4LtrsDAGOumW#a}AGJ6^B3(r(n^0!F%Y<8_e(tct5IzFTno$%=EVw?AnLS=)c{_lfUl&K>P5 zo*c>>?sm?IGYa#1BPXWfXrX9j;vN^-J`S_zh=#17CCq*1EHGz*ISb5LV9o;n+5+%? 
zJw%)sMbpEGqT^(H@W$mwLcE_0H6%>59bu591L8~_wl~m&-41;7pY^M z98wNWx!r@N!Ev@>Y1wr{nN@=oEz{qC-y7Uh6@>IsVszF>QXw(9Xez6GI;Ci$tP$1x zKyXe$?PzTmYVJdg-Ke1x)|8Q9ItfHoQj1Yy0gBHag*kr8jI#Ps z#~%@>R+?kmQKgU@BODVV9374^-`Xj_z46^6?DmmG%m-K8KeYSRrG2k&Y`Sxj@!56k z_%7_k&MnU_uVXn!&-P#`%k^EKxHpNg&3kci8T&Pi_54zP7VPZr-F1Wm%$1Y;o*2l#v(^!Iw=&L(P9E#yq*Q z+^+9V>B&0bAt!mcH#~f}`1*}i%q)kv1v!oF11mdnyU1+>)G>;BM$jk$*K`Me<}ir& zfyB%tY61D1ZZtTG28L1Z0P5{S!^7xD2O0-o=QtuvpmA`1PNME!oE{xw_aKM4xbaB{ zaH7g7`|3$q-2GZR|ggp7j*Zew6#Unee*8JF^LFL^>h|9(UrC|;8&M7@^z``Y3o6x0W=L2 zesG%HJw6E#Zq6n3ue+4FqjMIRv%s7M<}5(708INq49PH=*flw77vj&YC@rX^bn5wi zR!QMUqP(I8>S`W#q9$4&&2)5w-K}B*qe{}E3R2n#qvhS*_R&fDKG9}jxe}IPPo-_{ zh!|ht)4wlitr}4HP}chRTlq)wrfR;?E?N0J21Y!_x`uI~8WBGB`N>(MJ=#%0b~$O< z5usoFe8U@R<-J@ke|W#;-nFCrZ&^)rv=ZYKgMuv6vc#=i+)_$Qe~?RCNVydw;QFX- zfXJw!uN9=Uq>gH!DlH~wfxt>RD)@?$3kZ2tl)P$E;Wt868zsMXsJ0t@{W1Nm6SWQD zh^w>#!O3aGC@CK$=2H^$r_&0jbIM??8KtHXHMC4Ox072trXhN#qZc2qTOhco28AV$ z2gD&mcXXYrZ1uUAr6&>=Glef$Yl$t8J#o9j!aMC#S# zzYlT#5bGtL5&F`{5)zAx9(Sa(aFWjh2i;qy>Z*AuhGoTiCApttyiLAln13y_uCLnm zj$0qdX-d zoJF0$Sl4`w81IXx4CWzH zS7z$3P1JA5i=GqZd7-PwZlb0V<;iQWqZ;aLpBUzr7Om>*q!{4hTwCJS@y)EE?2&`% zF(v8cFP?6G@p9gY3v~O>JQq>)j?T%eYHA(o8$@F;O+1Xq-BT0b7lPC`h5AWoh=j(b z(F9eo6nLhHQxu5$8=k`brGRicM#doC31+nWN#ihoUD{aV5gKgi;iYP7{pg*Lh@!zO zez}X!`7UyPyudFZV(Y%3_rllz8WGd=xt`!*&P+J~YQ>NnUA!2D{5g*Q`EM*w#p4Zb7URqYr*;m;& zQ9pt@h^S`@b&i573A|O5l(^W0xg0CZ$t<#0I6QvF0rapYtSY{bH2Q=&eU*%CCr4~@KN+>XOSl57B+fZ8@ zuH!Q>fKNiTbWrMA$zPjMGLS_iA$gne%RJ?quBNX%mcDRX1h&wA`5J3%z9cs9^}KZ~ zpNxZylCyUSynEzic*Q_LB-&dxJyIem#HO{9#ZpHw)Z>P(oOEIUufN@Sm5*Ba2^Lj( z9^Y#Pd|h<16McVtwJ1+_tS{9sNWZEm%f!dE^~F<0_7{sT0IWJrcV@@3OFOqcIKKGo z&Se+&%|EsKk}&(StH&2#I<@`@^IkFTMK5n3l;Pa+;lXO|yV$dH80Ui<9u|V}!I!k; zCF6XVjg@3gPTFcyA&+6)wp&_T@!r9B&qq}-ikF1nUzNTfMv1Byl(Oj(@9VYJP@mxt`+tr7MpR&(CdSczJ z+q<4VJH+vhSw?}!)P_}EU&b|1#@S!VEkw^h&L%p`JGIo*Cq~`ITiwy0PuEn;%3jLe znMYrbUH!`wIq^rbq9@oM?YMK{#OsGgUNH0Me9`cAwM&Ze$x`N zst#+ZGRue&bGQ89WO#_@*;N@{Hd7S=Cqoe3Rt|A-D^0aai}ue-4@yffZfohLklH6k 
z!I4YlDgnL)m>0(xOCS(&k~}?0g86cs%nA<kgr*_l9Z*&en(9J?vfeiToMatuCrK*~T1I=(3 z%d9YNGnL0GpWd6Q3)>riart;eCQ z_fA{yjk1Qir@wuCYHU?)T5aPG3Xlk)y8fZYA7dRosH-3K52Kdv({)W#tsSVT6_33s zshX^7LB-WmFf*7_Iu5Z_nZ@G;)ufE#(UMwJR*%YRAqu@OuY9!h8?mBcDz9?5_!}X+ zd_1p;T-}Va%W#RG%o3EGi?0Vv%$iQkB?H$)S=Dqy6KZTEwYE%kb)o)2)Y*%^Hj#46 zhU2rx{iBeY<VTCiTrf-?qZ7ce>)5;_AIv-zBO+f|>2tW7 zd-l{p7&)_{npR%&TUV>+mb%(Gv7T)eZ|x0VS!%n~m+NIEnHFY>2YD$cgqfFQc{SF> z4G-vMCM(BB-d0t(qpWz8kB{!$nRPcVEjYes;fd}0o}Jyza$@iE^D8dxU3K}u13CWn zw@$6Qd11|io15M~fA8nC`t@Dx&WTlTnb!)vTq45379XfslxF;`Ks3_XtU8U|!SGX% zo0yNIc5;||RgS2WiD6=BF?qC*JQmsB=w4N9mL9JW8EBOiXPX`8UY-pw*srn9G(DZe zz{IJb+&Uvi!N=dFxUhsm@UJgV8~Kqm+#WyBnmp2#F!%$e7XfZb`8u0q#z+M?nij-- z4zPY=Br6eW|1r>7DaHf%^CP<(V*A=l5V?RvEC-5CgucYZ$%KR&W#@cwapSbHZ8agudv@GE6e8CPT7NRKPRYXDhGAL64G;3Z*a^G;J+)Z9kW&dV}3SKcvH+9pKQ z$Vc5LL(MBAptLurq}L!I*Dks$s~Lslb|;mMRqiR%7*E>cGTQ~ zW3cLm>3RsFZln}cj^{$GdnqZq1m%{XG=NnpC@B@iCZOm9T$%^g3FZ_L%f1rpn@P>z ziQjvsJ3+pu54HB7udOJ#c+w+kTtai;(u=BhyEJIkYmCyuVUcyjZ#W7}>W z+;Wp?-;>jCwLj4_pWJ-s(7IcPR$e}~|Lx0%M!Fj~Uhd_8znSgnMK#&QY)>)v7gy}f zv?}w|3le0KJhJX|yD6Q*91ENH>RoLW(d3;NBs*aHiGIg)Z)=P^tNKeV0 znoJz}5!h0Z(BGUs_5;GP0_#i7QX-PO8kPN=KRFn`(UN9S5`JqWFX3$>>Z-?XqWIa% z)+95*ASEiMr=<#!A>|PIst}`kCRDrO9p$1spb7Pxa*ve_&m7?eOy3$F7RM`sAYJT9#mx6yQ_<9HNt9 zfMCCOSf`|G`iE%x20N!_duHYpboT*qXyX{r+~N~|1EXV;)3BNYO_HZz5eJo5fx2Rr zx|Vrje3AmpBEY^trH`BIIOZ%cXMs5j%voU00{`9uR1$qSjf_j@P|2@wp&Wd00S@XZ z5-c7B?PuaN1tv#e00P7X@i0>$a0|<+s0>;_a7}`t5JszZgbF&xQyK=#`%vW|O8Z8R zE}2NHMb6RfE-~L7!dm>327TiPyrcTU630Sfhca?dLgHvu70S;c6y!`+mr^QANYy2z z=4$k&6PEujztC%WlAk_XK zH+GTR2XL?l^NdBcD6a;ErXXD>0)VR>=ZhEZOTo5AV`~EzEO%VCP-p3aPZ%959@c*9 z;Yn5P2hR4~{_Yy#AqEj)pFG_@xH*Xjc*}?T3%T0x+8L@uddmhmRv=<*UmHxuDMv(` z<&FHDJFQ22oeSZI=3#-oV zSbu3Z)1z~{?j76t@F+m6W#F1d4i$9qof+GXs;iw7RA9s@nT zeXSBvu530c+*TS2es+o>P9I$K1>BAJ-Si{^taTFnL>&wqbK;A}KrfG!_`N!%r?!~X zRW#MvgvKhT`_z40mL5HNlTTRM%GEWspm1D0 zf_4s~4z^;Bb|#5&7AbKKnJMYr-|NuSSJG%~W4U33=NBI<>+}FQAESqg@AtmEa_uAM zV-@j3Zy#S5=RPO!@}4~ZJ42bPVy|JL=o|IViXLVznUSw#g*cVOUrPu+5$1X%!pWy4 
z!zw4p`0(=9J7>2)xWdHFvhKm9t|%&9AvAsI45P z1&xwojN)S1)UySWL2i6tBlp;*2ch*l@kcl1`wvP}GqJISTvJbIY^6X@dP_TMXePCGP`dhX zBBu5p)Czd14i!{V@<9(J2gRmL2SpOXV#%@T(@6!C)CxjY?RZH$;cFkI{0A9G9?Dx$ z?pG9RfX7sq6a3Z|d=@q)S^?d~vTei0(2@>@iOK?p@Sp z8DB2%7Wv8Alh4?SMOyZjh|qIINhwED&w|9{@4$^Qklp>QXs9K-yEe2k+d4H$&fET_ zh760M$R`)0_ohm$TA%L9a^I8Xey#oamB#1OoR98H^6(peVON$=aWz-A)pw5Z%V;Q3 zaWYkOGSBF2jBBlp``#Ge(d1QGXrG%dU`TD~K=m+h_zfjwO@YK=XdEdtaXK`G9GOKVj{XiwSf2)U$S<)}cN*ietJMG%Z+U!3_R)HkZVC8++JoUglbm>;)|zCl*}8w(Si z)Do?j?`OD!pGmdevx_*XVCqp_R$8Uvs+JB#l@uH>t7$%HipRr|M7H!mAxljMZ(;x}! zZh6=46EDrg%rhNQ`n-!qLn_Au%KIHsEA@gCMNRF+tsHLh3qBDSfA(2i%G^T5$=M|( z*}FI|zP-VtH0S1LzNoh9z`BCi&g!`C>eQizjM3)A-r8z3+JJ~TqwU4SKJcyi<|grc zk@U=nFB|HOsxA*MFSkuf(+LWHr>@VZYyM8vNX*31GBV97Jk=w)P}e(7#Vy$`vC%85 zGa$bwsfL(QOU$mB_}WGRu-!9=vpwKaHZ$Og#=8!c{ZF4?mH#())vT8Cr_<*)<}5I0 zfjJAzSzyirf3?7@Q3LQ53_kw=Wl^UKselSJZg9nCJg0iGU?vgNn@LnXBgm&n#A%pG zfuobCt978XxFx5wrx=2xI#CrwG!>EqV*0$pd;MaDeWC{a;zom#CWDg+;mOmHR1)=w zL<)GfLLv#l;e?O~Vt5P*0;LilP$~=M6r<9w=vxy+YN6IHTz02>5cg;Sc`0y90)2Nc zFf0vC4~|1l`qtXp^kcN8hm={|(E`EhZ74Z!3IJY6!c=Gy0x6uJ6chlQ?O7U7lG+TzFgEXE!^YxFD~(d;urZk8Yku zNmX_klj1ht->J5~*BE#r)&4}H`;{btO=0Zj#~j6oZV~^KYpQBY{O=!XDl)!!y7<_U zl}C^4zIJifr4w7vF>SxhxcAPH702jT9Ajj9aQ6(?yPa=d!pW=dU%jg;&2DWv@6Nq5 zs>VWLd7r&=oC^kxqG~J?tL)MX>~k{n2qR$~^(m8Gj$boEyDE#&U}$Tpe!Q=kt)8@- zsa}+)dWd6SZB8{Bs+#OiZ>~$O2S|b*e~?(P<;?uGx3ERemaUe=Fxmgd-gSUUQEcnE z=4)0^Q80lDl9Zr`5)_b}l$>)8uB1gWEU=_q*qn3DIp>`7vN^Jm&1v#X*ZZq`cUZW1 zpT76JfUo;I)KsVH>N7Q4Kj)nK&kHZ<2>x;7`8Um98e;S6C$>Lq%3N_W?^m!5wNEHAlv>4%3}r6#!*=KSVh zX`B$gP50`zdpBzMJL<&xA9pd^Z*g<=l~cMHo9fgL$Q6OAU z78a@_?j6Bbn4yqk4NI!D@$QdkTM+I>!Qkb>aV=~g!p~xv}j?HdJ$ZkU{ zdT9emF9oZo#5J_tE}+RVw9_G^%O;@RAxH$Y;~Gk=y~OrjQU?#29a2TRNbOt{4sMK- z2XXVF5$O@JBpxj5psUKq`aV+GKoE#k+esS15H2B&J)rMa0BGIqQWRCU6;!l0bWx5_ zM8c=!6Q6L2gLjX$ht$+fVdz4%Y?*TglEa!^drq})QoFlsd(F3VvqyfD_}+-f*MC+`ctI`lV`7EXnewKH`a=B(PyKN3mFJHv zJAY)i-j&GOiV{YWFKNl>tV?fe*me2pvmZ_pJo}a4>7NA89uz!vK_Ji)JZ13oOO~&{ z@AdLWPMTyep&^oGkF>!==G 
zpNdQNaW_4av^^8-6U$A*vTpe&9x-;k>$M6oDfEI_qcJDB8OezVEVxnS6W){iHBqgPk za07&9ciU)=OCg9!l>`y&5+%qM{!{R0kipnBV0!z-xfl0IvaF z0}t20zu+qvrnq?ra|&yo0&)Vf0xu9PS>&V&$5K^jD+&duv{>(;h%xUdRnC0r)XOjTZ zo>({#J6DRbd;>^es5~N4mXIvZ%B8VY`FErR_2RN7WhF?f0Jp%d&_ODjNNFwQY`ePtem&LeA?3DG1GJ3|04D6Pm|vMGGI3=MsDi2pMm2D~uLPw=MxKL%WR`k#lMe|5)G16K$J&whK+)>Q^(eyxq5f$}TO za?gv~Z>p*0s=LQc*PuA|Qnb5adf0_fhb_jM3(x&_%+dFlRib4KIb+uGo1XZHwq&nn z&kl{A-I}spSCl(0k{uVwZcVcJQtPH`#k%p5&UN0g=>yPp3p8msZ)Hzx$c%vjYqb`IitZaGSxUXX6czjqc_Z3cjFjbav^sMlZ1^~ zvQF=!yuCNIHeNnIX~&ww4tlov;ZB7S>GJ9V(qSAC^6t0u1+PsRFl@Kr<-LOEj{WoX zn}TPq37)&HaQkVVG1+2L4lz82#&TW@Oo6omxK6iKL#k)Jt*PJ(z z9=p+e>`L7Qy{22%t>&&>*G;Nzyd{<%T?Y1d&0O2<{l(_)ZF=@K?vZkzI3_rqMCZf6 zD0YjI8ap>xd3IZRcAB`f=-byDyEIyRwVJxt0ap3ONCOgNNOj~FBaF!=k?G3ZGLl`S ztbuv6g?a-q{3IZIm%+Y@|0!T02OX~T{|5|1KUjId2l(*c2!E8<0IvaF1H1-!4g6UR z^x-S+f9OC~_y4EZ~N*nU}71fdS5)zR~+`@aTecR34I!v8=ESwdVZbaWfuIr%Cw^!=h5s0BJ zF|Z*4vYoAMzO%08S>@xi#F zw?}2XF+A?|Pb1#^JnXG6BiQEPA-w( z40k`MdvTM-(bboZ7^DQ94RyF2<9^7+_-ugll1s-wS+nw@Z=6x$W3xoo18Ev`)>%37?SIUz_OM>Xd4nbkH)7{kEH()PHu+4h^4wZpxA zDsD$KRcA@Nd`oV}HP*+q)E#j(8M<-yLd|`88Qux<@}QW zg4Z@a{o0Wi-ncAyPV?mfmM_2S_R{;_uYVHs_So=Ge@L6YrDn;YuDJ)BepYW^bEa$E z>Gn-0S~eW5J9wc>?Of-1ZN;f8ZI^Yr&t7Xeu2E-XOOBnb)-jb|xYnp^D$z0Rxu)N8 zQM>NCVUxCbn~r6xp>24t5_k?-af6l}u^6!2u;+&EpZ&@3*Yg_SHNb0t*8s19 z|5gKi6U~Djkly3m`oX@7SdXFVN>o(ylvsrLEb1{t9_SviP^yGOhMU(w9YooK;5KZ$ z%Vg!YC8jljnX0mpl3!()A-%HH$*0rKT?mJunXS~=Ms8@M)U#Glv5~PN)w7it*oh47 zMP|-oOIL}Fo5bEj?BXqT^OgIC67OIpFoJ}|5X8oZ$1tFM0I?OsFH#a1LtFyn_C7@4 zR(``ozT;T!&wFa7ud5ihq+ry-!VwEgKAv6h?zHT8zD*tYb>jQqrG7Le<-PII14f0s zJu3Xwp#e`1Fn{i)OC!e{jr?Bw*^wIuFW$Y^!6{7K;@@6!&|Yhs{y7aV%YFKnjH3L` zyIUQz)4Lw-ZkZpq+tO&};j8T=voz zAAT@n+zX@LdE={3KAJIU#Pu*J>y_EX!5+Lhs^rN8!KNNc<7^HhR?sO z_0+5SPrqgP)(D?>$A!E$()-OX{f5s^{bpUsB5Q*1(IWmxpX$tJ`W)1 z-C$j19oy7}jZw?`zI(?fJBa)JPVNo%eg|olJMq9lewWt(uK`{Iyasp;JWK=Zq@jXL z_pa!jWmtSgDH?hw7gzzf$ad;R>~pff)q?X1xl#^S484*wbAdYxLd{%o$$&`+Cq*KV zv5OK?YZB5L({GC?hgBU(%P0O3N>^{Gs~2PK#F)b^Xs0x^1-OzzbnT>i_A=PDP@-<& 
zATe@~m^jHSU6eNN#NLBAdJtD1;^9X;eTi={2@I!*%P)-hMGz-ng)`6vyp4{;(2i_8 zShQ?^)xzDivv*W|zqx$Mri#g%$|tTZ7`Gzl>s7g<=f;nk88K=`_{U#+4;kw+Vp70& zKZb7Fm7snlXXXyGX$NlwbjYHVVlZ2-H#qgf@l9BTcfjKM88@?SS{II5=^MueS)|0S zzjpnyUrcZXv5XdJyNRwlDo+~-&zmYv>Wj{pO3#@o58dcEr6;{&Ma~(?FPW1IX3~om z@^i+LtJaFMrp*Ta-7|M-y*OdU@Sm2x`{OrnOdj*%xS=nO9XfdWgt0%*9=Z7Y59fUK z+2Sd0eD(Q&$rE0i@!ebV=L}x3=!5w`35I_y7&3X2R)l`IQpZ_j3!V6qz-ZzW$auyO zA5e{D6W=5zIFs>8>I}^l1!uPfX4YGWrm5>&{Gy|C*3ojO&c!2kdN-3pF2@C|)Yf?a z=ege;*u34~l6gi@Ky7MJeR_C(zR&Hfqq;WF51u6uOc^*>ZS(}w4@X2FI#)KdgY;tmA{nF z-J=&VjZe*<#E&ET^tDUVsrcrlXvu4M!j9)M0 z8C$p7c(jgPc=Wk1=8jr-z_Fy>=Wf$3YvT`B&l|OE=~mNAn~gOVT{x&_uczVX6w+9E z*278DCpfyA_+}9Ma1l&KMxHWVXPK70?7E%uvZdsjtxU^7NySld%Z2E=6Mc7Lo~?&fj@cFnIjcH~ ze>G^z*9d(dyXxn$8`gdL)9NplZTR4u>4F!BOk01+F-B_V+ZhzWq-K)1RGQB^FjeN8 zC=AJz$K;d997RZmC@{SvBBv)Py*Z^yme&FRle${{-0I_JuDN=hwYIzH=onuQ27P6& zwDXF${Vwe@YQ~rC(?jE%OQY|URx>0x3ju8Ef@ekwUi$IfQ5V0SA29C6fMMTy4xZrl z))#KWriTyvA#%jrgkduxKAjOZZc*Hq3nE9(3Y@SY;+v(hKderlx2a(H?((JEi+|ox z1dG6^d=q>#`I5}8fn@|d(DCbx{q ztB@5`2+L|jKvr#CN|A&}6m$tIXRuq8guAl+hm_No81$L0yNyVDyASNEkS@ znGR6u0(D7o45OD4x#K!+)gfo{Vq4(`IbZS`;5EQ&fY$)8fk&nRmKL7QbezB?BSYM~ zn24mL?2ZtItjNUL#H^l7BxtTASrteTNIXM{lb_t$Q)cBZH+5C&J1DeZ#j+>2?8!|# zfDoc(LvC2p5)cdM3#KhYCkA5Vpfq-5%v^}2JF#-3V(m)BhOL-6DD^BQSIs0EMxu)b zU6&2It{C+|u9=9mO(k$j>f4B&{E3Bor;T@8{vA@&N?!Y9reM&=W43R}BE+e@XvOu5 zf)O9=vDVQ`4H>m{$y1X*+pMo~-p@%pBJ{AO%`P3|_<9CBT7L1wB}8H8C$aXCS$HW- zJs2Z*M%PJd;HofkR~ov@AjV#D6K}bhmqN#;2felSBu+s@$G*Wbu;r9V^s~dKPu{%e zxp8A(pE~jNDPKJM<)<%A9R2aUS+7j_R`AV)*Oo1Ob?(eJ=S&+gcbZ`2u+iK1Oh0%` zF!%$(TOSJsPO=JYOe-egvEtNhd2*I0qlih!l^{MXg=W4cD3kK!$+zY4xuTRJX+nNa zJecrXNQ0b2Jlb#3l|0@ zDMHf};py_|Y$h?ENhwfd6f0r2DyWndRf%hw~f(1YNrAIf5o z>;?-l*xv)a44H9g9ifFpDin)>rl?7af-&GLSilr2e8mOH^Z4qK{VCxe%xi$x0IvaF z1N}76H{W3{!-U0V;ZZUYIT1@qw}dozlEMmcRBC-ld~JMoPfjIW^p~7Z{32xz{@sqg z5_>;|rKjA$S!(3W=sOZ!2Y?WA%a$T57F;PQta4Gn_+#S60E|J*-HC+<;g%v=xGBxu zWGF2?6qfFQUUFQl<0QT6D8At&F>;cbJIT#ml~x|a+J`tt65B9&Ksu>vBPqqqz|r#s 
zLq;#Zq+wr{b3HEbll4p9Sv-B6?pfok@clMg`z>!+X2qB!L>KqkvyC*nP z9Gs#Eh*x+;NSp$Nj(%c$Uzwe^!pdD{>!q~wBli9TC9MQ7@Z32CD7}J-n?K{=C$|X@ z-E^&SNtNjM7koB%)0m}eM=oCc<~QSktX`Qs{?+fNygqBzz!fW>->^ZjW!(!0HhzBX z*c>yBf!mi2+OtzIZH8dPD8a{L1%gl9lcXhWBsifPiLh(yNl}Tipip_ch$$)|#igXM z7)MB1B`GMTDTF}TT2v`3C>K>WDw=ypyMpMs`ffXS@v?*cuX((tR<9^WnT zU#phA=GU~fYRT)=(zYDVK60sE?LxJhX8CVAHR?C3&l)$LH*Gp=Qm^YOGWM2O_)AQ^ zJFNq{JYuE(DTsd54JfNC^fK$94;F z#R7OrC&EhaG7Wl=^oj=2Fd6}LJbGb4CPv9z#LN@`n3Zdp%!R&#h# zLt>6Nqln~GkmLdq6t8dz?6LC`ItBnWNljek#?EpBM}>i-5<=G)Iw)v#mV?C5QA)*0 zV&Eu7Y2*walgu2L%2SRTsd&n*yc8(8cyA+r#tf#C00M)ROCWI#Ar9fhCPL;d_+rd}g$n?po?EtJ=&4g<^tGls z8Gd7-xzPT`6uq+_ol+M}`(E(gyMm8~3k2^zHFQC011WANpw>h#n92&KqFh;4PAV!% z1(FSwld5V`Q%e&&mXyhhOGJ0-Derw#2Wf0!?zGCA7;=Y@(8AI)mKK-X9PBgWobuyO zxEWmyvop@{wa5-INcK4Csdv`P$TB^^BEheU2rI-=$I!Heo6ZSdo<3yEeof`|LR*8glqT-Q9x+XxG4%n-&B=w5j3QB1i72;JR9r2oX;QXylP(DoG55OdII|Tn z6&JtWA762&deIRJu7IF8h=o-cz)NDOv|Fe|e3rOdO0gG1x_dxI!u@FQ(I?#Jqy8=N z&*wG3Yk=1PuK`{IeHx(nik%4&!$O6%31E)Haq8CT5fjX8;MVSzlZH+NVu_P-?xx=E z%q*5>mM|%|NpyxhI9>*GmP?S-!Cz|UEwk~G+juH$Je5|kO}Q!%=nl{Y!xYdL1z;{R zV;2cxcpwyCc`JdbAn?Rlz#r!=w+v(~0u{CajC~NX4JN3-h~yM3w+K;4d^t(O%iFWEaK(+&%d}6}6eXGG#U8XXSabbsR7;IsMBvtImrj_Q-|m)_fO?{h zlr({f3q+=})Y7TBBsGV`r_)fVgmjXeL6S2`N*2k;p^~0MQZke& zIV7=wgy+irv%B-U$dX?!k6yGv@ZpFz$4`3mn{Ndlek2$*`uz=CUS7HO%X8ND-LfIv?%`y;uP-F#%AiFk@#p=Dtip5tj zKHall^+7Lc%aSPosKCi4$1U&!q7L}v?&+naZ7c)(sGPKSGrcid)L^BG*6M>;^zuIG z8{GUSuK`{Iyasp;@EUkT8n_Q%^~K0w@&XenCP{YUV+eSPx>+((4rtZ1z!TJ-QPh@G zCP6TBa;_{QNd%hEkVGaRnn5xh?=a#X%(w7eYikafSuQmyw(zl2=TS>bMXdx-wDSg>BS(L-eaFiySAVT}O|zuP>TbDDPuDt*3~&P{8!zJznN?M6{J@ZrjRI|vv1G;_1vhX`=@WccG@D>DqLz8NzD8e;BvoY zT4(6kZ0g#oZ{J|$)oJbBW#)RV1|G2nFganSqJWAO zl|=zGM6hMixeBwE6iIc!N2-9|M+wUU$cqpe(*^X>0{9`$E0yGwD8T)mn#aUvGH|^` zrI4^h5*(+%dPDbM>fm$oBMua3F$i;_rY#6UsywM!dJ!uxVgmr?LxZfa>fgo}q@%>y zkCrz6#3Wd09U%)!VS-YH&ySuxWWgGTl5&?jCDYaSezb1RxhRJN?z%6|8o&P5)$5^= zJ8qiqylN4c-&!pr$(7RVT4i+)$-5)GT_Xg6{9O^b+f5B|^(>8iLpu$UuWlk$cS$Ae 
zNcGg3RNcTJOLZgCQa8blq$sOr0BMjp2y9>=w=b!aMrT*iAai5OjhfeHA3r|+;DKou zHFgE~JIfWCW%+MxoF{l)FmTiSY1fXg_b}ZbM}kl4n&gS)ku6BjDl8Dx(4_P6WND7b^2ANp1y6E0kuHDaycje;1j!WeBS( zt0X8h3qj!7nNcCls#g?u6YIzn;JvZSmtXVu@F~l&%Z)`=AnVMq__ngdj!NAy_w|}5 zkX}dK-pnRDIkvOS?@s-~llaFkjjbi@r@Qd{m!qK%^MY@2P2O?FJmM$teswhe;B&# z7)GP0(j)xFYk=1PuK`{Iyapbz2H5}hV@`soP*(p6$hGOL$4!WsH8GQcu1F+ju;LyR zr6M?PgruRBhGFNGN;3*1Y5CIBTv<|%G(JOu895@E1jmzrXyO}zB!hJAp+^XD4Wih| z!I#*1Gk{w_S#~~DY`loAC$aIMrJWbf5-VS&d5FR`T59T3EqHI*fbSN^cFRq&GhUuM z_4_jiFUEUrvc4=B@#b4gW*Mht=*MJTaE%KokVh2}&$y1{Vv>+AkIw3-Y9ZC_q_mMi zf*xcOzSBe#^uR7u-$_d_igO2Rn+d{)5Sv}!q-bhYG&W0X8pXL49fft0vN|a;R@XNv z8=95f3R2uG(zJ73bxvcuvGG^Oj!f0m@F9!`Q5f7UpJi}r#L+d2j4ud=z9e{4@WriT zOT5i5SEldwHh<=asqZg8yhAJ2Kau1CM^%u7B4tdD9NCB0I~5j>j;tT&9!702WYbDBs_JGtc>HFewWuQzPE85)$> z)vOclbJ52-pgP<6c7kbA(2Y=!AAZ@o&*X+ta;RBalw*Eo;GG&xpNPpDkBnP>^uV=1 zv%tH7$!vb@aKyYxyuxJxF>)ZQ;5cDSI=)$1a!yxXu?V0VAz?6B(I7q670AIV8oH<2 zSa3xl7EN};3AJdk!~NkHNQRZGEVyEgSVSrop;1H1-!4g9_aIIQ*W%U)Lujj;ir1ArEYNs%pDLJ-@G zVEEEHWlotm^>#;mR(o8QG%8IVnIaEOlGBj#1UhB=M$3I8;Yp-ei!*RJv!U!xpds}q z?pVR+O)NZ>wjo5x-e1}R2 z(jeAT!xT(dRShVCuPDT-y(_700?B~1qCs9#Cx=n%ZVPE`A@vRNng(fAt30}(`ii^n zZWFT)*RR$q%Xg8t>(mzsCJz_P8G9hqa=GDY!AH*sMm_h6`qE#@!!|@XJoUpjf{!Py zIiE6MqQUNycY+d0SO)Qk>hX;h!mk>dgcXHK82un2@yhTdT81Vtq4BbiI4LA7Q3eT5 z5GSN45>pgOX(TCwBxRAfOq$m(C`ICz(iKxmlIuy*U1AWDHg@IKp$nItadk0{3pR-L zF^KkYy`7vXZ3%BEiEXVgiSyMBcRTB1?2s9i+){P$;)JKyKUbk)*asy8a`jBoySmFj; zh5rtKTZ_hTsRFe6p2C0f8sIg+Yk=1PuYpITf%_*q+6g)dBCLh0J#bpJOE1JVbnzD~ z7@&>>r2{-TGSVp{9TL(CPh1Z{ybw|uBCtHKg2^nE!_@_@r`SwUWSSVDD=b+Nk{}O? 
zQwGK;5xC_Sq4W(^`UNXN@a`5$ECZybfnuiw=A22|(?jPiyJ&hVDO=6j=JVA{EwbWl z%Tkx>oYKyTy%rxg=ZvOiNQP-*-6huoL;qH7$40v##?hYuvjMzeZJ=Km6BMb$GQh}q z5*{auN|q(0GYMG)f$OPxB>gt^$K{vPSS+mRqlu?$n9?d~ais`?nDdG|1Z8~_@&}Qc zI%Rde9MMo!-Q=>ZM~;*+NK3n#9_P?gzskdG*y){u<&y;O2n54l7Azk3!oHc)Jv5f5 zISjvX?5SBZCvU(0>}Q()c+d2uAvUXaR+)Ja-*~z-(kq;}hbY~G72Xka0VATOd6BfZ!$rro^cn$Cx;5EQ&;E`%Tg_!zR&?7nt z_H7TG>N`w7`%WRG8LlFPX(2EM0^beH3z!5ul%z#Lnna|oo7`zvmNzR)8s&v`ikxz3 zMu{xtwiJX9vFSY#DcvFQ9U;-35ivbpK`oBq9oCWUA%)~Oi-b3(Z(Of!?N!|{@6_?n zw=B;i61VcS#TV4}xfsnnb7J_a!#bf22BB>_er<++qHA`|cA>;3fQE9}BI1h;`f~76 zI(soL-o(R~cm^<@feNnxg=di5D_G(iCJT&_hb73P)2Nv%F`L3IguCa!8COWzsLD!7 zNiiudCneR?>UFz{DX3N!*OLYzF?NsKc|prIEz=+?HI6XT^iKN{(F=zb3zm%?aBbh` z*5{{sUR@kz@%DAK_pcmZ?dUFe;cLN1*9A}OzC6hP^$+~NU6g*#fSAJ#8bHI$tbA!{ z>qi69EV!^Vc0~Y~!7SY6AhNLbkb}_G+D#7fe2A4RgBUglB@^>t^qoa|u0ms9p}u#= zEzi3qVV!n~66?5jlZfhb4pEmq!p-864WmOWQ^GYo%ymPZQ`;-^B#nLr$xdmZdLeeZ zZXVVNH;ZV@$W!#(Rw^9R)7P9kv-y(78B2@U>Z(RU@l}INj4u>6Br#ETiG_ebC?$-N z`^MRTC{_46MMr>MFj&#xJ&?C^0a|^dg@sWnn-!%8fdNWNfUAI%V6&nrbEI%X6C^au z9`wbuc{`8RC`l(n#}x<~gnj-&^nrzBl*k>>3wR8T@*A%KUIV-acn$Cxc=Q^$H|5;} zODfdV+db7azu#Rh%!V%eVrND%CPAg5ODPj!x}-wk76b+mSRvYEq(wrSdT5+^bqm2N zx#By@f(khzxH9v_X@&CeTwzi@x$cuC7(VsuLuZ1-k{{I7W~lGcO7t1gI3OjW zCvo;AE`G!{knspodWI?jDD#yfAX)~;Tx1fZYe>!_*?A;4o8)DaOqjWf2m-p2ikY-h zk_{^R5?OHru?b9DrGCLGC&#g|{CKMWT!(AJuJ0Xdb!MEa_Rq;)zZ6EUi*^<4Su@?% zbfuNurdxpAiQj@(6mrqwIbIo$u&zQsPA>` z2>4o!T!@h~6+>sD??8}SS=UCXW36CCscXaN*%C14>p3vSZbaKoc*{ws?;+N9Zqs&e z1<<AWgXq@p4sA>S)m^qX_FMA>1n0yXXlh2e$mAws3bkMz96DD>t>MUQJag` zg3J#ZUp7wyZ)j}^>8T`Q{b2X?XAaNZv3}9+O+Cnxzp))N|dcK zNsCO>%^=x$kjk0ny`c~c{{YOfqv)0i#f0A8DiLyH zCMGj|{k{VOhmV~wZ+UP|X%iuxfa?mH5J#j$s20U?teJ}gb=0&4;qle)|3>*Uyasp; z@EYJX@E_Cwi=8kl^_P42ibGcYyQ9KdsBjyt(4&3cLOSbV?t@5arZ{AhLs}Cl>d5Mg z36#!{Dgniok|1In{8}I~ZI_VxZc;BIwG!f@Ni=n4Z4F>0q@c-^jqO zSW~2aLwZVwIipK17?O*|QqG10`Xq4=_)NYhAs#YA+?Orm8e2jT*is~J)b z`4TH1#@tI{6QFR4qKg4tV;K){z{Yj?ruRgaNFvKxLrbb05@U@*JPkseE#tggvm?!8 
zd_t@8)4CfSvyy#Eat$Lqw_X1YqHd*i#!k<_IHj710&b^TNBZr%e0t`(Woy;;sbAAH zb#+P1&ja9TA&PdwKspK2O$DAiX{Qq3xI)Otg$gNv5MYl;As2#hK}kUsHVzj_7-^4O zBxa=W_lbzShse8#ypt&63Uenfn)}Mo4@b}V{;Yv%dAFpUHH6l7(quX9ee75)JnIEl zDip`#D^|gL!E1om0IvaF1CNCU`hzR3ejmi@*T?(L_8#vKxq9pD1Pa3iHBhj23+f=^ zM9L^2)XITGJXEMxkrU89zyozhC`l--{(RxmgU(JTV?swA+x_;&`D*?aYVP`G>A@>B z)W>bv>RwzMSuS#pZgLD!fC?Uz4$dLevu5TlxAiBMevE~m3}Wdor{c$$c`1>R2MKrd z?W6|wGGHxa>OlxS2u{IkYVIYo^psh8Nx-|s1#W>*0tPNOKjIfg{3D2O4DpF0J_(F( zBGNIFuv8M0MZ!~Msb$13qj}>c{f#>M`&@0z>q`zNM^87>8F}V}V8L9$_;IhTSRt4> zZ{F2wS;D)X2{D1`b<{-wEw*_3ItKUGF%3v*&`o^sTGOI{@L3espS5d35QOV>r*97Fmn+7@W(L7-i;(62C z#VEwjBP%nsyg0S3Dyk;;rl0Mv1{aSQYhH3OHVARCiu21CHq;PNdQ)X=)or`TfM2d^ zy!+)S!D}zh+^}l%i6b}cECVwVireZtD85nvDYZ(GUJ3Rn1*}pmw5E0~x~q^QSgRW( zRtmr>g@_7Zm8_G$8LQ&tSuBq-}S=Oe%s>{oCC3Mq>_ zMjEAKpvVgGUDPv3fqlQB%2#;}@EYJXz-xflz$4QDH!E>)h{H~tDWE^%>fbpW#|0?+ zZT;|7-|~FQATSNbOt*GS?Ns`QB`_904# zPzI$f1SIf5#43>TmzsN1zoDrovK)$Zox1c~It|^sO}s>=-eOZvp{a++%3F%$gW!&! zfm?k95jOrxhhPObf{km7hQkzh=TB-yu^&V9Wz%S-+kN<34@Mm*8X~_{E%kZ zFBgjsUn#$6(yVRQbIYzr$D#X{eW$Ktx4vVyu0w~OUAvJ}x4F9n2+%%IiFd@Cdc_dG z1QL`)!qX_&1F=I!3CSoXaXG@IJYjaZGOwD+t56_`VP0W(aYZ-GgD@Mmi;=sVI`6ua zuoscLVo_dQZD4v*cwwflx7`&N)8NA7=<>|;=CZu5n#_*Mpwd*UIN!rYm)D%xyZZDV z-4Oe7(q2NE%SlHCY0v7YPHrqW3iMcW?7-yZ3l?l!cS84yZIEX`Nj|(vAirFNbZX4h^0D^_Yb-ue&=G$-1+D~>4wk_KgXm={fcblWmkEix|7H8KG0;);yZ5G#&ylRqOMG{Ix2}RPFJV{KhG9rud zix>JO$b4dD9x)QPNU=+V*f~P(98O^gV3K_hQXI-4wy1oqjFW>h zh;?9xbEL!-`3)n*?okqtNRek`w_l7XC_x&Y$^bY);F_ce3ItiXBqBo*oz2AO)090a zMI^n1$tqRmmdo=i(iMNy{3DmLeZFF6&0eB*d(%e%*1M}kJYw{fv1I8|$K4Q`2;}%yh zx)@w=(z8kQ^~sCPYOk$kgpGhnL{TnkNvJc0e5E~to5M%4@e&Xt-rCZlcnm1$A_unkvw{69N9ljaybwu7s7{IC;qJZ(L zm9CP&Sb+sg%waRERHY*d!-mb0L&p*R(LscNJihAtG4Y?g26zqd8sIhXH`V}VJPt?o zVkms5gnezwpoMs6g$!Oqc3l|ULq8omsGk`A!N#8jKYqQ+%WAgOjW18`TX##NP}+r6 zY#odect9v42%bhz7ZUd%S{}?u4IRp=CU8~J^f$=cl3plGxh;;*6-8wVLsNypNy5PR zo`5*9UyK~QSl&@e7@)wBL1vGrn{gNR*_#5r0Na+@&>$UkP{vEY)9VB^m396eUMdaW@r)lZ7^t1L22 
ziMbZ+w%7R5KBIHDf*nK3bHa-Yd@}ME9ynzgl~>qBLUY^VOGN3FviRJ#jG`X!qJuyl zf)vlybx2C6D6f$rb@J^hQGR82@f}fFji|gqf|sprS2lE!79nNB04)Xtbgv-T2OAXi zm%*yT2^SdG`&CdU8uy)`LDUsL|`4F>k+Uj0gD7{I$IO+vJwljQp;}pB}XTh z-%hPAzHVpc78hn3;BMgUWaR5|!BqF6xjsb8!NSAMNzNz=YAfEHI0oQq<*iH4&zt(gtqY5+bv`@1Lo*?y zgrPysol?4TAML^f9hM@_=|`pl)=HzME7{P8hrGHFf;#`01%8rdM+F*BTn`H#4?KjJz4@vHj*b ztx%V99_H`;`1MsEht&3l=-LYZ!aNNJ2Zz{{3Mnb?W-2=sbWLR!0Wm&aj`BjuX$ksP z#I6A~A^9T&7AGp~%igzAfT6)`2x-IFekMWs_5Zg>qmN&m`e&hDp&aY{=p)*NJt79U zi>?rCmQYI1yD~Anb;T`r3+~oe_jEQBS>~Pcgp%Btf=ozaXvw>(UytTjLLI_o*tXByvp3 znD@nEuu#$skN~qVj;OE!MIEf^1|?$VkxK<=1kNwyNoOC~9y;s;(gz^qXf+MK6DMVbQS=!y%*9dJ#E=3p^ z8t;(6_t*Pgk1eu-P9XfoYk=1PuK`{Iyapb<27VV`;RM|aAb)CaCwyvb7ktgl-oxRqEx5$1ZD_LnxuU`;=6!2}Jv3Z`Qm1RwyFB|?l~>Vd!n zBu2^}*6ahs0s#v|udp|AmJ^hekX(sXX{1$3nx&*kOmjdt3Mm_EZ5I{bF$&3ANJTTD ziJt4J(5yOjq`H}u!#GwaFKEaS_QuN?mR*ycT^ z7yU~UO`=?WG1fTYq_1wTXBgw_m6_m~8t;*mbjH9arJ@P|vav_fE>Q>(CWTZotQ(J# zL-i520!l%+7v!j1RhR@76~2NV0G6l;i!Gt3KPVi;!o{e-T?<=5IZWqo+j-8`RhHEHb13U&t76+(M z_U|V77Sed(@B4KgtE8Dq?qZEG38!6SU83T5R$E=4knmusJ{`ds2)U}hBT~; z^CWW9H$@;c%7}9&s>)uQ3Fdk#oSBGPgeWAV!di`l@HzGgb<)Xc8X!bELAt~M3Di&q zyulG;=9N`y7@K^vbMp&xr@XR$-uJpE=jtAxdUmTuu!V-N@nMV0d-P5nv%2=1%}uMg zfcW}S@AO35upl#E&-BVNybgsfaLFP0Oh3G#dIbtXxL_)lk$^kizf|4DK`bCF)sa6~ z;f4yAVHtJRT}XDr=@TUcJ?wifOlOpE10s}3I0AOyV6g%h^y(S0a$KF`<=|T2ZkIAj zu@p{U`o0g{R2_IsDi7@>e~{MzuK`{Iyasp;{HHabf<3(e=stV}V+-a@b~&3usuaIH z=F^3jPQSf=#jNwE3$Qj{A{D_7g*zbNdO#==OjKwP_aHN!L`eCD&ZV=q)RBJo;&mi z&`YA|=mE~67b6HNqqx{S%>Oqhv!}ivE0{KX)baJRwAFsrS6`;3rWIkQ6=J(v=hQy^ zvsVIa<66q2?-T~+r3UAvIzl-#`lLg_M9kn5)fUP@Y*05SAM zgO@iq=(|{N)j0ar+=3x;|WgYLvwE(n)3N{tiqCLSb3!-SZ z*j-8k@xaC&0o;1vFg=1>`mR@90EmSvxmF%1vHL{J0~+9Wc@6Ly;5EQ&;4#s_<2Jec zX|-@*hu%h^0voSQE1dC)My=DZDC(3ue(vb2pAA`}d10#hu__p^5U0(0^WcE%k|Vtx zP45ebAfQq&$e^%_w$3(-S(vNzmh3fsn5h@q-RlZ%Rb zyzeag4N-u=Uc)W81B}C)0v$$VC#^mG%fQ(aUz|Vj{as52Y??Fv%+8|`jvMW?&qllb z>R~qN;D(s%Xhr%(MbPa6R@b zDY!!97A;x$ga_ezwxj@t27lo=YtOM*O*UWkD zrzxMRZy9!I!>pSpj)XXzj&MB|X!n!OnN1d2m!f^v>Rix{@w3T_vrdjsH`Xx^^afiL 
zAZ53lLM#gN5OGB#(NMd${U72h^dACz#R<3Dzx=(?$YIMrRNjxNR1NljO8>g*f+w_c z?|b5Fyasp;@EYJX@FZ*CubzhftZw@CM}=zcSyyNXH}!&H<-YKy$<8Id>gDa9g6zy|oDa2xraK};N2t9>v$yt0w`^ixH0!Tr9<>u2z-k&$+({0PX zKD~d`iCqIXFP?Z}&q~v)3w6)`c>Tl{C&S67_YYpbaL%Qphg>bLNBdm~aM$v8KdPtg z92LeH?!f#-+f*ugq@btZlu0#&4FOT?XD_7Wbk6CapZ@PV`_SXuEqKcB$$h$IzXjD( zP(ho2u>8GtxZ|p)bFDqJtB>j+r)R$8HSk1hfOGEm`uCscH{mi(fRxuLj^&%PE z%gw(W#^5l|!|@esT%r(*_3^>}g9=(xP2cxdC^q{SVad{2aK&Cs@l~I27o#<<0{Q}E z#>0X+9H0S9Ju)e70mSMOBaDn`k_!E@lhn*^P5Nc)TPtTjGk@|&+n2qvY{vWBm%X)V z(OX-VyuWJ&GFgv1zH6P8_A!6QqaM~ac`1f5!H11+tvae!A?^mhR+B^ow8lLVivqgD z;ESI;Wq9mRy7X;qs64`9FCLt8c#GHSh#$Kn1RNeDwr>i+tA~ zg$DkUbIqT5);)a1P0%WQMR}8y08!$ow2&{CPJesTlKsK%mDFI>7jeWQA`V}vG>S%W zgaaG>Y)}+F^%s_Zf#rp9YU|dz z=ugx*`1I26hX1;0gPraUMii>r}7OHLjBc>au(wt6H!ZkCCJ4EQ`L%7W^Evbb+S zr5RMXBiy6^%y)F}M%;zl|FyiQuKV@;$B+LH`tw-b(qGSfo!7tDI#q>}LhMx4I#AzEByuWqX2(|6yG~kMjhf)$@H#K3g zXo}TeUqBa*z-b5@5#S9Tglirs`#qgS4s4*5>Y9h2<*w;p_QDme|6Gg~eO#~L%HBFo z0qQ^)F%3(n8`jwg#W2m$zJY9^QzjM?c_)$UxZA!mZrJdZvqx-OGI-mf=T}S{cYf~& zYHL0|vT=K`_0Xf6UfDcXyo&+p&mX|dDE=v0vNLX9IYeLud4h+ByQP_0GWBchfx z3he;o9*D@|y2L%VuY0O~{x=n_=c*F7W9#1t*X;ke?B7)Xy6TA!RQg}@IMg5L9>2?L z;K|Yehp#wrrLucJS+BtNme;_e(!k>|2R*D055ZSrISfG*9SIqrh3<^vq%RhIJL%ZY zL2DPMi`!}?peO)f!6g($AjBPLif(8fQBKGj!ydtD9JnvCnmfW?`>+Cy;{JbK^Y*Z7 z?;TTJ-*+q8fZ*|}#^}Y|Yf`FH36j9{Jq7IaEI~aZQi!{hLeN{qmt~KbJ^AS`KKRFs zF<4ai&h91etF8L*=!Q`j_s+AqI{d;O!HTJZMPH3LvHgovyOx=0?(sAotF~d#!tXK~ zE0N0&5DWCG2%ZNg1m`Ec*HiZOp%YfO_d%nFeJ}T2(7%!2tN%+Q_rBS_sIY%`Yv2)T;IW#F`a08p!tI^+ zG5brv@l6d-Aksws8*zF;;u~Y$TX0$Zxw%t}lOmf~{})9ll(-#;neXgd_T`0rGxbkujRkQJWF8uzG* zxG#cxll~=lTvhfz^>6B`>)A^GFZaK%|C?ri@6`Q|dz9b`J>fTA15cI)R6ig5t$=We zA-EE|stggT%ecyu^^SaRpL7j8F#J@zfAxE*9`wXk`p>x!PLB9>19AwZ-9lwoc46w^ zuRfi6a?h~6Ygb-BUrm_1GU{Bz{h)wCMTHiia|$ZhIH*6bmIwMnQ4t#X(SToLpZ`#*S`gEA%s9%pTutepx-1%|Oy4R+SePhvgZ*G|T$??sDk8Swm!k)p$ zH+*_-=X;0O2o_CvdeyYAt{q%tf9)sRt1HZ}yz}$SYoVSsG@6ShToxghG6(t44J7N4 z0^!l@$;e}fl0{kcp|qEr^nKEIR@GG%di36=(oA3ffA78ix5w{2^8VVRdhY$b;P-e9 
zJgFMsenA|#qAQy37am_dso(40tKYx(cmA(_%inwM6LOpbGps#_0ws19W?=waXq=fE z=t+jGeMDN{Qa$|p@e8i1k2<<--2ROT9Su!{(zc6X=VE+{#!8f zzn%2|Y9-^3Ub|da($%>9*uGCz%o(zF?(6Gje|miDTL)GQIJjca;WaR44LQ2*`OUMR z-8AE!UzdMxbo#fnfGut&1Am@%+}XUAFm06ofl|05(j5s4ftNTfwTs129fXU7e+a%} z2d?S`RX|I1M=k$Q-`n%vNHh0>S|;9$!7$Z-;+6r-9#_ZvJZh z{N8h)kmC=-SABLK)_4U7)Qhi#@{Z4EOkA#i{`+gk2d$ia(EL^t>k~wn7B~9?IdE1i z&L;Esz*h)UuaR^#5&4RfzkIx8#^{~PpPm2pD{FszcmK-QcQ1Wm+x!ntY=$9g(66fm z%fEeR-_O&vfBWt-D8fuX*|Kcuh2xdfiq+3^3LpxoDw04^z+Ffu#6qlI7h(Ugit(dT zCMu98WJLWR_p`rF_&TqFzn=!U-!)}==4qt&8U4oz_L}L-j*g2D@o!z^{TyyH_y)RGwqMArMs2>e3Fr=NK z&MYW_1q0N-_Iu4fL5jk=bSY(>MA1rQ4McR^&3fLiJ4S9;^6J8G2W?*X+3~Hf>{$HD z?j<8H?O$Mj?SmuhU)(tR{R1mU9NoOyPJ64f(Ff}nPu#Pqj3@y_DUK3jks?+qQaCD; zf{%-uwm5+f`y(jSRhGbqUho9z@PFCWzxwyfx6fnwv!2X4XTo~W>;V5DqSo0r!YV=}l%X@8fI4{!Mi(h_h^r$X98m{y4pu~?{0ANp$4*!>3?HgA7C$E9O zrv_9%5TGWN6$?NM_zJrmzEWL-3U}o1sSEzC9#;+YAA$Yre|3NRKkLb?KO6-CU$OQp z?ks>7C679E?Tq02k=p~zSD0NKw`Zkbz%$v+)$K%rzXSK{`R{|T5Fgbcr!U9_y|;@x zfwE4S-1>6e(pP3rdS~7I&kk*VY5lyBr*=-de0b=QjlT4e?1~hpxOjz&|n)x{iCJ!5T zVb>mS!`T-P3P!!R^XkcVqUezI^hRj2mKkpBpal*U^fmQApJX3Z3+O-RoL#aNe4FPD7vfV@HR3MIZf8XY3ZKc`Vbc$ z$_f|Brcx`9ul}16^7s7f#=q774-NeBzn1?$^ufR7-;48M|AQoJN ziu5}Lv(N2*eeJY$wpY}Ht=?ZTL-5{)xEg_P#6)HpG0)uJ(b*T24I=725p-rc@x9axBWa|!L*pW6}3bJP>M{22(+M$E0r>uc$$Sza_X2xZbUqy2W~`wFaJk4PE$Dj2$ zUkSXy8I(}z-B^N(H$|)+O+1r)(m zD%%_P8(y4yVefud{k^XGlaBpYduIYIMRD%&`^s z%D&#qz6!{`bJ;fmSu_MxKv16~V&W3@yqFmCG>J*{7)?y_5>Z4S+udjDneO+mu6vuw zox60+bkB58|L4q^Tes)yufFtu@iMs5%7 zcMgz?C;~(ebd)(EO7vynmB3?wSojM9(v(=GhH*3=dHv{Liq;z@7O%|YXj~mxTkH)X+tL**fwzEl5ccvH+cP`e;DyEj2MO*F%v6{tuRE# zpHNxy6%At{96cI|RbfuO{05=8tgpOwy=qwS4+#X4KtsRBK*C_5b4DOn_hCc%=Erd< ztE{q=8`{8ylkZgwfjw@URW#(`l{Zcv)?>kxPtAE;P^ZVM%hC52jRScc9e}d)3eaM4vB@jm z*n|kZfj-5UcG&V>;wK3NlR!g1oM6&msdGgjSNCB<`R2!QbBPAC1IKLTF-CRp7ccZ% zH2Lzroo|>lp!@1sZRQkxd+ zP(UGp!V%GhWxk|AStN{ZD0D{sy{dHLhUJ&s-SPY$?XDeJICSTVk-JuQoHc6r_T^pY zkNZlOR_FKX*kbaaPV*R8ScDhqEogg`xAQHg%DQ`M2u2;7Q3 
zG#t;5TgzrCBwl`!Krjh3^fL)24VF4r1afsBHk5CE9G6cAn`>cV_)4tZRBC7|_HQqo zKLI0yw=JLAZT+061_j76~P&!NMn7vIk zLJJemu-YoLP^qDRW`uw6`cInO+xe!E_s)3Yp(W4l9lfJu*-Ho3zJB^f|_U%6zH!z((4BlgzOKNij&*J|v2EvAp? z@zCOS3nuBl}O`y9dy)2$$-{(Sti_dh$n`$x}xZ*=j)&xqwoKdFw$?EHRr z*^mD$e5ObskOb=eu>_I?3!O6p_1=Uy==Q6jeEojhl2zt>AR!^58aB$o9B*ib*ou9- z+7|V@X~ysy=M}Y@Gpgm3!RNI5&ftYJ@HCuQ3=;w)BDOFNW6qHO$Fpi~At|oJ;HQv| z&^dB_^-4|j)QOil9ShUZMUlmvDFY8(#zdcDjHvxRG~^7<^aJi1vN&{qogUT-?6>(%rj1 z#r*k%e%j1850Kg~f=QMiBtQZrKmvgyV85lv14{@AywPv-=11?|J#ok_lkc1O%$DJY zHr%yk&J7dpE4cgu%pejAG>8>R4RmCg3y=GLh!7Vp`6aI-IG{IA0!cPN8V4;K5C9M? z!;VJ-&?|+t?&bT<58m5#aNmY~d)EB$;IIuVt{c*4-ZPJ1HK6e8op0&#;FR;ab^O=% zty&G~-*>^Rb#9tpVj|{xkI~_Of6`2#j;)7AOOTG5l@I^G4rASN(p#F zf~N`{TNpfzMzGYCv?;5u!ZfWf60vtqe75r7TeDZ_eeg8LaJoVx4 ze;+A7Y8c1G5~jv!T{{`Ufa6(F*HA__k1G|*L1TG$@RI~cfCNZ@1R5vcVAJCZSD~2L zF9u^?ufF%o;VY-L7}evv{_UGh@85aZq$>yB`PJrEpqB;`!hDJlV}<5mS#5ugFAA7p zkWI=Pz`>gBAuWc%Qy2p#R8zSgFHK+=5lfY#pGt?I`Yaa4zZFITrHJMA3 z1{oGJ?bBsZ_(=jJKmsH{0*w<$yP@&piL%wM63@p)D~-sUoh5CH@A=9-EiWI}^Lv}; z-nndAL9>g$f8<%r(WJQxV3B6=J3q-q2TaTO7G&du2J6&JvVh zBjpw}6%w!%FHD=qG%j=m;sFEnfpL7$q{0spAOR8}f&3;AH1U8{*`s1k5vFZNcO7}+ zt{FqV)vI0W8H4WKzPQJ#nP0oDRp-$I%f)ujVn67F&i<>KIa ziH8C-bI(D!CI-F|I}+lBZYxME=quc6wX^G!y``7V)qN{U#1{#W011!)2{c6@SM5|z z@)eRGp<~w;&4`DM$luC8nYm;6rT2dK=2=6>J-g%j=|ftM9Z+z^g;@8g41fK@5PxkbDc=4_C$9?(s*5fxUJ!M3~I+TN$h~?`ds|IgVdTywg$Y^^W1^o>{EIY34Dl^&Z2E7~OtoCOvahcW;4u;)f!q*Ktp{=wC38)e zfY+|CYCOKSm--6rJ_fYVg;k+p=1^qfro|WB)wb>2Q9}-FEZn-N^TLVew!7wpp@of@ z7Dq1@_0?HX1X+bQFv?l(&`wzFLaw7FAw*IXt*H&(0^s>t5`&jRvFs&Uwrz`Sw$E2zrFpE z@tsyr@3&)F!Ht(a{NhuUMzl(+^1HsWqgB$jkva+rN~$K1Pk!NF=~)UbbaUZNeU*%e zX%ZlTY!Yasb#UaXY?g(2r4n%DBGO6P1cb!GTx>D&m%#eu*aGJ5k3W3yk@YRd^lmwI zV6&+MZk#jnf}ZXFvum5v1|BoS5(NJ4ze-y>DV+$OXJ57JFvuhL3a+HYvXq#T^LmZb zz5%j);#n+AkpKz!jeu%(;kWVyJzmx8OZ&zN#EgV&uVIF*bI|UW8PVN8efIv62d*1a z_}!&rub$MW`S{)iSN{9Y|M6~_SWtlas)-Rj3H{1R8HmQoqf2 z9{EA$K|M*d3zB#_4h+^)d$7@aSQ=Jxdso_{etJE|L% 
z5lzE_mvPaZg<0w#QE4zp5+DIb2)JGII}#+Fq)ou>>l-}(V)-UC`V~m9P%$WU%Bg*XDdy8tg#nH1ufh!wCajSXjC# zrbjn6dAq-YzWrr`xy8)qR_1 z)eDP50wmA~0k`X!MzZEtSs>u{^$nhXF+PZuL!qxuSB0UkuxX2?$1C-!dE1r^D4E#2 z_^yJ^S6w*h_G`u#UR>DWlp)s1gt(IW$`Sw;x8x(ZAFSgZNm${zlt?b7%pzH0$s?&Y znFo{XGn?rw{N$Mh3HXjcHh+W6%R2KFO5A;t0K_HOL-z@X;4gt6CZSRznku*ublp-}NkRxr1 zD@$J`Dh+MQ+J*D?Z@qoa@N;{$?zVl_WfKaU-Piquq2nn+OxJ>csiq*Pke2!ie&k#u z>MJ`+ugPnaWdpbC2l^+0AP~s%&z9MFgn--kwnvJ|FzgU$L|gH0=5Z2Nkrw?3KU&@nk$wjfEWW;Gu?nbM#8RW^TN^yO1?Jn=8F5KWS)3 zljM_)oy(L>8@KBR`X_-P5Xk0lmU(%HfZO-BcS_1gnk3M$zCsUGywZqa&Gw$tN38wn z{=S@shpMV#B%ZwrW1k%4_?46nY5;4cL2}l6|fg}^w_#mcWRYk*) zSmlPc=e5HR*YtH$(fIe*MKy*f_X^Uw(+9>u9~Es;os;u`c7-tZ>$ z74oCLs#UFxtHDMtO-{aYq7I-B5(qkhocws1DG88(rwAms{^)VSyLRmbJXaXUhR)b? z@U3GXj959d=%Gb@H_p9w?x+@v$A5e1-EW-udzBH7C!)|&cyO@F)K`#d;)N1okq2`c zJp3z_H#wdlq?O=!&{$q@ev$wQBopx9Pn98&010H7K(dBO)>pBF27y!=tFAJ%<9fxa z13PB!S~Ge_$@Q~Gwpcu_`HbO1wk$o4wMfO%q;be8j1;2(3jZK}IYfy0?Qlqal}tD_ zo#o5zcy`Hc3ezM&0+N8^KQuZd0TS>jf#h~y76BqD9E)HCFk)z>M)ZTzNADanaOU1k z-Pg>#u()TptqacYf5(xJeuD{GF+4$N;Q2wlG!{lLme5;rIV`c@gP5I_?40R31blkK zSYfk0B>@shC1CwPm>>ZXAOWl0mnh7Ao}rH7=%KyZ%pv_yB7<(Y|80)D^oQd<@tApsKb`|q0Z zlK=^1PM~%y@1KMa+{UmC4(4KEVpXgf4-!IRz4^!AjW3zA@adf+_pG{p?ywefN4A+c zeAV;&L>Ct3y6bV6mBwQyqhaVO2njL3NPX3myv+OxI_rTgBMFc|<^-JmRni*?kbv(9 zh-yS`Vqy@OA%VWBur5{*#VFzItxJj?nmuIu!cMCuw^~qiZs9F+ z4(&Q_XlLS~P(m!}4zsd^h85l6J025z=(v5$x#S;m5${yz4INLlv~$Ff>*$~pwu~e| z0+|zV@Fz)EJ|bYJ&d~!V-@G#>MtGAsve_#NYe;gMsr$ zPuyKHdf(bEn`U1-dqnH0L;s+iDK+BN@o;&lI%;m0Q;v==b8rxlGr65|b*>aqsrss+ z<7vm~wVN>WmW3WjfCRkuyJkGuA&_=MVDrT6l!Cc9LO@i54d^SeY$ybh`3Zf6s~?Uh zs`N;OQMvBP&2#pwDcZfF*Q3ijZkp5VfuUpeY(8d$FouX}?Ra=l_F~C?Fq^lHr_3x7 z71a~Tfi-kI*+w<($aQqk30p=IAc4#YIQWyKD<2V1t^ZZ+eT0IcifSZ9U&$*DI6`8X z`U;ykM-ryQ5~GCSk3*jpFQ2yIr~8WzKGc23qBhGXHJ>8mm5^AP zI7M`YC3Rt0%v0|x`Bm!i>>rUj3gW6>eU*AVlL##!9!X^0vd{wwkbp;i&J2SDNFav< zWKC!V0I6W5QxoPQ4BClU>9cRVFtTLU!XNK_;Kj#AJhi3Mipl@cz3s~%yp3lGPe&^; zza2|rVSo^CskNlUf5%5|)z<5Z21iViG8YYs^up*Ww78J$fgrVsV 
zmlW?_Tl~V_yB}ZMX4&Lxiu!$w4lYAK6%9i>$-XI^g`&w95>i~rt}T4lF;jxzhNepP zsT7O{jccj%FA0!9_6T^eDqu(?Kmr~o5Q{+^8IcGy5q2lU@L%|p5$&^hO3y6|3m;#5 z`?f{r4g20LQ%4;&^m4>+iQZYSRsfWsq@cZS*dM=n$bYSb zBZur*^SPl#%=OQuwRwv8z`s`iRULhWuk6H}O&f&d>$ioS2@)WItP${arNQ7yfCTC# zpsGFUb<)s5)J|9vN(c@-7$|zOu;C!~#L=pZ=$|T&PyNBh`}b|?v3Grol`}4%FzE1m zzdj=**4dFkS-IhC1c~E#CwE@SOjYA|+rA+bemS1C289`s00~$G{8B$K&ip2z+CJtt zjnvYq@=>)<&9RXqqV|wq>rx|Ohd%7!j5l~vIi>SnU@J2#IQmpfiKj_7hZ=}U{<_ig7?Alv~ADquxuKY^^B#=DXlr}gTC@BVV`3r|je`Ki0MEW3I3#K9Yu ze~#@44dYBSEXD+5F>Ig%AJHhbjjmB@GGSGG_*bdjs?%Xu_@V@?AR^K>w*rRW~e$+6~k%h6s zh@QYep;gPt!dh;V%WfxaooF**$7ep-Se%(40TRd>fqbe%SQh^hu-PnHX8tW{dXZZN zx0y&U7yD<)Fko*gD^Xox(`P(L2yul(RYgTC5`oR}XlUl{?T}d0j~pyM^n*5&MsNAq zOMlfue=9#Do*7KUWkm-+(xL1S!zcc=AB=UnbYkaI$$1_aOV0cx0TRe2fjp=>ScE(v zV5dz?w#kDcD-|KR>~_*h&eJeP3^+@^kbokBUcwv4#mk(&Ua4v5r#fXATVH&>%c=#% z&p+8~>*`LkCT)B9c`+9&q2VkmYn&(-YLII+NdMNiO(tNk^ls8&o*Br?!cP()fou}U zvucFJ@hJhN#!$qxF+j$FnuG;YSsnE%UQC5L<9gJvoERhh?x6IW_sju?Cm0<}ZkFj_-(ETc4dTWt>3nvHTU+FYno4-)^EjHOwbaK z5{BYf+dXb1Fsg|1W|FDz&JAWO!%~w#FbTL_K{?f4sQPicT=btF0k2(0x8qUu@05Ld zS0}@>5PX>ysm2!2P*0i>u8d+K_f4<8e)rmSR~L^TxPH^8no$)=2t^f*L5YQRv9^1L znfmMWV74+WH3?MJm- zv`>$K->;);{Hl~x{X1o^8mA-nn09r>(9R^Hv8Xx8swxhZRT|bYEO^{}pGX+x33G9V zxCUePUj?F#VIfH%s038&Rj2BIxBaM=i}vXe@cVUCjbD|Ls(+{KRpWHT9$i-DhF)qU zpq&zNF@+sd+3}Ph#sWjSQ5F~5<(!UT*FtlVQl0a!g4*h^+$0bT0;=_@Q}w^wepJgv zdr83U>q7t8A&`^%8Tty^2_i}=C`@q|v)yGV<}-sZkjuYAfSKcmzMp&*j8=lBBmojg zhk$B5oSoJo^ITNRMSDpgbMo}y1p+y_pHXlKCnzZBE4-2ZCy9la%sws2Cnpg)o@qLD zF3%wW5(owX$19vV4s+Gk)Cq(C+9aTQy==O*%vAl;K0N{!@a!jKz*bk4wa?($zAVIq zel7Er{qUU4F&M1`OGyGGkPZQ7YnxPW>9r1-pOqR3kJurgO5TpoWE)lgv`>#fGV?5_ zWi^CM<#1uSn3N^DvrrH@Bdcam(3DtNj?MAx(y4QK4hfJzFbFta;nZ=MUh9zgS#`qT zzcvY|lDFyBGE?M=)h&xw;@v=S^O36MZK z1l+Ea(rXH@~RwE{nI`@0;<==5qpFx7hVuGkq}cg`U)xRhEuaOq4=U^ zY8E6G^;J5xHqRje67VwtN9tQT@g0HmT66!NrFwzO{hSNq_`MAm;?!sO#v{`vheB7@SYlE8=%Ds^#^&eav5^1R#fZP664!y;LWS-UQ}+^ zgsPFN#^bhs)p%%60wh2Js`V1>^Min@-tfD9eq1&8O5t`*s2aK3@z6gBkN^p|O+dBY 
zqkVo5@Vj18jXyuGntP@2T0P@-JoHZjBtQag6L7D_re`k`P}L`?_Ax`p+y3!X%WNL; z^0lxNn`%8^$7iyQs()4cV;QRTc(P5d(tf{c zZvU=T{j1um@>lgwdlDc467U)U)q0Qi`9UBPog;M=#8tieD)o3Kc{z2i->;f#y`frO zRsX8?UW-RHe%g}&36OwO1XSxo+WU)uQ@X_|KmMZU{b-%42k3_cNPq-Lz*z#`Z$TJg zCIp<)MNaw2q%1r)m4LIg0lkp`36KB@I7J|}#^#ZHBH)xRa>`FWvGqb(oT>-thXhD~ z1W3SH0$yk%7=wERoYF;3`EjqL!Sw8GZ9s1%KmsH{0!|SKW~;M<9%X`(8~mztqtgn1W14cNWduqUT!cZ_xZ_ensek1W14c zNWcvOL2Hkk(nU`Bahtyz_e>@}-7as?{AY44crFQ$011$Qdjx{k9yz6pobux~fA{X3 W^gO#=-k|wU?<(*t5+DIP1pXi7!_|)f literal 0 HcmV?d00001 From 2c87b19f5743df70c3d04a8b0d26a52ae6184af8 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Fri, 7 Dec 2018 11:02:32 -0800 Subject: [PATCH 033/100] Provide proper calling conversion for x86 framework (#1833) --- .../Dataset/DenseIntArray.cs | 72 ++++++++++++------- .../Dataset/SegmentIntArray.cs | 17 ++--- .../Dataset/SparseIntArray.cs | 6 +- src/Microsoft.ML.FastTree/FastTreeRanking.cs | 3 +- .../OlsLinearRegression.cs | 9 +-- .../SymSgdClassificationTrainer.cs | 13 ++-- .../VectorWhitening.cs | 9 +-- .../SafeTrainingAndModelBuffer.cs | 12 ++-- src/Microsoft.ML.TimeSeries/EigenUtils.cs | 19 ++--- src/Microsoft.ML.TimeSeries/FftUtils.cs | 22 +++--- .../Text/LdaSingleBox.cs | 42 +++++------ src/Native/CMakeLists.txt | 4 ++ src/Native/FastTreeNative/stdafx.h | 9 +-- src/Native/LdaNative/lda_engine.hpp | 8 +-- src/Native/SymSgdNative/SparseBLAS.h | 8 +-- .../CpuMathNativeUtils.cs | 46 ++++++------ 16 files changed, 161 insertions(+), 138 deletions(-) diff --git a/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs b/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs index 1e437456e6..bc28bf8e84 100644 --- a/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs +++ b/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs @@ -7,6 +7,7 @@ using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using System.Security; namespace Microsoft.ML.Trainers.FastTree.Internal { @@ -70,13 +71,14 @@ public override IntArray[] Split(int[][] 
assignment) } #if USE_FASTTREENATIVE - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + internal const string NativePath = "FastTreeNative"; + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern unsafe int C_Sumup_float( int numBits, byte* pData, int* pIndices, float* pSampleOutputs, double* pSampleOutputWeights, FloatType* pSumTargetsByBin, double* pSumTargets2ByBin, int* pCountByBin, int totalCount, double totalSampleOutputs, double totalSampleOutputWeights); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern unsafe int C_Sumup_double( int numBits, byte* pData, int* pIndices, double* pSampleOutputs, double* pSampleOutputWeights, FloatType* pSumTargetsByBin, double* pSumTargets2ByBin, int* pCountByBin, @@ -154,7 +156,8 @@ public Dense0BitIntArray(byte[] buffer, ref int position) { } - public override MD5Hash MD5Hash { + public override MD5Hash MD5Hash + { get { return MD5Hasher.Hash(Length); } } @@ -178,13 +181,16 @@ public override void ToByteArray(byte[] buffer, ref int position) Length.ToByteArray(buffer, ref position); } - public override int this[int index] { - get { + public override int this[int index] + { + get + { Contracts.Assert(0 <= index && index < Length); return 0; } - set { + set + { Contracts.Assert(0 <= index && index < Length); Contracts.Assert(value == 0); } @@ -266,7 +272,8 @@ private void Set(long offset, uint mask, int value) _data[major + 1] = (_data[major + 1] & ~major1Mask) | (uint)(val >> 32); } - public override MD5Hash MD5Hash { + public override MD5Hash MD5Hash + { get { return MD5Hasher.Hash(_data); } } @@ -291,8 +298,10 @@ public override void ToByteArray(byte[] buffer, ref int position) _data.ToByteArray(buffer, ref position); } - public sealed override unsafe int this[int index] { - get { + public sealed override unsafe int this[int index] + { + get + { long offset = 
index; offset = (offset << 3) + (offset << 1); int minor = (int)(offset & 0x1f); @@ -301,7 +310,8 @@ public sealed override unsafe int this[int index] { return (int)(((*(ulong*)(pData + major)) >> minor) & _mask); } - set { + set + { Contracts.Assert(0 <= value && value < (1 << 10)); Set(((long)index) * 10, _mask, value); } @@ -436,10 +446,12 @@ public override unsafe void Callback(Action callback) } } - public override unsafe int this[int index] { + public override unsafe int this[int index] + { get { return _data[index]; } - set { + set + { Contracts.Assert(0 <= value && value <= byte.MaxValue); _data[index] = (byte)value; } @@ -471,7 +483,8 @@ internal sealed class Dense4BitIntArray : DenseIntArray public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits4; } } - public override MD5Hash MD5Hash { + public override MD5Hash MD5Hash + { get { return MD5Hasher.Hash(_data); } } @@ -532,8 +545,10 @@ public override void ToByteArray(byte[] buffer, ref int position) _data.ToByteArray(buffer, ref position); } - public override unsafe int this[int index] { - get { + public override unsafe int this[int index] + { + get + { int dataIndex = index / 2; bool highBits = (index % 2 == 0); @@ -546,7 +561,8 @@ public override unsafe int this[int index] { return v; } - set { + set + { Contracts.Assert(0 <= value && value < (1 << 4)); byte v; v = (byte)value; @@ -607,7 +623,8 @@ public Dense16BitIntArray(byte[] buffer, ref int position) _data = buffer.ToUShortArray(ref position); } - public override MD5Hash MD5Hash { + public override MD5Hash MD5Hash + { get { return MD5Hasher.Hash(_data); } } @@ -640,12 +657,15 @@ public override void ToByteArray(byte[] buffer, ref int position) _data.ToByteArray(buffer, ref position); } - public override unsafe int this[int index] { - get { + public override unsafe int this[int index] + { + get + { return _data[index]; } - set { + set + { Contracts.Assert(0 <= value && value <= ushort.MaxValue); _data[index] = (ushort)value; } @@ 
-700,7 +720,8 @@ public override unsafe void Callback(Action callback) } } - public override MD5Hash MD5Hash { + public override MD5Hash MD5Hash + { get { return MD5Hasher.Hash(_data); } } @@ -725,12 +746,15 @@ public override void ToByteArray(byte[] buffer, ref int position) _data.ToByteArray(buffer, ref position); } - public override int this[int index] { - get { + public override int this[int index] + { + get + { return _data[index]; } - set { + set + { Contracts.Assert(value >= 0); _data[index] = value; } diff --git a/src/Microsoft.ML.FastTree/Dataset/SegmentIntArray.cs b/src/Microsoft.ML.FastTree/Dataset/SegmentIntArray.cs index 5ddb8db873..2ac14484bb 100644 --- a/src/Microsoft.ML.FastTree/Dataset/SegmentIntArray.cs +++ b/src/Microsoft.ML.FastTree/Dataset/SegmentIntArray.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using System.Security; namespace Microsoft.ML.Trainers.FastTree.Internal { @@ -489,31 +490,31 @@ public static unsafe void SegmentFindOptimalCost31(uint[] array, int len, out lo } bits = b; } - + internal const string NativePath = "FastTreeNative"; #pragma warning disable TLC_GeneralName // Externs follow their own rules. 
- [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void C_SegmentFindOptimalPath21(uint* valv, int valc, long* pBits, int* pTransitions); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void C_SegmentFindOptimalPath15(uint* valv, int valc, long* pBits, int* pTransitions); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void C_SegmentFindOptimalPath7(uint* valv, int valc, long* pBits, int* pTransitions); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void C_SegmentFindOptimalCost15(uint* valv, int valc, long* pBits); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void C_SegmentFindOptimalCost31(uint* valv, int valc, long* pBits); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + [DllImport(NativePath)] private static extern unsafe int C_SumupSegment_float( uint* pData, byte* pSegType, int* pSegLength, int* pIndices, float* pSampleOutputs, double* pSampleOutputWeights, float* pSumTargetsByBin, double* pSumWeightsByBin, int* pCountByBin, int totalCount, double totalSampleOutputs); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + [DllImport(NativePath)] private static extern unsafe int 
C_SumupSegment_double( uint* pData, byte* pSegType, int* pSegLength, int* pIndices, double* pSampleOutputs, double* pSampleOutputWeights, diff --git a/src/Microsoft.ML.FastTree/Dataset/SparseIntArray.cs b/src/Microsoft.ML.FastTree/Dataset/SparseIntArray.cs index 6d4098f3b7..8b030fc85a 100644 --- a/src/Microsoft.ML.FastTree/Dataset/SparseIntArray.cs +++ b/src/Microsoft.ML.FastTree/Dataset/SparseIntArray.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using System.Security; namespace Microsoft.ML.Trainers.FastTree.Internal { @@ -490,12 +491,13 @@ public override void Sumup(SumupInputData input, FeatureHistogram histogram) } #if USE_FASTTREENATIVE - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + internal const string NativePath = "FastTreeNative"; + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern unsafe int C_SumupDeltaSparse_float(int numBits, byte* pValues, byte* pDeltas, int numDeltas, int* pIndices, float* pSampleOutputs, double* pSampleOutputWeights, float* pSumTargetsByBin, double* pSumTargets2ByBin, int* pCountByBin, int totalCount, double totalSampleOutputs, double totalSampleOutputWeights); - [DllImport("FastTreeNative", CallingConvention = CallingConvention.StdCall)] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern unsafe int C_SumupDeltaSparse_double(int numBits, byte* pValues, byte* pDeltas, int numDeltas, int* pIndices, double* pSampleOutputs, double* pSampleOutputWeights, double* pSumTargetsByBin, double* pSumTargets2ByBin, int* pCountByBin, int totalCount, double totalSampleOutputs, double totalSampleOutputWeights); diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index dce1e7b398..ae4b04a4fa 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -17,6 +17,7 @@ using 
System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using System.Security; using System.Text; // REVIEW: Do we really need all these names? @@ -1090,7 +1091,7 @@ private static void PermutationSort(int[] permutation, double[] scores, short[] })); } - [DllImport("FastTreeNative", EntryPoint = "C_GetDerivatives", CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)] + [DllImport("FastTreeNative", EntryPoint = "C_GetDerivatives", CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] private static extern unsafe void GetDerivatives( int numDocuments, int begin, int* pPermutation, short* pLabels, double* pScores, double* pLambdas, double* pWeights, double* pDiscount, diff --git a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs index c4fdd85e22..c2a2e0c821 100644 --- a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs +++ b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs @@ -18,6 +18,7 @@ using System.Collections.Generic; using System.IO; using System.Runtime.InteropServices; +using System.Security; [assembly: LoadableClass(OlsLinearRegressionTrainer.Summary, typeof(OlsLinearRegressionTrainer), typeof(OlsLinearRegressionTrainer.Arguments), new[] { typeof(SignatureRegressorTrainer), typeof(SignatureTrainer), typeof(SignatureFeatureScorerTrainer) }, @@ -380,7 +381,7 @@ private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Fac internal static class Mkl { - private const string DllName = "MklImports"; + private const string MklPath = "MklImports"; public enum Layout { @@ -394,7 +395,7 @@ public enum UpLo : byte Lo = (byte)'L' } - [DllImport(DllName, EntryPoint = "LAPACKE_dpptrf")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "LAPACKE_dpptrf"), SuppressUnmanagedCodeSecurity] private static extern int PptrfInternal(Layout layout, UpLo uplo, int n, Double[] ap); ///

@@ -429,7 +430,7 @@ public static void Pptrf(Layout layout, UpLo uplo, int n, Double[] ap) } } - [DllImport(DllName, EntryPoint = "LAPACKE_dpptrs")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "LAPACKE_dpptrs"), SuppressUnmanagedCodeSecurity] private static extern int PptrsInternal(Layout layout, UpLo uplo, int n, int nrhs, Double[] ap, Double[] b, int ldb); /// @@ -476,7 +477,7 @@ public static void Pptrs(Layout layout, UpLo uplo, int n, int nrhs, Double[] ap, } - [DllImport(DllName, EntryPoint = "LAPACKE_dpptri")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "LAPACKE_dpptri"), SuppressUnmanagedCodeSecurity] private static extern int PptriInternal(Layout layout, UpLo uplo, int n, Double[] ap); /// diff --git a/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs b/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs index 0cbe520303..c8118a0160 100644 --- a/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs +++ b/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs @@ -760,9 +760,9 @@ private static unsafe class Native //To triger the loading of MKL library since SymSGD native library depends on it. 
static Native() => ErrorMessage(0); - internal const string DllName = "SymSgdNative"; - - [DllImport(DllName), SuppressUnmanagedCodeSecurity] + internal const string NativePath = "SymSgdNative"; + internal const string MklPath = "MklImports"; + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern void LearnAll(int totalNumInstances, int* instSizes, int** instIndices, float** instValues, float* labels, bool tuneLR, ref float lr, float l2Const, float piw, float* weightVector, ref float bias, int numFeatres, int numPasses, int numThreads, bool tuneNumLocIter, ref int numLocIter, float tolerance, bool needShuffle, bool shouldInitialize, State* state); @@ -833,7 +833,7 @@ public static void LearnAll(InputDataManager inputDataManager, bool tuneLR, } } - [DllImport(DllName), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern void MapBackWeightVector(float* weightVector, State* state); /// @@ -847,7 +847,7 @@ public static void MapBackWeightVector(Span weightVector, GCHandle stateG MapBackWeightVector(pweightVector, (State*)stateGCHandle.AddrOfPinnedObject()); } - [DllImport(DllName), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static extern void DeallocateSequentially(State* state); public static void DeallocateSequentially(GCHandle stateGCHandle) @@ -856,8 +856,7 @@ public static void DeallocateSequentially(GCHandle stateGCHandle) } // See: https://software.intel.com/en-us/node/521990 - [System.Security.SuppressUnmanagedCodeSecurity] - [DllImport("MklImports", EntryPoint = "DftiErrorMessage", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(MklPath, EntryPoint = "DftiErrorMessage", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern IntPtr ErrorMessage(int status); } diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs 
b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index da47e4e599..e3ff5c8ea7 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -18,6 +18,7 @@ using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using System.Security; using System.Text; [assembly: LoadableClass(VectorWhiteningTransformer.Summary, typeof(IDataTransform), typeof(VectorWhiteningTransformer), typeof(VectorWhiteningTransformer.Arguments), typeof(SignatureDataTransform), @@ -585,7 +586,7 @@ public override void Save(ModelSaveContext ctx) private static class Mkl { - private const string DllName = "MklImports"; + private const string MklPath = "MklImports"; // The allowed value of Layout is specified in Intel's MLK library. See Layout parameter in this // [doc](https://software.intel.com/en-us/mkl-developer-reference-c-cblas-gemm) for details. @@ -624,17 +625,17 @@ public static unsafe void Gemv(Layout layout, Transpose trans, int m, int n, flo } // See: https://software.intel.com/en-us/node/520750 - [DllImport(DllName, EntryPoint = "cblas_sgemv")] + [DllImport(MklPath , CallingConvention = CallingConvention.Cdecl, EntryPoint = "cblas_sgemv"), SuppressUnmanagedCodeSecurity] private static unsafe extern void Gemv(Layout layout, Transpose trans, int m, int n, float alpha, float* a, int lda, float* x, int incx, float beta, float* y, int incy); // See: https://software.intel.com/en-us/node/520775 - [DllImport(DllName, EntryPoint = "cblas_sgemm")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "cblas_sgemm"), SuppressUnmanagedCodeSecurity] public static extern void Gemm(Layout layout, Transpose transA, Transpose transB, int m, int n, int k, float alpha, float[] a, int lda, float[] b, int ldb, float beta, float[] c, int ldc); // See: https://software.intel.com/en-us/node/521150 - [DllImport(DllName, EntryPoint = "LAPACKE_sgesvd")] + [DllImport(MklPath, CallingConvention = 
CallingConvention.Cdecl, EntryPoint = "LAPACKE_sgesvd"), SuppressUnmanagedCodeSecurity] public static extern int Svd(Layout layout, SvdJob jobu, SvdJob jobvt, int m, int n, float[] a, int lda, float[] s, float[] u, int ldu, float[] vt, int ldvt, float[] superb); } diff --git a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs index b1305a5a90..d2c918a55b 100644 --- a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs +++ b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs @@ -185,21 +185,21 @@ private unsafe struct MFModel public float* Q; } - private const string DllPath = "MatrixFactorizationNative"; + private const string NativePath = "MatrixFactorizationNative"; - [DllImport(DllPath), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static unsafe extern void MFDestroyModel(ref MFModel* model); - [DllImport(DllPath), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static unsafe extern MFModel* MFTrain(MFProblem* prob, MFParameter* param); - [DllImport(DllPath), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static unsafe extern MFModel* MFTrainWithValidation(MFProblem* tr, MFProblem* va, MFParameter* param); - [DllImport(DllPath), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static unsafe extern float MFCrossValidation(MFProblem* prob, int nrFolds, MFParameter* param); - [DllImport(DllPath), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] private static unsafe extern float MFPredict(MFModel* model, int pIdx, int qIdx); private MFParameter _mfParam; diff --git a/src/Microsoft.ML.TimeSeries/EigenUtils.cs b/src/Microsoft.ML.TimeSeries/EigenUtils.cs index 42392dd026..f54c237bb1 100644 --- a/src/Microsoft.ML.TimeSeries/EigenUtils.cs +++ 
b/src/Microsoft.ML.TimeSeries/EigenUtils.cs @@ -4,6 +4,7 @@ using System; using System.Runtime.InteropServices; +using System.Security; using Microsoft.ML.Runtime.Internal.Utilities; using Float = System.Single; @@ -442,7 +443,7 @@ private static int Imtql(Float[] d, Float[] e, Float[] z, int n) return 0; } - private const string DllName = "MklImports"; + private const string MklPath = "MklImports"; public enum Layout { @@ -470,41 +471,41 @@ public enum Uplo : byte } // See: https://software.intel.com/en-us/node/521087#4C9F4214-70BC-4483-A814-1E7F927B30CF - [DllImport(DllName, EntryPoint = "LAPACKE_shseqr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_shseqr", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Shseqr(Layout matrixLayout, Job job, Compz compz, int n, int ilo, int ihi, [In] float[] h, int idh, [Out] float[] wr, [Out] float[] wi, [Out] float[] z, int ldz); // See: https://software.intel.com/en-us/node/521087#4C9F4214-70BC-4483-A814-1E7F927B30CF - [DllImport(DllName, EntryPoint = "LAPACKE_dhseqr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_dhseqr", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Dhseqr(Layout matrixLayout, Job job, Compz compz, int n, int ilo, int ihi, [In] double[] h, int idh, [Out] double[] wr, [Out] double[] wi, [Out] double[] z, int ldz); // See: https://software.intel.com/en-us/node/521046#7EF85A82-423A-4ABC-A208-88326CD0B887 - [DllImport(DllName, EntryPoint = "LAPACKE_ssytrd", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_ssytrd", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Ssytrd(Layout matrixLayout, Uplo uplo, int n, float[] a, int lda, float[] d, float[] e, float[] tau); // See: 
https://software.intel.com/en-us/node/521046#7EF85A82-423A-4ABC-A208-88326CD0B887 - [DllImport(DllName, EntryPoint = "LAPACKE_dsytrd", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_dsytrd", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Dsytrd(Layout matrixLayout, Uplo uplo, int n, double[] a, int lda, double[] d, double[] e, double[] tau); // See: https://software.intel.com/en-us/node/521067#E2C5B8B3-D275-4000-821D-1ABF245D2E30 - [DllImport(DllName, EntryPoint = "LAPACKE_ssteqr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_ssteqr", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Ssteqr(Layout matrixLayout, Compz compz, int n, float[] d, float[] e, float[] z, int ldz); // See: https://software.intel.com/en-us/node/521067#E2C5B8B3-D275-4000-821D-1ABF245D2E30 - [DllImport(DllName, EntryPoint = "LAPACKE_dsteqr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_dsteqr", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Dsteqr(Layout matrixLayout, Compz compz, int n, double[] d, double[] e, double[] z, int ldz); // See: https://software.intel.com/en-us/node/521049#106F8646-1C99-4A9D-8604-D60DAAF7BE0C - [DllImport(DllName, EntryPoint = "LAPACKE_sorgtr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_sorgtr", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Sorgtr(Layout matrixLayout, Uplo uplo, int n, float[] a, int lda, float[] tau); // See: https://software.intel.com/en-us/node/521049#106F8646-1C99-4A9D-8604-D60DAAF7BE0C - [DllImport(DllName, EntryPoint = "LAPACKE_dorgtr", CallingConvention = CallingConvention.Cdecl)] + [DllImport(MklPath, EntryPoint = "LAPACKE_dorgtr", CallingConvention 
= CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] public static extern int Dorgtr(Layout matrixLayout, Uplo uplo, int n, double[] a, int lda, double[] tau); public static bool MklSymmetricEigenDecomposition(Single[] input, int size, out Single[] eigenValues, out Single[] eigenVectors) diff --git a/src/Microsoft.ML.TimeSeries/FftUtils.cs b/src/Microsoft.ML.TimeSeries/FftUtils.cs index 85f40013c2..8b4414e423 100644 --- a/src/Microsoft.ML.TimeSeries/FftUtils.cs +++ b/src/Microsoft.ML.TimeSeries/FftUtils.cs @@ -5,6 +5,7 @@ using System; using System.Reflection; using System.Runtime.InteropServices; +using System.Security; namespace Microsoft.ML.Runtime.TimeSeriesProcessing { @@ -162,44 +163,43 @@ private enum ConfigValue CceFormat = 57 /* Complex conjugate-even */ } - private const string DllName = "MklImports"; + private const string MklPath = "MklImports"; private const string DllProxyName = "MklProxyNative"; // See: https://software.intel.com/en-us/node/521976#8CD904AB-244B-42E4-820A-CC2376E776B8 - [DllImport(DllProxyName, EntryPoint = "MKLDftiCreateDescriptor", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiCreateDescriptor", CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern int CreateDescriptor(out IntPtr desc, ConfigValue precision, ConfigValue domain, int dimension, int length); // See: https://software.intel.com/en-us/node/521977 - [DllImport(DllName, EntryPoint = "DftiCommitDescriptor")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "DftiCommitDescriptor"), SuppressUnmanagedCodeSecurity] private static extern int CommitDescriptor(IntPtr desc); // See: https://software.intel.com/en-us/node/521978 - [DllImport(DllName, EntryPoint = "DftiFreeDescriptor")] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "DftiFreeDescriptor"), SuppressUnmanagedCodeSecurity] private static extern int 
FreeDescriptor(ref IntPtr desc); // See: https://software.intel.com/en-us/node/521981 - [DllImport(DllProxyName, EntryPoint = "MKLDftiSetValue", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiSetValue", CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern int SetValue(IntPtr desc, ConfigParam configParam, ConfigValue configValue); // See: https://software.intel.com/en-us/node/521984 - [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeForward", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeForward", CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern int ComputeForward(IntPtr desc, [In] double[] inputRe, [In] double[] inputIm, [Out] double[] outputRe, [Out] double[] outputIm); // See: https://software.intel.com/en-us/node/521985 - [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeBackward", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeBackward", CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern int ComputeBackward(IntPtr desc, [In] double[] inputRe, [In] double[] inputIm, [Out] double[] outputRe, [Out] double[] outputIm); // See: https://software.intel.com/en-us/node/521984 - [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeForward", CallingConvention = CallingConvention.Cdecl)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeForward"), SuppressUnmanagedCodeSecurity] private static extern int ComputeForward(IntPtr desc, [In] float[] inputRe, [In] float[] inputIm, [Out] float[] outputRe, [Out] float[] outputIm); // See: https://software.intel.com/en-us/node/521985 - [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeBackward", CallingConvention = CallingConvention.Cdecl)] + [DllImport(DllProxyName, EntryPoint = "MKLDftiComputeBackward"), 
SuppressUnmanagedCodeSecurity] private static extern int ComputeBackward(IntPtr desc, [In] float[] inputRe, [In] float[] inputIm, [Out] float[] outputRe, [Out] float[] outputIm); // See: https://software.intel.com/en-us/node/521990 - [System.Security.SuppressUnmanagedCodeSecurity] - [DllImport(DllName, EntryPoint = "DftiErrorMessage", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto)] + [DllImport(MklPath, EntryPoint = "DftiErrorMessage", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Auto), SuppressUnmanagedCodeSecurity] private static extern IntPtr ErrorMessage(int status); private static void CheckStatus(int status) diff --git a/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs b/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs index af643a2907..4facdee991 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs @@ -18,66 +18,66 @@ public struct LdaEngine public IntPtr Ptr; } - private const string NativeDll = "LdaNative"; - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + private const string NativePath = "LdaNative"; + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern LdaEngine CreateEngine(int numTopic, int numVocab, float alphaSum, float beta, int numIter, int likelihoodInterval, int numThread, int mhstep, int maxDocToken); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void AllocateModelMemory(LdaEngine engine, int numTopic, int numVocab, long tableSize, long aliasTableSize); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void AllocateDataMemory(LdaEngine engine, int docNum, long corpusSize); - [DllImport(NativeDll, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, CharSet = CharSet.Ansi), SuppressUnmanagedCodeSecurity] 
internal static extern void Train(LdaEngine engine, string trainOutput); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void GetModelStat(LdaEngine engine, out long memBlockSize, out long aliasMemBlockSize); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void Test(LdaEngine engine, int numBurninIter, float[] pLogLikelihood); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void CleanData(LdaEngine engine); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void CleanModel(LdaEngine engine); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void DestroyEngine(LdaEngine engine); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void GetWordTopic(LdaEngine engine, int wordId, int[] pTopic, int[] pProb, ref int length); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void SetWordTopic(LdaEngine engine, int wordId, int[] pTopic, int[] pProb, int length); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void SetAlphaSum(LdaEngine engine, float avgDocLength); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern int FeedInData(LdaEngine engine, int[] termId, int[] termFreq, int termNum, int numVocab); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), 
SuppressUnmanagedCodeSecurity] internal static extern int FeedInDataDense(LdaEngine engine, int[] termFreq, int termNum, int numVocab); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void GetDocTopic(LdaEngine engine, int docId, int[] pTopic, int[] pProb, ref int numTopicReturn); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void GetTopicSummary(LdaEngine engine, int topicId, int[] pWords, float[] pProb, ref int numTopicReturn); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void TestOneDoc(LdaEngine engine, int[] termId, int[] termFreq, int termNum, int[] pTopics, int[] pProbs, ref int numTopicsMax, int numBurnIter, bool reset); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void TestOneDocDense(LdaEngine engine, int[] termFreq, int termNum, int[] pTopics, int[] pProbs, ref int numTopicsMax, int numBurninIter, bool reset); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void InitializeBeforeTrain(LdaEngine engine); - [DllImport(NativeDll), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath), SuppressUnmanagedCodeSecurity] internal static extern void InitializeBeforeTest(LdaEngine engine); } diff --git a/src/Native/CMakeLists.txt b/src/Native/CMakeLists.txt index 0100cc353c..35e4878dab 100644 --- a/src/Native/CMakeLists.txt +++ b/src/Native/CMakeLists.txt @@ -37,6 +37,10 @@ if(WIN32) add_compile_options(/fp:precise) add_compile_options(/EHsc) + if ($ENV{__BuildArch} STREQUAL "x86") + add_compile_options(/Gz) + endif () + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /LARGEADDRESSAWARE") # can handle addresses 
larger than 2 gigabytes set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DEBUG") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /INCREMENTAL:NO") diff --git a/src/Native/FastTreeNative/stdafx.h b/src/Native/FastTreeNative/stdafx.h index 46b89b02d2..d44a0a420a 100644 --- a/src/Native/FastTreeNative/stdafx.h +++ b/src/Native/FastTreeNative/stdafx.h @@ -10,14 +10,10 @@ // #pragma once - #include #include - +#include "../Stdafx.h" #ifndef _WIN32 -#define EXPORT_API(ret) extern "C" __attribute__((visibility("default"))) ret -#include "../UnixSal.h" - // Workaround for _BitScanReverse using the gcc-specific __builtin_clz, which returns the number of leading zeros. const int LongNumBitsMinusOne = sizeof(long) * 8 - 1; inline unsigned char _BitScanReverse(_Inout_ unsigned long * index, _In_ unsigned long mask) @@ -63,7 +59,4 @@ inline void qsort_s(_Inout_updates_all_(num) void* base, _In_ size_t num, _In_ s // __emul(u) is VS-specific. inline uint64_t __emulu(unsigned int i1, unsigned int i2) { return (uint64_t)i1 * i2; }; inline int64_t __emul(int i1, int i2) { return (int64_t)i1 * i2; }; -#else -#include -#define EXPORT_API(ret) extern "C" __declspec(dllexport) ret __stdcall #endif \ No newline at end of file diff --git a/src/Native/LdaNative/lda_engine.hpp b/src/Native/LdaNative/lda_engine.hpp index 95a107f355..a87bcbed73 100644 --- a/src/Native/LdaNative/lda_engine.hpp +++ b/src/Native/LdaNative/lda_engine.hpp @@ -3,7 +3,7 @@ // See the LICENSE file in the project root for more information. 
#pragma once - +#include "../Stdafx.h" #include #include #include @@ -17,12 +17,6 @@ #include "alias_multinomial_rng_int.hpp" -#ifdef _MSC_VER -#define EXPORT_API(ret) extern "C" __declspec(dllexport) ret __stdcall -#else -#define EXPORT_API(ret) extern "C" __attribute__((visibility("default"))) ret -#endif - //ignore all such warnings since our stl class will not used internally in the class as private member #pragma warning(disable : 4251) class CTimer; diff --git a/src/Native/SymSgdNative/SparseBLAS.h b/src/Native/SymSgdNative/SparseBLAS.h index 6d13e9cff2..fdfa1740e2 100644 --- a/src/Native/SymSgdNative/SparseBLAS.h +++ b/src/Native/SymSgdNative/SparseBLAS.h @@ -5,10 +5,10 @@ #pragma once #include "../Stdafx.h" -extern "C" float cblas_sdot(const int vecSize, const float* denseVecX, const int incX, const float* denseVecY, const int incY); -extern "C" float cblas_sdoti(const int sparseVecSize, const float* sparseVecValues, const int* sparseVecIndices, float* denseVec); -extern "C" void cblas_saxpy(const int vecSize, const float coef, const float* denseVecX, const int incX, float* denseVecY, const int incY); -extern "C" void cblas_saxpyi(const int sparseVecSize, const float coef, const float* sparseVecValues, const int* sparseVecIndices, float* denseVec); +extern "C" float __cdecl cblas_sdot(const int vecSize, const float* denseVecX, const int incX, const float* denseVecY, const int incY); +extern "C" float __cdecl cblas_sdoti(const int sparseVecSize, const float* sparseVecValues, const int* sparseVecIndices, float* denseVec); +extern "C" void __cdecl cblas_saxpy(const int vecSize, const float coef, const float* denseVecX, const int incX, float* denseVecY, const int incY); +extern "C" void __cdecl cblas_saxpyi(const int sparseVecSize, const float coef, const float* sparseVecValues, const int* sparseVecIndices, float* denseVec); float SDOT(const int vecSize, const float* denseVecX, const float* denseVecY) { diff --git 
a/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs index cb794cf815..8624ba90e9 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs @@ -20,70 +20,72 @@ namespace Microsoft.ML.CpuMath.PerformanceTests { internal static class CpuMathNativeUtils { - [DllImport("CpuMathNative", EntryPoint = "AddScalarU"), SuppressUnmanagedCodeSecurity] + internal const string NativePath = "CpuMathNative"; + + [DllImport(NativePath, EntryPoint = "AddScalarU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float AddScalarU(float a, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "Scale"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "Scale"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void Scale(float a, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "ScaleSrcU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "ScaleSrcU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void ScaleSrcU(float a, /*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "ScaleAddU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "ScaleAddU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void ScaleAddU(float a, float b, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "AddScaleU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "AddScaleU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void AddScaleU(float a, /*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "AddScaleSU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "AddScaleSU"), SuppressUnmanagedCodeSecurity] internal 
static extern unsafe void AddScaleSU(float a, /*_In_ const*/ float* ps, /*_In_ const*/ int* pi, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "AddScaleCopyU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "AddScaleCopyU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void AddScaleCopyU(float a, /*_In_ const*/ float* ps, /*_In_ const*/ float* pd, /*_Inout_*/ float* pr, int c); - [DllImport("CpuMathNative", EntryPoint = "AddU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "AddU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void AddU(/*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "AddSU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "AddSU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void AddSU(/*_In_ const*/ float* ps, /*_In_ const*/ int* pi, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "MulElementWiseU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "MulElementWiseU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void MulElementWiseU(/*_In_ const*/ float* ps1, /*_In_ const*/ float* ps2, /*_Inout_*/ float* pd, int c); - [DllImport("CpuMathNative", EntryPoint = "Sum"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "Sum"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float Sum(/*const*/ float* pValues, int length); - [DllImport("CpuMathNative", EntryPoint = "SumSqU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SumSqU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float SumSqU(/*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "SumSqDiffU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SumSqDiffU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float 
SumSqDiffU(float mean, /*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "SumAbsU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SumAbsU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float SumAbsU(/*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "SumAbsDiffU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SumAbsDiffU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float SumAbsDiffU(float mean, /*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "MaxAbsU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "MaxAbsU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float MaxAbsU(/*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "MaxAbsDiffU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "MaxAbsDiffU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float MaxAbsDiffU(float mean, /*const*/ float* ps, int c); - [DllImport("CpuMathNative", EntryPoint = "DotU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "DotU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float DotU(/*const*/ float* pa, /*const*/ float* pb, int c); - [DllImport("CpuMathNative", EntryPoint = "DotSU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "DotSU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float DotSU(/*const*/ float* pa, /*const*/ float* pb, /*const*/ int* pi, int c); - [DllImport("CpuMathNative", EntryPoint = "Dist2"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "Dist2"), SuppressUnmanagedCodeSecurity] internal static extern unsafe float Dist2(/*const*/ float* px, /*const*/ float* py, int c); - [DllImport("CpuMathNative", EntryPoint = "SdcaL1UpdateU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SdcaL1UpdateU"), 
SuppressUnmanagedCodeSecurity] internal static extern unsafe void SdcaL1UpdateU(float primalUpdate, /*_In_ const*/ float* ps, float threshold, /*_Inout_*/ float* pd1, /*_Inout_*/ float* pd2, int c); - [DllImport("CpuMathNative", EntryPoint = "SdcaL1UpdateSU"), SuppressUnmanagedCodeSecurity] + [DllImport(NativePath, EntryPoint = "SdcaL1UpdateSU"), SuppressUnmanagedCodeSecurity] internal static extern unsafe void SdcaL1UpdateSU(float primalUpdate, /*_In_ const*/ float* ps, /*_In_ const*/ int* pi, float threshold, /*_Inout_*/ float* pd1, /*_Inout_*/ float* pd2, int c); } } From 14c7a472579afa3ce98bad2e3495e4c0524471b9 Mon Sep 17 00:00:00 2001 From: Artidoro Pagnoni Date: Fri, 7 Dec 2018 11:14:20 -0800 Subject: [PATCH 034/100] Clean up of TextLoader constructor (#1784) --- docs/code/MlNetCookBook.md | 107 ++++++++---------- ...FeatureContributionCalculationTransform.cs | 12 +- .../Dynamic/FeatureSelectionTransform.cs | 12 +- .../Dynamic/GeneralizedAdditiveModels.cs | 12 +- .../Dynamic/PermutationFeatureImportance.cs | 14 +-- .../Microsoft.ML.Samples/Dynamic/SDCA.cs | 12 +- .../DataLoadSave/Text/TextLoader.cs | 38 +++++-- .../Text/TextLoaderSaverCatalog.cs | 54 +++++---- .../DataLoadSaveOperationsExtensions.cs | 11 +- .../ValueToKeyMappingTransformer.cs | 11 +- .../Utilities/ModelFileUtils.cs | 4 +- .../TermLookupTransformer.cs | 43 ++++--- .../Text/StopWordsRemovingTransformer.cs | 12 +- .../KMeansAndLogisticRegressionBench.cs | 8 +- .../PredictionEngineBench.cs | 35 ++---- ...sticDualCoordinateAscentClassifierBench.cs | 39 ++++--- .../TestPredictors.cs | 22 ++-- .../StaticPipeTests.cs | 2 +- .../Training.cs | 2 +- .../DataPipe/TestDataPipeBase.cs | 2 +- .../Microsoft.ML.TestFramework/ModelHelper.cs | 41 +++++-- .../Scenarios/Api/ApiScenariosTests.cs | 21 +--- .../Api/CookbookSamples/CookbookSamples.cs | 18 +-- .../CookbookSamplesDynamicApi.cs | 69 +++++------ .../Api/Estimators/CrossValidation.cs | 2 +- .../Estimators/DecomposableTrainAndPredict.cs | 4 +- 
.../Scenarios/Api/Estimators/Evaluation.cs | 2 +- .../Scenarios/Api/Estimators/Extensibility.cs | 4 +- .../Api/Estimators/FileBasedSavingOfData.cs | 2 +- .../Api/Estimators/IntrospectiveTraining.cs | 2 +- .../Api/Estimators/Metacomponents.cs | 2 +- .../Api/Estimators/MultithreadedPrediction.cs | 2 +- .../Estimators/ReconfigurablePrediction.cs | 2 +- .../Api/Estimators/SimpleTrainAndPredict.cs | 2 +- .../Estimators/TrainSaveModelAndPredict.cs | 2 +- .../Estimators/TrainWithInitialPredictor.cs | 2 +- .../Api/Estimators/TrainWithValidationSet.cs | 2 +- .../Scenarios/Api/Estimators/Visibility.cs | 2 +- .../IrisPlantClassificationTests.cs | 7 +- .../SentimentPredictionTests.cs | 34 +++--- .../TensorflowTests.cs | 69 ++++------- .../Transformers/CustomMappingTests.cs | 8 +- 42 files changed, 344 insertions(+), 407 deletions(-) diff --git a/docs/code/MlNetCookBook.md b/docs/code/MlNetCookBook.md index 7ff7b1f8eb..87e8b82647 100644 --- a/docs/code/MlNetCookBook.md +++ b/docs/code/MlNetCookBook.md @@ -95,7 +95,7 @@ This is how you can read this data: var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // A boolean column depicting the 'target label'. IsOver50K: ctx.LoadBool(0), // Three text columns. @@ -115,9 +115,7 @@ If the schema of the data is not known at compile time, or too cumbersome, you c var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // A boolean column depicting the 'label'. new TextLoader.Column("IsOver50K", DataKind.BL, 0), // Three text columns. 
@@ -126,8 +124,8 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments new TextLoader.Column("MaritalStatus", DataKind.TX, 3) }, // First line of the file is a header, not a data row. - HasHeader = true -}); + hasHeader: true +); // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var data = reader.Read(dataPath); @@ -155,7 +153,7 @@ This is how you can read this data: var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // A boolean column depicting the 'target label'. IsOver50K: ctx.LoadBool(14), // Three text columns. @@ -175,19 +173,17 @@ The code is very similar using the dynamic API: var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // A boolean column depicting the 'label'. - new TextLoader.Column("IsOver50k", DataKind.BL, 0), + new TextLoader.Column("IsOver50K", DataKind.BL, 0), // Three text columns. new TextLoader.Column("Workclass", DataKind.TX, 1), new TextLoader.Column("Education", DataKind.TX, 2), new TextLoader.Column("MaritalStatus", DataKind.TX, 3) }, // First line of the file is a header, not a data row. - HasHeader = true -}); + hasHeader: true +); var data = reader.Read(exampleFile1, exampleFile2); ``` @@ -211,7 +207,7 @@ Reading this file using `TextLoader`: var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // We read the first 11 values as a single float vector. 
FeatureVector: ctx.LoadFloat(0, 10), // Separately, read the target variable. @@ -233,7 +229,7 @@ If the schema of the data is not known at compile time, or too cumbersome, you c var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // We read the first 10 values as a single float vector. new TextLoader.Column("FeatureVector", DataKind.R4, new[] {new TextLoader.Range(0, 9)}), // Separately, read the target variable. @@ -302,7 +298,7 @@ Label Workclass education marital-status var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // A boolean column depicting the 'target label'. IsOver50K: ctx.LoadBool(0), // Three text columns. @@ -365,19 +361,17 @@ You can also use the dynamic API to create the equivalent of the previous pipeli var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // A boolean column depicting the 'label'. - new TextLoader.Column("IsOver50k", DataKind.BL, 0), + new TextLoader.Column("IsOver50K", DataKind.BL, 0), // Three text columns. new TextLoader.Column("Workclass", DataKind.TX, 1), new TextLoader.Column("Education", DataKind.TX, 2), new TextLoader.Column("MaritalStatus", DataKind.TX, 3) }, // First line of the file is a header, not a data row. - HasHeader = true -}); + hasHeader: true +); // Start creating our processing pipeline. For now, let's just concatenate all the text columns // together into one. @@ -428,7 +422,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. 
// First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // We read the first 11 values as a single float vector. FeatureVector: ctx.LoadFloat(0, 10), // Separately, read the target variable. @@ -482,9 +476,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // We read the first 11 values as a single float vector. new TextLoader.Column("FeatureVector", DataKind.R4, 0, 10), @@ -492,10 +484,10 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments new TextLoader.Column("Target", DataKind.R4, 11), }, // First line of the file is a header, not a data row. - HasHeader = true, + hasHeader: true, // Default separator is tab, but we need a semicolon. - Separator = ";" -}); + separatorChar: ';' +); // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); @@ -603,7 +595,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -653,9 +645,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. 
-var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { new TextLoader.Column("SepalLength", DataKind.R4, 0), new TextLoader.Column("SepalWidth", DataKind.R4, 1), new TextLoader.Column("PetalLength", DataKind.R4, 2), @@ -664,8 +654,8 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," -}); + separatorChar: ',' +); // Retrieve the training data. var trainData = reader.Read(irisDataPath); @@ -821,7 +811,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -917,7 +907,7 @@ Here's a snippet of code that demonstrates normalization in learning pipelines. var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset will be grouped together as one Features column. Features: ctx.LoadFloat(0, 3), // Label: kind of iris. @@ -952,17 +942,15 @@ You can achieve the same results using the dynamic API. var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] { // The four features of the Iris dataset will be grouped together as one Features column. new TextLoader.Column("Features", DataKind.R4, 0, 3), // Label: kind of iris. 
new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," -}); + separatorChar: ',' +); // Read the training data. var trainData = reader.Read(dataPath); @@ -1011,7 +999,7 @@ Label Workclass education marital-status occupation relationship ethnicity sex n var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( Label: ctx.LoadBool(0), // We will load all the categorical features into one vector column of size 8. CategoricalFeatures: ctx.LoadText(1, 8), @@ -1073,9 +1061,8 @@ You can achieve the same results using the dynamic API. var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] + { new TextLoader.Column("Label", DataKind.BL, 0), // We will load all the categorical features into one vector column of size 8. new TextLoader.Column("CategoricalFeatures", DataKind.TX, 1, 8), @@ -1084,8 +1071,8 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments // Let's also separately load the 'Workclass' column. new TextLoader.Column("Workclass", DataKind.TX, 1), }, - HasHeader = true -}); + hasHeader: true +); // Read the data. var data = reader.Read(dataPath); @@ -1157,7 +1144,7 @@ Sentiment SentimentText var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( IsToxic: ctx.LoadBool(0), Message: ctx.LoadText(1) ), hasHeader: true); @@ -1207,14 +1194,13 @@ You can achieve the same results using the dynamic API. 
var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] + { new TextLoader.Column("IsToxic", DataKind.BL, 0), new TextLoader.Column("Message", DataKind.TX, 1), }, - HasHeader = true -}); + hasHeader: true +); // Read the data. var data = reader.Read(dataPath); @@ -1274,7 +1260,7 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -1330,9 +1316,8 @@ var mlContext = new MLContext(); // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(new TextLoader.Arguments -{ - Column = new[] { +var reader = mlContext.Data.CreateTextReader(new[] + { // We read the first 11 values as a single float vector. new TextLoader.Column("SepalLength", DataKind.R4, 0), new TextLoader.Column("SepalWidth", DataKind.R4, 1), @@ -1342,8 +1327,8 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," -}); + separatorChar: ',' +); // Read the data. var data = reader.Read(dataPath); @@ -1395,7 +1380,7 @@ var mlContext = new MLContext(); // Read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. -var reader = mlContext.Data.TextReader(ctx => ( +var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. 
SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs index 1c28df327f..d88cff2cdd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs @@ -19,11 +19,8 @@ public static void FeatureContributionCalculationTransform_Regression() // Step 1: Read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader( + columns: new[] { new TextLoader.Column("MedianHomeValue", DataKind.R4, 0), new TextLoader.Column("CrimesPerCapita", DataKind.R4, 1), @@ -37,8 +34,9 @@ public static void FeatureContributionCalculationTransform_Regression() new TextLoader.Column("HighwayDistance", DataKind.R4, 9), new TextLoader.Column("TaxRate", DataKind.R4, 10), new TextLoader.Column("TeacherRatio", DataKind.R4, 11), - } - }); + }, + hasHeader: true + ); // Read the data var data = reader.Read(dataFile); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureSelectionTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureSelectionTransform.cs index 7508815dc4..f0d0442d42 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureSelectionTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureSelectionTransform.cs @@ -31,16 +31,14 @@ public static void FeatureSelectionTransform() // First, we define the reader: specify the data columns and where to find them in the text file. Notice that we combine entries from // all the feature columns into entries of a vector of a single column named "Features". 
- var reader = ml.Data.TextReader(new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = ml.Data.CreateTextReader( + columns: new[] { new TextLoader.Column("Label", DataKind.BL, 0), new TextLoader.Column("Features", DataKind.Num, new [] { new TextLoader.Range(1, 9) }) - } - }); + }, + hasHeader: true + ); // Then, we use the reader to read the data as an IDataView. var data = reader.Read(dataFilePath); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/GeneralizedAdditiveModels.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/GeneralizedAdditiveModels.cs index 827d04a586..26fadc0148 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/GeneralizedAdditiveModels.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/GeneralizedAdditiveModels.cs @@ -19,11 +19,8 @@ public static void RunExample() // Step 1: Read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader( + columns: new[] { new TextLoader.Column("MedianHomeValue", DataKind.R4, 0), new TextLoader.Column("CrimesPerCapita", DataKind.R4, 1), @@ -37,8 +34,9 @@ public static void RunExample() new TextLoader.Column("HighwayDistance", DataKind.R4, 9), new TextLoader.Column("TaxRate", DataKind.R4, 10), new TextLoader.Column("TeacherRatio", DataKind.R4, 11), - } - }); + }, + hasHeader: true + ); // Read the data var data = reader.Read(dataFile); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs index 0c95abacb8..7150f12835 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs @@ -22,11 +22,8 @@ public 
static void PFI_Regression() // First, we define the reader: specify the data columns and where to find them in the text file. // The data file is composed of rows of data, with each row having 11 numerical columns // separated by whitespace. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader( + columns: new[] { // Read the first column (indexed by 0) in the data file as an R4 (float) new TextLoader.Column("MedianHomeValue", DataKind.R4, 0), @@ -40,9 +37,10 @@ public static void PFI_Regression() new TextLoader.Column("EmploymentDistance", DataKind.R4, 8), new TextLoader.Column("HighwayDistance", DataKind.R4, 9), new TextLoader.Column("TaxRate", DataKind.R4, 10), - new TextLoader.Column("TeacherRatio", DataKind.R4, 11), - } - }); + new TextLoader.Column("TeacherRatio", DataKind.R4, 11) + }, + hasHeader: true + ); // Read the data var data = reader.Read(dataFile); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs index a6c1904f6a..09dea18ff1 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs @@ -24,16 +24,14 @@ public static void SDCA_BinaryClassification() // Step 1: Read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. 
- var reader = mlContext.Data.TextReader(new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader( + columns: new[] { new TextLoader.Column("Sentiment", DataKind.BL, 0), new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }); + }, + hasHeader: true + ); // Read the data var data = reader.Read(dataFile); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs index e748967714..a3d5260531 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs @@ -25,7 +25,6 @@ namespace Microsoft.ML.Runtime.Data { /// /// Loads a text file into an IDataView. Supports basic mapping from input columns to IDataView columns. - /// Should accept any file that TlcTextInstances accepts. /// public sealed partial class TextLoader : IDataReader, ICanSaveModel { @@ -1008,23 +1007,38 @@ private bool HasHeader private readonly IHost _host; private const string RegistrationName = "TextLoader"; - public TextLoader(IHostEnvironment env, Column[] columns, Action advancedSettings, IMultiStreamSource dataSample = null) - : this(env, MakeArgs(columns, advancedSettings), dataSample) + /// + /// Loads a text file into an . Supports basic mapping from input columns to IDataView columns. + /// + /// The environment to use. + /// Defines a mapping between input columns in the file and IDataView columns. + /// Whether the file has a header. + /// The character used as separator between data points in a row. By default the tab character is used as separator. + /// Allows to expose items that can be used for reading. 
+ public TextLoader(IHostEnvironment env, Column[] columns, bool hasHeader = false, char separatorChar = '\t', IMultiStreamSource dataSample = null) + : this(env, MakeArgs(columns, hasHeader, new[] { separatorChar }), dataSample) { } - private static Arguments MakeArgs(Column[] columns, Action advancedSettings) + private static Arguments MakeArgs(Column[] columns, bool hasHeader, char[] separatorChars) { - var result = new Arguments { Column = columns }; - advancedSettings?.Invoke(result); + Contracts.AssertValue(separatorChars); + var result = new Arguments { Column = columns, HasHeader = hasHeader, SeparatorChars = separatorChars}; return result; } - public TextLoader(IHostEnvironment env, Arguments args, IMultiStreamSource dataSample = null) + /// + /// Loads a text file into an . Supports basic mapping from input columns to IDataView columns. + /// + /// The environment to use. + /// Defines the settings of the load operation. + /// Allows to expose items that can be used for reading. + public TextLoader(IHostEnvironment env, Arguments args = null, IMultiStreamSource dataSample = null) { + args = args ?? new Arguments(); + Contracts.CheckValue(env, nameof(env)); _host = env.Register(RegistrationName); - _host.CheckValue(args, nameof(args)); _host.CheckValueOrNull(dataSample); @@ -1285,7 +1299,7 @@ private TextLoader(IHost host, ModelLoadContext ctx) _parser = new Parser(this); } - public static TextLoader Create(IHostEnvironment env, ModelLoadContext ctx) + internal static TextLoader Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); IHost h = env.Register(RegistrationName); @@ -1297,15 +1311,15 @@ public static TextLoader Create(IHostEnvironment env, ModelLoadContext ctx) } // These are legacy constructors needed for ComponentCatalog. 
- public static IDataLoader Create(IHostEnvironment env, ModelLoadContext ctx, IMultiStreamSource files) + internal static IDataLoader Create(IHostEnvironment env, ModelLoadContext ctx, IMultiStreamSource files) => (IDataLoader)Create(env, ctx).Read(files); - public static IDataLoader Create(IHostEnvironment env, Arguments args, IMultiStreamSource files) + internal static IDataLoader Create(IHostEnvironment env, Arguments args, IMultiStreamSource files) => (IDataLoader)new TextLoader(env, args, files).Read(files); /// /// Convenience method to create a and use it to read a specified file. /// - public static IDataView ReadFile(IHostEnvironment env, Arguments args, IMultiStreamSource fileSource) + internal static IDataView ReadFile(IHostEnvironment env, Arguments args, IMultiStreamSource fileSource) => new TextLoader(env, args, fileSource).Read(fileSource); public void Save(ModelSaveContext ctx) diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderSaverCatalog.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderSaverCatalog.cs index e5de3573ee..b4cf936a38 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderSaverCatalog.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderSaverCatalog.cs @@ -5,13 +5,8 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; -using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.StaticPipe; using System; -using System.Collections.Generic; using System.IO; -using System.Linq; -using System.Text; using static Microsoft.ML.Runtime.Data.TextLoader; namespace Microsoft.ML @@ -19,36 +14,37 @@ namespace Microsoft.ML public static class TextLoaderSaverCatalog { /// - /// Create a text reader. + /// Create a text reader . /// /// The catalog. - /// The arguments to text reader, describing the data schema. + /// The columns of the schema. + /// Whether the file has a header. + /// The character used as separator between data points in a row. 
By default the tab character is used as separator. /// The optional location of a data sample. - public static TextLoader TextReader(this DataOperations catalog, - TextLoader.Arguments args, IMultiStreamSource dataSample = null) - => new TextLoader(CatalogUtils.GetEnvironment(catalog), args, dataSample); + public static TextLoader CreateTextReader(this DataOperations catalog, + Column[] columns, bool hasHeader = false, char separatorChar = '\t', IMultiStreamSource dataSample = null) + => new TextLoader(CatalogUtils.GetEnvironment(catalog), columns, hasHeader, separatorChar, dataSample); /// - /// Create a text reader. + /// Create a text reader . /// /// The catalog. - /// The columns of the schema. - /// The delegate to set additional settings. - /// The optional location of a data sample. - public static TextLoader TextReader(this DataOperations catalog, - TextLoader.Column[] columns, Action advancedSettings = null, IMultiStreamSource dataSample = null) - => new TextLoader(CatalogUtils.GetEnvironment(catalog), columns, advancedSettings, dataSample); + /// Defines the settings of the load operation. + /// Allows to expose items that can be used for reading. + public static TextLoader CreateTextReader(this DataOperations catalog, Arguments args, IMultiStreamSource dataSample = null) + => new TextLoader(CatalogUtils.GetEnvironment(catalog), args, dataSample); /// /// Read a data view from a text file using . /// /// The catalog. /// The columns of the schema. - /// The delegate to set additional settings - /// The path to the file + /// Whether the file has a header. + /// The character used as separator between data points in a row. By default the tab character is used as separator. + /// The path to the file. /// The data view. 
public static IDataView ReadFromTextFile(this DataOperations catalog, - TextLoader.Column[] columns, string path, Action advancedSettings = null) + string path, Column[] columns, bool hasHeader = false, char separatorChar = '\t') { Contracts.CheckNonEmpty(path, nameof(path)); @@ -56,10 +52,26 @@ public static IDataView ReadFromTextFile(this DataOperations catalog, // REVIEW: it is almost always a mistake to have a 'trainable' text loader here. // Therefore, we are going to disallow data sample. - var reader = new TextLoader(env, columns, advancedSettings, dataSample: null); + var reader = new TextLoader(env, columns, hasHeader, separatorChar, dataSample: null); return reader.Read(new MultiFileSource(path)); } + /// + /// Read a data view from a text file using . + /// + /// The catalog. + /// Specifies a file from which to read. + /// Defines the settings of the load operation. + public static IDataView ReadFromTextFile(this DataOperations catalog, string path, Arguments args = null) + { + Contracts.CheckNonEmpty(path, nameof(path)); + + var env = catalog.GetEnvironment(); + var source = new MultiFileSource(path); + + return new TextLoader(env, args, source).Read(source); + } + /// /// Save the data view as text. 
/// diff --git a/src/Microsoft.ML.Data/StaticPipe/DataLoadSaveOperationsExtensions.cs b/src/Microsoft.ML.Data/StaticPipe/DataLoadSaveOperationsExtensions.cs index 57adb1be4d..a5f0172935 100644 --- a/src/Microsoft.ML.Data/StaticPipe/DataLoadSaveOperationsExtensions.cs +++ b/src/Microsoft.ML.Data/StaticPipe/DataLoadSaveOperationsExtensions.cs @@ -4,14 +4,7 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Data.IO; -using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.StaticPipe; using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; using static Microsoft.ML.Runtime.Data.TextLoader; namespace Microsoft.ML.StaticPipe @@ -40,10 +33,10 @@ public static class DataLoadSaveOperationsExtensions /// Whether the input may include sparse representations. /// Remove trailing whitespace from lines. /// A configured statically-typed reader for text files. - public static DataReader TextReader<[IsShape] TShape>( + public static DataReader CreateTextReader<[IsShape] TShape>( this DataOperations catalog, Func func, IMultiStreamSource files = null, bool hasHeader = false, char separator = '\t', bool allowQuoting = true, bool allowSparse = true, bool trimWhitspace = false) - => TextLoader.CreateReader(catalog.Environment, func, files, hasHeader, separator, allowQuoting, allowSparse, trimWhitspace); + => CreateReader(catalog.Environment, func, files, hasHeader, separator, allowQuoting, allowSparse, trimWhitspace); } } diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index 7a5bf18995..d351a6bfc1 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -483,13 +483,10 @@ private static TermMap CreateFileTermMap(IHostEnvironment env, IChannel ch, stri "{0} should not be specified when default 
loader is TextLoader. Ignoring {0}={1}", nameof(Arguments.TermsColumn), src); } - termData = TextLoader.ReadFile(env, - new TextLoader.Arguments() - { - Separator = "tab", - Column = new[] { new TextLoader.Column("Term", DataKind.TX, 0) } - }, - fileSource); + termData = new TextLoader(env, + columns: new[] { new TextLoader.Column("Term", DataKind.TX, 0) }, + dataSample: fileSource) + .Read(fileSource); src = "Term"; autoConvert = true; } diff --git a/src/Microsoft.ML.Data/Utilities/ModelFileUtils.cs b/src/Microsoft.ML.Data/Utilities/ModelFileUtils.cs index 97948b764b..5debae70b7 100644 --- a/src/Microsoft.ML.Data/Utilities/ModelFileUtils.cs +++ b/src/Microsoft.ML.Data/Utilities/ModelFileUtils.cs @@ -283,8 +283,8 @@ public static IEnumerable> LoadRoleMappingsOrNu { // REVIEW: Should really validate the schema here, and consider // ignoring this stream if it isn't as expected. - var loader = TextLoader.ReadFile(env, new TextLoader.Arguments(), - new RepositoryStreamWrapper(rep, DirTrainingInfo, RoleMappingFile)); + var repoStreamWrapper = new RepositoryStreamWrapper(rep, DirTrainingInfo, RoleMappingFile); + var loader = new TextLoader(env, dataSample: repoStreamWrapper).Read(repoStreamWrapper); using (var cursor = loader.GetRowCursor(c => true)) { diff --git a/src/Microsoft.ML.Transforms/TermLookupTransformer.cs b/src/Microsoft.ML.Transforms/TermLookupTransformer.cs index 83a36c55af..741b112645 100644 --- a/src/Microsoft.ML.Transforms/TermLookupTransformer.cs +++ b/src/Microsoft.ML.Transforms/TermLookupTransformer.cs @@ -349,27 +349,27 @@ private static IComponentFactory GetLoaderFacto // If the user specified non-key values, we define the value column to be numeric. 
if (!keyValues) return ComponentFactoryUtils.CreateFromFunction( - (env, files) => TextLoader.Create( - env, - new TextLoader.Arguments() - { - Column = new[] + (env, files) => new TextLoader( + env, new[] { new TextLoader.Column("Term", DataKind.TX, 0), new TextLoader.Column("Value", DataKind.Num, 1) - } - }, - files)); + }, dataSample: files).Read(files) as IDataLoader); // If the user specified key values, we scan the values to determine the range of the key type. ulong min = ulong.MaxValue; ulong max = ulong.MinValue; try { - var txtArgs = new TextLoader.Arguments(); - bool parsed = CmdParser.ParseArguments(host, "col=Term:TX:0 col=Value:TX:1", txtArgs); - host.Assert(parsed); - var data = TextLoader.ReadFile(host, txtArgs, new MultiFileSource(filename)); + var file = new MultiFileSource(filename); + var data = new TextLoader(host, new[] + { + new TextLoader.Column("Term", DataKind.TX, 0), + new TextLoader.Column("Value", DataKind.TX, 1) + }, + dataSample: file + ).Read(file); + using (var cursor = data.GetRowCursor(c => true)) { var getTerm = cursor.GetGetter>(0); @@ -444,17 +444,14 @@ private static IComponentFactory GetLoaderFacto } return ComponentFactoryUtils.CreateFromFunction( - (env, files) => TextLoader.Create( - env, - new TextLoader.Arguments() - { - Column = new[] - { - new TextLoader.Column("Term", DataKind.TX, 0), - valueColumn - } - }, - files)); + (env, files) => new TextLoader( + env, + columns: new[] + { + new TextLoader.Column("Term", DataKind.TX, 0), + valueColumn + }, + dataSample: files).Read(files) as IDataLoader); } // This saves the lookup data as a byte array encoded as a binary .idv file. 
diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index a9459df027..feaebea475 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ -722,17 +722,13 @@ private IDataLoader GetLoaderForStopwords(IChannel ch, string dataFile, { if (stopwordsCol == null) stopwordsCol = "Stopwords"; - dataLoader = TextLoader.Create( + dataLoader = new TextLoader( Host, - new TextLoader.Arguments() + columns: new[] { - Separator = "tab", - Column = new[] - { - new TextLoader.Column(stopwordsCol, DataKind.TX, 0) - } + new TextLoader.Column(stopwordsCol, DataKind.TX, 0) }, - fileSource); + dataSample: fileSource).Read(fileSource) as IDataLoader; } ch.AssertNonEmpty(stopwordsCol); } diff --git a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs index 990915128e..1adf7d4065 100644 --- a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs +++ b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs @@ -18,7 +18,7 @@ public ParameterMixingCalibratedPredictor TrainKMeansAndLR() var ml = new MLContext(seed: 1); // Pipeline - var input = ml.Data.ReadFromTextFile(new[] { + var input = ml.Data.ReadFromTextFile(_dataPath, new[] { new TextLoader.Column("Label", DataKind.R4, 0), new TextLoader.Column("CatFeatures", DataKind.TX, new [] { @@ -28,11 +28,7 @@ public ParameterMixingCalibratedPredictor TrainKMeansAndLR() new [] { new TextLoader.Range() { Min = 9, Max = 14 }, }), - }, _dataPath, s => - { - s.HasHeader = true; - s.Separator = "\t"; - }); + }, hasHeader: true); var estimatorPipeline = ml.Transforms.Categorical.OneHotEncoding("CatFeatures") .Append(ml.Transforms.Normalize("NumFeatures")) diff --git a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs 
b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs index 39342cf224..c711ab63f6 100644 --- a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs +++ b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs @@ -38,19 +38,16 @@ public void SetupIrisPipeline() var env = new MLContext(seed: 1, conc: 1); var reader = new TextLoader(env, - new TextLoader.Arguments() - { - Separator = "\t", - HasHeader = true, - Column = new[] + columns: new[] { new TextLoader.Column("Label", DataKind.R4, 0), new TextLoader.Column("SepalLength", DataKind.R4, 1), new TextLoader.Column("SepalWidth", DataKind.R4, 2), new TextLoader.Column("PetalLength", DataKind.R4, 3), new TextLoader.Column("PetalWidth", DataKind.R4, 4), - } - }); + }, + hasHeader: true + ); IDataView data = reader.Read(_irisDataPath); @@ -73,17 +70,13 @@ public void SetupSentimentPipeline() string _sentimentDataPath = Program.GetInvariantCultureDataPath("wikipedia-detox-250-line-data.tsv"); var env = new MLContext(seed: 1, conc: 1); - var reader = new TextLoader(env, - new TextLoader.Arguments() - { - Separator = "\t", - HasHeader = true, - Column = new[] + var reader = new TextLoader(env, columns: new[] { new TextLoader.Column("Label", DataKind.BL, 0), new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }); + }, + hasHeader: true + ); IDataView data = reader.Read(_sentimentDataPath); @@ -106,17 +99,13 @@ public void SetupBreastCancerPipeline() string _breastCancerDataPath = Program.GetInvariantCultureDataPath("breast-cancer.txt"); var env = new MLContext(seed: 1, conc: 1); - var reader = new TextLoader(env, - new TextLoader.Arguments() - { - Separator = "\t", - HasHeader = false, - Column = new[] + var reader = new TextLoader(env, columns: new[] { new TextLoader.Column("Label", DataKind.BL, 0), new TextLoader.Column("Features", DataKind.R4, new[] { new TextLoader.Range(1, 9) }) - } - }); + }, + hasHeader: false + ); IDataView data = reader.Read(_breastCancerDataPath); diff --git 
a/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs b/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs index c2fee89f24..bee16e0d7b 100644 --- a/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs +++ b/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs @@ -64,30 +64,29 @@ public void TrainSentiment() { var env = new MLContext(seed: 1); // Pipeline - var loader = TextLoader.ReadFile(env, - new TextLoader.Arguments() + var arguments = new TextLoader.Arguments() + { + Column = new TextLoader.Column[] { - AllowQuoting = false, - AllowSparse = false, - Separator = "tab", - HasHeader = true, - Column = new[] + new TextLoader.Column() { - new TextLoader.Column() - { - Name = "Label", - Source = new [] { new TextLoader.Range() { Min=0, Max=0} }, - Type = DataKind.Num - }, + Name = "Label", + Source = new[] { new TextLoader.Range() { Min = 0, Max = 0 } }, + Type = DataKind.Num + }, - new TextLoader.Column() - { - Name = "SentimentText", - Source = new [] { new TextLoader.Range() { Min=1, Max=1} }, - Type = DataKind.Text - } + new TextLoader.Column() + { + Name = "SentimentText", + Source = new[] { new TextLoader.Range() { Min = 1, Max = 1 } }, + Type = DataKind.Text } - }, new MultiFileSource(_sentimentDataPath)); + }, + HasHeader = true, + AllowQuoting = false, + AllowSparse = false + }; + var loader = env.Data.ReadFromTextFile(_sentimentDataPath, arguments); var text = TextFeaturizingEstimator.Create(env, new TextFeaturizingEstimator.Arguments() diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index f96f6f205a..94c7d98155 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -606,12 +606,12 @@ public void RankingLightGBMTest() public void TestTreeEnsembleCombiner() { var dataPath = GetDataPath("breast-cancer.txt"); - var 
dataView = TextLoader.Create(Env, new TextLoader.Arguments(), new MultiFileSource(dataPath)); + var dataView = ML.Data.ReadFromTextFile(dataPath); var fastTrees = new IPredictorModel[3]; for (int i = 0; i < 3; i++) { - fastTrees[i] = FastTree.TrainBinary(Env, new FastTreeBinaryClassificationTrainer.Arguments + fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments { FeatureColumn = "Features", NumTrees = 5, @@ -628,13 +628,13 @@ public void TestTreeEnsembleCombiner() public void TestTreeEnsembleCombinerWithCategoricalSplits() { var dataPath = GetDataPath("adult.tiny.with-schema.txt"); - var dataView = TextLoader.Create(Env, new TextLoader.Arguments(), new MultiFileSource(dataPath)); + var dataView = ML.Data.ReadFromTextFile(dataPath); - var cat = new OneHotEncodingEstimator(Env, "Categories", "Features").Fit(dataView).Transform(dataView); + var cat = new OneHotEncodingEstimator(ML, "Categories", "Features").Fit(dataView).Transform(dataView); var fastTrees = new IPredictorModel[3]; for (int i = 0; i < 3; i++) { - fastTrees[i] = FastTree.TrainBinary(Env, new FastTreeBinaryClassificationTrainer.Arguments + fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments { FeatureColumn = "Features", NumTrees = 5, @@ -729,11 +729,11 @@ private void CombineAndTestTreeEnsembles(IDataView idv, IPredictorModel[] fastTr public void TestEnsembleCombiner() { var dataPath = GetDataPath("breast-cancer.txt"); - var dataView = TextLoader.Create(Env, new TextLoader.Arguments(), new MultiFileSource(dataPath)); + var dataView = ML.Data.ReadFromTextFile(dataPath); var predictors = new IPredictorModel[] { - FastTree.TrainBinary(Env, new FastTreeBinaryClassificationTrainer.Arguments + FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments { FeatureColumn = "Features", NumTrees = 5, @@ -741,7 +741,7 @@ public void TestEnsembleCombiner() LabelColumn = DefaultColumnNames.Label, TrainingData = dataView 
}).PredictorModel, - AveragedPerceptronTrainer.TrainBinary(Env, new AveragedPerceptronTrainer.Arguments() + AveragedPerceptronTrainer.TrainBinary(ML, new AveragedPerceptronTrainer.Arguments() { FeatureColumn = "Features", LabelColumn = DefaultColumnNames.Label, @@ -749,7 +749,7 @@ public void TestEnsembleCombiner() TrainingData = dataView, NormalizeFeatures = NormalizeOption.No }).PredictorModel, - LogisticRegression.TrainBinary(Env, new LogisticRegression.Arguments() + LogisticRegression.TrainBinary(ML, new LogisticRegression.Arguments() { FeatureColumn = "Features", LabelColumn = DefaultColumnNames.Label, @@ -757,7 +757,7 @@ public void TestEnsembleCombiner() TrainingData = dataView, NormalizeFeatures = NormalizeOption.No }).PredictorModel, - LogisticRegression.TrainBinary(Env, new LogisticRegression.Arguments() + LogisticRegression.TrainBinary(ML, new LogisticRegression.Arguments() { FeatureColumn = "Features", LabelColumn = DefaultColumnNames.Label, @@ -775,7 +775,7 @@ public void TestEnsembleCombiner() public void TestMultiClassEnsembleCombiner() { var dataPath = GetDataPath("breast-cancer.txt"); - var dataView = TextLoader.Create(Env, new TextLoader.Arguments(), new MultiFileSource(dataPath)); + var dataView = ML.Data.ReadFromTextFile(dataPath); var predictors = new IPredictorModel[] { diff --git a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs index 35da6afbc0..0cff6c1794 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs @@ -882,7 +882,7 @@ public void TestConvertStatic() + "1 1 2 4 15"; var dataSource = new BytesStreamSource(content); - var text = ml.Data.TextReader(ctx => ( + var text = ml.Data.CreateTextReader(ctx => ( label: ctx.LoadBool(0), text: ctx.LoadText(1), numericFeatures: ctx.LoadDouble(2, null)), // If fit correctly, this ought to be equivalent to max of 4, that is, length of 3. 
diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index 959f316111..ee5e2626cf 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -976,7 +976,7 @@ public void MatrixFactorization() // Read data file. The file contains 3 columns, label (float value), matrixColumnIndex (unsigned integer key), and matrixRowIndex (unsigned integer key). // More specifically, LoadKey(1, 0, 19) means that the matrixColumnIndex column is read from the 2nd (indexed by 1) column in the data file and as // a key type (stored as 32-bit unsigned integer) ranged from 0 to 19 (aka the training matrix has 20 columns). - var reader = mlContext.Data.TextReader(ctx => (label: ctx.LoadFloat(0), matrixColumnIndex: ctx.LoadKey(1, 0, 19), matrixRowIndex: ctx.LoadKey(2, 0, 39)), hasHeader: true); + var reader = mlContext.Data.CreateTextReader(ctx => (label: ctx.LoadFloat(0), matrixColumnIndex: ctx.LoadKey(1, 0, 19), matrixRowIndex: ctx.LoadKey(2, 0, 39)), hasHeader: true); // The parameter that will be into the onFit method below. The obtained predictor will be assigned to this variable // so that we will be able to touch it. diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 16ceae22d3..9ff3432ca6 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -438,7 +438,7 @@ protected bool SaveLoadText(IDataView view, IHostEnvironment env, // Note that we don't pass in "args", but pass in a default args so we test // the auto-schema parsing. 
- var loadedData = TextLoader.ReadFile(env, new TextLoader.Arguments(), new MultiFileSource(pathData)); + var loadedData = ML.Data.ReadFromTextFile(pathData); if (!CheckMetadataTypes(loadedData.Schema)) Failed(); diff --git a/test/Microsoft.ML.TestFramework/ModelHelper.cs b/test/Microsoft.ML.TestFramework/ModelHelper.cs index 316a9ea755..4692942e83 100644 --- a/test/Microsoft.ML.TestFramework/ModelHelper.cs +++ b/test/Microsoft.ML.TestFramework/ModelHelper.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.TestFramework #pragma warning disable 612, 618 public static class ModelHelper { - private static IHostEnvironment s_environment = new MLContext(seed: 1); + private static MLContext s_environment = new MLContext(seed: 1); private static ITransformModel s_housePriceModel; public static void WriteKcHousePriceModel(string dataPath, string outputModelPath) @@ -41,17 +41,34 @@ public static void WriteKcHousePriceModel(string dataPath, Stream stream) public static IDataView GetKcHouseDataView(string dataPath) { - var dataSchema = "col=Id:TX:0 col=Date:TX:1 col=Label:R4:2 col=Bedrooms:R4:3 " + - "col=Bathrooms:R4:4 col=SqftLiving:R4:5 col=SqftLot:R4:6 col=Floors:R4:7 " + - "col=Waterfront:R4:8 col=View:R4:9 col=Condition:R4:10 col=Grade:R4:11 " + - "col=SqftAbove:R4:12 col=SqftBasement:R4:13 col=YearBuilt:R4:14 " + - "col=YearRenovated:R4:15 col=Zipcode:R4:16 col=Lat:R4:17 col=Long:R4:18 " + - "col=SqftLiving15:R4:19 col=SqftLot15:R4:20 header+ sep=,"; - - var txtArgs = new Runtime.Data.TextLoader.Arguments(); - bool parsed = CmdParser.ParseArguments(s_environment, dataSchema, txtArgs); - s_environment.Assert(parsed); - return Runtime.Data.TextLoader.ReadFile(s_environment, txtArgs, new MultiFileSource(dataPath)); + return s_environment.Data.ReadFromTextFile(dataPath, + columns: new[] + { + new Runtime.Data.TextLoader.Column("Id", Runtime.Data.DataKind.TX, 0), + new Runtime.Data.TextLoader.Column("Date", Runtime.Data.DataKind.TX, 1), + new 
Runtime.Data.TextLoader.Column("Label", Runtime.Data.DataKind.R4, 2), + new Runtime.Data.TextLoader.Column("BedRooms", Runtime.Data.DataKind.R4, 3), + new Runtime.Data.TextLoader.Column("BathRooms", Runtime.Data.DataKind.R4, 4), + new Runtime.Data.TextLoader.Column("SqftLiving", Runtime.Data.DataKind.R4, 5), + new Runtime.Data.TextLoader.Column("SqftLot", Runtime.Data.DataKind.R4, 6), + new Runtime.Data.TextLoader.Column("Floors", Runtime.Data.DataKind.R4, 7), + new Runtime.Data.TextLoader.Column("WaterFront", Runtime.Data.DataKind.R4, 8), + new Runtime.Data.TextLoader.Column("View", Runtime.Data.DataKind.R4, 9), + new Runtime.Data.TextLoader.Column("Condition", Runtime.Data.DataKind.R4, 10), + new Runtime.Data.TextLoader.Column("Grade", Runtime.Data.DataKind.R4, 11), + new Runtime.Data.TextLoader.Column("SqftAbove", Runtime.Data.DataKind.R4, 12), + new Runtime.Data.TextLoader.Column("SqftBasement", Runtime.Data.DataKind.R4, 13), + new Runtime.Data.TextLoader.Column("YearBuilt", Runtime.Data.DataKind.R4, 14), + new Runtime.Data.TextLoader.Column("YearRenovated", Runtime.Data.DataKind.R4, 15), + new Runtime.Data.TextLoader.Column("Zipcode", Runtime.Data.DataKind.R4, 16), + new Runtime.Data.TextLoader.Column("Lat", Runtime.Data.DataKind.R4, 17), + new Runtime.Data.TextLoader.Column("Long", Runtime.Data.DataKind.R4, 18), + new Runtime.Data.TextLoader.Column("SqftLiving15", Runtime.Data.DataKind.R4, 19), + new Runtime.Data.TextLoader.Column("SqftLot15", Runtime.Data.DataKind.R4, 20) + }, + hasHeader: true, + separatorChar: ',' + ); } private static ITransformModel CreateKcHousePricePredictorModel(string dataPath) diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs index 3b9db811e5..6027be2805 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs @@ -52,34 +52,25 @@ public class SentimentPrediction public 
float Score; } - private static TextLoader.Arguments MakeIrisTextLoaderArgs() + private static TextLoader.Column[] MakeIrisColumns() { - return new TextLoader.Arguments() - { - Separator = "comma", - Column = new[] + return new[] { new TextLoader.Column("SepalLength", DataKind.R4, 0), new TextLoader.Column("SepalWidth", DataKind.R4, 1), new TextLoader.Column("PetalLength", DataKind.R4, 2), new TextLoader.Column("PetalWidth",DataKind.R4, 3), new TextLoader.Column("Label", DataKind.Text, 4) - } - }; + }; } - private static TextLoader.Arguments MakeSentimentTextLoaderArgs() + private static TextLoader.Column[] MakeSentimentColumns() { - return new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + return new[] { new TextLoader.Column("Label", DataKind.BL, 0), new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }; + }; } } } diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs index 104855c2a9..66ba6ba137 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs @@ -43,7 +43,7 @@ private void IntermediateData(string dataPath) var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // A boolean column depicting the 'target label'. IsOver50K: ctx.LoadBool(0), // Three text columns. @@ -99,7 +99,7 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. 
- var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // We read the first 11 values as a single float vector. FeatureVector: ctx.LoadFloat(0, 10), // Separately, read the target variable. @@ -178,7 +178,7 @@ private ITransformer TrainOnIris(string irisDataPath) // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -256,7 +256,7 @@ private void TrainAndInspectWeights(string dataPath) // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -328,7 +328,7 @@ private void NormalizationWorkout(string dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset will be grouped together as one Features column. Features: ctx.LoadFloat(0, 3), // Label: kind of iris. @@ -444,7 +444,7 @@ private void TextFeaturizationOn(string dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. 
- var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( IsToxic: ctx.LoadBool(0), Message: ctx.LoadText(1) ), hasHeader: true); @@ -506,7 +506,7 @@ private void CategoricalFeaturizationOn(params string[] dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( Label: ctx.LoadBool(0), // We will load all the categorical features into one vector column of size 8. CategoricalFeatures: ctx.LoadText(1, 8), @@ -573,7 +573,7 @@ private void CrossValidationOn(string dataPath) // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), @@ -633,7 +633,7 @@ private void MixMatch(string dataPath) // Read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(ctx => ( + var reader = mlContext.Data.CreateTextReader(ctx => ( // The four features of the Iris dataset. 
SepalLength: ctx.LoadFloat(0), SepalWidth: ctx.LoadFloat(1), diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs index 1c14a5b158..706b4aad7d 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs @@ -41,9 +41,7 @@ private void IntermediateData(string dataPath) var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] { // A boolean column depicting the 'label'. new TextLoader.Column("IsOver50K", DataKind.BL, 0), // Three text columns. @@ -52,8 +50,8 @@ private void IntermediateData(string dataPath) new TextLoader.Column("MaritalStatus", DataKind.TX, 3) }, // First line of the file is a header, not a data row. - HasHeader = true - }); + hasHeader: true + ); // Start creating our processing pipeline. For now, let's just concatenate all the text columns // together into one. @@ -93,9 +91,7 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] { // We read the first 11 values as a single float vector. new TextLoader.Column("FeatureVector", DataKind.R4, 0, 10), @@ -103,10 +99,10 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m new TextLoader.Column("Target", DataKind.R4, 11), }, // First line of the file is a header, not a data row. 
- HasHeader = true, + hasHeader: true, // Default separator is tab, but we need a semicolon. - Separator = ";" - }); + separatorChar: ';' + ); // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). var trainData = reader.Read(trainDataPath); @@ -171,9 +167,7 @@ private ITransformer TrainOnIris(string irisDataPath) // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] { new TextLoader.Column("SepalLength", DataKind.R4, 0), new TextLoader.Column("SepalWidth", DataKind.R4, 1), new TextLoader.Column("PetalLength", DataKind.R4, 2), @@ -182,8 +176,8 @@ private ITransformer TrainOnIris(string irisDataPath) new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," - }); + separatorChar: ',' + ); // Retrieve the training data. var trainData = reader.Read(irisDataPath); @@ -240,17 +234,15 @@ private void NormalizationWorkout(string dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] { // The four features of the Iris dataset will be grouped together as one Features column. new TextLoader.Column("Features", DataKind.R4, 0, 3), // Label: kind of iris. new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," - }); + separatorChar: ',' + ); // Read the training data. 
var trainData = reader.Read(dataPath); @@ -303,14 +295,13 @@ private void TextFeaturizationOn(string dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] + { new TextLoader.Column("IsToxic", DataKind.BL, 0), new TextLoader.Column("Message", DataKind.TX, 1), }, - HasHeader = true - }); + hasHeader: true + ); // Read the data. var data = reader.Read(dataPath); @@ -371,9 +362,8 @@ private void CategoricalFeaturizationOn(params string[] dataPath) var mlContext = new MLContext(); // Define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] + { new TextLoader.Column("Label", DataKind.BL, 0), // We will load all the categorical features into one vector column of size 8. new TextLoader.Column("CategoricalFeatures", DataKind.TX, 1, 8), @@ -382,8 +372,8 @@ private void CategoricalFeaturizationOn(params string[] dataPath) // Let's also separately load the 'Workclass' column. new TextLoader.Column("Workclass", DataKind.TX, 1), }, - HasHeader = true - }); + hasHeader: true + ); // Read the data. var data = reader.Read(dataPath); @@ -436,9 +426,8 @@ private void CrossValidationOn(string dataPath) // Step one: read the data as an IDataView. // First, we define the reader: specify the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Column = new[] { + var reader = mlContext.Data.CreateTextReader(new[] + { // We read the first 11 values as a single float vector. 
new TextLoader.Column("SepalLength", DataKind.R4, 0), new TextLoader.Column("SepalWidth", DataKind.R4, 1), @@ -448,8 +437,8 @@ private void CrossValidationOn(string dataPath) new TextLoader.Column("Label", DataKind.TX, 4), }, // Default separator is tab, but the dataset has comma. - Separator = "," - }); + separatorChar: ',' + ); // Read the data. var data = reader.Read(dataPath); @@ -498,14 +487,14 @@ private void ReadDataDynamic(string dataPath) var mlContext = new MLContext(); // Create the reader: define the data columns and where to find them in the text file. - var reader = mlContext.Data.TextReader(new[] { + var reader = mlContext.Data.CreateTextReader(new[] { // We read the first 10 values as a single float vector. new TextLoader.Column("FeatureVector", DataKind.R4, new[] {new TextLoader.Range(0, 9)}), // Separately, read the target variable. new TextLoader.Column("Target", DataKind.R4, 10) }, // Default separator is tab, but we need a comma. - s => s.Separator = ","); + separatorChar: ',' ); // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed). 
var data = reader.Read(dataPath); @@ -527,11 +516,11 @@ public class OutputRow public void CustomTransformer() { var mlContext = new MLContext(); - var data = mlContext.Data.ReadFromTextFile(new[] + var data = mlContext.Data.ReadFromTextFile(GetDataPath("adult.tiny.with-schema.txt"), new[] { new TextLoader.Column("Income", DataKind.R4, 10), new TextLoader.Column("Features", DataKind.R4, 12, 14) - }, GetDataPath("adult.tiny.with-schema.txt"), s => { s.Separator = "\t"; s.HasHeader = true; }); + }, hasHeader: true); PrepareData(mlContext, data); TrainModel(mlContext, data); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs index 1eeaf9c13c..3c5cabb3dc 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs @@ -27,7 +27,7 @@ void New_CrossValidation() { var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.TextReader(MakeSentimentTextLoaderArgs()).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); + var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. 
var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => { s.ConvergenceTolerance = 1f; s.NumThreads = 1; })); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs index 536a74283f..8694b5b199 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs @@ -30,7 +30,7 @@ void New_DecomposableTrainAndPredict() var dataPath = GetDataPath(TestDatasets.irisData.trainFilename); var ml = new MLContext(); - var data = ml.Data.TextReader(MakeIrisTextLoaderArgs()) + var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') .Read(dataPath); var pipeline = new ColumnConcatenatingEstimator (ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") @@ -41,7 +41,7 @@ void New_DecomposableTrainAndPredict() var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); var engine = model.MakePredictionFunction(ml); - var testLoader = TextLoader.ReadFile(ml, MakeIrisTextLoaderArgs(), new MultiFileSource(dataPath)); + var testLoader = ml.Data.ReadFromTextFile(dataPath, MakeIrisColumns(), separatorChar: ','); var testData = testLoader.AsEnumerable(ml, false); foreach (var input in testData.Take(20)) { diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs index ad249fa6e9..b09dbbe82d 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs @@ -22,7 +22,7 @@ public void New_Evaluation() var ml = new MLContext(seed: 1, conc: 1); // Pipeline. 
- var pipeline = ml.Data.TextReader(MakeSentimentTextLoaderArgs()) + var pipeline = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features")) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs index 2c750a7b81..392bf95b85 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs @@ -29,7 +29,7 @@ void New_Extensibility() var dataPath = GetDataPath(TestDatasets.irisData.trainFilename); var ml = new MLContext(); - var data = ml.Data.TextReader(MakeIrisTextLoaderArgs()) + var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') .Read(dataPath); Action action = (i, j) => @@ -49,7 +49,7 @@ void New_Extensibility() var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); var engine = model.MakePredictionFunction(ml); - var testLoader = TextLoader.ReadFile(ml, MakeIrisTextLoaderArgs(), new MultiFileSource(dataPath)); + var testLoader = ml.Data.ReadFromTextFile(dataPath, MakeIrisColumns(), separatorChar: ','); var testData = testLoader.AsEnumerable(ml, false); foreach (var input in testData.Take(20)) { diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs index 667581c9b3..b85f5e646b 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs @@ -27,7 +27,7 @@ void New_FileBasedSavingOfData() var ml = new MLContext(seed: 1, conc: 1); var src = new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename)); - var trainData = 
ml.Data.TextReader(MakeSentimentTextLoaderArgs()) + var trainData = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features")) .Fit(src).Read(src); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs index daf2777047..6cf2898db4 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs @@ -34,7 +34,7 @@ public partial class ApiScenariosTests public void New_IntrospectiveTraining() { var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.TextReader(MakeSentimentTextLoaderArgs()) + var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) .Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs index 182451a30f..032ef53787 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests public void New_Metacomponents() { var ml = new MLContext(); - var data = ml.Data.TextReader(MakeIrisTextLoaderArgs()) + var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') .Read(GetDataPath(TestDatasets.irisData.trainFilename)); var sdcaTrainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => { s.MaxIterations = 100; s.Shuffle = true; s.NumThreads = 1; }); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs 
b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs index f2285b5200..b5ba493b2d 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests void New_MultithreadedPrediction() { var ml = new MLContext(seed: 1, conc: 1); - var reader = ml.Data.TextReader(MakeSentimentTextLoaderArgs()); + var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); var data = reader.Read(new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename))); // Pipeline. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs index ffda5b1ce1..52a9266f3a 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs @@ -22,7 +22,7 @@ public partial class ApiScenariosTests public void New_ReconfigurablePrediction() { var ml = new MLContext(seed: 1, conc: 1); - var dataReader = ml.Data.TextReader(MakeSentimentTextLoaderArgs()); + var dataReader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); var data = dataReader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); var testData = dataReader.Read(GetDataPath(TestDatasets.Sentiment.testFilename)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index 016acd6220..22ec24c29a 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -22,7 +22,7 @@ public partial class ApiScenariosTests public void New_SimpleTrainAndPredict() { var ml = new MLContext(seed: 1, conc: 
1); - var reader = ml.Data.TextReader(MakeSentimentTextLoaderArgs()); + var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs index adab64dec1..59bd307dbd 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests public void New_TrainSaveModelAndPredict() { var ml = new MLContext(seed: 1, conc: 1); - var reader = ml.Data.TextReader(MakeSentimentTextLoaderArgs()); + var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs index a117de429c..6c47365926 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs @@ -22,7 +22,7 @@ public void New_TrainWithInitialPredictor() var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.TextReader(MakeSentimentTextLoaderArgs()).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); + var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. 
var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features"); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs index 2a7030ea94..bda23779c4 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs @@ -20,7 +20,7 @@ public void New_TrainWithValidationSet() { var ml = new MLContext(seed: 1, conc: 1); // Pipeline. - var reader = ml.Data.TextReader(MakeSentimentTextLoaderArgs()); + var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features"); // Train the pipeline, prepare train and validation set. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs index c91e2498a5..ebab7c3a3b 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests void New_Visibility() { var ml = new MLContext(seed: 1, conc: 1); - var pipeline = ml.Data.TextReader(MakeSentimentTextLoaderArgs()) + var pipeline = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features", s => s.OutputTokens = true)); var src = new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename)); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 4752119236..1de3d82ace 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ 
b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -26,10 +26,7 @@ public void TrainAndPredictIrisModelUsingDirectInstantiationTest() { var mlContext = new MLContext(seed: 1, conc: 1); - var reader = mlContext.Data.TextReader(new TextLoader.Arguments() - { - HasHeader = false, - Column = new[] + var reader = mlContext.Data.CreateTextReader(columns: new[] { new TextLoader.Column("Label", DataKind.R4, 0), new TextLoader.Column("SepalLength", DataKind.R4, 1), @@ -37,7 +34,7 @@ public void TrainAndPredictIrisModelUsingDirectInstantiationTest() new TextLoader.Column("PetalLength", DataKind.R4, 3), new TextLoader.Column("PetalWidth", DataKind.R4, 4) } - }); + ); var pipe = mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(mlContext.Transforms.Normalize("Features")) diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs index b04a360168..ead36a22a5 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs @@ -25,17 +25,14 @@ public void TrainAndPredictSentimentModelWithDirectionInstantiationTest() var env = new MLContext(seed: 1, conc: 1); // Pipeline - var loader = TextLoader.ReadFile(env, - new TextLoader.Arguments() + var loader = env.Data.ReadFromTextFile(dataPath, + columns: new[] { - Separator = "tab", - HasHeader = true, - Column = new[] - { - new TextLoader.Column("Label", DataKind.Num, 0), - new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }, new MultiFileSource(dataPath)); + new TextLoader.Column("Label", DataKind.Num, 0), + new TextLoader.Column("SentimentText", DataKind.Text, 1) + }, + hasHeader: true + ); var trans = TextFeaturizingEstimator.Create(env, new 
TextFeaturizingEstimator.Arguments() { @@ -86,17 +83,14 @@ public void TrainAndPredictSentimentModelWithDirectionInstantiationTestWithWordE var env = new MLContext(seed: 1, conc: 1); // Pipeline - var loader = TextLoader.ReadFile(env, - new TextLoader.Arguments() + var loader = env.Data.ReadFromTextFile(dataPath, + columns: new[] { - Separator = "tab", - HasHeader = true, - Column = new[] - { - new TextLoader.Column("Label", DataKind.Num, 0), - new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }, new MultiFileSource(dataPath)); + new TextLoader.Column("Label", DataKind.Num, 0), + new TextLoader.Column("SentimentText", DataKind.Text, 1) + }, + hasHeader: true + ); var text = TextFeaturizingEstimator.Create(env, new TextFeaturizingEstimator.Arguments() { diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs index 047bde9f39..0c2ae72004 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs @@ -257,18 +257,15 @@ public void TensorFlowInputsOutputsSchemaTest() public void TensorFlowTransformMNISTConvTest() { var mlContext = new MLContext(seed: 1, conc: 1); - var reader = mlContext.Data.TextReader( - new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader( + columns: new[] { new TextLoader.Column("Label", DataKind.U4 , new [] { new TextLoader.Range(0) }, new KeyRange(0, 9)), new TextLoader.Column("Placeholder", DataKind.R4, new []{ new TextLoader.Range(1, 784) }) - } - }); + }, + hasHeader: true + ); var trainData = reader.Read(GetDataPath(TestDatasets.mnistTiny28.trainFilename)); var testData = reader.Read(GetDataPath(TestDatasets.mnistOneClass.testFilename)); @@ -303,17 +300,12 @@ public void TensorFlowTransformMNISTLRTrainingTest() try { var mlContext 
= new MLContext(seed: 1, conc: 1); - var reader = mlContext.Data.TextReader( - new TextLoader.Arguments - { - Separator = "tab", - HasHeader = false, - Column = new[] + var reader = mlContext.Data.CreateTextReader(columns: new[] { new TextLoader.Column("Label", DataKind.I8, 0), new TextLoader.Column("Placeholder", DataKind.R4, new []{ new TextLoader.Range(1, 784) }) } - }); + ); var trainData = reader.Read(GetDataPath(TestDatasets.mnistTiny28.trainFilename)); var testData = reader.Read(GetDataPath(TestDatasets.mnistOneClass.testFilename)); @@ -398,17 +390,13 @@ private void ExecuteTFTransformMNISTConvTrainingTest(bool shuffle, int? shuffleS { var mlContext = new MLContext(seed: 1, conc: 1); - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Separator = "tab", - HasHeader = false, - Column = new[] + var reader = mlContext.Data.CreateTextReader(new[] { new TextLoader.Column("Label", DataKind.U4, new []{ new TextLoader.Range(0) }, new KeyRange(0, 9)), new TextLoader.Column("TfLabel", DataKind.I8, 0), new TextLoader.Column("Placeholder", DataKind.R4, new []{ new TextLoader.Range(1, 784) }) } - }); + ); var trainData = reader.Read(GetDataPath(TestDatasets.mnistTiny28.trainFilename)); var testData = reader.Read(GetDataPath(TestDatasets.mnistOneClass.testFilename)); @@ -491,16 +479,13 @@ public void TensorFlowTransformMNISTConvSavedModelTest() // of predicted label of a single in-memory example. 
var mlContext = new MLContext(seed: 1, conc: 1); - var reader = mlContext.Data.TextReader(new TextLoader.Arguments - { - Separator = "tab", - HasHeader = true, - Column = new[] + var reader = mlContext.Data.CreateTextReader(columns: new[] { new TextLoader.Column("Label", DataKind.U4 , new [] { new TextLoader.Range(0) }, new KeyRange(0, 9)), new TextLoader.Column("Placeholder", DataKind.R4, new []{ new TextLoader.Range(1, 784) }) - } - }); + }, + hasHeader: true + ); var trainData = reader.Read(GetDataPath(TestDatasets.mnistTiny28.trainFilename)); var testData = reader.Read(GetDataPath(TestDatasets.mnistOneClass.testFilename)); @@ -625,14 +610,13 @@ public void TensorFlowTransformCifar() var dataFile = GetDataPath("images/images.tsv"); var imageFolder = Path.GetDirectoryName(dataFile); - var data = TextLoader.Create(mlContext, new TextLoader.Arguments() - { - Column = new[] - { + var data = mlContext.Data.ReadFromTextFile(dataFile, + columns: new[] + { new TextLoader.Column("ImagePath", DataKind.TX, 0), new TextLoader.Column("Name", DataKind.TX, 1), - } - }, new MultiFileSource(dataFile)); + } + ); var pipeEstimator = new ImageLoadingEstimator(mlContext, imageFolder, ("ImagePath", "ImageReal")) .Append(new ImageResizingEstimator(mlContext, "ImageReal", "ImageCropped", imageWidth, imageHeight)) @@ -673,14 +657,12 @@ public void TensorFlowTransformCifarSavedModel() var dataFile = GetDataPath("images/images.tsv"); var imageFolder = Path.GetDirectoryName(dataFile); - var data = TextLoader.Create(mlContext, new TextLoader.Arguments() - { - Column = new[] + var data = mlContext.Data.ReadFromTextFile(dataFile, columns: new[] { new TextLoader.Column("ImagePath", DataKind.TX, 0), new TextLoader.Column("Name", DataKind.TX, 1), - } - }, new MultiFileSource(dataFile)); + } + ); var images = ImageLoaderTransform.Create(mlContext, new ImageLoaderTransform.Arguments() { Column = new ImageLoaderTransform.Column[1] @@ -732,14 +714,13 @@ public void 
TensorFlowTransformCifarInvalidShape() var imageWidth = 28; var dataFile = GetDataPath("images/images.tsv"); var imageFolder = Path.GetDirectoryName(dataFile); - var data = TextLoader.Create(mlContext, new TextLoader.Arguments() - { - Column = new[] + var data = mlContext.Data.ReadFromTextFile(dataFile, + columns: new[] { new TextLoader.Column("ImagePath", DataKind.TX, 0), new TextLoader.Column("Name", DataKind.TX, 1), - } - }, new MultiFileSource(dataFile)); + } + ); var images = ImageLoaderTransform.Create(mlContext, new ImageLoaderTransform.Arguments() { Column = new ImageLoaderTransform.Column[1] diff --git a/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs b/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs index 840676b1d5..11f9a440ee 100644 --- a/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs @@ -56,10 +56,10 @@ public void TestCustomTransformer() { string dataPath = GetDataPath("adult.tiny.with-schema.txt"); var source = new MultiFileSource(dataPath); - var loader = ML.Data.TextReader(new[] { + var loader = ML.Data.CreateTextReader(new[] { new TextLoader.Column("Float1", DataKind.R4, 9), new TextLoader.Column("Float4", DataKind.R4, new[]{new TextLoader.Range(9), new TextLoader.Range(10), new TextLoader.Range(11), new TextLoader.Range(12) }) - }, s => { s.Separator = "\t"; s.HasHeader = true; }); + }, hasHeader: true); var data = loader.Read(source); @@ -95,11 +95,11 @@ public void TestSchemaPropagation() { string dataPath = GetDataPath("adult.test"); var source = new MultiFileSource(dataPath); - var loader = ML.Data.TextReader(new[] { + var loader = ML.Data.CreateTextReader(new[] { new TextLoader.Column("Float1", DataKind.R4, 0), new TextLoader.Column("Float4", DataKind.R4, new[]{new TextLoader.Range(0), new TextLoader.Range(2), new TextLoader.Range(4), new TextLoader.Range(10) }), new TextLoader.Column("Text1", DataKind.Text, 0) - }, s => { s.Separator = ","; 
s.HasHeader = true; }); + }, hasHeader: true, separatorChar: ',' ); var data = loader.Read(source); From 0e85f5e89bb6ff383f99152fae9032d1bacda1eb Mon Sep 17 00:00:00 2001 From: Vadim Eksarevskiy <42353187+vaeksare@users.noreply.github.com> Date: Fri, 7 Dec 2018 13:33:03 -0800 Subject: [PATCH 035/100] Fix Dnn Image Models being downloaded on all machines --- .../Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj b/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj index 2401de0df0..a962325347 100644 --- a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj +++ b/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj @@ -102,11 +102,11 @@ From 4e8ad350d29cb205ec5fdaae786f7af35bd7cb83 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Fri, 7 Dec 2018 17:14:19 -0800 Subject: [PATCH 036/100] negativeOr corrected, comment added, tests added (#1813) --- src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 20 +- src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 20 +- src/Native/CpuMathNative/Sse.cpp | 16 +- .../UnitTests/TestVBuffer.cs | 4 +- .../UnitTests.cs | 189 +++++++++++------- 5 files changed, 148 insertions(+), 101 deletions(-) diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index 2156ddf5fa..5e7845007c 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -877,11 +877,12 @@ public static unsafe void Scale(float scale, Span dst) Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + (misalignment * 8)); Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); - Vector256 temp = 
Avx.And(result, leadingMask); - result = Avx.And(result, trailingMask); + Vector256 temp = Avx.And(result, trailingMask); + result = Avx.Multiply(scaleVector256, result); - temp = Avx.Multiply(scaleVector256, temp); - result = Avx.Or(temp, result); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. + result = Avx.And(result, leadingMask); + result = Avx.Or(result, temp); Avx.Store(pDstCurrent, result); @@ -930,13 +931,14 @@ public static unsafe void Scale(float scale, Span dst) Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 temp = Avx.And(result, trailingMask); - result = Avx.And(result, leadingMask); + Vector256 temp = Avx.And(result, leadingMask); + result = Avx.Multiply(scaleVector256, result); - temp = Avx.Multiply(scaleVector256, temp); - temp = Avx.Or(temp, result); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. 
+ result = Avx.And(result, trailingMask); + result = Avx.Or(result, temp); - Avx.Store(pDstCurrent, temp); + Avx.Store(pDstCurrent, result); } } } diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index b83fd6bbc6..89d6dbce03 100644 --- a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -824,11 +824,12 @@ public static unsafe void Scale(float scale, Span dst) Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + (misalignment * 4)); Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); - Vector128 temp = Sse.And(result, leadingMask); - result = Sse.And(result, trailingMask); + Vector128 temp = Sse.And(result, trailingMask); + result = Sse.Multiply(scaleVector128, result); - temp = Sse.Multiply(scaleVector128, temp); - result = Sse.Or(temp, result); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. + result = Sse.And(result, leadingMask); + result = Sse.Or(result, temp); Sse.Store(pDstCurrent, result); @@ -872,13 +873,14 @@ public static unsafe void Scale(float scale, Span dst) Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 temp = Sse.And(result, trailingMask); - result = Sse.And(result, leadingMask); + Vector128 temp = Sse.And(result, leadingMask); + result = Sse.Multiply(scaleVector128, result); - temp = Sse.Multiply(scaleVector128, temp); - temp = Sse.Or(temp, result); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. 
+ result = Sse.And(result, trailingMask); + result = Sse.Or(result, temp); - Sse.Store(pDstCurrent, temp); + Sse.Store(pDstCurrent, result); } } } diff --git a/src/Native/CpuMathNative/Sse.cpp b/src/Native/CpuMathNative/Sse.cpp index 607af332d1..0a2715d4d8 100644 --- a/src/Native/CpuMathNative/Sse.cpp +++ b/src/Native/CpuMathNative/Sse.cpp @@ -714,11 +714,12 @@ EXPORT_API(void) Scale(float a, _Inout_ float * pd, int c) __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + (misalignment * 4)); __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); - __m128 temp = _mm_and_ps(result, leadingMask); - result = _mm_and_ps(result, trailingMask); + __m128 temp = _mm_and_ps(result, trailingMask); + result = _mm_mul_ps(result, x1); - temp = _mm_mul_ps(temp, x1); - result = _mm_or_ps(temp, result); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. + result = _mm_and_ps(result, leadingMask); + result = _mm_or_ps(result, temp); _mm_storeu_ps(pd, result); @@ -757,10 +758,11 @@ EXPORT_API(void) Scale(float a, _Inout_ float * pd, int c) __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 temp = _mm_and_ps(result, trailingMask); - result = _mm_and_ps(result, leadingMask); + __m128 temp = _mm_and_ps(result, leadingMask); + result = _mm_mul_ps(result, x1); - temp = _mm_mul_ps(temp, x1); + // Masking operation is done at the end to avoid doing an Or operation with negative Zero. 
+ result = _mm_and_ps(result, trailingMask); result = _mm_or_ps(temp, result); _mm_storeu_ps(pd, result); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs index 799e2e64cb..2edbf0a7ff 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs @@ -39,7 +39,7 @@ public void TestApplyAt() Assert.Equal(1, buffer.GetValues()[2]); } - [Fact(Skip = "Bug 1802: https://github.com/dotnet/machinelearning/issues/1802")] + [Fact] public void VBufferOpScaleBy() { var rgen = RandomUtils.Create(9); @@ -60,7 +60,7 @@ public void VBufferOpScaleBy() } } - [Fact(Skip = "Bug 1802: https://github.com/dotnet/machinelearning/issues/1802")] + [Fact] public void VBufferOpScaleByCopy() { var rgen = RandomUtils.Create(9); diff --git a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs index 25996ec42c..7e2f1e5cc6 100644 --- a/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs +++ b/test/Microsoft.ML.CpuMath.UnitTests.netcoreapp/UnitTests.cs @@ -2,11 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Runtime.Internal.CpuMath; using System; using System.Collections.Generic; using Xunit; -using Xunit.Abstractions; -using Microsoft.ML.Runtime.Internal.CpuMath; namespace Microsoft.ML.CpuMath.UnitTests { @@ -21,8 +20,6 @@ public class CpuMathUtilsUnitTests private readonly FloatEqualityComparer _comparer; private readonly FloatEqualityComparerForMatMul _matMulComparer; - private const float DefaultScale = 1.7f; - public CpuMathUtilsUnitTests() { // Padded array whose length is a multiple of 4 @@ -130,45 +127,51 @@ public void MatTimesSrcSparseTest(int matTest, int srcTest, int dstTest, float[] } [Theory] - [InlineData(0)] - [InlineData(1)] - public void AddScalarUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void AddScalarUTest(int test, float defaultScale) { float[] dst = (float[])_testArrays[test].Clone(); float[] expected = (float[])dst.Clone(); for (int i = 0; i < expected.Length; i++) { - expected[i] += DefaultScale; + expected[i] += defaultScale; } - CpuMathUtils.Add(DefaultScale, dst); + CpuMathUtils.Add(defaultScale, dst); var actual = dst; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void ScaleTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void ScaleTest(int test, float defaultScale) { float[] dst = (float[])_testArrays[test].Clone(); float[] expected = (float[])dst.Clone(); for (int i = 0; i < expected.Length; i++) { - expected[i] *= DefaultScale; + expected[i] *= defaultScale; } - CpuMathUtils.Scale(DefaultScale, dst); + CpuMathUtils.Scale(defaultScale, dst); var actual = dst; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void ScaleSrcUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void ScaleSrcUTest(int 
test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); float[] dst = (float[])src.Clone(); @@ -176,36 +179,40 @@ public void ScaleSrcUTest(int test) for (int i = 0; i < expected.Length; i++) { - expected[i] *= DefaultScale; + expected[i] *= defaultScale; } - CpuMathUtils.Scale(DefaultScale, src, dst, dst.Length); + CpuMathUtils.Scale(defaultScale, src, dst, dst.Length); var actual = dst; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void ScaleAddUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void ScaleAddUTest(int test, float defaultScale) { float[] dst = (float[])_testArrays[test].Clone(); float[] expected = (float[])dst.Clone(); for (int i = 0; i < expected.Length; i++) { - expected[i] = DefaultScale * (dst[i] + DefaultScale); + expected[i] = defaultScale * (dst[i] + defaultScale); } - CpuMathUtils.ScaleAdd(DefaultScale, DefaultScale, dst); + CpuMathUtils.ScaleAdd(defaultScale, defaultScale, dst); var actual = dst; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void AddScaleUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void AddScaleUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); float[] dst = (float[])src.Clone(); @@ -213,43 +220,42 @@ public void AddScaleUTest(int test) for (int i = 0; i < expected.Length; i++) { - expected[i] *= (1 + DefaultScale); + expected[i] *= (1 + defaultScale); } - CpuMathUtils.AddScale(DefaultScale, src, dst, dst.Length); + CpuMathUtils.AddScale(defaultScale, src, dst, dst.Length); var actual = dst; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void AddScaleSUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] 
+ public void AddScaleSUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); float[] dst = (float[])src.Clone(); int[] idx = _testIndexArray; float[] expected = (float[])dst.Clone(); - expected[0] = 5.292f; - expected[2] = -13.806f; - expected[5] = -43.522f; - expected[6] = 55.978f; - expected[8] = -178.869f; - expected[11] = -31.941f; - expected[12] = -51.205f; - expected[13] = -21.337f; - expected[14] = 35.782f; - - CpuMathUtils.AddScale(DefaultScale, src, idx, dst, idx.Length); - var actual = dst; - Assert.Equal(expected, actual, _comparer); + CpuMathUtils.AddScale(defaultScale, src, idx, dst, idx.Length); + for (int i = 0; i < idx.Length; i++) + { + int index = idx[i]; + expected[index] += defaultScale * src[i]; + } + + Assert.Equal(expected, dst, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void AddScaleCopyUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void AddScaleCopyUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); float[] dst = (float[])src.Clone(); @@ -258,10 +264,10 @@ public void AddScaleCopyUTest(int test) for (int i = 0; i < expected.Length; i++) { - expected[i] *= (1 + DefaultScale); + expected[i] *= (1 + defaultScale); } - CpuMathUtils.AddScaleCopy(DefaultScale, src, dst, result, dst.Length); + CpuMathUtils.AddScaleCopy(defaultScale, src, dst, result, dst.Length); var actual = result; Assert.Equal(expected, actual, _comparer); } @@ -364,12 +370,21 @@ public void SumSqUTest(int test, float expected) } [Theory] - [InlineData(0, 27484.6352f)] - [InlineData(1, 27482.1071f)] - public void SumSqDiffUTest(int test, float expected) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void SumSqDiffUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); - var actual = CpuMathUtils.SumSq(DefaultScale, 
src); + var actual = CpuMathUtils.SumSq(defaultScale, src); + + float expected = 0; + for (int i = 0; i < src.Length; i++) + { + expected += (src[i] - defaultScale) * (src[i] - defaultScale); + } + Assert.Equal(expected, actual, 2); } @@ -384,12 +399,21 @@ public void SumAbsUTest(int test, float expected) } [Theory] - [InlineData(0, 393.96f)] - [InlineData(1, 392.37f)] - public void SumAbsDiffUTest(int test, float expected) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void SumAbsDiffUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); - var actual = CpuMathUtils.SumAbs(DefaultScale, src); + var actual = CpuMathUtils.SumAbs(defaultScale, src); + + float expected = 0; + for (int i = 0; i < src.Length; i++) + { + expected += Math.Abs(src[i] - defaultScale); + } + Assert.Equal(expected, actual, 2); } @@ -404,12 +428,25 @@ public void MaxAbsUTest(int test, float expected) } [Theory] - [InlineData(0, 108.07f)] - [InlineData(1, 108.07f)] - public void MaxAbsDiffUTest(int test, float expected) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void MaxAbsDiffUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); - var actual = CpuMathUtils.MaxAbsDiff(DefaultScale, src); + var actual = CpuMathUtils.MaxAbsDiff(defaultScale, src); + + float expected = 0; + for (int i = 0; i < src.Length; i++) + { + float abs = Math.Abs(src[i] - defaultScale); + if (abs > expected) + { + expected = abs; + } + } + Assert.Equal(expected, actual, 2); } @@ -496,9 +533,11 @@ public void ZeroMatrixItemsCoreTest(int test, int[] idx, float[] expected) } [Theory] - [InlineData(0)] - [InlineData(1)] - public void SdcaL1UpdateUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void SdcaL1UpdateUTest(int test, float defaultScale) { float[] src = 
(float[])_testArrays[test].Clone(); float[] v = (float[])src.Clone(); @@ -507,19 +546,21 @@ public void SdcaL1UpdateUTest(int test) for (int i = 0; i < expected.Length; i++) { - float value = src[i] * (1 + DefaultScale); - expected[i] = Math.Abs(value) > DefaultScale ? (value > 0 ? value - DefaultScale : value + DefaultScale) : 0; + float value = src[i] * (1 + defaultScale); + expected[i] = Math.Abs(value) > defaultScale ? (value > 0 ? value - defaultScale : value + defaultScale) : 0; } - CpuMathUtils.SdcaL1UpdateDense(DefaultScale, src.Length, src, DefaultScale, v, w); + CpuMathUtils.SdcaL1UpdateDense(defaultScale, src.Length, src, defaultScale, v, w); var actual = w; Assert.Equal(expected, actual, _comparer); } [Theory] - [InlineData(0)] - [InlineData(1)] - public void SdcaL1UpdateSUTest(int test) + [InlineData(0, 1.7f)] + [InlineData(1, 1.7f)] + [InlineData(0, -1.7f)] + [InlineData(1, -1.7f)] + public void SdcaL1UpdateSUTest(int test, float defaultScale) { float[] src = (float[])_testArrays[test].Clone(); float[] v = (float[])src.Clone(); @@ -530,11 +571,11 @@ public void SdcaL1UpdateSUTest(int test) for (int i = 0; i < idx.Length; i++) { int index = idx[i]; - float value = v[index] + src[i] * DefaultScale; - expected[index] = Math.Abs(value) > DefaultScale ? (value > 0 ? value - DefaultScale : value + DefaultScale) : 0; + float value = v[index] + src[i] * defaultScale; + expected[index] = Math.Abs(value) > defaultScale ? (value > 0 ? 
value - defaultScale : value + defaultScale) : 0; } - CpuMathUtils.SdcaL1UpdateSparse(DefaultScale, idx.Length, src, idx, DefaultScale, v, w); + CpuMathUtils.SdcaL1UpdateSparse(defaultScale, idx.Length, src, idx, defaultScale, v, w); var actual = w; Assert.Equal(expected, actual, _comparer); } From 287bc3e9b84f640e9b75e7b288c2663a536a9863 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Mon, 10 Dec 2018 15:22:55 -0800 Subject: [PATCH 037/100] Reverting dead unallignedCode paths (#1845) * reverting dead unallignedCode paths * timeSeries alignment corrected for avx, multiplyadd added, performance tests done for odd input, enabled timeseries tests * name corrected --- src/Microsoft.ML.CpuMath/AvxIntrinsics.cs | 684 ++++-------------- .../CpuMathUtils.netstandard.cs | 411 ++++++++++- src/Microsoft.ML.CpuMath/Sse.cs | 427 ----------- src/Microsoft.ML.CpuMath/SseIntrinsics.cs | 649 ++++------------- ...AdaptiveSingularSpectrumSequenceModeler.cs | 34 +- src/Native/CpuMathNative/Sse.cpp | 632 +++------------- .../AvxPerformanceTests.cs | 10 +- .../CpuMathNativeUtils.cs | 91 --- .../NativePerformanceTests.cs | 125 ++-- .../PerformanceTests.cs | 17 +- .../SsePerformanceTests.cs | 6 +- .../Transformers/RffTests.cs | 2 +- .../TimeSeries.cs | 2 +- .../TimeSeriesEstimatorTests.cs | 4 +- 14 files changed, 869 insertions(+), 2225 deletions(-) delete mode 100644 src/Microsoft.ML.CpuMath/Sse.cs delete mode 100644 test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs diff --git a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs index 5e7845007c..1b19b46949 100644 --- a/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/AvxIntrinsics.cs @@ -47,6 +47,25 @@ internal static class AvxIntrinsics private static readonly Vector256 _absMask256 = Avx.StaticCast(Avx.SetAllVector256(0x7FFFFFFF)); + private const int Vector256Alignment = 32; + + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + private static 
bool HasCompatibleAlignment(AlignedArray alignedArray) + { + Contracts.AssertValue(alignedArray); + Contracts.Assert(alignedArray.Size > 0); + return (alignedArray.CbAlign % Vector256Alignment) == 0; + } + + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + private static unsafe float* GetAlignedBase(AlignedArray alignedArray, float* unalignedBase) + { + Contracts.AssertValue(alignedArray); + float* alignedBase = unalignedBase + alignedArray.GetBase((long)unalignedBase); + Contracts.Assert(((long)alignedBase % Vector256Alignment) == 0); + return alignedBase; + } + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] private static Vector128 GetHigh(in Vector256 x) => Avx.ExtractVector128(x, 1); @@ -154,17 +173,18 @@ private static Vector256 MultiplyAdd(Vector256 src1, Vector256 mat, ReadOnlySpan src, Span dst, int crow, int ccol) - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + float* pSrcEnd = psrc + ccol; float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; @@ -173,118 +193,36 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr while (pDstCurrent < pDstEnd) { Vector256 res0 = Avx.SetZeroVector256(); - Vector256 res1 = Avx.SetZeroVector256(); - Vector256 res2 = Avx.SetZeroVector256(); - Vector256 res3 = Avx.SetZeroVector256(); + Vector256 res1 = res0; + Vector256 res2 = res0; + Vector256 res3 = res0; - int length = ccol; float* pSrcCurrent = psrc; - nuint address = (nuint)(pMatCurrent); - int 
misalignment = (int)(address % 32); - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) - { - Vector256 vector = Avx.LoadVector256(pSrcCurrent); - - float* pMatTemp = pMatCurrent; - res0 = MultiplyAdd(pMatTemp, vector, res0); - res1 = MultiplyAdd(pMatTemp += ccol, vector, res1); - res2 = MultiplyAdd(pMatTemp += ccol, vector, res2); - res3 = MultiplyAdd(pMatTemp += ccol, vector, res3); - - pSrcCurrent += 8; - pMatCurrent += 8; - } - } - else + while (pSrcCurrent < pSrcEnd) { - if (misalignment != 0) - { - // Handle cases where the data is not 256-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 8 - misalignment; - - Vector256 mask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + (misalignment * 8)); - - // We only align pMat since it has significantly more reads. - float* pMatTemp = pMatCurrent; - Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); - Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); - - res0 = Avx.Multiply(x01, vector); - res1 = Avx.Multiply(x11, vector); - res2 = Avx.Multiply(x21, vector); - res3 = Avx.Multiply(x31, vector); - - pMatCurrent += misalignment; - pSrcCurrent += misalignment; - length -= misalignment; - } - - if (length > 7) - { - // Handle all the 256-bit blocks that we can now that we have offset to an aligned address - remainder = length % 8; - - while (pSrcCurrent + 8 <= pSrcEnd) - { - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). 
- // We don't need an assert, since the instruction will throw for unaligned inputs. - Vector256 vector = Avx.LoadVector256(pSrcCurrent); - - float* pMatTemp = pMatCurrent; - res0 = MultiplyAdd(pMatTemp, vector, res0); - res1 = MultiplyAdd(pMatTemp += ccol, vector, res1); - res2 = MultiplyAdd(pMatTemp += ccol, vector, res2); - res3 = MultiplyAdd(pMatTemp += ccol, vector, res3); - - pSrcCurrent += 8; - pMatCurrent += 8; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 8-16 elements and the input is not - // 256-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. - remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (8 - remainder); - pSrcCurrent -= (8 - remainder); - - Vector256 mask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - - float* pMatTemp = pMatCurrent; - Vector256 x01 = Avx.And(mask, Avx.LoadVector256(pMatTemp)); - Vector256 x11 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x21 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 x31 = Avx.And(mask, Avx.LoadVector256(pMatTemp += ccol)); - Vector256 vector = Avx.And(mask, Avx.LoadVector256(pSrcCurrent)); - - res0 = MultiplyAdd(x01, vector, res0); - res1 = MultiplyAdd(x11, vector, res1); - res2 = MultiplyAdd(x21, vector, res2); - res3 = MultiplyAdd(x31, vector, res3); - - pMatCurrent += 8; - pSrcCurrent += 8; - } + float* pMatTemp = pMatCurrent; + Contracts.Assert(((nuint)(pMatTemp) % 32) == 0); + Contracts.Assert(((nuint)(pSrcCurrent) % 32) == 0); + + // The JIT will only fold away unaligned loads due to the semantics behind + // the VEX-encoding of the memory operand for `ins xmm, xmm, [mem]`. 
Since + // modern hardware has unaligned loads that are as fast as aligned loads, + // when it doesn't cross a cache-line/page boundary, we will just assert + // that the alignment is correct and allow for the more-efficient codegen. + Vector256 x01 = Avx.LoadVector256(pMatTemp); + Vector256 x11 = Avx.LoadVector256(pMatTemp += ccol); + Vector256 x21 = Avx.LoadVector256(pMatTemp += ccol); + Vector256 x31 = Avx.LoadVector256(pMatTemp += ccol); + Vector256 x02 = Avx.LoadVector256(pSrcCurrent); + + res0 = MultiplyAdd(x01, x02, res0); + res1 = MultiplyAdd(x11, x02, res1); + res2 = MultiplyAdd(x21, x02, res2); + res3 = MultiplyAdd(x31, x02, res3); + + pSrcCurrent += 8; + pMatCurrent += 8; } // Add up the entries of each, with the 4 results in res0 @@ -293,7 +231,7 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr res0 = Avx.HorizontalAdd(res0, res2); Vector128 sum = Sse.Add(Avx.GetLowerHalf(res0), GetHigh(in res0)); - Sse.Store(pDstCurrent, sum); + Sse.StoreAligned(pDstCurrent, sum); pDstCurrent += 4; pMatCurrent += 3 * ccol; @@ -304,22 +242,22 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr // Partial sparse source vector. public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) - { - MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); - } - - public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, - int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) { // REVIEW: For extremely sparse inputs, interchanging the loops would // likely be more efficient. 
- fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (int* pposSrc = &MemoryMarshal.GetReference(rgposSrc)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + Contracts.Assert(HasCompatibleAlignment(mat)); + Contracts.Assert(HasCompatibleAlignment(src)); + Contracts.Assert(HasCompatibleAlignment(dst)); + + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) + fixed (int* pposSrc = &rgposSrc[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + int* pposMin = pposSrc + iposMin; int* pposEnd = pposSrc + iposEnd; float* pDstEnd = pdst + crow; @@ -327,116 +265,7 @@ public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgp float* pSrcCurrent = psrc - posMin; float* pDstCurrent = pdst; - nuint address = (nuint)(pDstCurrent); - int misalignment = (int)(address % 32); - int length = crow; - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - Avx.Store(pDstCurrent, SparseMultiplicationAcrossRow()); - pDstCurrent += 8; - pm0 += 8 * ccol; - } - } - else - { - if (misalignment != 0) - { - // Handle cases where the data is not 256-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 8 - misalignment; - - Vector256 mask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + (misalignment * 8)); - - float* pm1 = pm0 + ccol; - float* pm2 = pm1 + ccol; - float* pm3 = pm2 + ccol; - Vector256 result = Avx.SetZeroVector256(); - - int* ppos = pposMin; - - 
while (ppos < pposEnd) - { - int col1 = *ppos; - int col2 = col1 + 4 * ccol; - Vector256 x1 = Avx.SetVector256(pm3[col2], pm2[col2], pm1[col2], pm0[col2], - pm3[col1], pm2[col1], pm1[col1], pm0[col1]); - - x1 = Avx.And(mask, x1); - Vector256 x2 = Avx.SetAllVector256(pSrcCurrent[col1]); - result = MultiplyAdd(x2, x1, result); - ppos++; - } - - Avx.Store(pDstCurrent, result); - pDstCurrent += misalignment; - pm0 += misalignment * ccol; - length -= misalignment; - } - - if (length > 7) - { - // Handle all the 256-bit blocks that we can now that we have offset to an aligned address - remainder = length % 8; - while (pDstCurrent < pDstEnd) - { - Avx.Store(pDstCurrent, SparseMultiplicationAcrossRow()); - pDstCurrent += 8; - pm0 += 8 * ccol; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 8-16 elements and the input is not - // 256-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pDstCurrent -= (8 - remainder); - pm0 -= (8 - remainder) * ccol; - Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - - float* pm1 = pm0 + ccol; - float* pm2 = pm1 + ccol; - float* pm3 = pm2 + ccol; - Vector256 result = Avx.SetZeroVector256(); - - int* ppos = pposMin; - - while (ppos < pposEnd) - { - int col1 = *ppos; - int col2 = col1 + 4 * ccol; - Vector256 x1 = Avx.SetVector256(pm3[col2], pm2[col2], pm1[col2], pm0[col2], - pm3[col1], pm2[col1], pm1[col1], pm0[col1]); - x1 = Avx.And(x1, trailingMask); - - Vector256 x2 = Avx.SetAllVector256(pSrcCurrent[col1]); - result = MultiplyAdd(x2, x1, result); - ppos++; - } - - result = Avx.Add(result, Avx.And(leadingMask, Avx.LoadVector256(pDstCurrent))); - - Avx.Store(pDstCurrent, result); - pDstCurrent += 8; - pm0 += 8 * ccol; - } - } - - Vector256 SparseMultiplicationAcrossRow() + while (pDstCurrent < pDstEnd) { float* pm1 = pm0 + ccol; float* pm2 = pm1 + ccol; @@ -450,326 +279,133 @@ Vector256 SparseMultiplicationAcrossRow() int col1 = *ppos; int col2 = col1 + 4 * ccol; Vector256 x1 = Avx.SetVector256(pm3[col2], pm2[col2], pm1[col2], pm0[col2], - pm3[col1], pm2[col1], pm1[col1], pm0[col1]); + pm3[col1], pm2[col1], pm1[col1], pm0[col1]); Vector256 x2 = Avx.SetAllVector256(pSrcCurrent[col1]); - result = MultiplyAdd(x2, x1, result); + x2 = Avx.Multiply(x2, x1); + result = Avx.Add(result, x2); + ppos++; } - return result; + Avx.StoreAligned(pDstCurrent, result); + pDstCurrent += 8; + pm0 += 8 * ccol; } } } public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) 
{ - MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); - } + Contracts.Assert(HasCompatibleAlignment(mat)); + Contracts.Assert(HasCompatibleAlignment(src)); + Contracts.Assert(HasCompatibleAlignment(dst)); - public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + float* pSrcEnd = psrc + ccol; float* pDstEnd = pdst + crow; float* pSrcCurrent = psrc; float* pMatCurrent = pmat; - // The reason behind adding the if condtion instead of boolean flag - // is to avoid branching in codegen. - if (pSrcCurrent < pSrcEnd) - { - Vector128 h01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each slot of h01 (ABCD) into its own register. - Vector128 h11 = Avx.Permute(h01, 0x55); // B - Vector128 h21 = Avx.Permute(h01, 0xAA); // C - Vector128 h31 = Avx.Permute(h01, 0xFF); // D - h01 = Avx.Permute(h01, 0x00); // A + // We do 4-way unrolling + Vector128 h01 = Sse.LoadAlignedVector128(pSrcCurrent); + // Replicate each slot of h01 (ABCD) into its own register. 
+ Vector128 h11 = Sse.Shuffle(h01, h01, 0x55); // B + Vector128 h21 = Sse.Shuffle(h01, h01, 0xAA); // C + Vector128 h31 = Sse.Shuffle(h01, h01, 0xFF); // D + h01 = Sse.Shuffle(h01, h01, 0x00); // A - Vector256 x01 = Avx.SetHighLow(h01, h01); - Vector256 x11 = Avx.SetHighLow(h11, h11); - Vector256 x21 = Avx.SetHighLow(h21, h21); - Vector256 x31 = Avx.SetHighLow(h31, h31); + Vector256 x01 = Avx.SetHighLow(h01, h01); + Vector256 x11 = Avx.SetHighLow(h11, h11); + Vector256 x21 = Avx.SetHighLow(h21, h21); + Vector256 x31 = Avx.SetHighLow(h31, h31); - int length = crow; - float* pDstCurrent = pdst; + pSrcCurrent += 4; - nuint address = (nuint)(pMatCurrent); - int misalignment = (int)(address % 32); + float* pDstCurrent = pdst; - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - // Handle cases where the data is not 256-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 8 - misalignment; - - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + (misalignment * 8)); - - // We only align pMat since it has significantly more reads. 
- float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); - - Avx.Store(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - if (length > 7) - { - // Handle all the 256-bit blocks that we can now that we have offset to an aligned address - remainder = length % 8; - - while (pDstCurrent + 8 <= pDstEnd) - { - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. - float* pMatTemp = pMatCurrent; - - Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 8-16 elements and the input is not - // 256-bit aligned. 
This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. - remainder = length; - } + while (pDstCurrent < pDstEnd) + { + float* pMatTemp = pMatCurrent; + Contracts.Assert(((nuint)(pMatTemp) % 32) == 0); - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 256-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (8 - remainder); - pDstCurrent -= (8 - remainder); - Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - } + // The JIT will only fold away unaligned loads due to the semantics behind + // the VEX-encoding of the memory operand for `ins xmm, xmm, [mem]`. Since + // modern hardware has unaligned loads that are as fast as aligned loads, + // when it doesn't cross a cache-line/page boundary, we will just assert + // that the alignment is correct and allow for the more-efficient codegen. 
+ Vector256 x02 = Avx.LoadVector256(pMatTemp); + Vector256 x12 = Avx.LoadVector256(pMatTemp += crow); + Vector256 x22 = Avx.LoadVector256(pMatTemp += crow); + Vector256 x32 = Avx.LoadVector256(pMatTemp += crow); - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + x02 = Avx.Multiply(x01, x02); + x02 = MultiplyAdd(x11, x12, x02); + + x22 = Avx.Multiply(x21, x22); + x22 = MultiplyAdd(x31, x32, x22); + + x02 = Avx.Add(x02, x22); + Avx.StoreAligned(pDstCurrent, x02); + + pDstCurrent += 8; + pMatCurrent += 8; } - // We do 4-way unrolling + pMatCurrent += 3 * crow; + while (pSrcCurrent < pSrcEnd) { - Vector128 h01 = Sse.LoadVector128(pSrcCurrent); + h01 = Sse.LoadAlignedVector128(pSrcCurrent); // Replicate each slot of h01 (ABCD) into its own register. - Vector128 h11 = Avx.Permute(h01, 0x55); // B - Vector128 h21 = Avx.Permute(h01, 0xAA); // C - Vector128 h31 = Avx.Permute(h01, 0xFF); // D - h01 = Avx.Permute(h01, 0x00); // A - - Vector256 x01 = Avx.SetHighLow(h01, h01); - Vector256 x11 = Avx.SetHighLow(h11, h11); - Vector256 x21 = Avx.SetHighLow(h21, h21); - Vector256 x31 = Avx.SetHighLow(h31, h31); + h11 = Sse.Shuffle(h01, h01, 0x55); // B + h21 = Sse.Shuffle(h01, h01, 0xAA); // C + h31 = Sse.Shuffle(h01, h01, 0xFF); // D + h01 = Sse.Shuffle(h01, h01, 0x00); // A - int length = crow; - float* pDstCurrent = pdst; + x01 = Avx.SetHighLow(h01, h01); + x11 = Avx.SetHighLow(h11, h11); + x21 = Avx.SetHighLow(h21, h21); + x31 = Avx.SetHighLow(h31, h31); - nuint address = (nuint)(pMatCurrent); - int misalignment = (int)(address % 32); + pDstCurrent = pdst; - if ((misalignment & 3) != 0) + while (pDstCurrent < pDstEnd) { - while (pDstCurrent < pDstEnd) - { - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); + float* 
pMatTemp = pMatCurrent; - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); + Contracts.Assert(((nuint)(pMatTemp) % 32) == 0); + Contracts.Assert(((nuint)(pDstCurrent) % 32) == 0); - x02 = Avx.Add(x02, Avx.LoadVector256(pDstCurrent)); + // The JIT will only fold away unaligned loads due to the semantics behind + // the VEX-encoding of the memory operand for `ins xmm, xmm, [mem]`. Since + // modern hardware has unaligned loads that are as fast as aligned loads, + // when it doesn't cross a cache-line/page boundary, we will just assert + // that the alignment is correct and allow for the more-efficient codegen. + Vector256 x02 = Avx.LoadVector256(pMatTemp); + Vector256 x12 = Avx.LoadVector256(pMatTemp += crow); + Vector256 x22 = Avx.LoadVector256(pMatTemp += crow); + Vector256 x32 = Avx.LoadVector256(pMatTemp += crow); + Vector256 x3 = Avx.LoadVector256(pDstCurrent); - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - // Handle cases where the data is not 256-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 8 - misalignment; - - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + (misalignment * 8)); - - // We only align pMat since it has significantly more reads. 
- float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(leadingMask, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + ((8 - misalignment) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, trailingMask)); - - x02 = Avx.Add(x02, Avx.And(x3, leadingMask)); - - Avx.Store(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - if (length > 7) - { - remainder = length % 8; - while (pDstCurrent + 8 <= pDstEnd) - { - float* pMatTemp = pMatCurrent; - - Vector256 x02 = Avx.Multiply(x01, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = Avx.Multiply(x11, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.Multiply(x21, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.Multiply(x31, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - x02 = Avx.Add(x02, Avx.LoadVector256(pDstCurrent)); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } - } - else - { - remainder = length; - } + x02 = Avx.Multiply(x01, x02); + x02 = MultiplyAdd(x11, x12, x02); - if (remainder != 0) - { - pMatCurrent -= (8 - remainder); - pDstCurrent -= (8 - remainder); - Vector256 trailingMask = Avx.LoadVector256(((float*)(pTrailingAlignmentMask)) + (remainder * 8)); - - float* pMatTemp = pMatCurrent; - Vector256 x02 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp)); - Vector256 x12 = 
Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x22 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - Vector256 x32 = Avx.And(trailingMask, Avx.LoadVector256(pMatTemp += crow)); - - x02 = Avx.Multiply(x01, x02); - x12 = Avx.Multiply(x11, x12); - x22 = Avx.Multiply(x21, x22); - x32 = Avx.Multiply(x31, x32); - - x02 = Avx.Add(x02, x12); - x22 = Avx.Add(x22, x32); - x02 = Avx.Add(x02, x22); - - Vector256 leadingMask = Avx.LoadVector256(((float*)(pLeadingAlignmentMask)) + ((8 - remainder) * 8)); - Vector256 x3 = Avx.LoadVector256(pDstCurrent); - x02 = Avx.Or(x02, Avx.And(x3, leadingMask)); - - x02 = Avx.Add(x02, Avx.And(x3, trailingMask)); - - Avx.Store(pDstCurrent, x02); - pDstCurrent += 8; - pMatCurrent += 8; - } + x22 = Avx.Multiply(x21, x22); + x22 = MultiplyAdd(x31, x32, x22); + + x02 = Avx.Add(x02, x22); + x3 = Avx.Add(x02, x3); + Avx.StoreAligned(pDstCurrent, x3); + + pDstCurrent += 8; + pMatCurrent += 8; } pMatCurrent += 3 * crow; diff --git a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs index 5ecbc62be1..a046bbba98 100644 --- a/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs +++ b/src/Microsoft.ML.CpuMath/CpuMathUtils.netstandard.cs @@ -5,6 +5,7 @@ using Microsoft.ML.Runtime.Internal.CpuMath.Core; using System; using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; namespace Microsoft.ML.Runtime.Internal.CpuMath { @@ -18,57 +19,411 @@ internal static partial class CpuMathUtils public static int GetVectorAlignment() => Vector128Alignment; - public static void MatrixTimesSource(bool transpose, AlignedArray matrix, AlignedArray source, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(transpose, matrix, source, destination, stride); + private static bool Compat(AlignedArray a) + { + Contracts.AssertValue(a); + Contracts.Assert(a.Size > 0); + return a.CbAlign == Vector128Alignment; + } - public static void 
MatrixTimesSource(AlignedArray matrix, ReadOnlySpan rgposSrc, AlignedArray sourceValues, - int posMin, int iposMin, int iposLimit, AlignedArray destination, int stride) => SseUtils.MatTimesSrc(matrix, rgposSrc, sourceValues, posMin, iposMin, iposLimit, destination, stride); + private static unsafe float* Ptr(AlignedArray a, float* p) + { + Contracts.AssertValue(a); + float* q = p + a.GetBase((long)p); + Contracts.Assert(((long)q & (Vector128Alignment - 1)) == 0); + return q; + } - public static void Add(float value, Span destination) => SseUtils.Add(value, destination); + public static void MatrixTimesSource(bool tran, AlignedArray mat, AlignedArray src, AlignedArray dst, int crun) + { + Contracts.Assert(Compat(mat)); + Contracts.Assert(Compat(src)); + Contracts.Assert(Compat(dst)); + Contracts.Assert(mat.Size == dst.Size * src.Size); - public static void Scale(float value, Span destination) => SseUtils.Scale(value, destination); + unsafe + { + fixed (float* pmat = &mat.Items[0]) + fixed (float* psrc = &src.Items[0]) + fixed (float* pdst = &dst.Items[0]) + { + if (!tran) + { + Contracts.Assert(0 <= crun && crun <= dst.Size); + Thunk.MatMul(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), crun, src.Size); + } + else + { + Contracts.Assert(0 <= crun && crun <= src.Size); + Thunk.MatMulTran(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), dst.Size, crun); + } + } + } + } - public static void Scale(float value, ReadOnlySpan source, Span destination, int count) => SseUtils.Scale(value, source, destination, count); + public static void MatrixTimesSource(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray srcValues, + int posMin, int iposMin, int iposLim, AlignedArray dst, int crun) + { + Contracts.Assert(Compat(mat)); + Contracts.Assert(Compat(srcValues)); + Contracts.Assert(Compat(dst)); + Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); + Contracts.Assert(mat.Size == dst.Size * srcValues.Size); - public static void ScaleAdd(float 
value, float addend, Span destination) => SseUtils.ScaleAdd(value, addend, destination); + if (iposMin >= iposLim) + { + dst.ZeroItems(); + return; + } + Contracts.AssertNonEmpty(rgposSrc); + unsafe + { + fixed (float* pdst = &dst.Items[0]) + fixed (float* pmat = &mat.Items[0]) + fixed (float* psrc = &srcValues.Items[0]) + fixed (int* ppossrc = &rgposSrc[0]) + { + Contracts.Assert(0 <= crun && crun <= dst.Size); + Thunk.MatMulP(Ptr(mat, pmat), ppossrc, Ptr(srcValues, psrc), posMin, iposMin, iposLim, Ptr(dst, pdst), crun, srcValues.Size); + } + } + } - public static void AddScale(float value, ReadOnlySpan source, Span destination, int count) => SseUtils.AddScale(value, source, destination, count); + // dst += a + public static void Add(float a, Span dst) + { + Contracts.AssertNonEmpty(dst); - public static void AddScale(float value, ReadOnlySpan source, ReadOnlySpan indices, Span destination, int count) => SseUtils.AddScale(value, source, indices, destination, count); + unsafe + { + fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + Thunk.AddScalarU(a, pdst, dst.Length); + } + } - public static void AddScaleCopy(float value, ReadOnlySpan source, ReadOnlySpan destination, Span res, int count) => SseUtils.AddScaleCopy(value, source, destination, res, count); + public static void Scale(float a, Span dst) + { + Contracts.AssertNonEmpty(dst); - public static void Add(ReadOnlySpan source, Span destination, int count) => SseUtils.Add(source, destination, count); + unsafe + { + fixed (float* pd = &MemoryMarshal.GetReference(dst)) + Thunk.Scale(a, pd, dst.Length); + } + } - public static void Add(ReadOnlySpan source, ReadOnlySpan indices, Span destination, int count) => SseUtils.Add(source, indices, destination, count); + // dst = a * src + public static void Scale(float a, ReadOnlySpan src, Span dst, int count) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(0 < count && count <= src.Length); + Contracts.AssertNonEmpty(dst); + Contracts.Assert(count <= 
dst.Length); - public static void MulElementWise(ReadOnlySpan left, ReadOnlySpan right, Span destination, int count) => SseUtils.MulElementWise(left, right, destination, count); + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + { + Thunk.ScaleSrcU(a, psrc, pdst, count); + } + } + } - public static float Sum(ReadOnlySpan source) => SseUtils.Sum(source); + // dst[i] = a * (dst[i] + b) + public static void ScaleAdd(float a, float b, Span dst) + { + Contracts.AssertNonEmpty(dst); - public static float SumSq(ReadOnlySpan source) => SseUtils.SumSq(source); + unsafe + { + fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + Thunk.ScaleAddU(a, b, pdst, dst.Length); + } + } - public static float SumSq(float mean, ReadOnlySpan source) => SseUtils.SumSq(mean, source); + public static void AddScale(float a, ReadOnlySpan src, Span dst, int count) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(0 < count && count <= src.Length); + Contracts.AssertNonEmpty(dst); + Contracts.Assert(count <= dst.Length); - public static float SumAbs(ReadOnlySpan source) => SseUtils.SumAbs(source); + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + Thunk.AddScaleU(a, psrc, pdst, count); + } + } - public static float SumAbs(float mean, ReadOnlySpan source) => SseUtils.SumAbs(mean, source); + public static void AddScale(float a, ReadOnlySpan src, ReadOnlySpan indices, Span dst, int count) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(0 < count && count <= src.Length); + Contracts.AssertNonEmpty(indices); + Contracts.Assert(count <= indices.Length); + Contracts.AssertNonEmpty(dst); + Contracts.Assert(count < dst.Length); - public static float MaxAbs(ReadOnlySpan source) => SseUtils.MaxAbs(source); + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + fixed (int* pi = &MemoryMarshal.GetReference(indices)) + 
fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + Thunk.AddScaleSU(a, psrc, pi, pdst, count); + } + } - public static float MaxAbsDiff(float mean, ReadOnlySpan source) => SseUtils.MaxAbsDiff(mean, source); + public static void AddScaleCopy(float a, ReadOnlySpan src, ReadOnlySpan dst, Span res, int count) + { + Contracts.AssertNonEmpty(dst); + Contracts.Assert(0 < count && count <= dst.Length); + Contracts.AssertNonEmpty(src); + Contracts.Assert(count <= src.Length); + Contracts.AssertNonEmpty(res); + Contracts.Assert(count <= res.Length); - public static float DotProductDense(ReadOnlySpan left, ReadOnlySpan right, int count) => SseUtils.DotProductDense(left, right, count); + unsafe + { + fixed (float* pdst = &MemoryMarshal.GetReference(dst)) + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + fixed (float* pres = &MemoryMarshal.GetReference(res)) + Thunk.AddScaleCopyU(a, psrc, pdst, pres, count); + } + } - public static float DotProductSparse(ReadOnlySpan left, ReadOnlySpan right, ReadOnlySpan indices, int count) => SseUtils.DotProductSparse(left, right, indices, count); + public static void Add(ReadOnlySpan src, Span dst, int count) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(0 < count && count <= src.Length); + Contracts.AssertNonEmpty(dst); + Contracts.Assert(count <= dst.Length); - public static float L2DistSquared(ReadOnlySpan left, ReadOnlySpan right, int count) => SseUtils.L2DistSquared(left, right, count); + unsafe + { + fixed (float* ps = &MemoryMarshal.GetReference(src)) + fixed (float* pd = &MemoryMarshal.GetReference(dst)) + Thunk.AddU(ps, pd, count); + } + } - public static void ZeroMatrixItems(AlignedArray destination, int ccol, int cfltRow, int[] indices) => SseUtils.ZeroMatrixItems(destination, ccol, cfltRow, indices); + public static void Add(ReadOnlySpan src, ReadOnlySpan indices, Span dst, int count) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(0 < count && count <= src.Length); + 
Contracts.AssertNonEmpty(indices); + Contracts.Assert(count <= indices.Length); + Contracts.AssertNonEmpty(dst); + Contracts.Assert(count < dst.Length); - public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan source, float threshold, Span v, Span w) - => SseUtils.SdcaL1UpdateDense(primalUpdate, count, source, threshold, v, w); + unsafe + { + fixed (float* ps = &MemoryMarshal.GetReference(src)) + fixed (int* pi = &MemoryMarshal.GetReference(indices)) + fixed (float* pd = &MemoryMarshal.GetReference(dst)) + Thunk.AddSU(ps, pi, pd, count); + } + } + + public static void MulElementWise(ReadOnlySpan src1, ReadOnlySpan src2, Span dst, int count) + { + Contracts.AssertNonEmpty(src1); + Contracts.Assert(0 < count && count <= src1.Length); + Contracts.AssertNonEmpty(src2); + Contracts.Assert(0 < count && count <= src2.Length); + Contracts.AssertNonEmpty(dst); + unsafe + { + fixed (float* ps1 = &MemoryMarshal.GetReference(src1)) + fixed (float* ps2 = &MemoryMarshal.GetReference(src2)) + fixed (float* pd = &MemoryMarshal.GetReference(dst)) + Thunk.MulElementWiseU(ps1, ps2, pd, count); + } + } + + public static float Sum(ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return Thunk.Sum(psrc, src.Length); + } + } + + public static float SumSq(ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return Thunk.SumSqU(psrc, src.Length); + } + } + + public static float SumSq(float mean, ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return (mean == 0 ? 
Thunk.SumSqU(psrc, src.Length) : Thunk.SumSqDiffU(mean, psrc, src.Length)); + } + } + + public static float SumAbs(ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return Thunk.SumAbsU(psrc, src.Length); + } + } + + public static float SumAbs(float mean, ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return (mean == 0 ? Thunk.SumAbsU(psrc, src.Length) : Thunk.SumAbsDiffU(mean, psrc, src.Length)); + } + } + + public static float MaxAbs(ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return Thunk.MaxAbsU(psrc, src.Length); + } + } + + public static float MaxAbsDiff(float mean, ReadOnlySpan src) + { + Contracts.AssertNonEmpty(src); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + return Thunk.MaxAbsDiffU(mean, psrc, src.Length); + } + } + + public static float DotProductDense(ReadOnlySpan a, ReadOnlySpan b, int count) + { + Contracts.AssertNonEmpty(a); + Contracts.AssertNonEmpty(b); + Contracts.Assert(0 < count); + Contracts.Assert(a.Length >= count); + Contracts.Assert(b.Length >= count); + + unsafe + { + fixed (float* pa = &MemoryMarshal.GetReference(a)) + fixed (float* pb = &MemoryMarshal.GetReference(b)) + return Thunk.DotU(pa, pb, count); + } + } + + public static float DotProductSparse(ReadOnlySpan a, ReadOnlySpan b, ReadOnlySpan indices, int count) + { + Contracts.AssertNonEmpty(a); + Contracts.AssertNonEmpty(b); + Contracts.Assert(0 < count); + Contracts.Assert(count < a.Length); + Contracts.Assert(count <= b.Length); + Contracts.Assert(count <= indices.Length); + + unsafe + { + fixed (float* pa = &MemoryMarshal.GetReference(a)) + fixed (float* pb = &MemoryMarshal.GetReference(b)) + fixed (int* pi = &MemoryMarshal.GetReference(indices)) + return Thunk.DotSU(pa, pb, pi, count); + } + } + + public 
static float L2DistSquared(ReadOnlySpan a, ReadOnlySpan b, int count) + { + Contracts.AssertNonEmpty(a); + Contracts.AssertNonEmpty(b); + Contracts.Assert(0 < count && count <= a.Length); + Contracts.Assert(count <= b.Length); + + unsafe + { + fixed (float* pa = &MemoryMarshal.GetReference(a)) + fixed (float* pb = &MemoryMarshal.GetReference(b)) + return Thunk.Dist2(pa, pb, count); + } + } + + public static void ZeroMatrixItems(AlignedArray dst, int ccol, int cfltRow, int[] indices) + { + Contracts.Assert(0 < ccol && ccol <= cfltRow); + + unsafe + { + fixed (float* pdst = &dst.Items[0]) + fixed (int* pi = &indices[0]) + { + if (ccol == cfltRow) + Thunk.ZeroItemsU(Ptr(dst, pdst), dst.Size, pi, indices.Length); + else + Thunk.ZeroMatrixItemsCore(Ptr(dst, pdst), dst.Size, ccol, cfltRow, pi, indices.Length); + } + } + } + + public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan src, float threshold, Span v, Span w) + { + Contracts.AssertNonEmpty(src); + Contracts.Assert(count <= src.Length); + Contracts.AssertNonEmpty(v); + Contracts.Assert(count <= v.Length); + Contracts.AssertNonEmpty(w); + Contracts.Assert(count <= w.Length); + Contracts.Assert(count > 0); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(src)) + fixed (float* pd1 = &MemoryMarshal.GetReference(v)) + fixed (float* pd2 = &MemoryMarshal.GetReference(w)) + Thunk.SdcaL1UpdateU(primalUpdate, psrc, threshold, pd1, pd2, count); + } + } public static void SdcaL1UpdateSparse(float primalUpdate, int count, ReadOnlySpan source, ReadOnlySpan indices, float threshold, Span v, Span w) - => SseUtils.SdcaL1UpdateSparse(primalUpdate, count, source, indices, threshold, v, w); + { + Contracts.AssertNonEmpty(source); + Contracts.Assert(count <= source.Length); + Contracts.AssertNonEmpty(indices); + Contracts.Assert(count <= indices.Length); + Contracts.AssertNonEmpty(v); + Contracts.Assert(count <= v.Length); + Contracts.AssertNonEmpty(w); + Contracts.Assert(count <= w.Length); 
+ Contracts.Assert(count > 0); + + unsafe + { + fixed (float* psrc = &MemoryMarshal.GetReference(source)) + fixed (int* pi = &MemoryMarshal.GetReference(indices)) + fixed (float* pd1 = &MemoryMarshal.GetReference(v)) + fixed (float* pd2 = &MemoryMarshal.GetReference(w)) + Thunk.SdcaL1UpdateSU(primalUpdate, psrc, pi, threshold, pd1, pd2, count); + } + } } -} +} \ No newline at end of file diff --git a/src/Microsoft.ML.CpuMath/Sse.cs b/src/Microsoft.ML.CpuMath/Sse.cs deleted file mode 100644 index 8b1c4da70f..0000000000 --- a/src/Microsoft.ML.CpuMath/Sse.cs +++ /dev/null @@ -1,427 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using Microsoft.ML.Runtime.Internal.CpuMath.Core; -using System; -using System.Runtime.InteropServices; - -namespace Microsoft.ML.Runtime.Internal.CpuMath -{ - /// - /// Keep Sse.cs in sync with Avx.cs. When making changes to one, use BeyondCompare or a similar tool - /// to view diffs and propagate appropriate changes to the other. 
- /// - [BestFriend] - internal static class SseUtils - { - public const int CbAlign = 16; - - private static bool Compat(AlignedArray a) - { - Contracts.AssertValue(a); - Contracts.Assert(a.Size > 0); - return a.CbAlign == CbAlign; - } - - private static unsafe float* Ptr(AlignedArray a, float* p) - { - Contracts.AssertValue(a); - float* q = p + a.GetBase((long)p); - Contracts.Assert(((long)q & (CbAlign - 1)) == 0); - return q; - } - - public static void MatTimesSrc(bool tran, AlignedArray mat, AlignedArray src, AlignedArray dst, int crun) - { - Contracts.Assert(Compat(mat)); - Contracts.Assert(Compat(src)); - Contracts.Assert(Compat(dst)); - Contracts.Assert(mat.Size == dst.Size * src.Size); - - unsafe - { - fixed (float* pmat = &mat.Items[0]) - fixed (float* psrc = &src.Items[0]) - fixed (float* pdst = &dst.Items[0]) - { - if (!tran) - { - Contracts.Assert(0 <= crun && crun <= dst.Size); - Thunk.MatMul(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), crun, src.Size); - } - else - { - Contracts.Assert(0 <= crun && crun <= src.Size); - Thunk.MatMulTran(Ptr(mat, pmat), Ptr(src, psrc), Ptr(dst, pdst), dst.Size, crun); - } - } - } - } - - public static void MatTimesSrc(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray srcValues, - int posMin, int iposMin, int iposLim, AlignedArray dst, int crun) - { - Contracts.Assert(Compat(mat)); - Contracts.Assert(Compat(srcValues)); - Contracts.Assert(Compat(dst)); - Contracts.Assert(0 <= iposMin && iposMin <= iposLim && iposLim <= rgposSrc.Length); - Contracts.Assert(mat.Size == dst.Size * srcValues.Size); - - if (iposMin >= iposLim) - { - dst.ZeroItems(); - return; - } - Contracts.AssertNonEmpty(rgposSrc); - unsafe - { - fixed (float* pdst = &dst.Items[0]) - fixed (float* pmat = &mat.Items[0]) - fixed (float* psrc = &srcValues.Items[0]) - fixed (int* ppossrc = &rgposSrc[0]) - { - Contracts.Assert(0 <= crun && crun <= dst.Size); - Thunk.MatMulP(Ptr(mat, pmat), ppossrc, Ptr(srcValues, psrc), posMin, iposMin, iposLim, Ptr(dst, 
pdst), crun, srcValues.Size); - } - } - } - - // dst += a - public static void Add(float a, Span dst) - { - Contracts.AssertNonEmpty(dst); - - unsafe - { - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - Thunk.AddScalarU(a, pdst, dst.Length); - } - } - - public static void Scale(float a, Span dst) - { - Contracts.AssertNonEmpty(dst); - - unsafe - { - fixed (float* pd = &MemoryMarshal.GetReference(dst)) - Thunk.Scale(a, pd, dst.Length); - } - } - - // dst = a * src - public static void Scale(float a, ReadOnlySpan src, Span dst, int count) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(0 < count && count <= src.Length); - Contracts.AssertNonEmpty(dst); - Contracts.Assert(count <= dst.Length); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - { - Thunk.ScaleSrcU(a, psrc, pdst, count); - } - } - } - - // dst[i] = a * (dst[i] + b) - public static void ScaleAdd(float a, float b, Span dst) - { - Contracts.AssertNonEmpty(dst); - - unsafe - { - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - Thunk.ScaleAddU(a, b, pdst, dst.Length); - } - } - - public static void AddScale(float a, ReadOnlySpan src, Span dst, int count) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(0 < count && count <= src.Length); - Contracts.AssertNonEmpty(dst); - Contracts.Assert(count <= dst.Length); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - Thunk.AddScaleU(a, psrc, pdst, count); - } - } - - public static void AddScale(float a, ReadOnlySpan src, ReadOnlySpan indices, Span dst, int count) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(0 < count && count <= src.Length); - Contracts.AssertNonEmpty(indices); - Contracts.Assert(count <= indices.Length); - Contracts.AssertNonEmpty(dst); - Contracts.Assert(count < dst.Length); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - 
fixed (int* pi = &MemoryMarshal.GetReference(indices)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - Thunk.AddScaleSU(a, psrc, pi, pdst, count); - } - } - - public static void AddScaleCopy(float a, ReadOnlySpan src, ReadOnlySpan dst, Span res, int count) - { - Contracts.AssertNonEmpty(dst); - Contracts.Assert(0 < count && count <= dst.Length); - Contracts.AssertNonEmpty(src); - Contracts.Assert(count <= src.Length); - Contracts.AssertNonEmpty(res); - Contracts.Assert(count <= res.Length); - - unsafe - { - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pres = &MemoryMarshal.GetReference(res)) - Thunk.AddScaleCopyU(a, psrc, pdst, pres, count); - } - } - - public static void Add(ReadOnlySpan src, Span dst, int count) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(0 < count && count <= src.Length); - Contracts.AssertNonEmpty(dst); - Contracts.Assert(count <= dst.Length); - - unsafe - { - fixed (float* ps = &MemoryMarshal.GetReference(src)) - fixed (float* pd = &MemoryMarshal.GetReference(dst)) - Thunk.AddU(ps, pd, count); - } - } - - public static void Add(ReadOnlySpan src, ReadOnlySpan indices, Span dst, int count) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(0 < count && count <= src.Length); - Contracts.AssertNonEmpty(indices); - Contracts.Assert(count <= indices.Length); - Contracts.AssertNonEmpty(dst); - Contracts.Assert(count < dst.Length); - - unsafe - { - fixed (float* ps = &MemoryMarshal.GetReference(src)) - fixed (int* pi = &MemoryMarshal.GetReference(indices)) - fixed (float* pd = &MemoryMarshal.GetReference(dst)) - Thunk.AddSU(ps, pi, pd, count); - } - } - - public static void MulElementWise(ReadOnlySpan src1, ReadOnlySpan src2, Span dst, int count) - { - Contracts.AssertNonEmpty(src1); - Contracts.Assert(0 < count && count <= src1.Length); - Contracts.AssertNonEmpty(src2); - Contracts.Assert(0 < count && count <= src2.Length); - 
Contracts.AssertNonEmpty(dst); - unsafe - { - fixed (float* ps1 = &MemoryMarshal.GetReference(src1)) - fixed (float* ps2 = &MemoryMarshal.GetReference(src2)) - fixed (float* pd = &MemoryMarshal.GetReference(dst)) - Thunk.MulElementWiseU(ps1, ps2, pd, count); - } - } - - public static float Sum(ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return Thunk.Sum(psrc, src.Length); - } - } - - public static float SumSq(ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return Thunk.SumSqU(psrc, src.Length); - } - } - - public static float SumSq(float mean, ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return (mean == 0 ? Thunk.SumSqU(psrc, src.Length) : Thunk.SumSqDiffU(mean, psrc, src.Length)); - } - } - - public static float SumAbs(ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return Thunk.SumAbsU(psrc, src.Length); - } - } - - public static float SumAbs(float mean, ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return (mean == 0 ? 
Thunk.SumAbsU(psrc, src.Length) : Thunk.SumAbsDiffU(mean, psrc, src.Length)); - } - } - - public static float MaxAbs(ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return Thunk.MaxAbsU(psrc, src.Length); - } - } - - public static float MaxAbsDiff(float mean, ReadOnlySpan src) - { - Contracts.AssertNonEmpty(src); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - return Thunk.MaxAbsDiffU(mean, psrc, src.Length); - } - } - - public static float DotProductDense(ReadOnlySpan a, ReadOnlySpan b, int count) - { - Contracts.AssertNonEmpty(a); - Contracts.AssertNonEmpty(b); - Contracts.Assert(0 < count); - Contracts.Assert(a.Length >= count); - Contracts.Assert(b.Length >= count); - - unsafe - { - fixed (float* pa = &MemoryMarshal.GetReference(a)) - fixed (float* pb = &MemoryMarshal.GetReference(b)) - return Thunk.DotU(pa, pb, count); - } - } - - public static float DotProductSparse(ReadOnlySpan a, ReadOnlySpan b, ReadOnlySpan indices, int count) - { - Contracts.AssertNonEmpty(a); - Contracts.AssertNonEmpty(b); - Contracts.Assert(0 < count); - Contracts.Assert(count < a.Length); - Contracts.Assert(count <= b.Length); - Contracts.Assert(count <= indices.Length); - - unsafe - { - fixed (float* pa = &MemoryMarshal.GetReference(a)) - fixed (float* pb = &MemoryMarshal.GetReference(b)) - fixed (int* pi = &MemoryMarshal.GetReference(indices)) - return Thunk.DotSU(pa, pb, pi, count); - } - } - - public static float L2DistSquared(ReadOnlySpan a, ReadOnlySpan b, int count) - { - Contracts.AssertNonEmpty(a); - Contracts.AssertNonEmpty(b); - Contracts.Assert(0 < count && count <= a.Length); - Contracts.Assert(count <= b.Length); - - unsafe - { - fixed (float* pa = &MemoryMarshal.GetReference(a)) - fixed (float* pb = &MemoryMarshal.GetReference(b)) - return Thunk.Dist2(pa, pb, count); - } - } - - public static void ZeroMatrixItems(AlignedArray dst, int ccol, int cfltRow, int[] 
indices) - { - Contracts.Assert(0 < ccol && ccol <= cfltRow); - - unsafe - { - fixed (float* pdst = &dst.Items[0]) - fixed (int* pi = &indices[0]) - { - if (ccol == cfltRow) - Thunk.ZeroItemsU(Ptr(dst, pdst), dst.Size, pi, indices.Length); - else - Thunk.ZeroMatrixItemsCore(Ptr(dst, pdst), dst.Size, ccol, cfltRow, pi, indices.Length); - } - } - } - - public static void SdcaL1UpdateDense(float primalUpdate, int count, ReadOnlySpan src, float threshold, Span v, Span w) - { - Contracts.AssertNonEmpty(src); - Contracts.Assert(count <= src.Length); - Contracts.AssertNonEmpty(v); - Contracts.Assert(count <= v.Length); - Contracts.AssertNonEmpty(w); - Contracts.Assert(count <= w.Length); - Contracts.Assert(count > 0); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pd1 = &MemoryMarshal.GetReference(v)) - fixed (float* pd2 = &MemoryMarshal.GetReference(w)) - Thunk.SdcaL1UpdateU(primalUpdate, psrc, threshold, pd1, pd2, count); - } - } - - public static void SdcaL1UpdateSparse(float primalUpdate, int count, ReadOnlySpan source, ReadOnlySpan indices, float threshold, Span v, Span w) - { - Contracts.AssertNonEmpty(source); - Contracts.Assert(count <= source.Length); - Contracts.AssertNonEmpty(indices); - Contracts.Assert(count <= indices.Length); - Contracts.AssertNonEmpty(v); - Contracts.Assert(count <= v.Length); - Contracts.AssertNonEmpty(w); - Contracts.Assert(count <= w.Length); - Contracts.Assert(count > 0); - - unsafe - { - fixed (float* psrc = &MemoryMarshal.GetReference(source)) - fixed (int* pi = &MemoryMarshal.GetReference(indices)) - fixed (float* pd1 = &MemoryMarshal.GetReference(v)) - fixed (float* pd2 = &MemoryMarshal.GetReference(w)) - Thunk.SdcaL1UpdateSU(primalUpdate, psrc, pi, threshold, pd1, pd2, count); - } - } - } -} \ No newline at end of file diff --git a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs index 89d6dbce03..44bf8abcaa 100644 --- 
a/src/Microsoft.ML.CpuMath/SseIntrinsics.cs +++ b/src/Microsoft.ML.CpuMath/SseIntrinsics.cs @@ -41,6 +41,26 @@ internal static class SseIntrinsics 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, }; + // The count of bytes in Vector128, corresponding to _cbAlign in AlignedArray + private const int Vector128Alignment = 16; + + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + private static bool HasCompatibleAlignment(AlignedArray alignedArray) + { + Contracts.AssertValue(alignedArray); + Contracts.Assert(alignedArray.Size > 0); + return (alignedArray.CbAlign % Vector128Alignment) == 0; + } + + [MethodImplAttribute(MethodImplOptions.AggressiveInlining)] + private static unsafe float* GetAlignedBase(AlignedArray alignedArray, float* unalignedBase) + { + Contracts.AssertValue(alignedArray); + float* alignedBase = unalignedBase + alignedArray.GetBase((long)unalignedBase); + Contracts.Assert(((long)alignedBase & (Vector128Alignment - 1)) == 0); + return alignedBase; + } + internal static readonly Vector128 AbsMask128 = Sse2.IsSupported ? Sse.StaticCast(Sse2.SetAllVector128(0x7FFFFFFF)) : Sse.SetAllVector128(BitConverter.Int32BitsToSingle(0x7FFFFFFF)); @@ -118,17 +138,18 @@ internal static Vector128 GetNewDst128(in Vector128 xDst1, in Vect // Multiply matrix times vector into vector. 
public static unsafe void MatMul(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) { - MatMul(mat.Items, src.Items, dst.Items, crow, ccol); - } + Contracts.Assert(HasCompatibleAlignment(mat)); + Contracts.Assert(HasCompatibleAlignment(src)); + Contracts.Assert(HasCompatibleAlignment(dst)); - public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) - { - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + float* pSrcEnd = psrc + ccol; float* pDstEnd = pdst + crow; float* pDstCurrent = pdst; @@ -137,128 +158,29 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr while (pDstCurrent < pDstEnd) { Vector128 res0 = Sse.SetZeroVector128(); - Vector128 res1 = Sse.SetZeroVector128(); - Vector128 res2 = Sse.SetZeroVector128(); - Vector128 res3 = Sse.SetZeroVector128(); + Vector128 res1 = res0; + Vector128 res2 = res0; + Vector128 res3 = res0; - int length = ccol; float* pSrcCurrent = psrc; - nuint address = (nuint)(pMatCurrent); - int misalignment = (int)(address % 16); - int remainder = 0; - - if ((misalignment & 3) != 0) + while (pSrcCurrent < pSrcEnd) { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) - { - Vector128 vector = Sse.LoadVector128(pSrcCurrent); + float* pMatTemp = pMatCurrent; - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp)); - 
Vector128 x11 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.Multiply(vector, Sse.LoadVector128(pMatTemp += ccol)); + Vector128 x01 = Sse.LoadAlignedVector128(pMatTemp); + Vector128 x11 = Sse.LoadAlignedVector128(pMatTemp += ccol); + Vector128 x21 = Sse.LoadAlignedVector128(pMatTemp += ccol); + Vector128 x31 = Sse.LoadAlignedVector128(pMatTemp += ccol); + Vector128 x02 = Sse.LoadAlignedVector128(pSrcCurrent); - res0 = Sse.Add(res0, x01); - res1 = Sse.Add(res1, x11); - res2 = Sse.Add(res2, x21); - res3 = Sse.Add(res3, x31); + res0 = Sse.Add(res0, Sse.Multiply(x01, x02)); + res1 = Sse.Add(res1, Sse.Multiply(x11, x02)); + res2 = Sse.Add(res2, Sse.Multiply(x21, x02)); + res3 = Sse.Add(res3, Sse.Multiply(x31, x02)); - pSrcCurrent += 4; - pMatCurrent += 4; - } - } - else - { - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - Vector128 mask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + (misalignment * 4)); - - // We only align pMat since it has significantly more reads. 
- float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); - Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); - - res0 = Sse.Multiply(x01, vector); - res1 = Sse.Multiply(x11, vector); - res2 = Sse.Multiply(x21, vector); - res3 = Sse.Multiply(x31, vector); - - pMatCurrent += misalignment; - pSrcCurrent += misalignment; - length -= misalignment; - } - - if (length > 3) - { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. - while (pSrcCurrent + 4 <= pSrcEnd) - { - Vector128 vector = Sse.LoadVector128(pSrcCurrent); - - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.Multiply(vector, Sse.LoadAlignedVector128(pMatTemp)); - Vector128 x11 = Sse.Multiply(vector, Sse.LoadAlignedVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.Multiply(vector, Sse.LoadAlignedVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.Multiply(vector, Sse.LoadAlignedVector128(pMatTemp += ccol)); - - res0 = Sse.Add(res0, x01); - res1 = Sse.Add(res1, x11); - res2 = Sse.Add(res2, x21); - res3 = Sse.Add(res3, x31); - - pSrcCurrent += 4; - pMatCurrent += 4; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 4-8 elements and the input is not - // 128-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); - - Vector128 mask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - - float* pMatTemp = pMatCurrent; - Vector128 x01 = Sse.And(mask, Sse.LoadVector128(pMatTemp)); - Vector128 x11 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x21 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 x31 = Sse.And(mask, Sse.LoadVector128(pMatTemp += ccol)); - Vector128 vector = Sse.And(mask, Sse.LoadVector128(pSrcCurrent)); - - res0 = Sse.Add(res0, Sse.Multiply(x01, vector)); - res1 = Sse.Add(res1, Sse.Multiply(x11, vector)); - res2 = Sse.Add(res2, Sse.Multiply(x21, vector)); - res3 = Sse.Add(res3, Sse.Multiply(x31, vector)); - - pMatCurrent += 4; - pSrcCurrent += 4; - } + pSrcCurrent += 4; + pMatCurrent += 4; } // Add up the entries of each, with the 4 results in res0 @@ -266,7 +188,8 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr res2 = Sse3.HorizontalAdd(res2, res3); res0 = Sse3.HorizontalAdd(res0, res2); - Sse.Store(pDstCurrent, res0); + Sse.StoreAligned(pDstCurrent, res0); + pDstCurrent += 4; pMatCurrent += 3 * ccol; } @@ -277,21 +200,23 @@ public static unsafe void MatMul(ReadOnlySpan mat, ReadOnlySpan sr public static unsafe void MatMulP(AlignedArray mat, ReadOnlySpan rgposSrc, AlignedArray src, int posMin, int iposMin, int iposEnd, AlignedArray dst, int crow, int ccol) { - MatMulP(mat.Items, rgposSrc, src.Items, posMin, iposMin, iposEnd, dst.Items, crow, ccol); - } + // REVIEW: For extremely sparse inputs, interchanging the loops would + // likely be more efficient. 
+ Contracts.Assert(HasCompatibleAlignment(mat)); + Contracts.Assert(HasCompatibleAlignment(src)); + Contracts.Assert(HasCompatibleAlignment(dst)); - public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgposSrc, ReadOnlySpan src, - int posMin, int iposMin, int iposEnd, Span dst, int crow, int ccol) - { // REVIEW: For extremely sparse inputs, interchanging the loops would // likely be more efficient. - fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (int* pposSrc = &MemoryMarshal.GetReference(rgposSrc)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) + fixed (int* pposSrc = &rgposSrc[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + int* pposMin = pposSrc + iposMin; int* pposEnd = pposSrc + iposEnd; float* pDstEnd = pdst + crow; @@ -299,120 +224,7 @@ public static unsafe void MatMulP(ReadOnlySpan mat, ReadOnlySpan rgp float* pSrcCurrent = psrc - posMin; float* pDstCurrent = pdst; - nuint address = (nuint)(pDstCurrent); - int misalignment = (int)(address % 16); - - int length = crow; - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - Sse.Store(pDstCurrent, SparseMultiplicationAcrossRow()); - pDstCurrent += 4; - pm0 += 4 * ccol; - } - } - else - { - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - - misalignment >>= 2; - misalignment = 4 
- misalignment; - - Vector128 mask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + (misalignment * 4)); - - float* pm1 = pm0 + ccol; - float* pm2 = pm1 + ccol; - float* pm3 = pm2 + ccol; - Vector128 result = Sse.SetZeroVector128(); - - int* ppos = pposMin; - - while (ppos < pposEnd) - { - int col = *ppos; - Vector128 x1 = Sse.SetVector128(pm3[col], pm2[col], pm1[col], pm0[col]); - - x1 = Sse.And(mask, x1); - Vector128 x2 = Sse.SetAllVector128(pSrcCurrent[col]); - x2 = Sse.Multiply(x2, x1); - result = Sse.Add(result, x2); - ppos++; - } - - Sse.Store(pDstCurrent, result); - pDstCurrent += misalignment; - pm0 += misalignment * ccol; - length -= misalignment; - } - - if (length > 3) - { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. - while (pDstCurrent < pDstEnd) - { - Sse.Store(pDstCurrent, SparseMultiplicationAcrossRow()); - pDstCurrent += 4; - pm0 += 4 * ccol; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 4-8 elements and the input is not - // 128-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - pDstCurrent -= (4 - remainder); - pm0 -= (4 - remainder) * ccol; - - Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - - float* pm1 = pm0 + ccol; - float* pm2 = pm1 + ccol; - float* pm3 = pm2 + ccol; - Vector128 result = Sse.SetZeroVector128(); - - int* ppos = pposMin; - - while (ppos < pposEnd) - { - int col = *ppos; - Vector128 x1 = Sse.SetVector128(pm3[col], pm2[col], pm1[col], pm0[col]); - x1 = Sse.And(x1, trailingMask); - - Vector128 x2 = Sse.SetAllVector128(pSrcCurrent[col]); - x2 = Sse.Multiply(x2, x1); - result = Sse.Add(result, x2); - ppos++; - } - - result = Sse.Add(result, Sse.And(leadingMask, Sse.LoadVector128(pDstCurrent))); - - Sse.Store(pDstCurrent, result); - pDstCurrent += 4; - pm0 += 4 * ccol; - } - } - - Vector128 SparseMultiplicationAcrossRow() + while (pDstCurrent < pDstEnd) { float* pm1 = pm0 + ccol; float* pm2 = pm1 + ccol; @@ -428,310 +240,107 @@ Vector128 SparseMultiplicationAcrossRow() Vector128 x2 = Sse.SetAllVector128(pSrcCurrent[col]); x2 = Sse.Multiply(x2, x1); result = Sse.Add(result, x2); + ppos++; } - return result; + Sse.StoreAligned(pDstCurrent, result); + pDstCurrent += 4; + pm0 += 4 * ccol; } } } public static unsafe void MatMulTran(AlignedArray mat, AlignedArray src, AlignedArray dst, int crow, int ccol) { - MatMulTran(mat.Items, src.Items, dst.Items, crow, ccol); - } + Contracts.Assert(HasCompatibleAlignment(mat)); + Contracts.Assert(HasCompatibleAlignment(src)); + Contracts.Assert(HasCompatibleAlignment(dst)); - public static unsafe void MatMulTran(ReadOnlySpan mat, ReadOnlySpan src, Span dst, int crow, int ccol) - { - 
fixed (float* psrc = &MemoryMarshal.GetReference(src)) - fixed (float* pdst = &MemoryMarshal.GetReference(dst)) - fixed (float* pmat = &MemoryMarshal.GetReference(mat)) - fixed (uint* pLeadingAlignmentMask = &LeadingAlignmentMask[0]) - fixed (uint* pTrailingAlignmentMask = &TrailingAlignmentMask[0]) + fixed (float* pSrcStart = &src.Items[0]) + fixed (float* pDstStart = &dst.Items[0]) + fixed (float* pMatStart = &mat.Items[0]) { + float* psrc = GetAlignedBase(src, pSrcStart); + float* pdst = GetAlignedBase(dst, pDstStart); + float* pmat = GetAlignedBase(mat, pMatStart); + float* pSrcEnd = psrc + ccol; float* pDstEnd = pdst + crow; float* pSrcCurrent = psrc; float* pMatCurrent = pmat; - // The reason behind adding the if condtion instead of boolean flag - // is to avoid branching in codegen. - if (pSrcCurrent < pSrcEnd) - { - Vector128 x01 = Sse.LoadVector128(pSrcCurrent); - // Replicate each 32-bit slot of x01 (ABCD) into its own register. - Vector128 x11 = Sse.Shuffle(x01, x01, 0x55); // B - Vector128 x21 = Sse.Shuffle(x01, x01, 0xAA); // C - Vector128 x31 = Sse.Shuffle(x01, x01, 0xFF); // D - x01 = Sse.Shuffle(x01, x01, 0x00); // A + Vector128 x01 = Sse.LoadAlignedVector128(pSrcCurrent); + // Replicate each 32-bit slot of x01 (ABCD) into its own register. 
+ Vector128 x11 = Sse.Shuffle(x01, x01, 0x55); // B + Vector128 x21 = Sse.Shuffle(x01, x01, 0xAA); // C + Vector128 x31 = Sse.Shuffle(x01, x01, 0xFF); // D + x01 = Sse.Shuffle(x01, x01, 0x00); // A - int length = crow; - float* pDstCurrent = pdst; + pSrcCurrent += 4; - nuint address = (nuint)(pMatCurrent); - int misalignment = (int)(address % 16); + float* pDstCurrent = pdst; - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + (misalignment * 4)); - - // We only align pMat since it has significantly more reads. 
- float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); - - Sse.Store(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - if (length > 3) - { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - while (pDstCurrent + 4 <= pDstEnd) - { - // If we aren't using the VEX-encoding, the JIT will only fold away aligned loads - // (due to semantics of the legacy encoding). - // We don't need an assert, since the instruction will throw for unaligned inputs. - float* pMatTemp = pMatCurrent; - - Vector128 x02 = Sse.Multiply(x01, Sse.LoadAlignedVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadAlignedVector128(pMatTemp += crow)); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 4-8 elements and the input is not - // 128-bit aligned. 
This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. - remainder = length; - } + while (pDstCurrent < pDstEnd) + { + float* pMatTemp = pMatCurrent; + Vector128 x02 = Sse.LoadAlignedVector128(pMatTemp); + Vector128 x12 = Sse.LoadAlignedVector128(pMatTemp += crow); + Vector128 x22 = Sse.LoadAlignedVector128(pMatTemp += crow); + Vector128 x32 = Sse.LoadAlignedVector128(pMatTemp += crow); - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); - - Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); - - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, x32); - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, x32); + x02 = Sse.Add(x02, x22); + + Sse.StoreAligned(pDstCurrent, x02); + + 
pDstCurrent += 4; + pMatCurrent += 4; } - // We do 4-way unrolling + pMatCurrent += 3 * crow; + while (pSrcCurrent < pSrcEnd) { - Vector128 x01 = Sse.LoadVector128(pSrcCurrent); + x01 = Sse.LoadAlignedVector128(pSrcCurrent); // Replicate each 32-bit slot of x01 (ABCD) into its own register. - Vector128 x11 = Sse.Shuffle(x01, x01, 0x55); // B - Vector128 x21 = Sse.Shuffle(x01, x01, 0xAA); // C - Vector128 x31 = Sse.Shuffle(x01, x01, 0xFF); // D + x11 = Sse.Shuffle(x01, x01, 0x55); // B + x21 = Sse.Shuffle(x01, x01, 0xAA); // C + x31 = Sse.Shuffle(x01, x01, 0xFF); // D x01 = Sse.Shuffle(x01, x01, 0x00); // A - int length = crow; - float* pDstCurrent = pdst; - - nuint address = (nuint)(pMatCurrent); - int misalignment = (int)(address % 16); + pDstCurrent = pdst; - if ((misalignment & 3) != 0) + while (pDstCurrent < pDstEnd) { - while (pDstCurrent < pDstEnd) - { - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.Multiply(x01, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadVector128(pMatTemp += crow)); + float* pMatTemp = pMatCurrent; - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); + Vector128 x02 = Sse.LoadAlignedVector128(pMatTemp); + Vector128 x12 = Sse.LoadAlignedVector128(pMatTemp += crow); + Vector128 x22 = Sse.LoadAlignedVector128(pMatTemp += crow); + Vector128 x32 = Sse.LoadAlignedVector128(pMatTemp += crow); + Vector128 x3 = Sse.LoadAlignedVector128(pDstCurrent); - x02 = Sse.Add(x02, Sse.LoadVector128(pDstCurrent)); + x02 = Sse.Multiply(x01, x02); + x12 = Sse.Multiply(x11, x12); + x22 = Sse.Multiply(x21, x22); + x32 = Sse.Multiply(x31, x32); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an 
unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + (misalignment * 4)); - - // We only align pMat since it has significantly more reads. - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(leadingMask, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + ((4 - misalignment) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, trailingMask)); - - x02 = Sse.Add(x02, Sse.And(x3, leadingMask)); - - Sse.Store(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - if (length > 3) - { - remainder = length % 4; - while (pDstCurrent + 4 <= pDstEnd) - { - float* pMatTemp = pMatCurrent; - - Vector128 x02 = Sse.Multiply(x01, Sse.LoadAlignedVector128(pMatTemp)); - Vector128 x12 = Sse.Multiply(x11, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x22 = Sse.Multiply(x21, Sse.LoadAlignedVector128(pMatTemp += crow)); - Vector128 x32 = Sse.Multiply(x31, Sse.LoadAlignedVector128(pMatTemp += crow)); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - x02 = Sse.Add(x02, Sse.LoadVector128(pDstCurrent)); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - remainder = length; - } + x02 = Sse.Add(x02, x12); + x22 = Sse.Add(x22, 
x32); + x02 = Sse.Add(x02, x22); + x3 = Sse.Add(x02, x3); - if (remainder != 0) - { - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); - Vector128 trailingMask = Sse.LoadVector128(((float*)(pTrailingAlignmentMask)) + (remainder * 4)); - - float* pMatTemp = pMatCurrent; - Vector128 x02 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp)); - Vector128 x12 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x22 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - Vector128 x32 = Sse.And(trailingMask, Sse.LoadVector128(pMatTemp += crow)); - - x02 = Sse.Multiply(x01, x02); - x12 = Sse.Multiply(x11, x12); - x22 = Sse.Multiply(x21, x22); - x32 = Sse.Multiply(x31, x32); - - x02 = Sse.Add(x02, x12); - x22 = Sse.Add(x22, x32); - x02 = Sse.Add(x02, x22); - - Vector128 leadingMask = Sse.LoadVector128(((float*)(pLeadingAlignmentMask)) + ((4 - remainder) * 4)); - Vector128 x3 = Sse.LoadVector128(pDstCurrent); - x02 = Sse.Or(x02, Sse.And(x3, leadingMask)); - - x02 = Sse.Add(x02, Sse.And(x3, trailingMask)); - Sse.Store(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } + Sse.StoreAligned(pDstCurrent, x3); + + pDstCurrent += 4; + pMatCurrent += 4; } pMatCurrent += 3 * crow; diff --git a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs index a473ccec29..b9534e8133 100644 --- a/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs +++ b/src/Microsoft.ML.TimeSeries/AdaptiveSingularSpectrumSequenceModeler.cs @@ -4,8 +4,6 @@ using System; using System.Collections.Generic; -using System.Globalization; -using System.Linq; using System.Numerics; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; @@ -290,8 +288,8 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, int trainSi _alpha = new Single[windowSize - 1]; _state = new Single[windowSize - 1]; - _x = new CpuAlignedVector(windowSize, 
SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(windowSize, SseUtils.CbAlign); + _x = new CpuAlignedVector(windowSize, CpuMathUtils.GetVectorAlignment()); + _xSmooth = new CpuAlignedVector(windowSize, CpuMathUtils.GetVectorAlignment()); ShouldComputeForecastIntervals = shouldComputeForecastIntervals; _observationNoiseVariance = 0; @@ -345,13 +343,13 @@ private AdaptiveSingularSpectrumSequenceModeler(AdaptiveSingularSpectrumSequence _state = new Single[_windowSize - 1]; Array.Copy(model._state, _state, _windowSize - 1); - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _x = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); + _xSmooth = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); if (model._wTrans != null) { - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _y = new CpuAlignedVector(_rank, CpuMathUtils.GetVectorAlignment()); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, CpuMathUtils.GetVectorAlignment()); _wTrans.CopyFrom(model._wTrans); } } @@ -452,18 +450,18 @@ public AdaptiveSingularSpectrumSequenceModeler(IHostEnvironment env, ModelLoadCo { var tempArray = ctx.Reader.ReadFloatArray(); _host.CheckDecode(Utils.Size(tempArray) == _rank * _windowSize); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, CpuMathUtils.GetVectorAlignment()); int i = 0; _wTrans.CopyFrom(tempArray, ref i); tempArray = ctx.Reader.ReadFloatArray(); i = 0; - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _y = new CpuAlignedVector(_rank, CpuMathUtils.GetVectorAlignment()); _y.CopyFrom(tempArray, ref i); } _buffer = TimeSeriesUtils.DeserializeFixedSizeQueueSingle(ctx.Reader, _host); - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(_windowSize, 
SseUtils.CbAlign); + _x = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); + _xSmooth = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); } public override void Save(ModelSaveContext ctx) @@ -1130,8 +1128,8 @@ internal override void Consume(ref Single input, bool updateModel = false) if (_wTrans == null) { - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _y = new CpuAlignedVector(_rank, CpuMathUtils.GetVectorAlignment()); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, CpuMathUtils.GetVectorAlignment()); Single[] vecs = new Single[_rank * _windowSize]; for (i = 0; i < _rank; ++i) @@ -1311,8 +1309,8 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) _maxRank = _windowSize / 2; _alpha = new Single[_windowSize - 1]; _state = new Single[_windowSize - 1]; - _x = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); - _xSmooth = new CpuAlignedVector(_windowSize, SseUtils.CbAlign); + _x = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); + _xSmooth = new CpuAlignedVector(_windowSize, CpuMathUtils.GetVectorAlignment()); TrainCore(dataArray, originalSeriesLength); return; @@ -1349,10 +1347,10 @@ private void TrainCore(Single[] dataArray, int originalSeriesLength) } // Setting the the y vector - _y = new CpuAlignedVector(_rank, SseUtils.CbAlign); + _y = new CpuAlignedVector(_rank, CpuMathUtils.GetVectorAlignment()); // Setting the weight matrix - _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, SseUtils.CbAlign); + _wTrans = new CpuAlignedMatrixRow(_rank, _windowSize, CpuMathUtils.GetVectorAlignment()); i = 0; _wTrans.CopyFrom(leftSingularVecs, ref i); diff --git a/src/Native/CpuMathNative/Sse.cpp b/src/Native/CpuMathNative/Sse.cpp index 0a2715d4d8..48fa6fb8a4 100644 --- a/src/Native/CpuMathNative/Sse.cpp +++ b/src/Native/CpuMathNative/Sse.cpp @@ -60,133 +60,31 @@ const unsigned int 
TrailingAlignmentMask[16] = // Multiply matrix times vector into vector. EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { - const float * pSrcEnd = psrc + ccol; - const float * pDstEnd = pdst + crow; - float* pDstCurrent = pdst; - const float* pMatCurrent = pmat; - - while (pDstCurrent < pDstEnd) + const float * psLim = psrc + ccol; + const float * pdLim = pdst + crow; + const float * pm = pmat; + for (float * pd = pdst; pd < pdLim; pd += 4, pm += 3 * ccol) { __m128 res0 = _mm_setzero_ps(); __m128 res1 = res0; __m128 res2 = res0; __m128 res3 = res0; - - int length = ccol; - const float* pSrcCurrent = psrc; - - uintptr_t address = (uintptr_t)(pMatCurrent); - uintptr_t misalignment = address % 16; - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pSrcCurrent < pSrcEnd) - { - __m128 vector = _mm_loadu_ps(pSrcCurrent); - - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp)); - __m128 x11 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x21 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x31 = _mm_mul_ps(vector, _mm_loadu_ps(pMatTemp += ccol)); - - res0 = _mm_add_ps(res0, x01); - res1 = _mm_add_ps(res1, x11); - res2 = _mm_add_ps(res2, x21); - res3 = _mm_add_ps(res3, x31); - - pSrcCurrent += 4; - pMatCurrent += 4; - } - } - else + for (const float * ps = psrc; ps < psLim; ps += 4, pm += 4) { - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - __m128 mask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + (misalignment * 4)); - - // We only align pMat since it has significantly more reads. 
- const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); - __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); - - res0 = _mm_mul_ps(x01, vector); - res1 = _mm_mul_ps(x11, vector); - res2 = _mm_mul_ps(x21, vector); - res3 = _mm_mul_ps(x31, vector); - - pMatCurrent += misalignment; - pSrcCurrent += misalignment; - length -= misalignment; - } - - if (length > 3) - { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - - while (pSrcCurrent + 4 <= pSrcEnd) - { - __m128 vector = _mm_loadu_ps(pSrcCurrent); - - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_mul_ps(vector, _mm_load_ps(pMatTemp)); - __m128 x11 = _mm_mul_ps(vector, _mm_load_ps(pMatTemp += ccol)); - __m128 x21 = _mm_mul_ps(vector, _mm_load_ps(pMatTemp += ccol)); - __m128 x31 = _mm_mul_ps(vector, _mm_load_ps(pMatTemp += ccol)); - - res0 = _mm_add_ps(res0, x01); - res1 = _mm_add_ps(res1, x11); - res2 = _mm_add_ps(res2, x21); - res3 = _mm_add_ps(res3, x31); - - pSrcCurrent += 4; - pMatCurrent += 4; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 4-8 elements and the input is not - // 128-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (4 - remainder); - pSrcCurrent -= (4 - remainder); - - __m128 mask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - - const float* pMatTemp = pMatCurrent; - __m128 x01 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp)); - __m128 x11 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x21 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 x31 = _mm_and_ps(mask, _mm_loadu_ps(pMatTemp += ccol)); - __m128 vector = _mm_and_ps(mask, _mm_loadu_ps(pSrcCurrent)); - - res0 = _mm_add_ps(res0, _mm_mul_ps(x01, vector)); - res1 = _mm_add_ps(res1, _mm_mul_ps(x11, vector)); - res2 = _mm_add_ps(res2, _mm_mul_ps(x21, vector)); - res3 = _mm_add_ps(res3, _mm_mul_ps(x31, vector)); - - pMatCurrent += 4; - pSrcCurrent += 4; - } + const float * pmTmp; + __m128 x01 = _mm_load_ps(pmTmp = pm); + __m128 x11 = _mm_load_ps(pmTmp += ccol); + __m128 x21 = _mm_load_ps(pmTmp += ccol); + __m128 x31 = _mm_load_ps(pmTmp += ccol); + __m128 x02 = _mm_load_ps(ps); + x01 = _mm_mul_ps(x01, x02); + x11 = _mm_mul_ps(x11, x02); + x21 = _mm_mul_ps(x21, x02); + x31 = _mm_mul_ps(x31, x02); + res0 = _mm_add_ps(res0, x01); + res1 = _mm_add_ps(res1, x11); + res2 = _mm_add_ps(res2, x21); + res3 = _mm_add_ps(res3, x31); } // Add up the entries of each, with the 4 results in res0 @@ -194,10 +92,7 @@ EXPORT_API(void) MatMul(_In_ const float * pmat, _In_ const float * psrc, _Inout res2 = _mm_hadd_ps(res2, res3); res0 = _mm_hadd_ps(res0, res2); - _mm_storeu_ps(pDstCurrent, res0); - - pDstCurrent += 4; - pMatCurrent += 3 * ccol; + _mm_store_ps(pd, res0); } } @@ -208,443 +103,90 @@ EXPORT_API(void) MatMulP(_In_ const float * pmat, _In_ const int * pposSrc, _In_ // REVIEW: For extremely sparse inputs, interchanging 
the loops would // likely be more efficient. const int * pposMin = pposSrc + iposMin; - const int * pposEnd = pposSrc + iposLim; - const float * pDstEnd = pdst + crow; + const int * pposLim = pposSrc + iposLim; + const float * pdLim = pdst + crow; const float * pm0 = pmat - posMin; - const float * pSrcCurrent = psrc - posMin; - float* pDstCurrent = pdst; - - uintptr_t address = (uintptr_t)(pDstCurrent); - uintptr_t misalignment = address % 16; - int length = crow; - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - const float* pm1 = pm0 + ccol; - const float* pm2 = pm1 + ccol; - const float* pm3 = pm2 + ccol; - - __m128 res = _mm_setzero_ps(); - const int* ppos = pposMin; - - while (ppos < pposEnd) - { - int col = *ppos; - __m128 x1 = _mm_setr_ps(pm0[col], pm1[col], pm2[col], pm3[col]); - __m128 x2 = _mm_set1_ps(pSrcCurrent[col]); - x2 = _mm_mul_ps(x2, x1); - res = _mm_add_ps(res, x2); - ppos++; - } - - _mm_storeu_ps(pDstCurrent, res); - pDstCurrent += 4; - pm0 += 4 * ccol; - } - } - else - { - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - __m128 mask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + (misalignment * 4)); - - const float* pm1 = pm0 + ccol; - const float* pm2 = pm1 + ccol; - const float* pm3 = pm2 + ccol; - - __m128 res = _mm_setzero_ps(); - const int* ppos = pposMin; - - while (ppos < pposEnd) - { - int col = *ppos; - __m128 x1 = _mm_setr_ps(pm0[col], pm1[col], pm2[col], pm3[col]); - x1 = _mm_and_ps(mask, x1); - - __m128 x2 = _mm_set1_ps(pSrcCurrent[col]); - x2 = _mm_mul_ps(x2, x1); - res = _mm_add_ps(res, x2); - ppos++; - } - - _mm_storeu_ps(pDstCurrent, res); - pDstCurrent += misalignment; - pm0 += 
misalignment * ccol; - length -= misalignment; - } - - if (length > 3) + const float * ps = psrc - posMin; + for (float * pd = pdst; pd < pdLim; pd += 4, pm0 += 4 * ccol) + { + const float * pm1 = pm0 + ccol; + const float * pm2 = pm1 + ccol; + const float * pm3 = pm2 + ccol; + __m128 res = _mm_setzero_ps(); + for (const int * ppos = pposMin; ppos < pposLim; ppos++) { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - while (pDstCurrent < pDstEnd) - { - const float* pm1 = pm0 + ccol; - const float* pm2 = pm1 + ccol; - const float* pm3 = pm2 + ccol; - - const int* ppos = pposMin; - __m128 res = _mm_setzero_ps(); - - while (ppos < pposEnd) - { - int col = *ppos; - __m128 x1 = _mm_setr_ps(pm0[col], pm1[col], pm2[col], pm3[col]); - __m128 x2 = _mm_set1_ps(pSrcCurrent[col]); - x2 = _mm_mul_ps(x2, x1); - res = _mm_add_ps(res, x2); - ppos++; - } - - _mm_store_ps(pDstCurrent, res); - pDstCurrent += 4; - pm0 += 4 * ccol; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 4-8 elements and the input is not - // 128-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; + int col = *ppos; + __m128 x1 = _mm_setr_ps(pm0[col], pm1[col], pm2[col], pm3[col]); + __m128 x2 = _mm_set1_ps(ps[col]); + x2 = _mm_mul_ps(x2, x1); + res = _mm_add_ps(res, x2); } - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pDstCurrent -= (4 - remainder); - pm0 -= (4 - remainder) * ccol; - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - - const float* pm1 = pm0 + ccol; - const float* pm2 = pm1 + ccol; - const float* pm3 = pm2 + ccol; - - const int* ppos = pposMin; - __m128 res = _mm_setzero_ps(); - - while (ppos < pposEnd) - { - int col = *ppos; - __m128 x1 = _mm_setr_ps(pm0[col], pm1[col], pm2[col], pm3[col]); - x1 = _mm_and_ps(x1, trailingMask); - - __m128 x2 = _mm_set1_ps(pSrcCurrent[col]); - x2 = _mm_mul_ps(x2, x1); - res = _mm_add_ps(res, x2); - ppos++; - } - - res = _mm_add_ps(res, _mm_and_ps(leadingMask, _mm_loadu_ps(pDstCurrent))); - _mm_storeu_ps(pDstCurrent, res); - pDstCurrent += 4; - pm0 += 4 * ccol; - } + _mm_store_ps(pd, res); } } EXPORT_API(void) MatMulTran(_In_ const float * pmat, _In_ const float * psrc, _Inout_ float * pdst, int crow, int ccol) { - const float * pSrcEnd = psrc + ccol; - const float * pDstEnd = pdst + crow; - - const float* pMatCurrent = pmat; - const float* pSrcCurrent = psrc; - - if (pSrcCurrent < pSrcEnd) - { - __m128 x01 = _mm_loadu_ps(pSrcCurrent); - // Replicate each slot of x01 into its own register. 
- __m128 x11 = _mm_shuffle_ps(x01, x01, 0x55); - __m128 x21 = _mm_shuffle_ps(x01, x01, 0xAA); - __m128 x31 = _mm_shuffle_ps(x01, x01, 0xFF); - x01 = _mm_shuffle_ps(x01, x01, 0x00); - - int length = crow; - float* pDstCurrent = pdst; - - uintptr_t address = (uintptr_t)(pMatCurrent); - uintptr_t misalignment = address % 16; - int remainder = 0; - - if ((misalignment & 3) != 0) - { - // Handles cases where the data is not 32-bit aligned and we can't ever use aligned operations - while (pDstCurrent < pDstEnd) - { - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - _mm_storeu_ps(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - // Handle cases where the data is not 128-bit aligned by doing an unaligned read and then - // masking any elements that will be included in the first aligned read - misalignment >>= 2; - misalignment = 4 - misalignment; - - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + (misalignment * 4)); - - // We only align pMat since it has significantly more reads. 
- const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); - - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - - if (length > 3) - { - // Handle all the 128-bit blocks that we can now that we have offset to an aligned address - remainder = length % 4; - - while (pDstCurrent + 4 <= pDstEnd) - { - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_mul_ps(x01, _mm_load_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_load_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_load_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_load_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - _mm_storeu_ps(pDstCurrent, x02); - - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - // Handle the "worst-case" scenario, which is when we have 8-16 elements and the input is not - // 128-bit aligned. This means we can't do any aligned loads and will just end up doing two - // unaligned loads where we mask the input each time. 
- remainder = length; - } - - if (remainder != 0) - { - // Handle any trailing elements that don't fit into a 128-bit block by moving back so that the next - // unaligned load will read to the end of the array and then mask out any elements already processed - - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); - - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; - } - } - - pMatCurrent += 3 * crow; - pSrcCurrent += 4; - } - - while (pSrcCurrent < pSrcEnd) - { - __m128 x01 = _mm_loadu_ps(pSrcCurrent); + const float * psLim = psrc + ccol; + const float * pdLim = pdst + crow; + const float * pm = pmat; + const float * ps = psrc; + + __m128 x01 = _mm_load_ps(ps); + // Replicate each slot of x01 into its own register. 
+ __m128 x11 = _mm_shuffle_ps(x01, x01, 0x55); + __m128 x21 = _mm_shuffle_ps(x01, x01, 0xAA); + __m128 x31 = _mm_shuffle_ps(x01, x01, 0xFF); + x01 = _mm_shuffle_ps(x01, x01, 0x00); + ps += 4; + for (float * pd = pdst; pd < pdLim; pd += 4, pm += 4) + { + const float * pmTmp; + __m128 x02 = _mm_load_ps(pmTmp = pm); + __m128 x12 = _mm_load_ps(pmTmp += crow); + __m128 x22 = _mm_load_ps(pmTmp += crow); + __m128 x32 = _mm_load_ps(pmTmp += crow); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); + _mm_store_ps(pd, x02); + } + + pm += 3 * crow; + + for (; ps < psLim; ps += 4) + { + __m128 x01 = _mm_load_ps(ps); // Replicate each slot of x01 into its own register. __m128 x11 = _mm_shuffle_ps(x01, x01, 0x55); __m128 x21 = _mm_shuffle_ps(x01, x01, 0xAA); __m128 x31 = _mm_shuffle_ps(x01, x01, 0xFF); x01 = _mm_shuffle_ps(x01, x01, 0x00); - - int length = crow; - float* pDstCurrent = pdst; - - uintptr_t address = (uintptr_t)(pMatCurrent); - uintptr_t misalignment = address % 16; - int remainder = 0; - - if ((misalignment & 3) != 0) + for (float * pd = pdst; pd < pdLim; pd += 4, pm += 4) { - while (pDstCurrent < pDstEnd) - { - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_mul_ps(x01, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - x02 = _mm_add_ps(x02, _mm_loadu_ps(pDstCurrent)); - - _mm_storeu_ps(pDstCurrent, x02); - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - int remainder = 0; - if (misalignment != 0) - { - misalignment >>= 2; - misalignment = 4 - misalignment; - - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + 
(misalignment * 4)); - - // We only align pMat since it has significantly more reads. - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(leadingMask, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + ((4 - misalignment) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, trailingMask)); - x02 = _mm_add_ps(x02, _mm_and_ps(x3, leadingMask)); - - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += misalignment; - pDstCurrent += misalignment; - length -= misalignment; - } - - if (length > 3) - { - remainder = length % 4; - while (pDstCurrent + 4 <= pDstEnd) - { - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_mul_ps(x01, _mm_load_ps(pMatTemp)); - __m128 x12 = _mm_mul_ps(x11, _mm_load_ps(pMatTemp += crow)); - __m128 x22 = _mm_mul_ps(x21, _mm_load_ps(pMatTemp += crow)); - __m128 x32 = _mm_mul_ps(x31, _mm_load_ps(pMatTemp += crow)); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - x02 = _mm_add_ps(x02, _mm_loadu_ps(pDstCurrent)); - - _mm_storeu_ps(pDstCurrent, x02); - - pDstCurrent += 4; - pMatCurrent += 4; - } - } - else - { - remainder = length; - } - - if (remainder != 0) - { - pMatCurrent -= (4 - remainder); - pDstCurrent -= (4 - remainder); - - __m128 trailingMask = _mm_loadu_ps(((float*)(&TrailingAlignmentMask)) + (remainder * 4)); - - const float* pMatTemp = pMatCurrent; - __m128 x02 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp)); - __m128 x12 = _mm_and_ps(trailingMask, 
_mm_loadu_ps(pMatTemp += crow)); - __m128 x22 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - __m128 x32 = _mm_and_ps(trailingMask, _mm_loadu_ps(pMatTemp += crow)); - - x02 = _mm_mul_ps(x01, x02); - x12 = _mm_mul_ps(x11, x12); - x22 = _mm_mul_ps(x21, x22); - x32 = _mm_mul_ps(x31, x32); - - x02 = _mm_add_ps(x02, x12); - x22 = _mm_add_ps(x22, x32); - x02 = _mm_add_ps(x02, x22); - - __m128 leadingMask = _mm_loadu_ps(((float*)(&LeadingAlignmentMask)) + ((4 - remainder) * 4)); - __m128 x3 = _mm_loadu_ps(pDstCurrent); - x02 = _mm_or_ps(x02, _mm_and_ps(x3, leadingMask)); - - x02 = _mm_add_ps(x02, _mm_and_ps(x3, trailingMask)); - _mm_storeu_ps(pDstCurrent, x02); - pMatCurrent += 4; - pDstCurrent += 4; - } + const float * pmTmp; + __m128 x02 = _mm_load_ps(pmTmp = pm); + __m128 x12 = _mm_load_ps(pmTmp += crow); + __m128 x22 = _mm_load_ps(pmTmp += crow); + __m128 x32 = _mm_load_ps(pmTmp += crow); + __m128 x3 = _mm_load_ps(pd); + x02 = _mm_mul_ps(x01, x02); + x12 = _mm_mul_ps(x11, x12); + x22 = _mm_mul_ps(x21, x22); + x32 = _mm_mul_ps(x31, x32); + x02 = _mm_add_ps(x02, x12); + x22 = _mm_add_ps(x22, x32); + x02 = _mm_add_ps(x02, x22); + x3 = _mm_add_ps(x02, x3); + _mm_store_ps(pd, x3); } - pMatCurrent += 3 * crow; - pSrcCurrent += 4; + pm += 3 * crow; } } diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs index 43efecd89c..50a3b06fbe 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs @@ -11,6 +11,8 @@ namespace Microsoft.ML.CpuMath.PerformanceTests { public class AvxPerformanceTests : PerformanceTests { + protected override int align { get; set; } = 32; + [Benchmark] public void AddScalarU() => AvxIntrinsics.AddScalarU(DefaultScale, new Span(dst, 0, Length)); @@ -112,15 +114,15 @@ public void SdcaL1UpdateSU() [Benchmark] [BenchmarkCategory("Fma")] public void MatMul() - 
=> AvxIntrinsics.MatMul(src, src1, dst, 1000, 1000); - + => AvxIntrinsics.MatMul(testMatrixAligned, testSrcVectorAligned, testDstVectorAligned, matrixLength, matrixLength); + [Benchmark] public void MatMulTran() - => AvxIntrinsics.MatMulTran(src, src1, dst, 1000, 1000); + => AvxIntrinsics.MatMulTran(testMatrixAligned, testSrcVectorAligned, testDstVectorAligned, matrixLength, matrixLength); [Benchmark] [BenchmarkCategory("Fma")] public void MatMulP() - => AvxIntrinsics.MatMulP(src, matrixIdx, src1, 0, 0, MatrixIndexLength, dst, 1000, 1000); + => AvxIntrinsics.MatMulP(testMatrixAligned, matrixIdx, testSrcVectorAligned, 0, 0, MatrixIndexLength, testDstVectorAligned, matrixLength, matrixLength); } } diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs deleted file mode 100644 index 8624ba90e9..0000000000 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/CpuMathNativeUtils.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -// The exported function names need to be unique (can't be disambiguated based on signature), hence -// we introduce suffix letters to indicate the general patterns used. -// * A suffix means aligned and padded for SSE operations. -// * U suffix means unaligned and unpadded. -// * S suffix means sparse (unaligned) vector. -// * P suffix means sparse (unaligned) partial vector - the vector is only part of a larger sparse vector. -// * R suffix means sparse matrix. -// * C suffix means convolution matrix. -// * D suffix means convolution matrix, with implicit source padding. -// * Tran means the matrix is transposed. 
- -using System.Runtime.InteropServices; -using System.Security; - -namespace Microsoft.ML.CpuMath.PerformanceTests -{ - internal static class CpuMathNativeUtils - { - internal const string NativePath = "CpuMathNative"; - - [DllImport(NativePath, EntryPoint = "AddScalarU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float AddScalarU(float a, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "Scale"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void Scale(float a, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "ScaleSrcU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void ScaleSrcU(float a, /*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "ScaleAddU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void ScaleAddU(float a, float b, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "AddScaleU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void AddScaleU(float a, /*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "AddScaleSU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void AddScaleSU(float a, /*_In_ const*/ float* ps, /*_In_ const*/ int* pi, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "AddScaleCopyU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void AddScaleCopyU(float a, /*_In_ const*/ float* ps, /*_In_ const*/ float* pd, /*_Inout_*/ float* pr, int c); - - [DllImport(NativePath, EntryPoint = "AddU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void AddU(/*_In_ const*/ float* ps, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "AddSU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void AddSU(/*_In_ const*/ float* ps, /*_In_ const*/ int* pi, /*_Inout_*/ float* pd, int c); - - 
[DllImport(NativePath, EntryPoint = "MulElementWiseU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void MulElementWiseU(/*_In_ const*/ float* ps1, /*_In_ const*/ float* ps2, /*_Inout_*/ float* pd, int c); - - [DllImport(NativePath, EntryPoint = "Sum"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float Sum(/*const*/ float* pValues, int length); - - [DllImport(NativePath, EntryPoint = "SumSqU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float SumSqU(/*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "SumSqDiffU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float SumSqDiffU(float mean, /*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "SumAbsU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float SumAbsU(/*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "SumAbsDiffU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float SumAbsDiffU(float mean, /*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "MaxAbsU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float MaxAbsU(/*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "MaxAbsDiffU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float MaxAbsDiffU(float mean, /*const*/ float* ps, int c); - - [DllImport(NativePath, EntryPoint = "DotU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float DotU(/*const*/ float* pa, /*const*/ float* pb, int c); - - [DllImport(NativePath, EntryPoint = "DotSU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float DotSU(/*const*/ float* pa, /*const*/ float* pb, /*const*/ int* pi, int c); - - [DllImport(NativePath, EntryPoint = "Dist2"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe float Dist2(/*const*/ float* px, /*const*/ float* py, int c); - - [DllImport(NativePath, EntryPoint = "SdcaL1UpdateU"), 
SuppressUnmanagedCodeSecurity] - internal static extern unsafe void SdcaL1UpdateU(float primalUpdate, /*_In_ const*/ float* ps, float threshold, /*_Inout_*/ float* pd1, /*_Inout_*/ float* pd2, int c); - - [DllImport(NativePath, EntryPoint = "SdcaL1UpdateSU"), SuppressUnmanagedCodeSecurity] - internal static extern unsafe void SdcaL1UpdateSU(float primalUpdate, /*_In_ const*/ float* ps, /*_In_ const*/ int* pi, float threshold, /*_Inout_*/ float* pd1, /*_Inout_*/ float* pd2, int c); - } -} diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/NativePerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/NativePerformanceTests.cs index fe1a3a8386..92c0cc86db 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/NativePerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/NativePerformanceTests.cs @@ -6,58 +6,69 @@ using BenchmarkDotNet.Attributes; using BenchmarkDotNet.Running; using Microsoft.ML.Runtime.Internal.CpuMath; +using Microsoft.ML.Runtime.Internal.CpuMath.Core; namespace Microsoft.ML.CpuMath.PerformanceTests { public class NativePerformanceTests : PerformanceTests { + private const int CbAlign = 16; + + private static unsafe float* Ptr(AlignedArray a, float* p) + { + Contracts.AssertValue(a); + float* q = p + a.GetBase((long)p); + Contracts.Assert(((long)q & (CbAlign - 1)) == 0); + return q; + } + [Benchmark] public unsafe void AddScalarU() { fixed (float* pdst = dst) { - CpuMathNativeUtils.AddScalarU(DefaultScale, pdst, Length); + Thunk.AddScalarU(DefaultScale, pdst, Length); } } - + [Benchmark] public unsafe void Scale() { fixed (float* pdst = dst) { - CpuMathNativeUtils.Scale(DefaultScale, pdst, Length); + Thunk.Scale(DefaultScale, pdst, Length); } } - + [Benchmark] public unsafe void ScaleSrcU() { fixed (float* psrc = src) fixed (float* pdst = dst) { - CpuMathNativeUtils.ScaleSrcU(DefaultScale, psrc, pdst, Length); + Thunk.ScaleSrcU(DefaultScale, psrc, pdst, Length); } } - + [Benchmark] public unsafe void ScaleAddU() { fixed 
(float* pdst = dst) { - CpuMathNativeUtils.ScaleAddU(DefaultScale, DefaultScale, pdst, Length); + Thunk.ScaleAddU(DefaultScale, DefaultScale, pdst, Length); } } - + [Benchmark] public unsafe void AddScaleU() { fixed (float* psrc = src) fixed (float* pdst = dst) { - CpuMathNativeUtils.AddScaleU(DefaultScale, psrc, pdst, Length); + Thunk.AddScaleU(DefaultScale, psrc, pdst, Length); } } - + [Benchmark] public unsafe void AddScaleSU() { @@ -65,10 +76,10 @@ public unsafe void AddScaleSU() fixed (float* pdst = dst) fixed (int* pidx = idx) { - CpuMathNativeUtils.AddScaleSU(DefaultScale, psrc, pidx, pdst, IndexLength); + Thunk.AddScaleSU(DefaultScale, psrc, pidx, pdst, IndexLength); } } - + [Benchmark] public unsafe void AddScaleCopyU() { @@ -76,20 +87,20 @@ public unsafe void AddScaleCopyU() fixed (float* pdst = dst) fixed (float* pres = result) { - CpuMathNativeUtils.AddScaleCopyU(DefaultScale, psrc, pdst, pres, Length); + Thunk.AddScaleCopyU(DefaultScale, psrc, pdst, pres, Length); } } - + [Benchmark] public unsafe void AddU() { fixed (float* psrc = src) fixed (float* pdst = dst) { - CpuMathNativeUtils.AddU(psrc, pdst, Length); + Thunk.AddU(psrc, pdst, Length); } } - + [Benchmark] public unsafe void AddSU() { @@ -97,10 +108,10 @@ public unsafe void AddSU() fixed (float* pdst = dst) fixed (int* pidx = idx) { - CpuMathNativeUtils.AddSU(psrc, pidx, pdst, IndexLength); + Thunk.AddSU(psrc, pidx, pdst, IndexLength); } } - + [Benchmark] public unsafe void MulElementWiseU() { @@ -108,83 +119,83 @@ public unsafe void MulElementWiseU() fixed (float* psrc2 = src2) fixed (float* pdst = dst) { - CpuMathNativeUtils.MulElementWiseU(psrc1, psrc2, pdst, Length); + Thunk.MulElementWiseU(psrc1, psrc2, pdst, Length); } } - + [Benchmark] public unsafe float Sum() { fixed (float* psrc = src) { - return CpuMathNativeUtils.Sum(psrc, Length); + return Thunk.Sum(psrc, Length); } } - + [Benchmark] public unsafe float SumSqU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.SumSqU(psrc, 
Length); + return Thunk.SumSqU(psrc, Length); } } - + [Benchmark] public unsafe float SumSqDiffU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.SumSqDiffU(DefaultScale, psrc, Length); + return Thunk.SumSqDiffU(DefaultScale, psrc, Length); } } - + [Benchmark] public unsafe float SumAbsU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.SumAbsU(psrc, Length); + return Thunk.SumAbsU(psrc, Length); } } - + [Benchmark] public unsafe float SumAbsDiffU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.SumAbsDiffU(DefaultScale, psrc, Length); + return Thunk.SumAbsDiffU(DefaultScale, psrc, Length); } } - + [Benchmark] public unsafe float MaxAbsU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.MaxAbsU(psrc, Length); + return Thunk.MaxAbsU(psrc, Length); } } - + [Benchmark] public unsafe float MaxAbsDiffU() { fixed (float* psrc = src) { - return CpuMathNativeUtils.MaxAbsDiffU(DefaultScale, psrc, Length); + return Thunk.MaxAbsDiffU(DefaultScale, psrc, Length); } } - + [Benchmark] public unsafe float DotU() { fixed (float* psrc = src) fixed (float* pdst = dst) { - return CpuMathNativeUtils.DotU(psrc, pdst, Length); + return Thunk.DotU(psrc, pdst, Length); } } - + [Benchmark] public unsafe float DotSU() { @@ -192,7 +203,7 @@ public unsafe float DotSU() fixed (float* pdst = dst) fixed (int* pidx = idx) { - return CpuMathNativeUtils.DotSU(psrc, pdst, pidx, IndexLength); + return Thunk.DotSU(psrc, pdst, pidx, IndexLength); } } @@ -202,7 +213,7 @@ public unsafe float Dist2() fixed (float* psrc = src) fixed (float* pdst = dst) { - return CpuMathNativeUtils.Dist2(psrc, pdst, Length); + return Thunk.Dist2(psrc, pdst, Length); } } @@ -213,7 +224,7 @@ public unsafe void SdcaL1UpdateU() fixed (float* pdst = dst) fixed (float* pres = result) { - CpuMathNativeUtils.SdcaL1UpdateU(DefaultScale, psrc, DefaultScale, pdst, pres, Length); + Thunk.SdcaL1UpdateU(DefaultScale, psrc, DefaultScale, pdst, pres, Length); } } @@ -225,42 +236,36 @@ public 
unsafe void SdcaL1UpdateSU() fixed (float* pres = result) fixed (int* pidx = idx) { - CpuMathNativeUtils.SdcaL1UpdateSU(DefaultScale, psrc, pidx, DefaultScale, pdst, pres, IndexLength); + Thunk.SdcaL1UpdateSU(DefaultScale, psrc, pidx, DefaultScale, pdst, pres, IndexLength); } } [Benchmark] public unsafe void MatMul() { - fixed (float* psrc = &src[0]) - fixed (float* pdst = &dst[0]) - fixed (float* psrc1 = &src1[0]) - { - Thunk.MatMul(psrc1, psrc, pdst, 1000, 1000); - } + fixed (float* pmat = &testMatrixAligned.Items[0]) + fixed (float* psrc = &testSrcVectorAligned.Items[0]) + fixed (float* pdst = &testDstVectorAligned.Items[0]) + Thunk.MatMul(Ptr(testMatrixAligned, pmat), Ptr(testSrcVectorAligned, psrc), Ptr(testDstVectorAligned, pdst), matrixLength, testSrcVectorAligned.Size); } - + [Benchmark] public unsafe void MatMulTran() { - fixed (float* psrc = &src[0]) - fixed (float* pdst = &dst[0]) - fixed (float* psrc1 = &src1[0]) - { - Thunk.MatMulTran(psrc1, psrc, pdst, 1000, 1000); - } + fixed (float* pmat = &testMatrixAligned.Items[0]) + fixed (float* psrc = &testSrcVectorAligned.Items[0]) + fixed (float* pdst = &testDstVectorAligned.Items[0]) + Thunk.MatMulTran(Ptr(testMatrixAligned, pmat), Ptr(testSrcVectorAligned, psrc), Ptr(testDstVectorAligned, pdst), testDstVectorAligned.Size, matrixLength); } [Benchmark] public unsafe void MatMulP() { - fixed (float* psrc = &src[0]) - fixed (float* pdst = &dst[0]) - fixed (float* psrc1 = &src1[0]) - fixed (int* pidx = &matrixIdx[0]) - { - Thunk.MatMulP(psrc1, pidx, psrc, 0, 0, MatrixIndexLength, pdst, 1000, 1000); - } + fixed (float* pmat = &testMatrixAligned.Items[0]) + fixed (float* psrc = &testSrcVectorAligned.Items[0]) + fixed (float* pdst = &testDstVectorAligned.Items[0]) + fixed (int* ppossrc = &matrixIdx[0]) + Thunk.MatMulP(Ptr(testMatrixAligned, pmat), ppossrc, Ptr(testSrcVectorAligned, psrc), 0, 0, MatrixIndexLength, Ptr(testDstVectorAligned, pdst), matrixLength, testSrcVectorAligned.Size); } } } diff --git 
a/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs index 6726603b5a..d2dcf3cfff 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs @@ -17,10 +17,16 @@ public abstract class PerformanceTests protected const int IndexLength = 1000003; protected const int Length = 1000003; - protected const int MatrixIndexLength = 100; + protected const int MatrixIndexLength = 1000; private const int DefaultSeed = 253421; protected const float DefaultScale = 1.11f; + protected int matrixLength = 1000; + protected virtual int align { get; set; } = 16; + + internal AlignedArray testMatrixAligned; + internal AlignedArray testSrcVectorAligned; + internal AlignedArray testDstVectorAligned; protected float[] src, dst, original, src1, src2, result; protected int[] idx; @@ -93,6 +99,15 @@ public void Setup() { matrixIdx[i] = rand.Next(0, 1000); } + + testMatrixAligned = new AlignedArray(matrixLength * matrixLength, align); + testMatrixAligned.CopyFrom(src.AsSpan(0, (matrixLength - 1) * ( matrixLength - 1))); + + testSrcVectorAligned = new AlignedArray(matrixLength, align); + testSrcVectorAligned.CopyFrom(src1.AsSpan(0, matrixLength - 1)); // odd input + + testDstVectorAligned = new AlignedArray(matrixLength, align); + testDstVectorAligned.CopyFrom(dst.AsSpan(0, matrixLength)); } [GlobalCleanup] diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs index 4499af9ee5..e079e8bd7e 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs @@ -101,14 +101,14 @@ public void SdcaL1UpdateSU() [Benchmark] public void MatMul() - => SseIntrinsics.MatMul(src, src1, dst, 1000, 1000); + => SseIntrinsics.MatMul(testMatrixAligned, testSrcVectorAligned, 
testDstVectorAligned, matrixLength, matrixLength); [Benchmark] public void MatMulTran() - => SseIntrinsics.MatMulTran(src, src1, dst, 1000, 1000); + => SseIntrinsics.MatMulTran(testMatrixAligned, testSrcVectorAligned, testDstVectorAligned, matrixLength, matrixLength); [Benchmark] public void MatMulP() - => SseIntrinsics.MatMulP(src, matrixIdx, src1, 0, 0, MatrixIndexLength, dst, 1000, 1000); + => SseIntrinsics.MatMulP(testMatrixAligned, matrixIdx, testSrcVectorAligned, 0, 0, MatrixIndexLength, testDstVectorAligned, matrixLength, matrixLength); } } diff --git a/test/Microsoft.ML.Tests/Transformers/RffTests.cs b/test/Microsoft.ML.Tests/Transformers/RffTests.cs index f231082757..d647eddbca 100644 --- a/test/Microsoft.ML.Tests/Transformers/RffTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/RffTests.cs @@ -37,7 +37,7 @@ private class TestClassInvalidSchema public int A; } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] public void RffWorkout() { Random rand = new Random(); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs index 39340d225b..f20de461f4 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -158,7 +158,7 @@ public void SavePipePercentileThreshold() Done(); } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // Test is Flaky on netcore 3.0 + [Fact] public void SavePipeMovingAverageUniform() { TestCore(null, true, diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index a7892701d7..3d7cfd5d32 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -41,7 +41,7 @@ public 
TimeSeriesEstimatorTests(ITestOutputHelper output) : base(output) { } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] void TestSsaChangePointEstimator() { int Confidence = 95; @@ -75,7 +75,7 @@ void TestSsaChangePointEstimator() Done(); } - [ConditionalFact(typeof(BaseTestBaseline), nameof(BaseTestBaseline.LessThanNetCore30OrNotNetCore))] // netcore3.0 output differs from Baseline + [Fact] void TestSsaSpikeEstimator() { int Confidence = 95; From 74e3d1c3d02433a4b3e1049470cc3fd33bfb411d Mon Sep 17 00:00:00 2001 From: Senja Filipi Date: Tue, 11 Dec 2018 10:00:56 -0800 Subject: [PATCH 038/100] Add a test for 1259 (#1849) * Add a test for 1259 --- .../DataPipe/TestDataPipeBase.cs | 14 ---------- test/Microsoft.ML.TestFramework/Datasets.cs | 27 +++++++++++++++++-- .../Scenarios/Api/ApiScenariosTests.cs | 21 --------------- .../Api/Estimators/CrossValidation.cs | 2 +- .../Estimators/DecomposableTrainAndPredict.cs | 4 +-- .../Scenarios/Api/Estimators/Evaluation.cs | 2 +- .../Scenarios/Api/Estimators/Extensibility.cs | 4 +-- .../Api/Estimators/FileBasedSavingOfData.cs | 2 +- .../Api/Estimators/IntrospectiveTraining.cs | 2 +- .../Api/Estimators/Metacomponents.cs | 2 +- .../Api/Estimators/MultithreadedPrediction.cs | 2 +- .../Estimators/ReconfigurablePrediction.cs | 2 +- .../Api/Estimators/SimpleTrainAndPredict.cs | 2 +- .../Estimators/TrainSaveModelAndPredict.cs | 2 +- .../Estimators/TrainWithInitialPredictor.cs | 2 +- .../Api/Estimators/TrainWithValidationSet.cs | 2 +- .../Scenarios/Api/Estimators/Visibility.cs | 2 +- .../TrainerEstimators/MetalinearEstimators.cs | 23 +++++++++++++++- 18 files changed, 63 insertions(+), 54 deletions(-) diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 9ff3432ca6..97ec7bcb60 100644 --- 
a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -790,20 +790,6 @@ public class SentimentPrediction public float Score; } - private static TextLoader.Arguments MakeSentimentTextLoaderArgs() - { - return new TextLoader.Arguments() - { - Separator = "tab", - HasHeader = true, - Column = new[] - { - new TextLoader.Column("Label", DataKind.BL, 0), - new TextLoader.Column("SentimentText", DataKind.Text, 1) - } - }; - } - protected bool Failed() { Contracts.Assert(!IsPassing); diff --git a/test/Microsoft.ML.TestFramework/Datasets.cs b/test/Microsoft.ML.TestFramework/Datasets.cs index fa467dc378..5760df712d 100644 --- a/test/Microsoft.ML.TestFramework/Datasets.cs +++ b/test/Microsoft.ML.TestFramework/Datasets.cs @@ -2,6 +2,9 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Runtime.Data; +using System; + namespace Microsoft.ML.Runtime.RunTests { public class TestDataset @@ -19,6 +22,7 @@ public class TestDataset // REVIEW: Remove the three above setting strings once conversion work is complete. 
public string loaderSettings; public string[] mamlExtraSettings; + public Func GetLoaderColumns; public TestDataset Clone() { @@ -175,7 +179,15 @@ public static class TestDatasets { name = "sentiment", trainFilename = "wikipedia-detox-250-line-data.tsv", - testFilename = "wikipedia-detox-250-line-test.tsv" + testFilename = "wikipedia-detox-250-line-test.tsv", + GetLoaderColumns = () => + { + return new[] + { + new TextLoader.Column("Label", DataKind.BL, 0), + new TextLoader.Column("SentimentText", DataKind.Text, 1) + }; + } }; public static TestDataset generatedRegressionDataset = new TestDataset @@ -355,7 +367,18 @@ public static class TestDatasets { name = "iris", trainFilename = @"iris.data", - loaderSettings = "loader=Text{col=Label:TX:4 col=Features:0-3}" + loaderSettings = "loader=Text{col=Label:TX:4 col=Features:0-3}", + GetLoaderColumns = () => + { + return new[] + { + new TextLoader.Column("SepalLength", DataKind.R4, 0), + new TextLoader.Column("SepalWidth", DataKind.R4, 1), + new TextLoader.Column("PetalLength", DataKind.R4, 2), + new TextLoader.Column("PetalWidth",DataKind.R4, 3), + new TextLoader.Column("Label", DataKind.Text, 4) + }; + } }; public static TestDataset irisLabelName = new TestDataset() diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs index 6027be2805..b800fcca62 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs @@ -51,26 +51,5 @@ public class SentimentPrediction public float Score; } - - private static TextLoader.Column[] MakeIrisColumns() - { - return new[] - { - new TextLoader.Column("SepalLength", DataKind.R4, 0), - new TextLoader.Column("SepalWidth", DataKind.R4, 1), - new TextLoader.Column("PetalLength", DataKind.R4, 2), - new TextLoader.Column("PetalWidth",DataKind.R4, 3), - new TextLoader.Column("Label", DataKind.Text, 4) - }; - } - - private static 
TextLoader.Column[] MakeSentimentColumns() - { - return new[] - { - new TextLoader.Column("Label", DataKind.BL, 0), - new TextLoader.Column("SentimentText", DataKind.Text, 1) - }; - } } } diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs index 3c5cabb3dc..b660abcf00 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/CrossValidation.cs @@ -27,7 +27,7 @@ void New_CrossValidation() { var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); + var data = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => { s.ConvergenceTolerance = 1f; s.NumThreads = 1; })); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs index 8694b5b199..0315256930 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs @@ -30,7 +30,7 @@ void New_DecomposableTrainAndPredict() var dataPath = GetDataPath(TestDatasets.irisData.trainFilename); var ml = new MLContext(); - var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') + var data = ml.Data.CreateTextReader(TestDatasets.irisData.GetLoaderColumns(), separatorChar: ',') .Read(dataPath); var pipeline = new ColumnConcatenatingEstimator (ml, "Features", "SepalLength", "SepalWidth", "PetalLength", 
"PetalWidth") @@ -41,7 +41,7 @@ void New_DecomposableTrainAndPredict() var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); var engine = model.MakePredictionFunction(ml); - var testLoader = ml.Data.ReadFromTextFile(dataPath, MakeIrisColumns(), separatorChar: ','); + var testLoader = ml.Data.ReadFromTextFile(dataPath, TestDatasets.irisData.GetLoaderColumns(), hasHeader: true, separatorChar: ','); var testData = testLoader.AsEnumerable(ml, false); foreach (var input in testData.Take(20)) { diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs index b09dbbe82d..eb76f87158 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Evaluation.cs @@ -22,7 +22,7 @@ public void New_Evaluation() var ml = new MLContext(seed: 1, conc: 1); // Pipeline. - var pipeline = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) + var pipeline = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features")) .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs index 392bf95b85..d396c23c80 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs @@ -29,7 +29,7 @@ void New_Extensibility() var dataPath = GetDataPath(TestDatasets.irisData.trainFilename); var ml = new MLContext(); - var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') + var data = ml.Data.CreateTextReader(TestDatasets.irisData.GetLoaderColumns(), separatorChar: ',') .Read(dataPath); Action action = (i, j) => 
@@ -49,7 +49,7 @@ void New_Extensibility() var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); var engine = model.MakePredictionFunction(ml); - var testLoader = ml.Data.ReadFromTextFile(dataPath, MakeIrisColumns(), separatorChar: ','); + var testLoader = ml.Data.ReadFromTextFile(dataPath, TestDatasets.irisData.GetLoaderColumns(), separatorChar: ','); var testData = testLoader.AsEnumerable(ml, false); foreach (var input in testData.Take(20)) { diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs index b85f5e646b..8408bd45c8 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/FileBasedSavingOfData.cs @@ -27,7 +27,7 @@ void New_FileBasedSavingOfData() var ml = new MLContext(seed: 1, conc: 1); var src = new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename)); - var trainData = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) + var trainData = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features")) .Fit(src).Read(src); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs index 6cf2898db4..49c30579e7 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs @@ -34,7 +34,7 @@ public partial class ApiScenariosTests public void New_IntrospectiveTraining() { var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) + var data = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true) 
.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs index 032ef53787..099a8f5484 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Metacomponents.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests public void New_Metacomponents() { var ml = new MLContext(); - var data = ml.Data.CreateTextReader(MakeIrisColumns(), separatorChar: ',') + var data = ml.Data.CreateTextReader(TestDatasets.irisData.GetLoaderColumns(), separatorChar: ',') .Read(GetDataPath(TestDatasets.irisData.trainFilename)); var sdcaTrainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => { s.MaxIterations = 100; s.Shuffle = true; s.NumThreads = 1; }); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs index b5ba493b2d..9e96cb4dcf 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests void New_MultithreadedPrediction() { var ml = new MLContext(seed: 1, conc: 1); - var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); + var reader = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true); var data = reader.Read(new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename))); // Pipeline. 
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs index 52a9266f3a..8d95868a4a 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs @@ -22,7 +22,7 @@ public partial class ApiScenariosTests public void New_ReconfigurablePrediction() { var ml = new MLContext(seed: 1, conc: 1); - var dataReader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); + var dataReader = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true); var data = dataReader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); var testData = dataReader.Read(GetDataPath(TestDatasets.Sentiment.testFilename)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index 22ec24c29a..edff84341e 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -22,7 +22,7 @@ public partial class ApiScenariosTests public void New_SimpleTrainAndPredict() { var ml = new MLContext(seed: 1, conc: 1); - var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); + var reader = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true); var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. 
var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features") diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs index 59bd307dbd..97e21e5836 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests public void New_TrainSaveModelAndPredict() { var ml = new MLContext(seed: 1, conc: 1); - var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); + var reader = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true); var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs index 6c47365926..a471bb7858 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs @@ -22,7 +22,7 @@ public void New_TrainWithInitialPredictor() var ml = new MLContext(seed: 1, conc: 1); - var data = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); + var data = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true).Read(GetDataPath(TestDatasets.Sentiment.trainFilename)); // Pipeline. 
var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features"); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs index bda23779c4..14593d0b85 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs @@ -20,7 +20,7 @@ public void New_TrainWithValidationSet() { var ml = new MLContext(seed: 1, conc: 1); // Pipeline. - var reader = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true); + var reader = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true); var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features"); // Train the pipeline, prepare train and validation set. diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs index ebab7c3a3b..958c26f630 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Visibility.cs @@ -26,7 +26,7 @@ public partial class ApiScenariosTests void New_Visibility() { var ml = new MLContext(seed: 1, conc: 1); - var pipeline = ml.Data.CreateTextReader(MakeSentimentColumns(), hasHeader: true) + var pipeline = ml.Data.CreateTextReader(TestDatasets.Sentiment.GetLoaderColumns(), hasHeader: true) .Append(ml.Transforms.Text.FeaturizeText("SentimentText", "Features", s => s.OutputTokens = true)); var src = new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename)); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs index ecdf43da67..0fb1e167ac 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs +++ 
b/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs @@ -2,10 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Calibration; -using Microsoft.ML.Runtime.Learners; +using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Trainers; using Microsoft.ML.Trainers.Online; +using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Conversions; using Xunit; @@ -65,5 +68,23 @@ public void Pkpd() TestEstimatorCore(pipeline, data); Done(); } + + [Fact] + public void New_MetacomponentsFeaturesRenamed() + { + var data = new TextLoader(Env, TestDatasets.irisData.GetLoaderColumns(), separatorChar: ',') + .Read(GetDataPath(TestDatasets.irisData.trainFilename)); + + var sdcaTrainer = new SdcaBinaryTrainer(Env, "Label", "Vars", advancedSettings: (s) => { s.MaxIterations = 100; s.Shuffle = true; s.NumThreads = 1; }); + var pipeline = new ColumnConcatenatingEstimator(Env, "Vars", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") + .Append(new ValueToKeyMappingEstimator(Env, "Label"), TransformerScope.TrainTest) + .Append(new Ova(Env, sdcaTrainer)) + .Append(new KeyToValueMappingEstimator(Env, "PredictedLabel")); + + var model = pipeline.Fit(data); + + TestEstimatorCore(pipeline, data); + Done(); + } } } From e2e1aa8a2b43aa0b5ea5d8b3851b6a0d175f7916 Mon Sep 17 00:00:00 2001 From: Abhishek Goswami Date: Tue, 11 Dec 2018 11:48:59 -0800 Subject: [PATCH 039/100] Test for Metadata Support In DataView Construction (#1836) * verify test works on all platforms * code review commnets * review comments * review comments * addressed review comments: Assert.Throws + cleanup * addressed review comments: added comment --- .../DataViewConstructionUtils.cs | 11 ++- .../Scenarios/Api/TestApi.cs | 73 +++++++++++++++++++ 2 files changed, 83 insertions(+), 1 deletion(-) diff --git 
a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs index 9c75f7189f..6acc64ecaf 100644 --- a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Api/DataViewConstructionUtils.cs @@ -951,7 +951,16 @@ public override ValueGetter GetGetter() throw Contracts.ExceptNotImpl("Type '{0}' is not yet supported.", typeT.FullName); } - internal override Delegate GetGetterDelegate() => Utils.MarshalInvoke(GetGetter, MetadataType.RawType); + // We want to use MarshalInvoke instead of adding custom Reflection logic for calling GetGetter + private Delegate GetGetterCore() + { + return GetGetter(); + } + + internal override Delegate GetGetterDelegate() + { + return Utils.MarshalInvoke(GetGetterCore, MetadataType.RawType); + } public class TElement { diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs index a907f8dd70..d219fefec3 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs @@ -187,6 +187,79 @@ public void TrainAveragedPerceptronWithCache() Assert.Equal(1, globalCounter); } + [Fact] + public void MetadataSupportInDataViewConstruction() + { + var data = ReadBreastCancerExamples(); + var autoSchema = SchemaDefinition.Create(typeof(BreastCancerExample)); + + var mlContext = new MLContext(0); + + // Create Metadata. 
+ var kindFloat = "Testing float as metadata."; + var valueFloat = 10; + var coltypeFloat = NumberType.Float; + var kindString = "Testing string as metadata."; + var valueString = "Strings have value."; + var kindStringArray = "Testing string array as metadata."; + var valueStringArray = "I really have no idea what these features entail.".Split(' '); + var kindFloatArray = "Testing float array as metadata."; + var valueFloatArray = new float[] { 1, 17, 7, 19, 25, 0 }; + var kindVBuffer = "Testing VBuffer as metadata."; + var valueVBuffer = new VBuffer(4, new float[] { 4, 6, 89, 5 }); + + var metaFloat = new MetadataInfo(kindFloat, valueFloat, coltypeFloat); + var metaString = new MetadataInfo(kindString, valueString); + + // Add Metadata. + var labelColumn = autoSchema[0]; + var labelColumnWithMetadata = new SchemaDefinition.Column(mlContext, labelColumn.MemberName, labelColumn.ColumnType, + metadataInfos: new MetadataInfo[] { metaFloat, metaString }); + + var featureColumnWithMetadata = autoSchema[1]; + featureColumnWithMetadata.AddMetadata(kindStringArray, valueStringArray); + featureColumnWithMetadata.AddMetadata(kindFloatArray, valueFloatArray); + featureColumnWithMetadata.AddMetadata(kindVBuffer, valueVBuffer); + + var mySchema = new SchemaDefinition { labelColumnWithMetadata, featureColumnWithMetadata }; + var idv = mlContext.CreateDataView(data, mySchema); + + Assert.True(idv.Schema[0].Metadata.Schema.Count == 2); + Assert.True(idv.Schema[0].Metadata.Schema[0].Name == kindFloat); + Assert.True(idv.Schema[0].Metadata.Schema[0].Type == coltypeFloat); + Assert.True(idv.Schema[0].Metadata.Schema[1].Name == kindString); + Assert.True(idv.Schema[0].Metadata.Schema[1].Type == TextType.Instance); + + Assert.True(idv.Schema[1].Metadata.Schema.Count == 3); + Assert.True(idv.Schema[1].Metadata.Schema[0].Name == kindStringArray); + Assert.True(idv.Schema[1].Metadata.Schema[0].Type.IsVector && idv.Schema[1].Metadata.Schema[0].Type.ItemType.IsText); + Assert.Throws(() => 
idv.Schema[1].Metadata.Schema[kindFloat]); + + float retrievedFloat = 0; + idv.Schema[0].Metadata.GetValue(kindFloat, ref retrievedFloat); + Assert.True(Math.Abs(retrievedFloat - valueFloat) < .000001); + + ReadOnlyMemory retrievedReadOnlyMemory = new ReadOnlyMemory(); + idv.Schema[0].Metadata.GetValue(kindString, ref retrievedReadOnlyMemory); + Assert.True(retrievedReadOnlyMemory.Span.SequenceEqual(valueString.AsMemory().Span)); + + VBuffer> retrievedReadOnlyMemoryVBuffer = new VBuffer>(); + idv.Schema[1].Metadata.GetValue(kindStringArray, ref retrievedReadOnlyMemoryVBuffer); + Assert.True(retrievedReadOnlyMemoryVBuffer.DenseValues().Select((s, i) => s.ToString() == valueStringArray[i]).All(b => b)); + + VBuffer retrievedFloatVBuffer = new VBuffer(1, new float[] { 2 }); + idv.Schema[1].Metadata.GetValue(kindFloatArray, ref retrievedFloatVBuffer); + VBuffer valueFloatVBuffer = new VBuffer(valueFloatArray.Length, valueFloatArray); + Assert.True(retrievedFloatVBuffer.Items().SequenceEqual(valueFloatVBuffer.Items())); + + VBuffer retrievedVBuffer = new VBuffer(); + idv.Schema[1].Metadata.GetValue(kindVBuffer, ref retrievedVBuffer); + Assert.True(retrievedVBuffer.Items().SequenceEqual(valueVBuffer.Items())); + + var ex = Assert.Throws(() => idv.Schema[1].Metadata.GetValue(kindFloat, ref retrievedReadOnlyMemoryVBuffer)); + Assert.True(ex.IsMarked()); + } + private List ReadBreastCancerExamples() { var dataFile = GetDataPath("breast-cancer.txt"); From 25abf91e70d50d6dbbc260beea2b8a0232665443 Mon Sep 17 00:00:00 2001 From: Najeeb Kazmi Date: Tue, 11 Dec 2018 12:54:10 -0800 Subject: [PATCH 040/100] Public API for Tree predictors (#1837) * Internalize and explicitly implement ICanSAveInIniFormat, ICanSaveInSourceCode, ICanSaveSummary, ICanSaveSummaryInKeyValuePairs, and ICanGetSummaryAsIRow * Internalize and explicitly implement IFeatureContributionMapper, IQuantileValueMapper, IQuantileRegressionPredictor. 
Rename FastTreePredictionWrapper to TreeEnsembleModelParameters and all descendants to XyzModelParameters * Internalize and explicitly implement IValueMapperDist * Adding public constructors and sample * nit * Address comments --- .../Dynamic/FastTreeRegression.cs | 52 +++++++++++++++++++ .../Static/FastTreeRegression.cs | 2 +- .../Static/LightGBMRegression.cs | 2 +- .../Dirty/PredictorInterfaces.cs | 24 ++++++--- .../Prediction/Calibrator.cs | 24 ++++----- .../Scorers/SchemaBindablePredictorWrapper.cs | 2 +- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 4 +- .../Trainer/EnsembleDistributionPredictor.cs | 32 ++++++------ .../Trainer/EnsemblePredictorBase.cs | 2 +- src/Microsoft.ML.FastTree/FastTree.cs | 27 +++++----- .../FastTreeClassification.cs | 26 +++++----- src/Microsoft.ML.FastTree/FastTreeRanking.cs | 32 ++++++------ .../FastTreeRegression.cs | 32 ++++++------ src/Microsoft.ML.FastTree/FastTreeTweedie.cs | 32 ++++++------ src/Microsoft.ML.FastTree/GamTrainer.cs | 2 +- .../Properties/AssemblyInfo.cs | 2 + .../RandomForestClassification.cs | 24 ++++----- .../RandomForestRegression.cs | 38 +++++++------- .../TreeEnsemble/TreeEnsembleCombiner.cs | 10 ++-- .../TreeEnsembleFeaturizer.cs | 16 +++--- .../TreeTrainersStatic.cs | 4 +- .../OlsLinearRegression.cs | 2 +- .../AssemblyRegistration.cs | 2 +- .../LightGbmBinaryTrainer.cs | 18 +++---- .../LightGbmMulticlassTrainer.cs | 4 +- .../LightGbmRankingTrainer.cs | 32 ++++++------ .../LightGbmRegressionTrainer.cs | 32 ++++++------ src/Microsoft.ML.LightGBM/LightGbmStatic.cs | 4 +- src/Microsoft.ML.PCA/PcaTrainer.cs | 2 +- .../Standard/LinearPredictor.cs | 37 ++++++++----- .../MulticlassLogisticRegression.cs | 12 ++--- .../Standard/MultiClass/Ova.cs | 2 +- .../Standard/Simple/SimpleTrainers.cs | 36 +++++++------ .../Algorithms/SmacSweeper.cs | 16 +++--- .../UnitTests/TestEntryPoints.cs | 6 +-- .../TestPredictors.cs | 2 +- .../Training.cs | 8 +-- .../EnvironmentExtensions.cs | 2 +- .../SentimentPredictionTests.cs | 5 
+- 39 files changed, 345 insertions(+), 266 deletions(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs new file mode 100644 index 0000000000..e4e3861d2b --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs @@ -0,0 +1,52 @@ +using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.ML.Samples.Dynamic +{ + public class FastTreeRegressionExample + { + public static void FastTreeRegression() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Get a small dataset as an IEnumerable and convert it to an IDataView. + var data = SamplesUtils.DatasetUtils.GetInfertData(); + var trainData = ml.CreateStreamingDataView(data); + + // Preview of the data. + // + // Age Case Education Induced Parity PooledStratum RowNum ... + // 26 1 0-5yrs 1 6 3 1 ... + // 42 1 0-5yrs 1 1 1 2 ... + // 39 1 0-5yrs 2 6 4 3 ... + // 34 1 0-5yrs 2 4 2 4 ... + // 35 1 6-11yrs 1 3 32 5 ... + + // A pipeline for concatenating the Parity and Induced columns together in the Features column. + // We will train a FastTreeRegression model with 1 tree on these two columns to predict Age. + string outputColumnName = "Features"; + var pipeline = ml.Transforms.Concatenate(outputColumnName, new[] { "Parity", "Induced" }) + .Append(ml.Regression.Trainers.FastTree(labelColumn: "Age", featureColumn: outputColumnName, numTrees: 1, numLeaves: 2, minDatapointsInLeaves: 1)); + + var model = pipeline.Fit(trainData); + + // Get the trained model parameters. 
+ var modelParams = model.LastTransformer.Model; + + // Let's see where an example with Parity = 1 and Induced = 1 would end up in the single trained tree. + var testRow = new VBuffer(2, new[] { 1.0f, 1.0f }); + // Use the path object to pass to GetLeaf, which will populate path with the IDs of th nodes from root to leaf. + List path = default; + // Get the ID of the leaf this example ends up in tree 0. + var leafID = modelParams.GetLeaf(0, in testRow, ref path); + // Get the leaf value for this leaf ID in tree 0. + var leafValue = modelParams.GetLeafValue(0, leafID); + Console.WriteLine("The leaf value in tree 0 is: " + leafValue); + } + } +} diff --git a/docs/samples/Microsoft.ML.Samples/Static/FastTreeRegression.cs b/docs/samples/Microsoft.ML.Samples/Static/FastTreeRegression.cs index 66ddc6772c..ba271b25ce 100644 --- a/docs/samples/Microsoft.ML.Samples/Static/FastTreeRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Static/FastTreeRegression.cs @@ -30,7 +30,7 @@ public static void FastTreeRegression() var data = reader.Read(dataFile); // The predictor that gets produced out of training - FastTreeRegressionPredictor pred = null; + FastTreeRegressionModelParameters pred = null; // Create the estimator var learningPipeline = reader.MakeNewEstimator() diff --git a/docs/samples/Microsoft.ML.Samples/Static/LightGBMRegression.cs b/docs/samples/Microsoft.ML.Samples/Static/LightGBMRegression.cs index ca257d864f..9ab90eaf95 100644 --- a/docs/samples/Microsoft.ML.Samples/Static/LightGBMRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Static/LightGBMRegression.cs @@ -30,7 +30,7 @@ public static void LightGbmRegression() var (trainData, testData) = mlContext.Regression.TrainTestSplit(data, testFraction: 0.1); // The predictor that gets produced out of training - LightGbmRegressionPredictor pred = null; + LightGbmRegressionModelParameters pred = null; // Create the estimator var learningPipeline = reader.MakeNewEstimator() diff --git 
a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs index a1baf261c2..f2d1ed6cfc 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs @@ -38,7 +38,8 @@ public interface IParameterMixer /// Predictor that can specialize for quantile regression. It will produce a , given /// an array of quantiles. /// - public interface IQuantileRegressionPredictor + [BestFriend] + internal interface IQuantileRegressionPredictor { ISchemaBindableMapper CreateMapper(Double[] quantiles); } @@ -59,7 +60,8 @@ public interface IDistribution } // REVIEW: How should this quantile stuff work? - public interface IQuantileValueMapper + [BestFriend] + internal interface IQuantileValueMapper { ValueMapper, VBuffer> GetMapper(Float[] quantiles); } @@ -101,7 +103,8 @@ internal interface ICanSaveInTextFormat /// /// Predictors that can output themselves in the Bing ini format. /// - public interface ICanSaveInIniFormat + [BestFriend] + internal interface ICanSaveInIniFormat { void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null); } @@ -109,7 +112,8 @@ public interface ICanSaveInIniFormat /// /// Predictors that can output Summary. /// - public interface ICanSaveSummary + [BestFriend] + internal interface ICanSaveSummary { void SaveSummary(TextWriter writer, RoleMappedSchema schema); } @@ -119,7 +123,8 @@ public interface ICanSaveSummary /// The content of value 'object' can be any type such as integer, float, string or an array of them. /// It is up the caller to check and decide how to consume the values. /// - public interface ICanGetSummaryInKeyValuePairs + [BestFriend] + internal interface ICanGetSummaryInKeyValuePairs { /// /// Gets model summary including model statistics (if exists) in key value pairs. 
@@ -127,7 +132,8 @@ public interface ICanGetSummaryInKeyValuePairs IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema); } - public interface ICanGetSummaryAsIRow + [BestFriend] + internal interface ICanGetSummaryAsIRow { Row GetSummaryIRowOrNull(RoleMappedSchema schema); @@ -142,7 +148,8 @@ public interface ICanGetSummaryAsIDataView /// /// Predictors that can output themselves in C#/C++ code. /// - public interface ICanSaveInSourceCode + [BestFriend] + internal interface ICanSaveInSourceCode { void SaveAsCode(TextWriter writer, RoleMappedSchema schema); } @@ -178,7 +185,8 @@ public interface IPredictorWithFeatureWeights : IHaveFeatureWeights /// Interface for mapping input values to corresponding feature contributions. /// This interface is commonly implemented by predictors. /// - public interface IFeatureContributionMapper : IPredictor + [BestFriend] + internal interface IFeatureContributionMapper : IPredictor { /// /// Get a delegate for mapping Contributions to Features. diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index 17be128709..722e4fa94a 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -152,7 +152,7 @@ protected CalibratedPredictorBase(IHostEnvironment env, string name, IPredictorP Calibrator = calibrator; } - public void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null) + void ICanSaveInIniFormat.SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) { Host.Check(calibrator == null, "Too many calibrators."); var saver = SubPredictor as ICanSaveInIniFormat; @@ -167,7 +167,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) saver.SaveAsText(writer, schema); } - public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInSourceCode.SaveAsCode(TextWriter writer, RoleMappedSchema schema) { // REVIEW: What 
about the calibrator? var saver = SubPredictor as ICanSaveInSourceCode; @@ -175,7 +175,7 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) saver.SaveAsCode(writer, schema); } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { // REVIEW: What about the calibrator? var saver = SubPredictor as ICanSaveSummary; @@ -184,7 +184,7 @@ public void SaveSummary(TextWriter writer, RoleMappedSchema schema) } /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema schema) { // REVIEW: What about the calibrator? var saver = SubPredictor as ICanGetSummaryInKeyValuePairs; @@ -221,9 +221,9 @@ public abstract class ValueMapperCalibratedPredictorBase : CalibratedPredictorBa private readonly IValueMapper _mapper; private readonly IFeatureContributionMapper _featureContribution; - public ColumnType InputType => _mapper.InputType; - public ColumnType OutputType => _mapper.OutputType; - public ColumnType DistType => NumberType.Float; + ColumnType IValueMapper.InputType => _mapper.InputType; + ColumnType IValueMapper.OutputType => _mapper.OutputType; + ColumnType IValueMapperDist.DistType => NumberType.Float; bool ICanSavePfa.CanSavePfa => (_mapper as ICanSavePfa)?.CanSavePfa == true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => (_mapper as ICanSaveOnnx)?.CanSaveOnnx(ctx) == true; @@ -239,16 +239,16 @@ protected ValueMapperCalibratedPredictorBase(IHostEnvironment env, string name, _featureContribution = predictor as IFeatureContributionMapper; } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { return _mapper.GetMapper(); } - public ValueMapper GetMapper() + ValueMapper IValueMapperDist.GetMapper() { Host.Check(typeof(TOut) == typeof(Float)); Host.Check(typeof(TDist) == typeof(Float)); - var map = GetMapper(); + var map = 
((IValueMapper)this).GetMapper(); ValueMapper del = (in TIn src, ref Float score, ref Float prob) => { @@ -258,7 +258,7 @@ public ValueMapper GetMapper() return (ValueMapper)(Delegate)del; } - public ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { // REVIEW: checking this a bit too late. Host.Check(_featureContribution != null, "Predictor does not implement IFeatureContributionMapper"); @@ -682,7 +682,7 @@ public ISchemaBoundMapper Bind(IHostEnvironment env, RoleMappedSchema schema) return new Bound(Host, this, schema); } - public ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { // REVIEW: checking this a bit too late. Host.Check(_featureContribution != null, "Predictor does not implement " + nameof(IFeatureContributionMapper)); diff --git a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs index e0244fb3a7..c4007790d2 100644 --- a/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs +++ b/src/Microsoft.ML.Data/Scorers/SchemaBindablePredictorWrapper.cs @@ -171,7 +171,7 @@ private ValueGetter GetValueGetter(Row input, int colSrc) }; } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { var summarySaver = Predictor as ICanSaveSummary; if (summarySaver == null) diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index 3fd5ef95e8..70c71486f3 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -560,7 +560,7 @@ public static SchemaBindablePipelineEnsembleBase Create(IHostEnvironment env, Mo 
public abstract ISchemaBoundMapper Bind(IHostEnvironment env, RoleMappedSchema schema); - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { for (int i = 0; i < PredictorModels.Length; i++) { @@ -691,7 +691,7 @@ private static bool AreEqual(in VBuffer v1, in VBuffer v2) /// - If neither of those interfaces are implemented then the value is a string containing the name of the type of model. /// /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema schema) { Host.CheckValueOrNull(schema); diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs index 6346abfc80..9cccc362b4 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsembleDistributionPredictor.cs @@ -24,9 +24,9 @@ namespace Microsoft.ML.Runtime.Ensemble public sealed class EnsembleDistributionPredictor : EnsemblePredictorBase, TDistPredictor, IValueMapperDist { - public const string UserName = "Ensemble Distribution Executor"; - public const string LoaderSignature = "EnsemDbExec"; - public const string RegistrationName = "EnsembleDistributionPredictor"; + internal const string UserName = "Ensemble Distribution Executor"; + internal const string LoaderSignature = "EnsemDbExec"; + internal const string RegistrationName = "EnsembleDistributionPredictor"; private static VersionInfo GetVersionInfo() { @@ -45,9 +45,11 @@ private static VersionInfo GetVersionInfo() private readonly Median _probabilityCombiner; private readonly IValueMapperDist[] _mappers; - public ColumnType InputType { get; } - public ColumnType OutputType => NumberType.Float; - public ColumnType DistType => NumberType.Float; + private readonly ColumnType _inputType; + + ColumnType 
IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => NumberType.Float; + ColumnType IValueMapperDist.DistType => NumberType.Float; public override PredictionKind PredictionKind { get; } @@ -57,7 +59,7 @@ internal EnsembleDistributionPredictor(IHostEnvironment env, PredictionKind kind { PredictionKind = kind; _probabilityCombiner = new Median(env); - InputType = InitializeMappers(out _mappers); + _inputType = InitializeMappers(out _mappers); ComputeAveragedWeights(out _averagedWeights); } @@ -66,7 +68,7 @@ private EnsembleDistributionPredictor(IHostEnvironment env, ModelLoadContext ctx { PredictionKind = (PredictionKind)ctx.Reader.ReadInt32(); _probabilityCombiner = new Median(env); - InputType = InitializeMappers(out _mappers); + _inputType = InitializeMappers(out _mappers); ComputeAveragedWeights(out _averagedWeights); } @@ -101,7 +103,7 @@ private bool IsValid(IValueMapperDist mapper) && mapper.DistType == NumberType.Float; } - public static EnsembleDistributionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static EnsembleDistributionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -119,7 +121,7 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.Writer.Write((int)PredictionKind); } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(Single)); @@ -132,8 +134,8 @@ public ValueMapper GetMapper() ValueMapper, Single> del = (in VBuffer src, ref Single dst) => { - if (InputType.VectorSize > 0) - Host.Check(src.Length == InputType.VectorSize); + if (_inputType.VectorSize > 0) + Host.Check(src.Length == _inputType.VectorSize); var tmp = src; Parallel.For(0, maps.Length, i => @@ -155,7 +157,7 @@ public ValueMapper GetMapper() return (ValueMapper)(Delegate)del; } - public ValueMapper GetMapper() + ValueMapper 
IValueMapperDist.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(Single)); @@ -170,8 +172,8 @@ public ValueMapper GetMapper() ValueMapper, Single, Single> del = (in VBuffer src, ref Single score, ref Single prob) => { - if (InputType.VectorSize > 0) - Host.Check(src.Length == InputType.VectorSize); + if (_inputType.VectorSize > 0) + Host.Check(src.Length == _inputType.VectorSize); var tmp = src; Parallel.For(0, maps.Length, i => diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs index 72a81b12e3..4b9ddb89dd 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictorBase.cs @@ -144,7 +144,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) /// /// Saves the model summary /// - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { for (int i = 0; i < Models.Length; i++) { diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 8676331bd2..c790783cc9 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -2794,7 +2794,7 @@ public Dataset GetCompatibleDataset(RoleMappedData data, PredictionKind kind, in } } - public abstract class FastTreePredictionWrapper : + public abstract class TreeEnsembleModelParameters : PredictorBase, IValueMapper, ICanSaveInTextFormat, @@ -2811,7 +2811,8 @@ public abstract class FastTreePredictionWrapper : ISingleCanSaveOnnx { //The below two properties are necessary for tree Visualizer - public TreeEnsemble TrainedEnsemble { get; } + [BestFriend] + internal TreeEnsemble TrainedEnsemble { get; } int ITreeEnsemble.NumTrees => TrainedEnsemble.NumTrees; // Inner args is used only for documentation purposes when saving comments to INI files. 
@@ -2839,7 +2840,7 @@ public abstract class FastTreePredictionWrapper : bool ICanSavePfa.CanSavePfa => true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => true; - protected FastTreePredictionWrapper(IHostEnvironment env, string name, TreeEnsemble trainedEnsemble, int numFeatures, string innerArgs) + public TreeEnsembleModelParameters(IHostEnvironment env, string name, TreeEnsemble trainedEnsemble, int numFeatures, string innerArgs) : base(env, name) { Host.CheckValue(trainedEnsemble, nameof(trainedEnsemble)); @@ -2860,7 +2861,7 @@ protected FastTreePredictionWrapper(IHostEnvironment env, string name, TreeEnsem OutputType = NumberType.Float; } - protected FastTreePredictionWrapper(IHostEnvironment env, string name, ModelLoadContext ctx, VersionInfo ver) + protected TreeEnsembleModelParameters(IHostEnvironment env, string name, ModelLoadContext ctx, VersionInfo ver) : base(env, name, ctx) { // *** Binary format *** @@ -2933,7 +2934,7 @@ protected virtual void Map(in VBuffer src, ref Float dst) dst = (Float)TrainedEnsemble.GetOutput(in src); } - public ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { Host.Check(typeof(TSrc) == typeof(VBuffer)); Host.Check(typeof(TDst) == typeof(VBuffer)); @@ -2963,7 +2964,7 @@ private void FeatureContributionMap(in VBuffer src, ref VBuffer ds /// /// write out a C# representation of the ensemble /// - public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInSourceCode.SaveAsCode(TextWriter writer, RoleMappedSchema schema) { Host.CheckValueOrNull(schema); SaveEnsembleAsCode(writer, schema); @@ -2976,13 +2977,13 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValueOrNull(schema); - SaveAsIni(writer, schema); + ((ICanSaveInIniFormat)this).SaveAsIni(writer, schema); } /// /// Output 
the INI model to a given writer /// - public void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null) + void ICanSaveInIniFormat.SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); @@ -3156,12 +3157,12 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] outputNames, string return true; } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine(); writer.WriteLine("Per-feature gain summary for the boosted tree ensemble:"); - foreach (var pair in GetSummaryInKeyValuePairs(schema)) + foreach (var pair in ((ICanGetSummaryInKeyValuePairs)this).GetSummaryInKeyValuePairs(schema)) { Host.Assert(pair.Value is Double); writer.WriteLine("\t{0}\t{1}", pair.Key, (Double)pair.Value); @@ -3187,7 +3188,7 @@ private IEnumerable> GetSortedFeatureGains(RoleMapp } /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema schema) { List> results = new List>(); @@ -3309,7 +3310,7 @@ public int GetLeaf(int treeId, in VBuffer features, ref List path) return TrainedEnsemble.GetTreeAt(treeId).GetLeaf(in features, ref path); } - public Row GetSummaryIRowOrNull(RoleMappedSchema schema) + Row ICanGetSummaryAsIRow.GetSummaryIRowOrNull(RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names); @@ -3324,7 +3325,7 @@ public Row GetSummaryIRowOrNull(RoleMappedSchema schema) return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } - public Row GetStatsIRowOrNull(RoleMappedSchema schema) + Row ICanGetSummaryAsIRow.GetStatsIRowOrNull(RoleMappedSchema schema) { return null; } diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs 
b/src/Microsoft.ML.FastTree/FastTreeClassification.cs index 3effa46c02..5fd1feaa94 100644 --- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs +++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs @@ -36,17 +36,17 @@ "fastrank", "fastrankwrapper")] -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastTreeBinaryPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastTreeBinaryModelParameters), null, typeof(SignatureLoadModel), "FastTree Binary Executor", - FastTreeBinaryPredictor.LoaderSignature)] + FastTreeBinaryModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { - public sealed class FastTreeBinaryPredictor : - FastTreePredictionWrapper + public sealed class FastTreeBinaryModelParameters : + TreeEnsembleModelParameters { - public const string LoaderSignature = "FastTreeBinaryExec"; - public const string RegistrationName = "FastTreeBinaryPredictor"; + internal const string LoaderSignature = "FastTreeBinaryExec"; + internal const string RegistrationName = "FastTreeBinaryPredictor"; private static VersionInfo GetVersionInfo() { @@ -60,7 +60,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010005, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastTreeBinaryPredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastTreeBinaryModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -69,12 +69,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; - internal FastTreeBinaryPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public FastTreeBinaryModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private 
FastTreeBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastTreeBinaryModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -85,12 +85,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) + private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new FastTreeBinaryPredictor(env, ctx); + var predictor = new FastTreeBinaryModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) @@ -108,7 +108,7 @@ public sealed partial class FastTreeBinaryClassificationTrainer : /// /// The LoadName for the assembly containing the trainer. /// - public const string LoadNameValue = "FastTreeBinaryClassification"; + internal const string LoadNameValue = "FastTreeBinaryClassification"; internal const string UserNameValue = "FastTree (Boosted Trees) Classification"; internal const string Summary = "Uses a logit-boost boosted tree learner to perform binary classification."; internal const string ShortName = "ftc"; @@ -177,7 +177,7 @@ private protected override IPredictorWithFeatureWeights TrainModelCore(Tr // output probabilities when transformed using a scaled logistic function, // so transform the scores using that. - var pred = new FastTreeBinaryPredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs); + var pred = new FastTreeBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs); // FastTree's binary classification boosting framework's natural probabilistic interpretation // is explained in "From RankNet to LambdaRank to LambdaMART: An Overview" by Chris Burges. 
// The correctness of this scaling depends upon the gradient calculation in diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index ae4b04a4fa..c17d86f9e8 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -34,9 +34,9 @@ "frrank", "btrank")] -[assembly: LoadableClass(typeof(FastTreeRankingPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(FastTreeRankingModelParameters), null, typeof(SignatureLoadModel), "FastTree Ranking Executor", - FastTreeRankingPredictor.LoaderSignature)] + FastTreeRankingModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(FastTree), null, typeof(SignatureEntryPointModule), "FastTree")] @@ -44,7 +44,7 @@ namespace Microsoft.ML.Trainers.FastTree { /// public sealed partial class FastTreeRankingTrainer - : BoostingFastTreeTrainerBase, FastTreeRankingPredictor> + : BoostingFastTreeTrainerBase, FastTreeRankingModelParameters> { internal const string LoadNameValue = "FastTreeRanking"; internal const string UserNameValue = "FastTree (Boosted Trees) Ranking"; @@ -113,7 +113,7 @@ protected override float GetMaxLabel() return GetLabelGains().Length - 1; } - private protected override FastTreeRankingPredictor TrainModelCore(TrainContext context) + private protected override FastTreeRankingModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var trainData = context.TrainingSet; @@ -127,7 +127,7 @@ private protected override FastTreeRankingPredictor TrainModelCore(TrainContext TrainCore(ch); FeatureCount = trainData.Schema.Feature.Type.ValueCount; } - return new FastTreeRankingPredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs); + return new FastTreeRankingModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs); } private Double[] GetLabelGains() @@ -455,10 +455,10 @@ protected override string GetTestGraphHeader() return 
headerBuilder.ToString(); } - protected override RankingPredictionTransformer MakeTransformer(FastTreeRankingPredictor model, Schema trainSchema) - => new RankingPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RankingPredictionTransformer MakeTransformer(FastTreeRankingModelParameters model, Schema trainSchema) + => new RankingPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RankingPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RankingPredictionTransformer Train(IDataView trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -1105,10 +1105,10 @@ private static extern unsafe void GetDerivatives( } } - public sealed class FastTreeRankingPredictor : FastTreePredictionWrapper + public sealed class FastTreeRankingModelParameters : TreeEnsembleModelParameters { - public const string LoaderSignature = "FastTreeRankerExec"; - public const string RegistrationName = "FastTreeRankingPredictor"; + internal const string LoaderSignature = "FastTreeRankerExec"; + internal const string RegistrationName = "FastTreeRankingPredictor"; private static VersionInfo GetVersionInfo() { @@ -1122,7 +1122,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010004, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastTreeRankingPredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastTreeRankingModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -1131,12 +1131,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; - internal FastTreeRankingPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public 
FastTreeRankingModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private FastTreeRankingPredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastTreeRankingModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -1147,9 +1147,9 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static FastTreeRankingPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static FastTreeRankingModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { - return new FastTreeRankingPredictor(env, ctx); + return new FastTreeRankingModelParameters(env, ctx); } public override PredictionKind PredictionKind => PredictionKind.Ranking; diff --git a/src/Microsoft.ML.FastTree/FastTreeRegression.cs b/src/Microsoft.ML.FastTree/FastTreeRegression.cs index a187accb26..5a19f91e54 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRegression.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRegression.cs @@ -28,15 +28,15 @@ "frr", "btr")] -[assembly: LoadableClass(typeof(FastTreeRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(FastTreeRegressionModelParameters), null, typeof(SignatureLoadModel), "FastTree Regression Executor", - FastTreeRegressionPredictor.LoaderSignature)] + FastTreeRegressionModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { /// public sealed partial class FastTreeRegressionTrainer - : BoostingFastTreeTrainerBase, FastTreeRegressionPredictor> + : BoostingFastTreeTrainerBase, FastTreeRegressionModelParameters> { public const string LoadNameValue = "FastTreeRegression"; internal const string UserNameValue = "FastTree (Boosted Trees) Regression"; @@ -85,7 +85,7 @@ internal FastTreeRegressionTrainer(IHostEnvironment env, Arguments args) { } 
- private protected override FastTreeRegressionPredictor TrainModelCore(TrainContext context) + private protected override FastTreeRegressionModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var trainData = context.TrainingSet; @@ -101,7 +101,7 @@ private protected override FastTreeRegressionPredictor TrainModelCore(TrainConte ConvertData(trainData); TrainCore(ch); } - return new FastTreeRegressionPredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs); + return new FastTreeRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs); } protected override void CheckArgs(IChannel ch) @@ -164,10 +164,10 @@ protected override Test ConstructTestForTrainingData() return new RegressionTest(ConstructScoreTracker(TrainSet)); } - protected override RegressionPredictionTransformer MakeTransformer(FastTreeRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(FastTreeRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -441,10 +441,10 @@ protected override void GetGradientInOneQuery(int query, int threadIndex) } } - public sealed class FastTreeRegressionPredictor : FastTreePredictionWrapper + public sealed class FastTreeRegressionModelParameters : TreeEnsembleModelParameters { - public const string LoaderSignature = "FastTreeRegressionExec"; - public const string RegistrationName = "FastTreeRegressionPredictor"; + internal const string 
LoaderSignature = "FastTreeRegressionExec"; + internal const string RegistrationName = "FastTreeRegressionPredictor"; private static VersionInfo GetVersionInfo() { @@ -458,7 +458,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010004, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastTreeRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastTreeRegressionModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -467,12 +467,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; - internal FastTreeRegressionPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public FastTreeRegressionModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private FastTreeRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastTreeRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -483,12 +483,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static FastTreeRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static FastTreeRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new FastTreeRegressionPredictor(env, ctx); + return new FastTreeRegressionModelParameters(env, ctx); } public override PredictionKind PredictionKind => PredictionKind.Regression; diff --git a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs index e49886884c..aa0b4e90e0 
100644 --- a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs +++ b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs @@ -23,9 +23,9 @@ FastTreeTweedieTrainer.LoadNameValue, FastTreeTweedieTrainer.ShortName)] -[assembly: LoadableClass(typeof(FastTreeTweediePredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(FastTreeTweedieModelParameters), null, typeof(SignatureLoadModel), "FastTree Tweedie Regression Executor", - FastTreeTweediePredictor.LoaderSignature)] + FastTreeTweedieModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { @@ -34,7 +34,7 @@ namespace Microsoft.ML.Trainers.FastTree // https://arxiv.org/pdf/1508.06378.pdf /// public sealed partial class FastTreeTweedieTrainer - : BoostingFastTreeTrainerBase, FastTreeTweediePredictor> + : BoostingFastTreeTrainerBase, FastTreeTweedieModelParameters> { internal const string LoadNameValue = "FastTreeTweedieRegression"; internal const string UserNameValue = "FastTree (Boosted Trees) Tweedie Regression"; @@ -87,7 +87,7 @@ internal FastTreeTweedieTrainer(IHostEnvironment env, Arguments args) Initialize(); } - private protected override FastTreeTweediePredictor TrainModelCore(TrainContext context) + private protected override FastTreeTweedieModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var trainData = context.TrainingSet; @@ -104,7 +104,7 @@ private protected override FastTreeTweediePredictor TrainModelCore(TrainContext ConvertData(trainData); TrainCore(ch); } - return new FastTreeTweediePredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs); + return new FastTreeTweedieModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs); } protected override void CheckArgs(IChannel ch) @@ -316,10 +316,10 @@ protected override void Train(IChannel ch) PrintTestGraph(ch); } - protected override RegressionPredictionTransformer MakeTransformer(FastTreeTweediePredictor model, Schema trainSchema) - => new 
RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(FastTreeTweedieModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -446,10 +446,10 @@ protected override void GetGradientInOneQuery(int query, int threadIndex) } } - public sealed class FastTreeTweediePredictor : FastTreePredictionWrapper + public sealed class FastTreeTweedieModelParameters : TreeEnsembleModelParameters { - public const string LoaderSignature = "FastTreeTweedieExec"; - public const string RegistrationName = "FastTreeTweediePredictor"; + internal const string LoaderSignature = "FastTreeTweedieExec"; + internal const string RegistrationName = "FastTreeTweediePredictor"; private static VersionInfo GetVersionInfo() { @@ -461,7 +461,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010002, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastTreeTweediePredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastTreeTweedieModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010001; @@ -470,12 +470,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010003; - internal FastTreeTweediePredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public FastTreeTweedieModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, 
trainedEnsemble, featureCount, innerArgs) { } - private FastTreeTweediePredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastTreeTweedieModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -486,12 +486,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static FastTreeTweediePredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static FastTreeTweedieModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new FastTreeTweediePredictor(env, ctx); + return new FastTreeTweedieModelParameters(env, ctx); } protected override void Map(in VBuffer src, ref float dst) diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 7943af8835..15d599e237 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -1043,7 +1043,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) } } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } diff --git a/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs b/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs index cd27563c10..cf6d8d8d42 100644 --- a/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.FastTree/Properties/AssemblyInfo.cs @@ -6,6 +6,8 @@ using Microsoft.ML; [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Core.Tests" + PublicKey.TestValue)] + [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.LightGBM" + PublicKey.Value)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Sweeper" + PublicKey.Value)] [assembly: 
WantsToBeBestFriends] diff --git a/src/Microsoft.ML.FastTree/RandomForestClassification.cs b/src/Microsoft.ML.FastTree/RandomForestClassification.cs index eb4a773cf9..0f2cbe5fdc 100644 --- a/src/Microsoft.ML.FastTree/RandomForestClassification.cs +++ b/src/Microsoft.ML.FastTree/RandomForestClassification.cs @@ -25,9 +25,9 @@ FastForestClassification.ShortName, "ffc")] -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastForestClassificationPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastForestClassificationModelParameters), null, typeof(SignatureLoadModel), "FastForest Binary Executor", - FastForestClassificationPredictor.LoaderSignature)] + FastForestClassificationModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(FastForest), null, typeof(SignatureEntryPointModule), "FastForest")] @@ -46,11 +46,11 @@ public FastForestArgumentsBase() } } - public sealed class FastForestClassificationPredictor : - FastTreePredictionWrapper + public sealed class FastForestClassificationModelParameters : + TreeEnsembleModelParameters { - public const string LoaderSignature = "FastForestBinaryExec"; - public const string RegistrationName = "FastForestClassificationPredictor"; + internal const string LoaderSignature = "FastForestBinaryExec"; + internal const string RegistrationName = "FastForestClassificationPredictor"; private static VersionInfo GetVersionInfo() { @@ -65,7 +65,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010005, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastForestClassificationPredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastForestClassificationModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010003; @@ -79,11 +79,11 @@ private static VersionInfo GetVersionInfo() /// public override PredictionKind PredictionKind => 
PredictionKind.BinaryClassification; - public FastForestClassificationPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public FastForestClassificationModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private FastForestClassificationPredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastForestClassificationModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -94,12 +94,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) + private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new FastForestClassificationPredictor(env, ctx); + var predictor = new FastForestClassificationModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) @@ -192,7 +192,7 @@ private protected override IPredictorWithFeatureWeights TrainModelCore(Tr // calibrator, transform the scores using that. // REVIEW: Need a way to signal the outside world that we prefer simple sigmoid? 
- return new FastForestClassificationPredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs); + return new FastForestClassificationModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs); } protected override ObjectiveFunctionBase ConstructObjFunc(IChannel ch) diff --git a/src/Microsoft.ML.FastTree/RandomForestRegression.cs b/src/Microsoft.ML.FastTree/RandomForestRegression.cs index 1b70ab32f4..8b32cea962 100644 --- a/src/Microsoft.ML.FastTree/RandomForestRegression.cs +++ b/src/Microsoft.ML.FastTree/RandomForestRegression.cs @@ -22,21 +22,21 @@ FastForestRegression.LoadNameValue, FastForestRegression.ShortName)] -[assembly: LoadableClass(typeof(FastForestRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(FastForestRegressionModelParameters), null, typeof(SignatureLoadModel), "FastForest Regression Executor", - FastForestRegressionPredictor.LoaderSignature)] + FastForestRegressionModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { - public sealed class FastForestRegressionPredictor : - FastTreePredictionWrapper, + public sealed class FastForestRegressionModelParameters : + TreeEnsembleModelParameters, IQuantileValueMapper, IQuantileRegressionPredictor { private readonly int _quantileSampleCount; - public const string LoaderSignature = "FastForestRegressionExec"; - public const string RegistrationName = "FastForestRegressionPredictor"; + internal const string LoaderSignature = "FastForestRegressionExec"; + internal const string RegistrationName = "FastForestRegressionPredictor"; private static VersionInfo GetVersionInfo() { @@ -51,7 +51,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010005, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastForestRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(FastForestRegressionModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 
0x00010003; @@ -60,13 +60,13 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010006; - public FastForestRegressionPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs, int samplesCount) + public FastForestRegressionModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs, int samplesCount) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { _quantileSampleCount = samplesCount; } - private FastForestRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private FastForestRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { // *** Binary format *** @@ -91,12 +91,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.Writer.Write(_quantileSampleCount); } - public static FastForestRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static FastForestRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new FastForestRegressionPredictor(env, ctx); + return new FastForestRegressionModelParameters(env, ctx); } public override PredictionKind PredictionKind => PredictionKind.Regression; @@ -111,7 +111,7 @@ protected override void Map(in VBuffer src, ref float dst) dst = (float)TrainedEnsemble.GetOutput(in src) / TrainedEnsemble.NumTrees; } - public ValueMapper, VBuffer> GetMapper(float[] quantiles) + ValueMapper, VBuffer> IQuantileValueMapper.GetMapper(float[] quantiles) { return (in VBuffer src, ref VBuffer dst) => @@ -128,7 +128,7 @@ public ValueMapper, VBuffer> GetMapper(float[] quantiles) }; } - public ISchemaBindableMapper CreateMapper(Double[] quantiles) + ISchemaBindableMapper IQuantileRegressionPredictor.CreateMapper(Double[] 
quantiles) { Host.CheckNonEmpty(quantiles, nameof(quantiles)); return new SchemaBindableQuantileRegressionPredictor(this, quantiles); @@ -137,7 +137,7 @@ public ISchemaBindableMapper CreateMapper(Double[] quantiles) /// public sealed partial class FastForestRegression - : RandomForestTrainerBase, FastForestRegressionPredictor> + : RandomForestTrainerBase, FastForestRegressionModelParameters> { public sealed class Arguments : FastForestArgumentsBase { @@ -188,7 +188,7 @@ public FastForestRegression(IHostEnvironment env, Arguments args) { } - private protected override FastForestRegressionPredictor TrainModelCore(TrainContext context) + private protected override FastForestRegressionModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var trainData = context.TrainingSet; @@ -205,7 +205,7 @@ private protected override FastForestRegressionPredictor TrainModelCore(TrainCon ConvertData(trainData); TrainCore(ch); } - return new FastForestRegressionPredictor(Host, TrainedEnsemble, FeatureCount, InnerArgs, Args.QuantileSampleCount); + return new FastForestRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerArgs, Args.QuantileSampleCount); } protected override void PrepareLabels(IChannel ch) @@ -222,10 +222,10 @@ protected override Test ConstructTestForTrainingData() return new RegressionTest(ConstructScoreTracker(TrainSet)); } - protected override RegressionPredictionTransformer MakeTransformer(FastForestRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(FastForestRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RegressionPredictionTransformer Train(IDataView 
trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) diff --git a/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs b/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs index 84b04ed793..e621b87b8a 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs @@ -57,7 +57,7 @@ public IPredictor CombineModels(IEnumerable models) predictor = calibrated.SubPredictor; paramA = -(calibrated.Calibrator as PlattCalibrator).ParamA; } - var tree = predictor as FastTreePredictionWrapper; + var tree = predictor as TreeEnsembleModelParameters; if (tree == null) throw _host.Except("Model is not a tree ensemble"); foreach (var t in tree.TrainedEnsemble.Trees) @@ -99,14 +99,14 @@ public IPredictor CombineModels(IEnumerable models) { case PredictionKind.BinaryClassification: if (!binaryClassifier) - return new FastTreeBinaryPredictor(_host, ensemble, featureCount, null); + return new FastTreeBinaryModelParameters(_host, ensemble, featureCount, null); var cali = new PlattCalibrator(_host, -1, 0); - return new FeatureWeightsCalibratedPredictor(_host, new FastTreeBinaryPredictor(_host, ensemble, featureCount, null), cali); + return new FeatureWeightsCalibratedPredictor(_host, new FastTreeBinaryModelParameters(_host, ensemble, featureCount, null), cali); case PredictionKind.Regression: - return new FastTreeRegressionPredictor(_host, ensemble, featureCount, null); + return new FastTreeRegressionModelParameters(_host, ensemble, featureCount, null); case PredictionKind.Ranking: - return new FastTreeRankingPredictor(_host, ensemble, featureCount, null); + return new FastTreeRankingModelParameters(_host, ensemble, featureCount, null); default: _host.Assert(false); throw _host.ExceptNotSupp(); diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs 
b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index a6b5e4ebb7..7eef450580 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -259,7 +259,7 @@ private sealed class State { private readonly IExceptionContext _ectx; private readonly Row _input; - private readonly FastTreePredictionWrapper _ensemble; + private readonly TreeEnsembleModelParameters _ensemble; private readonly int _numTrees; private readonly int _numLeaves; @@ -275,7 +275,7 @@ private sealed class State private long _cachedLeafBuilderPosition; private long _cachedPathBuilderPosition; - public State(IExceptionContext ectx, Row input, FastTreePredictionWrapper ensemble, int numLeaves, int featureIndex) + public State(IExceptionContext ectx, Row input, TreeEnsembleModelParameters ensemble, int numLeaves, int featureIndex) { Contracts.AssertValue(ectx); _ectx = ectx; @@ -421,7 +421,7 @@ private static VersionInfo GetVersionInfo() } private readonly IHost _host; - private readonly FastTreePredictionWrapper _ensemble; + private readonly TreeEnsembleModelParameters _ensemble; private readonly int _totalLeafCount; public TreeEnsembleFeaturizerBindableMapper(IHostEnvironment env, Arguments args, IPredictor predictor) @@ -433,7 +433,7 @@ public TreeEnsembleFeaturizerBindableMapper(IHostEnvironment env, Arguments args if (predictor is CalibratedPredictorBase) predictor = ((CalibratedPredictorBase)predictor).SubPredictor; - _ensemble = predictor as FastTreePredictionWrapper; + _ensemble = predictor as TreeEnsembleModelParameters; _host.Check(_ensemble != null, "Predictor in model file does not have compatible type"); _totalLeafCount = CountLeaves(_ensemble); @@ -448,7 +448,7 @@ public TreeEnsembleFeaturizerBindableMapper(IHostEnvironment env, ModelLoadConte // *** Binary format *** // ensemble - ctx.LoadModel(env, out _ensemble, "Ensemble"); + ctx.LoadModel(env, out _ensemble, "Ensemble"); _totalLeafCount = CountLeaves(_ensemble); } 
@@ -465,7 +465,7 @@ public void Save(ModelSaveContext ctx) ctx.SaveModel(_ensemble, "Ensemble"); } - private static int CountLeaves(FastTreePredictionWrapper ensemble) + private static int CountLeaves(TreeEnsembleModelParameters ensemble) { Contracts.AssertValue(ensemble); @@ -643,7 +643,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV // Make sure that the given predictor has the correct number of input features. if (predictor is CalibratedPredictorBase) predictor = ((CalibratedPredictorBase)predictor).SubPredictor; - // Predictor should be a FastTreePredictionWrapper, which implements IValueMapper, so this should + // Predictor should be a TreeEnsembleModelParameters, which implements IValueMapper, so this should // be non-null. var vm = predictor as IValueMapper; ch.CheckUserArg(vm != null, nameof(args.TrainedModelFile), "Predictor in model file does not have compatible type"); @@ -707,7 +707,7 @@ public static IDataTransform CreateForEntryPoint(IHostEnvironment env, Arguments // Make sure that the given predictor has the correct number of input features. if (predictor is CalibratedPredictorBase) predictor = ((CalibratedPredictorBase)predictor).SubPredictor; - // Predictor should be a FastTreePredictionWrapper, which implements IValueMapper, so this should + // Predictor should be a TreeEnsembleModelParameters, which implements IValueMapper, so this should // be non-null. 
var vm = predictor as IValueMapper; ch.CheckUserArg(vm != null, nameof(args.PredictorModel), "Predictor does not have compatible type"); diff --git a/src/Microsoft.ML.FastTree/TreeTrainersStatic.cs b/src/Microsoft.ML.FastTree/TreeTrainersStatic.cs index 176f0cead8..5893a06eda 100644 --- a/src/Microsoft.ML.FastTree/TreeTrainersStatic.cs +++ b/src/Microsoft.ML.FastTree/TreeTrainersStatic.cs @@ -48,7 +48,7 @@ public static Scalar FastTree(this RegressionContext.RegressionTrainers c int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { CheckUserValues(label, features, weights, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings, onFit); @@ -144,7 +144,7 @@ public static Scalar FastTree(this RankingContext.RankingTrainers c int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { CheckUserValues(label, features, weights, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings, onFit); diff --git a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs index c2a2e0c821..37c7a59643 100644 --- a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs +++ b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs @@ -739,7 +739,7 @@ public static OlsLinearRegressionPredictor Create(IHostEnvironment env, ModelLoa return new OlsLinearRegressionPredictor(env, ctx); } - public override void SaveSummary(TextWriter writer, RoleMappedSchema schema) + private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, Weight.Length, ref names); diff --git a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs 
b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs index 3766199c56..5f633a0f67 100644 --- a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs +++ b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs @@ -43,7 +43,7 @@ private static bool LoadStandardAssemblies() _ = typeof(TextLoader).Assembly; // ML.Data //_ = typeof(EnsemblePredictor).Assembly); // ML.Ensemble BUG https://github.com/dotnet/machinelearning/issues/1078 Ensemble isn't in a NuGet package - _ = typeof(FastTreeBinaryPredictor).Assembly; // ML.FastTree + _ = typeof(FastTreeBinaryModelParameters).Assembly; // ML.FastTree _ = typeof(KMeansModelParameters).Assembly; // ML.KMeansClustering _ = typeof(Maml).Assembly; // ML.Maml _ = typeof(PcaPredictor).Assembly; // ML.PCA diff --git a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs index 4217ee0612..45d55f4bce 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs @@ -20,16 +20,16 @@ new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer) }, LightGbmBinaryTrainer.UserName, LightGbmBinaryTrainer.LoadNameValue, LightGbmBinaryTrainer.ShortName, DocName = "trainer/LightGBM.md")] -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(LightGbmBinaryPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(LightGbmBinaryModelParameters), null, typeof(SignatureLoadModel), "LightGBM Binary Executor", - LightGbmBinaryPredictor.LoaderSignature)] + LightGbmBinaryModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(LightGbm), null, typeof(SignatureEntryPointModule), "LightGBM")] namespace Microsoft.ML.Runtime.LightGBM { /// - public sealed class LightGbmBinaryPredictor : FastTreePredictionWrapper + public sealed class LightGbmBinaryModelParameters : TreeEnsembleModelParameters { internal const string LoaderSignature = 
"LightGBMBinaryExec"; internal const string RegistrationName = "LightGBMBinaryPredictor"; @@ -47,7 +47,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010004, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(LightGbmBinaryPredictor).Assembly.FullName); + loaderAssemblyName: typeof(LightGbmBinaryModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -55,12 +55,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; public override PredictionKind PredictionKind => PredictionKind.BinaryClassification; - internal LightGbmBinaryPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public LightGbmBinaryModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private LightGbmBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) + private LightGbmBinaryModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -71,12 +71,12 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) + private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new LightGbmBinaryPredictor(env, ctx); + var predictor = new LightGbmBinaryModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) @@ -132,7 +132,7 @@ private protected override IPredictorWithFeatureWeights CreatePredictor() { Host.Check(TrainedEnsemble != null, "The 
predictor cannot be created before training is complete"); var innerArgs = LightGbmInterfaceUtils.JoinParameters(Options); - var pred = new LightGbmBinaryPredictor(Host, TrainedEnsemble, FeatureCount, innerArgs); + var pred = new LightGbmBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs); var cali = new PlattCalibrator(Host, -0.5, 0); return new FeatureWeightsCalibratedPredictor(Host, pred, cali); } diff --git a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs index c787a606fa..46300386ab 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs @@ -82,9 +82,9 @@ private TreeEnsemble GetBinaryEnsemble(int classID) return res; } - private LightGbmBinaryPredictor CreateBinaryPredictor(int classID, string innerArgs) + private LightGbmBinaryModelParameters CreateBinaryPredictor(int classID, string innerArgs) { - return new LightGbmBinaryPredictor(Host, GetBinaryEnsemble(classID), FeatureCount, innerArgs); + return new LightGbmBinaryModelParameters(Host, GetBinaryEnsemble(classID), FeatureCount, innerArgs); } private protected override OvaPredictor CreatePredictor() diff --git a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs index f9326f00ac..bcc32d533c 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs @@ -18,17 +18,17 @@ new[] { typeof(SignatureRankerTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer) }, "LightGBM Ranking", LightGbmRankingTrainer.LoadNameValue, LightGbmRankingTrainer.ShortName, DocName = "trainer/LightGBM.md")] -[assembly: LoadableClass(typeof(LightGbmRankingPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(LightGbmRankingModelParameters), null, typeof(SignatureLoadModel), "LightGBM Ranking Executor", - 
LightGbmRankingPredictor.LoaderSignature)] + LightGbmRankingModelParameters.LoaderSignature)] namespace Microsoft.ML.Runtime.LightGBM { - public sealed class LightGbmRankingPredictor : FastTreePredictionWrapper + public sealed class LightGbmRankingModelParameters : TreeEnsembleModelParameters { - public const string LoaderSignature = "LightGBMRankerExec"; - public const string RegistrationName = "LightGBMRankingPredictor"; + internal const string LoaderSignature = "LightGBMRankerExec"; + internal const string RegistrationName = "LightGBMRankingPredictor"; private static VersionInfo GetVersionInfo() { @@ -43,7 +43,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010004, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(LightGbmRankingPredictor).Assembly.FullName); + loaderAssemblyName: typeof(LightGbmRankingModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -51,12 +51,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; public override PredictionKind PredictionKind => PredictionKind.Ranking; - internal LightGbmRankingPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public LightGbmRankingModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private LightGbmRankingPredictor(IHostEnvironment env, ModelLoadContext ctx) + private LightGbmRankingModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -67,14 +67,14 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - private static LightGbmRankingPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static 
LightGbmRankingModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { - return new LightGbmRankingPredictor(env, ctx); + return new LightGbmRankingModelParameters(env, ctx); } } /// - public sealed class LightGbmRankingTrainer : LightGbmTrainerBase, LightGbmRankingPredictor> + public sealed class LightGbmRankingTrainer : LightGbmTrainerBase, LightGbmRankingModelParameters> { public const string UserName = "LightGBM Ranking"; public const string LoadNameValue = "LightGBMRanking"; @@ -151,11 +151,11 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) error(); } - private protected override LightGbmRankingPredictor CreatePredictor() + private protected override LightGbmRankingModelParameters CreatePredictor() { Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete"); var innerArgs = LightGbmInterfaceUtils.JoinParameters(Options); - return new LightGbmRankingPredictor(Host, TrainedEnsemble, FeatureCount, innerArgs); + return new LightGbmRankingModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs); } protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) @@ -178,10 +178,10 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override RankingPredictionTransformer MakeTransformer(LightGbmRankingPredictor model, Schema trainSchema) - => new RankingPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RankingPredictionTransformer MakeTransformer(LightGbmRankingModelParameters model, Schema trainSchema) + => new RankingPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RankingPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RankingPredictionTransformer Train(IDataView trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); } 
diff --git a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs index feeb227ba4..9e3868bf96 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs @@ -18,17 +18,17 @@ new[] { typeof(SignatureRegressorTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer) }, LightGbmRegressorTrainer.UserNameValue, LightGbmRegressorTrainer.LoadNameValue, LightGbmRegressorTrainer.ShortName, DocName = "trainer/LightGBM.md")] -[assembly: LoadableClass(typeof(LightGbmRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(LightGbmRegressionModelParameters), null, typeof(SignatureLoadModel), "LightGBM Regression Executor", - LightGbmRegressionPredictor.LoaderSignature)] + LightGbmRegressionModelParameters.LoaderSignature)] namespace Microsoft.ML.Runtime.LightGBM { /// - public sealed class LightGbmRegressionPredictor : FastTreePredictionWrapper + public sealed class LightGbmRegressionModelParameters : TreeEnsembleModelParameters { - public const string LoaderSignature = "LightGBMRegressionExec"; - public const string RegistrationName = "LightGBMRegressionPredictor"; + internal const string LoaderSignature = "LightGBMRegressionExec"; + internal const string RegistrationName = "LightGBMRegressionPredictor"; private static VersionInfo GetVersionInfo() { @@ -43,7 +43,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010004, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(LightGbmRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(LightGbmRegressionModelParameters).Assembly.FullName); } protected override uint VerNumFeaturesSerialized => 0x00010002; @@ -51,12 +51,12 @@ private static VersionInfo GetVersionInfo() protected override uint VerCategoricalSplitSerialized => 0x00010005; public override PredictionKind PredictionKind => 
PredictionKind.Regression; - internal LightGbmRegressionPredictor(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + public LightGbmRegressionModelParameters(IHostEnvironment env, TreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private LightGbmRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private LightGbmRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -67,17 +67,17 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public static LightGbmRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static LightGbmRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new LightGbmRegressionPredictor(env, ctx); + return new LightGbmRegressionModelParameters(env, ctx); } } /// - public sealed class LightGbmRegressorTrainer : LightGbmTrainerBase, LightGbmRegressionPredictor> + public sealed class LightGbmRegressorTrainer : LightGbmTrainerBase, LightGbmRegressionModelParameters> { internal const string Summary = "LightGBM Regression"; internal const string LoadNameValue = "LightGBMRegression"; @@ -119,12 +119,12 @@ internal LightGbmRegressorTrainer(IHostEnvironment env, LightGbmArguments args) { } - private protected override LightGbmRegressionPredictor CreatePredictor() + private protected override LightGbmRegressionModelParameters CreatePredictor() { Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete"); var innerArgs = LightGbmInterfaceUtils.JoinParameters(Options); - return new LightGbmRegressionPredictor(Host, TrainedEnsemble, FeatureCount, innerArgs); + return new 
LightGbmRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs); } protected override void CheckDataValid(IChannel ch, RoleMappedData data) @@ -155,10 +155,10 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override RegressionPredictionTransformer MakeTransformer(LightGbmRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(LightGbmRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) + public RegressionPredictionTransformer Train(IDataView trainData, IDataView validationData = null) => TrainTransformer(trainData, validationData); } diff --git a/src/Microsoft.ML.LightGBM/LightGbmStatic.cs b/src/Microsoft.ML.LightGBM/LightGbmStatic.cs index 659e6da141..3cdad93257 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmStatic.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmStatic.cs @@ -49,7 +49,7 @@ public static Scalar LightGbm(this RegressionContext.RegressionTrainers c double? learningRate = null, int numBoostRound = LightGbmArguments.Defaults.NumBoostRound, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { CheckUserValues(label, features, weights, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings, onFit); @@ -144,7 +144,7 @@ public static Scalar LightGbm(this RankingContext.RankingTrainers c double? 
learningRate = null, int numBoostRound = LightGbmArguments.Defaults.NumBoostRound, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { CheckUserValues(label, features, weights, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings, onFit); Contracts.CheckValue(groupId, nameof(groupId)); diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs index 914db596a0..fee4a25d07 100644 --- a/src/Microsoft.ML.PCA/PcaTrainer.cs +++ b/src/Microsoft.ML.PCA/PcaTrainer.cs @@ -498,7 +498,7 @@ public static PcaPredictor Create(IHostEnvironment env, ModelLoadContext ctx) return new PcaPredictor(env, ctx); } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs index 538fb51c65..b5e1271c4c 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs @@ -344,7 +344,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) SaveSummary(writer, schema); } - public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInSourceCode.SaveAsCode(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); @@ -353,9 +353,12 @@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) LinearPredictorUtils.SaveAsCode(writer, in weights, Bias, schema); } - public abstract void SaveSummary(TextWriter writer, RoleMappedSchema schema); + [BestFriend] + private protected abstract void SaveSummary(TextWriter writer, RoleMappedSchema schema); + + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) => 
SaveSummary(writer, schema); - public virtual Row GetSummaryIRowOrNull(RoleMappedSchema schema) + private protected virtual Row GetSummaryIRowOrNull(RoleMappedSchema schema) { var names = default(VBuffer>); MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, Weight.Length, ref names); @@ -368,16 +371,22 @@ public virtual Row GetSummaryIRowOrNull(RoleMappedSchema schema) return MetadataUtils.MetadataAsRow(builder.GetMetadata()); } - public virtual Row GetStatsIRowOrNull(RoleMappedSchema schema) => null; + Row ICanGetSummaryAsIRow.GetSummaryIRowOrNull(RoleMappedSchema schema) => GetSummaryIRowOrNull(schema); + + private protected virtual Row GetStatsIRowOrNull(RoleMappedSchema schema) => null; + + Row ICanGetSummaryAsIRow.GetStatsIRowOrNull(RoleMappedSchema schema) => GetStatsIRowOrNull(schema); + + private protected abstract void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null); - public abstract void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null); + void ICanSaveInIniFormat.SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) => SaveAsIni(writer, schema, calibrator); public virtual void GetFeatureWeights(ref VBuffer weights) { Weight.CopyTo(ref weights); } - public ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { Contracts.Check(typeof(TSrc) == typeof(VBuffer)); Contracts.Check(typeof(TDstContributions) == typeof(VBuffer)); @@ -487,7 +496,7 @@ public IParameterMixer CombineParameters(IList> mo return new LinearBinaryPredictor(Host, in weights, bias); } - public override void SaveSummary(TextWriter writer, RoleMappedSchema schema) + private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(schema, nameof(schema)); @@ -500,7 +509,7 @@ public override void 
SaveSummary(TextWriter writer, RoleMappedSchema schema) } /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema schema) { Host.CheckValue(schema, nameof(schema)); @@ -511,7 +520,7 @@ public IList> GetSummaryInKeyValuePairs(RoleMappedS return results; } - public override Row GetStatsIRowOrNull(RoleMappedSchema schema) + private protected override Row GetStatsIRowOrNull(RoleMappedSchema schema) { if (_stats == null) return null; @@ -521,7 +530,7 @@ public override Row GetStatsIRowOrNull(RoleMappedSchema schema) return MetadataUtils.MetadataAsRow(meta); } - public override void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null) + private protected override void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); @@ -553,7 +562,7 @@ public override PredictionKind PredictionKind /// /// Output the INI model to a given writer /// - public override void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) + private protected override void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) { if (calibrator != null) throw Host.ExceptNotImpl("Saving calibrators is not implemented yet."); @@ -617,7 +626,7 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public override void SaveSummary(TextWriter writer, RoleMappedSchema schema) + private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); @@ -640,7 +649,7 @@ public IParameterMixer CombineParameters(IList> mo } /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema 
schema) { Host.CheckValue(schema, nameof(schema)); @@ -698,7 +707,7 @@ protected override Float Score(in VBuffer src) return MathUtils.ExpSlow(base.Score(in src)); } - public override void SaveSummary(TextWriter writer, RoleMappedSchema schema) + private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index 55f6a8b5e5..68fd9463a6 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -781,7 +781,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine(nameof(MulticlassLogisticRegression) + " bias and non-zero weights"); - foreach (var namedValues in GetSummaryInKeyValuePairs(schema)) + foreach (var namedValues in ((ICanGetSummaryInKeyValuePairs)this).GetSummaryInKeyValuePairs(schema)) { Host.Assert(namedValues.Value is float); writer.WriteLine("\t{0}\t{1}", namedValues.Key, (float)namedValues.Value); @@ -792,7 +792,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) } /// - public IList> GetSummaryInKeyValuePairs(RoleMappedSchema schema) + IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKeyValuePairs(RoleMappedSchema schema) { Host.CheckValueOrNull(schema); @@ -832,7 +832,7 @@ public IList> GetSummaryInKeyValuePairs(RoleMappedS /// /// Output the text model to a given writer /// - public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInSourceCode.SaveAsCode(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValueOrNull(schema); @@ -851,7 +851,7 
@@ public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) writer.WriteLine("output[{0}] = Math.Exp(scores[{0}] - softmax);", c); } - public void SaveSummary(TextWriter writer, RoleMappedSchema schema) + void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) { ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } @@ -985,12 +985,12 @@ public IDataView GetSummaryDataView(RoleMappedSchema schema) return bldr.GetDataView(); } - public Row GetSummaryIRowOrNull(RoleMappedSchema schema) + Row ICanGetSummaryAsIRow.GetSummaryIRowOrNull(RoleMappedSchema schema) { return null; } - public Row GetStatsIRowOrNull(RoleMappedSchema schema) + Row ICanGetSummaryAsIRow.GetStatsIRowOrNull(RoleMappedSchema schema) { if (_stats == null) return null; diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs index 36a05b7cc2..398fce9015 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs @@ -365,7 +365,7 @@ ValueMapper IValueMapper.GetMapper() return (ValueMapper)(Delegate)_impl.GetMapper(); } - public void SaveAsCode(TextWriter writer, RoleMappedSchema schema) + void ICanSaveInSourceCode.SaveAsCode(TextWriter writer, RoleMappedSchema schema) { Host.CheckValue(writer, nameof(writer)); Host.CheckValue(schema, nameof(schema)); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs index bae20cd10f..6cf3a8241b 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Simple/SimpleTrainers.cs @@ -111,7 +111,7 @@ public sealed class RandomPredictor : IValueMapperDist, ICanSaveModel { - public const string LoaderSignature = "RandomPredictor"; + internal const string LoaderSignature = "RandomPredictor"; private static 
VersionInfo GetVersionInfo() { return new VersionInfo( @@ -129,9 +129,11 @@ private static VersionInfo GetVersionInfo() private readonly Random _random; public override PredictionKind PredictionKind => PredictionKind.BinaryClassification; - public ColumnType InputType { get; } - public ColumnType OutputType => NumberType.Float; - public ColumnType DistType => NumberType.Float; + + private readonly ColumnType _inputType; + ColumnType IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => NumberType.Float; + ColumnType IValueMapperDist.DistType => NumberType.Float; public RandomPredictor(IHostEnvironment env, int seed) : base(env, LoaderSignature) @@ -141,7 +143,7 @@ public RandomPredictor(IHostEnvironment env, int seed) _instanceLock = new object(); _random = RandomUtils.Create(_seed); - InputType = new VectorType(NumberType.Float); + _inputType = new VectorType(NumberType.Float); } /// @@ -158,10 +160,10 @@ private RandomPredictor(IHostEnvironment env, ModelLoadContext ctx) _instanceLock = new object(); _random = RandomUtils.Create(_seed); - InputType = new VectorType(NumberType.Float); + _inputType = new VectorType(NumberType.Float); } - public static RandomPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static RandomPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -184,7 +186,7 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.Writer.Write(_seed); } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Contracts.Check(typeof(TIn) == typeof(VBuffer)); Contracts.Check(typeof(TOut) == typeof(float)); @@ -193,7 +195,7 @@ public ValueMapper GetMapper() return (ValueMapper)(Delegate)del; } - public ValueMapper GetMapper() + ValueMapper IValueMapperDist.GetMapper() { Contracts.Check(typeof(TIn) == typeof(VBuffer)); Contracts.Check(typeof(TOut) == typeof(float)); @@ -371,7 +373,7 @@ public 
PriorPredictor(IHostEnvironment env, float prob) _prob = prob; _raw = 2 * _prob - 1; // This could be other functions -- logodds for instance - InputType = new VectorType(NumberType.Float); + _inputType = new VectorType(NumberType.Float); } private PriorPredictor(IHostEnvironment env, ModelLoadContext ctx) @@ -385,7 +387,7 @@ private PriorPredictor(IHostEnvironment env, ModelLoadContext ctx) _raw = 2 * _prob - 1; - InputType = new VectorType(NumberType.Float); + _inputType = new VectorType(NumberType.Float); } public static PriorPredictor Create(IHostEnvironment env, ModelLoadContext ctx) @@ -410,11 +412,13 @@ private protected override void SaveCore(ModelSaveContext ctx) public override PredictionKind PredictionKind { get { return PredictionKind.BinaryClassification; } } - public ColumnType InputType { get; } - public ColumnType OutputType => NumberType.Float; - public ColumnType DistType => NumberType.Float; - public ValueMapper GetMapper() + private readonly ColumnType _inputType; + ColumnType IValueMapper.InputType => _inputType; + ColumnType IValueMapper.OutputType => NumberType.Float; + ColumnType IValueMapperDist.DistType => NumberType.Float; + + ValueMapper IValueMapper.GetMapper() { Contracts.Check(typeof(TIn) == typeof(VBuffer)); Contracts.Check(typeof(TOut) == typeof(float)); @@ -423,7 +427,7 @@ public ValueMapper GetMapper() return (ValueMapper)(Delegate)del; } - public ValueMapper GetMapper() + ValueMapper IValueMapperDist.GetMapper() { Contracts.Check(typeof(TIn) == typeof(VBuffer)); Contracts.Check(typeof(TOut) == typeof(float)); diff --git a/src/Microsoft.ML.Sweeper/Algorithms/SmacSweeper.cs b/src/Microsoft.ML.Sweeper/Algorithms/SmacSweeper.cs index cf4de98e66..9c7bf0199b 100644 --- a/src/Microsoft.ML.Sweeper/Algorithms/SmacSweeper.cs +++ b/src/Microsoft.ML.Sweeper/Algorithms/SmacSweeper.cs @@ -106,13 +106,13 @@ public ParameterSet[] ProposeSweeps(int maxSweeps, IEnumerable previ } // Fit Random Forest Model on previous run data. 
- FastForestRegressionPredictor forestPredictor = FitModel(viableRuns); + FastForestRegressionModelParameters forestPredictor = FitModel(viableRuns); // Using acquisition function and current best, get candidate configuration(s). return GenerateCandidateConfigurations(numOfCandidates, viableRuns, forestPredictor); } - private FastForestRegressionPredictor FitModel(IEnumerable previousRuns) + private FastForestRegressionModelParameters FitModel(IEnumerable previousRuns) { Single[] targets = new Single[previousRuns.Count()]; Single[][] features = new Single[previousRuns.Count()][]; @@ -160,7 +160,7 @@ private FastForestRegressionPredictor FitModel(IEnumerable previousR /// History of previously evaluated points, with their emprical performance values. /// Trained random forest ensemble. Used in evaluating the candidates. /// An array of ParamaterSets which are the candidate configurations to sweep. - private ParameterSet[] GenerateCandidateConfigurations(int numOfCandidates, IEnumerable previousRuns, FastForestRegressionPredictor forest) + private ParameterSet[] GenerateCandidateConfigurations(int numOfCandidates, IEnumerable previousRuns, FastForestRegressionModelParameters forest) { // Get k best previous runs ParameterSets. ParameterSet[] bestKParamSets = GetKBestConfigurations(previousRuns, forest, _args.LocalSearchParentCount); @@ -188,7 +188,7 @@ private ParameterSet[] GenerateCandidateConfigurations(int numOfCandidates, IEnu /// Number of candidate configurations returned by the method (top K). /// Historical run results. /// Array of parameter sets, which will then be evaluated. 
- private ParameterSet[] GreedyPlusRandomSearch(ParameterSet[] parents, FastForestRegressionPredictor forest, int numOfCandidates, IEnumerable previousRuns) + private ParameterSet[] GreedyPlusRandomSearch(ParameterSet[] parents, FastForestRegressionModelParameters forest, int numOfCandidates, IEnumerable previousRuns) { // REVIEW: The IsMetricMaximizing flag affects the comparator, so that // performing Max() should get the best, regardless of if it is maximizing or @@ -231,7 +231,7 @@ private ParameterSet[] GreedyPlusRandomSearch(ParameterSet[] parents, FastForest /// Best performance seen thus far. /// Threshold for when to stop the local search. /// - private Tuple LocalSearch(ParameterSet parent, FastForestRegressionPredictor forest, double bestVal, double epsilon) + private Tuple LocalSearch(ParameterSet parent, FastForestRegressionModelParameters forest, double bestVal, double epsilon) { try { @@ -332,7 +332,7 @@ private ParameterSet[] GetOneMutationNeighborhood(ParameterSet parent) /// Trained forest predictor, used for filtering configs. /// Parameter configurations. /// 2D array where rows correspond to configurations, and columns to the predicted leaf values. 
- private double[][] GetForestRegressionLeafValues(FastForestRegressionPredictor forest, ParameterSet[] configs) + private double[][] GetForestRegressionLeafValues(FastForestRegressionModelParameters forest, ParameterSet[] configs) { List datasetLeafValues = new List(); var e = forest.TrainedEnsemble; @@ -369,14 +369,14 @@ private double[][] ComputeForestStats(double[][] leafValues) return meansAndStdDevs; } - private double[] EvaluateConfigurationsByEI(FastForestRegressionPredictor forest, double bestVal, ParameterSet[] configs) + private double[] EvaluateConfigurationsByEI(FastForestRegressionModelParameters forest, double bestVal, ParameterSet[] configs) { double[][] leafPredictions = GetForestRegressionLeafValues(forest, configs); double[][] forestStatistics = ComputeForestStats(leafPredictions); return ComputeEIs(bestVal, forestStatistics); } - private ParameterSet[] GetKBestConfigurations(IEnumerable previousRuns, FastForestRegressionPredictor forest, int k = 10) + private ParameterSet[] GetKBestConfigurations(IEnumerable previousRuns, FastForestRegressionModelParameters forest, int k = 10) { // NOTE: Should we change this to rank according to EI (using forest), instead of observed performance? 
diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index 107c83eb7d..a32ded52bf 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -347,7 +347,7 @@ public void EntryPointCatalogCheckDuplicateParams() private (IEnumerable epListContents, JObject manifest) BuildManifests() { - Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryPredictor).Assembly); + Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryModelParameters).Assembly); Env.ComponentCatalog.RegisterAssembly(typeof(TensorFlowTransform).Assembly); Env.ComponentCatalog.RegisterAssembly(typeof(ImageLoaderTransform).Assembly); Env.ComponentCatalog.RegisterAssembly(typeof(SymSgdClassificationTrainer).Assembly); @@ -1952,14 +1952,14 @@ public void EntryPointEvaluateRanking() [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // LightGBM is 64-bit only public void EntryPointLightGbmBinary() { - Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryPredictor).Assembly); + Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryModelParameters).Assembly); TestEntryPointRoutine("breast-cancer.txt", "Trainers.LightGbmBinaryClassifier"); } [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // LightGBM is 64-bit only public void EntryPointLightGbmMultiClass() { - Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryPredictor).Assembly); + Env.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryModelParameters).Assembly); TestEntryPointRoutine(GetDataPath(@"iris.txt"), "Trainers.LightGbmClassifier"); } diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 94c7d98155..315c1d54be 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -43,7 
+43,7 @@ protected override void InitializeEnvironment(IHostEnvironment environment) { base.InitializeEnvironment(environment); - environment.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryPredictor).Assembly); + environment.ComponentCatalog.RegisterAssembly(typeof(LightGbmBinaryModelParameters).Assembly); environment.ComponentCatalog.RegisterAssembly(typeof(SymSgdClassificationTrainer).Assembly); } diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index ee5e2626cf..a5b4eb954f 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -423,7 +423,7 @@ public void FastTreeRegression() c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)), separator: ';', hasHeader: true); - FastTreeRegressionPredictor pred = null; + FastTreeRegressionModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (r.label, score: ctx.Trainers.FastTree(r.label, r.features, @@ -505,7 +505,7 @@ public void LightGbmRegression() c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)), separator: ';', hasHeader: true); - LightGbmRegressionPredictor pred = null; + LightGbmRegressionModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (r.label, score: ctx.Trainers.LightGbm(r.label, r.features, @@ -771,7 +771,7 @@ public void FastTreeRanking() c => (label: c.LoadFloat(0), features: c.LoadFloat(9, 14), groupId: c.LoadText(1)), separator: '\t', hasHeader: true); - FastTreeRankingPredictor pred = null; + FastTreeRankingModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (r.label, r.features, groupId: r.groupId.ToKey())) @@ -812,7 +812,7 @@ public void LightGBMRanking() c => (label: c.LoadFloat(0), features: c.LoadFloat(9, 14), groupId: c.LoadText(1)), separator: '\t', hasHeader: true); - LightGbmRankingPredictor pred = null; + LightGbmRankingModelParameters pred = null; var est = 
reader.MakeNewEstimator() .Append(r => (r.label, r.features, groupId: r.groupId.ToKey())) diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index ccd041f7a5..a95bca433a 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -22,7 +22,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment env.ComponentCatalog.RegisterAssembly(typeof(TextLoader).Assembly); // ML.Data env.ComponentCatalog.RegisterAssembly(typeof(LinearPredictor).Assembly); // ML.StandardLearners env.ComponentCatalog.RegisterAssembly(typeof(OneHotEncodingTransformer).Assembly); // ML.Transforms - env.ComponentCatalog.RegisterAssembly(typeof(FastTreeBinaryPredictor).Assembly); // ML.FastTree + env.ComponentCatalog.RegisterAssembly(typeof(FastTreeBinaryModelParameters).Assembly); // ML.FastTree env.ComponentCatalog.RegisterAssembly(typeof(EnsemblePredictor).Assembly); // ML.Ensemble env.ComponentCatalog.RegisterAssembly(typeof(KMeansModelParameters).Assembly); // ML.KMeansClustering env.ComponentCatalog.RegisterAssembly(typeof(PcaPredictor).Assembly); // ML.PCA diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs index ead36a22a5..c027accb99 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs @@ -8,6 +8,7 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Runtime.Internal.Calibration; +using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Transforms.Text; using System.Linq; using Xunit; @@ -71,7 +72,7 @@ public void TrainAndPredictSentimentModelWithDirectionInstantiationTest() 
Assert.True(predictions.ElementAt(1).Sentiment); // Get feature importance based on feature gain during training - var summary = ((FeatureWeightsCalibratedPredictor)pred).GetSummaryInKeyValuePairs(trainRoles.Schema); + var summary = ((ICanGetSummaryInKeyValuePairs)pred).GetSummaryInKeyValuePairs(trainRoles.Schema); Assert.Equal(1.0, (double)summary[0].Value, 1); } @@ -142,7 +143,7 @@ public void TrainAndPredictSentimentModelWithDirectionInstantiationTestWithWordE Assert.True(predictions.ElementAt(1).Sentiment); // Get feature importance based on feature gain during training - var summary = ((FeatureWeightsCalibratedPredictor)pred).GetSummaryInKeyValuePairs(trainRoles.Schema); + var summary = ((ICanGetSummaryInKeyValuePairs)pred).GetSummaryInKeyValuePairs(trainRoles.Schema); Assert.Equal(1.0, (double)summary[0].Value, 1); } From 342e13f8f32279493b3b3e0642a46b2e2690141c Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Tue, 11 Dec 2018 13:26:30 -0800 Subject: [PATCH 041/100] Remove some ISchema (#1861) * Remove ISchema in many simple cases (not finished yet) * Some more easy cases resolved * One more easy round --- src/Microsoft.ML.Core/Data/IEstimator.cs | 4 ++-- .../Data/ISchemaBindableMapper.cs | 2 +- src/Microsoft.ML.Core/Data/RoleMappedSchema.cs | 16 ++++++++-------- src/Microsoft.ML.Core/Data/Schema.cs | 2 +- .../Commands/EvaluateCommand.cs | 4 ++-- .../Commands/ShowSchemaCommand.cs | 17 +++++++++-------- src/Microsoft.ML.Data/Commands/TestCommand.cs | 2 +- src/Microsoft.ML.Data/Commands/TrainCommand.cs | 4 ++-- .../Commands/TrainTestCommand.cs | 2 +- src/Microsoft.ML.Data/Data/DataViewUtils.cs | 6 +++--- .../DataLoadSave/Binary/BinarySaver.cs | 11 ++++++----- .../DataLoadSave/PartitionedFileLoader.cs | 4 ++-- .../DataLoadSave/Text/TextSaver.cs | 3 ++- .../DataLoadSave/Transpose/TransposeLoader.cs | 2 +- .../DataView/AppendRowsDataView.cs | 2 +- .../DataView/CompositeSchema.cs | 4 ++-- .../DataView/RowToRowMapperTransform.cs | 8 ++++---- 
src/Microsoft.ML.Data/DataView/Transposer.cs | 6 +++--- .../Dirty/ChooseColumnsByIndexTransform.cs | 6 +++--- src/Microsoft.ML.Data/EntryPoints/InputBase.cs | 5 +++-- .../EntryPoints/TransformModel.cs | 2 +- .../Evaluators/BinaryClassifierEvaluator.cs | 10 +++++----- .../Evaluators/ClusteringEvaluator.cs | 10 +++++----- .../Evaluators/EvaluatorBase.cs | 4 ++-- .../Evaluators/EvaluatorUtils.cs | 10 +++++----- .../Evaluators/MamlEvaluator.cs | 2 +- .../Evaluators/MultiClassClassifierEvaluator.cs | 6 +++--- .../MultiOutputRegressionEvaluator.cs | 4 ++-- .../Evaluators/QuantileRegressionEvaluator.cs | 8 ++++---- .../Evaluators/RankerEvaluator.cs | 2 +- .../Evaluators/RegressionEvaluator.cs | 8 ++++---- .../Model/Pfa/BoundPfaContext.cs | 4 ++-- .../FeatureContributionCalculationTransform.cs | 10 +++++----- .../Scorers/MultiClassClassifierScorer.cs | 6 +++--- .../Transforms/ColumnBindingsBase.cs | 8 ++++---- .../ColumnConcatenatingTransformer.cs | 4 ++-- .../Transforms/ColumnCopying.cs | 4 ++-- .../Transforms/ColumnSelecting.cs | 2 +- .../Transforms/DropSlotsTransform.cs | 4 ++-- .../Transforms/GenerateNumberTransform.cs | 6 +++--- src/Microsoft.ML.Data/Transforms/Hashing.cs | 8 ++++---- src/Microsoft.ML.Data/Transforms/KeyToValue.cs | 6 +++--- src/Microsoft.ML.Data/Transforms/KeyToVector.cs | 8 ++++---- .../Transforms/MetadataDispatcher.cs | 17 +++++++++-------- .../Transforms/NormalizeColumn.cs | 3 ++- src/Microsoft.ML.Data/Transforms/Normalizer.cs | 6 +++--- .../Transforms/OneToOneTransformerBase.cs | 4 ++-- .../Transforms/RowShufflingTransformer.cs | 2 +- .../Transforms/TransformBase.cs | 6 +++--- .../Transforms/TypeConverting.cs | 4 ++-- .../Transforms/ValueToKeyMappingTransformer.cs | 6 +++--- .../EntryPoints/CreateEnsemble.cs | 3 ++- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 2 +- src/Microsoft.ML.HalLearners/VectorWhitening.cs | 4 ++-- .../ImageGrayscaleTransform.cs | 4 ++-- .../ImageLoaderTransform.cs | 6 +++--- .../ImagePixelExtractorTransform.cs | 6 
+++--- .../ImageResizerTransform.cs | 4 ++-- .../Runtime/EntryPoints/OneVersusAllMacro.cs | 2 +- src/Microsoft.ML.OnnxTransform/OnnxTransform.cs | 4 ++-- src/Microsoft.ML.PCA/PcaTransform.cs | 6 +++--- .../MatrixFactorizationPredictor.cs | 2 +- .../FieldAwareFactorizationMachinePredictor.cs | 2 +- .../TensorflowTransform.cs | 4 ++-- .../IidChangePointDetector.cs | 3 ++- src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs | 3 ++- .../SequentialAnomalyDetectionTransformBase.cs | 6 +++--- .../SequentialTransformerBase.cs | 6 +++--- .../SsaChangePointDetector.cs | 3 ++- src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs | 3 ++- src/Microsoft.ML.Transforms/GcnTransform.cs | 6 +++--- src/Microsoft.ML.Transforms/GroupTransform.cs | 10 +++++----- .../KeyToVectorMapping.cs | 8 ++++---- .../LearnerFeatureSelection.cs | 2 +- .../MissingValueDroppingTransformer.cs | 6 +++--- .../MissingValueIndicatorTransformer.cs | 2 +- .../MissingValueReplacing.cs | 8 ++++---- .../OptionalColumnTransform.cs | 2 +- .../RandomFourierFeaturizing.cs | 6 +++--- .../Text/LdaTransform.cs | 4 ++-- .../Text/NgramHashingTransformer.cs | 4 ++-- .../Text/NgramTransform.cs | 6 +++--- .../Text/StopWordsRemovingTransformer.cs | 10 +++++----- .../Text/TextFeaturizingEstimator.cs | 2 +- .../Text/TextNormalizing.cs | 6 +++--- .../Text/TokenizingByCharacters.cs | 6 +++--- .../Text/WordBagTransform.cs | 3 ++- .../Text/WordEmbeddingsExtractor.cs | 6 +++--- .../Text/WordTokenizing.cs | 6 +++--- src/Microsoft.ML.Transforms/UngroupTransform.cs | 2 +- .../StaticPipeTests.cs | 4 ++-- .../DataPipe/TestDataPipeBase.cs | 8 ++++---- 92 files changed, 248 insertions(+), 236 deletions(-) diff --git a/src/Microsoft.ML.Core/Data/IEstimator.cs b/src/Microsoft.ML.Core/Data/IEstimator.cs index 5994a671bd..30a0ae58f3 100644 --- a/src/Microsoft.ML.Core/Data/IEstimator.cs +++ b/src/Microsoft.ML.Core/Data/IEstimator.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Core.Data { /// /// A set of 'requirements' to the incoming schema, as well as a 
set of 'promises' of the outgoing schema. - /// This is more relaxed than the proper , since it's only a subset of the columns, + /// This is more relaxed than the proper , since it's only a subset of the columns, /// and also since it doesn't specify exact 's for vectors and keys. /// public sealed class SchemaShape : IReadOnlyList @@ -287,7 +287,7 @@ public interface ITransformer /// /// The estimator (in Spark terminology) is an 'untrained transformer'. It needs to 'fit' on the data to manufacture /// a transformer. - /// It also provides the 'schema propagation' like transformers do, but over instead of . + /// It also provides the 'schema propagation' like transformers do, but over instead of . /// public interface IEstimator where TTransformer : ITransformer diff --git a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs index ab6a0223a1..10fe337b0a 100644 --- a/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs +++ b/src/Microsoft.ML.Core/Data/ISchemaBindableMapper.cs @@ -87,7 +87,7 @@ public interface IRowToRowMapper /// /// Given a predicate specifying which columns are needed, return a predicate indicating which input columns are - /// needed. The domain of the function is defined over the indices of the columns of + /// needed. The domain of the function is defined over the indices of the columns of /// for . /// Func GetDependencies(Func predicate); diff --git a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs index d3c571c62a..0c167372ae 100644 --- a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs +++ b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.Runtime.Data /// /// This contains information about a column in an . It is essentially a convenience cache /// containing the name, column index, and column type for the column. 
The intended usage is that users of - /// will have a convenient method of getting the index and type without having to separately query it through the , + /// will have a convenient method of getting the index and type without having to separately query it through the , /// since practically the first thing a consumer of a will want to do once they get a mappping is get /// the type and index of the corresponding column. /// @@ -32,7 +32,7 @@ private ColumnInfo(string name, int index, ColumnType type) /// Create a ColumnInfo for the column with the given name in the given schema. Throws if the name /// doesn't map to a column. /// - public static ColumnInfo CreateFromName(ISchema schema, string name, string descName) + public static ColumnInfo CreateFromName(Schema schema, string name, string descName) { if (!TryCreateFromName(schema, name, out var colInfo)) throw Contracts.ExceptParam(nameof(name), $"{descName} column '{name}' not found"); @@ -44,7 +44,7 @@ public static ColumnInfo CreateFromName(ISchema schema, string name, string desc /// Tries to create a ColumnInfo for the column with the given name in the given schema. Returns /// false if the name doesn't map to a column. /// - public static bool TryCreateFromName(ISchema schema, string name, out ColumnInfo colInfo) + public static bool TryCreateFromName(Schema schema, string name, out ColumnInfo colInfo) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckNonEmpty(name, nameof(name)); @@ -62,7 +62,7 @@ public static bool TryCreateFromName(ISchema schema, string name, out ColumnInfo /// of the column might actually map to a different column, so this should be used with care /// and rarely. ///

- public static ColumnInfo CreateFromIndex(ISchema schema, int index) + public static ColumnInfo CreateFromIndex(Schema schema, int index) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckParam(0 <= index && index < schema.ColumnCount, nameof(index)); @@ -72,7 +72,7 @@ public static ColumnInfo CreateFromIndex(ISchema schema, int index) } /// - /// Encapsulates an plus column role mapping information. The purpose of role mappings is to + /// Encapsulates an plus column role mapping information. The purpose of role mappings is to /// provide information on what the intended usage is for. That is: while a given data view may have a column named /// "Features", by itself that is insufficient: the trainer must be fed a role mapping that says that the role /// mapping for features is filled by that "Features" column. This allows things like columns not named "Features" @@ -88,7 +88,7 @@ public static ColumnInfo CreateFromIndex(ISchema schema, int index) /// in this schema. /// /// - /// Note that instances of this class are, like instances of , immutable. + /// Note that instances of this class are, like instances of , immutable. /// /// It is often the case that one wishes to bundle the actual data with the role mappings, not just the schema. For /// that case, please use the class. @@ -184,7 +184,7 @@ public static KeyValuePair CreatePair(ColumnRole role, strin => new KeyValuePair(role, name); /// - /// The source . + /// The source . 
/// public Schema Schema { get; } @@ -274,7 +274,7 @@ private static void Add(Dictionary> map, ColumnRole rol list.Add(info); } - private static Dictionary> MapFromNames(ISchema schema, IEnumerable> roles, bool opt = false) + private static Dictionary> MapFromNames(Schema schema, IEnumerable> roles, bool opt = false) { Contracts.AssertValue(schema); Contracts.AssertValue(roles); diff --git a/src/Microsoft.ML.Core/Data/Schema.cs b/src/Microsoft.ML.Core/Data/Schema.cs index 90c87fc54a..1d84e2ebad 100644 --- a/src/Microsoft.ML.Core/Data/Schema.cs +++ b/src/Microsoft.ML.Core/Data/Schema.cs @@ -328,7 +328,7 @@ public ColumnType GetMetadataTypeOrNull(string kind, int col) var meta = this[col].Metadata; if (meta == null) return null; - if (((ISchema)meta.Schema).TryGetColumnIndex(kind, out int metaCol)) + if (meta.Schema.TryGetColumnIndex(kind, out int metaCol)) return meta.Schema[metaCol].Type; return null; } diff --git a/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs b/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs index 7ed4144262..09a95ebec2 100644 --- a/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs +++ b/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs @@ -143,7 +143,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV using (var ch = env.Register("EvaluateTransform").Start("Create Transform")) { ch.Trace("Binding columns"); - ISchema schema = input.Schema; + var schema = input.Schema; string label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.LabelColumn), args.LabelColumn, DefaultColumnNames.Label); string group = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.GroupColumn), @@ -218,7 +218,7 @@ private void RunCore(IChannel ch) (env, source) => new IO.BinaryLoader(env, new IO.BinaryLoader.Arguments(), source)); ch.Trace("Binding columns"); - ISchema schema = view.Schema; + var schema = view.Schema; string label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, 
nameof(Arguments.LabelColumn), Args.LabelColumn, DefaultColumnNames.Label); string group = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.GroupColumn), diff --git a/src/Microsoft.ML.Data/Commands/ShowSchemaCommand.cs b/src/Microsoft.ML.Data/Commands/ShowSchemaCommand.cs index 4082eab1df..5df37524d8 100644 --- a/src/Microsoft.ML.Data/Commands/ShowSchemaCommand.cs +++ b/src/Microsoft.ML.Data/Commands/ShowSchemaCommand.cs @@ -9,6 +9,7 @@ using System.Linq; using System.Reflection; using System.Text; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Command; using Microsoft.ML.Runtime.CommandLine; @@ -113,7 +114,7 @@ private static IEnumerable GetViewChainReversed(IDataView data) } } - private static void PrintSchema(TextWriter writer, Arguments args, ISchema schema, ITransposeSchema tschema) + private static void PrintSchema(TextWriter writer, Arguments args, Schema schema, ITransposeSchema tschema) { Contracts.AssertValue(writer); Contracts.AssertValue(args); @@ -179,7 +180,7 @@ private static void PrintSchema(TextWriter writer, Arguments args, ISchema schem } } - private static void ShowMetadata(IndentedTextWriter itw, ISchema schema, int col, bool showVals) + private static void ShowMetadata(IndentedTextWriter itw, Schema schema, int col, bool showVals) { Contracts.AssertValue(itw); Contracts.AssertValue(schema); @@ -205,7 +206,7 @@ private static void ShowMetadata(IndentedTextWriter itw, ISchema schema, int col } } - private static void ShowMetadataValue(IndentedTextWriter itw, ISchema schema, int col, string kind, ColumnType type) + private static void ShowMetadataValue(IndentedTextWriter itw, Schema schema, int col, string kind, ColumnType type) { Contracts.AssertValue(itw); Contracts.AssertValue(schema); @@ -220,12 +221,12 @@ private static void ShowMetadataValue(IndentedTextWriter itw, ISchema schema, in return; } - Action del = ShowMetadataValue; + Action del = ShowMetadataValue; var meth = 
del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(type.RawType); meth.Invoke(null, new object[] { itw, schema, col, kind, type }); } - private static void ShowMetadataValue(IndentedTextWriter itw, ISchema schema, int col, string kind, ColumnType type) + private static void ShowMetadataValue(IndentedTextWriter itw, Schema schema, int col, string kind, ColumnType type) { Contracts.AssertValue(itw); Contracts.AssertValue(schema); @@ -245,7 +246,7 @@ private static void ShowMetadataValue(IndentedTextWriter itw, ISchema schema, itw.Write(": '{0}'", sb); } - private static void ShowMetadataValueVec(IndentedTextWriter itw, ISchema schema, int col, string kind, ColumnType type) + private static void ShowMetadataValueVec(IndentedTextWriter itw, Schema schema, int col, string kind, ColumnType type) { Contracts.AssertValue(itw); Contracts.AssertValue(schema); @@ -260,12 +261,12 @@ private static void ShowMetadataValueVec(IndentedTextWriter itw, ISchema schema, return; } - Action del = ShowMetadataValueVec; + Action del = ShowMetadataValueVec; var meth = del.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(type.ItemType.RawType); meth.Invoke(null, new object[] { itw, schema, col, kind, type }); } - private static void ShowMetadataValueVec(IndentedTextWriter itw, ISchema schema, int col, string kind, ColumnType type) + private static void ShowMetadataValueVec(IndentedTextWriter itw, Schema schema, int col, string kind, ColumnType type) { Contracts.AssertValue(itw); Contracts.AssertValue(schema); diff --git a/src/Microsoft.ML.Data/Commands/TestCommand.cs b/src/Microsoft.ML.Data/Commands/TestCommand.cs index 407f7e713d..eb7d689b61 100644 --- a/src/Microsoft.ML.Data/Commands/TestCommand.cs +++ b/src/Microsoft.ML.Data/Commands/TestCommand.cs @@ -94,7 +94,7 @@ private void RunCore(IChannel ch) ch.AssertValue(loader); ch.Trace("Binding columns"); - ISchema schema = loader.Schema; + var schema = loader.Schema; string label = 
TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Args.LabelColumn), Args.LabelColumn, DefaultColumnNames.Label); string features = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Args.FeatureColumn), diff --git a/src/Microsoft.ML.Data/Commands/TrainCommand.cs b/src/Microsoft.ML.Data/Commands/TrainCommand.cs index ff709de143..fb267c9b70 100644 --- a/src/Microsoft.ML.Data/Commands/TrainCommand.cs +++ b/src/Microsoft.ML.Data/Commands/TrainCommand.cs @@ -146,7 +146,7 @@ private void RunCore(IChannel ch, string cmd) ch.Trace("Constructing data pipeline"); IDataView view = CreateLoader(); - ISchema schema = view.Schema; + var schema = view.Schema; var label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.LabelColumn), _labelColumn, DefaultColumnNames.Label); var feature = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.FeatureColumn), _featureColumn, DefaultColumnNames.Features); var group = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.GroupColumn), _groupColumn, DefaultColumnNames.GroupId); @@ -221,7 +221,7 @@ public static void CheckTrainer(IExceptionContext ectx, IComponentFactory - public static string MatchNameOrDefaultOrNull(IExceptionContext ectx, ISchema schema, string argName, string userName, string defaultName) + public static string MatchNameOrDefaultOrNull(IExceptionContext ectx, Schema schema, string argName, string userName, string defaultName) { Contracts.CheckValueOrNull(ectx); ectx.CheckValue(schema, nameof(schema)); diff --git a/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs b/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs index 49f25375f5..466c759f10 100644 --- a/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs +++ b/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs @@ -130,7 +130,7 @@ private void RunCore(IChannel ch, string cmd) ch.Trace("Constructing the training pipeline"); IDataView trainPipe = CreateLoader(); - ISchema schema = trainPipe.Schema; + var schema = 
trainPipe.Schema; string label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.LabelColumn), Args.LabelColumn, DefaultColumnNames.Label); string features = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.FeatureColumn), diff --git a/src/Microsoft.ML.Data/Data/DataViewUtils.cs b/src/Microsoft.ML.Data/Data/DataViewUtils.cs index 0b0cef5343..0d5d29fb43 100644 --- a/src/Microsoft.ML.Data/Data/DataViewUtils.cs +++ b/src/Microsoft.ML.Data/Data/DataViewUtils.cs @@ -22,7 +22,7 @@ public static class DataViewUtils /// Use tag to independently create multiple temporary, unique column /// names for a single transform. ///
- public static string GetTempColumnName(this ISchema schema, string tag = null) + public static string GetTempColumnName(this Schema schema, string tag = null) { Contracts.CheckValue(schema, nameof(schema)); @@ -46,7 +46,7 @@ public static string GetTempColumnName(this ISchema schema, string tag = null) /// Use tag to independently create multiple temporary, unique column /// names for a single transform. ///
- public static string[] GetTempColumnNames(this ISchema schema, int n, string tag = null) + public static string[] GetTempColumnNames(this Schema schema, int n, string tag = null) { Contracts.CheckValue(schema, nameof(schema)); Contracts.Check(n > 0, "n"); @@ -175,7 +175,7 @@ public static RowCursor[] CreateSplitCursors(out IRowCursorConsolidator consolid /// Return whether all the active columns, as determined by the predicate, are /// cachable - either primitive types or vector types. /// - public static bool AllCachable(ISchema schema, Func predicate) + public static bool AllCachable(Schema schema, Func predicate) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(predicate, nameof(predicate)); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs index d8c060c031..f442fbcbe4 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs @@ -12,6 +12,7 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -254,7 +255,7 @@ private void CompressionWorker(BlockingCollection toCompress, BlockingCol /// The channel to which we write any diagnostic information /// The offset of the metadata table of contents, or 0 if there was /// no metadata - private long WriteMetadata(BinaryWriter writer, ISchema schema, int col, IChannel ch) + private long WriteMetadata(BinaryWriter writer, Schema schema, int col, IChannel ch) { _host.AssertValue(writer); _host.AssertValue(schema); @@ -341,9 +342,9 @@ private long WriteMetadata(BinaryWriter writer, ISchema schema, int col, IChanne return offsets[metadataInfos.Count]; } - private delegate IValueCodec WriteMetadataCoreDelegate(Stream stream, ISchema schema, int col, string kind, ColumnType type, out CompressionKind 
compression); + private delegate IValueCodec WriteMetadataCoreDelegate(Stream stream, Schema schema, int col, string kind, ColumnType type, out CompressionKind compression); - private IValueCodec WriteMetadataCore(Stream stream, ISchema schema, int col, string kind, ColumnType type, out CompressionKind compressionKind) + private IValueCodec WriteMetadataCore(Stream stream, Schema schema, int col, string kind, ColumnType type, out CompressionKind compressionKind) { _host.Assert(typeof(T) == type.RawType); IValueCodec generalCodec; @@ -390,7 +391,7 @@ private IValueCodec WriteMetadataCore(Stream stream, ISchema schema, int col, } private void WriteWorker(Stream stream, BlockingCollection toWrite, ColumnCodec[] activeColumns, - ISchema sourceSchema, int rowsPerBlock, IChannelProvider cp, ExceptionMarshaller exMarshaller) + Schema sourceSchema, int rowsPerBlock, IChannelProvider cp, ExceptionMarshaller exMarshaller) { _host.AssertValue(exMarshaller); try @@ -708,7 +709,7 @@ public void SaveData(Stream stream, IDataView data, params int[] colIndices) } } - private ColumnCodec[] GetActiveColumns(ISchema schema, int[] colIndices) + private ColumnCodec[] GetActiveColumns(Schema schema, int[] colIndices) { _host.AssertValue(schema); _host.AssertValueOrNull(colIndices); diff --git a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs index 5ad393f2d2..e4eedd5f00 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs @@ -329,7 +329,7 @@ private Schema CreateSchema(IExceptionContext ectx, Column[] cols, IDataLoader s } else { - var schemas = new ISchema[] + var schemas = new Schema[] { subSchema, colSchema @@ -636,7 +636,7 @@ private IEnumerable CreateFileOrder(Random rand) } } - private bool SchemasMatch(ISchema schema1, ISchema schema2) + private bool SchemasMatch(Schema schema1, Schema schema2) { if (schema1.ColumnCount != 
schema2.ColumnCount) { diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs index e55db4df55..03fd7dfa70 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs @@ -12,6 +12,7 @@ using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Internal.Internallearn; +using Microsoft.ML.Data; [assembly: LoadableClass(TextSaver.Summary, typeof(TextSaver), typeof(TextSaver.Arguments), typeof(SignatureDataSaver), "Text Saver", "TextSaver", "Text", DocName = "saver/TextSaver.md")] @@ -449,7 +450,7 @@ private void WriteSchemaAsComment(TextWriter writer, string str) writer.WriteLine("#@ }"); } - private string CreateLoaderArguments(ISchema schema, ValueWriter[] pipes, bool hasHeader, IChannel ch) + private string CreateLoaderArguments(Schema schema, ValueWriter[] pipes, bool hasHeader, IChannel ch) { StringBuilder sb = new StringBuilder(); if (hasHeader) diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index 0b42c26ce0..fe63426c61 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -612,7 +612,7 @@ private unsafe Header InitHeader(BinaryReader reader) private sealed class SchemaImpl : ITransposeSchema { private readonly TransposeLoader _parent; - private ISchema Schema { get { return _parent.Schema; } } + private Schema Schema { get { return _parent.Schema; } } private IHost Host { get { return _parent._host; } } public int ColumnCount { get { return Schema.ColumnCount; } } diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index c0d15d87f2..841ff93038 100644 --- a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ 
b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -121,7 +121,7 @@ private void CheckSchemaConsistency() for (int i = startingSchemaIndex; i < _sources.Length; i++) { - ISchema schema = _sources[i].Schema; + var schema = _sources[i].Schema; _host.Check(schema.GetColumnName(c) == name, errMsg); _host.Check(schema.GetColumnType(c).SameSizeAndItemType(type), errMsg); } diff --git a/src/Microsoft.ML.Data/DataView/CompositeSchema.cs b/src/Microsoft.ML.Data/DataView/CompositeSchema.cs index 9739112c22..d5e7e9229e 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeSchema.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeSchema.cs @@ -15,14 +15,14 @@ namespace Microsoft.ML.Runtime.Data /// internal sealed class CompositeSchema : ISchema { - private readonly ISchema[] _sources; + private readonly Schema[] _sources; public Schema AsSchema { get; } // Zero followed by cumulative column counts. Zero being used for the empty case. private readonly int[] _cumulativeColCounts; - public CompositeSchema(ISchema[] sources) + public CompositeSchema(Schema[] sources) { Contracts.AssertNonEmpty(sources); _sources = sources; diff --git a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs index 57382f890b..938283b779 100644 --- a/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs +++ b/src/Microsoft.ML.Data/DataView/RowToRowMapperTransform.cs @@ -22,7 +22,7 @@ namespace Microsoft.ML.Runtime.Data { /// /// This interface is used to create a . - /// Implementations should be given an in their constructor, and should have a + /// Implementations should be given an in their constructor, and should have a /// ctor or Create method with , along with a corresponding /// . 
/// @@ -49,7 +49,7 @@ internal interface IRowMapper : ICanSaveModel Schema.DetachedColumn[] GetOutputColumns(); } - public delegate void SignatureLoadRowMapper(ModelLoadContext ctx, ISchema schema); + public delegate void SignatureLoadRowMapper(ModelLoadContext ctx, Schema schema); /// /// This class is a transform that can add any number of output columns, that depend on any number of input columns. @@ -96,11 +96,11 @@ internal RowToRowMapperTransform(IHostEnvironment env, IDataView input, IRowMapp } [BestFriend] - internal static Schema GetOutputSchema(ISchema inputSchema, IRowMapper mapper) + internal static Schema GetOutputSchema(Schema inputSchema, IRowMapper mapper) { Contracts.CheckValue(inputSchema, nameof(inputSchema)); Contracts.CheckValue(mapper, nameof(mapper)); - return new ColumnBindings(Schema.Create(inputSchema), mapper.GetOutputColumns()).Schema; + return new ColumnBindings(inputSchema, mapper.GetOutputColumns()).Schema; } private RowToRowMapperTransform(IHost host, ModelLoadContext ctx, IDataView input) diff --git a/src/Microsoft.ML.Data/DataView/Transposer.cs b/src/Microsoft.ML.Data/DataView/Transposer.cs index a0cc9edae6..901c76275a 100644 --- a/src/Microsoft.ML.Data/DataView/Transposer.cs +++ b/src/Microsoft.ML.Data/DataView/Transposer.cs @@ -895,7 +895,7 @@ private Func CreateInputPredicate(Func pred, out bool[] ac { var splitter = _splitters[i]; // Don't activate input source columns if none of the resulting columns were selected. 
- bool isActive = pred == null || Enumerable.Range(offset, splitter.ColumnCount).Any(c => pred(c)); + bool isActive = pred == null || Enumerable.Range(offset, splitter.AsSchema.ColumnCount).Any(c => pred(c)); if (isActive) { activeSplitters[i] = isActive; @@ -1633,11 +1633,11 @@ private void GetId(ref UInt128 id) /// internal sealed class SimpleTransposeSchema : ITransposeSchema { - private readonly ISchema _schema; + private readonly Schema _schema; public int ColumnCount { get { return _schema.ColumnCount; } } - public SimpleTransposeSchema(ISchema schema) + public SimpleTransposeSchema(Schema schema) { Contracts.CheckValue(schema, nameof(schema)); _schema = schema; diff --git a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs index 7a5b98da5b..b0fa32250b 100644 --- a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs +++ b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs @@ -35,7 +35,7 @@ private sealed class Bindings : ISchema { public readonly int[] Sources; - private readonly ISchema _input; + private readonly Schema _input; private readonly Dictionary _nameToIndex; // The following argument is used only to inform serialization. 
@@ -43,7 +43,7 @@ private sealed class Bindings : ISchema public Schema AsSchema { get; } - public Bindings(Arguments args, ISchema schemaInput) + public Bindings(Arguments args, Schema schemaInput) { Contracts.AssertValue(args); Contracts.AssertValue(schemaInput); @@ -89,7 +89,7 @@ private void BuildNameDict(int[] indexCopy, bool drop, out int[] sources, out in nameToCol[_input.GetColumnName(sources[c])] = c; } - public Bindings(ModelLoadContext ctx, ISchema schemaInput) + public Bindings(ModelLoadContext ctx, Schema schemaInput) { Contracts.AssertValue(ctx); Contracts.AssertValue(schemaInput); diff --git a/src/Microsoft.ML.Data/EntryPoints/InputBase.cs b/src/Microsoft.ML.Data/EntryPoints/InputBase.cs index 3254289f12..7b76574562 100644 --- a/src/Microsoft.ML.Data/EntryPoints/InputBase.cs +++ b/src/Microsoft.ML.Data/EntryPoints/InputBase.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; @@ -102,7 +103,7 @@ public abstract class LearnerInputBaseWithGroupId : LearnerInputBaseWithWeight [BestFriend] internal static class LearnerEntryPointsUtils { - public static string FindColumn(IExceptionContext ectx, ISchema schema, Optional value) + public static string FindColumn(IExceptionContext ectx, Schema schema, Optional value) { Contracts.CheckValueOrNull(ectx); ectx.CheckValue(schema, nameof(schema)); @@ -133,7 +134,7 @@ public static TOut Train(IHost host, TArg input, { using (var ch = host.Start("Training")) { - ISchema schema = input.TrainingData.Schema; + var schema = input.TrainingData.Schema; var feature = FindColumn(ch, schema, input.FeatureColumn); var label = getLabel?.Invoke(); var weight = getWeight?.Invoke(); diff --git a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs index 5af6213603..4e3c2e81f1 100644 --- 
a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs @@ -195,7 +195,7 @@ private sealed class CompositeRowToRowMapper : IRowToRowMapper public Schema OutputSchema => Schema; - public CompositeRowToRowMapper(IExceptionContext ectx, IDataView chain, ISchema rootSchema) + public CompositeRowToRowMapper(IExceptionContext ectx, IDataView chain, Schema rootSchema) { Contracts.CheckValue(ectx, nameof(ectx)); _ectx = ectx; diff --git a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs index 3dd2ca8f5c..2b8ffe7d0b 100644 --- a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs @@ -893,7 +893,7 @@ private static VersionInfo GetVersionInfo() private readonly bool _useRaw; private readonly ColumnType[] _types; - public BinaryPerInstanceEvaluator(IHostEnvironment env, ISchema schema, string scoreCol, string probCol, string labelCol, Single threshold, bool useRaw) + public BinaryPerInstanceEvaluator(IHostEnvironment env, Schema schema, string scoreCol, string probCol, string labelCol, Single threshold, bool useRaw) : base(env, schema, scoreCol, labelCol) { _threshold = threshold; @@ -913,7 +913,7 @@ public BinaryPerInstanceEvaluator(IHostEnvironment env, ISchema schema, string s _types[AssignedCol] = BoolType.Instance; } - private BinaryPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + private BinaryPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, Schema schema) : base(env, ctx, schema) { // *** Binary format ** @@ -939,7 +939,7 @@ private BinaryPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, I _types[AssignedCol] = BoolType.Instance; } - public static BinaryPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + public static BinaryPerInstanceEvaluator 
Create(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -1091,7 +1091,7 @@ private protected override Schema.DetachedColumn[] GetOutputColumnsCore() return new[] { new Schema.DetachedColumn(Assigned, _types[AssignedCol], null), }; } - private void CheckInputColumnTypes(ISchema schema) + private void CheckInputColumnTypes(Schema schema) { Host.AssertNonEmpty(ScoreCol); Host.AssertValueOrNull(_probCol); @@ -1541,7 +1541,7 @@ public static CommonOutputs.ClassificationEvaluateOutput Binary(IHostEnvironment private static void MatchColumns(IHost host, MamlEvaluatorBase.ArgumentsBase input, out string label, out string weight, out string name) { - ISchema schema = input.Data.Schema; + var schema = input.Data.Schema; label = TrainUtils.MatchNameOrDefaultOrNull(host, schema, nameof(BinaryClassifierMamlEvaluator.Arguments.LabelColumn), input.LabelColumn, DefaultColumnNames.Label); diff --git a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs index ca2d36c3c4..e74f470f0c 100644 --- a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs @@ -586,7 +586,7 @@ private static VersionInfo GetVersionInfo() private readonly int _numClusters; private readonly ColumnType[] _types; - public ClusteringPerInstanceEvaluator(IHostEnvironment env, ISchema schema, string scoreCol, int numClusters) + public ClusteringPerInstanceEvaluator(IHostEnvironment env, Schema schema, string scoreCol, int numClusters) : base(env, schema, scoreCol, null) { CheckInputColumnTypes(schema); @@ -599,7 +599,7 @@ public ClusteringPerInstanceEvaluator(IHostEnvironment env, ISchema schema, stri _types[SortedClusterScoreCol] = new VectorType(NumberType.R4, _numClusters); } - private ClusteringPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + private 
ClusteringPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, Schema schema) : base(env, ctx, schema) { CheckInputColumnTypes(schema); @@ -618,7 +618,7 @@ private ClusteringPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ct _types[SortedClusterScoreCol] = new VectorType(NumberType.R4, _numClusters); } - public static ClusteringPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + public static ClusteringPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -747,7 +747,7 @@ private ValueGetter>> CreateSlotNamesGetter(int num }; } - private void CheckInputColumnTypes(ISchema schema) + private void CheckInputColumnTypes(Schema schema) { Host.AssertNonEmpty(ScoreCol); @@ -861,7 +861,7 @@ public static CommonOutputs.CommonEvaluateOutput Clustering(IHostEnvironment env EntryPointUtils.CheckInputArgs(host, input); MatchColumns(host, input, out string label, out string weight, out string name); - ISchema schema = input.Data.Schema; + var schema = input.Data.Schema; string features = TrainUtils.MatchNameOrDefaultOrNull(host, schema, nameof(ClusteringMamlEvaluator.Arguments.FeatureColumn), input.FeatureColumn, DefaultColumnNames.Features); diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs index 906bc020b8..59f5cafeb0 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs @@ -461,7 +461,7 @@ public abstract class PerInstanceEvaluatorBase : IRowMapper protected readonly int ScoreIndex; protected readonly int LabelIndex; - protected PerInstanceEvaluatorBase(IHostEnvironment env, ISchema schema, string scoreCol, string labelCol) + protected PerInstanceEvaluatorBase(IHostEnvironment env, Schema schema, string scoreCol, string labelCol) { Contracts.AssertValue(env); 
Contracts.AssertNonEmpty(scoreCol); @@ -476,7 +476,7 @@ protected PerInstanceEvaluatorBase(IHostEnvironment env, ISchema schema, string throw Host.Except("Did not find column '{0}'", ScoreCol); } - protected PerInstanceEvaluatorBase(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + protected PerInstanceEvaluatorBase(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Host = env.Register("PerInstanceRowMapper"); diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs index 205c24a8cf..0eb63d3f0c 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs @@ -81,7 +81,7 @@ public static IMamlEvaluator GetEvaluator(IHostEnvironment env, Schema schema) } // Lambda used as validator/filter in calls to GetMaxMetadataKind. - private static bool CheckScoreColumnKindIsKnown(ISchema schema, int col) + private static bool CheckScoreColumnKindIsKnown(Schema schema, int col) { var columnType = schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.ScoreColumnKind, col); if (columnType == null || !columnType.IsText) @@ -209,7 +209,7 @@ public static ColumnInfo GetOptAuxScoreColumnInfo(IExceptionContext ectx, Schema return null; } - private static bool IsScoreColumnKind(IExceptionContext ectx, ISchema schema, int col, string kind) + private static bool IsScoreColumnKind(IExceptionContext ectx, Schema schema, int col, string kind) { Contracts.CheckValueOrNull(ectx); ectx.CheckValue(schema, nameof(schema)); @@ -546,7 +546,7 @@ public static IDataView AddFoldIndex(IHostEnvironment env, IDataView input, int } } - private static int[][] MapKeys(ISchema[] schemas, string columnName, bool isVec, + private static int[][] MapKeys(Schema[] schemas, string columnName, bool isVec, int[] indices, Dictionary, int> reconciledKeyNames) { Contracts.AssertValue(indices); @@ -1273,7 +1273,7 @@ internal static IDataView GetAverageToDataView(IHostEnvironment env, 
Schema sche return idv; } - private static void AddVectorColumn(this ArrayDataViewBuilder dvBldr, IHostEnvironment env, ISchema schema, + private static void AddVectorColumn(this ArrayDataViewBuilder dvBldr, IHostEnvironment env, Schema schema, AggregatedMetric[] agg, bool hasStdev, int numFolds, int iMetric, int i, ColumnType type, string columnName) { var vectorMetrics = new double[type.VectorSize]; @@ -1301,7 +1301,7 @@ private static void AddVectorColumn(this ArrayDataViewBuilder dvBldr, IHostEnvir dvBldr.AddColumn(columnName, getSlotNames, NumberType.R8, new[] { vectorMetrics }); } - private static void AddScalarColumn(this ArrayDataViewBuilder dvBldr, ISchema schema, AggregatedMetric[] agg, bool hasStdev, int numFolds, int iMetric) + private static void AddScalarColumn(this ArrayDataViewBuilder dvBldr, Schema schema, AggregatedMetric[] agg, bool hasStdev, int numFolds, int iMetric) { Contracts.AssertValue(dvBldr); diff --git a/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs index ccef4706f4..110abd27e4 100644 --- a/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs @@ -15,7 +15,7 @@ namespace Microsoft.ML.Runtime.Data /// and the to evaluate, print and save the results. /// The input to the and the methods /// should be assumed to contain only the following column roles: label, group, weight and name. Any other columns needed for - /// evaluation should be searched for by name in the . + /// evaluation should be searched for by name in the . 
/// public interface IMamlEvaluator : IEvaluator { diff --git a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs index 04316e3ed6..12aaa47ad0 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs @@ -592,7 +592,7 @@ public MultiClassPerInstanceEvaluator(IHostEnvironment env, Schema schema, Colum _types[SortedClassesCol] = new VectorType(key, _numClasses); } - private MultiClassPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + private MultiClassPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, Schema schema) : base(env, ctx, schema) { CheckInputColumnTypes(schema); @@ -621,7 +621,7 @@ private MultiClassPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ct _types[SortedClassesCol] = new VectorType(key, _numClasses); } - public static MultiClassPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + public static MultiClassPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -808,7 +808,7 @@ private ValueGetter>> CreateKeyValueGetter() }; } - private void CheckInputColumnTypes(ISchema schema) + private void CheckInputColumnTypes(Schema schema) { Host.AssertNonEmpty(ScoreCol); Host.AssertNonEmpty(LabelCol); diff --git a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs index daa71a0cb1..fa2de9c726 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs @@ -416,7 +416,7 @@ private MultiOutputRegressionPerInstanceEvaluator(IHostEnvironment env, ModelLoa // base } - public static 
MultiOutputRegressionPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + public static MultiOutputRegressionPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -590,7 +590,7 @@ private void GetScoreValueKind(ref ReadOnlyMemory dst) dst = MetadataUtils.Const.ScoreValueKind.Score.AsMemory(); } - private ValueGetter>> CreateSlotNamesGetter(ISchema schema, int column, int length, string prefix) + private ValueGetter>> CreateSlotNamesGetter(Schema schema, int column, int length, string prefix) { var type = schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.SlotNames, column); if (type != null && type.IsText) diff --git a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs index eb2f9f677d..10f5aa6e4b 100644 --- a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs @@ -284,7 +284,7 @@ private static VersionInfo GetVersionInfo() private readonly VBuffer> _quantiles; private readonly ColumnType _outputType; - public QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, ISchema schema, string scoreCol, string labelCol, int scoreSize, VBuffer> quantiles) + public QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, Schema schema, string scoreCol, string labelCol, int scoreSize, VBuffer> quantiles) : base(env, schema, scoreCol, labelCol) { Host.CheckParam(scoreSize > 0, nameof(scoreSize), "must be greater than 0"); @@ -298,7 +298,7 @@ public QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, ISchema sche _outputType = new VectorType(NumberType.R8, _scoreSize); } - private QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + private QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, 
ModelLoadContext ctx, Schema schema) : base(env, ctx, schema) { CheckInputColumnTypes(schema); @@ -317,7 +317,7 @@ private QuantileRegressionPerInstanceEvaluator(IHostEnvironment env, ModelLoadCo _outputType = new VectorType(NumberType.R8, _scoreSize); } - public static QuantileRegressionPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, ISchema schema) + public static QuantileRegressionPerInstanceEvaluator Create(IHostEnvironment env, ModelLoadContext ctx, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -442,7 +442,7 @@ private protected override Delegate[] CreateGettersCore(Row input, Func toDrop) /// The columns to output /// Returns a complete PFA program, where the output will correspond to the subset /// of columns from . - public JObject Finalize(ISchema schema, params string[] toOutput) + public JObject Finalize(Schema schema, params string[] toOutput) { _host.CheckValue(schema, nameof(schema)); _host.CheckValue(toOutput, nameof(toOutput)); @@ -162,7 +162,7 @@ public JObject Finalize(ISchema schema, params string[] toOutput) return Pfa.Finalize(); } - private JToken PfaTypeOrNullForColumn(ISchema schema, int col) + private JToken PfaTypeOrNullForColumn(Schema schema, int col) { _host.AssertValue(schema); _host.Assert(0 <= col && col < schema.ColumnCount); diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 5eb1b91e49..70638e35a9 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -327,8 +327,8 @@ private sealed class RowMapper : ISchemaBoundRowMapper private readonly IHostEnvironment _env; private readonly ISchemaBoundRowMapper _genericRowMapper; private readonly BindableMapper _parent; - private readonly ISchema _outputSchema; - private readonly ISchema 
_outputGenericSchema; + private readonly Schema _outputSchema; + private readonly Schema _outputGenericSchema; private VBuffer> _slotNames; public RoleMappedSchema InputRoleMappedSchema { get; } @@ -364,13 +364,13 @@ public RowMapper(IHostEnvironment env, BindableMapper parent, RoleMappedSchema s } else { - _outputSchema = new FeatureContributionSchema(_env, DefaultColumnNames.FeatureContributions, + _outputSchema = Schema.Create(new FeatureContributionSchema(_env, DefaultColumnNames.FeatureContributions, new VectorType(NumberType.R4, schema.Feature.Type.AsVector), - InputSchema, InputRoleMappedSchema.Feature.Index); + InputSchema, InputRoleMappedSchema.Feature.Index)); } _outputGenericSchema = _genericRowMapper.OutputSchema; - OutputSchema = new CompositeSchema(new ISchema[] { _outputGenericSchema, _outputSchema, }).AsSchema; + OutputSchema = new CompositeSchema(new Schema[] { _outputGenericSchema, _outputSchema, }).AsSchema; } /// diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index 41dff06100..4b190636f9 100644 --- a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -315,7 +315,7 @@ public Row GetRow(Row input, Func predicate) private sealed class SchemaImpl : ISchema { - private readonly ISchema _parent; + private readonly Schema _parent; private readonly int _scoreCol; private readonly VectorType _labelNameType; private readonly MetadataUtils.MetadataGetter> _labelNameGetter; @@ -325,7 +325,7 @@ private sealed class SchemaImpl : ISchema public int ColumnCount { get { return _parent.ColumnCount; } } - public SchemaImpl(ISchema parent, int col, VectorType type, ValueGetter> getter, string metadataKind) + public SchemaImpl(Schema parent, int col, VectorType type, ValueGetter> getter, string metadataKind) { Contracts.AssertValue(parent); Contracts.Assert(0 <= col && col < parent.ColumnCount); @@ 
-461,7 +461,7 @@ public static bool CanWrap(ISchemaBoundMapper mapper, ColumnType labelNameType) if (rowMapper == null) return false; // We could cover this case, but it is of no practical worth as far as I see, so I decline to do so. - ISchema outSchema = mapper.OutputSchema; + var outSchema = mapper.OutputSchema; int scoreIdx; if (!outSchema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out scoreIdx)) return false; // The mapper doesn't even publish a score column to attach the metadata to. diff --git a/src/Microsoft.ML.Data/Transforms/ColumnBindingsBase.cs b/src/Microsoft.ML.Data/Transforms/ColumnBindingsBase.cs index c76ebbc57a..3229c01951 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnBindingsBase.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnBindingsBase.cs @@ -808,7 +808,7 @@ public ColInfo(int srcSize, int[] srcIndices, ColumnType[] srcTypes) public readonly ColInfo[] Infos; - protected ManyToOneColumnBindingsBase(ManyToOneColumn[] column, ISchema input, Func testTypes) + protected ManyToOneColumnBindingsBase(ManyToOneColumn[] column, Schema input, Func testTypes) : base(input, true, GetNamesAndSanitize(column)) { Contracts.AssertNonEmpty(column); @@ -886,7 +886,7 @@ private static string[] GetNamesAndSanitize(ManyToOneColumn[] column) } // Read everything into a new Contents object and pass it to the constructor below. 
- protected ManyToOneColumnBindingsBase(ModelLoadContext ctx, ISchema input, Func testTypes) + protected ManyToOneColumnBindingsBase(ModelLoadContext ctx, Schema input, Func testTypes) : this(new Contents(ctx, input, testTypes)) { } @@ -904,11 +904,11 @@ private ManyToOneColumnBindingsBase(Contents contents) /// private sealed class Contents { - public ISchema Input; + public Schema Input; public ColInfo[] Infos; public string[] Names; - public Contents(ModelLoadContext ctx, ISchema input, Func testTypes) + public Contents(ModelLoadContext ctx, Schema input, Func testTypes) { Contracts.CheckValue(ctx, nameof(ctx)); Contracts.CheckValue(input, nameof(input)); diff --git a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs index e3c47fa401..73d37120c8 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs @@ -404,8 +404,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, /// /// Factory method for SignatureLoadRowMapper. 
/// - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => new ColumnConcatenatingTransformer(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => new ColumnConcatenatingTransformer(env, ctx).MakeRowMapper(inputSchema); private sealed class Mapper : MapperBase, ISaveAsOnnx, ISaveAsPfa { diff --git a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs index 9cfd65bb38..b40c4bf7c6 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnCopying.cs @@ -149,8 +149,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index 1193f391e9..9ad73c629e 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -483,7 +483,7 @@ private sealed class Mapper private readonly Schema _inputSchema; private readonly int[] _outputToInputMap; - public ISchema InputSchema => _inputSchema; + public Schema InputSchema => _inputSchema; public Schema OutputSchema { get; } diff --git a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs index 80e7ac13fd..65e5c52dc4 100644 --- 
a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs @@ -316,8 +316,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index 0bf2687c39..bc229021bb 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -102,7 +102,7 @@ private sealed class Bindings : ColumnBindingsBase public readonly TauswortheHybrid.State[] States; private Bindings(bool[] useCounter, TauswortheHybrid.State[] states, - ISchema input, bool user, string[] names) + Schema input, bool user, string[] names) : base(input, user, names) { Contracts.Assert(Utils.Size(useCounter) == InfoCount); @@ -111,7 +111,7 @@ private Bindings(bool[] useCounter, TauswortheHybrid.State[] states, States = states; } - public static Bindings Create(Arguments args, ISchema input) + public static Bindings Create(Arguments args, Schema input) { var names = new string[args.Column.Length]; var useCounter = new bool[args.Column.Length]; @@ -128,7 +128,7 @@ public static Bindings Create(Arguments args, ISchema input) return new Bindings(useCounter, states, input, true, names); } - public static Bindings Create(ModelLoadContext ctx, ISchema input) + public static Bindings Create(ModelLoadContext ctx, Schema input) { Contracts.AssertValue(ctx); 
Contracts.AssertValue(input); diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 71c4fa3147..9ea54f4897 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -203,7 +203,7 @@ private static VersionInfo GetVersionInfo() private readonly VBuffer>[] _keyValues; private readonly ColumnType[] _kvTypes; - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!HashingEstimator.IsColumnTypeValid(type)) @@ -216,7 +216,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfo[] colum return columns.Select(x => (x.Input, x.Output)).ToArray(); } - private ColumnType GetOutputType(ISchema inputSchema, ColumnInfo column) + private ColumnType GetOutputType(Schema inputSchema, ColumnInfo column) { var keyCount = column.HashBits < 31 ? 1 << column.HashBits : 0; inputSchema.TryGetColumnIndex(column.Input, out int srcCol); @@ -367,8 +367,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); // Factory method for SignatureDataTransform. 
public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs index 60b692ad50..f2b4277059 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs @@ -138,8 +138,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, /// /// Factory method for SignatureLoadRowMapper. /// - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { @@ -220,7 +220,7 @@ protected override Delegate MakeGetter(Row input, int iinfo, Func act } // Computes the types of the columns and constructs the kvMaps. 
- private void ComputeKvMaps(ISchema schema, out ColumnType[] types, out KeyToValueMap[] kvMaps) + private void ComputeKvMaps(Schema schema, out ColumnType[] types, out KeyToValueMap[] kvMaps) { types = new ColumnType[_parent.ColumnPairs.Length]; kvMaps = new KeyToValueMap[_parent.ColumnPairs.Length]; diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index cd4142bbad..d406a30606 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -120,7 +120,7 @@ private string TestIsKey(ColumnType type) return "key type of known cardinality"; } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); string reason = TestIsKey(type); @@ -226,8 +226,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); @@ -266,7 +266,7 @@ public Mapper(KeyToVectorMappingTransformer parent, Schema inputSchema) } } - private ColInfo[] CreateInfos(ISchema inputSchema) + private ColInfo[] CreateInfos(Schema inputSchema) { Host.AssertValue(inputSchema); var infos = new ColInfo[_parent.ColumnPairs.Length]; diff --git a/src/Microsoft.ML.Data/Transforms/MetadataDispatcher.cs b/src/Microsoft.ML.Data/Transforms/MetadataDispatcher.cs index bff3a20f9b..cbd7bc252c 100644 --- a/src/Microsoft.ML.Data/Transforms/MetadataDispatcher.cs +++ b/src/Microsoft.ML.Data/Transforms/MetadataDispatcher.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Linq; using System.Text; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; @@ -25,7 +26,7 @@ public abstract class MetadataDispatcherBase protected sealed class ColInfo { // The source schema to pass through metadata from. May be null, indicating none. - public readonly ISchema SchemaSrc; + public readonly Schema SchemaSrc; // The source column index to pass through metadata from. public readonly int IndexSrc; // The metadata kind predicate indicating the kinds of metadata to pass through @@ -46,7 +47,7 @@ public IEnumerable Getters } } - public ColInfo(ISchema schemaSrc, int indexSrc, Func filterSrc, + public ColInfo(Schema schemaSrc, int indexSrc, Func filterSrc, IEnumerable getters = null) { SchemaSrc = schemaSrc; @@ -157,7 +158,7 @@ protected MetadataDispatcherBase(int colCount) /// the same ColInfo, if desired. 
Simply call RegisterColumn multiple times, passing /// the same ColInfo but different index values. This can only be called before Seal is called. /// - protected ColInfo CreateInfo(ISchema schemaSrc = null, int indexSrc = -1, + protected ColInfo CreateInfo(Schema schemaSrc = null, int indexSrc = -1, Func filterSrc = null) { Contracts.Check(!_sealed, "MetadataDispatcher sealed"); @@ -326,7 +327,7 @@ public Builder BuildMetadata(int index) /// Start building metadata for a column that passes through all metadata from /// a source column. /// - public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc) + public Builder BuildMetadata(int index, Schema schemaSrc, int indexSrc) { Contracts.CheckValue(schemaSrc, nameof(schemaSrc)); return new Builder(this, index, schemaSrc, indexSrc); @@ -337,7 +338,7 @@ public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc) /// a source column. The kinds that are passed through are those for which /// returns true. /// - public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc, Func filterSrc) + public Builder BuildMetadata(int index, Schema schemaSrc, int indexSrc, Func filterSrc) { Contracts.CheckValue(schemaSrc, nameof(schemaSrc)); return new Builder(this, index, schemaSrc, indexSrc, filterSrc); @@ -347,7 +348,7 @@ public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc, Func - public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc, string kindSrc) + public Builder BuildMetadata(int index, Schema schemaSrc, int indexSrc, string kindSrc) { Contracts.CheckValue(schemaSrc, nameof(schemaSrc)); Contracts.CheckNonWhiteSpace(kindSrc, nameof(kindSrc)); @@ -358,7 +359,7 @@ public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc, string /// Start building metadata for a column that passes through metadata of the given kinds from /// a source column. 
/// - public Builder BuildMetadata(int index, ISchema schemaSrc, int indexSrc, params string[] kindsSrc) + public Builder BuildMetadata(int index, Schema schemaSrc, int indexSrc, params string[] kindsSrc) { Contracts.CheckValue(schemaSrc, nameof(schemaSrc)); Contracts.CheckParam(Utils.Size(kindsSrc) >= 2, nameof(kindsSrc)); @@ -388,7 +389,7 @@ public sealed class Builder : IDisposable /// allow restricting to an outer class. /// internal Builder(MetadataDispatcher md, int index, - ISchema schemaSrc = null, int indexSrc = -1, Func filterSrc = null) + Schema schemaSrc = null, int indexSrc = -1, Func filterSrc = null) { Contracts.CheckValue(md, nameof(md)); Contracts.CheckParam(0 <= index && index < md.ColCount, nameof(index)); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs index a57f6270a1..a9e12c2df8 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -1085,7 +1086,7 @@ public static IColumnFunctionBuilder CreateBuilder(SupervisedBinArguments args, throw host.ExceptUserArg(nameof(args.Column), "Wrong column type for column {0}. Expected: R4, R8, Vec or Vec. 
Got: {1}.", args.Column[icol].Source, srcType.ToString()); } - public static int GetLabelColumnId(IExceptionContext host, ISchema schema, string labelColumnName) + public static int GetLabelColumnId(IExceptionContext host, Schema schema, string labelColumnName) { Contracts.AssertValue(host); host.AssertValue(schema); diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index afc597cf50..2c8dc6c622 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -438,8 +438,8 @@ public static NormalizingTransformer Create(IHostEnvironment env, ModelLoadConte } // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { @@ -463,7 +463,7 @@ public override void Save(ModelSaveContext ctx) } } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { const string expectedType = "scalar or known-size vector of R4"; diff --git a/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs b/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs index 47b5ef4670..0a59943320 100644 --- a/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs +++ b/src/Microsoft.ML.Data/Transforms/OneToOneTransformerBase.cs @@ -68,7 +68,7 @@ protected void SaveColumns(ModelSaveContext ctx) } } - private void CheckInput(ISchema inputSchema, int col, out int srcCol) + private void CheckInput(Schema inputSchema, int col, out int srcCol) { Contracts.AssertValue(inputSchema); Contracts.Assert(0 <= col && col < 
ColumnPairs.Length); @@ -78,7 +78,7 @@ private void CheckInput(ISchema inputSchema, int col, out int srcCol) CheckInputColumn(inputSchema, col, srcCol); } - protected virtual void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected virtual void CheckInputColumn(Schema inputSchema, int col, int srcCol) { // By default, there are no extra checks. } diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 56adc40906..9d5aefe473 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -209,7 +209,7 @@ private static IDataView SelectCachableColumns(IDataView data, IHostEnvironment /// /// Utility to check whether all types in an input schema are shufflable. /// - internal static bool CanShuffleAll(ISchema schema) + internal static bool CanShuffleAll(Schema schema) { for (int c = 0; c < schema.ColumnCount; ++c) { diff --git a/src/Microsoft.ML.Data/Transforms/TransformBase.cs b/src/Microsoft.ML.Data/Transforms/TransformBase.cs index 41ddf322e5..0e98d3f991 100644 --- a/src/Microsoft.ML.Data/Transforms/TransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/TransformBase.cs @@ -287,7 +287,7 @@ private sealed class Bindings : ColumnBindingsBase, ITransposeSchema private const string InvalidTypeErrorFormat = "Source column '{0}' has invalid type ('{1}'): {2}."; private Bindings(OneToOneTransformBase parent, ColInfo[] infos, - ISchema input, bool user, string[] names) + Schema input, bool user, string[] names) : base(input, user, names) { Contracts.AssertValue(parent); @@ -300,7 +300,7 @@ private Bindings(OneToOneTransformBase parent, ColInfo[] infos, Infos = infos; } - public static Bindings Create(OneToOneTransformBase parent, OneToOneColumn[] column, ISchema input, + public static Bindings Create(OneToOneTransformBase parent, OneToOneColumn[] column, Schema input, ITransposeSchema 
transInput, Func testType) { Contracts.AssertValue(parent); @@ -337,7 +337,7 @@ public static Bindings Create(OneToOneTransformBase parent, OneToOneColumn[] col return new Bindings(parent, infos, input, true, names); } - public static Bindings Create(OneToOneTransformBase parent, ModelLoadContext ctx, ISchema input, + public static Bindings Create(OneToOneTransformBase parent, ModelLoadContext ctx, Schema input, ITransposeSchema transInput, Func testType) { Contracts.AssertValue(parent); diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 3388f15c35..1c781b3e32 100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -357,8 +357,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index d351a6bfc1..cda1d03401 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -248,7 +248,7 @@ internal string TestIsKnownDataKind(ColumnType type) return "standard type or a vector of standard type"; } - private ColInfo[] CreateInfos(ISchema inputSchema) + private ColInfo[] CreateInfos(Schema inputSchema) { Host.AssertValue(inputSchema); var infos = new ColInfo[ColumnPairs.Length]; @@ 
-388,8 +388,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); /// /// Initializes a new instance of . diff --git a/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs b/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs index 7011f7976c..1d813b37f5 100644 --- a/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs @@ -7,6 +7,7 @@ using System.IO; using System.IO.Compression; using System.Linq; +using Microsoft.ML.Data; using Microsoft.ML.Ensemble.EntryPoints; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; @@ -95,7 +96,7 @@ private static void GetPipeline(IHostEnvironment env, InputBase input, out IData env.AssertValue(input); env.AssertNonEmpty(input.Models); - ISchema inputSchema = null; + Schema inputSchema = null; startingData = null; transformedData = null; byte[][] transformedDataSerialized = null; diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index 70c71486f3..dafa19eb61 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -636,7 +636,7 @@ private static int CheckNonKeyLabelColumnCore(IHostEnvironment env, IPredictor p // Checks that all the label columns of the model have the same key type as their label column - including the same // cardinality and the same key values, and returns the cardinality of the label column key. 
- private static int CheckKeyLabelColumnCore(IHostEnvironment env, IPredictorModel[] models, KeyType labelType, ISchema schema, int labelIndex, ColumnType keyValuesType) + private static int CheckKeyLabelColumnCore(IHostEnvironment env, IPredictorModel[] models, KeyType labelType, Schema schema, int labelIndex, ColumnType keyValuesType) where T : IEquatable { env.Assert(keyValuesType.ItemType.RawType == typeof(T)); diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index e3ff5c8ea7..2a0358f74a 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -306,13 +306,13 @@ internal static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - internal static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + internal static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); private static (string input, string output)[] GetColumnPairs(ColumnInfo[] columns) => columns.Select(c => (c.Input, c.Output ?? 
c.Input)).ToArray(); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var inType = inputSchema.GetColumnType(srcCol); var reason = TestColumn(inType); diff --git a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs index d7b8a96c54..4890dea14d 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageGrayscaleTransform.cs @@ -127,7 +127,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); public override void Save(ModelSaveContext ctx) @@ -154,7 +154,7 @@ public override void Save(ModelSaveContext ctx) private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { if (!(inputSchema.GetColumnType(srcCol) is ImageType)) throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", ColumnPairs[col].input, "image", inputSchema.GetColumnType(srcCol).ToString()); diff --git a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs index 9165a7e06d..931bfcbf1d 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageLoaderTransform.cs @@ -111,10 +111,10 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => 
Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { if (!inputSchema.GetColumnType(srcCol).IsText) throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", ColumnPairs[col].input, TextType.Instance.ToString(), inputSchema.GetColumnType(srcCol).ToString()); diff --git a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs index ddc43e9468..7fc4355e27 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImagePixelExtractorTransform.cs @@ -378,8 +378,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { @@ -402,7 +402,7 @@ public override void Save(ModelSaveContext ctx) private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var inputColName = _columns[col].Input; var imageType = inputSchema.GetColumnType(srcCol) as ImageType; diff --git a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs index 0beb7202fd..0ff7b38c3d 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageResizerTransform.cs @@ -253,7 +253,7 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); public override void Save(ModelSaveContext ctx) @@ -287,7 +287,7 @@ public override void Save(ModelSaveContext ctx) private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { if (!(inputSchema.GetColumnType(srcCol) is ImageType)) throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", _columns[col].Input, "image", inputSchema.GetColumnType(srcCol).ToString()); diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs index f1d60d87eb..c4a4dcc88c 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs @@ -122,7 +122,7 @@ private static int GetNumberOfClasses(IHostEnvironment env, Arguments input, out using (var ch = host.Start("OVA Macro GetNumberOfClasses")) { // RoleMappedData creation - ISchema schema = input.TrainingData.Schema; + var schema = input.TrainingData.Schema; label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.LabelColumn), input.LabelColumn, DefaultColumnNames.Label); diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index 1e20ebe350..bfca634045 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -146,8 +146,8 @@ private static OnnxTransform Create(IHostEnvironment env, ModelLoadContext ctx) } // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private OnnxTransform(IHostEnvironment env, Arguments args, byte[] modelBytes = null) : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(OnnxTransform))) diff --git a/src/Microsoft.ML.PCA/PcaTransform.cs b/src/Microsoft.ML.PCA/PcaTransform.cs index e3fee2639f..90a613e844 100644 --- a/src/Microsoft.ML.PCA/PcaTransform.cs +++ b/src/Microsoft.ML.PCA/PcaTransform.cs @@ -285,8 +285,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); // Factory method for SignatureDataTransform. 
private static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) @@ -542,7 +542,7 @@ private float[][] PostProcess(float[][] y, float[] sigma, float[] z, int d, int private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { ValidatePcaInput(Host, inputSchema.GetColumnName(srcCol), inputSchema.GetColumnType(srcCol)); } diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 4af405b5d5..ae79e564cd 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -326,7 +326,7 @@ public Func GetDependencies(Func predicate) yield return RecommenderUtils.MatrixRowIndexKind.Bind(_matrixRowIndexColumnName); } - private void CheckInputSchema(ISchema schema, int matrixColumnIndexCol, int matrixRowIndexCol) + private void CheckInputSchema(Schema schema, int matrixColumnIndexCol, int matrixRowIndexCol) { // See if matrix-column-index role's type matches the one expected in the trained predictor var type = schema.GetColumnType(matrixColumnIndexCol); diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs index 7c818102bc..26d6047cce 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs @@ -274,7 +274,7 @@ public FieldAwareFactorizationMachinePredictionTransformer(IHostEnvironment host } /// - /// Gets the result after transformation. 
+ /// Gets the result after transformation. /// /// The of the input data. /// The post transformation . diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index 05234bffe0..b7a1ba901f 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -574,8 +574,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private static void GetModelInfo(IHostEnvironment env, ModelLoadContext ctx, out string[] inputs, out string[] outputs, out bool isFrozen) { diff --git a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs index 6a42483130..33ef64f938 100644 --- a/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidChangePointDetector.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -190,7 +191,7 @@ public override void Save(ModelSaveContext ctx) } // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(inputSchema); } diff --git a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs index 107b2873cf..5261827c87 100644 --- a/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/IidSpikeDetector.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -169,7 +170,7 @@ public override void Save(ModelSaveContext ctx) } // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(inputSchema); } diff --git a/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs index 8b41ee8edf..88e30fe615 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialAnomalyDetectionTransformBase.cs @@ -571,18 +571,18 @@ private protected override sealed void InitializeStateCore(bool disk = false) private protected abstract Double ComputeRawAnomalyScore(ref TInput input, FixedSizeQueue windowedBuffer, long iteration); } - private protected override IStatefulRowMapper MakeRowMapper(ISchema schema) => new Mapper(Host, this, schema); + private protected override IStatefulRowMapper MakeRowMapper(Schema schema) => new Mapper(Host, this, schema); private sealed class Mapper : IStatefulRowMapper { private readonly IHost _host; private readonly 
SequentialAnomalyDetectionTransformBase _parent; - private readonly ISchema _parentSchema; + private readonly Schema _parentSchema; private readonly int _inputColumnIndex; private readonly VBuffer> _slotNames; private TState State { get; set; } - public Mapper(IHostEnvironment env, SequentialAnomalyDetectionTransformBase parent, ISchema inputSchema) + public Mapper(IHostEnvironment env, SequentialAnomalyDetectionTransformBase parent, Schema inputSchema) { Contracts.CheckValue(env, nameof(env)); _host = env.Register(nameof(Mapper)); diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index bef7a167e7..956a4c9aa5 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -343,7 +343,7 @@ public virtual void Save(ModelSaveContext ctx) public abstract Schema GetOutputSchema(Schema inputSchema); - private protected abstract IStatefulRowMapper MakeRowMapper(ISchema schema); + private protected abstract IStatefulRowMapper MakeRowMapper(Schema schema); private protected SequentialDataTransform MakeDataTransform(IDataView input) { @@ -614,11 +614,11 @@ public TimeSeriesRowToRowMapperTransform(IHostEnvironment env, IDataView input, _bindings = new ColumnBindings(Schema.Create(input.Schema), mapper.GetOutputColumns()); } - public static Schema GetOutputSchema(ISchema inputSchema, IRowMapper mapper) + public static Schema GetOutputSchema(Schema inputSchema, IRowMapper mapper) { Contracts.CheckValue(inputSchema, nameof(inputSchema)); Contracts.CheckValue(mapper, nameof(mapper)); - return new ColumnBindings(Schema.Create(inputSchema), mapper.GetOutputColumns()).Schema; + return new ColumnBindings(inputSchema, mapper.GetOutputColumns()).Schema; } private TimeSeriesRowToRowMapperTransform(IHost host, ModelLoadContext ctx, IDataView input) diff --git a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs 
b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs index 4565f853b6..f488740ac9 100644 --- a/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaChangePointDetector.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -200,7 +201,7 @@ public override void Save(ModelSaveContext ctx) } // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(inputSchema); } diff --git a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs index 00eb719b46..9cde0f6c72 100644 --- a/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SsaSpikeDetector.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -181,7 +182,7 @@ public override void Save(ModelSaveContext ctx) } // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(inputSchema); } diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs index 21ad23b9b5..b203fc9dd7 100644 --- a/src/Microsoft.ML.Transforms/GcnTransform.cs +++ b/src/Microsoft.ML.Transforms/GcnTransform.cs @@ -303,7 +303,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfoBase[] c return columns.Select(x => (x.Input, x.Output)).ToArray(); } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var inType = inputSchema.GetColumnType(srcCol); if (!LpNormalizingEstimatorBase.IsColumnTypeValid(inType)) @@ -388,8 +388,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private LpNormalizingTransformer(IHost host, ModelLoadContext ctx) : base(host, ctx) diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index f700b35590..1f77e1dd16 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -195,7 +195,7 @@ private sealed class GroupSchema : ISchema new[] { MetadataUtils.Kinds.IsNormalized, MetadataUtils.Kinds.KeyValues }; private readonly IExceptionContext _ectx; - private readonly ISchema _input; + private readonly Schema _input; private readonly string[] _groupColumns; private readonly string[] _keepColumns; @@ -210,7 +210,7 @@ private sealed class GroupSchema : ISchema public Schema AsSchema { get; } - public GroupSchema(IExceptionContext ectx, ISchema inputSchema, string[] groupColumns, string[] keepColumns) + public GroupSchema(IExceptionContext ectx, Schema inputSchema, string[] groupColumns, string[] keepColumns) { Contracts.AssertValue(ectx); _ectx = ectx; @@ -232,7 +232,7 @@ public GroupSchema(IExceptionContext ectx, ISchema inputSchema, string[] groupCo AsSchema = Schema.Create(this); } - public GroupSchema(ISchema inputSchema, IHostEnvironment env, ModelLoadContext ctx) + public GroupSchema(Schema inputSchema, IHostEnvironment env, ModelLoadContext ctx) { Contracts.AssertValue(env); _ectx = env.Register(LoaderSignature); @@ -281,7 +281,7 @@ private Dictionary BuildColumnNameMap() return map; } - private static ColumnType[] BuildColumnTypes(ISchema input, int[] ids) + private static ColumnType[] BuildColumnTypes(Schema input, int[] ids) { var types = new ColumnType[ids.Length]; for (int i = 0; i < 
ids.Length; i++) @@ -320,7 +320,7 @@ public void Save(ModelSaveContext ctx) } } - private int[] GetColumnIds(ISchema schema, string[] names, Func except) + private int[] GetColumnIds(Schema schema, string[] names, Func except) { Contracts.AssertValue(schema); Contracts.AssertValue(names); diff --git a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs index 27456da815..9b6c6bb81b 100644 --- a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs +++ b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs @@ -84,7 +84,7 @@ private string TestIsKey(ColumnType type) return "key type of known cardinality"; } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); string reason = TestIsKey(type); @@ -159,8 +159,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); @@ -205,7 +205,7 @@ public Mapper(KeyToBinaryVectorMappingTransformer parent, Schema inputSchema) _types[i] = new VectorType(NumberType.Float, _infos[i].TypeSrc.ValueCount, _bitsPerKey[i]); } } - private ColInfo[] CreateInfos(ISchema inputSchema) + private ColInfo[] CreateInfos(Schema inputSchema) { Host.AssertValue(inputSchema); var infos = new ColInfo[_parent.ColumnPairs.Length]; diff --git a/src/Microsoft.ML.Transforms/LearnerFeatureSelection.cs b/src/Microsoft.ML.Transforms/LearnerFeatureSelection.cs index a78e317319..2224d6d69f 100644 --- a/src/Microsoft.ML.Transforms/LearnerFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/LearnerFeatureSelection.cs @@ -278,7 +278,7 @@ private static void TrainCore(IHost host, IDataView input, Arguments args, ref V IDataView view = input; - ISchema schema = view.Schema; + var schema = view.Schema; var label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(args.LabelColumn), args.LabelColumn, DefaultColumnNames.Label); var feature = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(args.FeatureColumn), args.FeatureColumn, DefaultColumnNames.Features); var group = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(args.GroupColumn), args.GroupColumn, DefaultColumnNames.GroupId); diff --git a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index 64dae82ef7..77e89f8882 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -101,7 +101,7 
@@ private MissingValueDroppingTransformer(IHostEnvironment env, ModelLoadContext c private static (string input, string output)[] GetColumnPairs(Column[] columns) => columns.Select(c => (c.Source ?? c.Name, c.Name)).ToArray(); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var inType = inputSchema.GetColumnType(srcCol); if (!inType.IsVector) @@ -126,8 +126,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); /// /// Saves the transform. diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs index 8273467e7e..73efd1d719 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs @@ -125,7 +125,7 @@ internal static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- internal static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + internal static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); /// diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index 2f6ebef311..296aed0eab 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -232,7 +232,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfo[] colum // REVIEW: Currently these arrays are constructed on load but could be changed to being constructed lazily. private readonly BitArray[] _repIsDefault; - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); string reason = TestType(type); @@ -509,8 +509,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private VBuffer CreateVBuffer(T[] array) { @@ -616,7 +616,7 @@ public Mapper(MissingValueReplacingTransformer parent, Schema inputSchema) } } - private ColInfo[] CreateInfos(ISchema inputSchema) + private ColInfo[] CreateInfos(Schema inputSchema) { Host.AssertValue(inputSchema); var infos = new ColInfo[_parent.ColumnPairs.Length]; diff --git a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs index 0f24deb240..8c215dcb07 100644 --- a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs +++ b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs @@ -66,7 +66,7 @@ private Bindings(OptionalColumnTransform parent, ColumnType[] columnTypes, int[] SetMetadata(); } - public static Bindings Create(Arguments args, ISchema input, OptionalColumnTransform parent) + public static Bindings Create(Arguments args, Schema input, OptionalColumnTransform parent) { var names = new string[args.Column.Length]; var columnTypes = new ColumnType[args.Column.Length]; diff --git a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs index d5fa0defa1..ecfc41e1fb 100644 --- a/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs +++ b/src/Microsoft.ML.Transforms/RandomFourierFeaturizing.cs @@ -270,7 +270,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfo[] colum return columns.Select(x => (x.Input, x.Output)).ToArray(); } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = 
inputSchema.GetColumnType(srcCol); string reason = TestColumnType(type); @@ -430,8 +430,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private RandomFourierFeaturizingTransformer(IHost host, ModelLoadContext ctx) : base(host, ctx) diff --git a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs index 08ff810c2b..78642be703 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs @@ -853,8 +853,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); // Factory method for SignatureDataTransform. 
private static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) diff --git a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs index c57b8ffcb4..ffe54c7ea4 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs @@ -166,14 +166,14 @@ private sealed class Bindings : ManyToOneColumnBindingsBase public readonly VectorType[] Types; private readonly NgramHashingTransformer _parent; - public Bindings(Arguments args, ISchema schemaInput, NgramHashingTransformer parent) + public Bindings(Arguments args, Schema schemaInput, NgramHashingTransformer parent) : base(args.Column, schemaInput, TestTypes) { Types = new VectorType[args.Column.Length]; _parent = parent; } - public Bindings(ModelLoadContext ctx, ISchema schemaInput, NgramHashingTransformer parent) + public Bindings(ModelLoadContext ctx, Schema schemaInput, NgramHashingTransformer parent) : base(ctx, schemaInput, TestTypes) { Types = new VectorType[Infos.Length]; diff --git a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs index 170141c823..f37872da8a 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs @@ -268,7 +268,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfo[] colum return columns.Select(x => (x.Input, x.Output)).ToArray(); } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!NgramExtractingEstimator.IsColumnTypeValid(type)) @@ -444,8 +444,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for 
SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private NgramExtractingTransformer(IHost host, ModelLoadContext ctx) : base(host, ctx) diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index feaebea475..f8335092bd 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ -213,7 +213,7 @@ private static (string input, string output)[] GetColumnPairs(ColumnInfo[] colum return columns.Select(x => (x.Input, x.Output)).ToArray(); } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!StopWordsRemovingEstimator.IsColumnTypeValid(type)) @@ -304,8 +304,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); @@ -959,8 +959,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, Schema.Create(schema)); diff --git a/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs b/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs index 1fa39930bd..2584708cc8 100644 --- a/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs +++ b/src/Microsoft.ML.Transforms/Text/TextFeaturizingEstimator.cs @@ -464,7 +464,7 @@ public ITransformer Fit(IDataView input) public static ITransformer Create(IHostEnvironment env, ModelLoadContext ctx) => new Transformer(env, ctx); - private static string GenerateColumnName(ISchema schema, string srcName, string xfTag) + private static string GenerateColumnName(Schema schema, string srcName, string xfTag) { return schema.GetTempColumnName(string.Format("{0}_{1}", srcName, xfTag)); } diff --git a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs index 52ab910488..bdb2c7ec9b 100644 --- a/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs +++ b/src/Microsoft.ML.Transforms/Text/TextNormalizing.cs @@ -113,7 +113,7 @@ public TextNormalizingTransformer(IHostEnvironment env, } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!TextNormalizingEstimator.IsColumnTypeValid(type)) @@ -190,8 +190,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); diff --git a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs index d267c6706a..76242af78b 100644 --- a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs +++ b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs @@ -117,7 +117,7 @@ public TokenizingByCharactersTransformer(IHostEnvironment env, bool useMarkerCha public IReadOnlyCollection<(string input, string output)> Columns => ColumnPairs.AsReadOnly(); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!TokenizingByCharactersEstimator.IsColumnTypeValid(type)) @@ -180,8 +180,8 @@ internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDat } // Factory method for SignatureLoadRowMapper. 
- private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); diff --git a/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs b/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs index 2dac623422..67de331bf3 100644 --- a/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; @@ -544,7 +545,7 @@ public static IDataView ApplyConcatOnSources(IHostEnvironment env, ManyToOneColu /// Generates and returns unique names for columns source. Each element of the returned array is /// an array of unique source names per specific column. 
/// - public static string[][] GenerateUniqueSourceNames(IHostEnvironment env, ManyToOneColumn[] columns, ISchema schema) + public static string[][] GenerateUniqueSourceNames(IHostEnvironment env, ManyToOneColumn[] columns, Schema schema) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(columns, nameof(columns)); diff --git a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs index f83c0c26d4..ea85bfb3d4 100644 --- a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs +++ b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs @@ -301,8 +301,8 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, => Create(env, ctx).MakeDataTransform(input); // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); public override void Save(ModelSaveContext ctx) { @@ -320,7 +320,7 @@ public override void Save(ModelSaveContext ctx) private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var colType = inputSchema.GetColumnType(srcCol); if (!(colType.IsVector && colType.ItemType.IsText)) diff --git a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs index 18f652ad6d..501c32e65a 100644 --- a/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs +++ b/src/Microsoft.ML.Transforms/Text/WordTokenizing.cs @@ -145,7 +145,7 @@ public WordTokenizingTransformer(IHostEnvironment env, params ColumnInfo[] colum _columns = 
columns.ToArray(); } - protected override void CheckInputColumn(ISchema inputSchema, int col, int srcCol) + protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol) { var type = inputSchema.GetColumnType(srcCol); if (!WordTokenizingEstimator.IsColumnTypeValid(type)) @@ -218,8 +218,8 @@ internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDat } // Factory method for SignatureLoadRowMapper. - private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) - => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) + => Create(env, ctx).MakeRowMapper(inputSchema); private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 0467a5763d..e52f6d5354 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -271,7 +271,7 @@ public SchemaImpl(IExceptionContext ectx, Schema inputSchema, UngroupMode mode, AsSchema = Schema.Create(this); } - private static void CheckAndBind(IExceptionContext ectx, ISchema inputSchema, + private static void CheckAndBind(IExceptionContext ectx, Schema inputSchema, string[] pivotColumns, out PivotColumnInfo[] infos) { Contracts.AssertValueOrNull(ectx); diff --git a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs index 0cff6c1794..900839d4cb 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/StaticPipeTests.cs @@ -54,7 +54,7 @@ public StaticPipeTests(ITestOutputHelper output) { } - private void CheckSchemaHasColumn(ISchema schema, string name, out int idx) + private void CheckSchemaHasColumn(Schema schema, string name, 
out int idx) => Assert.True(schema.TryGetColumnIndex(name, out idx), "Could not find column '" + name + "'"); [Fact] @@ -169,7 +169,7 @@ public void SimpleTextLoaderObnoxiousTypeTest() // Ahhh. No one would ever, ever do this, of course, but just having fun with it. - void Helper(ISchema thisSchema, string name, ColumnType expected) + void Helper(Schema thisSchema, string name, ColumnType expected) { Assert.True(thisSchema.TryGetColumnIndex(name, out int thisCol), $"Could not find column '{name}'"); Assert.Equal(expected, thisSchema.GetColumnType(thisCol)); diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 97ec7bcb60..72dedf16d7 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -485,7 +485,7 @@ protected IDataLoader LoadPipe(string pathModel, IHostEnvironment env, IMultiStr } } - protected bool CheckMetadataTypes(ISchema sch) + protected bool CheckMetadataTypes(Schema sch) { var hs = new HashSet(); for (int col = 0; col < sch.ColumnCount; col++) @@ -545,7 +545,7 @@ protected bool CheckMetadataTypes(ISchema sch) return true; } - protected bool CheckSameSchemas(ISchema sch1, ISchema sch2, bool exactTypes = true, bool keyNames = true) + protected bool CheckSameSchemas(Schema sch1, Schema sch2, bool exactTypes = true, bool keyNames = true) { if (sch1.ColumnCount != sch2.ColumnCount) { @@ -603,7 +603,7 @@ protected bool CheckSameSchemas(ISchema sch1, ISchema sch2, bool exactTypes = tr return true; } - protected bool CheckMetadataNames(string kind, int size, ISchema sch1, ISchema sch2, int col, bool exactTypes, bool mustBeText) + protected bool CheckMetadataNames(string kind, int size, Schema sch1, Schema sch2, int col, bool exactTypes, bool mustBeText) { var names1 = default(VBuffer>); var names2 = default(VBuffer>); @@ -655,7 +655,7 @@ protected bool CheckMetadataNames(string kind, int 
size, ISchema sch1, ISchema s return true; } - protected bool CheckMetadataCallFailure(string kind, ISchema sch, int col, ref VBuffer> names) + protected bool CheckMetadataCallFailure(string kind, Schema sch, int col, ref VBuffer> names) { try { From abcb2e7fbb46f5231af7b1201859cdaefadc1870 Mon Sep 17 00:00:00 2001 From: Senja Filipi Date: Tue, 11 Dec 2018 13:56:38 -0800 Subject: [PATCH 042/100] Disintegrating Microsoft.ML.Api dispersing its content. (#1848) * Disintegrating Microsoft.ML.Api dispersing its content. --- Microsoft.ML.sln | 11 -- .../Dynamic/ConcatTransform.cs | 8 +- .../Dynamic/FastTreeRegression.cs | 5 +- ...FeatureContributionCalculationTransform.cs | 2 +- .../IidChangePointDetectorTransform.cs | 2 +- .../Dynamic/IidSpikeDetectorTransform.cs | 2 +- .../Microsoft.ML.Samples/Dynamic/KMeans.cs | 3 +- .../Dynamic/KeyToValue_Term.cs | 4 +- .../Dynamic/LdaTransform.cs | 2 +- .../Dynamic/MatrixFactorization.cs | 4 +- .../Dynamic/NgramExtraction.cs | 2 +- .../Dynamic/Normalizer.cs | 2 +- .../Dynamic/OnnxTransform.cs | 6 +- .../Dynamic/ProjectionTransforms.cs | 2 +- .../SsaChangePointDetectorTransform.cs | 2 +- .../Dynamic/SsaSpikeDetectorTransform.cs | 1 - .../Dynamic/TensorFlowTransform.cs | 6 +- .../Dynamic/TextTransform.cs | 2 +- src/Microsoft.ML.Api/CodeGenerationUtils.cs | 142 ---------------- src/Microsoft.ML.Api/GenerateCodeCommand.cs | 151 ------------------ .../GeneratedCodeTemplate.csresource | 71 -------- src/Microsoft.ML.Api/Microsoft.ML.Api.csproj | 26 --- src/Microsoft.ML.Api/Predictor.cs | 40 ----- .../Properties/AssemblyInfo.cs | 10 -- .../Data}/SchemaDefinition.cs | 3 +- .../DataView}/DataViewConstructionUtils.cs | 3 +- .../DataView}/InternalSchemaDefinition.cs | 9 +- .../DataView}/TypedCursor.cs | 4 +- .../Microsoft.ML.Data.csproj | 1 + .../Prediction}/PredictionEngine.cs | 9 +- .../Prediction}/PredictionFunction.cs | 5 +- .../Utilities}/ComponentCreation.cs | 26 +-- .../Utils}/ApiUtils.cs | 2 +- .../AssemblyRegistration.cs | 11 +- 
.../Data/CollectionDataSource.cs | 2 +- src/Microsoft.ML.Legacy/Data/TextLoader.cs | 2 +- src/Microsoft.ML.Legacy/LearningPipeline.cs | 2 +- .../Microsoft.ML.Legacy.csproj | 1 - .../Models/BinaryClassificationMetrics.cs | 2 +- .../Models/ClassificationMetrics.cs | 2 +- .../Models/ClusterMetrics.cs | 2 +- .../Models/CrossValidator.cs | 2 +- .../Models/RegressionMetrics.cs | 2 +- .../Models/TrainTestEvaluator.cs | 2 +- src/Microsoft.ML.Legacy/PredictionModel.cs | 2 +- .../Runtime/EntryPoints/ImportTextData.cs | 4 +- .../Microsoft.ML.SamplesUtils.csproj | 2 +- .../SamplesDatasetUtils.cs | 2 +- .../Microsoft.ML.TimeSeries.csproj | 2 +- .../PredictionFunction.cs | 1 - .../SequentialTransformBase.cs | 6 +- .../SequentialTransformerBase.cs | 10 +- .../CustomMappingCatalog.cs | 4 +- .../CustomMappingTransformer.cs | 1 - .../LambdaTransform.cs | 6 +- .../Microsoft.ML.Transforms.csproj | 1 - .../PermutationFeatureImportance.cs | 2 +- .../SerializableLambdaTransform.cs | 10 +- .../StatefulFilterTransform.cs | 4 +- test/Microsoft.ML.Benchmarks/HashBench.cs | 7 +- .../PredictionEngineBench.cs | 2 +- ...sticDualCoordinateAscentClassifierBench.cs | 2 +- .../Microsoft.ML.Core.Tests.csproj | 1 - .../UnitTests/TestEntryPoints.cs | 2 +- .../Microsoft.ML.FSharp.Tests.fsproj | 1 - test/Microsoft.ML.FSharp.Tests/SmokeTests.fs | 2 +- .../DnnImageFeaturizerTest.cs | 9 +- .../OnnxTransformTests.cs | 5 +- .../Microsoft.ML.Predictor.Tests.csproj | 1 - .../BaseTestBaseline.cs | 1 - .../DataPipe/TestDataPipeBase.cs | 11 +- .../EnvironmentExtensions.cs | 2 - .../Microsoft.ML.TestFramework.csproj | 1 - .../TestCommandBase.cs | 27 ---- .../TestSparseDataView.cs | 3 +- test/Microsoft.ML.Tests/CachingTests.cs | 1 - .../CollectionDataSourceTests.cs | 2 +- .../FeatureContributionTests.cs | 2 +- test/Microsoft.ML.Tests/ImagesTests.cs | 1 - .../LearningPipelineTests.cs | 2 +- test/Microsoft.ML.Tests/OnnxTests.cs | 2 +- .../PredictionModelTests.cs | 2 +- test/Microsoft.ML.Tests/RangeFilterTests.cs | 2 - 
.../Scenarios/Api/ApiScenariosTests.cs | 2 +- .../Api/CookbookSamples/CookbookSamples.cs | 3 +- .../CookbookSamplesDynamicApi.cs | 4 +- .../Estimators/DecomposableTrainAndPredict.cs | 3 +- .../Scenarios/Api/Estimators/Extensibility.cs | 3 +- .../Api/Estimators/MultithreadedPrediction.cs | 4 +- .../Api/Estimators/SimpleTrainAndPredict.cs | 4 +- .../Estimators/TrainSaveModelAndPredict.cs | 4 +- .../Scenarios/Api/TestApi.cs | 13 +- .../Scenarios/ClusteringTests.cs | 5 +- .../Scenarios/HousePricePredictionTests.cs | 2 +- .../Scenarios/IrisPlantClassificationTests.cs | 5 +- ...PlantClassificationWithStringLabelTests.cs | 4 +- .../PipelineApi/PipelineApiScenarioTests.cs | 3 +- .../Scenarios/SentimentPredictionTests.cs | 3 +- .../Scenarios/TensorflowTests.cs | 3 +- .../IrisPlantClassificationTests.cs | 5 - .../SentimentPredictionTests.cs | 6 +- .../TensorflowTests.cs | 3 +- .../TensorFlowEstimatorTests.cs | 2 +- test/Microsoft.ML.Tests/TermEstimatorTests.cs | 2 +- test/Microsoft.ML.Tests/TextLoaderTests.cs | 2 +- .../MatrixFactorizationTests.cs | 3 +- .../Transformers/CategoricalHashTests.cs | 2 +- .../Transformers/CategoricalTests.cs | 2 +- .../Transformers/CharTokenizeTests.cs | 2 +- .../Transformers/ConcatTests.cs | 3 - .../Transformers/ConvertTests.cs | 4 +- .../Transformers/CopyColumnEstimatorTests.cs | 2 - .../Transformers/CustomMappingTests.cs | 12 +- .../Transformers/HashTests.cs | 2 - .../KeyToBinaryVectorEstimatorTest.cs | 5 +- .../Transformers/KeyToVectorEstimatorTests.cs | 3 +- .../Transformers/NAIndicatorTests.cs | 2 +- .../Transformers/NAReplaceTests.cs | 2 +- .../Transformers/RffTests.cs | 2 +- .../Transformers/SelectColumnsTests.cs | 2 - .../Transformers/TextNormalizer.cs | 4 +- .../Transformers/WordTokenizeTests.cs | 2 +- .../Microsoft.ML.TimeSeries.Tests.csproj | 1 - .../TimeSeriesDirectApi.cs | 8 +- .../TimeSeriesEstimatorTests.cs | 2 +- 125 files changed, 182 insertions(+), 714 deletions(-) delete mode 100644 src/Microsoft.ML.Api/CodeGenerationUtils.cs 
delete mode 100644 src/Microsoft.ML.Api/GenerateCodeCommand.cs delete mode 100644 src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource delete mode 100644 src/Microsoft.ML.Api/Microsoft.ML.Api.csproj delete mode 100644 src/Microsoft.ML.Api/Predictor.cs delete mode 100644 src/Microsoft.ML.Api/Properties/AssemblyInfo.cs rename src/{Microsoft.ML.Api => Microsoft.ML.Data/Data}/SchemaDefinition.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/DataView}/DataViewConstructionUtils.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/DataView}/InternalSchemaDefinition.cs (98%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/DataView}/TypedCursor.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/Prediction}/PredictionEngine.cs (97%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/Prediction}/PredictionFunction.cs (97%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/Utilities}/ComponentCreation.cs (96%) rename src/{Microsoft.ML.Api => Microsoft.ML.Data/Utils}/ApiUtils.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Transforms}/CustomMappingCatalog.cs (97%) rename src/{Microsoft.ML.Api => Microsoft.ML.Transforms}/CustomMappingTransformer.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Transforms}/LambdaTransform.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Transforms}/SerializableLambdaTransform.cs (99%) rename src/{Microsoft.ML.Api => Microsoft.ML.Transforms}/StatefulFilterTransform.cs (99%) diff --git a/Microsoft.ML.sln b/Microsoft.ML.sln index 1b7d335955..4a1eede6ce 100644 --- a/Microsoft.ML.sln +++ b/Microsoft.ML.sln @@ -29,8 +29,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.KMeansClusteri EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.PCA", "src\Microsoft.ML.PCA\Microsoft.ML.PCA.csproj", "{58E06735-1129-4DD5-86E0-6BBFF049AAD9}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Api", "src\Microsoft.ML.Api\Microsoft.ML.Api.csproj", 
"{2F636A2C-062C-49F4-85F3-60DCADAB6A43}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Tests", "test\Microsoft.ML.Tests\Microsoft.ML.Tests.csproj", "{64BC22D3-1E76-41EF-94D8-C79E471FF2DD}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TestFramework", "test\Microsoft.ML.TestFramework\Microsoft.ML.TestFramework.csproj", "{B5989C06-4FFA-46C1-9D85-9366B34AB0A2}" @@ -227,14 +225,6 @@ Global {58E06735-1129-4DD5-86E0-6BBFF049AAD9}.Release|Any CPU.Build.0 = Release|Any CPU {58E06735-1129-4DD5-86E0-6BBFF049AAD9}.Release-Intrinsics|Any CPU.ActiveCfg = Release-Intrinsics|Any CPU {58E06735-1129-4DD5-86E0-6BBFF049AAD9}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Debug-Intrinsics|Any CPU.ActiveCfg = Debug-Intrinsics|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Debug-Intrinsics|Any CPU.Build.0 = Debug-Intrinsics|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Release|Any CPU.Build.0 = Release|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Release-Intrinsics|Any CPU.ActiveCfg = Release-Intrinsics|Any CPU - {2F636A2C-062C-49F4-85F3-60DCADAB6A43}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU {64BC22D3-1E76-41EF-94D8-C79E471FF2DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {64BC22D3-1E76-41EF-94D8-C79E471FF2DD}.Debug|Any CPU.Build.0 = Debug|Any CPU {64BC22D3-1E76-41EF-94D8-C79E471FF2DD}.Debug-Intrinsics|Any CPU.ActiveCfg = Debug-Intrinsics|Any CPU @@ -546,7 +536,6 @@ Global {7288C084-11C0-43BE-AC7F-45DCFEAEEBF6} = {09EADF06-BE25-4228-AB53-95AE3E15B530} {F1CAE3AB-4F86-4BC0-BBA8-C4A58E7E8A4A} = {09EADF06-BE25-4228-AB53-95AE3E15B530} {58E06735-1129-4DD5-86E0-6BBFF049AAD9} = 
{09EADF06-BE25-4228-AB53-95AE3E15B530} - {2F636A2C-062C-49F4-85F3-60DCADAB6A43} = {09EADF06-BE25-4228-AB53-95AE3E15B530} {64BC22D3-1E76-41EF-94D8-C79E471FF2DD} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4} {B5989C06-4FFA-46C1-9D85-9366B34AB0A2} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4} {6B047E09-39C9-4583-96F3-685D84CA4117} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/ConcatTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/ConcatTransform.cs index ba945f38fa..9ac39e2cd4 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/ConcatTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/ConcatTransform.cs @@ -1,9 +1,9 @@ -using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Transforms; using System; -using System.Linq; using System.Collections.Generic; -using Microsoft.ML.Transforms; +using System.Linq; namespace Microsoft.ML.Samples.Dynamic { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs index e4e3861d2b..634513c6de 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/FastTreeRegression.cs @@ -1,8 +1,7 @@ -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Data; using System; using System.Collections.Generic; -using System.Linq; namespace Microsoft.ML.Samples.Dynamic { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs index d88cff2cdd..5d55683bdd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/FeatureContributionCalculationTransform.cs @@ -1,4 +1,4 @@ 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using System; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs index 52729c6c97..773410e542 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/IidChangePointDetectorTransform.cs @@ -2,7 +2,7 @@ using System.Linq; using System.Collections.Generic; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.TimeSeriesProcessing; using Microsoft.ML.Core.Data; using Microsoft.ML.TimeSeries; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs index 5dcb7e1774..308c47932f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/IidSpikeDetectorTransform.cs @@ -4,7 +4,7 @@ using System.Collections.Generic; using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.TimeSeriesProcessing; using Microsoft.ML.Core.Data; using Microsoft.ML.TimeSeries; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs index c25e75b0e2..fc678ed24d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs @@ -1,7 +1,6 @@ -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using System; -using System.Collections.Generic; namespace Microsoft.ML.Samples.Dynamic { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/KeyToValue_Term.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/KeyToValue_Term.cs index 
d6e15eeb50..5ba169b38e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/KeyToValue_Term.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/KeyToValue_Term.cs @@ -1,12 +1,10 @@ using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using Microsoft.ML.Transforms.Text; using System; using System.Collections.Generic; -using System.Linq; namespace Microsoft.ML.Samples.Dynamic { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs index f2cbec7c28..79c074e7b3 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/LdaTransform.cs @@ -1,5 +1,5 @@ using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs index 063a554adc..66ce70a322 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs @@ -1,6 +1,6 @@ -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Trainers; using System; using System.Collections.Generic; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/NgramExtraction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/NgramExtraction.cs index 3c9147f32a..5d1567753e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/NgramExtraction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/NgramExtraction.cs @@ -1,5 +1,5 @@ using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using 
System; using System.Collections.Generic; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Normalizer.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Normalizer.cs index 121eede6e6..a8de69a04d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Normalizer.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Normalizer.cs @@ -1,5 +1,5 @@ using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Transforms.Normalizers; using System; using System.Collections.Generic; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs index 6db1884c00..bb7b9a70c8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/OnnxTransform.cs @@ -1,9 +1,5 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. 
- +using Microsoft.ML.Data; using Microsoft.ML.OnnxRuntime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Transforms; using System; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/ProjectionTransforms.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/ProjectionTransforms.cs index 0f7761cfba..b6246ab746 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/ProjectionTransforms.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/ProjectionTransforms.cs @@ -1,4 +1,4 @@ -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Data; using System; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs index e03ba68606..d52fffb44a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaChangePointDetectorTransform.cs @@ -1,6 +1,6 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.TimeSeriesProcessing; using Microsoft.ML.TimeSeries; using System; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs index fe9e981ecd..bd6c81bb2a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SsaSpikeDetectorTransform.cs @@ -1,6 +1,5 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.TimeSeriesProcessing; using Microsoft.ML.TimeSeries; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs index 2f526f54da..8c9fb477ba 100644 --- 
a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlowTransform.cs @@ -1,8 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using System; using System.Linq; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs index 57fbc7e493..23933a03a2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs @@ -1,5 +1,5 @@ using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Data; using Microsoft.ML.Transforms.Text; using System; diff --git a/src/Microsoft.ML.Api/CodeGenerationUtils.cs b/src/Microsoft.ML.Api/CodeGenerationUtils.cs deleted file mode 100644 index 1c39f1b9dd..0000000000 --- a/src/Microsoft.ML.Api/CodeGenerationUtils.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using System.Collections.Generic; -using System.Text; -using System.Text.RegularExpressions; -using System.CodeDom; -using Microsoft.ML.Runtime.Data; -using Microsoft.CSharp; -using System.IO; - -namespace Microsoft.ML.Runtime.Api -{ - /// - /// Utility methods for code generation. - /// - internal static class CodeGenerationUtils - { - /// - /// Replace placeholders with provided values. Assert that every placeholder is found. 
- /// - public static string MultiReplace(string text, Dictionary replacementMap) - { - Contracts.AssertValue(text); - Contracts.AssertValue(replacementMap); - var pattern = @"\/\*#(.*)#\*\/.*\/\*#\/\1#\*\/[\n\r]{0,2}"; - - int seenTags = 0; - var result = Regex.Replace(text, pattern, - match => - { - var tag = match.Groups[1].Value; - string replacement; - bool found = replacementMap.TryGetValue(tag, out replacement); - Contracts.Assert(found); - seenTags++; - return replacement; - }, RegexOptions.Singleline); - - Contracts.Assert(seenTags == replacementMap.Count); - return result; - } - - /// - /// Append a field declaration to the provided . - /// - public static void AppendFieldDeclaration(CSharpCodeProvider codeProvider, StringBuilder target, int columnIndex, - string fieldName, ColumnType colType, bool appendInitializer, bool useVBuffer) - { - Contracts.AssertValueOrNull(codeProvider); - Contracts.AssertValue(target); - Contracts.Assert(columnIndex >= 0); - Contracts.AssertNonEmpty(fieldName); - Contracts.AssertValue(colType); - - var attributes = new List(); - string generatedCsTypeName = GetBackingTypeName(colType, useVBuffer, attributes); - - if (codeProvider != null && !codeProvider.IsValidIdentifier(fieldName)) - { - attributes.Add(string.Format("[ColumnName({0})]", GetCSharpString(codeProvider, fieldName))); - fieldName = string.Format("Column{0}", columnIndex); - } - - const string indent = " "; - if (attributes.Count > 0) - { - foreach (var attr in attributes) - { - target.Append(indent); - target.AppendLine(attr); - } - } - target.Append(indent); - target.AppendFormat("public {0} {1}", generatedCsTypeName, fieldName); - - if (appendInitializer && colType is VectorType vecColType && vecColType.Size > 0 && !useVBuffer) - { - Contracts.Assert(generatedCsTypeName.EndsWith("[]")); - var csItemType = generatedCsTypeName.Substring(0, generatedCsTypeName.Length - 2); - target.AppendFormat(" = new {0}[{1}]", csItemType, vecColType.Size); - } - 
target.AppendLine(";"); - } - - /// - /// Generates a C# string for a given input (with proper escaping). - /// - public static string GetCSharpString(CSharpCodeProvider codeProvider, string value) - { - using (var writer = new StringWriter()) - { - codeProvider.GenerateCodeFromExpression(new CodePrimitiveExpression(value), writer, null); - return writer.ToString(); - } - } - - /// - /// Gets the C# strings representing the type name for a variable corresponding to - /// the column type. - /// - /// If the type is a vector, then controls whether the array field is - /// generated or . - /// - /// If additional attributes are required, they are appended to the list. - /// - private static string GetBackingTypeName(ColumnType colType, bool useVBuffer, List attributes) - { - Contracts.AssertValue(colType); - Contracts.AssertValue(attributes); - if (colType is VectorType vecColType) - { - if (vecColType.Size > 0) - { - // By default, arrays are assumed variable length, unless a [VectorType(dim1, dim2, ...)] - // attribute is applied to the fields. - attributes.Add(string.Format("[VectorType({0})]", string.Join(", ", vecColType.Dimensions))); - } - - var itemType = GetBackingTypeName(colType.ItemType, false, attributes); - return useVBuffer ? string.Format("VBuffer<{0}>", itemType) : string.Format("{0}[]", itemType); - } - - if (colType.IsText) - return "string"; - - if (colType.IsKey) - { - // The way to define a key type in C# is by specifying the [KeyType] attribute and - // making the field type equal to the underlying raw type. - var key = colType.AsKey; - attributes.Add(string.Format("[KeyType(Count={0}, Min={1}, Contiguous={2})]", - key.Count, - key.Min, - key.Contiguous ? 
"true" : "false")); - } - - return colType.AsPrimitive.RawType.Name; - } - } -} diff --git a/src/Microsoft.ML.Api/GenerateCodeCommand.cs b/src/Microsoft.ML.Api/GenerateCodeCommand.cs deleted file mode 100644 index 44fe07479e..0000000000 --- a/src/Microsoft.ML.Api/GenerateCodeCommand.cs +++ /dev/null @@ -1,151 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using System.Collections.Generic; -using System.IO; -using System.Reflection; -using System.Text; -using Microsoft.CSharp; -using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Command; -using Microsoft.ML.Runtime.CommandLine; -using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Model; - -[assembly: LoadableClass(typeof(GenerateCodeCommand), typeof(GenerateCodeCommand.Arguments), typeof(SignatureCommand), - "Generate Sample Prediction Code", GenerateCodeCommand.LoadName, "codegen")] - -namespace Microsoft.ML.Runtime.Api -{ - /// - /// Generates the sample prediction code for a given model file, with correct input and output classes. - /// - /// REVIEW: Consider adding support for generating VBuffers instead of arrays, maybe for high dimensionality vectors. - /// - internal sealed class GenerateCodeCommand : ICommand - { - public const string LoadName = "GenerateSamplePredictionCode"; - private const string CodeTemplatePath = "Microsoft.ML.Api.GeneratedCodeTemplate.csresource"; - -#pragma warning disable 0649 // The command is internal, suppress a warning about fields never assigned to. 
- public sealed class Arguments - { - [Argument(ArgumentType.Required, HelpText = "Input model file", ShortName = "in", IsInputFileName = true)] - public string InputModelFile; - - [Argument(ArgumentType.Required, HelpText = "File to output generated C# code", ShortName = "cs")] - public string CSharpOutput; - - /// - /// Whether to use the to represent vector columns (supports sparse vectors). - /// - [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to use the VBuffer to represent vector columns (supports sparse vectors)", - ShortName = "sparse", SortOrder = 102)] - public bool SparseVectorDeclaration; - - // REVIEW: currently, it's only used in unit testing to not generate the paths into the test output folder. - // However, it might be handy for automation scenarios, so I've added this as a hidden option. - [Argument(ArgumentType.AtMostOnce, HelpText = "A location of the model file to put into generated file", Hide = true)] - public string ModelNameOverride; - } -#pragma warning restore 0649 - - private readonly IHost _host; - private readonly Arguments _args; - - public GenerateCodeCommand(IHostEnvironment env, Arguments args) - { - Contracts.CheckValue(env, nameof(env)); - _host = env.Register("GenerateCodeCommand"); - _host.CheckValue(args, nameof(args)); - _host.CheckUserArg(!string.IsNullOrWhiteSpace(args.InputModelFile), - nameof(args.InputModelFile), "input model file is required"); - _host.CheckUserArg(!string.IsNullOrWhiteSpace(args.CSharpOutput), - nameof(args.CSharpOutput), "Output file is required"); - _args = args; - } - - public void Run() - { - string template; - using (var stream = Assembly.GetExecutingAssembly().GetManifestResourceStream(CodeTemplatePath)) - using (var reader = new StreamReader(stream)) - template = reader.ReadToEnd(); - - var codeProvider = new CSharpCodeProvider(); - using (var fs = File.OpenRead(_args.InputModelFile)) - { - var transformPipe = ModelFileUtils.LoadPipeline(_host, fs, new MultiFileSource(null), true); - 
var pred = _host.LoadPredictorOrNull(fs); - - IDataView root; - for (root = transformPipe; root is IDataTransform; root = ((IDataTransform)root).Source) - ; - - // root is now the loader. - _host.Assert(root is IDataLoader); - - // Loader columns. - var loaderSb = new StringBuilder(); - for (int i = 0; i < root.Schema.ColumnCount; i++) - { - if (root.Schema.IsHidden(i)) - continue; - if (loaderSb.Length > 0) - loaderSb.AppendLine(); - - ColumnType colType = root.Schema.GetColumnType(i); - CodeGenerationUtils.AppendFieldDeclaration(codeProvider, loaderSb, i, root.Schema.GetColumnName(i), colType, true, _args.SparseVectorDeclaration); - } - - // Scored example columns. - IDataView scorer; - if (pred == null) - scorer = transformPipe; - else - { - var roles = ModelFileUtils.LoadRoleMappingsOrNull(_host, fs); - scorer = roles != null - ? _host.CreateDefaultScorer(new RoleMappedData(transformPipe, roles, opt: true), pred) - : _host.CreateDefaultScorer(new RoleMappedData(transformPipe, label: null, "Features"), pred); - } - - var nonScoreSb = new StringBuilder(); - var scoreSb = new StringBuilder(); - for (int i = 0; i < scorer.Schema.ColumnCount; i++) - { - if (scorer.Schema.IsHidden(i)) - continue; - bool isScoreColumn = scorer.Schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.ScoreColumnSetId, i) != null; - - var sb = isScoreColumn ? scoreSb : nonScoreSb; - if (sb.Length > 0) - sb.AppendLine(); - - ColumnType colType = scorer.Schema.GetColumnType(i); - CodeGenerationUtils.AppendFieldDeclaration(codeProvider, sb, i, scorer.Schema.GetColumnName(i), colType, false, _args.SparseVectorDeclaration); - } - - // Turn model path into a C# identifier and insert it. - var modelPath = !string.IsNullOrWhiteSpace(_args.ModelNameOverride) ? _args.ModelNameOverride : _args.InputModelFile; - modelPath = CodeGenerationUtils.GetCSharpString(codeProvider, modelPath); - modelPath = string.Format("modelPath = {0};", modelPath); - - // Replace values inside the template. 
- var replacementMap = - new Dictionary - { - { "EXAMPLE_CLASS_DECL", loaderSb.ToString() }, - { "SCORED_EXAMPLE_CLASS_DECL", nonScoreSb.ToString() }, - { "SCORE_CLASS_DECL", scoreSb.ToString() }, - { "MODEL_PATH", modelPath } - }; - - var classSource = CodeGenerationUtils.MultiReplace(template, replacementMap); - File.WriteAllText(_args.CSharpOutput, classSource); - } - } - } -} diff --git a/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource b/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource deleted file mode 100644 index b550860bdc..0000000000 --- a/src/Microsoft.ML.Api/GeneratedCodeTemplate.csresource +++ /dev/null @@ -1,71 +0,0 @@ -using System; -using System.Collections.Generic; -using Microsoft.ML.Legacy; -using Microsoft.ML.Runtime.Api; - -namespace MLGeneratedCode -{ -public class Program -{ -/// -/// This is the input to the trained model. -/// -/// In most pipelines, not all columns that are used in training are also used in scoring. Namely, the label -/// and weight columns are almost never required at scoring time. Since we don't know which columns -/// are 'optional' in this sense, all the columns are listed below. -/// -/// You are free to remove any fields from the below class. If the fields are not required for scoring, the model -/// will continue to work. Otherwise, the exception will be thrown when a prediction engine is created. -/// -/// -public class InputData -{ -/*#EXAMPLE_CLASS_DECL#*/ -/*#/EXAMPLE_CLASS_DECL#*/ -} - -/// -/// This is the output of the scored model, the prediction. -/// -/// -public class ScoredOutput -{ -/*#SCORE_CLASS_DECL#*/ -/*#/SCORE_CLASS_DECL#*/ - - // These are all remaining available columns, either supplied as the input, or intermediate - // columns generated by the transforms. Materializing these columns has a performance cost, - // so they are commented out. Feel free to uncomment any column that is useful for your scenario. 
-#if false -/*#SCORED_EXAMPLE_CLASS_DECL#*/ -/*#/SCORED_EXAMPLE_CLASS_DECL#*/ -#endif -} - -/*public static void Main(string[] args) -{ -string modelPath; -/*#MODEL_PATH#*//*#/MODEL_PATH#*/ - -PredictAsync(modelPath); -}*/ - -/// -/// This method demonstrates how to run prediction. -/// -/// -public static async void PredictAsync(string modelPath) -{ - var model = await PredictionModel.ReadAsync(modelPath); - - var inputData = new InputData(); - // TODO: populate the example's features. - - var score = model.Predict(inputData); - // TODO: consume the resulting score. - - var scores = model.Predict(new List { inputData, inputData }); - // TODO: consume the resulting scores. - } -} -} diff --git a/src/Microsoft.ML.Api/Microsoft.ML.Api.csproj b/src/Microsoft.ML.Api/Microsoft.ML.Api.csproj deleted file mode 100644 index 88324dca5e..0000000000 --- a/src/Microsoft.ML.Api/Microsoft.ML.Api.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - netstandard2.0 - Microsoft.ML - - - - - - - - - - - - - - - - - - - - - diff --git a/src/Microsoft.ML.Api/Predictor.cs b/src/Microsoft.ML.Api/Predictor.cs deleted file mode 100644 index 137c58bfb8..0000000000 --- a/src/Microsoft.ML.Api/Predictor.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. - -using System; - -namespace Microsoft.ML.Runtime.Api -{ - /// - /// An opaque 'holder' of the predictor, meant to insulate the user from the internal TLC predictor structure, - /// which is subject to change. - /// - public sealed class Predictor - { - /// - /// The actual predictor. - /// - internal readonly IPredictor Pred; - - internal Predictor(IPredictor pred) - { - Contracts.AssertValue(pred); - Pred = pred; - } - - /// - /// A way for the user to extract the predictor object and 'delve into the underworld' of unsupported non-API methods. 
- /// This is needed, for instance, to inspect the weights of a predictor programmatically. - /// The intention is to expose most methods through the API and make usage of this method increasingly unnecessary. - /// - [Obsolete("Welcome adventurous stranger, to the Underdark! By calling the mysterious GetPredictorObject method,"+ - " you have entered a world of shifting realities, where nothing is as it seems. Your code may work today, but"+ - " the churning impermanence of the Underdark means the strong foothold today may be nothing but empty air"+ - " tomorrow. Brace yourself!")] - public object GetPredictorObject() - { - return Pred; - } - } -} diff --git a/src/Microsoft.ML.Api/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Api/Properties/AssemblyInfo.cs deleted file mode 100644 index 05db89ef9f..0000000000 --- a/src/Microsoft.ML.Api/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,10 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. -// See the LICENSE file in the project root for more information. 
- -using System.Runtime.CompilerServices; -using Microsoft.ML; - -[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TimeSeries" + PublicKey.Value)] - -[assembly: WantsToBeBestFriends] diff --git a/src/Microsoft.ML.Api/SchemaDefinition.cs b/src/Microsoft.ML.Data/Data/SchemaDefinition.cs similarity index 99% rename from src/Microsoft.ML.Api/SchemaDefinition.cs rename to src/Microsoft.ML.Data/Data/SchemaDefinition.cs index 3ed4fbbbdd..d0f72b42a4 100644 --- a/src/Microsoft.ML.Api/SchemaDefinition.cs +++ b/src/Microsoft.ML.Data/Data/SchemaDefinition.cs @@ -6,10 +6,11 @@ using System.Collections.Generic; using System.Linq; using System.Reflection; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Data { /// /// Attach to a member of a class to indicate that the item type should be of class key. diff --git a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs similarity index 99% rename from src/Microsoft.ML.Api/DataViewConstructionUtils.cs rename to src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs index 6acc64ecaf..756876dd97 100644 --- a/src/Microsoft.ML.Api/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs @@ -12,7 +12,7 @@ using System.Linq; using System.Reflection; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Runtime.Data { /// /// A helper class to create data views based on the user-provided types. 
@@ -797,6 +797,7 @@ protected override bool MoveManyCore(long count) } } + [BestFriend] internal static Schema.DetachedColumn[] GetSchemaColumns(InternalSchemaDefinition schemaDefn) { Contracts.AssertValue(schemaDefn); diff --git a/src/Microsoft.ML.Api/InternalSchemaDefinition.cs b/src/Microsoft.ML.Data/DataView/InternalSchemaDefinition.cs similarity index 98% rename from src/Microsoft.ML.Api/InternalSchemaDefinition.cs rename to src/Microsoft.ML.Data/DataView/InternalSchemaDefinition.cs index f39c274df0..a33ac4b949 100644 --- a/src/Microsoft.ML.Api/InternalSchemaDefinition.cs +++ b/src/Microsoft.ML.Data/DataView/InternalSchemaDefinition.cs @@ -2,20 +2,19 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using System; using System.Collections.Generic; using System.Linq; using System.Reflection; -using Microsoft.ML.Runtime.Data; -using static Microsoft.ML.Runtime.Api.SchemaDefinition; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Runtime.Data { using Conditional = System.Diagnostics.ConditionalAttribute; - /// /// An internal class that holds the (already validated) mapping between a custom type and an IDataView schema. 
/// + [BestFriend] internal sealed class InternalSchemaDefinition { public readonly Column[] Columns; @@ -208,7 +207,7 @@ public static void GetVectorAndKind(Type rawType, string name, out bool isVector throw Contracts.ExceptParam(nameof(rawType), "Could not determine an IDataView type for member {0}", name); } - public static InternalSchemaDefinition Create(Type userType, Direction direction) + public static InternalSchemaDefinition Create(Type userType, SchemaDefinition.Direction direction) { var userSchemaDefinition = SchemaDefinition.Create(userType, direction); return Create(userType, userSchemaDefinition); diff --git a/src/Microsoft.ML.Api/TypedCursor.cs b/src/Microsoft.ML.Data/DataView/TypedCursor.cs similarity index 99% rename from src/Microsoft.ML.Api/TypedCursor.cs rename to src/Microsoft.ML.Data/DataView/TypedCursor.cs index e73dc88b10..06bd5dc7b3 100644 --- a/src/Microsoft.ML.Api/TypedCursor.cs +++ b/src/Microsoft.ML.Data/DataView/TypedCursor.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using System; @@ -10,7 +10,7 @@ using System.Linq; using System.Reflection; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Data { /// /// This interface is an with 'strongly typed' binding. 
diff --git a/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj b/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj index 2ebb859178..982f5b07e9 100644 --- a/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj +++ b/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj @@ -19,6 +19,7 @@ + diff --git a/src/Microsoft.ML.Api/PredictionEngine.cs b/src/Microsoft.ML.Data/Prediction/PredictionEngine.cs similarity index 97% rename from src/Microsoft.ML.Api/PredictionEngine.cs rename to src/Microsoft.ML.Data/Prediction/PredictionEngine.cs index b6080b4fc3..0019f93546 100644 --- a/src/Microsoft.ML.Api/PredictionEngine.cs +++ b/src/Microsoft.ML.Data/Prediction/PredictionEngine.cs @@ -9,8 +9,9 @@ using System.Collections.Generic; using System.IO; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Runtime { + // REVIEW: Temporarly moving here since it is used by the Legacy project. Remove when removing the legacy project. /// /// A class that runs the previously trained model (and the preceding transform pipeline) on the /// in-memory data in batch mode. @@ -19,7 +20,8 @@ namespace Microsoft.ML.Runtime.Api /// /// The user-defined type that holds the example. /// The user-defined type that holds the prediction. 
- public sealed class BatchPredictionEngine + [BestFriend] + internal sealed class BatchPredictionEngine where TSrc : class where TDst : class, new() { @@ -122,7 +124,8 @@ public void Reset() } } - public sealed class PredictionEngine : PredictionEngineBase + [BestFriend] + internal sealed class PredictionEngine : PredictionEngineBase where TSrc : class where TDst : class, new() { diff --git a/src/Microsoft.ML.Api/PredictionFunction.cs b/src/Microsoft.ML.Data/Prediction/PredictionFunction.cs similarity index 97% rename from src/Microsoft.ML.Api/PredictionFunction.cs rename to src/Microsoft.ML.Data/Prediction/PredictionFunction.cs index 7243301f8d..5b3e356f39 100644 --- a/src/Microsoft.ML.Api/PredictionFunction.cs +++ b/src/Microsoft.ML.Data/Prediction/PredictionFunction.cs @@ -3,9 +3,10 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Data; -namespace Microsoft.ML.Runtime.Data +namespace Microsoft.ML.Runtime { /// /// A prediction engine class, that takes instances of through diff --git a/src/Microsoft.ML.Api/ComponentCreation.cs b/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs similarity index 96% rename from src/Microsoft.ML.Api/ComponentCreation.cs rename to src/Microsoft.ML.Data/Utilities/ComponentCreation.cs index e83cfe10b6..ab9274cc7a 100644 --- a/src/Microsoft.ML.Api/ComponentCreation.cs +++ b/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs @@ -2,15 +2,16 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System; -using System.Collections.Generic; -using System.IO; using Microsoft.ML.Core.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; +using System; +using System.Collections.Generic; +using System.IO; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Data { /// /// This class defines extension methods for an to facilitate creating @@ -118,7 +119,7 @@ public static IDataView CreateStreamingDataView(this IHostEnvironment env, /// Whether to ignore missing columns in the data view. /// The optional input schema. If null, the schema is inferred from the type. /// The optional output schema. If null, the schema is inferred from the type. - public static BatchPredictionEngine CreateBatchPredictionEngine(this IHostEnvironment env, Stream modelStream, + internal static BatchPredictionEngine CreateBatchPredictionEngine(this IHostEnvironment env, Stream modelStream, bool ignoreMissingColumns = false, SchemaDefinition inputSchemaDefinition = null, SchemaDefinition outputSchemaDefinition = null) where TSrc : class where TDst : class, new() @@ -138,7 +139,7 @@ public static BatchPredictionEngine CreateBatchPredictionEngineWhether to ignore missing columns in the data view. /// The optional input schema. If null, the schema is inferred from the type. /// The optional output schema. If null, the schema is inferred from the type. - public static BatchPredictionEngine CreateBatchPredictionEngine(this IHostEnvironment env, IDataView dataPipe, + internal static BatchPredictionEngine CreateBatchPredictionEngine(this IHostEnvironment env, IDataView dataPipe, bool ignoreMissingColumns = false, SchemaDefinition inputSchemaDefinition = null, SchemaDefinition outputSchemaDefinition = null) where TSrc : class where TDst : class, new() @@ -264,7 +265,7 @@ public static IDataTransform CreateTransform(this IHostEnvironment env, string s /// extracted. /// The scored data. 
public static IDataScorerTransform CreateScorer(this IHostEnvironment env, string settings, - RoleMappedData data, Predictor predictor, RoleMappedSchema trainSchema = null) + RoleMappedData data, IPredictor predictor, RoleMappedSchema trainSchema = null) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(data, nameof(data)); @@ -279,7 +280,7 @@ public static IDataScorerTransform CreateScorer(this IHostEnvironment env, strin signatureType, settings); - var bindable = ScoreUtils.GetSchemaBindableMapper(env, predictor.Pred, scorerFactorySettings: scorerFactorySettings); + var bindable = ScoreUtils.GetSchemaBindableMapper(env, predictor, scorerFactorySettings: scorerFactorySettings); var mapper = bindable.Bind(env, data.Schema); return CreateCore(env, factoryType, signatureType, settings, data.Data, mapper, trainSchema); } @@ -295,14 +296,14 @@ public static IDataScorerTransform CreateScorer(this IHostEnvironment env, strin /// extracted. /// The scored data. public static IDataScorerTransform CreateDefaultScorer(this IHostEnvironment env, RoleMappedData data, - Predictor predictor, RoleMappedSchema trainSchema = null) + IPredictor predictor, RoleMappedSchema trainSchema = null) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(data, nameof(data)); env.CheckValue(predictor, nameof(predictor)); env.CheckValueOrNull(trainSchema); - return ScoreUtils.GetScorer(predictor.Pred, data, env, trainSchema); + return ScoreUtils.GetScorer(predictor, data, env, trainSchema); } public static IEvaluator CreateEvaluator(this IHostEnvironment env, string settings) @@ -317,11 +318,10 @@ public static IEvaluator CreateEvaluator(this IHostEnvironment env, string setti /// /// The host environment to use. /// The model stream. 
- public static Predictor LoadPredictorOrNull(this IHostEnvironment env, Stream modelStream) + public static IPredictor LoadPredictorOrNull(this IHostEnvironment env, Stream modelStream) { Contracts.CheckValue(modelStream, nameof(modelStream)); - var p = ModelFileUtils.LoadPredictorOrNull(env, modelStream); - return p == null ? null : new Predictor(p); + return ModelFileUtils.LoadPredictorOrNull(env, modelStream); } internal static ITrainer CreateTrainer(this IHostEnvironment env, TArgs arguments, out string loadName) diff --git a/src/Microsoft.ML.Api/ApiUtils.cs b/src/Microsoft.ML.Data/Utils/ApiUtils.cs similarity index 99% rename from src/Microsoft.ML.Api/ApiUtils.cs rename to src/Microsoft.ML.Data/Utils/ApiUtils.cs index 760ed1e768..04839856d1 100644 --- a/src/Microsoft.ML.Api/ApiUtils.cs +++ b/src/Microsoft.ML.Data/Utils/ApiUtils.cs @@ -7,7 +7,7 @@ using System.Reflection.Emit; using Microsoft.ML.Runtime.Data; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Runtime { internal delegate void Peek(TRow row, long position, ref TValue value); diff --git a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs index 5f633a0f67..decc9b37cd 100644 --- a/src/Microsoft.ML.Legacy/AssemblyRegistration.cs +++ b/src/Microsoft.ML.Legacy/AssemblyRegistration.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Ensemble; using Microsoft.ML.Runtime.Sweeper; using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Trainers.FastTree; @@ -38,11 +38,10 @@ public static void RegisterAssemblies(IHostEnvironment environment) /// private static bool LoadStandardAssemblies() { - Assembly apiAssembly = typeof(LambdaTransform).Assembly; // ML.Api - AssemblyName apiAssemblyName = apiAssembly.GetName(); + Assembly dataAssembly = typeof(TextLoader).Assembly; // ML.Data + AssemblyName dataAssemblyName = dataAssembly.GetName(); - _ = typeof(TextLoader).Assembly; // ML.Data - //_ = typeof(EnsemblePredictor).Assembly); // ML.Ensemble BUG https://github.com/dotnet/machinelearning/issues/1078 Ensemble isn't in a NuGet package + _ = typeof(EnsemblePredictor).Assembly; // ML.Ensemble _ = typeof(FastTreeBinaryModelParameters).Assembly; // ML.FastTree _ = typeof(KMeansModelParameters).Assembly; // ML.KMeansClustering _ = typeof(Maml).Assembly; // ML.Maml @@ -55,7 +54,7 @@ private static bool LoadStandardAssemblies() _ = Assembly.Load(new AssemblyName() { Name = "Microsoft.ML.StandardLearners", - Version = apiAssemblyName.Version, //assume the same version as ML.Api + Version = dataAssemblyName.Version, //assume the same version as ML.Data }); return true; diff --git a/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs b/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs index a24e6e1c2a..18b6d2b6c3 100644 --- a/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs +++ b/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Internal.Utilities; diff --git a/src/Microsoft.ML.Legacy/Data/TextLoader.cs b/src/Microsoft.ML.Legacy/Data/TextLoader.cs index 3140760cef..434af8f3f5 100644 --- a/src/Microsoft.ML.Legacy/Data/TextLoader.cs +++ b/src/Microsoft.ML.Legacy/Data/TextLoader.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Legacy/LearningPipeline.cs b/src/Microsoft.ML.Legacy/LearningPipeline.cs index 40a3ea3b52..37adedecbc 100644 --- a/src/Microsoft.ML.Legacy/LearningPipeline.cs +++ b/src/Microsoft.ML.Legacy/LearningPipeline.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using System; diff --git a/src/Microsoft.ML.Legacy/Microsoft.ML.Legacy.csproj b/src/Microsoft.ML.Legacy/Microsoft.ML.Legacy.csproj index ec7d25d104..79e2ac2f48 100644 --- a/src/Microsoft.ML.Legacy/Microsoft.ML.Legacy.csproj +++ b/src/Microsoft.ML.Legacy/Microsoft.ML.Legacy.csproj @@ -12,7 +12,6 @@ - diff --git a/src/Microsoft.ML.Legacy/Models/BinaryClassificationMetrics.cs b/src/Microsoft.ML.Legacy/Models/BinaryClassificationMetrics.cs index 9e4d78ac99..3668cca261 100644 --- a/src/Microsoft.ML.Legacy/Models/BinaryClassificationMetrics.cs +++ b/src/Microsoft.ML.Legacy/Models/BinaryClassificationMetrics.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Legacy/Models/ClassificationMetrics.cs b/src/Microsoft.ML.Legacy/Models/ClassificationMetrics.cs index bcb2ad6905..68d8620d97 100644 --- a/src/Microsoft.ML.Legacy/Models/ClassificationMetrics.cs +++ b/src/Microsoft.ML.Legacy/Models/ClassificationMetrics.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Legacy/Models/ClusterMetrics.cs b/src/Microsoft.ML.Legacy/Models/ClusterMetrics.cs index 029cd33869..dbb6640663 100644 --- a/src/Microsoft.ML.Legacy/Models/ClusterMetrics.cs +++ b/src/Microsoft.ML.Legacy/Models/ClusterMetrics.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Legacy/Models/CrossValidator.cs b/src/Microsoft.ML.Legacy/Models/CrossValidator.cs index 6f8e79f61b..81ac48a733 100644 --- a/src/Microsoft.ML.Legacy/Models/CrossValidator.cs +++ b/src/Microsoft.ML.Legacy/Models/CrossValidator.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using System; diff --git a/src/Microsoft.ML.Legacy/Models/RegressionMetrics.cs b/src/Microsoft.ML.Legacy/Models/RegressionMetrics.cs index bb5237b612..2bc510a37b 100644 --- a/src/Microsoft.ML.Legacy/Models/RegressionMetrics.cs +++ b/src/Microsoft.ML.Legacy/Models/RegressionMetrics.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs b/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs index ee39b0f841..e547040cb8 100644 --- a/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs +++ b/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using System; diff --git a/src/Microsoft.ML.Legacy/PredictionModel.cs b/src/Microsoft.ML.Legacy/PredictionModel.cs index fd79755d9e..1c1fcf97aa 100644 --- a/src/Microsoft.ML.Legacy/PredictionModel.cs +++ b/src/Microsoft.ML.Legacy/PredictionModel.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using System; diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs index 536b3aa772..77513e53ec 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs @@ -2,13 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System; +using Microsoft.ML.Data; using Microsoft.ML.Legacy; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; +using System; [assembly: LoadableClass(typeof(void), typeof(ImportTextData), null, typeof(SignatureEntryPointModule), "ImportTextData")] diff --git a/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj b/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj index 8bc5796f73..e4d6c5d504 100644 --- a/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj +++ b/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj @@ -6,7 +6,7 @@ - + diff --git a/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs b/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs index 2c3dbb0485..714a3532eb 100644 --- a/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs +++ b/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using System; using System.Collections.Generic; using System.Net; diff --git a/src/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.csproj b/src/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.csproj index 09ce1d5758..eb00b14674 100644 --- a/src/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.csproj +++ b/src/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.csproj @@ -6,9 +6,9 @@ - + diff --git a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs index 11195e8874..cdebe371fa 100644 --- a/src/Microsoft.ML.TimeSeries/PredictionFunction.cs +++ b/src/Microsoft.ML.TimeSeries/PredictionFunction.cs @@ -5,7 +5,6 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs index 97bb3eb53a..702f56d046 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformBase.cs @@ -2,13 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Data; +using Microsoft.ML.Transforms; +using System; namespace Microsoft.ML.Runtime.TimeSeriesProcessing { diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 956a4c9aa5..20134d7fb7 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -2,18 +2,18 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.TimeSeries; using Microsoft.ML.Runtime.Model.Onnx; using Microsoft.ML.Runtime.Model.Pfa; -using System.Linq; -using Microsoft.ML.Data; +using Microsoft.ML.TimeSeries; +using Microsoft.ML.Transforms; +using System; using System.IO; +using System.Linq; namespace Microsoft.ML.Runtime.TimeSeriesProcessing { diff --git a/src/Microsoft.ML.Api/CustomMappingCatalog.cs b/src/Microsoft.ML.Transforms/CustomMappingCatalog.cs similarity index 97% rename from src/Microsoft.ML.Api/CustomMappingCatalog.cs rename to src/Microsoft.ML.Transforms/CustomMappingCatalog.cs index 92b4a564ae..92e971a9bd 100644 --- a/src/Microsoft.ML.Api/CustomMappingCatalog.cs +++ b/src/Microsoft.ML.Transforms/CustomMappingCatalog.cs @@ -2,13 +2,11 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Transforms; using System; -using System.Collections.Generic; -using System.Text; namespace Microsoft.ML { diff --git a/src/Microsoft.ML.Api/CustomMappingTransformer.cs b/src/Microsoft.ML.Transforms/CustomMappingTransformer.cs similarity index 99% rename from src/Microsoft.ML.Api/CustomMappingTransformer.cs rename to src/Microsoft.ML.Transforms/CustomMappingTransformer.cs index 7c81412d78..37b46bccf3 100644 --- a/src/Microsoft.ML.Api/CustomMappingTransformer.cs +++ b/src/Microsoft.ML.Transforms/CustomMappingTransformer.cs @@ -5,7 +5,6 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; diff --git a/src/Microsoft.ML.Api/LambdaTransform.cs b/src/Microsoft.ML.Transforms/LambdaTransform.cs similarity index 99% rename from src/Microsoft.ML.Api/LambdaTransform.cs rename to src/Microsoft.ML.Transforms/LambdaTransform.cs index b1667501a1..861907a089 100644 --- a/src/Microsoft.ML.Api/LambdaTransform.cs +++ b/src/Microsoft.ML.Transforms/LambdaTransform.cs @@ -3,19 +3,19 @@ // See the LICENSE file in the project root for more information. 
using System; -using System.ComponentModel.Composition.Hosting; using System.IO; using System.Text; using Microsoft.ML.Core.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Transforms; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; +using Microsoft.ML.Data; [assembly: LoadableClass(typeof(ITransformer), typeof(LambdaTransform), null, typeof(SignatureLoadModel), "", LambdaTransform.LoaderSignature)] -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Transforms { using Conditional = System.Diagnostics.ConditionalAttribute; diff --git a/src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj b/src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj index 7ab146b21c..f29adffd39 100644 --- a/src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj +++ b/src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj @@ -46,7 +46,6 @@ - diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs index 7293ce8302..5d5033c2e1 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportance.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Runtime.Internal.Utilities; diff --git a/src/Microsoft.ML.Api/SerializableLambdaTransform.cs b/src/Microsoft.ML.Transforms/SerializableLambdaTransform.cs similarity index 99% rename from src/Microsoft.ML.Api/SerializableLambdaTransform.cs rename to src/Microsoft.ML.Transforms/SerializableLambdaTransform.cs index 38bd58e76d..9fa56b7eab 100644 --- a/src/Microsoft.ML.Api/SerializableLambdaTransform.cs +++ b/src/Microsoft.ML.Transforms/SerializableLambdaTransform.cs @@ -2,20 +2,20 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System; -using System.IO; -using System.Reflection; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; +using Microsoft.ML.Transforms; +using System; +using System.IO; +using System.Reflection; using System.Runtime.Serialization.Formatters.Binary; [assembly: LoadableClass(SerializableLambdaTransform.Summary, typeof(ITransformTemplate), typeof(SerializableLambdaTransform), null, typeof(SignatureLoadDataTransform), "", SerializableLambdaTransform.LoaderSignature)] -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Transforms { internal static class SerializableLambdaTransform { diff --git a/src/Microsoft.ML.Api/StatefulFilterTransform.cs b/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs similarity index 99% rename from src/Microsoft.ML.Api/StatefulFilterTransform.cs rename to src/Microsoft.ML.Transforms/StatefulFilterTransform.cs index 6b63c37349..6600497f24 100644 --- a/src/Microsoft.ML.Api/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs @@ -3,12 +3,12 @@ // See the LICENSE file in the 
project root for more information. using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Transforms; using System; using System.IO; -namespace Microsoft.ML.Runtime.Api +namespace Microsoft.ML.Transforms { // REVIEW: the current interface to 'state' object may be inadequate: instead of insisting on // parameterless constructor, we could take a delegate that would create the state per cursor. diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index fe823a220c..a8036f1490 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -2,16 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System; using BenchmarkDotNet.Attributes; using Microsoft.ML.Data; -using Microsoft.ML.Transforms; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Learners; -using System.Linq; using Microsoft.ML.Transforms.Conversions; +using System; +using System.Linq; namespace Microsoft.ML.Benchmarks { diff --git a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs index c711ab63f6..2ba6435eb1 100644 --- a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs +++ b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs @@ -3,8 +3,8 @@ // See the LICENSE file in the project root for more information. 
using BenchmarkDotNet.Attributes; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; diff --git a/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs b/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs index bee16e0d7b..3ff43d0c21 100644 --- a/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs +++ b/test/Microsoft.ML.Benchmarks/StochasticDualCoordinateAscentClassifierBench.cs @@ -4,10 +4,10 @@ using BenchmarkDotNet.Attributes; using BenchmarkDotNet.Engines; +using Microsoft.ML.Data; using Microsoft.ML.Legacy.Models; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Trainers; using Microsoft.ML.Transforms.Text; diff --git a/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj b/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj index b60eaa4d88..d50494290e 100644 --- a/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj +++ b/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj @@ -5,7 +5,6 @@ - diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index a32ded52bf..95f70b6c8b 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Core.Tests.UnitTests; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; diff --git a/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj b/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj index ea24a31732..406b4a41a7 100644 --- a/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj +++ b/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj @@ -25,7 +25,6 @@ - diff --git a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs index b804b101d9..ebff475fb8 100644 --- a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs +++ b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs @@ -59,7 +59,7 @@ open Microsoft.ML open Microsoft.ML.Legacy.Data open Microsoft.ML.Legacy.Transforms open Microsoft.ML.Legacy.Trainers -open Microsoft.ML.Runtime.Api +open Microsoft.ML.Data open Xunit module SmokeTest1 = diff --git a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs index b9dd06cd93..a57f752064 100644 --- a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs +++ b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs @@ -3,20 +3,19 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.ImageAnalytics; +using Microsoft.ML.Runtime.Model; +using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Transforms; using System; using System.Collections.Generic; using System.IO; using System.Runtime.InteropServices; -using System.Text; using Xunit; using Xunit.Abstractions; -using System.Reflection; -using Microsoft.ML.Runtime.Model; namespace Microsoft.ML.Tests { diff --git a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs index b420001c26..b0ab758720 100644 --- a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs @@ -3,13 +3,14 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; -using Microsoft.ML.Transforms; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.ImageAnalytics; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; +using Microsoft.ML.Transforms; using System; using System.Collections.Generic; using System.IO; diff --git a/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj b/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj index 482378ceb6..8f1cc4be39 100644 --- a/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj +++ b/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj @@ -5,7 +5,6 @@ - diff --git a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs index 795d219e44..c51a6c71f3 100644 --- a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs +++ 
b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs @@ -2,7 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Tools; diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index 72dedf16d7..f3e53a0799 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -2,20 +2,19 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Reflection; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.TestFramework; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; using Xunit; namespace Microsoft.ML.Runtime.RunTests diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index a95bca433a..1107a0795c 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Ensemble; using Microsoft.ML.Runtime.Learners; @@ -29,7 +28,6 @@ public static TEnvironment AddStandardComponents(this TEnvironment #pragma warning disable 612 env.ComponentCatalog.RegisterAssembly(typeof(Experiment).Assembly); // ML.Legacy #pragma warning restore 612 - env.ComponentCatalog.RegisterAssembly(typeof(ComponentCreation).Assembly); // ML.Api return env; } } diff --git a/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj b/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj index b3afcfff4b..dc14be3b7c 100644 --- a/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj +++ b/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj @@ -4,7 +4,6 @@ - diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs index d3c3591571..378f98ccb0 100644 --- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs +++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs @@ -2093,32 +2093,5 @@ public void Datatypes() TestCore("savedata", intermediateData.Path, "loader=binary", "saver=text", textOutputPath.Arg("dout")); Done(); } - - [Fact] - public void CommandCodeGen() - { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - - // REVIEW: this tests that the generated output matches the baseline. This does NOT baseline - // the console output. Currently, there's no console output either, but if some is added, a baseline test - // will be in order. - - // First, train a model on breast-cancer. 
- var dataPath = GetDataPath("breast-cancer.txt"); - var modelOutPath = DeleteOutputPath("Command", "codegen-model.zip"); - var csOutPath = DeleteOutputPath("Command", "codegen-out.cs"); - - var trainArgs = string.Format( - "train data={{{0}}} loader=Text{{col=Label:0 col=F!1:1-5 col=F2:6-9}} xf=Concat{{col=Features:F!1,F2}} tr=lr out={{{1}}}", - dataPath, modelOutPath); - MainForTest(trainArgs); - - // Now, generate the prediction code. - MainForTest(string.Format("codegen in={{{0}}} cs={{{1}}} modelNameOverride=model.zip", modelOutPath, csOutPath)); - CheckEquality("Command", "codegen-out.cs"); - - Done(); - } } } diff --git a/test/Microsoft.ML.TestFramework/TestSparseDataView.cs b/test/Microsoft.ML.TestFramework/TestSparseDataView.cs index 75cd3ca516..cb971e3be9 100644 --- a/test/Microsoft.ML.TestFramework/TestSparseDataView.cs +++ b/test/Microsoft.ML.TestFramework/TestSparseDataView.cs @@ -2,7 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using System; using Xunit; diff --git a/test/Microsoft.ML.Tests/CachingTests.cs b/test/Microsoft.ML.Tests/CachingTests.cs index 42c85cce41..de9fc59e54 100644 --- a/test/Microsoft.ML.Tests/CachingTests.cs +++ b/test/Microsoft.ML.Tests/CachingTests.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using System.Linq; diff --git a/test/Microsoft.ML.Tests/CollectionDataSourceTests.cs b/test/Microsoft.ML.Tests/CollectionDataSourceTests.cs index d252c61798..1fc6e151a0 100644 --- a/test/Microsoft.ML.Tests/CollectionDataSourceTests.cs +++ b/test/Microsoft.ML.Tests/CollectionDataSourceTests.cs @@ -2,11 +2,11 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Legacy.Data; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.TestFramework; using System; diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs index 7cbd452851..97b6d47ca1 100644 --- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs +++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.RunTests; using System; using System.Collections.Generic; diff --git a/test/Microsoft.ML.Tests/ImagesTests.cs b/test/Microsoft.ML.Tests/ImagesTests.cs index 491be1592c..e145c3a7cb 100644 --- a/test/Microsoft.ML.Tests/ImagesTests.cs +++ b/test/Microsoft.ML.Tests/ImagesTests.cs @@ -4,7 +4,6 @@ using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.ImageAnalytics; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/LearningPipelineTests.cs b/test/Microsoft.ML.Tests/LearningPipelineTests.cs index b527658824..64c6abd8cc 100644 --- a/test/Microsoft.ML.Tests/LearningPipelineTests.cs +++ b/test/Microsoft.ML.Tests/LearningPipelineTests.cs @@ -2,10 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Legacy.Data; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.TestFramework; using System.Linq; diff --git a/test/Microsoft.ML.Tests/OnnxTests.cs b/test/Microsoft.ML.Tests/OnnxTests.cs index 9b53f9aba8..db6758b576 100644 --- a/test/Microsoft.ML.Tests/OnnxTests.cs +++ b/test/Microsoft.ML.Tests/OnnxTests.cs @@ -2,11 +2,11 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Legacy.Data; using Microsoft.ML.Legacy.Models; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model.Onnx; using Microsoft.ML.Runtime.RunTests; diff --git a/test/Microsoft.ML.Tests/PredictionModelTests.cs b/test/Microsoft.ML.Tests/PredictionModelTests.cs index 30d2435b6a..069823f1ab 100644 --- a/test/Microsoft.ML.Tests/PredictionModelTests.cs +++ b/test/Microsoft.ML.Tests/PredictionModelTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.TestFramework; using System.IO; using System.Threading.Tasks; diff --git a/test/Microsoft.ML.Tests/RangeFilterTests.cs b/test/Microsoft.ML.Tests/RangeFilterTests.cs index 5518eee472..9619b635bf 100644 --- a/test/Microsoft.ML.Tests/RangeFilterTests.cs +++ b/test/Microsoft.ML.Tests/RangeFilterTests.cs @@ -3,11 +3,9 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using System.Linq; -using System.Threading; using Xunit; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs index b800fcca62..eb79c12aa2 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/ApiScenariosTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.TestFramework; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs index 66ba6ba137..abc66d7bed 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs @@ -4,7 +4,7 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Learners; using Microsoft.ML.Runtime.RunTests; @@ -13,7 +13,6 @@ using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Conversions; -using Microsoft.ML.Transforms.FeatureSelection; using Microsoft.ML.Transforms.Text; using System; using System.Collections.Generic; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs index 706b4aad7d..16d4248e24 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs @@ -2,14 +2,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.TestFramework; using Microsoft.ML.Transforms.Categorical; -using Microsoft.ML.Transforms.FeatureSelection; using Microsoft.ML.Transforms.Normalizers; using Microsoft.ML.Transforms.Text; using System; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs index 0315256930..7eeab00c4e 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs @@ -3,11 +3,10 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Transforms; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System.Linq; using Xunit; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs index d396c23c80..cd77c50518 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs @@ -3,11 +3,10 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Transforms; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System; using System.Linq; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs index 9e96cb4dcf..912b5f50d1 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs @@ -2,9 +2,9 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Learners; using Microsoft.ML.Runtime.RunTests; using System.Threading.Tasks; using Xunit; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index edff84341e..d5b43f36dc 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.RunTests; using System.Linq; using Xunit; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs index 97e21e5836..4d1606ee9e 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs @@ -4,9 +4,7 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Learners; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.RunTests; using System.IO; using System.Linq; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs index d219fefec3..ea412e02ea 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs @@ -2,17 +2,18 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.Trainers.Online; using Microsoft.ML.TestFramework; +using Microsoft.ML.Trainers.Online; +using Microsoft.ML.Transforms; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; using Xunit; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Scenarios/ClusteringTests.cs b/test/Microsoft.ML.Tests/Scenarios/ClusteringTests.cs index e68c25be9a..9e796428de 100644 --- a/test/Microsoft.ML.Tests/Scenarios/ClusteringTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/ClusteringTests.cs @@ -1,6 +1,7 @@ -using Microsoft.ML.Legacy.Transforms; +using Microsoft.ML.Data; +using Microsoft.ML.Legacy.Transforms; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; using Xunit; diff --git a/test/Microsoft.ML.Tests/Scenarios/HousePricePredictionTests.cs b/test/Microsoft.ML.Tests/Scenarios/HousePricePredictionTests.cs index 7175dfb295..180de2b766 100644 --- a/test/Microsoft.ML.Tests/Scenarios/HousePricePredictionTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/HousePricePredictionTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.TestFramework; using Xunit; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs index e5b8028f14..ac76ae71f3 100644 --- a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs @@ -2,12 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Legacy.Data; +using Microsoft.ML.Data; using Microsoft.ML.Legacy.Models; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; +using Microsoft.ML.Runtime.Data; using Xunit; +using TextLoader = Microsoft.ML.Legacy.Data.TextLoader; namespace Microsoft.ML.Scenarios { diff --git a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs index 7d1e87cd66..5a5f0ebe7a 100644 --- a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs @@ -2,12 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Legacy.Data; +using Microsoft.ML.Data; using Microsoft.ML.Legacy.Models; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; -using Microsoft.ML.Runtime.Api; using Xunit; +using TextLoader = Microsoft.ML.Legacy.Data.TextLoader; namespace Microsoft.ML.Scenarios { diff --git a/test/Microsoft.ML.Tests/Scenarios/PipelineApi/PipelineApiScenarioTests.cs b/test/Microsoft.ML.Tests/Scenarios/PipelineApi/PipelineApiScenarioTests.cs index 6b96929db7..93438995ef 100644 --- a/test/Microsoft.ML.Tests/Scenarios/PipelineApi/PipelineApiScenarioTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/PipelineApi/PipelineApiScenarioTests.cs @@ -2,7 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Data; using Microsoft.ML.TestFramework; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs index a8be68d31a..cb81c685fc 100644 --- a/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/SentimentPredictionTests.cs @@ -2,13 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Data; using Microsoft.ML.Legacy; using Microsoft.ML.Legacy.Models; using Microsoft.ML.Legacy.Trainers; using Microsoft.ML.Legacy.Transforms; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; using System.Linq; diff --git a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs index 5b7d9a59e3..1bbcbd19b1 100644 --- a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs @@ -2,7 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.ImageAnalytics; using Microsoft.ML.Trainers; diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 1de3d82ace..667d3c6619 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -5,14 +5,9 @@ using Microsoft.ML.Data; using Microsoft.ML.Legacy.Models; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Learners; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; -using Microsoft.ML.Trainers; -using Microsoft.ML.Transforms.Normalizers; -using System; using System.IO; using Xunit; diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs index c027accb99..87732048ff 100644 --- 
a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs @@ -2,16 +2,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Legacy.Models; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Trainers.FastTree; -using Microsoft.ML.Runtime.Internal.Calibration; -using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Transforms.Text; using System.Linq; using Xunit; +using Microsoft.ML.Runtime.Internal.Internallearn; namespace Microsoft.ML.Scenarios { diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs index 0c2ae72004..7cf3ae14c4 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs @@ -2,7 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.ImageAnalytics; using Microsoft.ML.Runtime.RunTests; diff --git a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs index a8b32b96a8..925ace12b5 100644 --- a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs @@ -3,7 +3,7 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.ImageAnalytics; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/TermEstimatorTests.cs b/test/Microsoft.ML.Tests/TermEstimatorTests.cs index c442f660e5..1ab8619203 100644 --- a/test/Microsoft.ML.Tests/TermEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/TermEstimatorTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/TextLoaderTests.cs b/test/Microsoft.ML.Tests/TextLoaderTests.cs index a2ea3ecda5..bc3b1db7e2 100644 --- a/test/Microsoft.ML.Tests/TextLoaderTests.cs +++ b/test/Microsoft.ML.Tests/TextLoaderTests.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.TestFramework; diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs index ce403b548a..7f6884ae49 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs @@ -2,11 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Trainers; -using Microsoft.ML; using System; using System.Collections.Generic; using System.Runtime.InteropServices; diff --git a/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs b/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs index 7de79b6ad2..b9c73cf9c2 100644 --- a/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CategoricalHashTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/Transformers/CategoricalTests.cs b/test/Microsoft.ML.Tests/Transformers/CategoricalTests.cs index 46d0b9adc9..7424f0e311 100644 --- a/test/Microsoft.ML.Tests/Transformers/CategoricalTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CategoricalTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs b/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs index 58d663b871..534d23180b 100644 --- a/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; diff --git a/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs b/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs index 5a5aa52e23..5f8c385dab 100644 --- a/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs @@ -3,12 +3,9 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; -using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; -using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Transforms; using System.IO; using Xunit; diff --git a/test/Microsoft.ML.Tests/Transformers/ConvertTests.cs b/test/Microsoft.ML.Tests/Transformers/ConvertTests.cs index c7b274a4d9..7511485007 100644 --- a/test/Microsoft.ML.Tests/Transformers/ConvertTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/ConvertTests.cs @@ -2,15 +2,15 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Transforms; -using Microsoft.ML.Transforms.Conversions; using Microsoft.ML.Transforms.Categorical; +using Microsoft.ML.Transforms.Conversions; using System; using System.IO; using System.Linq; diff --git a/test/Microsoft.ML.Tests/Transformers/CopyColumnEstimatorTests.cs b/test/Microsoft.ML.Tests/Transformers/CopyColumnEstimatorTests.cs index 418257c594..19d3a18d66 100644 --- a/test/Microsoft.ML.Tests/Transformers/CopyColumnEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CopyColumnEstimatorTests.cs @@ -4,12 +4,10 @@ using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Transforms; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System; using System.IO; diff --git a/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs b/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs index 11f9a440ee..c26a749554 100644 --- a/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CustomMappingTests.cs @@ -3,20 +3,16 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Data.IO; -using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; -using Microsoft.ML.Runtime.Tools; +using Microsoft.ML.Transforms; +using System; using System.ComponentModel.Composition; using System.ComponentModel.Composition.Hosting; -using System.IO; +using System.Linq; using Xunit; using Xunit.Abstractions; -using System.Linq; -using System; -using Microsoft.ML.Transforms; namespace Microsoft.ML.Tests.Transformers { diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index b328cb60fc..409b5f3f30 100644 --- a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -3,13 +3,11 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; -using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Conversions; using System; using System.IO; diff --git a/test/Microsoft.ML.Tests/Transformers/KeyToBinaryVectorEstimatorTest.cs b/test/Microsoft.ML.Tests/Transformers/KeyToBinaryVectorEstimatorTest.cs index 34605089fe..36e37ccd8e 100644 --- a/test/Microsoft.ML.Tests/Transformers/KeyToBinaryVectorEstimatorTest.cs +++ b/test/Microsoft.ML.Tests/Transformers/KeyToBinaryVectorEstimatorTest.cs @@ -2,13 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.StaticPipe; -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; -using Microsoft.ML.Transforms.Categorical; +using Microsoft.ML.StaticPipe; using Microsoft.ML.Transforms.Conversions; using System; using System.IO; diff --git a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs index 4468a1f3ea..16d7707730 100644 --- a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs @@ -2,13 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; using Microsoft.ML.StaticPipe; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System; using System.IO; diff --git a/test/Microsoft.ML.Tests/Transformers/NAIndicatorTests.cs b/test/Microsoft.ML.Tests/Transformers/NAIndicatorTests.cs index 368d0a64cf..7140f9b234 100644 --- a/test/Microsoft.ML.Tests/Transformers/NAIndicatorTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NAIndicatorTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs b/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs index 977ceec40d..d91b2768b3 100644 --- a/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/Transformers/RffTests.cs b/test/Microsoft.ML.Tests/Transformers/RffTests.cs index d647eddbca..4bfd6a2b84 100644 --- a/test/Microsoft.ML.Tests/Transformers/RffTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/RffTests.cs @@ -1,4 +1,4 @@ -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; diff --git a/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs b/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs index 26b04fe569..76f390d38c 100644 --- a/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs @@ -3,8 +3,6 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; -using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; diff --git a/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs b/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs index 5f9c08a2e2..8e777ee388 100644 --- a/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs +++ b/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs @@ -2,14 +2,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; -using Microsoft.ML.Transforms.Text; using Microsoft.ML.Transforms; +using Microsoft.ML.Transforms.Text; using System.IO; using Xunit; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs index 83ff4c2226..6cf2c6a46d 100644 --- a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; diff --git a/test/Microsoft.ML.TimeSeries.Tests/Microsoft.ML.TimeSeries.Tests.csproj b/test/Microsoft.ML.TimeSeries.Tests/Microsoft.ML.TimeSeries.Tests.csproj index 41b7f2c380..bc7f32d660 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/Microsoft.ML.TimeSeries.Tests.csproj +++ b/test/Microsoft.ML.TimeSeries.Tests/Microsoft.ML.TimeSeries.Tests.csproj @@ -3,7 +3,6 @@ CORECLR - diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs index 0bd4604daa..62441c611f 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs @@ -2,16 +2,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System; -using System.Collections.Generic; -using System.IO; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; -using Microsoft.ML.Runtime.Api; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.TimeSeriesProcessing; +using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.TimeSeries; +using System.Collections.Generic; +using System.IO; using Xunit; namespace Microsoft.ML.Tests diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index 3d7cfd5d32..4630499fde 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Runtime.Api; +using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.TimeSeriesProcessing; From c8776d882e56309b0d81100eb5e64472fad0e0fa Mon Sep 17 00:00:00 2001 From: Eric Erhardt Date: Wed, 12 Dec 2018 00:06:16 +0000 Subject: [PATCH 043/100] Remove `As` methods on ColumnType. (#1864) * Remove ColumnType.AsVector. * Remove ColumnType.AsPrimitive. * Remove ColumnType.AsKey. --- src/Microsoft.ML.Core/Data/ColumnType.cs | 18 ----------------- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 4 ++-- .../Commands/ScoreCommand.cs | 2 +- src/Microsoft.ML.Data/Data/Conversion.cs | 19 ++++++++---------- .../DataLoadSave/FakeSchema.cs | 6 +++--- .../DataLoadSave/Text/TextLoader.cs | 3 +-- .../DataLoadSave/Text/TextLoaderParser.cs | 12 ++++++----- .../DataLoadSave/Text/TextSaver.cs | 3 +-- .../DataLoadSave/Transpose/TransposeLoader.cs | 2 +- .../DataLoadSave/Transpose/TransposeSaver.cs | 2 +- .../DataView/DataViewConstructionUtils.cs | 13 ++++++------ src/Microsoft.ML.Data/DataView/Transposer.cs | 7 ++++--- .../EntryPoints/ScoreColumnSelector.cs | 2 +- .../Evaluators/EvaluatorUtils.cs | 4 ++-- .../MultiClassClassifierEvaluator.cs | 4 ++-- .../MultiOutputRegressionEvaluator.cs | 4 ++-- .../Scorers/BinaryClassifierScorer.cs | 2 +- ...FeatureContributionCalculationTransform.cs | 2 +- .../Scorers/MultiClassClassifierScorer.cs | 6 +++--- .../ColumnConcatenatingTransformer.cs | 2 +- .../Transforms/DropSlotsTransform.cs | 4 ++-- src/Microsoft.ML.Data/Transforms/Hashing.cs | 2 +- .../Transforms/InvertHashUtils.cs | 6 +++--- .../Transforms/KeyToValue.cs | 6 +++--- .../Transforms/TypeConverting.cs | 13 ++++++------ .../ValueToKeyMappingTransformer.cs | 4 ++-- .../ValueToKeyMappingTransformerImpl.cs | 12 +++++------ src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 6 +++--- .../TreeEnsembleFeaturizer.cs | 2 +- .../VectorWhitening.cs | 3 ++- .../LightGbmMulticlassTrainer.cs | 10 +++++----- 
src/Microsoft.ML.Onnx/OnnxUtils.cs | 10 +++++----- .../MatrixFactorizationTrainer.cs | 4 ++-- .../FactorizationMachineTrainer.cs | 8 +++++--- .../TensorflowTransform.cs | 2 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 5 +++-- .../MissingValueDroppingTransformer.cs | 2 +- .../MissingValueIndicatorTransform.cs | 4 ++-- .../MissingValueIndicatorTransformer.cs | 8 +++++--- .../MissingValueReplacing.cs | 20 ++++++++++--------- .../UngroupTransform.cs | 2 +- 41 files changed, 119 insertions(+), 131 deletions(-) diff --git a/src/Microsoft.ML.Core/Data/ColumnType.cs b/src/Microsoft.ML.Core/Data/ColumnType.cs index 83b5caa27d..628d955907 100644 --- a/src/Microsoft.ML.Core/Data/ColumnType.cs +++ b/src/Microsoft.ML.Core/Data/ColumnType.cs @@ -80,12 +80,6 @@ private protected ColumnType(Type rawType, DataKind rawKind) [BestFriend] internal bool IsPrimitive { get; } - /// - /// Equivalent to as . - /// - [BestFriend] - internal PrimitiveType AsPrimitive => IsPrimitive ? (PrimitiveType)this : null; - /// /// Whether this type is a standard numeric type. External code should use is . /// @@ -140,12 +134,6 @@ internal bool IsBool [BestFriend] internal bool IsKey { get; } - /// - /// Equivalent to as . - /// - [BestFriend] - internal KeyType AsKey => IsKey ? (KeyType)this : null; - /// /// Zero return means either it's not a key type or the cardinality is unknown. External code should first /// test whether this is of type , then if so get the property @@ -166,12 +154,6 @@ internal bool IsBool [BestFriend] internal bool IsVector { get; } - /// - /// Equivalent to as . - /// - [BestFriend] - internal VectorType AsVector => IsVector ? (VectorType)this : null; - /// /// For non-vector types, this returns the column type itself (i.e., return this). 
/// diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index 024953f173..723c38fc74 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -165,12 +165,12 @@ public static VectorType GetCategoricalType(int rangeCount) return new VectorType(NumberType.I4, rangeCount, 2); } - private static volatile ColumnType _scoreColumnSetIdType; + private static volatile KeyType _scoreColumnSetIdType; /// /// The type of the ScoreColumnSetId metadata. /// - public static ColumnType ScoreColumnSetIdType + public static KeyType ScoreColumnSetIdType { get { diff --git a/src/Microsoft.ML.Data/Commands/ScoreCommand.cs b/src/Microsoft.ML.Data/Commands/ScoreCommand.cs index 868dd83b19..a6541c8b8b 100644 --- a/src/Microsoft.ML.Data/Commands/ScoreCommand.cs +++ b/src/Microsoft.ML.Data/Commands/ScoreCommand.cs @@ -210,7 +210,7 @@ private void RunCore(IChannel ch) private bool ShouldAddColumn(Schema schema, int i, uint scoreSet, bool outputNamesAndLabels) { uint scoreSetId = 0; - if (schema.TryGetMetadata(MetadataUtils.ScoreColumnSetIdType.AsPrimitive, MetadataUtils.Kinds.ScoreColumnSetId, i, ref scoreSetId) + if (schema.TryGetMetadata(MetadataUtils.ScoreColumnSetIdType, MetadataUtils.Kinds.ScoreColumnSetId, i, ref scoreSetId) && scoreSetId == scoreSet) { return true; diff --git a/src/Microsoft.ML.Data/Data/Conversion.cs b/src/Microsoft.ML.Data/Data/Conversion.cs index 1cfe187f24..e80135d6c8 100644 --- a/src/Microsoft.ML.Data/Data/Conversion.cs +++ b/src/Microsoft.ML.Data/Data/Conversion.cs @@ -412,15 +412,12 @@ public bool TryGetStandardConversion(ColumnType typeSrc, ColumnType typeDst, conv = null; identity = false; - if (typeSrc.IsKey) + if (typeSrc is KeyType keySrc) { - var keySrc = typeSrc.AsKey; - // Key types are only convertable to compatible key types or unsigned integer // types that are large enough. 
- if (typeDst.IsKey) + if (typeDst is KeyType keyDst) { - var keyDst = typeDst.AsKey; // We allow the Min value to shift. We currently don't allow the counts to vary. // REVIEW: Should we allow the counts to vary? Allowing the dst to be bigger is trivial. // Smaller dst means mapping values to NA. @@ -451,11 +448,11 @@ public bool TryGetStandardConversion(ColumnType typeSrc, ColumnType typeDst, // REVIEW: Should we look for illegal values and force them to zero? If so, then // we'll need to set identity to false. } - else if (typeDst.IsKey) + else if (typeDst is KeyType keyDst) { if (!typeSrc.IsText) return false; - conv = GetKeyParse(typeDst.AsKey); + conv = GetKeyParse(keyDst); return true; } else if (!typeDst.IsStandardScalar) @@ -490,10 +487,10 @@ public bool TryGetStringConversion(ColumnType type, out ValueMapper(type.AsKey); + conv = GetKeyStringConversion(keyType); return true; } return TryGetStringConversion(out conv); @@ -572,8 +569,8 @@ public TryParseMapper GetTryParseConversion(ColumnType typeDst) "Parse conversion only supported for standard types"); Contracts.Check(typeDst.RawType == typeof(TDst), "Wrong TDst type parameter"); - if (typeDst.IsKey) - return GetKeyTryParse(typeDst.AsKey); + if (typeDst is KeyType keyType) + return GetKeyTryParse(keyType); Contracts.Assert(_tryParseDelegates.ContainsKey(typeDst.RawKind)); return (TryParseMapper)_tryParseDelegates[typeDst.RawKind]; diff --git a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs index c67c6ae733..604fe3c2b7 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs @@ -55,11 +55,11 @@ private static ColumnType MakeColumnType(SchemaShape.Column inputCol) { ColumnType curType = inputCol.ItemType; if (inputCol.IsKey) - curType = new KeyType(curType.AsPrimitive.RawKind, 0, AllKeySizes); + curType = new KeyType(((PrimitiveType)curType).RawKind, 0, AllKeySizes); if (inputCol.Kind == 
SchemaShape.Column.VectorKind.VariableVector) - curType = new VectorType(curType.AsPrimitive, 0); + curType = new VectorType((PrimitiveType)curType, 0); else if (inputCol.Kind == SchemaShape.Column.VectorKind.Vector) - curType = new VectorType(curType.AsPrimitive, AllVectorSizes); + curType = new VectorType((PrimitiveType)curType, AllVectorSizes); return curType; } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs index a3d5260531..7e8a90b75e 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoader.cs @@ -840,9 +840,8 @@ public void Save(ModelSaveContext ctx) Contracts.Assert((DataKind)(byte)type.RawKind == type.RawKind); ctx.Writer.Write((byte)type.RawKind); ctx.Writer.WriteBoolByte(type.IsKey); - if (type.IsKey) + if (type is KeyType key) { - var key = type.AsKey; ctx.Writer.WriteBoolByte(key.Contiguous); ctx.Writer.Write(key.Min); ctx.Writer.Write(key.Count); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs index cdcb507f51..f9d6cd2b09 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs @@ -663,12 +663,14 @@ public Parser(TextLoader parent) { var info = _infos[i]; - if (info.ColType.ItemType.IsKey) + if (info.ColType is KeyType keyType) { - if (!info.ColType.IsVector) - _creator[i] = cache.GetCreatorOne(info.ColType.AsKey); - else - _creator[i] = cache.GetCreatorVec(info.ColType.ItemType.AsKey); + _creator[i] = cache.GetCreatorOne(keyType); + continue; + } + else if (info.ColType is VectorType vectorType && vectorType.ItemType is KeyType vectorKeyType) + { + _creator[i] = cache.GetCreatorVec(vectorKeyType); continue; } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs index 
03fd7dfa70..08bc2c3e3f 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs @@ -489,9 +489,8 @@ private TextLoader.Column GetColumn(string name, ColumnType type, int? start) { DataKind? kind; KeyRange keyRange = null; - if (type.ItemType.IsKey) + if (type.ItemType is KeyType key) { - var key = type.ItemType.AsKey; if (!key.Contiguous) keyRange = new KeyRange(key.Min, contiguous: false); else if (key.Count == 0) diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index fe63426c61..83617f0f52 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -659,7 +659,7 @@ public VectorType GetSlotType(int col) var view = _parent._entries[col].GetViewOrNull(); if (view == null) return null; - return view.Schema.GetColumnType(0).AsVector; + return view.Schema.GetColumnType(0) as VectorType; } } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeSaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeSaver.cs index 9ce4ab3872..6878fbf935 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeSaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeSaver.cs @@ -70,7 +70,7 @@ public bool IsColumnSavable(ColumnType type) // an artificial vector type out of this. Obviously if you can't make a vector // out of the items, then you could not save each slot's values. 
var itemType = type.ItemType; - var primitiveType = itemType.AsPrimitive; + var primitiveType = itemType as PrimitiveType; if (primitiveType == null) return false; var vectorType = new VectorType(primitiveType, size: 2); diff --git a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs index 756876dd97..62d28e1809 100644 --- a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs @@ -208,12 +208,12 @@ private Delegate CreateGetter(ColumnType colType, InternalSchemaDefinition.Colum else Host.Assert(colType.RawType == outputType); - if (!colType.IsKey) + if (!(colType is KeyType keyType)) del = CreateDirectGetterDelegate; else { var keyRawType = colType.RawType; - Host.Assert(colType.AsKey.Contiguous); + Host.Assert(keyType.Contiguous); Func delForKey = CreateKeyGetterDelegate; return Utils.MarshalInvoke(delForKey, keyRawType, peek, colType); } @@ -299,9 +299,10 @@ private Delegate CreateDirectGetterDelegate(Delegate peekDel) private Delegate CreateKeyGetterDelegate(Delegate peekDel, ColumnType colType) { // Make sure the function is dealing with key. - Host.Check(colType.IsKey); + KeyType keyType = colType as KeyType; + Host.Check(keyType != null); // Following equations work only with contiguous key type. - Host.Check(colType.AsKey.Contiguous); + Host.Check(keyType.Contiguous); // Following equations work only with unsigned integers. 
Host.Check(typeof(TDst) == typeof(ulong) || typeof(TDst) == typeof(uint) || typeof(TDst) == typeof(byte) || typeof(TDst) == typeof(bool)); @@ -312,8 +313,8 @@ private Delegate CreateKeyGetterDelegate(Delegate peekDel, ColumnType colT TDst rawKeyValue = default; ulong key = 0; // the raw key value as ulong - ulong min = colType.AsKey.Min; - ulong max = min + (ulong)colType.AsKey.Count - 1; + ulong min = keyType.Min; + ulong max = min + (ulong)keyType.Count - 1; ulong result = 0; // the result as ulong ValueGetter getter = (ref TDst dst) => { diff --git a/src/Microsoft.ML.Data/DataView/Transposer.cs b/src/Microsoft.ML.Data/DataView/Transposer.cs index 901c76275a..cbd5dff214 100644 --- a/src/Microsoft.ML.Data/DataView/Transposer.cs +++ b/src/Microsoft.ML.Data/DataView/Transposer.cs @@ -306,8 +306,9 @@ public SchemaImpl(Transposer parent) { ColumnInfo srcInfo = _parent._cols[c]; var ctype = srcInfo.Type.ItemType; - _ectx.Assert(ctype.IsPrimitive); - _slotTypes[c] = new VectorType(ctype.AsPrimitive, _parent.RowCount); + var primitiveType = ctype as PrimitiveType; + _ectx.Assert(primitiveType != null); + _slotTypes[c] = new VectorType(primitiveType, _parent.RowCount); } AsSchema = Schema.Create(this); @@ -1189,7 +1190,7 @@ private sealed class ColumnSplitter : Splitter public ColumnSplitter(IDataView view, int col, int[] lims) : base(view, col) { - var type = _view.Schema.GetColumnType(SrcCol).AsVector; + var type = _view.Schema.GetColumnType(SrcCol) as VectorType; // Only valid use is for two or more slices. 
Contracts.Assert(Utils.Size(lims) >= 2); Contracts.AssertValue(type); diff --git a/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs b/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs index b2d2eb8450..f68fcff00a 100644 --- a/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs +++ b/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs @@ -44,7 +44,7 @@ public static CommonOutputs.TransformOutput SelectColumns(IHostEnvironment env, private static bool ShouldAddColumn(Schema schema, int i, string[] extraColumns, uint scoreSet) { uint scoreSetId = 0; - if (schema.TryGetMetadata(MetadataUtils.ScoreColumnSetIdType.AsPrimitive, MetadataUtils.Kinds.ScoreColumnSetId, i, ref scoreSetId) + if (schema.TryGetMetadata(MetadataUtils.ScoreColumnSetIdType, MetadataUtils.Kinds.ScoreColumnSetId, i, ref scoreSetId) && scoreSetId == scoreSet) { return true; diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs index 0eb63d3f0c..22703accc6 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs @@ -723,7 +723,7 @@ public static void ReconcileVectorKeyValues(IHostEnvironment env, IDataView[] vi (ref VBuffer> dst) => schema.GetMetadata(MetadataUtils.Kinds.SlotNames, index, ref dst); } views[i] = LambdaColumnMapper.Create(env, "ReconcileKeyValues", views[i], columnName, columnName, - type, new VectorType(keyType, type.AsVector), mapper, keyValueGetter, slotNamesGetter); + type, new VectorType(keyType, type as VectorType), mapper, keyValueGetter, slotNamesGetter); } } @@ -974,7 +974,7 @@ private static bool VerifyVectorColumnsMatch(int cachedSize, int col, IDataView private static IDataView AddVarLengthColumn(IHostEnvironment env, IDataView idv, string variableSizeVectorColumnName, ColumnType typeSrc) { return LambdaColumnMapper.Create(env, "ChangeToVarLength", idv, variableSizeVectorColumnName, - variableSizeVectorColumnName + 
"_VarLength", typeSrc, new VectorType(typeSrc.ItemType.AsPrimitive), + variableSizeVectorColumnName + "_VarLength", typeSrc, new VectorType((PrimitiveType)typeSrc.ItemType), (in VBuffer src, ref VBuffer dst) => src.CopyTo(ref dst)); } diff --git a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs index 12aaa47ad0..916f8ddc72 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiClassClassifierEvaluator.cs @@ -1001,11 +1001,11 @@ protected override IDataView GetPerInstanceMetricsCore(IDataView perInst, RoleMa if (!perInst.Schema.TryGetColumnIndex(schema.Label.Name, out int labelCol)) throw Host.Except("Could not find column '{0}'", schema.Label.Name); var labelType = perInst.Schema.GetColumnType(labelCol); - if (labelType.IsKey && (!perInst.Schema.HasKeyValues(labelCol, labelType.KeyCount) || labelType.RawKind != DataKind.U4)) + if (labelType is KeyType keyType && (!perInst.Schema.HasKeyValues(labelCol, keyType.KeyCount) || labelType.RawKind != DataKind.U4)) { perInst = LambdaColumnMapper.Create(Host, "ConvertToDouble", perInst, schema.Label.Name, schema.Label.Name, perInst.Schema.GetColumnType(labelCol), NumberType.R8, - (in uint src, ref double dst) => dst = src == 0 ? double.NaN : src - 1 + (double)labelType.AsKey.Min); + (in uint src, ref double dst) => dst = src == 0 ? 
double.NaN : src - 1 + (double)keyType.Min); } var perInstSchema = perInst.Schema; diff --git a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs index fa2de9c726..f33fcaee1a 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs @@ -549,7 +549,7 @@ private void CheckInputColumnTypes(Schema schema, out ColumnType labelType, out var t = schema.GetColumnType(LabelIndex); if (!t.IsKnownSizeVector || (t.ItemType != NumberType.R4 && t.ItemType != NumberType.R8)) throw Host.Except("Label column '{0}' has type '{1}' but must be a known-size vector of R4 or R8", LabelCol, t); - labelType = new VectorType(t.ItemType.AsPrimitive, t.VectorSize); + labelType = new VectorType((PrimitiveType)t.ItemType, t.VectorSize); var slotNamesType = new VectorType(TextType.Instance, t.VectorSize); var builder = new MetadataBuilder(); builder.AddSlotNames(t.VectorSize, CreateSlotNamesGetter(schema, LabelIndex, labelType.VectorSize, "True")); @@ -558,7 +558,7 @@ private void CheckInputColumnTypes(Schema schema, out ColumnType labelType, out t = schema.GetColumnType(ScoreIndex); if (t.VectorSize == 0 || t.ItemType != NumberType.Float) throw Host.Except("Score column '{0}' has type '{1}' but must be a known length vector of type R4", ScoreCol, t); - scoreType = new VectorType(t.ItemType.AsPrimitive, t.VectorSize); + scoreType = new VectorType((PrimitiveType)t.ItemType, t.VectorSize); builder = new MetadataBuilder(); builder.AddSlotNames(t.VectorSize, CreateSlotNamesGetter(schema, ScoreIndex, scoreType.VectorSize, "Predicted")); diff --git a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs index 84acb75898..4e25546ae5 100644 --- a/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/BinaryClassifierScorer.cs @@ 
-123,7 +123,7 @@ private static ISchemaBoundMapper WrapCore(IHostEnvironment env, ISchemaBound trainSchema.Label.Index, ref value); }; - return MultiClassClassifierScorer.LabelNameBindableMapper.CreateBound(env, (ISchemaBoundRowMapper)mapper, type.AsVector, getter, MetadataUtils.Kinds.TrainingLabelValues, CanWrap); + return MultiClassClassifierScorer.LabelNameBindableMapper.CreateBound(env, (ISchemaBoundRowMapper)mapper, type as VectorType, getter, MetadataUtils.Kinds.TrainingLabelValues, CanWrap); } public BinaryClassifierScorer(IHostEnvironment env, Arguments args, IDataView data, ISchemaBoundMapper mapper, RoleMappedSchema trainSchema) diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs index 70638e35a9..28cc55f5e5 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculationTransform.cs @@ -365,7 +365,7 @@ public RowMapper(IHostEnvironment env, BindableMapper parent, RoleMappedSchema s else { _outputSchema = Schema.Create(new FeatureContributionSchema(_env, DefaultColumnNames.FeatureContributions, - new VectorType(NumberType.R4, schema.Feature.Type.AsVector), + new VectorType(NumberType.R4, schema.Feature.Type as VectorType), InputSchema, InputRoleMappedSchema.Feature.Index)); } diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index 4b190636f9..d5b7ff9669 100644 --- a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -130,9 +130,9 @@ private LabelNameBindableMapper(IHost host, ModelLoadContext ctx) ColumnType type; object value; _host.CheckDecode(saver.TryLoadTypeAndValue(ctx.Reader.BaseStream, out type, out value)); - _host.CheckDecode(type.IsVector); + _type = type as VectorType; + 
_host.CheckDecode(_type != null); _host.CheckDecode(value != null); - _type = type.AsVector; _getter = Utils.MarshalInvoke(DecodeInit, _type.ItemType.RawType, value); _metadataKind = ctx.Header.ModelVerReadable >= VersionAddedMetadataKind ? ctx.LoadNonEmptyString() : MetadataUtils.Kinds.SlotNames; @@ -493,7 +493,7 @@ public static ISchemaBoundMapper WrapCore(IHostEnvironment env, ISchemaBoundM trainSchema.Label.Index, ref value); }; - return LabelNameBindableMapper.CreateBound(env, (ISchemaBoundRowMapper)mapper, type.AsVector, getter, MetadataUtils.Kinds.SlotNames, CanWrap); + return LabelNameBindableMapper.CreateBound(env, (ISchemaBoundRowMapper)mapper, type as VectorType, getter, MetadataUtils.Kinds.SlotNames, CanWrap); } public MultiClassClassifierScorer(IHostEnvironment env, Arguments args, IDataView data, ISchemaBoundMapper mapper, RoleMappedSchema trainSchema) diff --git a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs index 73d37120c8..0d94802771 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnConcatenatingTransformer.cs @@ -496,7 +496,7 @@ private BoundColumn MakeColumn(Schema inputSchema, int iinfo) hasSlotNames = false; } - return new BoundColumn(InputSchema, _parent._columns[iinfo], sources, new VectorType(itemType.AsPrimitive, totalSize), + return new BoundColumn(InputSchema, _parent._columns[iinfo], sources, new VectorType((PrimitiveType)itemType, totalSize), isNormalized, hasSlotNames, hasCategoricals, totalSize, catCount); } diff --git a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs index 65e5c52dc4..8e0f146b7c 100644 --- a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs @@ -518,7 +518,7 @@ private void ComputeType(Schema input, int iinfo, SlotDropper 
slotDropper, Host.Assert(typeSrc.IsKnownSizeVector); var dstLength = slotDropper.DstLength; var hasSlotNames = input.HasSlotNames(_cols[iinfo], _srcTypes[iinfo].VectorSize); - type = new VectorType(typeSrc.ItemType.AsPrimitive, Math.Max(dstLength, 1)); + type = new VectorType((PrimitiveType)typeSrc.ItemType, Math.Max(dstLength, 1)); suppressed = dstLength == 0; } } @@ -822,7 +822,7 @@ protected override Schema.DetachedColumn[] GetOutputColumnsCore() { var dstLength = _slotDropper[iinfo].DstLength; var hasSlotNames = InputSchema.HasSlotNames(_cols[iinfo], _srcTypes[iinfo].VectorSize); - var type = new VectorType(_srcTypes[iinfo].ItemType.AsPrimitive, Math.Max(dstLength, 1)); + var type = new VectorType((PrimitiveType)_srcTypes[iinfo].ItemType, Math.Max(dstLength, 1)); if (hasSlotNames && dstLength > 0) { diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 9ea54f4897..8c40a0138e 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -911,7 +911,7 @@ private void AddMetaKeyValues(int i, MetadataBuilder builder) { _parent._keyValues[i].CopyTo(ref dst); }; - builder.AddKeyValues(_parent._kvTypes[i].VectorSize, _parent._kvTypes[i].ItemType.AsPrimitive, getter); + builder.AddKeyValues(_parent._kvTypes[i].VectorSize, (PrimitiveType)_parent._kvTypes[i].ItemType, getter); } protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) => _parent.GetGetterCore(input, iinfo, out disposer); diff --git a/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs b/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs index 8d041dc6ce..8b683a73bf 100644 --- a/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs +++ b/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs @@ -41,12 +41,12 @@ public static ValueMapper GetSimpleMapper(Schema schema, in var conv = Conversion.Conversions.Instance; // First: if not key, then get the standard string 
converison. - if (!type.IsKey) + if (!(type is KeyType keyType)) return conv.GetStringConversion(type); bool identity; // Second choice: if key, utilize the KeyValues metadata for that key, if it has one and is text. - if (schema.HasKeyValues(col, type.KeyCount)) + if (schema.HasKeyValues(col, keyType.KeyCount)) { // REVIEW: Non-textual KeyValues are certainly possible. Should we handle them? // Get the key names. @@ -70,7 +70,7 @@ public static ValueMapper GetSimpleMapper(Schema schema, in } // Third choice: just use the key value itself, subject to offsetting by the min. - return conv.GetKeyStringConversion(type.AsKey); + return conv.GetKeyStringConversion(keyType); } public static ValueMapper, StringBuilder> GetPairMapper(ValueMapper submap) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs index f2b4277059..b5de375360 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToValue.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToValue.cs @@ -232,10 +232,10 @@ private void ComputeKvMaps(Schema schema, out ColumnType[] types, out KeyToValue var typeVals = schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.KeyValues, ColMapNewToOld[iinfo]); Host.Check(typeVals != null, "Metadata KeyValues does not exist"); Host.Check(typeVals.VectorSize == typeSrc.ItemType.KeyCount, "KeyValues metadata size does not match column type key count"); - if (!typeSrc.IsVector) + if (!(typeSrc is VectorType vectorType)) types[iinfo] = typeVals.ItemType; else - types[iinfo] = new VectorType(typeVals.ItemType.AsPrimitive, typeSrc.AsVector); + types[iinfo] = new VectorType((PrimitiveType)typeVals.ItemType, vectorType); // MarshalInvoke with two generic params. 
Func func = GetKeyMetadata; @@ -258,7 +258,7 @@ private KeyToValueMap GetKeyMetadata(int iinfo, ColumnType typeKey Host.Check(keyMetadata.Length == typeKey.ItemType.KeyCount); VBufferUtils.Densify(ref keyMetadata); - return new KeyToValueMap(this, typeKey.ItemType.AsKey, typeVal.ItemType.AsPrimitive, keyMetadata, iinfo); + return new KeyToValueMap(this, (KeyType)typeKey.ItemType, (PrimitiveType)typeVal.ItemType, keyMetadata, iinfo); } /// /// A map is an object capable of creating the association from an input type, to an output diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 1c781b3e32..493d313cc2 100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -370,7 +370,7 @@ internal static bool GetNewType(IExceptionContext ectx, ColumnType srcType, Data if (!srcType.ItemType.IsKey && !srcType.ItemType.IsText) return false; } - else if (!srcType.ItemType.IsKey) + else if (!(srcType.ItemType is KeyType key)) itemType = PrimitiveType.FromKind(kind); else if (!KeyType.IsValidDataKind(kind)) { @@ -379,7 +379,6 @@ internal static bool GetNewType(IExceptionContext ectx, ColumnType srcType, Data } else { - var key = srcType.ItemType.AsKey; ectx.Assert(KeyType.IsValidDataKind(key.RawKind)); int count = key.Count; // Technically, it's an error for the counts not to match, but we'll let the Conversions @@ -436,8 +435,8 @@ private static bool CanConvertToType(IExceptionContext ectx, ColumnType srcType, return false; typeDst = itemType; - if (srcType.IsVector) - typeDst = new VectorType(itemType, srcType.AsVector); + if (srcType is VectorType vectorType) + typeDst = new VectorType(itemType, vectorType); return true; } @@ -473,9 +472,9 @@ protected override Delegate MakeGetter(Row input, int iinfo, Func act Contracts.AssertValue(input); Contracts.Assert(0 <= iinfo && iinfo < _parent.ColumnPairs.Length); disposer = null; - if 
(!_types[iinfo].IsVector) + if (!(_types[iinfo] is VectorType vectorType)) return RowCursorUtils.GetGetterAs(_types[iinfo], input, _srcCols[iinfo]); - return RowCursorUtils.GetVecGetterAs(_types[iinfo].AsVector.ItemType, input, _srcCols[iinfo]); + return RowCursorUtils.GetVecGetterAs(vectorType.ItemType, input, _srcCols[iinfo]); } public void SaveAsOnnx(OnnxContext ctx) @@ -507,7 +506,7 @@ private bool SaveAsOnnxCore(OnnxContext ctx, int iinfo, string srcVariableName, node.AddAttribute("to", (byte)_parent._columns[iinfo].OutputKind); if (_parent._columns[iinfo].OutputKeyRange != null) { - var key = _types[iinfo].ItemType.AsKey; + var key = (KeyType)_types[iinfo].ItemType; node.AddAttribute("min", key.Min); node.AddAttribute("max", key.Count); node.AddAttribute("contiguous", key.Contiguous); diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index cda1d03401..b179f358dc 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -736,8 +736,8 @@ public Mapper(ValueToKeyMappingTransformer parent, Schema inputSchema) var type = _infos[i].TypeSrc; KeyType keyType = _parent._unboundMaps[i].OutputType; ColumnType colType; - if (type.IsVector) - colType = new VectorType(keyType, type.AsVector); + if (type is VectorType vectorType) + colType = new VectorType(keyType, vectorType); else colType = keyType; _types[i] = colType; diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs index 87c112b87d..8c7961c969 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs @@ -49,7 +49,7 @@ public static Builder Create(ColumnType type, SortOrder sortOrder) Contracts.Assert(sortOrder == SortOrder.Occurrence 
|| sortOrder == SortOrder.Value); bool sorted = sortOrder == SortOrder.Value; - PrimitiveType itemType = type.ItemType.AsPrimitive; + PrimitiveType itemType = type.ItemType as PrimitiveType; Contracts.AssertValue(itemType); if (itemType.IsText) return new TextImpl(sorted); @@ -568,7 +568,7 @@ private static TermMap LoadCodecCore(ModelLoadContext ctx, IExceptionContext } } - return new HashArrayImpl(codec.Type.AsPrimitive, values); + return new HashArrayImpl((PrimitiveType)codec.Type, values); } internal abstract void WriteTextTerms(TextWriter writer); @@ -1101,7 +1101,7 @@ private bool AddMetadataCore(ColumnType srcMetaType, MetadataBuilder buil _host.AssertValue(srcMetaType); _host.Assert(srcMetaType.RawType == typeof(TMeta)); _host.AssertValue(builder); - var srcType = TypedMap.ItemType.AsKey; + var srcType = TypedMap.ItemType as KeyType; _host.AssertValue(srcType); var dstType = new KeyType(DataKind.U4, srcType.Min, srcType.Count); var convInst = Runtime.Data.Conversion.Conversions.Instance; @@ -1156,7 +1156,7 @@ private bool AddMetadataCore(ColumnType srcMetaType, MetadataBuilder buil getter(ref dst); _host.Assert(dst.Length == TypedMap.OutputType.KeyCount); }; - builder.AddKeyValues(TypedMap.OutputType.KeyCount, srcMetaType.ItemType.AsPrimitive, mgetter); + builder.AddKeyValues(TypedMap.OutputType.KeyCount, (PrimitiveType)srcMetaType.ItemType, mgetter); } return true; } @@ -1169,7 +1169,7 @@ public override void WriteTextTerms(TextWriter writer) _schema.TryGetColumnIndex(_infos[_iinfo].Source, out int srcCol); ColumnType srcMetaType = _schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.KeyValues, srcCol); if (srcMetaType == null || srcMetaType.VectorSize != TypedMap.ItemType.KeyCount || - TypedMap.ItemType.KeyCount == 0 || !Utils.MarshalInvoke(WriteTextTermsCore, srcMetaType.ItemType.RawType, srcMetaType.AsVector.ItemType, writer)) + TypedMap.ItemType.KeyCount == 0 || !Utils.MarshalInvoke(WriteTextTermsCore, srcMetaType.ItemType.RawType, 
((VectorType)srcMetaType).ItemType, writer)) { // No valid input key-value metadata. Back off to the base implementation. base.WriteTextTerms(writer); @@ -1180,7 +1180,7 @@ private bool WriteTextTermsCore(PrimitiveType srcMetaType, TextWriter wri { _host.AssertValue(srcMetaType); _host.Assert(srcMetaType.RawType == typeof(TMeta)); - var srcType = TypedMap.ItemType.AsKey; + var srcType = TypedMap.ItemType as KeyType; _host.AssertValue(srcType); var dstType = new KeyType(DataKind.U4, srcType.Min, srcType.Count); var convInst = Runtime.Data.Conversion.Conversions.Instance; diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index dafa19eb61..d393e6c316 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -603,7 +603,7 @@ protected static int CheckLabelColumn(IHostEnvironment env, IPredictorModel[] mo if (mdType == null || !mdType.IsKnownSizeVector) throw env.Except("Label column of type key must have a vector of key values metadata"); - return Utils.MarshalInvoke(CheckKeyLabelColumnCore, mdType.ItemType.RawType, env, models, labelType.AsKey, schema, labelInfo.Index, mdType); + return Utils.MarshalInvoke(CheckKeyLabelColumnCore, mdType.ItemType.RawType, env, models, (KeyType)labelType, schema, labelInfo.Index, mdType); } // When the label column is not a key, we check that the number of classes is the same for all the predictors, by checking the @@ -655,8 +655,8 @@ private static int CheckKeyLabelColumnCore(IHostEnvironment env, IPredictorMo if (labelInfo == null) throw env.Except("Training schema for model {0} does not have a label column", i); - var curLabelType = rmd.Schema.Schema.GetColumnType(rmd.Schema.Label.Index); - if (!labelType.Equals(curLabelType.AsKey)) + var curLabelType = rmd.Schema.Schema.GetColumnType(rmd.Schema.Label.Index) as KeyType; + if (!labelType.Equals(curLabelType)) throw env.Except("Label column of model {0} has different type 
than model 0", i); var mdType = rmd.Schema.Schema.GetMetadataTypeOrNull(MetadataUtils.Kinds.KeyValues, labelInfo.Index); diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index 7eef450580..ec3827286c 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -788,7 +788,7 @@ private static IDataView AppendLabelTransform(IHostEnvironment env, IChannel ch, "labelPermutationSeed != 0 only applies on a multi-class learning problem when the label type is a key."); return input; } - return Utils.MarshalInvoke(AppendFloatMapper, labelType.RawType, env, ch, input, labelName, labelType.AsKey, + return Utils.MarshalInvoke(AppendFloatMapper, labelType.RawType, env, ch, input, labelName, (KeyType)labelType, labelPermutationSeed); } } diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index 2a0358f74a..707e8c780f 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -323,7 +323,8 @@ protected override void CheckInputColumn(Schema inputSchema, int col, int srcCol // Check if the input column's type is supported. Note that only float vector with a known shape is allowed. 
internal static string TestColumn(ColumnType type) { - if ((type.IsVector && !type.IsKnownSizeVector && (type.AsVector.Dimensions.Length > 1)) || type.ItemType != NumberType.R4) + if ((type is VectorType vectorType && !vectorType.IsKnownSizeVector && vectorType.Dimensions.Length > 1) + || type.ItemType != NumberType.R4) return "Expected float or float vector of known size"; if ((long)type.ValueCount * type.ValueCount > Utils.ArrayMaxSize) diff --git a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs index 46300386ab..2216beadb7 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs @@ -139,14 +139,14 @@ protected override void ConvertNaNLabels(IChannel ch, RoleMappedData data, float if (maxLabel >= _maxNumClass) throw ch.ExceptParam(nameof(data), $"max labelColumn cannot exceed {_maxNumClass}"); - if (data.Schema.Label.Type.IsKey) + if (data.Schema.Label.Type is KeyType keyType) { - ch.Check(data.Schema.Label.Type.AsKey.Contiguous, "labelColumn value should be contiguous"); + ch.Check(keyType.Contiguous, "labelColumn value should be contiguous"); if (hasNaNLabel) - _numClass = data.Schema.Label.Type.AsKey.Count + 1; + _numClass = keyType.Count + 1; else - _numClass = data.Schema.Label.Type.AsKey.Count; - _tlcNumClass = data.Schema.Label.Type.AsKey.Count; + _numClass = keyType.Count; + _tlcNumClass = keyType.Count; } else { diff --git a/src/Microsoft.ML.Onnx/OnnxUtils.cs b/src/Microsoft.ML.Onnx/OnnxUtils.cs index 1d6c4b6fc1..877fc7b9ed 100644 --- a/src/Microsoft.ML.Onnx/OnnxUtils.cs +++ b/src/Microsoft.ML.Onnx/OnnxUtils.cs @@ -296,10 +296,10 @@ public static ModelArgs GetModelArgs(ColumnType type, string colName, TensorProto.Types.DataType dataType = TensorProto.Types.DataType.Undefined; DataKind rawKind; - if (type.IsVector) - rawKind = type.AsVector.ItemType.RawKind; - else if (type.IsKey) - rawKind = type.AsKey.RawKind; + if 
(type is VectorType vectorType) + rawKind = vectorType.ItemType.RawKind; + else if (type is KeyType keyType) + rawKind = keyType.RawKind; else rawKind = type.RawKind; @@ -367,7 +367,7 @@ public static ModelArgs GetModelArgs(ColumnType type, string colName, dimsLocal.Add(1); else if (type.ValueCount > 1) { - var vec = type.AsVector; + var vec = (VectorType)type; for (int i = 0; i < vec.Dimensions.Length; i++) dimsLocal.Add(vec.Dimensions[i]); } diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs index e42c6b002e..d858d5bef4 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationTrainer.cs @@ -347,7 +347,7 @@ private MatrixFactorizationPredictor TrainCore(IChannel ch, RoleMappedData data, using (var buffer = PrepareBuffer()) { buffer.Train(ch, rowCount, colCount, cursor, labGetter, matrixRowIndexGetter, matrixColumnIndexGetter); - predictor = new MatrixFactorizationPredictor(Host, buffer, matrixColumnIndexColInfo.Type.AsKey, matrixRowIndexColInfo.Type.AsKey); + predictor = new MatrixFactorizationPredictor(Host, buffer, (KeyType)matrixColumnIndexColInfo.Type, (KeyType)matrixRowIndexColInfo.Type); } } else @@ -365,7 +365,7 @@ private MatrixFactorizationPredictor TrainCore(IChannel ch, RoleMappedData data, buffer.TrainWithValidation(ch, rowCount, colCount, cursor, labGetter, matrixRowIndexGetter, matrixColumnIndexGetter, validCursor, validLabelGetter, validMatrixRowIndexGetter, validMatrixColumnIndexGetter); - predictor = new MatrixFactorizationPredictor(Host, buffer, matrixColumnIndexColInfo.Type.AsKey, matrixRowIndexColInfo.Type.AsKey); + predictor = new MatrixFactorizationPredictor(Host, buffer, (KeyType)matrixColumnIndexColInfo.Type, (KeyType)matrixRowIndexColInfo.Type); } } } diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs 
b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs index be73a2de30..821cb21cb0 100644 --- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs @@ -299,14 +299,16 @@ private FieldAwareFactorizationMachinePredictor TrainCore(IChannel ch, IProgress for (int f = 0; f < fieldCount; f++) { var col = featureColumns[f]; - Host.Assert(col.Type.AsVector.VectorSize > 0); if (col == null) throw ch.ExceptParam(nameof(data), "Empty feature column not allowed"); Host.Assert(!data.Schema.Schema.IsHidden(col.Index)); - if (!col.Type.IsKnownSizeVector || col.Type.ItemType != NumberType.Float) + if (!(col.Type is VectorType vectorType) || + !vectorType.IsKnownSizeVector || + vectorType.ItemType != NumberType.Float) throw ch.ExceptParam(nameof(data), "Training feature column '{0}' must be a known-size vector of R4, but has type: {1}.", col.Name, col.Type); + Host.Assert(vectorType.VectorSize > 0); fieldColumnIndexes[f] = col.Index; - totalFeatureCount += col.Type.AsVector.VectorSize; + totalFeatureCount += vectorType.VectorSize; } ch.Check(checked(totalFeatureCount * fieldCount * _latentDimAligned) <= Utils.ArrayMaxSize, "Latent dimension or the number of fields too large"); if (predictor != null) diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index b7a1ba901f..f555545099 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -837,7 +837,7 @@ public Mapper(TensorFlowTransform parent, Schema inputSchema) : else { if (shape.Select((dim, j) => dim != -1 && dim != colTypeDims[j]).Any(b => b)) - throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is {type.AsVector.ToString()}."); + throw Contracts.Except($"Input 
shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is {vecType.ToString()}."); // Fill in the unknown dimensions. var l = new long[originalShape.NumDimensions]; diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index 1f77e1dd16..1b9e341007 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -287,8 +287,9 @@ private static ColumnType[] BuildColumnTypes(Schema input, int[] ids) for (int i = 0; i < ids.Length; i++) { var srcType = input.GetColumnType(ids[i]); - Contracts.Assert(srcType.IsPrimitive); - types[i] = new VectorType(srcType.AsPrimitive, size: 0); + var primitiveType = srcType as PrimitiveType; + Contracts.Assert(primitiveType != null); + types[i] = new VectorType(primitiveType, size: 0); } return types; } diff --git a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs index 77e89f8882..cc678facfd 100644 --- a/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueDroppingTransformer.cs @@ -164,7 +164,7 @@ public Mapper(MissingValueDroppingTransformer parent, Schema inputSchema) : inputSchema.TryGetColumnIndex(_parent.ColumnPairs[i].input, out _srcCols[i]); var srcCol = inputSchema[_srcCols[i]]; _srcTypes[i] = srcCol.Type; - _types[i] = new VectorType(srcCol.Type.ItemType.AsPrimitive); + _types[i] = new VectorType((PrimitiveType)srcCol.Type.ItemType); _isNAs[i] = GetIsNADelegate(srcCol.Type); } } diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs index a58603c16e..09ab2853da 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransform.cs @@ -140,11 +140,11 @@ private VectorType[] GetTypesAndMetadata() // 
This ensures that our feature count doesn't overflow. Host.Check(type.ValueCount < int.MaxValue / 2); - if (!type.IsVector) + if (!(type is VectorType vectorType)) types[iinfo] = new VectorType(NumberType.Float, 2); else { - types[iinfo] = new VectorType(NumberType.Float, type.AsVector, 2); + types[iinfo] = new VectorType(NumberType.Float, vectorType, 2); // Produce slot names metadata iff the source has (valid) slot names. ColumnType typeNames; diff --git a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs index 73efd1d719..8e2580accb 100644 --- a/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs +++ b/src/Microsoft.ML.Transforms/MissingValueIndicatorTransformer.cs @@ -182,10 +182,10 @@ private ColInfo[] CreateInfos(Schema inputSchema) _parent.CheckInputColumn(inputSchema, i, colSrc); var inType = inputSchema.GetColumnType(colSrc); ColumnType outType; - if (!inType.IsVector) + if (!(inType is VectorType vectorType)) outType = BoolType.Instance; else - outType = new VectorType(BoolType.Instance, inType.AsVector); + outType = new VectorType(BoolType.Instance, vectorType); infos[i] = new ColInfo(_parent.ColumnPairs[i].input, _parent.ColumnPairs[i].output, inType, outType); } return infos; @@ -469,7 +469,9 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) if (col.Metadata.TryFindColumn(MetadataUtils.Kinds.SlotNames, out var slotMeta)) metadata.Add(slotMeta); metadata.Add(new SchemaShape.Column(MetadataUtils.Kinds.IsNormalized, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false)); - ColumnType type = !col.ItemType.IsVector ? (ColumnType)BoolType.Instance : new VectorType(BoolType.Instance, col.ItemType.AsVector); + ColumnType type = !(col.ItemType is VectorType vectorType) ? 
+ (ColumnType)BoolType.Instance : + new VectorType(BoolType.Instance, vectorType); result[colPair.output] = new SchemaShape.Column(colPair.output, col.Kind, type, false, new SchemaShape(metadata.ToArray())); } return new SchemaShape(result.Values); diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index 296aed0eab..a6579cb9b2 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -324,8 +324,8 @@ private void GetReplacementValues(IDataView input, ColumnInfo[] columns, out obj input.Schema.TryGetColumnIndex(columns[iinfo].Input, out int colSrc); sources[iinfo] = colSrc; var type = input.Schema.GetColumnType(colSrc); - if (type.IsVector) - type = new VectorType(type.ItemType.AsPrimitive, type.AsVector); + if (type is VectorType vectorType) + type = new VectorType((PrimitiveType)type.ItemType, vectorType); Delegate isNa = GetIsNADelegate(type); types[iinfo] = type; var kind = (ReplacementKind)columns[iinfo].Replacement; @@ -593,8 +593,8 @@ public Mapper(MissingValueReplacingTransformer parent, Schema inputSchema) for (int i = 0; i < _parent.ColumnPairs.Length; i++) { var type = _infos[i].TypeSrc; - if (type.IsVector) - type = new VectorType(type.ItemType.AsPrimitive, type.AsVector); + if (type is VectorType vectorType) + type = new VectorType((PrimitiveType)type.ItemType, vectorType); var repType = _parent._repIsDefault[i] != null ? 
_parent._replaceTypes[i] : _parent._replaceTypes[i].ItemType; if (!type.ItemType.Equals(repType.ItemType)) throw Host.ExceptParam(nameof(InputSchema), "Column '{0}' item type '{1}' does not match expected ColumnType of '{2}'", @@ -897,10 +897,10 @@ private bool SaveAsOnnxCore(OnnxContext ctx, int iinfo, ColInfo info, string src { DataKind rawKind; var type = _infos[iinfo].TypeSrc; - if (type.IsVector) - rawKind = type.AsVector.ItemType.RawKind; - else if (type.IsKey) - rawKind = type.AsKey.RawKind; + if (type is VectorType vectorType) + rawKind = vectorType.ItemType.RawKind; + else if (type is KeyType keyType) + rawKind = keyType.RawKind; else rawKind = type.RawKind; @@ -965,7 +965,9 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) metadata.Add(slotMeta); if (col.Metadata.TryFindColumn(MetadataUtils.Kinds.IsNormalized, out var normalized)) metadata.Add(normalized); - var type = !col.ItemType.IsVector ? col.ItemType : new VectorType(col.ItemType.ItemType.AsPrimitive, col.ItemType.AsVector); + var type = !(col.ItemType is VectorType vectorType) ? 
+ col.ItemType : + new VectorType((PrimitiveType)col.ItemType.ItemType, vectorType); result[colInfo.Output] = new SchemaShape.Column(colInfo.Output, col.Kind, type, false, new SchemaShape(metadata.ToArray())); } return new SchemaShape(result.Values); diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index e52f6d5354..5354dfda0b 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -291,7 +291,7 @@ private static void CheckAndBind(IExceptionContext ectx, Schema inputSchema, if (!colType.IsVector || !colType.ItemType.IsPrimitive) throw ectx.ExceptUserArg(nameof(Arguments.Column), "Pivot column '{0}' has type '{1}', but must be a vector of primitive types", name, colType); - infos[i] = new PivotColumnInfo(name, col, colType.VectorSize, colType.ItemType.AsPrimitive); + infos[i] = new PivotColumnInfo(name, col, colType.VectorSize, (PrimitiveType)colType.ItemType); } } From 9067a1be58434999e37db7504d6b4e06eca8bbf4 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Tue, 11 Dec 2018 17:07:49 -0800 Subject: [PATCH 044/100] Warn when a graph contains a null output (#1862) * Warn when a graph contains a null output * Better info * Update src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs --- .../EntryPoints/JsonUtils/ExecuteGraphCommand.cs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs index d6e95961ec..1f30644fc2 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs @@ -145,7 +145,15 @@ public void GetOutputToPath(GraphRunner runner, string varName, string path, Tlc throw _host.ExceptNotSupp("File handle outputs not yet 
supported."); case TlcModule.DataKind.DataView: var idv = runner.GetOutput(varName); - SaveDataView(idv, path, extension); + if (idv != null) + SaveDataView(idv, path, extension); + else + using (var ch = _host.Start("Get outputs from executed graph")) + { + string msg = string.Format("Ignoring empty graph output (output name: {0}, type: {1}, expected output's file: {2})", + varName, nameof(idv), path + extension); + ch.Warning(msg); + } break; case TlcModule.DataKind.PredictorModel: var pm = runner.GetOutput(varName); From b49e2b06e94982529c4a2869602f5fa9d6cfa2ac Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Wed, 12 Dec 2018 08:21:55 -0800 Subject: [PATCH 045/100] Extend a test for adding string-array output example (#1747) --- .../Transformers/WordTokenizeTests.cs | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs index 6cf2c6a46d..fc01cfaf60 100644 --- a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs @@ -8,6 +8,8 @@ using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; using Microsoft.ML.Transforms.Text; +using System; +using System.Collections.Generic; using System.IO; using Xunit; using Xunit.Abstractions; @@ -26,6 +28,21 @@ private class TestClass [VectorType(2)] public string[] B; } + +// Visual Studio complains because the following class members are not never assigned. That is wrong because that class +// will be implicitly created in runtime and therefore we disable warning 169. +#pragma warning disable 169 + // This is a C# native data structure used to capture the output of ML.NET tokenizer in the test below. 
+ public class NativeResult + { + public string A; + public string[] B; + public string[] TokenizeA; + public string[] TokenizeB; + } +#pragma warning restore 169 + + private class TestWrong { public float A; @@ -33,6 +50,7 @@ private class TestWrong public float[] B; } [Fact] + public void WordTokenizeWorkout() { var data = new[] { new TestClass() { A = "This is a good sentence.", B = new string[2] { "Much words", "Wow So Cool" } } }; @@ -45,6 +63,27 @@ public void WordTokenizeWorkout() }); TestEstimatorCore(pipe, dataView, invalidInput: invalidDataView); + + // Reuse the pipe trained on dataView in TestEstimatorCore to make prediction. + var result = pipe.Fit(dataView).Transform(dataView); + + // Extract the transformed result of the first row (the only row we have because data contains only one TestClass) as a native class. + var nativeResult = new List(result.AsEnumerable(Env, false))[0]; + + // Check the tokenization of A. Expected result is { "This", "is", "a", "good", "sentence." }. + var tokenizeA = new[] { "This", "is", "a", "good", "sentence." }; + Assert.True(tokenizeA.Length == nativeResult.TokenizeA.Length); + for (int i = 0; i < tokenizeA.Length; ++i) + Assert.Equal(tokenizeA[i], nativeResult.TokenizeA[i]); + + // Check the tokenization of B. Expected result is { "Much", "words", "Wow", "So", "Cool" }. One may think that the expected output + // should be a 2-D array { { "Much", "words"}, { "Wow", "So", "Cool" } }, but please note that ML.NET may flatten all outputs if + // they are high-dimension tensors. 
+ var tokenizeB = new[] { "Much", "words", "Wow", "So", "Cool" }; + Assert.True(tokenizeB.Length == nativeResult.TokenizeB.Length); + for (int i = 0; i < tokenizeB.Length; ++i) + Assert.Equal(tokenizeB[i], nativeResult.TokenizeB[i]); + Done(); } From a326dda26f087342e79ed4cfbd0afc15afa99787 Mon Sep 17 00:00:00 2001 From: Senja Filipi Date: Wed, 12 Dec 2018 11:12:35 -0800 Subject: [PATCH 046/100] renaming uint128 to RowId (#1858) * renaming uint128 to RowId --- src/Microsoft.ML.Core/Data/DataKind.cs | 4 +- src/Microsoft.ML.Core/Data/IDataView.cs | 2 +- .../Data/LinkedRowFilterCursorBase.cs | 2 +- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 2 +- .../Data/{UInt128.cs => RowId.cs} | 120 ++++++++++-------- .../Data/SynchronizedCursorBase.cs | 2 +- src/Microsoft.ML.Core/Data/WrappingRow.cs | 2 +- src/Microsoft.ML.Data/Data/Conversion.cs | 14 +- src/Microsoft.ML.Data/Data/DataViewUtils.cs | 12 +- src/Microsoft.ML.Data/Data/RowCursorUtils.cs | 6 +- .../DataLoadSave/Binary/BinaryLoader.cs | 12 +- .../DataLoadSave/Binary/CodecFactory.cs | 2 +- .../DataLoadSave/Binary/UnsafeTypeOps.cs | 14 +- .../DataLoadSave/PartitionedFileLoader.cs | 6 +- .../DataLoadSave/Text/TextLoaderCursor.cs | 6 +- .../DataLoadSave/Transpose/TransposeLoader.cs | 8 +- .../DataView/AppendRowsDataView.cs | 18 +-- .../DataView/ArrayDataViewBuilder.cs | 10 +- .../DataView/CacheDataView.cs | 60 ++++----- .../DataView/CompositeRowToRowMapper.cs | 2 +- .../DataView/DataViewConstructionUtils.cs | 28 ++-- .../DataView/EmptyDataView.cs | 4 +- src/Microsoft.ML.Data/DataView/Transposer.cs | 12 +- src/Microsoft.ML.Data/DataView/TypedCursor.cs | 4 +- src/Microsoft.ML.Data/DataView/ZipDataView.cs | 6 +- .../StaticPipe/StaticSchemaShape.cs | 2 +- .../Training/TrainerUtils.cs | 6 +- src/Microsoft.ML.Data/Transforms/Hashing.cs | 18 +-- .../Transforms/PerGroupTransformBase.cs | 6 +- .../Transforms/RowShufflingTransformer.cs | 6 +- .../Transforms/SkipTakeFilter.cs | 2 +- .../Utilities/SlotDropper.cs | 2 +- 
src/Microsoft.ML.Data/Utils/ApiUtils.cs | 4 +- .../Utils/ToByteArrayExtensions.cs | 8 +- .../KMeansPlusPlusTrainer.cs | 8 +- src/Microsoft.ML.Legacy/CSharpApi.cs | 14 +- src/Microsoft.ML.Legacy/Data/TextLoader.cs | 2 +- src/Microsoft.ML.Parquet/ParquetLoader.cs | 18 +-- .../LogisticRegression/LbfgsPredictorBase.cs | 4 +- .../LogisticRegression/LbfgsStatic.cs | 6 +- .../LogisticRegression/LogisticRegression.cs | 2 +- .../MulticlassLogisticRegression.cs | 2 +- .../PoissonRegression/PoissonRegression.cs | 2 +- .../Standard/SdcaBinary.cs | 66 +++++----- .../Standard/SdcaMultiClass.cs | 4 +- .../StandardLearnersCatalog.cs | 6 +- .../SequentialTransformerBase.cs | 4 +- .../BootstrapSamplingTransformer.cs | 6 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 2 +- .../MissingValueReplacingUtils.cs | 12 +- .../StatefulFilterTransform.cs | 2 +- .../UngroupTransform.cs | 6 +- .../Common/EntryPoints/core_manifest.json | 14 +- test/Microsoft.ML.Benchmarks/HashBench.cs | 4 +- .../UnitTests/CoreBaseTestClass.cs | 18 +-- .../DataPipe/TestDataPipeBase.cs | 18 +-- .../Transformers/HashTests.cs | 2 +- 57 files changed, 322 insertions(+), 312 deletions(-) rename src/Microsoft.ML.Core/Data/{UInt128.cs => RowId.cs} (59%) diff --git a/src/Microsoft.ML.Core/Data/DataKind.cs b/src/Microsoft.ML.Core/Data/DataKind.cs index ad8d8fbfe0..c20d9930af 100644 --- a/src/Microsoft.ML.Core/Data/DataKind.cs +++ b/src/Microsoft.ML.Core/Data/DataKind.cs @@ -171,7 +171,7 @@ public static Type ToType(this DataKind kind) case DataKind.DZ: return typeof(DateTimeOffset); case DataKind.UG: - return typeof(UInt128); + return typeof(RowId); } return null; @@ -215,7 +215,7 @@ public static bool TryGetDataKind(this Type type, out DataKind kind) kind = DataKind.DT; else if (type == typeof(DateTimeOffset)) kind = DataKind.DZ; - else if (type == typeof(UInt128)) + else if (type == typeof(RowId)) kind = DataKind.UG; else { diff --git a/src/Microsoft.ML.Core/Data/IDataView.cs b/src/Microsoft.ML.Core/Data/IDataView.cs 
index 64da333c3a..fe605a6fd5 100644 --- a/src/Microsoft.ML.Core/Data/IDataView.cs +++ b/src/Microsoft.ML.Core/Data/IDataView.cs @@ -182,7 +182,7 @@ public abstract class Row : IDisposable /// all that if the "same" data were presented in a different data view (as by, say, being transformed, /// cached, saved, or whatever), that the IDs between the two different data views would have any /// discernable relationship. - public abstract ValueGetter GetIdGetter(); + public abstract ValueGetter GetIdGetter(); /// /// Returns whether the given column is active in this row. diff --git a/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs b/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs index 78b4fd142b..67f66b2103 100644 --- a/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/LinkedRowFilterCursorBase.cs @@ -19,7 +19,7 @@ protected LinkedRowFilterCursorBase(IChannelProvider provider, RowCursor input, { } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return Input.GetIdGetter(); } diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index 723c38fc74..06994a1acf 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -509,7 +509,7 @@ public MetadataRow(Schema.Metadata metadata) public override long Position => 0; public override long Batch => 0; public override ValueGetter GetGetter(int col) => _metadata.GetGetter(col); - public override ValueGetter GetIdGetter() => (ref UInt128 dst) => dst = default; + public override ValueGetter GetIdGetter() => (ref RowId dst) => dst = default; public override bool IsColumnActive(int col) => true; } diff --git a/src/Microsoft.ML.Core/Data/UInt128.cs b/src/Microsoft.ML.Core/Data/RowId.cs similarity index 59% rename from src/Microsoft.ML.Core/Data/UInt128.cs rename to src/Microsoft.ML.Core/Data/RowId.cs index 017068a65d..09a5fa2d25 100644 --- 
a/src/Microsoft.ML.Core/Data/UInt128.cs +++ b/src/Microsoft.ML.Core/Data/RowId.cs @@ -9,109 +9,119 @@ namespace Microsoft.ML.Runtime.Data { /// - /// A sixteen-byte unsigned integer. + /// A structure serving as a sixteen-byte unsigned integer. It is used as the row id of . + /// For datasets with millions of records, those IDs need to be unique, therefore the need for such a large structure to hold the values. + /// Those Ids are derived from other Ids of the previous components of the pipelines, and dividing the structure in two: high order and low order of bits, + /// and reduces the changes of those collisions even further. /// - public readonly struct UInt128 : IComparable, IEquatable + /// + public readonly struct RowId : IComparable, IEquatable { - // The low order bits. Corresponds to H1 in the Murmur algorithms. - public readonly ulong Lo; - // The high order bits. Corresponds to H2 in the Murmur algorithms. - public readonly ulong Hi; + ///The low order bits. Corresponds to H1 in the Murmur algorithms. + public readonly ulong Low; - public UInt128(ulong lo, ulong hi) + /// The high order bits. Corresponds to H2 in the Murmur algorithms. + public readonly ulong High; + + /// + /// Initializes a new instance of + /// + /// The low order ulong. + /// The high order ulong. + public RowId(ulong low, ulong high) { - Lo = lo; - Hi = hi; + Low = low; + High = high; } public override string ToString() { // Since H1 are the low order bits, they are printed second. - return string.Format("{0:x16}{1:x16}", Hi, Lo); + return string.Format("{0:x16}{1:x16}", High, Low); } - public int CompareTo(UInt128 other) + public int CompareTo(RowId other) { - int result = Hi.CompareTo(other.Hi); - return result == 0 ? Lo.CompareTo(other.Lo) : result; + int result = High.CompareTo(other.High); + return result == 0 ? 
Low.CompareTo(other.Low) : result; } - public bool Equals(UInt128 other) + public bool Equals(RowId other) { - return Lo == other.Lo && Hi == other.Hi; + return Low == other.Low && High == other.High; } public override bool Equals(object obj) { - if (obj != null && obj is UInt128) + if (obj != null && obj is RowId) { - var item = (UInt128)obj; + var item = (RowId)obj; return Equals(item); } return false; } - public static UInt128 operator +(UInt128 first, ulong second) + public static RowId operator +(RowId first, ulong second) { - ulong resHi = first.Hi; - ulong resLo = first.Lo + second; + ulong resHi = first.High; + ulong resLo = first.Low + second; if (resLo < second) resHi++; - return new UInt128(resLo, resHi); + return new RowId(resLo, resHi); } - public static UInt128 operator -(UInt128 first, ulong second) + public static RowId operator -(RowId first, ulong second) { - ulong resHi = first.Hi; - ulong resLo = first.Lo - second; - if (resLo > first.Lo) + ulong resHi = first.High; + ulong resLo = first.Low - second; + if (resLo > first.Low) resHi--; - return new UInt128(resLo, resHi); + return new RowId(resLo, resHi); } - public static bool operator ==(UInt128 first, ulong second) + public static bool operator ==(RowId first, ulong second) { - return first.Hi == 0 && first.Lo == second; + return first.High == 0 && first.Low == second; } - public static bool operator !=(UInt128 first, ulong second) + public static bool operator !=(RowId first, ulong second) { return !(first == second); } - public static bool operator <(UInt128 first, ulong second) + public static bool operator <(RowId first, ulong second) { - return first.Hi == 0 && first.Lo < second; + return first.High == 0 && first.Low < second; } - public static bool operator >(UInt128 first, ulong second) + public static bool operator >(RowId first, ulong second) { - return first.Hi > 0 || first.Lo > second; + return first.High > 0 || first.Low > second; } - public static bool operator <=(UInt128 first, 
ulong second) + public static bool operator <=(RowId first, ulong second) { - return first.Hi == 0 && first.Lo <= second; + return first.High == 0 && first.Low <= second; } - public static bool operator >=(UInt128 first, ulong second) + public static bool operator >=(RowId first, ulong second) { - return first.Hi > 0 || first.Lo >= second; + return first.High > 0 || first.Low >= second; } - public static explicit operator double(UInt128 x) + public static explicit operator double(RowId x) { // REVIEW: The 64-bit JIT has a bug where rounding might be not quite // correct when converting a ulong to double with the high bit set. Should we // care and compensate? See the DoubleParser code for a work-around. - return x.Hi * ((double)(1UL << 32) * (1UL << 32)) + x.Lo; + return x.High * ((double)(1UL << 32) * (1UL << 32)) + x.Low; } public override int GetHashCode() { return (int)( - (uint)Lo ^ (uint)(Lo >> 32) ^ - (uint)(Hi << 7) ^ (uint)(Hi >> 57) ^ (uint)(Hi >> (57 - 32))); + (uint)Low ^ (uint)(Low >> 32) ^ + (uint)(High << 7) ^ (uint)(High >> 57) ^ (uint)(High >> (57 - 32))); } #region Hashing style @@ -167,10 +177,10 @@ private static void FinalMix(ref ulong h1, ref ulong h2, int len) /// that were all zeros, except for the last bit which is one. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UInt128 Fork() + public RowId Fork() { - ulong h1 = Lo; - ulong h2 = Hi; + ulong h1 = Low; + ulong h2 = High; // Here it's as if k1=1, k2=0. h1 = RotL(h1, 27); h1 += h2; @@ -179,7 +189,7 @@ public UInt128 Fork() h2 += h1; h2 = h2 * 5 + 0x38495ab5; h1 ^= RotL(_c1, 31) * _c2; - return new UInt128(h1, h2); + return new RowId(h1, h2); } /// @@ -188,10 +198,10 @@ public UInt128 Fork() /// that were all zeros. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UInt128 Next() + public RowId Next() { - ulong h1 = Lo; - ulong h2 = Hi; + ulong h1 = Low; + ulong h2 = High; // Here it's as if k1=0, k2=0. 
h1 = RotL(h1, 27); h1 += h2; @@ -199,7 +209,7 @@ public UInt128 Next() h2 = RotL(h2, 31); h2 += h1; h2 = h2 * 5 + 0x38495ab5; - return new UInt128(h1, h2); + return new RowId(h1, h2); } /// @@ -210,14 +220,14 @@ public UInt128 Next() /// /// [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UInt128 Combine(UInt128 other) + public RowId Combine(RowId other) { - var h1 = Lo; - var h2 = Hi; + var h1 = Low; + var h2 = High; other = other.Fork(); - ulong k1 = other.Lo; // First 8 bytes. - ulong k2 = other.Hi; // Second 8 bytes. + ulong k1 = other.Low; // First 8 bytes. + ulong k2 = other.High; // Second 8 bytes. k1 *= _c1; k1 = RotL(k1, 31); @@ -235,7 +245,7 @@ public UInt128 Combine(UInt128 other) h2 += h1; h2 = h2 * 5 + 0x38495ab5; - return new UInt128(h1, h2); + return new RowId(h1, h2); } #endregion } diff --git a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs index 83bd53a3e3..91549ab047 100644 --- a/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs +++ b/src/Microsoft.ML.Core/Data/SynchronizedCursorBase.cs @@ -62,6 +62,6 @@ protected override void Dispose(bool disposing) public sealed override RowCursor GetRootCursor() => _root; - public sealed override ValueGetter GetIdGetter() => Input.GetIdGetter(); + public sealed override ValueGetter GetIdGetter() => Input.GetIdGetter(); } } diff --git a/src/Microsoft.ML.Core/Data/WrappingRow.cs b/src/Microsoft.ML.Core/Data/WrappingRow.cs index 6f855ad225..4022778945 100644 --- a/src/Microsoft.ML.Core/Data/WrappingRow.cs +++ b/src/Microsoft.ML.Core/Data/WrappingRow.cs @@ -23,7 +23,7 @@ internal abstract class WrappingRow : Row public sealed override long Batch => Input.Batch; public sealed override long Position => Input.Position; - public override ValueGetter GetIdGetter() => Input.GetIdGetter(); + public override ValueGetter GetIdGetter() => Input.GetIdGetter(); [BestFriend] private protected WrappingRow(Row input) diff --git 
a/src/Microsoft.ML.Data/Data/Conversion.cs b/src/Microsoft.ML.Data/Data/Conversion.cs index e80135d6c8..64adc729c2 100644 --- a/src/Microsoft.ML.Data/Data/Conversion.cs +++ b/src/Microsoft.ML.Data/Data/Conversion.cs @@ -30,7 +30,7 @@ namespace Microsoft.ML.Runtime.Data.Conversion using U2 = UInt16; using U4 = UInt32; using U8 = UInt64; - using UG = UInt128; + using UG = RowId; public delegate bool TryParseMapper(in TX src, out T dst); @@ -889,7 +889,7 @@ public ValueGetter GetNAOrDefaultGetter(ColumnType type) public void Convert(in U2 src, ref U1 dst) => dst = src <= U1.MaxValue ? (U1)src : (U1)0; public void Convert(in U4 src, ref U1 dst) => dst = src <= U1.MaxValue ? (U1)src : (U1)0; public void Convert(in U8 src, ref U1 dst) => dst = src <= U1.MaxValue ? (U1)src : (U1)0; - public void Convert(in UG src, ref U1 dst) => dst = src.Hi == 0 && src.Lo <= U1.MaxValue ? (U1)src.Lo : (U1)0; + public void Convert(in UG src, ref U1 dst) => dst = src.High == 0 && src.Low <= U1.MaxValue ? (U1)src.Low : (U1)0; #endregion ToU1 #region ToU2 @@ -897,7 +897,7 @@ public ValueGetter GetNAOrDefaultGetter(ColumnType type) public void Convert(in U2 src, ref U2 dst) => dst = src; public void Convert(in U4 src, ref U2 dst) => dst = src <= U2.MaxValue ? (U2)src : (U2)0; public void Convert(in U8 src, ref U2 dst) => dst = src <= U2.MaxValue ? (U2)src : (U2)0; - public void Convert(in UG src, ref U2 dst) => dst = src.Hi == 0 && src.Lo <= U2.MaxValue ? (U2)src.Lo : (U2)0; + public void Convert(in UG src, ref U2 dst) => dst = src.High == 0 && src.Low <= U2.MaxValue ? (U2)src.Low : (U2)0; #endregion ToU2 #region ToU4 @@ -905,7 +905,7 @@ public ValueGetter GetNAOrDefaultGetter(ColumnType type) public void Convert(in U2 src, ref U4 dst) => dst = src; public void Convert(in U4 src, ref U4 dst) => dst = src; public void Convert(in U8 src, ref U4 dst) => dst = src <= U4.MaxValue ? (U4)src : (U4)0; - public void Convert(in UG src, ref U4 dst) => dst = src.Hi == 0 && src.Lo <= U4.MaxValue ? 
(U4)src.Lo : (U4)0; + public void Convert(in UG src, ref U4 dst) => dst = src.High == 0 && src.Low <= U4.MaxValue ? (U4)src.Low : (U4)0; #endregion ToU4 #region ToU8 @@ -913,7 +913,7 @@ public ValueGetter GetNAOrDefaultGetter(ColumnType type) public void Convert(in U2 src, ref U8 dst) => dst = src; public void Convert(in U4 src, ref U8 dst) => dst = src; public void Convert(in U8 src, ref U8 dst) => dst = src; - public void Convert(in UG src, ref U8 dst) => dst = src.Hi == 0 ? src.Lo : (U8)0; + public void Convert(in UG src, ref U8 dst) => dst = src.High == 0 ? src.Low : (U8)0; #endregion ToU8 #region ToUG @@ -969,7 +969,7 @@ public ValueGetter GetNAOrDefaultGetter(ColumnType type) public void Convert(in U2 src, ref SB dst) => ClearDst(ref dst).Append(src); public void Convert(in U4 src, ref SB dst) => ClearDst(ref dst).Append(src); public void Convert(in U8 src, ref SB dst) => ClearDst(ref dst).Append(src); - public void Convert(in UG src, ref SB dst) { ClearDst(ref dst); dst.AppendFormat("0x{0:x16}{1:x16}", src.Hi, src.Lo); } + public void Convert(in UG src, ref SB dst) { ClearDst(ref dst); dst.AppendFormat("0x{0:x16}{1:x16}", src.High, src.Low); } public void Convert(in R4 src, ref SB dst) { ClearDst(ref dst); if (R4.IsNaN(src)) dst.AppendFormat(CultureInfo.InvariantCulture, "{0}", "?"); else dst.AppendFormat(CultureInfo.InvariantCulture, "{0:R}", src); } public void Convert(in R8 src, ref SB dst) { ClearDst(ref dst); if (R8.IsNaN(src)) dst.AppendFormat(CultureInfo.InvariantCulture, "{0}", "?"); else dst.AppendFormat(CultureInfo.InvariantCulture, "{0:G17}", src); } public void Convert(in BL src, ref SB dst) @@ -1057,7 +1057,7 @@ public bool TryParse(in TX src, out U8 dst) } /// - /// A parse method that transforms a 34-length string into a . + /// A parse method that transforms a 34-length string into a . 
/// /// What should be a 34-length hexadecimal representation, including a 0x prefix, /// of the 128-bit number diff --git a/src/Microsoft.ML.Data/Data/DataViewUtils.cs b/src/Microsoft.ML.Data/Data/DataViewUtils.cs index 0d5d29fb43..cd74d6b205 100644 --- a/src/Microsoft.ML.Data/Data/DataViewUtils.cs +++ b/src/Microsoft.ML.Data/Data/DataViewUtils.cs @@ -1006,7 +1006,7 @@ private sealed class Cursor : RootCursorBase private readonly int[] _colToActive; private readonly OutPipe[] _pipes; private readonly Delegate[] _getters; - private readonly ValueGetter _idGetter; + private readonly ValueGetter _idGetter; private readonly BlockingCollection _batchInputs; private readonly Action _quitAction; @@ -1050,7 +1050,7 @@ public Cursor(IChannelProvider provider, Schema schema, int[] activeToCol, int[] _getters = new Delegate[pipes.Length]; for (int i = 0; i < activeToCol.Length; ++i) _getters[i] = _pipes[i].GetGetter(); - _idGetter = (ValueGetter)_pipes[activeToCol.Length + (int)ExtraIndex.Id].GetGetter(); + _idGetter = (ValueGetter)_pipes[activeToCol.Length + (int)ExtraIndex.Id].GetGetter(); _batchInputs = batchInputs; _batch = -1; _quitAction = quitAction; @@ -1070,7 +1070,7 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } - public override ValueGetter GetIdGetter() => _idGetter; + public override ValueGetter GetIdGetter() => _idGetter; protected override bool MoveNextCore() { @@ -1218,13 +1218,13 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { - ValueGetter[] idGetters = new ValueGetter[_cursors.Length]; + ValueGetter[] idGetters = new ValueGetter[_cursors.Length]; for (int i = 0; i < _cursors.Length; ++i) idGetters[i] = _cursors[i].GetIdGetter(); return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(_icursor >= 0, "Cannot call ID getter in current state"); idGetters[_icursor](ref val); diff --git 
a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs index 5e79d05448..709957c94d 100644 --- a/src/Microsoft.ML.Data/Data/RowCursorUtils.cs +++ b/src/Microsoft.ML.Data/Data/RowCursorUtils.cs @@ -573,13 +573,13 @@ public override bool IsColumnActive(int col) return _active[col]; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs index abbcc163a7..358cb51542 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs @@ -754,7 +754,7 @@ public void GetMetadata(string kind, int col, ref TValue value) private const ulong SlotNamesVersion = 0x0001000100010003; /// - /// Lower inclusive bound of versions this reader can read. + /// Low inclusive bound of versions this reader can read. /// private const ulong ReaderFirstVersion = 0x0001000100010002; @@ -2104,15 +2104,15 @@ private Delegate NoRowGetter() return del; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { if (_blockShuffleOrder == null) { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } // Find the index of the last block. 
Because the last block is unevenly sized, @@ -2128,7 +2128,7 @@ public override ValueGetter GetIdGetter() long firstPositionToCorrect = ((long)lastBlockIdx * _rowsPerBlock) + _rowsInLastBlock; return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); long pos = Position; @@ -2138,7 +2138,7 @@ public override ValueGetter GetIdGetter() long blockPos = (long)_rowsPerBlock * _blockShuffleOrder[(int)(pos / _rowsPerBlock)]; blockPos += (pos % _rowsPerBlock); Ch.Assert(0 <= blockPos && blockPos < _parent.RowCount); - val = new UInt128((ulong)blockPos, 0); + val = new RowId((ulong)blockPos, 0); }; } } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/CodecFactory.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/CodecFactory.cs index 39ecb66e30..735ea8730b 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/CodecFactory.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/CodecFactory.cs @@ -59,7 +59,7 @@ public CodecFactory(IHostEnvironment env, MemoryStreamPool memPool = null) RegisterSimpleCodec(new BoolCodec(this)); RegisterSimpleCodec(new DateTimeCodec(this)); RegisterSimpleCodec(new DateTimeOffsetCodec(this)); - RegisterSimpleCodec(new UnsafeTypeCodec(this)); + RegisterSimpleCodec(new UnsafeTypeCodec(this)); // Register the old type system reading codec. 
RegisterOtherCodec("DvBool", new OldBoolCodec(this).GetCodec); diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs index 7de0a60e13..49d3919e43 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs @@ -44,7 +44,7 @@ static UnsafeTypeOpsFactory() _type2ops[typeof(Single)] = new SingleUnsafeTypeOps(); _type2ops[typeof(Double)] = new DoubleUnsafeTypeOps(); _type2ops[typeof(TimeSpan)] = new TimeSpanUnsafeTypeOps(); - _type2ops[typeof(UInt128)] = new UgUnsafeTypeOps(); + _type2ops[typeof(RowId)] = new UgUnsafeTypeOps(); } public static UnsafeTypeOps Get() @@ -189,21 +189,21 @@ public override TimeSpan Read(BinaryReader reader) } } - private sealed class UgUnsafeTypeOps : UnsafeTypeOps + private sealed class UgUnsafeTypeOps : UnsafeTypeOps { public override int Size { get { return 2 * sizeof(ulong); } } - public override unsafe void Apply(ReadOnlySpan array, Action func) + public override unsafe void Apply(ReadOnlySpan array, Action func) { - fixed (UInt128* pArray = &MemoryMarshal.GetReference(array)) + fixed (RowId* pArray = &MemoryMarshal.GetReference(array)) func(new IntPtr(pArray)); } - public override void Write(UInt128 a, BinaryWriter writer) { writer.Write(a.Lo); writer.Write(a.Hi); } - public override UInt128 Read(BinaryReader reader) + public override void Write(RowId a, BinaryWriter writer) { writer.Write(a.Low); writer.Write(a.High); } + public override RowId Read(BinaryReader reader) { ulong lo = reader.ReadUInt64(); ulong hi = reader.ReadUInt64(); - return new UInt128(lo, hi); + return new RowId(lo, hi); } } } diff --git a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs index e4eedd5f00..5a4742421f 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs +++ 
b/src/Microsoft.ML.Data/DataLoadSave/PartitionedFileLoader.cs @@ -412,14 +412,14 @@ public override ValueGetter GetGetter(int col) return getter; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128(0, (ulong)Position); + val = new RowId(0, (ulong)Position); }; } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs index dbeb1c891d..95adf7fee3 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs @@ -199,13 +199,13 @@ public static RowCursor[] CreateSet(out IRowCursorConsolidator consolidator, } } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)_total, 0); + val = new RowId((ulong)_total, 0); }; } diff --git a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs index 83617f0f52..44f30bdbba 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Transpose/TransposeLoader.cs @@ -336,7 +336,7 @@ protected override void VerifyView(IDataView view) private readonly object _colTransposersLock; /// - /// Lower inclusive bound of versions this reader can read. + /// Low inclusive bound of versions this reader can read. 
/// private const ulong ReaderFirstVersion = 0x0001000100010001; @@ -886,13 +886,13 @@ private void InitVec(int col) _transCursors[i] = cursor; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } diff --git a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs index 841ff93038..dfbc6d602a 100644 --- a/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs +++ b/src/Microsoft.ML.Data/DataView/AppendRowsDataView.cs @@ -210,7 +210,7 @@ public sealed override bool IsColumnActive(int col) private sealed class Cursor : CursorBase { private RowCursor _currentCursor; - private ValueGetter _currentIdGetter; + private ValueGetter _currentIdGetter; private int _currentSourceIndex; public Cursor(AppendRowsDataView parent, Func needCol) @@ -228,17 +228,17 @@ public Cursor(AppendRowsDataView parent, Func needCol) } } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { _currentIdGetter(ref val); // While the union of all IDs may not be acceptable, by taking each // data views IDs and combining them against their source index, the // union of these IDs becomes acceptable. - // REVIEW: Convenience UInt128 constructor for this scenario? - val = val.Combine(new UInt128((ulong)_currentSourceIndex, 0)); + // REVIEW: Convenience RowId constructor for this scenario? 
+ val = val.Combine(new RowId((ulong)_currentSourceIndex, 0)); }; } @@ -329,17 +329,17 @@ public RandCursor(AppendRowsDataView parent, Func needCol, Random ran } } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { - ValueGetter[] idGetters = new ValueGetter[_cursorSet.Length]; + ValueGetter[] idGetters = new ValueGetter[_cursorSet.Length]; for (int i = 0; i < _cursorSet.Length; ++i) idGetters[i] = _cursorSet[i].GetIdGetter(); return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); idGetters[_currentSourceIndex](ref val); - val = val.Combine(new UInt128((ulong)_currentSourceIndex, 0)); + val = val.Combine(new RowId((ulong)_currentSourceIndex, 0)); }; } diff --git a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs index 38e0e1b5bf..8a89a28694 100644 --- a/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs +++ b/src/Microsoft.ML.Data/DataView/ArrayDataViewBuilder.cs @@ -276,24 +276,24 @@ public Cursor(IChannelProvider provider, DataView view, Func predicat _indices = Utils.GetRandomPermutation(rand, view._rowCount); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { if (_indices == null) { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } else { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)MappedIndex(), 0); + val = new RowId((ulong)MappedIndex(), 0); }; } } diff --git a/src/Microsoft.ML.Data/DataView/CacheDataView.cs b/src/Microsoft.ML.Data/DataView/CacheDataView.cs index 0e9308dc22..f7a5b9f2ae 100644 --- a/src/Microsoft.ML.Data/DataView/CacheDataView.cs +++ b/src/Microsoft.ML.Data/DataView/CacheDataView.cs @@ -481,7 +481,7 @@ 
public RowCursor(CacheDataView parent, Func predicate, TIndex index) _index = index; } - public override ValueGetter GetIdGetter() => _index.GetIdGetter(); + public override ValueGetter GetIdGetter() => _index.GetIdGetter(); public override RowCursor GetRootCursor() => this; @@ -566,7 +566,7 @@ public RowSeeker(RowSeekerCore toWrap) public override Schema Schema => _internal.Schema; public override ValueGetter GetGetter(int col) => _internal.GetGetter(col); - public override ValueGetter GetIdGetter() => _internal.GetIdGetter(); + public override ValueGetter GetIdGetter() => _internal.GetIdGetter(); public override bool IsColumnActive(int col) => _internal.IsColumnActive(col); public override bool MoveTo(long rowIndex) => _internal.MoveTo(rowIndex); } @@ -580,13 +580,13 @@ private sealed class RowSeekerCore : RowCursorSeekerBase public override CursorState State => throw new NotImplementedException(); - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(Position >= 0, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } @@ -774,7 +774,7 @@ private interface IIndex /// An ID getter, which should be based on the value that would be returned /// from , if valid, and otherwise have undefined behavior. /// - ValueGetter GetIdGetter(); + ValueGetter GetIdGetter(); /// /// Moves to the next index. 
Once this or has returned @@ -819,13 +819,13 @@ public long GetIndex() return _curr; } - public ValueGetter GetIdGetter() + public ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Contracts.Check(_curr >= 0, "Cannot call ID getter in current state"); - val = new UInt128((ulong)_curr, 0); + val = new RowId((ulong)_curr, 0); }; } @@ -865,7 +865,7 @@ public Wrapper(SequenceIndex index) public long Batch => _index.Batch; public long GetIndex() => _index.GetIndex(); - public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); public bool MoveNext() => _index.MoveNext(); public bool MoveMany(long count) => _index.MoveMany(count); } @@ -894,13 +894,13 @@ public long GetIndex() return _perm[_curr]; } - public ValueGetter GetIdGetter() + public ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Contracts.Check(_curr >= 0, "Cannot call ID getter in current state"); - val = new UInt128((ulong)_perm[_curr], 0); + val = new RowId((ulong)_perm[_curr], 0); }; } @@ -956,7 +956,7 @@ public Wrapper(RandomIndex index) public long Batch => _index.Batch; public long GetIndex() => _index.GetIndex(); - public ValueGetter GetIdGetter() => _index.GetIdGetter(); + public ValueGetter GetIdGetter() => _index.GetIdGetter(); public bool MoveNext() => _index.MoveNext(); public bool MoveMany(long count) => _index.MoveMany(count); } @@ -1050,13 +1050,13 @@ public long GetIndex() return _curr; } - public ValueGetter GetIdGetter() + public ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Contracts.Check(_curr >= 0, "Cannot call ID getter in current state"); - val = new UInt128((ulong)_curr, 0); + val = new RowId((ulong)_curr, 0); }; } @@ -1124,11 +1124,11 @@ public Wrapper(BlockSequenceIndex index) _index = index; } - public long Batch => _index.Batch; - public long GetIndex() => _index.GetIndex(); - public ValueGetter GetIdGetter() => 
_index.GetIdGetter(); - public bool MoveNext() => _index.MoveNext(); - public bool MoveMany(long count) => _index.MoveMany(count); + public long Batch { get { return _index.Batch; } } + public long GetIndex() { return _index.GetIndex(); } + public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } + public bool MoveNext() { return _index.MoveNext(); } + public bool MoveMany(long count) { return _index.MoveMany(count); } } } @@ -1169,13 +1169,13 @@ public long GetIndex() return _perm[_curr]; } - public ValueGetter GetIdGetter() + public ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Contracts.Check(_curr >= 0, "Cannot call ID getter in current state"); - val = new UInt128((ulong)_perm[_curr], 0); + val = new RowId((ulong)_perm[_curr], 0); }; } @@ -1232,11 +1232,11 @@ public Wrapper(BlockRandomIndex index) _index = index; } - public long Batch => _index.Batch; - public long GetIndex() => _index.GetIndex(); - public ValueGetter GetIdGetter() => _index.GetIdGetter(); - public bool MoveNext() => _index.MoveNext(); - public bool MoveMany(long count) => _index.MoveMany(count); + public long Batch { get { return _index.Batch; } } + public long GetIndex() { return _index.GetIndex(); } + public ValueGetter GetIdGetter() { return _index.GetIdGetter(); } + public bool MoveNext() { return _index.MoveNext(); } + public bool MoveMany(long count) { return _index.MoveMany(count); } } } diff --git a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs index aaf2e9f49e..99a2c74e70 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs @@ -96,7 +96,7 @@ public SubsetActive(Row row, Func pred) public override long Position => _row.Position; public override long Batch => _row.Batch; public override ValueGetter GetGetter(int col) => _row.GetGetter(col); - public override ValueGetter GetIdGetter() => 
_row.GetIdGetter(); + public override ValueGetter GetIdGetter() => _row.GetIdGetter(); public override bool IsColumnActive(int col) => _pred(col); } } diff --git a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs index 62d28e1809..5e5981f8a2 100644 --- a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs @@ -110,12 +110,12 @@ public void ExtractValues(TRow row) _position++; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return IdGetter; } - private void IdGetter(ref UInt128 val) => val = new UInt128((ulong)Position, 0); + private void IdGetter(ref RowId val) => val = new RowId((ulong)Position, 0); protected override TRow GetCurrentRowObject() { @@ -425,7 +425,7 @@ protected override void Dispose(bool disposing) public override ValueGetter GetGetter(int col) => _toWrap.GetGetter(col); - public override ValueGetter GetIdGetter() => _toWrap.GetIdGetter(); + public override ValueGetter GetIdGetter() => _toWrap.GetIdGetter(); public override RowCursor GetRootCursor() => this; public override bool IsColumnActive(int col) => _toWrap.IsColumnActive(col); public override bool MoveMany(long count) => _toWrap.MoveMany(count); @@ -602,24 +602,24 @@ public Cursor(IHostEnvironment env, string name, ListDataView dataView, _permutation = Utils.GetRandomPermutation(rand, dataView._data.Count); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { if (_permutation == null) { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } else { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Index, 0); + val = new RowId((ulong)Index, 
0); }; } } @@ -704,13 +704,13 @@ public Cursor(IHostEnvironment env, StreamingDataView dataView, Func GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } @@ -772,13 +772,13 @@ public Cursor(IHostEnvironment env, SingleRowLoopDataView dataView, Func GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } diff --git a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs index 466da6b098..8b17aa791b 100644 --- a/src/Microsoft.ML.Data/DataView/EmptyDataView.cs +++ b/src/Microsoft.ML.Data/DataView/EmptyDataView.cs @@ -59,10 +59,10 @@ public Cursor(IChannelProvider provider, Schema schema, Func needCol) _active = Utils.BuildArray(Schema.ColumnCount, needCol); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Assert(!IsGood); throw Ch.Except("Cannot call ID getter in current state"); diff --git a/src/Microsoft.ML.Data/DataView/Transposer.cs b/src/Microsoft.ML.Data/DataView/Transposer.cs index cbd5dff214..0756d54890 100644 --- a/src/Microsoft.ML.Data/DataView/Transposer.cs +++ b/src/Microsoft.ML.Data/DataView/Transposer.cs @@ -1570,12 +1570,12 @@ public override ValueGetter GetGetter(int col) return getter; } - public override ValueGetter GetIdGetter() => GetId; + public override ValueGetter GetIdGetter() => GetId; - private void GetId(ref UInt128 id) + private void GetId(ref RowId id) { Ch.Check(_slotCursor.SlotIndex >= 0, "Cannot get ID with cursor in current state."); - id = new UInt128((ulong)_slotCursor.SlotIndex, 0); + id = 
new RowId((ulong)_slotCursor.SlotIndex, 0); } protected override bool MoveNextCore() => _slotCursor.MoveNext(); @@ -1615,12 +1615,12 @@ public override ValueGetter GetGetter(int col) return _slotCursor.GetGetterWithVectorType(Ch); } - public override ValueGetter GetIdGetter() => GetId; + public override ValueGetter GetIdGetter() => GetId; - private void GetId(ref UInt128 id) + private void GetId(ref RowId id) { Ch.Check(_slotCursor.SlotIndex >= 0, "Cannot get ID with cursor in current state."); - id = new UInt128((ulong)_slotCursor.SlotIndex, 0); + id = new RowId((ulong)_slotCursor.SlotIndex, 0); } protected override bool MoveNextCore() => _slotCursor.MoveNext(); diff --git a/src/Microsoft.ML.Data/DataView/TypedCursor.cs b/src/Microsoft.ML.Data/DataView/TypedCursor.cs index 06bd5dc7b3..b467197437 100644 --- a/src/Microsoft.ML.Data/DataView/TypedCursor.cs +++ b/src/Microsoft.ML.Data/DataView/TypedCursor.cs @@ -480,7 +480,7 @@ public void Dispose() public Schema Schema => _row.Schema; public void FillValues(TRow row) => _row.FillValues(row); public ValueGetter GetGetter(int col) => _row.GetGetter(col); - public ValueGetter GetIdGetter() => _row.GetIdGetter(); + public ValueGetter GetIdGetter() => _row.GetIdGetter(); public bool IsColumnActive(int col) => _row.IsColumnActive(col); } @@ -508,7 +508,7 @@ protected override void Dispose(bool disposing) public override void FillValues(TRow row) => _cursor.FillValues(row); public override ValueGetter GetGetter(int col) => _cursor.GetGetter(col); - public override ValueGetter GetIdGetter() => _cursor.GetIdGetter(); + public override ValueGetter GetIdGetter() => _cursor.GetIdGetter(); public override RowCursor GetRootCursor() => _cursor.GetRootCursor(); public override bool IsColumnActive(int col) => _cursor.IsColumnActive(col); public override bool MoveMany(long count) => _cursor.MoveMany(count); diff --git a/src/Microsoft.ML.Data/DataView/ZipDataView.cs b/src/Microsoft.ML.Data/DataView/ZipDataView.cs index 
9de68b9d55..a09a4fa7f4 100644 --- a/src/Microsoft.ML.Data/DataView/ZipDataView.cs +++ b/src/Microsoft.ML.Data/DataView/ZipDataView.cs @@ -138,13 +138,13 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } diff --git a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs index 7b1edd3398..f8cb7062ee 100644 --- a/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs +++ b/src/Microsoft.ML.Data/StaticPipe/StaticSchemaShape.cs @@ -344,7 +344,7 @@ private static Type StaticKind(DataKind kind) case DataKind.U2: return typeof(ushort); case DataKind.U4: return typeof(uint); case DataKind.U8: return typeof(ulong); - case DataKind.U16: return typeof(UInt128); + case DataKind.U16: return typeof(RowId); case DataKind.R4: return typeof(float); case DataKind.R8: return typeof(double); diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index 83d7efaf90..5a04db2b06 100644 --- a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -634,13 +634,13 @@ public void Signal(CursOpt opt) } /// - /// This supports Weight (float), Group (ulong), and Id (UInt128) columns. + /// This supports Weight (float), Group (ulong), and Id (RowId) columns. 
/// public class StandardScalarCursor : TrainingCursorBase { private readonly ValueGetter _getWeight; private readonly ValueGetter _getGroup; - private readonly ValueGetter _getId; + private readonly ValueGetter _getId; private readonly bool _keepBadWeight; private readonly bool _keepBadGroup; @@ -651,7 +651,7 @@ public class StandardScalarCursor : TrainingCursorBase public float Weight; public ulong Group; - public UInt128 Id; + public RowId Id; public StandardScalarCursor(RoleMappedData data, CursOpt opt, Random rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index 8c40a0138e..c68cbd5d48 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -431,7 +431,7 @@ private ValueGetter ComposeGetterOne(Row input, int iinfo, int srcCol, Col case DataKind.U8: return MakeScalarHashGetter(input, srcCol, seed, mask); case DataKind.U16: - return MakeScalarHashGetter(input, srcCol, seed, mask); + return MakeScalarHashGetter(input, srcCol, seed, mask); case DataKind.I1: return MakeScalarHashGetter(input, srcCol, seed, mask); case DataKind.I2: @@ -484,7 +484,7 @@ private ValueGetter> ComposeGetterVec(Row input, int iinfo, int sr case DataKind.U8: return ComposeGetterVecCore(input, iinfo, srcCol, srcType); case DataKind.U16: - return ComposeGetterVecCore(input, iinfo, srcCol, srcType); + return ComposeGetterVecCore(input, iinfo, srcCol, srcType); case DataKind.I1: return ComposeGetterVecCore(input, iinfo, srcCol, srcType); case DataKind.I2: @@ -648,19 +648,19 @@ public uint HashCore(uint seed, uint mask, in ulong value) } } - private readonly struct HashU16: IHasher + private readonly struct HashU16: IHasher { [MethodImpl(MethodImplOptions.AggressiveInlining)] - public uint HashCore(uint seed, uint mask, in UInt128 value) + public uint HashCore(uint seed, uint mask, in RowId 
value) { - var hash = Hashing.MurmurRound(seed, Utils.GetLo(value.Lo)); - var hi = Utils.GetHi(value.Lo); + var hash = Hashing.MurmurRound(seed, Utils.GetLo(value.Low)); + var hi = Utils.GetHi(value.Low); if (hi != 0) hash = Hashing.MurmurRound(hash, hi); - if (value.Hi != 0) + if (value.High != 0) { - hash = Hashing.MurmurRound(hash, Utils.GetLo(value.Hi)); - hi = Utils.GetHi(value.Hi); + hash = Hashing.MurmurRound(hash, Utils.GetLo(value.High)); + hi = Utils.GetHi(value.High); if (hi != 0) hash = Hashing.MurmurRound(hash, hi); } diff --git a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs index 625bd22e9e..4ee58d1f25 100644 --- a/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs +++ b/src/Microsoft.ML.Data/Transforms/PerGroupTransformBase.cs @@ -294,13 +294,13 @@ public override ValueGetter GetGetter(int col) return fn; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)Position, 0); + val = new RowId((ulong)Position, 0); }; } diff --git a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs index 9d5aefe473..843ac1c938 100644 --- a/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs @@ -476,7 +476,7 @@ public void Fetch(int idx, ref T value) // Each delegate here corresponds to a pipe holding column data. private readonly Delegate[] _getters; // This delegate corresponds to the pipe holding ID data. - private readonly ValueGetter _idGetter; + private readonly ValueGetter _idGetter; // The current position of the output cursor in circular "space". 
private int _circularIndex; @@ -535,7 +535,7 @@ public Cursor(IChannelProvider provider, int poolRows, RowCursor input, Random r _getters[ia] = CreateGetterDelegate(c); } var idPipe = _pipes[numActive + (int)ExtraIndex.Id] = ShufflePipe.Create(_pipeIndices.Length, NumberType.UG, input.GetIdGetter()); - _idGetter = CreateGetterDelegate(idPipe); + _idGetter = CreateGetterDelegate(idPipe); // Initially, after the preamble to MoveNextCore, we want: // liveCount=0, deadCount=0, circularIndex=0. So we set these // funky values accordingly. @@ -574,7 +574,7 @@ public static void PostAssert(ITargetBlock target, T item) Contracts.Assert(retval); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _idGetter; } diff --git a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs index 12a8cf2138..88454f4acf 100644 --- a/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs +++ b/src/Microsoft.ML.Data/Transforms/SkipTakeFilter.cs @@ -229,7 +229,7 @@ public Cursor(IChannelProvider provider, RowCursor input, Schema schema, bool[] _take = take; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return Input.GetIdGetter(); } diff --git a/src/Microsoft.ML.Data/Utilities/SlotDropper.cs b/src/Microsoft.ML.Data/Utilities/SlotDropper.cs index 188f8e72b5..d678302467 100644 --- a/src/Microsoft.ML.Data/Utilities/SlotDropper.cs +++ b/src/Microsoft.ML.Data/Utilities/SlotDropper.cs @@ -29,7 +29,7 @@ public sealed class SlotDropper /// Constructs slot dropper. It expects the slot ranges to be in sorted order and not overlap. /// /// 0 indicates variable sized vector. - /// Lower limit of ranges to be dropped. + /// Low limit of ranges to be dropped. /// Upper limit of ranges to be dropped. 
public SlotDropper(int srcLength, int[] slotsMin, int[] slotsMax) { diff --git a/src/Microsoft.ML.Data/Utils/ApiUtils.cs b/src/Microsoft.ML.Data/Utils/ApiUtils.cs index 04839856d1..af738b219b 100644 --- a/src/Microsoft.ML.Data/Utils/ApiUtils.cs +++ b/src/Microsoft.ML.Data/Utils/ApiUtils.cs @@ -18,11 +18,11 @@ internal static class ApiUtils private static OpCode GetAssignmentOpCode(Type t) { // REVIEW: This should be a Dictionary based solution. - // DvTypes, strings, arrays, all nullable types, VBuffers and UInt128. + // DvTypes, strings, arrays, all nullable types, VBuffers and RowId. if (t == typeof(ReadOnlyMemory) || t == typeof(string) || t.IsArray || (t.IsGenericType && t.GetGenericTypeDefinition() == typeof(VBuffer<>)) || (t.IsGenericType && t.GetGenericTypeDefinition() == typeof(Nullable<>)) || - t == typeof(DateTime) || t == typeof(DateTimeOffset) || t == typeof(TimeSpan) || t == typeof(UInt128)) + t == typeof(DateTime) || t == typeof(DateTimeOffset) || t == typeof(TimeSpan) || t == typeof(RowId)) { return OpCodes.Stobj; } diff --git a/src/Microsoft.ML.FastTree/Utils/ToByteArrayExtensions.cs b/src/Microsoft.ML.FastTree/Utils/ToByteArrayExtensions.cs index 86df84b0cb..dfecfc8f80 100644 --- a/src/Microsoft.ML.FastTree/Utils/ToByteArrayExtensions.cs +++ b/src/Microsoft.ML.FastTree/Utils/ToByteArrayExtensions.cs @@ -194,9 +194,9 @@ public static ulong ToULong(this byte[] buffer, ref int position) return a; } - // UInt128 + // RowId - public static MD5Hash ToUInt128(this byte[] buffer, ref int position) + public static MD5Hash ToRowId(this byte[] buffer, ref int position) { MD5Hash a = new MD5Hash { @@ -550,7 +550,7 @@ public static unsafe ulong[] ToULongArray(this byte[] buffer, ref int position) return a; } - // UInt128[] + // RowId[] public static int SizeInBytes(this MD5Hash[] array) { @@ -572,7 +572,7 @@ public static unsafe MD5Hash[] ToUInt128Array(this byte[] buffer, ref int positi MD5Hash[] a = new MD5Hash[length]; for (int i = 0; i < length; ++i) { - 
a[i] = buffer.ToUInt128(ref position); + a[i] = buffer.ToRowId(ref position); } return a; } diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs index d6654507fe..7d0ae0ab4b 100644 --- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs +++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs @@ -61,7 +61,7 @@ public class Arguments : UnsupervisedLearnerInputBaseWithWeight [Argument(ArgumentType.AtMostOnce, HelpText = "Cluster initialization algorithm", ShortName = "init")] public InitAlgorithm InitAlgorithm = InitAlgorithm.KMeansParallel; - [Argument(ArgumentType.AtMostOnce, HelpText = "Tolerance parameter for trainer convergence. Lower = slower, more accurate", + [Argument(ArgumentType.AtMostOnce, HelpText = "Tolerance parameter for trainer convergence. Low = slower, more accurate", ShortName = "ot")] [TGUI(Label = "Optimization Tolerance", Description = "Threshold for trainer convergence")] public float OptTol = (float)1e-7; @@ -423,7 +423,7 @@ internal sealed class KMeansAcceleratedRowMap // each row. Instead the RowCursor provides a stable ID across multiple // cursorings. We map those IDs into an index to poke into the per instance // structures. - private readonly HashArray _parallelIndexLookup; + private readonly HashArray _parallelIndexLookup; public KMeansAcceleratedRowMap(FeatureFloatVectorCursor.Factory factory, IChannel ch, long baseMaxInstancesToAccelerate, long totalTrainingInstances, bool isParallel) @@ -469,11 +469,11 @@ public KMeansAcceleratedRowMap(FeatureFloatVectorCursor.Factory factory, IChanne /// preinitialize the HashArray so we can perform lock-free lookup operations during /// the primary KMeans pass. 
/// - private HashArray BuildParallelIndexLookup(FeatureFloatVectorCursor.Factory factory) + private HashArray BuildParallelIndexLookup(FeatureFloatVectorCursor.Factory factory) { Contracts.AssertValue(factory); - HashArray lookup = new HashArray(); + HashArray lookup = new HashArray(); int n = 0; using (var cursor = factory.Create()) { diff --git a/src/Microsoft.ML.Legacy/CSharpApi.cs b/src/Microsoft.ML.Legacy/CSharpApi.cs index c7e5c3ff89..54f1a84807 100644 --- a/src/Microsoft.ML.Legacy/CSharpApi.cs +++ b/src/Microsoft.ML.Legacy/CSharpApi.cs @@ -9380,7 +9380,7 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry public KMeansPlusPlusTrainerInitAlgorithm InitAlgorithm { get; set; } = KMeansPlusPlusTrainerInitAlgorithm.KMeansParallel; /// - /// Tolerance parameter for trainer convergence. Lower = slower, more accurate + /// Tolerance parameter for trainer convergence. Low = slower, more accurate /// [Obsolete] public float OptTol { get; set; } = 1E-07f; @@ -10661,14 +10661,14 @@ public sealed partial class LogisticRegressionBinaryClassifier : Microsoft.ML.Ru public float L1Weight { get; set; } = 1f; /// - /// Tolerance parameter for optimization convergence. Lower = slower, more accurate + /// Tolerance parameter for optimization convergence. Low = slower, more accurate /// [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] [Obsolete] public float OptTol { get; set; } = 1E-07f; /// - /// Memory size for L-BFGS. Lower=faster, less accurate + /// Memory size for L-BFGS. Low=faster, less accurate /// [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] [Obsolete] @@ -10836,14 +10836,14 @@ public sealed partial class LogisticRegressionClassifier : Microsoft.ML.Runtime. public float L1Weight { get; set; } = 1f; /// - /// Tolerance parameter for optimization convergence. Lower = slower, more accurate + /// Tolerance parameter for optimization convergence. 
Low = slower, more accurate /// [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] [Obsolete] public float OptTol { get; set; } = 1E-07f; /// - /// Memory size for L-BFGS. Lower=faster, less accurate + /// Memory size for L-BFGS. Low=faster, less accurate /// [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] [Obsolete] @@ -11483,14 +11483,14 @@ public sealed partial class PoissonRegressor : Microsoft.ML.Runtime.EntryPoints. public float L1Weight { get; set; } = 1f; /// - /// Tolerance parameter for optimization convergence. Lower = slower, more accurate + /// Tolerance parameter for optimization convergence. Low = slower, more accurate /// [TlcModule.SweepableDiscreteParamAttribute("OptTol", new object[]{0.0001f, 1E-07f})] [Obsolete] public float OptTol { get; set; } = 1E-07f; /// - /// Memory size for L-BFGS. Lower=faster, less accurate + /// Memory size for L-BFGS. Low=faster, less accurate /// [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[]{5, 20, 50})] [Obsolete] diff --git a/src/Microsoft.ML.Legacy/Data/TextLoader.cs b/src/Microsoft.ML.Legacy/Data/TextLoader.cs index 434af8f3f5..5c0b185ed1 100644 --- a/src/Microsoft.ML.Legacy/Data/TextLoader.cs +++ b/src/Microsoft.ML.Legacy/Data/TextLoader.cs @@ -190,7 +190,7 @@ private static bool TryGetDataKind(Type type, out DataKind kind) kind = DataKind.DT; else if (type == typeof(DateTimeOffset)) kind = DataKind.DZ; - else if (type == typeof(UInt128)) + else if (type == typeof(RowId)) kind = DataKind.UG; else { diff --git a/src/Microsoft.ML.Parquet/ParquetLoader.cs b/src/Microsoft.ML.Parquet/ParquetLoader.cs index e5fb2609d5..6fd4e93bb8 100644 --- a/src/Microsoft.ML.Parquet/ParquetLoader.cs +++ b/src/Microsoft.ML.Parquet/ParquetLoader.cs @@ -517,7 +517,7 @@ private Delegate CreateGetterDelegate(int col) case DataType.Int64: return CreateGetterDelegateCore(col, _parquetConversions.Conv); case DataType.Int96: - return 
CreateGetterDelegateCore(col, _parquetConversions.Conv); + return CreateGetterDelegateCore(col, _parquetConversions.Conv); case DataType.ByteArray: return CreateGetterDelegateCore>(col, _parquetConversions.Conv); case DataType.String: @@ -601,14 +601,14 @@ public override ValueGetter GetGetter(int col) return getter; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return - (ref UInt128 val) => + (ref RowId val) => { // Unique row id consists of Position of cursor (how many times MoveNext has been called), and position in file Ch.Check(IsGood, "Cannot call ID getter in current state"); - val = new UInt128((ulong)(_readerOptions.Offset + _curDataSetRow), 0); + val = new RowId((ulong)(_readerOptions.Offset + _curDataSetRow), 0); }; } @@ -708,11 +708,11 @@ public ParquetConversions(IChannel channel) public void Conv(in IList src, ref ReadOnlyMemory dst) => dst = ConvertListToString(src).AsMemory(); /// - /// Converts a System.Numerics.BigInteger value to a UInt128 data type value. + /// Converts a System.Numerics.BigInteger value to a RowId data type value. /// /// BigInteger value. - /// UInt128 object. - public void Conv(in BigInteger src, ref UInt128 dst) + /// RowId object. + public void Conv(in BigInteger src, ref RowId dst) { try { @@ -720,11 +720,11 @@ public void Conv(in BigInteger src, ref UInt128 dst) Array.Resize(ref arr, 16); ulong lo = BitConverter.ToUInt64(arr, 0); ulong hi = BitConverter.ToUInt64(arr, 8); - dst = new UInt128(lo, hi); + dst = new RowId(lo, hi); } catch (Exception ex) { - _ch.Error("Cannot convert BigInteger to UInt128. Exception : '{0}'", ex.Message); + _ch.Error("Cannot convert BigInteger to RowId. 
Exception : '{0}'", ex.Message); dst = default; } } diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs index 2ec7021cd4..4c7fad0812 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs @@ -33,13 +33,13 @@ public abstract class ArgumentsBase : LearnerInputBaseWithWeight [TlcModule.SweepableFloatParamAttribute(0.0f, 1.0f, numSteps: 4)] public float L1Weight = Defaults.L1Weight; - [Argument(ArgumentType.AtMostOnce, HelpText = "Tolerance parameter for optimization convergence. Lower = slower, more accurate", + [Argument(ArgumentType.AtMostOnce, HelpText = "Tolerance parameter for optimization convergence. Low = slower, more accurate", ShortName = "ot", SortOrder = 50)] [TGUI(Label = "Optimization Tolerance", Description = "Threshold for optimizer convergence", SuggestedSweeps = "1e-4,1e-7")] [TlcModule.SweepableDiscreteParamAttribute(new object[] { 1e-4f, 1e-7f })] public float OptTol = Defaults.OptTol; - [Argument(ArgumentType.AtMostOnce, HelpText = "Memory size for L-BFGS. Lower=faster, less accurate", + [Argument(ArgumentType.AtMostOnce, HelpText = "Memory size for L-BFGS. 
Low=faster, less accurate", ShortName = "m", SortOrder = 50)] [TGUI(Description = "Memory size for L-BFGS", SuggestedSweeps = "5,20,50")] [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })] diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs index 08bef182cc..c4651628f8 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs @@ -29,7 +29,7 @@ public static class LbfgsBinaryClassificationExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate that is called every time the /// method is called on the @@ -84,7 +84,7 @@ public static class LbfgsRegressionExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. /// A delegate that is called every time the @@ -139,7 +139,7 @@ public static class LbfgsMulticlassExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. 
/// A delegate that is called every time the diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs index 97d45ccec2..44857925cd 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs @@ -73,7 +73,7 @@ public sealed class Arguments : ArgumentsBase /// Enforce non-negative weights. /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. public LogisticRegression(IHostEnvironment env, diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index 68fd9463a6..ca1ea84140 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -81,7 +81,7 @@ public sealed class Arguments : ArgumentsBase /// Enforce non-negative weights. /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. 
public MulticlassLogisticRegression(IHostEnvironment env, diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs index 43c2fd34d6..f52c33ceaa 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs @@ -51,7 +51,7 @@ public sealed class Arguments : ArgumentsBase /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. /// Threshold for optimizer convergence. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Enforce non-negative weights. /// A delegate to apply all the advanced arguments to the algorithm. public PoissonRegression(IHostEnvironment env, diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index 7016568bed..481eeb0b00 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -330,7 +330,7 @@ protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, Lin // Getting the total count of rows in data. Ignore rows with bad label and feature values. long count = 0; - // The maximum value of cursor.Id.Lo. + // The maximum value of cursor.Id.Low. ulong idLoMax = 0; // Counting the number of bad training examples. 
@@ -347,17 +347,17 @@ protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, Lin pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, count)); while (cursor.MoveNext()) { - UInt128 id = cursor.Id; - if (id.Hi > 0 || id.Lo >= (ulong)maxTrainingExamples) + RowId id = cursor.Id; + if (id.High > 0 || id.Low >= (ulong)maxTrainingExamples) { needLookup = true; break; } else { - Contracts.Assert(id.Hi == 0); - if (id.Lo > idLoMax) - idLoMax = id.Lo; + Contracts.Assert(id.High == 0); + if (id.Low > idLoMax) + idLoMax = id.Low; } count++; @@ -373,7 +373,7 @@ protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, Lin // REVIEW: Is 1024 a good lower bound to enforce sparsity? if (1024 < count && count < (long)idLoMax / 5) { - // The distribution of id.Lo is sparse in [0, idLoMax]. + // The distribution of id.Low is sparse in [0, idLoMax]. // Building a lookup table is more memory efficient. needLookup = true; } @@ -528,7 +528,7 @@ protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, Lin if (invariants != null) { Contracts.Assert((idToIdx == null & ((long)idLoMax + 1) * weightSetCount <= Utils.ArrayMaxSize) | (idToIdx != null & count * weightSetCount <= Utils.ArrayMaxSize)); - Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); + Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); int invariantCoeff = weightSetCount == 1 ? 
1 : 2; using (var cursor = cursorFactory.Create()) using (var pch = Host.StartProgressChannel("SDCA invariants initialization")) @@ -769,7 +769,7 @@ protected virtual void TrainWithoutLock(IProgressChannelProvider progress, Float if (pch != null) pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, rowCount)); - Func getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length); + Func getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length); while (cursor.MoveNext()) { long idx = getIndexFromId(cursor.Id); @@ -931,7 +931,7 @@ protected virtual bool CheckConvergence( using (var cursor = cursorFactory.Create()) { long row = 0; - Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); + Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); // Iterates through data to compute loss function. while (cursor.MoveNext()) { @@ -1066,22 +1066,22 @@ public override void ApplyAt(long index, Visitor manip) /// Returns a function delegate to retrieve index from id. /// This is to avoid redundant conditional branches in the tight loop of training. /// - protected Func GetIndexFromIdGetter(IdToIdxLookup idToIdx, int biasLength) + protected Func GetIndexFromIdGetter(IdToIdxLookup idToIdx, int biasLength) { Contracts.AssertValueOrNull(idToIdx); long maxTrainingExamples = MaxDualTableSize / biasLength; if (idToIdx == null) { - return (UInt128 id) => + return (RowId id) => { - Contracts.Assert(id.Hi == 0); - Contracts.Assert((long)id.Lo < maxTrainingExamples); - return (long)id.Lo; + Contracts.Assert(id.High == 0); + Contracts.Assert((long)id.Low < maxTrainingExamples); + return (long)id.Low; }; } else { - return (UInt128 id) => + return (RowId id) => { long idx; bool found = idToIdx.TryGetIndex(id, out idx); @@ -1097,22 +1097,22 @@ protected Func GetIndexFromIdGetter(IdToIdxLookup idToIdx, int bi /// Only works if the cursor is not shuffled. 
/// This is to avoid redundant conditional branches in the tight loop of training. /// - protected Func GetIndexFromIdAndRowGetter(IdToIdxLookup idToIdx, int biasLength) + protected Func GetIndexFromIdAndRowGetter(IdToIdxLookup idToIdx, int biasLength) { Contracts.AssertValueOrNull(idToIdx); long maxTrainingExamples = MaxDualTableSize / biasLength; if (idToIdx == null) { - return (UInt128 id, long row) => + return (RowId id, long row) => { - Contracts.Assert(id.Hi == 0); - Contracts.Assert((long)id.Lo < maxTrainingExamples); - return (long)id.Lo; + Contracts.Assert(id.High == 0); + Contracts.Assert((long)id.Low < maxTrainingExamples); + return (long)id.Low; }; } else { - return (UInt128 id, long row) => + return (RowId id, long row) => { #if DEBUG long idx; @@ -1131,7 +1131,7 @@ protected Func GetIndexFromIdAndRowGetter(IdToIdxLookup idT // This class can also be made to accommodate generic type, as long as the type implements a // good 64-bit hash function. /// - /// A hash table data structure to store Id of type , + /// A hash table data structure to store Id of type , /// and accommodates size larger than 2 billion. This class is an extension based on BCL. /// Two operations are supported: adding and retrieving an id with asymptotically constant complexity. /// The bucket size are prime numbers, starting from 3 and grows to the next prime larger than @@ -1145,9 +1145,9 @@ protected sealed class IdToIdxLookup private readonly struct Entry { public readonly long ItNext; - public readonly UInt128 Value; + public readonly RowId Value; - public Entry(long itNext, UInt128 value) + public Entry(long itNext, RowId value) { ItNext = itNext; Value = value; @@ -1184,7 +1184,7 @@ public IdToIdxLookup(long size = 0) /// /// Make sure the given id is in this lookup table and return the index of the id. 
/// - public long Add(UInt128 id) + public long Add(RowId id) { long iit = GetIit(Get64BitHashCode(id)); long index = GetIndexCore(id, iit); @@ -1199,7 +1199,7 @@ public long Add(UInt128 id) /// Returns a bool representing if id is present. /// Index outputs the index that the id, -1 otherwise. /// - public bool TryGetIndex(UInt128 id, out long index) + public bool TryGetIndex(RowId id, out long index) { AssertValid(); index = GetIndexCore(id, GetIit(Get64BitHashCode(id))); @@ -1214,7 +1214,7 @@ private long GetIit(long hash) /// /// Return the index of value, -1 if it is not present. /// - private long GetIndexCore(UInt128 val, long iit) + private long GetIndexCore(RowId val, long iit) { Contracts.Assert(0 <= iit & iit < _rgit.Length); long it = _rgit[iit]; @@ -1235,7 +1235,7 @@ private long GetIndexCore(UInt128 val, long iit) /// Adds the value as a TItem. Does not check whether the TItem is already present. /// Returns the index of the added value. /// - private long AddCore(UInt128 val, long iit) + private long AddCore(RowId val, long iit) { AssertValid(); Contracts.Assert(0 <= iit && iit < _rgit.Length); @@ -1283,7 +1283,7 @@ private void FillTable() _entries.ApplyRange(0, _count, (long it, ref Entry entry) => { - UInt128 value = entry.Value; + RowId value = entry.Value; long iit = GetIit(Get64BitHashCode(entry.Value)); entry = new Entry(_rgit[iit], value); _rgit[iit] = it; @@ -1310,11 +1310,11 @@ private void DumpStats() Console.WriteLine("Table: {0} out of {1}", c, _rgit.Length); } - private static long Get64BitHashCode(UInt128 value) + private static long Get64BitHashCode(RowId value) { // REVIEW: Is this a good way to compute hash? 
- ulong lo = value.Lo; - ulong hi = value.Hi; + ulong lo = value.Low; + ulong hi = value.High; return (long)(lo ^ (lo >> 32) ^ (hi << 7) ^ (hi >> 57) ^ (hi >> (57 - 32))); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs index af6617d862..659b567843 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs @@ -150,7 +150,7 @@ protected override void TrainWithoutLock(IProgressChannelProvider progress, Floa if (pch != null) pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, rowCount)); - Func getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length); + Func getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length); while (cursor.MoveNext()) { long idx = getIndexFromId(cursor.Id); @@ -324,7 +324,7 @@ protected override bool CheckConvergence( using (var cursor = cursorFactory.Create()) { long row = 0; - Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); + Func getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length); // Iterates through data to compute loss function. while (cursor.MoveNext()) { diff --git a/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs b/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs index c30c4ee8e9..f1edaad3aa 100644 --- a/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs +++ b/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs @@ -236,7 +236,7 @@ public static OnlineGradientDescentTrainer OnlineGradientDescent(this Regression /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. 
public static LogisticRegression LogisticRegression(this BinaryClassificationContext.BinaryClassificationTrainers ctx, @@ -265,7 +265,7 @@ public static LogisticRegression LogisticRegression(this BinaryClassificationCon /// Weight of L1 regularization term. /// Weight of L2 regularization term. /// Threshold for optimizer convergence. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Enforce non-negative weights. /// A delegate to apply all the advanced arguments to the algorithm. public static PoissonRegression PoissonRegression(this RegressionContext.RegressionTrainers ctx, @@ -294,7 +294,7 @@ public static PoissonRegression PoissonRegression(this RegressionContext.Regress /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Lower=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate to apply all the advanced arguments to the algorithm. public static MulticlassLogisticRegression LogisticRegression(this MulticlassClassificationContext.MulticlassClassificationTrainers ctx, diff --git a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs index 20134d7fb7..8bba51034d 100644 --- a/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs +++ b/src/Microsoft.ML.TimeSeries/SequentialTransformerBase.cs @@ -523,7 +523,7 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() => _input.GetIdGetter(); public override ValueGetter GetGetter(int col) @@ -825,7 +825,7 @@ public override ValueGetter GetGetter(int col) public override Action GetPinger() => _pinger as Action ?? 
throw Contracts.Except("Invalid TValue in GetPinger: '{0}'", typeof(long)); - public override ValueGetter GetIdGetter() => _input.GetIdGetter(); + public override ValueGetter GetIdGetter() => _input.GetIdGetter(); public override bool IsColumnActive(int col) { diff --git a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs index 3418dcf64c..a1f12acb23 100644 --- a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs +++ b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs @@ -200,14 +200,14 @@ public Cursor(BootstrapSamplingTransformer parent, RowCursor input, Random rgen) _rgen = rgen; } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { var inputIdGetter = Input.GetIdGetter(); return - (ref UInt128 val) => + (ref RowId val) => { inputIdGetter(ref val); - val = val.Combine(new UInt128((ulong)_remaining, 0)); + val = val.Combine(new RowId((ulong)_remaining, 0)); }; } diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index 1b9e341007..612b08cfe9 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -597,7 +597,7 @@ public Cursor(GroupTransform parent, Func predicate) } } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _trailingCursor.GetIdGetter(); } diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs index f9cdde6ad6..1545350513 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs @@ -171,12 +171,12 @@ public sealed override void ProcessRow() private abstract class StatAggregatorAcrossSlots : StatAggregator, TStat> { // The number of values that have been processed. 
- private UInt128 _valueCount; + private RowId _valueCount; /// /// Returns the number of values that have been processed so far. /// - public UInt128 ValueCount { get { return _valueCount; } } + public RowId ValueCount { get { return _valueCount; } } protected StatAggregatorAcrossSlots(IChannel ch, RowCursor cursor, int col) : base(ch, cursor, col) @@ -388,11 +388,11 @@ public Double GetCurrentValue(IChannel ch, long count) return stat; } - public Double GetCurrentValue(IChannel ch, UInt128 count) + public Double GetCurrentValue(IChannel ch, RowId count) { Contracts.Assert(Double.MinValue <= _cur && _cur <= Double.MaxValue); Contracts.Assert(_cnz >= 0 && _cna >= 0); - Contracts.Assert(count.Hi != 0 || count.Lo >= (ulong)_cna); + Contracts.Assert(count.High != 0 || count.Low >= (ulong)_cna); // If all values in the column are NAs, emit a warning and return 0. // Is this what we want to do or should an error be thrown? @@ -489,7 +489,7 @@ public long GetCurrentValue(IChannel ch, long count, long valMax) return neg ? -(long)res : (long)res; } - public long GetCurrentValue(IChannel ch, UInt128 count, long valMax) + public long GetCurrentValue(IChannel ch, RowId count, long valMax) { AssertValid(valMax); Contracts.Assert(count >= (ulong)_cna); @@ -527,7 +527,7 @@ public long GetCurrentValue(IChannel ch, UInt128 count, long valMax) // call won't throw. Contracts.Assert(count > sumHi); - ulong res = IntUtils.DivRound(sumLo, sumHi, count.Lo, count.Hi); + ulong res = IntUtils.DivRound(sumLo, sumHi, count.Low, count.High); Contracts.Assert(0 <= res && res <= (ulong)valMax); return neg ? 
-(long)res : (long)res; } diff --git a/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs b/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs index 6600497f24..2954b23928 100644 --- a/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs +++ b/src/Microsoft.ML.Transforms/StatefulFilterTransform.cs @@ -208,7 +208,7 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { return _input.GetIdGetter(); } diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 5354dfda0b..72dc32c128 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -512,13 +512,13 @@ public override long Batch get { return Input.Batch; } } - public override ValueGetter GetIdGetter() + public override ValueGetter GetIdGetter() { var idGetter = Input.GetIdGetter(); - return (ref UInt128 val) => + return (ref RowId val) => { idGetter(ref val); - val = val.Combine(new UInt128((ulong)_pivotColPosition, 0)); + val = val.Combine(new RowId((ulong)_pivotColPosition, 0)); }; } diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json index dcbb6e8db2..c76d71f6bc 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json +++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json @@ -11391,7 +11391,7 @@ { "Name": "OptTol", "Type": "Float", - "Desc": "Tolerance parameter for trainer convergence. Lower = slower, more accurate", + "Desc": "Tolerance parameter for trainer convergence. Low = slower, more accurate", "Aliases": [ "ot" ], @@ -13832,7 +13832,7 @@ { "Name": "OptTol", "Type": "Float", - "Desc": "Tolerance parameter for optimization convergence. Lower = slower, more accurate", + "Desc": "Tolerance parameter for optimization convergence. 
Low = slower, more accurate", "Aliases": [ "ot" ], @@ -13851,7 +13851,7 @@ { "Name": "MemorySize", "Type": "Int", - "Desc": "Memory size for L-BFGS. Lower=faster, less accurate", + "Desc": "Memory size for L-BFGS. Low=faster, less accurate", "Aliases": [ "m" ], @@ -14144,7 +14144,7 @@ { "Name": "OptTol", "Type": "Float", - "Desc": "Tolerance parameter for optimization convergence. Lower = slower, more accurate", + "Desc": "Tolerance parameter for optimization convergence. Low = slower, more accurate", "Aliases": [ "ot" ], @@ -14163,7 +14163,7 @@ { "Name": "MemorySize", "Type": "Int", - "Desc": "Memory size for L-BFGS. Lower=faster, less accurate", + "Desc": "Memory size for L-BFGS. Low=faster, less accurate", "Aliases": [ "m" ], @@ -15176,7 +15176,7 @@ { "Name": "OptTol", "Type": "Float", - "Desc": "Tolerance parameter for optimization convergence. Lower = slower, more accurate", + "Desc": "Tolerance parameter for optimization convergence. Low = slower, more accurate", "Aliases": [ "ot" ], @@ -15195,7 +15195,7 @@ { "Name": "MemorySize", "Type": "Int", - "Desc": "Memory size for L-BFGS. Lower=faster, less accurate", + "Desc": "Memory size for L-BFGS. 
Low=faster, less accurate", "Aliases": [ "m" ], diff --git a/test/Microsoft.ML.Benchmarks/HashBench.cs b/test/Microsoft.ML.Benchmarks/HashBench.cs index a8036f1490..4d5922783a 100644 --- a/test/Microsoft.ML.Benchmarks/HashBench.cs +++ b/test/Microsoft.ML.Benchmarks/HashBench.cs @@ -21,8 +21,8 @@ private sealed class RowImpl : Row public override Schema Schema { get; } public override long Position => PositionValue; public override long Batch => 0; - public override ValueGetter GetIdGetter() - => (ref UInt128 val) => val = new UInt128((ulong)Position, 0); + public override ValueGetter GetIdGetter() + => (ref RowId val) => val = new RowId((ulong)Position, 0); private readonly Delegate _getter; diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs index 08e8039b51..815390d33d 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs @@ -34,13 +34,13 @@ protected bool EqualTypes(ColumnType type1, ColumnType type2, bool exactTypes) return !exactTypes && type1 is VectorType vt1 && type2 is VectorType vt2 && vt1.ItemType.Equals(vt2.ItemType) && vt1.Size == vt2.Size; } - protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) + protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) { var g1 = r1.GetIdGetter(); idGetter = g1; var g2 = r2.GetIdGetter(); - UInt128 v1 = default(UInt128); - UInt128 v2 = default(UInt128); + RowId v1 = default(RowId); + RowId v2 = default(RowId); return () => { @@ -198,7 +198,7 @@ protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, case DataKind.DZ: return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); case DataKind.UG: - return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); + return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); } } else @@ -241,7 +241,7 @@ protected Func GetColumnComparer(Row r1, Row 
r2, int col, ColumnType type, case DataKind.DZ: return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); case DataKind.UG: - return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); + return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); } } @@ -336,13 +336,13 @@ protected bool CheckSameValues(RowCursor curs1, RowCursor curs2, bool exactTypes comps[col] = GetColumnComparer(curs1, curs2, col, type1, exactDoubles); } } - ValueGetter idGetter = null; + ValueGetter idGetter = null; Func idComp = checkId ? GetIdComparer(curs1, curs2, out idGetter) : null; - HashSet idsSeen = null; + HashSet idsSeen = null; if (checkIdCollisions && idGetter == null) idGetter = curs1.GetIdGetter(); long idCollisions = 0; - UInt128 id = default(UInt128); + RowId id = default(RowId); for (; ; ) { @@ -423,7 +423,7 @@ protected bool CheckSameValues(RowCursor curs1, IDataView view2, bool exactTypes return Failed(); } comps[col] = GetColumnComparer(curs1, cursors[col], col, type1, exactDoubles); - ValueGetter idGetter; + ValueGetter idGetter; idComps[col] = checkId ? GetIdComparer(curs1, cursors[col], out idGetter) : null; } diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index f3e53a0799..ab26f5dabf 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -874,13 +874,13 @@ protected bool CheckSameValues(RowCursor curs1, RowCursor curs2, bool exactTypes comps[col] = GetColumnComparer(curs1, curs2, col, type1, exactDoubles); } } - ValueGetter idGetter = null; + ValueGetter idGetter = null; Func idComp = checkId ? 
GetIdComparer(curs1, curs2, out idGetter) : null; - HashSet idsSeen = null; + HashSet idsSeen = null; if (checkIdCollisions && idGetter == null) idGetter = curs1.GetIdGetter(); long idCollisions = 0; - UInt128 id = default(UInt128); + RowId id = default(RowId); for (; ; ) { @@ -962,7 +962,7 @@ protected bool CheckSameValues(RowCursor curs1, IDataView view2, bool exactTypes return Failed(); } comps[col] = GetColumnComparer(curs1, cursors[col], col, type1, exactDoubles); - ValueGetter idGetter; + ValueGetter idGetter; idComps[col] = checkId ? GetIdComparer(curs1, cursors[col], out idGetter) : null; } @@ -1014,13 +1014,13 @@ protected bool CheckSameValues(RowCursor curs1, IDataView view2, bool exactTypes } } - protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) + protected Func GetIdComparer(Row r1, Row r2, out ValueGetter idGetter) { var g1 = r1.GetIdGetter(); idGetter = g1; var g2 = r2.GetIdGetter(); - UInt128 v1 = default(UInt128); - UInt128 v2 = default(UInt128); + RowId v1 = default(RowId); + RowId v2 = default(RowId); return () => { @@ -1070,7 +1070,7 @@ protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, case DataKind.DZ: return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); case DataKind.UG: - return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); + return GetComparerOne(r1, r2, col, (x, y) => x.Equals(y)); case (DataKind)0: // We cannot compare custom types (including image). 
return () => true; @@ -1119,7 +1119,7 @@ protected Func GetColumnComparer(Row r1, Row r2, int col, ColumnType type, case DataKind.DZ: return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); case DataKind.UG: - return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); + return GetComparerVec(r1, r2, col, size, (x, y) => x.Equals(y)); } } diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index 409b5f3f30..955c2603d7 100644 --- a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -255,7 +255,7 @@ private void HashTestPositiveIntegerCore(ulong value, uint expected, uint expect HashTestCore(value, NumberType.U8, expected, expectedOrdered, expectedOrdered3); HashTestCore((ulong)value, new KeyType(typeof(ulong), 0, 0), eKey, eoKey, e3Key); - HashTestCore(new UInt128(value, 0), NumberType.UG, expected, expectedOrdered, expectedOrdered3); + HashTestCore(new RowId(value, 0), NumberType.UG, expected, expectedOrdered, expectedOrdered3); // Next let's check signed numbers. 
From e16e536fe5bfec0c33878006595ac16998c1779f Mon Sep 17 00:00:00 2001 From: Najeeb Kazmi Date: Wed, 12 Dec 2018 12:07:27 -0800 Subject: [PATCH 047/100] Public API for Linear Predictors (#1865) * Rename linear predictors to XyzModelParameters, reduce public surface, add a sample, internalize and explicitly implement some more interfaces * Rename linear predictors to XyzModelParameters, reduce public surface, add a sample, internalize and explicitly implement some more interfaces * Replace Float with float and address other comments * More merge conflicts * Address comments --- .../Dynamic/PermutationFeatureImportance.cs | 2 +- .../Dynamic/SDCARegression.cs | 43 +++++ .../Static/SDCARegression.cs | 2 +- .../Dirty/PredictorInterfaces.cs | 25 +-- .../Prediction/Calibrator.cs | 4 +- .../QuantileStatistics.cs | 48 +++-- .../RandomForestRegression.cs | 2 +- .../OlsLinearRegression.cs | 60 +++--- .../SymSgdClassificationTrainer.cs | 8 +- ...rPredictor.cs => LinearModelParameters.cs} | 178 +++++++++--------- .../LogisticRegression/LbfgsStatic.cs | 2 +- .../LogisticRegression/LogisticRegression.cs | 6 +- .../Standard/ModelStatistics.cs | 12 +- .../Standard/Online/AveragedLinear.cs | 2 +- .../Standard/Online/AveragedPerceptron.cs | 16 +- .../Standard/Online/LinearSvm.cs | 14 +- .../Standard/Online/OnlineGradientDescent.cs | 16 +- .../Standard/Online/OnlineLearnerStatic.cs | 4 +- .../Standard/Online/OnlineLinear.cs | 47 +++-- .../PoissonRegression/PoissonRegression.cs | 16 +- .../Standard/SdcaBinary.cs | 14 +- .../Standard/SdcaRegression.cs | 22 +-- .../Standard/SdcaStatic.cs | 14 +- .../Standard/StochasticTrainerBase.cs | 6 +- .../Training.cs | 24 +-- .../EnvironmentExtensions.cs | 2 +- .../TrainerEstimators/LbfgsTests.cs | 4 +- 27 files changed, 314 insertions(+), 279 deletions(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/SDCARegression.cs rename src/Microsoft.ML.StandardLearners/Standard/{LinearPredictor.cs => LinearModelParameters.cs} (80%) diff --git 
a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs index 7150f12835..584a5dcc52 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance.cs @@ -116,7 +116,7 @@ public static void PFI_Regression() } } - private static float[] GetLinearModelWeights(LinearRegressionPredictor linearModel) + private static float[] GetLinearModelWeights(LinearRegressionModelParameters linearModel) { var weights = new VBuffer(); linearModel.GetFeatureWeights(ref weights); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCARegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCARegression.cs new file mode 100644 index 0000000000..c9a536c0d4 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCARegression.cs @@ -0,0 +1,43 @@ +using Microsoft.ML.Data; +using System; +using System.Linq; + +namespace Microsoft.ML.Samples.Dynamic +{ + public class SDCARegressionExample + { + public static void SDCARegression() + { + // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging, + // as well as the source of randomness. + var ml = new MLContext(); + + // Get a small dataset as an IEnumerable and convert it to an IDataView. + var data = SamplesUtils.DatasetUtils.GetInfertData(); + var trainData = ml.CreateStreamingDataView(data); + + // Preview of the data. + // + // Age Case Education Induced Parity PooledStratum RowNum ... + // 26 1 0-5yrs 1 6 3 1 ... + // 42 1 0-5yrs 1 1 1 2 ... + // 39 1 0-5yrs 2 6 4 3 ... + // 34 1 0-5yrs 2 4 2 4 ... + // 35 1 6-11yrs 1 3 32 5 ... + + // A pipeline for concatenating the Parity and Induced columns together in the Features column. + // We will train a FastTreeRegression model with 1 tree on these two columns to predict Age. 
+ string outputColumnName = "Features"; + var pipeline = ml.Transforms.Concatenate(outputColumnName, new[] { "Parity", "Induced" }) + .Append(ml.Regression.Trainers.StochasticDualCoordinateAscent(labelColumn: "Age", featureColumn: outputColumnName, maxIterations:2)); + + var model = pipeline.Fit(trainData); + + // Get the trained model parameters. + var modelParams = model.LastTransformer.Model; + // Inspect the bias and model weights. + Console.WriteLine("The bias term is: " + modelParams.Bias); + Console.WriteLine("The feature weights are: " + string.Join(", ", modelParams.Weights.ToArray())); + } + } +} diff --git a/docs/samples/Microsoft.ML.Samples/Static/SDCARegression.cs b/docs/samples/Microsoft.ML.Samples/Static/SDCARegression.cs index 4d35a28257..1ebb4cf1db 100644 --- a/docs/samples/Microsoft.ML.Samples/Static/SDCARegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Static/SDCARegression.cs @@ -29,7 +29,7 @@ public static void SdcaRegression() var (trainData, testData) = mlContext.Regression.TrainTestSplit(data, testFraction: 0.1); // The predictor that gets produced out of training - LinearRegressionPredictor pred = null; + LinearRegressionModelParameters pred = null; // Create the estimator var learningPipeline = reader.MakeNewEstimator() diff --git a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs index f2d1ed6cfc..f5dbc7f470 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs @@ -2,8 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Float = System.Single; - using System; using System.Collections.Generic; using System.IO; @@ -21,7 +19,8 @@ namespace Microsoft.ML.Runtime.Internal.Internallearn /// /// A generic interface for models that can average parameters from multiple instance of self /// - public interface IParameterMixer + [BestFriend] + internal interface IParameterMixer { IParameterMixer CombineParameters(IList models); } @@ -29,7 +28,8 @@ public interface IParameterMixer /// /// A generic interface for models that can average parameters from multiple instance of self /// - public interface IParameterMixer + [BestFriend] + internal interface IParameterMixer { IParameterMixer CombineParameters(IList> models); } @@ -48,7 +48,8 @@ internal interface IQuantileRegressionPredictor /// A generic interface for probability distributions /// /// Type of statistics result - public interface IDistribution + [BestFriend] + internal interface IDistribution { TResult Minimum { get; } @@ -63,14 +64,15 @@ public interface IDistribution [BestFriend] internal interface IQuantileValueMapper { - ValueMapper, VBuffer> GetMapper(Float[] quantiles); + ValueMapper, VBuffer> GetMapper(float[] quantiles); } /// /// Interface for quantile distribution /// /// Type of statistics result - public interface IQuantileDistribution : IDistribution, ISampleableDistribution + [BestFriend] + internal interface IQuantileDistribution : IDistribution, ISampleableDistribution { TResult Median { get; } @@ -78,10 +80,11 @@ public interface IQuantileDistribution : IDistribution, ISampl /// Returns an estimate of the p-th quantile, the data value where proportionately p of the data has value /// less than or equal to the returned value. /// - TResult GetQuantile(Float p); + TResult GetQuantile(float p); } - public interface ISampleableDistribution : IDistribution + [BestFriend] + internal interface ISampleableDistribution : IDistribution { /// /// Returns Support sample for the distribution. 
@@ -171,7 +174,7 @@ public interface IHaveFeatureWeights /// The larger the absolute value of a weights, the more informative/important the feature. /// A weights of zero signifies that the feature is not used by the model. /// - void GetFeatureWeights(ref VBuffer weights); + void GetFeatureWeights(ref VBuffer weights); } /// @@ -196,7 +199,7 @@ internal interface IFeatureContributionMapper : IPredictor /// For trees we will not have negative contributions, so bottom param will be ignored. /// If normalization is requested that resulting values will be normalized to [-1, 1]. /// - ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize); + ValueMapper> GetFeatureContributionMapper(int top, int bottom, bool normalize); } /// diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index 722e4fa94a..b068a73243 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -494,7 +494,7 @@ public void GetFeatureWeights(ref VBuffer weights) _featureWeights.GetFeatureWeights(ref weights); } - public IParameterMixer CombineParameters(IList> models) + IParameterMixer IParameterMixer.CombineParameters(IList> models) { var predictors = models.Select( m => @@ -1466,7 +1466,7 @@ public string GetSummary() return string.Format("Platt calibrator parameters: A={0}, B={1}", ParamA, ParamB); } - public IParameterMixer CombineParameters(IList calibrators) + IParameterMixer IParameterMixer.CombineParameters(IList calibrators) { Double a = 0; Double b = 0; diff --git a/src/Microsoft.ML.FastTree/QuantileStatistics.cs b/src/Microsoft.ML.FastTree/QuantileStatistics.cs index b93c1919a9..ea8eb0eadd 100644 --- a/src/Microsoft.ML.FastTree/QuantileStatistics.cs +++ b/src/Microsoft.ML.FastTree/QuantileStatistics.cs @@ -2,56 +2,54 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Float = System.Single; - using System; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Internal.Internallearn; namespace Microsoft.ML.Runtime { - public sealed class QuantileStatistics : IQuantileDistribution + public sealed class QuantileStatistics : IQuantileDistribution { - private readonly Float[] _data; - private readonly Float[] _weights; + private readonly float[] _data; + private readonly float[] _weights; //This holds the cumulative sum of _weights to search the rank easily by binary search. - private Float[] _weightedSums; + private float[] _weightedSums; private SummaryStatistics _summaryStatistics; - public Float Minimum + float IDistribution.Minimum { get { if (_data.Length == 0) - return Float.NaN; + return float.NaN; return _data[0]; } } - public Float Maximum + float IDistribution.Maximum { get { if (_data.Length == 0) - return Float.NaN; + return float.NaN; return _data[_data.Length - 1]; } } - public Float Median { get { return GetQuantile(0.5F); } } + float IQuantileDistribution.Median { get { return ((IQuantileDistribution)this).GetQuantile(0.5F); } } - public Float Mean { get { return (Float)SummaryStatistics.Mean; } } + float IDistribution.Mean { get { return (float)SummaryStatistics.Mean; } } - public Float StandardDeviation { get { return (Float)SummaryStatistics.SampleStdDev; } } + float IDistribution.StandardDeviation { get { return (float)SummaryStatistics.SampleStdDev; } } /// /// data array will be modified because of sorting if it is not already sorted yet and this class owns the data. 
/// Modifying the data outside will lead to erroneous output by this class /// - public QuantileStatistics(Float[] data, Float[] weights = null, bool isSorted = false) + public QuantileStatistics(float[] data, float[] weights = null, bool isSorted = false) { Contracts.CheckValue(data, nameof(data)); Contracts.Check(weights == null || weights.Length == data.Length, "weights"); @@ -69,19 +67,19 @@ public QuantileStatistics(Float[] data, Float[] weights = null, bool isSorted = /// There are many ways to estimate quantile. This implementations is based on R-8, SciPy-(1/3,1/3) /// https://en.wikipedia.org/wiki/Quantile#Estimating_the_quantiles_of_a_population /// - public Float GetQuantile(Float p) + float IQuantileDistribution.GetQuantile(float p) { Contracts.CheckParam(0 <= p && p <= 1, nameof(p), "Probablity argument for Quantile function should be between 0 to 1 inclusive"); if (_data.Length == 0) - return Float.NaN; + return float.NaN; if (p == 0 || _data.Length == 1) return _data[0]; if (p == 1) return _data[_data.Length - 1]; - Float h = GetRank(p); + float h = GetRank(p); if (h <= 1) return _data[0]; @@ -90,12 +88,12 @@ public Float GetQuantile(Float p) return _data[_data.Length - 1]; var hf = (int)h; - return (Float)(_data[hf - 1] + (h - hf) * (_data[hf] - _data[hf - 1])); + return (float)(_data[hf - 1] + (h - hf) * (_data[hf] - _data[hf - 1])); } - public Float[] GetSupportSample(out Float[] weights) + float[] ISampleableDistribution.GetSupportSample(out float[] weights) { - var result = new Float[_data.Length]; + var result = new float[_data.Length]; Array.Copy(_data, result, _data.Length); if (_weights == null) { @@ -103,25 +101,25 @@ public Float[] GetSupportSample(out Float[] weights) } else { - weights = new Float[_data.Length]; + weights = new float[_data.Length]; Array.Copy(_weights, weights, _weights.Length); } return result; } - private Float GetRank(Float p) + private float GetRank(float p) { - const Float oneThird = (Float)1 / 3; + const float 
oneThird = (float)1 / 3; // holds length of the _data array if the weights is null or holds the sum of weights - Float weightedLength = _data.Length; + float weightedLength = _data.Length; if (_weights != null) { if (_weightedSums == null) { - _weightedSums = new Float[_weights.Length]; + _weightedSums = new float[_weights.Length]; _weightedSums[0] = _weights[0]; for (int i = 1; i < _weights.Length; i++) _weightedSums[i] = _weights[i] + _weightedSums[i - 1]; diff --git a/src/Microsoft.ML.FastTree/RandomForestRegression.cs b/src/Microsoft.ML.FastTree/RandomForestRegression.cs index 8b32cea962..977a870f9d 100644 --- a/src/Microsoft.ML.FastTree/RandomForestRegression.cs +++ b/src/Microsoft.ML.FastTree/RandomForestRegression.cs @@ -119,7 +119,7 @@ ValueMapper, VBuffer> IQuantileValueMapper.GetMapper(float // REVIEW: Should make this more efficient - it repeatedly allocates too much stuff. float[] weights = null; var distribution = TrainedEnsemble.GetDistribution(in src, _quantileSampleCount, out weights); - var qdist = new QuantileStatistics(distribution, weights); + IQuantileDistribution qdist = new QuantileStatistics(distribution, weights); var editor = VBufferEditor.Create(ref dst, quantiles.Length); for (int i = 0; i < quantiles.Length; i++) diff --git a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs index 37c7a59643..6ff155feea 100644 --- a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs +++ b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs @@ -26,16 +26,16 @@ OlsLinearRegressionTrainer.LoadNameValue, OlsLinearRegressionTrainer.ShortName)] -[assembly: LoadableClass(typeof(OlsLinearRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(OlsLinearRegressionModelParameters), null, typeof(SignatureLoadModel), "OLS Linear Regression Executor", - OlsLinearRegressionPredictor.LoaderSignature)] + OlsLinearRegressionModelParameters.LoaderSignature)] [assembly: 
LoadableClass(typeof(void), typeof(OlsLinearRegressionTrainer), null, typeof(SignatureEntryPointModule), OlsLinearRegressionTrainer.LoadNameValue)] namespace Microsoft.ML.Trainers.HalLearners { /// - public sealed class OlsLinearRegressionTrainer : TrainerEstimatorBase, OlsLinearRegressionPredictor> + public sealed class OlsLinearRegressionTrainer : TrainerEstimatorBase, OlsLinearRegressionModelParameters> { public sealed class Arguments : LearnerInputBaseWithWeight { @@ -111,10 +111,10 @@ private static Arguments ArgsInit(string featureColumn, return args; } - protected override RegressionPredictionTransformer MakeTransformer(OlsLinearRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(OlsLinearRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) + public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) => TrainTransformer(trainData, initPredictor: initialPredictor); protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -135,7 +135,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc private static Double ProbClamp(Double p) => Math.Max(0, Math.Min(p, 1)); - private protected override OlsLinearRegressionPredictor TrainModelCore(TrainContext context) + private protected override OlsLinearRegressionModelParameters TrainModelCore(TrainContext context) { using (var ch = Host.Start("Training")) { @@ -162,7 +162,7 @@ private protected override OlsLinearRegressionPredictor TrainModelCore(TrainCont } } - private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Factory cursorFactory, int featureCount) + 
private OlsLinearRegressionModelParameters TrainCore(IChannel ch, FloatLabelCursor.Factory cursorFactory, int featureCount) { Host.AssertValue(ch); ch.AssertValue(cursorFactory); @@ -293,14 +293,14 @@ private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Fac { // We would expect the solution to the problem to be exact in this case. ch.Info("Number of examples equals number of parameters, solution is exact but no statistics can be derived"); - return new OlsLinearRegressionPredictor(Host, in weights, bias, null, null, null, 1, float.NaN); + return new OlsLinearRegressionModelParameters(Host, in weights, bias); } Double rss = 0; // residual sum of squares Double tss = 0; // total sum of squares using (var cursor = cursorFactory.Create()) { - IValueMapper lrPredictor = new LinearRegressionPredictor(Host, in weights, bias); + IValueMapper lrPredictor = new LinearRegressionModelParameters(Host, in weights, bias); var lrMap = lrPredictor.GetMapper, float>(); float yh = default; while (cursor.MoveNext()) @@ -329,7 +329,7 @@ private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Fac // Also we can't estimate it, unless we can estimate the variance, which requires more examples than // parameters. 
if (!_perParameterSignificance || m >= n) - return new OlsLinearRegressionPredictor(Host, in weights, bias, null, null, null, rSquared, rSquaredAdjusted); + return new OlsLinearRegressionModelParameters(Host, in weights, bias, rSquared: rSquared, rSquaredAdjusted: rSquaredAdjusted); ch.Assert(!Double.IsNaN(rSquaredAdjusted)); var standardErrors = new Double[m]; @@ -376,7 +376,7 @@ private OlsLinearRegressionPredictor TrainCore(IChannel ch, FloatLabelCursor.Fac ch.Check(0 <= pValues[i] && pValues[i] <= 1, "p-Value calculated outside expected [0,1] range"); } - return new OlsLinearRegressionPredictor(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted); + return new OlsLinearRegressionModelParameters(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted); } internal static class Mkl @@ -536,10 +536,10 @@ public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment en /// /// A linear predictor for which per parameter significance statistics are available. /// - public sealed class OlsLinearRegressionPredictor : RegressionPredictor + public sealed class OlsLinearRegressionModelParameters : RegressionModelParameters { - public const string LoaderSignature = "OlsLinearRegressionExec"; - public const string RegistrationName = "OlsLinearRegressionPredictor"; + internal const string LoaderSignature = "OlsLinearRegressionExec"; + internal const string RegistrationName = "OlsLinearRegressionPredictor"; /// /// Name of input column. - /// Name of output column. + /// Name of the column resulting from the transformation of . Null means is replaced. /// What to replace the missing value with. /// If true, per-slot imputation of replacement is performed. /// Otherwise, replacement value is imputed for the entire vector column. This setting is ignored for scalars and variable vectors, /// where imputation is always for the entire column. 
- public ColumnInfo(string input, string output, ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, + public ColumnInfo(string input, string output = null, ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) { + Contracts.CheckNonWhiteSpace(input, nameof(input)); Input = input; - Output = output; + Output = output ?? input; ImputeBySlot = imputeBySlot; Replacement = replacementMode; } @@ -976,160 +978,4 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) public MissingValueReplacingTransformer Fit(IDataView input) => new MissingValueReplacingTransformer(_host, input, _columns); } - /// - /// Extension methods for the static-pipeline over objects. - /// - public static class NAReplacerExtensions - { - private readonly struct Config - { - public readonly bool ImputeBySlot; - public readonly MissingValueReplacingTransformer.ColumnInfo.ReplacementMode ReplacementMode; - - public Config(MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, - bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) - { - ImputeBySlot = imputeBySlot; - ReplacementMode = replacementMode; - } - } - - private interface IColInput - { - PipelineColumn Input { get; } - Config Config { get; } - } - - private sealed class OutScalar : Scalar, IColInput - { - public PipelineColumn Input { get; } - public Config Config { get; } - - public OutScalar(Scalar input, Config config) - : base(Reconciler.Inst, input) - { - Input = input; - Config = config; - } - } - - private sealed class OutVectorColumn : Vector, IColInput - { - public PipelineColumn Input { get; } - public Config Config { get; } - - public OutVectorColumn(Vector input, Config config) - : base(Reconciler.Inst, input) - { - Input = input; - Config = config; - } - - } - - private 
sealed class OutVarVectorColumn : VarVector, IColInput - { - public PipelineColumn Input { get; } - public Config Config { get; } - - public OutVarVectorColumn(VarVector input, Config config) - : base(Reconciler.Inst, input) - { - Input = input; - Config = config; - } - } - - private sealed class Reconciler : EstimatorReconciler - { - public static Reconciler Inst = new Reconciler(); - - private Reconciler() { } - - public override IEstimator Reconcile(IHostEnvironment env, - PipelineColumn[] toOutput, - IReadOnlyDictionary inputNames, - IReadOnlyDictionary outputNames, - IReadOnlyCollection usedNames) - { - var infos = new MissingValueReplacingTransformer.ColumnInfo[toOutput.Length]; - for (int i = 0; i < toOutput.Length; ++i) - { - var col = (IColInput)toOutput[i]; - infos[i] = new MissingValueReplacingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]], col.Config.ReplacementMode, col.Config.ImputeBySlot); - } - return new MissingValueReplacingEstimator(env, infos); - } - } - - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. - /// How NaN should be replaced - public static Scalar ReplaceNaNValues(this Scalar input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) - { - Contracts.CheckValue(input, nameof(input)); - return new OutScalar(input, new Config(replacementMode, false)); - } - - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. 
- /// How NaN should be replaced - public static Scalar ReplaceNaNValues(this Scalar input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) - { - Contracts.CheckValue(input, nameof(input)); - return new OutScalar(input, new Config(replacementMode, false)); - } - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. - /// How NaN should be replaced - /// If true, per-slot imputation of replacement is performed. - /// Otherwise, replacement value is imputed for the entire vector column. This setting is ignored for scalars and variable vectors, - /// where imputation is always for the entire column. - public static Vector ReplaceNaNValues(this Vector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, new Config(replacementMode, imputeBySlot)); - } - - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. - /// How NaN should be replaced - /// If true, per-slot imputation of replacement is performed. - /// Otherwise, replacement value is imputed for the entire vector column. This setting is ignored for scalars and variable vectors, - /// where imputation is always for the entire column. 
- public static Vector ReplaceNaNValues(this Vector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, new Config(replacementMode, imputeBySlot)); - } - - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. - /// How NaN should be replaced - public static VarVector ReplaceNaNValues(this VarVector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input, new Config(replacementMode, false)); - } - /// - /// Scan through all rows and replace NaN values according to replacement strategy. - /// - /// Incoming data. 
- /// How NaN should be replaced - public static VarVector ReplaceNaNValues(this VarVector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input, new Config(replacementMode, false)); - } - } } diff --git a/src/Microsoft.ML.Transforms/OneHotEncodingTransformer.cs b/src/Microsoft.ML.Transforms/OneHotEncoding.cs similarity index 89% rename from src/Microsoft.ML.Transforms/OneHotEncodingTransformer.cs rename to src/Microsoft.ML.Transforms/OneHotEncoding.cs index 0ef0e506b1..1a617c63e4 100644 --- a/src/Microsoft.ML.Transforms/OneHotEncodingTransformer.cs +++ b/src/Microsoft.ML.Transforms/OneHotEncoding.cs @@ -11,8 +11,6 @@ using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using System; @@ -119,9 +117,9 @@ public Arguments() internal const string Summary = "Converts the categorical value into an indicator array by building a dictionary of categories based on the " + "data and using the id in the dictionary as the index in the array."; - public const string UserName = "Categorical Transform"; + internal const string UserName = "Categorical Transform"; - public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) + internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) { Contracts.CheckValue(env, nameof(env)); var h = env.Register("Categorical"); @@ -175,10 +173,24 @@ internal static class Defaults public const OneHotEncodingTransformer.OutputKind OutKind = OneHotEncodingTransformer.OutputKind.Ind; } + /// + /// Describes how the transformer handles one column pair. 
+ /// public class ColumnInfo : ValueToKeyMappingTransformer.ColumnInfo { public readonly OneHotEncodingTransformer.OutputKind OutputKind; - public ColumnInfo(string input, string output, OneHotEncodingTransformer.OutputKind outputKind = Defaults.OutKind, + /// + /// Describes how the transformer handles one column pair. + /// + /// Name of input column. + /// Name of the column resulting from the transformation of . Null means is replaced. + /// Output kind: Bag (multi-set vector), Ind (indicator vector), Key (index), or Binary encoded indicator vector. + /// Maximum number of terms to keep per column when auto-training. + /// How items should be ordered when vectorized. If chosen they will be in the order encountered. + /// If , items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). + /// List of terms. + public ColumnInfo(string input, string output=null, + OneHotEncodingTransformer.OutputKind outputKind = Defaults.OutKind, int maxNumTerms = ValueToKeyMappingEstimator.Defaults.MaxNumTerms, ValueToKeyMappingTransformer.SortOrder sort = ValueToKeyMappingEstimator.Defaults.Sort, string[] term = null) : base(input, output, maxNumTerms, sort, term, true) @@ -285,18 +297,18 @@ public static CommonOutputs.TransformOutput CatTransformDict(IHostEnvironment en } [TlcModule.EntryPoint(Name = "Transforms.CategoricalHashOneHotVectorizer", - Desc = OneHotHashEncodingTransformer.Summary, - UserName = OneHotHashEncodingTransformer.UserName, + Desc = OneHotHashEncoding.Summary, + UserName = OneHotHashEncoding.UserName, XmlInclude = new[] { @"", @""})] - public static CommonOutputs.TransformOutput CatTransformHash(IHostEnvironment env, OneHotHashEncodingTransformer.Arguments input) + public static CommonOutputs.TransformOutput CatTransformHash(IHostEnvironment env, OneHotHashEncoding.Arguments input) { Contracts.CheckValue(env, nameof(env)); var host = env.Register("CatTransformDict"); 
host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); - var xf = OneHotHashEncodingTransformer.Create(host, input, input.Data); + var xf = OneHotHashEncoding.Create(host, input, input.Data); return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs b/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs similarity index 84% rename from src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs rename to src/Microsoft.ML.Transforms/OneHotHashEncoding.cs index 53e75dfd8a..61e810219f 100644 --- a/src/Microsoft.ML.Transforms/OneHotHashEncodingTransformer.cs +++ b/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs @@ -18,12 +18,12 @@ using System.Linq; using System.Text; -[assembly: LoadableClass(OneHotHashEncodingTransformer.Summary, typeof(IDataTransform), typeof(OneHotHashEncodingTransformer), typeof(OneHotHashEncodingTransformer.Arguments), typeof(SignatureDataTransform), - OneHotHashEncodingTransformer.UserName, "CategoricalHashTransform", "CatHashTransform", "CategoricalHash", "CatHash")] +[assembly: LoadableClass(OneHotHashEncoding.Summary, typeof(IDataTransform), typeof(OneHotHashEncoding), typeof(OneHotHashEncoding.Arguments), typeof(SignatureDataTransform), + OneHotHashEncoding.UserName, "CategoricalHashTransform", "CatHashTransform", "CategoricalHash", "CatHash")] namespace Microsoft.ML.Transforms.Categorical { - public sealed class OneHotHashEncodingTransformer : ITransformer, ICanSaveModel + public sealed class OneHotHashEncoding : ITransformer, ICanSaveModel { public sealed class Column : OneToOneColumn { @@ -129,14 +129,17 @@ public sealed class Arguments : TransformInputBase public const string UserName = "Categorical Hash Transform"; /// - /// A helper method to create . + /// A helper method to create . /// /// Host Environment. /// Input . This is the output from previous transform or loader. 
/// Name of the output column. /// Name of the column to be transformed. If this is null '' will be used. /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. /// The type of output expected. public static IDataView Create(IHostEnvironment env, IDataView input, @@ -175,7 +178,7 @@ internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDat private readonly TransformerChain _transformer; - internal OneHotHashEncodingTransformer(HashingEstimator hash, IEstimator keyToVector, IDataView input) + internal OneHotHashEncoding(HashingEstimator hash, IEstimator keyToVector, IDataView input) { var chain = hash.Append(keyToVector); _transformer = chain.Fit(input); @@ -195,7 +198,7 @@ internal OneHotHashEncodingTransformer(HashingEstimator hash, IEstimator /// Estimator which takes set of columns and produce for each column indicator array. Use hashing to determine indicator position. /// - public sealed class OneHotHashEncodingEstimator : IEstimator + public sealed class OneHotHashEncodingEstimator : IEstimator { internal static class Defaults { @@ -220,7 +223,10 @@ public sealed class ColumnInfo /// Number of bits to hash into. Must be between 1 and 31, inclusive. /// Hashing seed. /// Whether the position of each term should be included in the hash. - /// Limit the number of keys used to generate the slot name to this many. 
0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public ColumnInfo(string input, string output, OneHotEncodingTransformer.OutputKind outputKind = Defaults.OutputKind, int hashBits = Defaults.HashBits, @@ -242,7 +248,10 @@ public ColumnInfo(string input, string output, /// Name of the input column. /// Name of the output column. If this is null '' will be used. /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. /// The type of output expected. 
public OneHotHashEncodingEstimator(IHostEnvironment env, string inputColumn, @@ -307,7 +316,7 @@ public OneHotHashEncodingEstimator(IHostEnvironment env, params ColumnInfo[] col public SchemaShape GetOutputSchema(SchemaShape inputSchema) => _hash.Append(_toSomething).GetOutputSchema(inputSchema); - public OneHotHashEncodingTransformer Fit(IDataView input) => new OneHotHashEncodingTransformer(_hash, _toSomething, input); + public OneHotHashEncoding Fit(IDataView input) => new OneHotHashEncoding(_hash, _toSomething, input); } public static class CategoricalHashStaticExtensions @@ -421,7 +430,10 @@ public override IEstimator Reconcile(IHostEnvironment env, Pipelin /// Amount of bits to use for hashing. /// Seed value used for hashing. /// Whether the position of each term should be included in the hash. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we construct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column. Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public static Vector OneHotHashEncoding(this Scalar input, OneHotHashScalarOutputKind outputKind = (OneHotHashScalarOutputKind)DefOut, int hashBits = DefHashBits, uint seed = DefSeed, bool ordered = DefOrdered, int invertHash = DefInvertHash) { @@ -437,7 +449,10 @@ public static Vector OneHotHashEncoding(this Scalar input, OneHot /// Amount of bits to use for hashing. /// Seed value used for hashing. /// Whether the position of each term should be included in the hash. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. 
+ /// During hashing we construct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column. Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public static Vector OneHotHashEncoding(this Vector input, OneHotHashVectorOutputKind outputKind = DefOut, int hashBits = DefHashBits, uint seed = DefSeed, bool ordered = DefOrdered, int invertHash = DefInvertHash) { @@ -453,7 +468,10 @@ public static Vector OneHotHashEncoding(this Vector input, OneHot /// Amount of bits to use for hashing. /// Seed value used for hashing. /// Whether the position of each term should be included in the hash. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we construct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column. Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
public static Vector OneHotHashEncoding(this VarVector input, OneHotHashVectorOutputKind outputKind = DefOut, int hashBits = DefHashBits, uint seed = DefSeed, bool ordered = DefOrdered, int invertHash = DefInvertHash) { diff --git a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs index ffe54c7ea4..78909b7bb7 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramHashingTransformer.cs @@ -1,33 +1,41 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Float = System.Single; - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Text; +using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Transforms.Text; -using Microsoft.ML.Data; +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Linq; +using System.Text; -[assembly: LoadableClass(typeof(NgramHashingTransformer), typeof(NgramHashingTransformer.Arguments), typeof(SignatureDataTransform), +[assembly: LoadableClass(NgramHashingTransformer.Summary, typeof(IDataTransform), typeof(NgramHashingTransformer), typeof(NgramHashingTransformer.Arguments), typeof(SignatureDataTransform), "Ngram Hash Transform", "NgramHashTransform", "NgramHash")] -[assembly: LoadableClass(typeof(NgramHashingTransformer), null, typeof(SignatureLoadDataTransform), +[assembly: LoadableClass(NgramHashingTransformer.Summary, typeof(IDataTransform), 
typeof(NgramHashingTransformer), null, typeof(SignatureLoadDataTransform), + "Ngram Hash Transform", NgramHashingTransformer.LoaderSignature)] + +[assembly: LoadableClass(NgramHashingTransformer.Summary, typeof(NgramHashingTransformer), null, typeof(SignatureLoadModel), + "Ngram Hash Transform", NgramHashingTransformer.LoaderSignature)] + +[assembly: LoadableClass(typeof(IRowMapper), typeof(NgramHashingTransformer), null, typeof(SignatureLoadRowMapper), "Ngram Hash Transform", NgramHashingTransformer.LoaderSignature)] namespace Microsoft.ML.Transforms.Text { - using Conditional = System.Diagnostics.ConditionalAttribute; - - public sealed class NgramHashingTransformer : RowToRowMapperTransformBase + /// + /// Produces a bag of counts of ngrams (sequences of consecutive words of length 1-n) in a given text. + /// It does so by hashing each ngram and using the hash value as the index in the bag. + /// + public sealed class NgramHashingTransformer : RowToRowTransformerBase { public sealed class Column : ManyToOneColumn { @@ -61,10 +69,6 @@ public sealed class Column : ManyToOneColumn ShortName = "ih")] public int? InvertHash; - // For all source columns, use these friendly names for the source - // column names instead of the real column names. 
- internal string[] FriendlyNames; - public static Column Parse(string str) { Contracts.AssertNonEmpty(str); @@ -116,162 +120,137 @@ public sealed class Arguments public Column[] Column; [Argument(ArgumentType.AtMostOnce, HelpText = "Maximum ngram length", ShortName = "ngram", SortOrder = 3)] - public int NgramLength = ArgumentDefaults.NgramLength; + public int NgramLength = NgramHashingEstimator.Defaults.NgramLength; [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to include all ngram lengths up to " + nameof(NgramLength) + " or only " + nameof(NgramLength), ShortName = "all", SortOrder = 4)] - public bool AllLengths = ArgumentDefaults.AllLengths; + public bool AllLengths = NgramHashingEstimator.Defaults.AllLengths; [Argument(ArgumentType.AtMostOnce, HelpText = "Maximum number of tokens to skip when constructing an ngram", ShortName = "skips", SortOrder = 3)] - public int SkipLength = ArgumentDefaults.SkipLength; + public int SkipLength = NgramHashingEstimator.Defaults.SkipLength; [Argument(ArgumentType.AtMostOnce, HelpText = "Number of bits to hash into. 
Must be between 1 and 30, inclusive.", ShortName = "bits", SortOrder = 2)] - public int HashBits = ArgumentDefaults.HashBits; + public int HashBits = NgramHashingEstimator.Defaults.HashBits; [Argument(ArgumentType.AtMostOnce, HelpText = "Hashing seed")] - public uint Seed = ArgumentDefaults.Seed; + public uint Seed = NgramHashingEstimator.Defaults.Seed; [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to rehash unigrams", ShortName = "rehash")] - public bool RehashUnigrams = ArgumentDefaults.RehashUnigrams; + public bool RehashUnigrams = NgramHashingEstimator.Defaults.RehashUnigrams; [Argument(ArgumentType.AtMostOnce, HelpText = "Whether the position of each source column should be included in the hash (when there are multiple source columns).", ShortName = "ord", SortOrder = 6)] - public bool Ordered = ArgumentDefaults.Ordered; + public bool Ordered = NgramHashingEstimator.Defaults.Ordered; [Argument(ArgumentType.AtMostOnce, HelpText = "Limit the number of keys used to generate the slot name to this many. 
0 means no invert hashing, -1 means no limit.", ShortName = "ih")] - public int InvertHash = ArgumentDefaults.InvertHash; + public int InvertHash = NgramHashingEstimator.Defaults.InvertHash; } - internal static class ArgumentDefaults - { - internal const int NgramLength = 2; - internal const bool AllLengths = true; - internal const int SkipLength = 0; - internal const int HashBits = 16; - internal const uint Seed = 314489979; - internal const bool RehashUnigrams = false; - internal const bool Ordered = true; - internal const int InvertHash = 0; - } - - private sealed class Bindings : ManyToOneColumnBindingsBase - { - public readonly VectorType[] Types; - private readonly NgramHashingTransformer _parent; - - public Bindings(Arguments args, Schema schemaInput, NgramHashingTransformer parent) - : base(args.Column, schemaInput, TestTypes) - { - Types = new VectorType[args.Column.Length]; - _parent = parent; - } - - public Bindings(ModelLoadContext ctx, Schema schemaInput, NgramHashingTransformer parent) - : base(ctx, schemaInput, TestTypes) - { - Types = new VectorType[Infos.Length]; - _parent = parent; - } - - private static string TestTypes(ColumnType[] types) - { - const string reason = "Expected vector of Key type, and Key is convertable to U4"; - Contracts.AssertValue(types); - for (int i = 0; i < types.Length; i++) - { - var type = types[i]; - if (!type.IsVector) - return reason; - if (!type.ItemType.IsKey) - return reason; - // Can only accept key types that can be converted to U4. 
- if (type.ItemType.KeyCount == 0 && type.ItemType.RawKind > DataKind.U4) - return reason; - } - - return null; - } - - protected override ColumnType GetColumnTypeCore(int iinfo) - { - Contracts.Assert(0 <= iinfo & iinfo < Infos.Length); - Contracts.Assert(Types[iinfo] != null); - return Types[iinfo]; - } - - protected override ColumnType GetMetadataTypeCore(string kind, int iinfo) - { - Contracts.AssertNonEmpty(kind); - Contracts.Assert(0 <= iinfo && iinfo < Infos.Length); - - if (kind == MetadataUtils.Kinds.SlotNames && _parent._slotNamesTypes != null) - return _parent._slotNamesTypes[iinfo]; - return base.GetMetadataTypeCore(kind, iinfo); - } + internal const string Summary = "Produces a bag of counts of ngrams (sequences of consecutive values of length 1-n) in a given vector of keys. " + + "It does so by hashing each ngram and using the hash value as the index in the bag."; - protected override IEnumerable> GetMetadataTypesCore(int iinfo) - { - if (_parent._slotNamesTypes != null && _parent._slotNamesTypes[iinfo] != null) - return base.GetMetadataTypesCore(iinfo).Prepend(_parent._slotNamesTypes[iinfo].GetPair(MetadataUtils.Kinds.SlotNames)); - return base.GetMetadataTypesCore(iinfo); - } + internal const string LoaderSignature = "NgramHashTransform"; - protected override void GetMetadataCore(string kind, int iinfo, ref TValue value) - { - if (kind == MetadataUtils.Kinds.SlotNames && _parent._slotNames != null && _parent._slotNames[iinfo].Length > 0) - { - MetadataUtils.MetadataGetter>> getTerms = _parent.GetTerms; - getTerms.Marshal(iinfo, ref value); - return; - } - base.GetMetadataCore(kind, iinfo, ref value); - } + private static VersionInfo GetVersionInfo() + { + return new VersionInfo( + modelSignature: "HASHGRAM", + // verWrittenCur: 0x00010001, // Initial + // verWrittenCur: 0x00010002, // Invert hash key values, hash fix + verWrittenCur: 0x00010003, // Get rid of writing float size in model context and change saving format + verReadableCur: 0x00010003, 
+ verWeCanReadBack: 0x00010003, + loaderSignature: LoaderSignature, + loaderAssemblyName: typeof(NgramHashingTransformer).Assembly.FullName); } - private sealed class ColInfoEx + /// + /// Describes how the transformer handles one pair of mulitple inputs - singular output columns. + /// + public sealed class ColumnInfo { + public readonly string[] Inputs; + public readonly string Output; public readonly int NgramLength; public readonly int SkipLength; - + public readonly bool AllLengths; public readonly int HashBits; public readonly uint Seed; - public readonly bool Rehash; public readonly bool Ordered; - public readonly bool AllLengths; + public readonly int InvertHash; + public readonly bool RehashUnigrams; + // For all source columns, use these friendly names for the source + // column names instead of the real column names. + internal string[] FriendlyNames; - public ColInfoEx(Column item, Arguments args) + /// + /// Describes how the transformer handles one column pair. + /// + /// Name of input columns. + /// Name of output column. + /// Maximum ngram length. + /// Maximum number of tokens to skip when constructing an ngram. + /// "Whether to store all ngram lengths up to ngramLength, or only ngramLength. + /// Number of bits to hash into. Must be between 1 and 31, inclusive. + /// Hashing seed. + /// Whether the position of each term should be included in the hash. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. + /// Whether to rehash unigrams. 
+ public ColumnInfo(string[] inputs, string output, + int ngramLength = NgramHashingEstimator.Defaults.NgramLength, + int skipLength = NgramHashingEstimator.Defaults.SkipLength, + bool allLengths = NgramHashingEstimator.Defaults.AllLengths, + int hashBits = NgramHashingEstimator.Defaults.HashBits, + uint seed = NgramHashingEstimator.Defaults.Seed, + bool ordered = NgramHashingEstimator.Defaults.Ordered, + int invertHash = NgramHashingEstimator.Defaults.InvertHash, + bool rehashUnigrams = NgramHashingEstimator.Defaults.RehashUnigrams) { - NgramLength = item.NgramLength ?? args.NgramLength; - Contracts.CheckUserArg(0 < NgramLength && NgramLength <= NgramBufferBuilder.MaxSkipNgramLength, nameof(item.NgramLength)); - SkipLength = item.SkipLength ?? args.SkipLength; - Contracts.CheckUserArg(0 <= SkipLength && SkipLength <= NgramBufferBuilder.MaxSkipNgramLength, nameof(item.SkipLength)); + Contracts.CheckValue(inputs, nameof(inputs)); + Contracts.CheckParam(!inputs.Any(r => string.IsNullOrWhiteSpace(r)), nameof(inputs), + "Contained some null or empty items"); + if (invertHash < -1) + throw Contracts.ExceptParam(nameof(invertHash), "Value too small, must be -1 or larger"); + // If the bits is 31 or higher, we can't declare a KeyValues of the appropriate length, + // this requiring a VBuffer of length 1u << 31 which exceeds int.MaxValue. + if (invertHash != 0 && hashBits >= 31) + throw Contracts.ExceptParam(nameof(hashBits), $"Cannot support invertHash for a {0} bit hash. 30 is the maximum possible.", hashBits); + if (NgramLength + SkipLength > NgramBufferBuilder.MaxSkipNgramLength) { - throw Contracts.ExceptUserArg(nameof(item.SkipLength), - "The sum of skipLength and ngramLength must be less than or equal to {0}", - NgramBufferBuilder.MaxSkipNgramLength); + throw Contracts.ExceptUserArg(nameof(skipLength), + $"The sum of skipLength and ngramLength must be less than or equal to {NgramBufferBuilder.MaxSkipNgramLength}"); } - - HashBits = item.HashBits ?? 
args.HashBits; - Contracts.CheckUserArg(1 <= HashBits && HashBits <= 30, nameof(item.HashBits)); - Seed = item.Seed ?? args.Seed; - Rehash = item.RehashUnigrams ?? args.RehashUnigrams; - Ordered = item.Ordered ?? args.Ordered; - AllLengths = item.AllLengths ?? args.AllLengths; + FriendlyNames = null; + Inputs = inputs; + Output = output; + NgramLength = ngramLength; + SkipLength = skipLength; + AllLengths = allLengths; + HashBits = hashBits; + Seed = seed; + Ordered = ordered; + InvertHash = invertHash; + RehashUnigrams = rehashUnigrams; } - - public ColInfoEx(ModelLoadContext ctx) + internal ColumnInfo(ModelLoadContext ctx) { Contracts.AssertValue(ctx); // *** Binary format *** + // size of Inputs + // string[] Inputs; + // string Output; // int: NgramLength // int: SkipLength // int: HashBits @@ -279,7 +258,11 @@ public ColInfoEx(ModelLoadContext ctx) // byte: Rehash // byte: Ordered // byte: AllLengths - + var inputsLength = ctx.Reader.ReadInt32(); + Inputs = new string[inputsLength]; + for (int i = 0; i < Inputs.Length; i++) + Inputs[i] = ctx.LoadNonEmptyString(); + Output = ctx.LoadNonEmptyString(); NgramLength = ctx.Reader.ReadInt32(); Contracts.CheckDecode(0 < NgramLength && NgramLength <= NgramBufferBuilder.MaxSkipNgramLength); SkipLength = ctx.Reader.ReadInt32(); @@ -288,16 +271,19 @@ public ColInfoEx(ModelLoadContext ctx) HashBits = ctx.Reader.ReadInt32(); Contracts.CheckDecode(1 <= HashBits && HashBits <= 30); Seed = ctx.Reader.ReadUInt32(); - Rehash = ctx.Reader.ReadBoolByte(); + RehashUnigrams = ctx.Reader.ReadBoolByte(); Ordered = ctx.Reader.ReadBoolByte(); AllLengths = ctx.Reader.ReadBoolByte(); } - public void Save(ModelSaveContext ctx) + internal void Save(ModelSaveContext ctx) { Contracts.AssertValue(ctx); // *** Binary format *** + // size of Inputs + // string[] Inputs; + // string Output; // int: NgramLength // int: SkipLength // int: HashBits @@ -305,489 +291,448 @@ public void Save(ModelSaveContext ctx) // byte: Rehash // byte: Ordered // 
byte: AllLengths + Contracts.Assert(Inputs.Length > 0); + ctx.Writer.Write(Inputs.Length); + for (int i = 0; i < Inputs.Length; i++) + ctx.SaveNonEmptyString(Inputs[i]); + ctx.SaveNonEmptyString(Output); - Contracts.Assert(NgramLength > 0); + Contracts.Assert(0 < NgramLength && NgramLength <= NgramBufferBuilder.MaxSkipNgramLength); ctx.Writer.Write(NgramLength); - Contracts.Assert(SkipLength >= 0); + Contracts.Assert(0 <= SkipLength && SkipLength <= NgramBufferBuilder.MaxSkipNgramLength); + Contracts.Assert(NgramLength + SkipLength <= NgramBufferBuilder.MaxSkipNgramLength); ctx.Writer.Write(SkipLength); Contracts.Assert(1 <= HashBits && HashBits <= 30); ctx.Writer.Write(HashBits); ctx.Writer.Write(Seed); - ctx.Writer.WriteBoolByte(Rehash); + ctx.Writer.WriteBoolByte(RehashUnigrams); ctx.Writer.WriteBoolByte(Ordered); ctx.Writer.WriteBoolByte(AllLengths); } } - internal const string Summary = "Produces a bag of counts of ngrams (sequences of consecutive values of length 1-n) in a given vector of keys. " - + "It does so by hashing each ngram and using the hash value as the index in the bag."; - - public const string LoaderSignature = "NgramHashTransform"; - private static VersionInfo GetVersionInfo() - { - return new VersionInfo( - modelSignature: "HASHGRAM", - // verWrittenCur: 0x00010001, // Initial - verWrittenCur: 0x00010002, // Invert hash key values, hash fix - verReadableCur: 0x00010002, - verWeCanReadBack: 0x00010002, - loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(NgramHashingTransformer).Assembly.FullName); - } - - private readonly Bindings _bindings; - private readonly ColInfoEx[] _exes; - + private readonly ImmutableArray _columns; private readonly VBuffer>[] _slotNames; private readonly ColumnType[] _slotNamesTypes; - private const string RegistrationName = "NgramHash"; - /// - /// Public constructor corresponding to SignatureDataTransform. 
+ /// Constructor for case where you don't need to 'train' transform on data, for example, InvertHash for all columns set to zero. /// - public NgramHashingTransformer(IHostEnvironment env, Arguments args, IDataView input) - : base(env, RegistrationName, input) + /// Host Environment. + /// Description of dataset columns and how to process them. + public NgramHashingTransformer(IHostEnvironment env, params ColumnInfo[] columns) : + base(Contracts.CheckRef(env, nameof(env)).Register(nameof(NgramHashingTransformer))) + { + _columns = columns.ToImmutableArray(); + foreach (var column in _columns) + { + if (column.InvertHash != 0) + throw Host.ExceptParam(nameof(columns), $"Found colunm with {nameof(column.InvertHash)} set to non zero value, please use { nameof(NgramHashingEstimator)} instead"); + } + } + + internal NgramHashingTransformer(IHostEnvironment env, IDataView input, params ColumnInfo[] columns) : + base(Contracts.CheckRef(env, nameof(env)).Register(nameof(NgramHashingTransformer))) { - Host.CheckValue(args, nameof(args)); - Host.CheckUserArg(Utils.Size(args.Column) > 0, nameof(args.Column)); - - _bindings = new Bindings(args, Source.Schema, this); - _exes = new ColInfoEx[args.Column.Length]; - List invertIinfos = null; - int[] invertHashMaxCounts = new int[args.Column.Length]; - for (int iinfo = 0; iinfo < _exes.Length; iinfo++) + Contracts.CheckValue(columns, nameof(columns)); + _columns = columns.ToImmutableArray(); + + // Let's validate input schema and check which columns requried invertHash. 
+ int[] invertHashMaxCounts = new int[_columns.Length]; + HashSet columnWithInvertHash = new HashSet(); + HashSet sourceColumnsForInvertHash = new HashSet(); + for (int i = 0; i < _columns.Length; i++) { - _exes[iinfo] = new ColInfoEx(args.Column[iinfo], args); - var invertHashMaxCount = GetAndVerifyInvertHashMaxCount(args, args.Column[iinfo], _exes[iinfo]); + int invertHashMaxCount; + if (_columns[i].InvertHash == -1) + invertHashMaxCount = int.MaxValue; + else + invertHashMaxCount = _columns[i].InvertHash; if (invertHashMaxCount > 0) { - Utils.Add(ref invertIinfos, iinfo); - invertHashMaxCounts[iinfo] = invertHashMaxCount; + columnWithInvertHash.Add(i); + invertHashMaxCounts[i] = invertHashMaxCount; + for (int j = 0; j < _columns[i].Inputs.Length; j++) + { + if (!input.Schema.TryGetColumnIndex(_columns[i].Inputs[j], out int srcCol)) + throw Host.ExceptSchemaMismatch(nameof(input), "input", _columns[i].Inputs[j]); + var columnType = input.Schema.GetColumnType(srcCol); + if (!NgramHashingEstimator.IsColumnTypeValid(input.Schema.GetColumnType(srcCol))) + throw Host.ExceptSchemaMismatch(nameof(input), "input", _columns[i].Inputs[j], NgramHashingEstimator.ExpectedColumnType, columnType.ToString()); + sourceColumnsForInvertHash.Add(srcCol); + } } } - - InitColumnTypes(); - - if (Utils.Size(invertIinfos) > 0) + // In case of invertHash set to non zero value for at least one column. + if (Utils.Size(columnWithInvertHash) > 0) { - // Build the invert hashes if we actually had any. 
- var dstSrcs = new HashSet(invertIinfos.Select(i => _bindings.MapIinfoToCol(i))); - var inputPred = _bindings.GetDependencies(dstSrcs.Contains); - var active = _bindings.GetActive(dstSrcs.Contains); - string[][] friendlyNames = args.Column.Select(c => c.FriendlyNames).ToArray(); - var helper = new InvertHashHelper(this, friendlyNames, inputPred, invertHashMaxCounts); - - using (RowCursor srcCursor = input.GetRowCursor(inputPred)) - using (var dstCursor = new Cursor(this, srcCursor, active, helper.Decorate)) + var active = new bool[1]; + string[][] friendlyNames = _columns.Select(c => c.FriendlyNames).ToArray(); + // We will create invert hash helper class, which would store in itself all original ngrams and their mapping into hash values. + var helper = new InvertHashHelper(this, input.Schema, friendlyNames, sourceColumnsForInvertHash.Contains, invertHashMaxCounts); + // in order to get all original ngrams we have to go data in same way as we would process it, so let's create mapper with decorate function. + var mapper = new Mapper(this, input.Schema, helper.Decorate); + // Let's create cursor to iterate over input data. + using (var rowCursor = input.GetRowCursor(sourceColumnsForInvertHash.Contains)) { - var allGetters = InvertHashHelper.CallAllGetters(dstCursor); - while (dstCursor.MoveNext()) - allGetters(); + Action disp; + // We create mapper getters on top of input cursor + var del = (mapper as IRowMapper).CreateGetters(rowCursor, columnWithInvertHash.Contains, out disp); + var valueGetters = new ValueGetter>[columnWithInvertHash.Count]; + for (int i = 0; i < columnWithInvertHash.Count; i++) + valueGetters[i] = del[i] as ValueGetter>; + VBuffer value = default; + // and invoke each getter for each row. + while (rowCursor.MoveNext()) + { + for (int i = 0; i < columnWithInvertHash.Count; i++) + valueGetters[i](ref value); + } + // decorate function of helper object captured all encountered ngrams so, we ask it to give us metadata information for slot names. 
+ _slotNames = helper.SlotNamesMetadata(out _slotNamesTypes); } - _slotNames = helper.SlotNamesMetadata(out _slotNamesTypes); } } - private NgramHashingTransformer(IHost host, ModelLoadContext ctx, IDataView input) - : base(host, input) + public override void Save(ModelSaveContext ctx) { - Host.AssertValue(ctx); + Host.CheckValue(ctx, nameof(ctx)); + ctx.CheckAtModel(); + ctx.SetVersionInfo(GetVersionInfo()); // *** Binary format *** - // int: sizeof(Float) - // bindings - // for each added column - // ColInfoEx - - int cbFloat = ctx.Reader.ReadInt32(); - Host.CheckDecode(cbFloat == sizeof(Float)); - _bindings = new Bindings(ctx, Source.Schema, this); - _exes = new ColInfoEx[_bindings.Infos.Length]; - - for (int iinfo = 0; iinfo < _bindings.Infos.Length; iinfo++) - _exes[iinfo] = new ColInfoEx(ctx); - InitColumnTypes(); - TextModelHelper.LoadAll(Host, ctx, _exes.Length, out _slotNames, out _slotNamesTypes); + // int number of columns + // columns + ctx.Writer.Write(_columns.Length); + foreach (var column in _columns) + column.Save(ctx); + TextModelHelper.SaveAll(Host, ctx, _columns.Length, _slotNames); } - public static NgramHashingTransformer Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) - { - Contracts.CheckValue(env, nameof(env)); - var h = env.Register(RegistrationName); - h.CheckValue(ctx, nameof(ctx)); - h.CheckValue(input, nameof(input)); - ctx.CheckAtModel(GetVersionInfo()); - return h.Apply("Loading Model", ch => new NgramHashingTransformer(h, ctx, input)); - } + // Factory method for SignatureLoadDataTransform. + private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, IDataView input) + => Create(env, ctx).MakeDataTransform(input); - public override void Save(ModelSaveContext ctx) + // Factory method for SignatureLoadRowMapper. 
+ private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, ISchema inputSchema) + => Create(env, ctx).MakeRowMapper(Schema.Create(inputSchema)); + + private NgramHashingTransformer(IHostEnvironment env, ModelLoadContext ctx) : + base(Contracts.CheckRef(env, nameof(env)).Register(nameof(NgramHashingTransformer))) { Host.CheckValue(ctx, nameof(ctx)); - ctx.CheckAtModel(); - ctx.SetVersionInfo(GetVersionInfo()); + ctx.CheckAtModel(GetVersionInfo()); + var columnsLength = ctx.Reader.ReadInt32(); + var columns = new ColumnInfo[columnsLength]; // *** Binary format *** - // int: sizeof(Float) - // bindings - // for each added column - // ColInfoEx - - ctx.Writer.Write(sizeof(Float)); - _bindings.Save(ctx); - for (int iinfo = 0; iinfo < _exes.Length; iinfo++) - _exes[iinfo].Save(ctx); - TextModelHelper.SaveAll(Host, ctx, _exes.Length, _slotNames); + // int number of columns + // columns + for (int i = 0; i < columnsLength; i++) + columns[i] = new ColumnInfo(ctx); + _columns = columns.ToImmutableArray(); + TextModelHelper.LoadAll(Host, ctx, columnsLength, out _slotNames, out _slotNamesTypes); } - private void InitColumnTypes() + // Factory method for SignatureDataTransform. + private static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) { - for (int iinfo = 0; iinfo < _exes.Length; iinfo++) - _bindings.Types[iinfo] = new VectorType(NumberType.Float, 1 << _exes[iinfo].HashBits); - } + Contracts.CheckValue(env, nameof(env)); + env.CheckValue(args, nameof(args)); + env.CheckValue(input, nameof(input)); - private static int GetAndVerifyInvertHashMaxCount(Arguments args, Column col, ColInfoEx ex) - { - var invertHashMaxCount = col.InvertHash ?? 
args.InvertHash; - if (invertHashMaxCount != 0) + env.CheckValue(args.Column, nameof(args.Column)); + var cols = new ColumnInfo[args.Column.Length]; + using (var ch = env.Start("ValidateArgs")) { - if (invertHashMaxCount == -1) - invertHashMaxCount = int.MaxValue; - Contracts.CheckUserArg(invertHashMaxCount > 0, nameof(args.InvertHash), "Value too small, must be -1 or larger"); - // If the bits is 31 or higher, we can't declare a KeyValues of the appropriate length, - // this requiring a VBuffer of length 1u << 31 which exceeds int.MaxValue. - if (ex.HashBits >= 31) - throw Contracts.ExceptUserArg(nameof(args.InvertHash), "Cannot support invertHash for a {0} bit hash. 30 is the maximum possible.", ex.HashBits); + + for (int i = 0; i < cols.Length; i++) + { + var item = args.Column[i]; + cols[i] = new ColumnInfo(item.Source ?? new string[] { item.Name }, + item.Name, + item.NgramLength ?? args.NgramLength, + item.SkipLength ?? args.SkipLength, + item.AllLengths ?? args.AllLengths, + item.HashBits ?? args.HashBits, + item.Seed ?? args.Seed, + item.Ordered ?? args.Ordered, + item.InvertHash ?? args.InvertHash, + item.RehashUnigrams ?? args.RehashUnigrams + ); + }; } - return invertHashMaxCount; + return new NgramHashingTransformer(env, input, cols).MakeDataTransform(input); } - private void GetTerms(int iinfo, ref VBuffer> dst) + // Factory method for SignatureLoadModel. 
+ private static NgramHashingTransformer Create(IHostEnvironment env, ModelLoadContext ctx) { - Host.Assert(0 <= iinfo && iinfo < _exes.Length); - Host.Assert(_slotNames[iinfo].Length > 0); - _slotNames[iinfo].CopyTo(ref dst); + Contracts.CheckValue(env, nameof(env)); + var host = env.Register(nameof(NgramHashingTransformer)); + return new NgramHashingTransformer(host, ctx); } - private NgramIdFinder GetNgramIdFinder(int iinfo) + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, schema); + + private sealed class Mapper : MapperBase { - uint mask = (1U << _exes[iinfo].HashBits) - 1; - int ngramLength = _exes[iinfo].NgramLength; - bool rehash = _exes[iinfo].Rehash; - bool ordered = _exes[iinfo].Ordered; - bool all = _exes[iinfo].AllLengths; - uint seed = _exes[iinfo].Seed; - - // REVIEW: Consider the case when: - // * The source key type has count == 2^n, where n is the number of hash bits - // * rehash == false - // * ngramLength == 1 - // * ordered == false - // Then one might expect that this produces the same result as KeyToVector with bagging. - // However, this shifts everything by one slot, and both the NA values and the original - // last hash slot get mapped to the first slot. - // One possible solution: - // * KeyToVector should have an option to add a slot (the zeroth slot) for NA - // * NgramHash should, when rehash is false (and perhaps when ordered is false?), - // add an extra slot at the beginning for the count of unigram missings. - // Alternatively, this could drop unigram NA values. 
- - if (!all && ngramLength > 1) + private readonly NgramHashingTransformer _parent; + private readonly ColumnType[] _types; + private readonly int[][] _srcIndices; + private readonly ColumnType[][] _srcTypes; + private readonly FinderDecorator _decorator; + + public Mapper(NgramHashingTransformer parent, Schema inputSchema, FinderDecorator decorator = null) : + base(Contracts.CheckRef(parent, nameof(parent)).Host.Register(nameof(Mapper)), inputSchema) { - if (ordered) - { - // !allLengths, ordered, value of rehash doesn't matter. - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - if (lim < ngramLength) - return -1; - var hash = Hashing.MurmurHash(seed, ngram, 0, lim); - if (icol > 0) - hash = Hashing.MurmurRound(hash, (uint)icol); - return (int)(Hashing.MixHash(hash) & mask); - }; - } - else + _parent = parent; + _decorator = decorator; + _types = new ColumnType[_parent._columns.Length]; + _srcIndices = new int[_parent._columns.Length][]; + _srcTypes = new ColumnType[_parent._columns.Length][]; + for (int i = 0; i < _parent._columns.Length; i++) { - // !allLengths, !ordered, value of rehash doesn't matter. 
- return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - if (lim < ngramLength) - return -1; - return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); - }; + _srcIndices[i] = new int[_parent._columns[i].Inputs.Length]; + _srcTypes[i] = new ColumnType[_parent._columns[i].Inputs.Length]; + for (int j = 0; j < _parent._columns[i].Inputs.Length; j++) + { + var srcName = _parent._columns[i].Inputs[j]; + if (!inputSchema.TryGetColumnIndex(srcName, out int srcCol)) + throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName); + var columnType = inputSchema.GetColumnType(srcCol); + if (!NgramHashingEstimator.IsColumnTypeValid(columnType)) + throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName, NgramHashingEstimator.ExpectedColumnType, columnType.ToString()); + var srcType = inputSchema.GetColumnType(srcCol); + _srcIndices[i][j] = srcCol; + _srcTypes[i][j] = srcType; + } + + _types[i] = new VectorType(NumberType.Float, 1 << _parent._columns[i].HashBits); } } - else if (rehash) + + private NgramIdFinder GetNgramIdFinder(int iinfo) { - if (ordered) + uint mask = (1U << _parent._columns[iinfo].HashBits) - 1; + int ngramLength = _parent._columns[iinfo].NgramLength; + bool rehash = _parent._columns[iinfo].RehashUnigrams; + bool ordered = _parent._columns[iinfo].Ordered; + bool all = _parent._columns[iinfo].AllLengths; + uint seed = _parent._columns[iinfo].Seed; + + // REVIEW: Consider the case when: + // * The source key type has count == 2^n, where n is the number of hash bits + // * rehash == false + // * ngramLength == 1 + // * ordered == false + // Then one might expect that this produces the same result as KeyToVector with bagging. + // However, this shifts everything by one slot, and both the NA values and the original + // last hash slot get mapped to the first slot. 
+ // One possible solution: + // * KeyToVector should have an option to add a slot (the zeroth slot) for NA + // * NgramHash should, when rehash is false (and perhaps when ordered is false?), + // add an extra slot at the beginning for the count of unigram missings. + // Alternatively, this could drop unigram NA values. + + if (!all && ngramLength > 1) { - // allLengths, rehash, ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - var hash = Hashing.MurmurHash(seed, ngram, 0, lim); - if (icol > 0) - hash = Hashing.MurmurRound(hash, (uint)icol); - return (int)(Hashing.MixHash(hash) & mask); - }; + if (ordered) + { + // !allLengths, ordered, value of rehash doesn't matter. + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + if (lim < ngramLength) + return -1; + var hash = Hashing.MurmurHash(seed, ngram, 0, lim); + if (icol > 0) + hash = Hashing.MurmurRound(hash, (uint)icol); + return (int)(Hashing.MixHash(hash) & mask); + }; + } + else + { + // !allLengths, !ordered, value of rehash doesn't matter. 
+ return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + if (lim < ngramLength) + return -1; + return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); + }; + } } - else + else if (rehash) { - // allLengths, rehash, !ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); - }; + if (ordered) + { + // allLengths, rehash, ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + var hash = Hashing.MurmurHash(seed, ngram, 0, lim); + if (icol > 0) + hash = Hashing.MurmurRound(hash, (uint)icol); + return (int)(Hashing.MixHash(hash) & mask); + }; + } + else + { + // allLengths, rehash, !ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); + }; + } } - } - else if (ngramLength > 1) - { - if (ordered) + else if (ngramLength > 1) { - // allLengths, !rehash, ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - uint hash; - if (lim == 1) - hash = ngram[0]; - else - hash = Hashing.MurmurHash(seed, ngram, 0, lim); - if (icol > 0) - hash = Hashing.MurmurRound(hash, (uint)icol); - return (int)(Hashing.MixHash(hash) & mask); - }; + if (ordered) + { + // allLengths, !rehash, ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + uint hash; + if (lim == 1) + hash = ngram[0]; + else + hash = Hashing.MurmurHash(seed, ngram, 0, lim); + if (icol > 0) + hash = Hashing.MurmurRound(hash, (uint)icol); + return (int)(Hashing.MixHash(hash) & mask); + }; + } + else + { + // allLengths, !rehash, !ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + 
AssertValid(ngram, ngramLength, lim, icol); + if (lim == 1) + return (int)(ngram[0] & mask); + return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); + }; + } } else { - // allLengths, !rehash, !ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - if (lim == 1) + if (ordered) + { + // ngramLength==1, !rehash, ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); + uint hash = ngram[0]; + if (icol > 0) + hash = Hashing.MurmurRound(hash, (uint)icol); + return (int)(Hashing.MixHash(hash) & mask); + }; + } + else + { + // ngramLength==1, !rehash, !ordered + return + (uint[] ngram, int lim, int icol, ref bool more) => + { + AssertValid(ngram, ngramLength, lim, icol); return (int)(ngram[0] & mask); - return (int)(Hashing.MurmurHash(seed, ngram, 0, lim) & mask); - }; + }; + } } } - else + + [Conditional("DEBUG")] + private void AssertValid(uint[] ngram, int ngramLength, int lim, int icol) { - if (ordered) - { - // ngramLength==1, !rehash, ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - uint hash = ngram[0]; - if (icol > 0) - hash = Hashing.MurmurRound(hash, (uint)icol); - return (int)(Hashing.MixHash(hash) & mask); - }; - } - else - { - // ngramLength==1, !rehash, !ordered - return - (uint[] ngram, int lim, int icol, ref bool more) => - { - AssertValid(ngram, ngramLength, lim, icol); - return (int)(ngram[0] & mask); - }; - } + Host.Assert(0 <= lim && lim <= Utils.Size(ngram)); + Host.Assert(Utils.Size(ngram) == ngramLength); + Host.Assert(icol >= 0); } - } - - [Conditional("DEBUG")] - private void AssertValid(uint[] ngram, int ngramLength, int lim, int icol) - { - Host.Assert(0 <= lim && lim <= Utils.Size(ngram)); - Host.Assert(Utils.Size(ngram) == ngramLength); - Host.Assert(icol >= 0); - } - - public override Schema OutputSchema => _bindings.AsSchema; - - 
protected override bool? ShouldUseParallelCursors(Func predicate) - { - Host.AssertValue(predicate, "predicate"); - - // Prefer parallel cursors iff some of our columns are active, otherwise, don't care. - if (_bindings.AnyNewColumnsActive(predicate)) - return true; - return null; - } - protected override RowCursor GetRowCursorCore(Func predicate, Random rand = null) - { - Host.AssertValue(predicate, "predicate"); - Host.AssertValueOrNull(rand); - - var inputPred = _bindings.GetDependencies(predicate); - var active = _bindings.GetActive(predicate); - var input = Source.GetRowCursor(inputPred, rand); - return new Cursor(this, input, active); - } - - public sealed override RowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, - Func predicate, int n, Random rand = null) - { - Host.CheckValue(predicate, nameof(predicate)); - Host.CheckValueOrNull(rand); - - var inputPred = _bindings.GetDependencies(predicate); - var active = _bindings.GetActive(predicate); - var inputs = Source.GetRowCursorSet(out consolidator, inputPred, n, rand); - Host.AssertNonEmpty(inputs); - - if (inputs.Length == 1 && n > 1 && _bindings.AnyNewColumnsActive(predicate)) - inputs = DataViewUtils.CreateSplitCursors(out consolidator, Host, inputs[0], n); - Host.AssertNonEmpty(inputs); - - var cursors = new RowCursor[inputs.Length]; - for (int i = 0; i < inputs.Length; i++) - cursors[i] = new Cursor(this, inputs[i], active); - return cursors; - } - - protected override Func GetDependenciesCore(Func predicate) - { - return _bindings.GetDependencies(predicate); - } - - protected override Delegate[] CreateGetters(Row input, Func active, out Action disp) - { - Func activeInfos = - iinfo => - { - int col = _bindings.MapIinfoToCol(iinfo); - return active(col); - }; - - var getters = new Delegate[_bindings.InfoCount]; - disp = null; - using (var ch = Host.Start("CreateGetters")) + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) { - for (int 
iinfo = 0; iinfo < _bindings.InfoCount; iinfo++) - { - if (!activeInfos(iinfo)) - continue; - getters[iinfo] = MakeGetter(ch, input, iinfo); - } - return getters; + disposer = null; + int srcCount = _srcIndices[iinfo].Length; + ValueGetter>[] getSrc = new ValueGetter>[srcCount]; + for (int isrc = 0; isrc < srcCount; isrc++) + getSrc[isrc] = RowCursorUtils.GetVecGetterAs(NumberType.U4, input, _srcIndices[iinfo][isrc]); + var src = default(VBuffer); + var ngramIdFinder = GetNgramIdFinder(iinfo); + if (_decorator != null) + ngramIdFinder = _decorator(iinfo, ngramIdFinder); + var bldr = new NgramBufferBuilder(_parent._columns[iinfo].NgramLength, _parent._columns[iinfo].SkipLength, + _types[iinfo].ValueCount, ngramIdFinder); + var keyCounts = _srcTypes[iinfo].Select( + t => t.ItemType.KeyCount > 0 ? (uint)t.ItemType.KeyCount : uint.MaxValue).ToArray(); + + // REVIEW: Special casing the srcCount==1 case could potentially improve perf. + ValueGetter> del = + (ref VBuffer dst) => + { + bldr.Reset(); + for (int i = 0; i < srcCount; i++) + { + getSrc[i](ref src); + bldr.AddNgrams(in src, i, keyCounts[i]); + } + bldr.GetResult(ref dst); + }; + return del; } - } - protected override int MapColumnIndex(out bool isSrc, int col) - { - return _bindings.MapColumnIndex(out isSrc, col); - } - - private Delegate MakeGetter(IChannel ch, Row input, int iinfo, FinderDecorator decorator = null) - { - ch.Assert(_bindings.Infos[iinfo].SrcTypes.All(t => t.IsVector && t.ItemType.IsKey)); - - var info = _bindings.Infos[iinfo]; - int srcCount = info.SrcIndices.Length; - ValueGetter>[] getSrc = new ValueGetter>[srcCount]; - for (int isrc = 0; isrc < srcCount; isrc++) - getSrc[isrc] = RowCursorUtils.GetVecGetterAs(NumberType.U4, input, info.SrcIndices[isrc]); - var src = default(VBuffer); - var ngramIdFinder = GetNgramIdFinder(iinfo); - if (decorator != null) - ngramIdFinder = decorator(iinfo, ngramIdFinder); - var bldr = new NgramBufferBuilder(_exes[iinfo].NgramLength, _exes[iinfo].SkipLength, - 
_bindings.Types[iinfo].ValueCount, ngramIdFinder); - var keyCounts = _bindings.Infos[iinfo].SrcTypes.Select( - t => t.ItemType.KeyCount > 0 ? (uint)t.ItemType.KeyCount : uint.MaxValue).ToArray(); - - // REVIEW: Special casing the srcCount==1 case could potentially improve perf. - ValueGetter> del = - (ref VBuffer dst) => + private protected override Func GetDependenciesCore(Func activeOutput) + { + var active = new bool[InputSchema.ColumnCount]; + for (int i = 0; i < _srcIndices.Length; i++) { - bldr.Reset(); - for (int i = 0; i < srcCount; i++) + if (activeOutput(i)) { - getSrc[i](ref src); - bldr.AddNgrams(in src, i, keyCounts[i]); + foreach (var src in _srcIndices[i]) + active[src] = true; } - bldr.GetResult(ref dst); - }; - return del; - } - - private delegate NgramIdFinder FinderDecorator(int iinfo, NgramIdFinder finder); - - private sealed class Cursor : SynchronizedCursorBase - { - private readonly Bindings _bindings; - private readonly bool[] _active; - private readonly Delegate[] _getters; - - public override Schema Schema => _bindings.AsSchema; - - public Cursor(NgramHashingTransformer parent, RowCursor input, bool[] active, FinderDecorator decorator = null) - : base(parent.Host, input) - { - Ch.AssertValue(parent); - Ch.Assert(active == null || active.Length == parent._bindings.ColumnCount); - Ch.AssertValueOrNull(decorator); - - _bindings = parent._bindings; - _active = active; - - _getters = new Delegate[_bindings.Infos.Length]; - for (int iinfo = 0; iinfo < _bindings.Infos.Length; iinfo++) - { - if (IsIndexActive(iinfo)) - _getters[iinfo] = parent.MakeGetter(Ch, Input, iinfo, decorator); } + return col => active[col]; } - private bool IsIndexActive(int iinfo) - { - Ch.Assert(0 <= iinfo & iinfo < _bindings.Infos.Length); - return _active == null || _active[_bindings.MapIinfoToCol(iinfo)]; - } + public override void Save(ModelSaveContext ctx) => _parent.Save(ctx); - public override bool IsColumnActive(int col) + protected override 
Schema.DetachedColumn[] GetOutputColumnsCore() { - Ch.Check(0 <= col && col < _bindings.ColumnCount); - return _active == null || _active[col]; - } - - public override ValueGetter GetGetter(int col) - { - Ch.Check(IsColumnActive(col)); - - int index = _bindings.MapColumnIndex(out bool isSrc, col); - if (isSrc) - return Input.GetGetter(index); - - Ch.Assert(_getters[index] != null); - var fn = _getters[index] as ValueGetter; - if (fn == null) - throw Ch.Except("Invalid TValue in GetGetter: '{0}'", typeof(TValue)); - return fn; + var result = new Schema.DetachedColumn[_parent._columns.Length]; + for (int i = 0; i < _parent._columns.Length; i++) + { + var builder = new MetadataBuilder(); + AddMetadata(i, builder); + result[i] = new Schema.DetachedColumn(_parent._columns[i].Output, _types[i], builder.GetMetadata()); + } + return result; } - private ValueGetter GetSrcGetter(int iinfo, int isrc) + private void AddMetadata(int i, MetadataBuilder builder) { - Ch.Assert(IsIndexActive(iinfo)); - return Input.GetGetter(_bindings.Infos[iinfo].SrcIndices[isrc]); + if (_parent._slotNamesTypes != null && _parent._slotNamesTypes[i] != null) + { + ValueGetter>> getter = (ref VBuffer> dst) => + { + _parent._slotNames[i].CopyTo(ref dst); + }; + builder.Add(MetadataUtils.Kinds.SlotNames, _parent._slotNamesTypes[i], getter); + } } } + private delegate NgramIdFinder FinderDecorator(int iinfo, NgramIdFinder finder); + private sealed class InvertHashHelper { private readonly NgramHashingTransformer _parent; @@ -800,63 +745,40 @@ private sealed class InvertHashHelper // If null, or specific element is null, then just use the input column name. 
private readonly string[][] _friendlyNames; private readonly int[] _invertHashMaxCounts; + private readonly int[][] _srcIndices; - public InvertHashHelper(NgramHashingTransformer parent, string[][] friendlyNames, Func inputPred, int[] invertHashMaxCounts) + public InvertHashHelper(NgramHashingTransformer parent, Schema inputSchema, string[][] friendlyNames, Func inputPred, int[] invertHashMaxCounts) { Contracts.AssertValue(parent); Contracts.AssertValue(friendlyNames); - Contracts.Assert(friendlyNames.Length == parent._bindings.InfoCount); + Contracts.Assert(friendlyNames.Length == parent._columns.Length); Contracts.AssertValue(inputPred); Contracts.AssertValue(invertHashMaxCounts); - Contracts.Assert(invertHashMaxCounts.Length == parent._bindings.InfoCount); + Contracts.Assert(invertHashMaxCounts.Length == parent._columns.Length); _parent = parent; // One per iinfo (some may be null). - _iinfoToCollector = new InvertHashCollector[_parent._bindings.InfoCount]; + _iinfoToCollector = new InvertHashCollector[_parent._columns.Length]; // One per source column (some may be null). - _srcTextGetters = new ValueMapper[_parent.Source.Schema.ColumnCount]; + _srcTextGetters = new ValueMapper[inputSchema.ColumnCount]; _invertHashMaxCounts = invertHashMaxCounts; for (int i = 0; i < _srcTextGetters.Length; ++i) { if (inputPred(i)) - _srcTextGetters[i] = InvertHashUtils.GetSimpleMapper(_parent.Source.Schema, i); + _srcTextGetters[i] = InvertHashUtils.GetSimpleMapper(inputSchema, i); } - _friendlyNames = friendlyNames; - } - - /// - /// Construct an action that calls all the getters for a row, so as to easily force computation - /// of lazily computed values. This will have the side effect of calling the decorator. 
- /// - public static Action CallAllGetters(Row row) - { - var colCount = row.Schema.ColumnCount; - List getters = new List(); - for (int c = 0; c < colCount; ++c) + _srcIndices = new int[_parent._columns.Length][]; + for (int i = 0; i < _parent._columns.Length; i++) { - if (row.IsColumnActive(c)) - getters.Add(GetNoOpGetter(row, c)); - } - var gettersArray = getters.ToArray(); - return - () => + _srcIndices[i] = new int[_parent._columns[i].Inputs.Length]; + for (int j = 0; j < _parent._columns[i].Inputs.Length; j++) { - for (int i = 0; i < gettersArray.Length; ++i) - gettersArray[i](); - }; - } - - private static Action GetNoOpGetter(Row row, int col) - { - Func func = GetNoOpGetter; - var meth = func.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(row.Schema.GetColumnType(col).RawType); - return (Action)meth.Invoke(null, new object[] { row, col }); - } - - private static Action GetNoOpGetter(Row row, int col) - { - T value = default(T); - var getter = row.GetGetter(col); - return () => getter(ref value); + var srcName = _parent._columns[i].Inputs[j]; + if (!inputSchema.TryGetColumnIndex(srcName, out int srcCol)) + throw _parent.Host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName); + _srcIndices[i][j] = srcCol; + } + } + _friendlyNames = friendlyNames; } private sealed class NGram : IEquatable @@ -925,11 +847,11 @@ private static void ClearDst(ref StringBuilder dst) public NgramIdFinder Decorate(int iinfo, NgramIdFinder finder) { - Contracts.Assert(0 <= iinfo && iinfo < _parent._bindings.InfoCount); + Contracts.Assert(0 <= iinfo && iinfo < _parent._columns.Length); Contracts.Assert(_iinfoToCollector[iinfo] == null); Contracts.AssertValue(finder); - var srcIndices = _parent._bindings.Infos[iinfo].SrcIndices; + var srcIndices = _srcIndices[iinfo]; // Define the mapper from the ngram, to text. 
ValueMapper stringMapper; @@ -970,7 +892,7 @@ public NgramIdFinder Decorate(int iinfo, NgramIdFinder finder) { srcNames = new string[srcIndices.Length]; for (int i = 0; i < srcIndices.Length; ++i) - srcNames[i] = _parent.Source.Schema.GetColumnName(srcIndices[i]); + srcNames[i] = _parent._columns[iinfo].Inputs[i]; } Contracts.Assert(Utils.Size(srcNames) == srcIndices.Length); string[] friendlyNames = _friendlyNames?[iinfo]; @@ -995,7 +917,7 @@ public NgramIdFinder Decorate(int iinfo, NgramIdFinder finder) } var collector = _iinfoToCollector[iinfo] = new InvertHashCollector( - _parent._bindings.Types[iinfo].VectorSize, _invertHashMaxCounts[iinfo], + 1 << _parent._columns[iinfo].HashBits, _invertHashMaxCounts[iinfo], stringMapper, EqualityComparer.Default, (in NGram src, ref NGram dst) => dst = src.Clone()); return @@ -1028,7 +950,7 @@ public VBuffer>[] SlotNamesMetadata(out ColumnType[] types) if (_iinfoToCollector[iinfo] != null) { var vec = values[iinfo] = _iinfoToCollector[iinfo].GetMetadata(); - Contracts.Assert(vec.Length == _parent._bindings.Types[iinfo].VectorSize); + Contracts.Assert(vec.Length == 1 << _parent._columns[iinfo].HashBits); types[iinfo] = new VectorType(TextType.Instance, vec.Length); } } @@ -1036,4 +958,194 @@ public VBuffer>[] SlotNamesMetadata(out ColumnType[] types) } } } + + /// + /// Produces a bag of counts of ngrams (sequences of consecutive words of length 1-n) in a given text. + /// It does so by hashing each ngram and using the hash value as the index in the bag. + /// + /// is different from in a way that + /// takes tokenized text as input while tokenizes text internally. 
+ /// + public sealed class NgramHashingEstimator : IEstimator + { + internal static class Defaults + { + internal const int NgramLength = 2; + internal const bool AllLengths = true; + internal const int SkipLength = 0; + internal const int HashBits = 16; + internal const uint Seed = 314489979; + internal const bool RehashUnigrams = false; + internal const bool Ordered = true; + internal const int InvertHash = 0; + } + + private readonly IHost _host; + private readonly NgramHashingTransformer.ColumnInfo[] _columns; + + /// + /// Produces a bag of counts of hashed ngrams in + /// and outputs ngram vector as + /// + /// is different from in a way that + /// takes tokenized text as input while tokenizes text internally. + /// + /// The environment. + /// Name of input column containing tokenized text. + /// Name of output column, will contain the ngram vector. Null means is replaced. + /// Number of bits to hash into. Must be between 1 and 30, inclusive. + /// Ngram length. + /// Maximum number of tokens to skip when constructing an ngram. + /// Whether to include all ngram lengths up to or only . + /// Hashing seed. + /// Whether the position of each source column should be included in the hash (when there are multiple source columns). + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
+ public NgramHashingEstimator(IHostEnvironment env, + string inputColumn, + string outputColumn = null, + int hashBits = 16, + int ngramLength = 2, + int skipLength = 0, + bool allLengths = true, + uint seed = 314489979, + bool ordered = true, + int invertHash = 0) + : this(env, new[] { (new[] { inputColumn }, outputColumn ?? inputColumn) }, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash) + { + } + + /// + /// Produces a bag of counts of hashed ngrams in + /// and outputs ngram vector as + /// + /// is different from in a way that + /// takes tokenized text as input while tokenizes text internally. + /// + /// The environment. + /// Name of input columns containing tokenized text. + /// Name of output column, will contain the ngram vector. + /// Number of bits to hash into. Must be between 1 and 30, inclusive. + /// Ngram length. + /// Maximum number of tokens to skip when constructing an ngram. + /// Whether to include all ngram lengths up to or only . + /// Hashing seed. + /// Whether the position of each source column should be included in the hash (when there are multiple source columns). + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
+ public NgramHashingEstimator(IHostEnvironment env, + string[] inputColumns, + string outputColumn, + int hashBits = 16, + int ngramLength = 2, + int skipLength = 0, + bool allLengths = true, + uint seed = 314489979, + bool ordered = true, + int invertHash = 0) + : this(env, new[] { (inputColumns, outputColumn) }, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash) + { + } + + /// + /// Produces a bag of counts of hashed ngrams in + /// and outputs ngram vector for each output in + /// + /// is different from in a way that + /// takes tokenized text as input while tokenizes text internally. + /// + /// The environment. + /// Pairs of input columns to output column mappings on which to compute ngram vector. + /// Number of bits to hash into. Must be between 1 and 30, inclusive. + /// Ngram length. + /// Maximum number of tokens to skip when constructing an ngram. + /// Whether to include all ngram lengths up to or only . + /// Hashing seed. + /// Whether the position of each source column should be included in the hash (when there are multiple source columns). + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
+ public NgramHashingEstimator(IHostEnvironment env, + (string[] inputs, string output)[] columns, + int hashBits = 16, + int ngramLength = 2, + int skipLength = 0, + bool allLengths = true, + uint seed = 314489979, + bool ordered = true, + int invertHash = 0) + : this(env, columns.Select(x => new NgramHashingTransformer.ColumnInfo(x.inputs, x.output, ngramLength, skipLength, allLengths, hashBits, seed, ordered, invertHash)).ToArray()) + { + + } + + /// + /// Produces a bag of counts of hashed ngrams in + /// and outputs ngram vector for each output in + /// + /// is different from in a way that + /// takes tokenized text as input while tokenizes text internally. + /// + /// The environment. + /// Array of columns which specifies the behavior of the transformation. + public NgramHashingEstimator(IHostEnvironment env, params NgramHashingTransformer.ColumnInfo[] columns) + { + Contracts.CheckValue(env, nameof(env)); + _host = env.Register(nameof(NgramHashingEstimator)); + _columns = columns; + } + + internal static bool IsColumnTypeValid(ColumnType type) + { + if (!type.IsVector) + return false; + if (!type.ItemType.IsKey) + return false; + // Can only accept key types that can be converted to U4. + if (type.ItemType.KeyCount == 0 && type.ItemType.RawKind > DataKind.U4) + return false; + return true; + } + + internal static bool IsSchemaColumnValid(SchemaShape.Column col) + { + if (col.Kind == SchemaShape.Column.VectorKind.Scalar) + return false; + if (!col.IsKey) + return false; + // Can only accept key types that can be converted to U4. 
+ if (col.ItemType.RawKind > DataKind.U4) + return false; + return true; + } + + internal const string ExpectedColumnType = "Expected vector of Key type, and Key is convertible to U4"; + + public SchemaShape GetOutputSchema(SchemaShape inputSchema) + { + _host.CheckValue(inputSchema, nameof(inputSchema)); + var result = inputSchema.ToDictionary(x => x.Name); + foreach (var colInfo in _columns) + { + foreach (var input in colInfo.Inputs) + { + if (!inputSchema.TryFindColumn(input, out var col)) + throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", input); + if (!IsSchemaColumnValid(col)) + throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", input, ExpectedColumnType, col.GetTypeString()); + } + var metadata = new List(); + metadata.Add(new SchemaShape.Column(MetadataUtils.Kinds.SlotNames, SchemaShape.Column.VectorKind.Vector, TextType.Instance, false)); + result[colInfo.Output] = new SchemaShape.Column(colInfo.Output, SchemaShape.Column.VectorKind.Vector, NumberType.R4, false, new SchemaShape(metadata)); + } + return new SchemaShape(result.Values); + } + + public NgramHashingTransformer Fit(IDataView input) => new NgramHashingTransformer(_host, input, _columns); + } } diff --git a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs index f37872da8a..b6a8b492fd 100644 --- a/src/Microsoft.ML.Transforms/Text/NgramTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/NgramTransform.cs @@ -14,6 +14,7 @@ using System; using System.Collections.Generic; using System.Collections.Immutable; +using System.Diagnostics; using System.Linq; using System.Text; @@ -31,8 +32,10 @@ namespace Microsoft.ML.Transforms.Text { - using Conditional = System.Diagnostics.ConditionalAttribute; - + /// + /// Produces a bag of counts of ngrams(sequences of consecutive values of length 1-n) in a given vector of keys. + /// It does so by building a dictionary of ngrams and using the id in the dictionary as the index in the bag. 
+ /// public sealed class NgramExtractingTransformer : OneToOneTransformerBase { public sealed class Column : OneToOneColumn @@ -858,7 +861,7 @@ internal static bool IsSchemaColumnValid(SchemaShape.Column col) return true; } - internal const string ExpectedColumnType = "Expected vector of Key type, and Key is convertable to U4"; + internal const string ExpectedColumnType = "Expected vector of Key type, and Key is convertible to U4"; public SchemaShape GetOutputSchema(SchemaShape inputSchema) { diff --git a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs index f8335092bd..809694c228 100644 --- a/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs +++ b/src/Microsoft.ML.Transforms/Text/StopWordsRemovingTransformer.cs @@ -184,7 +184,7 @@ private static NormStr.Pool[] StopWords /// /// Describes how the transformer handles one column pair. /// - public class ColumnInfo + public sealed class ColumnInfo { public readonly string Input; public readonly string Output; diff --git a/src/Microsoft.ML.Transforms/Text/TextCatalog.cs b/src/Microsoft.ML.Transforms/Text/TextCatalog.cs index 52cf642d2f..0708771e4d 100644 --- a/src/Microsoft.ML.Transforms/Text/TextCatalog.cs +++ b/src/Microsoft.ML.Transforms/Text/TextCatalog.cs @@ -174,8 +174,8 @@ public static WordTokenizingEstimator TokenizeWords(this TransformsCatalog.TextT /// and outputs bag of word vector as /// /// The text-related transform's catalog. - /// The column containing text to compute bag of word vector. - /// The column containing bag of word vector. Null means is replaced. + /// Name of input column containing tokenized text. + /// Name of output column, will contain the ngram vector. Null means is replaced. /// Ngram length. /// Maximum number of tokens to skip when constructing an ngram. /// Whether to include all ngram lengths up to or only . 
@@ -335,7 +335,10 @@ public static WordBagEstimator ProduceWordBags(this TransformsCatalog.TextTransf /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog.TextTransforms catalog, string inputColumn, string outputColumn = null, @@ -362,7 +365,10 @@ public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog. /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog.TextTransforms catalog, string[] inputColumns, string outputColumn, @@ -388,7 +394,10 @@ public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog. /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog.TextTransforms catalog, (string[] inputs, string output)[] columns, int hashBits = NgramHashExtractingTransformer.DefaultArguments.HashBits, @@ -405,67 +414,73 @@ public static WordHashBagEstimator ProduceHashedWordBags(this TransformsCatalog. /// Produces a bag of counts of hashed ngrams in /// and outputs ngram vector as /// - /// is different from in a way that + /// is different from in a way that /// takes tokenized text as input while tokenizes text internally. /// /// The text-related transform's catalog. - /// The column containing text to compute bag of word vector. - /// The column containing bag of word vector. Null means is replaced. + /// Name of input column containing tokenized text. + /// Name of output column, will contain the ngram vector. Null means is replaced. /// Number of bits to hash into. Must be between 1 and 30, inclusive. /// Ngram length. 
/// Maximum number of tokens to skip when constructing an ngram. /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. - public static NgramHashEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. + public static NgramHashingEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, string inputColumn, string outputColumn = null, - int hashBits = NgramHashingTransformer.ArgumentDefaults.HashBits, - int ngramLength = NgramHashingTransformer.ArgumentDefaults.NgramLength, - int skipLength = NgramHashingTransformer.ArgumentDefaults.SkipLength, - bool allLengths = NgramHashingTransformer.ArgumentDefaults.AllLengths, - uint seed = NgramHashingTransformer.ArgumentDefaults.Seed, - bool ordered = NgramHashingTransformer.ArgumentDefaults.Ordered, - int invertHash = NgramHashingTransformer.ArgumentDefaults.InvertHash) - => new NgramHashEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), + int hashBits = NgramHashingEstimator.Defaults.HashBits, + int ngramLength = NgramHashingEstimator.Defaults.NgramLength, + int skipLength = NgramHashingEstimator.Defaults.SkipLength, + bool allLengths = NgramHashingEstimator.Defaults.AllLengths, + uint seed = NgramHashingEstimator.Defaults.Seed, + bool ordered = 
NgramHashingEstimator.Defaults.Ordered, + int invertHash = NgramHashingEstimator.Defaults.InvertHash) + => new NgramHashingEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), inputColumn, outputColumn, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash); /// /// Produces a bag of counts of hashed ngrams in /// and outputs ngram vector as /// - /// is different from in a way that + /// is different from in a way that /// takes tokenized text as input while tokenizes text internally. /// /// The text-related transform's catalog. - /// The columns containing text to compute bag of word vector. - /// The column containing output tokens. + /// Name of input columns containing tokenized text. + /// Name of output column, will contain the ngram vector. /// Number of bits to hash into. Must be between 1 and 30, inclusive. /// Ngram length. /// Maximum number of tokens to skip when constructing an ngram. /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. - public static NgramHashEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
+ public static NgramHashingEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, string[] inputColumns, string outputColumn, - int hashBits = NgramHashingTransformer.ArgumentDefaults.HashBits, - int ngramLength = NgramHashingTransformer.ArgumentDefaults.NgramLength, - int skipLength = NgramHashingTransformer.ArgumentDefaults.SkipLength, - bool allLengths = NgramHashingTransformer.ArgumentDefaults.AllLengths, - uint seed = NgramHashingTransformer.ArgumentDefaults.Seed, - bool ordered = NgramHashingTransformer.ArgumentDefaults.Ordered, - int invertHash = NgramHashingTransformer.ArgumentDefaults.InvertHash) - => new NgramHashEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), + int hashBits = NgramHashingEstimator.Defaults.HashBits, + int ngramLength = NgramHashingEstimator.Defaults.NgramLength, + int skipLength = NgramHashingEstimator.Defaults.SkipLength, + bool allLengths = NgramHashingEstimator.Defaults.AllLengths, + uint seed = NgramHashingEstimator.Defaults.Seed, + bool ordered = NgramHashingEstimator.Defaults.Ordered, + int invertHash = NgramHashingEstimator.Defaults.InvertHash) + => new NgramHashingEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), inputColumns, outputColumn, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash); /// /// Produces a bag of counts of hashed ngrams in /// and outputs ngram vector for each output in /// - /// is different from in a way that + /// is different from in a way that /// takes tokenized text as input while tokenizes text internally. /// /// The text-related transform's catalog. @@ -476,17 +491,20 @@ public static NgramHashEstimator ProduceHashedNgrams(this TransformsCatalog.Text /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). 
- /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. - public static NgramHashEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. + public static NgramHashingEstimator ProduceHashedNgrams(this TransformsCatalog.TextTransforms catalog, (string[] inputs, string output)[] columns, - int hashBits = NgramHashingTransformer.ArgumentDefaults.HashBits, - int ngramLength = NgramHashingTransformer.ArgumentDefaults.NgramLength, - int skipLength = NgramHashingTransformer.ArgumentDefaults.SkipLength, - bool allLengths = NgramHashingTransformer.ArgumentDefaults.AllLengths, - uint seed = NgramHashingTransformer.ArgumentDefaults.Seed, - bool ordered = NgramHashingTransformer.ArgumentDefaults.Ordered, - int invertHash = NgramHashingTransformer.ArgumentDefaults.InvertHash) - => new NgramHashEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), + int hashBits = NgramHashingEstimator.Defaults.HashBits, + int ngramLength = NgramHashingEstimator.Defaults.NgramLength, + int skipLength = NgramHashingEstimator.Defaults.SkipLength, + bool allLengths = NgramHashingEstimator.Defaults.AllLengths, + uint seed = NgramHashingEstimator.Defaults.Seed, + bool ordered = NgramHashingEstimator.Defaults.Ordered, + int invertHash = NgramHashingEstimator.Defaults.InvertHash) + => new NgramHashingEstimator(Contracts.CheckRef(catalog, nameof(catalog)).GetEnvironment(), columns, hashBits, ngramLength, skipLength,allLengths, seed, ordered, 
invertHash); /// diff --git a/src/Microsoft.ML.Transforms/Text/TextStaticExtensions.cs b/src/Microsoft.ML.Transforms/Text/TextStaticExtensions.cs index abeb448f84..4091243062 100644 --- a/src/Microsoft.ML.Transforms/Text/TextStaticExtensions.cs +++ b/src/Microsoft.ML.Transforms/Text/TextStaticExtensions.cs @@ -405,7 +405,10 @@ public override IEstimator Reconcile(IHostEnvironment env, /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
public static Vector ToBagofHashedWords(this Scalar input, int hashBits = 16, int ngramLength = 1, @@ -557,12 +560,12 @@ public override IEstimator Reconcile(IHostEnvironment env, IReadOnlyCollection usedNames) { Contracts.Assert(toOutput.Length == 1); - - var pairs = new List<(string[] inputs, string output)>(); + var columns = new List(); foreach (var outCol in toOutput) - pairs.Add((new[] { inputNames[((OutPipelineColumn)outCol).Input] }, outputNames[outCol])); + columns.Add(new NgramHashingTransformer.ColumnInfo(new[] { inputNames[((OutPipelineColumn)outCol).Input] }, outputNames[outCol], + _ngramLength, _skipLength, _allLengths, _hashBits, _seed, _ordered, _invertHash)); - return new NgramHashEstimator(env, pairs.ToArray(), _hashBits, _ngramLength, _skipLength, _allLengths, _seed, _ordered, _invertHash); + return new NgramHashingEstimator(env, columns.ToArray()); } } @@ -580,7 +583,10 @@ public override IEstimator Reconcile(IHostEnvironment env, /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
public static Vector ToNgramsHash(this VarVector> input, int hashBits = 16, int ngramLength = 2, diff --git a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs index 76242af78b..28e8361d2d 100644 --- a/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs +++ b/src/Microsoft.ML.Transforms/Text/TokenizingByCharacters.cs @@ -12,7 +12,6 @@ using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.Runtime.TextAnalytics; using Microsoft.ML.Transforms.Text; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs b/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs index 67de331bf3..7022c7f748 100644 --- a/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/WordBagTransform.cs @@ -8,7 +8,6 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using Microsoft.ML.Transforms.Text; using System.Collections.Generic; @@ -431,7 +430,7 @@ public sealed class TermLoaderArguments public string TermsColumn; [Argument(ArgumentType.AtMostOnce, HelpText = "How items should be ordered when vectorized. By default, they will be in the order encountered. 
" + - "If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a').", SortOrder = 5)] + "If by value, items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a').", SortOrder = 5)] public ValueToKeyMappingTransformer.SortOrder Sort = ValueToKeyMappingTransformer.SortOrder.Occurrence; [Argument(ArgumentType.AtMostOnce, HelpText = "Drop unknown terms instead of mapping them to NA term.", ShortName = "dropna", SortOrder = 6)] diff --git a/src/Microsoft.ML.Transforms/Text/WordHashBagProducingTransform.cs b/src/Microsoft.ML.Transforms/Text/WordHashBagProducingTransform.cs index 44d0226d47..268a98b872 100644 --- a/src/Microsoft.ML.Transforms/Text/WordHashBagProducingTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/WordHashBagProducingTransform.cs @@ -7,7 +7,6 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Internal.Utilities; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using Microsoft.ML.Transforms.Text; using System.Collections.Generic; @@ -331,7 +330,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV if (termLoaderArgs != null) termCols = new List(); var hashColumns = new List(); - var ngramHashColumns = new NgramHashingTransformer.Column[args.Column.Length]; + var ngramHashColumns = new NgramHashingTransformer.ColumnInfo[args.Column.Length]; var colCount = args.Column.Length; // The NGramHashExtractor has a ManyToOne column type. 
To avoid stepping over the source @@ -373,25 +372,15 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV } ngramHashColumns[iinfo] = - new NgramHashingTransformer.Column - { - Name = column.Name, - Source = tmpColNames[iinfo], - AllLengths = column.AllLengths, - HashBits = column.HashBits, - NgramLength = column.NgramLength, - RehashUnigrams = false, - Seed = column.Seed, - SkipLength = column.SkipLength, - Ordered = column.Ordered, - InvertHash = column.InvertHash, - // REVIEW: This is an ugly internal hack to get around - // the problem that we want the *original* source names surfacing - // in the descriptions where appropriate, rather than _tmp000 and - // what have you. The alternative is we do something elaborate - // with metadata or something but I'm not sure that's better. - FriendlyNames = column.FriendlyNames - }; + new NgramHashingTransformer.ColumnInfo(tmpColNames[iinfo], column.Name, + column.NgramLength ?? args.NgramLength, + column.SkipLength ?? args.SkipLength, + column.AllLengths ?? args.AllLengths, + column.HashBits ?? args.HashBits, + column.Seed ?? args.Seed, + column.Ordered ?? args.Ordered, + column.InvertHash ?? 
args.InvertHash); + ngramHashColumns[iinfo].FriendlyNames = column.FriendlyNames; } if (termLoaderArgs != null) @@ -433,22 +422,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV view = HashingTransformer.Create(h, hashArgs, view); - // creating the NgramHash function - var ngramHashArgs = - new NgramHashingTransformer.Arguments - { - AllLengths = args.AllLengths, - HashBits = args.HashBits, - NgramLength = args.NgramLength, - SkipLength = args.SkipLength, - RehashUnigrams = false, - Ordered = args.Ordered, - Seed = args.Seed, - Column = ngramHashColumns, - InvertHash = args.InvertHash - }; - - view = new NgramHashingTransformer(h, ngramHashArgs, view); + view = new NgramHashingEstimator(h, ngramHashColumns).Fit(view).Transform(view); return ColumnSelectingTransformer.CreateDrop(h, view, tmpColNames.SelectMany(cols => cols).ToArray()); } diff --git a/src/Microsoft.ML.Transforms/Text/WrappedTextTransformers.cs b/src/Microsoft.ML.Transforms/Text/WrappedTextTransformers.cs index 3387dc250e..0dd175f59d 100644 --- a/src/Microsoft.ML.Transforms/Text/WrappedTextTransformers.cs +++ b/src/Microsoft.ML.Transforms/Text/WrappedTextTransformers.cs @@ -150,7 +150,10 @@ public sealed class WordHashBagEstimator : TrainedWrapperEstimatorBase /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. 
+ /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public WordHashBagEstimator(IHostEnvironment env, string inputColumn, string outputColumn = null, @@ -178,7 +181,10 @@ public WordHashBagEstimator(IHostEnvironment env, /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public WordHashBagEstimator(IHostEnvironment env, string[] inputColumns, string outputColumn, @@ -205,7 +211,10 @@ public WordHashBagEstimator(IHostEnvironment env, /// Whether to include all ngram lengths up to or only . /// Hashing seed. /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. 
public WordHashBagEstimator(IHostEnvironment env, (string[] inputs, string output)[] columns, int hashBits = 16, @@ -251,146 +260,4 @@ public override TransformWrapper Fit(IDataView input) return new TransformWrapper(Host, WordHashBagProducingTransformer.Create(Host, args, input)); } } - - /// - /// Produces a bag of counts of ngrams (sequences of consecutive words of length 1-n) in a given text. - /// It does so by hashing each ngram and using the hash value as the index in the bag. - /// - /// is different from in a way that - /// takes tokenized text as input while tokenizes text internally. - /// - public sealed class NgramHashEstimator : TrainedWrapperEstimatorBase - { - private readonly (string[] inputs, string output)[] _columns; - private readonly int _hashBits; - private readonly int _ngramLength; - private readonly int _skipLength; - private readonly bool _allLengths; - private readonly uint _seed; - private readonly bool _ordered; - private readonly int _invertHash; - - /// - /// Produces a bag of counts of hashed ngrams in - /// and outputs ngram vector as - /// - /// is different from in a way that - /// takes tokenized text as input while tokenizes text internally. - /// - /// The environment. - /// The column containing text to compute bag of word vector. - /// The column containing bag of word vector. Null means is replaced. - /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// Ngram length. - /// Maximum number of tokens to skip when constructing an ngram. - /// Whether to include all ngram lengths up to or only . - /// Hashing seed. - /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. 
- public NgramHashEstimator(IHostEnvironment env, - string inputColumn, - string outputColumn = null, - int hashBits = 16, - int ngramLength = 2, - int skipLength = 0, - bool allLengths = true, - uint seed = 314489979, - bool ordered = true, - int invertHash = 0) - : this(env, new[] { (new[] { inputColumn }, outputColumn ?? inputColumn) }, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash) - { - } - - /// - /// Produces a bag of counts of hashed ngrams in - /// and outputs ngram vector as - /// - /// is different from in a way that - /// takes tokenized text as input while tokenizes text internally. - /// - /// The environment. - /// The columns containing text to compute bag of word vector. - /// The column containing output tokens. - /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// Ngram length. - /// Maximum number of tokens to skip when constructing an ngram. - /// Whether to include all ngram lengths up to or only . - /// Hashing seed. - /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. - public NgramHashEstimator(IHostEnvironment env, - string[] inputColumns, - string outputColumn, - int hashBits = 16, - int ngramLength = 2, - int skipLength = 0, - bool allLengths = true, - uint seed = 314489979, - bool ordered = true, - int invertHash = 0) - : this(env, new[] { (inputColumns, outputColumn) }, hashBits, ngramLength, skipLength, allLengths, seed, ordered, invertHash) - { - } - - /// - /// Produces a bag of counts of hashed ngrams in - /// and outputs ngram vector for each output in - /// - /// is different from in a way that - /// takes tokenized text as input while tokenizes text internally. - /// - /// The environment. - /// Pairs of columns to compute bag of word vector. - /// Number of bits to hash into. 
Must be between 1 and 30, inclusive. - /// Ngram length. - /// Maximum number of tokens to skip when constructing an ngram. - /// Whether to include all ngram lengths up to or only . - /// Hashing seed. - /// Whether the position of each source column should be included in the hash (when there are multiple source columns). - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. - public NgramHashEstimator(IHostEnvironment env, - (string[] inputs, string output)[] columns, - int hashBits = 16, - int ngramLength = 2, - int skipLength = 0, - bool allLengths = true, - uint seed = 314489979, - bool ordered = true, - int invertHash = 0) - : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(WordBagEstimator))) - { - foreach (var (input, output) in columns) - { - Host.CheckUserArg(Utils.Size(input) > 0, nameof(input)); - Host.CheckValue(output, nameof(input)); - } - - _columns = columns; - _hashBits = hashBits; - _ngramLength = ngramLength; - _skipLength = skipLength; - _allLengths = allLengths; - _seed = seed; - _ordered = ordered; - _invertHash = invertHash; - } - - public override TransformWrapper Fit(IDataView input) - { - // Create arguments. 
- var args = new NgramHashingTransformer.Arguments - { - Column = _columns.Select(x => new NgramHashingTransformer.Column { Source = x.inputs, Name = x.output }).ToArray(), - HashBits = _hashBits, - NgramLength = _ngramLength, - SkipLength = _skipLength, - AllLengths = _allLengths, - Seed = _seed, - Ordered = _ordered, - InvertHash = _invertHash - }; - - return new TransformWrapper(Host, new NgramHashingTransformer(Host, args, input)); - } - } } \ No newline at end of file diff --git a/src/Microsoft.ML.Transforms/TransformsStatic.cs b/src/Microsoft.ML.Transforms/TransformsStatic.cs index f5c23969a9..cebf9a6fbb 100644 --- a/src/Microsoft.ML.Transforms/TransformsStatic.cs +++ b/src/Microsoft.ML.Transforms/TransformsStatic.cs @@ -5,9 +5,10 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Runtime; using Microsoft.ML.StaticPipe.Runtime; -using Microsoft.ML.Transforms.FeatureSelection; +using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; +using Microsoft.ML.Transforms.FeatureSelection; using Microsoft.ML.Transforms.Projections; using System; using System.Collections.Generic; @@ -363,7 +364,8 @@ public static Vector SelectFeaturesBasedOnCount(this Vector inpu public static Vector SelectFeaturesBasedOnCount(this Vector input, long count = CountFeatureSelectingEstimator.Defaults.Count) => new OutPipelineColumn(input, count); } - public static class CategoricalStaticExtensions + + public static class CategoricalStaticExtensions { public enum OneHotVectorOutputKind : byte { @@ -511,4 +513,572 @@ public static Vector OneHotEncoding(this Vector input, OneHotVect return new ImplVector(input, new Config(outputKind, order, maxItems, Wrap(onFit))); } } + + /// + /// Extension methods for the static-pipeline over objects. 
+ /// + public static class KeyToBinaryVectorExtensions + { + private interface IColInput + { + PipelineColumn Input { get; } + } + + private sealed class OutVectorColumn : Vector, IColInput + { + public PipelineColumn Input { get; } + + public OutVectorColumn(Vector> input) + : base(Reconciler.Inst, input) + { + Input = input; + } + + public OutVectorColumn(Key input) + : base(Reconciler.Inst, input) + { + Input = input; + } + } + + private sealed class OutVarVectorColumn : VarVector, IColInput + { + public PipelineColumn Input { get; } + public OutVarVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + } + } + + private sealed class OutVectorColumn : Vector, IColInput + { + public PipelineColumn Input { get; } + + public OutVectorColumn(Vector> input) + : base(Reconciler.Inst, input) + { + Input = input; + } + + public OutVectorColumn(Key input) + : base(Reconciler.Inst, input) + { + Input = input; + } + } + + private sealed class OutVarVectorColumn : VarVector, IColInput + { + public PipelineColumn Input { get; } + public OutVarVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + } + } + + private sealed class Reconciler : EstimatorReconciler + { + public static Reconciler Inst = new Reconciler(); + + private Reconciler() { } + + public override IEstimator Reconcile(IHostEnvironment env, + PipelineColumn[] toOutput, + IReadOnlyDictionary inputNames, + IReadOnlyDictionary outputNames, + IReadOnlyCollection usedNames) + { + var infos = new KeyToBinaryVectorMappingTransformer.ColumnInfo[toOutput.Length]; + for (int i = 0; i < toOutput.Length; ++i) + { + var col = (IColInput)toOutput[i]; + infos[i] = new KeyToBinaryVectorMappingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]]); + } + return new KeyToBinaryVectorMappingEstimator(env, infos); + } + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. 
+ /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. + /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static Vector ToBinaryVector(this Key input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. + /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. + /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static Vector ToBinaryVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. + /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. + /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static VarVector ToBinaryVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. + /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. 
+ /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static Vector ToBinaryVector(this Key input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. + /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. + /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static Vector ToBinaryVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. + /// The first value is encoded as all zeros and missing values are encoded as all ones. + /// In the case where a vector has multiple keys, the encoded values are concatenated. + /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. + /// + public static VarVector ToBinaryVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input); + } + } + + /// + /// Extension methods for the static-pipeline over objects. 
+ /// + public static class KeyToVectorExtensions + { + private interface IColInput + { + PipelineColumn Input { get; } + bool Bag { get; } + } + + private sealed class OutVectorColumn : Vector, IColInput + { + public PipelineColumn Input { get; } + public bool Bag { get; } + + public OutVectorColumn(Key input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = false; + } + + public OutVectorColumn(Vector> input, bool bag) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = bag; + } + + public OutVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = true; + } + } + + private sealed class OutVarVectorColumn : VarVector, IColInput + { + public PipelineColumn Input { get; } + public bool Bag { get; } + + public OutVarVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = false; + } + } + + private sealed class OutVectorColumn : Vector, IColInput + { + public PipelineColumn Input { get; } + public bool Bag { get; } + + public OutVectorColumn(Key input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = false; + } + + public OutVectorColumn(Vector> input, bool bag) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = bag; + } + + public OutVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = true; + } + } + + private sealed class OutVarVectorColumn : VarVector, IColInput + { + public PipelineColumn Input { get; } + public bool Bag { get; } + + public OutVarVectorColumn(VarVector> input) + : base(Reconciler.Inst, input) + { + Input = input; + Bag = false; + } + } + + private sealed class Reconciler : EstimatorReconciler + { + public static Reconciler Inst = new Reconciler(); + + private Reconciler() { } + + public override IEstimator Reconcile(IHostEnvironment env, + PipelineColumn[] toOutput, + IReadOnlyDictionary inputNames, + IReadOnlyDictionary outputNames, + IReadOnlyCollection usedNames) + { + var infos = new 
KeyToVectorMappingTransformer.ColumnInfo[toOutput.Length]; + for (int i = 0; i < toOutput.Length; ++i) + { + var col = (IColInput)toOutput[i]; + infos[i] = new KeyToVectorMappingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]], col.Bag); + } + return new KeyToVectorMappingEstimator(env, infos); + } + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// + public static Vector ToVector(this Key input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// + public static Vector ToVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, false); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. 
+ /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static VarVector ToVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static Vector ToBaggedVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, true); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. 
+ /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static Vector ToBaggedVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// + public static Vector ToVector(this Key input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// + public static Vector ToVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, false); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. 
+ /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static VarVector ToVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. + /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static Vector ToBaggedVector(this Vector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, true); + } + + /// + /// Takes a column of key type of known cardinality and produces an indicator vector of floats. 
+ /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, + /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. + /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. + /// In this case then the indicator vectors for all values in the column will be simply added together, + /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, + /// the output column will be a vector type of length equal to that cardinality. + /// + public static Vector ToBaggedVector(this VarVector> input) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input); + } + } + + /// + /// Extension methods for the static-pipeline over objects. + /// + public static class NAReplacerExtensions + { + private readonly struct Config + { + public readonly bool ImputeBySlot; + public readonly MissingValueReplacingTransformer.ColumnInfo.ReplacementMode ReplacementMode; + + public Config(MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, + bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) + { + ImputeBySlot = imputeBySlot; + ReplacementMode = replacementMode; + } + } + + private interface IColInput + { + PipelineColumn Input { get; } + Config Config { get; } + } + + private sealed class OutScalar : Scalar, IColInput + { + public PipelineColumn Input { get; } + public Config Config { get; } + + public OutScalar(Scalar input, Config config) + : base(Reconciler.Inst, input) + { + Input = input; + Config = config; + } + } + + private sealed class OutVectorColumn : Vector, IColInput + { + public PipelineColumn Input { get; } + public Config Config { get; } + + public OutVectorColumn(Vector input, Config config) + : base(Reconciler.Inst, input) 
+ { + Input = input; + Config = config; + } + + } + + private sealed class OutVarVectorColumn : VarVector, IColInput + { + public PipelineColumn Input { get; } + public Config Config { get; } + + public OutVarVectorColumn(VarVector input, Config config) + : base(Reconciler.Inst, input) + { + Input = input; + Config = config; + } + } + + private sealed class Reconciler : EstimatorReconciler + { + public static Reconciler Inst = new Reconciler(); + + private Reconciler() { } + + public override IEstimator Reconcile(IHostEnvironment env, + PipelineColumn[] toOutput, + IReadOnlyDictionary inputNames, + IReadOnlyDictionary outputNames, + IReadOnlyCollection usedNames) + { + var infos = new MissingValueReplacingTransformer.ColumnInfo[toOutput.Length]; + for (int i = 0; i < toOutput.Length; ++i) + { + var col = (IColInput)toOutput[i]; + infos[i] = new MissingValueReplacingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]], col.Config.ReplacementMode, col.Config.ImputeBySlot); + } + return new MissingValueReplacingEstimator(env, infos); + } + } + + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. + /// How NaN should be replaced + public static Scalar ReplaceNaNValues(this Scalar input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) + { + Contracts.CheckValue(input, nameof(input)); + return new OutScalar(input, new Config(replacementMode, false)); + } + + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. 
+ /// How NaN should be replaced + public static Scalar ReplaceNaNValues(this Scalar input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) + { + Contracts.CheckValue(input, nameof(input)); + return new OutScalar(input, new Config(replacementMode, false)); + } + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. + /// How NaN should be replaced + /// If true, per-slot imputation of replacement is performed. + /// Otherwise, replacement value is imputed for the entire vector column. This setting is ignored for scalars and variable vectors, + /// where imputation is always for the entire column. + public static Vector ReplaceNaNValues(this Vector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, new Config(replacementMode, imputeBySlot)); + } + + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. + /// How NaN should be replaced + /// If true, per-slot imputation of replacement is performed. + /// Otherwise, replacement value is imputed for the entire vector column. This setting is ignored for scalars and variable vectors, + /// where imputation is always for the entire column. 
+ public static Vector ReplaceNaNValues(this Vector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode, bool imputeBySlot = MissingValueReplacingEstimator.Defaults.ImputeBySlot) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVectorColumn(input, new Config(replacementMode, imputeBySlot)); + } + + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. + /// How NaN should be replaced + public static VarVector ReplaceNaNValues(this VarVector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input, new Config(replacementMode, false)); + } + /// + /// Scan through all rows and replace NaN values according to replacement strategy. + /// + /// Incoming data. + /// How NaN should be replaced + public static VarVector ReplaceNaNValues(this VarVector input, MissingValueReplacingTransformer.ColumnInfo.ReplacementMode replacementMode = MissingValueReplacingEstimator.Defaults.ReplacementMode) + { + Contracts.CheckValue(input, nameof(input)); + return new OutVarVectorColumn(input, new Config(replacementMode, false)); + } + } } diff --git a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv index ea332319f2..d1f3bf4a52 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv +++ b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv @@ -76,7 +76,7 @@ Trainers.SymSgdBinaryClassifier Train a symbolic SGD. Microsoft.ML.Trainers.SymS Transforms.ApproximateBootstrapSampler Approximate bootstrap sampling. 
Microsoft.ML.Transforms.BootstrapSample GetSample Microsoft.ML.Transforms.BootstrapSamplingTransformer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput Transforms.BinaryPredictionScoreColumnsRenamer For binary prediction, it renames the PredictedLabel and Score columns to include the name of the positive class. Microsoft.ML.Runtime.EntryPoints.ScoreModel RenameBinaryPredictionScoreColumns Microsoft.ML.Runtime.EntryPoints.ScoreModel+RenameBinaryPredictionScoreColumnsInput Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput Transforms.BinNormalizer The values are assigned into equidensity bins and a value is mapped to its bin_number/number_of_bins. Microsoft.ML.Runtime.Data.Normalize Bin Microsoft.ML.Transforms.Normalizers.NormalizeTransform+BinArguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput -Transforms.CategoricalHashOneHotVectorizer Converts the categorical value into an indicator array by hashing the value and using the hash as an index in the bag. If the input column is a vector, a single indicator bag is returned for it. Microsoft.ML.Transforms.Categorical.Categorical CatTransformHash Microsoft.ML.Transforms.Categorical.OneHotHashEncodingTransformer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput +Transforms.CategoricalHashOneHotVectorizer Converts the categorical value into an indicator array by hashing the value and using the hash as an index in the bag. If the input column is a vector, a single indicator bag is returned for it. Microsoft.ML.Transforms.Categorical.Categorical CatTransformHash Microsoft.ML.Transforms.Categorical.OneHotHashEncoding+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput Transforms.CategoricalOneHotVectorizer Converts the categorical value into an indicator array by building a dictionary of categories based on the data and using the id in the dictionary as the index in the array. 
Microsoft.ML.Transforms.Categorical.Categorical CatTransformDict Microsoft.ML.Transforms.Categorical.OneHotEncodingTransformer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput Transforms.CharacterTokenizer Character-oriented tokenizer where text is considered a sequence of characters. Microsoft.ML.Transforms.Text.TextAnalytics CharTokenize Microsoft.ML.Transforms.Text.TokenizingByCharactersTransformer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput Transforms.ColumnConcatenator Concatenates one or more columns of the same item type. Microsoft.ML.Runtime.EntryPoints.SchemaManipulation ConcatColumns Microsoft.ML.Runtime.Data.ColumnConcatenatingTransformer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+TransformOutput diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json index c76d71f6bc..e76f410d5e 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json +++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json @@ -22716,7 +22716,7 @@ "Value" ] }, - "Desc": "How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a').", + "Desc": "How items should be ordered when vectorized. By default, they will be in the order encountered. 
If by value, items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a').", "Required": false, "SortOrder": 5.0, "IsNullable": false, diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Data.txt new file mode 100644 index 0000000000..7321ac3a91 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Data.txt @@ -0,0 +1,14 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=A:TX:0 +#@ col=B:TX:1 +#@ col=F1:R4:2-9 +#@ col=F2:R4:10-17 +#@ col=F3:R4:18-25 +#@ col=F4:R4:26-33 +#@ } +A B "{A:annie,A:ant,B:bowled}" "{A:annie|ate,B:bob|bowled}" "{A:ate,B:bob,A:an|angry,B:badly}" "{A:ate|an,A:angry|ant}" "{A:an|ant,B:bakes,B:bakes|brownies}" "{A:an,B:bob|bakes,A:angry}" "" "{B:brownies,B:bowled|badly}" "{A:annie,A:ant,B:bowled}" "{A:annie|ate,B:bob|bowled}" "{B:bob,A:ate,B:badly,A:an|angry}" "{A:ate|an,A:angry|ant}" "{B:bakes,B:bakes|brownies,A:an|ant}" "{B:bob|bakes,A:an,A:angry}" "" "{B:brownies,B:bowled|badly}" "A:annie" "A:annie|ate" "A:ate" "A:ate|an" "A:an|ant" "A:an" "" "B:brownies" "A:annie" "A:annie|ate" "B:bob" "A:ate|an" "B:bakes" "B:bob|bakes" "" "B:brownies" +annie ate an ant bob bakes brownies 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 +an angry ant bob bowled badly 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 +34 0:"" diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Schema.txt new file mode 100644 index 0000000000..cb7ac9b5b6 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Schema.txt @@ -0,0 +1,239 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +9 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + 
_tmp002: Vec + _tmp003: Vec +---- RowToRowMapperTransform ---- +13 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' +---- RowToRowMapperTransform ---- +15 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] 
'{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' +---- SelectColumnsDataTransform ---- +11 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' +---- SelectColumnsDataTransform ---- +7 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] 
'{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' +---- RowToRowMapperTransform ---- +11 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec +---- RowToRowMapperTransform ---- +15 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', 
[867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' +---- RowToRowMapperTransform ---- +17 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + F3: Vec 
+ Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' +---- SelectColumnsDataTransform ---- +13 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' +---- SelectColumnsDataTransform ---- +9 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 
'{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' +---- SelectColumnsDataTransform ---- +6 columns: + A: Text + B: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Data.txt new file mode 100644 index 0000000000..397f58236c --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Data.txt @@ -0,0 +1,15 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab 
+#@ col=A:TX:0 +#@ col=B:TX:1 +#@ col=F1:R4:2-9 +#@ col=F2:R4:10-17 +#@ col=F3:R4:18-25 +#@ col=F4:R4:26-33 +#@ col=F5:R4:34-41 +#@ } +A B "{A:annie,A:ant,B:bowled}" "{A:annie|ate,B:bob|bowled}" "{A:ate,B:bob,A:an|angry,B:badly}" "{A:ate|an,A:angry|ant}" "{A:an|ant,B:bakes,B:bakes|brownies}" "{A:an,B:bob|bakes,A:angry}" "" "{B:brownies,B:bowled|badly}" "{A:annie,A:ant,B:bowled}" "{A:annie|ate,B:bob|bowled}" "{B:bob,A:ate,B:badly,A:an|angry}" "{A:ate|an,A:angry|ant}" "{B:bakes,B:bakes|brownies,A:an|ant}" "{B:bob|bakes,A:an,A:angry}" "" "{B:brownies,B:bowled|badly}" "A:annie" "A:annie|ate" "A:ate" "A:ate|an" "A:an|ant" "A:an" "" "B:brownies" "A:annie" "A:annie|ate" "B:bob" "A:ate|an" "B:bakes" "B:bob|bakes" "" "B:brownies" 8 0:"" +annie ate an ant bob bakes brownies 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 2 1 2 1 3 2 0 1 +an angry ant bob bowled badly 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 2 1 3 1 0 2 0 1 +42 0:"" diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Schema.txt new file mode 100644 index 0000000000..38f82b4400 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Schema.txt @@ -0,0 +1,190 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +15 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + _tmp004: Vec + _tmp005: Vec + _tmp006: Vec + _tmp007: Vec + _tmp008: Vec + _tmp009: Vec +---- RowToRowMapperTransform ---- +25 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + _tmp004: Vec + _tmp005: Vec + _tmp006: Vec + _tmp007: Vec + _tmp008: Vec + _tmp009: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 
'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp004_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp005_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp006_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp007_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp008_000: Vec> + temp__tmp009_000: Vec> +---- RowToRowMapperTransform ---- +30 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + _tmp004: Vec + _tmp005: Vec + _tmp006: Vec + _tmp007: Vec + _tmp008: Vec + _tmp009: Vec + temp__tmp000_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp001_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, 
Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp002_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp003_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp004_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp005_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp006_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [197754023] 'bowled', [594010070] 'brownies', [774818249] 'badly', [867230403] 'bakes', [897860361] 'bob' + temp__tmp007_000: Vec> + Metadata 'KeyValues': Vec: Length=1073741824, Count=1073741824 + [295457023] 'ant', [521979783] 'annie', [800576372] 'an', [832818636] 'angry', [922722793] 'ate' + temp__tmp008_000: Vec> + temp__tmp009_000: Vec> + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': 
Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' + F5: Vec +---- SelectColumnsDataTransform ---- +20 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + _tmp003: Vec + _tmp004: Vec + _tmp005: Vec + _tmp006: Vec + _tmp007: Vec + _tmp008: Vec + _tmp009: Vec + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' + F5: Vec +---- SelectColumnsDataTransform ---- +10 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + 
F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' + F5: Vec +---- SelectColumnsDataTransform ---- +7 columns: + A: Text + B: Text + F1: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{A:ate,B:bob,A:an|angry,B:badly}', [3] '{A:ate|an,A:angry|ant}', [4] '{A:an|ant,B:bakes,B:bakes|brownies}', [5] '{A:an,B:bob|bakes,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F2: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '{A:annie,A:ant,B:bowled}', [1] '{A:annie|ate,B:bob|bowled}', [2] '{B:bob,A:ate,B:badly,A:an|angry}', [3] '{A:ate|an,A:angry|ant}', [4] '{B:bakes,B:bakes|brownies,A:an|ant}', [5] '{B:bob|bakes,A:an,A:angry}', [6] '', [7] '{B:brownies,B:bowled|badly}' + F3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'A:ate', [3] 'A:ate|an', [4] 'A:an|ant', [5] 'A:an', [6] '', [7] 'B:brownies' + F4: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] 'A:annie', [1] 'A:annie|ate', [2] 'B:bob', [3] 'A:ate|an', [4] 'B:bakes', [5] 'B:bob|bakes', [6] '', [7] 'B:brownies' + F5: Vec diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Data.txt new file mode 100644 index 0000000000..2b269068bf --- /dev/null +++ 
b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Data.txt @@ -0,0 +1,11 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=K:U4[3-10]:0-1 +#@ col=KH:R4:2-5 +#@ col=KHU:R4:6-9 +#@ } +"" "" "" "0:10" "{0:5,0:3,1:3}" "1:4" "" "" {5,10} {4,3} +5 4 0 0 1 1 0 0 1 1 +3 3 8 2:2 7:2 +10 8 1:1 6:1 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Schema.txt new file mode 100644 index 0000000000..5c934cf14d --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Schema.txt @@ -0,0 +1,48 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +7 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [0] '', [1] '0:10', [2] '{0:5,0:3,1:3}', [3] '1:4' + KHU: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' +---- RowToRowMapperTransform ---- +9 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [0] '', [1] '0:10', [2] '{0:5,0:3,1:3}', [3] '1:4' + KH: Vec + Metadata 'SlotNames': Vec: Length=4, Count=4 + [0] '', [1] '0:10', [2] '{0:5,0:3,1:3}', [3] '1:4' + KHU: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' + KHU: Vec + Metadata 'SlotNames': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' +---- SelectColumnsDataTransform ---- +3 columns: + K: Vec, 2> + KH: Vec + Metadata 'SlotNames': Vec: Length=4, Count=4 + [0] '', [1] '0:10', [2] '{0:5,0:3,1:3}', [3] '1:4' + KHU: Vec + Metadata 'SlotNames': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Data.txt new file mode 100644 index 0000000000..c637325df1 --- /dev/null +++ 
b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Data.txt @@ -0,0 +1,13 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=A:TX:0 +#@ col=K:U4[3-10]:1-2 +#@ col=KH:U4[0-3]:3-4 +#@ col=N3:R4:5-12 +#@ col=N10:R4:13-1036 +#@ } +A "" "" "" "" "" "" "{AH:{an,angry}|{annie,ant},KH:}" "{AH:ate|{an,angry},KH:{5,10}|{4,3}}" "{KH:{4,3},AH:{an,angry}|{an,angry}}" "{AH:{annie,ant},KH:{4,3}|{4,3}}" "AH:ate" "{AH:{annie,ant}|ate,AH:{an,angry},KH:{5,10},KH:{5,10}|}" 1024 116:"KH:{4,3}" 138:"KH:" 183:"AH:{an,angry}" 210:"AH:{an,angry}|{annie,ant}" 284:"AH:{an,angry}|{an,angry}" 359:"KH:{5,10}" 575:"AH:{annie,ant}|ate" 645:"AH:{annie,ant}" 723:"KH:{5,10}|{4,3}" 774:"AH:ate" 791:"KH:{5,10}|" 819:"AH:ate|{an,angry}" 837:"KH:{4,3}|{4,3}" +annie ate an ant 5 4 2 3 0 0 1 2 1 2 1 3 1024 116:1 183:1 210:1 359:1 575:1 645:2 723:1 774:1 819:1 +an angry ant 3 3 3 3 0 0 1 0 3 2 0 2 1024 116:2 183:2 210:1 284:1 645:1 837:1 +1037 1:10 3:2 7:1 12:2 151:1 372:1 804:1 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Schema.txt new file mode 100644 index 0000000000..5cfe007982 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Schema.txt @@ -0,0 +1,81 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +6 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec +---- RowToRowMapperTransform ---- +8 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec + AH: Vec> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [0] '{an,angry}', [1] 'ate', [2] '', [3] '{annie,ant}' + KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' +---- RowToRowMapperTransform ---- +9 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec + AH: Vec> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [0] '{an,angry}', [1] 'ate', [2] '', [3] '{annie,ant}' 
+ KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' + N3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '', [1] '', [2] '{AH:{an,angry}|{annie,ant},KH:}', [3] '{AH:ate|{an,angry},KH:{5,10}|{4,3}}', [4] '{KH:{4,3},AH:{an,angry}|{an,angry}}', [5] '{AH:{annie,ant},KH:{4,3}|{4,3}}', [6] 'AH:ate', [7] '{AH:{annie,ant}|ate,AH:{an,angry},KH:{5,10},KH:{5,10}|}' +---- RowToRowMapperTransform ---- +10 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec + AH: Vec> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [0] '{an,angry}', [1] 'ate', [2] '', [3] '{annie,ant}' + KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' + N3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '', [1] '', [2] '{AH:{an,angry}|{annie,ant},KH:}', [3] '{AH:ate|{an,angry},KH:{5,10}|{4,3}}', [4] '{KH:{4,3},AH:{an,angry}|{an,angry}}', [5] '{AH:{annie,ant},KH:{4,3}|{4,3}}', [6] 'AH:ate', [7] '{AH:{annie,ant}|ate,AH:{an,angry},KH:{5,10},KH:{5,10}|}' + N10: Vec + Metadata 'SlotNames': Vec: Length=1024, Count=1024 + [116] 'KH:{4,3}', [138] 'KH:', [183] 'AH:{an,angry}', [210] 'AH:{an,angry}|{annie,ant}', [284] 'AH:{an,angry}|{an,angry}', [359] 'KH:{5,10}', [575] 'AH:{annie,ant}|ate', [645] 'AH:{annie,ant}', [723] 'KH:{5,10}|{4,3}', [774] 'AH:ate' + [791] 'KH:{5,10}|', [819] 'AH:ate|{an,angry}', [837] 'KH:{4,3}|{4,3}' +---- SelectColumnsDataTransform ---- +5 columns: + A: Text + K: Vec, 2> + KH: Vec, 2> + Metadata 'KeyValues': Vec: Length=4, Count=4 + [2] '{5,10}', [3] '{4,3}' + N3: Vec + Metadata 'SlotNames': Vec: Length=8, Count=8 + [0] '', [1] '', [2] '{AH:{an,angry}|{annie,ant},KH:}', [3] '{AH:ate|{an,angry},KH:{5,10}|{4,3}}', [4] '{KH:{4,3},AH:{an,angry}|{an,angry}}', [5] '{AH:{annie,ant},KH:{4,3}|{4,3}}', [6] 'AH:ate', [7] '{AH:{annie,ant}|ate,AH:{an,angry},KH:{5,10},KH:{5,10}|}' + N10: Vec + Metadata 'SlotNames': Vec: Length=1024, Count=1024 + [116] 'KH:{4,3}', [138] 'KH:', [183] 
'AH:{an,angry}', [210] 'AH:{an,angry}|{annie,ant}', [284] 'AH:{an,angry}|{an,angry}', [359] 'KH:{5,10}', [575] 'AH:{annie,ant}|ate', [645] 'AH:{annie,ant}', [723] 'KH:{5,10}|{4,3}', [774] 'AH:ate' + [791] 'KH:{5,10}|', [819] 'AH:ate|{an,angry}', [837] 'KH:{4,3}|{4,3}' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Data.txt new file mode 100644 index 0000000000..5f2313d397 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Data.txt @@ -0,0 +1,12 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=A:TX:0 +#@ col=KS:U4[3-10]:1 +#@ col=AH:R4:2-9 +#@ col=KH:R4:10-17 +#@ } +A KS 16 1:an angry ant 4:annie ate an ant 10:3 15:4 +annie ate an ant 4 16 4:1 15:1 +an angry ant 3 16 1:1 10:1 +18 0:"" diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Schema.txt new file mode 100644 index 0000000000..1d0983478b --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Schema.txt @@ -0,0 +1,53 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +7 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AH: Key + Metadata 'KeyValues': Vec: Length=8, Count=8 + [1] 'an angry ant', [4] 'annie ate an ant' + KH: Key + Metadata 'KeyValues': Vec: Length=8, Count=8 + [2] '3', [7] '4' +---- RowToRowMapperTransform ---- +9 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AH: Key + Metadata 'KeyValues': Vec: Length=8, Count=8 + [1] 'an angry ant', [4] 'annie ate an ant' + AH: Vec + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [1] 'an angry ant', [4] 'annie ate an ant' + KH: Key + Metadata 'KeyValues': Vec: Length=8, Count=8 + [2] '3', [7] '4' + KH: Vec + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [2] '3', 
[7] '4' +---- SelectColumnsDataTransform ---- +4 columns: + A: Text + KS: Key + AH: Vec + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [1] 'an angry ant', [4] 'annie ate an ant' + KH: Vec + Metadata 'IsNormalized': Bool: '1' + Metadata 'SlotNames': Vec: Length=8, Count=8 + [2] '3', [7] '4' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Data.txt new file mode 100644 index 0000000000..a5ddd89354 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Data.txt @@ -0,0 +1,9 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=AH:R4:0-15 +#@ } +16 1:521979783|1996464617|800576372 4:1996464617|800576372|1369198847 7:800576372|1906560460|1369198847 +16 1:1 4:1 +16 7:1 +16 0:0 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Schema.txt new file mode 100644 index 0000000000..fe9970765f --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Schema.txt @@ -0,0 +1,41 @@ +---- BoundLoader ---- +5 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text +---- RowToRowMapperTransform ---- +6 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec +---- RowToRowMapperTransform ---- +7 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec + AH: Vec> +---- RowToRowMapperTransform ---- +8 columns: + A: Text + K: Vec, 2> + KS: Key + B: Text + E: Text + AT: Vec + AH: Vec> + AH: Vec + Metadata 'SlotNames': Vec: Length=16, Count=16 + [1] '521979783|1996464617|800576372', [4] '1996464617|800576372|1369198847', [7] '800576372|1906560460|1369198847' +---- SelectColumnsDataTransform ---- +1 columns: + AH: Vec + Metadata 'SlotNames': Vec: Length=16, Count=16 + [1] '521979783|1996464617|800576372', [4] '1996464617|800576372|1369198847', [7] '800576372|1906560460|1369198847' diff --git 
a/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Convert-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Convert-Schema.txt index 857b3b6773..bb63230041 100644 --- a/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Convert-Schema.txt +++ b/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Convert-Schema.txt @@ -2,7 +2,7 @@ 2 columns: CatU8: Vec, 9> CatU2: Vec, 3> ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 4 columns: CatU8: Vec, 9> CatU2: Vec, 3> diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Schema.txt index 215b737a64..f0f06eee11 100644 --- a/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Schema.txt +++ b/test/BaselineOutput/Common/SavePipe/SavePipeNgramHash-Schema.txt @@ -45,7 +45,7 @@ [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' Hash: Vec> HashBig: Vec> ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 8 columns: Label: Text Attrs: Vec @@ -61,7 +61,7 @@ Hash: Vec> HashBig: Vec> NgramHashOne: Vec ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 9 columns: Label: Text Attrs: Vec @@ -78,7 +78,7 @@ HashBig: Vec> NgramHashOne: Vec HashNgram1: Vec ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 11 columns: Label: Text Attrs: Vec @@ -97,7 +97,7 @@ HashNgram1: Vec HashNgram2: Vec HashNgram3: Vec ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 12 columns: Label: Text Attrs: Vec @@ -117,7 +117,7 @@ HashNgram2: Vec HashNgram3: Vec HashNgram4: Vec ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 14 columns: Label: Text Attrs: Vec @@ -139,7 +139,7 @@ HashNgram4: Vec HashNgram5: Vec HashNgram6: Vec ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 16 columns: Label: Text Attrs: Vec diff --git 
a/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Data.txt new file mode 100644 index 0000000000..a9376bbb4e --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Data.txt @@ -0,0 +1,113 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Label:U4[0-6]:0 +#@ col=F21:R4:1-10 +#@ col=F22:R4:11-90 +#@ col=F23:R4:91-120 +#@ col=F11:R4:121-130 +#@ col=F12:R4:131-147 +#@ col=F13:R4:148-290 +#@ } +Label versicherer lediglich air worldwide ein spezialist fuer risikomodelle wagte sich versicherer versicherer|lediglich versicherer|lediglich|air versicherer|lediglich|air|worldwide lediglich lediglich|air lediglich|air|worldwide lediglich|air|worldwide|ein air air|worldwide air|worldwide|ein air|worldwide|ein|spezialist worldwide worldwide|ein worldwide|ein|spezialist worldwide|ein|spezialist|fuer ein ein|spezialist ein|spezialist|fuer ein|spezialist|fuer|risikomodelle spezialist spezialist|fuer spezialist|fuer|risikomodelle spezialist|fuer|risikomodelle|wagte fuer fuer|risikomodelle fuer|risikomodelle|wagte fuer|risikomodelle|wagte|sich risikomodelle risikomodelle|wagte risikomodelle|wagte|sich risikomodelle|wagte|sich|schnell wagte wagte|sich wagte|sich|schnell wagte|sich|schnell|mit sich sich|schnell sich|schnell|mit sich|schnell|mit|einer schnell schnell|mit schnell|mit|einer schnell|mit|einer|ersten mit mit|einer mit|einer|ersten mit|einer|ersten|schaetzung einer einer|ersten einer|ersten|schaetzung einer|ersten|schaetzung|vor ersten ersten|schaetzung ersten|schaetzung|vor schaetzung schaetzung|vor vor neue neue|todesfaelle neue|todesfaelle|durch neue|todesfaelle|durch|ehec todesfaelle todesfaelle|durch todesfaelle|durch|ehec todesfaelle|durch|ehec|* durch durch|ehec durch|ehec|* durch|ehec|*|* ehec ehec|* ehec|*|* ehec|*|*|* *|* *|*|* *|*|*|* *|*|*|ehec *|*|ehec *|*|ehec|* versicherer versicherer|lediglich versicherer|lediglich|air versicherer|lediglich|worldwide versicherer|lediglich|ein 
versicherer|air versicherer|air|worldwide versicherer|air|ein versicherer|worldwide versicherer|worldwide|ein lediglich lediglich|air lediglich|air|worldwide lediglich|air|ein lediglich|air|spezialist lediglich|worldwide lediglich|worldwide|ein lediglich|ein air air|worldwide air|ein air|spezialist worldwide worldwide|ein ein spezialist fuer risikomodelle wagte sich lediglich air worldwide ein spezialist fuer risikomodelle wagte sich schnell lediglich lediglich|air lediglich|air|worldwide lediglich|air|worldwide|* air air|worldwide air|worldwide|* air|worldwide|*|* worldwide worldwide|* worldwide|*|* worldwide|*|*|* *|* *|*|* *|*|*|* *|*|*|air *|*|air lediglich lediglich|air lediglich|air|worldwide lediglich|air|ein lediglich|air|spezialist lediglich|worldwide lediglich|worldwide|ein lediglich|worldwide|spezialist lediglich|ein lediglich|ein|spezialist air air|worldwide air|worldwide|ein air|worldwide|spezialist air|worldwide|fuer air|ein air|ein|spezialist air|ein|fuer air|spezialist air|spezialist|fuer worldwide worldwide|ein worldwide|ein|spezialist worldwide|ein|fuer worldwide|ein|risikomodelle worldwide|spezialist worldwide|spezialist|fuer worldwide|spezialist|risikomodelle worldwide|fuer worldwide|fuer|risikomodelle ein ein|spezialist ein|spezialist|fuer ein|spezialist|risikomodelle ein|spezialist|wagte ein|fuer ein|fuer|risikomodelle ein|fuer|wagte ein|risikomodelle ein|risikomodelle|wagte spezialist spezialist|fuer spezialist|fuer|risikomodelle spezialist|fuer|wagte spezialist|fuer|sich spezialist|risikomodelle spezialist|risikomodelle|wagte spezialist|risikomodelle|sich spezialist|wagte spezialist|wagte|sich fuer fuer|risikomodelle fuer|risikomodelle|wagte fuer|risikomodelle|sich fuer|risikomodelle|schnell fuer|wagte fuer|wagte|sich fuer|wagte|schnell fuer|sich fuer|sich|schnell risikomodelle risikomodelle|wagte risikomodelle|wagte|sich risikomodelle|wagte|schnell risikomodelle|wagte|* risikomodelle|sich risikomodelle|sich|schnell risikomodelle|sich|* 
risikomodelle|schnell risikomodelle|schnell|* wagte wagte|sich wagte|sich|schnell wagte|sich|* wagte|schnell wagte|schnell|* wagte|* wagte|*|* sich sich|schnell sich|schnell|* sich|* sich|*|* schnell schnell|* schnell|*|* * *|* *|*|* *|*|ein *|ein *|ein|* ein|* ein|*|* *|*|sich *|sich *|sich|* *|*|fuer *|fuer *|fuer|* fuer|* fuer|*|* *|fuer|fuer fuer|*|fuer fuer|fuer fuer|fuer|* *|fuer|ein fuer|ein fuer|ein|* *|*|air *|air *|air|* air|* air|*|* fuer|*|ein *|ein|fuer ein|*|fuer ein|fuer|* *|*|wagte *|wagte *|wagte|* *|sich|ein sich|ein sich|ein|* *|ein|ein ein|*|ein ein|ein ein|ein|* *|*|schnell *|schnell *|schnell|* sich|*|ein *|*|lediglich *|lediglich *|lediglich|* lediglich|* lediglich|*|* sich|*|fuer *|sich|schnell sich|*|schnell *|sich|fuer sich|fuer sich|fuer|* +0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 10 9 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 2 1 1 1 1 3 2 3 1 3 6 5 9 10 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 290 4:2 6:1 9:1 26:2 34:1 46:1 54:1 58:2 68:2 69:1 70:1 71:1 72:2 73:2 74:1 75:1 76:2 77:1 78:1 79:1 80:6 81:6 82:6 83:5 84:157 85:143 86:131 87:5 88:5 89:5 114:2 116:1 119:1 123:2 125:1 128:1 142:186 143:185 144:184 177:2 197:1 225:1 228:3 229:6 233:183 234:531 235:1030 236:12 237:6 238:12 239:6 240:12 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +0 290 4:1 26:1 84:33 85:31 86:29 114:1 123:1 142:32 143:31 144:30 177:1 233:32 234:87 235:160 236:6 237:3 238:6 239:3 240:6 +0 290 6:1 34:1 84:11 85:9 86:7 116:1 125:1 142:10 143:9 144:8 197:1 233:10 234:22 235:34 244:1 245:2 246:5 247:3 248:6 +0 290 4:4 6:3 9:6 26:4 34:3 46:6 
54:3 58:2 67:2 84:448 85:427 86:406 114:4 116:3 119:6 123:4 125:3 128:6 142:485 143:484 144:483 177:4 197:3 225:6 228:18 229:36 233:473 234:1374 235:2662 236:24 237:12 238:24 239:12 240:24 241:36 242:18 243:36 244:18 245:9 246:18 247:9 248:18 +0 290 4:2 6:9 26:2 34:9 54:3 58:1 62:1 68:2 84:361 85:343 86:328 114:2 116:9 123:2 125:8 142:392 143:391 144:390 177:2 197:8 233:383 234:1115 235:2168 236:9 237:5 238:9 239:6 240:12 244:45 245:23 246:43 247:22 248:42 249:1 250:2 251:1 252:1 253:3 254:1 255:3 +0 290 2:1 4:2 6:1 9:3 18:1 26:2 34:1 46:3 54:3 62:1 84:329 85:317 86:305 108:1 114:2 116:1 119:3 121:1 123:2 125:1 128:3 134:1 142:345 143:343 144:341 145:1 146:1 157:1 177:2 197:1 225:3 228:9 229:18 233:341 234:996 235:1942 236:12 237:6 238:12 239:6 240:12 241:18 242:9 243:18 244:6 245:3 246:6 247:3 248:6 256:6 257:3 258:6 259:3 260:6 +1 290 6:1 9:1 34:1 46:1 54:1 67:1 84:69 85:64 86:59 116:1 119:1 125:1 128:1 142:68 143:67 144:66 197:1 225:1 228:3 229:6 233:67 234:189 235:358 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +1 290 84:17 85:16 86:15 142:9 143:8 144:7 233:10 234:24 235:40 +1 290 6:2 9:2 34:2 46:2 54:3 55:1 58:3 84:188 85:179 86:171 116:2 119:2 125:2 128:2 142:200 143:199 144:198 197:2 225:2 228:6 229:12 233:197 234:573 235:1114 241:12 242:6 243:12 244:12 245:6 246:12 247:6 248:12 +1 290 84:37 85:36 86:35 142:34 143:33 144:32 233:35 234:99 235:190 +1 290 4:4 6:3 9:1 26:4 34:3 46:1 54:1 68:1 84:250 85:239 86:230 114:4 116:3 119:1 123:4 125:3 128:1 142:262 143:261 144:260 177:4 182:2 197:3 225:1 228:3 229:6 233:255 234:737 235:1425 236:21 237:12 238:22 239:10 240:18 241:6 242:3 243:6 244:12 245:7 246:16 247:9 248:15 261:3 262:2 263:4 264:2 +1 290 4:1 26:1 84:11 85:9 86:7 114:1 123:1 142:6 143:5 144:4 177:1 233:6 234:12 235:16 239:3 240:6 +1 290 84:16 85:15 86:14 142:7 143:6 144:5 233:8 234:18 235:28 +1 290 9:1 46:1 54:1 84:45 85:43 86:41 119:1 128:1 142:40 143:39 144:38 225:1 228:3 229:6 233:40 234:111 235:208 241:6 242:3 243:6 +2 290 4:2 6:3 9:5 26:2 34:3 
46:5 54:2 58:1 76:4 84:408 85:390 86:372 114:2 116:3 119:5 123:2 125:3 128:5 142:440 143:439 144:438 177:2 197:3 225:5 228:15 229:30 233:431 234:1257 235:2449 236:12 237:6 238:12 239:6 240:9 241:30 242:15 243:30 244:15 245:9 246:18 247:9 248:18 263:3 +2 290 4:4 6:4 9:3 26:4 34:4 46:3 54:4 58:4 62:1 67:1 76:6 84:533 85:506 86:480 114:4 116:4 119:3 123:4 125:4 128:3 142:580 143:579 144:578 177:4 197:4 225:3 228:9 229:18 233:570 234:1671 235:3268 236:24 237:12 238:24 239:12 240:24 241:18 242:9 243:18 244:24 245:12 246:24 247:12 248:24 +3 290 4:1 6:2 26:1 34:2 54:1 67:1 84:127 85:122 86:117 114:1 116:2 123:1 125:2 142:136 143:135 144:134 177:1 197:2 233:134 234:387 235:748 236:6 237:3 238:6 239:3 240:6 244:12 245:6 246:12 247:6 248:12 +3 290 6:1 9:1 34:1 46:1 67:1 84:72 85:69 86:66 116:1 119:1 125:1 128:1 142:76 143:75 144:74 197:1 225:1 228:3 229:6 233:75 234:213 235:406 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +4 290 4:2 6:3 9:2 26:2 34:3 46:2 54:3 67:1 68:1 84:251 85:238 86:225 114:2 116:3 119:2 123:2 125:3 128:2 142:273 143:272 144:271 177:2 197:3 225:2 228:6 229:12 233:267 234:774 235:1504 236:12 237:6 238:12 239:6 240:6 241:12 242:6 243:12 244:12 245:9 246:18 247:9 248:18 263:6 +4 290 9:1 46:1 58:1 62:1 84:24 85:20 86:16 119:1 128:1 142:28 143:27 144:26 225:1 228:3 229:6 233:28 234:75 235:139 241:3 242:3 243:6 +4 290 4:1 6:4 8:1 9:1 26:1 34:4 42:1 46:1 54:6 58:2 67:4 76:1 84:344 85:324 86:305 114:1 116:4 118:1 119:1 123:1 125:4 127:1 128:1 142:382 143:381 144:380 177:1 197:4 217:1 223:3 224:6 225:1 228:3 229:6 233:376 234:1101 235:2152 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 244:24 245:12 246:24 247:12 248:24 265:6 266:3 267:6 +4 290 67:2 84:103 85:100 86:97 142:105 143:104 144:103 233:106 234:312 235:616 +4 290 6:1 9:1 34:1 46:1 58:1 62:1 84:27 85:22 86:18 116:1 119:1 128:1 142:28 143:27 144:26 225:1 228:3 229:6 233:28 234:75 235:139 241:3 242:3 243:6 +5 290 4:2 6:4 9:3 26:2 34:4 46:3 54:4 58:2 59:1 62:2 68:1 84:423 85:406 86:389 114:2 116:4 119:3 
123:2 125:3 128:3 142:454 143:453 144:452 177:2 197:3 225:3 228:9 229:18 233:447 234:1311 235:2566 236:12 237:6 238:12 239:6 240:12 241:18 242:9 243:18 244:18 245:9 246:18 247:9 248:18 +5 290 54:1 67:1 84:56 85:53 86:50 142:56 143:55 144:54 233:57 234:165 235:322 +5 290 4:1 26:1 76:1 84:16 85:14 86:13 114:1 123:1 142:16 143:15 144:14 177:1 233:16 234:40 235:70 236:6 237:3 238:5 239:2 240:1 +5 290 6:1 9:1 34:1 46:1 54:4 55:1 58:1 67:1 68:1 84:126 85:117 86:108 116:1 119:1 125:1 128:1 142:141 143:140 144:139 197:1 225:1 228:3 229:6 233:140 234:408 235:796 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +5 290 6:1 34:1 62:1 84:80 85:77 86:74 116:1 125:1 142:81 143:80 144:79 197:1 233:81 234:234 235:454 244:6 245:3 246:6 247:3 248:6 +5 290 4:1 6:3 9:1 26:1 34:3 46:1 54:1 58:2 84:157 85:149 86:141 114:1 116:3 119:1 123:1 125:3 128:1 142:169 143:168 144:167 177:1 197:3 225:1 228:3 229:6 233:165 234:477 235:922 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 244:12 245:6 246:12 247:9 248:18 +5 290 9:1 46:1 54:1 67:1 84:79 85:75 86:71 119:1 128:1 142:80 143:79 144:78 225:1 228:3 229:6 233:80 234:231 235:451 241:3 242:3 243:6 +5 290 4:1 26:1 54:1 67:1 84:19 85:15 86:13 114:1 123:1 142:23 143:22 144:21 177:1 233:23 234:63 235:118 239:3 240:6 +5 290 58:2 76:1 84:154 85:151 86:148 142:159 143:158 144:157 233:160 234:474 235:940 +5 290 6:2 9:1 34:2 46:1 84:127 85:123 86:119 116:2 119:1 125:2 128:1 142:125 143:124 144:123 197:2 225:1 228:3 229:6 233:123 234:354 235:682 241:6 242:3 243:6 244:12 245:6 246:12 247:6 248:12 +6 290 54:1 84:10 85:9 86:8 142:9 143:8 144:7 233:10 234:24 235:40 +6 290 84:5 85:4 86:3 142:4 143:3 144:2 233:5 234:9 235:10 +6 290 4:1 9:1 26:1 46:1 84:2 85:1 114:1 119:1 233:1 +6 290 84:10 85:9 86:8 142:6 143:5 144:4 233:7 234:15 235:22 +6 290 67:1 84:10 85:8 86:6 142:10 143:9 144:8 233:11 234:27 235:46 +6 290 4:1 26:1 84:60 85:58 86:56 114:1 123:1 142:60 143:59 144:58 177:1 233:60 234:171 235:328 236:6 237:3 238:6 239:3 240:6 +6 290 62:1 84:17 85:15 86:13 
142:15 143:14 144:13 233:16 234:42 235:76 +6 290 67:2 84:16 85:13 86:11 142:14 143:13 144:12 233:15 234:39 235:70 +6 290 6:2 9:1 34:2 46:1 84:53 85:49 86:45 116:2 119:1 125:2 142:45 143:44 144:43 197:2 233:44 234:120 235:220 244:12 245:6 246:12 247:6 248:12 +6 290 4:4 9:3 26:4 46:3 54:1 67:2 84:311 85:301 86:291 114:4 119:3 123:4 128:3 142:325 143:324 144:323 177:4 225:3 228:8 229:15 233:319 234:931 235:1816 236:21 237:11 238:21 239:12 240:24 241:18 242:9 243:15 268:3 269:1 270:3 +6 290 84:20 85:19 86:18 142:16 143:15 144:14 233:17 234:45 235:82 +6 290 4:5 6:2 9:2 26:5 34:2 46:2 54:5 76:1 84:194 85:180 86:167 114:5 116:2 119:2 123:4 125:2 128:2 142:219 143:218 144:217 177:4 197:2 225:2 228:6 229:12 233:212 234:607 235:1161 236:21 237:11 238:20 239:11 240:21 241:12 242:6 243:12 244:12 245:6 246:12 247:6 248:12 271:2 272:1 273:1 274:2 +6 290 4:1 6:1 26:1 34:1 76:2 84:195 85:190 86:185 114:1 116:1 123:1 125:1 142:195 143:194 144:193 177:1 197:1 233:194 234:570 235:1123 236:3 237:3 238:6 239:3 240:6 244:6 245:3 246:6 247:3 248:6 +6 290 4:2 6:2 26:2 34:2 50:1 84:45 85:40 86:35 114:2 116:2 123:2 125:2 129:1 142:52 143:51 144:50 177:2 182:1 197:2 230:1 231:3 232:6 233:48 234:128 235:229 236:6 237:3 238:6 239:5 240:9 244:6 245:4 246:9 247:6 248:12 263:1 264:2 275:6 276:3 277:6 +6 290 6:1 34:1 84:11 85:9 86:7 116:1 125:1 142:10 143:9 144:8 197:1 233:10 234:21 235:31 244:6 245:3 246:6 247:3 248:3 +6 290 4:6 6:2 9:2 26:6 34:2 46:2 54:2 58:1 84:250 85:237 86:224 114:6 116:2 119:2 123:6 125:2 128:2 142:275 143:274 144:273 177:6 182:1 197:2 225:2 228:6 229:9 233:266 234:763 235:1464 236:33 237:18 238:34 239:17 240:33 241:12 242:6 243:12 244:9 245:5 246:10 247:6 248:12 262:2 263:1 264:2 278:3 +6 290 4:2 6:4 9:1 26:2 34:4 46:1 54:2 67:1 76:2 84:219 85:206 86:193 114:2 116:4 119:1 123:2 125:4 128:1 142:242 143:241 144:240 177:2 197:4 225:1 228:3 229:6 233:236 234:681 235:1312 236:12 237:6 238:12 239:6 240:12 241:6 242:3 243:6 244:24 245:12 246:24 247:12 248:24 +6 290 4:1 26:1 68:1 
84:47 85:44 86:41 114:1 123:1 142:48 143:47 144:46 177:1 233:48 234:135 235:259 236:6 237:3 238:6 239:3 240:3 +6 290 4:1 6:1 9:1 26:1 34:1 46:1 54:2 62:2 72:4 84:281 85:269 86:257 114:1 116:1 119:1 123:1 125:1 128:1 142:301 143:300 144:299 177:1 197:1 225:1 228:3 229:6 233:299 234:882 235:1738 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +6 290 6:1 34:1 84:25 85:23 86:21 116:1 125:1 142:26 143:25 144:24 197:1 233:26 234:69 235:124 244:6 245:3 246:6 247:3 248:6 +6 290 84:25 85:24 86:23 142:20 143:19 144:18 233:21 234:57 235:106 +6 290 84:6 85:5 86:4 142:3 143:2 144:1 233:4 234:6 235:4 +6 290 1:1 4:1 6:3 9:1 14:1 26:1 34:3 46:1 54:5 58:1 62:1 67:1 84:331 85:316 86:303 100:1 114:1 116:3 119:1 120:1 123:1 125:3 128:1 130:1 142:351 143:349 144:347 147:1 177:1 197:3 225:1 228:3 229:6 233:348 234:1023 235:2008 239:3 240:6 241:6 242:3 243:6 244:18 245:9 246:18 247:9 248:18 279:6 280:3 281:6 282:3 283:6 +1 290 4:2 6:1 26:2 34:1 50:1 54:2 58:1 76:2 84:134 85:125 86:117 114:2 116:1 123:2 125:1 129:1 142:144 143:143 144:142 177:2 197:1 230:1 231:3 232:6 233:141 234:405 235:778 236:12 237:6 238:12 239:6 240:12 244:6 245:3 246:6 247:3 248:6 275:6 276:3 277:6 +1 290 84:25 85:24 86:23 142:15 143:14 144:13 233:16 234:42 235:76 +3 290 4:7 6:3 9:3 26:7 34:3 46:3 50:1 54:4 67:4 76:1 84:535 85:512 86:492 114:7 116:3 119:3 123:7 125:3 128:3 129:1 142:573 143:572 144:571 177:7 197:3 225:3 228:9 229:18 230:1 231:3 232:6 233:560 234:1633 235:3178 236:39 237:20 238:39 239:21 240:42 241:18 242:9 243:18 244:18 245:9 246:15 247:8 248:15 253:3 254:1 255:3 275:6 276:3 277:6 +3 290 4:1 9:1 26:1 46:1 58:1 84:113 85:109 86:105 114:1 119:1 123:1 128:1 142:112 143:111 144:110 177:1 225:1 228:3 229:6 233:111 234:321 235:622 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 +3 290 4:2 6:1 9:5 26:2 34:1 46:5 54:2 58:1 67:2 76:1 84:478 85:463 86:448 114:2 116:1 119:5 123:2 125:1 128:5 142:499 143:498 144:497 177:2 197:1 225:5 228:15 229:30 233:492 234:1446 235:2836 236:12 
237:6 238:12 239:6 240:12 241:30 242:15 243:30 244:6 245:3 246:6 247:3 248:6 +3 290 4:4 6:2 9:3 26:4 34:2 46:3 50:1 54:3 58:1 67:1 84:387 85:371 86:356 114:4 116:2 119:3 123:4 125:2 128:3 129:1 142:409 143:408 144:407 177:4 197:2 225:3 228:9 229:15 230:1 231:3 232:6 233:400 234:1166 235:2273 236:21 237:11 238:22 239:11 240:21 241:13 242:8 243:17 244:9 245:6 246:12 247:6 248:12 271:1 272:2 273:1 274:1 275:6 276:3 277:6 284:3 +3 290 4:1 9:1 26:1 46:1 58:1 84:96 85:93 86:90 114:1 119:1 123:1 128:1 142:94 143:93 144:92 177:1 225:1 228:3 229:6 233:93 234:270 235:526 239:3 240:6 241:6 242:3 243:6 +3 290 4:1 6:3 9:1 26:1 34:3 46:1 54:2 84:210 85:202 86:196 114:1 116:3 119:1 123:1 125:2 128:1 142:219 143:218 144:217 177:1 197:2 225:1 228:3 229:6 233:216 234:633 235:1240 239:3 240:6 241:6 242:3 243:6 244:12 245:6 246:12 247:6 248:12 +3 290 6:1 34:1 68:1 84:14 85:11 86:8 116:1 142:7 143:6 144:5 233:8 234:18 235:28 +3 290 6:1 34:1 54:1 67:1 84:32 85:28 86:24 116:1 142:32 143:31 144:30 233:33 234:93 235:178 +4 290 9:1 46:1 54:1 58:1 62:1 84:26 85:21 86:16 119:1 128:1 142:28 143:27 144:26 225:1 228:3 229:6 233:28 234:75 235:139 241:3 242:3 243:6 +4 290 4:2 9:2 26:2 46:2 54:1 58:2 62:1 84:119 85:111 86:103 114:2 119:2 123:2 128:2 142:133 143:132 144:131 177:2 225:2 228:6 229:12 233:130 234:372 235:712 236:12 237:6 238:12 239:6 240:12 241:12 242:6 243:12 +4 290 6:1 9:1 34:1 46:1 54:2 84:188 85:183 86:178 116:1 119:1 125:1 128:1 142:187 143:186 144:185 197:1 225:1 228:3 229:6 233:186 234:546 235:1072 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +5 290 9:1 46:1 54:1 84:21 85:20 86:19 119:1 128:1 142:20 143:19 144:18 225:1 228:2 229:1 233:20 234:52 235:94 241:6 242:3 243:5 +5 290 9:1 46:1 54:4 84:55 85:50 86:45 119:1 128:1 142:56 143:55 144:54 225:1 228:3 229:6 233:56 234:159 235:304 241:6 242:3 243:6 +6 290 54:2 84:67 85:64 86:61 142:70 143:69 144:68 233:71 234:207 235:406 +6 290 4:1 9:1 26:1 46:1 54:1 55:1 58:2 84:56 85:51 86:46 114:1 119:1 123:1 128:1 142:63 143:62 144:61 
177:1 225:1 228:3 229:6 233:62 234:174 235:328 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 +6 290 58:1 84:26 85:24 86:22 142:27 143:26 144:25 233:28 234:78 235:148 +6 290 4:1 26:1 54:1 58:2 67:1 68:1 76:2 84:177 85:168 86:159 114:1 123:1 142:190 143:189 144:188 177:1 233:190 234:561 235:1108 236:6 237:3 238:6 239:3 240:6 +6 290 4:1 6:1 26:1 34:1 84:28 85:25 86:22 114:1 116:1 123:1 125:1 142:27 143:26 144:25 177:1 197:1 233:26 234:69 235:124 239:3 240:6 244:6 245:3 246:6 247:3 248:6 +0 290 4:3 26:3 54:2 58:1 84:248 85:241 86:234 114:3 123:3 142:249 143:248 144:247 177:3 233:247 234:726 235:1426 236:18 237:9 238:18 239:9 240:18 +1 290 6:2 9:2 34:2 46:2 54:1 67:2 76:1 84:114 85:105 86:98 116:2 119:2 125:2 128:2 142:124 143:123 144:122 197:2 225:2 228:6 229:12 233:121 234:345 235:661 241:12 242:6 243:12 244:12 245:6 246:12 247:6 248:9 +0 290 6:1 34:1 54:1 55:1 58:1 84:14 85:12 86:10 116:1 125:1 142:18 143:17 144:16 197:1 233:18 234:45 235:79 244:3 245:3 246:6 247:3 248:6 +2 290 4:1 6:2 9:1 26:1 34:2 46:1 54:7 67:1 84:235 85:222 86:209 114:1 116:2 119:1 123:1 125:2 128:1 142:251 143:250 144:249 177:1 197:2 225:1 228:3 229:6 233:248 234:726 235:1420 236:6 237:3 238:6 239:3 240:6 241:6 242:3 243:6 244:12 245:6 246:12 247:6 248:12 +0 290 4:2 6:5 9:1 26:2 34:5 46:1 54:3 67:1 76:1 84:204 85:191 86:178 114:2 116:5 119:1 123:2 125:5 128:1 142:226 143:225 144:224 177:2 197:5 225:1 228:3 229:6 233:219 234:628 235:1204 236:9 237:5 238:9 239:6 240:12 241:6 242:3 243:6 244:30 245:15 246:27 247:14 248:27 253:3 254:1 255:3 +0 290 6:2 9:1 34:2 46:1 54:4 55:1 58:2 67:1 84:174 85:165 86:156 116:2 119:1 125:2 128:1 142:189 143:188 144:187 197:2 225:1 228:3 229:6 233:187 234:546 235:1066 241:6 242:3 243:6 244:12 245:6 246:12 247:6 248:12 +0 290 84:8 85:7 86:6 142:7 143:6 144:5 233:8 234:18 235:28 +0 290 4:1 9:1 26:1 46:1 50:1 67:2 68:1 84:82 85:76 86:70 114:1 119:1 123:1 128:1 129:1 142:88 143:87 144:86 177:1 225:1 226:1 227:2 228:2 229:3 230:1 231:3 232:6 233:86 234:245 235:471 236:1 
237:2 238:5 239:3 240:6 241:6 242:3 243:4 275:3 276:2 277:4 285:2 286:1 +0 290 4:2 6:1 26:2 34:1 84:25 85:22 86:20 114:2 116:1 123:2 125:1 142:29 143:28 144:27 177:2 182:1 197:1 233:27 234:68 235:117 236:12 237:6 238:9 239:4 240:4 244:3 245:2 246:4 247:3 248:6 262:2 263:1 264:2 +4 290 4:1 9:1 26:1 46:1 58:1 62:1 84:27 85:22 86:17 114:1 119:1 128:1 142:28 143:27 144:26 225:1 228:3 229:6 233:28 234:75 235:139 241:3 242:3 243:6 +2 290 58:1 84:47 85:45 86:43 142:44 143:43 144:42 233:45 234:129 235:250 +1 290 6:1 9:1 34:1 46:1 84:68 85:65 86:62 116:1 119:1 125:1 128:1 142:70 143:69 144:68 197:1 225:1 228:3 229:6 233:69 234:195 235:370 241:6 242:3 243:6 244:6 245:3 246:6 247:3 248:6 +1 290 4:3 6:2 9:6 26:3 34:2 46:6 54:7 67:1 68:1 76:1 84:365 85:344 86:323 114:3 116:2 119:6 123:3 125:2 128:6 142:401 143:400 144:399 177:3 197:2 225:6 228:17 229:33 233:391 234:1135 235:2200 236:18 237:9 238:18 239:9 240:18 241:36 242:18 243:33 244:9 245:5 246:9 247:6 248:12 287:3 288:1 289:3 +2 290 4:1 6:2 26:1 34:2 54:3 55:1 58:2 67:1 84:113 85:104 86:96 114:1 116:2 123:1 125:2 142:129 143:128 144:127 177:1 197:2 233:127 234:366 235:706 236:6 237:3 238:6 239:3 240:6 244:12 245:6 246:12 247:6 248:12 +0 290 9:7 46:7 54:5 58:1 67:2 84:307 85:294 86:281 119:7 128:7 142:330 143:329 144:328 225:7 228:21 229:42 233:324 234:945 235:1840 241:42 242:21 243:42 +4 290 9:1 46:1 58:1 62:1 84:24 85:20 86:16 119:1 128:1 142:28 143:27 144:26 225:1 228:3 229:6 233:28 234:75 235:139 241:3 242:3 243:6 +0 290 4:1 6:5 9:4 26:1 34:5 46:4 54:4 67:1 68:1 84:280 85:266 86:253 114:1 116:5 119:4 123:1 125:5 128:4 142:310 143:309 144:308 177:1 197:5 225:4 228:12 229:24 233:301 234:868 235:1673 236:3 237:3 238:6 239:3 240:6 241:24 242:12 243:24 244:27 245:14 246:28 247:14 248:24 249:1 250:2 251:1 252:1 261:3 +0 290 6:1 9:1 34:1 46:1 67:1 84:12 85:8 86:5 116:1 119:1 125:1 142:8 143:7 144:6 197:1 233:8 234:16 235:22 244:1 245:2 246:5 247:3 248:6 +1 290 6:1 34:1 84:8 85:6 86:4 116:1 142:3 143:2 144:1 233:4 234:6 235:4 +2 
290 4:1 6:3 9:1 26:1 34:3 46:1 54:2 58:1 67:2 84:354 85:345 86:337 114:1 116:3 119:1 123:1 125:3 128:1 142:369 143:368 144:367 177:1 182:1 197:3 225:1 228:3 229:6 233:365 234:1075 235:2115 236:6 237:3 238:4 239:2 240:3 241:6 242:3 243:6 244:15 245:8 246:16 247:9 248:18 262:2 263:1 264:2 +1 290 6:2 9:1 34:2 46:1 54:1 76:1 84:53 85:47 86:41 116:2 119:1 125:2 128:1 142:55 143:54 144:53 197:2 225:1 228:3 229:6 233:53 234:144 235:262 241:6 242:3 243:6 244:12 245:6 246:12 247:6 248:12 +0 290 6:1 34:1 84:11 85:9 86:7 116:1 125:1 142:10 143:9 144:8 197:1 233:10 234:21 235:28 244:6 245:3 246:6 247:3 248:6 +0 290 84:6 85:5 86:4 142:5 143:4 144:3 233:6 234:12 235:16 +2 290 4:2 6:5 9:7 26:2 34:5 46:7 54:4 58:1 62:1 67:2 68:1 84:486 85:462 86:438 114:2 116:5 119:7 123:2 125:5 128:7 142:531 143:530 144:529 177:2 197:5 225:7 228:21 229:42 233:518 234:1506 235:2923 236:12 237:6 238:12 239:6 240:12 241:42 242:21 243:42 244:27 245:15 246:30 247:15 248:27 250:3 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Schema.txt new file mode 100644 index 0000000000..665d9a7119 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Schema.txt @@ -0,0 +1,396 @@ +---- BoundLoader ---- +3 columns: + Label: Text + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' +---- RowToRowMapperTransform ---- +4 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 
'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' +---- RowToRowMapperTransform ---- +9 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F12: Vec + F13: Vec + F21: Vec + F22: Vec +---- RowToRowMapperTransform ---- +14 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das 
griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F11: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + F12: Vec> + Metadata 'KeyValues': Vec: Length=3, Count=3 + [0] 'lediglich', [1] 'air', [2] 'worldwide' + F13: Vec + F13: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F21: Vec + F21: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + F22: Vec> + Metadata 'KeyValues': Vec: Length=20, Count=20 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + [10] 'schnell', [11] 'mit', [12] 'einer', [13] 'ersten', [14] 'schaetzung', [15] 'vor', [16] 'neue', [17] 'todesfaelle', [18] 'durch', [19] 'ehec' +---- RowToRowMapperTransform ---- +19 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F11: Vec> + 
Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F11: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + F12: Vec> + Metadata 'KeyValues': Vec: Length=3, Count=3 + [0] 'lediglich', [1] 'air', [2] 'worldwide' + F12: Vec + Metadata 'SlotNames': Vec: Length=17, Count=17 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|worldwide|*', [4] 'air', [5] 'air|worldwide', [6] 'air|worldwide|*', [7] 'air|worldwide|*|*', [8] 'worldwide', [9] 'worldwide|*' + [10] 'worldwide|*|*', [11] 'worldwide|*|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|*|*', [15] '*|*|*|air', [16] '*|*|air' + F13: Vec + F13: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F13: Vec + Metadata 'SlotNames': Vec: Length=143, Count=143 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|ein|fuer', [18] 'air|spezialist', [19] 'air|spezialist|fuer' + [20] 'worldwide', [21] 'worldwide|ein', [22] 'worldwide|ein|spezialist', [23] 'worldwide|ein|fuer', [24] 'worldwide|ein|risikomodelle', [25] 'worldwide|spezialist', [26] 'worldwide|spezialist|fuer', [27] 'worldwide|spezialist|risikomodelle', [28] 'worldwide|fuer', [29] 
'worldwide|fuer|risikomodelle' + [30] 'ein', [31] 'ein|spezialist', [32] 'ein|spezialist|fuer', [33] 'ein|spezialist|risikomodelle', [34] 'ein|spezialist|wagte', [35] 'ein|fuer', [36] 'ein|fuer|risikomodelle', [37] 'ein|fuer|wagte', [38] 'ein|risikomodelle', [39] 'ein|risikomodelle|wagte' + [40] 'spezialist', [41] 'spezialist|fuer', [42] 'spezialist|fuer|risikomodelle', [43] 'spezialist|fuer|wagte', [44] 'spezialist|fuer|sich', [45] 'spezialist|risikomodelle', [46] 'spezialist|risikomodelle|wagte', [47] 'spezialist|risikomodelle|sich', [48] 'spezialist|wagte', [49] 'spezialist|wagte|sich' + [50] 'fuer', [51] 'fuer|risikomodelle', [52] 'fuer|risikomodelle|wagte', [53] 'fuer|risikomodelle|sich', [54] 'fuer|risikomodelle|schnell', [55] 'fuer|wagte', [56] 'fuer|wagte|sich', [57] 'fuer|wagte|schnell', [58] 'fuer|sich', [59] 'fuer|sich|schnell' + [60] 'risikomodelle', [61] 'risikomodelle|wagte', [62] 'risikomodelle|wagte|sich', [63] 'risikomodelle|wagte|schnell', [64] 'risikomodelle|wagte|*', [65] 'risikomodelle|sich', [66] 'risikomodelle|sich|schnell', [67] 'risikomodelle|sich|*', [68] 'risikomodelle|schnell', [69] 'risikomodelle|schnell|*' + [70] 'wagte', [71] 'wagte|sich', [72] 'wagte|sich|schnell', [73] 'wagte|sich|*', [74] 'wagte|schnell', [75] 'wagte|schnell|*', [76] 'wagte|*', [77] 'wagte|*|*', [78] 'sich', [79] 'sich|schnell' + [80] 'sich|schnell|*', [81] 'sich|*', [82] 'sich|*|*', [83] 'schnell', [84] 'schnell|*', [85] 'schnell|*|*', [86] '*', [87] '*|*', [88] '*|*|*', [89] '*|*|ein' + [90] '*|ein', [91] '*|ein|*', [92] 'ein|*', [93] 'ein|*|*', [94] '*|*|sich', [95] '*|sich', [96] '*|sich|*', [97] '*|*|fuer', [98] '*|fuer', [99] '*|fuer|*' + [100] 'fuer|*', [101] 'fuer|*|*', [102] '*|fuer|fuer', [103] 'fuer|*|fuer', [104] 'fuer|fuer', [105] 'fuer|fuer|*', [106] '*|fuer|ein', [107] 'fuer|ein', [108] 'fuer|ein|*', [109] '*|*|air' + [110] '*|air', [111] '*|air|*', [112] 'air|*', [113] 'air|*|*', [114] 'fuer|*|ein', [115] '*|ein|fuer', [116] 'ein|*|fuer', [117] 
'ein|fuer|*', [118] '*|*|wagte', [119] '*|wagte' + [120] '*|wagte|*', [121] '*|sich|ein', [122] 'sich|ein', [123] 'sich|ein|*', [124] '*|ein|ein', [125] 'ein|*|ein', [126] 'ein|ein', [127] 'ein|ein|*', [128] '*|*|schnell', [129] '*|schnell' + [130] '*|schnell|*', [131] 'sich|*|ein', [132] '*|*|lediglich', [133] '*|lediglich', [134] '*|lediglich|*', [135] 'lediglich|*', [136] 'lediglich|*|*', [137] 'sich|*|fuer', [138] '*|sich|schnell', [139] 'sich|*|schnell' + [140] '*|sich|fuer', [141] 'sich|fuer', [142] 'sich|fuer|*' + F21: Vec + F21: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F21: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + F22: Vec> + Metadata 'KeyValues': Vec: Length=20, Count=20 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + [10] 'schnell', [11] 'mit', [12] 'einer', [13] 'ersten', [14] 'schaetzung', [15] 'vor', [16] 'neue', [17] 'todesfaelle', [18] 'durch', [19] 'ehec' + F22: Vec + Metadata 'SlotNames': Vec: Length=80, Count=80 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|air|worldwide', [4] 'lediglich', [5] 'lediglich|air', [6] 'lediglich|air|worldwide', [7] 'lediglich|air|worldwide|ein', [8] 'air', [9] 'air|worldwide' + [10] 'air|worldwide|ein', [11] 'air|worldwide|ein|spezialist', [12] 'worldwide', [13] 'worldwide|ein', [14] 'worldwide|ein|spezialist', [15] 'worldwide|ein|spezialist|fuer', [16] 'ein', [17] 'ein|spezialist', [18] 'ein|spezialist|fuer', [19] 'ein|spezialist|fuer|risikomodelle' + [20] 'spezialist', [21] 'spezialist|fuer', [22] 
'spezialist|fuer|risikomodelle', [23] 'spezialist|fuer|risikomodelle|wagte', [24] 'fuer', [25] 'fuer|risikomodelle', [26] 'fuer|risikomodelle|wagte', [27] 'fuer|risikomodelle|wagte|sich', [28] 'risikomodelle', [29] 'risikomodelle|wagte' + [30] 'risikomodelle|wagte|sich', [31] 'risikomodelle|wagte|sich|schnell', [32] 'wagte', [33] 'wagte|sich', [34] 'wagte|sich|schnell', [35] 'wagte|sich|schnell|mit', [36] 'sich', [37] 'sich|schnell', [38] 'sich|schnell|mit', [39] 'sich|schnell|mit|einer' + [40] 'schnell', [41] 'schnell|mit', [42] 'schnell|mit|einer', [43] 'schnell|mit|einer|ersten', [44] 'mit', [45] 'mit|einer', [46] 'mit|einer|ersten', [47] 'mit|einer|ersten|schaetzung', [48] 'einer', [49] 'einer|ersten' + [50] 'einer|ersten|schaetzung', [51] 'einer|ersten|schaetzung|vor', [52] 'ersten', [53] 'ersten|schaetzung', [54] 'ersten|schaetzung|vor', [55] 'schaetzung', [56] 'schaetzung|vor', [57] 'vor', [58] 'neue', [59] 'neue|todesfaelle' + [60] 'neue|todesfaelle|durch', [61] 'neue|todesfaelle|durch|ehec', [62] 'todesfaelle', [63] 'todesfaelle|durch', [64] 'todesfaelle|durch|ehec', [65] 'todesfaelle|durch|ehec|*', [66] 'durch', [67] 'durch|ehec', [68] 'durch|ehec|*', [69] 'durch|ehec|*|*' + [70] 'ehec', [71] 'ehec|*', [72] 'ehec|*|*', [73] 'ehec|*|*|*', [74] '*|*', [75] '*|*|*', [76] '*|*|*|*', [77] '*|*|*|ehec', [78] '*|*|ehec', [79] '*|*|ehec|*' +---- RowToRowMapperTransform ---- +20 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden 
euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F11: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F11: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + F12: Vec> + Metadata 'KeyValues': Vec: Length=3, Count=3 + [0] 'lediglich', [1] 'air', [2] 'worldwide' + F12: Vec + Metadata 'SlotNames': Vec: Length=17, Count=17 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|worldwide|*', [4] 'air', [5] 'air|worldwide', [6] 'air|worldwide|*', [7] 'air|worldwide|*|*', [8] 'worldwide', [9] 'worldwide|*' + [10] 'worldwide|*|*', [11] 'worldwide|*|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|*|*', [15] '*|*|*|air', [16] '*|*|air' + F13: Vec + F13: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F13: Vec + Metadata 'SlotNames': Vec: Length=143, Count=143 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|ein|fuer', [18] 'air|spezialist', [19] 'air|spezialist|fuer' + [20] 'worldwide', [21] 'worldwide|ein', [22] 'worldwide|ein|spezialist', [23] 'worldwide|ein|fuer', [24] 'worldwide|ein|risikomodelle', [25] 
'worldwide|spezialist', [26] 'worldwide|spezialist|fuer', [27] 'worldwide|spezialist|risikomodelle', [28] 'worldwide|fuer', [29] 'worldwide|fuer|risikomodelle' + [30] 'ein', [31] 'ein|spezialist', [32] 'ein|spezialist|fuer', [33] 'ein|spezialist|risikomodelle', [34] 'ein|spezialist|wagte', [35] 'ein|fuer', [36] 'ein|fuer|risikomodelle', [37] 'ein|fuer|wagte', [38] 'ein|risikomodelle', [39] 'ein|risikomodelle|wagte' + [40] 'spezialist', [41] 'spezialist|fuer', [42] 'spezialist|fuer|risikomodelle', [43] 'spezialist|fuer|wagte', [44] 'spezialist|fuer|sich', [45] 'spezialist|risikomodelle', [46] 'spezialist|risikomodelle|wagte', [47] 'spezialist|risikomodelle|sich', [48] 'spezialist|wagte', [49] 'spezialist|wagte|sich' + [50] 'fuer', [51] 'fuer|risikomodelle', [52] 'fuer|risikomodelle|wagte', [53] 'fuer|risikomodelle|sich', [54] 'fuer|risikomodelle|schnell', [55] 'fuer|wagte', [56] 'fuer|wagte|sich', [57] 'fuer|wagte|schnell', [58] 'fuer|sich', [59] 'fuer|sich|schnell' + [60] 'risikomodelle', [61] 'risikomodelle|wagte', [62] 'risikomodelle|wagte|sich', [63] 'risikomodelle|wagte|schnell', [64] 'risikomodelle|wagte|*', [65] 'risikomodelle|sich', [66] 'risikomodelle|sich|schnell', [67] 'risikomodelle|sich|*', [68] 'risikomodelle|schnell', [69] 'risikomodelle|schnell|*' + [70] 'wagte', [71] 'wagte|sich', [72] 'wagte|sich|schnell', [73] 'wagte|sich|*', [74] 'wagte|schnell', [75] 'wagte|schnell|*', [76] 'wagte|*', [77] 'wagte|*|*', [78] 'sich', [79] 'sich|schnell' + [80] 'sich|schnell|*', [81] 'sich|*', [82] 'sich|*|*', [83] 'schnell', [84] 'schnell|*', [85] 'schnell|*|*', [86] '*', [87] '*|*', [88] '*|*|*', [89] '*|*|ein' + [90] '*|ein', [91] '*|ein|*', [92] 'ein|*', [93] 'ein|*|*', [94] '*|*|sich', [95] '*|sich', [96] '*|sich|*', [97] '*|*|fuer', [98] '*|fuer', [99] '*|fuer|*' + [100] 'fuer|*', [101] 'fuer|*|*', [102] '*|fuer|fuer', [103] 'fuer|*|fuer', [104] 'fuer|fuer', [105] 'fuer|fuer|*', [106] '*|fuer|ein', [107] 'fuer|ein', [108] 'fuer|ein|*', [109] '*|*|air' + [110] 
'*|air', [111] '*|air|*', [112] 'air|*', [113] 'air|*|*', [114] 'fuer|*|ein', [115] '*|ein|fuer', [116] 'ein|*|fuer', [117] 'ein|fuer|*', [118] '*|*|wagte', [119] '*|wagte' + [120] '*|wagte|*', [121] '*|sich|ein', [122] 'sich|ein', [123] 'sich|ein|*', [124] '*|ein|ein', [125] 'ein|*|ein', [126] 'ein|ein', [127] 'ein|ein|*', [128] '*|*|schnell', [129] '*|schnell' + [130] '*|schnell|*', [131] 'sich|*|ein', [132] '*|*|lediglich', [133] '*|lediglich', [134] '*|lediglich|*', [135] 'lediglich|*', [136] 'lediglich|*|*', [137] 'sich|*|fuer', [138] '*|sich|schnell', [139] 'sich|*|schnell' + [140] '*|sich|fuer', [141] 'sich|fuer', [142] 'sich|fuer|*' + F21: Vec + F21: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F21: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + F22: Vec> + Metadata 'KeyValues': Vec: Length=20, Count=20 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + [10] 'schnell', [11] 'mit', [12] 'einer', [13] 'ersten', [14] 'schaetzung', [15] 'vor', [16] 'neue', [17] 'todesfaelle', [18] 'durch', [19] 'ehec' + F22: Vec + Metadata 'SlotNames': Vec: Length=80, Count=80 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|air|worldwide', [4] 'lediglich', [5] 'lediglich|air', [6] 'lediglich|air|worldwide', [7] 'lediglich|air|worldwide|ein', [8] 'air', [9] 'air|worldwide' + [10] 'air|worldwide|ein', [11] 'air|worldwide|ein|spezialist', [12] 'worldwide', [13] 'worldwide|ein', [14] 'worldwide|ein|spezialist', [15] 'worldwide|ein|spezialist|fuer', [16] 'ein', [17] 'ein|spezialist', [18] 
'ein|spezialist|fuer', [19] 'ein|spezialist|fuer|risikomodelle' + [20] 'spezialist', [21] 'spezialist|fuer', [22] 'spezialist|fuer|risikomodelle', [23] 'spezialist|fuer|risikomodelle|wagte', [24] 'fuer', [25] 'fuer|risikomodelle', [26] 'fuer|risikomodelle|wagte', [27] 'fuer|risikomodelle|wagte|sich', [28] 'risikomodelle', [29] 'risikomodelle|wagte' + [30] 'risikomodelle|wagte|sich', [31] 'risikomodelle|wagte|sich|schnell', [32] 'wagte', [33] 'wagte|sich', [34] 'wagte|sich|schnell', [35] 'wagte|sich|schnell|mit', [36] 'sich', [37] 'sich|schnell', [38] 'sich|schnell|mit', [39] 'sich|schnell|mit|einer' + [40] 'schnell', [41] 'schnell|mit', [42] 'schnell|mit|einer', [43] 'schnell|mit|einer|ersten', [44] 'mit', [45] 'mit|einer', [46] 'mit|einer|ersten', [47] 'mit|einer|ersten|schaetzung', [48] 'einer', [49] 'einer|ersten' + [50] 'einer|ersten|schaetzung', [51] 'einer|ersten|schaetzung|vor', [52] 'ersten', [53] 'ersten|schaetzung', [54] 'ersten|schaetzung|vor', [55] 'schaetzung', [56] 'schaetzung|vor', [57] 'vor', [58] 'neue', [59] 'neue|todesfaelle' + [60] 'neue|todesfaelle|durch', [61] 'neue|todesfaelle|durch|ehec', [62] 'todesfaelle', [63] 'todesfaelle|durch', [64] 'todesfaelle|durch|ehec', [65] 'todesfaelle|durch|ehec|*', [66] 'durch', [67] 'durch|ehec', [68] 'durch|ehec|*', [69] 'durch|ehec|*|*' + [70] 'ehec', [71] 'ehec|*', [72] 'ehec|*|*', [73] 'ehec|*|*|*', [74] '*|*', [75] '*|*|*', [76] '*|*|*|*', [77] '*|*|*|ehec', [78] '*|*|ehec', [79] '*|*|ehec|*' + F23: Vec +---- RowToRowMapperTransform ---- +21 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am 
abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F11: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F11: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + F12: Vec> + Metadata 'KeyValues': Vec: Length=3, Count=3 + [0] 'lediglich', [1] 'air', [2] 'worldwide' + F12: Vec + Metadata 'SlotNames': Vec: Length=17, Count=17 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|worldwide|*', [4] 'air', [5] 'air|worldwide', [6] 'air|worldwide|*', [7] 'air|worldwide|*|*', [8] 'worldwide', [9] 'worldwide|*' + [10] 'worldwide|*|*', [11] 'worldwide|*|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|*|*', [15] '*|*|*|air', [16] '*|*|air' + F13: Vec + F13: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F13: Vec + Metadata 'SlotNames': Vec: Length=143, Count=143 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|ein|fuer', [18] 'air|spezialist', [19] 'air|spezialist|fuer' + [20] 'worldwide', [21] 
'worldwide|ein', [22] 'worldwide|ein|spezialist', [23] 'worldwide|ein|fuer', [24] 'worldwide|ein|risikomodelle', [25] 'worldwide|spezialist', [26] 'worldwide|spezialist|fuer', [27] 'worldwide|spezialist|risikomodelle', [28] 'worldwide|fuer', [29] 'worldwide|fuer|risikomodelle' + [30] 'ein', [31] 'ein|spezialist', [32] 'ein|spezialist|fuer', [33] 'ein|spezialist|risikomodelle', [34] 'ein|spezialist|wagte', [35] 'ein|fuer', [36] 'ein|fuer|risikomodelle', [37] 'ein|fuer|wagte', [38] 'ein|risikomodelle', [39] 'ein|risikomodelle|wagte' + [40] 'spezialist', [41] 'spezialist|fuer', [42] 'spezialist|fuer|risikomodelle', [43] 'spezialist|fuer|wagte', [44] 'spezialist|fuer|sich', [45] 'spezialist|risikomodelle', [46] 'spezialist|risikomodelle|wagte', [47] 'spezialist|risikomodelle|sich', [48] 'spezialist|wagte', [49] 'spezialist|wagte|sich' + [50] 'fuer', [51] 'fuer|risikomodelle', [52] 'fuer|risikomodelle|wagte', [53] 'fuer|risikomodelle|sich', [54] 'fuer|risikomodelle|schnell', [55] 'fuer|wagte', [56] 'fuer|wagte|sich', [57] 'fuer|wagte|schnell', [58] 'fuer|sich', [59] 'fuer|sich|schnell' + [60] 'risikomodelle', [61] 'risikomodelle|wagte', [62] 'risikomodelle|wagte|sich', [63] 'risikomodelle|wagte|schnell', [64] 'risikomodelle|wagte|*', [65] 'risikomodelle|sich', [66] 'risikomodelle|sich|schnell', [67] 'risikomodelle|sich|*', [68] 'risikomodelle|schnell', [69] 'risikomodelle|schnell|*' + [70] 'wagte', [71] 'wagte|sich', [72] 'wagte|sich|schnell', [73] 'wagte|sich|*', [74] 'wagte|schnell', [75] 'wagte|schnell|*', [76] 'wagte|*', [77] 'wagte|*|*', [78] 'sich', [79] 'sich|schnell' + [80] 'sich|schnell|*', [81] 'sich|*', [82] 'sich|*|*', [83] 'schnell', [84] 'schnell|*', [85] 'schnell|*|*', [86] '*', [87] '*|*', [88] '*|*|*', [89] '*|*|ein' + [90] '*|ein', [91] '*|ein|*', [92] 'ein|*', [93] 'ein|*|*', [94] '*|*|sich', [95] '*|sich', [96] '*|sich|*', [97] '*|*|fuer', [98] '*|fuer', [99] '*|fuer|*' + [100] 'fuer|*', [101] 'fuer|*|*', [102] '*|fuer|fuer', [103] 'fuer|*|fuer', 
[104] 'fuer|fuer', [105] 'fuer|fuer|*', [106] '*|fuer|ein', [107] 'fuer|ein', [108] 'fuer|ein|*', [109] '*|*|air' + [110] '*|air', [111] '*|air|*', [112] 'air|*', [113] 'air|*|*', [114] 'fuer|*|ein', [115] '*|ein|fuer', [116] 'ein|*|fuer', [117] 'ein|fuer|*', [118] '*|*|wagte', [119] '*|wagte' + [120] '*|wagte|*', [121] '*|sich|ein', [122] 'sich|ein', [123] 'sich|ein|*', [124] '*|ein|ein', [125] 'ein|*|ein', [126] 'ein|ein', [127] 'ein|ein|*', [128] '*|*|schnell', [129] '*|schnell' + [130] '*|schnell|*', [131] 'sich|*|ein', [132] '*|*|lediglich', [133] '*|lediglich', [134] '*|lediglich|*', [135] 'lediglich|*', [136] 'lediglich|*|*', [137] 'sich|*|fuer', [138] '*|sich|schnell', [139] 'sich|*|schnell' + [140] '*|sich|fuer', [141] 'sich|fuer', [142] 'sich|fuer|*' + F21: Vec + F21: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F21: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + F22: Vec> + Metadata 'KeyValues': Vec: Length=20, Count=20 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + [10] 'schnell', [11] 'mit', [12] 'einer', [13] 'ersten', [14] 'schaetzung', [15] 'vor', [16] 'neue', [17] 'todesfaelle', [18] 'durch', [19] 'ehec' + F22: Vec + Metadata 'SlotNames': Vec: Length=80, Count=80 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|air|worldwide', [4] 'lediglich', [5] 'lediglich|air', [6] 'lediglich|air|worldwide', [7] 'lediglich|air|worldwide|ein', [8] 'air', [9] 'air|worldwide' + [10] 'air|worldwide|ein', [11] 'air|worldwide|ein|spezialist', [12] 'worldwide', [13] 
'worldwide|ein', [14] 'worldwide|ein|spezialist', [15] 'worldwide|ein|spezialist|fuer', [16] 'ein', [17] 'ein|spezialist', [18] 'ein|spezialist|fuer', [19] 'ein|spezialist|fuer|risikomodelle' + [20] 'spezialist', [21] 'spezialist|fuer', [22] 'spezialist|fuer|risikomodelle', [23] 'spezialist|fuer|risikomodelle|wagte', [24] 'fuer', [25] 'fuer|risikomodelle', [26] 'fuer|risikomodelle|wagte', [27] 'fuer|risikomodelle|wagte|sich', [28] 'risikomodelle', [29] 'risikomodelle|wagte' + [30] 'risikomodelle|wagte|sich', [31] 'risikomodelle|wagte|sich|schnell', [32] 'wagte', [33] 'wagte|sich', [34] 'wagte|sich|schnell', [35] 'wagte|sich|schnell|mit', [36] 'sich', [37] 'sich|schnell', [38] 'sich|schnell|mit', [39] 'sich|schnell|mit|einer' + [40] 'schnell', [41] 'schnell|mit', [42] 'schnell|mit|einer', [43] 'schnell|mit|einer|ersten', [44] 'mit', [45] 'mit|einer', [46] 'mit|einer|ersten', [47] 'mit|einer|ersten|schaetzung', [48] 'einer', [49] 'einer|ersten' + [50] 'einer|ersten|schaetzung', [51] 'einer|ersten|schaetzung|vor', [52] 'ersten', [53] 'ersten|schaetzung', [54] 'ersten|schaetzung|vor', [55] 'schaetzung', [56] 'schaetzung|vor', [57] 'vor', [58] 'neue', [59] 'neue|todesfaelle' + [60] 'neue|todesfaelle|durch', [61] 'neue|todesfaelle|durch|ehec', [62] 'todesfaelle', [63] 'todesfaelle|durch', [64] 'todesfaelle|durch|ehec', [65] 'todesfaelle|durch|ehec|*', [66] 'durch', [67] 'durch|ehec', [68] 'durch|ehec|*', [69] 'durch|ehec|*|*' + [70] 'ehec', [71] 'ehec|*', [72] 'ehec|*|*', [73] 'ehec|*|*|*', [74] '*|*', [75] '*|*|*', [76] '*|*|*|*', [77] '*|*|*|ehec', [78] '*|*|ehec', [79] '*|*|ehec|*' + F23: Vec + F23: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' +---- RowToRowMapperTransform ---- +22 columns: + Label: Text + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 
'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + F11: Vec + F11: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F11: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + F12: Vec> + Metadata 'KeyValues': Vec: Length=3, Count=3 + [0] 'lediglich', [1] 'air', [2] 'worldwide' + F12: Vec + Metadata 'SlotNames': Vec: Length=17, Count=17 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|worldwide|*', [4] 'air', [5] 'air|worldwide', [6] 'air|worldwide|*', [7] 'air|worldwide|*|*', [8] 'worldwide', [9] 'worldwide|*' + [10] 'worldwide|*|*', [11] 'worldwide|*|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|*|*', [15] '*|*|*|air', [16] '*|*|air' + F13: Vec + F13: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F13: Vec + Metadata 'SlotNames': Vec: Length=143, Count=143 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 
'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|ein|fuer', [18] 'air|spezialist', [19] 'air|spezialist|fuer' + [20] 'worldwide', [21] 'worldwide|ein', [22] 'worldwide|ein|spezialist', [23] 'worldwide|ein|fuer', [24] 'worldwide|ein|risikomodelle', [25] 'worldwide|spezialist', [26] 'worldwide|spezialist|fuer', [27] 'worldwide|spezialist|risikomodelle', [28] 'worldwide|fuer', [29] 'worldwide|fuer|risikomodelle' + [30] 'ein', [31] 'ein|spezialist', [32] 'ein|spezialist|fuer', [33] 'ein|spezialist|risikomodelle', [34] 'ein|spezialist|wagte', [35] 'ein|fuer', [36] 'ein|fuer|risikomodelle', [37] 'ein|fuer|wagte', [38] 'ein|risikomodelle', [39] 'ein|risikomodelle|wagte' + [40] 'spezialist', [41] 'spezialist|fuer', [42] 'spezialist|fuer|risikomodelle', [43] 'spezialist|fuer|wagte', [44] 'spezialist|fuer|sich', [45] 'spezialist|risikomodelle', [46] 'spezialist|risikomodelle|wagte', [47] 'spezialist|risikomodelle|sich', [48] 'spezialist|wagte', [49] 'spezialist|wagte|sich' + [50] 'fuer', [51] 'fuer|risikomodelle', [52] 'fuer|risikomodelle|wagte', [53] 'fuer|risikomodelle|sich', [54] 'fuer|risikomodelle|schnell', [55] 'fuer|wagte', [56] 'fuer|wagte|sich', [57] 'fuer|wagte|schnell', [58] 'fuer|sich', [59] 'fuer|sich|schnell' + [60] 'risikomodelle', [61] 'risikomodelle|wagte', [62] 'risikomodelle|wagte|sich', [63] 'risikomodelle|wagte|schnell', [64] 'risikomodelle|wagte|*', [65] 'risikomodelle|sich', [66] 'risikomodelle|sich|schnell', [67] 'risikomodelle|sich|*', [68] 'risikomodelle|schnell', [69] 'risikomodelle|schnell|*' + [70] 'wagte', [71] 'wagte|sich', [72] 'wagte|sich|schnell', [73] 'wagte|sich|*', [74] 'wagte|schnell', [75] 'wagte|schnell|*', [76] 'wagte|*', [77] 'wagte|*|*', [78] 'sich', [79] 'sich|schnell' + [80] 'sich|schnell|*', [81] 'sich|*', [82] 
'sich|*|*', [83] 'schnell', [84] 'schnell|*', [85] 'schnell|*|*', [86] '*', [87] '*|*', [88] '*|*|*', [89] '*|*|ein' + [90] '*|ein', [91] '*|ein|*', [92] 'ein|*', [93] 'ein|*|*', [94] '*|*|sich', [95] '*|sich', [96] '*|sich|*', [97] '*|*|fuer', [98] '*|fuer', [99] '*|fuer|*' + [100] 'fuer|*', [101] 'fuer|*|*', [102] '*|fuer|fuer', [103] 'fuer|*|fuer', [104] 'fuer|fuer', [105] 'fuer|fuer|*', [106] '*|fuer|ein', [107] 'fuer|ein', [108] 'fuer|ein|*', [109] '*|*|air' + [110] '*|air', [111] '*|air|*', [112] 'air|*', [113] 'air|*|*', [114] 'fuer|*|ein', [115] '*|ein|fuer', [116] 'ein|*|fuer', [117] 'ein|fuer|*', [118] '*|*|wagte', [119] '*|wagte' + [120] '*|wagte|*', [121] '*|sich|ein', [122] 'sich|ein', [123] 'sich|ein|*', [124] '*|ein|ein', [125] 'ein|*|ein', [126] 'ein|ein', [127] 'ein|ein|*', [128] '*|*|schnell', [129] '*|schnell' + [130] '*|schnell|*', [131] 'sich|*|ein', [132] '*|*|lediglich', [133] '*|lediglich', [134] '*|lediglich|*', [135] 'lediglich|*', [136] 'lediglich|*|*', [137] 'sich|*|fuer', [138] '*|sich|schnell', [139] 'sich|*|schnell' + [140] '*|sich|fuer', [141] 'sich|fuer', [142] 'sich|fuer|*' + F21: Vec + F21: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F21: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + F22: Vec> + Metadata 'KeyValues': Vec: Length=20, Count=20 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + [10] 'schnell', [11] 'mit', [12] 'einer', [13] 'ersten', [14] 'schaetzung', [15] 'vor', [16] 'neue', [17] 'todesfaelle', [18] 'durch', [19] 'ehec' + F22: Vec + Metadata 'SlotNames': Vec: Length=80, Count=80 + [0] 
'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|air|worldwide', [4] 'lediglich', [5] 'lediglich|air', [6] 'lediglich|air|worldwide', [7] 'lediglich|air|worldwide|ein', [8] 'air', [9] 'air|worldwide' + [10] 'air|worldwide|ein', [11] 'air|worldwide|ein|spezialist', [12] 'worldwide', [13] 'worldwide|ein', [14] 'worldwide|ein|spezialist', [15] 'worldwide|ein|spezialist|fuer', [16] 'ein', [17] 'ein|spezialist', [18] 'ein|spezialist|fuer', [19] 'ein|spezialist|fuer|risikomodelle' + [20] 'spezialist', [21] 'spezialist|fuer', [22] 'spezialist|fuer|risikomodelle', [23] 'spezialist|fuer|risikomodelle|wagte', [24] 'fuer', [25] 'fuer|risikomodelle', [26] 'fuer|risikomodelle|wagte', [27] 'fuer|risikomodelle|wagte|sich', [28] 'risikomodelle', [29] 'risikomodelle|wagte' + [30] 'risikomodelle|wagte|sich', [31] 'risikomodelle|wagte|sich|schnell', [32] 'wagte', [33] 'wagte|sich', [34] 'wagte|sich|schnell', [35] 'wagte|sich|schnell|mit', [36] 'sich', [37] 'sich|schnell', [38] 'sich|schnell|mit', [39] 'sich|schnell|mit|einer' + [40] 'schnell', [41] 'schnell|mit', [42] 'schnell|mit|einer', [43] 'schnell|mit|einer|ersten', [44] 'mit', [45] 'mit|einer', [46] 'mit|einer|ersten', [47] 'mit|einer|ersten|schaetzung', [48] 'einer', [49] 'einer|ersten' + [50] 'einer|ersten|schaetzung', [51] 'einer|ersten|schaetzung|vor', [52] 'ersten', [53] 'ersten|schaetzung', [54] 'ersten|schaetzung|vor', [55] 'schaetzung', [56] 'schaetzung|vor', [57] 'vor', [58] 'neue', [59] 'neue|todesfaelle' + [60] 'neue|todesfaelle|durch', [61] 'neue|todesfaelle|durch|ehec', [62] 'todesfaelle', [63] 'todesfaelle|durch', [64] 'todesfaelle|durch|ehec', [65] 'todesfaelle|durch|ehec|*', [66] 'durch', [67] 'durch|ehec', [68] 'durch|ehec|*', [69] 'durch|ehec|*|*' + [70] 'ehec', [71] 'ehec|*', [72] 'ehec|*|*', [73] 'ehec|*|*|*', [74] '*|*', [75] '*|*|*', [76] '*|*|*|*', [77] '*|*|*|ehec', [78] '*|*|ehec', [79] '*|*|ehec|*' + F23: Vec + F23: Vec> + Metadata 'KeyValues': 
Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F23: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|worldwide', [4] 'versicherer|lediglich|ein', [5] 'versicherer|air', [6] 'versicherer|air|worldwide', [7] 'versicherer|air|ein', [8] 'versicherer|worldwide', [9] 'versicherer|worldwide|ein' + [10] 'lediglich', [11] 'lediglich|air', [12] 'lediglich|air|worldwide', [13] 'lediglich|air|ein', [14] 'lediglich|air|spezialist', [15] 'lediglich|worldwide', [16] 'lediglich|worldwide|ein', [17] 'lediglich|ein', [18] 'air', [19] 'air|worldwide' + [20] 'air|ein', [21] 'air|spezialist', [22] 'worldwide', [23] 'worldwide|ein', [24] 'ein', [25] 'spezialist', [26] 'fuer', [27] 'risikomodelle', [28] 'wagte', [29] 'sich' +---- SelectColumnsDataTransform ---- +7 columns: + Label: Key + Metadata 'KeyValues': Vec: Length=7, Count=7 + [0] 'Wirtschaft', [1] 'Gesundheit', [2] 'Deutschland', [3] 'Ausland', [4] 'Unterhaltung', [5] 'Sport', [6] 'Technik & Wissen' + F21: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'versicherer', [1] 'lediglich', [2] 'air', [3] 'worldwide', [4] 'ein', [5] 'spezialist', [6] 'fuer', [7] 'risikomodelle', [8] 'wagte', [9] 'sich' + F22: Vec + Metadata 'SlotNames': Vec: Length=80, Count=80 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|air|worldwide', [4] 'lediglich', [5] 'lediglich|air', [6] 'lediglich|air|worldwide', [7] 'lediglich|air|worldwide|ein', [8] 'air', [9] 'air|worldwide' + [10] 'air|worldwide|ein', [11] 'air|worldwide|ein|spezialist', [12] 'worldwide', [13] 'worldwide|ein', [14] 'worldwide|ein|spezialist', [15] 'worldwide|ein|spezialist|fuer', [16] 'ein', [17] 'ein|spezialist', [18] 'ein|spezialist|fuer', [19] 
'ein|spezialist|fuer|risikomodelle' + [20] 'spezialist', [21] 'spezialist|fuer', [22] 'spezialist|fuer|risikomodelle', [23] 'spezialist|fuer|risikomodelle|wagte', [24] 'fuer', [25] 'fuer|risikomodelle', [26] 'fuer|risikomodelle|wagte', [27] 'fuer|risikomodelle|wagte|sich', [28] 'risikomodelle', [29] 'risikomodelle|wagte' + [30] 'risikomodelle|wagte|sich', [31] 'risikomodelle|wagte|sich|schnell', [32] 'wagte', [33] 'wagte|sich', [34] 'wagte|sich|schnell', [35] 'wagte|sich|schnell|mit', [36] 'sich', [37] 'sich|schnell', [38] 'sich|schnell|mit', [39] 'sich|schnell|mit|einer' + [40] 'schnell', [41] 'schnell|mit', [42] 'schnell|mit|einer', [43] 'schnell|mit|einer|ersten', [44] 'mit', [45] 'mit|einer', [46] 'mit|einer|ersten', [47] 'mit|einer|ersten|schaetzung', [48] 'einer', [49] 'einer|ersten' + [50] 'einer|ersten|schaetzung', [51] 'einer|ersten|schaetzung|vor', [52] 'ersten', [53] 'ersten|schaetzung', [54] 'ersten|schaetzung|vor', [55] 'schaetzung', [56] 'schaetzung|vor', [57] 'vor', [58] 'neue', [59] 'neue|todesfaelle' + [60] 'neue|todesfaelle|durch', [61] 'neue|todesfaelle|durch|ehec', [62] 'todesfaelle', [63] 'todesfaelle|durch', [64] 'todesfaelle|durch|ehec', [65] 'todesfaelle|durch|ehec|*', [66] 'durch', [67] 'durch|ehec', [68] 'durch|ehec|*', [69] 'durch|ehec|*|*' + [70] 'ehec', [71] 'ehec|*', [72] 'ehec|*|*', [73] 'ehec|*|*|*', [74] '*|*', [75] '*|*|*', [76] '*|*|*|*', [77] '*|*|*|ehec', [78] '*|*|ehec', [79] '*|*|ehec|*' + F23: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'versicherer', [1] 'versicherer|lediglich', [2] 'versicherer|lediglich|air', [3] 'versicherer|lediglich|worldwide', [4] 'versicherer|lediglich|ein', [5] 'versicherer|air', [6] 'versicherer|air|worldwide', [7] 'versicherer|air|ein', [8] 'versicherer|worldwide', [9] 'versicherer|worldwide|ein' + [10] 'lediglich', [11] 'lediglich|air', [12] 'lediglich|air|worldwide', [13] 'lediglich|air|ein', [14] 'lediglich|air|spezialist', [15] 'lediglich|worldwide', [16] 
'lediglich|worldwide|ein', [17] 'lediglich|ein', [18] 'air', [19] 'air|worldwide' + [20] 'air|ein', [21] 'air|spezialist', [22] 'worldwide', [23] 'worldwide|ein', [24] 'ein', [25] 'spezialist', [26] 'fuer', [27] 'risikomodelle', [28] 'wagte', [29] 'sich' + F11: Vec + Metadata 'SlotNames': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + F12: Vec + Metadata 'SlotNames': Vec: Length=17, Count=17 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|worldwide|*', [4] 'air', [5] 'air|worldwide', [6] 'air|worldwide|*', [7] 'air|worldwide|*|*', [8] 'worldwide', [9] 'worldwide|*' + [10] 'worldwide|*|*', [11] 'worldwide|*|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|*|*', [15] '*|*|*|air', [16] '*|*|air' + F13: Vec + Metadata 'SlotNames': Vec: Length=143, Count=143 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|ein|fuer', [18] 'air|spezialist', [19] 'air|spezialist|fuer' + [20] 'worldwide', [21] 'worldwide|ein', [22] 'worldwide|ein|spezialist', [23] 'worldwide|ein|fuer', [24] 'worldwide|ein|risikomodelle', [25] 'worldwide|spezialist', [26] 'worldwide|spezialist|fuer', [27] 'worldwide|spezialist|risikomodelle', [28] 'worldwide|fuer', [29] 'worldwide|fuer|risikomodelle' + [30] 'ein', [31] 'ein|spezialist', [32] 'ein|spezialist|fuer', [33] 'ein|spezialist|risikomodelle', [34] 'ein|spezialist|wagte', [35] 'ein|fuer', [36] 'ein|fuer|risikomodelle', [37] 'ein|fuer|wagte', [38] 'ein|risikomodelle', [39] 
'ein|risikomodelle|wagte' + [40] 'spezialist', [41] 'spezialist|fuer', [42] 'spezialist|fuer|risikomodelle', [43] 'spezialist|fuer|wagte', [44] 'spezialist|fuer|sich', [45] 'spezialist|risikomodelle', [46] 'spezialist|risikomodelle|wagte', [47] 'spezialist|risikomodelle|sich', [48] 'spezialist|wagte', [49] 'spezialist|wagte|sich' + [50] 'fuer', [51] 'fuer|risikomodelle', [52] 'fuer|risikomodelle|wagte', [53] 'fuer|risikomodelle|sich', [54] 'fuer|risikomodelle|schnell', [55] 'fuer|wagte', [56] 'fuer|wagte|sich', [57] 'fuer|wagte|schnell', [58] 'fuer|sich', [59] 'fuer|sich|schnell' + [60] 'risikomodelle', [61] 'risikomodelle|wagte', [62] 'risikomodelle|wagte|sich', [63] 'risikomodelle|wagte|schnell', [64] 'risikomodelle|wagte|*', [65] 'risikomodelle|sich', [66] 'risikomodelle|sich|schnell', [67] 'risikomodelle|sich|*', [68] 'risikomodelle|schnell', [69] 'risikomodelle|schnell|*' + [70] 'wagte', [71] 'wagte|sich', [72] 'wagte|sich|schnell', [73] 'wagte|sich|*', [74] 'wagte|schnell', [75] 'wagte|schnell|*', [76] 'wagte|*', [77] 'wagte|*|*', [78] 'sich', [79] 'sich|schnell' + [80] 'sich|schnell|*', [81] 'sich|*', [82] 'sich|*|*', [83] 'schnell', [84] 'schnell|*', [85] 'schnell|*|*', [86] '*', [87] '*|*', [88] '*|*|*', [89] '*|*|ein' + [90] '*|ein', [91] '*|ein|*', [92] 'ein|*', [93] 'ein|*|*', [94] '*|*|sich', [95] '*|sich', [96] '*|sich|*', [97] '*|*|fuer', [98] '*|fuer', [99] '*|fuer|*' + [100] 'fuer|*', [101] 'fuer|*|*', [102] '*|fuer|fuer', [103] 'fuer|*|fuer', [104] 'fuer|fuer', [105] 'fuer|fuer|*', [106] '*|fuer|ein', [107] 'fuer|ein', [108] 'fuer|ein|*', [109] '*|*|air' + [110] '*|air', [111] '*|air|*', [112] 'air|*', [113] 'air|*|*', [114] 'fuer|*|ein', [115] '*|ein|fuer', [116] 'ein|*|fuer', [117] 'ein|fuer|*', [118] '*|*|wagte', [119] '*|wagte' + [120] '*|wagte|*', [121] '*|sich|ein', [122] 'sich|ein', [123] 'sich|ein|*', [124] '*|ein|ein', [125] 'ein|*|ein', [126] 'ein|ein', [127] 'ein|ein|*', [128] '*|*|schnell', [129] '*|schnell' + [130] '*|schnell|*', 
[131] 'sich|*|ein', [132] '*|*|lediglich', [133] '*|lediglich', [134] '*|lediglich|*', [135] 'lediglich|*', [136] 'lediglich|*|*', [137] 'sich|*|fuer', [138] '*|sich|schnell', [139] 'sich|*|schnell' + [140] '*|sich|fuer', [141] 'sich|fuer', [142] 'sich|fuer|*' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Data.txt new file mode 100644 index 0000000000..d34a72adda --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Data.txt @@ -0,0 +1,108 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=WB1:R4:0-29 +#@ col=WB2:R4:30-59 +#@ } +lediglich lediglich|air lediglich|air|worldwide lediglich|air|ein lediglich|air|spezialist lediglich|worldwide lediglich|worldwide|ein lediglich|worldwide|spezialist lediglich|ein lediglich|ein|spezialist air air|worldwide air|worldwide|ein air|worldwide|spezialist air|worldwide|fuer air|ein air|ein|spezialist air|spezialist worldwide worldwide|ein worldwide|spezialist worldwide|fuer ein ein|spezialist spezialist fuer risikomodelle wagte sich schnell lediglich lediglich|air lediglich|air|worldwide lediglich|air|ein lediglich|air|spezialist lediglich|worldwide lediglich|worldwide|ein lediglich|worldwide|spezialist lediglich|ein lediglich|ein|spezialist air air|worldwide air|worldwide|ein air|worldwide|spezialist air|worldwide|fuer air|ein air|ein|spezialist air|spezialist worldwide worldwide|ein worldwide|spezialist worldwide|fuer ein ein|spezialist spezialist fuer risikomodelle wagte sich schnell +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 +60 22:2 25:1 28:1 52:4 55:2 58:2 +60 22:1 52:2 +60 25:1 55:2 +60 22:4 25:3 28:6 52:8 55:6 58:12 +60 22:2 25:8 52:4 55:16 +60 10:1 22:2 25:1 28:3 40:2 52:4 55:2 58:6 +60 25:1 28:1 55:2 58:2 +60 0:0 +60 25:2 28:2 55:4 58:4 +60 0:0 +60 22:4 25:3 28:1 52:8 55:6 58:2 +60 22:1 52:2 +60 0:0 +60 28:1 58:2 +60 
22:2 25:3 28:5 52:4 55:6 58:10 +60 22:4 25:4 28:3 52:8 55:8 58:6 +60 22:1 25:2 52:2 55:4 +60 25:1 28:1 55:2 58:2 +60 22:2 25:3 28:2 52:4 55:6 58:4 +60 28:1 58:2 +60 22:1 25:4 27:1 28:1 52:2 55:8 57:2 58:2 +60 0:0 +60 28:1 58:2 +60 22:2 25:3 28:3 52:4 55:6 58:6 +60 0:0 +60 22:1 52:2 +60 25:1 28:1 55:2 58:2 +60 25:1 55:2 +60 22:1 25:3 28:1 52:2 55:6 58:2 +60 28:1 58:2 +60 22:1 52:2 +60 0:0 +60 25:2 28:1 55:4 58:2 +60 0:0 +60 0:0 +60 0:0 +60 0:0 +60 0:0 +60 22:1 52:2 +60 0:0 +60 0:0 +60 25:2 55:4 +60 22:4 28:3 52:8 58:6 +60 0:0 +60 22:4 25:2 28:2 52:8 55:4 58:4 +60 22:1 25:1 52:2 55:2 +60 22:2 25:2 29:1 52:4 55:4 59:2 +60 25:1 55:2 +60 22:6 25:2 28:2 52:12 55:4 58:4 +60 22:2 25:4 28:1 52:4 55:8 58:2 +60 22:1 52:2 +60 22:1 25:1 28:1 52:2 55:2 58:2 +60 25:1 55:2 +60 0:0 +60 0:0 +60 0:1 22:1 25:3 28:1 30:2 52:2 55:6 58:2 +60 22:2 25:1 29:1 52:4 55:2 59:2 +60 0:0 +60 22:7 25:3 28:3 29:1 52:14 55:6 58:6 59:2 +60 22:1 28:1 52:2 58:2 +60 22:2 25:1 28:5 52:4 55:2 58:10 +60 22:4 25:2 28:3 29:1 52:8 55:4 58:6 59:2 +60 22:1 28:1 52:2 58:2 +60 22:1 25:2 28:1 52:2 55:4 58:2 +60 0:0 +60 0:0 +60 28:1 58:2 +60 22:2 28:2 52:4 58:4 +60 25:1 28:1 55:2 58:2 +60 28:1 58:2 +60 28:1 58:2 +60 0:0 +60 22:1 28:1 52:2 58:2 +60 0:0 +60 22:1 52:2 +60 22:1 25:1 52:2 55:2 +60 22:3 52:6 +60 25:2 28:2 55:4 58:4 +60 25:1 55:2 +60 22:1 25:2 28:1 52:2 55:4 58:2 +60 22:2 25:5 28:1 52:4 55:10 58:2 +60 25:2 28:1 55:4 58:2 +60 0:0 +60 22:1 28:1 29:1 52:2 58:2 59:2 +60 22:2 25:1 52:4 55:2 +60 28:1 58:2 +60 0:0 +60 25:1 28:1 55:2 58:2 +60 22:3 25:2 28:6 52:6 55:4 58:12 +60 22:1 25:2 52:2 55:4 +60 28:7 58:14 +60 28:1 58:2 +60 22:1 25:5 28:4 52:2 55:10 58:8 +60 25:1 55:2 +60 0:0 +60 22:1 25:3 28:1 52:2 55:6 58:2 +60 25:2 28:1 55:4 58:2 +60 25:1 55:2 +60 0:0 +60 22:2 25:5 28:7 52:4 55:10 58:14 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Schema.txt new file mode 100644 index 0000000000..bcc131996a --- /dev/null 
+++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Schema.txt @@ -0,0 +1,82 @@ +---- BoundLoader ---- +2 columns: + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' +---- RowToRowMapperTransform ---- +3 columns: + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + WB2: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'One', [1] 'One' +---- RowToRowMapperTransform ---- +5 columns: + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + WB2: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'One', [1] 'One' + WB2: Vec + WB1: Vec +---- RowToRowMapperTransform ---- +7 columns: + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + 
[0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + WB2: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'One', [1] 'One' + WB2: Vec + WB2: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + WB1: Vec + WB1: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' +---- RowToRowMapperTransform ---- +9 columns: + One: Text + Vec: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'weg fuer milliardenhilfe frei', [1] 'vor dem parlamentsgebaeude toben strassenkaempfe zwischen demonstranten drinnen haben die griechischen abgeordneten das drastische sparpaket am abend endgueltig beschlossen die entscheidung ist eine wichtige voraussetzung fuer die auszahlung von weiteren acht milliarden euro hilfsgeldern athen das griechische parlament hat einem umfassenden sparpaket endgueltig zugestimmt' + WB2: Vec + Metadata 'SlotNames': Vec: Length=2, Count=2 + [0] 'One', [1] 'One' + WB2: Vec + WB2: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + WB2: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 
'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|spezialist', [18] 'worldwide', [19] 'worldwide|ein' + [20] 'worldwide|spezialist', [21] 'worldwide|fuer', [22] 'ein', [23] 'ein|spezialist', [24] 'spezialist', [25] 'fuer', [26] 'risikomodelle', [27] 'wagte', [28] 'sich', [29] 'schnell' + WB1: Vec + WB1: Vec> + Metadata 'KeyValues': Vec: Length=10, Count=10 + [0] 'lediglich', [1] 'air', [2] 'worldwide', [3] 'ein', [4] 'spezialist', [5] 'fuer', [6] 'risikomodelle', [7] 'wagte', [8] 'sich', [9] 'schnell' + WB1: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|spezialist', [18] 'worldwide', [19] 'worldwide|ein' + [20] 'worldwide|spezialist', [21] 'worldwide|fuer', [22] 'ein', [23] 'ein|spezialist', [24] 'spezialist', [25] 'fuer', [26] 'risikomodelle', [27] 'wagte', [28] 'sich', [29] 'schnell' +---- SelectColumnsDataTransform ---- +2 columns: + WB1: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 
'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|spezialist', [18] 'worldwide', [19] 'worldwide|ein' + [20] 'worldwide|spezialist', [21] 'worldwide|fuer', [22] 'ein', [23] 'ein|spezialist', [24] 'spezialist', [25] 'fuer', [26] 'risikomodelle', [27] 'wagte', [28] 'sich', [29] 'schnell' + WB2: Vec + Metadata 'SlotNames': Vec: Length=30, Count=30 + [0] 'lediglich', [1] 'lediglich|air', [2] 'lediglich|air|worldwide', [3] 'lediglich|air|ein', [4] 'lediglich|air|spezialist', [5] 'lediglich|worldwide', [6] 'lediglich|worldwide|ein', [7] 'lediglich|worldwide|spezialist', [8] 'lediglich|ein', [9] 'lediglich|ein|spezialist' + [10] 'air', [11] 'air|worldwide', [12] 'air|worldwide|ein', [13] 'air|worldwide|spezialist', [14] 'air|worldwide|fuer', [15] 'air|ein', [16] 'air|ein|spezialist', [17] 'air|spezialist', [18] 'worldwide', [19] 'worldwide|ein' + [20] 'worldwide|spezialist', [21] 'worldwide|fuer', [22] 'ein', [23] 'ein|spezialist', [24] 'spezialist', [25] 'fuer', [26] 'risikomodelle', [27] 'wagte', [28] 'sich', [29] 'schnell' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Data.txt new file mode 100644 index 0000000000..69e93058b8 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Data.txt @@ -0,0 +1,11 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=TfIdf:R4:0-14 +#@ } +A A|B A|B|C B B|C B|C|D C C|D D E E|* E|*|* *|* *|*|* *|*|A +0.223143548 0.510825634 1.609438 0.510825634 1.609438 1.609438 1.609438 1.609438 1.609438 0 0 0 0 0 0 +15 9:0.510825634 10:0.91629076 11:0.91629076 12:1.83258152 13:0.91629076 +15 0:0.223143548 9:0.510825634 +0.223143548 0.510825634 0 0.510825634 0 0 0 0 0 0.510825634 0.91629076 0.91629076 3.665163 2.74887228 1.609438 +15 0:0.223143548 1:0.510825634 3:0.510825634 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Schema.txt 
b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Schema.txt new file mode 100644 index 0000000000..da16611bf1 --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Schema.txt @@ -0,0 +1,31 @@ +---- BoundLoader ---- +1 columns: + Text: Text +---- RowToRowMapperTransform ---- +2 columns: + Text: Text + TfIdf: Vec +---- RowToRowMapperTransform ---- +3 columns: + Text: Text + TfIdf: Vec + TfIdf: Vec> + Metadata 'KeyValues': Vec: Length=5, Count=5 + [0] 'A', [1] 'B', [2] 'C', [3] 'D', [4] 'E' +---- RowToRowMapperTransform ---- +4 columns: + Text: Text + TfIdf: Vec + TfIdf: Vec> + Metadata 'KeyValues': Vec: Length=5, Count=5 + [0] 'A', [1] 'B', [2] 'C', [3] 'D', [4] 'E' + TfIdf: Vec + Metadata 'SlotNames': Vec: Length=15, Count=15 + [0] 'A', [1] 'A|B', [2] 'A|B|C', [3] 'B', [4] 'B|C', [5] 'B|C|D', [6] 'C', [7] 'C|D', [8] 'D', [9] 'E' + [10] 'E|*', [11] 'E|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|A' +---- SelectColumnsDataTransform ---- +1 columns: + TfIdf: Vec + Metadata 'SlotNames': Vec: Length=15, Count=15 + [0] 'A', [1] 'A|B', [2] 'A|B|C', [3] 'B', [4] 'B|C', [5] 'B|C|D', [6] 'C', [7] 'C|D', [8] 'D', [9] 'E' + [10] 'E|*', [11] 'E|*|*', [12] '*|*', [13] '*|*|*', [14] '*|*|A' diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordHash-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordHash-Schema.txt index 83ee4402b3..a117e98d1f 100644 --- a/test/BaselineOutput/Common/SavePipe/SavePipeWordHash-Schema.txt +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordHash-Schema.txt @@ -55,7 +55,7 @@ temp__tmp012_000: Vec> temp__tmp013_000: Vec> temp__tmp014_000: Vec> ----- NgramHashingTransformer ---- +---- RowToRowMapperTransform ---- 41 columns: One: Text Two: Text diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Data.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Data.txt new file mode 100644 index 0000000000..728bfd54ef --- /dev/null +++ 
b/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Data.txt @@ -0,0 +1,106 @@ +#@ TextLoader{ +#@ sep=tab +#@ col=F1:R4:0-31 +#@ col=F2:R4:32-63 +#@ } +1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 1 0 2 0 1 1 0 0 1 0 1 0 1 0 0 2 2 2 2 0 2 0 0 0 0 0 0 0 0 0 0 2 2 0 4 0 2 2 0 0 2 0 2 0 2 0 0 4 +6 7 3 7 4 6 5 8 10 6 5 6 5 8 9 5 4 1 7 10 1 3 8 4 12 4 5 9 4 5 5 5 12 14 6 14 8 12 10 16 20 12 10 12 10 16 18 10 8 2 14 20 2 6 16 8 24 8 10 18 8 10 10 10 +0 1 1 0 1 0 0 2 3 0 2 2 0 1 1 1 2 0 3 2 1 1 1 1 0 3 1 1 0 0 0 2 0 2 2 0 2 0 0 4 6 0 4 4 0 2 2 2 4 0 6 4 2 2 2 2 0 6 2 2 0 0 0 4 +64 6:2 10:1 14:1 16:2 17:1 18:1 23:1 24:1 26:1 38:4 42:2 46:2 48:4 49:2 50:2 55:2 56:2 58:2 +18 15 7 11 11 5 7 11 18 12 16 10 9 14 23 23 17 9 46 21 12 11 20 8 37 11 17 11 9 12 22 13 36 30 14 22 22 10 14 22 36 24 32 20 18 28 46 46 34 18 92 42 24 22 40 16 74 22 34 22 18 24 44 26 +6 17 7 12 12 3 8 11 18 10 10 4 14 5 8 10 12 9 39 17 8 18 6 7 24 3 34 12 10 9 17 13 12 34 14 24 24 6 16 22 36 20 20 8 28 10 16 20 24 18 78 34 16 36 12 14 48 6 68 24 20 18 34 26 +14 9 11 8 13 7 7 6 8 3 19 10 10 8 5 14 16 8 35 15 7 10 8 5 21 11 9 11 18 5 13 4 28 18 22 16 26 14 14 12 16 6 38 20 20 16 10 28 32 16 70 30 14 20 16 10 42 22 18 22 36 10 26 8 +2 2 1 2 2 1 1 3 1 2 1 7 1 2 1 2 3 2 7 3 1 2 1 1 3 4 4 2 1 1 1 2 4 4 2 4 4 2 2 6 2 4 2 14 2 4 2 4 6 4 14 6 2 4 2 2 6 8 8 4 2 2 2 4 +64 0:2 8:1 10:1 14:1 15:1 18:1 19:1 22:1 30:1 32:4 40:2 42:2 46:2 47:2 50:2 51:2 54:2 62:2 +6 4 5 3 7 4 2 3 5 2 9 3 6 6 11 6 8 8 17 7 7 4 6 4 11 6 10 5 7 5 8 6 12 8 10 6 14 8 4 6 10 4 18 6 12 12 22 12 16 16 34 14 14 8 12 8 22 12 20 10 14 10 16 12 +1 0 0 1 0 1 2 0 0 1 2 2 3 0 2 3 0 1 1 3 0 1 1 1 2 0 3 0 1 0 3 0 2 0 0 2 0 2 4 0 0 2 4 4 6 0 4 6 0 2 2 6 0 2 2 2 4 0 6 0 2 0 6 0 +9 13 14 7 7 7 6 6 3 8 9 6 8 3 7 5 5 11 23 7 6 8 7 5 17 11 12 7 7 5 6 8 18 26 28 14 14 14 12 12 6 16 18 12 16 6 14 10 10 22 46 14 12 16 14 10 34 22 24 14 14 10 12 16 +64 1:1 7:1 8:1 16:1 17:1 21:1 24:1 33:2 39:2 40:2 48:2 49:2 53:2 56:2 +64 1:1 6:1 14:1 18:1 22:2 29:1 31:1 33:2 38:2 
46:2 50:2 54:4 61:2 63:2 +1 1 2 1 0 1 1 1 1 0 3 0 1 0 2 2 1 1 1 2 2 1 1 1 2 1 2 2 3 1 2 1 2 2 4 2 0 2 2 2 2 0 6 0 2 0 4 4 2 2 2 4 4 2 2 2 4 2 4 4 6 2 4 2 +12 9 9 14 10 5 8 9 23 8 30 11 7 6 13 9 11 7 40 15 6 8 19 10 39 8 18 12 18 14 15 18 24 18 18 28 20 10 16 18 46 16 60 22 14 12 26 18 22 14 80 30 12 16 38 20 78 16 36 24 36 28 30 36 +20 19 17 29 18 12 6 11 30 17 26 12 15 5 18 14 12 17 53 22 14 8 19 11 28 12 20 16 23 10 29 18 40 38 34 58 36 24 12 22 60 34 52 24 30 10 36 28 24 34 106 44 28 16 38 22 56 24 40 32 46 20 58 36 +6 7 1 3 3 1 2 0 6 6 3 4 5 0 5 4 6 3 15 6 3 1 2 5 10 3 10 1 1 6 6 3 12 14 2 6 6 2 4 0 12 12 6 8 10 0 10 8 12 6 30 12 6 2 4 10 20 6 20 2 2 12 12 6 +2 1 1 3 3 0 4 1 5 6 2 0 3 1 5 1 1 0 11 1 2 1 3 2 3 1 7 1 1 1 2 2 4 2 2 6 6 0 8 2 10 12 4 0 6 2 10 2 2 0 22 2 4 2 6 4 6 2 14 2 2 2 4 4 +5 4 6 7 12 7 7 5 5 3 7 13 8 8 17 9 8 3 18 6 11 4 8 4 15 8 18 16 11 2 9 10 10 8 12 14 24 14 14 10 10 6 14 26 16 16 34 18 16 6 36 12 22 8 16 8 30 16 36 32 22 4 18 20 +0 1 1 0 3 0 0 2 0 4 2 3 0 0 1 0 1 0 2 2 0 0 1 0 1 0 0 1 1 1 0 2 0 2 2 0 6 0 0 4 0 8 4 6 0 0 2 0 2 0 4 4 0 0 2 0 2 0 0 2 2 2 0 4 +8 11 2 6 16 15 5 10 19 2 19 9 5 9 10 12 21 6 32 16 5 7 5 8 30 7 32 12 13 12 6 13 16 22 4 12 32 30 10 20 38 4 38 18 10 18 20 24 42 12 64 32 10 14 10 16 60 14 64 24 26 24 12 26 +4 6 4 1 4 1 1 2 4 6 5 5 0 1 4 1 0 1 11 1 1 0 8 0 7 0 8 2 8 4 3 3 8 12 8 2 8 2 2 4 8 12 10 10 0 2 8 2 0 2 22 2 2 0 16 0 14 0 16 4 16 8 6 6 +0 1 1 0 3 0 0 2 0 4 2 3 0 0 1 0 1 0 2 2 0 0 1 0 1 0 0 1 1 1 0 2 0 2 2 0 6 0 0 4 0 8 4 6 0 0 2 0 2 0 4 4 0 0 2 0 2 0 0 2 2 2 0 4 +18 20 4 20 8 15 14 6 12 10 16 19 8 3 19 10 14 9 38 16 15 16 9 5 37 10 16 15 19 14 12 8 36 40 8 40 16 30 28 12 24 20 32 38 16 6 38 20 28 18 76 32 30 32 18 10 74 20 32 30 38 28 24 16 +0 1 3 0 0 1 1 3 0 2 2 0 2 2 2 2 1 1 7 3 2 2 2 1 2 2 4 2 1 3 1 2 0 2 6 0 0 2 2 6 0 4 4 0 4 4 4 4 2 2 14 6 4 4 4 2 4 4 8 4 2 6 2 4 +1 1 0 1 0 0 1 0 0 0 1 1 1 0 0 0 1 0 3 0 0 1 1 0 1 0 1 0 0 1 0 1 2 2 0 2 0 0 2 0 0 0 2 2 2 0 0 0 2 0 6 0 0 2 2 0 2 0 2 0 0 2 0 2 +4 5 0 5 5 3 0 
3 0 7 5 4 5 1 10 7 2 4 12 3 2 3 3 3 9 7 8 2 7 5 5 3 8 10 0 10 10 6 0 6 0 14 10 8 10 2 20 14 4 8 24 6 4 6 6 6 18 14 16 4 14 10 10 6 +2 7 0 3 1 6 1 3 2 3 3 2 7 0 3 1 2 2 4 2 6 1 0 2 3 4 4 4 1 1 2 0 4 14 0 6 2 12 2 6 4 6 6 4 14 0 6 2 4 4 8 4 12 2 0 4 6 8 8 8 2 2 4 0 +8 4 2 3 4 3 2 7 10 7 6 2 2 4 9 4 8 3 11 9 4 4 5 3 6 4 10 6 3 11 2 4 16 8 4 6 8 6 4 14 20 14 12 4 4 8 18 8 16 6 22 18 8 8 10 6 12 8 20 12 6 22 4 8 +2 1 4 1 4 5 0 2 5 2 5 1 1 2 2 2 3 1 6 5 2 1 1 5 5 1 1 3 0 4 1 3 4 2 8 2 8 10 0 4 10 4 10 2 2 4 4 4 6 2 12 10 4 2 2 10 10 2 2 6 0 8 2 6 +0 0 2 3 0 3 0 0 0 1 3 0 1 0 1 2 1 0 1 0 0 0 0 1 0 2 1 0 0 2 0 0 0 0 4 6 0 6 0 0 0 2 6 0 2 0 2 4 2 0 2 0 0 0 0 2 0 4 2 0 0 4 0 0 +8 3 0 10 6 5 3 2 5 4 7 0 7 4 4 7 1 5 13 2 3 5 2 4 13 4 10 3 5 5 6 4 16 6 0 20 12 10 6 4 10 8 14 0 14 8 8 14 2 10 26 4 6 10 4 8 26 8 20 6 10 10 12 8 +3 6 10 0 3 1 0 1 4 3 4 3 3 1 5 1 5 3 11 7 5 0 11 0 4 3 5 7 5 2 9 1 6 12 20 0 6 2 0 2 8 6 8 6 6 2 10 2 10 6 22 14 10 0 22 0 8 6 10 14 10 4 18 2 +64 4:1 12:1 14:1 18:1 19:1 26:2 27:1 28:1 30:1 36:2 44:2 46:2 50:2 51:2 58:4 59:2 60:2 62:2 +64 13:1 15:1 16:1 18:1 24:1 45:2 47:2 48:2 50:2 56:2 +64 17:1 49:2 +64 6:1 10:1 12:1 14:1 17:1 18:1 20:1 38:2 42:2 44:2 46:2 49:2 50:2 52:2 +64 0:1 3:1 5:1 6:1 14:1 16:1 18:1 19:1 24:1 26:1 30:1 32:2 35:2 37:2 38:2 46:2 48:2 50:2 51:2 56:2 58:2 62:2 +3 1 4 4 3 1 4 0 1 2 2 0 3 3 1 2 2 0 0 4 0 2 0 1 8 1 0 3 4 0 1 1 6 2 8 8 6 2 8 0 2 4 4 0 6 6 2 4 4 0 0 8 0 4 0 2 16 2 0 6 8 0 2 2 +64 1:2 8:1 11:2 12:2 13:2 18:1 24:2 26:1 27:1 28:1 31:1 33:4 40:2 43:4 44:4 45:4 50:2 56:4 58:2 59:2 60:2 63:2 +64 7:1 8:1 10:1 15:2 16:2 18:2 20:1 22:1 24:1 27:1 28:2 39:2 40:2 42:2 47:4 48:4 50:4 52:2 54:2 56:2 59:2 60:4 +1 2 1 0 0 0 1 2 1 1 0 1 0 1 1 5 1 0 5 1 2 2 0 2 1 2 3 1 2 5 2 0 2 4 2 0 0 0 2 4 2 2 0 2 0 2 2 10 2 0 10 2 4 4 0 4 2 4 6 2 4 10 4 0 +12 12 4 10 11 4 4 5 14 8 9 9 13 4 5 10 12 15 30 14 7 8 8 7 19 8 15 11 10 8 8 12 24 24 8 20 22 8 8 10 28 16 18 18 26 8 10 20 24 30 60 28 14 16 16 14 38 16 30 22 20 16 16 24 +64 0:1 10:1 13:1 14:2 16:2 
18:1 24:2 25:2 26:2 28:3 32:2 42:2 45:2 46:4 48:4 50:2 56:4 57:4 58:4 60:6 +8 10 0 0 5 5 6 5 11 2 7 5 9 7 7 7 11 8 17 7 4 5 5 5 14 5 19 6 5 3 7 5 16 20 0 0 10 10 12 10 22 4 14 10 18 14 14 14 22 16 34 14 8 10 10 10 28 10 38 12 10 6 14 10 +8 7 4 6 5 3 4 5 5 8 9 7 7 2 12 4 4 4 9 10 1 4 6 2 12 12 6 8 5 3 8 6 16 14 8 12 10 6 8 10 10 16 18 14 14 4 24 8 8 8 18 20 2 8 12 4 24 24 12 16 10 6 16 12 +3 0 1 2 2 1 1 2 3 1 2 1 0 1 2 2 3 1 8 3 2 2 0 0 2 1 3 1 2 0 1 0 6 0 2 4 4 2 2 4 6 2 4 2 0 2 4 4 6 2 16 6 4 4 0 0 4 2 6 2 4 0 2 0 +64 0:1 1:2 5:1 14:2 18:1 21:1 24:1 26:1 27:1 32:2 33:4 37:2 46:4 50:2 53:2 56:2 58:2 59:2 +10 9 3 10 2 3 0 8 9 13 15 4 7 3 11 12 13 8 25 11 5 8 6 8 17 4 7 7 18 3 7 10 20 18 6 20 4 6 0 16 18 26 30 8 14 6 22 24 26 16 50 22 10 16 12 16 34 8 14 14 36 6 14 20 +11 5 4 9 5 4 4 3 4 1 8 0 5 7 8 5 6 10 22 10 8 9 7 7 22 3 19 4 5 7 11 10 22 10 8 18 10 8 8 6 8 2 16 0 10 14 16 10 12 20 44 20 16 18 14 14 44 6 38 8 10 14 22 20 +3 0 0 0 3 2 2 3 2 1 0 0 2 0 1 1 1 0 6 1 1 1 3 1 4 1 2 3 3 1 0 1 6 0 0 0 6 4 4 6 4 2 0 0 4 0 2 2 2 0 12 2 2 2 6 2 8 2 4 6 6 2 0 2 +14 13 1 9 8 4 7 5 15 7 5 7 3 10 8 10 11 7 33 10 3 3 11 4 26 12 16 12 5 6 7 10 28 26 2 18 16 8 14 10 30 14 10 14 6 20 16 20 22 14 66 20 6 6 22 8 52 24 32 24 10 12 14 20 +1 1 0 0 0 2 1 0 1 0 1 3 1 1 1 0 1 1 2 0 1 0 0 0 1 1 2 2 3 0 0 0 2 2 0 0 0 4 2 0 2 0 2 6 2 2 2 0 2 2 4 0 2 0 0 0 2 2 4 4 6 0 0 0 +2 0 0 0 0 0 0 1 0 1 0 0 1 0 1 0 0 0 1 1 1 1 1 0 4 0 4 0 1 1 0 0 4 0 0 0 0 0 0 2 0 2 0 0 2 0 2 0 0 0 2 2 2 2 2 0 8 0 8 0 2 2 0 0 +64 7:1 22:2 23:1 39:2 54:4 55:2 +9 7 3 8 6 5 11 11 14 7 8 6 11 8 12 11 12 8 32 10 4 10 16 16 28 12 12 12 16 5 16 8 18 14 6 16 12 10 22 22 28 14 16 12 22 16 24 22 24 16 64 20 8 20 32 32 56 24 24 24 32 10 32 16 +8 4 1 5 0 6 1 7 5 8 1 2 0 2 3 4 5 3 15 6 3 4 2 1 8 4 9 2 7 5 10 4 16 8 2 10 0 12 2 14 10 16 2 4 0 4 6 8 10 6 30 12 6 8 4 2 16 8 18 4 14 10 20 8 +64 0:1 6:1 10:2 12:1 14:1 17:2 22:1 24:2 27:1 29:2 30:2 32:2 38:2 42:4 44:2 46:2 49:4 54:2 56:4 59:2 61:4 62:4 +24 12 11 16 19 8 11 8 24 17 18 13 13 
7 23 21 19 7 66 20 9 22 34 15 24 12 25 13 18 15 17 13 48 24 22 32 38 16 22 16 48 34 36 26 26 14 46 42 38 14 132 40 18 44 68 30 48 24 50 26 36 30 34 26 +4 7 1 3 2 2 2 4 5 1 4 5 1 3 4 0 1 2 4 5 3 2 2 4 9 6 3 4 5 4 8 3 8 14 2 6 4 4 4 8 10 2 8 10 2 6 8 0 2 4 8 10 6 4 4 8 18 12 6 8 10 8 16 6 +26 13 8 6 5 5 13 12 24 22 15 5 9 6 10 11 10 16 62 14 15 5 21 22 34 20 18 9 18 12 18 16 52 26 16 12 10 10 26 24 48 44 30 10 18 12 20 22 20 32 124 28 30 10 42 44 68 40 36 18 36 24 36 32 +17 19 5 9 9 3 9 5 8 13 17 7 19 2 11 15 12 7 44 12 7 5 21 10 20 7 32 15 16 11 8 15 34 38 10 18 18 6 18 10 16 26 34 14 38 4 22 30 24 14 88 24 14 10 42 20 40 14 64 30 32 22 16 30 +5 4 2 2 4 4 1 3 6 3 2 1 1 4 2 3 5 2 7 1 1 3 3 2 7 2 2 3 4 0 2 4 10 8 4 4 8 8 2 6 12 6 4 2 2 8 4 6 10 4 14 2 2 6 6 4 14 4 4 6 8 0 4 8 +13 7 2 6 5 4 2 4 10 5 7 6 10 3 8 5 8 3 17 8 3 4 4 6 19 3 7 13 7 3 11 7 26 14 4 12 10 8 4 8 20 10 14 12 20 6 16 10 16 6 34 16 6 8 8 12 38 6 14 26 14 6 22 14 +64 1:2 4:2 10:1 14:1 24:1 25:1 33:4 36:4 42:2 46:2 56:2 57:2 +1 0 0 0 0 0 2 0 2 1 3 1 2 0 0 1 0 0 4 4 1 1 2 1 2 0 1 1 0 1 1 1 2 0 0 0 0 0 4 0 4 2 6 2 4 0 0 2 0 0 8 8 2 2 4 2 4 0 2 2 0 2 2 2 +0 1 1 0 3 0 0 2 0 4 2 3 0 0 1 0 1 0 2 2 0 0 1 0 1 0 0 1 1 1 0 2 0 2 2 0 6 0 0 4 0 8 4 6 0 0 2 0 2 0 4 4 0 0 2 0 2 0 0 2 2 2 0 4 +6 9 4 1 6 1 0 4 6 6 4 8 3 2 4 2 4 0 11 5 3 2 2 1 10 5 4 2 6 3 3 7 12 18 8 2 12 2 0 8 12 12 8 16 6 4 8 4 8 0 22 10 6 4 4 2 20 10 8 4 12 6 6 14 +7 5 6 7 7 1 5 8 4 10 6 7 6 2 8 5 2 3 15 4 9 3 4 5 11 9 5 6 3 3 5 7 14 10 12 14 14 2 10 16 8 20 12 14 12 4 16 10 4 6 30 8 18 6 8 10 22 18 10 12 6 6 10 14 +1 0 0 0 0 0 0 0 2 1 0 2 0 1 0 1 0 1 0 0 1 1 0 0 4 0 0 1 2 1 1 1 2 0 0 0 0 0 0 0 4 2 0 4 0 2 0 2 0 2 0 0 2 2 0 0 8 0 0 2 4 2 2 2 +5 4 0 0 0 1 0 0 6 5 0 2 2 0 3 3 3 0 5 1 0 4 1 0 0 0 0 3 0 3 4 2 10 8 0 0 0 2 0 0 12 10 0 4 4 0 6 6 6 0 10 2 0 8 2 0 0 0 0 6 0 6 8 4 +0 1 1 0 3 6 1 1 3 1 1 1 1 2 4 5 0 1 3 1 0 1 2 2 6 2 3 1 6 4 7 1 0 2 2 0 6 12 2 2 6 2 2 2 2 4 8 10 0 2 6 2 0 2 4 4 12 4 6 2 12 8 14 2 +6 1 1 1 2 4 2 1 0 2 0 0 1 0 1 4 2 0 2 7 0 5 
2 2 5 1 0 2 3 2 1 4 12 2 2 2 4 8 4 2 0 4 0 0 2 0 2 8 4 0 4 14 0 10 4 4 10 2 0 4 6 4 2 8 +1 2 0 1 0 1 1 1 0 0 1 3 0 1 1 0 0 1 3 2 1 0 1 0 0 0 0 0 2 2 2 1 2 4 0 2 0 2 2 2 0 0 2 6 0 2 2 0 0 2 6 4 2 0 2 0 0 0 0 0 4 4 4 2 +10 9 5 6 4 4 4 2 9 6 6 2 4 4 9 6 4 3 19 5 1 2 2 4 11 2 12 6 5 9 8 8 20 18 10 12 8 8 8 4 18 12 12 4 8 8 18 12 8 6 38 10 2 4 4 8 22 4 24 12 10 18 16 16 +4 1 1 0 0 1 0 0 3 0 1 1 1 0 1 0 1 0 3 2 0 0 0 0 1 0 1 1 0 3 0 2 8 2 2 0 0 2 0 0 6 0 2 2 2 0 2 0 2 0 6 4 0 0 0 0 2 0 2 2 0 6 0 4 +10 6 2 6 10 5 4 4 9 12 8 3 4 7 10 7 12 5 26 11 8 3 6 7 19 3 13 10 7 8 2 3 20 12 4 12 20 10 8 8 18 24 16 6 8 14 20 14 24 10 52 22 16 6 12 14 38 6 26 20 14 16 4 6 +5 1 0 4 4 6 2 2 3 1 8 3 5 2 4 5 3 8 15 6 3 1 5 2 4 1 6 4 4 1 4 3 10 2 0 8 8 12 4 4 6 2 16 6 10 4 8 10 6 16 30 12 6 2 10 4 8 2 12 8 8 2 8 6 +0 0 0 0 1 0 2 1 0 1 0 0 0 1 0 2 0 1 4 1 1 0 0 0 0 0 2 0 1 0 1 0 0 0 0 0 2 0 4 2 0 2 0 0 0 2 0 4 0 2 8 2 2 0 0 0 0 0 4 0 2 0 2 0 +11 6 2 15 2 3 2 4 12 1 11 10 11 7 4 16 6 4 24 5 0 5 9 8 15 7 9 13 8 6 6 10 22 12 4 30 4 6 4 8 24 2 22 20 22 14 8 32 12 8 48 10 0 10 18 16 30 14 18 26 16 12 12 20 +8 11 1 6 7 7 5 9 4 6 9 3 8 1 10 7 10 3 21 9 3 7 6 1 17 5 20 3 2 7 4 7 16 22 2 12 14 14 10 18 8 12 18 6 16 2 20 14 20 6 42 18 6 14 12 2 34 10 40 6 4 14 8 14 +6 6 2 3 6 0 4 2 3 12 10 6 4 3 4 13 2 5 18 4 6 4 5 3 19 3 7 7 9 2 7 5 12 12 4 6 12 0 8 4 6 24 20 12 8 6 8 26 4 10 36 8 12 8 10 6 38 6 14 14 18 4 14 10 +64 18:2 19:1 24:1 26:1 29:1 30:1 31:1 50:4 51:2 56:2 58:2 61:2 62:2 63:2 +3 4 0 2 3 1 1 0 2 0 2 4 2 1 3 4 1 1 8 6 1 0 3 1 9 5 6 3 3 2 3 5 6 8 0 4 6 2 2 0 4 0 4 8 4 2 6 8 2 2 16 12 2 0 6 2 18 10 12 6 6 4 6 10 +2 0 0 1 0 0 2 0 0 0 0 0 1 1 1 0 5 2 1 0 0 0 1 1 4 1 2 2 1 0 1 1 4 0 0 2 0 0 4 0 0 0 0 0 2 2 2 0 10 4 2 0 0 0 2 2 8 2 4 4 2 0 2 2 +0 1 1 0 3 0 0 2 0 4 2 3 0 0 1 0 1 0 2 2 0 0 1 0 1 0 0 1 1 1 0 2 0 2 2 0 6 0 0 4 0 8 4 6 0 0 2 0 2 0 4 4 0 0 2 0 2 0 0 2 2 2 0 4 +5 0 0 0 1 2 1 5 3 1 1 1 2 1 0 0 1 0 3 1 0 3 1 1 3 1 0 2 2 1 0 3 10 0 0 0 2 4 2 10 6 2 2 2 4 2 0 0 2 0 6 2 0 6 2 2 6 2 0 4 4 2 0 6 
+4 2 1 1 3 0 4 2 2 4 3 4 2 0 2 4 1 3 5 5 2 2 0 2 1 0 3 2 2 0 3 2 8 4 2 2 6 0 8 4 4 8 6 8 4 0 4 8 2 6 10 10 4 4 0 4 2 0 6 4 4 0 6 4 +17 17 4 4 10 6 9 8 14 7 11 6 10 11 11 18 13 9 46 16 11 6 9 11 30 9 15 10 9 15 13 17 34 34 8 8 20 12 18 16 28 14 22 12 20 22 22 36 26 18 92 32 22 12 18 22 60 18 30 20 18 30 26 34 +3 3 4 4 2 4 6 3 1 4 3 4 7 2 4 5 1 0 9 3 2 1 4 5 14 0 8 2 6 3 7 6 6 6 8 8 4 8 12 6 2 8 6 8 14 4 8 10 2 0 18 6 4 2 8 10 28 0 16 4 12 6 14 12 +10 13 8 23 8 12 4 4 10 7 18 5 11 5 14 12 10 6 23 9 5 8 14 8 20 5 9 12 9 7 4 18 20 26 16 46 16 24 8 8 20 14 36 10 22 10 28 24 20 12 46 18 10 16 28 16 40 10 18 24 18 14 8 36 +0 1 1 0 3 0 0 2 0 4 2 3 0 0 1 0 1 0 2 2 0 0 1 0 1 0 0 1 1 1 0 2 0 2 2 0 6 0 0 4 0 8 4 6 0 0 2 0 2 0 4 4 0 0 2 0 2 0 0 2 2 2 0 4 +12 6 3 6 11 11 8 9 17 10 3 5 10 4 4 16 3 6 25 8 9 8 9 8 16 10 14 15 11 7 15 12 24 12 6 12 22 22 16 18 34 20 6 10 20 8 8 32 6 12 50 16 18 16 18 16 32 20 28 30 22 14 30 24 +64 2:1 7:1 9:1 18:2 22:1 23:1 24:1 26:1 34:2 39:2 41:2 50:4 54:2 55:2 56:2 58:2 +64 19:1 22:1 24:1 26:1 51:2 54:2 56:2 58:2 +9 7 8 13 9 4 10 12 10 16 10 10 5 5 16 7 7 2 41 11 4 4 11 9 35 8 18 13 23 7 17 9 18 14 16 26 18 8 20 24 20 32 20 20 10 10 32 14 14 4 82 22 8 8 22 18 70 16 36 26 46 14 34 18 +2 2 0 1 0 0 5 0 3 1 3 0 3 0 3 0 0 1 9 4 0 0 2 0 5 1 4 3 2 1 0 1 4 4 0 2 0 0 10 0 6 2 6 0 6 0 6 0 0 2 18 8 0 0 4 0 10 2 8 6 4 2 0 2 +64 4:2 7:1 9:2 19:1 21:1 23:1 26:2 27:1 36:4 39:2 41:4 51:2 53:2 55:2 58:4 59:2 +64 0:1 1:1 4:1 18:2 22:1 32:2 33:2 36:2 50:4 54:2 +14 19 7 13 13 16 15 12 19 10 17 11 9 7 20 13 15 7 46 21 9 12 23 7 48 10 34 16 17 10 20 22 28 38 14 26 26 32 30 24 38 20 34 22 18 14 40 26 30 14 92 42 18 24 46 14 96 20 68 32 34 20 40 44 diff --git a/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Schema.txt b/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Schema.txt new file mode 100644 index 0000000000..389860ee7f --- /dev/null +++ b/test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Schema.txt @@ -0,0 +1,52 @@ +---- 
BoundLoader ---- +2 columns: + One: Text + Two: Text +---- RowToRowMapperTransform ---- +5 columns: + One: Text + Two: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec +---- RowToRowMapperTransform ---- +8 columns: + One: Text + Two: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + temp__tmp000_000: Vec> + temp__tmp001_000: Vec> + temp__tmp002_000: Vec> +---- RowToRowMapperTransform ---- +10 columns: + One: Text + Two: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + temp__tmp000_000: Vec> + temp__tmp001_000: Vec> + temp__tmp002_000: Vec> + F1: Vec + F2: Vec +---- SelectColumnsDataTransform ---- +7 columns: + One: Text + Two: Text + _tmp000: Vec + _tmp001: Vec + _tmp002: Vec + F1: Vec + F2: Vec +---- SelectColumnsDataTransform ---- +4 columns: + One: Text + Two: Text + F1: Vec + F2: Vec +---- SelectColumnsDataTransform ---- +2 columns: + F1: Vec + F2: Vec diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index 95f70b6c8b..23d9b449ba 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -1370,11 +1370,7 @@ public void EntryPointPipelineEnsembleGetSummary() for (int i = 0; i < nModels; i++) { var data = splitOutput.TrainData[i]; - data = OneHotEncodingTransformer.Create(Env, - new OneHotEncodingTransformer.Arguments() - { - Column = new[] { new OneHotEncodingTransformer.Column() { Name = "Cat", Source = "Cat" } } - }, data); + data = new OneHotEncodingEstimator(Env,"Cat").Fit(data).Transform(data); data = new ColumnConcatenatingTransformer(Env, new ColumnConcatenatingTransformer.ColumnInfo("Features", i % 2 == 0 ? 
new[] { "Features", "Cat" } : new[] { "Cat", "Features" })).Transform(data); if (i % 2 == 0) { diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs index 115ebede8f..c62c23903f 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs @@ -602,6 +602,201 @@ public void SavePipeWordHash() Done(); } + [Fact] + public void SavePipeWordHashUnordered() + { + string pathData = GetDataPath(@"lm.sample.txt"); + TestCore(pathData, true, + new[] { + "loader=Text{header+ col=One:TX:4 col=Two:TX:3 rows=101}", + "xf=WordHashBag{bits=5 ord=- col=F1:One col=F2:One,One}", + "xf=SelectColumns{keepcol=F1 keepcol=F2}", + }, + (pipe) => + { + // Verify that F2 = 2 * F1 + using (var c = pipe.GetRowCursor(col => true)) + { + int col1; + bool tmp1 = c.Schema.TryGetColumnIndex("F1", out col1); + if (!Check(tmp1, "Column F1 not found!")) + return; + int col2; + bool tmp2 = c.Schema.TryGetColumnIndex("F2", out col2); + if (!Check(tmp2, "Column F2 not found!")) + return; + + var get1 = c.GetGetter>(col1); + var get2 = c.GetGetter>(col2); + VBuffer bag1 = default(VBuffer); + VBuffer bag2 = default(VBuffer); + while (c.MoveNext()) + { + get1(ref bag1); + get2(ref bag2); + if (!CompareVec(in bag1, in bag2, bag1.Length, (x1, x2) => 2 * x1 == x2)) + { + Fail("Values don't match"); + return; + } + } + } + }); + + Done(); + } + + [Fact] + public void SavePipeInvertHash() + { + string pathData = DeleteOutputPath("SavePipe","InvertHash-Data.txt"); + // Four columns. First "A" with words starting with "a" (for easy identification), second + // "K" with an explicit key type, third "E" a column that has all missing values, and fourth + // "B" with words starting with "b". 
+ File.WriteAllLines(pathData, + new[] + { + "annie ate an ant\t5\t4\t\tbob bakes brownies", + "an angry ant\t3\t3\t\tbob bowled badly", + "\t10\t\t\t\"\"" + }); + const string loader = "loader=Text{col=A:TX:0 col=K:U4[3-10]:1-2 col=KS:U4[3-10]:2 col=B:TX:4 col=E:TX:3}"; + TestCore(pathData, true, + new[] { + loader, + "xf=WordHashBag{bits=3 ord- col=F1:A,B col=F2:B,A ih=-1 ngram=2}", + "xf=WordHashBag{bits=3 ord- col=F3:A,B col=F4:B,A ih=1 ngram=2}", + "xf=SelectColumns{keepCol=A keepCol=B keepCol=F1 keepCol=F2 keepCol=F3 keepCol=F4}" + }, suffix: "1"); + // Same, but using per column overrides, including one column without inversion. + TestCore(pathData, true, + new[] { + loader, + "xf=WordHashBag{bits=3 ord- col=F1:A,B col=F2:B,A ih=-1 ngram=2 col={name=F3 src=A src=B ih=1} col={name=F4 src=B src=A ih=1} col={name=F5 src=A src=B ih=0}}", + "xf=SelectColumns{keepCol=A keepCol=B keepCol=F1 keepCol=F2 keepCol=F3 keepCol=F4 keepCol=F5}" + }, suffix: "2"); + // Do to the key column. + TestCore(pathData, true, + new[] { + loader, + "xf=CatHash{ih=-1 col=KH:2:K col={name=KHU bits=2 src=K ordered-}}", + "xf=SelectColumns{keepCol=K keepCol=KH keepCol=KHU}" + }, suffix: "3"); + // Do to the key column combining it with the text column. + TestCore(pathData, true, + new[] { + loader, + "xf=WordToken{col=AT:A}", + "xf=Hash{bits=2 ih=-1 col=AH:AT col=KH:K}", + "xf=NGramHash{bits=3 ih=-1 col=N3:AH,KH seed=2}", + "xf=NGramHash{bits=10 ih=-1 col=N10:AH,KH seed=2}", + "xf=SelectColumns{keepCol=A keepCol=K keepCol=KH keepCol=N3 keepCol=N10}" + }, suffix: "4"); + // Do for scalar non-vector columns. + TestCore(pathData, true, + new[] { + loader, + "xf=CatHash{bits=3 ih=-1 col=AH:A col=KH:KS}", + "xf=SelectColumns{keepCol=A keepCol=KS keepCol=AH keepCol=KH}" + }, suffix: "5"); + + // Do with full-length grams only. 
+ TestCore(pathData, true, + new[] { + loader, + "xf=WordToken{col=AT:A}", + "xf=Hash{col=AH:AT}", + "xf=NgramHash{col=AH ngram=3 hashbits=4 all- ih=3}", + "xf=SelectColumns{keepCol=AH}" + }, suffix: "6"); + + Done(); + } + + [Fact] + public void SavePipeWordBag() + { + string pathData = GetDataPath(@"lm.sample.txt"); + TestCore(pathData, true, + new[] { + "loader=Text{header+ col=Label:TX:0 col=One:TX:4 col=Vec:TX:3,4 rows=101}", + "xf=AutoLabel{col=Label}", + "xf=WordBag{max=10", + " col=F11:One col={name=F12 src=One ngram=4 max=3 max=4 max=5} col={name=F13 src=One ngram=3 skips=2}", + " col=F21:Vec col={name=F22 src=Vec max=20 ngram=4}}", + "xf=WordBag{col={name=F23 src=Vec max=10 ngram=3 skips=2}}", + "xf=SelectColumns{keepCol=Label keepCol=F21 keepCol=F22 keepCol=F23 keepCol=F11 keepCol=F12 keepCol=F13}", + }); + + Done(); + } + + [Fact] + public void SavePipeWordBagTfIdf() + { + string pathData = DeleteOutputPath("SavePipe", "Sample-Data.txt"); + File.WriteAllLines(pathData, + new[] + { + "Text", + "A B C D", + "E F G H", + "E A", + "E F G H K L A B", + "A B" + }); + + TestCore(pathData, true, + new[] { + "loader=Text{header=+ col=Text:TX:0}", + "xf=WordBag{col={name=TfIdf src=Text max=5 ngram=3 weighting=TfIdf}}", + "xf=SelectColumns{keepCol=TfIdf}", + }); + + Done(); + } + + [Fact] + public void SavePipeWordBagManyToOne() + { + string pathData = GetDataPath(@"lm.sample.txt"); + TestCore(pathData, true, + new[] { + "loader=Text{header+ col=One:TX:4 col=Vec:TX:3,4 rows=101}", + "xf=WordBag{col={name=WB1 src=One max=10 ngram=3 skips=2} col={name=WB2 src=One src=One max=10 ngram=3 skips=2}}", + "xf=SelectColumns{keepCol=WB1 keepCol=WB2}" + }, + (pipe) => + { + // Verify that WB2 = 2 * WB1 + using (var c = pipe.GetRowCursor(col => true)) + { + var b1 = default(VBuffer); + var b2 = default(VBuffer); + int col1, col2; + if (!c.Schema.TryGetColumnIndex("WB1", out col1) || !c.Schema.TryGetColumnIndex("WB2", out col2)) + { + Fail("Did not find expected columns"); + 
return; + } + var get1 = c.GetGetter>(col1); + var get2 = c.GetGetter>(col2); + while (c.MoveNext()) + { + get1(ref b1); + get2(ref b2); + if (!CompareVec(in b1, in b2, b1.Length, (x1, x2) => 2 * x1 == x2)) + { + Fail("Unexpected values in row {0}", c.Position); + break; + } + } + } + }); + + Done(); + } + [Fact] public void SavePipeWithKey() { @@ -924,6 +1119,8 @@ private void TestHashTransformVectorHelper(ArrayDataViewBuilder builder, uint[][ } } } + + } /// /// A class for non-baseline data pipe tests. diff --git a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs index 16d7707730..d084fb171e 100644 --- a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs @@ -145,8 +145,8 @@ private void ValidateMetadata(IDataView result) Assert.True(result.Schema.TryGetColumnIndex("CatD", out int colD)); Assert.True(result.Schema.TryGetColumnIndex("CatE", out int colE)); Assert.True(result.Schema.TryGetColumnIndex("CatF", out int colF)); - Assert.True(result.Schema.TryGetColumnIndex("CatE", out int colG)); - Assert.True(result.Schema.TryGetColumnIndex("CatF", out int colH)); + Assert.True(result.Schema.TryGetColumnIndex("CatG", out int colG)); + Assert.True(result.Schema.TryGetColumnIndex("CatH", out int colH)); var types = result.Schema.GetMetadataTypes(colA); Assert.Equal(types.Select(x => x.Key), new string[1] { MetadataUtils.Kinds.SlotNames }); VBuffer> slots = default; @@ -201,16 +201,20 @@ private void ValidateMetadata(IDataView result) Assert.True(normalized); types = result.Schema.GetMetadataTypes(colG); - Assert.Equal(types.Select(x => x.Key), new string[2] { MetadataUtils.Kinds.CategoricalSlotRanges, MetadataUtils.Kinds.IsNormalized }); - result.Schema.GetMetadata(MetadataUtils.Kinds.CategoricalSlotRanges, colG, ref slotRanges); - Assert.True(slotRanges.Length == 4); - 
Assert.Equal(slotRanges.Items().Select(x => x.Value), new int[4] { 0, 5, 6, 11 }); - result.Schema.GetMetadata(MetadataUtils.Kinds.IsNormalized, colF, ref normalized); - Assert.True(normalized); + Assert.Equal(types.Select(x => x.Key), new string[1] { MetadataUtils.Kinds.SlotNames }); + result.Schema.GetMetadata(MetadataUtils.Kinds.SlotNames, colG, ref slots); + Assert.True(slots.Length == 3); + Assert.Equal(slots.Items().Select(x => x.Value.ToString()), new string[3] { "A", "D", "E" }); types = result.Schema.GetMetadataTypes(colH); - Assert.Equal(types.Select(x => x.Key), new string[1] { MetadataUtils.Kinds.IsNormalized }); - result.Schema.GetMetadata(MetadataUtils.Kinds.IsNormalized, colF, ref normalized); + Assert.Equal(types.Select(x => x.Key), new string[3] { MetadataUtils.Kinds.SlotNames, MetadataUtils.Kinds.CategoricalSlotRanges, MetadataUtils.Kinds.IsNormalized }); + result.Schema.GetMetadata(MetadataUtils.Kinds.SlotNames, colH, ref slots); + Assert.True(slots.Length == 2); + Assert.Equal(slots.Items().Select(x => x.Value.ToString()), new string[2] { "D", "E" }); + result.Schema.GetMetadata(MetadataUtils.Kinds.CategoricalSlotRanges, colH, ref slotRanges); + Assert.True(slotRanges.Length == 2); + Assert.Equal(slotRanges.Items().Select(x => x.Value), new int[2] { 0, 1 }); + result.Schema.GetMetadata(MetadataUtils.Kinds.IsNormalized, colH, ref normalized); Assert.True(normalized); } diff --git a/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs b/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs index d91b2768b3..0c4e4de6e1 100644 --- a/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NAReplaceTests.cs @@ -8,6 +8,7 @@ using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.RunTests; using Microsoft.ML.Runtime.Tools; +using Microsoft.ML.StaticPipe; using Microsoft.ML.Transforms; using System.IO; using Xunit; diff --git a/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs 
b/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs index e92a6a29ea..8b24dd01f9 100644 --- a/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs @@ -217,7 +217,7 @@ public void NgramWorkout() var est = new WordTokenizingEstimator(Env, "text", "text") .Append(new ValueToKeyMappingEstimator(Env, "text", "terms")) .Append(new NgramExtractingEstimator(Env, "terms", "ngrams")) - .Append(new NgramHashEstimator(Env, "terms", "ngramshash")); + .Append(new NgramHashingEstimator(Env, "terms", "ngramshash")); TestEstimatorCore(est, data.AsDynamic, invalidInput: invalidData.AsDynamic); From 9db16c85888e7163c671543faee6ba1f47015d68 Mon Sep 17 00:00:00 2001 From: Tom Finley Date: Wed, 12 Dec 2018 22:14:14 -0800 Subject: [PATCH 052/100] Much more core internalization (Phase 3) (#1870) * Clean needless interface IMlState * ComponentCatalog internalized except RegisterAssembly * Internalize other misc Core items * Internalization of ValueMappers, quantile, and ensemble interfaces and fallout thereby * De-interface IPredictorModel, make implementation internal. * De-interface ITransformModel, make implementation internal. * Internalize RoleMappedData and deal with resulting fallout. 
--- src/Microsoft.ML.Core/CommandLine/CmdLexer.cs | 3 +- .../CommandLine/CmdParser.cs | 14 +- .../ComponentModel/ComponentCatalog.cs | 81 ++- .../ComponentModel/ComponentFactory.cs | 3 +- .../ComponentModel/LoadableClassAttribute.cs | 8 +- src/Microsoft.ML.Core/Data/DataKind.cs | 3 +- .../Data/IHostEnvironment.cs | 3 +- src/Microsoft.ML.Core/Data/IValueMapper.cs | 6 +- .../Data/RoleMappedSchema.cs | 3 +- src/Microsoft.ML.Core/EntryPoints/IMlState.cs | 14 - .../EntryPoints/ModuleArgs.cs | 18 +- .../{IPredictorModel.cs => PredictorModel.cs} | 34 +- .../TransformModel.cs} | 31 +- .../Properties/AssemblyInfo.cs | 1 + .../Commands/EvaluateCommand.cs | 13 +- src/Microsoft.ML.Data/Data/Conversion.cs | 6 +- .../DataView/LambdaColumnMapper.cs | 3 +- .../EntryPoints/CommonOutputs.cs | 8 +- .../EntryPoints/EntryPointNode.cs | 9 +- .../EntryPoints/InputBase.cs | 6 +- ...redictorModel.cs => PredictorModelImpl.cs} | 62 ++- .../EntryPoints/SchemaManipulation.cs | 6 +- .../EntryPoints/ScoreColumnSelector.cs | 8 +- .../EntryPoints/ScoreModel.cs | 18 +- .../EntryPoints/SelectRows.cs | 8 +- .../EntryPoints/SummarizePredictor.cs | 2 +- ...ransformModel.cs => TransformModelImpl.cs} | 44 +- .../Evaluators/AnomalyDetectionEvaluator.cs | 6 +- .../Evaluators/BinaryClassifierEvaluator.cs | 8 +- .../Evaluators/ClusteringEvaluator.cs | 6 +- .../Evaluators/EvaluatorBase.cs | 11 +- .../Evaluators/EvaluatorUtils.cs | 3 +- .../Evaluators/MamlEvaluator.cs | 20 +- .../MultiClassClassifierEvaluator.cs | 6 +- .../MultiOutputRegressionEvaluator.cs | 4 +- .../Evaluators/QuantileRegressionEvaluator.cs | 4 +- .../Evaluators/RankerEvaluator.cs | 8 +- .../Evaluators/RegressionEvaluator.cs | 6 +- .../Prediction/Calibrator.cs | 6 +- .../Properties/AssemblyInfo.cs | 2 + .../Training/TrainerEstimatorBase.cs | 4 +- .../Training/TrainerUtils.cs | 57 +- .../Transforms/GenerateNumberTransform.cs | 2 +- .../Transforms/InvertHashUtils.cs | 6 +- .../Transforms/LabelIndicatorTransform.cs | 2 +- 
.../Transforms/NopTransform.cs | 2 +- .../Transforms/NormalizeUtils.cs | 8 +- .../Transforms/TypeConverting.cs | 2 +- .../Transforms/ValueToKeyMappingEstimator.cs | 3 +- .../ValueToKeyMappingTransformer.cs | 3 +- .../ValueToKeyMappingTransformerImpl.cs | 5 +- .../Utilities/ComponentCreation.cs | 11 +- src/Microsoft.ML.Ensemble/Batch.cs | 2 +- .../EntryPoints/CreateEnsemble.cs | 16 +- .../OutputCombiners/IOutputCombiner.cs | 2 +- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 28 +- .../FeatureSelector/AllFeatureSelector.cs | 2 +- .../FeatureSelector/RandomFeatureSelector.cs | 2 +- .../Selector/IFeatureSelector.cs | 4 +- .../Selector/ISubModelSelector.cs | 16 +- .../Selector/ISubsetSelector.cs | 4 +- .../Selector/SubModelSelector/AllSelector.cs | 2 +- .../SubModelSelector/AllSelectorMultiClass.cs | 2 +- .../BaseBestPerformanceSelector.cs | 2 +- .../SubModelSelector/BaseDiverseSelector.cs | 2 +- .../SubModelSelector/BaseSubModelSelector.cs | 2 +- .../BestDiverseSelectorBinary.cs | 2 +- .../BestDiverseSelectorMultiClass.cs | 2 +- .../BestDiverseSelectorRegression.cs | 2 +- .../BestPerformanceRegressionSelector.cs | 2 +- .../BestPerformanceSelector.cs | 2 +- .../BestPerformanceSelectorMultiClass.cs | 2 +- .../SubModelSelector/SubModelDataSelector.cs | 2 +- .../SubsetSelector/AllInstanceSelector.cs | 2 +- .../SubsetSelector/BaseSubsetSelector.cs | 2 +- .../SubsetSelector/BootstrapSelector.cs | 2 +- .../SubsetSelector/RandomPartitionSelector.cs | 2 +- src/Microsoft.ML.Ensemble/Subset.cs | 2 +- .../Trainer/EnsemblePredictor.cs | 2 +- .../Trainer/EnsembleTrainerBase.cs | 4 +- .../Multiclass/EnsembleMultiClassPredictor.cs | 8 +- src/Microsoft.ML.FastTree/FastTree.cs | 6 +- src/Microsoft.ML.FastTree/GamTrainer.cs | 4 +- .../TreeEnsembleFeaturizer.cs | 4 +- .../EntryPoints/ImageAnalytics.cs | 10 +- src/Microsoft.ML.Legacy/CSharpApi.cs | 526 +++++++++--------- .../Data/CollectionDataSource.cs | 2 +- .../ILearningPipelineItem.cs | 4 +- 
src/Microsoft.ML.Legacy/LearningPipeline.cs | 16 +- .../Models/CrossValidator.cs | 14 +- .../Models/OneVersusAll.cs | 2 +- .../Models/TrainTestEvaluator.cs | 14 +- src/Microsoft.ML.Legacy/PredictionModel.cs | 2 +- .../EntryPoints/CrossValidationBinaryMacro.cs | 36 +- .../EntryPoints/CrossValidationMacro.cs | 36 +- .../Runtime/EntryPoints/FeatureCombiner.cs | 14 +- .../JsonUtils/ExecuteGraphCommand.cs | 44 +- .../JsonUtils/JsonManifestUtils.cs | 8 +- .../Runtime/EntryPoints/ModelOperations.cs | 26 +- .../Runtime/EntryPoints/OneVersusAllMacro.cs | 18 +- .../EntryPoints/TrainTestBinaryMacro.cs | 4 +- .../Runtime/EntryPoints/TrainTestMacro.cs | 18 +- .../Runtime/Experiment/Experiment.cs | 8 +- .../Internal/Tools/CSharpApiGenerator.cs | 6 +- .../Internal/Tools/CSharpGeneratorUtils.cs | 2 +- .../LightGbmBinaryTrainer.cs | 4 +- .../LightGbmMulticlassTrainer.cs | 6 +- .../LightGbmRankingTrainer.cs | 4 +- .../LightGbmRegressionTrainer.cs | 4 +- .../LightGbmTrainerBase.cs | 6 +- src/Microsoft.ML.Onnx/SaveOnnxCommand.cs | 4 +- src/Microsoft.ML.PCA/PcaTransform.cs | 2 +- .../MatrixFactorizationPredictor.cs | 2 +- .../ResultProcessor.cs | 2 +- .../Standard/LinearModelParameters.cs | 2 +- .../LogisticRegression/LbfgsPredictorBase.cs | 12 +- .../LogisticRegression/LogisticRegression.cs | 6 +- .../MulticlassLogisticRegression.cs | 6 +- .../MultiClass/MetaMulticlassTrainer.cs | 4 +- .../Standard/MultiClass/Ova.cs | 2 +- .../Standard/MultiClass/Pkpd.cs | 2 +- .../Standard/Online/AveragedPerceptron.cs | 2 +- .../Standard/Online/LinearSvm.cs | 2 +- .../Standard/Online/OnlineGradientDescent.cs | 2 +- .../Standard/Online/OnlineLinear.cs | 2 +- .../PoissonRegression/PoissonRegression.cs | 4 +- .../Standard/SdcaBinary.cs | 28 +- .../Standard/SdcaMultiClass.cs | 8 +- .../Standard/SdcaRegression.cs | 6 +- .../Standard/Simple/SimpleTrainers.cs | 3 +- .../Standard/StochasticTrainerBase.cs | 8 +- .../TensorflowTransform.cs | 2 +- .../TimeSeriesProcessing.cs | 16 +- 
.../BootstrapSamplingTransformer.cs | 2 +- .../EntryPoints/SelectFeatures.cs | 4 +- .../EntryPoints/TextAnalytics.cs | 16 +- src/Microsoft.ML.Transforms/GcnTransform.cs | 4 +- src/Microsoft.ML.Transforms/GroupTransform.cs | 2 +- .../HashJoiningTransform.cs | 2 +- src/Microsoft.ML.Transforms/NAHandling.cs | 10 +- src/Microsoft.ML.Transforms/OneHotEncoding.cs | 8 +- .../OptionalColumnTransform.cs | 2 +- .../UngroupTransform.cs | 2 +- .../Common/EntryPoints/core_ep-list.tsv | 4 +- .../Common/EntryPoints/core_manifest.json | 4 +- .../Helpers/EnvironmentFactory.cs | 1 - .../UnitTests/TestCSharpApi.cs | 12 +- .../UnitTests/TestEntryPoints.cs | 56 +- .../DnnImageFeaturizerTest.cs | 8 +- .../TestPredictors.cs | 16 +- .../Microsoft.ML.TestFramework/ModelHelper.cs | 6 +- .../IrisPlantClassificationTests.cs | 2 +- .../SentimentPredictionTests.cs | 2 +- 153 files changed, 959 insertions(+), 913 deletions(-) delete mode 100644 src/Microsoft.ML.Core/EntryPoints/IMlState.cs rename src/Microsoft.ML.Core/EntryPoints/{IPredictorModel.cs => PredictorModel.cs} (60%) rename src/Microsoft.ML.Core/{Data/ITransformModel.cs => EntryPoints/TransformModel.cs} (68%) rename src/Microsoft.ML.Data/EntryPoints/{PredictorModel.cs => PredictorModelImpl.cs} (65%) rename src/Microsoft.ML.Data/EntryPoints/{TransformModel.cs => TransformModelImpl.cs} (84%) diff --git a/src/Microsoft.ML.Core/CommandLine/CmdLexer.cs b/src/Microsoft.ML.Core/CommandLine/CmdLexer.cs index a5c14259a8..7dc81ea8d2 100644 --- a/src/Microsoft.ML.Core/CommandLine/CmdLexer.cs +++ b/src/Microsoft.ML.Core/CommandLine/CmdLexer.cs @@ -302,7 +302,8 @@ public static string UnquoteValue(string str) } } - public sealed class CmdQuoter + [BestFriend] + internal sealed class CmdQuoter { private readonly string _str; private StringBuilder _sb; diff --git a/src/Microsoft.ML.Core/CommandLine/CmdParser.cs b/src/Microsoft.ML.Core/CommandLine/CmdParser.cs index 81018dad3e..30ad079c13 100644 --- a/src/Microsoft.ML.Core/CommandLine/CmdParser.cs +++ 
b/src/Microsoft.ML.Core/CommandLine/CmdParser.cs @@ -537,16 +537,10 @@ private static ArgumentInfo GetArgumentInfo(Type type, object defaults) private static ArgumentAttribute GetAttribute(FieldInfo field) { - var attrs = field.GetCustomAttributes(typeof(ArgumentAttribute), false).ToArray(); - if (attrs.Length == 1) - { - var argumentAttribute = (ArgumentAttribute)attrs[0]; - if (argumentAttribute.Visibility == ArgumentAttribute.VisibilityType.EntryPointsOnly) - return null; - return argumentAttribute; - } - Contracts.Assert(attrs.Length == 0); - return null; + var argumentAttribute = field.GetCustomAttribute(false); + if (argumentAttribute?.Visibility == ArgumentAttribute.VisibilityType.EntryPointsOnly) + return null; + return argumentAttribute; } private void ReportUnrecognizedArgument(string argument) diff --git a/src/Microsoft.ML.Core/ComponentModel/ComponentCatalog.cs b/src/Microsoft.ML.Core/ComponentModel/ComponentCatalog.cs index d0ab610a60..d022a63eab 100644 --- a/src/Microsoft.ML.Core/ComponentModel/ComponentCatalog.cs +++ b/src/Microsoft.ML.Core/ComponentModel/ComponentCatalog.cs @@ -40,7 +40,8 @@ internal ComponentCatalog() /// /// Provides information on an instantiatable component, aka, loadable class. /// - public sealed class LoadableClassInfo + [BestFriend] + internal sealed class LoadableClassInfo { /// /// Used for dictionary lookup based on signature and name. @@ -264,7 +265,8 @@ public object CreateArguments() /// /// A description of a single entry point. /// - public sealed class EntryPointInfo + [BestFriend] + internal sealed class EntryPointInfo { public readonly string Name; public readonly string Description; @@ -333,7 +335,8 @@ private Type[] FindEntryPointKinds(Type type) /// The 'component' is a non-standalone building block that is used to parametrize entry points or other ML.NET components. /// For example, 'Loss function', or 'similarity calculator' could be components. 
/// - public sealed class ComponentInfo + [BestFriend] + internal sealed class ComponentInfo { public readonly string Name; public readonly string Description; @@ -616,7 +619,8 @@ public void RegisterAssembly(Assembly assembly, bool throwOnError = true) /// Return an array containing information for all instantiatable components. /// If provided, the given set of assemblies is loaded first. /// - public LoadableClassInfo[] GetAllClasses() + [BestFriend] + internal LoadableClassInfo[] GetAllClasses() { return _classes.ToArray(); } @@ -625,7 +629,8 @@ public LoadableClassInfo[] GetAllClasses() /// Return an array containing information for instantiatable components with the given /// signature and base type. If provided, the given set of assemblies is loaded first. /// - public LoadableClassInfo[] GetAllDerivedClasses(Type typeBase, Type typeSig) + [BestFriend] + internal LoadableClassInfo[] GetAllDerivedClasses(Type typeBase, Type typeSig) { Contracts.CheckValue(typeBase, nameof(typeBase)); Contracts.CheckValueOrNull(typeSig); @@ -643,7 +648,8 @@ public LoadableClassInfo[] GetAllDerivedClasses(Type typeBase, Type typeSig) /// Return an array containing all the known signature types. If provided, the given set of assemblies /// is loaded first. /// - public Type[] GetAllSignatureTypes() + [BestFriend] + internal Type[] GetAllSignatureTypes() { return _signatures.Select(kvp => kvp.Key).ToArray(); } @@ -651,7 +657,8 @@ public Type[] GetAllSignatureTypes() /// /// Returns a string name for a given signature type. 
/// - public static string SignatureToString(Type sig) + [BestFriend] + internal static string SignatureToString(Type sig) { Contracts.CheckValue(sig, nameof(sig)); Contracts.CheckParam(sig.BaseType == typeof(MulticastDelegate), nameof(sig), "Must be a delegate type"); @@ -670,7 +677,8 @@ private LoadableClassInfo FindClassCore(LoadableClassInfo.Key key) return null; } - public LoadableClassInfo[] FindLoadableClasses(string name) + [BestFriend] + internal LoadableClassInfo[] FindLoadableClasses(string name) { name = name.ToLowerInvariant().Trim(); @@ -680,14 +688,16 @@ public LoadableClassInfo[] FindLoadableClasses(string name) return res; } - public LoadableClassInfo[] FindLoadableClasses() + [BestFriend] + internal LoadableClassInfo[] FindLoadableClasses() { return _classes .Where(ci => ci.SignatureTypes.Contains(typeof(TSig))) .ToArray(); } - public LoadableClassInfo[] FindLoadableClasses() + [BestFriend] + internal LoadableClassInfo[] FindLoadableClasses() { // REVIEW: this and above methods perform a linear search over all the loadable classes. // On 6/15/2015, TLC release build contained 431 of them, so adding extra lookups looks unnecessary at this time. @@ -696,12 +706,14 @@ public LoadableClassInfo[] FindLoadableClasses() .ToArray(); } - public LoadableClassInfo GetLoadableClassInfo(string loadName) + [BestFriend] + internal LoadableClassInfo GetLoadableClassInfo(string loadName) { return GetLoadableClassInfo(loadName, typeof(TSig)); } - public LoadableClassInfo GetLoadableClassInfo(string loadName, Type signatureType) + [BestFriend] + internal LoadableClassInfo GetLoadableClassInfo(string loadName, Type signatureType) { Contracts.CheckParam(signatureType.BaseType == typeof(MulticastDelegate), nameof(signatureType), "signatureType must be a delegate type"); Contracts.CheckValueOrNull(loadName); @@ -712,18 +724,21 @@ public LoadableClassInfo GetLoadableClassInfo(string loadName, Type signatureTyp /// /// Get all registered entry points. 
/// - public IEnumerable AllEntryPoints() + [BestFriend] + internal IEnumerable AllEntryPoints() { return _entryPoints.AsEnumerable(); } - public bool TryFindEntryPoint(string name, out EntryPointInfo entryPoint) + [BestFriend] + internal bool TryFindEntryPoint(string name, out EntryPointInfo entryPoint) { Contracts.CheckNonEmpty(name, nameof(name)); return _entryPointMap.TryGetValue(name, out entryPoint); } - public bool TryFindComponent(string kind, string alias, out ComponentInfo component) + [BestFriend] + internal bool TryFindComponent(string kind, string alias, out ComponentInfo component) { Contracts.CheckNonEmpty(kind, nameof(kind)); Contracts.CheckNonEmpty(alias, nameof(alias)); @@ -733,7 +748,8 @@ public bool TryFindComponent(string kind, string alias, out ComponentInfo compon return _componentMap.TryGetValue($"{kind}:{alias}", out component); } - public bool TryFindComponent(Type argumentType, out ComponentInfo component) + [BestFriend] + internal bool TryFindComponent(Type argumentType, out ComponentInfo component) { Contracts.CheckValue(argumentType, nameof(argumentType)); @@ -741,7 +757,8 @@ public bool TryFindComponent(Type argumentType, out ComponentInfo component) return component != null; } - public bool TryFindComponent(Type interfaceType, Type argumentType, out ComponentInfo component) + [BestFriend] + internal bool TryFindComponent(Type interfaceType, Type argumentType, out ComponentInfo component) { Contracts.CheckValue(interfaceType, nameof(interfaceType)); Contracts.CheckParam(interfaceType.IsInterface, nameof(interfaceType), "Must be interface"); @@ -751,7 +768,8 @@ public bool TryFindComponent(Type interfaceType, Type argumentType, out Componen return component != null; } - public bool TryFindComponent(Type interfaceType, string alias, out ComponentInfo component) + [BestFriend] + internal bool TryFindComponent(Type interfaceType, string alias, out ComponentInfo component) { Contracts.CheckValue(interfaceType, nameof(interfaceType)); 
Contracts.CheckParam(interfaceType.IsInterface, nameof(interfaceType), "Must be interface"); @@ -764,7 +782,8 @@ public bool TryFindComponent(Type interfaceType, string alias, out ComponentInfo /// Akin to , except if the regular (case sensitive) comparison fails, it will /// attempt to back off to a case-insensitive comparison. /// - public bool TryFindComponentCaseInsensitive(Type interfaceType, string alias, out ComponentInfo component) + [BestFriend] + internal bool TryFindComponentCaseInsensitive(Type interfaceType, string alias, out ComponentInfo component) { Contracts.CheckValue(interfaceType, nameof(interfaceType)); Contracts.CheckParam(interfaceType.IsInterface, nameof(interfaceType), "Must be interface"); @@ -786,7 +805,8 @@ private static bool AnyMatch(string name, string[] aliases) /// /// Returns all valid component kinds. /// - public IEnumerable GetAllComponentKinds() + [BestFriend] + internal IEnumerable GetAllComponentKinds() { return _components.Select(x => x.Kind).Distinct().OrderBy(x => x); } @@ -794,7 +814,8 @@ public IEnumerable GetAllComponentKinds() /// /// Returns all components of the specified kind. /// - public IEnumerable GetAllComponents(string kind) + [BestFriend] + internal IEnumerable GetAllComponents(string kind) { Contracts.CheckNonEmpty(kind, nameof(kind)); Contracts.CheckParam(IsValidName(kind), nameof(kind), "Invalid component kind"); @@ -804,13 +825,15 @@ public IEnumerable GetAllComponents(string kind) /// /// Returns all components that implement the specified interface. 
/// - public IEnumerable GetAllComponents(Type interfaceType) + [BestFriend] + internal IEnumerable GetAllComponents(Type interfaceType) { Contracts.CheckValue(interfaceType, nameof(interfaceType)); return _components.Where(x => x.InterfaceType == interfaceType).OrderBy(x => x.Name); } - public bool TryGetComponentKind(Type signatureType, out string kind) + [BestFriend] + internal bool TryGetComponentKind(Type signatureType, out string kind) { Contracts.CheckValue(signatureType, nameof(signatureType)); // REVIEW: replace with a dictionary lookup. @@ -821,7 +844,8 @@ public bool TryGetComponentKind(Type signatureType, out string kind) return faceAttr != null; } - public bool TryGetComponentShortName(Type type, out string name) + [BestFriend] + internal bool TryGetComponentShortName(Type type, out string name) { ComponentInfo component; if (!TryFindComponent(type, out component)) @@ -850,7 +874,8 @@ private static bool IsValidName(string name) /// /// Create an instance of the indicated component with the given extra parameters. /// - public static TRes CreateInstance(IHostEnvironment env, Type signatureType, string name, string options, params object[] extra) + [BestFriend] + internal static TRes CreateInstance(IHostEnvironment env, Type signatureType, string name, string options, params object[] extra) where TRes : class { TRes result; @@ -863,13 +888,15 @@ public static TRes CreateInstance(IHostEnvironment env, Type signatureType /// Try to create an instance of the indicated component and settings with the given extra parameters. /// If there is no such component in the catalog, returns false. Any other error results in an exception. 
/// - public static bool TryCreateInstance(IHostEnvironment env, out TRes result, string name, string options, params object[] extra) + [BestFriend] + internal static bool TryCreateInstance(IHostEnvironment env, out TRes result, string name, string options, params object[] extra) where TRes : class { return TryCreateInstance(env, typeof(TSig), out result, name, options, extra); } - private static bool TryCreateInstance(IHostEnvironment env, Type signatureType, out TRes result, string name, string options, params object[] extra) + [BestFriend] + internal static bool TryCreateInstance(IHostEnvironment env, Type signatureType, out TRes result, string name, string options, params object[] extra) where TRes : class { Contracts.CheckValue(env, nameof(env)); diff --git a/src/Microsoft.ML.Core/ComponentModel/ComponentFactory.cs b/src/Microsoft.ML.Core/ComponentModel/ComponentFactory.cs index 58ea3c6baf..25165f62c9 100644 --- a/src/Microsoft.ML.Core/ComponentModel/ComponentFactory.cs +++ b/src/Microsoft.ML.Core/ComponentModel/ComponentFactory.cs @@ -48,7 +48,8 @@ public interface IComponentFactory /// /// A utility class for creating instances. /// - public static class ComponentFactoryUtils + [BestFriend] + internal static class ComponentFactoryUtils { /// /// Creates a component factory with no extra parameters (other than an ) diff --git a/src/Microsoft.ML.Core/ComponentModel/LoadableClassAttribute.cs b/src/Microsoft.ML.Core/ComponentModel/LoadableClassAttribute.cs index 328a48be2b..bd0be7f84e 100644 --- a/src/Microsoft.ML.Core/ComponentModel/LoadableClassAttribute.cs +++ b/src/Microsoft.ML.Core/ComponentModel/LoadableClassAttribute.cs @@ -12,10 +12,12 @@ namespace Microsoft.ML.Runtime /// /// Common signature type with no extra parameters. 
/// - public delegate void SignatureDefault(); + [BestFriend] + internal delegate void SignatureDefault(); [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] - public sealed class LoadableClassAttribute : LoadableClassAttributeBase + [BestFriend] + internal sealed class LoadableClassAttribute : LoadableClassAttributeBase { /// /// Assembly attribute used to specify that a class is loadable by a machine learning @@ -98,7 +100,7 @@ public LoadableClassAttribute(string summary, Type instType, Type loaderType, Ty } } - public abstract class LoadableClassAttributeBase : Attribute + internal abstract class LoadableClassAttributeBase : Attribute { // Note: these properties have private setters to make attribute parsing easier - the values // are all guaranteed to be in the ConstructorArguments of the CustomAttributeData diff --git a/src/Microsoft.ML.Core/Data/DataKind.cs b/src/Microsoft.ML.Core/Data/DataKind.cs index c20d9930af..b52d5d7587 100644 --- a/src/Microsoft.ML.Core/Data/DataKind.cs +++ b/src/Microsoft.ML.Core/Data/DataKind.cs @@ -52,7 +52,8 @@ public enum DataKind : byte /// /// Extension methods related to the DataKind enum. /// - public static class DataKindExtensions + [BestFriend] + internal static class DataKindExtensions { public const DataKind KindMin = DataKind.I1; public const DataKind KindLim = DataKind.U16 + 1; diff --git a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs index 3f93005544..bfff2459ef 100644 --- a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs +++ b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs @@ -233,7 +233,8 @@ public interface IChannel : IPipe /// that do not belong in more specific areas, for example, or /// component creation. 
/// - public static class HostExtensions + [BestFriend] + internal static class HostExtensions { public static T Apply(this IHost host, string channelName, Func func) { diff --git a/src/Microsoft.ML.Core/Data/IValueMapper.cs b/src/Microsoft.ML.Core/Data/IValueMapper.cs index d0b8d4b755..f5e194cd80 100644 --- a/src/Microsoft.ML.Core/Data/IValueMapper.cs +++ b/src/Microsoft.ML.Core/Data/IValueMapper.cs @@ -9,13 +9,15 @@ namespace Microsoft.ML.Runtime.Data /// /// Delegate type to map/convert a value. /// - public delegate void ValueMapper(in TSrc src, ref TDst dst); + [BestFriend] + internal delegate void ValueMapper(in TSrc src, ref TDst dst); /// /// Delegate type to map/convert among three values, for example, one input with two /// outputs, or two inputs with one output. /// - public delegate void ValueMapper(in TVal1 val1, ref TVal2 val2, ref TVal3 val3); + [BestFriend] + internal delegate void ValueMapper(in TVal1 val1, ref TVal2 val2, ref TVal3 val3); /// /// Interface for mapping a single input value (of an indicated ColumnType) to diff --git a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs index 0c167372ae..15ba2c7586 100644 --- a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs +++ b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs @@ -469,7 +469,8 @@ public RoleMappedSchema(Schema schema, string label, string feature, /// Note that the schema of of is /// guaranteed to equal the the of . /// - public sealed class RoleMappedData + [BestFriend] + internal sealed class RoleMappedData { /// /// The data. diff --git a/src/Microsoft.ML.Core/EntryPoints/IMlState.cs b/src/Microsoft.ML.Core/EntryPoints/IMlState.cs deleted file mode 100644 index 41ea062861..0000000000 --- a/src/Microsoft.ML.Core/EntryPoints/IMlState.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. 
-// See the LICENSE file in the project root for more information. - -namespace Microsoft.ML.Runtime.EntryPoints -{ - /// - /// Dummy interface to allow reference to the AutoMlState object in the C# API (since AutoMlState - /// has things that reference C# API, leading to circular dependency). Makes state object an opaque - /// black box to the graph. The macro itself will then case to the concrete type. - /// - public interface IMlState - { } -} \ No newline at end of file diff --git a/src/Microsoft.ML.Core/EntryPoints/ModuleArgs.cs b/src/Microsoft.ML.Core/EntryPoints/ModuleArgs.cs index d538f636ce..3ed68ae056 100644 --- a/src/Microsoft.ML.Core/EntryPoints/ModuleArgs.cs +++ b/src/Microsoft.ML.Core/EntryPoints/ModuleArgs.cs @@ -577,11 +577,11 @@ public enum DataKind /// FileHandle, /// - /// A transform model, represented by an . + /// A transform model, represented by an . /// TransformModel, /// - /// A predictor model, represented by an . + /// A predictor model, represented by an . /// PredictorModel, /// @@ -602,11 +602,7 @@ public enum DataKind /// optionally, a set of parameters, unique to each component. Example: "BinaryClassifierEvaluator{threshold=0.5}". /// The C# representation is . /// - Component, - /// - /// An C# object that represents state, such as . 
- /// - State + Component } public static DataKind GetDataType(Type type) @@ -632,9 +628,9 @@ public static DataKind GetDataType(Type type) return DataKind.Float; if (typeof(IDataView).IsAssignableFrom(type)) return DataKind.DataView; - if (typeof(ITransformModel).IsAssignableFrom(type)) + if (typeof(TransformModel).IsAssignableFrom(type)) return DataKind.TransformModel; - if (typeof(IPredictorModel).IsAssignableFrom(type)) + if (typeof(PredictorModel).IsAssignableFrom(type)) return DataKind.PredictorModel; if (typeof(IFileHandle).IsAssignableFrom(type)) return DataKind.FileHandle; @@ -649,8 +645,6 @@ public static DataKind GetDataType(Type type) } if (typeof(IComponentFactory).IsAssignableFrom(type)) return DataKind.Component; - if (typeof(IMlState).IsAssignableFrom(type)) - return DataKind.State; return DataKind.Unknown; } @@ -673,7 +667,7 @@ public abstract class Optional public abstract object GetValue(); - protected Optional(bool isExplicit) + private protected Optional(bool isExplicit) { IsExplicit = isExplicit; } diff --git a/src/Microsoft.ML.Core/EntryPoints/IPredictorModel.cs b/src/Microsoft.ML.Core/EntryPoints/PredictorModel.cs similarity index 60% rename from src/Microsoft.ML.Core/EntryPoints/IPredictorModel.cs rename to src/Microsoft.ML.Core/EntryPoints/PredictorModel.cs index 0ec1151134..8a9117c7b4 100644 --- a/src/Microsoft.ML.Core/EntryPoints/IPredictorModel.cs +++ b/src/Microsoft.ML.Core/EntryPoints/PredictorModel.cs @@ -9,35 +9,45 @@ namespace Microsoft.ML.Runtime.EntryPoints { /// - /// Interface for standard predictor model port type. + /// Base type for standard predictor model port type. /// - public interface IPredictorModel + public abstract class PredictorModel { + [BestFriend] + private protected PredictorModel() + { + } + /// /// Save the model to the given stream. 
/// - void Save(IHostEnvironment env, Stream stream); + [BestFriend] + internal abstract void Save(IHostEnvironment env, Stream stream); /// /// Extract only the transform portion of the predictor model. /// - ITransformModel TransformModel { get; } + [BestFriend] + internal abstract TransformModel TransformModel { get; } /// /// Extract the predictor object out of the predictor model. /// - IPredictor Predictor { get; } + [BestFriend] + internal abstract IPredictor Predictor { get; } /// /// Apply the predictor model to the transform model and return the resulting predictor model. /// - IPredictorModel Apply(IHostEnvironment env, ITransformModel transformModel); + [BestFriend] + internal abstract PredictorModel Apply(IHostEnvironment env, TransformModel transformModel); /// /// For a given input data, return role mapped data and the predictor object. /// The scoring entry point will hopefully know how to construct a scorer out of them. /// - void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor); + [BestFriend] + internal abstract void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor); /// /// Returns a string array containing the label names of the label column type predictor was trained on. @@ -46,13 +56,13 @@ public interface IPredictorModel /// /// /// The column type of the label the predictor was trained on. - string[] GetLabelInfo(IHostEnvironment env, out ColumnType labelType); + [BestFriend] + internal abstract string[] GetLabelInfo(IHostEnvironment env, out ColumnType labelType); /// - /// Returns the RoleMappedSchema that was used in training. + /// Returns the that was used in training. 
/// - /// - /// - RoleMappedSchema GetTrainingSchema(IHostEnvironment env); + [BestFriend] + internal abstract RoleMappedSchema GetTrainingSchema(IHostEnvironment env); } } \ No newline at end of file diff --git a/src/Microsoft.ML.Core/Data/ITransformModel.cs b/src/Microsoft.ML.Core/EntryPoints/TransformModel.cs similarity index 68% rename from src/Microsoft.ML.Core/Data/ITransformModel.cs rename to src/Microsoft.ML.Core/EntryPoints/TransformModel.cs index 37ea9c1a2d..2e025019ff 100644 --- a/src/Microsoft.ML.Core/Data/ITransformModel.cs +++ b/src/Microsoft.ML.Core/EntryPoints/TransformModel.cs @@ -12,48 +12,59 @@ namespace Microsoft.ML.Runtime.EntryPoints /// /// Interface for standard transform model port type. /// - public interface ITransformModel + public abstract class TransformModel { + [BestFriend] + private protected TransformModel() + { + } + /// /// The input schema that this transform model was originally instantiated on. /// Note that the schema may have columns that aren't needed by this transform model. - /// If an IDataView exists with this schema, then applying this transform model to it + /// If an exists with this schema, then applying this transform model to it /// shouldn't fail because of column type issues. /// // REVIEW: Would be nice to be able to trim this to the minimum needed somehow. Note // however that doing so may cause issues for composing transform models. For example, // if transform model A needs column X and model B needs Y, that is NOT produced by A, // then trimming A's input schema would cause composition to fail. - Schema InputSchema { get; } + [BestFriend] + internal abstract Schema InputSchema { get; } /// /// The output schema that this transform model was originally instantiated on. The schema resulting - /// from may differ from this, similarly to how + /// from may differ from this, similarly to how /// may differ from the schema of dataviews we apply this transform model to. 
/// - Schema OutputSchema { get; } + [BestFriend] + internal abstract Schema OutputSchema { get; } /// /// Apply the transform(s) in the model to the given input data. /// - IDataView Apply(IHostEnvironment env, IDataView input); + [BestFriend] + internal abstract IDataView Apply(IHostEnvironment env, IDataView input); /// /// Apply the transform(s) in the model to the given transform model. /// - ITransformModel Apply(IHostEnvironment env, ITransformModel input); + [BestFriend] + internal abstract TransformModel Apply(IHostEnvironment env, TransformModel input); /// /// Save the model to the given stream. /// - void Save(IHostEnvironment env, Stream stream); + [BestFriend] + internal abstract void Save(IHostEnvironment env, Stream stream); /// /// Returns the transform model as an that can output a row /// given a row with the same schema as . /// /// The transform model as an . If not all transforms - /// in the pipeline are then it returns null. - IRowToRowMapper AsRowToRowMapper(IExceptionContext ectx); + /// in the pipeline are then it returns . 
+ [BestFriend] + internal abstract IRowToRowMapper AsRowToRowMapper(IExceptionContext ectx); } } diff --git a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs index a61e96ce7b..0ed78e89a6 100644 --- a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs @@ -9,6 +9,7 @@ [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Core.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Predictor.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Sweeper.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.InferenceTesting" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.StaticPipelineTesting" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.OnnxTransformTest" + PublicKey.TestValue)] diff --git a/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs b/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs index 09a95ebec2..315b6d7fbe 100644 --- a/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs +++ b/src/Microsoft.ML.Data/Commands/EvaluateCommand.cs @@ -89,7 +89,8 @@ public string GetNameMatch(string input) /// Both take a as input. The is assumed to contain all the column /// roles needed for evaluation, including the score column. /// - public interface IEvaluator + [BestFriend] + internal interface IEvaluator { /// /// Compute the aggregate metrics. Return a dictionary from the metric kind @@ -109,12 +110,14 @@ public interface IEvaluator } /// - /// Signature for creating an IEvaluator. + /// Signature for creating an . 
/// - public delegate void SignatureEvaluator(); - public delegate void SignatureMamlEvaluator(); + [BestFriend] + internal delegate void SignatureEvaluator(); + [BestFriend] + internal delegate void SignatureMamlEvaluator(); - public static class EvaluateTransform + internal static class EvaluateTransform { public sealed class Arguments { diff --git a/src/Microsoft.ML.Data/Data/Conversion.cs b/src/Microsoft.ML.Data/Data/Conversion.cs index 64adc729c2..cf4379e74d 100644 --- a/src/Microsoft.ML.Data/Data/Conversion.cs +++ b/src/Microsoft.ML.Data/Data/Conversion.cs @@ -32,7 +32,8 @@ namespace Microsoft.ML.Runtime.Data.Conversion using U8 = UInt64; using UG = RowId; - public delegate bool TryParseMapper(in TX src, out T dst); + [BestFriend] + internal delegate bool TryParseMapper(in TX src, out T dst); /// /// This type exists to provide efficient delegates for conversion between standard ColumnTypes, @@ -44,7 +45,8 @@ namespace Microsoft.ML.Runtime.Data.Conversion /// text (and StringBuilder). These are needed by the standard TextSaver, which handles /// differences between sparse and dense inputs in a semantically invariant way. /// - public sealed class Conversions + [BestFriend] + internal sealed class Conversions { // REVIEW: Reconcile implementations with TypeUtils, and clarify the distinction. diff --git a/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs b/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs index e5b5ae1654..712560771c 100644 --- a/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs +++ b/src/Microsoft.ML.Data/DataView/LambdaColumnMapper.cs @@ -13,7 +13,8 @@ namespace Microsoft.ML.Runtime.Data /// This applies the user provided ValueMapper to a column to produce a new column. It automatically /// injects a standard conversion from the actual type of the source column to typeSrc (if needed). 
/// - public static class LambdaColumnMapper + [BestFriend] + internal static class LambdaColumnMapper { // REVIEW: It would be nice to support propagation of select metadata. public static IDataView Create(IHostEnvironment env, string name, IDataView input, diff --git a/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs b/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs index 37f37f6c64..c4647c6ae8 100644 --- a/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs +++ b/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs @@ -24,7 +24,7 @@ public sealed class TransformOutput public IDataView OutputData; [TlcModule.Output(Desc = "Transform model", SortOrder = 2)] - public ITransformModel Model; + public TransformModel Model; } /// @@ -33,7 +33,7 @@ public sealed class TransformOutput public interface ITransformOutput { Var OutputData { get; } - Var Model { get; } + Var Model { get; } } /// @@ -44,7 +44,7 @@ public interface ITransformOutput public abstract class TrainerOutput { [TlcModule.Output(Desc = "The trained model", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } /// @@ -187,7 +187,7 @@ public interface ISequencePredictionOutput /// public interface ITrainerOutput { - Var PredictorModel { get; } + Var PredictorModel { get; } } /// diff --git a/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs b/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs index 371a58fcae..999b8d0495 100644 --- a/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs +++ b/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs @@ -77,11 +77,10 @@ public static bool CheckType(Type type) return type == typeof(IDataView) || type == typeof(IFileHandle) || - type == typeof(IPredictorModel) || - type == typeof(ITransformModel) || + type == typeof(PredictorModel) || + type == typeof(TransformModel) || type == typeof(CommonInputs.IEvaluatorInput) || - type == typeof(CommonOutputs.IEvaluatorOutput) || - type == typeof(IMlState); + type == 
typeof(CommonOutputs.IEvaluatorOutput); } } @@ -186,8 +185,6 @@ public static bool IsValidType(Type variableType) return true; if (variableType == typeof(CommonOutputs.IEvaluatorOutput)) return true; - if (variableType == typeof(IMlState)) - return true; var kind = TlcModule.GetDataType(variableType); if (kind == TlcModule.DataKind.Array) diff --git a/src/Microsoft.ML.Data/EntryPoints/InputBase.cs b/src/Microsoft.ML.Data/EntryPoints/InputBase.cs index 7b76574562..2eae61e079 100644 --- a/src/Microsoft.ML.Data/EntryPoints/InputBase.cs +++ b/src/Microsoft.ML.Data/EntryPoints/InputBase.cs @@ -189,7 +189,7 @@ public static TOut Train(IHost host, TArg input, } var predictor = TrainUtils.Train(host, ch, cachedRoleMappedData, trainer, calibrator, maxCalibrationExamples); - return new TOut() { PredictorModel = new PredictorModel(host, roleMappedData, input.TrainingData, predictor) }; + return new TOut() { PredictorModel = new PredictorModelImpl(host, roleMappedData, input.TrainingData, predictor) }; } } } @@ -212,7 +212,7 @@ public interface ITransformInput /// public interface IFeaturizerInput : ITransformInput { - Var PredictorModel { get; set; } + Var PredictorModel { get; set; } } /// @@ -261,7 +261,7 @@ public interface ITrainerInputWithGroupId : ITrainerInputWithWeight /// public interface ICalibratorInput : ITransformInput { - Var UncalibratedPredictorModel { get; } + Var UncalibratedPredictorModel { get; } int MaxRows { get; } } diff --git a/src/Microsoft.ML.Data/EntryPoints/PredictorModel.cs b/src/Microsoft.ML.Data/EntryPoints/PredictorModelImpl.cs similarity index 65% rename from src/Microsoft.ML.Data/EntryPoints/PredictorModel.cs rename to src/Microsoft.ML.Data/EntryPoints/PredictorModelImpl.cs index f450c5e39a..b29e1cc326 100644 --- a/src/Microsoft.ML.Data/EntryPoints/PredictorModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/PredictorModelImpl.cs @@ -14,58 +14,60 @@ namespace Microsoft.ML.Runtime.EntryPoints { /// - /// This class encapsulates the predictor and 
a preceding transform model. + /// This class encapsulates the predictor and a preceding transform model, as the concrete and hidden + /// implementation of . /// - public sealed class PredictorModel : IPredictorModel + [BestFriend] + internal sealed class PredictorModelImpl : PredictorModel { - private readonly IPredictor _predictor; - private readonly ITransformModel _transformModel; private readonly KeyValuePair[] _roleMappings; - public ITransformModel TransformModel { get { return _transformModel; } } + internal override TransformModel TransformModel { get; } - public IPredictor Predictor { get { return _predictor; } } + internal override IPredictor Predictor { get; } - public PredictorModel(IHostEnvironment env, RoleMappedData trainingData, IDataView startingData, IPredictor predictor) + [BestFriend] + internal PredictorModelImpl(IHostEnvironment env, RoleMappedData trainingData, IDataView startingData, IPredictor predictor) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(trainingData, nameof(trainingData)); env.CheckValue(predictor, nameof(predictor)); - _transformModel = new TransformModel(env, trainingData.Data, startingData); + TransformModel = new TransformModelImpl(env, trainingData.Data, startingData); _roleMappings = trainingData.Schema.GetColumnRoleNames().ToArray(); - _predictor = predictor; + Predictor = predictor; } - public PredictorModel(IHostEnvironment env, Stream stream) + [BestFriend] + internal PredictorModelImpl(IHostEnvironment env, Stream stream) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(stream, nameof(stream)); using (var ch = env.Start("Loading predictor model")) { // REVIEW: address the asymmetry in the way we're loading and saving the model. 
- _transformModel = new TransformModel(env, stream); + TransformModel = new TransformModelImpl(env, stream); var roles = ModelFileUtils.LoadRoleMappingsOrNull(env, stream); env.CheckDecode(roles != null, "Predictor model must contain role mappings"); _roleMappings = roles.ToArray(); - _predictor = ModelFileUtils.LoadPredictorOrNull(env, stream); - env.CheckDecode(_predictor != null, "Predictor model must contain a predictor"); + Predictor = ModelFileUtils.LoadPredictorOrNull(env, stream); + env.CheckDecode(Predictor != null, "Predictor model must contain a predictor"); } } - private PredictorModel(ITransformModel transformModel, IPredictor predictor, KeyValuePair[] roleMappings) + private PredictorModelImpl(TransformModel transformModel, IPredictor predictor, KeyValuePair[] roleMappings) { Contracts.AssertValue(transformModel); Contracts.AssertValue(predictor); Contracts.AssertValue(roleMappings); - _transformModel = transformModel; - _predictor = predictor; + TransformModel = transformModel; + Predictor = predictor; _roleMappings = roleMappings; } - public void Save(IHostEnvironment env, Stream stream) + internal override void Save(IHostEnvironment env, Stream stream) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(stream, nameof(stream)); @@ -77,37 +79,37 @@ public void Save(IHostEnvironment env, Stream stream) // (we use the TrainUtils.SaveModel that does all three). // Create the chain of transforms for saving. 
- IDataView data = new EmptyDataView(env, _transformModel.InputSchema); - data = _transformModel.Apply(env, data); + IDataView data = new EmptyDataView(env, TransformModel.InputSchema); + data = TransformModel.Apply(env, data); var roleMappedData = new RoleMappedData(data, _roleMappings, opt: true); - TrainUtils.SaveModel(env, ch, stream, _predictor, roleMappedData); + TrainUtils.SaveModel(env, ch, stream, Predictor, roleMappedData); } } - public IPredictorModel Apply(IHostEnvironment env, ITransformModel transformModel) + internal override PredictorModel Apply(IHostEnvironment env, TransformModel transformModel) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(transformModel, nameof(transformModel)); - ITransformModel newTransformModel = _transformModel.Apply(env, transformModel); + TransformModel newTransformModel = TransformModel.Apply(env, transformModel); Contracts.AssertValue(newTransformModel); - return new PredictorModel(newTransformModel, _predictor, _roleMappings); + return new PredictorModelImpl(newTransformModel, Predictor, _roleMappings); } - public void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor) + internal override void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(input, nameof(input)); - input = _transformModel.Apply(env, input); + input = TransformModel.Apply(env, input); roleMappedData = new RoleMappedData(input, _roleMappings, opt: true); - predictor = _predictor; + predictor = Predictor; } - public string[] GetLabelInfo(IHostEnvironment env, out ColumnType labelType) + internal override string[] GetLabelInfo(IHostEnvironment env, out ColumnType labelType) { Contracts.CheckValue(env, nameof(env)); - var predictor = _predictor; + var predictor = Predictor; var calibrated = predictor as CalibratedPredictorBase; while (calibrated != null) { @@ -135,10 
+137,10 @@ public string[] GetLabelInfo(IHostEnvironment env, out ColumnType labelType) return null; } - public RoleMappedSchema GetTrainingSchema(IHostEnvironment env) + internal override RoleMappedSchema GetTrainingSchema(IHostEnvironment env) { Contracts.CheckValue(env, nameof(env)); - var predInput = _transformModel.Apply(env, new EmptyDataView(env, _transformModel.InputSchema)); + var predInput = TransformModel.Apply(env, new EmptyDataView(env, TransformModel.InputSchema)); var trainRms = new RoleMappedSchema(predInput.Schema, _roleMappings, opt: true); return trainRms; } diff --git a/src/Microsoft.ML.Data/EntryPoints/SchemaManipulation.cs b/src/Microsoft.ML.Data/EntryPoints/SchemaManipulation.cs index cccc1da6e7..40302bdc9d 100644 --- a/src/Microsoft.ML.Data/EntryPoints/SchemaManipulation.cs +++ b/src/Microsoft.ML.Data/EntryPoints/SchemaManipulation.cs @@ -22,7 +22,7 @@ public static CommonOutputs.TransformOutput ConcatColumns(IHostEnvironment env, EntryPointUtils.CheckInputArgs(host, input); var xf = ColumnConcatenatingTransformer.Create(env, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.ColumnSelector", Desc = "Selects a set of columns, dropping all others", UserName = "Select Columns")] @@ -34,7 +34,7 @@ public static CommonOutputs.TransformOutput SelectColumns(IHostEnvironment env, EntryPointUtils.CheckInputArgs(host, input); var xf = new ColumnSelectingTransformer(env, input.KeepColumns, input.DropColumns, input.KeepHidden, input.IgnoreMissing).Transform(input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } 
[TlcModule.EntryPoint(Name = "Transforms.ColumnCopier", Desc = "Duplicates columns from the dataset", UserName = ColumnCopyingTransformer.UserName, ShortName = ColumnCopyingTransformer.ShortName)] @@ -45,7 +45,7 @@ public static CommonOutputs.TransformOutput CopyColumns(IHostEnvironment env, Co host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); var xf = ColumnCopyingTransformer.Create(env, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs b/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs index f68fcff00a..f067ca27ac 100644 --- a/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs +++ b/src/Microsoft.ML.Data/EntryPoints/ScoreColumnSelector.cs @@ -38,7 +38,7 @@ public static CommonOutputs.TransformOutput SelectColumns(IHostEnvironment env, indices.Add(i); } var newView = new ChooseColumnsByIndexTransform(env, new ChooseColumnsByIndexTransform.Arguments() { Index = indices.ToArray() }, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, newView, input.Data), OutputData = newView }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, newView, input.Data), OutputData = newView }; } private static bool ShouldAddColumn(Schema schema, int i, string[] extraColumns, uint scoreSet) @@ -58,7 +58,7 @@ private static bool ShouldAddColumn(Schema schema, int i, string[] extraColumns, public sealed class RenameBinaryPredictionScoreColumnsInput : TransformInputBase { [Argument(ArgumentType.Required, HelpText = "The predictor model used in scoring", SortOrder = 2)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } [TlcModule.EntryPoint(Name = 
"Transforms.BinaryPredictionScoreColumnsRenamer", Desc = "For binary prediction, it renames the PredictedLabel and Score columns to include the name of the positive class.", UserName = "Rename Binary Prediction Score Columns")] @@ -103,12 +103,12 @@ public static CommonOutputs.TransformOutput RenameBinaryPredictionScoreColumns(I var copyColumn = new ColumnCopyingTransformer(env, copyCols.ToArray()).Transform(input.Data); var dropColumn = ColumnSelectingTransformer.CreateDrop(env, copyColumn, copyCols.Select(c => c.Source).ToArray()); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, dropColumn, input.Data), OutputData = dropColumn }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, dropColumn, input.Data), OutputData = dropColumn }; } } var newView = NopTransform.CreateIfNeeded(env, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, newView, input.Data), OutputData = newView }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, newView, input.Data), OutputData = newView }; } } } diff --git a/src/Microsoft.ML.Data/EntryPoints/ScoreModel.cs b/src/Microsoft.ML.Data/EntryPoints/ScoreModel.cs index b5c49c7fdb..1792303468 100644 --- a/src/Microsoft.ML.Data/EntryPoints/ScoreModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/ScoreModel.cs @@ -13,8 +13,8 @@ namespace Microsoft.ML.Runtime.EntryPoints { /// - /// This module handles scoring a against a new dataset. - /// As a result, we return both the scored data and the scoring transform as a . + /// This module handles scoring a against a new dataset. + /// As a result, we return both the scored data and the scoring transform as a . /// /// REVIEW: This module does not support 'exotic' scoring scenarios, like recommendation and quantile regression /// (those where the user-defined scorer settings are necessary to identify the scorer). 
We could resolve this by @@ -28,7 +28,7 @@ public sealed class Input public IDataView Data; [Argument(ArgumentType.Required, HelpText = "The predictor model to apply to data", SortOrder = 2)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; [Argument(ArgumentType.AtMostOnce, HelpText = "Suffix to append to the score columns", SortOrder = 3)] public string Suffix; @@ -40,7 +40,7 @@ public sealed class InputTransformScorer public IDataView Data; [Argument(ArgumentType.Required, HelpText = "The transform model to apply to data", SortOrder = 2)] - public ITransformModel TransformModel; + public TransformModel TransformModel; } public sealed class Output @@ -49,19 +49,19 @@ public sealed class Output public IDataView ScoredData; [TlcModule.Output(Desc = "The scoring transform", SortOrder = 2)] - public ITransformModel ScoringTransform; + public TransformModel ScoringTransform; } public sealed class ModelInput { [Argument(ArgumentType.Required, HelpText = "The predictor model to turn into a transform", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } public sealed class ModelOutput { [TlcModule.Output(Desc = "The scoring transform", SortOrder = 1)] - public ITransformModel ScoringTransform; + public TransformModel ScoringTransform; } [TlcModule.EntryPoint(Name = "Transforms.DatasetScorer", Desc = "Score a dataset with a predictor model")] @@ -91,7 +91,7 @@ public static Output Score(IHostEnvironment env, Input input) new Output { ScoredData = scoredPipe, - ScoringTransform = new TransformModel(host, scoredPipe, inputData) + ScoringTransform = new TransformModelImpl(host, scoredPipe, inputData) }; } @@ -140,7 +140,7 @@ public static Output MakeScoringTransform(IHostEnvironment env, ModelInput input return new Output { ScoredData = scoredPipe, - ScoringTransform = new TransformModel(host, scoredPipe, emptyData) + ScoringTransform = new TransformModelImpl(host, scoredPipe, emptyData) }; } } 
diff --git a/src/Microsoft.ML.Data/EntryPoints/SelectRows.cs b/src/Microsoft.ML.Data/EntryPoints/SelectRows.cs index aa805eeac8..eaabf248dc 100644 --- a/src/Microsoft.ML.Data/EntryPoints/SelectRows.cs +++ b/src/Microsoft.ML.Data/EntryPoints/SelectRows.cs @@ -20,7 +20,7 @@ public static CommonOutputs.TransformOutput FilterByRange(IHostEnvironment env, EntryPointUtils.CheckInputArgs(host, input); var xf = new RangeFilter(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.RowSkipFilter", Desc = SkipTakeFilter.SkipFilterSummary, UserName = SkipTakeFilter.SkipFilterUserName, @@ -32,7 +32,7 @@ public static CommonOutputs.TransformOutput SkipFilter(IHostEnvironment env, Ski host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); var xf = SkipTakeFilter.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.RowTakeFilter", Desc = SkipTakeFilter.TakeFilterSummary, UserName = SkipTakeFilter.TakeFilterUserName, @@ -44,7 +44,7 @@ public static CommonOutputs.TransformOutput TakeFilter(IHostEnvironment env, Ski host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); var xf = SkipTakeFilter.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.RowSkipAndTakeFilter", Desc = 
SkipTakeFilter.SkipTakeFilterSummary, @@ -56,7 +56,7 @@ public static CommonOutputs.TransformOutput SkipAndTakeFilter(IHostEnvironment e host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); var xf = SkipTakeFilter.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Data/EntryPoints/SummarizePredictor.cs b/src/Microsoft.ML.Data/EntryPoints/SummarizePredictor.cs index 0603d28e84..49e5acdca5 100644 --- a/src/Microsoft.ML.Data/EntryPoints/SummarizePredictor.cs +++ b/src/Microsoft.ML.Data/EntryPoints/SummarizePredictor.cs @@ -19,7 +19,7 @@ public static class SummarizePredictor public abstract class InputBase { [Argument(ArgumentType.Required, ShortName = "predictorModel", HelpText = "The predictor to summarize")] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } public sealed class Input : InputBase diff --git a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs b/src/Microsoft.ML.Data/EntryPoints/TransformModelImpl.cs similarity index 84% rename from src/Microsoft.ML.Data/EntryPoints/TransformModel.cs rename to src/Microsoft.ML.Data/EntryPoints/TransformModelImpl.cs index 4e3c2e81f1..7ef6ccddcb 100644 --- a/src/Microsoft.ML.Data/EntryPoints/TransformModel.cs +++ b/src/Microsoft.ML.Data/EntryPoints/TransformModelImpl.cs @@ -7,6 +7,7 @@ using System.IO; using Microsoft.ML.Data; using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Data.IO; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; @@ -17,42 +18,45 @@ namespace Microsoft.ML.Runtime.EntryPoints /// the initial schema, together with the sequence of transforms applied to that /// schema. 
/// - public sealed class TransformModel : ITransformModel + [BestFriend] + internal sealed class TransformModelImpl : TransformModel { // The cached schema of the root of the _chain. private readonly Schema _schemaRoot; - // This contains the transforms to save instantiated on an IDataView with - // appropriate initial schema. Note that the "root" of this is typically either - // an empty IDataView or a BinaryLoader with no rows. However, other root - // types are possible, since we don't insist on this when loading a model - // from a zip file. However, whenever we save, we force a BinaryLoader to - // be serialized for the root. + /// + /// This contains the transforms to save instantiated on an with + /// appropriate initial schema. Note that the "root" of this is typically either + /// an empty or a with no rows. However, other root + /// types are possible, since we don't insist on this when loading a model + /// from a zip file. However, whenever we save, we force a to + /// be serialized for the root. + /// private readonly IDataView _chain; /// /// The input schema that this transform model was originally instantiated on. /// Note that the schema may have columns that aren't needed by this transform model. - /// If an IDataView exists with this schema, then applying this transform model to it + /// If an exists with this schema, then applying this transform model to it /// shouldn't fail because of column type issues. /// REVIEW: Would be nice to be able to trim this to the minimum needed somehow. Note /// however that doing so may cause issues for composing transform models. For example, /// if transform model A needs column X and model B needs Y, that is NOT produced by A, /// then trimming A's input schema would cause composition to fail. /// - public Schema InputSchema => _schemaRoot; + internal override Schema InputSchema => _schemaRoot; /// /// The resulting schema once applied to this model. 
The might have /// columns that are not needed by this transform and these columns will be seen in the /// produced by this transform. /// - public Schema OutputSchema => _chain.Schema; + internal override Schema OutputSchema => _chain.Schema; /// /// Create a TransformModel containing the transforms from "result" back to "input". /// - public TransformModel(IHostEnvironment env, IDataView result, IDataView input) + public TransformModelImpl(IHostEnvironment env, IDataView result, IDataView input) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(result, nameof(result)); @@ -63,7 +67,7 @@ public TransformModel(IHostEnvironment env, IDataView result, IDataView input) _chain = ApplyTransformUtils.ApplyAllTransformsToData(env, result, root, input); } - private TransformModel(IHostEnvironment env, Schema schemaRoot, IDataView chain) + private TransformModelImpl(IHostEnvironment env, Schema schemaRoot, IDataView chain) { Contracts.AssertValue(env); env.AssertValue(schemaRoot); @@ -76,7 +80,7 @@ private TransformModel(IHostEnvironment env, Schema schemaRoot, IDataView chain) /// Create a TransformModel containing the given (optional) transforms applied to the /// given root schema. /// - public TransformModel(IHostEnvironment env, Schema schemaRoot, IDataTransform[] xfs) + public TransformModelImpl(IHostEnvironment env, Schema schemaRoot, IDataTransform[] xfs) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(schemaRoot, nameof(schemaRoot)); @@ -100,7 +104,7 @@ public TransformModel(IHostEnvironment env, Schema schemaRoot, IDataTransform[] /// /// Load a transform model. /// - public TransformModel(IHostEnvironment env, Stream stream) + public TransformModelImpl(IHostEnvironment env, Stream stream) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(stream, nameof(stream)); @@ -128,7 +132,7 @@ public TransformModel(IHostEnvironment env, Stream stream) /// /// Apply this transform model to the given input data. 
/// - public IDataView Apply(IHostEnvironment env, IDataView input) + internal override IDataView Apply(IHostEnvironment env, IDataView input) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(input, nameof(input)); @@ -138,14 +142,14 @@ public IDataView Apply(IHostEnvironment env, IDataView input) /// /// Apply this transform model to the given input transform model to produce a composite transform model. /// - public ITransformModel Apply(IHostEnvironment env, ITransformModel input) + internal override TransformModel Apply(IHostEnvironment env, TransformModel input) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(input, nameof(input)); IDataView view; Schema schemaRoot = input.InputSchema; - var mod = input as TransformModel; + var mod = input as TransformModelImpl; if (mod != null) view = ApplyTransformUtils.ApplyAllTransformsToData(env, _chain, mod._chain); else @@ -155,13 +159,13 @@ public ITransformModel Apply(IHostEnvironment env, ITransformModel input) view = Apply(env, view); } - return new TransformModel(env, schemaRoot, view); + return new TransformModelImpl(env, schemaRoot, view); } /// /// Save this transform model. 
/// - public void Save(IHostEnvironment env, Stream stream) + internal override void Save(IHostEnvironment env, Stream stream) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(stream, nameof(stream)); @@ -177,7 +181,7 @@ public void Save(IHostEnvironment env, Stream stream) } } - public IRowToRowMapper AsRowToRowMapper(IExceptionContext ectx) + internal override IRowToRowMapper AsRowToRowMapper(IExceptionContext ectx) { return CompositeRowToRowMapper.IsCompositeRowToRowMapper(_chain) diff --git a/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs index 361f580c71..b8207573a6 100644 --- a/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/AnomalyDetectionEvaluator.cs @@ -107,7 +107,7 @@ protected override Aggregator GetAggregatorCore(RoleMappedSchema schema, string return new Aggregator(Host, _aucCount, _numTopResults, _k, _p, _streaming, schema.Name == null ? 
-1 : schema.Name.Index, stratName); } - public override IDataTransform GetPerInstanceMetrics(RoleMappedData data) + internal override IDataTransform GetPerInstanceMetricsCore(RoleMappedData data) { return NopTransform.CreateIfNeeded(Host, data.Data); } @@ -605,7 +605,7 @@ public sealed class Arguments : ArgumentsBase private readonly int _k; private readonly Double _p; - protected override IEvaluator Evaluator => _evaluator; + private protected override IEvaluator Evaluator => _evaluator; public AnomalyDetectionMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.AnomalyDetection, "AnomalyDetectionMamlEvaluator") @@ -777,7 +777,7 @@ public static CommonOutputs.CommonEvaluateOutput AnomalyDetection(IHostEnvironme string weight; string name; MatchColumns(host, input, out label, out weight, out name); - var evaluator = new AnomalyDetectionMamlEvaluator(host, input); + IMamlEvaluator evaluator = new AnomalyDetectionMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs index 2b8ffe7d0b..4031441e21 100644 --- a/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/BinaryClassifierEvaluator.cs @@ -813,7 +813,7 @@ public CalibratedBinaryClassificationMetrics Evaluate(IDataView data, string lab RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.Probability, probability), RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.PredictedLabel, predictedLabel)); - var resultDict = Evaluate(roles); + var resultDict = ((IEvaluator)this).Evaluate(roles); Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; @@ -850,7 +850,7 @@ public BinaryClassificationMetrics 
Evaluate(IDataView data, string label, string RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.Score, score), RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.PredictedLabel, predictedLabel)); - var resultDict = Evaluate(roles); + var resultDict = ((IEvaluator)this).Evaluate(roles); Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; @@ -1152,7 +1152,7 @@ public class Arguments : ArgumentsBase private readonly string _prFileName; private readonly string _probCol; - protected override IEvaluator Evaluator { get { return _evaluator; } } + private protected override IEvaluator Evaluator => _evaluator; public BinaryClassifierMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.BinaryClassification, "BinaryClassifierMamlEvaluator") @@ -1521,7 +1521,7 @@ public static CommonOutputs.ClassificationEvaluateOutput Binary(IHostEnvironment string weight; string name; MatchColumns(host, input, out label, out weight, out name); - var evaluator = new BinaryClassifierMamlEvaluator(host, input); + IMamlEvaluator evaluator = new BinaryClassifierMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs index e74f470f0c..61ba97bb6e 100644 --- a/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/ClusteringEvaluator.cs @@ -78,7 +78,7 @@ public ClusteringMetrics Evaluate(IDataView data, string score, string label = n var rolesMappedData = new RoleMappedData(data, opt: false, roles.ToArray()); - var resultDict = Evaluate(rolesMappedData); + var resultDict = ((IEvaluator)this).Evaluate(rolesMappedData); Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = 
resultDict[MetricKinds.OverallMetrics]; @@ -777,7 +777,7 @@ public class Arguments : ArgumentsBase private readonly string _featureCol; private readonly bool _calculateDbi; - protected override IEvaluator Evaluator { get { return _evaluator; } } + private protected override IEvaluator Evaluator => _evaluator; public ClusteringMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.Clustering, "ClusteringMamlEvaluator") @@ -865,7 +865,7 @@ public static CommonOutputs.CommonEvaluateOutput Clustering(IHostEnvironment env string features = TrainUtils.MatchNameOrDefaultOrNull(host, schema, nameof(ClusteringMamlEvaluator.Arguments.FeatureColumn), input.FeatureColumn, DefaultColumnNames.Features); - var evaluator = new ClusteringMamlEvaluator(host, input); + IMamlEvaluator evaluator = new ClusteringMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, features, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs index 59f5cafeb0..ab1330c31b 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorBase.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.Runtime.Data { /// /// This is a base class for TLC evaluators. It implements both of the methods: and - /// . Note that the input is assumed to contain all the column + /// . Note that the input is assumed to contain all the column /// roles needed for evaluation, including the score column. 
/// public abstract partial class EvaluatorBase : IEvaluator @@ -28,7 +28,7 @@ protected EvaluatorBase(IHostEnvironment env, string registrationName) Host = env.Register(registrationName); } - public Dictionary Evaluate(RoleMappedData data) + Dictionary IEvaluator.Evaluate(RoleMappedData data) { CheckColumnTypes(data.Schema); Func activeCols = GetActiveCols(data.Schema); @@ -209,7 +209,10 @@ protected ValueGetter>> GetKeyValueGetter(Aggregato }; } - public abstract IDataTransform GetPerInstanceMetrics(RoleMappedData data); + IDataTransform IEvaluator.GetPerInstanceMetrics(RoleMappedData data) => GetPerInstanceMetricsCore(data); + + [BestFriend] + internal abstract IDataTransform GetPerInstanceMetricsCore(RoleMappedData data); public abstract IEnumerable GetOverallMetricColumns(); @@ -440,7 +443,7 @@ protected RowToRowEvaluatorBase(IHostEnvironment env, string registrationName) { } - public override IDataTransform GetPerInstanceMetrics(RoleMappedData data) + internal override IDataTransform GetPerInstanceMetricsCore(RoleMappedData data) { var mapper = CreatePerInstanceRowMapper(data.Schema); return new RowToRowMapperTransform(Host, data.Data, mapper, null); diff --git a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs index 22703accc6..95b8e99055 100644 --- a/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs +++ b/src/Microsoft.ML.Data/Evaluators/EvaluatorUtils.cs @@ -17,7 +17,8 @@ namespace Microsoft.ML.Runtime.Data { - public static class EvaluateUtils + [BestFriend] + internal static class EvaluateUtils { public struct AggregatedMetric { diff --git a/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs index 110abd27e4..8b8bd0a761 100644 --- a/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MamlEvaluator.cs @@ -17,7 +17,8 @@ namespace Microsoft.ML.Runtime.Data /// should be assumed to contain only the following column 
roles: label, group, weight and name. Any other columns needed for /// evaluation should be searched for by name in the . /// - public interface IMamlEvaluator : IEvaluator + [BestFriend] + internal interface IMamlEvaluator : IEvaluator { /// /// Print the aggregate metrics to the console. @@ -45,7 +46,7 @@ public interface IMamlEvaluator : IEvaluator } /// - /// A base class implementation of . The and + /// A base class implementation of . The and /// methods create a new containing all the columns needed for evaluation, and call the corresponding /// methods on an of the appropriate type. /// @@ -81,7 +82,8 @@ public abstract class ArgumentsBase : EvaluateInputBase protected readonly string WeightCol; protected readonly string[] StratCols; - protected abstract IEvaluator Evaluator { get; } + [BestFriend] + private protected abstract IEvaluator Evaluator { get; } protected MamlEvaluatorBase(ArgumentsBase args, IHostEnvironment env, string scoreColumnKind, string registrationName) { @@ -94,7 +96,7 @@ protected MamlEvaluatorBase(ArgumentsBase args, IHostEnvironment env, string sco StratCols = args.StratColumn; } - public Dictionary Evaluate(RoleMappedData data) + Dictionary IEvaluator.Evaluate(RoleMappedData data) { data = new RoleMappedData(data.Data, GetInputColumnRoles(data.Schema, needStrat: true)); return Evaluator.Evaluate(data); @@ -140,7 +142,7 @@ public virtual IEnumerable GetOverallMetricColumns() return Evaluator.GetOverallMetricColumns(); } - public void PrintFoldResults(IChannel ch, Dictionary metrics) + void IMamlEvaluator.PrintFoldResults(IChannel ch, Dictionary metrics) { Host.CheckValue(ch, nameof(ch)); Host.CheckValue(metrics, nameof(metrics)); @@ -167,7 +169,7 @@ protected virtual void PrintFoldResultsCore(IChannel ch, Dictionary[] metrics) + void IMamlEvaluator.PrintAdditionalMetrics(IChannel ch, params Dictionary[] metrics) { Host.CheckValue(ch, nameof(ch)); Host.CheckNonEmpty(metrics, nameof(metrics)); @@ -199,7 +201,7 @@ protected virtual 
void PrintAdditionalMetricsCore(IChannel ch, Dictionary _evaluator; public MultiClassMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.MultiClassClassification, "MultiClassMamlEvaluator") @@ -1039,7 +1039,7 @@ public static CommonOutputs.ClassificationEvaluateOutput MultiClass(IHostEnviron EntryPointUtils.CheckInputArgs(host, input); MatchColumns(host, input, out string label, out string weight, out string name); - var evaluator = new MultiClassMamlEvaluator(host, input); + IMamlEvaluator evaluator = new MultiClassMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs index f33fcaee1a..d67225874a 100644 --- a/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/MultiOutputRegressionEvaluator.cs @@ -623,7 +623,7 @@ public sealed class Arguments : ArgumentsBase private readonly MultiOutputRegressionEvaluator _evaluator; private readonly bool _supressScoresAndLabels; - protected override IEvaluator Evaluator { get { return _evaluator; } } + private protected override IEvaluator Evaluator => _evaluator; public MultiOutputRegressionMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.MultiOutputRegression, "RegressionMamlEvaluator") @@ -781,7 +781,7 @@ public static CommonOutputs.CommonEvaluateOutput MultiOutputRegression(IHostEnvi string weight; string name; MatchColumns(host, input, out label, out weight, out name); - var evaluator = new MultiOutputRegressionMamlEvaluator(host, input); + IMamlEvaluator evaluator = new MultiOutputRegressionMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = 
evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs index 10f5aa6e4b..0e7d30bc79 100644 --- a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs @@ -474,7 +474,7 @@ public sealed class Arguments : ArgumentsBase private readonly int? _index; private readonly QuantileRegressionEvaluator _evaluator; - protected override IEvaluator Evaluator => _evaluator; + private protected override IEvaluator Evaluator => _evaluator; public QuantileRegressionMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.QuantileRegression, "QuantilsRegressionMamlEvaluator") @@ -569,7 +569,7 @@ public static CommonOutputs.CommonEvaluateOutput QuantileRegression(IHostEnviron string weight; string name; MatchColumns(host, input, out label, out weight, out name); - var evaluator = new QuantileRegressionMamlEvaluator(host, input); + IMamlEvaluator evaluator = new QuantileRegressionMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs index 04298a877f..5523057462 100644 --- a/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RankerEvaluator.cs @@ -123,7 +123,7 @@ protected override Aggregator GetAggregatorCore(RoleMappedSchema schema, string return new Aggregator(Host, _labelGains, _truncationLevel, _groupSummary, schema.Weight != null, stratName); } - public override IDataTransform GetPerInstanceMetrics(RoleMappedData data) + internal override IDataTransform GetPerInstanceMetricsCore(RoleMappedData data) { Host.CheckValue(data, nameof(data)); Host.CheckParam(data.Schema.Label != null, nameof(data), 
"Schema must contain a label column"); @@ -253,7 +253,7 @@ public RankerMetrics Evaluate(IDataView data, string label, string groupId, stri RoleMappedSchema.ColumnRole.Group.Bind(groupId), RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.Score, score)); - var resultDict = Evaluate(roles); + var resultDict = ((IEvaluator)this).Evaluate(roles); Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; @@ -864,7 +864,7 @@ public sealed class Arguments : ArgumentsBase private readonly string _groupSummaryFilename; - protected override IEvaluator Evaluator { get { return _evaluator; } } + private protected override IEvaluator Evaluator => _evaluator; public RankerMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.Ranking, "RankerMamlEvaluator") @@ -1072,7 +1072,7 @@ public static CommonOutputs.CommonEvaluateOutput Ranking(IHostEnvironment env, R string groupId = TrainUtils.MatchNameOrDefaultOrNull(host, schema, nameof(RankerMamlEvaluator.Arguments.GroupIdColumn), input.GroupIdColumn, DefaultColumnNames.GroupId); - var evaluator = new RankerMamlEvaluator(host, input); + IMamlEvaluator evaluator = new RankerMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, groupId, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs index bd2c86ff7d..4533de0d8e 100644 --- a/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/RegressionEvaluator.cs @@ -173,7 +173,7 @@ public RegressionMetrics Evaluate(IDataView data, string label, string score) RoleMappedSchema.ColumnRole.Label.Bind(label), RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.Score, score)); - var resultDict = Evaluate(roles); + var resultDict = ((IEvaluator)this).Evaluate(roles); 
Host.Assert(resultDict.ContainsKey(MetricKinds.OverallMetrics)); var overall = resultDict[MetricKinds.OverallMetrics]; @@ -338,7 +338,7 @@ public sealed class Arguments : ArgumentsBase private readonly RegressionEvaluator _evaluator; - protected override IEvaluator Evaluator { get { return _evaluator; } } + private protected override IEvaluator Evaluator => _evaluator; public RegressionMamlEvaluator(IHostEnvironment env, Arguments args) : base(args, env, MetadataUtils.Const.ScoreColumnKind.Regression, "RegressionMamlEvaluator") @@ -386,7 +386,7 @@ public static CommonOutputs.CommonEvaluateOutput Regression(IHostEnvironment env string weight; string name; MatchColumns(host, input, out label, out weight, out name); - var evaluator = new RegressionMamlEvaluator(host, input); + IMamlEvaluator evaluator = new RegressionMamlEvaluator(host, input); var data = new RoleMappedData(input.Data, label, null, null, weight, name); var metrics = evaluator.Evaluate(data); diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index b068a73243..ecc0c5501f 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -1837,7 +1837,7 @@ public static class Calibrate public abstract class CalibrateInputBase : TransformInputBase { [Argument(ArgumentType.Required, ShortName = "uncalibratedPredictorModel", HelpText = "The predictor to calibrate", SortOrder = 2)] - public IPredictorModel UncalibratedPredictorModel; + public PredictorModel UncalibratedPredictorModel; [Argument(ArgumentType.Required, ShortName = "maxRows", HelpText = "The maximum number of examples to train the calibrator on", SortOrder = 3)] [TlcModule.Range(Inf = 0, Max = int.MaxValue)] @@ -1910,7 +1910,7 @@ public static CommonOutputs.CalibratorOutput FixedPlatt(IHostEnvironment env, Fi /// The input object, containing the predictor, the data and an integer indicating the maximum number /// of examples to use for 
training the calibrator. /// The kind of calibrator to use. - /// A object, containing an . + /// A object, containing an . public static TOut CalibratePredictor(IHost host, CalibrateInputBase input, ICalibratorTrainer calibratorTrainer) where TOut : CommonOutputs.TrainerOutput, new() @@ -1935,7 +1935,7 @@ public static TOut CalibratePredictor(IHost host, CalibrateInputBase input calibratedPredictor = CalibratorUtils.TrainCalibrator(host, ch, calibratorTrainer, input.MaxRows, predictor, data); } - return new TOut() { PredictorModel = new PredictorModel(host, data, input.Data, calibratedPredictor) }; + return new TOut() { PredictorModel = new PredictorModelImpl(host, data, input.Data, calibratedPredictor) }; } } } diff --git a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs index 23e658a818..a33348aa8f 100644 --- a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs @@ -10,6 +10,8 @@ [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Core.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.InferenceTesting" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.OnnxTransformTest" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Predictor.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TimeSeries.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Legacy" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Maml" + PublicKey.Value)] diff --git a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs index 9049d751e8..583badaf62 100644 --- a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs +++ b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs @@ -142,7 +142,7 @@ protected TTransformer 
TrainTransformer(IDataView trainSet, protected abstract TTransformer MakeTransformer(TModel model, Schema trainSchema); - protected virtual RoleMappedData MakeRoles(IDataView data) => + private protected virtual RoleMappedData MakeRoles(IDataView data) => new RoleMappedData(data, label: LabelColumn.Name, feature: FeatureColumn.Name, weight: WeightColumn.Name); IPredictor ITrainer.Train(TrainContext context) => ((ITrainer)this).Train(context); @@ -172,7 +172,7 @@ public TrainerEstimatorBaseWithGroupId(IHost host, GroupIdColumn = groupId; } - protected override RoleMappedData MakeRoles(IDataView data) => + private protected override RoleMappedData MakeRoles(IDataView data) => new RoleMappedData(data, label: LabelColumn.Name, feature: FeatureColumn.Name, group: GroupIdColumn.Name, weight: WeightColumn.Name); } diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index 5a04db2b06..0375695bcc 100644 --- a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -38,7 +38,8 @@ public enum CursOpt : uint AllFeatures = Features | AllowBadFeatures, } - public static class TrainerUtils + [BestFriend] + internal static class TrainerUtils { /// /// Check for a standard (known-length vector of float) feature column. @@ -402,18 +403,16 @@ public static SchemaShape.Column MakeR4ScalarWeightColumn(string weightColumn, b /// repeated accesses, is to use a cursor factory (usually a nested class of the cursor /// class). This keeps track of what filtering options were actually useful. 
/// - public abstract class TrainingCursorBase : IDisposable + [BestFriend] + internal abstract class TrainingCursorBase : IDisposable { - public Row Row { get { return _cursor; } } + public Row Row => _cursor; private readonly RowCursor _cursor; private readonly Action _signal; - private long _skipCount; - private long _keptCount; - - public long SkippedRowCount { get { return _skipCount; } } - public long KeptRowCount { get { return _keptCount; } } + public long SkippedRowCount { get; private set; } + public long KeptRowCount { get; private set; } /// /// The base constructor class for the factory-based cursor creation. @@ -471,10 +470,10 @@ public bool MoveNext() } if (Accept()) { - _keptCount++; + KeptRowCount++; return true; } - _skipCount++; + SkippedRowCount++; } } @@ -636,7 +635,8 @@ public void Signal(CursOpt opt) /// /// This supports Weight (float), Group (ulong), and Id (RowId) columns. /// - public class StandardScalarCursor : TrainingCursorBase + [BestFriend] + internal class StandardScalarCursor : TrainingCursorBase { private readonly ValueGetter _getWeight; private readonly ValueGetter _getGroup; @@ -644,10 +644,8 @@ public class StandardScalarCursor : TrainingCursorBase private readonly bool _keepBadWeight; private readonly bool _keepBadGroup; - private long _badWeightCount; - private long _badGroupCount; - public long BadWeightCount { get { return _badWeightCount; } } - public long BadGroupCount { get { return _badGroupCount; } } + public long BadWeightCount { get; private set; } + public long BadGroupCount { get; private set; } public float Weight; public ulong Group; @@ -698,7 +696,7 @@ public override bool Accept() _getWeight(ref Weight); if (!_keepBadWeight && !(0 < Weight && Weight < float.PositiveInfinity)) { - _badWeightCount++; + BadWeightCount++; return false; } } @@ -707,7 +705,7 @@ public override bool Accept() _getGroup(ref Group); if (!_keepBadGroup && Group == 0) { - _badGroupCount++; + BadGroupCount++; return false; } } @@ -732,13 
+730,13 @@ protected override StandardScalarCursor CreateCursorCore(RowCursor input, RoleMa /// This derives from and adds the feature column /// as a . /// - public class FeatureFloatVectorCursor : StandardScalarCursor + [BestFriend] + internal class FeatureFloatVectorCursor : StandardScalarCursor { private readonly ValueGetter> _get; private readonly bool _keepBad; - private long _badCount; - public long BadFeaturesRowCount { get { return _badCount; } } + public long BadFeaturesRowCount { get; private set; } public VBuffer Features; @@ -775,7 +773,7 @@ public override bool Accept() _get(ref Features); if (!_keepBad && !FloatUtils.IsFinite(Features.GetValues())) { - _badCount++; + BadFeaturesRowCount++; return false; } } @@ -799,14 +797,13 @@ protected override FeatureFloatVectorCursor CreateCursorCore(RowCursor input, Ro /// /// This derives from the FeatureFloatVectorCursor and adds the Label (float) column. /// - public class FloatLabelCursor : FeatureFloatVectorCursor + [BestFriend] + internal class FloatLabelCursor : FeatureFloatVectorCursor { private readonly ValueGetter _get; private readonly bool _keepBad; - private long _badCount; - - public long BadLabelCount { get { return _badCount; } } + public long BadLabelCount { get; private set; } public float Label; @@ -842,7 +839,7 @@ public override bool Accept() _get(ref Label); if (!_keepBad && !FloatUtils.IsFinite(Label)) { - _badCount++; + BadLabelCount++; return false; } } @@ -867,14 +864,14 @@ protected override FloatLabelCursor CreateCursorCore(RowCursor input, RoleMapped /// This derives from the FeatureFloatVectorCursor and adds the Label (int) column, /// enforcing multi-class semantics. 
/// - public class MultiClassLabelCursor : FeatureFloatVectorCursor + [BestFriend] + internal class MultiClassLabelCursor : FeatureFloatVectorCursor { private readonly int _classCount; private readonly ValueGetter _get; private readonly bool _keepBad; - private long _badCount; - public long BadLabelCount { get { return _badCount; } } + public long BadLabelCount { get; private set; } private float _raw; public int Label; @@ -915,7 +912,7 @@ public override bool Accept() Label = (int)_raw; if (!_keepBad && !(Label == _raw && (0 <= _raw && (_raw < _classCount || _classCount == 0)))) { - _badCount++; + BadLabelCount++; return false; } } diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index bc229021bb..e6ad9e1ff6 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -473,7 +473,7 @@ public static CommonOutputs.TransformOutput Generate(IHostEnvironment env, Gener var xf = new GenerateNumberTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs b/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs index 8b683a73bf..2a5606aa85 100644 --- a/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs +++ b/src/Microsoft.ML.Data/Transforms/InvertHashUtils.cs @@ -13,7 +13,8 @@ namespace Microsoft.ML.Runtime.Data { - public static class InvertHashUtils + [BestFriend] + internal static class InvertHashUtils { /// /// Clears a destination StringBuilder. If it is currently null, allocates it. 
@@ -101,7 +102,8 @@ public static void AppendToEnd(StringBuilder src, StringBuilder dst, ref char[] } } - public sealed class InvertHashCollector + [BestFriend] + internal sealed class InvertHashCollector { /// /// This is a small struct that is meant to compare akin to the value, diff --git a/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs b/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs index 2c5acafd1d..799ff7eb80 100644 --- a/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/LabelIndicatorTransform.cs @@ -234,7 +234,7 @@ public static CommonOutputs.TransformOutput LabelIndicator(IHostEnvironment env, EntryPointUtils.CheckInputArgs(host, input); var xf = Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Data/Transforms/NopTransform.cs b/src/Microsoft.ML.Data/Transforms/NopTransform.cs index c57a60d387..278af962a9 100644 --- a/src/Microsoft.ML.Data/Transforms/NopTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/NopTransform.cs @@ -154,7 +154,7 @@ public static CommonOutputs.TransformOutput Nop(IHostEnvironment env, NopInput i EntryPointUtils.CheckInputArgs(host, input); var xf = CreateIfNeeded(host, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs b/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs index e84d4eb8a4..6003c5cb84 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeUtils.cs @@ -103,7 
+103,7 @@ public static CommonOutputs.TransformOutput MinMax(IHostEnvironment env, Normali EntryPointUtils.CheckInputArgs(host, input); var xf = NormalizeTransform.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.MeanVarianceNormalizer", Desc = NormalizeTransform.MeanVarNormalizerSummary, UserName = NormalizeTransform.MeanVarNormalizerUserName, ShortName = NormalizeTransform.MeanVarNormalizerShortName)] @@ -115,7 +115,7 @@ public static CommonOutputs.TransformOutput MeanVar(IHostEnvironment env, Normal EntryPointUtils.CheckInputArgs(host, input); var xf = NormalizeTransform.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.LogMeanVarianceNormalizer", Desc = NormalizeTransform.LogMeanVarNormalizerSummary, UserName = NormalizeTransform.LogMeanVarNormalizerUserName, ShortName = NormalizeTransform.LogMeanVarNormalizerShortName)] @@ -127,7 +127,7 @@ public static CommonOutputs.TransformOutput LogMeanVar(IHostEnvironment env, Nor EntryPointUtils.CheckInputArgs(host, input); var xf = NormalizeTransform.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.BinNormalizer", Desc = NormalizeTransform.BinNormalizerSummary, UserName = NormalizeTransform.BinNormalizerUserName, ShortName = 
NormalizeTransform.BinNormalizerShortName)] @@ -139,7 +139,7 @@ public static CommonOutputs.TransformOutput Bin(IHostEnvironment env, NormalizeT EntryPointUtils.CheckInputArgs(host, input); var xf = NormalizeTransform.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.ConditionalNormalizer", Desc = "Normalize the columns only if needed", UserName = "Normalize If Needed")] diff --git a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs index 493d313cc2..74eb46f367 100644 --- a/src/Microsoft.ML.Data/Transforms/TypeConverting.cs +++ b/src/Microsoft.ML.Data/Transforms/TypeConverting.cs @@ -50,7 +50,7 @@ public static CommonOutputs.TransformOutput Convert(IHostEnvironment env, TypeCo return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs index ab74396671..6381b27654 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs @@ -118,7 +118,8 @@ public sealed class ToKeyFitResult // At the moment this is empty. Once PR #863 clears, we can change this class to hold the output // key-values metadata. 
- public ToKeyFitResult(ValueToKeyMappingTransformer.TermMap map) + [BestFriend] + internal ToKeyFitResult(ValueToKeyMappingTransformer.TermMap map) { } } diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index 8edd76dff6..c3f61399fa 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -685,7 +685,8 @@ public override void Save(ModelSaveContext ctx) }); } - public TermMap GetTermMap(int iinfo) + [BestFriend] + internal TermMap GetTermMap(int iinfo) { Contracts.Assert(0 <= iinfo && iinfo < _unboundMaps.Length); return _unboundMaps[iinfo]; diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs index 8c7961c969..03bb9fbf8f 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformerImpl.cs @@ -474,7 +474,8 @@ private static BoundTermMap Bind(IHostEnvironment env, Schema schema, TermMap un /// These are the immutable and serializable analogs to the used in /// training. 
/// - public abstract class TermMap + [BestFriend] + internal abstract class TermMap { /// /// The item type of the input type, that is, either the input type or, @@ -757,7 +758,7 @@ internal override void WriteTextTerms(TextWriter writer) } } - public abstract class TermMap : TermMap + internal abstract class TermMap : TermMap { protected TermMap(PrimitiveType type, int count) : base(type, count) diff --git a/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs b/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs index ab9274cc7a..75aeac4a5b 100644 --- a/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs +++ b/src/Microsoft.ML.Data/Utilities/ComponentCreation.cs @@ -45,7 +45,8 @@ public static IDataView Zip(this IHostEnvironment env, IEnumerable so /// The name of the weight column. Can be null. /// Additional column mapping to be passed to the trainer or scorer (specific to the prediction type). Can be null or empty. /// The constructed examples. - public static RoleMappedData CreateExamples(this IHostEnvironment env, IDataView data, string features, string label = null, + [BestFriend] + internal static RoleMappedData CreateExamples(this IHostEnvironment env, IDataView data, string features, string label = null, string group = null, string weight = null, IEnumerable> custom = null) { Contracts.CheckValue(env, nameof(env)); @@ -264,7 +265,7 @@ public static IDataTransform CreateTransform(this IHostEnvironment env, string s /// additional information, for example, label names. If this is null, no information will be /// extracted. /// The scored data. 
- public static IDataScorerTransform CreateScorer(this IHostEnvironment env, string settings, + internal static IDataScorerTransform CreateScorer(this IHostEnvironment env, string settings, RoleMappedData data, IPredictor predictor, RoleMappedSchema trainSchema = null) { Contracts.CheckValue(env, nameof(env)); @@ -295,7 +296,8 @@ public static IDataScorerTransform CreateScorer(this IHostEnvironment env, strin /// additional information, for example, label names. If this is null, no information will be /// extracted. /// The scored data. - public static IDataScorerTransform CreateDefaultScorer(this IHostEnvironment env, RoleMappedData data, + [BestFriend] + internal static IDataScorerTransform CreateDefaultScorer(this IHostEnvironment env, RoleMappedData data, IPredictor predictor, RoleMappedSchema trainSchema = null) { Contracts.CheckValue(env, nameof(env)); @@ -306,7 +308,8 @@ public static IDataScorerTransform CreateDefaultScorer(this IHostEnvironment env return ScoreUtils.GetScorer(predictor, data, env, trainSchema); } - public static IEvaluator CreateEvaluator(this IHostEnvironment env, string settings) + [BestFriend] + internal static IEvaluator CreateEvaluator(this IHostEnvironment env, string settings) { Contracts.CheckValue(env, nameof(env)); env.CheckNonWhiteSpace(settings, nameof(settings)); diff --git a/src/Microsoft.ML.Ensemble/Batch.cs b/src/Microsoft.ML.Ensemble/Batch.cs index e9c8fcf179..482456f83c 100644 --- a/src/Microsoft.ML.Ensemble/Batch.cs +++ b/src/Microsoft.ML.Ensemble/Batch.cs @@ -6,7 +6,7 @@ namespace Microsoft.ML.Runtime.Ensemble { - public sealed class Batch + internal sealed class Batch { public readonly RoleMappedData TrainInstances; public readonly RoleMappedData TestInstances; diff --git a/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs b/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs index 1d813b37f5..db8708d544 100644 --- a/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs +++ 
b/src/Microsoft.ML.Ensemble/EntryPoints/CreateEnsemble.cs @@ -48,13 +48,13 @@ public enum ScoreCombiner public abstract class PipelineInputBase { [Argument(ArgumentType.Required, ShortName = "models", HelpText = "The models to combine into an ensemble", SortOrder = 1)] - public IPredictorModel[] Models; + public PredictorModel[] Models; } public abstract class InputBase { [Argument(ArgumentType.Required, ShortName = "models", HelpText = "The models to combine into an ensemble", SortOrder = 1)] - public IPredictorModel[] Models; + public PredictorModel[] Models; [Argument(ArgumentType.AtMostOnce, ShortName = "validate", HelpText = "Whether to validate that all the pipelines are identical", SortOrder = 5)] public bool ValidatePipelines = true; @@ -160,7 +160,7 @@ public static CommonOutputs.BinaryClassificationOutput CreateBinaryEnsemble(IHos var trainer = new EnsembleTrainer(host, args); var ensemble = trainer.CombineModels(input.Models.Select(pm => pm.Predictor as IPredictorProducing)); - var predictorModel = new PredictorModel(host, transformedData, startingData, ensemble); + var predictorModel = new PredictorModelImpl(host, transformedData, startingData, ensemble); var output = new CommonOutputs.BinaryClassificationOutput { PredictorModel = predictorModel }; return output; @@ -192,7 +192,7 @@ public static CommonOutputs.RegressionOutput CreateRegressionEnsemble(IHostEnvir var trainer = new RegressionEnsembleTrainer(host, args); var ensemble = trainer.CombineModels(input.Models.Select(pm => pm.Predictor as IPredictorProducing)); - var predictorModel = new PredictorModel(host, transformedData, startingData, ensemble); + var predictorModel = new PredictorModelImpl(host, transformedData, startingData, ensemble); var output = new CommonOutputs.RegressionOutput { PredictorModel = predictorModel }; return output; @@ -300,7 +300,7 @@ public static CommonOutputs.AnomalyDetectionOutput CreateAnomalyPipelineEnsemble return CreatePipelineEnsemble(host, input.Models, 
ensemble); } - private static TOut CreatePipelineEnsemble(IHostEnvironment env, IPredictorModel[] predictors, SchemaBindablePipelineEnsembleBase ensemble) + private static TOut CreatePipelineEnsemble(IHostEnvironment env, PredictorModel[] predictors, SchemaBindablePipelineEnsembleBase ensemble) where TOut : CommonOutputs.TrainerOutput, new() { var inputSchema = predictors[0].TransformModel.InputSchema; @@ -308,7 +308,7 @@ private static TOut CreatePipelineEnsemble(IHostEnvironment env, IPredicto // The role mappings are specific to the individual predictors. var rmd = new RoleMappedData(dv); - var predictorModel = new PredictorModel(env, rmd, dv, ensemble); + var predictorModel = new PredictorModelImpl(env, rmd, dv, ensemble); var output = new TOut { PredictorModel = predictorModel }; return output; @@ -323,7 +323,7 @@ private static TOut CreatePipelineEnsemble(IHostEnvironment env, IPredicto /// This method is used for comparing pipelines. Its outputs can be passed to /// to check if this pipeline is identical to another pipeline. /// - public static void SerializeRoleMappedData(IHostEnvironment env, IChannel ch, RoleMappedData data, + private static void SerializeRoleMappedData(IHostEnvironment env, IChannel ch, RoleMappedData data, out byte[][] dataSerialized, out string[] dataZipEntryNames) { Contracts.CheckValue(env, nameof(env)); @@ -357,7 +357,7 @@ public static void SerializeRoleMappedData(IHostEnvironment env, IChannel ch, Ro /// and . /// This method throws if for any of the entries the name/byte sequence are not identical. 
/// - public static void CheckSamePipeline(IHostEnvironment env, IChannel ch, + private static void CheckSamePipeline(IHostEnvironment env, IChannel ch, RoleMappedData dataToCompare, byte[][] dataSerialized, string[] dataZipEntryNames) { Contracts.CheckValue(env, nameof(env)); diff --git a/src/Microsoft.ML.Ensemble/OutputCombiners/IOutputCombiner.cs b/src/Microsoft.ML.Ensemble/OutputCombiners/IOutputCombiner.cs index 512974b717..6fcf55d5c8 100644 --- a/src/Microsoft.ML.Ensemble/OutputCombiners/IOutputCombiner.cs +++ b/src/Microsoft.ML.Ensemble/OutputCombiners/IOutputCombiner.cs @@ -28,7 +28,7 @@ public interface IOutputCombiner : IOutputCombiner Combiner GetCombiner(); } - public interface IStackingTrainer + internal interface IStackingTrainer { void Train(List>> models, RoleMappedData data, IHostEnvironment env); Single ValidationDatasetProportion { get; } diff --git a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs index d393e6c316..59e3414b8b 100644 --- a/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs +++ b/src/Microsoft.ML.Ensemble/PipelineEnsemble.cs @@ -80,7 +80,7 @@ public BoundBase(SchemaBindablePipelineEnsembleBase parent, RoleMappedSchema sch // Get the pipeline. 
var dv = new EmptyDataView(Parent.Host, schema.Schema); - var tm = new TransformModel(Parent.Host, dv, dv); + var tm = new TransformModelImpl(Parent.Host, dv, dv); var pipeline = Parent.PredictorModels[i].TransformModel.Apply(Parent.Host, tm); BoundPipelines[i] = pipeline.AsRowToRowMapper(Parent.Host); if (BoundPipelines[i] == null) @@ -190,7 +190,7 @@ public ValueGetter GetWeightGetter(Row input, int i, out Action disposer protected readonly IOutputCombiner Combiner; - protected SchemaBindablePipelineEnsemble(IHostEnvironment env, IPredictorModel[] predictors, + protected SchemaBindablePipelineEnsemble(IHostEnvironment env, PredictorModel[] predictors, IOutputCombiner combiner, string registrationName, string scoreColumnKind) : base(env, predictors, registrationName, scoreColumnKind) { @@ -241,7 +241,7 @@ public override PredictionKind PredictionKind } } - public ImplOne(IHostEnvironment env, IPredictorModel[] predictors, IRegressionOutputCombiner combiner, string scoreColumnKind) + public ImplOne(IHostEnvironment env, PredictorModel[] predictors, IRegressionOutputCombiner combiner, string scoreColumnKind) : base(env, predictors, combiner, LoaderSignature, scoreColumnKind) { } @@ -269,7 +269,7 @@ public override PredictionKind PredictionKind private readonly VectorType _scoreType; - public ImplVec(IHostEnvironment env, IPredictorModel[] predictors, IMultiClassOutputCombiner combiner) + public ImplVec(IHostEnvironment env, PredictorModel[] predictors, IMultiClassOutputCombiner combiner) : base(env, predictors, combiner, LoaderSignature, MetadataUtils.Const.ScoreColumnKind.MultiClassClassification) { int classCount = CheckLabelColumn(Host, predictors, false); @@ -291,7 +291,7 @@ private sealed class ImplOneWithCalibrator : SchemaBindablePipelineEnsemble 0); - PredictorModels = new IPredictorModel[length]; + PredictorModels = new PredictorModel[length]; for (int i = 0; i < PredictorModels.Length; i++) { string dir = @@ -467,7 +467,7 @@ protected 
SchemaBindablePipelineEnsembleBase(IHostEnvironment env, ModelLoadCont ? "PredictorModels" : Path.Combine(ctx.Directory, "PredictorModels"); using (var ent = ctx.Repository.OpenEntry(dir, $"PredictorModel_{i:000}")) - PredictorModels[i] = new PredictorModel(Host, ent.Stream); + PredictorModels[i] = new PredictorModelImpl(Host, ent.Stream); } length = ctx.Reader.ReadInt32(); @@ -512,7 +512,7 @@ public void Save(ModelSaveContext ctx) protected abstract void SaveCore(ModelSaveContext ctx); - public static SchemaBindablePipelineEnsembleBase Create(IHostEnvironment env, IPredictorModel[] predictors, IOutputCombiner combiner, string scoreColumnKind) + public static SchemaBindablePipelineEnsembleBase Create(IHostEnvironment env, PredictorModel[] predictors, IOutputCombiner combiner, string scoreColumnKind) { switch (scoreColumnKind) { @@ -580,7 +580,7 @@ void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) } // Checks that the predictors have matching label columns, and returns the number of classes in all predictors. - protected static int CheckLabelColumn(IHostEnvironment env, IPredictorModel[] models, bool isBinary) + protected static int CheckLabelColumn(IHostEnvironment env, PredictorModel[] models, bool isBinary) { Contracts.CheckValue(env, nameof(env)); env.CheckNonEmpty(models, nameof(models)); @@ -609,7 +609,7 @@ protected static int CheckLabelColumn(IHostEnvironment env, IPredictorModel[] mo // When the label column is not a key, we check that the number of classes is the same for all the predictors, by checking the // OutputType property of the IValueMapper. // If any of the predictors do not implement IValueMapper we throw an exception. Returns the class count. 
- private static int CheckNonKeyLabelColumnCore(IHostEnvironment env, IPredictor pred, IPredictorModel[] models, bool isBinary, ColumnType labelType) + private static int CheckNonKeyLabelColumnCore(IHostEnvironment env, IPredictor pred, PredictorModel[] models, bool isBinary, ColumnType labelType) { env.Assert(!labelType.IsKey); env.AssertNonEmpty(models); @@ -636,7 +636,7 @@ private static int CheckNonKeyLabelColumnCore(IHostEnvironment env, IPredictor p // Checks that all the label columns of the model have the same key type as their label column - including the same // cardinality and the same key values, and returns the cardinality of the label column key. - private static int CheckKeyLabelColumnCore(IHostEnvironment env, IPredictorModel[] models, KeyType labelType, Schema schema, int labelIndex, ColumnType keyValuesType) + private static int CheckKeyLabelColumnCore(IHostEnvironment env, PredictorModel[] models, KeyType labelType, Schema schema, int labelIndex, ColumnType keyValuesType) where T : IEquatable { env.Assert(keyValuesType.ItemType.RawType == typeof(T)); diff --git a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs index ad854e975f..3ce4cb1955 100644 --- a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/AllFeatureSelector.cs @@ -13,7 +13,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.FeatureSelector { - public sealed class AllFeatureSelector : IFeatureSelector + internal sealed class AllFeatureSelector : IFeatureSelector { public const string UserName = "All Feature Selector"; public const string LoadName = "AllFeatureSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs index b3650dd139..c22c6e8c48 100644 --- 
a/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/FeatureSelector/RandomFeatureSelector.cs @@ -17,7 +17,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.FeatureSelector { - public class RandomFeatureSelector : IFeatureSelector + internal class RandomFeatureSelector : IFeatureSelector { public const string UserName = "Random Feature Selector"; public const string LoadName = "RandomFeatureSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs b/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs index c53f7d244e..90aa692391 100644 --- a/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/IFeatureSelector.cs @@ -8,7 +8,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector { - public interface IFeatureSelector + internal interface IFeatureSelector { Subset SelectFeatures(RoleMappedData data, Random rand); } @@ -16,7 +16,7 @@ public interface IFeatureSelector public delegate void SignatureEnsembleFeatureSelector(); [TlcModule.ComponentKind("EnsembleFeatureSelector")] - public interface ISupportFeatureSelectorFactory : IComponentFactory + internal interface ISupportFeatureSelectorFactory : IComponentFactory { } } diff --git a/src/Microsoft.ML.Ensemble/Selector/ISubModelSelector.cs b/src/Microsoft.ML.Ensemble/Selector/ISubModelSelector.cs index 96e9f5b886..33e5da6d7f 100644 --- a/src/Microsoft.ML.Ensemble/Selector/ISubModelSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/ISubModelSelector.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector { - public interface ISubModelSelector + internal interface ISubModelSelector { IList>> Prune(IList>> models); @@ -19,33 +19,33 @@ void CalculateMetrics(FeatureSubsetModel> model, IS Single ValidationDatasetProportion { get; } } - public interface IRegressionSubModelSelector : ISubModelSelector + internal interface IRegressionSubModelSelector : ISubModelSelector { } - public 
interface IBinarySubModelSelector : ISubModelSelector + internal interface IBinarySubModelSelector : ISubModelSelector { } - public interface IMulticlassSubModelSelector : ISubModelSelector> + internal interface IMulticlassSubModelSelector : ISubModelSelector> { } - public delegate void SignatureEnsembleSubModelSelector(); + internal delegate void SignatureEnsembleSubModelSelector(); [TlcModule.ComponentKind("EnsembleMulticlassSubModelSelector")] - public interface ISupportMulticlassSubModelSelectorFactory : IComponentFactory + internal interface ISupportMulticlassSubModelSelectorFactory : IComponentFactory { } [TlcModule.ComponentKind("EnsembleBinarySubModelSelector")] - public interface ISupportBinarySubModelSelectorFactory: IComponentFactory + internal interface ISupportBinarySubModelSelectorFactory: IComponentFactory { } [TlcModule.ComponentKind("EnsembleRegressionSubModelSelector")] - public interface ISupportRegressionSubModelSelectorFactory : IComponentFactory + internal interface ISupportRegressionSubModelSelectorFactory : IComponentFactory { } diff --git a/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs b/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs index 704bcd919b..983f094c34 100644 --- a/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/ISubsetSelector.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector { - public interface ISubsetSelector + internal interface ISubsetSelector { void Initialize(RoleMappedData data, int size, int batchSize, Single validationDatasetProportion); IEnumerable GetBatches(Random rand); @@ -20,7 +20,7 @@ public interface ISubsetSelector public delegate void SignatureEnsembleDataSelector(); [TlcModule.ComponentKind("EnsembleSubsetSelector")] - public interface ISupportSubsetSelectorFactory : IComponentFactory + internal interface ISupportSubsetSelectorFactory : IComponentFactory { } } diff --git 
a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelector.cs index 4196ab3558..9c6f9e6691 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelector.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public class AllSelector : BaseSubModelSelector, IBinarySubModelSelector, IRegressionSubModelSelector + internal sealed class AllSelector : BaseSubModelSelector, IBinarySubModelSelector, IRegressionSubModelSelector { public const string UserName = "All Selector"; public const string LoadName = "AllSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelectorMultiClass.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelectorMultiClass.cs index 6c82fc25f5..2c674b8842 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelectorMultiClass.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/AllSelectorMultiClass.cs @@ -13,7 +13,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public class AllSelectorMultiClass : BaseSubModelSelector>, IMulticlassSubModelSelector + internal sealed class AllSelectorMultiClass : BaseSubModelSelector>, IMulticlassSubModelSelector { public const string UserName = "All Selector"; public const string LoadName = "AllSelectorMultiClass"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseBestPerformanceSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseBestPerformanceSelector.cs index 1e51044fb9..f4062787e2 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseBestPerformanceSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseBestPerformanceSelector.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public abstract class 
BaseBestPerformanceSelector : SubModelDataSelector + internal abstract class BaseBestPerformanceSelector : SubModelDataSelector { protected abstract string MetricName { get; } diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs index ac2e85dda6..0b72b16637 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs @@ -13,7 +13,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public abstract class BaseDiverseSelector : SubModelDataSelector + internal abstract class BaseDiverseSelector : SubModelDataSelector where TDiversityMetric : class, IDiversityMeasure { public abstract class DiverseSelectorArguments : ArgumentsBase diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseSubModelSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseSubModelSelector.cs index 87ab03bb7c..9eb51c090a 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseSubModelSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseSubModelSelector.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public abstract class BaseSubModelSelector : ISubModelSelector + internal abstract class BaseSubModelSelector : ISubModelSelector { protected readonly IHost Host; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorBinary.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorBinary.cs index 75d25df9d6..9ef2861d33 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorBinary.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorBinary.cs @@ -21,7 +21,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { using TScalarPredictor = 
IPredictorProducing; - public sealed class BestDiverseSelectorBinary : BaseDiverseSelector, IBinarySubModelSelector + internal sealed class BestDiverseSelectorBinary : BaseDiverseSelector, IBinarySubModelSelector { public const string UserName = "Best Diverse Selector"; public const string LoadName = "BestDiverseSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorMultiClass.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorMultiClass.cs index 8856982f00..a5a0864338 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorMultiClass.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorMultiClass.cs @@ -22,7 +22,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { using TVectorPredictor = IPredictorProducing>; - public sealed class BestDiverseSelectorMultiClass : BaseDiverseSelector, IDiversityMeasure>>, IMulticlassSubModelSelector + internal sealed class BestDiverseSelectorMultiClass : BaseDiverseSelector, IDiversityMeasure>>, IMulticlassSubModelSelector { public const string UserName = "Best Diverse Selector"; public const string LoadName = "BestDiverseSelectorMultiClass"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorRegression.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorRegression.cs index cbd2d47330..f4e717d73c 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorRegression.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestDiverseSelectorRegression.cs @@ -21,7 +21,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { using TScalarPredictor = IPredictorProducing; - public sealed class BestDiverseSelectorRegression : BaseDiverseSelector, IRegressionSubModelSelector + internal sealed class BestDiverseSelectorRegression : BaseDiverseSelector, IRegressionSubModelSelector { public const string 
UserName = "Best Diverse Selector"; public const string LoadName = "BestDiverseSelectorRegression"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceRegressionSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceRegressionSelector.cs index 46f13e9cd1..0ad0e1024c 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceRegressionSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceRegressionSelector.cs @@ -16,7 +16,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public sealed class BestPerformanceRegressionSelector : BaseBestPerformanceSelector, IRegressionSubModelSelector + internal sealed class BestPerformanceRegressionSelector : BaseBestPerformanceSelector, IRegressionSubModelSelector { [TlcModule.Component(Name = LoadName, FriendlyName = UserName)] public sealed class Arguments : ArgumentsBase, ISupportRegressionSubModelSelectorFactory diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelector.cs index 76742ad0ec..84f00e0958 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelector.cs @@ -16,7 +16,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public sealed class BestPerformanceSelector : BaseBestPerformanceSelector, IBinarySubModelSelector + internal sealed class BestPerformanceSelector : BaseBestPerformanceSelector, IBinarySubModelSelector { [TlcModule.Component(Name = LoadName, FriendlyName = UserName)] public sealed class Arguments : ArgumentsBase, ISupportBinarySubModelSelectorFactory diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelectorMultiClass.cs 
b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelectorMultiClass.cs index 0a9b9ac497..6e3414d945 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelectorMultiClass.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BestPerformanceSelectorMultiClass.cs @@ -16,7 +16,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public class BestPerformanceSelectorMultiClass : BaseBestPerformanceSelector>, IMulticlassSubModelSelector + internal sealed class BestPerformanceSelectorMultiClass : BaseBestPerformanceSelector>, IMulticlassSubModelSelector { [TlcModule.Component(Name = LoadName, FriendlyName = UserName)] public sealed class Arguments : ArgumentsBase, ISupportMulticlassSubModelSelectorFactory diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/SubModelDataSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/SubModelDataSelector.cs index 5953b30d97..0bd623174f 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/SubModelDataSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/SubModelDataSelector.cs @@ -8,7 +8,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubModelSelector { - public abstract class SubModelDataSelector : BaseSubModelSelector + internal abstract class SubModelDataSelector : BaseSubModelSelector { public abstract class ArgumentsBase { diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs index 483d6a5b0f..c56b6dc9e8 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/AllInstanceSelector.cs @@ -16,7 +16,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector { - public sealed class AllInstanceSelector : BaseSubsetSelector + internal sealed class AllInstanceSelector : BaseSubsetSelector { public const 
string UserName = "All Instance Selector"; public const string LoadName = "AllInstanceSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs index 6e4d2a6b27..f3d26e646c 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BaseSubsetSelector.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector { - public abstract class BaseSubsetSelector : ISubsetSelector + internal abstract class BaseSubsetSelector : ISubsetSelector where TArgs : BaseSubsetSelector.ArgumentsBase { public abstract class ArgumentsBase diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs index 93cc361a9e..c6020bbf8c 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/BootstrapSelector.cs @@ -18,7 +18,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector { - public sealed class BootstrapSelector : BaseSubsetSelector + internal sealed class BootstrapSelector : BaseSubsetSelector { public const string UserName = "Bootstrap Selector"; public const string LoadName = "BootstrapSelector"; diff --git a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs index 7ddf7bb9d7..81a8ec927f 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubsetSelector/RandomPartitionSelector.cs @@ -18,7 +18,7 @@ namespace Microsoft.ML.Runtime.Ensemble.Selector.SubsetSelector { - public sealed class RandomPartitionSelector : BaseSubsetSelector + internal sealed class RandomPartitionSelector : BaseSubsetSelector { 
public const string UserName = "Random Partition Selector"; public const string LoadName = "RandomPartitionSelector"; diff --git a/src/Microsoft.ML.Ensemble/Subset.cs b/src/Microsoft.ML.Ensemble/Subset.cs index 77183de6f3..b1baf8725c 100644 --- a/src/Microsoft.ML.Ensemble/Subset.cs +++ b/src/Microsoft.ML.Ensemble/Subset.cs @@ -7,7 +7,7 @@ namespace Microsoft.ML.Runtime.Ensemble { - public sealed class Subset + internal sealed class Subset { public readonly RoleMappedData Data; public readonly BitArray SelectedFeatures; diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs index 30b2dce057..725294065c 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsemblePredictor.cs @@ -109,7 +109,7 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.Writer.Write((int)PredictionKind); } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(Single)); diff --git a/src/Microsoft.ML.Ensemble/Trainer/EnsembleTrainerBase.cs b/src/Microsoft.ML.Ensemble/Trainer/EnsembleTrainerBase.cs index a8fc896c5b..7fffa89108 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/EnsembleTrainerBase.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/EnsembleTrainerBase.cs @@ -20,13 +20,14 @@ namespace Microsoft.ML.Runtime.Ensemble { using Stopwatch = System.Diagnostics.Stopwatch; - public abstract class EnsembleTrainerBase : TrainerBase + internal abstract class EnsembleTrainerBase : TrainerBase where TPredictor : class, IPredictorProducing where TSelector : class, ISubModelSelector where TCombiner : class, IOutputCombiner { public abstract class ArgumentsBase : LearnerInputBaseWithLabel { +#pragma warning disable CS0649 // These are set via reflection. [Argument(ArgumentType.AtMostOnce, HelpText = "Number of models per batch. 
If not specified, will default to 50 if there is only one base predictor, " + "or the number of base predictors otherwise.", ShortName = "nm", SortOrder = 3)] @@ -54,6 +55,7 @@ public abstract class ArgumentsBase : LearnerInputBaseWithLabel public bool ShowMetrics; internal abstract IComponentFactory>>[] GetPredictorFactories(); +#pragma warning restore CS0649 } private const int DefaultNumModels = 50; diff --git a/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs b/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs index a332d3ddb0..7181780c8f 100644 --- a/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs +++ b/src/Microsoft.ML.Ensemble/Trainer/Multiclass/EnsembleMultiClassPredictor.cs @@ -40,8 +40,8 @@ private static VersionInfo GetVersionInfo() private readonly ColumnType _outputType; private readonly IValueMapper[] _mappers; - public ColumnType InputType { get { return _inputType; } } - public ColumnType OutputType { get { return _outputType; } } + public ColumnType InputType => _inputType; + public ColumnType OutputType => _outputType; internal EnsembleMultiClassPredictor(IHostEnvironment env, FeatureSubsetModel[] models, IMultiClassOutputCombiner combiner, Single[] weights = null) @@ -101,9 +101,9 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - public override PredictionKind PredictionKind { get { return PredictionKind.MultiClassClassification; } } + public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification; - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(VBuffer)); diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index c790783cc9..7765535aed 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -58,12 
+58,12 @@ public abstract class FastTreeTrainerBase : protected readonly bool AllowGC; protected TreeEnsemble TrainedEnsemble; protected int FeatureCount; - protected RoleMappedData ValidData; + private protected RoleMappedData ValidData; /// /// If not null, it's a test data set passed in from training context. It will be converted to one element in /// by calling in . /// - protected RoleMappedData TestData; + private protected RoleMappedData TestData; protected IParallelTraining ParallelTraining; protected OptimizationAlgorithm OptimizationAlgorithm; protected Dataset TrainSet; @@ -209,7 +209,7 @@ private void Initialize(IHostEnvironment env) InitializeThreads(numThreads); } - protected void ConvertData(RoleMappedData trainData) + private protected void ConvertData(RoleMappedData trainData) { MetadataUtils.TryGetCategoricalFeatureIndices(trainData.Schema.Schema, trainData.Schema.Feature.Index, out CategoricalFeatures); var useTranspose = UseTranspose(Args.DiskTranspose, trainData) && (ValidData == null || UseTranspose(Args.DiskTranspose, ValidData)); diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 74eb2a4f22..019613d787 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -818,7 +818,7 @@ public override void Save(ModelSaveContext ctx) } } - public ValueMapper GetMapper() + ValueMapper IValueMapper.GetMapper() { Host.Check(typeof(TIn) == typeof(VBuffer)); Host.Check(typeof(TOut) == typeof(float)); @@ -994,7 +994,7 @@ void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } - public ValueMapper> GetFeatureContributionMapper + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper (int top, int bottom, bool normalize) { Contracts.Check(typeof(TSrc) == typeof(VBuffer)); diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs 
b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index ec3827286c..677a18c97d 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -575,7 +575,7 @@ public sealed class ArgumentsForEntryPoint : TransformInputBase public int LabelPermutationSeed; [Argument(ArgumentType.Required, HelpText = "Trainer to use", SortOrder = 10, Visibility = ArgumentAttribute.VisibilityType.EntryPointsOnly)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } #pragma warning restore CS0649 @@ -809,7 +809,7 @@ public static CommonOutputs.TransformOutput Featurizer(IHostEnvironment env, Tre EntryPointUtils.CheckInputArgs(host, input); var xf = TreeEnsembleFeaturizerTransform.CreateForEntryPoint(env, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } #pragma warning restore CS0649 } diff --git a/src/Microsoft.ML.ImageAnalytics/EntryPoints/ImageAnalytics.cs b/src/Microsoft.ML.ImageAnalytics/EntryPoints/ImageAnalytics.cs index 921309d7ef..dc63fa0f17 100644 --- a/src/Microsoft.ML.ImageAnalytics/EntryPoints/ImageAnalytics.cs +++ b/src/Microsoft.ML.ImageAnalytics/EntryPoints/ImageAnalytics.cs @@ -19,7 +19,7 @@ public static CommonOutputs.TransformOutput ImageLoader(IHostEnvironment env, Im var xf = ImageLoaderTransform.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -32,7 +32,7 @@ public static CommonOutputs.TransformOutput ImageResizer(IHostEnvironment env, I var xf = ImageResizerTransform.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new 
TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -45,7 +45,7 @@ public static CommonOutputs.TransformOutput ImagePixelExtractor(IHostEnvironment var xf = ImagePixelExtractorTransform.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -58,7 +58,7 @@ public static CommonOutputs.TransformOutput ImageGrayscale(IHostEnvironment env, var xf = ImageGrayscaleTransform.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -71,7 +71,7 @@ public static CommonOutputs.TransformOutput VectorToImage(IHostEnvironment env, var xf = new VectorToImageTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Legacy/CSharpApi.cs b/src/Microsoft.ML.Legacy/CSharpApi.cs index f91479b881..432fc01779 100644 --- a/src/Microsoft.ML.Legacy/CSharpApi.cs +++ b/src/Microsoft.ML.Legacy/CSharpApi.cs @@ -2070,7 +2070,7 @@ namespace Legacy.Data { /// - /// Create an array variable of IPredictorModel + /// Create an array variable of PredictorModel /// [Obsolete] public sealed partial class PredictorModelArrayConverter @@ -2081,7 +2081,7 @@ public sealed partial class PredictorModelArrayConverter /// The models /// [Obsolete] - public ArrayVar Model { get; set; } = new ArrayVar(); + public ArrayVar Model { get; set; } = new ArrayVar(); [Obsolete] @@ -2090,7 +2090,7 @@ public sealed class Output /// /// The model array /// - public ArrayVar OutputModel { get; set; } = new ArrayVar(); + public ArrayVar OutputModel { get; set; } = new ArrayVar(); } } @@ -2329,7 +2329,7 @@ public TextLoaderPipelineStep (Output output) } public Var 
Data { get; } - public Var Model { get; } + public Var Model { get; } } /// @@ -2361,7 +2361,7 @@ namespace Legacy.Data { /// - /// Create an array variable of ITransformModel + /// Create an array variable of TransformModel /// [Obsolete] public sealed partial class TransformModelArrayConverter @@ -2372,7 +2372,7 @@ public sealed partial class TransformModelArrayConverter /// The models /// [Obsolete] - public ArrayVar TransformModel { get; set; } = new ArrayVar(); + public ArrayVar TransformModel { get; set; } = new ArrayVar(); [Obsolete] @@ -2381,7 +2381,7 @@ public sealed class Output /// /// The model array /// - public ArrayVar OutputModel { get; set; } = new ArrayVar(); + public ArrayVar OutputModel { get; set; } = new ArrayVar(); } } @@ -2515,7 +2515,7 @@ public sealed partial class AnomalyPipelineEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); [Obsolete] @@ -2524,7 +2524,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IAno /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -2662,7 +2662,7 @@ public sealed partial class CrossValidationBinaryMacroSubGraphOutput /// The model /// [Obsolete] - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } @@ -2717,7 +2717,7 @@ public sealed class Output /// /// The trained model /// - public ArrayVar PredictorModel { get; set; } = new ArrayVar(); + public ArrayVar PredictorModel { get; set; } = new ArrayVar(); /// /// Warning dataset @@ -2772,7 +2772,7 @@ public sealed partial class BinaryEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); /// /// Whether to validate that all the pipelines are 
identical @@ -2787,7 +2787,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -2814,7 +2814,7 @@ public sealed partial class BinaryPipelineEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); [Obsolete] @@ -2823,7 +2823,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -3142,13 +3142,13 @@ public sealed partial class CrossValidationMacroSubGraphOutput /// The predictor model /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// The transform model /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); } @@ -3170,7 +3170,7 @@ public sealed partial class CrossValidator /// The transform model from the pipeline before this command. It gets included in the Output.PredictorModel. /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); /// /// The training subgraph @@ -3239,12 +3239,12 @@ public sealed class Output /// /// The final model including the trained predictor model and the model from the transforms, provided as the Input.TransformModel. /// - public ArrayVar PredictorModel { get; set; } = new ArrayVar(); + public ArrayVar PredictorModel { get; set; } = new ArrayVar(); /// /// The final model including the trained predictor model and the model from the transforms, provided as the Input.TransformModel. 
/// - public ArrayVar TransformModel { get; set; } = new ArrayVar(); + public ArrayVar TransformModel { get; set; } = new ArrayVar(); /// /// Warning dataset @@ -3332,7 +3332,7 @@ public sealed partial class DatasetTransformer : Microsoft.ML.Runtime.EntryPoint /// Transform model /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); /// /// Input dataset @@ -3381,7 +3381,7 @@ public DatasetTransformerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -3401,7 +3401,7 @@ public sealed partial class EnsembleSummary /// The predictor to summarize /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); [Obsolete] @@ -3448,7 +3448,7 @@ public sealed partial class FixedPlattCalibrator : Microsoft.ML.Runtime.EntryPoi /// The predictor to calibrate /// [Obsolete] - public Var UncalibratedPredictorModel { get; set; } = new Var(); + public Var UncalibratedPredictorModel { get; set; } = new Var(); /// /// The maximum number of examples to train the calibrator on @@ -3470,7 +3470,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -3502,7 +3502,7 @@ public FixedPlattCalibratorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -3528,7 +3528,7 @@ public sealed partial class MultiClassPipelineEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); [Obsolete] @@ -3537,7 +3537,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - 
public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -3641,7 +3641,7 @@ public sealed partial class NaiveCalibrator : Microsoft.ML.Runtime.EntryPoints.C /// The predictor to calibrate /// [Obsolete] - public Var UncalibratedPredictorModel { get; set; } = new Var(); + public Var UncalibratedPredictorModel { get; set; } = new Var(); /// /// The maximum number of examples to train the calibrator on @@ -3663,7 +3663,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -3695,7 +3695,7 @@ public NaiveCalibratorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -3728,7 +3728,7 @@ public sealed partial class OneVersusAllMacroSubGraphOutput /// The predictor model for the subgraph exemplar. /// [Obsolete] - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } @@ -3799,7 +3799,7 @@ public sealed class Output /// /// The trained multiclass model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -3831,7 +3831,7 @@ public OneVersusAllPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -3894,7 +3894,7 @@ public sealed partial class OnnxConverter /// Model that needs to be converted to ONNX format. /// [Obsolete] - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); /// /// The targeted ONNX version. It can be either "Stable" or "Experimental". If "Experimental" is used, produced model can contain components that is not officially supported in ONNX standard. @@ -3931,7 +3931,7 @@ public sealed partial class OvaModelCombiner : Microsoft.ML.Runtime.EntryPoints. 
/// Input models /// [Obsolete] - public ArrayVar ModelArray { get; set; } = new ArrayVar(); + public ArrayVar ModelArray { get; set; } = new ArrayVar(); /// /// Use probabilities from learners instead of raw values. @@ -3982,7 +3982,7 @@ public sealed class Output /// /// Predictor model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -4014,7 +4014,7 @@ public OvaModelCombinerPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -4034,7 +4034,7 @@ public sealed partial class PAVCalibrator : Microsoft.ML.Runtime.EntryPoints.Com /// The predictor to calibrate /// [Obsolete] - public Var UncalibratedPredictorModel { get; set; } = new Var(); + public Var UncalibratedPredictorModel { get; set; } = new Var(); /// /// The maximum number of examples to train the calibrator on @@ -4056,7 +4056,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -4088,7 +4088,7 @@ public PAVCalibratorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -4108,7 +4108,7 @@ public sealed partial class PlattCalibrator : Microsoft.ML.Runtime.EntryPoints.C /// The predictor to calibrate /// [Obsolete] - public Var UncalibratedPredictorModel { get; set; } = new Var(); + public Var UncalibratedPredictorModel { get; set; } = new Var(); /// /// The maximum number of examples to train the calibrator on @@ -4130,7 +4130,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ICal /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -4162,7 +4162,7 @@ public PlattCalibratorPipelineStep(Output output) } 
[Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -4359,7 +4359,7 @@ public sealed partial class RegressionEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); /// /// Whether to validate that all the pipelines are identical @@ -4374,7 +4374,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -4478,7 +4478,7 @@ public sealed partial class RegressionPipelineEnsemble /// The models to combine into an ensemble /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); [Obsolete] @@ -4487,7 +4487,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -4508,7 +4508,7 @@ public sealed partial class Summarizer /// The predictor to summarize /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); [Obsolete] @@ -4549,7 +4549,7 @@ public sealed partial class TrainTestBinaryMacroSubGraphOutput /// The model /// [Obsolete] - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } @@ -4598,7 +4598,7 @@ public sealed class Output /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// Warning dataset @@ -4645,13 +4645,13 @@ public sealed partial class TrainTestMacroSubGraphOutput /// The predictor model /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } 
= new Var(); /// /// Transform model /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); } @@ -4679,7 +4679,7 @@ public sealed partial class TrainTestEvaluator /// The aggregated transform model from the pipeline before this command, to apply to the test data, and also include in the final model, together with the predictor model. /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); /// /// The training subgraph @@ -4748,12 +4748,12 @@ public sealed class Output /// /// The final model including the trained predictor model and the model from the transforms, provided as the Input.TransformModel. /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// The final model including the trained predictor model and the model from the transforms, provided as the Input.TransformModel. /// - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); /// /// Warning dataset @@ -4846,7 +4846,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -4881,7 +4881,7 @@ public ExponentialAveragePipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -4959,7 +4959,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -4994,7 +4994,7 @@ public IidChangePointDetectorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5066,7 +5066,7 @@ 
public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5101,7 +5101,7 @@ public IidSpikeDetectorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5159,7 +5159,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5194,7 +5194,7 @@ public PercentileThresholdTransformPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5264,7 +5264,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5299,7 +5299,7 @@ public PValueTransformPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5370,7 +5370,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5405,7 +5405,7 @@ public SlidingWindowTransformPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5511,7 +5511,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5546,7 +5546,7 @@ public SsaChangePointDetectorPipelineStep(Output output) [Obsolete] public 
Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5636,7 +5636,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -5671,7 +5671,7 @@ public SsaSpikeDetectorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5833,7 +5833,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -5865,7 +5865,7 @@ public AveragedPerceptronBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -5963,7 +5963,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -5995,7 +5995,7 @@ public EnsembleBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -6093,7 +6093,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -6125,7 +6125,7 @@ public EnsembleClassificationPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -6223,7 +6223,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } 
[Obsolete] @@ -6255,7 +6255,7 @@ public EnsembleRegressionPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -6578,7 +6578,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -6610,7 +6610,7 @@ public FastForestBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -6912,7 +6912,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -6944,7 +6944,7 @@ public FastForestRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -7384,7 +7384,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -7416,7 +7416,7 @@ public FastTreeBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -7890,7 +7890,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IRan /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -7922,7 +7922,7 @@ public FastTreeRankerPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -8348,7 +8348,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var 
PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -8380,7 +8380,7 @@ public FastTreeRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -8811,7 +8811,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -8843,7 +8843,7 @@ public FastTreeTweedieRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -8955,7 +8955,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -8987,7 +8987,7 @@ public FieldAwareFactorizationMachineBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -9133,7 +9133,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -9165,7 +9165,7 @@ public GeneralizedAdditiveModelBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -9311,7 +9311,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -9343,7 +9343,7 @@ public GeneralizedAdditiveModelRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -9440,7 +9440,7 @@ public sealed class Output : 
Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IClu /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -9472,7 +9472,7 @@ public KMeansPlusPlusClustererPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -9700,7 +9700,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -9732,7 +9732,7 @@ public LightGbmBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -9945,7 +9945,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -9977,7 +9977,7 @@ public LightGbmClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -10190,7 +10190,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IRan /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -10222,7 +10222,7 @@ public LightGbmRankerPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -10435,7 +10435,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -10467,7 +10467,7 @@ public LightGbmRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ 
-10593,7 +10593,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -10625,7 +10625,7 @@ public LinearSvmBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -10768,7 +10768,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -10800,7 +10800,7 @@ public LogisticRegressionBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -10943,7 +10943,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -10975,7 +10975,7 @@ public LogisticRegressionClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11027,7 +11027,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11059,7 +11059,7 @@ public NaiveBayesClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11208,7 +11208,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11240,7 +11240,7 @@ public 
OnlineGradientDescentRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11310,7 +11310,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11342,7 +11342,7 @@ public OrdinaryLeastSquaresRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11421,7 +11421,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IAno /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11453,7 +11453,7 @@ public PcaAnomalyDetectorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11590,7 +11590,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11622,7 +11622,7 @@ public PoissonRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11754,7 +11754,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11786,7 +11786,7 @@ public StochasticDualCoordinateAscentBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -11899,7 +11899,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IMul /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); 
+ public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -11931,7 +11931,7 @@ public StochasticDualCoordinateAscentClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12044,7 +12044,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IReg /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -12076,7 +12076,7 @@ public StochasticDualCoordinateAscentRegressorPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12207,7 +12207,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -12239,7 +12239,7 @@ public StochasticGradientDescentBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12348,7 +12348,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBin /// /// The trained model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } [Obsolete] @@ -12380,7 +12380,7 @@ public SymSgdBinaryClassifierPipelineStep(Output output) } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12438,7 +12438,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -12473,7 +12473,7 @@ public ApproximateBootstrapSamplerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12493,7 +12493,7 @@ public sealed partial 
class BinaryPredictionScoreColumnsRenamer : Microsoft.ML.R /// The predictor model used in scoring /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// Input dataset @@ -12513,7 +12513,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -12548,7 +12548,7 @@ public BinaryPredictionScoreColumnsRenamerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12681,7 +12681,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -12716,7 +12716,7 @@ public BinNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -12881,7 +12881,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -12916,7 +12916,7 @@ public CategoricalHashOneHotVectorizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13079,7 +13079,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13114,7 +13114,7 @@ public CategoricalOneHotVectorizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13215,7 +13215,7 @@ 
public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13250,7 +13250,7 @@ public CharacterTokenizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13323,7 +13323,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13358,7 +13358,7 @@ public ColumnConcatenatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13455,7 +13455,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13490,7 +13490,7 @@ public ColumnCopierPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13548,7 +13548,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13583,7 +13583,7 @@ public ColumnSelectorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13704,7 +13704,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13739,7 +13739,7 @@ public ColumnTypeConverterPipelineStep(Output output) [Obsolete] public Var 
Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13783,7 +13783,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13818,7 +13818,7 @@ public CombinerByContiguousGroupIdPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -13939,7 +13939,7 @@ public sealed class Output /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -13974,7 +13974,7 @@ public ConditionalNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14050,7 +14050,7 @@ public DataCachePipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14076,7 +14076,7 @@ public sealed partial class DatasetScorer /// The predictor model to apply to data /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// Suffix to append to the score columns @@ -14096,7 +14096,7 @@ public sealed class Output /// /// The scoring transform /// - public Var ScoringTransform { get; set; } = new Var(); + public Var ScoringTransform { get; set; } = new Var(); } } @@ -14123,7 +14123,7 @@ public sealed partial class DatasetTransformScorer /// The transform model to apply to data /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); [Obsolete] @@ -14137,7 +14137,7 @@ public sealed class Output /// /// The scoring transform /// - public Var ScoringTransform { get; set; } = new Var(); + public Var ScoringTransform { get; set; } = new 
Var(); } } @@ -14283,7 +14283,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14318,7 +14318,7 @@ public DictionarizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14358,7 +14358,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14393,7 +14393,7 @@ public FeatureCombinerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14438,7 +14438,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14473,7 +14473,7 @@ public FeatureSelectorByCountPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14530,7 +14530,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14565,7 +14565,7 @@ public FeatureSelectorByMutualInformationPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14696,7 +14696,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14731,7 +14731,7 @@ public 
GlobalContrastNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -14881,7 +14881,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -14916,7 +14916,7 @@ public HashConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15013,7 +15013,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15048,7 +15048,7 @@ public ImageGrayscalePipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15151,7 +15151,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15186,7 +15186,7 @@ public ImageLoaderPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15379,7 +15379,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15414,7 +15414,7 @@ public ImagePixelExtractorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15576,7 +15576,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { 
get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15611,7 +15611,7 @@ public ImageResizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15706,7 +15706,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15741,7 +15741,7 @@ public KeyToTextConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15787,7 +15787,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15822,7 +15822,7 @@ public LabelColumnKeyBooleanConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -15931,7 +15931,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -15966,7 +15966,7 @@ public LabelIndicatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16006,7 +16006,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16041,7 +16041,7 @@ public LabelToFloatConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16281,7 +16281,7 @@ 
public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16316,7 +16316,7 @@ public LightLdaPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16431,7 +16431,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16466,7 +16466,7 @@ public LogMeanVarianceNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16594,7 +16594,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16629,7 +16629,7 @@ public LpNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16649,13 +16649,13 @@ public sealed partial class ManyHeterogeneousModelCombiner /// Transform model /// [Obsolete] - public ArrayVar TransformModels { get; set; } = new ArrayVar(); + public ArrayVar TransformModels { get; set; } = new ArrayVar(); /// /// Predictor model /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); [Obsolete] @@ -16664,7 +16664,7 @@ public sealed class Output /// /// Predictor model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -16763,7 +16763,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { 
get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16798,7 +16798,7 @@ public MeanVarianceNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -16890,7 +16890,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -16925,7 +16925,7 @@ public MinMaxNormalizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17066,7 +17066,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17101,7 +17101,7 @@ public MissingValueHandlerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17197,7 +17197,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17232,7 +17232,7 @@ public MissingValueIndicatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17328,7 +17328,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17363,7 +17363,7 @@ public MissingValuesDropperPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17408,7 +17408,7 @@ 
public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17443,7 +17443,7 @@ public MissingValuesRowDropperPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17579,7 +17579,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17614,7 +17614,7 @@ public MissingValueSubstitutorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17634,7 +17634,7 @@ public sealed partial class ModelCombiner /// Input models /// [Obsolete] - public ArrayVar Models { get; set; } = new ArrayVar(); + public ArrayVar Models { get; set; } = new ArrayVar(); [Obsolete] @@ -17643,7 +17643,7 @@ public sealed class Output /// /// Combined model /// - public Var OutputModel { get; set; } = new Var(); + public Var OutputModel { get; set; } = new Var(); } } @@ -17807,7 +17807,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17842,7 +17842,7 @@ public NGramTranslatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17876,7 +17876,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17911,7 +17911,7 @@ public NoOperationPipelineStep(Output output) [Obsolete] public Var Data 
{ get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -17950,7 +17950,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -17985,7 +17985,7 @@ public OptionalColumnCreatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18141,7 +18141,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18176,7 +18176,7 @@ public PcaCalculatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18216,7 +18216,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18251,7 +18251,7 @@ public PredictedLabelColumnOriginalValueConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18326,7 +18326,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18361,7 +18361,7 @@ public RandomNumberGeneratorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18431,7 +18431,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var 
Model { get; set; } = new Var(); } [Obsolete] @@ -18466,7 +18466,7 @@ public RowRangeFilterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18512,7 +18512,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18547,7 +18547,7 @@ public RowSkipAndTakeFilterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18587,7 +18587,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18622,7 +18622,7 @@ public RowSkipFilterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18662,7 +18662,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18697,7 +18697,7 @@ public RowTakeFilterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18737,7 +18737,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18772,7 +18772,7 @@ public ScoreColumnSelectorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18792,7 +18792,7 @@ public sealed partial class Scorer /// The predictor model 
to turn into a transform /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); [Obsolete] @@ -18806,7 +18806,7 @@ public sealed class Output /// /// The scoring transform /// - public Var ScoringTransform { get; set; } = new Var(); + public Var ScoringTransform { get; set; } = new Var(); } } @@ -18860,7 +18860,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18895,7 +18895,7 @@ public SegregatorPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -18940,7 +18940,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -18975,7 +18975,7 @@ public SentimentAnalyzerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -19098,7 +19098,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -19133,7 +19133,7 @@ public TensorFlowScorerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -19323,7 +19323,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -19358,7 +19358,7 @@ public TextFeaturizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var 
Model { get; } + public Var Model { get; } } } } @@ -19461,7 +19461,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -19496,7 +19496,7 @@ public TextToKeyConverterPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -19573,7 +19573,7 @@ public sealed partial class TreeLeafFeaturizer : Microsoft.ML.Runtime.EntryPoint /// Trainer to use /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); /// /// Input dataset @@ -19593,7 +19593,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -19628,7 +19628,7 @@ public TreeLeafFeaturizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -19648,13 +19648,13 @@ public sealed partial class TwoHeterogeneousModelCombiner /// Transform model /// [Obsolete] - public Var TransformModel { get; set; } = new Var(); + public Var TransformModel { get; set; } = new Var(); /// /// Predictor model /// [Obsolete] - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); [Obsolete] @@ -19663,7 +19663,7 @@ public sealed class Output /// /// Predictor model /// - public Var PredictorModel { get; set; } = new Var(); + public Var PredictorModel { get; set; } = new Var(); } } @@ -19869,7 +19869,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -19904,7 
+19904,7 @@ public VectorToImagePipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -20027,7 +20027,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -20062,7 +20062,7 @@ public WordEmbeddingsPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } @@ -20170,7 +20170,7 @@ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITra /// /// Transform model /// - public Var Model { get; set; } = new Var(); + public Var Model { get; set; } = new Var(); } [Obsolete] @@ -20205,7 +20205,7 @@ public WordTokenizerPipelineStep(Output output) [Obsolete] public Var Data { get; } [Obsolete] - public Var Model { get; } + public Var Model { get; } } } } diff --git a/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs b/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs index 18b6d2b6c3..ae0b7cf9ad 100644 --- a/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs +++ b/src/Microsoft.ML.Legacy/Data/CollectionDataSource.cs @@ -99,7 +99,7 @@ public CollectionDataSourcePipelineStep(Var data) } public Var Data { get; } - public Var Model => null; + public Var Model => null; } } } diff --git a/src/Microsoft.ML.Legacy/ILearningPipelineItem.cs b/src/Microsoft.ML.Legacy/ILearningPipelineItem.cs index 0e3df2aea6..f0b3019c87 100644 --- a/src/Microsoft.ML.Legacy/ILearningPipelineItem.cs +++ b/src/Microsoft.ML.Legacy/ILearningPipelineItem.cs @@ -47,12 +47,12 @@ public interface ILearningPipelineStep public interface ILearningPipelineDataStep : ILearningPipelineStep { Var Data { get; } - Var Model { get; } + Var Model { get; } } [Obsolete] public interface ILearningPipelinePredictorStep : ILearningPipelineStep { - Var Model { get; } + 
Var Model { get; } } } \ No newline at end of file diff --git a/src/Microsoft.ML.Legacy/LearningPipeline.cs b/src/Microsoft.ML.Legacy/LearningPipeline.cs index 37adedecbc..944fe1b04c 100644 --- a/src/Microsoft.ML.Legacy/LearningPipeline.cs +++ b/src/Microsoft.ML.Legacy/LearningPipeline.cs @@ -17,14 +17,14 @@ namespace Microsoft.ML.Legacy [Obsolete] public sealed class ScorerPipelineStep : ILearningPipelineDataStep { - public ScorerPipelineStep(Var data, Var model) + public ScorerPipelineStep(Var data, Var model) { Data = data; Model = model; } public Var Data { get; } - public Var Model { get; } + public Var Model { get; } } /// @@ -167,8 +167,8 @@ public PredictionModel Train() Experiment experiment = environment.CreateExperiment(); ILearningPipelineStep step = null; List loaders = new List(); - List> transformModels = new List>(); - Var lastTransformModel = null; + List> transformModels = new List>(); + Var lastTransformModel = null; foreach (ILearningPipelineItem currentItem in this) { @@ -183,13 +183,13 @@ public PredictionModel Train() if (lastTransformModel != null) transformModels.Insert(0, lastTransformModel); - Var predictorModel; + Var predictorModel; if (transformModels.Count != 0) { var localModelInput = new Transforms.ManyHeterogeneousModelCombiner { PredictorModel = predictorDataStep.Model, - TransformModels = new ArrayVar(transformModels.ToArray()) + TransformModels = new ArrayVar(transformModels.ToArray()) }; var localModelOutput = experiment.Add(localModelInput); predictorModel = localModelOutput.PredictorModel; @@ -216,7 +216,7 @@ public PredictionModel Train() var modelInput = new Transforms.ModelCombiner { - Models = new ArrayVar(transformModels.ToArray()) + Models = new ArrayVar(transformModels.ToArray()) }; var modelOutput = experiment.Add(modelInput); @@ -230,7 +230,7 @@ public PredictionModel Train() } experiment.Run(); - ITransformModel model = experiment.GetOutput(lastTransformModel); + TransformModel model = 
experiment.GetOutput(lastTransformModel); BatchPredictionEngine predictor; using (var memoryStream = new MemoryStream()) { diff --git a/src/Microsoft.ML.Legacy/Models/CrossValidator.cs b/src/Microsoft.ML.Legacy/Models/CrossValidator.cs index 81ac48a733..78a69d7a96 100644 --- a/src/Microsoft.ML.Legacy/Models/CrossValidator.cs +++ b/src/Microsoft.ML.Legacy/Models/CrossValidator.cs @@ -33,10 +33,10 @@ public CrossValidationOutput CrossValidate(Lea Experiment subGraph = environment.CreateExperiment(); ILearningPipelineStep step = null; List loaders = new List(); - List> transformModels = new List>(); - Var lastTransformModel = null; + List> transformModels = new List>(); + Var lastTransformModel = null; Var firstPipelineDataStep = null; - Var firstModel = null; + Var firstModel = null; ILearningPipelineItem firstTransform = null; foreach (ILearningPipelineItem currentItem in pipeline) { @@ -62,13 +62,13 @@ public CrossValidationOutput CrossValidate(Lea if (lastTransformModel != null) transformModels.Insert(0, lastTransformModel); - Var predictorModel; + Var predictorModel; if (transformModels.Count != 0) { var localModelInput = new Transforms.ManyHeterogeneousModelCombiner { PredictorModel = predictorDataStep.Model, - TransformModels = new ArrayVar(transformModels.ToArray()) + TransformModels = new ArrayVar(transformModels.ToArray()) }; var localModelOutput = subGraph.Add(localModelInput); predictorModel = localModelOutput.PredictorModel; @@ -96,7 +96,7 @@ public CrossValidationOutput CrossValidate(Lea var modelInput = new Transforms.ModelCombiner { - Models = new ArrayVar(transformModels.ToArray()) + Models = new ArrayVar(transformModels.ToArray()) }; var modelOutput = subGraph.Add(modelInput); @@ -159,7 +159,7 @@ public CrossValidationOutput CrossValidate(Lea throw Contracts.Except($"{Kind.ToString()} is not supported at the moment."); } - ITransformModel model = experiment.GetOutput(crossValidateOutput.TransformModel[Index]); + TransformModel model = 
experiment.GetOutput(crossValidateOutput.TransformModel[Index]); BatchPredictionEngine predictor; using (var memoryStream = new MemoryStream()) { diff --git a/src/Microsoft.ML.Legacy/Models/OneVersusAll.cs b/src/Microsoft.ML.Legacy/Models/OneVersusAll.cs index e1064506a0..82a58ab452 100644 --- a/src/Microsoft.ML.Legacy/Models/OneVersusAll.cs +++ b/src/Microsoft.ML.Legacy/Models/OneVersusAll.cs @@ -82,7 +82,7 @@ public OvaPipelineStep(Output output) Model = output.PredictorModel; } - public Var Model { get; } + public Var Model { get; } } } } diff --git a/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs b/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs index e547040cb8..3837283e94 100644 --- a/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs +++ b/src/Microsoft.ML.Legacy/Models/TrainTestEvaluator.cs @@ -36,10 +36,10 @@ public TrainTestEvaluatorOutput TrainTestEvaluate loaders = new List(); - List> transformModels = new List>(); - Var lastTransformModel = null; + List> transformModels = new List>(); + Var lastTransformModel = null; Var firstPipelineDataStep = null; - Var firstModel = null; + Var firstModel = null; ILearningPipelineItem firstTransform = null; foreach (ILearningPipelineItem currentItem in pipeline) { @@ -65,13 +65,13 @@ public TrainTestEvaluatorOutput TrainTestEvaluate predictorModel; + Var predictorModel; if (transformModels.Count != 0) { var localModelInput = new Transforms.ManyHeterogeneousModelCombiner { PredictorModel = predictorDataStep.Model, - TransformModels = new ArrayVar(transformModels.ToArray()) + TransformModels = new ArrayVar(transformModels.ToArray()) }; var localModelOutput = subGraph.Add(localModelInput); predictorModel = localModelOutput.PredictorModel; @@ -99,7 +99,7 @@ public TrainTestEvaluatorOutput TrainTestEvaluate(transformModels.ToArray()) + Models = new ArrayVar(transformModels.ToArray()) }; var modelOutput = subGraph.Add(modelInput); @@ -157,7 +157,7 @@ public TrainTestEvaluatorOutput TrainTestEvaluate 
predictor; using (var memoryStream = new MemoryStream()) { diff --git a/src/Microsoft.ML.Legacy/PredictionModel.cs b/src/Microsoft.ML.Legacy/PredictionModel.cs index 1c1fcf97aa..d13980e3a7 100644 --- a/src/Microsoft.ML.Legacy/PredictionModel.cs +++ b/src/Microsoft.ML.Legacy/PredictionModel.cs @@ -21,7 +21,7 @@ internal PredictionModel(Stream stream) { _env = new MLContext(); AssemblyRegistration.RegisterAssemblies(_env); - PredictorModel = new TransformModel(_env, stream); + PredictorModel = new TransformModelImpl(_env, stream); } internal TransformModel PredictorModel { get; } diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs index d2e61d2db0..4dd8f08fca 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs @@ -31,7 +31,7 @@ public sealed class SubGraphInput public sealed class SubGraphOutput { [Argument(ArgumentType.Required, HelpText = "The model", SortOrder = 1)] - public Var Model; + public Var Model; } public sealed class Arguments @@ -72,7 +72,7 @@ public sealed class Arguments public sealed class Output { [TlcModule.Output(Desc = "The trained model", SortOrder = 1)] - public IPredictorModel[] PredictorModel; + public PredictorModel[] PredictorModel; [TlcModule.Output(Desc = "Warning dataset", SortOrder = 2)] public IDataView[] Warnings; @@ -105,7 +105,7 @@ public static CommonOutputs.MacroOutput CrossValidateBinary( var cvSplitOutput = exp.Add(cvSplit); subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes())); - var predModelVars = new Var[input.NumFolds]; + var predModelVars = new Var[input.NumFolds]; var warningsVars = new Var[input.NumFolds]; var overallMetricsVars = new Var[input.NumFolds]; var instanceMetricsVars = new Var[input.NumFolds]; @@ -131,7 +131,7 @@ public static CommonOutputs.MacroOutput 
CrossValidateBinary( { VarName = mapping[input.Inputs.Data.VarName] }; - args.Outputs.Model = new Var + args.Outputs.Model = new Var { VarName = mapping[input.Outputs.Model.VarName] }; @@ -146,7 +146,7 @@ public static CommonOutputs.MacroOutput CrossValidateBinary( inputBindingMap.Add(nameof(args.TestingData), new List { testingData }); inputMap.Add(testingData, new ArrayIndexVariableBinding(cvSplitOutput.TestData.VarName, k)); var outputMap = new Dictionary(); - var predModelVar = new Var(); + var predModelVar = new Var(); outputMap.Add(nameof(TrainTestBinaryMacro.Output.PredictorModel), predModelVar.VarName); predModelVars[k] = predModelVar; var warningVar = new Var(); @@ -168,7 +168,7 @@ public static CommonOutputs.MacroOutput CrossValidateBinary( var outModels = new Legacy.Data.PredictorModelArrayConverter { - Model = new ArrayVar(predModelVars) + Model = new ArrayVar(predModelVars) }; var outModelsOutput = new Legacy.Data.PredictorModelArrayConverter.Output(); outModelsOutput.OutputModel.VarName = node.GetOutputVariableName(nameof(Output.PredictorModel)); @@ -214,39 +214,39 @@ public static CommonOutputs.MacroOutput CrossValidateBinary( public sealed class ArrayIPredictorModelInput { [Argument(ArgumentType.Required, HelpText = "The models", SortOrder = 1)] - public IPredictorModel[] Model; + public PredictorModel[] Model; } - public sealed class ArrayIPredictorModelOutput + public sealed class ArrayPredictorModelOutput { [TlcModule.Output(Desc = "The model array", SortOrder = 1)] - public IPredictorModel[] OutputModel; + public PredictorModel[] OutputModel; } - [TlcModule.EntryPoint(Desc = "Create an array variable of IPredictorModel", Name = "Data.PredictorModelArrayConverter")] - public static ArrayIPredictorModelOutput MakeArray(IHostEnvironment env, ArrayIPredictorModelInput input) + [TlcModule.EntryPoint(Desc = "Create an array variable of " + nameof(PredictorModel), Name = "Data.PredictorModelArrayConverter")] + public static ArrayPredictorModelOutput 
MakeArray(IHostEnvironment env, ArrayIPredictorModelInput input) { - var result = new ArrayIPredictorModelOutput + var result = new ArrayPredictorModelOutput { OutputModel = input.Model }; return result; } - public sealed class ArrayITransformModelInput + public sealed class ArrayTransformModelInput { [Argument(ArgumentType.Required, HelpText = "The models", SortOrder = 1)] - public ITransformModel[] TransformModel; + public TransformModel[] TransformModel; } public sealed class ArrayITransformModelOutput { [TlcModule.Output(Desc = "The model array", SortOrder = 1)] - public ITransformModel[] OutputModel; + public TransformModel[] OutputModel; } - [TlcModule.EntryPoint(Desc = "Create an array variable of ITransformModel", Name = "Data.TransformModelArrayConverter")] - public static ArrayITransformModelOutput MakeArray(IHostEnvironment env, ArrayITransformModelInput input) + [TlcModule.EntryPoint(Desc = "Create an array variable of " + nameof(TransformModel), Name = "Data.TransformModelArrayConverter")] + public static ArrayITransformModelOutput MakeArray(IHostEnvironment env, ArrayTransformModelInput input) { var result = new ArrayITransformModelOutput { @@ -267,7 +267,7 @@ public sealed class ArrayIDataViewOutput public IDataView[] OutputData; } - [TlcModule.EntryPoint(Desc = "Create an array variable of IDataView", Name = "Data.IDataViewArrayConverter")] + [TlcModule.EntryPoint(Desc = "Create an array variable of " + nameof(IDataView), Name = "Data.IDataViewArrayConverter")] public static ArrayIDataViewOutput MakeArray(IHostEnvironment env, ArrayIDataViewInput input) { var result = new ArrayIDataViewOutput diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs index 9522b8aaa5..e037cf5245 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs @@ -34,10 +34,10 @@ public 
sealed class SubGraphInput public sealed class SubGraphOutput { [Argument(ArgumentType.AtMostOnce, HelpText = "The predictor model", SortOrder = 1)] - public Var PredictorModel; + public Var PredictorModel; [Argument(ArgumentType.AtMostOnce, HelpText = "The transform model", SortOrder = 2)] - public Var TransformModel; + public Var TransformModel; } public sealed class Arguments @@ -51,7 +51,7 @@ public sealed class Arguments [TlcModule.OptionalInput] [Argument(ArgumentType.AtMostOnce, HelpText = "The transform model from the pipeline before this command. " + "It gets included in the Output.PredictorModel.", SortOrder = 2)] - public ITransformModel TransformModel; + public TransformModel TransformModel; // This is the subgraph that describes how to train a model for each fold. It should // accept one IDataView input and output one IPredictorModel output (see Inputs and Outputs). @@ -101,11 +101,11 @@ public sealed class Output { [TlcModule.Output(Desc = "The final model including the trained predictor model and the model from the transforms, " + "provided as the Input.TransformModel.", SortOrder = 1)] - public IPredictorModel[] PredictorModel; + public PredictorModel[] PredictorModel; [TlcModule.Output(Desc = "The final model including the trained predictor model and the model from the transforms, " + "provided as the Input.TransformModel.", SortOrder = 2)] - public ITransformModel[] TransformModel; + public TransformModel[] TransformModel; [TlcModule.Output(Desc = "Warning dataset", SortOrder = 3)] public IDataView Warnings; @@ -190,9 +190,9 @@ public static CommonOutputs.MacroOutput CrossValidate( var cvSplitOutput = exp.Add(cvSplit); subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes())); - var predModelVars = new Var[input.NumFolds]; - var transformModelVars = new Var[input.NumFolds]; - var inputTransformModelVars = new Var[input.NumFolds]; + var predModelVars = new Var[input.NumFolds]; + var transformModelVars = new 
Var[input.NumFolds]; + var inputTransformModelVars = new Var[input.NumFolds]; var warningsVars = new Var[input.NumFolds]; var overallMetricsVars = new Var[input.NumFolds]; var instanceMetricsVars = new Var[input.NumFolds]; @@ -221,7 +221,7 @@ public static CommonOutputs.MacroOutput CrossValidate( }; if (transformModelVarName != null) - args.TransformModel = new Var { VarName = transformModelVarName.VariableName }; + args.TransformModel = new Var { VarName = transformModelVarName.VariableName }; args.Inputs.Data = new Var { @@ -230,7 +230,7 @@ public static CommonOutputs.MacroOutput CrossValidate( if (input.Outputs.PredictorModel != null && mapping.ContainsKey(input.Outputs.PredictorModel.VarName)) { - args.Outputs.PredictorModel = new Var + args.Outputs.PredictorModel = new Var { VarName = mapping[input.Outputs.PredictorModel.VarName] }; @@ -240,7 +240,7 @@ public static CommonOutputs.MacroOutput CrossValidate( if (input.Outputs.TransformModel != null && mapping.ContainsKey(input.Outputs.TransformModel.VarName)) { - args.Outputs.TransformModel = new Var + args.Outputs.TransformModel = new Var { VarName = mapping[input.Outputs.TransformModel.VarName] }; @@ -261,8 +261,8 @@ public static CommonOutputs.MacroOutput CrossValidate( inputBindingMap.Add(nameof(args.TestingData), new List { testingData }); inputMap.Add(testingData, new ArrayIndexVariableBinding(cvSplitOutput.TestData.VarName, k)); var outputMap = new Dictionary(); - var transformModelVar = new Var(); - var predModelVar = new Var(); + var transformModelVar = new Var(); + var predModelVar = new Var(); if (input.Outputs.PredictorModel == null) { outputMap.Add(nameof(TrainTestMacro.Output.TransformModel), transformModelVar.VarName); @@ -272,9 +272,9 @@ public static CommonOutputs.MacroOutput CrossValidate( { var modelCombine = new Legacy.Transforms.ModelCombiner { - Models = new ArrayVar( - new Var[] { - new Var { VarName = transformModelVarName.VariableName }, + Models = new ArrayVar( + new Var[] { + new Var { 
VarName = transformModelVarName.VariableName }, transformModelVar } ) }; @@ -329,7 +329,7 @@ public static CommonOutputs.MacroOutput CrossValidate( { var outModels = new Legacy.Data.TransformModelArrayConverter { - TransformModel = new ArrayVar(transformModelVars) + TransformModel = new ArrayVar(transformModelVars) }; var outModelsOutput = new Legacy.Data.TransformModelArrayConverter.Output(); outModelsOutput.OutputModel.VarName = node.GetOutputVariableName(nameof(Output.TransformModel)); @@ -339,7 +339,7 @@ public static CommonOutputs.MacroOutput CrossValidate( { var outModels = new Legacy.Data.PredictorModelArrayConverter { - Model = new ArrayVar(predModelVars) + Model = new ArrayVar(predModelVars) }; var outModelsOutput = new Legacy.Data.PredictorModelArrayConverter.Output(); outModelsOutput.OutputModel.VarName = node.GetOutputVariableName(nameof(Output.PredictorModel)); diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs index 6735cb43e6..179538e94e 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs @@ -80,7 +80,7 @@ public static CommonOutputs.TransformOutput PrepareFeatures(IHostEnvironment env new[] { new ColumnConcatenatingTransformer.TaggedColumn() { Name = nameFeat, Source = concatNames.ToArray() } } }, viewTrain); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, viewTrain, input.Data), OutputData = viewTrain }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, viewTrain, input.Data), OutputData = viewTrain }; } } @@ -239,7 +239,7 @@ public static CommonOutputs.TransformOutput PrepareClassificationLabel(IHostEnvi if (labelType.IsKey || labelType.IsBool) { var nop = NopTransform.CreateIfNeeded(env, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, nop, input.Data), OutputData = 
nop }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, nop, input.Data), OutputData = nop }; } var args = new ValueToKeyMappingTransformer.Arguments() @@ -256,7 +256,7 @@ public static CommonOutputs.TransformOutput PrepareClassificationLabel(IHostEnvi } }; var xf = ValueToKeyMappingTransformer.Create(host, args, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.PredictedLabelColumnOriginalValueConverter", Desc = "Transforms a predicted label column to its original values, unless it is of type bool.", UserName = "Convert Predicted Label")] @@ -274,11 +274,11 @@ public static CommonOutputs.TransformOutput ConvertPredictedLabel(IHostEnvironme if (predictedLabelType.IsNumber || predictedLabelType.IsBool) { var nop = NopTransform.CreateIfNeeded(env, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, nop, input.Data), OutputData = nop }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, nop, input.Data), OutputData = nop }; } var xf = new KeyToValueMappingTransformer(host, input.PredictedLabelColumn).Transform(input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.LabelToFloatConverter", Desc = "Transforms the label to float to make it suitable for regression.", UserName = "Prepare Regression Label")] @@ -296,7 +296,7 @@ public static CommonOutputs.TransformOutput PrepareRegressionLabel(IHostEnvironm if (labelType == NumberType.R4 || !labelType.IsNumber) { var nop = NopTransform.CreateIfNeeded(env, input.Data); 
- return new CommonOutputs.TransformOutput { Model = new TransformModel(env, nop, input.Data), OutputData = nop }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, nop, input.Data), OutputData = nop }; } var args = new TypeConvertingTransformer.Arguments() @@ -312,7 +312,7 @@ public static CommonOutputs.TransformOutput PrepareRegressionLabel(IHostEnvironm } }; var xf = new TypeConvertingTransformer(host, new TypeConvertingTransformer.ColumnInfo(input.LabelColumn, input.LabelColumn, DataKind.R4)).Transform(input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs index 1f30644fc2..5c8cd4504e 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/ExecuteGraphCommand.cs @@ -115,15 +115,15 @@ public void SetInputFromPath(GraphRunner runner, string varName, string path, Tl runner.SetInput(varName, loader); break; case TlcModule.DataKind.PredictorModel: - PredictorModel pm; + PredictorModelImpl pm; using (var fs = File.OpenRead(path)) - pm = new PredictorModel(_host, fs); + pm = new PredictorModelImpl(_host, fs); runner.SetInput(varName, pm); break; case TlcModule.DataKind.TransformModel: - TransformModel tm; + TransformModelImpl tm; using (var fs = File.OpenRead(path)) - tm = new TransformModel(_host, fs); + tm = new TransformModelImpl(_host, fs); runner.SetInput(varName, tm); break; default: @@ -156,11 +156,11 @@ public void GetOutputToPath(GraphRunner runner, string varName, string path, Tlc } break; case TlcModule.DataKind.PredictorModel: - var pm = runner.GetOutput(varName); + var pm = 
runner.GetOutput(varName); SavePredictorModel(pm, path); break; case TlcModule.DataKind.TransformModel: - var tm = runner.GetOutput(varName); + var tm = runner.GetOutput(varName); using (var handle = _host.CreateOutputFile(path)) using (var fs = handle.CreateWriteStream()) tm.Save(_host, fs); @@ -168,17 +168,17 @@ public void GetOutputToPath(GraphRunner runner, string varName, string path, Tlc case TlcModule.DataKind.Array: string partialPath = path.Substring(0, path.Length - extension.Length); - var ipmArray = runner.GetOutputOrDefault(varName); + var ipmArray = runner.GetOutputOrDefault(varName); if (ipmArray != null && !ipmArray.GetType().IsValueType) { - SaveArrayToFile(ipmArray.ToList(), TlcModule.DataKind.PredictorModel, partialPath, extension); + SaveArrayToFile(ipmArray, partialPath, extension); break; } var idvArray = runner.GetOutputOrDefault(varName); if (idvArray != null && !idvArray.GetType().IsValueType) { - SaveArrayToFile(idvArray.ToList(), TlcModule.DataKind.DataView, partialPath, extension); + SaveArrayToFile(idvArray, partialPath, extension); break; } goto default; @@ -188,28 +188,28 @@ public void GetOutputToPath(GraphRunner runner, string varName, string path, Tlc } - private void SaveArrayToFile(List array, TlcModule.DataKind kind, string partialPath, string extension) - where T : class + private void SaveArrayToFile(PredictorModel[] array, string partialPath, string extension) { - for (int i = 0; i < array.Count; i++) + for (int i = 0; i < array.Length; i++) { string path = $"{partialPath}_{i}{extension}"; - switch (kind) - { - case TlcModule.DataKind.DataView: - SaveDataView((IDataView)array[i], path, extension); - break; - case TlcModule.DataKind.PredictorModel: - SavePredictorModel((IPredictorModel)array[i], path); - break; - } + SavePredictorModel(array[i], path); + } + } + + private void SaveArrayToFile(IDataView[] array, string partialPath, string extension) + { + for (int i = 0; i < array.Length; i++) + { + string path = 
$"{partialPath}_{i}{extension}"; + SaveDataView(array[i], path, extension); } } /// /// Saves the PredictorModel to the given path /// - private void SavePredictorModel(IPredictorModel pm, string path) + private void SavePredictorModel(PredictorModel pm, string path) { Contracts.CheckValue(pm, nameof(pm)); diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs index 2a5c20e3c9..d5f0f45a7f 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/JsonUtils/JsonManifestUtils.cs @@ -103,7 +103,7 @@ private static JObject BuildComponentManifest(IExceptionContext ectx, ComponentC return result; } - public static JObject BuildEntryPointManifest(IExceptionContext ectx, ComponentCatalog.EntryPointInfo entryPointInfo, ComponentCatalog catalog) + private static JObject BuildEntryPointManifest(IExceptionContext ectx, ComponentCatalog.EntryPointInfo entryPointInfo, ComponentCatalog catalog) { Contracts.CheckValueOrNull(ectx); ectx.CheckValue(entryPointInfo, nameof(entryPointInfo)); @@ -366,12 +366,6 @@ private static JToken BuildTypeToken(IExceptionContext ectx, FieldInfo fieldInfo jo[FieldNames.Kind] = typeEnum.ToString(); jo[FieldNames.ComponentKind] = kind; return jo; - case TlcModule.DataKind.State: - jo = new JObject(); - var typeString = $"{type}".Replace("Microsoft.ML.Runtime.Interfaces.", ""); - jo[FieldNames.Kind] = "C# Object"; - jo[FieldNames.ItemType] = typeString; - return jo; default: ectx.Assert(false); throw ectx.ExceptNotSupp(); diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs index f92cca99fc..7675a15925 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs @@ -19,43 +19,43 @@ public static 
class ModelOperations public sealed class CombineTransformModelsInput { [Argument(ArgumentType.Multiple, HelpText = "Input models", SortOrder = 1)] - public ITransformModel[] Models; + public TransformModel[] Models; } public sealed class CombineTransformModelsOutput { [TlcModule.Output(Desc = "Combined model", SortOrder = 1)] - public ITransformModel OutputModel; + public TransformModel OutputModel; } public sealed class PredictorModelInput { [Argument(ArgumentType.Multiple | ArgumentType.Required, HelpText = "Transform model", SortOrder = 1)] - public ITransformModel[] TransformModels; + public TransformModel[] TransformModels; [Argument(ArgumentType.Required, HelpText = "Predictor model", SortOrder = 2)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } public sealed class SimplePredictorModelInput { [Argument(ArgumentType.Required, HelpText = "Transform model", SortOrder = 1)] - public ITransformModel TransformModel; + public TransformModel TransformModel; [Argument(ArgumentType.Required, HelpText = "Predictor model", SortOrder = 2)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } public sealed class PredictorModelOutput { [TlcModule.Output(Desc = "Predictor model", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } public sealed class CombineOvaPredictorModelsInput : LearnerInputBaseWithWeight { [Argument(ArgumentType.Multiple, HelpText = "Input models", SortOrder = 1)] - public IPredictorModel[] ModelArray; + public PredictorModel[] ModelArray; [Argument(ArgumentType.AtMostOnce, HelpText = "Use probabilities from learners instead of raw values.", SortOrder = 2)] public bool UseProbabilities = true; @@ -64,13 +64,13 @@ public sealed class CombineOvaPredictorModelsInput : LearnerInputBaseWithWeight public sealed class CombinePredictorModelsInput { [Argument(ArgumentType.Multiple, HelpText = "Input models", SortOrder = 1)] - public 
IPredictorModel[] Models; + public PredictorModel[] Models; } public sealed class ApplyTransformModelInput : TransformInputBase { [Argument(ArgumentType.Required, HelpText = "Transform model", SortOrder = 2)] - public ITransformModel TransformModel; + public TransformModel TransformModel; } public sealed class ApplyTransformModelOutput @@ -88,7 +88,7 @@ public static CombineTransformModelsOutput CombineTransformModels(IHostEnvironme EntryPointUtils.CheckInputArgs(host, input); host.CheckNonEmpty(input.Models, nameof(input.Models)); - ITransformModel model = input.Models[input.Models.Length - 1]; + TransformModel model = input.Models[input.Models.Length - 1]; for (int i = input.Models.Length - 2; i >= 0; i--) model = model.Apply(env, input.Models[i]); @@ -104,7 +104,7 @@ public static PredictorModelOutput CombineModels(IHostEnvironment env, Predictor EntryPointUtils.CheckInputArgs(host, input); host.CheckNonEmpty(input.TransformModels, nameof(input.TransformModels)); - ITransformModel model = input.TransformModels[input.TransformModels.Length - 1]; + TransformModel model = input.TransformModels[input.TransformModels.Length - 1]; for (int i = input.TransformModels.Length - 2; i >= 0; i--) model = model.Apply(env, input.TransformModels[i]); return new PredictorModelOutput() { PredictorModel = input.PredictorModel.Apply(env, model) }; @@ -153,7 +153,7 @@ public static PredictorModelOutput CombineOvaModels(IHostEnvironment env, Combin return new PredictorModelOutput { - PredictorModel = new PredictorModel(env, data, input.TrainingData, + PredictorModel = new PredictorModelImpl(env, data, input.TrainingData, OvaPredictor.Create(host, input.UseProbabilities, input.ModelArray.Select(p => p.Predictor as IPredictorProducing).ToArray())) }; diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs index c4a4dcc88c..2456ab3a2f 100644 --- 
a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs @@ -26,7 +26,7 @@ public static class OneVersusAllMacro public sealed class SubGraphOutput { [Argument(ArgumentType.Required, HelpText = "The predictor model for the subgraph exemplar.", SortOrder = 1)] - public Var Model; + public Var Model; } public sealed class Arguments : LearnerInputBaseWithWeight @@ -46,10 +46,10 @@ public sealed class Arguments : LearnerInputBaseWithWeight public sealed class Output { [TlcModule.Output(Desc = "The trained multiclass model", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; } - private static Tuple, Var> ProcessClass(IHostEnvironment env, int k, string label, Arguments input, EntryPointNode node) + private static Tuple, Var> ProcessClass(IHostEnvironment env, int k, string label, Arguments input, EntryPointNode node) { var macroNodes = new List(); @@ -80,7 +80,7 @@ private static Tuple, Var> ProcessClass(IH // Rename all the variables such that they don't conflict with the ones in the outer run context. var mapping = new Dictionary(); bool foundOutput = false; - Var predModelVar = null; + Var predModelVar = null; foreach (var entryPointNode in subGraphNodes) { // Rename variables in input/output maps, and in subgraph context. @@ -91,7 +91,7 @@ private static Tuple, Var> ProcessClass(IH // Grab a hold of output model from this subgraph. if (entryPointNode.GetOutputVariableName("PredictorModel") is string mvn) { - predModelVar = new Var { VarName = mvn }; + predModelVar = new Var { VarName = mvn }; foundOutput = true; } @@ -113,7 +113,7 @@ private static Tuple, Var> ProcessClass(IH // Add training subgraph to our context. 
macroNodes.AddRange(subGraphNodes); - return new Tuple, Var>(macroNodes, predModelVar); + return new Tuple, Var>(macroNodes, predModelVar); } private static int GetNumberOfClasses(IHostEnvironment env, Arguments input, out string label) @@ -151,7 +151,7 @@ public static CommonOutputs.MacroOutput OneVersusAll( env.Assert(input.Nodes.Count > 0); var numClasses = GetNumberOfClasses(env, input, out var label); - var predModelVars = new Var[numClasses]; + var predModelVars = new Var[numClasses]; // This will be the final resulting list of nodes that is returned from the macro. var macroNodes = new List(); @@ -170,7 +170,7 @@ public static CommonOutputs.MacroOutput OneVersusAll( var macroExperiment = new Experiment(env); var combinerNode = new Legacy.Models.OvaModelCombiner { - ModelArray = new ArrayVar(predModelVars), + ModelArray = new ArrayVar(predModelVars), TrainingData = new Var { VarName = node.GetInputVariable(nameof(input.TrainingData)).VariableName }, Caching = (Legacy.Models.CachingOptions)input.Caching, FeatureColumn = input.FeatureColumn, @@ -184,7 +184,7 @@ public static CommonOutputs.MacroOutput OneVersusAll( throw new Exception("Cannot find OVA model output."); // Map macro's output back to OVA combiner (so OVA combiner will set the value on our output variable). - var combinerOutput = new Legacy.Models.OvaModelCombiner.Output { PredictorModel = new Var { VarName = outVariableName } }; + var combinerOutput = new Legacy.Models.OvaModelCombiner.Output { PredictorModel = new Var { VarName = outVariableName } }; // Add to experiment (must be done AFTER we assign variable name to output). 
macroExperiment.Add(combinerNode, combinerOutput); diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs index d02f7f6b99..4fa88c85dc 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs @@ -27,7 +27,7 @@ public sealed class SubGraphInput public sealed class SubGraphOutput { [Argument(ArgumentType.Required, HelpText = "The model", SortOrder = 1)] - public Var Model; + public Var Model; } public sealed class Arguments @@ -53,7 +53,7 @@ public sealed class Arguments public sealed class Output { [TlcModule.Output(Desc = "The trained model", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; [TlcModule.Output(Desc = "Warning dataset", SortOrder = 2)] public IDataView Warnings; diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs index c61ea1d268..16571e7403 100644 --- a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs +++ b/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs @@ -27,10 +27,10 @@ public sealed class SubGraphInput public sealed class SubGraphOutput { [Argument(ArgumentType.AtMostOnce, HelpText = "The predictor model", SortOrder = 1)] - public Var PredictorModel; + public Var PredictorModel; [Argument(ArgumentType.AtMostOnce, HelpText = "Transform model", SortOrder = 2)] - public Var TransformModel; + public Var TransformModel; } public sealed class Arguments @@ -45,7 +45,7 @@ public sealed class Arguments [TlcModule.OptionalInput] [Argument(ArgumentType.AtMostOnce, HelpText = "The aggregated transform model from the pipeline before this command, to apply to the test data, and also include in the final model, together with the predictor model.", SortOrder = 3)] - public Var TransformModel = null; + public Var 
TransformModel = null; [Argument(ArgumentType.Required, HelpText = "The training subgraph", SortOrder = 4)] public JArray Nodes; @@ -82,11 +82,11 @@ public sealed class Output { [TlcModule.Output(Desc = "The final model including the trained predictor model and the model from the transforms, " + "provided as the Input.TransformModel.", SortOrder = 1)] - public IPredictorModel PredictorModel; + public PredictorModel PredictorModel; [TlcModule.Output(Desc = "The final model including the trained predictor model and the model from the transforms, " + "provided as the Input.TransformModel.", SortOrder = 2)] - public ITransformModel TransformModel; + public TransformModel TransformModel; [TlcModule.Output(Desc = "Warning dataset", SortOrder = 3)] public IDataView Warnings; @@ -174,10 +174,10 @@ public static CommonOutputs.MacroOutput TrainTest( { var modelCombine = new ML.Legacy.Transforms.ModelCombiner { - Models = new ArrayVar( - new Var[] { - new Var { VarName = transformModelVarName.VariableName }, - new Var { VarName = outputVarName} } + Models = new ArrayVar( + new Var[] { + new Var { VarName = transformModelVarName.VariableName }, + new Var { VarName = outputVarName} } ) }; diff --git a/src/Microsoft.ML.Legacy/Runtime/Experiment/Experiment.cs b/src/Microsoft.ML.Legacy/Runtime/Experiment/Experiment.cs index a51661630d..1069c7194b 100644 --- a/src/Microsoft.ML.Legacy/Runtime/Experiment/Experiment.cs +++ b/src/Microsoft.ML.Legacy/Runtime/Experiment/Experiment.cs @@ -306,12 +306,12 @@ public sealed class EntryPointTransformOutput : CommonOutputs.ITransformOutput /// /// Transform model /// - public Var Model { get; set; } + public Var Model { get; set; } public EntryPointTransformOutput() { OutputData = new Var(); - Model = new Var(); + Model = new Var(); } } @@ -320,11 +320,11 @@ public sealed class EntryPointTrainerOutput : CommonOutputs.ITrainerOutput /// /// The trained model /// - public Var PredictorModel { get; set; } + public Var PredictorModel { get; set; } 
public EntryPointTrainerOutput() { - PredictorModel = new Var(); + PredictorModel = new Var(); } } } diff --git a/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpApiGenerator.cs b/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpApiGenerator.cs index 668b3b4265..997d19ae7b 100644 --- a/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpApiGenerator.cs +++ b/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpApiGenerator.cs @@ -203,8 +203,6 @@ private void GenerateClasses(IndentedTextWriter writer, Type inputType, Componen if (type == typeof(CommonOutputs.IEvaluatorOutput)) continue; var typeEnum = TlcModule.GetDataType(type); - if (typeEnum == TlcModule.DataKind.State) - continue; if (typeEnum != TlcModule.DataKind.Unknown) continue; @@ -502,12 +500,12 @@ private static void GenerateApplyFunction(IndentedTextWriter writer, string clas writer.WriteLine("[Obsolete]"); writer.WriteLine("public Var Data { get; }"); writer.WriteLine("[Obsolete]"); - writer.WriteLine("public Var Model { get; }"); + writer.WriteLine("public Var Model { get; }"); } else { writer.WriteLine("[Obsolete]"); - writer.WriteLine("public Var Model { get; }"); + writer.WriteLine("public Var Model { get; }"); } writer.Outdent(); diff --git a/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpGeneratorUtils.cs b/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpGeneratorUtils.cs index 6903e4ebbc..58a496f2c7 100644 --- a/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpGeneratorUtils.cs +++ b/src/Microsoft.ML.Legacy/Runtime/Internal/Tools/CSharpGeneratorUtils.cs @@ -485,7 +485,7 @@ public static void GenerateLoaderAddInputMethod(IndentedTextWriter writer, strin writer.WriteLine("}"); writer.WriteLineNoTabs(); writer.WriteLine("public Var Data { get; }"); - writer.WriteLine("public Var Model { get; }"); + writer.WriteLine("public Var Model { get; }"); writer.Outdent(); writer.WriteLine("}"); } diff --git a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs 
b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs index 45d55f4bce..b6afe58b5b 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs @@ -137,7 +137,7 @@ private protected override IPredictorWithFeatureWeights CreatePredictor() return new FeatureWeightsCalibratedPredictor(Host, pred, cali); } - protected override void CheckDataValid(IChannel ch, RoleMappedData data) + private protected override void CheckDataValid(IChannel ch, RoleMappedData data) { Host.AssertValue(ch); base.CheckDataValid(ch, data); @@ -149,7 +149,7 @@ protected override void CheckDataValid(IChannel ch, RoleMappedData data) } } - protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) + private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) { Options["objective"] = "binary"; // Add default metric. diff --git a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs index 2216beadb7..68b5875bd3 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmMulticlassTrainer.cs @@ -105,7 +105,7 @@ private protected override OvaPredictor CreatePredictor() return OvaPredictor.Create(Host, predictors); } - protected override void CheckDataValid(IChannel ch, RoleMappedData data) + private protected override void CheckDataValid(IChannel ch, RoleMappedData data) { Host.AssertValue(ch); base.CheckDataValid(ch, data); @@ -117,7 +117,7 @@ protected override void CheckDataValid(IChannel ch, RoleMappedData data) } } - protected override void ConvertNaNLabels(IChannel ch, RoleMappedData data, float[] labels) + private protected override void ConvertNaNLabels(IChannel ch, RoleMappedData data, float[] labels) { // Only initialize one time. 
if (_numClass < 0) @@ -180,7 +180,7 @@ protected override void GetDefaultParameters(IChannel ch, int numRow, bool hasCa } } - protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) + private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) { Host.AssertValue(ch); ch.Assert(PredictionKind == PredictionKind.MultiClassClassification); diff --git a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs index bcc32d533c..2d27820592 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs @@ -118,7 +118,7 @@ public LightGbmRankingTrainer(IHostEnvironment env, Host.CheckNonEmpty(groupId, nameof(groupId)); } - protected override void CheckDataValid(IChannel ch, RoleMappedData data) + private protected override void CheckDataValid(IChannel ch, RoleMappedData data) { Host.AssertValue(ch); base.CheckDataValid(ch, data); @@ -158,7 +158,7 @@ private protected override LightGbmRankingModelParameters CreatePredictor() return new LightGbmRankingModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs); } - protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) + private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) { Host.AssertValue(ch); Options["objective"] = "lambdarank"; diff --git a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs index 9e3868bf96..f07baba4cb 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs @@ -127,7 +127,7 @@ private protected override LightGbmRegressionModelParameters CreatePredictor() return new 
LightGbmRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs); } - protected override void CheckDataValid(IChannel ch, RoleMappedData data) + private protected override void CheckDataValid(IChannel ch, RoleMappedData data) { Host.AssertValue(ch); base.CheckDataValid(ch, data); @@ -139,7 +139,7 @@ protected override void CheckDataValid(IChannel ch, RoleMappedData data) } } - protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) + private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups) { Options["objective"] = "regression"; // Add default metric. diff --git a/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs b/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs index 755dd32cff..28e12510d0 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmTrainerBase.cs @@ -167,7 +167,7 @@ private void DisposeParallelTraining() LightGbmInterfaceUtils.Check(WrappedLightGbmInterface.NetworkFree()); } - protected virtual void CheckDataValid(IChannel ch, RoleMappedData data) + private protected virtual void CheckDataValid(IChannel ch, RoleMappedData data) { data.CheckFeatureFloatVector(); ch.CheckParam(data.Schema.Label != null, nameof(data), "Need a label column"); @@ -463,7 +463,7 @@ private void GetMetainfo(IChannel ch, FloatLabelCursor.Factory factory, /// /// Convert Nan labels. Default way is converting them to zero. /// - protected virtual void ConvertNaNLabels(IChannel ch, RoleMappedData data, float[] labels) + private protected virtual void ConvertNaNLabels(IChannel ch, RoleMappedData data, float[] labels) { for (int i = 0; i < labels.Length; ++i) { @@ -896,7 +896,7 @@ private static int GetNumSampleRow(int numRow, int numCol) /// /// This function will be called before training. It will check the label/group and add parameters for specific applications. 
/// - protected abstract void CheckAndUpdateParametersBeforeTraining(IChannel ch, + private protected abstract void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups); } } diff --git a/src/Microsoft.ML.Onnx/SaveOnnxCommand.cs b/src/Microsoft.ML.Onnx/SaveOnnxCommand.cs index 3c3dd97dcd..1ed8730c1f 100644 --- a/src/Microsoft.ML.Onnx/SaveOnnxCommand.cs +++ b/src/Microsoft.ML.Onnx/SaveOnnxCommand.cs @@ -56,7 +56,7 @@ public sealed class Arguments : DataCommand.ArgumentsBase public bool? LoadPredictor; [Argument(ArgumentType.Required, Visibility = ArgumentAttribute.VisibilityType.EntryPointsOnly, HelpText = "Model that needs to be converted to ONNX format.", SortOrder = 10)] - public ITransformModel Model; + public TransformModel Model; [Argument(ArgumentType.AtMostOnce, HelpText = "The targeted ONNX version. It can be either \"Stable\" or \"Experimental\". If \"Experimental\" is used, produced model can contain components that is not officially supported in ONNX standard.", SortOrder = 11)] public OnnxVersion OnnxVersion; @@ -69,7 +69,7 @@ public sealed class Arguments : DataCommand.ArgumentsBase private readonly bool? 
_loadPredictor; private readonly HashSet _inputsToDrop; private readonly HashSet _outputsToDrop; - private readonly ITransformModel _model; + private readonly TransformModel _model; private const string ProducerName = "ML.NET"; private const long ModelVersion = 0; diff --git a/src/Microsoft.ML.PCA/PcaTransform.cs b/src/Microsoft.ML.PCA/PcaTransform.cs index 90a613e844..6081f8ee54 100644 --- a/src/Microsoft.ML.PCA/PcaTransform.cs +++ b/src/Microsoft.ML.PCA/PcaTransform.cs @@ -654,7 +654,7 @@ public static CommonOutputs.TransformOutput Calculate(IHostEnvironment env, Argu var view = PcaTransform.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index ae79e564cd..49a6d8241a 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -212,7 +212,7 @@ private ValueGetter GetGetter(ValueGetter matrixColumnIndexGetter, /// ratings from users to items, the mappers maps user ID and item ID to the rating of that /// item given by the user. 
/// - public ValueMapper GetMapper() + private ValueMapper GetMapper() { string msg = null; msg = "Invalid " + nameof(TMatrixColumnIndexIn) + " in GetMapper: " + typeof(TMatrixColumnIndexIn); diff --git a/src/Microsoft.ML.ResultProcessor/ResultProcessor.cs b/src/Microsoft.ML.ResultProcessor/ResultProcessor.cs index 85b9234856..987d63d237 100644 --- a/src/Microsoft.ML.ResultProcessor/ResultProcessor.cs +++ b/src/Microsoft.ML.ResultProcessor/ResultProcessor.cs @@ -661,7 +661,7 @@ private static bool ValidateMamlOutput(string filename, string[] rawLines, out L }; } - public static bool ParseCommandArguments(IHostEnvironment env, string commandline, out object commandArgs, out ComponentCatalog.LoadableClassInfo commandClass, bool trimExe = true) + private static bool ParseCommandArguments(IHostEnvironment env, string commandline, out object commandArgs, out ComponentCatalog.LoadableClassInfo commandClass, bool trimExe = true) { string args = commandline; if (trimExe) diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs index 0b64f32ffb..7e380a2158 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs @@ -309,7 +309,7 @@ ValueMapper IValueMapper.GetMapper() /// /// Combine a bunch of models into one by averaging parameters /// - internal void CombineParameters(IList> models, out VBuffer weights, out float bias) + private protected void CombineParameters(IList> models, out VBuffer weights, out float bias) { Type type = GetType(); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs index 4c7fad0812..490a3cca9f 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs +++ 
b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs @@ -260,7 +260,7 @@ private static TArgs ArgsInit(string featureColumn, SchemaShape.Column labelColu protected virtual int ClassCount => 1; protected int BiasCount => ClassCount; protected int WeightCount => ClassCount * NumFeatures; - protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, + private protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, out VBuffer init, out ITerminationCriterion terminationCriterion) { // MeanRelativeImprovementCriterion: @@ -293,7 +293,7 @@ protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Fa /// /// Initialize weights by running SGD up to specified tolerance. /// - protected virtual VBuffer InitializeWeightsSgd(IChannel ch, FloatLabelCursor.Factory cursorFactory) + private protected virtual VBuffer InitializeWeightsSgd(IChannel ch, FloatLabelCursor.Factory cursorFactory) { if (!Quiet) ch.Info("Running SGD initialization with tolerance {0}", SgdInitializationTolerance); @@ -367,7 +367,7 @@ protected virtual VBuffer InitializeWeightsSgd(IChannel ch, FloatLabelCur protected abstract VBuffer InitializeWeightsFromPredictor(TModel srcPredictor); - protected abstract void CheckLabel(RoleMappedData data); + private protected abstract void CheckLabel(RoleMappedData data); protected virtual void PreTrainingProcessInstance(float label, in VBuffer feat, float weight) { @@ -404,7 +404,7 @@ private protected override TModel TrainModelCore(TrainContext context) } } - protected virtual void TrainCore(IChannel ch, RoleMappedData data) + private protected virtual void TrainCore(IChannel ch, RoleMappedData data) { Host.AssertValue(ch); ch.AssertValue(data); @@ -578,7 +578,7 @@ protected void EnsureBiases(ref VBuffer vec) protected abstract float AccumulateOneGradient(in VBuffer feat, float label, float weight, in VBuffer xDense, ref VBuffer 
grad, ref float[] scratch); - protected abstract void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams); + private protected abstract void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams); protected abstract void ProcessPriorDistribution(float label, float weight); /// @@ -702,7 +702,7 @@ protected float DifferentiableFunctionComputeChunk(int ichk, in VBuffer x return (float)loss; } - protected float DifferentiableFunctionStream(FloatLabelCursor.Factory cursorFactory, in VBuffer xDense, ref VBuffer grad, IProgressChannel pch) + private protected float DifferentiableFunctionStream(FloatLabelCursor.Factory cursorFactory, in VBuffer xDense, ref VBuffer grad, IProgressChannel pch) { Contracts.AssertValue(cursorFactory); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs index 37d79e87c6..6f8b74fab2 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs @@ -108,7 +108,7 @@ internal LogisticRegression(IHostEnvironment env, Arguments args) public override PredictionKind PredictionKind => PredictionKind.BinaryClassification; - protected override void CheckLabel(RoleMappedData data) + private protected override void CheckLabel(RoleMappedData data) { Contracts.AssertValue(data); data.CheckBinaryLabel(); @@ -160,7 +160,7 @@ protected override float AccumulateOneGradient(in VBuffer feat, float lab return weight * datumLoss; } - protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams) + private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams) { 
Contracts.AssertValue(ch); Contracts.AssertValue(cursorFactory); @@ -367,7 +367,7 @@ protected override void ProcessPriorDistribution(float label, float weight) } //Override default termination criterion MeanRelativeImprovementCriterion with - protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, + private protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, out VBuffer init, out ITerminationCriterion terminationCriterion) { var opt = base.InitializeOptimizer(ch, cursorFactory, out init, out terminationCriterion); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs index ca1ea84140..912e1747b1 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -114,7 +114,7 @@ internal MulticlassLogisticRegression(IHostEnvironment env, Arguments args) public override PredictionKind PredictionKind => PredictionKind.MultiClassClassification; - protected override void CheckLabel(RoleMappedData data) + private protected override void CheckLabel(RoleMappedData data) { Contracts.AssertValue(data); // REVIEW: For floating point labels, this will make a pass over the data. 
@@ -180,7 +180,7 @@ protected override void CheckLabel(RoleMappedData data) } //Override default termination criterion MeanRelativeImprovementCriterion with - protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, + private protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory, out VBuffer init, out ITerminationCriterion terminationCriterion) { var opt = base.InitializeOptimizer(ch, cursorFactory, out init, out terminationCriterion); @@ -255,7 +255,7 @@ protected override MulticlassLogisticRegressionPredictor CreatePredictor() return new MulticlassLogisticRegressionPredictor(Host, in CurrentWeights, _numClasses, NumFeatures, _labelNames, _stats); } - protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams) + private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams) { Contracts.AssertValue(ch); Contracts.AssertValue(cursorFactory); diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs index e7aeade4fe..09839c410d 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MetaMulticlassTrainer.cs @@ -90,7 +90,7 @@ private TScalarTrainer CreateTrainer() new LinearSvm(Host, new LinearSvm.Arguments()); } - protected IDataView MapLabelsCore(ColumnType type, InPredicate equalsTarget, RoleMappedData data) + private protected IDataView MapLabelsCore(ColumnType type, InPredicate equalsTarget, RoleMappedData data) { Host.AssertValue(type); Host.Assert(type.RawType == typeof(T)); @@ -114,7 +114,7 @@ protected IDataView MapLabelsCore(ColumnType type, InPredicate equalsTarge dst = equalsTarget(in src) ? 
1 : default(float)); } - protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, int count); + private protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, int count); /// /// The legacy train method. diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs index 398fce9015..0c06870dd6 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs @@ -103,7 +103,7 @@ public Ova(IHostEnvironment env, _args.UseProbabilities = useProbabilities; } - protected override OvaPredictor TrainCore(IChannel ch, RoleMappedData data, int count) + private protected override OvaPredictor TrainCore(IChannel ch, RoleMappedData data, int count) { // Train one-vs-all models. var predictors = new TScalarPredictor[count]; diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs index 902906c5da..dd8129284d 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs @@ -104,7 +104,7 @@ public Pkpd(IHostEnvironment env, Host.CheckValue(labelColumn, nameof(labelColumn), "Label column should not be null."); } - protected override PkpdPredictor TrainCore(IChannel ch, RoleMappedData data, int count) + private protected override PkpdPredictor TrainCore(IChannel ch, RoleMappedData data, int count) { // Train M * (M+1) / 2 models arranged as a lower triangular matrix. 
var predModels = new TDistPredictor[count][]; diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs index 6556a98dae..c4789895f8 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs @@ -155,7 +155,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override void CheckLabels(RoleMappedData data) + private protected override void CheckLabels(RoleMappedData data) { Contracts.AssertValue(data); data.CheckBinaryLabel(); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs index 716f78d298..ac804fde2a 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs @@ -268,7 +268,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override void CheckLabels(RoleMappedData data) + private protected override void CheckLabels(RoleMappedData data) { Contracts.AssertValue(data); data.CheckBinaryLabel(); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs index 70a53f6d49..c2d6baec9f 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs @@ -153,7 +153,7 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override void CheckLabels(RoleMappedData data) + private protected override void CheckLabels(RoleMappedData data) { data.CheckRegressionLabel(); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs 
b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs index dcdeeac236..78341717d6 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs @@ -279,7 +279,7 @@ private protected sealed override TModel TrainModelCore(TrainContext context) } } - protected abstract void CheckLabels(RoleMappedData data); + private protected abstract void CheckLabels(RoleMappedData data); private void TrainCore(IChannel ch, RoleMappedData data, TrainStateBase state) { diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs index 70956bdc71..3c56292c47 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs @@ -81,7 +81,7 @@ internal PoissonRegression(IHostEnvironment env, Arguments args) public override PredictionKind PredictionKind => PredictionKind.Regression; - protected override void CheckLabel(RoleMappedData data) + private protected override void CheckLabel(RoleMappedData data) { Contracts.AssertValue(data); data.CheckRegressionLabel(); @@ -162,7 +162,7 @@ protected override PoissonRegressionModelParameters CreatePredictor() return new PoissonRegressionModelParameters(Host, in weights, bias); } - protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory factory, float loss, int numParams) + private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory factory, float loss, int numParams) { // No-op by design. 
} diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index a286d63994..4d10d0f702 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -84,7 +84,7 @@ private protected override TModel TrainModelCore(TrainContext context) } } - protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); + private protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); /// /// This method ensures that the data meets the requirements of this trainer and its @@ -95,7 +95,7 @@ private protected override TModel TrainModelCore(TrainContext context) /// Gets the length of weights and bias array. For binary classification and regression, /// this is 1. For multi-class classification, this equals the number of classes on the label. 
/// A potentially modified version of - protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMappedData examples, out int weightSetCount) + private protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMappedData examples, out int weightSetCount) { ch.AssertValue(examples); CheckLabel(examples, out weightSetCount); @@ -129,7 +129,7 @@ protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMapped return examplesToFeedTrain; } - protected abstract void CheckLabel(RoleMappedData examples, out int weightSetCount); + private protected abstract void CheckLabel(RoleMappedData examples, out int weightSetCount); protected float WDot(in VBuffer features, in VBuffer weights, float bias) { @@ -141,7 +141,7 @@ protected float WScaledDot(in VBuffer features, Double scaling, in VBuffe return VectorUtils.DotProduct(in weights, in features) * (float)scaling + bias; } - protected virtual int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) + private protected virtual int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) { int maxThreads = Math.Min(8, Math.Max(1, Environment.ProcessorCount / 2)); if (0 < Host.ConcurrencyFactor && Host.ConcurrencyFactor < maxThreads) @@ -281,7 +281,7 @@ protected float WDot(in VBuffer features, in VBuffer weights, floa return VectorUtils.DotProduct(in weights, in features) + bias; } - protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) + private protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) { Contracts.Assert(predictor == null, "SDCA based trainers don't support continuous training."); Contracts.Assert(weightSetCount >= 1); @@ -748,7 +748,7 @@ private void InitializeConvergenceMetrics(out string[] names, out Double[] initi /// The array holding the pre-computed squared L2-norm of features for each training example. 
It may be null. It is always null for /// binary classification and regression because this quantity is not needed. /// - protected virtual void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, + private protected virtual void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, float[] biasReg, float[] invariants, float lambdaNInv, VBuffer[] weights, float[] biasUnreg, VBuffer[] l1IntermediateWeights, float[] l1IntermediateBias, float[] featureNormSquared) { @@ -902,7 +902,7 @@ protected virtual void TrainWithoutLock(IProgressChannelProvider progress, Float /// /// The iteration number when the best model is obtained. /// Whether the optimization has converged. - protected virtual bool CheckConvergence( + private protected virtual bool CheckConvergence( IProgressChannel pch, int iter, FloatLabelCursor.Factory cursorFactory, @@ -992,15 +992,15 @@ protected virtual float[] InitializeFeatureNormSquared(int length) return null; } - protected abstract float GetInstanceWeight(FloatLabelCursor cursor); + private protected abstract float GetInstanceWeight(FloatLabelCursor cursor); - protected delegate void Visitor(long index, ref float value); + private protected delegate void Visitor(long index, ref float value); /// /// Encapsulates the common functionality of storing and /// retrieving the dual variables. 
/// - protected abstract class DualsTableBase + private protected abstract class DualsTableBase { public abstract float this[long index] { get; set; } public abstract long Length { get; } @@ -1566,12 +1566,12 @@ protected override TScalarPredictor CreatePredictor(VBuffer[] weights, fl return new ParameterMixingCalibratedPredictor(Host, predictor, new PlattCalibrator(Host, -1, 0)); } - protected override float GetInstanceWeight(FloatLabelCursor cursor) + private protected override float GetInstanceWeight(FloatLabelCursor cursor) { return cursor.Label > 0 ? cursor.Weight * _positiveInstanceWeight : cursor.Weight; } - protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) + private protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) { examples.CheckBinaryLabel(); weightSetCount = 1; @@ -1748,7 +1748,7 @@ public BinaryPredictionTransformer Train(IDataView trainData, //For complexity analysis, we assume that // - The number of features is N // - Average number of non-zero per instance is k - protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) + private protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) { Contracts.AssertValue(data); Contracts.Assert(weightSetCount == 1); @@ -1931,7 +1931,7 @@ protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, return new ParameterMixingCalibratedPredictor(Host, pred, new PlattCalibrator(Host, -1, 0)); } - protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) + private protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) { examples.CheckBinaryLabel(); weightSetCount = 1; diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs index 
659b567843..e7a9a6fe7b 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs @@ -124,7 +124,7 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) } /// - protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, + private protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand, IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, Float[] biasReg, Float[] invariants, Float lambdaNInv, VBuffer[] weights, Float[] biasUnreg, VBuffer[] l1IntermediateWeights, Float[] l1IntermediateBias, Float[] featureNormSquared) { @@ -289,7 +289,7 @@ protected override void TrainWithoutLock(IProgressChannelProvider progress, Floa } /// - protected override bool CheckConvergence( + private protected override bool CheckConvergence( IProgressChannel pch, int iter, FloatLabelCursor.Factory cursorFactory, @@ -425,7 +425,7 @@ protected override MulticlassLogisticRegressionPredictor CreatePredictor(VBuffer return new MulticlassLogisticRegressionPredictor(Host, weights, bias, bias.Length, weights[0].Length, null, stats: null); } - protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) + private protected override void CheckLabel(RoleMappedData examples, out int weightSetCount) { examples.CheckMultiClassLabel(out weightSetCount); } @@ -436,7 +436,7 @@ protected override Float[] InitializeFeatureNormSquared(int length) return new Float[length]; } - protected override Float GetInstanceWeight(FloatLabelCursor cursor) + private protected override Float GetInstanceWeight(FloatLabelCursor cursor) { return cursor.Weight; } diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs index 0a50df7c31..0eec9f2588 100644 --- 
a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs @@ -113,19 +113,19 @@ protected override LinearRegressionModelParameters CreatePredictor(VBuffer PredictionKind.BinaryClassification; private readonly ColumnType _inputType; ColumnType IValueMapper.InputType => _inputType; diff --git a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs index bb4a2965d9..d27ce8791c 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs @@ -43,7 +43,7 @@ private protected override TModel TrainModelCore(TrainContext context) } } - protected virtual int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) + private protected virtual int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) { int maxThreads = Math.Min(8, Math.Max(1, Environment.ProcessorCount / 2)); if (0 < Host.ConcurrencyFactor && Host.ConcurrencyFactor < maxThreads) @@ -61,7 +61,7 @@ protected virtual int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) /// Gets the length of weights and bias array. For binary classification and regression, /// this is 1. For multi-class classification, this equals the number of classes on the label. 
/// A potentially modified version of - protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMappedData examples, out int weightSetCount) + private protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMappedData examples, out int weightSetCount) { ch.AssertValue(examples); CheckLabel(examples, out weightSetCount); @@ -95,8 +95,8 @@ protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMapped return examplesToFeedTrain; } - protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); + private protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); - protected abstract void CheckLabel(RoleMappedData examples, out int weightSetCount); + private protected abstract void CheckLabel(RoleMappedData examples, out int weightSetCount); } } diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index f555545099..b4d9bd0993 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -943,7 +943,7 @@ public static CommonOutputs.TransformOutput TensorFlowScorer(IHostEnvironment en var view = Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.TimeSeries/TimeSeriesProcessing.cs b/src/Microsoft.ML.TimeSeries/TimeSeriesProcessing.cs index 65c7346d55..a03243857d 100644 --- a/src/Microsoft.ML.TimeSeries/TimeSeriesProcessing.cs +++ b/src/Microsoft.ML.TimeSeries/TimeSeriesProcessing.cs @@ -21,7 +21,7 @@ public static CommonOutputs.TransformOutput ExponentialAverage(IHostEnvironment var xf = new ExponentialAverageTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - 
Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -33,7 +33,7 @@ public static CommonOutputs.TransformOutput IidChangePointDetector(IHostEnvironm var view = new IidChangePointEstimator(h, input).Fit(input.Data).Transform(input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -45,7 +45,7 @@ public static CommonOutputs.TransformOutput IidSpikeDetector(IHostEnvironment en var view = new IidSpikeEstimator(h, input).Fit(input.Data).Transform(input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -57,7 +57,7 @@ public static CommonOutputs.TransformOutput PercentileThresholdTransform(IHostEn var view = new PercentileThresholdTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -69,7 +69,7 @@ public static CommonOutputs.TransformOutput PValueTransform(IHostEnvironment env var view = new PValueTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -81,7 +81,7 @@ public static CommonOutputs.TransformOutput SlidingWindowTransform(IHostEnvironm var view = new SlidingWindowTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -93,7 +93,7 @@ public static CommonOutputs.TransformOutput SsaChangePointDetector(IHostEnvironm var view = new SsaChangePointEstimator(h, 
input).Fit(input.Data).Transform(input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -105,7 +105,7 @@ public static CommonOutputs.TransformOutput SsaSpikeDetector(IHostEnvironment en var view = new SsaSpikeEstimator(h, input).Fit(input.Data).Transform(input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs index a1f12acb23..ecb3bed6fe 100644 --- a/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs +++ b/src/Microsoft.ML.Transforms/BootstrapSamplingTransformer.cs @@ -248,7 +248,7 @@ public static CommonOutputs.TransformOutput GetSample(IHostEnvironment env, Boot var view = new BootstrapSamplingTransformer(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Transforms/EntryPoints/SelectFeatures.cs b/src/Microsoft.ML.Transforms/EntryPoints/SelectFeatures.cs index 4edf7cd840..f7ccab8fc5 100644 --- a/src/Microsoft.ML.Transforms/EntryPoints/SelectFeatures.cs +++ b/src/Microsoft.ML.Transforms/EntryPoints/SelectFeatures.cs @@ -26,7 +26,7 @@ public static CommonOutputs.TransformOutput CountSelect(IHostEnvironment env, Co EntryPointUtils.CheckInputArgs(host, input); var xf = CountFeatureSelectingEstimator.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = 
"Transforms.FeatureSelectorByMutualInformation", @@ -43,7 +43,7 @@ public static CommonOutputs.TransformOutput MutualInformationSelect(IHostEnviron EntryPointUtils.CheckInputArgs(host, input); var xf = MutualInformationFeatureSelectingEstimator.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Transforms/EntryPoints/TextAnalytics.cs b/src/Microsoft.ML.Transforms/EntryPoints/TextAnalytics.cs index a6a3b42344..4dac234ab9 100644 --- a/src/Microsoft.ML.Transforms/EntryPoints/TextAnalytics.cs +++ b/src/Microsoft.ML.Transforms/EntryPoints/TextAnalytics.cs @@ -29,7 +29,7 @@ public static CommonOutputs.TransformOutput TextTransform(IHostEnvironment env, var xf = TextFeaturizingEstimator.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -46,7 +46,7 @@ public static CommonOutputs.TransformOutput DelimitedTokenizeTransform(IHostEnvi var xf = ML.Transforms.Text.WordTokenizingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -62,7 +62,7 @@ public static CommonOutputs.TransformOutput NGramTransform(IHostEnvironment env, var xf = NgramExtractingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -77,7 +77,7 @@ public static CommonOutputs.TransformOutput TermTransform(IHostEnvironment env, var xf = ValueToKeyMappingTransformer.Create(h, input, input.Data); return new 
CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -94,7 +94,7 @@ public static CommonOutputs.TransformOutput AnalyzeSentiment(IHostEnvironment en var view = SentimentAnalyzingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -113,7 +113,7 @@ public static CommonOutputs.TransformOutput CharTokenize(IHostEnvironment env, T var view = TokenizingByCharactersTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -136,7 +136,7 @@ public static CommonOutputs.TransformOutput LightLda(IHostEnvironment env, Laten return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } @@ -156,7 +156,7 @@ public static CommonOutputs.TransformOutput WordEmbeddings(IHostEnvironment env, var view = WordEmbeddingsExtractingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs index b203fc9dd7..9410535b5f 100644 --- a/src/Microsoft.ML.Transforms/GcnTransform.cs +++ b/src/Microsoft.ML.Transforms/GcnTransform.cs @@ -720,7 +720,7 @@ public static CommonOutputs.TransformOutput Normalize(IHostEnvironment env, LpNo var xf = LpNormalizingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new 
TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -736,7 +736,7 @@ public static CommonOutputs.TransformOutput GcNormalize(IHostEnvironment env, Lp var xf = LpNormalizingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Transforms/GroupTransform.cs b/src/Microsoft.ML.Transforms/GroupTransform.cs index 612b08cfe9..64e09e8043 100644 --- a/src/Microsoft.ML.Transforms/GroupTransform.cs +++ b/src/Microsoft.ML.Transforms/GroupTransform.cs @@ -709,7 +709,7 @@ public static CommonOutputs.TransformOutput Group(IHostEnvironment env, GroupTra var view = new GroupTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Transforms/HashJoiningTransform.cs b/src/Microsoft.ML.Transforms/HashJoiningTransform.cs index e8ce9c1c15..e79782b427 100644 --- a/src/Microsoft.ML.Transforms/HashJoiningTransform.cs +++ b/src/Microsoft.ML.Transforms/HashJoiningTransform.cs @@ -713,7 +713,7 @@ public static CommonOutputs.TransformOutput Apply(IHostEnvironment env, HashJoin var view = new HashJoiningTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/src/Microsoft.ML.Transforms/NAHandling.cs b/src/Microsoft.ML.Transforms/NAHandling.cs index e5809adf68..c2c234abb5 100644 --- a/src/Microsoft.ML.Transforms/NAHandling.cs +++ b/src/Microsoft.ML.Transforms/NAHandling.cs @@ -24,7 +24,7 @@ public static CommonOutputs.TransformOutput Drop(IHostEnvironment env, MissingVa var xf = MissingValueDroppingTransformer.Create(h, input, input.Data); return new 
CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -41,7 +41,7 @@ public static CommonOutputs.TransformOutput Filter(IHostEnvironment env, NAFilte var xf = new NAFilter(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -58,7 +58,7 @@ public static CommonOutputs.TransformOutput Handle(IHostEnvironment env, Missing var xf = MissingValueHandlingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -75,7 +75,7 @@ public static CommonOutputs.TransformOutput Indicator(IHostEnvironment env, Miss var xf = new MissingValueIndicatorTransformer(h, input).Transform(input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } @@ -92,7 +92,7 @@ public static CommonOutputs.TransformOutput Replace(IHostEnvironment env, Missin var xf = MissingValueReplacingTransformer.Create(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Transforms/OneHotEncoding.cs b/src/Microsoft.ML.Transforms/OneHotEncoding.cs index 1a617c63e4..7e357259d4 100644 --- a/src/Microsoft.ML.Transforms/OneHotEncoding.cs +++ b/src/Microsoft.ML.Transforms/OneHotEncoding.cs @@ -293,7 +293,7 @@ public static CommonOutputs.TransformOutput CatTransformDict(IHostEnvironment en EntryPointUtils.CheckInputArgs(host, input); var xf = OneHotEncodingTransformer.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { 
Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.CategoricalHashOneHotVectorizer", @@ -309,7 +309,7 @@ public static CommonOutputs.TransformOutput CatTransformHash(IHostEnvironment en EntryPointUtils.CheckInputArgs(host, input); var xf = OneHotHashEncoding.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.TextToKeyConverter", @@ -325,7 +325,7 @@ public static CommonOutputs.TransformOutput TextToKey(IHostEnvironment env, Valu EntryPointUtils.CheckInputArgs(host, input); var xf = ValueToKeyMappingTransformer.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } [TlcModule.EntryPoint(Name = "Transforms.KeyToTextConverter", @@ -340,7 +340,7 @@ public static CommonOutputs.TransformOutput KeyToText(IHostEnvironment env, KeyT EntryPointUtils.CheckInputArgs(host, input); var xf = KeyToValueMappingTransformer.Create(host, input, input.Data); - return new CommonOutputs.TransformOutput { Model = new TransformModel(env, xf, input.Data), OutputData = xf }; + return new CommonOutputs.TransformOutput { Model = new TransformModelImpl(env, xf, input.Data), OutputData = xf }; } } } diff --git a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs index 8c215dcb07..06e793c6ba 100644 --- a/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs +++ 
b/src/Microsoft.ML.Transforms/OptionalColumnTransform.cs @@ -487,7 +487,7 @@ public static CommonOutputs.TransformOutput MakeOptional(IHostEnvironment env, A var xf = new OptionalColumnTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, xf, input.Data), + Model = new TransformModelImpl(h, xf, input.Data), OutputData = xf }; } diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 72dc32c128..b34a0ad63b 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -670,7 +670,7 @@ public static CommonOutputs.TransformOutput Ungroup(IHostEnvironment env, Ungrou var view = new UngroupTransform(h, input, input.Data); return new CommonOutputs.TransformOutput() { - Model = new TransformModel(h, view, input.Data), + Model = new TransformModelImpl(h, view, input.Data), OutputData = view }; } diff --git a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv index d1f3bf4a52..2d1f4e4cf9 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv +++ b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv @@ -1,9 +1,9 @@ Data.CustomTextLoader Import a dataset from a text file Microsoft.ML.Runtime.EntryPoints.ImportTextData ImportText Microsoft.ML.Runtime.EntryPoints.ImportTextData+Input Microsoft.ML.Runtime.EntryPoints.ImportTextData+Output Data.DataViewReference Pass dataview from memory to experiment Microsoft.ML.Runtime.EntryPoints.DataViewReference ImportData Microsoft.ML.Runtime.EntryPoints.DataViewReference+Input Microsoft.ML.Runtime.EntryPoints.DataViewReference+Output Data.IDataViewArrayConverter Create an array variable of IDataView Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro MakeArray Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayIDataViewInput 
Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayIDataViewOutput -Data.PredictorModelArrayConverter Create an array variable of IPredictorModel Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro MakeArray Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayIPredictorModelInput Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayIPredictorModelOutput +Data.PredictorModelArrayConverter Create an array variable of PredictorModel Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro MakeArray Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayIPredictorModelInput Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayPredictorModelOutput Data.TextLoader Import a dataset from a text file Microsoft.ML.Runtime.EntryPoints.ImportTextData TextLoader Microsoft.ML.Runtime.EntryPoints.ImportTextData+LoaderInput Microsoft.ML.Runtime.EntryPoints.ImportTextData+Output -Data.TransformModelArrayConverter Create an array variable of ITransformModel Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro MakeArray Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayITransformModelInput Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayITransformModelOutput +Data.TransformModelArrayConverter Create an array variable of TransformModel Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro MakeArray Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayTransformModelInput Microsoft.ML.Runtime.EntryPoints.CrossValidationBinaryMacro+ArrayITransformModelOutput Models.AnomalyDetectionEvaluator Evaluates an anomaly detection scored dataset. 
Microsoft.ML.Runtime.Data.Evaluate AnomalyDetection Microsoft.ML.Runtime.Data.AnomalyDetectionMamlEvaluator+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+CommonEvaluateOutput Models.AnomalyPipelineEnsemble Combine anomaly detection models into an ensemble Microsoft.ML.Runtime.EntryPoints.EnsembleCreator CreateAnomalyPipelineEnsemble Microsoft.ML.Runtime.EntryPoints.EnsembleCreator+PipelineAnomalyInput Microsoft.ML.Runtime.EntryPoints.CommonOutputs+AnomalyDetectionOutput Models.BinaryClassificationEvaluator Evaluates a binary classification scored dataset. Microsoft.ML.Runtime.Data.Evaluate Binary Microsoft.ML.Runtime.Data.BinaryClassifierMamlEvaluator+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+ClassificationEvaluateOutput diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json index e76f410d5e..8f4484992a 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json +++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json @@ -92,7 +92,7 @@ }, { "Name": "Data.PredictorModelArrayConverter", - "Desc": "Create an array variable of IPredictorModel", + "Desc": "Create an array variable of PredictorModel", "FriendlyName": null, "ShortName": null, "Inputs": [ @@ -471,7 +471,7 @@ }, { "Name": "Data.TransformModelArrayConverter", - "Desc": "Create an array variable of ITransformModel", + "Desc": "Create an array variable of TransformModel", "FriendlyName": null, "ShortName": null, "Inputs": [ diff --git a/test/Microsoft.ML.Benchmarks/Helpers/EnvironmentFactory.cs b/test/Microsoft.ML.Benchmarks/Helpers/EnvironmentFactory.cs index 3b0b055d30..20582c108d 100644 --- a/test/Microsoft.ML.Benchmarks/Helpers/EnvironmentFactory.cs +++ b/test/Microsoft.ML.Benchmarks/Helpers/EnvironmentFactory.cs @@ -28,7 +28,6 @@ internal static MLContext CreateClassificationEnvironment() - where TEvaluator : IEvaluator where TLoader : IDataReader where TTransformer : ITransformer where 
TTrainer : ITrainerEstimator, IPredictor> diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs index f5d9006800..b1f35eb0ac 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestCSharpApi.cs @@ -147,7 +147,7 @@ public void TestTrainTestMacro() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new ArrayVar(catOutput.Model, concatOutput.Model), + TransformModels = new ArrayVar(catOutput.Model, concatOutput.Model), PredictorModel = sdcaOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); @@ -215,7 +215,7 @@ public void TestCrossValidationBinaryMacro() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new ArrayVar(catOutput.Model, concatOutput.Model), + TransformModels = new ArrayVar(catOutput.Model, concatOutput.Model), PredictorModel = lrOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); @@ -280,7 +280,7 @@ public void TestCrossValidationMacro() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new ArrayVar(nopOutput.Model, generateOutput.Model), + TransformModels = new ArrayVar(nopOutput.Model, generateOutput.Model), PredictorModel = learnerOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); @@ -418,7 +418,7 @@ public void TestCrossValidationMacroWithMultiClass() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new ArrayVar(nopOutput.Model), + TransformModels = new ArrayVar(nopOutput.Model), PredictorModel = learnerOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); @@ -626,7 +626,7 @@ public void TestCrossValidationMacroWithStratification() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new 
ArrayVar(nopOutput.Model), + TransformModels = new ArrayVar(nopOutput.Model), PredictorModel = learnerOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); @@ -728,7 +728,7 @@ public void TestCrossValidationMacroWithNonDefaultNames() var modelCombine = new Legacy.Transforms.ManyHeterogeneousModelCombiner { - TransformModels = new ArrayVar(textToKeyOutput.Model, hashOutput.Model), + TransformModels = new ArrayVar(textToKeyOutput.Model, hashOutput.Model), PredictorModel = learnerOutput.PredictorModel }; var modelCombineOutput = subGraph.Add(modelCombine); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index 23d9b449ba..e8eb0fdcc6 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -442,7 +442,7 @@ public void EntryPointCreateEnsemble() var dataView = GetBreastCancerDataView(); const int nModels = 5; var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = nModels + 1 }); - var predictorModels = new IPredictorModel[nModels]; + var predictorModels = new PredictorModel[nModels]; var individualScores = new IDataView[nModels]; for (int i = 0; i < nModels; i++) { @@ -593,7 +593,7 @@ public void EntryPointOptionalParams() runner.RunAll(); - var model = runner.GetOutput("model1"); + var model = runner.GetOutput("model1"); Assert.NotNull(model); } @@ -732,7 +732,7 @@ public void EntryPointCalibrate() // This tests that the SchemaBindableCalibratedPredictor doesn't get confused if its sub-predictor is already calibrated. 
var fastForest = new FastForestClassification(Env, "Label", "Features"); var rmd = new RoleMappedData(splitOutput.TrainData[0], "Label", "Features"); - var ffModel = new PredictorModel(Env, rmd, splitOutput.TrainData[0], fastForest.Train(rmd)); + var ffModel = new PredictorModelImpl(Env, rmd, splitOutput.TrainData[0], fastForest.Train(rmd)); var calibratedFfModel = Calibrate.Platt(Env, new Calibrate.NoArgumentsInput() { Data = splitOutput.TestData[0], UncalibratedPredictorModel = ffModel }).PredictorModel; var twiceCalibratedFfModel = Calibrate.Platt(Env, @@ -747,7 +747,7 @@ public void EntryPointPipelineEnsemble() var dataView = GetBreastCancerDataView(); const int nModels = 5; var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = nModels + 1 }); - var predictorModels = new IPredictorModel[nModels]; + var predictorModels = new PredictorModel[nModels]; var individualScores = new IDataView[nModels]; for (int i = 0; i < nModels; i++) { @@ -783,7 +783,7 @@ public void EntryPointPipelineEnsemble() NormalizeFeatures = NormalizeOption.Yes }; predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel; - var transformModel = new TransformModel(Env, data, splitOutput.TrainData[i]); + var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, new ModelOperations.SimplePredictorModelInput() @@ -877,10 +877,10 @@ public void EntryPointPipelineEnsemble() using (var strm = file.CreateWriteStream()) regressionEnsembleModel.Save(Env, strm); - IPredictorModel loadedFromSaved; + PredictorModel loadedFromSaved; using (var file = Env.OpenInputFile(modelPath)) using (var strm = file.OpenReadStream()) - loadedFromSaved = new PredictorModel(Env, strm); + loadedFromSaved = new PredictorModelImpl(Env, strm); var scoredFromSaved = ScoreModel.Score(Env, new ScoreModel.Input() @@ -1014,7 +1014,7 @@ public void EntryPointPipelineEnsembleText() const int nModels 
= 5; var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = nModels + 1 }); - var predictorModels = new IPredictorModel[nModels]; + var predictorModels = new PredictorModel[nModels]; var individualScores = new IDataView[nModels]; for (int i = 0; i < nModels; i++) { @@ -1046,7 +1046,7 @@ public void EntryPointPipelineEnsembleText() NormalizeFeatures = NormalizeOption.Yes }; predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel; - var transformModel = new TransformModel(Env, data, splitOutput.TrainData[i]); + var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, new ModelOperations.SimplePredictorModelInput() @@ -1101,10 +1101,10 @@ public void EntryPointPipelineEnsembleText() using (var strm = file.CreateWriteStream()) regressionEnsembleModel.Save(Env, strm); - IPredictorModel loadedFromSaved; + PredictorModel loadedFromSaved; using (var file = Env.OpenInputFile(modelPath)) using (var strm = file.OpenReadStream()) - loadedFromSaved = new PredictorModel(Env, strm); + loadedFromSaved = new PredictorModelImpl(Env, strm); var scoredFromSaved = ScoreModel.Score(Env, new ScoreModel.Input() @@ -1216,7 +1216,7 @@ public void EntryPointMulticlassPipelineEnsemble() const int nModels = 5; var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = nModels + 1 }); - var predictorModels = new IPredictorModel[nModels]; + var predictorModels = new PredictorModel[nModels]; var individualScores = new IDataView[nModels]; for (int i = 0; i < nModels; i++) { @@ -1233,8 +1233,8 @@ public void EntryPointMulticlassPipelineEnsemble() var mlr = new MulticlassLogisticRegression(Env, "Label", "Features"); var rmd = new RoleMappedData(data, "Label", "Features"); - predictorModels[i] = new PredictorModel(Env, rmd, data, mlr.Train(rmd)); - var transformModel = new TransformModel(Env, data, splitOutput.TrainData[i]); + 
predictorModels[i] = new PredictorModelImpl(Env, rmd, data, mlr.Train(rmd)); + var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, new ModelOperations.SimplePredictorModelInput() @@ -1264,10 +1264,10 @@ public void EntryPointMulticlassPipelineEnsemble() using (var strm = file.CreateWriteStream()) mcEnsembleModel.Save(Env, strm); - IPredictorModel loadedFromSaved; + PredictorModel loadedFromSaved; using (var file = Env.OpenInputFile(modelPath)) using (var strm = file.OpenReadStream()) - loadedFromSaved = new PredictorModel(Env, strm); + loadedFromSaved = new PredictorModelImpl(Env, strm); var scoredFromSaved = ScoreModel.Score(Env, new ScoreModel.Input() @@ -1366,7 +1366,7 @@ public void EntryPointPipelineEnsembleGetSummary() const int nModels = 4; var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = nModels }); - var predictorModels = new IPredictorModel[nModels]; + var predictorModels = new PredictorModel[nModels]; for (int i = 0; i < nModels; i++) { var data = splitOutput.TrainData[i]; @@ -1383,7 +1383,7 @@ public void EntryPointPipelineEnsembleGetSummary() StdComputer = new ComputeLRTrainingStdThroughHal() }; predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel; - var transformModel = new TransformModel(Env, data, splitOutput.TrainData[i]); + var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, new ModelOperations.SimplePredictorModelInput() @@ -1396,7 +1396,7 @@ public void EntryPointPipelineEnsembleGetSummary() RoleMappedSchema.CreatePair(RoleMappedSchema.ColumnRole.Feature, "Features"), RoleMappedSchema.CreatePair(RoleMappedSchema.ColumnRole.Label, "Label")); var predictor = trainer.Train(rmd); - predictorModels[i] = new PredictorModel(Env, rmd, splitOutput.TrainData[i], predictor); + predictorModels[i] = new 
PredictorModelImpl(Env, rmd, splitOutput.TrainData[i], predictor); } } @@ -2637,7 +2637,7 @@ public void EntryPointNormalizeIfNeeded() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); } @@ -2732,7 +2732,7 @@ public void EntryPointTrainTestBinaryMacro() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); var metrics = runner.GetOutput("OverallMetrics"); @@ -2839,7 +2839,7 @@ public void EntryPointTrainTestMacroNoTransformInput() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); var metrics = runner.GetOutput("OverallMetrics"); @@ -2943,7 +2943,7 @@ public void EntryPointTrainTestMacro() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); var metrics = runner.GetOutput("OverallMetrics"); @@ -3103,10 +3103,10 @@ public void EntryPointChainedTrainTestMacros() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); - model = runner.GetOutput("model2"); + model = runner.GetOutput("model2"); Assert.NotNull(model); var metrics = runner.GetOutput("OverallMetrics"); @@ -3297,10 +3297,10 @@ public void EntryPointChainedCrossValMacros() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model[0]); - model = runner.GetOutput("model2"); + model = runner.GetOutput("model2"); Assert.NotNull(model[0]); var metrics = runner.GetOutput("OverallMetrics"); @@ -3476,7 +3476,7 @@ public void EntryPointSerialization() runner.RunAll(); - var model = runner.GetOutput("model"); + var model = runner.GetOutput("model"); Assert.NotNull(model); } diff --git a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs index 
a57f752064..d5d071c0e5 100644 --- a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs +++ b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs @@ -44,7 +44,7 @@ private class TestDataDifferntType public string[] data_0; } - private float[] getSampleArrayData() + private float[] GetSampleArrayData() { var samplevector = new float[inputSize]; for (int i = 0; i < inputSize; i++) @@ -64,9 +64,9 @@ void TestDnnImageFeaturizer() return; - var samplevector = getSampleArrayData(); + var samplevector = GetSampleArrayData(); - var dataView = ComponentCreation.CreateDataView(Env, + var dataView = DataViewConstructionUtils.CreateFromList(Env, new TestData[] { new TestData() { @@ -145,7 +145,7 @@ public void TestOldSavingAndLoading() return; - var samplevector = getSampleArrayData(); + var samplevector = GetSampleArrayData(); var dataView = ComponentCreation.CreateDataView(Env, new TestData[] { diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 315c1d54be..35558a7e0a 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -608,7 +608,7 @@ public void TestTreeEnsembleCombiner() var dataPath = GetDataPath("breast-cancer.txt"); var dataView = ML.Data.ReadFromTextFile(dataPath); - var fastTrees = new IPredictorModel[3]; + var fastTrees = new PredictorModel[3]; for (int i = 0; i < 3; i++) { fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments @@ -631,7 +631,7 @@ public void TestTreeEnsembleCombinerWithCategoricalSplits() var dataView = ML.Data.ReadFromTextFile(dataPath); var cat = new OneHotEncodingEstimator(ML, "Categories", "Features").Fit(dataView).Transform(dataView); - var fastTrees = new IPredictorModel[3]; + var fastTrees = new PredictorModel[3]; for (int i = 0; i < 3; i++) { fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments @@ -647,14 
+647,14 @@ public void TestTreeEnsembleCombinerWithCategoricalSplits() CombineAndTestTreeEnsembles(cat, fastTrees); } - private void CombineAndTestTreeEnsembles(IDataView idv, IPredictorModel[] fastTrees) + private void CombineAndTestTreeEnsembles(IDataView idv, PredictorModel[] fastTrees) { var combiner = new TreeEnsembleCombiner(Env, PredictionKind.BinaryClassification); var fastTree = combiner.CombineModels(fastTrees.Select(pm => pm.Predictor as IPredictorProducing)); var data = new RoleMappedData(idv, label: null, feature: "Features"); - var scored = ScoreModel.Score(Env, new ScoreModel.Input() { Data = idv, PredictorModel = new PredictorModel(Env, data, idv, fastTree) }).ScoredData; + var scored = ScoreModel.Score(Env, new ScoreModel.Input() { Data = idv, PredictorModel = new PredictorModelImpl(Env, data, idv, fastTree) }).ScoredData; Assert.True(scored.Schema.TryGetColumnIndex("Score", out int scoreCol)); Assert.True(scored.Schema.TryGetColumnIndex("Probability", out int probCol)); Assert.True(scored.Schema.TryGetColumnIndex("PredictedLabel", out int predCol)); @@ -731,7 +731,7 @@ public void TestEnsembleCombiner() var dataPath = GetDataPath("breast-cancer.txt"); var dataView = ML.Data.ReadFromTextFile(dataPath); - var predictors = new IPredictorModel[] + var predictors = new PredictorModel[] { FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Arguments { @@ -777,7 +777,7 @@ public void TestMultiClassEnsembleCombiner() var dataPath = GetDataPath("breast-cancer.txt"); var dataView = ML.Data.ReadFromTextFile(dataPath); - var predictors = new IPredictorModel[] + var predictors = new PredictorModel[] { LightGbm.TrainMultiClass(Env, new LightGbmArguments { @@ -808,7 +808,7 @@ public void TestMultiClassEnsembleCombiner() } private void CombineAndTestEnsembles(IDataView idv, string name, string options, PredictionKind predictionKind, - IPredictorModel[] predictors) + PredictorModel[] predictors) { var combiner = ComponentCatalog.CreateInstance( Env, 
typeof(SignatureModelCombiner), name, options, predictionKind); @@ -816,7 +816,7 @@ private void CombineAndTestEnsembles(IDataView idv, string name, string options, var predictor = combiner.CombineModels(predictors.Select(pm => pm.Predictor)); var data = new RoleMappedData(idv, label: null, feature: "Features"); - var scored = ScoreModel.Score(Env, new ScoreModel.Input() { Data = idv, PredictorModel = new PredictorModel(Env, data, idv, predictor) }).ScoredData; + var scored = ScoreModel.Score(Env, new ScoreModel.Input() { Data = idv, PredictorModel = new PredictorModelImpl(Env, data, idv, predictor) }).ScoredData; var predCount = Utils.Size(predictors); diff --git a/test/Microsoft.ML.TestFramework/ModelHelper.cs b/test/Microsoft.ML.TestFramework/ModelHelper.cs index 4692942e83..ff5e42f8a1 100644 --- a/test/Microsoft.ML.TestFramework/ModelHelper.cs +++ b/test/Microsoft.ML.TestFramework/ModelHelper.cs @@ -15,7 +15,7 @@ namespace Microsoft.ML.TestFramework public static class ModelHelper { private static MLContext s_environment = new MLContext(seed: 1); - private static ITransformModel s_housePriceModel; + private static TransformModel s_housePriceModel; public static void WriteKcHousePriceModel(string dataPath, string outputModelPath) { @@ -71,7 +71,7 @@ public static IDataView GetKcHouseDataView(string dataPath) ); } - private static ITransformModel CreateKcHousePricePredictorModel(string dataPath) + private static TransformModel CreateKcHousePricePredictorModel(string dataPath) { Experiment experiment = s_environment.CreateExperiment(); var importData = new Legacy.Data.TextLoader(dataPath) @@ -263,7 +263,7 @@ private static ITransformModel CreateKcHousePricePredictorModel(string dataPath) Legacy.Trainers.StochasticDualCoordinateAscentRegressor.Output learnerOutput = experiment.Add(learner); var combineModels = new Legacy.Transforms.ManyHeterogeneousModelCombiner(); - combineModels.TransformModels = new ArrayVar(numericalConcatenated.Model, 
categoryConcatenated.Model, categorized.Model, featuresConcatenated.Model); + combineModels.TransformModels = new ArrayVar(numericalConcatenated.Model, categoryConcatenated.Model, categorized.Model, featuresConcatenated.Model); combineModels.PredictorModel = learnerOutput.PredictorModel; Legacy.Transforms.ManyHeterogeneousModelCombiner.Output combinedModels = experiment.Add(combineModels); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 667d3c6619..e23690cf5e 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -113,7 +113,7 @@ private ClassificationMetrics Evaluate(IHostEnvironment env, IDataView scoredDat // It does not work. It throws error "Failed to find 'Score' column" when Evaluate is called //var evaluator = new MultiClassClassifierEvaluator(env, new MultiClassClassifierEvaluator.Arguments() { OutputTopKAcc = 3 }); - var evaluator = new MultiClassMamlEvaluator(env, new MultiClassMamlEvaluator.Arguments() { OutputTopKAcc = 3 }); + IMamlEvaluator evaluator = new MultiClassMamlEvaluator(env, new MultiClassMamlEvaluator.Arguments() { OutputTopKAcc = 3 }); var metricsDic = evaluator.Evaluate(dataEval); return ClassificationMetrics.FromMetrics(env, metricsDic["OverallMetrics"], metricsDic["ConfusionMatrix"])[0]; diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs index 87732048ff..dd796b0b59 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/SentimentPredictionTests.cs @@ -153,7 +153,7 @@ private 
Microsoft.ML.Legacy.Models.BinaryClassificationMetrics EvaluateBinary(IH // It does not work. It throws error "Failed to find 'Score' column" when Evaluate is called //var evaluator = new BinaryClassifierEvaluator(env, new BinaryClassifierEvaluator.Arguments()); - var evaluator = new BinaryClassifierMamlEvaluator(env, new BinaryClassifierMamlEvaluator.Arguments()); + IMamlEvaluator evaluator = new BinaryClassifierMamlEvaluator(env, new BinaryClassifierMamlEvaluator.Arguments()); var metricsDic = evaluator.Evaluate(dataEval); return Microsoft.ML.Legacy.Models.BinaryClassificationMetrics From 616dca28a1ec83c5760b492066a7f71e6dca3de8 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Thu, 13 Dec 2018 13:45:10 -0800 Subject: [PATCH 053/100] Remove ISchema in FakeSchema (#1872) * Remove ISchema in FakeSchema * Address comments * Cleaning --- .../DataLoadSave/FakeSchema.cs | 104 ++++++------------ .../DataLoadSave/TransformWrapper.cs | 4 +- 2 files changed, 34 insertions(+), 74 deletions(-) diff --git a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs index 604fe3c2b7..296dc69a1d 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/FakeSchema.cs @@ -3,108 +3,68 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Core.Data; -using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; -using System.Collections.Generic; -using System.Linq; +using System; namespace Microsoft.ML.Data.DataLoadSave { - /// /// A fake schema that is manufactured out of a SchemaShape. /// It will pretend that all vector sizes are equal to 10, all key value counts are equal to 10, /// and all values are defaults (for metadata). 
/// - internal sealed class FakeSchema : ISchema + internal static class FakeSchemaFactory { private const int AllVectorSizes = 10; private const int AllKeySizes = 10; - private readonly IHostEnvironment _env; - private readonly SchemaShape _shape; - private readonly Dictionary _colMap; - - public FakeSchema(IHostEnvironment env, SchemaShape inputShape) + public static Schema Create(SchemaShape shape) { - _env = env; - _shape = inputShape; - _colMap = Enumerable.Range(0, _shape.Count) - .ToDictionary(idx => _shape[idx].Name, idx => idx); - } + var builder = new SchemaBuilder(); - public int ColumnCount => _shape.Count; - - public string GetColumnName(int col) - { - _env.Check(0 <= col && col < ColumnCount); - return _shape[col].Name; + for (int i = 0; i < shape.Count; ++i) + { + var metaBuilder = new MetadataBuilder(); + var partialMetadata = shape[i].Metadata; + for (int j = 0; j < partialMetadata.Count; ++j) + { + var metaColumnType = MakeColumnType(partialMetadata[i]); + Delegate del; + if (metaColumnType.IsVector) + del = Utils.MarshalInvoke(GetDefaultVectorGetter, metaColumnType.ItemType.RawType); + else + del = Utils.MarshalInvoke(GetDefaultGetter, metaColumnType.RawType); + metaBuilder.Add(partialMetadata[j].Name, metaColumnType, del); + } + builder.AddColumn(shape[i].Name, MakeColumnType(shape[i])); + } + return builder.GetSchema(); } - public ColumnType GetColumnType(int col) + private static ColumnType MakeColumnType(SchemaShape.Column column) { - _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape[col]; - return MakeColumnType(inputCol); - } - - public bool TryGetColumnIndex(string name, out int col) => _colMap.TryGetValue(name, out col); - - private static ColumnType MakeColumnType(SchemaShape.Column inputCol) - { - ColumnType curType = inputCol.ItemType; - if (inputCol.IsKey) + ColumnType curType = column.ItemType; + if (column.IsKey) curType = new KeyType(((PrimitiveType)curType).RawKind, 0, AllKeySizes); - if (inputCol.Kind == 
SchemaShape.Column.VectorKind.VariableVector) + if (column.Kind == SchemaShape.Column.VectorKind.VariableVector) curType = new VectorType((PrimitiveType)curType, 0); - else if (inputCol.Kind == SchemaShape.Column.VectorKind.Vector) + else if (column.Kind == SchemaShape.Column.VectorKind.Vector) curType = new VectorType((PrimitiveType)curType, AllVectorSizes); return curType; } - public void GetMetadata(string kind, int col, ref TValue value) + private static Delegate GetDefaultVectorGetter() { - _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape[col]; - var metaShape = inputCol.Metadata; - if (metaShape == null || !metaShape.TryFindColumn(kind, out var metaColumn)) - throw _env.ExceptGetMetadata(); - - var colType = MakeColumnType(metaColumn); - _env.Check(colType.RawType.Equals(typeof(TValue))); - - if (colType.IsVector) - { - // This as an atypical use of VBuffer: we create it in GetMetadataVec, and then pass through - // via boxing to be returned out of this method. This is intentional. 
- value = (TValue)Utils.MarshalInvoke(GetMetadataVec, colType.ItemType.RawType); - } - else - value = default; + ValueGetter> getter = (ref VBuffer value) => value = new VBuffer(AllVectorSizes, 0, null, null); + return getter; } - private object GetMetadataVec() => new VBuffer(AllVectorSizes, 0, null, null); - - public ColumnType GetMetadataTypeOrNull(string kind, int col) + private static Delegate GetDefaultGetter() { - _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape[col]; - var metaShape = inputCol.Metadata; - if (metaShape == null || !metaShape.TryFindColumn(kind, out var metaColumn)) - return null; - return MakeColumnType(metaColumn); + ValueGetter getter = (ref TValue value) => value = default; + return getter; } - public IEnumerable> GetMetadataTypes(int col) - { - _env.Check(0 <= col && col < ColumnCount); - var inputCol = _shape[col]; - var metaShape = inputCol.Metadata; - if (metaShape == null) - return Enumerable.Empty>(); - - return metaShape.Select(c => new KeyValuePair(c.Name, MakeColumnType(c))); - } } } diff --git a/src/Microsoft.ML.Data/DataLoadSave/TransformWrapper.cs b/src/Microsoft.ML.Data/DataLoadSave/TransformWrapper.cs index 863594ce5a..82ffcaa81c 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/TransformWrapper.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/TransformWrapper.cs @@ -160,7 +160,7 @@ public SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var fakeSchema = Schema.Create(new FakeSchema(Host, inputSchema)); + var fakeSchema = FakeSchemaFactory.Create(inputSchema); var transformer = Fit(new EmptyDataView(Host, fakeSchema)); return SchemaShape.Create(transformer.GetOutputSchema(fakeSchema)); } @@ -179,7 +179,7 @@ protected TrivialWrapperEstimator(IHost host, TransformWrapper transformer) public override SchemaShape GetOutputSchema(SchemaShape inputSchema) { Host.CheckValue(inputSchema, nameof(inputSchema)); - var fakeSchema = Schema.Create(new FakeSchema(Host, 
inputSchema)); + var fakeSchema = FakeSchemaFactory.Create(inputSchema); return SchemaShape.Create(Transformer.GetOutputSchema(fakeSchema)); } } From 1f3cfc8e7cc4efe990b85b678edca506273b81b3 Mon Sep 17 00:00:00 2001 From: Anirudh Agnihotry Date: Fri, 14 Dec 2018 11:10:35 -0800 Subject: [PATCH 054/100] Loading the label for binary classfication as BL instead of R4 (#1883) --- .../Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs index 1adf7d4065..04e735fb37 100644 --- a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs +++ b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs @@ -19,7 +19,7 @@ public ParameterMixingCalibratedPredictor TrainKMeansAndLR() // Pipeline var input = ml.Data.ReadFromTextFile(_dataPath, new[] { - new TextLoader.Column("Label", DataKind.R4, 0), + new TextLoader.Column("Label", DataKind.BL, 0), new TextLoader.Column("CatFeatures", DataKind.TX, new [] { new TextLoader.Range() { Min = 1, Max = 8 }, From 4002e71781f1a2796f5b7b143d4648e56894621a Mon Sep 17 00:00:00 2001 From: jignparm Date: Fri, 14 Dec 2018 19:27:36 +0000 Subject: [PATCH 055/100] Updated fix for models with negative dimensions (#1869) * Updated fix for models with negative dimensions * convert 0 dims to 1. 
* added unit tests for model with -1 (unknown) for some dimensions * minor change to kick off build --- .../OnnxTransform.cs | 21 +++------- .../Microsoft.ML.OnnxTransformTest.csproj | 2 +- .../OnnxTransformTests.cs | 40 +++++++++++++++++++ 3 files changed, 47 insertions(+), 16 deletions(-) diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index bfca634045..c52b2a6c8e 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -182,7 +182,7 @@ private OnnxTransform(IHostEnvironment env, Arguments args, byte[] modelBytes = var outputNodeInfo = Model.ModelInfo.OutputsInfo[idx]; var shape = outputNodeInfo.Shape; var dims = AdjustDimensions(shape); - OutputTypes[i] = new VectorType(OnnxUtils.OnnxToMlNetType(outputNodeInfo.Type), dims); + OutputTypes[i] = new VectorType(OnnxUtils.OnnxToMlNetType(outputNodeInfo.Type), dims.ToArray()); } _args = args; } @@ -223,25 +223,16 @@ public override void Save(ModelSaveContext ctx) } private protected override IRowMapper MakeRowMapper(Schema inputSchema) => new Mapper(this, inputSchema); - private static int[] AdjustDimensions(OnnxShape shape) + private static IEnumerable AdjustDimensions(OnnxShape shape) { // if the model output is of type Map or Sequence, the shape property // will not be filled (so count=0). Don't throw an exception here // it will be runtime exception, util Maps and Sequences become supported. if (shape.Count > 0) { - // some models may have -1 in first position. - // skip this dimension when setting output column dimensions. - if (shape[0] < 0) - { - return shape.Skip(1).Select(x => (int)x).ToArray(); - } - else - { - return shape.Select(x => (int)x).ToArray(); - } + return shape.Select(x => (x <= 0) ? 
1 : x); } - return new[] { 0 }; + return new[] { 1 }; } private sealed class Mapper : MapperBase @@ -274,8 +265,8 @@ public Mapper(OnnxTransform parent, Schema inputSchema) : var shape = inputNodeInfo.Shape; var inputType = OnnxUtils.OnnxToMlNetType(inputNodeInfo.Type); - var inputShape = inputNodeInfo.Shape; - _inputTensorShapes[i] = inputShape; + var inputShape = AdjustDimensions(inputNodeInfo.Shape); + _inputTensorShapes[i] = inputShape.ToList(); _inputOnnxTypes[i] = inputNodeInfo.Type; if (!inputSchema.TryGetColumnIndex(_parent.Inputs[i], out _inputColIndices[i])) diff --git a/test/Microsoft.ML.OnnxTransformTest/Microsoft.ML.OnnxTransformTest.csproj b/test/Microsoft.ML.OnnxTransformTest/Microsoft.ML.OnnxTransformTest.csproj index ebc245df6e..68d4b3f192 100644 --- a/test/Microsoft.ML.OnnxTransformTest/Microsoft.ML.OnnxTransformTest.csproj +++ b/test/Microsoft.ML.OnnxTransformTest/Microsoft.ML.OnnxTransformTest.csproj @@ -9,7 +9,7 @@ - + diff --git a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs index b0ab758720..6fb551a5ac 100644 --- a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs @@ -46,17 +46,31 @@ private class TestDataSize [VectorType(2)] public float[] data_0; } + private class TestDataXY { [VectorType(inputSize)] public float[] A; } + private class TestDataDifferntType { [VectorType(inputSize)] public string[] data_0; } + private class TestDataUnknownDimensions + { + [VectorType(3)] + public float[] input; + } + + class PredictionUnknownDimensions + { + [VectorType(1)] + public long[] argmax { get; set; } + } + private float[] GetSampleArrayData() { var samplevector = new float[inputSize]; @@ -308,6 +322,32 @@ public void OnnxModelMultiInput() } } } + + [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + public void TestUnknownDimensions() + { + if 
(!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + return; + + // model contains -1 in input and output shape dimensions + // model: input dims = [-1, 3], output argmax dims = [-1] + var modelFile = @"unknowndimensions/test_unknowndimensions_float.onnx"; + var mlContext = new MLContext(); + var data = new TestDataUnknownDimensions[] + { + new TestDataUnknownDimensions(){input = new float[] {1.1f, 1.3f, 1.2f }}, + new TestDataUnknownDimensions(){input = new float[] {-1.1f, -1.3f, -1.2f }}, + new TestDataUnknownDimensions(){input = new float[] {-1.1f, -1.3f, 1.2f }}, + }; + var idv = mlContext.CreateStreamingDataView(data); + var pipeline = new OnnxScoringEstimator(mlContext, modelFile); + var transformedValues = pipeline.Fit(idv).Transform(idv); + var predictions = transformedValues.AsEnumerable(mlContext, reuseRowObject: false).ToArray(); + + Assert.Equal(1, predictions[0].argmax[0]); + Assert.Equal(0, predictions[1].argmax[0]); + Assert.Equal(2, predictions[2].argmax[0]); + } } } From a01cacada95235318d42762def0153ada62ff3cd Mon Sep 17 00:00:00 2001 From: Senja Filipi Date: Fri, 14 Dec 2018 13:22:57 -0800 Subject: [PATCH 056/100] Calibrator estimators (#1816) * introducing calibrator estimators and calibrator transformers in a clean branch --- .../Dynamic/Calibrator.cs | 114 ++++ src/Microsoft.ML.Core/Utilities/Utils.cs | 16 +- src/Microsoft.ML.Data/Dirty/IniFileUtils.cs | 4 +- .../Dirty/PredictorInterfaces.cs | 2 +- .../Model/Onnx/ICanSaveOnnx.cs | 3 +- .../Model/Pfa/ICanSavePfa.cs | 3 +- .../Prediction/Calibrator.cs | 635 ++++++++++-------- .../Prediction/CalibratorCatalog.cs | 462 +++++++++++++ .../Prediction/IPredictionTransformer.cs | 10 +- .../Scorers/PredictionTransformer.cs | 2 +- .../Training/ITrainerEstimator.cs | 6 +- .../Training/TrainerUtils.cs | 8 +- src/Microsoft.ML.FastTree/FastTree.cs | 1 + .../FastTreeClassification.cs | 1 + src/Microsoft.ML.FastTree/FastTreeRanking.cs | 4 +- .../FastTreeRegression.cs | 4 +- 
src/Microsoft.ML.FastTree/FastTreeTweedie.cs | 4 +- .../GamClassification.cs | 1 + src/Microsoft.ML.FastTree/GamRegression.cs | 4 +- .../RandomForestClassification.cs | 1 + .../RandomForestRegression.cs | 4 +- .../TreeEnsemble/TreeEnsembleCombiner.cs | 2 +- .../OlsLinearRegression.cs | 2 +- .../LightGbmBinaryTrainer.cs | 1 + .../LightGbmRankingTrainer.cs | 4 +- .../LightGbmRegressionTrainer.cs | 4 +- .../SamplesDatasetUtils.cs | 2 +- .../Standard/LinearModelParameters.cs | 1 + .../Standard/MultiClass/Ova.cs | 2 +- .../Standard/MultiClass/Pkpd.cs | 2 +- .../Standard/Online/OnlineGradientDescent.cs | 2 +- .../PoissonRegression/PoissonRegression.cs | 4 +- .../Standard/SdcaBinary.cs | 104 +-- .../Standard/SdcaRegression.cs | 4 +- .../TrainerEstimators/CalibratorEstimators.cs | 144 ++++ .../TrainerEstimators/TrainerEstimators.cs | 1 - .../NameAnalyzer.cs | 4 +- 37 files changed, 1176 insertions(+), 396 deletions(-) create mode 100644 docs/samples/Microsoft.ML.Samples/Dynamic/Calibrator.cs create mode 100644 src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs create mode 100644 test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Calibrator.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Calibrator.cs new file mode 100644 index 0000000000..96339d44fa --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Calibrator.cs @@ -0,0 +1,114 @@ +using Microsoft.ML.Calibrator; +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; +using System; +using System.Linq; + +namespace Microsoft.ML.Samples.Dynamic +{ + /// + /// This example first trains a StochasticDualCoordinateAscentBinary Classifier and then convert its output to probability via training a calibrator. + /// + public class CalibratorExample + { + public static void Calibration() + { + // Downloading the dataset from github.com/dotnet/machinelearning. + // This will create a sentiment.tsv file in the filesystem. 
+ // The string, dataFile, is the path to the downloaded file. + // You can open this file, if you want to see the data. + string dataFile = SamplesUtils.DatasetUtils.DownloadSentimentDataset(); + + // A preview of the data. + // Sentiment SentimentText + // 0 " :Erm, thank you. " + // 1 ==You're cool== + + // Create a new context for ML.NET operations. It can be used for exception tracking and logging, + // as a catalog of available operations and as the source of randomness. + var mlContext = new MLContext(); + + // Create a text loader. + var reader = mlContext.Data.CreateTextReader(new TextLoader.Arguments() + { + Separator = "tab", + HasHeader = true, + Column = new[] + { + new TextLoader.Column("Sentiment", DataKind.BL, 0), + new TextLoader.Column("SentimentText", DataKind.Text, 1) + } + }); + + // Read the data + var data = reader.Read(dataFile); + + // Split the dataset into two parts: one used for training, the other to train the calibrator + var (trainData, calibratorTrainingData) = mlContext.BinaryClassification.TrainTestSplit(data, testFraction: 0.1); + + // Featurize the text column through the FeaturizeText API. + // Then append the StochasticDualCoordinateAscentBinary binary classifier, setting the "Label" column as the label of the dataset, and + // the "Features" column produced by FeaturizeText as the features column. + var pipeline = mlContext.Transforms.Text.FeaturizeText("SentimentText", "Features") + .Append(mlContext.BinaryClassification.Trainers.StochasticDualCoordinateAscent( + labelColumn: "Sentiment", + featureColumn: "Features", + l2Const: 0.001f, + loss: new HingeLoss())); // By specifying loss: new HingeLoss(), StochasticDualCoordinateAscent will train a support vector machine (SVM). + + // Fit the pipeline, and get a transformer that knows how to score new data. + var transformer = pipeline.Fit(trainData); + IPredictor model = transformer.LastTransformer.Model; + + // Let's score the new data. 
The score will give us a numerical estimation of the chance that the particular sample + // bears positive sentiment. This estimate is relative to the numbers obtained. + var scoredData = transformer.Transform(calibratorTrainingData); + var scoredDataPreview = scoredData.Preview(); + + PrintRowViewValues(scoredDataPreview); + // Preview of scoredDataPreview.RowView + // + // Score - 0.458968 + // Score - 0.7022135 + // Score 1.138822 + // Score 0.4807112 + // Score 1.112813 + + // Let's train a calibrator estimator on this scored dataset. The trained calibrator estimator produces a transformer + // that can transform the scored data by adding a new column names "Probability". + var calibratorEstimator = new PlattCalibratorEstimator(mlContext, model, "Sentiment", "Features"); + var calibratorTransformer = calibratorEstimator.Fit(scoredData); + + // Transform the scored data with a calibrator transfomer by adding a new column names "Probability". + // This column is a calibrated version of the "Score" column, meaning its values are a valid probability value in the [0, 1] interval + // representing the chance that the respective sample bears positive sentiment. 
+ var finalData = calibratorTransformer.Transform(scoredData).Preview(); + + PrintRowViewValues(finalData); + + //Preview of finalData.RowView + // + // Score - 0.458968 Probability 0.4670409 + // Score - 0.7022135 Probability 0.3912723 + // Score 1.138822 Probability 0.8703266 + // Score 0.4807112 Probability 0.7437012 + // Score 1.112813 Probability 0.8665403 + + } + + private static void PrintRowViewValues(Data.DataDebuggerPreview data) + { + var firstRows = data.RowView.Take(5); + + foreach(Data.DataDebuggerPreview.RowInfo row in firstRows) + { + foreach (var kvPair in row.Values) + { + if (kvPair.Key.Equals("Score") || kvPair.Key.Equals("Probability")) + Console.Write($" {kvPair.Key} {kvPair.Value} "); + } + Console.WriteLine(); + } + } + } +} diff --git a/src/Microsoft.ML.Core/Utilities/Utils.cs b/src/Microsoft.ML.Core/Utilities/Utils.cs index 971c5fb1d1..a84e1b31ee 100644 --- a/src/Microsoft.ML.Core/Utilities/Utils.cs +++ b/src/Microsoft.ML.Core/Utilities/Utils.cs @@ -218,10 +218,10 @@ public static int FindIndexSorted(this IList input, int value) /// In case of duplicates it returns the index of the first one. /// It guarantees that items before the returned index are < value, while those at and after the returned index are >= value. /// - public static int FindIndexSorted(this Single[] input, Single value) + public static int FindIndexSorted(this IList input, float value) { Contracts.AssertValue(input); - return FindIndexSorted(input, 0, input.Length, value); + return FindIndexSorted(input, 0, input.Count, value); } /// @@ -342,11 +342,11 @@ public static int FindIndexSorted(this IList input, int min, int lim, int v /// In case of duplicates it returns the index of the first one. /// It guarantees that items before the returned index are < value, while those at and after the returned index are >= value. 
/// - public static int FindIndexSorted(this Single[] input, int min, int lim, Single value) + public static int FindIndexSorted(this IList input, int min, int lim, float value) { Contracts.AssertValue(input); - Contracts.Assert(0 <= min & min <= lim & lim <= input.Length); - Contracts.Assert(!Single.IsNaN(value)); + Contracts.Assert(0 <= min & min <= lim & lim <= input.Count); + Contracts.Assert(!float.IsNaN(value)); int minCur = min; int limCur = lim; @@ -354,7 +354,7 @@ public static int FindIndexSorted(this Single[] input, int min, int lim, Single { int mid = (int)(((uint)minCur + (uint)limCur) / 2); Contracts.Assert(minCur <= mid & mid < limCur); - Contracts.Assert(!Single.IsNaN(input[mid])); + Contracts.Assert(!float.IsNaN(input[mid])); if (input[mid] >= value) limCur = mid; @@ -615,14 +615,14 @@ public static string ExtractLettersAndNumbers(string value) return Regex.Replace(value, "[^A-Za-z0-9]", ""); } - public static bool IsSorted(Float[] values) + public static bool IsSorted(IList values) { if (Utils.Size(values) <= 1) return true; var prev = values[0]; - for (int i = 1; i < values.Length; i++) + for (int i = 1; i < values.Count; i++) { if (!(values[i] >= prev)) return false; diff --git a/src/Microsoft.ML.Data/Dirty/IniFileUtils.cs b/src/Microsoft.ML.Data/Dirty/IniFileUtils.cs index 782c07af01..542048825f 100644 --- a/src/Microsoft.ML.Data/Dirty/IniFileUtils.cs +++ b/src/Microsoft.ML.Data/Dirty/IniFileUtils.cs @@ -52,10 +52,10 @@ public static string GetCalibratorEvaluatorIni(string originalIni, PlattCalibrat StringBuilder newEvaluator = new StringBuilder(); newEvaluator.AppendLine("EvaluatorType=Aggregator"); newEvaluator.AppendLine("Type=Sigmoid"); - newEvaluator.AppendLine("Bias=" + -calibrator.ParamB); + newEvaluator.AppendLine("Bias=" + -calibrator.Offset); newEvaluator.AppendLine("NumNodes=1"); newEvaluator.AppendLine("Nodes=E:" + NumEvaluators(originalIni)); - newEvaluator.AppendLine("Weights=" + -calibrator.ParamA); + 
newEvaluator.AppendLine("Weights=" + -calibrator.Slope); return newEvaluator.ToString(); } } diff --git a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs index f5dbc7f470..312603b949 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictorInterfaces.cs @@ -6,7 +6,7 @@ using System.Collections.Generic; using System.IO; using Microsoft.ML.Runtime.Data; -using Microsoft.ML.Runtime.Internal.Calibration; +using Microsoft.ML.Calibrator; namespace Microsoft.ML.Runtime.Internal.Internallearn { diff --git a/src/Microsoft.ML.Data/Model/Onnx/ICanSaveOnnx.cs b/src/Microsoft.ML.Data/Model/Onnx/ICanSaveOnnx.cs index dfc8756303..5e3dce06be 100644 --- a/src/Microsoft.ML.Data/Model/Onnx/ICanSaveOnnx.cs +++ b/src/Microsoft.ML.Data/Model/Onnx/ICanSaveOnnx.cs @@ -3,6 +3,7 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Calibrator; namespace Microsoft.ML.Runtime.Model.Onnx { @@ -68,7 +69,7 @@ internal interface IBindableCanSaveOnnx : ICanSaveOnnx, ISchemaBindableMapper /// /// For simple mappers. Intended to be used for and - /// instances. + /// instances. /// [BestFriend] internal interface ISingleCanSaveOnnx : ICanSaveOnnx diff --git a/src/Microsoft.ML.Data/Model/Pfa/ICanSavePfa.cs b/src/Microsoft.ML.Data/Model/Pfa/ICanSavePfa.cs index bcda835fe8..e442960b40 100644 --- a/src/Microsoft.ML.Data/Model/Pfa/ICanSavePfa.cs +++ b/src/Microsoft.ML.Data/Model/Pfa/ICanSavePfa.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Calibrator; using Microsoft.ML.Runtime.Data; using Newtonsoft.Json.Linq; @@ -72,7 +73,7 @@ internal interface IBindableCanSavePfa : ICanSavePfa, ISchemaBindableMapper /// /// For simple mappers. Intended to be used for and - /// instances. + /// instances. 
/// [BestFriend] internal interface ISingleCanSavePfa : ICanSavePfa diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index ecc0c5501f..0efeac16f1 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Calibrator; using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; @@ -17,9 +18,9 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Collections.Immutable; using System.IO; using System.Linq; -using Float = System.Single; [assembly: LoadableClass(PlattCalibratorTrainer.Summary, typeof(PlattCalibratorTrainer), null, typeof(SignatureCalibrator), PlattCalibratorTrainer.UserName, @@ -100,34 +101,24 @@ public interface ICalibratorTrainer /// Training calibrators: provide the output and the class label /// True if it needs more examples, false otherwise - bool ProcessTrainingExample(Float output, bool labelIs1, Float weight); + bool ProcessTrainingExample(float output, bool labelIs1, float weight); /// Finish up training after seeing all examples ICalibrator FinishTraining(IChannel ch); } - /// - /// An interface for probability calibrators. - /// - public interface ICalibrator - { - /// Given a classifier output, produce the probability - Float PredictProbability(Float output); - - /// Get the summary of current calibrator settings - string GetSummary(); - } - /// /// An interface for predictors that take care of their own calibration given an input data view. 
/// - public interface ISelfCalibratingPredictor + [BestFriend] + internal interface ISelfCalibratingPredictor { IPredictor Calibrate(IChannel ch, IDataView data, ICalibratorTrainer caliTrainer, int maxRows); } + [BestFriend] public abstract class CalibratedPredictorBase : - IDistPredictorProducing, + IDistPredictorProducing, ICanSaveInIniFormat, ICanSaveInTextFormat, ICanSaveInSourceCode, @@ -136,11 +127,11 @@ public abstract class CalibratedPredictorBase : { protected readonly IHost Host; - public IPredictorProducing SubPredictor { get; } + public IPredictorProducing SubPredictor { get; } public ICalibrator Calibrator { get; } public PredictionKind PredictionKind => SubPredictor.PredictionKind; - protected CalibratedPredictorBase(IHostEnvironment env, string name, IPredictorProducing predictor, ICalibrator calibrator) + protected CalibratedPredictorBase(IHostEnvironment env, string name, IPredictorProducing predictor, ICalibrator calibrator) { Contracts.CheckValue(env, nameof(env)); env.CheckNonWhiteSpace(name, nameof(name)); @@ -200,10 +191,10 @@ protected void SaveCore(ModelSaveContext ctx) ctx.SaveModel(Calibrator, @"Calibrator"); } - protected static IPredictorProducing GetPredictor(IHostEnvironment env, ModelLoadContext ctx) + protected static IPredictorProducing GetPredictor(IHostEnvironment env, ModelLoadContext ctx) { - IPredictorProducing predictor; - ctx.LoadModel, SignatureLoadModel>(env, out predictor, ModelFileUtils.DirPredictor); + IPredictorProducing predictor; + ctx.LoadModel, SignatureLoadModel>(env, out predictor, ModelFileUtils.DirPredictor); return predictor; } @@ -227,14 +218,14 @@ public abstract class ValueMapperCalibratedPredictorBase : CalibratedPredictorBa bool ICanSavePfa.CanSavePfa => (_mapper as ICanSavePfa)?.CanSavePfa == true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => (_mapper as ICanSaveOnnx)?.CanSaveOnnx(ctx) == true; - protected ValueMapperCalibratedPredictorBase(IHostEnvironment env, string name, IPredictorProducing 
predictor, ICalibrator calibrator) + protected ValueMapperCalibratedPredictorBase(IHostEnvironment env, string name, IPredictorProducing predictor, ICalibrator calibrator) : base(env, name, predictor, calibrator) { Contracts.AssertValue(Host); _mapper = SubPredictor as IValueMapper; Host.Check(_mapper != null, "The predictor does not implement IValueMapper"); - Host.Check(_mapper.OutputType == NumberType.Float, "The output type of the predictor is expected to be Float"); + Host.Check(_mapper.OutputType == NumberType.Float, "The output type of the predictor is expected to be float"); _featureContribution = predictor as IFeatureContributionMapper; } @@ -246,11 +237,11 @@ ValueMapper IValueMapper.GetMapper() ValueMapper IValueMapperDist.GetMapper() { - Host.Check(typeof(TOut) == typeof(Float)); - Host.Check(typeof(TDist) == typeof(Float)); - var map = ((IValueMapper)this).GetMapper(); - ValueMapper del = - (in TIn src, ref Float score, ref Float prob) => + Host.Check(typeof(TOut) == typeof(float)); + Host.Check(typeof(TDist) == typeof(float)); + var map = ((IValueMapper)this).GetMapper(); + ValueMapper del = + (in TIn src, ref float score, ref float prob) => { map(in src, ref score); prob = Calibrator.PredictProbability(score); @@ -258,7 +249,7 @@ ValueMapper IValueMapperDist.GetMapper() return (ValueMapper)(Delegate)del; } - ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { // REVIEW: checking this a bit too late. 
Host.Check(_featureContribution != null, "Predictor does not implement IFeatureContributionMapper"); @@ -318,15 +309,17 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] outputNames, string } } - public sealed class CalibratedPredictor : ValueMapperCalibratedPredictorBase, ICanSaveModel + + [BestFriend] + internal sealed class CalibratedPredictor : ValueMapperCalibratedPredictorBase, ICanSaveModel { - public CalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator calibrator) + internal CalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator calibrator) : base(env, RegistrationName, predictor, calibrator) { } - public const string LoaderSignature = "CaliPredExec"; - public const string RegistrationName = "CalibratedPredictor"; + internal const string LoaderSignature = "CaliPredExec"; + internal const string RegistrationName = "CalibratedPredictor"; private static VersionInfo GetVersionInfo() { @@ -354,7 +347,7 @@ private CalibratedPredictor(IHostEnvironment env, ModelLoadContext ctx) { } - public static CalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static CalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(ctx, nameof(ctx)); // Can load either the old "bulk" model or standard "cali". The two formats are identical. 
@@ -374,22 +367,23 @@ public void Save(ModelSaveContext ctx) } } - public sealed class FeatureWeightsCalibratedPredictor : + [BestFriend] + internal sealed class FeatureWeightsCalibratedPredictor : ValueMapperCalibratedPredictorBase, - IPredictorWithFeatureWeights, + IPredictorWithFeatureWeights, ICanSaveModel { - private readonly IPredictorWithFeatureWeights _featureWeights; + private readonly IPredictorWithFeatureWeights _featureWeights; - public FeatureWeightsCalibratedPredictor(IHostEnvironment env, IPredictorWithFeatureWeights predictor, + internal FeatureWeightsCalibratedPredictor(IHostEnvironment env, IPredictorWithFeatureWeights predictor, ICalibrator calibrator) : base(env, RegistrationName, predictor, calibrator) { _featureWeights = predictor; } - public const string LoaderSignature = "FeatWCaliPredExec"; - public const string RegistrationName = "FeatureWeightsCalibratedPredictor"; + internal const string LoaderSignature = "FeatWCaliPredExec"; + internal const string RegistrationName = "FeatureWeightsCalibratedPredictor"; private static VersionInfo GetVersionInfo() { @@ -405,11 +399,11 @@ private static VersionInfo GetVersionInfo() private FeatureWeightsCalibratedPredictor(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, GetPredictor(env, ctx), GetCalibrator(env, ctx)) { - Host.Check(SubPredictor is IPredictorWithFeatureWeights, "Predictor does not implement " + nameof(IPredictorWithFeatureWeights)); - _featureWeights = (IPredictorWithFeatureWeights)SubPredictor; + Host.Check(SubPredictor is IPredictorWithFeatureWeights, "Predictor does not implement " + nameof(IPredictorWithFeatureWeights)); + _featureWeights = (IPredictorWithFeatureWeights)SubPredictor; } - public static FeatureWeightsCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static FeatureWeightsCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); 
@@ -425,7 +419,7 @@ public void Save(ModelSaveContext ctx) SaveCore(ctx); } - public void GetFeatureWeights(ref VBuffer weights) + public void GetFeatureWeights(ref VBuffer weights) { _featureWeights.GetFeatureWeights(ref weights); } @@ -437,22 +431,22 @@ public void GetFeatureWeights(ref VBuffer weights) /// public sealed class ParameterMixingCalibratedPredictor : ValueMapperCalibratedPredictorBase, - IParameterMixer, - IPredictorWithFeatureWeights, + IParameterMixer, + IPredictorWithFeatureWeights, ICanSaveModel { - private readonly IPredictorWithFeatureWeights _featureWeights; + private readonly IPredictorWithFeatureWeights _featureWeights; - public ParameterMixingCalibratedPredictor(IHostEnvironment env, IPredictorWithFeatureWeights predictor, ICalibrator calibrator) + internal ParameterMixingCalibratedPredictor(IHostEnvironment env, IPredictorWithFeatureWeights predictor, ICalibrator calibrator) : base(env, RegistrationName, predictor, calibrator) { - Host.Check(predictor is IParameterMixer, "Predictor does not implement " + nameof(IParameterMixer)); + Host.Check(predictor is IParameterMixer, "Predictor does not implement " + nameof(IParameterMixer)); Host.Check(calibrator is IParameterMixer, "Calibrator does not implement " + nameof(IParameterMixer)); _featureWeights = predictor; } - public const string LoaderSignature = "PMixCaliPredExec"; - public const string RegistrationName = "ParameterMixingCalibratedPredictor"; + internal const string LoaderSignature = "PMixCaliPredExec"; + internal const string RegistrationName = "ParameterMixingCalibratedPredictor"; private static VersionInfo GetVersionInfo() { @@ -468,12 +462,12 @@ private static VersionInfo GetVersionInfo() private ParameterMixingCalibratedPredictor(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, GetPredictor(env, ctx), GetCalibrator(env, ctx)) { - Host.Check(SubPredictor is IParameterMixer, "Predictor does not implement " + nameof(IParameterMixer)); - 
Host.Check(SubPredictor is IPredictorWithFeatureWeights, "Predictor does not implement " + nameof(IPredictorWithFeatureWeights)); - _featureWeights = (IPredictorWithFeatureWeights)SubPredictor; + Host.Check(SubPredictor is IParameterMixer, "Predictor does not implement " + nameof(IParameterMixer)); + Host.Check(SubPredictor is IPredictorWithFeatureWeights, "Predictor does not implement " + nameof(IPredictorWithFeatureWeights)); + _featureWeights = (IPredictorWithFeatureWeights)SubPredictor; } - public static ParameterMixingCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static ParameterMixingCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -489,19 +483,19 @@ public void Save(ModelSaveContext ctx) SaveCore(ctx); } - public void GetFeatureWeights(ref VBuffer weights) + public void GetFeatureWeights(ref VBuffer weights) { _featureWeights.GetFeatureWeights(ref weights); } - IParameterMixer IParameterMixer.CombineParameters(IList> models) + IParameterMixer IParameterMixer.CombineParameters(IList> models) { var predictors = models.Select( m => { var model = m as ParameterMixingCalibratedPredictor; Contracts.Assert(model != null); - return (IParameterMixer)model.SubPredictor; + return (IParameterMixer)model.SubPredictor; }).ToArray(); var calibrators = models.Select( m => @@ -512,11 +506,12 @@ IParameterMixer IParameterMixer.CombineParameters(IList)combinedPredictor, (ICalibrator)combinedCalibrator); + return new ParameterMixingCalibratedPredictor(Host, (IPredictorWithFeatureWeights)combinedPredictor, (ICalibrator)combinedCalibrator); } } - public sealed class SchemaBindableCalibratedPredictor : CalibratedPredictorBase, ISchemaBindableMapper, ICanSaveModel, + [BestFriend] + internal sealed class SchemaBindableCalibratedPredictor : CalibratedPredictorBase, ISchemaBindableMapper, ICanSaveModel, IBindableCanSavePfa, IBindableCanSaveOnnx, 
IFeatureContributionMapper { private sealed class Bound : ISchemaBoundRowMapper @@ -606,7 +601,7 @@ private Delegate GetProbGetter(Row input) private readonly ISchemaBindableMapper _bindable; private readonly IFeatureContributionMapper _featureContribution; - public const string LoaderSignature = "SchemaBindableCalibrated"; + internal const string LoaderSignature = "SchemaBindableCalibrated"; private static VersionInfo GetVersionInfo() { @@ -627,7 +622,7 @@ private static VersionInfo GetVersionInfo() bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => (_bindable as ICanSaveOnnx)?.CanSaveOnnx(ctx) == true; - public SchemaBindableCalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator calibrator) + internal SchemaBindableCalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator calibrator) : base(env, LoaderSignature, predictor, calibrator) { _bindable = ScoreUtils.GetSchemaBindableMapper(Host, SubPredictor); @@ -641,7 +636,7 @@ private SchemaBindableCalibratedPredictor(IHostEnvironment env, ModelLoadContext _featureContribution = SubPredictor as IFeatureContributionMapper; } - public static SchemaBindableCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static SchemaBindableCalibratedPredictor Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); @@ -693,6 +688,9 @@ ValueMapper> IFeatureContributionMapper.GetFeatureContribut [BestFriend] internal static class CalibratorUtils { + // maximum number of rows passed to the calibrator. 
+ private const int _maxCalibrationExamples = 1000000; + private static bool NeedCalibration(IHostEnvironment env, IChannel ch, ICalibratorTrainer calibrator, ITrainer trainer, IPredictor predictor, RoleMappedSchema schema) { @@ -720,7 +718,7 @@ private static bool NeedCalibration(IHostEnvironment env, IChannel ch, ICalibrat return false; } - if (!(predictor is IPredictorProducing)) + if (!(predictor is IPredictorProducing)) { ch.Info("Not training a calibrator because the predictor does not implement IPredictorProducing."); return false; @@ -768,7 +766,7 @@ public static IPredictor TrainCalibratorIfNeeded(IHostEnvironment env, IChannel if (!NeedCalibration(env, ch, calibrator, trainer, predictor, data.Schema)) return predictor; - return TrainCalibrator(env, ch, calibrator, maxRows, predictor, data); + return GetCalibratedPredictor(env, ch, calibrator, predictor, data, maxRows); } /// @@ -777,13 +775,30 @@ public static IPredictor TrainCalibratorIfNeeded(IHostEnvironment env, IChannel /// The environment to use. /// The channel. /// The calibrator trainer. + /// The predictor that needs calibration. + /// The examples to used for calibrator training. /// The maximum rows to use for calibrator training. + /// The original predictor, if no calibration is needed, + /// or a metapredictor that wraps the original predictor and the newly trained calibrator. + public static IPredictor GetCalibratedPredictor(IHostEnvironment env, IChannel ch, ICalibratorTrainer caliTrainer, + IPredictor predictor, RoleMappedData data, int maxRows = _maxCalibrationExamples) + { + var trainedCalibrator = TrainCalibrator(env, ch, caliTrainer, predictor, data, maxRows); + return CreateCalibratedPredictor(env, (IPredictorProducing)predictor, trainedCalibrator); + } + + /// + /// Trains a calibrator. + /// + /// The environment to use. + /// The channel. + /// The calibrator trainer. /// The predictor that needs calibration. /// The examples to used for calibrator training. 
+ /// The maximum rows to use for calibrator training. /// The original predictor, if no calibration is needed, /// or a metapredictor that wraps the original predictor and the newly trained calibrator. - public static IPredictor TrainCalibrator(IHostEnvironment env, IChannel ch, ICalibratorTrainer caliTrainer, - int maxRows, IPredictor predictor, RoleMappedData data) + public static ICalibrator TrainCalibrator(IHostEnvironment env, IChannel ch, ICalibratorTrainer caliTrainer, IPredictor predictor, RoleMappedData data, int maxRows = _maxCalibrationExamples) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ch, nameof(ch)); @@ -835,11 +850,10 @@ public static IPredictor TrainCalibrator(IHostEnvironment env, IChannel ch, ICal } } } - var cali = caliTrainer.FinishTraining(ch); - return CreateCalibratedPredictor(env, (IPredictorProducing)predictor, cali); + return caliTrainer.FinishTraining(ch); } - public static IPredictorProducing CreateCalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator cali) + public static IPredictorProducing CreateCalibratedPredictor(IHostEnvironment env, IPredictorProducing predictor, ICalibrator cali) { Contracts.Assert(predictor != null); if (cali == null) @@ -852,8 +866,8 @@ public static IPredictorProducing CreateCalibratedPredictor(IHostEnvironm predictor = p.SubPredictor; } // REVIEW: Split the requirement for IPredictorWithFeatureWeights into a different class. 
- var predWithFeatureScores = predictor as IPredictorWithFeatureWeights; - if (predWithFeatureScores != null && predictor is IParameterMixer && cali is IParameterMixer) + var predWithFeatureScores = predictor as IPredictorWithFeatureWeights; + if (predWithFeatureScores != null && predictor is IParameterMixer && cali is IParameterMixer) return new ParameterMixingCalibratedPredictor(env, predWithFeatureScores, cali); if (predictor is IValueMapper) return new CalibratedPredictor(env, predictor, cali); @@ -862,7 +876,7 @@ public static IPredictorProducing CreateCalibratedPredictor(IHostEnvironm } [TlcModule.Component(Name = "NaiveCalibrator", FriendlyName = "Naive Calibrator", Alias = "Naive")] - public sealed class NaiveCalibratorTrainerFactory : ICalibratorTrainerFactory + internal sealed class NaiveCalibratorTrainerFactory : ICalibratorTrainerFactory { public ICalibratorTrainer CreateComponent(IHostEnvironment env) { @@ -870,40 +884,49 @@ public ICalibratorTrainer CreateComponent(IHostEnvironment env) } } + /// + /// Trains a by dividing the range of the outputs into equally sized bins. + /// The probability of belonging to a particular class, for example class 1, is the number of class 1 instances in the bin, divided by the total number + /// of instances in that bin. + /// public sealed class NaiveCalibratorTrainer : ICalibratorTrainer { private readonly IHost _host; - private List _cMargins; - private List _ncMargins; + private List _cMargins; + private List _ncMargins; - private int _numBins; - private Float _binSize; - private Float _min; - private Float _max; - private Float[] _binProbs; + public int NumBins; + public float BinSize; + public float Min; + public float Max; + public float[] BinProbs; // REVIEW: The others have user/load names of calibraTION, but this has calibratOR. 
- public const string UserName = "Naive Calibrator"; - public const string LoadName = "NaiveCalibrator"; + internal const string UserName = "Naive Calibrator"; + internal const string LoadName = "NaiveCalibrator"; internal const string Summary = "Naive calibrator divides the range of the outputs into equally sized bins. In each bin, " + "the probability of belonging to class 1 is the number of class 1 instances in the bin, divided by the total number " + "of instances in the bin."; + // REVIEW: does this need a ctor that initialized the parameters to given values? + /// + /// Initializes a new instance of . + /// public NaiveCalibratorTrainer(IHostEnvironment env) { Contracts.CheckValue(env, nameof(env)); _host = env.Register(LoadName); - _cMargins = new List(); - _ncMargins = new List(); - _numBins = 200; - _min = Float.MaxValue; - _max = Float.MinValue; + _cMargins = new List(); + _ncMargins = new List(); + NumBins = 200; + Min = float.MaxValue; + Max = float.MinValue; } - public bool NeedsTraining => true; + bool ICalibratorTrainer.NeedsTraining => true; - public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) + public bool ProcessTrainingExample(float output, bool labelIs1, float weight) { //AP todo proper weighting here if (labelIs1) @@ -917,57 +940,57 @@ public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) return true; } - public ICalibrator FinishTraining(IChannel ch) + ICalibrator ICalibratorTrainer.FinishTraining(IChannel ch) { - Float[] cOutputs = _cMargins.ToArray(); + float[] cOutputs = _cMargins.ToArray(); ch.Check(cOutputs.Length > 0, "Calibrator trained on zero instances."); - Float minC = MathUtils.Min(cOutputs); - Float maxC = MathUtils.Max(cOutputs); + float minC = MathUtils.Min(cOutputs); + float maxC = MathUtils.Max(cOutputs); - Float[] ncOutputs = _ncMargins.ToArray(); - Float minNC = MathUtils.Min(ncOutputs); - Float maxNC = MathUtils.Max(ncOutputs); + float[] ncOutputs = _ncMargins.ToArray(); + 
float minNC = MathUtils.Min(ncOutputs); + float maxNC = MathUtils.Max(ncOutputs); - _min = (minC < minNC) ? minC : minNC; - _max = (maxC > maxNC) ? maxC : maxNC; - _binSize = (_max - _min) / _numBins; + Min = (minC < minNC) ? minC : minNC; + Max = (maxC > maxNC) ? maxC : maxNC; + BinSize = (Max - Min) / NumBins; - Float[] cBins = new Float[_numBins]; - Float[] ncBins = new Float[_numBins]; + float[] cBins = new float[NumBins]; + float[] ncBins = new float[NumBins]; - foreach (Float xi in cOutputs) + foreach (float xi in cOutputs) { - int binIdx = NaiveCalibrator.GetBinIdx(xi, _min, _binSize, _numBins); + int binIdx = NaiveCalibrator.GetBinIdx(xi, Min, BinSize, NumBins); cBins[binIdx]++; } - foreach (Float xi in ncOutputs) + foreach (float xi in ncOutputs) { - int binIdx = NaiveCalibrator.GetBinIdx(xi, _min, _binSize, _numBins); + int binIdx = NaiveCalibrator.GetBinIdx(xi, Min, BinSize, NumBins); ncBins[binIdx]++; } - _binProbs = new Float[_numBins]; - for (int i = 0; i < _numBins; i++) + BinProbs = new float[NumBins]; + for (int i = 0; i < NumBins; i++) { if (cBins[i] + ncBins[i] == 0) - _binProbs[i] = 0; + BinProbs[i] = 0; else - _binProbs[i] = cBins[i] / (cBins[i] + ncBins[i]); + BinProbs[i] = cBins[i] / (cBins[i] + ncBins[i]); } - return new NaiveCalibrator(_host, _min, _binSize, _binProbs); + return new NaiveCalibrator(_host, Min, BinSize, BinProbs); } } /// - /// The naive binning-based calibrator + /// The naive binning-based calibrator. 
/// public sealed class NaiveCalibrator : ICalibrator, ICanSaveInBinaryFormat { - public const string LoaderSignature = "NaiveCaliExec"; - public const string RegistrationName = "NaiveCalibrator"; + internal const string LoaderSignature = "NaiveCaliExec"; + internal const string RegistrationName = "NaiveCalibrator"; private static VersionInfo GetVersionInfo() { @@ -981,18 +1004,28 @@ private static VersionInfo GetVersionInfo() } private readonly IHost _host; - private readonly Float _binSize; - private readonly Float _min; - private readonly Float[] _binProbs; - /// Create a default calibrator - public NaiveCalibrator(IHostEnvironment env, Float min, Float binSize, Float[] binProbs) + /// The bin size. + public readonly float BinSize; + + /// The minimum value in the first bin. + public readonly float Min; + + /// The value of probability in each bin. + public readonly float[] BinProbs; + + /// Initializes a new instance of . + /// The to use. + /// The minimum value in the first bin. + /// The values of the probability in each bin. + /// The bin size. 
+ public NaiveCalibrator(IHostEnvironment env, float min, float binSize, float[] binProbs) { Contracts.CheckValue(env, nameof(env)); _host = env.Register(RegistrationName); - _min = min; - _binSize = binSize; - _binProbs = binProbs; + Min = min; + BinSize = binSize; + BinProbs = binProbs; } private NaiveCalibrator(IHostEnvironment env, ModelLoadContext ctx) @@ -1002,26 +1035,26 @@ private NaiveCalibrator(IHostEnvironment env, ModelLoadContext ctx) _host.CheckValue(ctx, nameof(ctx)); // *** Binary format *** - // int: sizeof(Float) - // Float: bin size - // Float: minimum value of first bin + // int: sizeof(float) + // float: bin size + // float: minimum value of first bin // int: number of bins - // Float[]: probability in each bin + // float[]: probability in each bin int cbFloat = ctx.Reader.ReadInt32(); - _host.CheckDecode(cbFloat == sizeof(Float)); + _host.CheckDecode(cbFloat == sizeof(float)); - _binSize = ctx.Reader.ReadFloat(); - _host.CheckDecode(0 < _binSize && _binSize < Float.PositiveInfinity); + BinSize = ctx.Reader.ReadFloat(); + _host.CheckDecode(0 < BinSize && BinSize < float.PositiveInfinity); - _min = ctx.Reader.ReadFloat(); - _host.CheckDecode(FloatUtils.IsFinite(_min)); + Min = ctx.Reader.ReadFloat(); + _host.CheckDecode(FloatUtils.IsFinite(Min)); - _binProbs = ctx.Reader.ReadFloatArray(); - _host.CheckDecode(Utils.Size(_binProbs) > 0); - _host.CheckDecode(_binProbs.All(x => (0 <= x && x <= 1))); + BinProbs = ctx.Reader.ReadFloatArray(); + _host.CheckDecode(Utils.Size(BinProbs) > 0); + _host.CheckDecode(BinProbs.All(x => (0 <= x && x <= 1))); } - public static NaiveCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) + private static NaiveCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -1041,30 +1074,30 @@ private void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); // *** Binary format *** - // int: sizeof(Float) - // Float: 
bin size - // Float: minimum value of first bin + // int: sizeof(float) + // float: bin size + // float: minimum value of first bin // int: number of bins - // Float[]: probability in each bin - ctx.Writer.Write(sizeof(Float)); - ctx.Writer.Write(_binSize); - ctx.Writer.Write(_min); - ctx.Writer.WriteSingleArray(_binProbs); + // float[]: probability in each bin + ctx.Writer.Write(sizeof(float)); + ctx.Writer.Write(BinSize); + ctx.Writer.Write(Min); + ctx.Writer.WriteSingleArray(BinProbs); } /// /// Given a classifier output, produce the probability /// - public Float PredictProbability(Float output) + public float PredictProbability(float output) { - if (Float.IsNaN(output)) + if (float.IsNaN(output)) return output; - int binIdx = GetBinIdx(output, _min, _binSize, _binProbs.Length); - return _binProbs[binIdx]; + int binIdx = GetBinIdx(output, Min, BinSize, BinProbs.Length); + return BinProbs[binIdx]; } // get the bin for a given output - internal static int GetBinIdx(Float output, Float min, Float binSize, int numBins) + internal static int GetBinIdx(float output, float min, float binSize, int numBins) { int binIdx = (int)((output - min) / binSize); if (binIdx >= numBins) @@ -1077,10 +1110,13 @@ internal static int GetBinIdx(Float output, Float min, Float binSize, int numBin /// Get the summary of current calibrator settings public string GetSummary() { - return string.Format("Naive Calibrator has {0} bins, starting at {1}, with bin size of {2}", _binProbs.Length, _min, _binSize); + return string.Format("Naive Calibrator has {0} bins, starting at {1}, with bin size of {2}", BinProbs.Length, Min, BinSize); } } + /// + /// Base class for calibrator trainers. 
+ /// public abstract class CalibratorTrainerBase : ICalibratorTrainer { protected readonly IHost Host; @@ -1096,12 +1132,12 @@ protected CalibratorTrainerBase(IHostEnvironment env, string name) MaxNumSamples = DefaultMaxNumSamples; } - public bool NeedsTraining { get { return true; } } + bool ICalibratorTrainer.NeedsTraining => true; /// /// Training calibrators: provide the classifier output and the class label /// - public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) + bool ICalibratorTrainer.ProcessTrainingExample(float output, bool labelIs1, float weight) { if (Data == null) Data = new CalibrationDataStore(MaxNumSamples); @@ -1109,7 +1145,7 @@ public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) return true; } - public ICalibrator FinishTraining(IChannel ch) + ICalibrator ICalibratorTrainer.FinishTraining(IChannel ch) { ch.Check(Data != null, "Calibrator trained on zero instances."); var calibrator = CreateCalibrator(ch); @@ -1121,7 +1157,8 @@ public ICalibrator FinishTraining(IChannel ch) } [TlcModule.Component(Name = "PlattCalibrator", FriendlyName = "Platt Calibrator", Aliases = new[] { "Platt", "Sigmoid" }, Desc = "Platt calibration.")] - public sealed class PlattCalibratorTrainerFactory : ICalibratorTrainerFactory + [BestFriend] + internal sealed class PlattCalibratorTrainerFactory : ICalibratorTrainerFactory { public ICalibratorTrainer CreateComponent(IHostEnvironment env) { @@ -1131,9 +1168,6 @@ public ICalibratorTrainer CreateComponent(IHostEnvironment env) public sealed class PlattCalibratorTrainer : CalibratorTrainerBase { - private Double _paramA; - private Double _paramB; - internal const string UserName = "Sigmoid Calibration"; internal const string LoadName = "PlattCalibration"; internal const string Summary = "This model was introduced by Platt in the paper Probabilistic Outputs for Support Vector Machines " @@ -1142,13 +1176,13 @@ public sealed class PlattCalibratorTrainer : 
CalibratorTrainerBase public PlattCalibratorTrainer(IHostEnvironment env) : base(env, LoadName) { - } public override ICalibrator CreateCalibrator(IChannel ch) { - _paramA = 0; - _paramB = 0; + Double slope = 0; + Double offset = 0; + Double prior0 = 0; Double prior1 = 0; long n = 0; @@ -1162,12 +1196,12 @@ public override ICalibrator CreateCalibrator(IChannel ch) n++; } if (n == 0) - return new PlattCalibrator(Host, _paramA, _paramB); + return new PlattCalibrator(Host, slope, offset); - _paramA = 0; + slope = 0; // Initialize B to be the marginal probability of class // smoothed i.e. P(+ | x) = (N+ + 1) / (N + 2) - _paramB = Math.Log((prior0 + 1) / (prior1 + 1)); + offset = Math.Log((prior0 + 1) / (prior1 + 1)); // OK. We're going to maximize the likelihood of the output by // minimizing the cross-entropy of the output. Here's a @@ -1178,8 +1212,8 @@ public override ICalibrator CreateCalibrator(IChannel ch) Double lambda = 0.001; Double olderr = Double.MaxValue / 2; // array to store current estimate of probability of training points - Float[] pp = new Float[n]; - Float defValue = (Float)((prior1 + 1) / (prior0 + prior1 + 2)); + float[] pp = new float[n]; + float defValue = (float)((prior1 + 1) / (prior0 + prior1 + 2)); for (int i = 0; i < n; i++) pp[i] = defValue; @@ -1221,8 +1255,8 @@ public override ICalibrator CreateCalibrator(IChannel ch) break; } Double err = 0; - Double oldA = _paramA; - Double oldB = _paramB; + Double oldA = slope; + Double oldB = offset; // Loop until you get a increase in the goodness of fit for (; ; ) { @@ -1234,8 +1268,8 @@ public override ICalibrator CreateCalibrator(IChannel ch) continue; } // This is the Newton-Raphson step (with lambda as stabilizer) - _paramA = oldA + ((b + lambda) * d - c * e) / det; - _paramB = oldB + ((a + lambda) * e - c * d) / det; + slope = oldA + ((b + lambda) * d - c * e) / det; + offset = oldB + ((a + lambda) * e - c * d) / det; // Now, compute goodness of fit err = 0; @@ -1243,7 +1277,7 @@ public 
override ICalibrator CreateCalibrator(IChannel ch) foreach (var d_i in Data) { var y = d_i.Target ? d_i.Score : -d_i.Score; - var p = PlattCalibrator.PredictProbability(d_i.Score, _paramA, _paramB); + var p = PlattCalibrator.PredictProbability(d_i.Score, slope, offset); var t = d_i.Target ? hiTarget : loTarget; var weight = d_i.Weight; pp[i] = p; @@ -1287,7 +1321,7 @@ public override ICalibrator CreateCalibrator(IChannel ch) break; } - return new PlattCalibrator(Host, _paramA, _paramB); + return new PlattCalibrator(Host, slope, offset); } } @@ -1308,15 +1342,15 @@ public ICalibratorTrainer CreateComponent(IHostEnvironment env) } } - public const string UserName = "Fixed Sigmoid Calibration"; - public const string LoadName = "FixedPlattCalibration"; + internal const string UserName = "Fixed Sigmoid Calibration"; + internal const string LoadName = "FixedPlattCalibration"; internal const string Summary = "Sigmoid calibrator with configurable slope and offset."; private readonly IHost _host; private readonly Double _slope; private readonly Double _offset; - public FixedPlattCalibratorTrainer(IHostEnvironment env, Arguments args) + internal FixedPlattCalibratorTrainer(IHostEnvironment env, Arguments args) { Contracts.CheckValue(env, nameof(env)); _host = env.Register(LoadName); @@ -1324,21 +1358,19 @@ public FixedPlattCalibratorTrainer(IHostEnvironment env, Arguments args) _offset = args.Offset; } - public bool NeedsTraining => false; + bool ICalibratorTrainer.NeedsTraining => false; - public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight) - => false; + bool ICalibratorTrainer.ProcessTrainingExample(float output, bool labelIs1, float weight) => false; - public ICalibrator FinishTraining(IChannel ch) - { - return new PlattCalibrator(_host, _slope, _offset); - } + ICalibrator ICalibratorTrainer.FinishTraining(IChannel ch) => new PlattCalibrator(_host, _slope, _offset); } + /// The Platt calibrator calculates the probability following: + /// P(x) = 1 
/ (1 + exp(- * x + ) . public sealed class PlattCalibrator : ICalibrator, IParameterMixer, ICanSaveModel, ISingleCanSavePfa, ISingleCanSaveOnnx { - public const string LoaderSignature = "PlattCaliExec"; - public const string RegistrationName = "PlattCalibrator"; + internal const string LoaderSignature = "PlattCaliExec"; + internal const string RegistrationName = "PlattCalibrator"; private static VersionInfo GetVersionInfo() { return new VersionInfo( @@ -1352,17 +1384,20 @@ private static VersionInfo GetVersionInfo() private readonly IHost _host; - public Double ParamA { get; } - public Double ParamB { get; } + public Double Slope { get; } + public Double Offset { get; } bool ICanSavePfa.CanSavePfa => true; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => true; - public PlattCalibrator(IHostEnvironment env, Double paramA, Double paramB) + /// + /// Initializes a new instance of . + /// + public PlattCalibrator(IHostEnvironment env, Double slope, Double offset) { Contracts.CheckValue(env, nameof(env)); _host = env.Register(RegistrationName); - ParamA = paramA; - ParamB = paramB; + Slope = slope; + Offset = offset; } private PlattCalibrator(IHostEnvironment env, ModelLoadContext ctx) @@ -1374,14 +1409,14 @@ private PlattCalibrator(IHostEnvironment env, ModelLoadContext ctx) // *** Binary format *** // Double: A // Double: B - ParamA = ctx.Reader.ReadDouble(); - _host.CheckDecode(FloatUtils.IsFinite(ParamA)); + Slope = ctx.Reader.ReadDouble(); + _host.CheckDecode(FloatUtils.IsFinite(Slope)); - ParamB = ctx.Reader.ReadDouble(); - _host.CheckDecode(FloatUtils.IsFinite(ParamB)); + Offset = ctx.Reader.ReadDouble(); + _host.CheckDecode(FloatUtils.IsFinite(Offset)); } - public static PlattCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) + private static PlattCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -1405,8 +1440,8 @@ private void SaveCore(ModelSaveContext ctx) // 
*** Binary format *** // Double: A // Double: B - ctx.Writer.Write(ParamA); - ctx.Writer.Write(ParamB); + ctx.Writer.Write(Slope); + ctx.Writer.Write(Offset); if (ctx.InRepository) { @@ -1414,22 +1449,22 @@ private void SaveCore(ModelSaveContext ctx) { writer.WriteLine("Platt calibrator"); writer.WriteLine("P(y=1|x) = 1/1+exp(A*x + B)"); - writer.WriteLine("A={0:R}", (object)ParamA); - writer.WriteLine("B={0:R}", ParamB); + writer.WriteLine("A={0:R}", (object)Slope); + writer.WriteLine("B={0:R}", Offset); })); } } - public Float PredictProbability(Float output) + public float PredictProbability(float output) { - if (Float.IsNaN(output)) + if (float.IsNaN(output)) return output; - return PredictProbability(output, ParamA, ParamB); + return PredictProbability(output, Slope, Offset); } - public static Float PredictProbability(Float output, Double a, Double b) + public static float PredictProbability(float output, Double a, Double b) { - return (Float)(1 / (1 + Math.Exp(a * output + b))); + return (float)(1 / (1 + Math.Exp(a * output + b))); } JToken ISingleCanSavePfa.SaveAsPfa(BoundPfaContext ctx, JToken input) @@ -1438,7 +1473,7 @@ JToken ISingleCanSavePfa.SaveAsPfa(BoundPfaContext ctx, JToken input) _host.CheckValue(input, nameof(input)); return PfaUtils.Call("m.link.logit", - PfaUtils.Call("+", -ParamB, PfaUtils.Call("*", -ParamA, input))); + PfaUtils.Call("+", -Offset, PfaUtils.Call("*", -Slope, input))); } bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] scoreProbablityColumnNames, string featureColumnName) @@ -1451,7 +1486,7 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] scoreProbablityColu string linearOutput = ctx.AddIntermediateVariable(null, "linearOutput", true); var node = ctx.CreateNode(opType, new[] { scoreProbablityColumnNames[0] }, new[] { linearOutput }, ctx.GetNodeName(opType), ""); - node.AddAttribute("alpha", ParamA * -1); + node.AddAttribute("alpha", Slope * -1); node.AddAttribute("beta", -0.0000001); opType = 
"Sigmoid"; @@ -1463,7 +1498,7 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] scoreProbablityColu public string GetSummary() { - return string.Format("Platt calibrator parameters: A={0}, B={1}", ParamA, ParamB); + return string.Format("Platt calibrator parameters: A={0}, B={1}", Slope, Offset); } IParameterMixer IParameterMixer.CombineParameters(IList calibrators) @@ -1474,8 +1509,8 @@ IParameterMixer IParameterMixer.CombineParameters(IList calibra { PlattCalibrator cal = calibrator as PlattCalibrator; - a += cal.ParamA; - b += cal.ParamB; + a += cal.Slope; + b += cal.Offset; } PlattCalibrator newCal = new PlattCalibrator(_host, a / calibrators.Count, b / calibrators.Count); @@ -1484,7 +1519,7 @@ IParameterMixer IParameterMixer.CombineParameters(IList calibra } [TlcModule.Component(Name = "PavCalibrator", FriendlyName = "PAV Calibrator", Alias = "Pav")] - public sealed class PavCalibratorTrainerFactory : ICalibratorTrainerFactory + internal sealed class PavCalibratorTrainerFactory : ICalibratorTrainerFactory { public ICalibratorTrainer CreateComponent(IHostEnvironment env) { @@ -1497,12 +1532,12 @@ public class PavCalibratorTrainer : CalibratorTrainerBase // a piece of the piecwise function private readonly struct Piece { - public readonly Float MinX; // end of interval. - public readonly Float MaxX; // beginning of interval. - public readonly Float Value; // value of function in interval. - public readonly Float N; // number of points/sum of weights of interval. + public readonly float MinX; // end of interval. + public readonly float MaxX; // beginning of interval. + public readonly float Value; // value of function in interval. + public readonly float N; // number of points/sum of weights of interval. - public Piece(Float minX, Float maxX, Float value, Float n) + public Piece(float minX, float maxX, float value, float n) { Contracts.Assert(minX <= maxX); // REVIEW: Can this fail due to more innocent imprecision issues? 
@@ -1515,10 +1550,11 @@ public Piece(Float minX, Float maxX, Float value, Float n) } } - public const string UserName = "PAV Calibration"; - public const string LoadName = "PAVCalibration"; + internal const string UserName = "PAV Calibration"; + internal const string LoadName = "PAVCalibration"; internal const string Summary = "Piecewise linear calibrator."; + // REVIEW: Do we need a ctor that initializes min, max, value, n? public PavCalibratorTrainer(IHostEnvironment env) : base(env, LoadName) { @@ -1535,7 +1571,7 @@ public override ICalibrator CreateCalibrator(IChannel ch) Piece curr = new Piece(di.Score, di.Score, di.Target ? 1 : 0, di.Weight); for (; stack.Count > 0 && ((top.MaxX >= curr.MinX) || curr.Value <= top.Value);) { - Float newN = top.N + curr.N; + float newN = top.N + curr.N; curr = new Piece(top.MinX, curr.MaxX, (top.Value * top.N + curr.Value * curr.N) / newN, newN); stack.Pop(); if (stack.Count > 0) @@ -1547,9 +1583,9 @@ public override ICalibrator CreateCalibrator(IChannel ch) } ch.Info("PAV calibrator: piecewise function approximation has {0} components.", stack.Count); - Float[] mins = new Float[stack.Count]; - Float[] maxes = new Float[stack.Count]; - Float[] values = new Float[stack.Count]; + float[] mins = new float[stack.Count]; + float[] maxes = new float[stack.Count]; + float[] values = new float[stack.Count]; for (int i = stack.Count - 1; stack.Count > 0; --i) { @@ -1559,7 +1595,7 @@ public override ICalibrator CreateCalibrator(IChannel ch) values[i] = top.Value; } - return new PavCalibrator(Host, mins, maxes, values); + return new PavCalibrator(Host, mins.ToImmutableArray(), maxes.ToImmutableArray(), values.ToImmutableArray()); } } @@ -1572,8 +1608,8 @@ public override ICalibrator CreateCalibrator(IChannel ch) /// public sealed class PavCalibrator : ICalibrator, ICanSaveInBinaryFormat { - public const string LoaderSignature = "PAVCaliExec"; - public const string RegistrationName = "PAVCalibrator"; + internal const string LoaderSignature 
= "PAVCaliExec"; + internal const string RegistrationName = "PAVCalibrator"; private static VersionInfo GetVersionInfo() { @@ -1587,31 +1623,38 @@ private static VersionInfo GetVersionInfo() } // Epsilon for 0-comparisons - private const Float Epsilon = (Float)1e-15; - private const Float MinToReturn = Epsilon; // max predicted is 1 - min; - private const Float MaxToReturn = 1 - Epsilon; // max predicted is 1 - min; + private const float Epsilon = (float)1e-15; + private const float MinToReturn = Epsilon; // max predicted is 1 - min; + private const float MaxToReturn = 1 - Epsilon; // max predicted is 1 - min; private readonly IHost _host; - private readonly Float[] _mins; - private readonly Float[] _maxes; - private readonly Float[] _values; + public readonly ImmutableArray Mins; + public readonly ImmutableArray Maxes; + public readonly ImmutableArray Values; - internal PavCalibrator(IHostEnvironment env, Float[] mins, Float[] maxes, Float[] values) + /// + /// Initializes a new instance of . + /// + /// The to use. + /// The minimum values for each piece. + /// The maximum values for each piece. + /// The actual values for each piece. 
+ public PavCalibrator(IHostEnvironment env, ImmutableArray mins, ImmutableArray maxes, ImmutableArray values) { Contracts.AssertValue(env); _host = env.Register(RegistrationName); - _host.AssertValue(mins); - _host.AssertValue(maxes); - _host.AssertValue(values); + _host.AssertNonEmpty(mins); + _host.AssertNonEmpty(maxes); + _host.AssertNonEmpty(values); _host.Assert(Utils.IsSorted(mins)); _host.Assert(Utils.IsSorted(maxes)); _host.Assert(Utils.IsSorted(values)); _host.Assert(values.Length == 0 || (0 <= values[0] && values[values.Length - 1] <= 1)); _host.Assert(mins.Zip(maxes, (min, max) => min <= max).All(x => x)); - _mins = mins; - _maxes = maxes; - _values = values; + Mins = mins; + Maxes = maxes; + Values = values; } private PavCalibrator(IHostEnvironment env, ModelLoadContext ctx) @@ -1621,40 +1664,44 @@ private PavCalibrator(IHostEnvironment env, ModelLoadContext ctx) _host.AssertValue(ctx); // *** Binary format *** - // int: sizeof(Float) + // int: sizeof(float) // int: number of pieces // for each piece: - // Float: MinX - // Float: MaxX - // Float: Value + // float: MinX + // float: MaxX + // float: Value int cbFloat = ctx.Reader.ReadInt32(); - _host.CheckDecode(cbFloat == sizeof(Float)); + _host.CheckDecode(cbFloat == sizeof(float)); int numPieces = ctx.Reader.ReadInt32(); _host.CheckDecode(numPieces >= 0); - _mins = new Float[numPieces]; - _maxes = new Float[numPieces]; - _values = new Float[numPieces]; - Float valuePrev = 0; - Float maxPrev = Float.NegativeInfinity; + var mins = new float[numPieces]; + var maxes = new float[numPieces]; + var values = new float[numPieces]; + float valuePrev = 0; + float maxPrev = float.NegativeInfinity; for (int i = 0; i < numPieces; ++i) { - Float minX = ctx.Reader.ReadFloat(); - Float maxX = ctx.Reader.ReadFloat(); - Float val = ctx.Reader.ReadFloat(); + float minX = ctx.Reader.ReadFloat(); + float maxX = ctx.Reader.ReadFloat(); + float val = ctx.Reader.ReadFloat(); _host.CheckDecode(minX <= maxX); 
_host.CheckDecode(minX > maxPrev); _host.CheckDecode(val > valuePrev || val == valuePrev && i == 0); valuePrev = val; maxPrev = maxX; - _mins[i] = minX; - _maxes[i] = maxX; - _values[i] = val; + mins[i] = minX; + maxes[i] = maxX; + values[i] = val; } + + Mins = mins.ToImmutableArray(); + Maxes = maxes.ToImmutableArray(); + Values = values.ToImmutableArray(); _host.CheckDecode(valuePrev <= 1); } - public static PavCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) + private static PavCalibrator Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); @@ -1674,38 +1721,38 @@ private void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); // *** Binary format *** - // int: sizeof(Float) + // int: sizeof(float) // int: number of pieces // for each piece: - // Float: MinX - // Float: MaxX - // Float: Value - ctx.Writer.Write(sizeof(Float)); - - _host.Assert(_mins.Length == _maxes.Length); - _host.Assert(_mins.Length == _values.Length); - ctx.Writer.Write(_mins.Length); - Float valuePrev = 0; - Float maxPrev = Float.NegativeInfinity; - for (int i = 0; i < _mins.Length; i++) + // float: MinX + // float: MaxX + // float: Value + ctx.Writer.Write(sizeof(float)); + + _host.Assert(Mins.Length == Maxes.Length); + _host.Assert(Mins.Length == Values.Length); + ctx.Writer.Write(Mins.Length); + float valuePrev = 0; + float maxPrev = float.NegativeInfinity; + for (int i = 0; i < Mins.Length; i++) { - _host.Assert(_mins[i] <= _maxes[i]); - _host.Assert(_mins[i] > maxPrev); - _host.Assert(_values[i] > valuePrev || _values[i] == valuePrev && i == 0); - valuePrev = _values[i]; - maxPrev = _maxes[i]; - ctx.Writer.Write(_mins[i]); - ctx.Writer.Write(_maxes[i]); - ctx.Writer.Write(_values[i]); + _host.Assert(Mins[i] <= Maxes[i]); + _host.Assert(Mins[i] > maxPrev); + _host.Assert(Values[i] > valuePrev || Values[i] == valuePrev && i == 0); + valuePrev = Values[i]; + maxPrev = Maxes[i]; + 
ctx.Writer.Write(Mins[i]); + ctx.Writer.Write(Maxes[i]); + ctx.Writer.Write(Values[i]); } _host.CheckDecode(valuePrev <= 1); } - public Float PredictProbability(Float output) + public float PredictProbability(float output) { - if (Float.IsNaN(output)) + if (float.IsNaN(output)) return output; - Float prob = FindValue(output); + float prob = FindValue(output); if (prob < MinToReturn) return MinToReturn; if (prob > MaxToReturn) @@ -1713,37 +1760,37 @@ public Float PredictProbability(Float output) return prob; } - private Float FindValue(Float score) + private float FindValue(float score) { - int p = _mins.Length; + int p = Mins.Length; if (p == 0) return 0; - if (score < _mins[0]) + if (score < Mins[0]) { - return _values[0]; + return Values[0]; // tail off to zero exponentially // return Math.Exp(-(piecewise[0].MinX-score)) * piecewise[0].Value; } - if (score > _maxes[p - 1]) + if (score > Maxes[p - 1]) { - return _values[p - 1]; + return Values[p - 1]; // tail off to one exponentially // return (1-Math.Exp(-(score - piecewise[P - 1].MaxX))) * (1 - piecewise[P - 1].Value) + piecewise[P - 1].Value; } - int pos = _maxes.FindIndexSorted(score); + int pos = Maxes.FindIndexSorted(score); _host.Assert(pos < p); // inside the piece, the value is constant - if (score >= _mins[pos]) - return _values[pos]; + if (score >= Mins[pos]) + return Values[pos]; // between pieces, interpolate - Float t = (score - _maxes[pos - 1]) / (_mins[pos] - _maxes[pos - 1]); - return _values[pos - 1] + t * (_values[pos] - _values[pos - 1]); + float t = (score - Maxes[pos - 1]) / (Mins[pos] - Maxes[pos - 1]); + return Values[pos - 1] + t * (Values[pos] - Values[pos - 1]); } public string GetSummary() { - return string.Format("PAV calibrator with {0} intervals", _mins.Length); + return string.Format("PAV calibrator with {0} intervals", Mins.Length); } } @@ -1754,11 +1801,11 @@ public readonly struct DataItem // The actual binary label of this example. 
public readonly bool Target; // The weight associated with this example. - public readonly Float Weight; + public readonly float Weight; // The output of the example. - public readonly Float Score; + public readonly float Score; - public DataItem(bool target, Float weight, Float score) + public DataItem(bool target, float weight, float score) { Target = target; Weight = weight; @@ -1813,10 +1860,10 @@ IEnumerator IEnumerable.GetEnumerator() return GetEnumerator(); } - public void AddToStore(Float score, bool isPositive, Float weight) + public void AddToStore(float score, bool isPositive, float weight) { // Can't calibrate NaN scores. - if (weight == 0 || Float.IsNaN(score)) + if (weight == 0 || float.IsNaN(score)) return; int index = _itemsSeen++; if (_itemsSeen <= _capacity) @@ -1831,7 +1878,7 @@ public void AddToStore(Float score, bool isPositive, Float weight) } } - public static class Calibrate + internal static class Calibrate { [TlcModule.EntryPointKind(typeof(CommonInputs.ICalibratorInput))] public abstract class CalibrateInputBase : TransformInputBase @@ -1911,7 +1958,7 @@ public static CommonOutputs.CalibratorOutput FixedPlatt(IHostEnvironment env, Fi /// of examples to use for training the calibrator. /// The kind of calibrator to use. /// A object, containing an . 
- public static TOut CalibratePredictor(IHost host, CalibrateInputBase input, + internal static TOut CalibratePredictor(IHost host, CalibrateInputBase input, ICalibratorTrainer calibratorTrainer) where TOut : CommonOutputs.TrainerOutput, new() { @@ -1933,10 +1980,10 @@ public static TOut CalibratePredictor(IHost host, CalibrateInputBase input else { calibratedPredictor = - CalibratorUtils.TrainCalibrator(host, ch, calibratorTrainer, input.MaxRows, predictor, data); + CalibratorUtils.GetCalibratedPredictor(host, ch, calibratorTrainer, predictor, data, input.MaxRows); } return new TOut() { PredictorModel = new PredictorModelImpl(host, data, input.Data, calibratedPredictor) }; } } } -} +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs b/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs new file mode 100644 index 0000000000..08460a27c5 --- /dev/null +++ b/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs @@ -0,0 +1,462 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using Microsoft.ML.Calibrator; +using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Internal.Calibration; +using Microsoft.ML.Runtime.Model; +using Microsoft.ML.Runtime.Training; +using System; +using System.Collections.Generic; +using System.Linq; + +[assembly: LoadableClass(typeof(CalibratorTransformer), typeof(PlattCalibratorTransformer), null, + typeof(SignatureLoadModel), "", PlattCalibratorTransformer.LoadName)] + +[assembly: LoadableClass(typeof(CalibratorTransformer), typeof(NaiveCalibratorTransformer), null, + typeof(SignatureLoadModel), "", NaiveCalibratorTransformer.LoadName)] + +[assembly: LoadableClass(typeof(CalibratorTransformer), typeof(PavCalibratorTransformer), null, + typeof(SignatureLoadModel), "", PavCalibratorTransformer.LoadName)] + +namespace Microsoft.ML.Calibrator +{ + + /// + /// An interface for probability calibrators. + /// + public interface ICalibrator + { + /// Given a classifier output, produce the probability. + float PredictProbability(float output); + } + + /// + /// Base class for CalibratorEstimators. + /// + /// + /// CalibratorEstimators take an (the output of a ) + /// that contains a "Score" column, and converts the scores to probabilities(through binning, interpolation etc.), based on the type. + /// They are used in pipelines where the binary classifier produces non-calibrated scores. 
+ /// + /// + /// + /// + /// + public abstract class CalibratorEstimatorBase : IEstimator> + where TCalibratorTrainer : ICalibratorTrainer + where TICalibrator : class, ICalibrator + { + protected readonly IHostEnvironment Host; + protected readonly TCalibratorTrainer CalibratorTrainer; + + protected readonly IPredictor Predictor; + protected readonly SchemaShape.Column ScoreColumn; + protected readonly SchemaShape.Column FeatureColumn; + protected readonly SchemaShape.Column LabelColumn; + protected readonly SchemaShape.Column WeightColumn; + protected readonly SchemaShape.Column PredictedLabel; + + protected CalibratorEstimatorBase(IHostEnvironment env, + TCalibratorTrainer calibratorTrainer, + IPredictor predictor = null, + string labelColumn = DefaultColumnNames.Label, + string featureColumn = DefaultColumnNames.Features, + string weightColumn = null) + { + Host = env; + Predictor = predictor; + CalibratorTrainer = calibratorTrainer; + + ScoreColumn = TrainerUtils.MakeR4ScalarColumn(DefaultColumnNames.Score); // Do we fantom this being named anything else (renaming column)? Complete metadata? + LabelColumn = TrainerUtils.MakeBoolScalarLabel(labelColumn); + FeatureColumn = TrainerUtils.MakeR4VecFeature(featureColumn); + PredictedLabel = new SchemaShape.Column(DefaultColumnNames.PredictedLabel, + SchemaShape.Column.VectorKind.Scalar, + BoolType.Instance, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())); + + if (weightColumn != null) + WeightColumn = TrainerUtils.MakeR4ScalarWeightColumn(weightColumn); + } + + /// + /// Gets the output of the after fitting the calibrator. + /// Fitting the calibrator will add a column named "Probability" to the schema. If you already had such a column, a new one will be added. + /// + /// The input . 
+ SchemaShape IEstimator>.GetOutputSchema(SchemaShape inputSchema) + { + Action checkColumnValid = (SchemaShape.Column column, string expected) => + { + if (column.IsValid) + { + if (!inputSchema.TryFindColumn(column.Name, out var outCol)) + throw Host.Except($"{expected} column '{column.Name}' is not found"); + if (!column.IsCompatibleWith(outCol)) + throw Host.Except($"{expected} column '{column.Name}' is not compatible"); + } + }; + + // check the input schema + checkColumnValid(ScoreColumn, DefaultColumnNames.Score); + checkColumnValid(WeightColumn, DefaultColumnNames.Weight); + checkColumnValid(LabelColumn, DefaultColumnNames.Label); + checkColumnValid(FeatureColumn, DefaultColumnNames.Features); + checkColumnValid(PredictedLabel, DefaultColumnNames.PredictedLabel); + + //create the new Probability column + var outColumns = inputSchema.ToDictionary(x => x.Name); + outColumns[DefaultColumnNames.Probability] = new SchemaShape.Column(DefaultColumnNames.Probability, + SchemaShape.Column.VectorKind.Scalar, + NumberType.R4, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata(true))); + + return new SchemaShape(outColumns.Values); + } + + public CalibratorTransformer Fit(IDataView input) + { + TICalibrator calibrator = null; + + var roles = new List>(); + roles.Add(RoleMappedSchema.CreatePair(MetadataUtils.Const.ScoreValueKind.Score, DefaultColumnNames.Score)); + roles.Add(RoleMappedSchema.ColumnRole.Label.Bind(LabelColumn.Name)); + roles.Add(RoleMappedSchema.ColumnRole.Feature.Bind(FeatureColumn.Name)); + if (WeightColumn.IsValid) + roles.Add(RoleMappedSchema.ColumnRole.Weight.Bind(WeightColumn.Name)); + + var roleMappedData = new RoleMappedData(input, opt: false, roles.ToArray()); + + using (var ch = Host.Start("Creating calibrator.")) + calibrator = (TICalibrator)CalibratorUtils.TrainCalibrator(Host, ch, CalibratorTrainer, Predictor, roleMappedData); + + return Create(Host, calibrator); + } + + /// + /// Implemented by deriving classes that create a 
concrete calibrator. + /// + protected abstract CalibratorTransformer Create(IHostEnvironment env, TICalibrator calibrator); + } + + /// + /// CalibratorTransfomers, the artifact of calling Fit on a . + /// If you pass a scored data, to the Transform method, it will add the Probability column + /// to the dataset. The Probability column is the value of the Score normalized to be a valid probability. + /// The CalibratorTransformer is an instance of where score can be viewed as a feature + /// while probability is treated as the label. + /// + /// The used to transform the data. + public abstract class CalibratorTransformer : RowToRowTransformerBase, ISingleFeaturePredictionTransformer + where TICalibrator : class, ICalibrator + { + private TICalibrator _calibrator; + private readonly string _loaderSignature; + + internal CalibratorTransformer(IHostEnvironment env, TICalibrator calibrator, string loaderSignature) + : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(CalibratorTransformer))) + { + Host.CheckRef(calibrator, nameof(calibrator)); + + _loaderSignature = loaderSignature; + _calibrator = calibrator; + } + + // Factory method for SignatureLoadModel. 
+ internal CalibratorTransformer(IHostEnvironment env, ModelLoadContext ctx, string loaderSignature) + : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(CalibratorTransformer))) + { + Contracts.AssertValue(ctx); + + _loaderSignature = loaderSignature; + ctx.CheckAtModel(GetVersionInfo()); + + // *** Binary format *** + // model: _calibrator + ctx.LoadModel(env, out _calibrator, @"Calibrator"); + } + + string ISingleFeaturePredictionTransformer.FeatureColumn => DefaultColumnNames.Score; + + ColumnType ISingleFeaturePredictionTransformer.FeatureColumnType => NumberType.Float; + + TICalibrator IPredictionTransformer.Model => _calibrator; + + bool ITransformer.IsRowToRowMapper => true; + + public override void Save(ModelSaveContext ctx) + { + Contracts.AssertValue(ctx); + ctx.CheckAtModel(); + ctx.SetVersionInfo(GetVersionInfo()); + + // *** Binary format *** + // model: _calibrator + ctx.SaveModel(_calibrator, @"Calibrator"); + } + + private protected override IRowMapper MakeRowMapper(Schema schema) => new Mapper(this, _calibrator, schema); + + protected VersionInfo GetVersionInfo() + { + return new VersionInfo( + modelSignature: "CALTRANS", + verWrittenCur: 0x00010001, // Initial + verReadableCur: 0x00010001, + verWeCanReadBack: 0x00010001, + loaderSignature: _loaderSignature, + loaderAssemblyName: typeof(CalibratorTransformer<>).Assembly.FullName); + } + + private sealed class Mapper : MapperBase + where TCalibrator : class, ICalibrator + { + private TCalibrator _calibrator; + private int _scoreColIndex; + private CalibratorTransformer _parent; + + internal Mapper(CalibratorTransformer parent, TCalibrator calibrator, Schema inputSchema) : + base(parent.Host, inputSchema) + { + _calibrator = calibrator; + _parent = parent; + + _scoreColIndex = inputSchema.GetColumnOrNull(DefaultColumnNames.Score)?.Index ?? 
-1; + + parent.Host.Check(_scoreColIndex > 0, "The data to calibrate contains no 'Score' column"); + } + + private protected override Func GetDependenciesCore(Func activeOutput) + => col => col == _scoreColIndex; + + public override void Save(ModelSaveContext ctx) => _parent.Save(ctx); + + protected override Schema.DetachedColumn[] GetOutputColumnsCore() + { + return new[] + { + new Schema.DetachedColumn(DefaultColumnNames.Probability, NumberType.Float, null) + }; + } + + protected override Delegate MakeGetter(Row input, int iinfo, Func activeOutput, out Action disposer) + { + Host.AssertValue(input); + disposer = null; + + Host.Assert(input.IsColumnActive(_scoreColIndex)); + var getScore = input.GetGetter(_scoreColIndex); + + float score = default; + + ValueGetter probability = (ref float dst) => + { + getScore(ref score); + dst = _calibrator.PredictProbability(score); + }; + + return probability; + } + } + } + + /// + /// The PlattCalibratorEstimator. + /// + /// + /// For the usage pattern see the example in . + /// + public sealed class PlattCalibratorEstimator : CalibratorEstimatorBase + { + /// + /// Initializes a new instance of + /// + /// The environment to use. + /// The predictor used to train the data. + /// The label column name. + /// The feature column name. + /// The weight column name. + public PlattCalibratorEstimator(IHostEnvironment env, + IPredictor predictor, + string labelColumn = DefaultColumnNames.Label, + string featureColumn = DefaultColumnNames.Features, + string weightColumn = null) : base(env, new PlattCalibratorTrainer(env), predictor, labelColumn, featureColumn, weightColumn) + { + + } + + protected override CalibratorTransformer Create(IHostEnvironment env, PlattCalibrator calibrator) + => new PlattCalibratorTransformer(env, calibrator); + } + + /// + /// Obtains the probability values by fitting the sigmoid: f(x) = 1 / (1 + exp(-slope * x + offset). + /// + /// + /// For the usage pattern see the example in . 
+ /// + public sealed class FixedPlattCalibratorEstimator : CalibratorEstimatorBase + { + /// + /// Initializes a new instance of + /// + /// The environment to use. + /// The predictor used to train the data. + /// The slope in the function of the exponent of the sigmoid. + /// The offset in the function of the exponent of the sigmoid. + /// The label column name. + /// The feature column name. + /// The weight column name. + public FixedPlattCalibratorEstimator(IHostEnvironment env, + IPredictor predictor, + double slope = 1, + double offset = 0, + string labelColumn = DefaultColumnNames.Label, + string featureColumn = DefaultColumnNames.Features, + string weightColumn = null) : base(env, new FixedPlattCalibratorTrainer(env, new FixedPlattCalibratorTrainer.Arguments() + { + Slope = slope, + Offset = offset + }), predictor, labelColumn, featureColumn, weightColumn) + { + + } + + protected override CalibratorTransformer Create(IHostEnvironment env, PlattCalibrator calibrator) + => new PlattCalibratorTransformer(env, calibrator); + } + + /// + /// The implementation obtained by training a or a . + /// + public sealed class PlattCalibratorTransformer : CalibratorTransformer + { + internal const string LoadName = "PlattCalibratTransf"; + + internal PlattCalibratorTransformer(IHostEnvironment env, PlattCalibrator calibrator) + : base(env, calibrator, LoadName) + { + + } + + // Factory method for SignatureLoadModel. + internal PlattCalibratorTransformer(IHostEnvironment env, ModelLoadContext ctx) + :base(env, ctx, LoadName) + { + + } + } + + /// + /// The naive binning-based calibratorEstimator. + /// + /// + /// It divides the range of the outputs into equally sized bins. In each bin, + /// the probability of belonging to class 1, is the number of class 1 instances in the bin, divided by the total number + /// of instances in the bin. + /// For the usage pattern see the example in . 
+ /// + public sealed class NaiveCalibratorEstimator : CalibratorEstimatorBase + { + /// + /// Initializes a new instance of + /// + /// The environment to use. + /// The predictor used to train the data. + /// The label column name. + /// The feature column name. + /// The weight column name. + public NaiveCalibratorEstimator(IHostEnvironment env, + IPredictor predictor, + string labelColumn = DefaultColumnNames.Label, + string featureColumn = DefaultColumnNames.Features, + string weightColumn = null) : base(env, new NaiveCalibratorTrainer(env), predictor, labelColumn, featureColumn, weightColumn) + { + + } + + protected override CalibratorTransformer Create(IHostEnvironment env, NaiveCalibrator calibrator) + => new NaiveCalibratorTransformer(env, calibrator); + } + + /// + /// The implementation obtained by training a + /// + public sealed class NaiveCalibratorTransformer : CalibratorTransformer + { + internal const string LoadName = "NaiveCalibratTransf"; + + internal NaiveCalibratorTransformer(IHostEnvironment env, NaiveCalibrator calibrator) + : base(env, calibrator, LoadName) + { + + } + + // Factory method for SignatureLoadModel. + internal NaiveCalibratorTransformer(IHostEnvironment env, ModelLoadContext ctx) + : base(env, ctx, LoadName) + { + + } + } + + /// + /// The PavCalibratorEstimator. + /// + /// + /// For the usage pattern see the example in . + /// + public sealed class PavCalibratorEstimator : CalibratorEstimatorBase + { + /// + /// Initializes a new instance of + /// + /// The environment to use. + /// The predictor used to train the data. + /// The label column name. + /// The feature column name. + /// The weight column name. 
+ public PavCalibratorEstimator(IHostEnvironment env, + IPredictor predictor, + string labelColumn = DefaultColumnNames.Label, + string featureColumn = DefaultColumnNames.Features, + string weightColumn = null) : base(env, new PavCalibratorTrainer(env), predictor, labelColumn, featureColumn, weightColumn) + { + + } + + protected override CalibratorTransformer Create(IHostEnvironment env, PavCalibrator calibrator) + => new PavCalibratorTransformer(env, calibrator); + + } + + /// + /// The implementation obtained by training a + /// + public sealed class PavCalibratorTransformer : CalibratorTransformer + { + internal const string LoadName = "PavCalibratTransf"; + + internal PavCalibratorTransformer(IHostEnvironment env, PavCalibrator calibrator) + : base(env, calibrator, LoadName) + { + + } + + // Factory method for SignatureLoadModel. + private PavCalibratorTransformer(IHostEnvironment env, ModelLoadContext ctx) + : base(env, ctx, LoadName) + { + + } + } +} diff --git a/src/Microsoft.ML.Data/Prediction/IPredictionTransformer.cs b/src/Microsoft.ML.Data/Prediction/IPredictionTransformer.cs index 06c1894f0a..d3f1e9ddf2 100644 --- a/src/Microsoft.ML.Data/Prediction/IPredictionTransformer.cs +++ b/src/Microsoft.ML.Data/Prediction/IPredictionTransformer.cs @@ -2,7 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using System.Collections.Generic; +using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Runtime.Data; @@ -13,9 +13,8 @@ namespace Microsoft.ML.Runtime /// The implemendations of this interface either have no feature column, or have more than one feature column, and cannot implement the /// , which most of the ML.Net tranformer implement. /// - /// The used for the data transformation. + /// The or used for the data transformation. 
public interface IPredictionTransformer : ITransformer - where TModel : IPredictor { TModel Model { get; } } @@ -25,9 +24,8 @@ public interface IPredictionTransformer : ITransformer /// and its type, . Implementations of this interface, have the ability /// to score the data of an input through the /// - /// The used for the data transformation. + /// The or used for the data transformation. public interface ISingleFeaturePredictionTransformer : IPredictionTransformer - where TModel : IPredictor { /// The name of the feature column. string FeatureColumn { get; } @@ -35,4 +33,4 @@ public interface ISingleFeaturePredictionTransformer : IPredictionTr /// Holds information about the type of the feature column. ColumnType FeatureColumnType { get; } } -} +} \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Scorers/PredictionTransformer.cs b/src/Microsoft.ML.Data/Scorers/PredictionTransformer.cs index c6f529429f..2441e0b0d2 100644 --- a/src/Microsoft.ML.Data/Scorers/PredictionTransformer.cs +++ b/src/Microsoft.ML.Data/Scorers/PredictionTransformer.cs @@ -245,7 +245,7 @@ public sealed class AnomalyPredictionTransformer : SingleFeaturePredicti public AnomalyPredictionTransformer(IHostEnvironment env, TModel model, Schema inputSchema, string featureColumn, float threshold = 0f, string thresholdColumn = DefaultColumnNames.Score) - : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(BinaryPredictionTransformer)), model, inputSchema, featureColumn) + : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(AnomalyPredictionTransformer)),model, inputSchema, featureColumn) { Host.CheckNonEmpty(thresholdColumn, nameof(thresholdColumn)); Threshold = threshold; diff --git a/src/Microsoft.ML.Data/Training/ITrainerEstimator.cs b/src/Microsoft.ML.Data/Training/ITrainerEstimator.cs index 2c9942e8d0..1194abd9a1 100644 --- a/src/Microsoft.ML.Data/Training/ITrainerEstimator.cs +++ b/src/Microsoft.ML.Data/Training/ITrainerEstimator.cs @@ -6,9 +6,9 @@ namespace 
Microsoft.ML.Runtime.Training { - public interface ITrainerEstimator: IEstimator - where TTransformer: ISingleFeaturePredictionTransformer - where TPredictor: IPredictor + public interface ITrainerEstimator : IEstimator + where TTransformer : ISingleFeaturePredictionTransformer + where TPredictor : IPredictor { TrainerInfo Info { get; } diff --git a/src/Microsoft.ML.Data/Training/TrainerUtils.cs b/src/Microsoft.ML.Data/Training/TrainerUtils.cs index 0375695bcc..571b9119d3 100644 --- a/src/Microsoft.ML.Data/Training/TrainerUtils.cs +++ b/src/Microsoft.ML.Data/Training/TrainerUtils.cs @@ -354,11 +354,11 @@ public static SchemaShape.Column MakeBoolScalarLabel(string labelColumn) => new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false); /// - /// The for the label column for regression tasks. + /// The for the float type columns. /// - /// name of the weight column - public static SchemaShape.Column MakeR4ScalarLabel(string labelColumn) - => new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); + /// name of the column + public static SchemaShape.Column MakeR4ScalarColumn(string columnName) + => new SchemaShape.Column(columnName, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); /// /// The for the label column for regression tasks. diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 7765535aed..c61b809813 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs b/src/Microsoft.ML.FastTree/FastTreeClassification.cs index 5fd1feaa94..4ac4641da3 100644 --- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs +++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index c17d86f9e8..dc6073f324 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -83,7 +83,7 @@ public FastTreeRankingTrainer(IHostEnvironment env, int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null) - : base(env, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weightColumn, groupIdColumn, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) + : base(env, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weightColumn, groupIdColumn, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) { Host.CheckNonEmpty(groupIdColumn, nameof(groupIdColumn)); } @@ -92,7 +92,7 @@ public FastTreeRankingTrainer(IHostEnvironment env, /// Initializes a new instance of by using the legacy class. 
/// internal FastTreeRankingTrainer(IHostEnvironment env, Arguments args) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } diff --git a/src/Microsoft.ML.FastTree/FastTreeRegression.cs b/src/Microsoft.ML.FastTree/FastTreeRegression.cs index 5a19f91e54..6130ac669b 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRegression.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRegression.cs @@ -73,7 +73,7 @@ public FastTreeRegressionTrainer(IHostEnvironment env, int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null) - : base(env, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) + : base(env, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) { } @@ -81,7 +81,7 @@ public FastTreeRegressionTrainer(IHostEnvironment env, /// Initializes a new instance of by using the legacy class. 
/// internal FastTreeRegressionTrainer(IHostEnvironment env, Arguments args) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } diff --git a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs index aa0b4e90e0..af3f1d52f7 100644 --- a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs +++ b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs @@ -70,7 +70,7 @@ public FastTreeTweedieTrainer(IHostEnvironment env, int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null) - : base(env, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) + : base(env, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); Host.CheckNonEmpty(featureColumn, nameof(featureColumn)); @@ -82,7 +82,7 @@ public FastTreeTweedieTrainer(IHostEnvironment env, /// Initializes a new instance of by using the legacy class. /// internal FastTreeTweedieTrainer(IHostEnvironment env, Arguments args) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { Initialize(); } diff --git a/src/Microsoft.ML.FastTree/GamClassification.cs b/src/Microsoft.ML.FastTree/GamClassification.cs index ee094a384d..a256196dd3 100644 --- a/src/Microsoft.ML.FastTree/GamClassification.cs +++ b/src/Microsoft.ML.FastTree/GamClassification.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; diff --git a/src/Microsoft.ML.FastTree/GamRegression.cs b/src/Microsoft.ML.FastTree/GamRegression.cs index a0e4ead4be..d8e7677975 100644 --- a/src/Microsoft.ML.FastTree/GamRegression.cs +++ b/src/Microsoft.ML.FastTree/GamRegression.cs @@ -43,7 +43,7 @@ public partial class Arguments : ArgumentsBase public override PredictionKind PredictionKind => PredictionKind.Regression; internal RegressionGamTrainer(IHostEnvironment env, Arguments args) - : base(env, args, LoadNameValue, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) { } + : base(env, args, LoadNameValue, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } /// /// Initializes a new instance of @@ -64,7 +64,7 @@ public RegressionGamTrainer(IHostEnvironment env, double learningRate = GamDefaults.LearningRates, int maxBins = GamDefaults.MaxBins, Action advancedSettings = null) - : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weightColumn, numIterations, learningRate, maxBins, advancedSettings) + : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weightColumn, numIterations, learningRate, maxBins, advancedSettings) { } diff --git a/src/Microsoft.ML.FastTree/RandomForestClassification.cs b/src/Microsoft.ML.FastTree/RandomForestClassification.cs index 0f2cbe5fdc..29fb13f6ee 100644 --- a/src/Microsoft.ML.FastTree/RandomForestClassification.cs +++ b/src/Microsoft.ML.FastTree/RandomForestClassification.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; diff --git a/src/Microsoft.ML.FastTree/RandomForestRegression.cs b/src/Microsoft.ML.FastTree/RandomForestRegression.cs index 977a870f9d..8923a06ca6 100644 --- a/src/Microsoft.ML.FastTree/RandomForestRegression.cs +++ b/src/Microsoft.ML.FastTree/RandomForestRegression.cs @@ -174,7 +174,7 @@ public FastForestRegression(IHostEnvironment env, int minDatapointsInLeaves = Defaults.MinDocumentsInLeaves, double learningRate = Defaults.LearningRates, Action advancedSettings = null) - : base(env, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) + : base(env, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weightColumn, null, numLeaves, numTrees, minDatapointsInLeaves, learningRate, advancedSettings) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); Host.CheckNonEmpty(featureColumn, nameof(featureColumn)); @@ -184,7 +184,7 @@ public FastForestRegression(IHostEnvironment env, /// Initializes a new instance of by using the legacy class. 
/// public FastForestRegression(IHostEnvironment env, Arguments args) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn), true) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn), true) { } diff --git a/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs b/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs index e621b87b8a..21a8063218 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsemble/TreeEnsembleCombiner.cs @@ -55,7 +55,7 @@ public IPredictor CombineModels(IEnumerable models) _host.Check(calibrated.Calibrator is PlattCalibrator, "Combining FastTree models can only be done when the models are calibrated with Platt calibrator"); predictor = calibrated.SubPredictor; - paramA = -(calibrated.Calibrator as PlattCalibrator).ParamA; + paramA = -(calibrated.Calibrator as PlattCalibrator).Slope; } var tree = predictor as TreeEnsembleModelParameters; if (tree == null) diff --git a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs index 6ff155feea..f8c0a8fdb5 100644 --- a/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs +++ b/src/Microsoft.ML.HalLearners/OlsLinearRegression.cs @@ -88,7 +88,7 @@ public OlsLinearRegressionTrainer(IHostEnvironment env, /// internal OlsLinearRegressionTrainer(IHostEnvironment env, Arguments args) : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), - TrainerUtils.MakeR4ScalarLabel(args.LabelColumn), TrainerUtils.MakeR4ScalarWeightColumn(args.WeightColumn, args.WeightColumn.IsExplicit)) + TrainerUtils.MakeR4ScalarColumn(args.LabelColumn), TrainerUtils.MakeR4ScalarWeightColumn(args.WeightColumn, args.WeightColumn.IsExplicit)) { Host.CheckValue(args, nameof(args)); Host.CheckUserArg(args.L2Weight >= 0, nameof(args.L2Weight), "L2 regularization term cannot be negative"); diff --git 
a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs index b6afe58b5b..bb544bcd1f 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmBinaryTrainer.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Calibrator; using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; diff --git a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs index 2d27820592..a92e593c82 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRankingTrainer.cs @@ -83,7 +83,7 @@ public sealed class LightGbmRankingTrainer : LightGbmTrainerBase PredictionKind.Ranking; internal LightGbmRankingTrainer(IHostEnvironment env, LightGbmArguments args) - : base(env, LoadNameValue, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, LoadNameValue, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } @@ -113,7 +113,7 @@ public LightGbmRankingTrainer(IHostEnvironment env, double? 
learningRate = null, int numBoostRound = LightGbmArguments.Defaults.NumBoostRound, Action advancedSettings = null) - : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weights, groupId, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings) + : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weights, groupId, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings) { Host.CheckNonEmpty(groupId, nameof(groupId)); } diff --git a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs index f07baba4cb..26bac8c8aa 100644 --- a/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs +++ b/src/Microsoft.ML.LightGBM/LightGbmRegressionTrainer.cs @@ -110,12 +110,12 @@ public LightGbmRegressorTrainer(IHostEnvironment env, double? learningRate = null, int numBoostRound = LightGbmArguments.Defaults.NumBoostRound, Action advancedSettings = null) - : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarLabel(labelColumn), featureColumn, weights, null, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings) + : base(env, LoadNameValue, TrainerUtils.MakeR4ScalarColumn(labelColumn), featureColumn, weights, null, numLeaves, minDataPerLeaf, learningRate, numBoostRound, advancedSettings) { } internal LightGbmRegressorTrainer(IHostEnvironment env, LightGbmArguments args) - : base(env, LoadNameValue, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, LoadNameValue, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } diff --git a/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs b/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs index 714a3532eb..76357ff264 100644 --- a/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs +++ b/src/Microsoft.ML.SamplesUtils/SamplesDatasetUtils.cs @@ -21,7 +21,7 @@ public static string DownloadHousingRegressionDataset() /// Downloads the 
wikipedia detox dataset from the ML.NET repo. /// public static string DownloadSentimentDataset() - => Download("https://github.com/dotnet/machinelearning/blob/76cb2cdf5cc8b6c88ca44b8969153836e589df04/test/data/wikipedia-detox-250-line-data.tsv", "sentiment.tsv"); + => Download("https://raw.githubusercontent.com/dotnet/machinelearning/76cb2cdf5cc8b6c88ca44b8969153836e589df04/test/data/wikipedia-detox-250-line-data.tsv", "sentiment.tsv"); /// /// Downloads the adult dataset from the ML.NET repo. diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs index 7e380a2158..4ed8c1a260 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Calibrator; using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs index 0c06870dd6..dbe8e44347 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Ova.cs @@ -134,7 +134,7 @@ private ISingleFeaturePredictionTransformer TrainOne(IChannel var trainedData = new RoleMappedData(view, label: trainerLabel, feature: transformer.FeatureColumn); if (calibratedModel == null) - calibratedModel = CalibratorUtils.TrainCalibrator(Host, ch, Calibrator, Args.MaxCalibrationExamples, transformer.Model, trainedData) as TDistPredictor; + calibratedModel = CalibratorUtils.GetCalibratedPredictor(Host, ch, Calibrator, transformer.Model, trainedData, Args.MaxCalibrationExamples) as TDistPredictor; Host.Check(calibratedModel != null, "Calibrated predictor 
does not implement the expected interface"); return new BinaryPredictionTransformer(Host, calibratedModel, trainedData.Data.Schema, transformer.FeatureColumn); diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs index dd8129284d..46e54b0c03 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/Pkpd.cs @@ -137,7 +137,7 @@ private ISingleFeaturePredictionTransformer TrainOne(IChannel ch var calibratedModel = transformer.Model as TDistPredictor; if (calibratedModel == null) - calibratedModel = CalibratorUtils.TrainCalibrator(Host, ch, Calibrator, Args.MaxCalibrationExamples, transformer.Model, trainedData) as TDistPredictor; + calibratedModel = CalibratorUtils.GetCalibratedPredictor(Host, ch, Calibrator, transformer.Model, trainedData, Args.MaxCalibrationExamples) as TDistPredictor; return new BinaryPredictionTransformer(Host, calibratedModel, trainedData.Data.Schema, transformer.FeatureColumn); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs index c2d6baec9f..5aa28ad584 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs @@ -138,7 +138,7 @@ public TrivialFactory(IRegressionLoss loss) } internal OnlineGradientDescentTrainer(IHostEnvironment env, Arguments args) - : base(args, env, UserNameValue, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(args, env, UserNameValue, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { LossFunction = args.LossFunction.CreateComponent(env); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs index 
3c56292c47..eadb146e3a 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs @@ -64,7 +64,7 @@ public PoissonRegression(IHostEnvironment env, int memorySize = Arguments.Defaults.MemorySize, bool enforceNoNegativity = Arguments.Defaults.EnforceNonNegativity, Action advancedSettings = null) - : base(env, featureColumn, TrainerUtils.MakeR4ScalarLabel(labelColumn), weights, advancedSettings, + : base(env, featureColumn, TrainerUtils.MakeR4ScalarColumn(labelColumn), weights, advancedSettings, l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity) { Host.CheckNonEmpty(featureColumn, nameof(featureColumn)); @@ -75,7 +75,7 @@ public PoissonRegression(IHostEnvironment env, /// Initializes a new instance of /// internal PoissonRegression(IHostEnvironment env, Arguments args) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(args.LabelColumn)) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(args.LabelColumn)) { } diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index 4d10d0f702..1c6645a78f 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -19,6 +19,7 @@ using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; using System; +using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; @@ -1465,23 +1466,35 @@ public SdcaBinaryTrainer(IHostEnvironment env, Info = new TrainerInfo(calibration: !(_loss is LogLoss)); _positiveInstanceWeight = Args.PositiveInstanceWeight; - if (Info.NeedCalibration) + var outCols = new List() { - _outputColumns = new[] - { - new SchemaShape.Column(DefaultColumnNames.Score, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new 
SchemaShape(MetadataUtils.GetTrainerOutputMetadata())), - new SchemaShape.Column(DefaultColumnNames.PredictedLabel, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) - }; - } - else + new SchemaShape.Column( + DefaultColumnNames.Score, + SchemaShape.Column.VectorKind.Scalar, + NumberType.R4, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata()) + ), + new SchemaShape.Column( + DefaultColumnNames.PredictedLabel, + SchemaShape.Column.VectorKind.Scalar, + BoolType.Instance, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) + + }; + + if (!Info.NeedCalibration) { - _outputColumns = new[] - { - new SchemaShape.Column(DefaultColumnNames.Score, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())), - new SchemaShape.Column(DefaultColumnNames.Probability, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata(true))), - new SchemaShape.Column(DefaultColumnNames.PredictedLabel, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) - }; - } + outCols.Insert(1, new SchemaShape.Column( + DefaultColumnNames.Probability, + SchemaShape.Column.VectorKind.Scalar, + NumberType.R4, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata(true)))); + }; + + _outputColumns = outCols.ToArray(); } internal SdcaBinaryTrainer(IHostEnvironment env, Arguments args, @@ -1493,23 +1506,35 @@ internal SdcaBinaryTrainer(IHostEnvironment env, Arguments args, Info = new TrainerInfo(calibration: !(_loss is LogLoss)); _positiveInstanceWeight = Args.PositiveInstanceWeight; - if (Info.NeedCalibration) + var outCols = new List() { - _outputColumns = new[] - { - new SchemaShape.Column(DefaultColumnNames.Score, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new 
SchemaShape(MetadataUtils.GetTrainerOutputMetadata())), - new SchemaShape.Column(DefaultColumnNames.PredictedLabel, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) - }; - } - else + new SchemaShape.Column( + DefaultColumnNames.Score, + SchemaShape.Column.VectorKind.Scalar, + NumberType.R4, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata()) + ), + new SchemaShape.Column( + DefaultColumnNames.PredictedLabel, + SchemaShape.Column.VectorKind.Scalar, + BoolType.Instance, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) + + }; + + if (!Info.NeedCalibration) { - _outputColumns = new[] - { - new SchemaShape.Column(DefaultColumnNames.Score, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())), - new SchemaShape.Column(DefaultColumnNames.Probability, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata(true))), - new SchemaShape.Column(DefaultColumnNames.PredictedLabel, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false, new SchemaShape(MetadataUtils.GetTrainerOutputMetadata())) - }; - } + outCols.Insert(1, new SchemaShape.Column( + DefaultColumnNames.Probability, + SchemaShape.Column.VectorKind.Scalar, + NumberType.R4, + false, + new SchemaShape(MetadataUtils.GetTrainerOutputMetadata(true)))); + }; + + _outputColumns = outCols.ToArray(); } @@ -1532,23 +1557,6 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) error(); } - private static SchemaShape.Column MakeWeightColumn(string weightColumn) - { - if (weightColumn == null) - return default; - return new SchemaShape.Column(weightColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false); - } - - private static SchemaShape.Column MakeLabelColumn(string labelColumn) - { - return new SchemaShape.Column(labelColumn, 
SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false); - } - - private static SchemaShape.Column MakeFeatureColumn(string featureColumn) - { - return new SchemaShape.Column(featureColumn, SchemaShape.Column.VectorKind.Vector, NumberType.R4, false); - } - protected override TScalarPredictor CreatePredictor(VBuffer[] weights, float[] bias) { Host.CheckParam(Utils.Size(weights) == 1, nameof(weights)); @@ -1561,7 +1569,7 @@ protected override TScalarPredictor CreatePredictor(VBuffer[] weights, fl Conversions.Instance.GetIsDefaultPredicate(NumberType.Float)); var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]); - if (!(_loss is LogLoss)) + if (Info.NeedCalibration) return predictor; return new ParameterMixingCalibratedPredictor(Host, predictor, new PlattCalibrator(Host, -1, 0)); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs index 0eec9f2588..b8bd077840 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs @@ -75,7 +75,7 @@ public SdcaRegressionTrainer(IHostEnvironment env, float? l1Threshold = null, int? 
maxIterations = null, Action advancedSettings = null) - : base(env, featureColumn, TrainerUtils.MakeR4ScalarLabel(labelColumn), TrainerUtils.MakeR4ScalarWeightColumn(weights), advancedSettings, + : base(env, featureColumn, TrainerUtils.MakeR4ScalarColumn(labelColumn), TrainerUtils.MakeR4ScalarWeightColumn(weights), advancedSettings, l2Const, l1Threshold, maxIterations) { Host.CheckNonEmpty(featureColumn, nameof(featureColumn)); @@ -85,7 +85,7 @@ public SdcaRegressionTrainer(IHostEnvironment env, } internal SdcaRegressionTrainer(IHostEnvironment env, Arguments args, string featureColumn, string labelColumn, string weightColumn = null) - : base(env, args, TrainerUtils.MakeR4ScalarLabel(labelColumn), TrainerUtils.MakeR4ScalarWeightColumn(weightColumn)) + : base(env, args, TrainerUtils.MakeR4ScalarColumn(labelColumn), TrainerUtils.MakeR4ScalarWeightColumn(weightColumn)) { Host.CheckValue(labelColumn, nameof(labelColumn)); Host.CheckValue(featureColumn, nameof(featureColumn)); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs new file mode 100644 index 0000000000..63bfc8d960 --- /dev/null +++ b/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs @@ -0,0 +1,144 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using Microsoft.ML.Calibrator; +using Microsoft.ML.Core.Data; +using Microsoft.ML.Data; +using Microsoft.ML.Runtime.Data; +using Microsoft.ML.Runtime.Learners; +using Microsoft.ML.Trainers.Online; +using Xunit; + +namespace Microsoft.ML.Tests.TrainerEstimators +{ + public partial class TrainerEstimators + { + /// + /// OVA and calibrators + /// + [Fact] + public void PlattCalibratorEstimator() + { + var calibratorTestData = GetCalibratorTestData(); + + // platCalibrator + var platCalibratorEstimator = new PlattCalibratorEstimator(Env, calibratorTestData.transformer.Model, "Label", "Features"); + var platCalibratorTransformer = platCalibratorEstimator.Fit(calibratorTestData.scoredData); + + //testData + checkValidCalibratedData(calibratorTestData.scoredData, platCalibratorTransformer); + + //test estimator + TestEstimatorCore(platCalibratorEstimator, calibratorTestData.scoredData); + + Done(); + } + + /// + /// OVA and calibrators + /// + [Fact] + public void FixedPlatCalibratorEstimator() + { + var calibratorTestData = GetCalibratorTestData(); + + // fixedPlatCalibrator + var fixedPlatCalibratorEstimator = new FixedPlattCalibratorEstimator(Env, calibratorTestData.transformer.Model, labelColumn: "Label", featureColumn: "Features"); + var fixedPlatCalibratorTransformer = fixedPlatCalibratorEstimator.Fit(calibratorTestData.scoredData); + + checkValidCalibratedData(calibratorTestData.scoredData, fixedPlatCalibratorTransformer); + + //test estimator + TestEstimatorCore(calibratorTestData.pipeline, calibratorTestData.data); + + Done(); + } + + /// + /// OVA and calibrators + /// + [Fact] + public void NaiveCalibratorEstimator() + { + var calibratorTestData = GetCalibratorTestData(); + + // naive calibrator + var naiveCalibratorEstimator = new NaiveCalibratorEstimator(Env, calibratorTestData.transformer.Model, "Label", "Features"); + var naiveCalibratorTransformer = naiveCalibratorEstimator.Fit(calibratorTestData.scoredData); + + // check data + 
checkValidCalibratedData(calibratorTestData.scoredData, naiveCalibratorTransformer); + + //test estimator + TestEstimatorCore(calibratorTestData.pipeline, calibratorTestData.data); + + Done(); + } + /// + /// OVA and calibrators + /// + [Fact] + public void PavCalibratorEstimator() + { + var calibratorTestData = GetCalibratorTestData(); + + // pav calibrator + var pavCalibratorEstimator = new PavCalibratorEstimator(Env, calibratorTestData.transformer.Model, "Label", "Features"); + var pavCalibratorTransformer = pavCalibratorEstimator.Fit(calibratorTestData.scoredData); + + //check data + checkValidCalibratedData(calibratorTestData.scoredData, pavCalibratorTransformer); + + //test estimator + TestEstimatorCore(calibratorTestData.pipeline, calibratorTestData.data); + + Done(); + } + + CalibratorTestData GetCalibratorTestData() + { + var (pipeline, data) = GetBinaryClassificationPipeline(); + var binaryTrainer = new AveragedPerceptronTrainer(Env); + + pipeline = pipeline.Append(binaryTrainer); + + var transformer = pipeline.Fit(data); + var scoredData = transformer.Transform(data); + var scoredDataPreview = scoredData.Preview(); + Assert.True(scoredDataPreview.ColumnView.Length == 5); + + return new CalibratorTestData + { + data = data, + scoredData = scoredData, + pipeline = pipeline, + transformer = ((TransformerChain>)transformer).LastTransformer as BinaryPredictionTransformer, + }; + } + + private class CalibratorTestData + { + internal IDataView data { get; set; } + internal IDataView scoredData { get; set; } + internal IEstimator pipeline { get; set; } + + internal BinaryPredictionTransformer transformer { get; set; } + } + + + void checkValidCalibratedData (IDataView scoredData, ITransformer transformer){ + + var calibratedData = transformer.Transform(scoredData).Preview(); + + Assert.True(calibratedData.ColumnView.Length == 6); + + for (int i = 0; i < 10; i++) + { + var probability = calibratedData.RowView[i].Values[5]; + 
Assert.InRange((float)probability.Value, 0, 1); + } + } + + } +} diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs index 8baf5a2d39..e0d86ba784 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs @@ -8,7 +8,6 @@ using Microsoft.ML.Trainers; using Microsoft.ML.Trainers.KMeans; using Microsoft.ML.Trainers.PCA; -using Microsoft.ML.Transforms.Categorical; using Microsoft.ML.Transforms.Conversions; using Microsoft.ML.Transforms.Text; using Xunit; diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameAnalyzer.cs index a98930f3ab..578048ad57 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameAnalyzer.cs @@ -45,8 +45,8 @@ internal static Diagnostic CreateDiagnostic(DiagnosticDescriptor rule, SyntaxTok internal static class PrivateFieldName { public const string Id = "MSML_PrivateFieldName"; - private const string Title = "Private field name not _camelCased"; - private const string Format = "Private field name '{0}' not _camelCased"; + private const string Title = "Private field name not in: _camelCase format"; + private const string Format = "Private field name '{0}' not in: _camelCase format"; private const string Description = "Private fields should have an _ prefix and be _lowerCamelCased, unless they are const."; From 104d4b224a3955013d0da334f2efc87f5b8581ff Mon Sep 17 00:00:00 2001 From: Monte Hoover <37886197+montebhoover@users.noreply.github.com> Date: Fri, 14 Dec 2018 14:14:39 -0800 Subject: [PATCH 057/100] Move entrypoints from Microsoft.ML.Legacy to Microsoft.ML.Entrypoints. 
(#1859) --- Microsoft.ML.sln | 11 +++++++++++ .../Microsoft.ML.EntryPoints.nupkgproj | 12 ++++++++++++ .../Microsoft.ML.EntryPoints.symbols.nupkgproj | 5 +++++ src/Microsoft.ML.Core/Properties/AssemblyInfo.cs | 1 + src/Microsoft.ML.Data/Properties/AssemblyInfo.cs | 1 + .../CVSplit.cs | 0 .../CrossValidationBinaryMacro.cs | 0 .../CrossValidationMacro.cs | 0 .../DataViewReference.cs | 0 .../FeatureCombiner.cs | 0 .../ImportTextData.cs | 0 .../MacroUtils.cs | 0 .../Microsoft.ML.EntryPoints.csproj | 12 ++++++++++++ .../ModelOperations.cs | 0 .../OneVersusAllMacro.cs | 0 .../Properties/AssemblyInfo.cs | 9 +++++++++ .../TrainTestBinaryMacro.cs | 0 .../TrainTestMacro.cs | 0 .../TrainTestSplit.cs | 0 src/Microsoft.ML.StandardLearners/AssemblyInfo.cs | 1 + .../Microsoft.ML.Core.Tests.csproj | 1 + .../Microsoft.ML.FSharp.Tests.fsproj | 9 ++------- test/Microsoft.ML.FSharp.Tests/SmokeTests.fs | 9 ++++++--- .../EnvironmentExtensions.cs | 2 ++ .../Microsoft.ML.TestFramework.csproj | 1 + test/Microsoft.ML.Tests/Microsoft.ML.Tests.csproj | 1 + 26 files changed, 65 insertions(+), 10 deletions(-) create mode 100644 pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj create mode 100644 pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/CVSplit.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/CrossValidationBinaryMacro.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/CrossValidationMacro.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/DataViewReference.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/FeatureCombiner.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/ImportTextData.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => 
Microsoft.ML.EntryPoints}/MacroUtils.cs (100%) create mode 100644 src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/ModelOperations.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/OneVersusAllMacro.cs (100%) create mode 100644 src/Microsoft.ML.EntryPoints/Properties/AssemblyInfo.cs rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/TrainTestBinaryMacro.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/TrainTestMacro.cs (100%) rename src/{Microsoft.ML.Legacy/Runtime/EntryPoints => Microsoft.ML.EntryPoints}/TrainTestSplit.cs (100%) diff --git a/Microsoft.ML.sln b/Microsoft.ML.sln index 4a1eede6ce..bc2a797c15 100644 --- a/Microsoft.ML.sln +++ b/Microsoft.ML.sln @@ -137,6 +137,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeatur EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeaturizer.ResNet101", "src\Microsoft.ML.DnnImageFeaturizer.ResNet101\Microsoft.ML.DnnImageFeaturizer.ResNet101.csproj", "{DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.EntryPoints", "src\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.csproj", "{7504D46F-E4B3-43CB-9B1C-82F3131F1C99}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -521,6 +523,14 @@ Global {DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71}.Release|Any CPU.Build.0 = Release|Any CPU {DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71}.Release-Intrinsics|Any CPU.ActiveCfg = Release-Intrinsics|Any CPU {DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Debug|Any CPU.Build.0 = Debug|Any 
CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Debug-Intrinsics|Any CPU.ActiveCfg = Debug-Intrinsics|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Debug-Intrinsics|Any CPU.Build.0 = Debug-Intrinsics|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Release|Any CPU.Build.0 = Release|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Release-Intrinsics|Any CPU.ActiveCfg = Release-Intrinsics|Any CPU + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -578,6 +588,7 @@ Global {6C29AA9B-054B-4762-BEA5-D305B932AA80} = {09EADF06-BE25-4228-AB53-95AE3E15B530} {4805129D-78C8-46D4-9519-0AD9B0574D6D} = {09EADF06-BE25-4228-AB53-95AE3E15B530} {DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71} = {09EADF06-BE25-4228-AB53-95AE3E15B530} + {7504D46F-E4B3-43CB-9B1C-82F3131F1C99} = {09EADF06-BE25-4228-AB53-95AE3E15B530} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D} diff --git a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj new file mode 100644 index 0000000000..b845fdeb45 --- /dev/null +++ b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj @@ -0,0 +1,12 @@ + + + + netstandard2.0 + Microsoft.ML.EntryPoints contains the ML.NET entry point API catalog. 
+ + + + + + + diff --git a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj new file mode 100644 index 0000000000..3fa0255960 --- /dev/null +++ b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj @@ -0,0 +1,5 @@ + + + + + diff --git a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs index 0ed78e89a6..50b698c5b9 100644 --- a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs @@ -14,6 +14,7 @@ [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.StaticPipelineTesting" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.OnnxTransformTest" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.EntryPoints" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Legacy" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Maml" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.ResultProcessor" + PublicKey.Value)] diff --git a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs index a33348aa8f..50febaa558 100644 --- a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs +++ b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs @@ -13,6 +13,7 @@ [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Predictor.Tests" + PublicKey.TestValue)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TimeSeries.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.EntryPoints" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Legacy" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Maml" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.ResultProcessor" + 
PublicKey.Value)] diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CVSplit.cs b/src/Microsoft.ML.EntryPoints/CVSplit.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/CVSplit.cs rename to src/Microsoft.ML.EntryPoints/CVSplit.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs b/src/Microsoft.ML.EntryPoints/CrossValidationBinaryMacro.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationBinaryMacro.cs rename to src/Microsoft.ML.EntryPoints/CrossValidationBinaryMacro.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs b/src/Microsoft.ML.EntryPoints/CrossValidationMacro.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/CrossValidationMacro.cs rename to src/Microsoft.ML.EntryPoints/CrossValidationMacro.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/DataViewReference.cs b/src/Microsoft.ML.EntryPoints/DataViewReference.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/DataViewReference.cs rename to src/Microsoft.ML.EntryPoints/DataViewReference.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs b/src/Microsoft.ML.EntryPoints/FeatureCombiner.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/FeatureCombiner.cs rename to src/Microsoft.ML.EntryPoints/FeatureCombiner.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs b/src/Microsoft.ML.EntryPoints/ImportTextData.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/ImportTextData.cs rename to src/Microsoft.ML.EntryPoints/ImportTextData.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/MacroUtils.cs b/src/Microsoft.ML.EntryPoints/MacroUtils.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/MacroUtils.cs rename to 
src/Microsoft.ML.EntryPoints/MacroUtils.cs diff --git a/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj b/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj new file mode 100644 index 0000000000..8b5f16c6cd --- /dev/null +++ b/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj @@ -0,0 +1,12 @@ + + + + netstandard2.0 + Microsoft.ML.EntryPoints + + + + + + + diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs b/src/Microsoft.ML.EntryPoints/ModelOperations.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/ModelOperations.cs rename to src/Microsoft.ML.EntryPoints/ModelOperations.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs b/src/Microsoft.ML.EntryPoints/OneVersusAllMacro.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/OneVersusAllMacro.cs rename to src/Microsoft.ML.EntryPoints/OneVersusAllMacro.cs diff --git a/src/Microsoft.ML.EntryPoints/Properties/AssemblyInfo.cs b/src/Microsoft.ML.EntryPoints/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..530c78cc6b --- /dev/null +++ b/src/Microsoft.ML.EntryPoints/Properties/AssemblyInfo.cs @@ -0,0 +1,9 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using Microsoft.ML; +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("Microsoft.ML.Tests" + PublicKey.TestValue)] +[assembly: InternalsVisibleTo("Microsoft.ML.Core.Tests" + PublicKey.TestValue)] \ No newline at end of file diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs b/src/Microsoft.ML.EntryPoints/TrainTestBinaryMacro.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestBinaryMacro.cs rename to src/Microsoft.ML.EntryPoints/TrainTestBinaryMacro.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs b/src/Microsoft.ML.EntryPoints/TrainTestMacro.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestMacro.cs rename to src/Microsoft.ML.EntryPoints/TrainTestMacro.cs diff --git a/src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestSplit.cs b/src/Microsoft.ML.EntryPoints/TrainTestSplit.cs similarity index 100% rename from src/Microsoft.ML.Legacy/Runtime/EntryPoints/TrainTestSplit.cs rename to src/Microsoft.ML.EntryPoints/TrainTestSplit.cs diff --git a/src/Microsoft.ML.StandardLearners/AssemblyInfo.cs b/src/Microsoft.ML.StandardLearners/AssemblyInfo.cs index 671913b203..98b00cf33a 100644 --- a/src/Microsoft.ML.StandardLearners/AssemblyInfo.cs +++ b/src/Microsoft.ML.StandardLearners/AssemblyInfo.cs @@ -5,6 +5,7 @@ using System.Runtime.CompilerServices; using Microsoft.ML; +[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.EntryPoints" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Legacy" + PublicKey.Value)] [assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.HalLearners" + PublicKey.Value)] diff --git a/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj b/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj index d50494290e..8ff5e2dcfd 100644 --- a/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj +++ 
b/test/Microsoft.ML.Core.Tests/Microsoft.ML.Core.Tests.csproj @@ -4,6 +4,7 @@ + diff --git a/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj b/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj index 406b4a41a7..bdf496e780 100644 --- a/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj +++ b/test/Microsoft.ML.FSharp.Tests/Microsoft.ML.FSharp.Tests.fsproj @@ -14,13 +14,7 @@ - - - - - - - + @@ -29,6 +23,7 @@ + diff --git a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs index ebff475fb8..b8b06bb8c3 100644 --- a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs +++ b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs @@ -80,7 +80,8 @@ module SmokeTest1 = // See https://github.com/dotnet/machinelearning/issues/401: forces the loading of ML.NET component assemblies let _load = [ typeof; - typeof ] + typeof; + typeof] // ML.EntryPoints let testDataPath = __SOURCE_DIRECTORY__ + @"/../data/wikipedia-detox-250-line-data.tsv" @@ -147,7 +148,8 @@ module SmokeTest2 = // See https://github.com/dotnet/machinelearning/issues/401: forces the loading of ML.NET component assemblies let _load = [ typeof; - typeof ] + typeof; + typeof] // ML.EntryPoints let testDataPath = __SOURCE_DIRECTORY__ + @"/../data/wikipedia-detox-250-line-data.tsv" @@ -211,7 +213,8 @@ module SmokeTest3 = // See https://github.com/dotnet/machinelearning/issues/401: forces the loading of ML.NET component assemblies let _load = [ typeof; - typeof ] + typeof; + typeof] // ML.EntryPoints let testDataPath = __SOURCE_DIRECTORY__ + @"/../data/wikipedia-detox-250-line-data.tsv" diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index a07f805010..992f089406 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -5,6 +5,7 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using 
Microsoft.ML.Runtime.Ensemble; +using Microsoft.ML.Runtime.EntryPoints; using Microsoft.ML.Runtime.Learners; using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Trainers.KMeans; @@ -28,6 +29,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment #pragma warning disable 612 env.ComponentCatalog.RegisterAssembly(typeof(Experiment).Assembly); // ML.Legacy #pragma warning restore 612 + env.ComponentCatalog.RegisterAssembly(typeof(CVSplit).Assembly); // ML.EntryPoints return env; } } diff --git a/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj b/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj index dc14be3b7c..62ae1cea6a 100644 --- a/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj +++ b/test/Microsoft.ML.TestFramework/Microsoft.ML.TestFramework.csproj @@ -7,6 +7,7 @@ + diff --git a/test/Microsoft.ML.Tests/Microsoft.ML.Tests.csproj b/test/Microsoft.ML.Tests/Microsoft.ML.Tests.csproj index 0bb30e382a..ddaea902c6 100644 --- a/test/Microsoft.ML.Tests/Microsoft.ML.Tests.csproj +++ b/test/Microsoft.ML.Tests/Microsoft.ML.Tests.csproj @@ -7,6 +7,7 @@ + From 7b9d3b09d1c8b4e3c35485774e247f6e9a17a913 Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Mon, 17 Dec 2018 09:52:58 -0800 Subject: [PATCH 058/100] Provide seed parameter for TrainTest routine (#1885) --- src/Microsoft.ML.Data/TrainContext.cs | 92 +++++++++++-------- .../Training/TrainingStaticExtensions.cs | 74 ++++++++------- .../Transforms/GenerateNumberTransform.cs | 5 +- .../RecommenderCatalog.cs | 15 +-- 4 files changed, 108 insertions(+), 78 deletions(-) diff --git a/src/Microsoft.ML.Data/TrainContext.cs b/src/Microsoft.ML.Data/TrainContext.cs index 13eda65945..2e5557e5bf 100644 --- a/src/Microsoft.ML.Data/TrainContext.cs +++ b/src/Microsoft.ML.Data/TrainContext.cs @@ -32,18 +32,20 @@ public abstract class TrainContextBase /// /// The dataset to split. /// The fraction of data to go into the test set. - /// Optional stratification column. 
- /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// A pair of datasets, for the train and test set. - public (IDataView trainSet, IDataView testSet) TrainTestSplit(IDataView data, double testFraction = 0.1, string stratificationColumn = null) + public (IDataView trainSet, IDataView testSet) TrainTestSplit(IDataView data, double testFraction = 0.1, string stratificationColumn = null, uint? seed = null) { Host.CheckValue(data, nameof(data)); Host.CheckParam(0 < testFraction && testFraction < 1, nameof(testFraction), "Must be between 0 and 1 exclusive"); Host.CheckValueOrNull(stratificationColumn); - EnsureStratificationColumn(ref data, ref stratificationColumn); + EnsureStratificationColumn(ref data, ref stratificationColumn, seed); var trainFilter = new RangeFilter(Host, new RangeFilter.Arguments() { @@ -68,14 +70,14 @@ public abstract class TrainContextBase /// Return each model and each scored test dataset. /// protected (IDataView scoredTestSet, ITransformer model)[] CrossValidateTrain(IDataView data, IEstimator estimator, - int numFolds, string stratificationColumn) + int numFolds, string stratificationColumn, uint? 
seed = null) { Host.CheckValue(data, nameof(data)); Host.CheckValue(estimator, nameof(estimator)); Host.CheckParam(numFolds > 1, nameof(numFolds), "Must be more than 1"); Host.CheckValueOrNull(stratificationColumn); - EnsureStratificationColumn(ref data, ref stratificationColumn); + EnsureStratificationColumn(ref data, ref stratificationColumn, seed); Func foldFunction = fold => @@ -122,7 +124,7 @@ protected TrainContextBase(IHostEnvironment env, string registrationName) /// for , hash it if needed, or introduce a new one /// if needed. /// - private void EnsureStratificationColumn(ref IDataView data, ref string stratificationColumn) + private void EnsureStratificationColumn(ref IDataView data, ref string stratificationColumn, uint? seed = null) { // We need to handle two cases: if the stratification column is provided, we use hashJoin to // build a single hash of it. If it is not, we generate a random number. @@ -130,7 +132,7 @@ private void EnsureStratificationColumn(ref IDataView data, ref string stratific if (stratificationColumn == null) { stratificationColumn = data.Schema.GetTempColumnName("StratificationColumn"); - data = new GenerateNumberTransform(Host, data, stratificationColumn); + data = new GenerateNumberTransform(Host, data, stratificationColumn, seed); } else { @@ -250,16 +252,19 @@ public BinaryClassificationMetrics EvaluateNonCalibrated(IDataView data, string /// The estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). 
Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. public (BinaryClassificationMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidateNonCalibrated( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, + string stratificationColumn = null, uint? seed = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (EvaluateNonCalibrated(x.scoredTestSet, labelColumn), x.model, x.scoredTestSet)).ToArray(); } @@ -276,12 +281,14 @@ public BinaryClassificationMetrics EvaluateNonCalibrated(IDataView data, string /// If two examples share the same value of the (if provided), /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from /// train to the test set. + /// If not present in dataset we will generate random filled column based on provided . /// Per-fold results: metrics, models, scored datasets. 
public (CalibratedBinaryClassificationMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, + string stratificationColumn = null, uint? seed = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (Evaluate(x.scoredTestSet, labelColumn), x.model, x.scoredTestSet)).ToArray(); } } @@ -326,12 +333,12 @@ internal ClusteringTrainers(ClusteringContext ctx) public ClusteringMetrics Evaluate(IDataView data, string label = null, string score = DefaultColumnNames.Score, - string features = null ) + string features = null) { Host.CheckValue(data, nameof(data)); Host.CheckNonEmpty(score, nameof(score)); - if(features != null) + if (features != null) Host.CheckNonEmpty(features, nameof(features), "The features column name should be non-empty if you want to calculate the Dbi metric."); if (label != null) @@ -351,15 +358,18 @@ public ClusteringMetrics Evaluate(IDataView data, /// Number of cross-validation folds. /// Optional label column for evaluation (clustering tasks may not always have a label). /// Optional features column for evaluation (needed for calculating Dbi metric) - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. 
If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. public (ClusteringMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = null, string featuresColumn = null, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = null, string featuresColumn = null, + string stratificationColumn = null, uint? seed = null) { - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (Evaluate(x.scoredTestSet, label: labelColumn, features: featuresColumn), x.model, x.scoredTestSet)).ToArray(); } } @@ -423,16 +433,19 @@ public MultiClassClassifierMetrics Evaluate(IDataView data, string label = Defau /// The estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. 
If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. public (MultiClassClassifierMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, + string stratificationColumn = null, uint? seed = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (Evaluate(x.scoredTestSet, labelColumn), x.model, x.scoredTestSet)).ToArray(); } } @@ -487,16 +500,19 @@ public RegressionMetrics Evaluate(IDataView data, string label = DefaultColumnNa /// The estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. 
If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. public (RegressionMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, + string stratificationColumn = null, uint? 
seed = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (Evaluate(x.scoredTestSet, labelColumn), x.model, x.scoredTestSet)).ToArray(); } } diff --git a/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs b/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs index 1107a24a83..a3ee486d43 100644 --- a/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs +++ b/src/Microsoft.ML.Data/Training/TrainingStaticExtensions.cs @@ -5,10 +5,10 @@ using Microsoft.ML.Core.Data; using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.StaticPipe; -using System.Linq; +using Microsoft.ML.StaticPipe.Runtime; using System; +using System.Linq; namespace Microsoft.ML { @@ -26,13 +26,15 @@ public static class TrainingStaticExtensions /// The training context. /// The dataset to split. /// The fraction of data to go into the test set. - /// Optional selector for the stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional selector for the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. 
+ /// And if it is not provided, the default value will be used. /// A pair of datasets, for the train and test set. public static (DataView trainSet, DataView testSet) TrainTestSplit(this TrainContextBase context, - DataView data, double testFraction = 0.1, Func stratificationColumn = null) + DataView data, double testFraction = 0.1, Func stratificationColumn = null, uint? seed = null) { var env = StaticPipeUtils.GetEnvironment(data); Contracts.AssertValue(env); @@ -49,7 +51,7 @@ public static (DataView trainSet, DataView testSet) TrainTestSplit(this stratName = indexer.Get(column); } - var (trainData, testData) = context.TrainTestSplit(data.AsDynamic, testFraction, stratName); + var (trainData, testData) = context.TrainTestSplit(data.AsDynamic, testFraction, stratName, seed); return (new DataView(env, trainData, data.Shape), new DataView(env, testData, data.Shape)); } @@ -66,10 +68,12 @@ public static (DataView trainSet, DataView testSet) TrainTestSplit(this /// The estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional selector for the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. 
/// Per-fold results: metrics, models, scored datasets. public static (RegressionMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this RegressionContext context, @@ -77,7 +81,7 @@ public static (RegressionMetrics metrics, Transformer estimator, Func> label, int numFolds = 5, - Func stratificationColumn = null) + Func stratificationColumn = null, uint? seed = null) where TTransformer : class, ITransformer { var env = StaticPipeUtils.GetEnvironment(data); @@ -100,7 +104,7 @@ public static (RegressionMetrics metrics, Transformer ( x.metrics, @@ -122,10 +126,12 @@ public static (RegressionMetrics metrics, TransformerThe estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional selector for the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. 
public static (MultiClassClassifierMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this MulticlassClassificationContext context, @@ -133,7 +139,7 @@ public static (MultiClassClassifierMetrics metrics, Transformer estimator, Func> label, int numFolds = 5, - Func stratificationColumn = null) + Func stratificationColumn = null, uint? seed = null) where TTransformer : class, ITransformer { var env = StaticPipeUtils.GetEnvironment(data); @@ -156,7 +162,7 @@ public static (MultiClassClassifierMetrics metrics, Transformer ( x.metrics, @@ -178,10 +184,12 @@ public static (MultiClassClassifierMetrics metrics, TransformerThe estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional selector for the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. 
public static (BinaryClassificationMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidateNonCalibrated( this BinaryClassificationContext context, @@ -189,7 +197,7 @@ public static (BinaryClassificationMetrics metrics, Transformer estimator, Func> label, int numFolds = 5, - Func stratificationColumn = null) + Func stratificationColumn = null, uint? seed = null) where TTransformer : class, ITransformer { var env = StaticPipeUtils.GetEnvironment(data); @@ -212,7 +220,7 @@ public static (BinaryClassificationMetrics metrics, Transformer ( x.metrics, @@ -234,10 +242,12 @@ public static (BinaryClassificationMetrics metrics, TransformerThe estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional selector for the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. 
public static (CalibratedBinaryClassificationMetrics metrics, Transformer model, DataView scoredTestData)[] CrossValidate( this BinaryClassificationContext context, @@ -245,7 +255,7 @@ public static (CalibratedBinaryClassificationMetrics metrics, Transformer estimator, Func> label, int numFolds = 5, - Func stratificationColumn = null) + Func stratificationColumn = null, uint? seed = null) where TTransformer : class, ITransformer { var env = StaticPipeUtils.GetEnvironment(data); @@ -268,7 +278,7 @@ public static (CalibratedBinaryClassificationMetrics metrics, Transformer ( x.metrics, diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs index e6ad9e1ff6..882717671f 100644 --- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs @@ -264,9 +264,10 @@ private static VersionInfo GetVersionInfo() /// Host Environment. /// Input . This is the output from previous transform or loader. /// Name of the output column. + /// Seed to start random number generator. /// Use an auto-incremented integer starting at zero instead of a random number. - public GenerateNumberTransform(IHostEnvironment env, IDataView input, string name, bool useCounter = Defaults.UseCounter) - : this(env, new Arguments() { Column = new[] { new Column() { Name = name } }, UseCounter = useCounter }, input) + public GenerateNumberTransform(IHostEnvironment env, IDataView input, string name, uint? seed = null, bool useCounter = Defaults.UseCounter) + : this(env, new Arguments() { Column = new[] { new Column() { Name = name } }, Seed = seed ?? 
Defaults.Seed, UseCounter = useCounter }, input) { } diff --git a/src/Microsoft.ML.Recommender/RecommenderCatalog.cs b/src/Microsoft.ML.Recommender/RecommenderCatalog.cs index 2be553736f..a333df30f1 100644 --- a/src/Microsoft.ML.Recommender/RecommenderCatalog.cs +++ b/src/Microsoft.ML.Recommender/RecommenderCatalog.cs @@ -91,16 +91,19 @@ public RegressionMetrics Evaluate(IDataView data, string label = DefaultColumnNa /// The estimator to fit. /// Number of cross-validation folds. /// The label column (for evaluation). - /// Optional stratification column. - /// If two examples share the same value of the (if provided), - /// they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from - /// train to the test set. + /// Optional name of the column to use as a stratification column. If two examples share the same value of the + /// (if provided), they are guaranteed to appear in the same subset (train or test). Use this to make sure there is no label leakage from train to the test set. + /// If this optional parameter is not provided, a stratification columns will be generated, and its values will be random numbers . + /// Optional parameter used in combination with the . + /// If the is not provided, the random numbers generated to create it, will use this seed as value. + /// And if it is not provided, the default value will be used. /// Per-fold results: metrics, models, scored datasets. public (RegressionMetrics metrics, ITransformer model, IDataView scoredTestData)[] CrossValidate( - IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, string stratificationColumn = null) + IDataView data, IEstimator estimator, int numFolds = 5, string labelColumn = DefaultColumnNames.Label, + string stratificationColumn = null, uint? 
seed = null) { Host.CheckNonEmpty(labelColumn, nameof(labelColumn)); - var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn); + var result = CrossValidateTrain(data, estimator, numFolds, stratificationColumn, seed); return result.Select(x => (Evaluate(x.scoredTestSet, labelColumn), x.model, x.scoredTestSet)).ToArray(); } } From 0b35b89685d04bf8247227538d7fcd4885b2d38a Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Mon, 17 Dec 2018 10:14:42 -0800 Subject: [PATCH 059/100] Bindings in ChooseColumnsByIndexTransform not ISchema (#1879) --- .../Dirty/ChooseColumnsByIndexTransform.cs | 223 +++++++++--------- .../SavePipeChooseColumnsByIndex-1-out.txt | 28 +++ .../SavePipeChooseColumnsByIndex-out.txt | 28 +++ ...SavePipeChooseColumnsByIndexDrop-1-out.txt | 28 +++ .../SavePipeChooseColumnsByIndexDrop-out.txt | 28 +++ .../TestCommandBase.cs | 35 +++ 6 files changed, 252 insertions(+), 118 deletions(-) create mode 100644 test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-1-out.txt create mode 100644 test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-out.txt create mode 100644 test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-1-out.txt create mode 100644 test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-out.txt diff --git a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs index b0fa32250b..7abfa534e4 100644 --- a/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs +++ b/src/Microsoft.ML.Data/Dirty/ChooseColumnsByIndexTransform.cs @@ -2,15 +2,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System; -using System.Collections.Generic; -using System.Linq; using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.CommandLine; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; +using System; +using System.Linq; [assembly: LoadableClass(typeof(ChooseColumnsByIndexTransform), typeof(ChooseColumnsByIndexTransform.Arguments), typeof(SignatureDataTransform), "", "ChooseColumnsByIndexTransform", "ChooseColumnsByIndex")] @@ -31,158 +30,146 @@ public sealed class Arguments public bool Drop; } - private sealed class Bindings : ISchema + private sealed class Bindings { - public readonly int[] Sources; - - private readonly Schema _input; - private readonly Dictionary _nameToIndex; - - // The following argument is used only to inform serialization. - private readonly int[] _dropped; - - public Schema AsSchema { get; } - - public Bindings(Arguments args, Schema schemaInput) + /// + /// A collection of source column indexes after removing those we want to drop. Specifically, j=_sources[i] means + /// that the i-th output column in the output schema is the j-th column in the input schema. + /// + private readonly int[] _sources; + + /// + /// Input schema of this transform. It's useful when determining column dependencies and other + /// relations between input and output schemas. + /// + private readonly Schema _sourceSchema; + + /// + /// Some column indexes in the input schema. is computed from + /// and . + /// + private readonly int[] _selectedColumnIndexes; + + /// + /// True, if this transform drops selected columns indexed by . + /// + private readonly bool _drop; + + // This transform's output schema. 
+ internal Schema OutputSchema { get; } + + internal Bindings(Arguments args, Schema sourceSchema) { Contracts.AssertValue(args); - Contracts.AssertValue(schemaInput); + Contracts.AssertValue(sourceSchema); + + _sourceSchema = sourceSchema; - _input = schemaInput; + // Store user-specified arguments as the major state of this transform. Only the major states will + // be saved and all other attributes can be reconstructed from them. + _drop = args.Drop; + _selectedColumnIndexes = args.Index; - int[] indexCopy = args.Index == null ? new int[0] : args.Index.ToArray(); - BuildNameDict(indexCopy, args.Drop, out Sources, out _dropped, out _nameToIndex, user: true); + // Compute actually used attributes in runtime from those major states. + ComputeSources(_drop, _selectedColumnIndexes, _sourceSchema, out _sources); - AsSchema = Schema.Create(this); + // All necessary fields in this class are set, so we can compute output schema now. + OutputSchema = ComputeOutputSchema(); } - private void BuildNameDict(int[] indexCopy, bool drop, out int[] sources, out int[] dropped, out Dictionary nameToCol, bool user) + /// + /// Common method of computing from necessary parameters. This function is used in constructors. + /// + private static void ComputeSources(bool drop, int[] selectedColumnIndexes, Schema sourceSchema, out int[] sources) { - Contracts.AssertValue(indexCopy); - foreach (int col in indexCopy) - { - if (col < 0 || _input.ColumnCount <= col) - { - const string fmt = "Column index {0} invalid for input with {1} columns"; - if (user) - throw Contracts.ExceptUserArg(nameof(Arguments.Index), fmt, col, _input.ColumnCount); - else - throw Contracts.ExceptDecode(fmt, col, _input.ColumnCount); - } - } + // Compute the mapping, , from output column index to input column index. 
if (drop) - { - sources = Enumerable.Range(0, _input.ColumnCount).Except(indexCopy).ToArray(); - dropped = indexCopy; - } - else - { - sources = indexCopy; - dropped = null; - } - if (user) - Contracts.CheckUserArg(sources.Length > 0, nameof(Arguments.Index), "Choose columns by index has no output columns"); + // Drop columns indexed by args.Index + sources = Enumerable.Range(0, sourceSchema.ColumnCount).Except(selectedColumnIndexes).ToArray(); else - Contracts.CheckDecode(sources.Length > 0, "Choose columns by index has no output columns"); - nameToCol = new Dictionary(); - for (int c = 0; c < sources.Length; ++c) - nameToCol[_input.GetColumnName(sources[c])] = c; - } - - public Bindings(ModelLoadContext ctx, Schema schemaInput) - { - Contracts.AssertValue(ctx); - Contracts.AssertValue(schemaInput); - - _input = schemaInput; - - // *** Binary format *** - // bool(as byte): whether the indicated source columns are columns to keep, or drop - // int: number of source column indices - // int[]: source column indices + // Keep columns indexed by args.Index + sources = selectedColumnIndexes; - bool isDrop = ctx.Reader.ReadBoolByte(); - BuildNameDict(ctx.Reader.ReadIntArray() ?? new int[0], isDrop, out Sources, out _dropped, out _nameToIndex, user: false); - AsSchema = Schema.Create(this); + // Make sure the output of this transform is meaningful. + Contracts.Check(sources.Length > 0, "Choose columns by index has no output column."); } - public void Save(ModelSaveContext ctx) + /// + /// After and are set, pick up selected columns from to create + /// Note that tells us what columns in are put into . + /// + private Schema ComputeOutputSchema() { - Contracts.AssertValue(ctx); + var schemaBuilder = new SchemaBuilder(); + for (int i = 0; i < _sources.Length; ++i) + { + // selectedIndex is an column index of input schema. Note that the input column indexed by _sources[i] in _sourceSchema is sent + // to the i-th column in the output schema. 
+ var selectedIndex = _sources[i]; - // *** Binary format *** - // bool(as byte): whether the indicated columns are columns to keep, or drop - // int: number of source column indices - // int[]: source column indices + // The dropped/kept columns are determined by user-specified arguments, so we throw if a bad configuration is provided. + string fmt = string.Format("Column index {0} invalid for input with {1} columns", selectedIndex, _sourceSchema.ColumnCount); + Contracts.Check(selectedIndex < _sourceSchema.ColumnCount, fmt); - ctx.Writer.WriteBoolByte(_dropped != null); - ctx.Writer.WriteIntArray(_dropped ?? Sources); + // Copy the selected column into output schema. + var selectedColumn = _sourceSchema[selectedIndex]; + schemaBuilder.AddColumn(selectedColumn.Name, selectedColumn.Type, selectedColumn.Metadata); + } + return schemaBuilder.GetSchema(); } - public int ColumnCount + internal Bindings(ModelLoadContext ctx, Schema sourceSchema) { - get { return Sources.Length; } - } + Contracts.AssertValue(ctx); + Contracts.AssertValue(sourceSchema); - public bool TryGetColumnIndex(string name, out int col) - { - Contracts.CheckValueOrNull(name); - if (name == null) - { - col = default(int); - return false; - } - return _nameToIndex.TryGetValue(name, out col); - } + _sourceSchema = sourceSchema; - public string GetColumnName(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _input.GetColumnName(Sources[col]); - } + // *** Binary format *** + // bool (as byte): operation mode + // int[]: selected source column indices + _drop = ctx.Reader.ReadBoolByte(); + _selectedColumnIndexes = ctx.Reader.ReadIntArray(); - public ColumnType GetColumnType(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _input.GetColumnType(Sources[col]); - } + // Compute actually used attributes in runtime from those major states. 
+ ComputeSources(_drop, _selectedColumnIndexes, _sourceSchema, out _sources); - public IEnumerable> GetMetadataTypes(int col) - { - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _input.GetMetadataTypes(Sources[col]); + _sourceSchema = sourceSchema; + OutputSchema = ComputeOutputSchema(); } - public ColumnType GetMetadataTypeOrNull(string kind, int col) + internal void Save(ModelSaveContext ctx) { - Contracts.CheckNonEmpty(kind, nameof(kind)); - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - return _input.GetMetadataTypeOrNull(kind, Sources[col]); - } + Contracts.AssertValue(ctx); - public void GetMetadata(string kind, int col, ref TValue value) - { - Contracts.CheckNonEmpty(kind, nameof(kind)); - Contracts.CheckParam(0 <= col && col < ColumnCount, nameof(col)); - _input.GetMetadata(kind, Sources[col], ref value); + // *** Binary format *** + // bool (as byte): operation mode + // int[]: selected source column indices + ctx.Writer.WriteBoolByte(_drop); + ctx.Writer.WriteIntArray(_selectedColumnIndexes); } internal bool[] GetActive(Func predicate) { - return Utils.BuildArray(ColumnCount, predicate); + return Utils.BuildArray(OutputSchema.ColumnCount, predicate); } internal Func GetDependencies(Func predicate) { Contracts.AssertValue(predicate); - var active = new bool[_input.ColumnCount]; - for (int i = 0; i < Sources.Length; i++) + var active = new bool[_sourceSchema.ColumnCount]; + for (int i = 0; i < _sources.Length; i++) { if (predicate(i)) - active[Sources[i]] = true; + active[_sources[i]] = true; } return col => 0 <= col && col < active.Length && active[col]; } + + /// + /// Given the column index in the output schema, this function returns its source column's index in the input schema. 
+ /// + internal int GetSourceColumnIndex(int outputColumnIndex) => _sources[outputColumnIndex]; } public const string LoaderSignature = "ChooseColumnsIdxTrans"; @@ -245,7 +232,7 @@ public override void Save(ModelSaveContext ctx) _bindings.Save(ctx); } - public override Schema OutputSchema => _bindings.AsSchema; + public override Schema OutputSchema => _bindings.OutputSchema; protected override bool? ShouldUseParallelCursors(Func predicate) { @@ -292,17 +279,17 @@ public Cursor(IChannelProvider provider, Bindings bindings, RowCursor input, boo : base(provider, input) { Ch.AssertValue(bindings); - Ch.Assert(active == null || active.Length == bindings.ColumnCount); + Ch.Assert(active == null || active.Length == bindings.OutputSchema.ColumnCount); _bindings = bindings; _active = active; } - public override Schema Schema => _bindings.AsSchema; + public override Schema Schema => _bindings.OutputSchema; public override bool IsColumnActive(int col) { - Ch.Check(0 <= col && col < _bindings.ColumnCount); + Ch.Check(0 <= col && col < _bindings.OutputSchema.ColumnCount); return _active == null || _active[col]; } @@ -310,7 +297,7 @@ public override ValueGetter GetGetter(int col) { Ch.Check(IsColumnActive(col)); - var src = _bindings.Sources[col]; + var src = _bindings.GetSourceColumnIndex(col); return Input.GetGetter(src); } } diff --git a/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-1-out.txt b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-1-out.txt new file mode 100644 index 0000000000..6229f5397f --- /dev/null +++ b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-1-out.txt @@ -0,0 +1,28 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Name:TX:0 +#@ col=Label:R4:1 +#@ } +Name Label +25 0 +38 0 +28 1 +44 1 +18 0 +34 0 +29 0 +63 1 +24 0 +55 0 +65 1 +36 0 +26 0 +58 0 +48 1 +43 1 +20 0 +43 0 +37 0 +40 1 +Wrote 20 rows of length 2 diff --git a/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-out.txt 
b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-out.txt new file mode 100644 index 0000000000..6229f5397f --- /dev/null +++ b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndex-out.txt @@ -0,0 +1,28 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Name:TX:0 +#@ col=Label:R4:1 +#@ } +Name Label +25 0 +38 0 +28 1 +44 1 +18 0 +34 0 +29 0 +63 1 +24 0 +55 0 +65 1 +36 0 +26 0 +58 0 +48 1 +43 1 +20 0 +43 0 +37 0 +40 1 +Wrote 20 rows of length 2 diff --git a/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-1-out.txt b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-1-out.txt new file mode 100644 index 0000000000..4f9cd70a5c --- /dev/null +++ b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-1-out.txt @@ -0,0 +1,28 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Cat:TX:0-7 +#@ col=Num:R4:8-13 +#@ } +Workclass education marital-status occupation relationship ethnicity sex native-country-region age fnlwgt education-num capital-gain capital-loss hours-per-week +Private 11th Never-married Machine-op-inspct Own-child Black Male United-States 25 226802 7 0 0 40 +Private HS-grad Married-civ-spouse Farming-fishing Husband White Male United-States 38 89814 9 0 0 50 +Local-gov Assoc-acdm Married-civ-spouse Protective-serv Husband White Male United-States 28 336951 12 0 0 40 +Private Some-college Married-civ-spouse Machine-op-inspct Husband Black Male United-States 44 160323 10 7688 0 40 +? Some-college Never-married ? Own-child White Female United-States 18 103497 10 0 0 30 +Private 10th Never-married Other-service Not-in-family White Male United-States 34 198693 6 0 0 30 +? HS-grad Never-married ? 
Unmarried Black Male United-States 29 227026 9 0 0 40 +Self-emp-not-inc Prof-school Married-civ-spouse Prof-specialty Husband White Male United-States 63 104626 15 3103 0 32 +Private Some-college Never-married Other-service Unmarried White Female United-States 24 369667 10 0 0 40 +Private 7th-8th Married-civ-spouse Craft-repair Husband White Male United-States 55 104996 4 0 0 10 +Private HS-grad Married-civ-spouse Machine-op-inspct Husband White Male United-States 65 184454 9 6418 0 40 +Federal-gov Bachelors Married-civ-spouse Adm-clerical Husband White Male United-States 36 212465 13 0 0 40 +Private HS-grad Never-married Adm-clerical Not-in-family White Female United-States 26 82091 9 0 0 39 +? HS-grad Married-civ-spouse ? Husband White Male United-States 58 299831 9 0 0 35 +Private HS-grad Married-civ-spouse Machine-op-inspct Husband White Male United-States 48 279724 9 3103 0 48 +Private Masters Married-civ-spouse Exec-managerial Husband White Male United-States 43 346189 14 0 0 50 +State-gov Some-college Never-married Other-service Own-child White Male United-States 20 444554 10 0 0 25 +Private HS-grad Married-civ-spouse Adm-clerical Wife White Female United-States 43 128354 9 0 0 30 +Private HS-grad Widowed Machine-op-inspct Unmarried White Female United-States 37 60548 9 0 0 20 +Private Doctorate Married-civ-spouse Prof-specialty Husband Asian-Pac-Islander Male ? 
40 85019 16 0 0 45 +Wrote 20 rows of length 14 diff --git a/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-out.txt b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-out.txt new file mode 100644 index 0000000000..4f9cd70a5c --- /dev/null +++ b/test/BaselineOutput/Common/Command/SavePipeChooseColumnsByIndexDrop-out.txt @@ -0,0 +1,28 @@ +#@ TextLoader{ +#@ header+ +#@ sep=tab +#@ col=Cat:TX:0-7 +#@ col=Num:R4:8-13 +#@ } +Workclass education marital-status occupation relationship ethnicity sex native-country-region age fnlwgt education-num capital-gain capital-loss hours-per-week +Private 11th Never-married Machine-op-inspct Own-child Black Male United-States 25 226802 7 0 0 40 +Private HS-grad Married-civ-spouse Farming-fishing Husband White Male United-States 38 89814 9 0 0 50 +Local-gov Assoc-acdm Married-civ-spouse Protective-serv Husband White Male United-States 28 336951 12 0 0 40 +Private Some-college Married-civ-spouse Machine-op-inspct Husband Black Male United-States 44 160323 10 7688 0 40 +? Some-college Never-married ? Own-child White Female United-States 18 103497 10 0 0 30 +Private 10th Never-married Other-service Not-in-family White Male United-States 34 198693 6 0 0 30 +? HS-grad Never-married ? Unmarried Black Male United-States 29 227026 9 0 0 40 +Self-emp-not-inc Prof-school Married-civ-spouse Prof-specialty Husband White Male United-States 63 104626 15 3103 0 32 +Private Some-college Never-married Other-service Unmarried White Female United-States 24 369667 10 0 0 40 +Private 7th-8th Married-civ-spouse Craft-repair Husband White Male United-States 55 104996 4 0 0 10 +Private HS-grad Married-civ-spouse Machine-op-inspct Husband White Male United-States 65 184454 9 6418 0 40 +Federal-gov Bachelors Married-civ-spouse Adm-clerical Husband White Male United-States 36 212465 13 0 0 40 +Private HS-grad Never-married Adm-clerical Not-in-family White Female United-States 26 82091 9 0 0 39 +? 
HS-grad Married-civ-spouse ? Husband White Male United-States 58 299831 9 0 0 35 +Private HS-grad Married-civ-spouse Machine-op-inspct Husband White Male United-States 48 279724 9 3103 0 48 +Private Masters Married-civ-spouse Exec-managerial Husband White Male United-States 43 346189 14 0 0 50 +State-gov Some-college Never-married Other-service Own-child White Male United-States 20 444554 10 0 0 25 +Private HS-grad Married-civ-spouse Adm-clerical Wife White Female United-States 43 128354 9 0 0 30 +Private HS-grad Widowed Machine-op-inspct Unmarried White Female United-States 37 60548 9 0 0 20 +Private Doctorate Married-civ-spouse Prof-specialty Husband Asian-Pac-Islander Male ? 40 85019 16 0 0 45 +Wrote 20 rows of length 14 diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs index 378f98ccb0..d7545055b6 100644 --- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs +++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs @@ -2093,5 +2093,40 @@ public void Datatypes() TestCore("savedata", intermediateData.Path, "loader=binary", "saver=text", textOutputPath.Arg("dout")); Done(); } + + [TestCategory("DataPipeSerialization")] + [Fact()] + public void SavePipeChooseColumnsByIndex() + { + string dataPath = GetDataPath("adult.tiny.with-schema.txt"); + const string loaderArgs = "loader=text{header+ col=Label:0 col=Cat:TX:1-8 col=Num:9-14 col=Name:TX:9}"; + + OutputPath modelPath = ModelPath(); + string extraArgs = "xf=ChooseColumnsByIndex{ind=3 ind=0}"; + TestCore("showdata", dataPath, loaderArgs, extraArgs); + + _step++; + + TestCore("showdata", dataPath, string.Format("in={{{0}}}", modelPath.Path), ""); + Done(); + } + + [TestCategory("DataPipeSerialization")] + [Fact()] + public void SavePipeChooseColumnsByIndexDrop() + { + string dataPath = GetDataPath("adult.tiny.with-schema.txt"); + const string loaderArgs = "loader=text{header+ col=Label:0 col=Cat:TX:1-8 col=Num:9-14 col=Name:TX:9}"; + + OutputPath 
modelPath = ModelPath(); + + string extraArgs = "xf=ChooseColumnsByIndex{ind=3 ind=0 drop+}"; + TestCore("showdata", dataPath, loaderArgs, extraArgs); + + _step++; + + TestCore("showdata", dataPath, string.Format("in={{{0}}}", modelPath.Path), ""); + Done(); + } } } From 421252f87ab5f9b1a4dc1a7f1e83986377631fef Mon Sep 17 00:00:00 2001 From: Abhishek Goswami Date: Mon, 17 Dec 2018 11:49:04 -0800 Subject: [PATCH 060/100] Supervised Bin Normalizer (#1876) * prepwork to enable supervised bin normalizer * column was not active * conversion to RowToRow * scenario works; code cleanup needed * fix build break after merge * inhering from OneToOneTransformBase (instead of RowToRowTransform Base). cleaned up tests * test cleanup (redundant code) * fix what I intend to verify (SupervisedBinning) --- .../Transforms/NormalizeColumn.cs | 36 +- .../Transforms/NormalizeColumnDbl.cs | 32 +- .../Transforms/NormalizeColumnSng.cs | 32 +- .../Transforms/Normalizer.cs | 42 ++- .../Common/NormalizerEstimator/normalized.tsv | 320 +++++++++--------- .../Transformers/NormalizerTests.cs | 19 +- 6 files changed, 283 insertions(+), 198 deletions(-) diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs index a9e12c2df8..217354b56b 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs @@ -1068,22 +1068,48 @@ public static IColumnFunctionBuilder CreateBuilder(SupervisedBinArguments args, else host.CheckUserArg(labelColumnType.IsNumber, nameof(args.LabelColumn), "Label column must be a number or a key type"); + return CreateBuilder( + new NormalizingEstimator.SupervisedBinningColumn( + args.Column[icol].Source ?? args.Column[icol].Name, + args.Column[icol].Name, + args.LabelColumn ?? DefaultColumnNames.Label, + args.Column[icol].MaxTrainingExamples ?? args.MaxTrainingExamples, + args.Column[icol].FixZero ?? args.FixZero, + args.Column[icol].NumBins ?? 
args.NumBins, + args.MinBinSize), + host, labelColumnId, srcIndex, srcType, cursor); + } + + public static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.SupervisedBinningColumn column, IHost host, + string labelColumn, int srcIndex, ColumnType srcType, RowCursor cursor) + { + int labelColumnId = GetLabelColumnId(host, cursor.Schema, labelColumn); + return CreateBuilder(column, host, labelColumnId, srcIndex, srcType, cursor); + } + + private static IColumnFunctionBuilder CreateBuilder(NormalizingEstimator.SupervisedBinningColumn column, IHost host, + int labelColumnId, int srcIndex, ColumnType srcType, RowCursor cursor) + { + Contracts.AssertValue(host); + if (srcType.IsNumber) { if (srcType == NumberType.R4) - return Sng.SupervisedBinOneColumnFunctionBuilder.Create(args, host, icol, srcIndex, labelColumnId, cursor); + return Sng.SupervisedBinOneColumnFunctionBuilder.Create(column, host, srcIndex, labelColumnId, cursor); if (srcType == NumberType.R8) - return Dbl.SupervisedBinOneColumnFunctionBuilder.Create(args, host, icol, srcIndex, labelColumnId, cursor); + return Dbl.SupervisedBinOneColumnFunctionBuilder.Create(column, host, srcIndex, labelColumnId, cursor); } if (srcType.IsVector && srcType.ItemType.IsNumber) { if (srcType.ItemType == NumberType.R4) - return Sng.SupervisedBinVecColumnFunctionBuilder.Create(args, host, icol, srcIndex, labelColumnId, cursor); + return Sng.SupervisedBinVecColumnFunctionBuilder.Create(column, host, srcIndex, labelColumnId, cursor); if (srcType.ItemType == NumberType.R8) - return Dbl.SupervisedBinVecColumnFunctionBuilder.Create(args, host, icol, srcIndex, labelColumnId, cursor); + return Dbl.SupervisedBinVecColumnFunctionBuilder.Create(column, host, srcIndex, labelColumnId, cursor); } - throw host.ExceptUserArg(nameof(args.Column), "Wrong column type for column {0}. Expected: R4, R8, Vec or Vec. 
Got: {1}.", args.Column[icol].Source, srcType.ToString()); + throw host.ExceptParam(nameof(column), "Wrong column type for column {0}. Expected: R4, R8, Vec or Vec. Got: {1}.", + column.Input, + srcType.ToString()); } public static int GetLabelColumnId(IExceptionContext host, Schema schema, string labelColumnName) diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs index 42a03e3eb6..44a9715c3e 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs @@ -1862,15 +1862,15 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) + public static IColumnFunctionBuilder Create(NormalizingEstimator.SupervisedBinningColumn column, IHost host, int valueColumnId, int labelColumnId, Row dataRow) { - var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? args.MaxTrainingExamples; - host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); - bool fix = args.Column[argsColumnIndex].FixZero ?? args.FixZero; - var numBins = args.Column[argsColumnIndex].NumBins ?? 
args.NumBins; - host.CheckUserArg(numBins > 1, nameof(args.NumBins), "Must be greater than 1"); - host.CheckUserArg(args.MinBinSize > 0, nameof(args.MinBinSize), "Must be positive"); - return new SupervisedBinOneColumnFunctionBuilder(host, lim, fix, numBins, args.MinBinSize, valueColumnId, labelColumnId, dataRow); + var lim = column.MaxTrainingExamples; + host.CheckUserArg(lim > 1, nameof(column.MaxTrainingExamples), "Must be greater than 1"); + bool fix = column.FixZero; + var numBins = column.NumBins; + host.CheckUserArg(numBins > 1, nameof(column.NumBins), "Must be greater than 1"); + host.CheckUserArg(column.MinBinSize > 0, nameof(column.MinBinSize), "Must be positive"); + return new SupervisedBinOneColumnFunctionBuilder(host, lim, fix, numBins, column.MinBinSize, valueColumnId, labelColumnId, dataRow); } } @@ -1902,15 +1902,15 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) + public static IColumnFunctionBuilder Create(NormalizingEstimator.SupervisedBinningColumn column, IHost host, int valueColumnId, int labelColumnId, Row dataRow) { - var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? args.MaxTrainingExamples; - host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); - bool fix = args.Column[argsColumnIndex].FixZero ?? args.FixZero; - var numBins = args.Column[argsColumnIndex].NumBins ?? 
args.NumBins; - host.CheckUserArg(numBins > 1, nameof(args.NumBins), "Must be greater than 1"); - host.CheckUserArg(args.MinBinSize > 0, nameof(args.MinBinSize), "Must be positive"); - return new SupervisedBinVecColumnFunctionBuilder(host, lim, fix, numBins, args.MinBinSize, valueColumnId, labelColumnId, dataRow); + var lim = column.MaxTrainingExamples; + host.CheckUserArg(lim > 1, nameof(column.MaxTrainingExamples), "Must be greater than 1"); + bool fix = column.FixZero; + var numBins = column.NumBins; + host.CheckUserArg(numBins > 1, nameof(column.NumBins), "Must be greater than 1"); + host.CheckUserArg(column.MinBinSize > 0, nameof(column.MinBinSize), "Must be positive"); + return new SupervisedBinVecColumnFunctionBuilder(host, lim, fix, numBins, column.MinBinSize, valueColumnId, labelColumnId, dataRow); } } } diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs index d310dbb2e4..15957c225c 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs @@ -1870,15 +1870,15 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) + public static IColumnFunctionBuilder Create(NormalizingEstimator.SupervisedBinningColumn column, IHost host, int valueColumnId, int labelColumnId, Row dataRow) { - var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? args.MaxTrainingExamples; - host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); - bool fix = args.Column[argsColumnIndex].FixZero ?? args.FixZero; - var numBins = args.Column[argsColumnIndex].NumBins ?? 
args.NumBins; - host.CheckUserArg(numBins > 1, nameof(args.NumBins), "Must be greater than 1"); - host.CheckUserArg(args.MinBinSize > 0, nameof(args.MinBinSize), "Must be positive"); - return new SupervisedBinOneColumnFunctionBuilder(host, lim, fix, numBins, args.MinBinSize, valueColumnId, labelColumnId, dataRow); + var lim = column.MaxTrainingExamples; + host.CheckUserArg(lim > 1, nameof(column.MaxTrainingExamples), "Must be greater than 1"); + bool fix = column.FixZero; + var numBins = column.NumBins; + host.CheckUserArg(numBins > 1, nameof(column.NumBins), "Must be greater than 1"); + host.CheckUserArg(column.MinBinSize > 0, nameof(column.MinBinSize), "Must be positive"); + return new SupervisedBinOneColumnFunctionBuilder(host, lim, fix, numBins, column.MinBinSize, valueColumnId, labelColumnId, dataRow); } } @@ -1910,15 +1910,15 @@ public override IColumnFunction CreateColumnFunction() return BinColumnFunction.Create(Host, binUpperBounds, _fix); } - public static IColumnFunctionBuilder Create(SupervisedBinArguments args, IHost host, int argsColumnIndex, int valueColumnId, int labelColumnId, Row dataRow) + public static IColumnFunctionBuilder Create(NormalizingEstimator.SupervisedBinningColumn column, IHost host, int valueColumnId, int labelColumnId, Row dataRow) { - var lim = args.Column[argsColumnIndex].MaxTrainingExamples ?? args.MaxTrainingExamples; - host.CheckUserArg(lim > 1, nameof(args.MaxTrainingExamples), "Must be greater than 1"); - bool fix = args.Column[argsColumnIndex].FixZero ?? args.FixZero; - var numBins = args.Column[argsColumnIndex].NumBins ?? 
args.NumBins; - host.CheckUserArg(numBins > 1, nameof(args.NumBins), "Must be greater than 1"); - host.CheckUserArg(args.MinBinSize > 0, nameof(args.MinBinSize), "Must be positive"); - return new SupervisedBinVecColumnFunctionBuilder(host, lim, fix, numBins, args.MinBinSize, valueColumnId, labelColumnId, dataRow); + var lim = column.MaxTrainingExamples; + host.CheckUserArg(lim > 1, nameof(column.MaxTrainingExamples), "Must be greater than 1"); + bool fix = column.FixZero; + var numBins = column.NumBins; + host.CheckUserArg(numBins > 1, nameof(column.NumBins), "Must be greater than 1"); + host.CheckUserArg(column.MinBinSize > 0, nameof(column.MinBinSize), "Must be positive"); + return new SupervisedBinVecColumnFunctionBuilder(host, lim, fix, numBins, column.MinBinSize, valueColumnId, labelColumnId, dataRow); } } } diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index 2c8dc6c622..1ba236e3da 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -16,6 +16,7 @@ using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; +using static Microsoft.ML.Transforms.Normalizers.NormalizeTransform; [assembly: LoadableClass(typeof(NormalizingTransformer), null, typeof(SignatureLoadModel), "", NormalizingTransformer.LoaderSignature)] @@ -54,7 +55,11 @@ public enum NormalizerMode /// /// Bucketize and then rescale to between -1 and 1. /// - Binning = 3 + Binning = 3, + /// + /// Bucketize and then rescale to between -1 and 1. Calculates bins based on correlation with the Label column. 
+ /// + SupervisedBinning = 4 } public abstract class ColumnBase @@ -88,6 +93,8 @@ internal static ColumnBase Create(string input, string output, NormalizerMode mo return new LogMeanVarColumn(input, output); case NormalizerMode.Binning: return new BinningColumn(input, output); + case NormalizerMode.SupervisedBinning: + return new SupervisedBinningColumn(input, output); default: throw Contracts.ExceptParam(nameof(mode), "Unknown normalizer mode"); } @@ -161,6 +168,29 @@ internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, C => NormalizeTransform.BinUtils.CreateBuilder(this, host, srcIndex, srcType, cursor); } + public sealed class SupervisedBinningColumn : FixZeroColumnBase + { + public readonly int NumBins; + public readonly string LabelColumn; + public readonly int MinBinSize; + + public SupervisedBinningColumn(string input, string output = null, + string labelColumn = DefaultColumnNames.Label, + long maxTrainingExamples = Defaults.MaxTrainingExamples, + bool fixZero = true, + int numBins = Defaults.NumBins, + int minBinSize = Defaults.MinBinSize) + : base(input, output ?? 
input, maxTrainingExamples, fixZero) + { + NumBins = numBins; + LabelColumn = labelColumn; + MinBinSize = minBinSize; + } + + internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, ColumnType srcType, RowCursor cursor) + => NormalizeTransform.SupervisedBinUtils.CreateBuilder(this, host, LabelColumn, srcIndex, srcType, cursor); + } + private readonly IHost _host; private readonly ColumnBase[] _columns; @@ -350,6 +380,13 @@ public static NormalizingTransformer Train(IHostEnvironment env, IDataView data, throw env.ExceptSchemaMismatch(nameof(data), "input", info.Input); srcTypes[i] = data.Schema.GetColumnType(srcCols[i]); activeInput[srcCols[i]] = true; + + var supervisedBinColumn = info as NormalizingEstimator.SupervisedBinningColumn; + if(supervisedBinColumn != null) + { + var labelColumnId = SupervisedBinUtils.GetLabelColumnId(env, data.Schema, supervisedBinColumn.LabelColumn); + activeInput[labelColumnId] = true; + } } var functionBuilders = new IColumnFunctionBuilder[columns.Length]; @@ -416,6 +453,7 @@ private NormalizingTransformer(IHost host, ModelLoadContext ctx) // for each added column: // - source type // - separate model for column function + var cols = new ColumnInfo[ColumnPairs.Length]; ColumnFunctions = new ColumnFunctionAccessor(Columns); for (int iinfo = 0; iinfo < ColumnPairs.Length; iinfo++) @@ -725,4 +763,4 @@ internal BinNormalizerModelParameters(ImmutableArray upperBounds, TData d } } } -} \ No newline at end of file +} diff --git a/test/BaselineOutput/Common/NormalizerEstimator/normalized.tsv b/test/BaselineOutput/Common/NormalizerEstimator/normalized.tsv index b231f8e23d..4652b4feac 100644 --- a/test/BaselineOutput/Common/NormalizerEstimator/normalized.tsv +++ b/test/BaselineOutput/Common/NormalizerEstimator/normalized.tsv @@ -14,162 +14,166 @@ #@ col=float4bin:R4:22-25 #@ col=double1bin:R8:26 #@ col=double4bin:R8:27-30 -#@ col=float1mv:R4:31 -#@ col=float4mv:R4:32-35 -#@ col=double1mv:R8:36 -#@ col=double4mv:R8:37-40 
-#@ col=float1lmv:R4:41 -#@ col=float4lmv:R4:42-45 -#@ col=double1lmv:R8:46 -#@ col=double4lmv:R8:47-50 +#@ col=float1supervisedbin:R4:31 +#@ col=float4supervisedbin:R4:32-35 +#@ col=double1supervisedbin:R8:36 +#@ col=double4supervisedbin:R8:37-40 +#@ col=float1mv:R4:41 +#@ col=float4mv:R4:42-45 +#@ col=double1mv:R8:46 +#@ col=double4mv:R8:47-50 +#@ col=float1lmv:R4:51 +#@ col=float4lmv:R4:52-55 +#@ col=double1lmv:R8:56 +#@ col=double4lmv:R8:57-60 #@ } -float1 float1 5.1 3.5 1.4 0.2 5.1 3.5 1.4 0.2 double1 double1 5.1 3.5 1.4 0.2 5.1 3.5 1.4 0.2 int1 float1bin 5.1 3.5 1.4 0.2 double1bin 5.1 3.5 1.4 0.2 float1mv 5.1 3.5 1.4 0.2 double1mv 5.1 3.5 1.4 0.2 float1lmv 5.1 3.5 1.4 0.2 double1lmv 5.1 3.5 1.4 0.2 -4.9 0.6202532 4.9 3 1.4 0.2 0.6202532 0.6818181 0.202898547 0.0800000057 4.9000000000000004 0.620253164556962 4.9000000000000004 3 1.3999999999999999 0.20000000000000001 0.620253164556962 0.68181818181818177 0.20289855072463767 0.080000000000000016 0 0.1764706 0.1764706 0.4090909 0.0952381 0.04761905 0.17647058823529413 0.17647058823529413 0.40909090909090912 0.095238095238095233 0.047619047619047616 0.829617262 0.829617262 0.9735693 0.336375058 0.140421242 0.82961722543499561 0.82961722543499561 0.97356928368104678 0.33637507090625185 0.14042123582229432 0.117838435 0.117838435 0.480745673 0.07455328 0.07146728 0.11783846996167147 0.11783846996167147 0.4807456731235793 0.074553292690365869 0.071467286508792471 -4.7 0.594936669 4.7 3.2 1.3 0.2 0.594936669 0.7272727 0.188405782 0.0800000057 4.7000000000000002 0.59493670886075944 4.7000000000000002 3.2000000000000002 1.3 0.20000000000000001 0.59493670886075944 0.72727272727272729 0.18840579710144928 0.080000000000000016 0 0.117647059 0.117647059 0.5 0.0714285746 0.04761905 0.11764705882352941 0.11764705882352941 0.5 0.071428571428571425 0.047619047619047616 0.7957553 0.7957553 1.038474 0.312348276 0.140421242 0.79575529786622023 0.79575529786622023 1.0384739025931167 0.31234828012723387 0.14042123582229432 
0.06917418 0.06917418 0.6578964 0.0582755022 0.07146728 0.069174201507542998 0.069174201507542998 0.65789648182451921 0.058275496366795021 0.071467286508792471 -4.6 0.5822785 4.6 3.1 1.5 0.2 0.5822785 0.7045454 0.2173913 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.1000000000000001 1.5 0.20000000000000001 0.58227848101265811 0.70454545454545459 0.21739130434782611 0.080000000000000016 0 0.0882353 0.0882353 0.454545468 0.119047619 0.04761905 0.088235294117647065 0.088235294117647065 0.45454545454545453 0.11904761904761904 0.047619047619047616 0.7788243 0.7788243 1.0060215 0.360401869 0.140421242 0.77882433408183249 0.77882433408183249 1.0060215931370817 0.36040186168526983 0.14042123582229432 0.0510348342 0.0510348342 0.5725629 0.0926237255 0.07146728 0.05103484272829939 0.05103484272829939 0.5725628341629212 0.092623715229236292 0.071467286508792471 -5 0.6329114 5 3.6 1.4 0.2 0.6329114 0.818181753 0.202898547 0.0800000057 5 0.63291139240506322 5 3.6000000000000001 1.3999999999999999 0.20000000000000001 0.63291139240506322 0.81818181818181812 0.20289855072463767 0.080000000000000016 0 0.205882356 0.205882356 0.6818182 0.0952381 0.04761905 0.20588235294117646 0.20588235294117646 0.68181818181818177 0.095238095238095233 0.047619047619047616 0.8465482 0.8465482 1.1682831 0.336375058 0.140421242 0.84654818921938324 0.84654818921938324 1.1682831404172562 0.33637507090625185 0.14042123582229432 0.14861621 0.14861621 0.8919594 0.07455328 0.07146728 0.14861616399327332 0.14861616399327332 0.89195941323598249 0.074553292690365869 0.071467286508792471 -5.4 0.683544338 5.4 3.9 1.7 0.4 0.683544338 0.8863636 0.246376812 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.8999999999999999 1.7 0.40000000000000002 0.68354430379746833 0.88636363636363635 0.24637681159420291 0.16000000000000003 0 0.323529422 0.323529422 0.8181818 0.166666672 0.142857149 0.3235294117647059 0.3235294117647059 0.81818181818181823 0.16666666666666666 
0.14285714285714285 0.9142721 0.9142721 1.26564014 0.408455431 0.280842483 0.91427204435693399 0.91427204435693399 1.2656400687853608 0.40845544324330579 0.28084247164458864 0.3099612 0.3099612 0.9642299 0.13329801 0.223406911 0.30996111189240849 0.30996111189240849 0.96422985148785167 0.1332979854433643 0.22340689032507804 -4.6 0.5822785 4.6 3.4 1.4 0.3 0.5822785 0.772727251 0.202898547 0.120000005 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.3999999999999999 1.3999999999999999 0.29999999999999999 0.58227848101265811 0.77272727272727271 0.20289855072463767 0.12 0 0.0882353 0.0882353 0.590909064 0.0952381 0.0952381 0.088235294117647065 0.088235294117647065 0.59090909090909094 0.095238095238095233 0.095238095238095233 0.7788243 0.7788243 1.10337853 0.336375058 0.210631862 0.77882433408183249 0.77882433408183249 1.1033785215051863 0.33637507090625185 0.21063185373344145 0.0510348342 0.0510348342 0.797875941 0.07455328 0.146190211 0.05103484272829939 0.05103484272829939 0.79787580879856601 0.074553292690365869 0.14619023377705653 -5 0.6329114 5 3.4 1.5 0.2 0.6329114 0.772727251 0.2173913 0.0800000057 5 0.63291139240506322 5 3.3999999999999999 1.5 0.20000000000000001 0.63291139240506322 0.77272727272727271 0.21739130434782611 0.080000000000000016 0 0.205882356 0.205882356 0.590909064 0.119047619 0.04761905 0.20588235294117646 0.20588235294117646 0.59090909090909094 0.11904761904761904 0.047619047619047616 0.8465482 0.8465482 1.10337853 0.360401869 0.140421242 0.84654818921938324 0.84654818921938324 1.1033785215051863 0.36040186168526983 0.14042123582229432 0.14861621 0.14861621 0.797875941 0.0926237255 0.07146728 0.14861616399327332 0.14861616399327332 0.79787580879856601 0.092623715229236292 0.071467286508792471 -4.4 0.5569621 4.4 2.9 1.4 0.2 0.5569621 0.659090936 0.202898547 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 2.8999999999999999 1.3999999999999999 0.20000000000000001 0.55696202531645567 0.65909090909090906 
0.20289855072463767 0.080000000000000016 0 0.0294117648 0.0294117648 0.363636374 0.0952381 0.04761905 0.029411764705882353 0.029411764705882353 0.36363636363636365 0.095238095238095233 0.047619047619047616 0.744962454 0.744962454 0.941117 0.336375058 0.140421242 0.74496240651305734 0.74496240651305734 0.94111697422501195 0.33637507090625185 0.14042123582229432 0.0255098473 0.0255098473 0.386941522 0.07455328 0.07146728 0.025509830781751675 0.025509830781751675 0.38694155923435009 0.074553292690365869 0.071467286508792471 -4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 0.092623715229236292 0.014973996264087019 -5.4 0.683544338 5.4 3.7 1.5 0.2 0.683544338 0.840909064 0.2173913 0.0800000057 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.7000000000000002 1.5 0.20000000000000001 0.68354430379746833 0.84090909090909094 0.21739130434782611 0.080000000000000016 0 0.323529422 0.323529422 0.727272749 0.119047619 0.04761905 0.3235294117647059 0.3235294117647059 0.72727272727272729 0.11904761904761904 0.047619047619047616 0.9142721 0.9142721 1.20073545 0.360401869 0.140421242 0.91427204435693399 0.91427204435693399 1.2007354498732912 0.36040186168526983 0.14042123582229432 0.3099612 0.3099612 0.9236834 0.0926237255 0.07146728 0.30996111189240849 0.30996111189240849 0.92368343544686704 0.092623715229236292 0.071467286508792471 -4.8 0.607594967 4.8 3.4 1.6 0.2 
0.607594967 0.772727251 0.231884047 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.3999999999999999 1.6000000000000001 0.20000000000000001 0.60759493670886067 0.77272727272727271 0.23188405797101452 0.080000000000000016 0 0.14705883 0.14705883 0.590909064 0.142857149 0.04761905 0.14705882352941177 0.14705882352941177 0.59090909090909094 0.14285714285714285 0.047619047619047616 0.8126863 0.8126863 1.10337853 0.38442865 0.140421242 0.81268626165060787 0.81268626165060787 1.1033785215051863 0.38442865246428787 0.14042123582229432 0.09137413 0.09137413 0.797875941 0.112279132 0.07146728 0.091374136005250073 0.091374136005250073 0.79787580879856601 0.11227913256984562 0.071467286508792471 -4.8 0.607594967 4.8 3 1.4 0.1 0.607594967 0.6818181 0.202898547 0.0400000028 4.7999999999999998 0.60759493670886067 4.7999999999999998 3 1.3999999999999999 0.10000000000000001 0.60759493670886067 0.68181818181818177 0.20289855072463767 0.040000000000000008 0 0.14705883 0.14705883 0.4090909 0.0952381 0 0.14705882352941177 0.14705882352941177 0.40909090909090912 0.095238095238095233 0 0.8126863 0.8126863 0.9735693 0.336375058 0.07021062 0.81268626165060787 0.81268626165060787 0.97356928368104678 0.33637507090625185 0.070210617911147161 0.09137413 0.09137413 0.480745673 0.07455328 0.0149739943 0.091374136005250073 0.091374136005250073 0.4807456731235793 0.074553292690365869 0.014973996264087019 -4.3 0.544303834 4.3 3 1.1 0.1 0.544303834 0.6818181 0.159420282 0.0400000028 4.2999999999999998 0.54430379746835433 4.2999999999999998 3 1.1000000000000001 0.10000000000000001 0.54430379746835433 0.68181818181818177 0.15942028985507248 0.040000000000000008 0 0 0 0.4090909 0.0238095243 0 0 0 0.40909090909090912 0.023809523809523808 0 0.7280315 0.7280315 0.9735693 0.264294684 0.07021062 0.7280314427286696 0.7280314427286696 0.97356928368104678 0.26429469856919791 0.070210617911147161 0.01720981 0.01720981 0.480745673 0.0317756645 0.0149739943 0.017209798931850762 
0.017209798931850762 0.4807456731235793 0.031775654639957629 0.014973996264087019 -5.8 0.734177232 5.8 4 1.2 0.2 0.734177232 0.9090909 0.173913047 0.0800000057 5.7999999999999998 0.73417721518987333 5.7999999999999998 4 1.2 0.20000000000000001 0.73417721518987333 0.90909090909090906 0.17391304347826086 0.080000000000000016 0 0.441176474 0.441176474 0.8636364 0.04761905 0.04761905 0.44117647058823528 0.44117647058823528 0.86363636363636365 0.047619047619047616 0.047619047619047616 0.981996 0.981996 1.29809237 0.2883215 0.140421242 0.98199589949448451 0.98199589949448451 1.2980923782413958 0.28832148934821589 0.14042123582229432 0.504585147 0.504585147 0.9762056 0.0439706929 0.07146728 0.50458515122771275 0.50458515122771275 0.9762055888000436 0.043970678884132197 0.071467286508792471 -5.7 0.721519 5.7 4.4 1.5 0.4 0.721519 1 0.2173913 0.160000011 5.7000000000000002 0.72151898734177211 5.7000000000000002 4.4000000000000004 1.5 0.40000000000000002 0.72151898734177211 1 0.21739130434782611 0.16000000000000003 0 0.4117647 0.4117647 1 0.119047619 0.142857149 0.41176470588235292 0.41176470588235292 1 0.11904761904761904 0.14285714285714285 0.965064943 0.965064943 1.42790163 0.360401869 0.280842483 0.96506493571009688 0.96506493571009688 1.4279016160655356 0.36040186168526983 0.28084247164458864 0.455403626 0.455403626 0.996043563 0.0926237255 0.223406911 0.45540375842690767 0.45540375842690767 0.99604355189588412 0.092623715229236292 0.22340689032507804 -5.4 0.683544338 5.4 3.9 1.3 0.4 0.683544338 0.8863636 0.188405782 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.8999999999999999 1.3 0.40000000000000002 0.68354430379746833 0.88636363636363635 0.18840579710144928 0.16000000000000003 0 0.323529422 0.323529422 0.8181818 0.0714285746 0.142857149 0.3235294117647059 0.3235294117647059 0.81818181818181823 0.071428571428571425 0.14285714285714285 0.9142721 0.9142721 1.26564014 0.312348276 0.280842483 0.91427204435693399 0.91427204435693399 
1.2656400687853608 0.31234828012723387 0.28084247164458864 0.3099612 0.3099612 0.9642299 0.0582755022 0.223406911 0.30996111189240849 0.30996111189240849 0.96422985148785167 0.058275496366795021 0.22340689032507804 -5.1 0.6455696 5.1 3.5 1.4 0.3 0.6455696 0.7954545 0.202898547 0.120000005 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.5 1.3999999999999999 0.29999999999999999 0.64556962025316444 0.79545454545454541 0.20289855072463767 0.12 0 0.235294119 0.235294119 0.6363636 0.0952381 0.0952381 0.23529411764705882 0.23529411764705882 0.63636363636363635 0.095238095238095233 0.095238095238095233 0.8634792 0.8634792 1.13583088 0.336375058 0.210631862 0.86347915300377087 0.86347915300377087 1.1358308309612213 0.33637507090625185 0.21063185373344145 0.183586776 0.183586776 0.8504554 0.07455328 0.146190211 0.18358681923369741 0.18358681923369741 0.8504555107896975 0.074553292690365869 0.14619023377705653 -5.7 0.721519 5.7 3.8 1.7 0.3 0.721519 0.8636363 0.246376812 0.120000005 5.7000000000000002 0.72151898734177211 5.7000000000000002 3.7999999999999998 1.7 0.29999999999999999 0.72151898734177211 0.86363636363636354 0.24637681159420291 0.12 0 0.4117647 0.4117647 0.772727251 0.166666672 0.0952381 0.41176470588235292 0.41176470588235292 0.77272727272727271 0.16666666666666666 0.095238095238095233 0.965064943 0.965064943 1.23318768 0.408455431 0.210631862 0.96506493571009688 0.96506493571009688 1.2331877593293259 0.40845544324330579 0.21063185373344145 0.455403626 0.455403626 0.9472266 0.13329801 0.146190211 0.45540375842690767 0.45540375842690767 0.94722658326235554 0.1332979854433643 0.14619023377705653 -5.1 0.6455696 5.1 3.8 1.5 0.3 0.6455696 0.8636363 0.2173913 0.120000005 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.5 0.29999999999999999 0.64556962025316444 0.86363636363636354 0.21739130434782611 0.12 0 0.235294119 0.235294119 0.772727251 0.119047619 0.0952381 0.23529411764705882 0.23529411764705882 0.77272727272727271 
0.11904761904761904 0.095238095238095233 0.8634792 0.8634792 1.23318768 0.360401869 0.210631862 0.86347915300377087 0.86347915300377087 1.2331877593293259 0.36040186168526983 0.21063185373344145 0.183586776 0.183586776 0.9472266 0.0926237255 0.146190211 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.092623715229236292 0.14619023377705653 -5.4 0.683544338 5.4 3.4 1.7 0.2 0.683544338 0.772727251 0.246376812 0.0800000057 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.3999999999999999 1.7 0.20000000000000001 0.68354430379746833 0.77272727272727271 0.24637681159420291 0.080000000000000016 0 0.323529422 0.323529422 0.590909064 0.166666672 0.04761905 0.3235294117647059 0.3235294117647059 0.59090909090909094 0.16666666666666666 0.047619047619047616 0.9142721 0.9142721 1.10337853 0.408455431 0.140421242 0.91427204435693399 0.91427204435693399 1.1033785215051863 0.40845544324330579 0.14042123582229432 0.3099612 0.3099612 0.797875941 0.13329801 0.07146728 0.30996111189240849 0.30996111189240849 0.79787580879856601 0.1332979854433643 0.071467286508792471 -5.1 0.6455696 5.1 3.7 1.5 0.4 0.6455696 0.840909064 0.2173913 0.160000011 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7000000000000002 1.5 0.40000000000000002 0.64556962025316444 0.84090909090909094 0.21739130434782611 0.16000000000000003 0 0.235294119 0.235294119 0.727272749 0.119047619 0.142857149 0.23529411764705882 0.23529411764705882 0.72727272727272729 0.11904761904761904 0.14285714285714285 0.8634792 0.8634792 1.20073545 0.360401869 0.280842483 0.86347915300377087 0.86347915300377087 1.2007354498732912 0.36040186168526983 0.28084247164458864 0.183586776 0.183586776 0.9236834 0.0926237255 0.223406911 0.18358681923369741 0.18358681923369741 0.92368343544686704 0.092623715229236292 0.22340689032507804 -4.6 0.5822785 4.6 3.6 1 0.2 0.5822785 0.818181753 0.144927531 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.6000000000000001 1 0.20000000000000001 
0.58227848101265811 0.81818181818181812 0.14492753623188406 0.080000000000000016 0 0.0882353 0.0882353 0.6818182 0 0.04761905 0.088235294117647065 0.088235294117647065 0.68181818181818177 0 0.047619047619047616 0.7788243 0.7788243 1.1682831 0.2402679 0.140421242 0.77882433408183249 0.77882433408183249 1.1682831404172562 0.2402679077901799 0.14042123582229432 0.0510348342 0.0510348342 0.8919594 0.0217650775 0.07146728 0.05103484272829939 0.05103484272829939 0.89195941323598249 0.021765071971117544 0.071467286508792471 -5.1 0.6455696 5.1 3.3 1.7 0.5 0.6455696 0.74999994 0.246376812 0.2 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.2999999999999998 1.7 0.5 0.64556962025316444 0.74999999999999989 0.24637681159420291 0.20000000000000001 0 0.235294119 0.235294119 0.545454562 0.166666672 0.1904762 0.23529411764705882 0.23529411764705882 0.54545454545454541 0.16666666666666666 0.19047619047619047 0.8634792 0.8634792 1.07092619 0.408455431 0.3510531 0.86347915300377087 0.86347915300377087 1.0709262120491514 0.40845544324330579 0.35105308955573578 0.183586776 0.183586776 0.733567655 0.13329801 0.29663077 0.18358681923369741 0.18358681923369741 0.73356785053506501 0.1332979854433643 0.29663078722186503 -4.8 0.607594967 4.8 3.4 1.9 0.2 0.607594967 0.772727251 0.2753623 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.3999999999999999 1.8999999999999999 0.20000000000000001 0.60759493670886067 0.77272727272727271 0.27536231884057971 0.080000000000000016 0 0.14705883 0.14705883 0.590909064 0.1904762 0.04761905 0.14705882352941177 0.14705882352941177 0.59090909090909094 0.19047619047619047 0.047619047619047616 0.8126863 0.8126863 1.10337853 0.456509024 0.140421242 0.81268626165060787 0.81268626165060787 1.1033785215051863 0.45650902480134176 0.14042123582229432 0.09137413 0.09137413 0.797875941 0.1785302 0.07146728 0.091374136005250073 0.091374136005250073 0.79787580879856601 0.17853019682812199 0.071467286508792471 -5 0.6329114 5 3 1.6 0.2 
0.6329114 0.6818181 0.231884047 0.0800000057 5 0.63291139240506322 5 3 1.6000000000000001 0.20000000000000001 0.63291139240506322 0.68181818181818177 0.23188405797101452 0.080000000000000016 0 0.205882356 0.205882356 0.4090909 0.142857149 0.04761905 0.20588235294117646 0.20588235294117646 0.40909090909090912 0.14285714285714285 0.047619047619047616 0.8465482 0.8465482 0.9735693 0.38442865 0.140421242 0.84654818921938324 0.84654818921938324 0.97356928368104678 0.38442865246428787 0.14042123582229432 0.14861621 0.14861621 0.480745673 0.112279132 0.07146728 0.14861616399327332 0.14861616399327332 0.4807456731235793 0.11227913256984562 0.071467286508792471 -5 0.6329114 5 3.4 1.6 0.4 0.6329114 0.772727251 0.231884047 0.160000011 5 0.63291139240506322 5 3.3999999999999999 1.6000000000000001 0.40000000000000002 0.63291139240506322 0.77272727272727271 0.23188405797101452 0.16000000000000003 0 0.205882356 0.205882356 0.590909064 0.142857149 0.142857149 0.20588235294117646 0.20588235294117646 0.59090909090909094 0.14285714285714285 0.14285714285714285 0.8465482 0.8465482 1.10337853 0.38442865 0.280842483 0.84654818921938324 0.84654818921938324 1.1033785215051863 0.38442865246428787 0.28084247164458864 0.14861621 0.14861621 0.797875941 0.112279132 0.223406911 0.14861616399327332 0.14861616399327332 0.79787580879856601 0.11227913256984562 0.22340689032507804 -5.2 0.6582278 5.2 3.5 1.5 0.2 0.6582278 0.7954545 0.2173913 0.0800000057 5.2000000000000002 0.65822784810126578 5.2000000000000002 3.5 1.5 0.20000000000000001 0.65822784810126578 0.79545454545454541 0.21739130434782611 0.080000000000000016 0 0.2647059 0.2647059 0.6363636 0.119047619 0.04761905 0.26470588235294118 0.26470588235294118 0.63636363636363635 0.11904761904761904 0.047619047619047616 0.880410135 0.880410135 1.13583088 0.360401869 0.140421242 0.88041011678815861 0.88041011678815861 1.1358308309612213 0.36040186168526983 0.14042123582229432 0.222458825 0.222458825 0.8504554 0.0926237255 0.07146728 
0.22245879972342997 0.22245879972342997 0.8504555107896975 0.092623715229236292 0.071467286508792471 -5.2 0.6582278 5.2 3.4 1.4 0.2 0.6582278 0.772727251 0.202898547 0.0800000057 5.2000000000000002 0.65822784810126578 5.2000000000000002 3.3999999999999999 1.3999999999999999 0.20000000000000001 0.65822784810126578 0.77272727272727271 0.20289855072463767 0.080000000000000016 0 0.2647059 0.2647059 0.590909064 0.0952381 0.04761905 0.26470588235294118 0.26470588235294118 0.59090909090909094 0.095238095238095233 0.047619047619047616 0.880410135 0.880410135 1.10337853 0.336375058 0.140421242 0.88041011678815861 0.88041011678815861 1.1033785215051863 0.33637507090625185 0.14042123582229432 0.222458825 0.222458825 0.797875941 0.07455328 0.07146728 0.22245879972342997 0.22245879972342997 0.79787580879856601 0.074553292690365869 0.071467286508792471 -4.7 0.594936669 4.7 3.2 1.6 0.2 0.594936669 0.7272727 0.231884047 0.0800000057 4.7000000000000002 0.59493670886075944 4.7000000000000002 3.2000000000000002 1.6000000000000001 0.20000000000000001 0.59493670886075944 0.72727272727272729 0.23188405797101452 0.080000000000000016 0 0.117647059 0.117647059 0.5 0.142857149 0.04761905 0.11764705882352941 0.11764705882352941 0.5 0.14285714285714285 0.047619047619047616 0.7957553 0.7957553 1.038474 0.38442865 0.140421242 0.79575529786622023 0.79575529786622023 1.0384739025931167 0.38442865246428787 0.14042123582229432 0.06917418 0.06917418 0.6578964 0.112279132 0.07146728 0.069174201507542998 0.069174201507542998 0.65789648182451921 0.11227913256984562 0.071467286508792471 -4.8 0.607594967 4.8 3.1 1.6 0.2 0.607594967 0.7045454 0.231884047 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.1000000000000001 1.6000000000000001 0.20000000000000001 0.60759493670886067 0.70454545454545459 0.23188405797101452 0.080000000000000016 0 0.14705883 0.14705883 0.454545468 0.142857149 0.04761905 0.14705882352941177 0.14705882352941177 0.45454545454545453 0.14285714285714285 
0.047619047619047616 0.8126863 0.8126863 1.0060215 0.38442865 0.140421242 0.81268626165060787 0.81268626165060787 1.0060215931370817 0.38442865246428787 0.14042123582229432 0.09137413 0.09137413 0.5725629 0.112279132 0.07146728 0.091374136005250073 0.091374136005250073 0.5725628341629212 0.11227913256984562 0.071467286508792471 -5.4 0.683544338 5.4 3.4 1.5 0.4 0.683544338 0.772727251 0.2173913 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.3999999999999999 1.5 0.40000000000000002 0.68354430379746833 0.77272727272727271 0.21739130434782611 0.16000000000000003 0 0.323529422 0.323529422 0.590909064 0.119047619 0.142857149 0.3235294117647059 0.3235294117647059 0.59090909090909094 0.11904761904761904 0.14285714285714285 0.9142721 0.9142721 1.10337853 0.360401869 0.280842483 0.91427204435693399 0.91427204435693399 1.1033785215051863 0.36040186168526983 0.28084247164458864 0.3099612 0.3099612 0.797875941 0.0926237255 0.223406911 0.30996111189240849 0.30996111189240849 0.79787580879856601 0.092623715229236292 0.22340689032507804 -5.2 0.6582278 5.2 4.1 1.5 0.1 0.6582278 0.9318181 0.2173913 0.0400000028 5.2000000000000002 0.65822784810126578 5.2000000000000002 4.0999999999999996 1.5 0.10000000000000001 0.65822784810126578 0.93181818181818166 0.21739130434782611 0.040000000000000008 0 0.2647059 0.2647059 0.909090936 0.119047619 0 0.26470588235294118 0.26470588235294118 0.90909090909090906 0.11904761904761904 0 0.880410135 0.880410135 1.33054459 0.360401869 0.07021062 0.88041011678815861 0.88041011678815861 1.3305446876974305 0.36040186168526983 0.070210617911147161 0.222458825 0.222458825 0.984447062 0.0926237255 0.0149739943 0.22245879972342997 0.22245879972342997 0.98444709277532771 0.092623715229236292 0.014973996264087019 -5.5 0.6962025 5.5 4.2 1.4 0.2 0.6962025 0.9545454 0.202898547 0.0800000057 5.5 0.69620253164556956 5.5 4.2000000000000002 1.3999999999999999 0.20000000000000001 0.69620253164556956 0.95454545454545459 0.20289855072463767 
0.080000000000000016 0 0.3529412 0.3529412 0.954545438 0.0952381 0.04761905 0.35294117647058826 0.35294117647058826 0.95454545454545459 0.095238095238095233 0.047619047619047616 0.931203067 0.931203067 1.36299694 0.336375058 0.140421242 0.93120300814132162 0.93120300814132162 1.3629969971534657 0.33637507090625185 0.14042123582229432 0.3573058 0.3573058 0.9899987 0.07455328 0.07146728 0.35730592590256216 0.35730592590256216 0.98999870773555454 0.074553292690365869 0.071467286508792471 -4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 0.092623715229236292 0.014973996264087019 -5 0.6329114 5 3.2 1.2 0.2 0.6329114 0.7272727 0.173913047 0.0800000057 5 0.63291139240506322 5 3.2000000000000002 1.2 0.20000000000000001 0.63291139240506322 0.72727272727272729 0.17391304347826086 0.080000000000000016 0 0.205882356 0.205882356 0.5 0.04761905 0.04761905 0.20588235294117646 0.20588235294117646 0.5 0.047619047619047616 0.047619047619047616 0.8465482 0.8465482 1.038474 0.2883215 0.140421242 0.84654818921938324 0.84654818921938324 1.0384739025931167 0.28832148934821589 0.14042123582229432 0.14861621 0.14861621 0.6578964 0.0439706929 0.07146728 0.14861616399327332 0.14861616399327332 0.65789648182451921 0.043970678884132197 0.071467286508792471 -5.5 0.6962025 5.5 3.5 1.3 0.2 0.6962025 0.7954545 0.188405782 0.0800000057 5.5 0.69620253164556956 5.5 3.5 1.3 0.20000000000000001 
0.69620253164556956 0.79545454545454541 0.18840579710144928 0.080000000000000016 0 0.3529412 0.3529412 0.6363636 0.0714285746 0.04761905 0.35294117647058826 0.35294117647058826 0.63636363636363635 0.071428571428571425 0.047619047619047616 0.931203067 0.931203067 1.13583088 0.312348276 0.140421242 0.93120300814132162 0.93120300814132162 1.1358308309612213 0.31234828012723387 0.14042123582229432 0.3573058 0.3573058 0.8504554 0.0582755022 0.07146728 0.35730592590256216 0.35730592590256216 0.8504555107896975 0.058275496366795021 0.071467286508792471 -4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 0.092623715229236292 0.014973996264087019 -4.4 0.5569621 4.4 3 1.3 0.2 0.5569621 0.6818181 0.188405782 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 3 1.3 0.20000000000000001 0.55696202531645567 0.68181818181818177 0.18840579710144928 0.080000000000000016 0 0.0294117648 0.0294117648 0.4090909 0.0714285746 0.04761905 0.029411764705882353 0.029411764705882353 0.40909090909090912 0.071428571428571425 0.047619047619047616 0.744962454 0.744962454 0.9735693 0.312348276 0.140421242 0.74496240651305734 0.74496240651305734 0.97356928368104678 0.31234828012723387 0.14042123582229432 0.0255098473 0.0255098473 0.480745673 0.0582755022 0.07146728 0.025509830781751675 0.025509830781751675 0.4807456731235793 0.058275496366795021 0.071467286508792471 -5.1 0.6455696 5.1 
3.4 1.5 0.2 0.6455696 0.772727251 0.2173913 0.0800000057 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.3999999999999999 1.5 0.20000000000000001 0.64556962025316444 0.77272727272727271 0.21739130434782611 0.080000000000000016 0 0.235294119 0.235294119 0.590909064 0.119047619 0.04761905 0.23529411764705882 0.23529411764705882 0.59090909090909094 0.11904761904761904 0.047619047619047616 0.8634792 0.8634792 1.10337853 0.360401869 0.140421242 0.86347915300377087 0.86347915300377087 1.1033785215051863 0.36040186168526983 0.14042123582229432 0.183586776 0.183586776 0.797875941 0.0926237255 0.07146728 0.18358681923369741 0.18358681923369741 0.79787580879856601 0.092623715229236292 0.071467286508792471 -5 0.6329114 5 3.5 1.3 0.3 0.6329114 0.7954545 0.188405782 0.120000005 5 0.63291139240506322 5 3.5 1.3 0.29999999999999999 0.63291139240506322 0.79545454545454541 0.18840579710144928 0.12 0 0.205882356 0.205882356 0.6363636 0.0714285746 0.0952381 0.20588235294117646 0.20588235294117646 0.63636363636363635 0.071428571428571425 0.095238095238095233 0.8465482 0.8465482 1.13583088 0.312348276 0.210631862 0.84654818921938324 0.84654818921938324 1.1358308309612213 0.31234828012723387 0.21063185373344145 0.14861621 0.14861621 0.8504554 0.0582755022 0.146190211 0.14861616399327332 0.14861616399327332 0.8504555107896975 0.058275496366795021 0.14619023377705653 -4.5 0.569620252 4.5 2.3 1.3 0.3 0.569620252 0.522727251 0.188405782 0.120000005 4.5 0.56962025316455689 4.5 2.2999999999999998 1.3 0.29999999999999999 0.56962025316455689 0.52272727272727271 0.18840579710144928 0.12 0 0.05882353 0.05882353 0.09090909 0.0714285746 0.0952381 0.058823529411764705 0.058823529411764705 0.090909090909090912 0.071428571428571425 0.095238095238095233 0.7618934 0.7618934 0.7464031 0.312348276 0.210631862 0.76189337029744486 0.76189337029744486 0.7464031174888025 0.31234828012723387 0.21063185373344145 0.0366229452 0.0366229452 0.02727463 0.0582755022 0.146190211 0.036622927317243759 
0.036622927317243759 0.027274649605582402 0.058275496366795021 0.14619023377705653 -4.4 0.5569621 4.4 3.2 1.3 0.2 0.5569621 0.7272727 0.188405782 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 3.2000000000000002 1.3 0.20000000000000001 0.55696202531645567 0.72727272727272729 0.18840579710144928 0.080000000000000016 0 0.0294117648 0.0294117648 0.5 0.0714285746 0.04761905 0.029411764705882353 0.029411764705882353 0.5 0.071428571428571425 0.047619047619047616 0.744962454 0.744962454 1.038474 0.312348276 0.140421242 0.74496240651305734 0.74496240651305734 1.0384739025931167 0.31234828012723387 0.14042123582229432 0.0255098473 0.0255098473 0.6578964 0.0582755022 0.07146728 0.025509830781751675 0.025509830781751675 0.65789648182451921 0.058275496366795021 0.071467286508792471 -5 0.6329114 5 3.5 1.6 0.6 0.6329114 0.7954545 0.231884047 0.24000001 5 0.63291139240506322 5 3.5 1.6000000000000001 0.59999999999999998 0.63291139240506322 0.79545454545454541 0.23188405797101452 0.23999999999999999 0 0.205882356 0.205882356 0.6363636 0.142857149 0.238095239 0.20588235294117646 0.20588235294117646 0.63636363636363635 0.14285714285714285 0.23809523809523808 0.8465482 0.8465482 1.13583088 0.38442865 0.421263725 0.84654818921938324 0.84654818921938324 1.1358308309612213 0.38442865246428787 0.42126370746688291 0.14861621 0.14861621 0.8504554 0.112279132 0.363569826 0.14861616399327332 0.14861616399327332 0.8504555107896975 0.11227913256984562 0.36356980977329256 -5.1 0.6455696 5.1 3.8 1.9 0.4 0.6455696 0.8636363 0.2753623 0.160000011 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.8999999999999999 0.40000000000000002 0.64556962025316444 0.86363636363636354 0.27536231884057971 0.16000000000000003 0 0.235294119 0.235294119 0.772727251 0.1904762 0.142857149 0.23529411764705882 0.23529411764705882 0.77272727272727271 0.19047619047619047 0.14285714285714285 0.8634792 0.8634792 1.23318768 0.456509024 0.280842483 0.86347915300377087 
0.86347915300377087 1.2331877593293259 0.45650902480134176 0.28084247164458864 0.183586776 0.183586776 0.9472266 0.1785302 0.223406911 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.17853019682812199 0.22340689032507804 -4.8 0.607594967 4.8 3 1.4 0.3 0.607594967 0.6818181 0.202898547 0.120000005 4.7999999999999998 0.60759493670886067 4.7999999999999998 3 1.3999999999999999 0.29999999999999999 0.60759493670886067 0.68181818181818177 0.20289855072463767 0.12 0 0.14705883 0.14705883 0.4090909 0.0952381 0.0952381 0.14705882352941177 0.14705882352941177 0.40909090909090912 0.095238095238095233 0.095238095238095233 0.8126863 0.8126863 0.9735693 0.336375058 0.210631862 0.81268626165060787 0.81268626165060787 0.97356928368104678 0.33637507090625185 0.21063185373344145 0.09137413 0.09137413 0.480745673 0.07455328 0.146190211 0.091374136005250073 0.091374136005250073 0.4807456731235793 0.074553292690365869 0.14619023377705653 -5.1 0.6455696 5.1 3.8 1.6 0.2 0.6455696 0.8636363 0.231884047 0.0800000057 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.6000000000000001 0.20000000000000001 0.64556962025316444 0.86363636363636354 0.23188405797101452 0.080000000000000016 0 0.235294119 0.235294119 0.772727251 0.142857149 0.04761905 0.23529411764705882 0.23529411764705882 0.77272727272727271 0.14285714285714285 0.047619047619047616 0.8634792 0.8634792 1.23318768 0.38442865 0.140421242 0.86347915300377087 0.86347915300377087 1.2331877593293259 0.38442865246428787 0.14042123582229432 0.183586776 0.183586776 0.9472266 0.112279132 0.07146728 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.11227913256984562 0.071467286508792471 -4.6 0.5822785 4.6 3.2 1.4 0.2 0.5822785 0.7272727 0.202898547 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.2000000000000002 1.3999999999999999 0.20000000000000001 0.58227848101265811 0.72727272727272729 0.20289855072463767 0.080000000000000016 0 0.0882353 0.0882353 0.5 0.0952381 
0.04761905 0.088235294117647065 0.088235294117647065 0.5 0.095238095238095233 0.047619047619047616 0.7788243 0.7788243 1.038474 0.336375058 0.140421242 0.77882433408183249 0.77882433408183249 1.0384739025931167 0.33637507090625185 0.14042123582229432 0.0510348342 0.0510348342 0.6578964 0.07455328 0.07146728 0.05103484272829939 0.05103484272829939 0.65789648182451921 0.074553292690365869 0.071467286508792471 -5.3 0.6708861 5.3 3.7 1.5 0.2 0.6708861 0.840909064 0.2173913 0.0800000057 5.2999999999999998 0.670886075949367 5.2999999999999998 3.7000000000000002 1.5 0.20000000000000001 0.670886075949367 0.84090909090909094 0.21739130434782611 0.080000000000000016 0 0.294117659 0.294117659 0.727272749 0.119047619 0.04761905 0.29411764705882354 0.29411764705882354 0.72727272727272729 0.11904761904761904 0.047619047619047616 0.897341132 0.897341132 1.20073545 0.360401869 0.140421242 0.89734108057254625 0.89734108057254625 1.2007354498732912 0.36040186168526983 0.14042123582229432 0.264780223 0.264780223 0.9236834 0.0926237255 0.07146728 0.26478014840942388 0.26478014840942388 0.92368343544686704 0.092623715229236292 0.071467286508792471 -5 0.6329114 5 3.3 1.4 0.2 0.6329114 0.74999994 0.202898547 0.0800000057 5 0.63291139240506322 5 3.2999999999999998 1.3999999999999999 0.20000000000000001 0.63291139240506322 0.74999999999999989 0.20289855072463767 0.080000000000000016 0 0.205882356 0.205882356 0.545454562 0.0952381 0.04761905 0.20588235294117646 0.20588235294117646 0.54545454545454541 0.095238095238095233 0.047619047619047616 0.8465482 0.8465482 1.07092619 0.336375058 0.140421242 0.84654818921938324 0.84654818921938324 1.0709262120491514 0.33637507090625185 0.14042123582229432 0.14861621 0.14861621 0.733567655 0.07455328 0.07146728 0.14861616399327332 0.14861616399327332 0.73356785053506501 0.074553292690365869 0.071467286508792471 -7 0.886076 7 3.2 4.7 1.4 0.886076 0.7272727 0.6811594 0.56 7 0.88607594936708844 7 3.2000000000000002 4.7000000000000002 1.3999999999999999 
0.88607594936708844 0.72727272727272729 0.6811594202898551 0.55999999999999994 1 0.7941176 0.7941176 0.5 0.547619045 0.476190478 0.79411764705882348 0.79411764705882348 0.5 0.54761904761904767 0.47619047619047616 1.18516755 1.18516755 1.038474 1.12925911 0.982948661 1.1851674649071366 1.1851674649071366 1.0384739025931167 1.1292591666138456 0.98294865075606008 0.91099143 0.91099143 0.6578964 0.734288335 0.6955889 0.9109914626370752 0.9109914626370752 0.65789648182451921 0.73428833770009005 0.69558889835906823 -6.4 0.8101266 6.4 3.2 4.5 1.5 0.8101266 0.7272727 0.6521739 0.6 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.2000000000000002 4.5 1.5 0.81012658227848089 0.72727272727272729 0.65217391304347827 0.60000000000000009 1 0.617647052 0.617647052 0.5 0.5 0.523809552 0.61764705882352944 0.61764705882352944 0.5 0.5 0.52380952380952384 1.08358181 1.08358181 1.038474 1.08120561 1.05315924 1.0835816822008106 1.0835816822008106 1.0384739025931167 1.0812055850558095 1.0531592686672073 0.7613054 0.7613054 0.6578964 0.7093809 0.719658256 0.76130542616799635 0.76130542616799635 0.65789648182451921 0.70938088629442786 0.71965826470413374 -6.9 0.873417735 6.9 3.1 4.9 1.5 0.873417735 0.7045454 0.710144937 0.6 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 4.9000000000000004 1.5 0.87341772151898722 0.70454545454545459 0.71014492753623193 0.60000000000000009 1 0.7647059 0.7647059 0.454545468 0.5952381 0.523809552 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.59523809523809523 0.52380952380952384 1.16823661 1.16823661 1.0060215 1.17731273 1.05315924 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.1773127481718815 1.0531592686672073 0.8933714 0.8933714 0.5725629 0.757097244 0.719658256 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.75709721026728072 0.71965826470413374 -5.5 0.6962025 5.5 2.3 4 1.3 0.6962025 0.522727251 0.5797101 0.52 5.5 0.69620253164556956 5.5 2.2999999999999998 4 1.3 
0.69620253164556956 0.52272727272727271 0.57971014492753625 0.52000000000000002 1 0.3529412 0.3529412 0.09090909 0.3809524 0.428571433 0.35294117647058826 0.35294117647058826 0.090909090909090912 0.38095238095238093 0.42857142857142855 0.931203067 0.931203067 0.7464031 0.9610716 0.912738 0.93120300814132162 0.93120300814132162 0.7464031174888025 0.96107163116071959 0.91273803284491306 0.3573058 0.3573058 0.02727463 0.636991262 0.6687579 0.35730592590256216 0.35730592590256216 0.027274649605582402 0.63699126114775995 0.66875793094202918 -6.5 0.822784841 6.5 2.8 4.6 1.5 0.822784841 0.6363636 0.6666666 0.6 6.5 0.82278481012658211 6.5 2.7999999999999998 4.5999999999999996 1.5 0.82278481012658211 0.63636363636363635 0.66666666666666663 0.60000000000000009 1 0.647058845 0.647058845 0.3181818 0.523809552 0.523809552 0.6470588235294118 0.6470588235294118 0.31818181818181818 0.52380952380952384 0.52380952380952384 1.10051274 1.10051274 0.908664644 1.10523236 1.05315924 1.1005126459851982 1.1005126459851982 0.908664664768977 1.1052323758348275 1.0531592686672073 0.7940583 0.7940583 0.296437562 0.7221062 0.719658256 0.79405825408863862 0.79405825408863862 0.29643751019242903 0.72210618745756316 0.71965826470413374 -5.7 0.721519 5.7 2.8 4.5 1.3 0.721519 0.6363636 0.6521739 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.7999999999999998 4.5 1.3 0.72151898734177211 0.63636363636363635 0.65217391304347827 0.52000000000000002 1 0.4117647 0.4117647 0.3181818 0.5 0.428571433 0.41176470588235292 0.41176470588235292 0.31818181818181818 0.5 0.42857142857142855 0.965064943 0.965064943 0.908664644 1.08120561 0.912738 0.96506493571009688 0.96506493571009688 0.908664664768977 1.0812055850558095 0.91273803284491306 0.455403626 0.455403626 0.296437562 0.7093809 0.6687579 0.45540375842690767 0.45540375842690767 0.29643751019242903 0.70938088629442786 0.66875793094202918 -6.3 0.797468364 6.3 3.3 4.7 1.6 0.797468364 0.74999994 0.6811594 0.640000045 6.2999999999999998 
0.79746835443037956 6.2999999999999998 3.2999999999999998 4.7000000000000002 1.6000000000000001 0.79746835443037956 0.74999999999999989 0.6811594202898551 0.64000000000000012 1 0.5882353 0.5882353 0.545454562 0.547619045 0.5714286 0.58823529411764708 0.58823529411764708 0.54545454545454541 0.54761904761904767 0.5714285714285714 1.06665075 1.06665075 1.07092619 1.12925911 1.12336993 1.0666507184164229 1.0666507184164229 1.0709262120491514 1.1292591666138456 1.1233698865783546 0.72531265 0.72531265 0.733567655 0.734288335 0.7413043 0.72531248018388961 0.72531248018388961 0.73356785053506501 0.73428833770009005 0.74130430666213609 -4.9 0.6202532 4.9 2.4 3.3 1 0.6202532 0.545454562 0.478260845 0.4 4.9000000000000004 0.620253164556962 4.9000000000000004 2.3999999999999999 3.2999999999999998 1 0.620253164556962 0.54545454545454541 0.47826086956521741 0.40000000000000002 1 0.1764706 0.1764706 0.13636364 0.238095239 0.2857143 0.17647058823529413 0.17647058823529413 0.13636363636363635 0.23809523809523808 0.2857142857142857 0.829617262 0.829617262 0.778855443 0.792884052 0.7021062 0.82961722543499561 0.82961722543499561 0.77885542694483745 0.79288409570759366 0.70210617911147155 0.117838435 0.117838435 0.052431643 0.508717 0.567488432 0.11783846996167147 0.11783846996167147 0.052431629740293029 0.50871703350008446 0.56748843907683133 -6.6 0.835443 6.6 2.9 4.6 1.3 0.835443 0.659090936 0.6666666 0.52 6.5999999999999996 0.83544303797468333 6.5999999999999996 2.8999999999999999 4.5999999999999996 1.3 0.83544303797468333 0.65909090909090906 0.66666666666666663 0.52000000000000002 1 0.6764706 0.6764706 0.363636374 0.523809552 0.428571433 0.67647058823529416 0.67647058823529416 0.36363636363636365 0.52380952380952384 0.42857142857142855 1.11744368 1.11744368 0.941117 1.10523236 0.912738 1.1174436097695859 1.1174436097695859 0.94111697422501195 1.1052323758348275 0.91273803284491306 0.8235505 0.8235505 0.386941522 0.7221062 0.6687579 0.82355060799945012 0.82355060799945012 
0.38694155923435009 0.72210618745756316 0.66875793094202918 -5.2 0.6582278 5.2 2.7 3.9 1.4 0.6582278 0.6136364 0.5652174 0.56 5.2000000000000002 0.65822784810126578 5.2000000000000002 2.7000000000000002 3.8999999999999999 1.3999999999999999 0.65822784810126578 0.61363636363636365 0.56521739130434778 0.55999999999999994 1 0.2647059 0.2647059 0.272727281 0.357142866 0.476190478 0.26470588235294118 0.26470588235294118 0.27272727272727271 0.35714285714285715 0.47619047619047616 0.880410135 0.880410135 0.876212358 0.937044859 0.982948661 0.88041011678815861 0.88041011678815861 0.87621235531294217 0.93704484038170155 0.98294865075606008 0.222458825 0.222458825 0.214467555 0.620649636 0.6955889 0.22245879972342997 0.22245879972342997 0.21446754872116464 0.62064960460061858 0.69558889835906823 -5 0.6329114 5 2 3.5 1 0.6329114 0.454545438 0.5072464 0.4 5 0.63291139240506322 5 2 3.5 1 0.63291139240506322 0.45454545454545453 0.50724637681159424 0.40000000000000002 1 0.205882356 0.205882356 0 0.261904776 0.2857143 0.20588235294117646 0.20588235294117646 0 0.26190476190476192 0.2857142857142857 0.8465482 0.8465482 0.6490462 0.8409377 0.7021062 0.84654818921938324 0.84654818921938324 0.64904618912069789 0.84093767726562962 0.70210617911147155 0.14861621 0.14861621 0.00179608515 0.548691869 0.567488432 0.14861616399327332 0.14861616399327332 0.0017960868928680873 0.54869186015931659 0.56748843907683133 -5.9 0.7468355 5.9 3 4.2 1.5 0.7468355 0.6818181 0.6086956 0.6 5.9000000000000004 0.74683544303797467 5.9000000000000004 3 4.2000000000000002 1.5 0.74683544303797467 0.68181818181818177 0.60869565217391308 0.60000000000000009 1 0.470588237 0.470588237 0.4090909 0.428571433 0.523809552 0.47058823529411764 0.47058823529411764 0.40909090909090912 0.42857142857142855 0.52380952380952384 0.998926938 0.998926938 0.9735693 1.00912511 1.05315924 0.99892686327887226 0.99892686327887226 0.97356928368104678 1.0091252127187555 1.0531592686672073 0.5528621 0.5528621 0.480745673 0.667766631 
0.719658256 0.55286190159866166 0.55286190159866166 0.4807456731235793 0.66776662351076677 0.71965826470413374 -6 0.7594937 6 2.2 4 1 0.7594937 0.5 0.5797101 0.4 6 0.75949367088607578 6 2.2000000000000002 4 1 0.75949367088607578 0.5 0.57971014492753625 0.40000000000000002 1 0.5 0.5 0.0454545468 0.3809524 0.2857143 0.5 0.5 0.045454545454545456 0.38095238095238093 0.2857142857142857 1.01585793 1.01585793 0.7139508 0.9610716 0.7021062 1.0158578270632599 1.0158578270632599 0.71395080803276778 0.96107163116071959 0.70210617911147155 0.5995771 0.5995771 0.0126449876 0.636991262 0.567488432 0.59957706030964308 0.59957706030964308 0.012644988485085273 0.63699126114775995 0.56748843907683133 -6.1 0.7721519 6.1 2.9 4.7 1.4 0.7721519 0.659090936 0.6811594 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 2.8999999999999999 4.7000000000000002 1.3999999999999999 0.772151898734177 0.65909090909090906 0.6811594202898551 0.55999999999999994 1 0.5294118 0.5294118 0.363636374 0.547619045 0.476190478 0.52941176470588236 0.52941176470588236 0.36363636363636365 0.54761904761904767 0.47619047619047616 1.03278887 1.03278887 0.941117 1.12925911 0.982948661 1.0327887908476474 1.0327887908476474 0.94111697422501195 1.1292591666138456 0.98294865075606008 0.644171059 0.644171059 0.386941522 0.734288335 0.6955889 0.64417093822767468 0.64417093822767468 0.38694155923435009 0.73428833770009005 0.69558889835906823 -5.6 0.708860755 5.6 2.9 3.6 1.3 0.708860755 0.659090936 0.5217391 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.8999999999999999 3.6000000000000001 1.3 0.70886075949367078 0.65909090909090906 0.52173913043478259 0.52000000000000002 1 0.382352948 0.382352948 0.363636374 0.2857143 0.428571433 0.38235294117647056 0.38235294117647056 0.36363636363636365 0.2857142857142857 0.42857142857142855 0.948134 0.948134 0.941117 0.8649644 0.912738 0.94813397192570914 0.94813397192570914 0.94111697422501195 0.86496446804464766 0.91273803284491306 0.4060508 0.4060508 
0.386941522 0.5676816 0.6687579 0.40605068921232229 0.40605068921232229 0.38694155923435009 0.56768154970207829 0.66875793094202918 -6.7 0.848101258 6.7 3.1 4.4 1.4 0.848101258 0.7045454 0.6376811 0.56 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 4.4000000000000004 1.3999999999999999 0.84810126582278467 0.70454545454545459 0.63768115942028991 0.55999999999999994 1 0.7058824 0.7058824 0.454545468 0.476190478 0.476190478 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.47619047619047616 0.47619047619047616 1.13437462 1.13437462 1.0060215 1.05717874 0.982948661 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.0571787942767916 0.98294865075606008 0.84984225 0.84984225 0.5725629 0.6960943 0.6955889 0.84984228863096656 0.84984228863096656 0.5725628341629212 0.69609423458217601 0.69558889835906823 -5.6 0.708860755 5.6 3 4.5 1.5 0.708860755 0.6818181 0.6521739 0.6 5.5999999999999996 0.70886075949367078 5.5999999999999996 3 4.5 1.5 0.70886075949367078 0.68181818181818177 0.65217391304347827 0.60000000000000009 1 0.382352948 0.382352948 0.4090909 0.5 0.523809552 0.38235294117647056 0.38235294117647056 0.40909090909090912 0.5 0.52380952380952384 0.948134 0.948134 0.9735693 1.08120561 1.05315924 0.94813397192570914 0.94813397192570914 0.97356928368104678 1.0812055850558095 1.0531592686672073 0.4060508 0.4060508 0.480745673 0.7093809 0.719658256 0.40605068921232229 0.40605068921232229 0.4807456731235793 0.70938088629442786 0.71965826470413374 -5.8 0.734177232 5.8 2.7 4.1 1 0.734177232 0.6136364 0.5942029 0.4 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 4.0999999999999996 1 0.73417721518987333 0.61363636363636365 0.59420289855072461 0.40000000000000002 1 0.441176474 0.441176474 0.272727281 0.4047619 0.2857143 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.40476190476190477 0.2857142857142857 0.981996 0.981996 0.876212358 0.985098362 0.7021062 0.98199589949448451 
0.98199589949448451 0.87621235531294217 0.98509842193973751 0.70210617911147155 0.504585147 0.504585147 0.214467555 0.6526925 0.567488432 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.65269250269310186 0.56748843907683133 -6.2 0.7848101 6.2 2.2 4.5 1.5 0.7848101 0.5 0.6521739 0.6 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.2000000000000002 4.5 1.5 0.78481012658227833 0.5 0.65217391304347827 0.60000000000000009 1 0.5588235 0.5588235 0.0454545468 0.5 0.523809552 0.55882352941176472 0.55882352941176472 0.045454545454545456 0.5 0.52380952380952384 1.04971981 1.04971981 0.7139508 1.08120561 1.05315924 1.0497197546320352 1.0497197546320352 0.71395080803276778 1.0812055850558095 1.0531592686672073 0.6861942 0.6861942 0.0126449876 0.7093809 0.719658256 0.6861941068147408 0.6861941068147408 0.012644988485085273 0.70938088629442786 0.71965826470413374 -5.6 0.708860755 5.6 2.5 3.9 1.1 0.708860755 0.5681818 0.5652174 0.440000027 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.5 3.8999999999999999 1.1000000000000001 0.70886075949367078 0.56818181818181812 0.56521739130434778 0.44000000000000006 1 0.382352948 0.382352948 0.181818187 0.357142866 0.333333343 0.38235294117647056 0.38235294117647056 0.18181818181818182 0.35714285714285715 0.33333333333333331 0.948134 0.948134 0.8113077 0.937044859 0.7723168 0.94813397192570914 0.94813397192570914 0.81130773640087239 0.93704484038170155 0.7723167970226188 0.4060508 0.4060508 0.09116498 0.620649636 0.605188966 0.40605068921232229 0.40605068921232229 0.091164973250557446 0.62064960460061858 0.60518894407556645 -5.9 0.7468355 5.9 3.2 4.8 1.8 0.7468355 0.7272727 0.6956522 0.719999969 5.9000000000000004 0.74683544303797467 5.9000000000000004 3.2000000000000002 4.7999999999999998 1.8 0.74683544303797467 0.72727272727272729 0.69565217391304346 0.72000000000000008 1 0.470588237 0.470588237 0.5 0.5714286 0.6666667 0.47058823529411764 0.47058823529411764 0.5 0.5714285714285714 0.66666666666666663 
0.998926938 0.998926938 1.038474 1.153286 1.26379108 0.99892686327887226 0.99892686327887226 1.0384739025931167 1.1532859573928635 1.2637911224006488 0.5528621 0.5528621 0.6578964 0.7459458 0.778455853 0.55286190159866166 0.55286190159866166 0.65789648182451921 0.74594581373449664 0.77845583371698512 -6.1 0.7721519 6.1 2.8 4 1.3 0.7721519 0.6363636 0.5797101 0.52 6.0999999999999996 0.772151898734177 6.0999999999999996 2.7999999999999998 4 1.3 0.772151898734177 0.63636363636363635 0.57971014492753625 0.52000000000000002 1 0.5294118 0.5294118 0.3181818 0.3809524 0.428571433 0.52941176470588236 0.52941176470588236 0.31818181818181818 0.38095238095238093 0.42857142857142855 1.03278887 1.03278887 0.908664644 0.9610716 0.912738 1.0327887908476474 1.0327887908476474 0.908664664768977 0.96107163116071959 0.91273803284491306 0.644171059 0.644171059 0.296437562 0.636991262 0.6687579 0.64417093822767468 0.64417093822767468 0.29643751019242903 0.63699126114775995 0.66875793094202918 -6.3 0.797468364 6.3 2.5 4.9 1.5 0.797468364 0.5681818 0.710144937 0.6 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.5 4.9000000000000004 1.5 0.79746835443037956 0.56818181818181812 0.71014492753623193 0.60000000000000009 1 0.5882353 0.5882353 0.181818187 0.5952381 0.523809552 0.58823529411764708 0.58823529411764708 0.18181818181818182 0.59523809523809523 0.52380952380952384 1.06665075 1.06665075 0.8113077 1.17731273 1.05315924 1.0666507184164229 1.0666507184164229 0.81130773640087239 1.1773127481718815 1.0531592686672073 0.72531265 0.72531265 0.09116498 0.757097244 0.719658256 0.72531248018388961 0.72531248018388961 0.091164973250557446 0.75709721026728072 0.71965826470413374 -6.1 0.7721519 6.1 2.8 4.7 1.2 0.7721519 0.6363636 0.6811594 0.480000019 6.0999999999999996 0.772151898734177 6.0999999999999996 2.7999999999999998 4.7000000000000002 1.2 0.772151898734177 0.63636363636363635 0.6811594202898551 0.47999999999999998 1 0.5294118 0.5294118 0.3181818 0.547619045 0.3809524 
0.52941176470588236 0.52941176470588236 0.31818181818181818 0.54761904761904767 0.38095238095238093 1.03278887 1.03278887 0.908664644 1.12925911 0.842527449 1.0327887908476474 1.0327887908476474 0.908664664768977 1.1292591666138456 0.84252741493376582 0.644171059 0.644171059 0.296437562 0.734288335 0.6387745 0.64417093822767468 0.64417093822767468 0.29643751019242903 0.73428833770009005 0.63877450359674082 -6.4 0.8101266 6.4 2.9 4.3 1.3 0.8101266 0.659090936 0.623188436 0.52 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.8999999999999999 4.2999999999999998 1.3 0.81012658227848089 0.65909090909090906 0.62318840579710144 0.52000000000000002 1 0.617647052 0.617647052 0.363636374 0.452380955 0.428571433 0.61764705882352944 0.61764705882352944 0.36363636363636365 0.45238095238095238 0.42857142857142855 1.08358181 1.08358181 0.941117 1.033152 0.912738 1.0835816822008106 1.0835816822008106 0.94111697422501195 1.0331520034977735 0.91273803284491306 0.7613054 0.7613054 0.386941522 0.682228565 0.6687579 0.76130542616799635 0.76130542616799635 0.38694155923435009 0.68222849726345214 0.66875793094202918 -6.6 0.835443 6.6 3 4.4 1.4 0.835443 0.6818181 0.6376811 0.56 6.5999999999999996 0.83544303797468333 6.5999999999999996 3 4.4000000000000004 1.3999999999999999 0.83544303797468333 0.68181818181818177 0.63768115942028991 0.55999999999999994 1 0.6764706 0.6764706 0.4090909 0.476190478 0.476190478 0.67647058823529416 0.67647058823529416 0.40909090909090912 0.47619047619047616 0.47619047619047616 1.11744368 1.11744368 0.9735693 1.05717874 0.982948661 1.1174436097695859 1.1174436097695859 0.97356928368104678 1.0571787942767916 0.98294865075606008 0.8235505 0.8235505 0.480745673 0.6960943 0.6955889 0.82355060799945012 0.82355060799945012 0.4807456731235793 0.69609423458217601 0.69558889835906823 -6.8 0.860759556 6.8 2.8 4.8 1.4 0.860759556 0.6363636 0.6956522 0.56 6.7999999999999998 0.86075949367088589 6.7999999999999998 2.7999999999999998 4.7999999999999998 
1.3999999999999999 0.86075949367088589 0.63636363636363635 0.69565217391304346 0.55999999999999994 1 0.7352941 0.7352941 0.3181818 0.5714286 0.476190478 0.73529411764705888 0.73529411764705888 0.31818181818181818 0.5714285714285714 0.47619047619047616 1.15130568 1.15130568 0.908664644 1.153286 0.982948661 1.1513055373383612 1.1513055373383612 0.908664664768977 1.1532859573928635 0.98294865075606008 0.873058 0.873058 0.296437562 0.7459458 0.6955889 0.87305788341059976 0.87305788341059976 0.29643751019242903 0.74594581373449664 0.69558889835906823 -6.7 0.848101258 6.7 3 5 1.7 0.848101258 0.6818181 0.7246376 0.68 6.7000000000000002 0.84810126582278467 6.7000000000000002 3 5 1.7 0.84810126582278467 0.68181818181818177 0.72463768115942029 0.68000000000000005 1 0.7058824 0.7058824 0.4090909 0.619047642 0.619047642 0.70588235294117652 0.70588235294117652 0.40909090909090912 0.61904761904761907 0.61904761904761907 1.13437462 1.13437462 0.9735693 1.20133948 1.19358051 1.1343745735539736 1.1343745735539736 0.97356928368104678 1.2013395389508994 1.1935805044895016 0.84984225 0.84984225 0.480745673 0.7677612 0.7608193 0.84984228863096656 0.84984228863096656 0.4807456731235793 0.76776110588492996 0.76081931222191046 -6 0.7594937 6 2.9 4.5 1.5 0.7594937 0.659090936 0.6521739 0.6 6 0.75949367088607578 6 2.8999999999999999 4.5 1.5 0.75949367088607578 0.65909090909090906 0.65217391304347827 0.60000000000000009 1 0.5 0.5 0.363636374 0.5 0.523809552 0.5 0.5 0.36363636363636365 0.5 0.52380952380952384 1.01585793 1.01585793 0.941117 1.08120561 1.05315924 1.0158578270632599 1.0158578270632599 0.94111697422501195 1.0812055850558095 1.0531592686672073 0.5995771 0.5995771 0.386941522 0.7093809 0.719658256 0.59957706030964308 0.59957706030964308 0.38694155923435009 0.70938088629442786 0.71965826470413374 -5.7 0.721519 5.7 2.6 3.5 1 0.721519 0.590909064 0.5072464 0.4 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.6000000000000001 3.5 1 0.72151898734177211 0.59090909090909094 
0.50724637681159424 0.40000000000000002 1 0.4117647 0.4117647 0.227272734 0.261904776 0.2857143 0.41176470588235292 0.41176470588235292 0.22727272727272727 0.26190476190476192 0.2857142857142857 0.965064943 0.965064943 0.84376 0.8409377 0.7021062 0.96506493571009688 0.96506493571009688 0.84376004585690734 0.84093767726562962 0.70210617911147155 0.455403626 0.455403626 0.145245746 0.548691869 0.567488432 0.45540375842690767 0.45540375842690767 0.14524588521591403 0.54869186015931659 0.56748843907683133 -5.5 0.6962025 5.5 2.4 3.8 1.1 0.6962025 0.545454562 0.5507246 0.440000027 5.5 0.69620253164556956 5.5 2.3999999999999999 3.7999999999999998 1.1000000000000001 0.69620253164556956 0.54545454545454541 0.55072463768115942 0.44000000000000006 1 0.3529412 0.3529412 0.13636364 0.333333343 0.333333343 0.35294117647058826 0.35294117647058826 0.13636363636363635 0.33333333333333331 0.33333333333333331 0.931203067 0.931203067 0.778855443 0.913018048 0.7723168 0.93120300814132162 0.93120300814132162 0.77885542694483745 0.91301804960268351 0.7723167970226188 0.3573058 0.3573058 0.052431643 0.6036563 0.605188966 0.35730592590256216 0.35730592590256216 0.052431629740293029 0.60365621138880055 0.60518894407556645 -5.5 0.6962025 5.5 2.4 3.7 1 0.6962025 0.545454562 0.5362319 0.4 5.5 0.69620253164556956 5.5 2.3999999999999999 3.7000000000000002 1 0.69620253164556956 0.54545454545454541 0.53623188405797106 0.40000000000000002 1 0.3529412 0.3529412 0.13636364 0.309523821 0.2857143 0.35294117647058826 0.35294117647058826 0.13636363636363635 0.30952380952380953 0.2857142857142857 0.931203067 0.931203067 0.778855443 0.888991237 0.7021062 0.93120300814132162 0.93120300814132162 0.77885542694483745 0.8889912588236657 0.70210617911147155 0.3573058 0.3573058 0.052431643 0.5860022 0.567488432 0.35730592590256216 0.35730592590256216 0.052431629740293029 0.58600218188861053 0.56748843907683133 -5.8 0.734177232 5.8 2.7 3.9 1.2 0.734177232 0.6136364 0.5652174 0.480000019 5.7999999999999998 
0.73417721518987333 5.7999999999999998 2.7000000000000002 3.8999999999999999 1.2 0.73417721518987333 0.61363636363636365 0.56521739130434778 0.47999999999999998 1 0.441176474 0.441176474 0.272727281 0.357142866 0.3809524 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.35714285714285715 0.38095238095238093 0.981996 0.981996 0.876212358 0.937044859 0.842527449 0.98199589949448451 0.98199589949448451 0.87621235531294217 0.93704484038170155 0.84252741493376582 0.504585147 0.504585147 0.214467555 0.620649636 0.6387745 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.62064960460061858 0.63877450359674082 -6 0.7594937 6 2.7 5.1 1.6 0.7594937 0.6136364 0.7391304 0.640000045 6 0.75949367088607578 6 2.7000000000000002 5.0999999999999996 1.6000000000000001 0.75949367088607578 0.61363636363636365 0.73913043478260865 0.64000000000000012 1 0.5 0.5 0.272727281 0.642857134 0.5714286 0.5 0.5 0.27272727272727271 0.6428571428571429 0.5714285714285714 1.01585793 1.01585793 0.876212358 1.22536623 1.12336993 1.0158578270632599 1.0158578270632599 0.87621235531294217 1.2253663297299173 1.1233698865783546 0.5995771 0.5995771 0.214467555 0.777955949 0.7413043 0.59957706030964308 0.59957706030964308 0.21446754872116464 0.77795595083214475 0.74130430666213609 -5.4 0.683544338 5.4 3 4.5 1.5 0.683544338 0.6818181 0.6521739 0.6 5.4000000000000004 0.68354430379746833 5.4000000000000004 3 4.5 1.5 0.68354430379746833 0.68181818181818177 0.65217391304347827 0.60000000000000009 1 0.323529422 0.323529422 0.4090909 0.5 0.523809552 0.3235294117647059 0.3235294117647059 0.40909090909090912 0.5 0.52380952380952384 0.9142721 0.9142721 0.9735693 1.08120561 1.05315924 0.91427204435693399 0.91427204435693399 0.97356928368104678 1.0812055850558095 1.0531592686672073 0.3099612 0.3099612 0.480745673 0.7093809 0.719658256 0.30996111189240849 0.30996111189240849 0.4807456731235793 0.70938088629442786 0.71965826470413374 -6 0.7594937 6 3.4 4.5 1.6 0.7594937 0.772727251 0.6521739 
0.640000045 6 0.75949367088607578 6 3.3999999999999999 4.5 1.6000000000000001 0.75949367088607578 0.77272727272727271 0.65217391304347827 0.64000000000000012 1 0.5 0.5 0.590909064 0.5 0.5714286 0.5 0.5 0.59090909090909094 0.5 0.5714285714285714 1.01585793 1.01585793 1.10337853 1.08120561 1.12336993 1.0158578270632599 1.0158578270632599 1.1033785215051863 1.0812055850558095 1.1233698865783546 0.5995771 0.5995771 0.797875941 0.7093809 0.7413043 0.59957706030964308 0.59957706030964308 0.79787580879856601 0.70938088629442786 0.74130430666213609 -6.7 0.848101258 6.7 3.1 4.7 1.5 0.848101258 0.7045454 0.6811594 0.6 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 4.7000000000000002 1.5 0.84810126582278467 0.70454545454545459 0.6811594202898551 0.60000000000000009 1 0.7058824 0.7058824 0.454545468 0.547619045 0.523809552 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.54761904761904767 0.52380952380952384 1.13437462 1.13437462 1.0060215 1.12925911 1.05315924 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.1292591666138456 1.0531592686672073 0.84984225 0.84984225 0.5725629 0.734288335 0.719658256 0.84984228863096656 0.84984228863096656 0.5725628341629212 0.73428833770009005 0.71965826470413374 -6.3 0.797468364 6.3 2.3 4.4 1.3 0.797468364 0.522727251 0.6376811 0.52 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.2999999999999998 4.4000000000000004 1.3 0.79746835443037956 0.52272727272727271 0.63768115942028991 0.52000000000000002 1 0.5882353 0.5882353 0.09090909 0.476190478 0.428571433 0.58823529411764708 0.58823529411764708 0.090909090909090912 0.47619047619047616 0.42857142857142855 1.06665075 1.06665075 0.7464031 1.05717874 0.912738 1.0666507184164229 1.0666507184164229 0.7464031174888025 1.0571787942767916 0.91273803284491306 0.72531265 0.72531265 0.02727463 0.6960943 0.6687579 0.72531248018388961 0.72531248018388961 0.027274649605582402 0.69609423458217601 0.66875793094202918 -5.6 0.708860755 5.6 3 4.1 1.3 
0.708860755 0.6818181 0.5942029 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 3 4.0999999999999996 1.3 0.70886075949367078 0.68181818181818177 0.59420289855072461 0.52000000000000002 1 0.382352948 0.382352948 0.4090909 0.4047619 0.428571433 0.38235294117647056 0.38235294117647056 0.40909090909090912 0.40476190476190477 0.42857142857142855 0.948134 0.948134 0.9735693 0.985098362 0.912738 0.94813397192570914 0.94813397192570914 0.97356928368104678 0.98509842193973751 0.91273803284491306 0.4060508 0.4060508 0.480745673 0.6526925 0.6687579 0.40605068921232229 0.40605068921232229 0.4807456731235793 0.65269250269310186 0.66875793094202918 -5.5 0.6962025 5.5 2.5 4 1.3 0.6962025 0.5681818 0.5797101 0.52 5.5 0.69620253164556956 5.5 2.5 4 1.3 0.69620253164556956 0.56818181818181812 0.57971014492753625 0.52000000000000002 1 0.3529412 0.3529412 0.181818187 0.3809524 0.428571433 0.35294117647058826 0.35294117647058826 0.18181818181818182 0.38095238095238093 0.42857142857142855 0.931203067 0.931203067 0.8113077 0.9610716 0.912738 0.93120300814132162 0.93120300814132162 0.81130773640087239 0.96107163116071959 0.91273803284491306 0.3573058 0.3573058 0.09116498 0.636991262 0.6687579 0.35730592590256216 0.35730592590256216 0.091164973250557446 0.63699126114775995 0.66875793094202918 -5.5 0.6962025 5.5 2.6 4.4 1.2 0.6962025 0.590909064 0.6376811 0.480000019 5.5 0.69620253164556956 5.5 2.6000000000000001 4.4000000000000004 1.2 0.69620253164556956 0.59090909090909094 0.63768115942028991 0.47999999999999998 1 0.3529412 0.3529412 0.227272734 0.476190478 0.3809524 0.35294117647058826 0.35294117647058826 0.22727272727272727 0.47619047619047616 0.38095238095238093 0.931203067 0.931203067 0.84376 1.05717874 0.842527449 0.93120300814132162 0.93120300814132162 0.84376004585690734 1.0571787942767916 0.84252741493376582 0.3573058 0.3573058 0.145245746 0.6960943 0.6387745 0.35730592590256216 0.35730592590256216 0.14524588521591403 0.69609423458217601 0.63877450359674082 -6.1 
0.7721519 6.1 3 4.6 1.4 0.7721519 0.6818181 0.6666666 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 3 4.5999999999999996 1.3999999999999999 0.772151898734177 0.68181818181818177 0.66666666666666663 0.55999999999999994 1 0.5294118 0.5294118 0.4090909 0.523809552 0.476190478 0.52941176470588236 0.52941176470588236 0.40909090909090912 0.52380952380952384 0.47619047619047616 1.03278887 1.03278887 0.9735693 1.10523236 0.982948661 1.0327887908476474 1.0327887908476474 0.97356928368104678 1.1052323758348275 0.98294865075606008 0.644171059 0.644171059 0.480745673 0.7221062 0.6955889 0.64417093822767468 0.64417093822767468 0.4807456731235793 0.72210618745756316 0.69558889835906823 -5.8 0.734177232 5.8 2.6 4 1.2 0.734177232 0.590909064 0.5797101 0.480000019 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.6000000000000001 4 1.2 0.73417721518987333 0.59090909090909094 0.57971014492753625 0.47999999999999998 1 0.441176474 0.441176474 0.227272734 0.3809524 0.3809524 0.44117647058823528 0.44117647058823528 0.22727272727272727 0.38095238095238093 0.38095238095238093 0.981996 0.981996 0.84376 0.9610716 0.842527449 0.98199589949448451 0.98199589949448451 0.84376004585690734 0.96107163116071959 0.84252741493376582 0.504585147 0.504585147 0.145245746 0.636991262 0.6387745 0.50458515122771275 0.50458515122771275 0.14524588521591403 0.63699126114775995 0.63877450359674082 -5 0.6329114 5 2.3 3.3 1 0.6329114 0.522727251 0.478260845 0.4 5 0.63291139240506322 5 2.2999999999999998 3.2999999999999998 1 0.63291139240506322 0.52272727272727271 0.47826086956521741 0.40000000000000002 1 0.205882356 0.205882356 0.09090909 0.238095239 0.2857143 0.20588235294117646 0.20588235294117646 0.090909090909090912 0.23809523809523808 0.2857142857142857 0.8465482 0.8465482 0.7464031 0.792884052 0.7021062 0.84654818921938324 0.84654818921938324 0.7464031174888025 0.79288409570759366 0.70210617911147155 0.14861621 0.14861621 0.02727463 0.508717 0.567488432 0.14861616399327332 
0.14861616399327332 0.027274649605582402 0.50871703350008446 0.56748843907683133 -5.6 0.708860755 5.6 2.7 4.2 1.3 0.708860755 0.6136364 0.6086956 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.7000000000000002 4.2000000000000002 1.3 0.70886075949367078 0.61363636363636365 0.60869565217391308 0.52000000000000002 1 0.382352948 0.382352948 0.272727281 0.428571433 0.428571433 0.38235294117647056 0.38235294117647056 0.27272727272727271 0.42857142857142855 0.42857142857142855 0.948134 0.948134 0.876212358 1.00912511 0.912738 0.94813397192570914 0.94813397192570914 0.87621235531294217 1.0091252127187555 0.91273803284491306 0.4060508 0.4060508 0.214467555 0.667766631 0.6687579 0.40605068921232229 0.40605068921232229 0.21446754872116464 0.66776662351076677 0.66875793094202918 -5.7 0.721519 5.7 3 4.2 1.2 0.721519 0.6818181 0.6086956 0.480000019 5.7000000000000002 0.72151898734177211 5.7000000000000002 3 4.2000000000000002 1.2 0.72151898734177211 0.68181818181818177 0.60869565217391308 0.47999999999999998 1 0.4117647 0.4117647 0.4090909 0.428571433 0.3809524 0.41176470588235292 0.41176470588235292 0.40909090909090912 0.42857142857142855 0.38095238095238093 0.965064943 0.965064943 0.9735693 1.00912511 0.842527449 0.96506493571009688 0.96506493571009688 0.97356928368104678 1.0091252127187555 0.84252741493376582 0.455403626 0.455403626 0.480745673 0.667766631 0.6387745 0.45540375842690767 0.45540375842690767 0.4807456731235793 0.66776662351076677 0.63877450359674082 -5.7 0.721519 5.7 2.9 4.2 1.3 0.721519 0.659090936 0.6086956 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.8999999999999999 4.2000000000000002 1.3 0.72151898734177211 0.65909090909090906 0.60869565217391308 0.52000000000000002 1 0.4117647 0.4117647 0.363636374 0.428571433 0.428571433 0.41176470588235292 0.41176470588235292 0.36363636363636365 0.42857142857142855 0.42857142857142855 0.965064943 0.965064943 0.941117 1.00912511 0.912738 0.96506493571009688 0.96506493571009688 
0.94111697422501195 1.0091252127187555 0.91273803284491306 0.455403626 0.455403626 0.386941522 0.667766631 0.6687579 0.45540375842690767 0.45540375842690767 0.38694155923435009 0.66776662351076677 0.66875793094202918 -6.2 0.7848101 6.2 2.9 4.3 1.3 0.7848101 0.659090936 0.623188436 0.52 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.8999999999999999 4.2999999999999998 1.3 0.78481012658227833 0.65909090909090906 0.62318840579710144 0.52000000000000002 1 0.5588235 0.5588235 0.363636374 0.452380955 0.428571433 0.55882352941176472 0.55882352941176472 0.36363636363636365 0.45238095238095238 0.42857142857142855 1.04971981 1.04971981 0.941117 1.033152 0.912738 1.0497197546320352 1.0497197546320352 0.94111697422501195 1.0331520034977735 0.91273803284491306 0.6861942 0.6861942 0.386941522 0.682228565 0.6687579 0.6861941068147408 0.6861941068147408 0.38694155923435009 0.68222849726345214 0.66875793094202918 -5.1 0.6455696 5.1 2.5 3 1.1 0.6455696 0.5681818 0.4347826 0.440000027 5.0999999999999996 0.64556962025316444 5.0999999999999996 2.5 3 1.1000000000000001 0.64556962025316444 0.56818181818181812 0.43478260869565222 0.44000000000000006 1 0.235294119 0.235294119 0.181818187 0.214285716 0.333333343 0.23529411764705882 0.23529411764705882 0.18181818181818182 0.21428571428571427 0.33333333333333331 0.8634792 0.8634792 0.8113077 0.720803738 0.7723168 0.86347915300377087 0.86347915300377087 0.81130773640087239 0.72080372337053966 0.7723167970226188 0.183586776 0.183586776 0.09116498 0.4439562 0.605188966 0.18358681923369741 0.18358681923369741 0.091164973250557446 0.44395614729152527 0.60518894407556645 -5.7 0.721519 5.7 2.8 4.1 1.3 0.721519 0.6363636 0.5942029 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.7999999999999998 4.0999999999999996 1.3 0.72151898734177211 0.63636363636363635 0.59420289855072461 0.52000000000000002 1 0.4117647 0.4117647 0.3181818 0.4047619 0.428571433 0.41176470588235292 0.41176470588235292 0.31818181818181818 
0.40476190476190477 0.42857142857142855 0.965064943 0.965064943 0.908664644 0.985098362 0.912738 0.96506493571009688 0.96506493571009688 0.908664664768977 0.98509842193973751 0.91273803284491306 0.455403626 0.455403626 0.296437562 0.6526925 0.6687579 0.45540375842690767 0.45540375842690767 0.29643751019242903 0.65269250269310186 0.66875793094202918 -6.3 0.797468364 6.3 3.3 6 2.5 0.797468364 0.74999994 0.8695652 1 6.2999999999999998 0.79746835443037956 6.2999999999999998 3.2999999999999998 6 2.5 0.79746835443037956 0.74999999999999989 0.86956521739130443 1 2 0.5882353 0.5882353 0.545454562 0.857142866 1 0.58823529411764708 0.58823529411764708 0.54545454545454541 0.8571428571428571 1 1.06665075 1.06665075 1.07092619 1.44160748 1.75526547 1.0666507184164229 1.0666507184164229 1.0709262120491514 1.4416074467410793 1.7552654477786789 0.72531265 0.72531265 0.733567655 0.851488352 0.8644717 0.72531248018388961 0.72531248018388961 0.73356785053506501 0.85148833619462083 0.86447170475057145 -5.8 0.734177232 5.8 2.7 5.1 1.9 0.734177232 0.6136364 0.7391304 0.76 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 5.0999999999999996 1.8999999999999999 0.73417721518987333 0.61363636363636365 0.73913043478260865 0.76000000000000001 2 0.441176474 0.441176474 0.272727281 0.642857134 0.714285731 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.6428571428571429 0.7142857142857143 0.981996 0.981996 0.876212358 1.22536623 1.33400178 0.98199589949448451 0.98199589949448451 0.87621235531294217 1.2253663297299173 1.3340017403117959 0.504585147 0.504585147 0.214467555 0.777955949 0.794432342 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.77795595083214475 0.7944323164067586 -7.1 0.898734152 7.1 3 5.9 2.1 0.898734152 0.6818181 0.855072439 0.84 7.0999999999999996 0.89873417721518967 7.0999999999999996 3 5.9000000000000004 2.1000000000000001 0.89873417721518967 0.68181818181818177 0.85507246376811608 0.84000000000000008 2 0.8235294 
0.8235294 0.4090909 0.8333333 0.8095238 0.82352941176470584 0.82352941176470584 0.40909090909090912 0.83333333333333337 0.80952380952380953 1.20209849 1.20209849 0.9735693 1.4175806 1.47442293 1.2020984286915242 1.2020984286915242 0.97356928368104678 1.4175806559620614 1.4744229761340903 0.9261487 0.9261487 0.480745673 0.8447406 0.8221373 0.92614864776751638 0.92614864776751638 0.4807456731235793 0.8447405985468005 0.82213728509573358 -6.3 0.797468364 6.3 2.9 5.6 1.8 0.797468364 0.659090936 0.8115942 0.719999969 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.8999999999999999 5.5999999999999996 1.8 0.79746835443037956 0.65909090909090906 0.81159420289855067 0.72000000000000008 2 0.5882353 0.5882353 0.363636374 0.7619048 0.6666667 0.58823529411764708 0.58823529411764708 0.36363636363636365 0.76190476190476186 0.66666666666666663 1.06665075 1.06665075 0.941117 1.34550023 1.26379108 1.0666507184164229 1.0666507184164229 0.94111697422501195 1.3455002836250074 1.2637911224006488 0.72531265 0.72531265 0.386941522 0.8225208 0.778455853 0.72531248018388961 0.72531248018388961 0.38694155923435009 0.82252078229590153 0.77845583371698512 -6.5 0.822784841 6.5 3 5.8 2.2 0.822784841 0.6818181 0.8405797 0.880000055 6.5 0.82278481012658211 6.5 3 5.7999999999999998 2.2000000000000002 0.82278481012658211 0.68181818181818177 0.84057971014492749 0.88000000000000012 2 0.647058845 0.647058845 0.4090909 0.8095238 0.857142866 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.80952380952380953 0.8571428571428571 1.10051274 1.10051274 0.9735693 1.39355385 1.54463363 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.3935538651830432 1.5446335940452376 0.7940583 0.7940583 0.480745673 0.837673366 0.834173143 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.83767331479677276 0.83417310863648386 -7.6 0.9620253 7.6 3 6.6 2.1 0.9620253 0.6818181 0.9565217 0.84 7.5999999999999996 0.962025316455696 7.5999999999999996 3 6.5999999999999996 2.1000000000000001 
0.962025316455696 0.68181818181818177 0.95652173913043481 0.84000000000000008 2 0.9411765 0.9411765 0.4090909 0.952380955 0.8095238 0.94117647058823528 0.94117647058823528 0.40909090909090912 0.95238095238095233 0.80952380952380953 1.2867533 1.2867533 0.9735693 1.5857681 1.47442293 1.2867532476134624 1.2867532476134624 0.97356928368104678 1.5857681914151873 1.4744229761340903 0.9733138 0.9733138 0.480745673 0.8860214 0.8221373 0.97331382055990801 0.97331382055990801 0.4807456731235793 0.88602142043286447 0.82213728509573358 -4.9 0.6202532 4.9 2.5 4.5 1.7 0.6202532 0.5681818 0.6521739 0.68 4.9000000000000004 0.620253164556962 4.9000000000000004 2.5 4.5 1.7 0.620253164556962 0.56818181818181812 0.65217391304347827 0.68000000000000005 2 0.1764706 0.1764706 0.181818187 0.5 0.619047642 0.17647058823529413 0.17647058823529413 0.18181818181818182 0.5 0.61904761904761907 0.829617262 0.829617262 0.8113077 1.08120561 1.19358051 0.82961722543499561 0.82961722543499561 0.81130773640087239 1.0812055850558095 1.1935805044895016 0.117838435 0.117838435 0.09116498 0.7093809 0.7608193 0.11783846996167147 0.11783846996167147 0.091164973250557446 0.70938088629442786 0.76081931222191046 -7.3 0.9240507 7.3 2.9 6.3 1.8 0.9240507 0.659090936 0.9130435 0.719999969 7.2999999999999998 0.92405063291139222 7.2999999999999998 2.8999999999999999 6.2999999999999998 1.8 0.92405063291139222 0.65909090909090906 0.91304347826086962 0.72000000000000008 2 0.882352948 0.882352948 0.363636374 0.9047619 0.6666667 0.88235294117647056 0.88235294117647056 0.36363636363636365 0.90476190476190477 0.66666666666666663 1.23596048 1.23596048 0.941117 1.51368785 1.26379108 1.2359603562602994 1.2359603562602994 0.94111697422501195 1.5136878190781333 1.2637911224006488 0.950038552 0.950038552 0.386941522 0.869953454 0.778455853 0.95003851659070837 0.95003851659070837 0.38694155923435009 0.86995338291407664 0.77845583371698512 -6.7 0.848101258 6.7 2.5 5.8 1.8 0.848101258 0.5681818 0.8405797 0.719999969 
6.7000000000000002 0.84810126582278467 6.7000000000000002 2.5 5.7999999999999998 1.8 0.84810126582278467 0.56818181818181812 0.84057971014492749 0.72000000000000008 2 0.7058824 0.7058824 0.181818187 0.8095238 0.6666667 0.70588235294117652 0.70588235294117652 0.18181818181818182 0.80952380952380953 0.66666666666666663 1.13437462 1.13437462 0.8113077 1.39355385 1.26379108 1.1343745735539736 1.1343745735539736 0.81130773640087239 1.3935538651830432 1.2637911224006488 0.84984225 0.84984225 0.09116498 0.837673366 0.778455853 0.84984228863096656 0.84984228863096656 0.091164973250557446 0.83767331479677276 0.77845583371698512 -7.2 0.9113924 7.2 3.6 6.1 2.5 0.9113924 0.818181753 0.884057939 1 7.2000000000000002 0.911392405063291 7.2000000000000002 3.6000000000000001 6.0999999999999996 2.5 0.911392405063291 0.81818181818181812 0.88405797101449268 1 2 0.852941155 0.852941155 0.6818182 0.880952358 1 0.8529411764705882 0.8529411764705882 0.68181818181818177 0.88095238095238093 1 1.21902943 1.21902943 1.1682831 1.46563423 1.75526547 1.2190293924759119 1.2190293924759119 1.1682831404172562 1.4656342375200972 1.7552654477786789 0.939083755 0.939083755 0.8919594 0.8579307 0.8644717 0.93908371694631576 0.93908371694631576 0.89195941323598249 0.85793070191741871 0.86447170475057145 -6.5 0.822784841 6.5 3.2 5.1 2 0.822784841 0.7272727 0.7391304 0.8 6.5 0.82278481012658211 6.5 3.2000000000000002 5.0999999999999996 2 0.82278481012658211 0.72727272727272729 0.73913043478260865 0.80000000000000004 2 0.647058845 0.647058845 0.5 0.642857134 0.7619048 0.6470588235294118 0.6470588235294118 0.5 0.6428571428571429 0.76190476190476186 1.10051274 1.10051274 1.038474 1.22536623 1.40421236 1.1005126459851982 1.1005126459851982 1.0384739025931167 1.2253663297299173 1.4042123582229431 0.7940583 0.7940583 0.6578964 0.777955949 0.808938 0.79405825408863862 0.79405825408863862 0.65789648182451921 0.77795595083214475 0.80893802463851161 -6.4 0.8101266 6.4 2.7 5.3 1.9 0.8101266 0.6136364 0.768115938 0.76 
6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7000000000000002 5.2999999999999998 1.8999999999999999 0.81012658227848089 0.61363636363636365 0.76811594202898548 0.76000000000000001 2 0.617647052 0.617647052 0.272727281 0.6904762 0.714285731 0.61764705882352944 0.61764705882352944 0.27272727272727271 0.69047619047619047 0.7142857142857143 1.08358181 1.08358181 0.876212358 1.27342 1.33400178 1.0835816822008106 1.0835816822008106 0.87621235531294217 1.2734199112879534 1.3340017403117959 0.7613054 0.7613054 0.214467555 0.797011137 0.794432342 0.76130542616799635 0.76130542616799635 0.21446754872116464 0.79701110606800918 0.7944323164067586 -6.8 0.860759556 6.8 3 5.5 2.1 0.860759556 0.6818181 0.797101438 0.84 6.7999999999999998 0.86075949367088589 6.7999999999999998 3 5.5 2.1000000000000001 0.86075949367088589 0.68181818181818177 0.79710144927536231 0.84000000000000008 2 0.7352941 0.7352941 0.4090909 0.7380952 0.8095238 0.73529411764705888 0.73529411764705888 0.40909090909090912 0.73809523809523814 0.80952380952380953 1.15130568 1.15130568 0.9735693 1.32147348 1.47442293 1.1513055373383612 1.1513055373383612 0.97356928368104678 1.3214734928459895 1.4744229761340903 0.873058 0.873058 0.480745673 0.814404547 0.8221373 0.87305788341059976 0.87305788341059976 0.4807456731235793 0.81440457252843534 0.82213728509573358 -5.7 0.721519 5.7 2.5 5 2 0.721519 0.5681818 0.7246376 0.8 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.5 5 2 0.72151898734177211 0.56818181818181812 0.72463768115942029 0.80000000000000004 2 0.4117647 0.4117647 0.181818187 0.619047642 0.7619048 0.41176470588235292 0.41176470588235292 0.18181818181818182 0.61904761904761907 0.76190476190476186 0.965064943 0.965064943 0.8113077 1.20133948 1.40421236 0.96506493571009688 0.96506493571009688 0.81130773640087239 1.2013395389508994 1.4042123582229431 0.455403626 0.455403626 0.09116498 0.7677612 0.808938 0.45540375842690767 0.45540375842690767 0.091164973250557446 0.76776110588492996 
0.80893802463851161 -5.8 0.734177232 5.8 2.8 5.1 2.4 0.734177232 0.6363636 0.7391304 0.960000038 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7999999999999998 5.0999999999999996 2.3999999999999999 0.73417721518987333 0.63636363636363635 0.73913043478260865 0.95999999999999996 2 0.441176474 0.441176474 0.3181818 0.642857134 0.952380955 0.44117647058823528 0.44117647058823528 0.31818181818181818 0.6428571428571429 0.95238095238095233 0.981996 0.981996 0.908664644 1.22536623 1.6850549 0.98199589949448451 0.98199589949448451 0.908664664768977 1.2253663297299173 1.6850548298675316 0.504585147 0.504585147 0.296437562 0.777955949 0.8552379 0.50458515122771275 0.50458515122771275 0.29643751019242903 0.77795595083214475 0.85523789511433224 -6.4 0.8101266 6.4 3.2 5.3 2.3 0.8101266 0.7272727 0.768115938 0.92 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.2000000000000002 5.2999999999999998 2.2999999999999998 0.81012658227848089 0.72727272727272729 0.76811594202898548 0.91999999999999993 2 0.617647052 0.617647052 0.5 0.6904762 0.9047619 0.61764705882352944 0.61764705882352944 0.5 0.69047619047619047 0.90476190476190477 1.08358181 1.08358181 1.038474 1.27342 1.6148442 1.0835816822008106 1.0835816822008106 1.0384739025931167 1.2734199112879534 1.6148442119563844 0.7613054 0.7613054 0.6578964 0.797011137 0.845170259 0.76130542616799635 0.76130542616799635 0.65789648182451921 0.79701110606800918 0.84517026625737923 -6.5 0.822784841 6.5 3 5.5 1.8 0.822784841 0.6818181 0.797101438 0.719999969 6.5 0.82278481012658211 6.5 3 5.5 1.8 0.82278481012658211 0.68181818181818177 0.79710144927536231 0.72000000000000008 2 0.647058845 0.647058845 0.4090909 0.7380952 0.6666667 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.73809523809523814 0.66666666666666663 1.10051274 1.10051274 0.9735693 1.32147348 1.26379108 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.3214734928459895 1.2637911224006488 0.7940583 0.7940583 0.480745673 0.814404547 
0.778455853 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.81440457252843534 0.77845583371698512 -7.7 0.9746835 7.7 3.8 6.7 2.2 0.9746835 0.8636363 0.97101444 0.880000055 7.7000000000000002 0.97468354430379733 7.7000000000000002 3.7999999999999998 6.7000000000000002 2.2000000000000002 0.97468354430379733 0.86363636363636354 0.97101449275362328 0.88000000000000012 2 0.9705882 0.9705882 0.772727251 0.976190448 0.857142866 0.97058823529411764 0.97058823529411764 0.77272727272727271 0.97619047619047616 0.8571428571428571 1.30368423 1.30368423 1.23318768 1.60979486 1.54463363 1.3036842113978502 1.3036842113978502 1.2331877593293259 1.6097949821942052 1.5446335940452376 0.9785672 0.9785672 0.9472266 0.8909001 0.834173143 0.97856725002805034 0.97856725002805034 0.94722658326235554 0.89090007639933866 0.83417310863648386 -7.7 0.9746835 7.7 2.6 6.9 2.3 0.9746835 0.590909064 1 0.92 7.7000000000000002 0.97468354430379733 7.7000000000000002 2.6000000000000001 6.9000000000000004 2.2999999999999998 0.97468354430379733 0.59090909090909094 1 0.91999999999999993 2 0.9705882 0.9705882 0.227272734 1 0.9047619 0.97058823529411764 0.97058823529411764 0.22727272727272727 1 0.90476190476190477 1.30368423 1.30368423 0.84376 1.6578486 1.6148442 1.3036842113978502 1.3036842113978502 0.84376004585690734 1.6578485637522413 1.6148442119563844 0.9785672 0.9785672 0.145245746 0.900005937 0.845170259 0.97856725002805034 0.97856725002805034 0.14524588521591403 0.90000590086073773 0.84517026625737923 -6 0.7594937 6 2.2 5 1.5 0.7594937 0.5 0.7246376 0.6 6 0.75949367088607578 6 2.2000000000000002 5 1.5 0.75949367088607578 0.5 0.72463768115942029 0.60000000000000009 2 0.5 0.5 0.0454545468 0.619047642 0.523809552 0.5 0.5 0.045454545454545456 0.61904761904761907 0.52380952380952384 1.01585793 1.01585793 0.7139508 1.20133948 1.05315924 1.0158578270632599 1.0158578270632599 0.71395080803276778 1.2013395389508994 1.0531592686672073 0.5995771 0.5995771 0.0126449876 0.7677612 0.719658256 
0.59957706030964308 0.59957706030964308 0.012644988485085273 0.76776110588492996 0.71965826470413374 -6.9 0.873417735 6.9 3.2 5.7 2.3 0.873417735 0.7272727 0.8260869 0.92 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.2000000000000002 5.7000000000000002 2.2999999999999998 0.87341772151898722 0.72727272727272729 0.82608695652173914 0.91999999999999993 2 0.7647059 0.7647059 0.5 0.785714269 0.9047619 0.76470588235294112 0.76470588235294112 0.5 0.7857142857142857 0.90476190476190477 1.16823661 1.16823661 1.038474 1.369527 1.6148442 1.1682365011227489 1.1682365011227489 1.0384739025931167 1.3695270744040255 1.6148442119563844 0.8933714 0.8933714 0.6578964 0.8302718 0.845170259 0.89337135404275458 0.89337135404275458 0.65789648182451921 0.83027178393794032 0.84517026625737923 -5.6 0.708860755 5.6 2.8 4.9 2 0.708860755 0.6363636 0.710144937 0.8 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.7999999999999998 4.9000000000000004 2 0.70886075949367078 0.63636363636363635 0.71014492753623193 0.80000000000000004 2 0.382352948 0.382352948 0.3181818 0.5952381 0.7619048 0.38235294117647056 0.38235294117647056 0.31818181818181818 0.59523809523809523 0.76190476190476186 0.948134 0.948134 0.908664644 1.17731273 1.40421236 0.94813397192570914 0.94813397192570914 0.908664664768977 1.1773127481718815 1.4042123582229431 0.4060508 0.4060508 0.296437562 0.757097244 0.808938 0.40605068921232229 0.40605068921232229 0.29643751019242903 0.75709721026728072 0.80893802463851161 -7.7 0.9746835 7.7 2.8 6.7 2 0.9746835 0.6363636 0.97101444 0.8 7.7000000000000002 0.97468354430379733 7.7000000000000002 2.7999999999999998 6.7000000000000002 2 0.97468354430379733 0.63636363636363635 0.97101449275362328 0.80000000000000004 2 0.9705882 0.9705882 0.3181818 0.976190448 0.7619048 0.97058823529411764 0.97058823529411764 0.31818181818181818 0.97619047619047616 0.76190476190476186 1.30368423 1.30368423 0.908664644 1.60979486 1.40421236 1.3036842113978502 1.3036842113978502 
0.908664664768977 1.6097949821942052 1.4042123582229431 0.9785672 0.9785672 0.296437562 0.8909001 0.808938 0.97856725002805034 0.97856725002805034 0.29643751019242903 0.89090007639933866 0.80893802463851161 -6.3 0.797468364 6.3 2.7 4.9 1.8 0.797468364 0.6136364 0.710144937 0.719999969 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.7000000000000002 4.9000000000000004 1.8 0.79746835443037956 0.61363636363636365 0.71014492753623193 0.72000000000000008 2 0.5882353 0.5882353 0.272727281 0.5952381 0.6666667 0.58823529411764708 0.58823529411764708 0.27272727272727271 0.59523809523809523 0.66666666666666663 1.06665075 1.06665075 0.876212358 1.17731273 1.26379108 1.0666507184164229 1.0666507184164229 0.87621235531294217 1.1773127481718815 1.2637911224006488 0.72531265 0.72531265 0.214467555 0.757097244 0.778455853 0.72531248018388961 0.72531248018388961 0.21446754872116464 0.75709721026728072 0.77845583371698512 -6.7 0.848101258 6.7 3.3 5.7 2.1 0.848101258 0.74999994 0.8260869 0.84 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.2999999999999998 5.7000000000000002 2.1000000000000001 0.84810126582278467 0.74999999999999989 0.82608695652173914 0.84000000000000008 2 0.7058824 0.7058824 0.545454562 0.785714269 0.8095238 0.70588235294117652 0.70588235294117652 0.54545454545454541 0.7857142857142857 0.80952380952380953 1.13437462 1.13437462 1.07092619 1.369527 1.47442293 1.1343745735539736 1.1343745735539736 1.0709262120491514 1.3695270744040255 1.4744229761340903 0.84984225 0.84984225 0.733567655 0.8302718 0.8221373 0.84984228863096656 0.84984228863096656 0.73356785053506501 0.83027178393794032 0.82213728509573358 -7.2 0.9113924 7.2 3.2 6 1.8 0.9113924 0.7272727 0.8695652 0.719999969 7.2000000000000002 0.911392405063291 7.2000000000000002 3.2000000000000002 6 1.8 0.911392405063291 0.72727272727272729 0.86956521739130443 0.72000000000000008 2 0.852941155 0.852941155 0.5 0.857142866 0.6666667 0.8529411764705882 0.8529411764705882 0.5 0.8571428571428571 
0.66666666666666663 1.21902943 1.21902943 1.038474 1.44160748 1.26379108 1.2190293924759119 1.2190293924759119 1.0384739025931167 1.4416074467410793 1.2637911224006488 0.939083755 0.939083755 0.6578964 0.851488352 0.778455853 0.93908371694631576 0.93908371694631576 0.65789648182451921 0.85148833619462083 0.77845583371698512 -6.2 0.7848101 6.2 2.8 4.8 1.8 0.7848101 0.6363636 0.6956522 0.719999969 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.7999999999999998 4.7999999999999998 1.8 0.78481012658227833 0.63636363636363635 0.69565217391304346 0.72000000000000008 2 0.5588235 0.5588235 0.3181818 0.5714286 0.6666667 0.55882352941176472 0.55882352941176472 0.31818181818181818 0.5714285714285714 0.66666666666666663 1.04971981 1.04971981 0.908664644 1.153286 1.26379108 1.0497197546320352 1.0497197546320352 0.908664664768977 1.1532859573928635 1.2637911224006488 0.6861942 0.6861942 0.296437562 0.7459458 0.778455853 0.6861941068147408 0.6861941068147408 0.29643751019242903 0.74594581373449664 0.77845583371698512 -6.1 0.7721519 6.1 3 4.9 1.8 0.7721519 0.6818181 0.710144937 0.719999969 6.0999999999999996 0.772151898734177 6.0999999999999996 3 4.9000000000000004 1.8 0.772151898734177 0.68181818181818177 0.71014492753623193 0.72000000000000008 2 0.5294118 0.5294118 0.4090909 0.5952381 0.6666667 0.52941176470588236 0.52941176470588236 0.40909090909090912 0.59523809523809523 0.66666666666666663 1.03278887 1.03278887 0.9735693 1.17731273 1.26379108 1.0327887908476474 1.0327887908476474 0.97356928368104678 1.1773127481718815 1.2637911224006488 0.644171059 0.644171059 0.480745673 0.757097244 0.778455853 0.64417093822767468 0.64417093822767468 0.4807456731235793 0.75709721026728072 0.77845583371698512 -6.4 0.8101266 6.4 2.8 5.6 2.1 0.8101266 0.6363636 0.8115942 0.84 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7999999999999998 5.5999999999999996 2.1000000000000001 0.81012658227848089 0.63636363636363635 0.81159420289855067 0.84000000000000008 2 0.617647052 
0.617647052 0.3181818 0.7619048 0.8095238 0.61764705882352944 0.61764705882352944 0.31818181818181818 0.76190476190476186 0.80952380952380953 1.08358181 1.08358181 0.908664644 1.34550023 1.47442293 1.0835816822008106 1.0835816822008106 0.908664664768977 1.3455002836250074 1.4744229761340903 0.7613054 0.7613054 0.296437562 0.8225208 0.8221373 0.76130542616799635 0.76130542616799635 0.29643751019242903 0.82252078229590153 0.82213728509573358 -7.2 0.9113924 7.2 3 5.8 1.6 0.9113924 0.6818181 0.8405797 0.640000045 7.2000000000000002 0.911392405063291 7.2000000000000002 3 5.7999999999999998 1.6000000000000001 0.911392405063291 0.68181818181818177 0.84057971014492749 0.64000000000000012 2 0.852941155 0.852941155 0.4090909 0.8095238 0.5714286 0.8529411764705882 0.8529411764705882 0.40909090909090912 0.80952380952380953 0.5714285714285714 1.21902943 1.21902943 0.9735693 1.39355385 1.12336993 1.2190293924759119 1.2190293924759119 0.97356928368104678 1.3935538651830432 1.1233698865783546 0.939083755 0.939083755 0.480745673 0.837673366 0.7413043 0.93908371694631576 0.93908371694631576 0.4807456731235793 0.83767331479677276 0.74130430666213609 -7.4 0.936708868 7.4 2.8 6.1 1.9 0.936708868 0.6363636 0.884057939 0.76 7.4000000000000004 0.93670886075949356 7.4000000000000004 2.7999999999999998 6.0999999999999996 1.8999999999999999 0.93670886075949356 0.63636363636363635 0.88405797101449268 0.76000000000000001 2 0.9117647 0.9117647 0.3181818 0.880952358 0.714285731 0.91176470588235292 0.91176470588235292 0.31818181818181818 0.88095238095238093 0.7142857142857143 1.25289142 1.25289142 0.908664644 1.46563423 1.33400178 1.2528913200446872 1.2528913200446872 0.908664664768977 1.4656342375200972 1.3340017403117959 0.959248662 0.959248662 0.296437562 0.8579307 0.794432342 0.95924858044400851 0.95924858044400851 0.29643751019242903 0.85793070191741871 0.7944323164067586 -7.9 1 7.9 3.8 6.4 2 1 0.8636363 0.9275362 0.8 7.9000000000000004 0.99999999999999989 7.9000000000000004 
3.7999999999999998 6.4000000000000004 2 0.99999999999999989 0.86363636363636354 0.92753623188405809 0.80000000000000004 2 1 1 0.772727251 0.9285714 0.7619048 1 1 0.77272727272727271 0.9285714285714286 0.76190476190476186 1.33754623 1.33754623 1.23318768 1.5377146 1.40421236 1.3375461389666257 1.3375461389666257 1.2331877593293259 1.5377146098571515 1.4042123582229431 0.9863704 0.9863704 0.9472266 0.875559449 0.808938 0.98637034929396195 0.98637034929396195 0.94722658326235554 0.87555942778912454 0.80893802463851161 -6.4 0.8101266 6.4 2.8 5.6 2.2 0.8101266 0.6363636 0.8115942 0.880000055 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7999999999999998 5.5999999999999996 2.2000000000000002 0.81012658227848089 0.63636363636363635 0.81159420289855067 0.88000000000000012 2 0.617647052 0.617647052 0.3181818 0.7619048 0.857142866 0.61764705882352944 0.61764705882352944 0.31818181818181818 0.76190476190476186 0.8571428571428571 1.08358181 1.08358181 0.908664644 1.34550023 1.54463363 1.0835816822008106 1.0835816822008106 0.908664664768977 1.3455002836250074 1.5446335940452376 0.7613054 0.7613054 0.296437562 0.8225208 0.834173143 0.76130542616799635 0.76130542616799635 0.29643751019242903 0.82252078229590153 0.83417310863648386 -6.3 0.797468364 6.3 2.8 5.1 1.5 0.797468364 0.6363636 0.7391304 0.6 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.7999999999999998 5.0999999999999996 1.5 0.79746835443037956 0.63636363636363635 0.73913043478260865 0.60000000000000009 2 0.5882353 0.5882353 0.3181818 0.642857134 0.523809552 0.58823529411764708 0.58823529411764708 0.31818181818181818 0.6428571428571429 0.52380952380952384 1.06665075 1.06665075 0.908664644 1.22536623 1.05315924 1.0666507184164229 1.0666507184164229 0.908664664768977 1.2253663297299173 1.0531592686672073 0.72531265 0.72531265 0.296437562 0.777955949 0.719658256 0.72531248018388961 0.72531248018388961 0.29643751019242903 0.77795595083214475 0.71965826470413374 -6.1 0.7721519 6.1 2.6 5.6 1.4 
0.7721519 0.590909064 0.8115942 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 2.6000000000000001 5.5999999999999996 1.3999999999999999 0.772151898734177 0.59090909090909094 0.81159420289855067 0.55999999999999994 2 0.5294118 0.5294118 0.227272734 0.7619048 0.476190478 0.52941176470588236 0.52941176470588236 0.22727272727272727 0.76190476190476186 0.47619047619047616 1.03278887 1.03278887 0.84376 1.34550023 0.982948661 1.0327887908476474 1.0327887908476474 0.84376004585690734 1.3455002836250074 0.98294865075606008 0.644171059 0.644171059 0.145245746 0.8225208 0.6955889 0.64417093822767468 0.64417093822767468 0.14524588521591403 0.82252078229590153 0.69558889835906823 -7.7 0.9746835 7.7 3 6.1 2.3 0.9746835 0.6818181 0.884057939 0.92 7.7000000000000002 0.97468354430379733 7.7000000000000002 3 6.0999999999999996 2.2999999999999998 0.97468354430379733 0.68181818181818177 0.88405797101449268 0.91999999999999993 2 0.9705882 0.9705882 0.4090909 0.880952358 0.9047619 0.97058823529411764 0.97058823529411764 0.40909090909090912 0.88095238095238093 0.90476190476190477 1.30368423 1.30368423 0.9735693 1.46563423 1.6148442 1.3036842113978502 1.3036842113978502 0.97356928368104678 1.4656342375200972 1.6148442119563844 0.9785672 0.9785672 0.480745673 0.8579307 0.845170259 0.97856725002805034 0.97856725002805034 0.4807456731235793 0.85793070191741871 0.84517026625737923 -6.3 0.797468364 6.3 3.4 5.6 2.4 0.797468364 0.772727251 0.8115942 0.960000038 6.2999999999999998 0.79746835443037956 6.2999999999999998 3.3999999999999999 5.5999999999999996 2.3999999999999999 0.79746835443037956 0.77272727272727271 0.81159420289855067 0.95999999999999996 2 0.5882353 0.5882353 0.590909064 0.7619048 0.952380955 0.58823529411764708 0.58823529411764708 0.59090909090909094 0.76190476190476186 0.95238095238095233 1.06665075 1.06665075 1.10337853 1.34550023 1.6850549 1.0666507184164229 1.0666507184164229 1.1033785215051863 1.3455002836250074 1.6850548298675316 0.72531265 0.72531265 
0.797875941 0.8225208 0.8552379 0.72531248018388961 0.72531248018388961 0.79787580879856601 0.82252078229590153 0.85523789511433224 -6.4 0.8101266 6.4 3.1 5.5 1.8 0.8101266 0.7045454 0.797101438 0.719999969 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.1000000000000001 5.5 1.8 0.81012658227848089 0.70454545454545459 0.79710144927536231 0.72000000000000008 2 0.617647052 0.617647052 0.454545468 0.7380952 0.6666667 0.61764705882352944 0.61764705882352944 0.45454545454545453 0.73809523809523814 0.66666666666666663 1.08358181 1.08358181 1.0060215 1.32147348 1.26379108 1.0835816822008106 1.0835816822008106 1.0060215931370817 1.3214734928459895 1.2637911224006488 0.7613054 0.7613054 0.5725629 0.814404547 0.778455853 0.76130542616799635 0.76130542616799635 0.5725628341629212 0.81440457252843534 0.77845583371698512 -6 0.7594937 6 3 4.8 1.8 0.7594937 0.6818181 0.6956522 0.719999969 6 0.75949367088607578 6 3 4.7999999999999998 1.8 0.75949367088607578 0.68181818181818177 0.69565217391304346 0.72000000000000008 2 0.5 0.5 0.4090909 0.5714286 0.6666667 0.5 0.5 0.40909090909090912 0.5714285714285714 0.66666666666666663 1.01585793 1.01585793 0.9735693 1.153286 1.26379108 1.0158578270632599 1.0158578270632599 0.97356928368104678 1.1532859573928635 1.2637911224006488 0.5995771 0.5995771 0.480745673 0.7459458 0.778455853 0.59957706030964308 0.59957706030964308 0.4807456731235793 0.74594581373449664 0.77845583371698512 -6.9 0.873417735 6.9 3.1 5.4 2.1 0.873417735 0.7045454 0.7826087 0.84 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 5.4000000000000004 2.1000000000000001 0.87341772151898722 0.70454545454545459 0.78260869565217395 0.84000000000000008 2 0.7647059 0.7647059 0.454545468 0.714285731 0.8095238 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.7142857142857143 0.80952380952380953 1.16823661 1.16823661 1.0060215 1.29744673 1.47442293 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.2974467020669715 
1.4744229761340903 0.8933714 0.8933714 0.5725629 0.805906951 0.8221373 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.80590691835886541 0.82213728509573358 -6.7 0.848101258 6.7 3.1 5.6 2.4 0.848101258 0.7045454 0.8115942 0.960000038 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 5.5999999999999996 2.3999999999999999 0.84810126582278467 0.70454545454545459 0.81159420289855067 0.95999999999999996 2 0.7058824 0.7058824 0.454545468 0.7619048 0.952380955 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.76190476190476186 0.95238095238095233 1.13437462 1.13437462 1.0060215 1.34550023 1.6850549 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.3455002836250074 1.6850548298675316 0.84984225 0.84984225 0.5725629 0.8225208 0.8552379 0.84984228863096656 0.84984228863096656 0.5725628341629212 0.82252078229590153 0.85523789511433224 -6.9 0.873417735 6.9 3.1 5.1 2.3 0.873417735 0.7045454 0.7391304 0.92 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 5.0999999999999996 2.2999999999999998 0.87341772151898722 0.70454545454545459 0.73913043478260865 0.91999999999999993 2 0.7647059 0.7647059 0.454545468 0.642857134 0.9047619 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.6428571428571429 0.90476190476190477 1.16823661 1.16823661 1.0060215 1.22536623 1.6148442 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.2253663297299173 1.6148442119563844 0.8933714 0.8933714 0.5725629 0.777955949 0.845170259 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.77795595083214475 0.84517026625737923 -5.8 0.734177232 5.8 2.7 5.1 1.9 0.734177232 0.6136364 0.7391304 0.76 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 5.0999999999999996 1.8999999999999999 0.73417721518987333 0.61363636363636365 0.73913043478260865 0.76000000000000001 2 0.441176474 0.441176474 0.272727281 0.642857134 0.714285731 0.44117647058823528 0.44117647058823528 
0.27272727272727271 0.6428571428571429 0.7142857142857143 0.981996 0.981996 0.876212358 1.22536623 1.33400178 0.98199589949448451 0.98199589949448451 0.87621235531294217 1.2253663297299173 1.3340017403117959 0.504585147 0.504585147 0.214467555 0.777955949 0.794432342 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.77795595083214475 0.7944323164067586 -6.8 0.860759556 6.8 3.2 5.9 2.3 0.860759556 0.7272727 0.855072439 0.92 6.7999999999999998 0.86075949367088589 6.7999999999999998 3.2000000000000002 5.9000000000000004 2.2999999999999998 0.86075949367088589 0.72727272727272729 0.85507246376811608 0.91999999999999993 2 0.7352941 0.7352941 0.5 0.8333333 0.9047619 0.73529411764705888 0.73529411764705888 0.5 0.83333333333333337 0.90476190476190477 1.15130568 1.15130568 1.038474 1.4175806 1.6148442 1.1513055373383612 1.1513055373383612 1.0384739025931167 1.4175806559620614 1.6148442119563844 0.873058 0.873058 0.6578964 0.8447406 0.845170259 0.87305788341059976 0.87305788341059976 0.65789648182451921 0.8447405985468005 0.84517026625737923 -6.7 0.848101258 6.7 3.3 5.7 2.5 0.848101258 0.74999994 0.8260869 1 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.2999999999999998 5.7000000000000002 2.5 0.84810126582278467 0.74999999999999989 0.82608695652173914 1 2 0.7058824 0.7058824 0.545454562 0.785714269 1 0.70588235294117652 0.70588235294117652 0.54545454545454541 0.7857142857142857 1 1.13437462 1.13437462 1.07092619 1.369527 1.75526547 1.1343745735539736 1.1343745735539736 1.0709262120491514 1.3695270744040255 1.7552654477786789 0.84984225 0.84984225 0.733567655 0.8302718 0.8644717 0.84984228863096656 0.84984228863096656 0.73356785053506501 0.83027178393794032 0.86447170475057145 -6.7 0.848101258 6.7 3 5.2 2.3 0.848101258 0.6818181 0.7536231 0.92 6.7000000000000002 0.84810126582278467 6.7000000000000002 3 5.2000000000000002 2.2999999999999998 0.84810126582278467 0.68181818181818177 0.75362318840579712 0.91999999999999993 2 0.7058824 0.7058824 0.4090909 
0.6666667 0.9047619 0.70588235294117652 0.70588235294117652 0.40909090909090912 0.66666666666666663 0.90476190476190477 1.13437462 1.13437462 0.9735693 1.24939311 1.6148442 1.1343745735539736 1.1343745735539736 0.97356928368104678 1.2493931205089355 1.6148442119563844 0.84984225 0.84984225 0.480745673 0.7877 0.845170259 0.84984228863096656 0.84984228863096656 0.4807456731235793 0.78769997391633806 0.84517026625737923 -6.3 0.797468364 6.3 2.5 5 1.9 0.797468364 0.5681818 0.7246376 0.76 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.5 5 1.8999999999999999 0.79746835443037956 0.56818181818181812 0.72463768115942029 0.76000000000000001 2 0.5882353 0.5882353 0.181818187 0.619047642 0.714285731 0.58823529411764708 0.58823529411764708 0.18181818181818182 0.61904761904761907 0.7142857142857143 1.06665075 1.06665075 0.8113077 1.20133948 1.33400178 1.0666507184164229 1.0666507184164229 0.81130773640087239 1.2013395389508994 1.3340017403117959 0.72531265 0.72531265 0.09116498 0.7677612 0.794432342 0.72531248018388961 0.72531248018388961 0.091164973250557446 0.76776110588492996 0.7944323164067586 -6.5 0.822784841 6.5 3 5.2 2 0.822784841 0.6818181 0.7536231 0.8 6.5 0.82278481012658211 6.5 3 5.2000000000000002 2 0.82278481012658211 0.68181818181818177 0.75362318840579712 0.80000000000000004 2 0.647058845 0.647058845 0.4090909 0.6666667 0.7619048 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.66666666666666663 0.76190476190476186 1.10051274 1.10051274 0.9735693 1.24939311 1.40421236 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.2493931205089355 1.4042123582229431 0.7940583 0.7940583 0.480745673 0.7877 0.808938 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.78769997391633806 0.80893802463851161 -6.2 0.7848101 6.2 3.4 5.4 2.3 0.7848101 0.772727251 0.7826087 0.92 6.2000000000000002 0.78481012658227833 6.2000000000000002 3.3999999999999999 5.4000000000000004 2.2999999999999998 0.78481012658227833 0.77272727272727271 
0.78260869565217395 0.91999999999999993 2 0.5588235 0.5588235 0.590909064 0.714285731 0.9047619 0.55882352941176472 0.55882352941176472 0.59090909090909094 0.7142857142857143 0.90476190476190477 1.04971981 1.04971981 1.10337853 1.29744673 1.6148442 1.0497197546320352 1.0497197546320352 1.1033785215051863 1.2974467020669715 1.6148442119563844 0.6861942 0.6861942 0.797875941 0.805906951 0.845170259 0.6861941068147408 0.6861941068147408 0.79787580879856601 0.80590691835886541 0.84517026625737923 -5.9 0.7468355 5.9 3 5.1 1.8 0.7468355 0.6818181 0.7391304 0.719999969 5.9000000000000004 0.74683544303797467 5.9000000000000004 3 5.0999999999999996 1.8 0.74683544303797467 0.68181818181818177 0.73913043478260865 0.72000000000000008 2 0.470588237 0.470588237 0.4090909 0.642857134 0.6666667 0.47058823529411764 0.47058823529411764 0.40909090909090912 0.6428571428571429 0.66666666666666663 0.998926938 0.998926938 0.9735693 1.22536623 1.26379108 0.99892686327887226 0.99892686327887226 0.97356928368104678 1.2253663297299173 1.2637911224006488 0.5528621 0.5528621 0.480745673 0.777955949 0.778455853 0.55286190159866166 0.55286190159866166 0.4807456731235793 0.77795595083214475 0.77845583371698512 +float1 float1 5.1 3.5 1.4 0.2 5.1 3.5 1.4 0.2 double1 double1 5.1 3.5 1.4 0.2 5.1 3.5 1.4 0.2 int1 float1bin 5.1 3.5 1.4 0.2 double1bin 5.1 3.5 1.4 0.2 float1supervisedbin 5.1 3.5 1.4 0.2 double1supervisedbin 5.1 3.5 1.4 0.2 float1mv 5.1 3.5 1.4 0.2 double1mv 5.1 3.5 1.4 0.2 float1lmv 5.1 3.5 1.4 0.2 double1lmv 5.1 3.5 1.4 0.2 +4.9 0.6202532 4.9 3 1.4 0.2 0.6202532 0.6818181 0.202898547 0.0800000057 4.9000000000000004 0.620253164556962 4.9000000000000004 3 1.3999999999999999 0.20000000000000001 0.620253164556962 0.68181818181818177 0.20289855072463767 0.080000000000000016 0 0.1764706 0.1764706 0.4090909 0.0952381 0.04761905 0.17647058823529413 0.17647058823529413 0.40909090909090912 0.095238095238095233 0.047619047619047616 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 
0.33333333333333331 0.5 0 0 0.829617262 0.829617262 0.9735693 0.336375058 0.140421242 0.82961722543499561 0.82961722543499561 0.97356928368104678 0.33637507090625185 0.14042123582229432 0.117838435 0.117838435 0.480745673 0.07455328 0.07146728 0.11783846996167147 0.11783846996167147 0.4807456731235793 0.074553292690365869 0.071467286508792471 +4.7 0.594936669 4.7 3.2 1.3 0.2 0.594936669 0.7272727 0.188405782 0.0800000057 4.7000000000000002 0.59493670886075944 4.7000000000000002 3.2000000000000002 1.3 0.20000000000000001 0.59493670886075944 0.72727272727272729 0.18840579710144928 0.080000000000000016 0 0.117647059 0.117647059 0.5 0.0714285746 0.04761905 0.11764705882352941 0.11764705882352941 0.5 0.071428571428571425 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.7957553 0.7957553 1.038474 0.312348276 0.140421242 0.79575529786622023 0.79575529786622023 1.0384739025931167 0.31234828012723387 0.14042123582229432 0.06917418 0.06917418 0.6578964 0.0582755022 0.07146728 0.069174201507542998 0.069174201507542998 0.65789648182451921 0.058275496366795021 0.071467286508792471 +4.6 0.5822785 4.6 3.1 1.5 0.2 0.5822785 0.7045454 0.2173913 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.1000000000000001 1.5 0.20000000000000001 0.58227848101265811 0.70454545454545459 0.21739130434782611 0.080000000000000016 0 0.0882353 0.0882353 0.454545468 0.119047619 0.04761905 0.088235294117647065 0.088235294117647065 0.45454545454545453 0.11904761904761904 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.7788243 0.7788243 1.0060215 0.360401869 0.140421242 0.77882433408183249 0.77882433408183249 1.0060215931370817 0.36040186168526983 0.14042123582229432 0.0510348342 0.0510348342 0.5725629 0.0926237255 0.07146728 0.05103484272829939 0.05103484272829939 0.5725628341629212 0.092623715229236292 0.071467286508792471 +5 0.6329114 5 3.6 1.4 0.2 0.6329114 0.818181753 0.202898547 0.0800000057 5 0.63291139240506322 5 3.6000000000000001 1.3999999999999999 0.20000000000000001 
0.63291139240506322 0.81818181818181812 0.20289855072463767 0.080000000000000016 0 0.205882356 0.205882356 0.6818182 0.0952381 0.04761905 0.20588235294117646 0.20588235294117646 0.68181818181818177 0.095238095238095233 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8465482 0.8465482 1.1682831 0.336375058 0.140421242 0.84654818921938324 0.84654818921938324 1.1682831404172562 0.33637507090625185 0.14042123582229432 0.14861621 0.14861621 0.8919594 0.07455328 0.07146728 0.14861616399327332 0.14861616399327332 0.89195941323598249 0.074553292690365869 0.071467286508792471 +5.4 0.683544338 5.4 3.9 1.7 0.4 0.683544338 0.8863636 0.246376812 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.8999999999999999 1.7 0.40000000000000002 0.68354430379746833 0.88636363636363635 0.24637681159420291 0.16000000000000003 0 0.323529422 0.323529422 0.8181818 0.166666672 0.142857149 0.3235294117647059 0.3235294117647059 0.81818181818181823 0.16666666666666666 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.9142721 0.9142721 1.26564014 0.408455431 0.280842483 0.91427204435693399 0.91427204435693399 1.2656400687853608 0.40845544324330579 0.28084247164458864 0.3099612 0.3099612 0.9642299 0.13329801 0.223406911 0.30996111189240849 0.30996111189240849 0.96422985148785167 0.1332979854433643 0.22340689032507804 +4.6 0.5822785 4.6 3.4 1.4 0.3 0.5822785 0.772727251 0.202898547 0.120000005 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.3999999999999999 1.3999999999999999 0.29999999999999999 0.58227848101265811 0.77272727272727271 0.20289855072463767 0.12 0 0.0882353 0.0882353 0.590909064 0.0952381 0.0952381 0.088235294117647065 0.088235294117647065 0.59090909090909094 0.095238095238095233 0.095238095238095233 0 0 1 0 0 0 0 1 0 0 0.7788243 0.7788243 1.10337853 0.336375058 0.210631862 0.77882433408183249 0.77882433408183249 1.1033785215051863 0.33637507090625185 
0.21063185373344145 0.0510348342 0.0510348342 0.797875941 0.07455328 0.146190211 0.05103484272829939 0.05103484272829939 0.79787580879856601 0.074553292690365869 0.14619023377705653 +5 0.6329114 5 3.4 1.5 0.2 0.6329114 0.772727251 0.2173913 0.0800000057 5 0.63291139240506322 5 3.3999999999999999 1.5 0.20000000000000001 0.63291139240506322 0.77272727272727271 0.21739130434782611 0.080000000000000016 0 0.205882356 0.205882356 0.590909064 0.119047619 0.04761905 0.20588235294117646 0.20588235294117646 0.59090909090909094 0.11904761904761904 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8465482 0.8465482 1.10337853 0.360401869 0.140421242 0.84654818921938324 0.84654818921938324 1.1033785215051863 0.36040186168526983 0.14042123582229432 0.14861621 0.14861621 0.797875941 0.0926237255 0.07146728 0.14861616399327332 0.14861616399327332 0.79787580879856601 0.092623715229236292 0.071467286508792471 +4.4 0.5569621 4.4 2.9 1.4 0.2 0.5569621 0.659090936 0.202898547 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 2.8999999999999999 1.3999999999999999 0.20000000000000001 0.55696202531645567 0.65909090909090906 0.20289855072463767 0.080000000000000016 0 0.0294117648 0.0294117648 0.363636374 0.0952381 0.04761905 0.029411764705882353 0.029411764705882353 0.36363636363636365 0.095238095238095233 0.047619047619047616 0 0 0 0 0 0 0 0 0 0 0.744962454 0.744962454 0.941117 0.336375058 0.140421242 0.74496240651305734 0.74496240651305734 0.94111697422501195 0.33637507090625185 0.14042123582229432 0.0255098473 0.0255098473 0.386941522 0.07455328 0.07146728 0.025509830781751675 0.025509830781751675 0.38694155923435009 0.074553292690365869 0.071467286508792471 +4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 
0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 0.092623715229236292 0.014973996264087019 +5.4 0.683544338 5.4 3.7 1.5 0.2 0.683544338 0.840909064 0.2173913 0.0800000057 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.7000000000000002 1.5 0.20000000000000001 0.68354430379746833 0.84090909090909094 0.21739130434782611 0.080000000000000016 0 0.323529422 0.323529422 0.727272749 0.119047619 0.04761905 0.3235294117647059 0.3235294117647059 0.72727272727272729 0.11904761904761904 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.9142721 0.9142721 1.20073545 0.360401869 0.140421242 0.91427204435693399 0.91427204435693399 1.2007354498732912 0.36040186168526983 0.14042123582229432 0.3099612 0.3099612 0.9236834 0.0926237255 0.07146728 0.30996111189240849 0.30996111189240849 0.92368343544686704 0.092623715229236292 0.071467286508792471 +4.8 0.607594967 4.8 3.4 1.6 0.2 0.607594967 0.772727251 0.231884047 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.3999999999999999 1.6000000000000001 0.20000000000000001 0.60759493670886067 0.77272727272727271 0.23188405797101452 0.080000000000000016 0 0.14705883 0.14705883 0.590909064 0.142857149 0.04761905 0.14705882352941177 0.14705882352941177 0.59090909090909094 0.14285714285714285 0.047619047619047616 0 0 1 0 0 0 0 1 0 0 0.8126863 0.8126863 1.10337853 0.38442865 0.140421242 0.81268626165060787 0.81268626165060787 1.1033785215051863 0.38442865246428787 0.14042123582229432 0.09137413 0.09137413 0.797875941 0.112279132 0.07146728 
0.091374136005250073 0.091374136005250073 0.79787580879856601 0.11227913256984562 0.071467286508792471 +4.8 0.607594967 4.8 3 1.4 0.1 0.607594967 0.6818181 0.202898547 0.0400000028 4.7999999999999998 0.60759493670886067 4.7999999999999998 3 1.3999999999999999 0.10000000000000001 0.60759493670886067 0.68181818181818177 0.20289855072463767 0.040000000000000008 0 0.14705883 0.14705883 0.4090909 0.0952381 0 0.14705882352941177 0.14705882352941177 0.40909090909090912 0.095238095238095233 0 0 0 0.5 0 0 0 0 0.5 0 0 0.8126863 0.8126863 0.9735693 0.336375058 0.07021062 0.81268626165060787 0.81268626165060787 0.97356928368104678 0.33637507090625185 0.070210617911147161 0.09137413 0.09137413 0.480745673 0.07455328 0.0149739943 0.091374136005250073 0.091374136005250073 0.4807456731235793 0.074553292690365869 0.014973996264087019 +4.3 0.544303834 4.3 3 1.1 0.1 0.544303834 0.6818181 0.159420282 0.0400000028 4.2999999999999998 0.54430379746835433 4.2999999999999998 3 1.1000000000000001 0.10000000000000001 0.54430379746835433 0.68181818181818177 0.15942028985507248 0.040000000000000008 0 0 0 0.4090909 0.0238095243 0 0 0 0.40909090909090912 0.023809523809523808 0 0 0 0.5 0 0 0 0 0.5 0 0 0.7280315 0.7280315 0.9735693 0.264294684 0.07021062 0.7280314427286696 0.7280314427286696 0.97356928368104678 0.26429469856919791 0.070210617911147161 0.01720981 0.01720981 0.480745673 0.0317756645 0.0149739943 0.017209798931850762 0.017209798931850762 0.4807456731235793 0.031775654639957629 0.014973996264087019 +5.8 0.734177232 5.8 4 1.2 0.2 0.734177232 0.9090909 0.173913047 0.0800000057 5.7999999999999998 0.73417721518987333 5.7999999999999998 4 1.2 0.20000000000000001 0.73417721518987333 0.90909090909090906 0.17391304347826086 0.080000000000000016 0 0.441176474 0.441176474 0.8636364 0.04761905 0.04761905 0.44117647058823528 0.44117647058823528 0.86363636363636365 0.047619047619047616 0.047619047619047616 0.6666667 0.6666667 1 0 0 0.66666666666666663 0.66666666666666663 1 0 0 0.981996 0.981996 
1.29809237 0.2883215 0.140421242 0.98199589949448451 0.98199589949448451 1.2980923782413958 0.28832148934821589 0.14042123582229432 0.504585147 0.504585147 0.9762056 0.0439706929 0.07146728 0.50458515122771275 0.50458515122771275 0.9762055888000436 0.043970678884132197 0.071467286508792471 +5.7 0.721519 5.7 4.4 1.5 0.4 0.721519 1 0.2173913 0.160000011 5.7000000000000002 0.72151898734177211 5.7000000000000002 4.4000000000000004 1.5 0.40000000000000002 0.72151898734177211 1 0.21739130434782611 0.16000000000000003 0 0.4117647 0.4117647 1 0.119047619 0.142857149 0.41176470588235292 0.41176470588235292 1 0.11904761904761904 0.14285714285714285 0.6666667 0.6666667 1 0 0 0.66666666666666663 0.66666666666666663 1 0 0 0.965064943 0.965064943 1.42790163 0.360401869 0.280842483 0.96506493571009688 0.96506493571009688 1.4279016160655356 0.36040186168526983 0.28084247164458864 0.455403626 0.455403626 0.996043563 0.0926237255 0.223406911 0.45540375842690767 0.45540375842690767 0.99604355189588412 0.092623715229236292 0.22340689032507804 +5.4 0.683544338 5.4 3.9 1.3 0.4 0.683544338 0.8863636 0.188405782 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.8999999999999999 1.3 0.40000000000000002 0.68354430379746833 0.88636363636363635 0.18840579710144928 0.16000000000000003 0 0.323529422 0.323529422 0.8181818 0.0714285746 0.142857149 0.3235294117647059 0.3235294117647059 0.81818181818181823 0.071428571428571425 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.9142721 0.9142721 1.26564014 0.312348276 0.280842483 0.91427204435693399 0.91427204435693399 1.2656400687853608 0.31234828012723387 0.28084247164458864 0.3099612 0.3099612 0.9642299 0.0582755022 0.223406911 0.30996111189240849 0.30996111189240849 0.96422985148785167 0.058275496366795021 0.22340689032507804 +5.1 0.6455696 5.1 3.5 1.4 0.3 0.6455696 0.7954545 0.202898547 0.120000005 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.5 
1.3999999999999999 0.29999999999999999 0.64556962025316444 0.79545454545454541 0.20289855072463767 0.12 0 0.235294119 0.235294119 0.6363636 0.0952381 0.0952381 0.23529411764705882 0.23529411764705882 0.63636363636363635 0.095238095238095233 0.095238095238095233 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.13583088 0.336375058 0.210631862 0.86347915300377087 0.86347915300377087 1.1358308309612213 0.33637507090625185 0.21063185373344145 0.183586776 0.183586776 0.8504554 0.07455328 0.146190211 0.18358681923369741 0.18358681923369741 0.8504555107896975 0.074553292690365869 0.14619023377705653 +5.7 0.721519 5.7 3.8 1.7 0.3 0.721519 0.8636363 0.246376812 0.120000005 5.7000000000000002 0.72151898734177211 5.7000000000000002 3.7999999999999998 1.7 0.29999999999999999 0.72151898734177211 0.86363636363636354 0.24637681159420291 0.12 0 0.4117647 0.4117647 0.772727251 0.166666672 0.0952381 0.41176470588235292 0.41176470588235292 0.77272727272727271 0.16666666666666666 0.095238095238095233 0.6666667 0.6666667 1 0 0 0.66666666666666663 0.66666666666666663 1 0 0 0.965064943 0.965064943 1.23318768 0.408455431 0.210631862 0.96506493571009688 0.96506493571009688 1.2331877593293259 0.40845544324330579 0.21063185373344145 0.455403626 0.455403626 0.9472266 0.13329801 0.146190211 0.45540375842690767 0.45540375842690767 0.94722658326235554 0.1332979854433643 0.14619023377705653 +5.1 0.6455696 5.1 3.8 1.5 0.3 0.6455696 0.8636363 0.2173913 0.120000005 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.5 0.29999999999999999 0.64556962025316444 0.86363636363636354 0.21739130434782611 0.12 0 0.235294119 0.235294119 0.772727251 0.119047619 0.0952381 0.23529411764705882 0.23529411764705882 0.77272727272727271 0.11904761904761904 0.095238095238095233 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.23318768 0.360401869 0.210631862 0.86347915300377087 
0.86347915300377087 1.2331877593293259 0.36040186168526983 0.21063185373344145 0.183586776 0.183586776 0.9472266 0.0926237255 0.146190211 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.092623715229236292 0.14619023377705653 +5.4 0.683544338 5.4 3.4 1.7 0.2 0.683544338 0.772727251 0.246376812 0.0800000057 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.3999999999999999 1.7 0.20000000000000001 0.68354430379746833 0.77272727272727271 0.24637681159420291 0.080000000000000016 0 0.323529422 0.323529422 0.590909064 0.166666672 0.04761905 0.3235294117647059 0.3235294117647059 0.59090909090909094 0.16666666666666666 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.9142721 0.9142721 1.10337853 0.408455431 0.140421242 0.91427204435693399 0.91427204435693399 1.1033785215051863 0.40845544324330579 0.14042123582229432 0.3099612 0.3099612 0.797875941 0.13329801 0.07146728 0.30996111189240849 0.30996111189240849 0.79787580879856601 0.1332979854433643 0.071467286508792471 +5.1 0.6455696 5.1 3.7 1.5 0.4 0.6455696 0.840909064 0.2173913 0.160000011 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7000000000000002 1.5 0.40000000000000002 0.64556962025316444 0.84090909090909094 0.21739130434782611 0.16000000000000003 0 0.235294119 0.235294119 0.727272749 0.119047619 0.142857149 0.23529411764705882 0.23529411764705882 0.72727272727272729 0.11904761904761904 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.20073545 0.360401869 0.280842483 0.86347915300377087 0.86347915300377087 1.2007354498732912 0.36040186168526983 0.28084247164458864 0.183586776 0.183586776 0.9236834 0.0926237255 0.223406911 0.18358681923369741 0.18358681923369741 0.92368343544686704 0.092623715229236292 0.22340689032507804 +4.6 0.5822785 4.6 3.6 1 0.2 0.5822785 0.818181753 0.144927531 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 
3.6000000000000001 1 0.20000000000000001 0.58227848101265811 0.81818181818181812 0.14492753623188406 0.080000000000000016 0 0.0882353 0.0882353 0.6818182 0 0.04761905 0.088235294117647065 0.088235294117647065 0.68181818181818177 0 0.047619047619047616 0 0 1 0 0 0 0 1 0 0 0.7788243 0.7788243 1.1682831 0.2402679 0.140421242 0.77882433408183249 0.77882433408183249 1.1682831404172562 0.2402679077901799 0.14042123582229432 0.0510348342 0.0510348342 0.8919594 0.0217650775 0.07146728 0.05103484272829939 0.05103484272829939 0.89195941323598249 0.021765071971117544 0.071467286508792471 +5.1 0.6455696 5.1 3.3 1.7 0.5 0.6455696 0.74999994 0.246376812 0.2 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.2999999999999998 1.7 0.5 0.64556962025316444 0.74999999999999989 0.24637681159420291 0.20000000000000001 0 0.235294119 0.235294119 0.545454562 0.166666672 0.1904762 0.23529411764705882 0.23529411764705882 0.54545454545454541 0.16666666666666666 0.19047619047619047 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.8634792 0.8634792 1.07092619 0.408455431 0.3510531 0.86347915300377087 0.86347915300377087 1.0709262120491514 0.40845544324330579 0.35105308955573578 0.183586776 0.183586776 0.733567655 0.13329801 0.29663077 0.18358681923369741 0.18358681923369741 0.73356785053506501 0.1332979854433643 0.29663078722186503 +4.8 0.607594967 4.8 3.4 1.9 0.2 0.607594967 0.772727251 0.2753623 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.3999999999999999 1.8999999999999999 0.20000000000000001 0.60759493670886067 0.77272727272727271 0.27536231884057971 0.080000000000000016 0 0.14705883 0.14705883 0.590909064 0.1904762 0.04761905 0.14705882352941177 0.14705882352941177 0.59090909090909094 0.19047619047619047 0.047619047619047616 0 0 1 0 0 0 0 1 0 0 0.8126863 0.8126863 1.10337853 0.456509024 0.140421242 0.81268626165060787 0.81268626165060787 1.1033785215051863 0.45650902480134176 0.14042123582229432 0.09137413 0.09137413 
0.797875941 0.1785302 0.07146728 0.091374136005250073 0.091374136005250073 0.79787580879856601 0.17853019682812199 0.071467286508792471 +5 0.6329114 5 3 1.6 0.2 0.6329114 0.6818181 0.231884047 0.0800000057 5 0.63291139240506322 5 3 1.6000000000000001 0.20000000000000001 0.63291139240506322 0.68181818181818177 0.23188405797101452 0.080000000000000016 0 0.205882356 0.205882356 0.4090909 0.142857149 0.04761905 0.20588235294117646 0.20588235294117646 0.40909090909090912 0.14285714285714285 0.047619047619047616 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.8465482 0.8465482 0.9735693 0.38442865 0.140421242 0.84654818921938324 0.84654818921938324 0.97356928368104678 0.38442865246428787 0.14042123582229432 0.14861621 0.14861621 0.480745673 0.112279132 0.07146728 0.14861616399327332 0.14861616399327332 0.4807456731235793 0.11227913256984562 0.071467286508792471 +5 0.6329114 5 3.4 1.6 0.4 0.6329114 0.772727251 0.231884047 0.160000011 5 0.63291139240506322 5 3.3999999999999999 1.6000000000000001 0.40000000000000002 0.63291139240506322 0.77272727272727271 0.23188405797101452 0.16000000000000003 0 0.205882356 0.205882356 0.590909064 0.142857149 0.142857149 0.20588235294117646 0.20588235294117646 0.59090909090909094 0.14285714285714285 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8465482 0.8465482 1.10337853 0.38442865 0.280842483 0.84654818921938324 0.84654818921938324 1.1033785215051863 0.38442865246428787 0.28084247164458864 0.14861621 0.14861621 0.797875941 0.112279132 0.223406911 0.14861616399327332 0.14861616399327332 0.79787580879856601 0.11227913256984562 0.22340689032507804 +5.2 0.6582278 5.2 3.5 1.5 0.2 0.6582278 0.7954545 0.2173913 0.0800000057 5.2000000000000002 0.65822784810126578 5.2000000000000002 3.5 1.5 0.20000000000000001 0.65822784810126578 0.79545454545454541 0.21739130434782611 0.080000000000000016 0 0.2647059 0.2647059 0.6363636 0.119047619 0.04761905 
0.26470588235294118 0.26470588235294118 0.63636363636363635 0.11904761904761904 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.880410135 0.880410135 1.13583088 0.360401869 0.140421242 0.88041011678815861 0.88041011678815861 1.1358308309612213 0.36040186168526983 0.14042123582229432 0.222458825 0.222458825 0.8504554 0.0926237255 0.07146728 0.22245879972342997 0.22245879972342997 0.8504555107896975 0.092623715229236292 0.071467286508792471 +5.2 0.6582278 5.2 3.4 1.4 0.2 0.6582278 0.772727251 0.202898547 0.0800000057 5.2000000000000002 0.65822784810126578 5.2000000000000002 3.3999999999999999 1.3999999999999999 0.20000000000000001 0.65822784810126578 0.77272727272727271 0.20289855072463767 0.080000000000000016 0 0.2647059 0.2647059 0.590909064 0.0952381 0.04761905 0.26470588235294118 0.26470588235294118 0.59090909090909094 0.095238095238095233 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.880410135 0.880410135 1.10337853 0.336375058 0.140421242 0.88041011678815861 0.88041011678815861 1.1033785215051863 0.33637507090625185 0.14042123582229432 0.222458825 0.222458825 0.797875941 0.07455328 0.07146728 0.22245879972342997 0.22245879972342997 0.79787580879856601 0.074553292690365869 0.071467286508792471 +4.7 0.594936669 4.7 3.2 1.6 0.2 0.594936669 0.7272727 0.231884047 0.0800000057 4.7000000000000002 0.59493670886075944 4.7000000000000002 3.2000000000000002 1.6000000000000001 0.20000000000000001 0.59493670886075944 0.72727272727272729 0.23188405797101452 0.080000000000000016 0 0.117647059 0.117647059 0.5 0.142857149 0.04761905 0.11764705882352941 0.11764705882352941 0.5 0.14285714285714285 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.7957553 0.7957553 1.038474 0.38442865 0.140421242 0.79575529786622023 0.79575529786622023 1.0384739025931167 0.38442865246428787 0.14042123582229432 0.06917418 0.06917418 0.6578964 0.112279132 0.07146728 0.069174201507542998 
0.069174201507542998 0.65789648182451921 0.11227913256984562 0.071467286508792471 +4.8 0.607594967 4.8 3.1 1.6 0.2 0.607594967 0.7045454 0.231884047 0.0800000057 4.7999999999999998 0.60759493670886067 4.7999999999999998 3.1000000000000001 1.6000000000000001 0.20000000000000001 0.60759493670886067 0.70454545454545459 0.23188405797101452 0.080000000000000016 0 0.14705883 0.14705883 0.454545468 0.142857149 0.04761905 0.14705882352941177 0.14705882352941177 0.45454545454545453 0.14285714285714285 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.8126863 0.8126863 1.0060215 0.38442865 0.140421242 0.81268626165060787 0.81268626165060787 1.0060215931370817 0.38442865246428787 0.14042123582229432 0.09137413 0.09137413 0.5725629 0.112279132 0.07146728 0.091374136005250073 0.091374136005250073 0.5725628341629212 0.11227913256984562 0.071467286508792471 +5.4 0.683544338 5.4 3.4 1.5 0.4 0.683544338 0.772727251 0.2173913 0.160000011 5.4000000000000004 0.68354430379746833 5.4000000000000004 3.3999999999999999 1.5 0.40000000000000002 0.68354430379746833 0.77272727272727271 0.21739130434782611 0.16000000000000003 0 0.323529422 0.323529422 0.590909064 0.119047619 0.142857149 0.3235294117647059 0.3235294117647059 0.59090909090909094 0.11904761904761904 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.9142721 0.9142721 1.10337853 0.360401869 0.280842483 0.91427204435693399 0.91427204435693399 1.1033785215051863 0.36040186168526983 0.28084247164458864 0.3099612 0.3099612 0.797875941 0.0926237255 0.223406911 0.30996111189240849 0.30996111189240849 0.79787580879856601 0.092623715229236292 0.22340689032507804 +5.2 0.6582278 5.2 4.1 1.5 0.1 0.6582278 0.9318181 0.2173913 0.0400000028 5.2000000000000002 0.65822784810126578 5.2000000000000002 4.0999999999999996 1.5 0.10000000000000001 0.65822784810126578 0.93181818181818166 0.21739130434782611 0.040000000000000008 0 0.2647059 0.2647059 0.909090936 0.119047619 0 0.26470588235294118 
0.26470588235294118 0.90909090909090906 0.11904761904761904 0 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.880410135 0.880410135 1.33054459 0.360401869 0.07021062 0.88041011678815861 0.88041011678815861 1.3305446876974305 0.36040186168526983 0.070210617911147161 0.222458825 0.222458825 0.984447062 0.0926237255 0.0149739943 0.22245879972342997 0.22245879972342997 0.98444709277532771 0.092623715229236292 0.014973996264087019 +5.5 0.6962025 5.5 4.2 1.4 0.2 0.6962025 0.9545454 0.202898547 0.0800000057 5.5 0.69620253164556956 5.5 4.2000000000000002 1.3999999999999999 0.20000000000000001 0.69620253164556956 0.95454545454545459 0.20289855072463767 0.080000000000000016 0 0.3529412 0.3529412 0.954545438 0.0952381 0.04761905 0.35294117647058826 0.35294117647058826 0.95454545454545459 0.095238095238095233 0.047619047619047616 0.6666667 0.6666667 1 0 0 0.66666666666666663 0.66666666666666663 1 0 0 0.931203067 0.931203067 1.36299694 0.336375058 0.140421242 0.93120300814132162 0.93120300814132162 1.3629969971534657 0.33637507090625185 0.14042123582229432 0.3573058 0.3573058 0.9899987 0.07455328 0.07146728 0.35730592590256216 0.35730592590256216 0.98999870773555454 0.074553292690365869 0.071467286508792471 +4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 
0.092623715229236292 0.014973996264087019 +5 0.6329114 5 3.2 1.2 0.2 0.6329114 0.7272727 0.173913047 0.0800000057 5 0.63291139240506322 5 3.2000000000000002 1.2 0.20000000000000001 0.63291139240506322 0.72727272727272729 0.17391304347826086 0.080000000000000016 0 0.205882356 0.205882356 0.5 0.04761905 0.04761905 0.20588235294117646 0.20588235294117646 0.5 0.047619047619047616 0.047619047619047616 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.8465482 0.8465482 1.038474 0.2883215 0.140421242 0.84654818921938324 0.84654818921938324 1.0384739025931167 0.28832148934821589 0.14042123582229432 0.14861621 0.14861621 0.6578964 0.0439706929 0.07146728 0.14861616399327332 0.14861616399327332 0.65789648182451921 0.043970678884132197 0.071467286508792471 +5.5 0.6962025 5.5 3.5 1.3 0.2 0.6962025 0.7954545 0.188405782 0.0800000057 5.5 0.69620253164556956 5.5 3.5 1.3 0.20000000000000001 0.69620253164556956 0.79545454545454541 0.18840579710144928 0.080000000000000016 0 0.3529412 0.3529412 0.6363636 0.0714285746 0.04761905 0.35294117647058826 0.35294117647058826 0.63636363636363635 0.071428571428571425 0.047619047619047616 0.6666667 0.6666667 1 0 0 0.66666666666666663 0.66666666666666663 1 0 0 0.931203067 0.931203067 1.13583088 0.312348276 0.140421242 0.93120300814132162 0.93120300814132162 1.1358308309612213 0.31234828012723387 0.14042123582229432 0.3573058 0.3573058 0.8504554 0.0582755022 0.07146728 0.35730592590256216 0.35730592590256216 0.8504555107896975 0.058275496366795021 0.071467286508792471 +4.9 0.6202532 4.9 3.1 1.5 0.1 0.6202532 0.7045454 0.2173913 0.0400000028 4.9000000000000004 0.620253164556962 4.9000000000000004 3.1000000000000001 1.5 0.10000000000000001 0.620253164556962 0.70454545454545459 0.21739130434782611 0.040000000000000008 0 0.1764706 0.1764706 0.454545468 0.119047619 0 0.17647058823529413 0.17647058823529413 0.45454545454545453 0.11904761904761904 0 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 
0.5 0 0 0.829617262 0.829617262 1.0060215 0.360401869 0.07021062 0.82961722543499561 0.82961722543499561 1.0060215931370817 0.36040186168526983 0.070210617911147161 0.117838435 0.117838435 0.5725629 0.0926237255 0.0149739943 0.11783846996167147 0.11783846996167147 0.5725628341629212 0.092623715229236292 0.014973996264087019 +4.4 0.5569621 4.4 3 1.3 0.2 0.5569621 0.6818181 0.188405782 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 3 1.3 0.20000000000000001 0.55696202531645567 0.68181818181818177 0.18840579710144928 0.080000000000000016 0 0.0294117648 0.0294117648 0.4090909 0.0714285746 0.04761905 0.029411764705882353 0.029411764705882353 0.40909090909090912 0.071428571428571425 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.744962454 0.744962454 0.9735693 0.312348276 0.140421242 0.74496240651305734 0.74496240651305734 0.97356928368104678 0.31234828012723387 0.14042123582229432 0.0255098473 0.0255098473 0.480745673 0.0582755022 0.07146728 0.025509830781751675 0.025509830781751675 0.4807456731235793 0.058275496366795021 0.071467286508792471 +5.1 0.6455696 5.1 3.4 1.5 0.2 0.6455696 0.772727251 0.2173913 0.0800000057 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.3999999999999999 1.5 0.20000000000000001 0.64556962025316444 0.77272727272727271 0.21739130434782611 0.080000000000000016 0 0.235294119 0.235294119 0.590909064 0.119047619 0.04761905 0.23529411764705882 0.23529411764705882 0.59090909090909094 0.11904761904761904 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.10337853 0.360401869 0.140421242 0.86347915300377087 0.86347915300377087 1.1033785215051863 0.36040186168526983 0.14042123582229432 0.183586776 0.183586776 0.797875941 0.0926237255 0.07146728 0.18358681923369741 0.18358681923369741 0.79787580879856601 0.092623715229236292 0.071467286508792471 +5 0.6329114 5 3.5 1.3 0.3 0.6329114 0.7954545 0.188405782 0.120000005 5 0.63291139240506322 5 3.5 1.3 
0.29999999999999999 0.63291139240506322 0.79545454545454541 0.18840579710144928 0.12 0 0.205882356 0.205882356 0.6363636 0.0714285746 0.0952381 0.20588235294117646 0.20588235294117646 0.63636363636363635 0.071428571428571425 0.095238095238095233 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8465482 0.8465482 1.13583088 0.312348276 0.210631862 0.84654818921938324 0.84654818921938324 1.1358308309612213 0.31234828012723387 0.21063185373344145 0.14861621 0.14861621 0.8504554 0.0582755022 0.146190211 0.14861616399327332 0.14861616399327332 0.8504555107896975 0.058275496366795021 0.14619023377705653 +4.5 0.569620252 4.5 2.3 1.3 0.3 0.569620252 0.522727251 0.188405782 0.120000005 4.5 0.56962025316455689 4.5 2.2999999999999998 1.3 0.29999999999999999 0.56962025316455689 0.52272727272727271 0.18840579710144928 0.12 0 0.05882353 0.05882353 0.09090909 0.0714285746 0.0952381 0.058823529411764705 0.058823529411764705 0.090909090909090912 0.071428571428571425 0.095238095238095233 0 0 0 0 0 0 0 0 0 0 0.7618934 0.7618934 0.7464031 0.312348276 0.210631862 0.76189337029744486 0.76189337029744486 0.7464031174888025 0.31234828012723387 0.21063185373344145 0.0366229452 0.0366229452 0.02727463 0.0582755022 0.146190211 0.036622927317243759 0.036622927317243759 0.027274649605582402 0.058275496366795021 0.14619023377705653 +4.4 0.5569621 4.4 3.2 1.3 0.2 0.5569621 0.7272727 0.188405782 0.0800000057 4.4000000000000004 0.55696202531645567 4.4000000000000004 3.2000000000000002 1.3 0.20000000000000001 0.55696202531645567 0.72727272727272729 0.18840579710144928 0.080000000000000016 0 0.0294117648 0.0294117648 0.5 0.0714285746 0.04761905 0.029411764705882353 0.029411764705882353 0.5 0.071428571428571425 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.744962454 0.744962454 1.038474 0.312348276 0.140421242 0.74496240651305734 0.74496240651305734 1.0384739025931167 0.31234828012723387 0.14042123582229432 0.0255098473 0.0255098473 0.6578964 0.0582755022 0.07146728 
0.025509830781751675 0.025509830781751675 0.65789648182451921 0.058275496366795021 0.071467286508792471 +5 0.6329114 5 3.5 1.6 0.6 0.6329114 0.7954545 0.231884047 0.24000001 5 0.63291139240506322 5 3.5 1.6000000000000001 0.59999999999999998 0.63291139240506322 0.79545454545454541 0.23188405797101452 0.23999999999999999 0 0.205882356 0.205882356 0.6363636 0.142857149 0.238095239 0.20588235294117646 0.20588235294117646 0.63636363636363635 0.14285714285714285 0.23809523809523808 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8465482 0.8465482 1.13583088 0.38442865 0.421263725 0.84654818921938324 0.84654818921938324 1.1358308309612213 0.38442865246428787 0.42126370746688291 0.14861621 0.14861621 0.8504554 0.112279132 0.363569826 0.14861616399327332 0.14861616399327332 0.8504555107896975 0.11227913256984562 0.36356980977329256 +5.1 0.6455696 5.1 3.8 1.9 0.4 0.6455696 0.8636363 0.2753623 0.160000011 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.8999999999999999 0.40000000000000002 0.64556962025316444 0.86363636363636354 0.27536231884057971 0.16000000000000003 0 0.235294119 0.235294119 0.772727251 0.1904762 0.142857149 0.23529411764705882 0.23529411764705882 0.77272727272727271 0.19047619047619047 0.14285714285714285 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.23318768 0.456509024 0.280842483 0.86347915300377087 0.86347915300377087 1.2331877593293259 0.45650902480134176 0.28084247164458864 0.183586776 0.183586776 0.9472266 0.1785302 0.223406911 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.17853019682812199 0.22340689032507804 +4.8 0.607594967 4.8 3 1.4 0.3 0.607594967 0.6818181 0.202898547 0.120000005 4.7999999999999998 0.60759493670886067 4.7999999999999998 3 1.3999999999999999 0.29999999999999999 0.60759493670886067 0.68181818181818177 0.20289855072463767 0.12 0 0.14705883 0.14705883 0.4090909 0.0952381 0.0952381 0.14705882352941177 
0.14705882352941177 0.40909090909090912 0.095238095238095233 0.095238095238095233 0 0 0.5 0 0 0 0 0.5 0 0 0.8126863 0.8126863 0.9735693 0.336375058 0.210631862 0.81268626165060787 0.81268626165060787 0.97356928368104678 0.33637507090625185 0.21063185373344145 0.09137413 0.09137413 0.480745673 0.07455328 0.146190211 0.091374136005250073 0.091374136005250073 0.4807456731235793 0.074553292690365869 0.14619023377705653 +5.1 0.6455696 5.1 3.8 1.6 0.2 0.6455696 0.8636363 0.231884047 0.0800000057 5.0999999999999996 0.64556962025316444 5.0999999999999996 3.7999999999999998 1.6000000000000001 0.20000000000000001 0.64556962025316444 0.86363636363636354 0.23188405797101452 0.080000000000000016 0 0.235294119 0.235294119 0.772727251 0.142857149 0.04761905 0.23529411764705882 0.23529411764705882 0.77272727272727271 0.14285714285714285 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.8634792 0.8634792 1.23318768 0.38442865 0.140421242 0.86347915300377087 0.86347915300377087 1.2331877593293259 0.38442865246428787 0.14042123582229432 0.183586776 0.183586776 0.9472266 0.112279132 0.07146728 0.18358681923369741 0.18358681923369741 0.94722658326235554 0.11227913256984562 0.071467286508792471 +4.6 0.5822785 4.6 3.2 1.4 0.2 0.5822785 0.7272727 0.202898547 0.0800000057 4.5999999999999996 0.58227848101265811 4.5999999999999996 3.2000000000000002 1.3999999999999999 0.20000000000000001 0.58227848101265811 0.72727272727272729 0.20289855072463767 0.080000000000000016 0 0.0882353 0.0882353 0.5 0.0952381 0.04761905 0.088235294117647065 0.088235294117647065 0.5 0.095238095238095233 0.047619047619047616 0 0 0.5 0 0 0 0 0.5 0 0 0.7788243 0.7788243 1.038474 0.336375058 0.140421242 0.77882433408183249 0.77882433408183249 1.0384739025931167 0.33637507090625185 0.14042123582229432 0.0510348342 0.0510348342 0.6578964 0.07455328 0.07146728 0.05103484272829939 0.05103484272829939 0.65789648182451921 0.074553292690365869 0.071467286508792471 +5.3 0.6708861 
5.3 3.7 1.5 0.2 0.6708861 0.840909064 0.2173913 0.0800000057 5.2999999999999998 0.670886075949367 5.2999999999999998 3.7000000000000002 1.5 0.20000000000000001 0.670886075949367 0.84090909090909094 0.21739130434782611 0.080000000000000016 0 0.294117659 0.294117659 0.727272749 0.119047619 0.04761905 0.29411764705882354 0.29411764705882354 0.72727272727272729 0.11904761904761904 0.047619047619047616 0.333333343 0.333333343 1 0 0 0.33333333333333331 0.33333333333333331 1 0 0 0.897341132 0.897341132 1.20073545 0.360401869 0.140421242 0.89734108057254625 0.89734108057254625 1.2007354498732912 0.36040186168526983 0.14042123582229432 0.264780223 0.264780223 0.9236834 0.0926237255 0.07146728 0.26478014840942388 0.26478014840942388 0.92368343544686704 0.092623715229236292 0.071467286508792471 +5 0.6329114 5 3.3 1.4 0.2 0.6329114 0.74999994 0.202898547 0.0800000057 5 0.63291139240506322 5 3.2999999999999998 1.3999999999999999 0.20000000000000001 0.63291139240506322 0.74999999999999989 0.20289855072463767 0.080000000000000016 0 0.205882356 0.205882356 0.545454562 0.0952381 0.04761905 0.20588235294117646 0.20588235294117646 0.54545454545454541 0.095238095238095233 0.047619047619047616 0.333333343 0.333333343 0.5 0 0 0.33333333333333331 0.33333333333333331 0.5 0 0 0.8465482 0.8465482 1.07092619 0.336375058 0.140421242 0.84654818921938324 0.84654818921938324 1.0709262120491514 0.33637507090625185 0.14042123582229432 0.14861621 0.14861621 0.733567655 0.07455328 0.07146728 0.14861616399327332 0.14861616399327332 0.73356785053506501 0.074553292690365869 0.071467286508792471 +7 0.886076 7 3.2 4.7 1.4 0.886076 0.7272727 0.6811594 0.56 7 0.88607594936708844 7 3.2000000000000002 4.7000000000000002 1.3999999999999999 0.88607594936708844 0.72727272727272729 0.6811594202898551 0.55999999999999994 1 0.7941176 0.7941176 0.5 0.547619045 0.476190478 0.79411764705882348 0.79411764705882348 0.5 0.54761904761904767 0.47619047619047616 1 1 0.5 1 1 1 1 0.5 1 1 1.18516755 1.18516755 1.038474 
1.12925911 0.982948661 1.1851674649071366 1.1851674649071366 1.0384739025931167 1.1292591666138456 0.98294865075606008 0.91099143 0.91099143 0.6578964 0.734288335 0.6955889 0.9109914626370752 0.9109914626370752 0.65789648182451921 0.73428833770009005 0.69558889835906823 +6.4 0.8101266 6.4 3.2 4.5 1.5 0.8101266 0.7272727 0.6521739 0.6 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.2000000000000002 4.5 1.5 0.81012658227848089 0.72727272727272729 0.65217391304347827 0.60000000000000009 1 0.617647052 0.617647052 0.5 0.5 0.523809552 0.61764705882352944 0.61764705882352944 0.5 0.5 0.52380952380952384 1 1 0.5 1 1 1 1 0.5 1 1 1.08358181 1.08358181 1.038474 1.08120561 1.05315924 1.0835816822008106 1.0835816822008106 1.0384739025931167 1.0812055850558095 1.0531592686672073 0.7613054 0.7613054 0.6578964 0.7093809 0.719658256 0.76130542616799635 0.76130542616799635 0.65789648182451921 0.70938088629442786 0.71965826470413374 +6.9 0.873417735 6.9 3.1 4.9 1.5 0.873417735 0.7045454 0.710144937 0.6 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 4.9000000000000004 1.5 0.87341772151898722 0.70454545454545459 0.71014492753623193 0.60000000000000009 1 0.7647059 0.7647059 0.454545468 0.5952381 0.523809552 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.59523809523809523 0.52380952380952384 1 1 0.5 1 1 1 1 0.5 1 1 1.16823661 1.16823661 1.0060215 1.17731273 1.05315924 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.1773127481718815 1.0531592686672073 0.8933714 0.8933714 0.5725629 0.757097244 0.719658256 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.75709721026728072 0.71965826470413374 +5.5 0.6962025 5.5 2.3 4 1.3 0.6962025 0.522727251 0.5797101 0.52 5.5 0.69620253164556956 5.5 2.2999999999999998 4 1.3 0.69620253164556956 0.52272727272727271 0.57971014492753625 0.52000000000000002 1 0.3529412 0.3529412 0.09090909 0.3809524 0.428571433 0.35294117647058826 0.35294117647058826 0.090909090909090912 
0.38095238095238093 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.931203067 0.931203067 0.7464031 0.9610716 0.912738 0.93120300814132162 0.93120300814132162 0.7464031174888025 0.96107163116071959 0.91273803284491306 0.3573058 0.3573058 0.02727463 0.636991262 0.6687579 0.35730592590256216 0.35730592590256216 0.027274649605582402 0.63699126114775995 0.66875793094202918 +6.5 0.822784841 6.5 2.8 4.6 1.5 0.822784841 0.6363636 0.6666666 0.6 6.5 0.82278481012658211 6.5 2.7999999999999998 4.5999999999999996 1.5 0.82278481012658211 0.63636363636363635 0.66666666666666663 0.60000000000000009 1 0.647058845 0.647058845 0.3181818 0.523809552 0.523809552 0.6470588235294118 0.6470588235294118 0.31818181818181818 0.52380952380952384 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.10051274 1.10051274 0.908664644 1.10523236 1.05315924 1.1005126459851982 1.1005126459851982 0.908664664768977 1.1052323758348275 1.0531592686672073 0.7940583 0.7940583 0.296437562 0.7221062 0.719658256 0.79405825408863862 0.79405825408863862 0.29643751019242903 0.72210618745756316 0.71965826470413374 +5.7 0.721519 5.7 2.8 4.5 1.3 0.721519 0.6363636 0.6521739 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.7999999999999998 4.5 1.3 0.72151898734177211 0.63636363636363635 0.65217391304347827 0.52000000000000002 1 0.4117647 0.4117647 0.3181818 0.5 0.428571433 0.41176470588235292 0.41176470588235292 0.31818181818181818 0.5 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.965064943 0.965064943 0.908664644 1.08120561 0.912738 0.96506493571009688 0.96506493571009688 0.908664664768977 1.0812055850558095 0.91273803284491306 0.455403626 0.455403626 0.296437562 0.7093809 0.6687579 0.45540375842690767 0.45540375842690767 0.29643751019242903 0.70938088629442786 0.66875793094202918 +6.3 0.797468364 6.3 3.3 4.7 1.6 0.797468364 0.74999994 0.6811594 0.640000045 6.2999999999999998 0.79746835443037956 6.2999999999999998 
3.2999999999999998 4.7000000000000002 1.6000000000000001 0.79746835443037956 0.74999999999999989 0.6811594202898551 0.64000000000000012 1 0.5882353 0.5882353 0.545454562 0.547619045 0.5714286 0.58823529411764708 0.58823529411764708 0.54545454545454541 0.54761904761904767 0.5714285714285714 1 1 0.5 1 1 1 1 0.5 1 1 1.06665075 1.06665075 1.07092619 1.12925911 1.12336993 1.0666507184164229 1.0666507184164229 1.0709262120491514 1.1292591666138456 1.1233698865783546 0.72531265 0.72531265 0.733567655 0.734288335 0.7413043 0.72531248018388961 0.72531248018388961 0.73356785053506501 0.73428833770009005 0.74130430666213609 +4.9 0.6202532 4.9 2.4 3.3 1 0.6202532 0.545454562 0.478260845 0.4 4.9000000000000004 0.620253164556962 4.9000000000000004 2.3999999999999999 3.2999999999999998 1 0.620253164556962 0.54545454545454541 0.47826086956521741 0.40000000000000002 1 0.1764706 0.1764706 0.13636364 0.238095239 0.2857143 0.17647058823529413 0.17647058823529413 0.13636363636363635 0.23809523809523808 0.2857142857142857 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.829617262 0.829617262 0.778855443 0.792884052 0.7021062 0.82961722543499561 0.82961722543499561 0.77885542694483745 0.79288409570759366 0.70210617911147155 0.117838435 0.117838435 0.052431643 0.508717 0.567488432 0.11783846996167147 0.11783846996167147 0.052431629740293029 0.50871703350008446 0.56748843907683133 +6.6 0.835443 6.6 2.9 4.6 1.3 0.835443 0.659090936 0.6666666 0.52 6.5999999999999996 0.83544303797468333 6.5999999999999996 2.8999999999999999 4.5999999999999996 1.3 0.83544303797468333 0.65909090909090906 0.66666666666666663 0.52000000000000002 1 0.6764706 0.6764706 0.363636374 0.523809552 0.428571433 0.67647058823529416 0.67647058823529416 0.36363636363636365 0.52380952380952384 0.42857142857142855 1 1 0 1 1 1 1 0 1 1 1.11744368 1.11744368 0.941117 1.10523236 0.912738 1.1174436097695859 1.1174436097695859 0.94111697422501195 1.1052323758348275 0.91273803284491306 0.8235505 0.8235505 
0.386941522 0.7221062 0.6687579 0.82355060799945012 0.82355060799945012 0.38694155923435009 0.72210618745756316 0.66875793094202918 +5.2 0.6582278 5.2 2.7 3.9 1.4 0.6582278 0.6136364 0.5652174 0.56 5.2000000000000002 0.65822784810126578 5.2000000000000002 2.7000000000000002 3.8999999999999999 1.3999999999999999 0.65822784810126578 0.61363636363636365 0.56521739130434778 0.55999999999999994 1 0.2647059 0.2647059 0.272727281 0.357142866 0.476190478 0.26470588235294118 0.26470588235294118 0.27272727272727271 0.35714285714285715 0.47619047619047616 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.880410135 0.880410135 0.876212358 0.937044859 0.982948661 0.88041011678815861 0.88041011678815861 0.87621235531294217 0.93704484038170155 0.98294865075606008 0.222458825 0.222458825 0.214467555 0.620649636 0.6955889 0.22245879972342997 0.22245879972342997 0.21446754872116464 0.62064960460061858 0.69558889835906823 +5 0.6329114 5 2 3.5 1 0.6329114 0.454545438 0.5072464 0.4 5 0.63291139240506322 5 2 3.5 1 0.63291139240506322 0.45454545454545453 0.50724637681159424 0.40000000000000002 1 0.205882356 0.205882356 0 0.261904776 0.2857143 0.20588235294117646 0.20588235294117646 0 0.26190476190476192 0.2857142857142857 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.8465482 0.8465482 0.6490462 0.8409377 0.7021062 0.84654818921938324 0.84654818921938324 0.64904618912069789 0.84093767726562962 0.70210617911147155 0.14861621 0.14861621 0.00179608515 0.548691869 0.567488432 0.14861616399327332 0.14861616399327332 0.0017960868928680873 0.54869186015931659 0.56748843907683133 +5.9 0.7468355 5.9 3 4.2 1.5 0.7468355 0.6818181 0.6086956 0.6 5.9000000000000004 0.74683544303797467 5.9000000000000004 3 4.2000000000000002 1.5 0.74683544303797467 0.68181818181818177 0.60869565217391308 0.60000000000000009 1 0.470588237 0.470588237 0.4090909 0.428571433 0.523809552 0.47058823529411764 0.47058823529411764 0.40909090909090912 
0.42857142857142855 0.52380952380952384 1 1 0.5 1 1 1 1 0.5 1 1 0.998926938 0.998926938 0.9735693 1.00912511 1.05315924 0.99892686327887226 0.99892686327887226 0.97356928368104678 1.0091252127187555 1.0531592686672073 0.5528621 0.5528621 0.480745673 0.667766631 0.719658256 0.55286190159866166 0.55286190159866166 0.4807456731235793 0.66776662351076677 0.71965826470413374 +6 0.7594937 6 2.2 4 1 0.7594937 0.5 0.5797101 0.4 6 0.75949367088607578 6 2.2000000000000002 4 1 0.75949367088607578 0.5 0.57971014492753625 0.40000000000000002 1 0.5 0.5 0.0454545468 0.3809524 0.2857143 0.5 0.5 0.045454545454545456 0.38095238095238093 0.2857142857142857 1 1 0 1 1 1 1 0 1 1 1.01585793 1.01585793 0.7139508 0.9610716 0.7021062 1.0158578270632599 1.0158578270632599 0.71395080803276778 0.96107163116071959 0.70210617911147155 0.5995771 0.5995771 0.0126449876 0.636991262 0.567488432 0.59957706030964308 0.59957706030964308 0.012644988485085273 0.63699126114775995 0.56748843907683133 +6.1 0.7721519 6.1 2.9 4.7 1.4 0.7721519 0.659090936 0.6811594 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 2.8999999999999999 4.7000000000000002 1.3999999999999999 0.772151898734177 0.65909090909090906 0.6811594202898551 0.55999999999999994 1 0.5294118 0.5294118 0.363636374 0.547619045 0.476190478 0.52941176470588236 0.52941176470588236 0.36363636363636365 0.54761904761904767 0.47619047619047616 1 1 0 1 1 1 1 0 1 1 1.03278887 1.03278887 0.941117 1.12925911 0.982948661 1.0327887908476474 1.0327887908476474 0.94111697422501195 1.1292591666138456 0.98294865075606008 0.644171059 0.644171059 0.386941522 0.734288335 0.6955889 0.64417093822767468 0.64417093822767468 0.38694155923435009 0.73428833770009005 0.69558889835906823 +5.6 0.708860755 5.6 2.9 3.6 1.3 0.708860755 0.659090936 0.5217391 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.8999999999999999 3.6000000000000001 1.3 0.70886075949367078 0.65909090909090906 0.52173913043478259 0.52000000000000002 1 0.382352948 0.382352948 
0.363636374 0.2857143 0.428571433 0.38235294117647056 0.38235294117647056 0.36363636363636365 0.2857142857142857 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.948134 0.948134 0.941117 0.8649644 0.912738 0.94813397192570914 0.94813397192570914 0.94111697422501195 0.86496446804464766 0.91273803284491306 0.4060508 0.4060508 0.386941522 0.5676816 0.6687579 0.40605068921232229 0.40605068921232229 0.38694155923435009 0.56768154970207829 0.66875793094202918 +6.7 0.848101258 6.7 3.1 4.4 1.4 0.848101258 0.7045454 0.6376811 0.56 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 4.4000000000000004 1.3999999999999999 0.84810126582278467 0.70454545454545459 0.63768115942028991 0.55999999999999994 1 0.7058824 0.7058824 0.454545468 0.476190478 0.476190478 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.47619047619047616 0.47619047619047616 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 1.0060215 1.05717874 0.982948661 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.0571787942767916 0.98294865075606008 0.84984225 0.84984225 0.5725629 0.6960943 0.6955889 0.84984228863096656 0.84984228863096656 0.5725628341629212 0.69609423458217601 0.69558889835906823 +5.6 0.708860755 5.6 3 4.5 1.5 0.708860755 0.6818181 0.6521739 0.6 5.5999999999999996 0.70886075949367078 5.5999999999999996 3 4.5 1.5 0.70886075949367078 0.68181818181818177 0.65217391304347827 0.60000000000000009 1 0.382352948 0.382352948 0.4090909 0.5 0.523809552 0.38235294117647056 0.38235294117647056 0.40909090909090912 0.5 0.52380952380952384 0.6666667 0.6666667 0.5 1 1 0.66666666666666663 0.66666666666666663 0.5 1 1 0.948134 0.948134 0.9735693 1.08120561 1.05315924 0.94813397192570914 0.94813397192570914 0.97356928368104678 1.0812055850558095 1.0531592686672073 0.4060508 0.4060508 0.480745673 0.7093809 0.719658256 0.40605068921232229 0.40605068921232229 0.4807456731235793 0.70938088629442786 0.71965826470413374 +5.8 0.734177232 
5.8 2.7 4.1 1 0.734177232 0.6136364 0.5942029 0.4 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 4.0999999999999996 1 0.73417721518987333 0.61363636363636365 0.59420289855072461 0.40000000000000002 1 0.441176474 0.441176474 0.272727281 0.4047619 0.2857143 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.40476190476190477 0.2857142857142857 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.876212358 0.985098362 0.7021062 0.98199589949448451 0.98199589949448451 0.87621235531294217 0.98509842193973751 0.70210617911147155 0.504585147 0.504585147 0.214467555 0.6526925 0.567488432 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.65269250269310186 0.56748843907683133 +6.2 0.7848101 6.2 2.2 4.5 1.5 0.7848101 0.5 0.6521739 0.6 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.2000000000000002 4.5 1.5 0.78481012658227833 0.5 0.65217391304347827 0.60000000000000009 1 0.5588235 0.5588235 0.0454545468 0.5 0.523809552 0.55882352941176472 0.55882352941176472 0.045454545454545456 0.5 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.04971981 1.04971981 0.7139508 1.08120561 1.05315924 1.0497197546320352 1.0497197546320352 0.71395080803276778 1.0812055850558095 1.0531592686672073 0.6861942 0.6861942 0.0126449876 0.7093809 0.719658256 0.6861941068147408 0.6861941068147408 0.012644988485085273 0.70938088629442786 0.71965826470413374 +5.6 0.708860755 5.6 2.5 3.9 1.1 0.708860755 0.5681818 0.5652174 0.440000027 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.5 3.8999999999999999 1.1000000000000001 0.70886075949367078 0.56818181818181812 0.56521739130434778 0.44000000000000006 1 0.382352948 0.382352948 0.181818187 0.357142866 0.333333343 0.38235294117647056 0.38235294117647056 0.18181818181818182 0.35714285714285715 0.33333333333333331 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.948134 0.948134 0.8113077 0.937044859 0.7723168 0.94813397192570914 
0.94813397192570914 0.81130773640087239 0.93704484038170155 0.7723167970226188 0.4060508 0.4060508 0.09116498 0.620649636 0.605188966 0.40605068921232229 0.40605068921232229 0.091164973250557446 0.62064960460061858 0.60518894407556645 +5.9 0.7468355 5.9 3.2 4.8 1.8 0.7468355 0.7272727 0.6956522 0.719999969 5.9000000000000004 0.74683544303797467 5.9000000000000004 3.2000000000000002 4.7999999999999998 1.8 0.74683544303797467 0.72727272727272729 0.69565217391304346 0.72000000000000008 1 0.470588237 0.470588237 0.5 0.5714286 0.6666667 0.47058823529411764 0.47058823529411764 0.5 0.5714285714285714 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 0.998926938 0.998926938 1.038474 1.153286 1.26379108 0.99892686327887226 0.99892686327887226 1.0384739025931167 1.1532859573928635 1.2637911224006488 0.5528621 0.5528621 0.6578964 0.7459458 0.778455853 0.55286190159866166 0.55286190159866166 0.65789648182451921 0.74594581373449664 0.77845583371698512 +6.1 0.7721519 6.1 2.8 4 1.3 0.7721519 0.6363636 0.5797101 0.52 6.0999999999999996 0.772151898734177 6.0999999999999996 2.7999999999999998 4 1.3 0.772151898734177 0.63636363636363635 0.57971014492753625 0.52000000000000002 1 0.5294118 0.5294118 0.3181818 0.3809524 0.428571433 0.52941176470588236 0.52941176470588236 0.31818181818181818 0.38095238095238093 0.42857142857142855 1 1 0 1 1 1 1 0 1 1 1.03278887 1.03278887 0.908664644 0.9610716 0.912738 1.0327887908476474 1.0327887908476474 0.908664664768977 0.96107163116071959 0.91273803284491306 0.644171059 0.644171059 0.296437562 0.636991262 0.6687579 0.64417093822767468 0.64417093822767468 0.29643751019242903 0.63699126114775995 0.66875793094202918 +6.3 0.797468364 6.3 2.5 4.9 1.5 0.797468364 0.5681818 0.710144937 0.6 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.5 4.9000000000000004 1.5 0.79746835443037956 0.56818181818181812 0.71014492753623193 0.60000000000000009 1 0.5882353 0.5882353 0.181818187 0.5952381 0.523809552 0.58823529411764708 0.58823529411764708 
0.18181818181818182 0.59523809523809523 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.8113077 1.17731273 1.05315924 1.0666507184164229 1.0666507184164229 0.81130773640087239 1.1773127481718815 1.0531592686672073 0.72531265 0.72531265 0.09116498 0.757097244 0.719658256 0.72531248018388961 0.72531248018388961 0.091164973250557446 0.75709721026728072 0.71965826470413374 +6.1 0.7721519 6.1 2.8 4.7 1.2 0.7721519 0.6363636 0.6811594 0.480000019 6.0999999999999996 0.772151898734177 6.0999999999999996 2.7999999999999998 4.7000000000000002 1.2 0.772151898734177 0.63636363636363635 0.6811594202898551 0.47999999999999998 1 0.5294118 0.5294118 0.3181818 0.547619045 0.3809524 0.52941176470588236 0.52941176470588236 0.31818181818181818 0.54761904761904767 0.38095238095238093 1 1 0 1 1 1 1 0 1 1 1.03278887 1.03278887 0.908664644 1.12925911 0.842527449 1.0327887908476474 1.0327887908476474 0.908664664768977 1.1292591666138456 0.84252741493376582 0.644171059 0.644171059 0.296437562 0.734288335 0.6387745 0.64417093822767468 0.64417093822767468 0.29643751019242903 0.73428833770009005 0.63877450359674082 +6.4 0.8101266 6.4 2.9 4.3 1.3 0.8101266 0.659090936 0.623188436 0.52 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.8999999999999999 4.2999999999999998 1.3 0.81012658227848089 0.65909090909090906 0.62318840579710144 0.52000000000000002 1 0.617647052 0.617647052 0.363636374 0.452380955 0.428571433 0.61764705882352944 0.61764705882352944 0.36363636363636365 0.45238095238095238 0.42857142857142855 1 1 0 1 1 1 1 0 1 1 1.08358181 1.08358181 0.941117 1.033152 0.912738 1.0835816822008106 1.0835816822008106 0.94111697422501195 1.0331520034977735 0.91273803284491306 0.7613054 0.7613054 0.386941522 0.682228565 0.6687579 0.76130542616799635 0.76130542616799635 0.38694155923435009 0.68222849726345214 0.66875793094202918 +6.6 0.835443 6.6 3 4.4 1.4 0.835443 0.6818181 0.6376811 0.56 6.5999999999999996 0.83544303797468333 6.5999999999999996 3 4.4000000000000004 
1.3999999999999999 0.83544303797468333 0.68181818181818177 0.63768115942028991 0.55999999999999994 1 0.6764706 0.6764706 0.4090909 0.476190478 0.476190478 0.67647058823529416 0.67647058823529416 0.40909090909090912 0.47619047619047616 0.47619047619047616 1 1 0.5 1 1 1 1 0.5 1 1 1.11744368 1.11744368 0.9735693 1.05717874 0.982948661 1.1174436097695859 1.1174436097695859 0.97356928368104678 1.0571787942767916 0.98294865075606008 0.8235505 0.8235505 0.480745673 0.6960943 0.6955889 0.82355060799945012 0.82355060799945012 0.4807456731235793 0.69609423458217601 0.69558889835906823 +6.8 0.860759556 6.8 2.8 4.8 1.4 0.860759556 0.6363636 0.6956522 0.56 6.7999999999999998 0.86075949367088589 6.7999999999999998 2.7999999999999998 4.7999999999999998 1.3999999999999999 0.86075949367088589 0.63636363636363635 0.69565217391304346 0.55999999999999994 1 0.7352941 0.7352941 0.3181818 0.5714286 0.476190478 0.73529411764705888 0.73529411764705888 0.31818181818181818 0.5714285714285714 0.47619047619047616 1 1 0 1 1 1 1 0 1 1 1.15130568 1.15130568 0.908664644 1.153286 0.982948661 1.1513055373383612 1.1513055373383612 0.908664664768977 1.1532859573928635 0.98294865075606008 0.873058 0.873058 0.296437562 0.7459458 0.6955889 0.87305788341059976 0.87305788341059976 0.29643751019242903 0.74594581373449664 0.69558889835906823 +6.7 0.848101258 6.7 3 5 1.7 0.848101258 0.6818181 0.7246376 0.68 6.7000000000000002 0.84810126582278467 6.7000000000000002 3 5 1.7 0.84810126582278467 0.68181818181818177 0.72463768115942029 0.68000000000000005 1 0.7058824 0.7058824 0.4090909 0.619047642 0.619047642 0.70588235294117652 0.70588235294117652 0.40909090909090912 0.61904761904761907 0.61904761904761907 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 0.9735693 1.20133948 1.19358051 1.1343745735539736 1.1343745735539736 0.97356928368104678 1.2013395389508994 1.1935805044895016 0.84984225 0.84984225 0.480745673 0.7677612 0.7608193 0.84984228863096656 0.84984228863096656 0.4807456731235793 0.76776110588492996 
0.76081931222191046 +6 0.7594937 6 2.9 4.5 1.5 0.7594937 0.659090936 0.6521739 0.6 6 0.75949367088607578 6 2.8999999999999999 4.5 1.5 0.75949367088607578 0.65909090909090906 0.65217391304347827 0.60000000000000009 1 0.5 0.5 0.363636374 0.5 0.523809552 0.5 0.5 0.36363636363636365 0.5 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.01585793 1.01585793 0.941117 1.08120561 1.05315924 1.0158578270632599 1.0158578270632599 0.94111697422501195 1.0812055850558095 1.0531592686672073 0.5995771 0.5995771 0.386941522 0.7093809 0.719658256 0.59957706030964308 0.59957706030964308 0.38694155923435009 0.70938088629442786 0.71965826470413374 +5.7 0.721519 5.7 2.6 3.5 1 0.721519 0.590909064 0.5072464 0.4 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.6000000000000001 3.5 1 0.72151898734177211 0.59090909090909094 0.50724637681159424 0.40000000000000002 1 0.4117647 0.4117647 0.227272734 0.261904776 0.2857143 0.41176470588235292 0.41176470588235292 0.22727272727272727 0.26190476190476192 0.2857142857142857 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.965064943 0.965064943 0.84376 0.8409377 0.7021062 0.96506493571009688 0.96506493571009688 0.84376004585690734 0.84093767726562962 0.70210617911147155 0.455403626 0.455403626 0.145245746 0.548691869 0.567488432 0.45540375842690767 0.45540375842690767 0.14524588521591403 0.54869186015931659 0.56748843907683133 +5.5 0.6962025 5.5 2.4 3.8 1.1 0.6962025 0.545454562 0.5507246 0.440000027 5.5 0.69620253164556956 5.5 2.3999999999999999 3.7999999999999998 1.1000000000000001 0.69620253164556956 0.54545454545454541 0.55072463768115942 0.44000000000000006 1 0.3529412 0.3529412 0.13636364 0.333333343 0.333333343 0.35294117647058826 0.35294117647058826 0.13636363636363635 0.33333333333333331 0.33333333333333331 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.931203067 0.931203067 0.778855443 0.913018048 0.7723168 0.93120300814132162 0.93120300814132162 0.77885542694483745 0.91301804960268351 
0.7723167970226188 0.3573058 0.3573058 0.052431643 0.6036563 0.605188966 0.35730592590256216 0.35730592590256216 0.052431629740293029 0.60365621138880055 0.60518894407556645 +5.5 0.6962025 5.5 2.4 3.7 1 0.6962025 0.545454562 0.5362319 0.4 5.5 0.69620253164556956 5.5 2.3999999999999999 3.7000000000000002 1 0.69620253164556956 0.54545454545454541 0.53623188405797106 0.40000000000000002 1 0.3529412 0.3529412 0.13636364 0.309523821 0.2857143 0.35294117647058826 0.35294117647058826 0.13636363636363635 0.30952380952380953 0.2857142857142857 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.931203067 0.931203067 0.778855443 0.888991237 0.7021062 0.93120300814132162 0.93120300814132162 0.77885542694483745 0.8889912588236657 0.70210617911147155 0.3573058 0.3573058 0.052431643 0.5860022 0.567488432 0.35730592590256216 0.35730592590256216 0.052431629740293029 0.58600218188861053 0.56748843907683133 +5.8 0.734177232 5.8 2.7 3.9 1.2 0.734177232 0.6136364 0.5652174 0.480000019 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 3.8999999999999999 1.2 0.73417721518987333 0.61363636363636365 0.56521739130434778 0.47999999999999998 1 0.441176474 0.441176474 0.272727281 0.357142866 0.3809524 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.35714285714285715 0.38095238095238093 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.876212358 0.937044859 0.842527449 0.98199589949448451 0.98199589949448451 0.87621235531294217 0.93704484038170155 0.84252741493376582 0.504585147 0.504585147 0.214467555 0.620649636 0.6387745 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.62064960460061858 0.63877450359674082 +6 0.7594937 6 2.7 5.1 1.6 0.7594937 0.6136364 0.7391304 0.640000045 6 0.75949367088607578 6 2.7000000000000002 5.0999999999999996 1.6000000000000001 0.75949367088607578 0.61363636363636365 0.73913043478260865 0.64000000000000012 1 0.5 0.5 0.272727281 0.642857134 0.5714286 
0.5 0.5 0.27272727272727271 0.6428571428571429 0.5714285714285714 1 1 0 1 1 1 1 0 1 1 1.01585793 1.01585793 0.876212358 1.22536623 1.12336993 1.0158578270632599 1.0158578270632599 0.87621235531294217 1.2253663297299173 1.1233698865783546 0.5995771 0.5995771 0.214467555 0.777955949 0.7413043 0.59957706030964308 0.59957706030964308 0.21446754872116464 0.77795595083214475 0.74130430666213609 +5.4 0.683544338 5.4 3 4.5 1.5 0.683544338 0.6818181 0.6521739 0.6 5.4000000000000004 0.68354430379746833 5.4000000000000004 3 4.5 1.5 0.68354430379746833 0.68181818181818177 0.65217391304347827 0.60000000000000009 1 0.323529422 0.323529422 0.4090909 0.5 0.523809552 0.3235294117647059 0.3235294117647059 0.40909090909090912 0.5 0.52380952380952384 0.333333343 0.333333343 0.5 1 1 0.33333333333333331 0.33333333333333331 0.5 1 1 0.9142721 0.9142721 0.9735693 1.08120561 1.05315924 0.91427204435693399 0.91427204435693399 0.97356928368104678 1.0812055850558095 1.0531592686672073 0.3099612 0.3099612 0.480745673 0.7093809 0.719658256 0.30996111189240849 0.30996111189240849 0.4807456731235793 0.70938088629442786 0.71965826470413374 +6 0.7594937 6 3.4 4.5 1.6 0.7594937 0.772727251 0.6521739 0.640000045 6 0.75949367088607578 6 3.3999999999999999 4.5 1.6000000000000001 0.75949367088607578 0.77272727272727271 0.65217391304347827 0.64000000000000012 1 0.5 0.5 0.590909064 0.5 0.5714286 0.5 0.5 0.59090909090909094 0.5 0.5714285714285714 1 1 1 1 1 1 1 1 1 1 1.01585793 1.01585793 1.10337853 1.08120561 1.12336993 1.0158578270632599 1.0158578270632599 1.1033785215051863 1.0812055850558095 1.1233698865783546 0.5995771 0.5995771 0.797875941 0.7093809 0.7413043 0.59957706030964308 0.59957706030964308 0.79787580879856601 0.70938088629442786 0.74130430666213609 +6.7 0.848101258 6.7 3.1 4.7 1.5 0.848101258 0.7045454 0.6811594 0.6 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 4.7000000000000002 1.5 0.84810126582278467 0.70454545454545459 0.6811594202898551 0.60000000000000009 1 
0.7058824 0.7058824 0.454545468 0.547619045 0.523809552 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.54761904761904767 0.52380952380952384 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 1.0060215 1.12925911 1.05315924 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.1292591666138456 1.0531592686672073 0.84984225 0.84984225 0.5725629 0.734288335 0.719658256 0.84984228863096656 0.84984228863096656 0.5725628341629212 0.73428833770009005 0.71965826470413374 +6.3 0.797468364 6.3 2.3 4.4 1.3 0.797468364 0.522727251 0.6376811 0.52 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.2999999999999998 4.4000000000000004 1.3 0.79746835443037956 0.52272727272727271 0.63768115942028991 0.52000000000000002 1 0.5882353 0.5882353 0.09090909 0.476190478 0.428571433 0.58823529411764708 0.58823529411764708 0.090909090909090912 0.47619047619047616 0.42857142857142855 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.7464031 1.05717874 0.912738 1.0666507184164229 1.0666507184164229 0.7464031174888025 1.0571787942767916 0.91273803284491306 0.72531265 0.72531265 0.02727463 0.6960943 0.6687579 0.72531248018388961 0.72531248018388961 0.027274649605582402 0.69609423458217601 0.66875793094202918 +5.6 0.708860755 5.6 3 4.1 1.3 0.708860755 0.6818181 0.5942029 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 3 4.0999999999999996 1.3 0.70886075949367078 0.68181818181818177 0.59420289855072461 0.52000000000000002 1 0.382352948 0.382352948 0.4090909 0.4047619 0.428571433 0.38235294117647056 0.38235294117647056 0.40909090909090912 0.40476190476190477 0.42857142857142855 0.6666667 0.6666667 0.5 1 1 0.66666666666666663 0.66666666666666663 0.5 1 1 0.948134 0.948134 0.9735693 0.985098362 0.912738 0.94813397192570914 0.94813397192570914 0.97356928368104678 0.98509842193973751 0.91273803284491306 0.4060508 0.4060508 0.480745673 0.6526925 0.6687579 0.40605068921232229 0.40605068921232229 0.4807456731235793 0.65269250269310186 0.66875793094202918 +5.5 0.6962025 5.5 
2.5 4 1.3 0.6962025 0.5681818 0.5797101 0.52 5.5 0.69620253164556956 5.5 2.5 4 1.3 0.69620253164556956 0.56818181818181812 0.57971014492753625 0.52000000000000002 1 0.3529412 0.3529412 0.181818187 0.3809524 0.428571433 0.35294117647058826 0.35294117647058826 0.18181818181818182 0.38095238095238093 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.931203067 0.931203067 0.8113077 0.9610716 0.912738 0.93120300814132162 0.93120300814132162 0.81130773640087239 0.96107163116071959 0.91273803284491306 0.3573058 0.3573058 0.09116498 0.636991262 0.6687579 0.35730592590256216 0.35730592590256216 0.091164973250557446 0.63699126114775995 0.66875793094202918 +5.5 0.6962025 5.5 2.6 4.4 1.2 0.6962025 0.590909064 0.6376811 0.480000019 5.5 0.69620253164556956 5.5 2.6000000000000001 4.4000000000000004 1.2 0.69620253164556956 0.59090909090909094 0.63768115942028991 0.47999999999999998 1 0.3529412 0.3529412 0.227272734 0.476190478 0.3809524 0.35294117647058826 0.35294117647058826 0.22727272727272727 0.47619047619047616 0.38095238095238093 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.931203067 0.931203067 0.84376 1.05717874 0.842527449 0.93120300814132162 0.93120300814132162 0.84376004585690734 1.0571787942767916 0.84252741493376582 0.3573058 0.3573058 0.145245746 0.6960943 0.6387745 0.35730592590256216 0.35730592590256216 0.14524588521591403 0.69609423458217601 0.63877450359674082 +6.1 0.7721519 6.1 3 4.6 1.4 0.7721519 0.6818181 0.6666666 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 3 4.5999999999999996 1.3999999999999999 0.772151898734177 0.68181818181818177 0.66666666666666663 0.55999999999999994 1 0.5294118 0.5294118 0.4090909 0.523809552 0.476190478 0.52941176470588236 0.52941176470588236 0.40909090909090912 0.52380952380952384 0.47619047619047616 1 1 0.5 1 1 1 1 0.5 1 1 1.03278887 1.03278887 0.9735693 1.10523236 0.982948661 1.0327887908476474 1.0327887908476474 0.97356928368104678 
1.1052323758348275 0.98294865075606008 0.644171059 0.644171059 0.480745673 0.7221062 0.6955889 0.64417093822767468 0.64417093822767468 0.4807456731235793 0.72210618745756316 0.69558889835906823 +5.8 0.734177232 5.8 2.6 4 1.2 0.734177232 0.590909064 0.5797101 0.480000019 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.6000000000000001 4 1.2 0.73417721518987333 0.59090909090909094 0.57971014492753625 0.47999999999999998 1 0.441176474 0.441176474 0.227272734 0.3809524 0.3809524 0.44117647058823528 0.44117647058823528 0.22727272727272727 0.38095238095238093 0.38095238095238093 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.84376 0.9610716 0.842527449 0.98199589949448451 0.98199589949448451 0.84376004585690734 0.96107163116071959 0.84252741493376582 0.504585147 0.504585147 0.145245746 0.636991262 0.6387745 0.50458515122771275 0.50458515122771275 0.14524588521591403 0.63699126114775995 0.63877450359674082 +5 0.6329114 5 2.3 3.3 1 0.6329114 0.522727251 0.478260845 0.4 5 0.63291139240506322 5 2.2999999999999998 3.2999999999999998 1 0.63291139240506322 0.52272727272727271 0.47826086956521741 0.40000000000000002 1 0.205882356 0.205882356 0.09090909 0.238095239 0.2857143 0.20588235294117646 0.20588235294117646 0.090909090909090912 0.23809523809523808 0.2857142857142857 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.8465482 0.8465482 0.7464031 0.792884052 0.7021062 0.84654818921938324 0.84654818921938324 0.7464031174888025 0.79288409570759366 0.70210617911147155 0.14861621 0.14861621 0.02727463 0.508717 0.567488432 0.14861616399327332 0.14861616399327332 0.027274649605582402 0.50871703350008446 0.56748843907683133 +5.6 0.708860755 5.6 2.7 4.2 1.3 0.708860755 0.6136364 0.6086956 0.52 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.7000000000000002 4.2000000000000002 1.3 0.70886075949367078 0.61363636363636365 0.60869565217391308 0.52000000000000002 1 0.382352948 0.382352948 
0.272727281 0.428571433 0.428571433 0.38235294117647056 0.38235294117647056 0.27272727272727271 0.42857142857142855 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.948134 0.948134 0.876212358 1.00912511 0.912738 0.94813397192570914 0.94813397192570914 0.87621235531294217 1.0091252127187555 0.91273803284491306 0.4060508 0.4060508 0.214467555 0.667766631 0.6687579 0.40605068921232229 0.40605068921232229 0.21446754872116464 0.66776662351076677 0.66875793094202918 +5.7 0.721519 5.7 3 4.2 1.2 0.721519 0.6818181 0.6086956 0.480000019 5.7000000000000002 0.72151898734177211 5.7000000000000002 3 4.2000000000000002 1.2 0.72151898734177211 0.68181818181818177 0.60869565217391308 0.47999999999999998 1 0.4117647 0.4117647 0.4090909 0.428571433 0.3809524 0.41176470588235292 0.41176470588235292 0.40909090909090912 0.42857142857142855 0.38095238095238093 0.6666667 0.6666667 0.5 1 1 0.66666666666666663 0.66666666666666663 0.5 1 1 0.965064943 0.965064943 0.9735693 1.00912511 0.842527449 0.96506493571009688 0.96506493571009688 0.97356928368104678 1.0091252127187555 0.84252741493376582 0.455403626 0.455403626 0.480745673 0.667766631 0.6387745 0.45540375842690767 0.45540375842690767 0.4807456731235793 0.66776662351076677 0.63877450359674082 +5.7 0.721519 5.7 2.9 4.2 1.3 0.721519 0.659090936 0.6086956 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.8999999999999999 4.2000000000000002 1.3 0.72151898734177211 0.65909090909090906 0.60869565217391308 0.52000000000000002 1 0.4117647 0.4117647 0.363636374 0.428571433 0.428571433 0.41176470588235292 0.41176470588235292 0.36363636363636365 0.42857142857142855 0.42857142857142855 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.965064943 0.965064943 0.941117 1.00912511 0.912738 0.96506493571009688 0.96506493571009688 0.94111697422501195 1.0091252127187555 0.91273803284491306 0.455403626 0.455403626 0.386941522 0.667766631 0.6687579 0.45540375842690767 
0.45540375842690767 0.38694155923435009 0.66776662351076677 0.66875793094202918 +6.2 0.7848101 6.2 2.9 4.3 1.3 0.7848101 0.659090936 0.623188436 0.52 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.8999999999999999 4.2999999999999998 1.3 0.78481012658227833 0.65909090909090906 0.62318840579710144 0.52000000000000002 1 0.5588235 0.5588235 0.363636374 0.452380955 0.428571433 0.55882352941176472 0.55882352941176472 0.36363636363636365 0.45238095238095238 0.42857142857142855 1 1 0 1 1 1 1 0 1 1 1.04971981 1.04971981 0.941117 1.033152 0.912738 1.0497197546320352 1.0497197546320352 0.94111697422501195 1.0331520034977735 0.91273803284491306 0.6861942 0.6861942 0.386941522 0.682228565 0.6687579 0.6861941068147408 0.6861941068147408 0.38694155923435009 0.68222849726345214 0.66875793094202918 +5.1 0.6455696 5.1 2.5 3 1.1 0.6455696 0.5681818 0.4347826 0.440000027 5.0999999999999996 0.64556962025316444 5.0999999999999996 2.5 3 1.1000000000000001 0.64556962025316444 0.56818181818181812 0.43478260869565222 0.44000000000000006 1 0.235294119 0.235294119 0.181818187 0.214285716 0.333333343 0.23529411764705882 0.23529411764705882 0.18181818181818182 0.21428571428571427 0.33333333333333331 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.8634792 0.8634792 0.8113077 0.720803738 0.7723168 0.86347915300377087 0.86347915300377087 0.81130773640087239 0.72080372337053966 0.7723167970226188 0.183586776 0.183586776 0.09116498 0.4439562 0.605188966 0.18358681923369741 0.18358681923369741 0.091164973250557446 0.44395614729152527 0.60518894407556645 +5.7 0.721519 5.7 2.8 4.1 1.3 0.721519 0.6363636 0.5942029 0.52 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.7999999999999998 4.0999999999999996 1.3 0.72151898734177211 0.63636363636363635 0.59420289855072461 0.52000000000000002 1 0.4117647 0.4117647 0.3181818 0.4047619 0.428571433 0.41176470588235292 0.41176470588235292 0.31818181818181818 0.40476190476190477 0.42857142857142855 0.6666667 
0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.965064943 0.965064943 0.908664644 0.985098362 0.912738 0.96506493571009688 0.96506493571009688 0.908664664768977 0.98509842193973751 0.91273803284491306 0.455403626 0.455403626 0.296437562 0.6526925 0.6687579 0.45540375842690767 0.45540375842690767 0.29643751019242903 0.65269250269310186 0.66875793094202918 +6.3 0.797468364 6.3 3.3 6 2.5 0.797468364 0.74999994 0.8695652 1 6.2999999999999998 0.79746835443037956 6.2999999999999998 3.2999999999999998 6 2.5 0.79746835443037956 0.74999999999999989 0.86956521739130443 1 2 0.5882353 0.5882353 0.545454562 0.857142866 1 0.58823529411764708 0.58823529411764708 0.54545454545454541 0.8571428571428571 1 1 1 0.5 1 1 1 1 0.5 1 1 1.06665075 1.06665075 1.07092619 1.44160748 1.75526547 1.0666507184164229 1.0666507184164229 1.0709262120491514 1.4416074467410793 1.7552654477786789 0.72531265 0.72531265 0.733567655 0.851488352 0.8644717 0.72531248018388961 0.72531248018388961 0.73356785053506501 0.85148833619462083 0.86447170475057145 +5.8 0.734177232 5.8 2.7 5.1 1.9 0.734177232 0.6136364 0.7391304 0.76 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 5.0999999999999996 1.8999999999999999 0.73417721518987333 0.61363636363636365 0.73913043478260865 0.76000000000000001 2 0.441176474 0.441176474 0.272727281 0.642857134 0.714285731 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.6428571428571429 0.7142857142857143 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.876212358 1.22536623 1.33400178 0.98199589949448451 0.98199589949448451 0.87621235531294217 1.2253663297299173 1.3340017403117959 0.504585147 0.504585147 0.214467555 0.777955949 0.794432342 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.77795595083214475 0.7944323164067586 +7.1 0.898734152 7.1 3 5.9 2.1 0.898734152 0.6818181 0.855072439 0.84 7.0999999999999996 0.89873417721518967 7.0999999999999996 3 5.9000000000000004 
2.1000000000000001 0.89873417721518967 0.68181818181818177 0.85507246376811608 0.84000000000000008 2 0.8235294 0.8235294 0.4090909 0.8333333 0.8095238 0.82352941176470584 0.82352941176470584 0.40909090909090912 0.83333333333333337 0.80952380952380953 1 1 0.5 1 1 1 1 0.5 1 1 1.20209849 1.20209849 0.9735693 1.4175806 1.47442293 1.2020984286915242 1.2020984286915242 0.97356928368104678 1.4175806559620614 1.4744229761340903 0.9261487 0.9261487 0.480745673 0.8447406 0.8221373 0.92614864776751638 0.92614864776751638 0.4807456731235793 0.8447405985468005 0.82213728509573358 +6.3 0.797468364 6.3 2.9 5.6 1.8 0.797468364 0.659090936 0.8115942 0.719999969 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.8999999999999999 5.5999999999999996 1.8 0.79746835443037956 0.65909090909090906 0.81159420289855067 0.72000000000000008 2 0.5882353 0.5882353 0.363636374 0.7619048 0.6666667 0.58823529411764708 0.58823529411764708 0.36363636363636365 0.76190476190476186 0.66666666666666663 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.941117 1.34550023 1.26379108 1.0666507184164229 1.0666507184164229 0.94111697422501195 1.3455002836250074 1.2637911224006488 0.72531265 0.72531265 0.386941522 0.8225208 0.778455853 0.72531248018388961 0.72531248018388961 0.38694155923435009 0.82252078229590153 0.77845583371698512 +6.5 0.822784841 6.5 3 5.8 2.2 0.822784841 0.6818181 0.8405797 0.880000055 6.5 0.82278481012658211 6.5 3 5.7999999999999998 2.2000000000000002 0.82278481012658211 0.68181818181818177 0.84057971014492749 0.88000000000000012 2 0.647058845 0.647058845 0.4090909 0.8095238 0.857142866 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.80952380952380953 0.8571428571428571 1 1 0.5 1 1 1 1 0.5 1 1 1.10051274 1.10051274 0.9735693 1.39355385 1.54463363 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.3935538651830432 1.5446335940452376 0.7940583 0.7940583 0.480745673 0.837673366 0.834173143 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.83767331479677276 
0.83417310863648386 +7.6 0.9620253 7.6 3 6.6 2.1 0.9620253 0.6818181 0.9565217 0.84 7.5999999999999996 0.962025316455696 7.5999999999999996 3 6.5999999999999996 2.1000000000000001 0.962025316455696 0.68181818181818177 0.95652173913043481 0.84000000000000008 2 0.9411765 0.9411765 0.4090909 0.952380955 0.8095238 0.94117647058823528 0.94117647058823528 0.40909090909090912 0.95238095238095233 0.80952380952380953 1 1 0.5 1 1 1 1 0.5 1 1 1.2867533 1.2867533 0.9735693 1.5857681 1.47442293 1.2867532476134624 1.2867532476134624 0.97356928368104678 1.5857681914151873 1.4744229761340903 0.9733138 0.9733138 0.480745673 0.8860214 0.8221373 0.97331382055990801 0.97331382055990801 0.4807456731235793 0.88602142043286447 0.82213728509573358 +4.9 0.6202532 4.9 2.5 4.5 1.7 0.6202532 0.5681818 0.6521739 0.68 4.9000000000000004 0.620253164556962 4.9000000000000004 2.5 4.5 1.7 0.620253164556962 0.56818181818181812 0.65217391304347827 0.68000000000000005 2 0.1764706 0.1764706 0.181818187 0.5 0.619047642 0.17647058823529413 0.17647058823529413 0.18181818181818182 0.5 0.61904761904761907 0.333333343 0.333333343 0 1 1 0.33333333333333331 0.33333333333333331 0 1 1 0.829617262 0.829617262 0.8113077 1.08120561 1.19358051 0.82961722543499561 0.82961722543499561 0.81130773640087239 1.0812055850558095 1.1935805044895016 0.117838435 0.117838435 0.09116498 0.7093809 0.7608193 0.11783846996167147 0.11783846996167147 0.091164973250557446 0.70938088629442786 0.76081931222191046 +7.3 0.9240507 7.3 2.9 6.3 1.8 0.9240507 0.659090936 0.9130435 0.719999969 7.2999999999999998 0.92405063291139222 7.2999999999999998 2.8999999999999999 6.2999999999999998 1.8 0.92405063291139222 0.65909090909090906 0.91304347826086962 0.72000000000000008 2 0.882352948 0.882352948 0.363636374 0.9047619 0.6666667 0.88235294117647056 0.88235294117647056 0.36363636363636365 0.90476190476190477 0.66666666666666663 1 1 0 1 1 1 1 0 1 1 1.23596048 1.23596048 0.941117 1.51368785 1.26379108 1.2359603562602994 1.2359603562602994 
0.94111697422501195 1.5136878190781333 1.2637911224006488 0.950038552 0.950038552 0.386941522 0.869953454 0.778455853 0.95003851659070837 0.95003851659070837 0.38694155923435009 0.86995338291407664 0.77845583371698512 +6.7 0.848101258 6.7 2.5 5.8 1.8 0.848101258 0.5681818 0.8405797 0.719999969 6.7000000000000002 0.84810126582278467 6.7000000000000002 2.5 5.7999999999999998 1.8 0.84810126582278467 0.56818181818181812 0.84057971014492749 0.72000000000000008 2 0.7058824 0.7058824 0.181818187 0.8095238 0.6666667 0.70588235294117652 0.70588235294117652 0.18181818181818182 0.80952380952380953 0.66666666666666663 1 1 0 1 1 1 1 0 1 1 1.13437462 1.13437462 0.8113077 1.39355385 1.26379108 1.1343745735539736 1.1343745735539736 0.81130773640087239 1.3935538651830432 1.2637911224006488 0.84984225 0.84984225 0.09116498 0.837673366 0.778455853 0.84984228863096656 0.84984228863096656 0.091164973250557446 0.83767331479677276 0.77845583371698512 +7.2 0.9113924 7.2 3.6 6.1 2.5 0.9113924 0.818181753 0.884057939 1 7.2000000000000002 0.911392405063291 7.2000000000000002 3.6000000000000001 6.0999999999999996 2.5 0.911392405063291 0.81818181818181812 0.88405797101449268 1 2 0.852941155 0.852941155 0.6818182 0.880952358 1 0.8529411764705882 0.8529411764705882 0.68181818181818177 0.88095238095238093 1 1 1 1 1 1 1 1 1 1 1 1.21902943 1.21902943 1.1682831 1.46563423 1.75526547 1.2190293924759119 1.2190293924759119 1.1682831404172562 1.4656342375200972 1.7552654477786789 0.939083755 0.939083755 0.8919594 0.8579307 0.8644717 0.93908371694631576 0.93908371694631576 0.89195941323598249 0.85793070191741871 0.86447170475057145 +6.5 0.822784841 6.5 3.2 5.1 2 0.822784841 0.7272727 0.7391304 0.8 6.5 0.82278481012658211 6.5 3.2000000000000002 5.0999999999999996 2 0.82278481012658211 0.72727272727272729 0.73913043478260865 0.80000000000000004 2 0.647058845 0.647058845 0.5 0.642857134 0.7619048 0.6470588235294118 0.6470588235294118 0.5 0.6428571428571429 0.76190476190476186 1 1 0.5 1 1 1 1 0.5 1 1 
1.10051274 1.10051274 1.038474 1.22536623 1.40421236 1.1005126459851982 1.1005126459851982 1.0384739025931167 1.2253663297299173 1.4042123582229431 0.7940583 0.7940583 0.6578964 0.777955949 0.808938 0.79405825408863862 0.79405825408863862 0.65789648182451921 0.77795595083214475 0.80893802463851161 +6.4 0.8101266 6.4 2.7 5.3 1.9 0.8101266 0.6136364 0.768115938 0.76 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7000000000000002 5.2999999999999998 1.8999999999999999 0.81012658227848089 0.61363636363636365 0.76811594202898548 0.76000000000000001 2 0.617647052 0.617647052 0.272727281 0.6904762 0.714285731 0.61764705882352944 0.61764705882352944 0.27272727272727271 0.69047619047619047 0.7142857142857143 1 1 0 1 1 1 1 0 1 1 1.08358181 1.08358181 0.876212358 1.27342 1.33400178 1.0835816822008106 1.0835816822008106 0.87621235531294217 1.2734199112879534 1.3340017403117959 0.7613054 0.7613054 0.214467555 0.797011137 0.794432342 0.76130542616799635 0.76130542616799635 0.21446754872116464 0.79701110606800918 0.7944323164067586 +6.8 0.860759556 6.8 3 5.5 2.1 0.860759556 0.6818181 0.797101438 0.84 6.7999999999999998 0.86075949367088589 6.7999999999999998 3 5.5 2.1000000000000001 0.86075949367088589 0.68181818181818177 0.79710144927536231 0.84000000000000008 2 0.7352941 0.7352941 0.4090909 0.7380952 0.8095238 0.73529411764705888 0.73529411764705888 0.40909090909090912 0.73809523809523814 0.80952380952380953 1 1 0.5 1 1 1 1 0.5 1 1 1.15130568 1.15130568 0.9735693 1.32147348 1.47442293 1.1513055373383612 1.1513055373383612 0.97356928368104678 1.3214734928459895 1.4744229761340903 0.873058 0.873058 0.480745673 0.814404547 0.8221373 0.87305788341059976 0.87305788341059976 0.4807456731235793 0.81440457252843534 0.82213728509573358 +5.7 0.721519 5.7 2.5 5 2 0.721519 0.5681818 0.7246376 0.8 5.7000000000000002 0.72151898734177211 5.7000000000000002 2.5 5 2 0.72151898734177211 0.56818181818181812 0.72463768115942029 0.80000000000000004 2 0.4117647 0.4117647 0.181818187 
0.619047642 0.7619048 0.41176470588235292 0.41176470588235292 0.18181818181818182 0.61904761904761907 0.76190476190476186 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.965064943 0.965064943 0.8113077 1.20133948 1.40421236 0.96506493571009688 0.96506493571009688 0.81130773640087239 1.2013395389508994 1.4042123582229431 0.455403626 0.455403626 0.09116498 0.7677612 0.808938 0.45540375842690767 0.45540375842690767 0.091164973250557446 0.76776110588492996 0.80893802463851161 +5.8 0.734177232 5.8 2.8 5.1 2.4 0.734177232 0.6363636 0.7391304 0.960000038 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7999999999999998 5.0999999999999996 2.3999999999999999 0.73417721518987333 0.63636363636363635 0.73913043478260865 0.95999999999999996 2 0.441176474 0.441176474 0.3181818 0.642857134 0.952380955 0.44117647058823528 0.44117647058823528 0.31818181818181818 0.6428571428571429 0.95238095238095233 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.908664644 1.22536623 1.6850549 0.98199589949448451 0.98199589949448451 0.908664664768977 1.2253663297299173 1.6850548298675316 0.504585147 0.504585147 0.296437562 0.777955949 0.8552379 0.50458515122771275 0.50458515122771275 0.29643751019242903 0.77795595083214475 0.85523789511433224 +6.4 0.8101266 6.4 3.2 5.3 2.3 0.8101266 0.7272727 0.768115938 0.92 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.2000000000000002 5.2999999999999998 2.2999999999999998 0.81012658227848089 0.72727272727272729 0.76811594202898548 0.91999999999999993 2 0.617647052 0.617647052 0.5 0.6904762 0.9047619 0.61764705882352944 0.61764705882352944 0.5 0.69047619047619047 0.90476190476190477 1 1 0.5 1 1 1 1 0.5 1 1 1.08358181 1.08358181 1.038474 1.27342 1.6148442 1.0835816822008106 1.0835816822008106 1.0384739025931167 1.2734199112879534 1.6148442119563844 0.7613054 0.7613054 0.6578964 0.797011137 0.845170259 0.76130542616799635 0.76130542616799635 0.65789648182451921 
0.79701110606800918 0.84517026625737923 +6.5 0.822784841 6.5 3 5.5 1.8 0.822784841 0.6818181 0.797101438 0.719999969 6.5 0.82278481012658211 6.5 3 5.5 1.8 0.82278481012658211 0.68181818181818177 0.79710144927536231 0.72000000000000008 2 0.647058845 0.647058845 0.4090909 0.7380952 0.6666667 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.73809523809523814 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 1.10051274 1.10051274 0.9735693 1.32147348 1.26379108 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.3214734928459895 1.2637911224006488 0.7940583 0.7940583 0.480745673 0.814404547 0.778455853 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.81440457252843534 0.77845583371698512 +7.7 0.9746835 7.7 3.8 6.7 2.2 0.9746835 0.8636363 0.97101444 0.880000055 7.7000000000000002 0.97468354430379733 7.7000000000000002 3.7999999999999998 6.7000000000000002 2.2000000000000002 0.97468354430379733 0.86363636363636354 0.97101449275362328 0.88000000000000012 2 0.9705882 0.9705882 0.772727251 0.976190448 0.857142866 0.97058823529411764 0.97058823529411764 0.77272727272727271 0.97619047619047616 0.8571428571428571 1 1 1 1 1 1 1 1 1 1 1.30368423 1.30368423 1.23318768 1.60979486 1.54463363 1.3036842113978502 1.3036842113978502 1.2331877593293259 1.6097949821942052 1.5446335940452376 0.9785672 0.9785672 0.9472266 0.8909001 0.834173143 0.97856725002805034 0.97856725002805034 0.94722658326235554 0.89090007639933866 0.83417310863648386 +7.7 0.9746835 7.7 2.6 6.9 2.3 0.9746835 0.590909064 1 0.92 7.7000000000000002 0.97468354430379733 7.7000000000000002 2.6000000000000001 6.9000000000000004 2.2999999999999998 0.97468354430379733 0.59090909090909094 1 0.91999999999999993 2 0.9705882 0.9705882 0.227272734 1 0.9047619 0.97058823529411764 0.97058823529411764 0.22727272727272727 1 0.90476190476190477 1 1 0 1 1 1 1 0 1 1 1.30368423 1.30368423 0.84376 1.6578486 1.6148442 1.3036842113978502 1.3036842113978502 0.84376004585690734 1.6578485637522413 1.6148442119563844 
0.9785672 0.9785672 0.145245746 0.900005937 0.845170259 0.97856725002805034 0.97856725002805034 0.14524588521591403 0.90000590086073773 0.84517026625737923 +6 0.7594937 6 2.2 5 1.5 0.7594937 0.5 0.7246376 0.6 6 0.75949367088607578 6 2.2000000000000002 5 1.5 0.75949367088607578 0.5 0.72463768115942029 0.60000000000000009 2 0.5 0.5 0.0454545468 0.619047642 0.523809552 0.5 0.5 0.045454545454545456 0.61904761904761907 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.01585793 1.01585793 0.7139508 1.20133948 1.05315924 1.0158578270632599 1.0158578270632599 0.71395080803276778 1.2013395389508994 1.0531592686672073 0.5995771 0.5995771 0.0126449876 0.7677612 0.719658256 0.59957706030964308 0.59957706030964308 0.012644988485085273 0.76776110588492996 0.71965826470413374 +6.9 0.873417735 6.9 3.2 5.7 2.3 0.873417735 0.7272727 0.8260869 0.92 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.2000000000000002 5.7000000000000002 2.2999999999999998 0.87341772151898722 0.72727272727272729 0.82608695652173914 0.91999999999999993 2 0.7647059 0.7647059 0.5 0.785714269 0.9047619 0.76470588235294112 0.76470588235294112 0.5 0.7857142857142857 0.90476190476190477 1 1 0.5 1 1 1 1 0.5 1 1 1.16823661 1.16823661 1.038474 1.369527 1.6148442 1.1682365011227489 1.1682365011227489 1.0384739025931167 1.3695270744040255 1.6148442119563844 0.8933714 0.8933714 0.6578964 0.8302718 0.845170259 0.89337135404275458 0.89337135404275458 0.65789648182451921 0.83027178393794032 0.84517026625737923 +5.6 0.708860755 5.6 2.8 4.9 2 0.708860755 0.6363636 0.710144937 0.8 5.5999999999999996 0.70886075949367078 5.5999999999999996 2.7999999999999998 4.9000000000000004 2 0.70886075949367078 0.63636363636363635 0.71014492753623193 0.80000000000000004 2 0.382352948 0.382352948 0.3181818 0.5952381 0.7619048 0.38235294117647056 0.38235294117647056 0.31818181818181818 0.59523809523809523 0.76190476190476186 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.948134 0.948134 0.908664644 1.17731273 
1.40421236 0.94813397192570914 0.94813397192570914 0.908664664768977 1.1773127481718815 1.4042123582229431 0.4060508 0.4060508 0.296437562 0.757097244 0.808938 0.40605068921232229 0.40605068921232229 0.29643751019242903 0.75709721026728072 0.80893802463851161 +7.7 0.9746835 7.7 2.8 6.7 2 0.9746835 0.6363636 0.97101444 0.8 7.7000000000000002 0.97468354430379733 7.7000000000000002 2.7999999999999998 6.7000000000000002 2 0.97468354430379733 0.63636363636363635 0.97101449275362328 0.80000000000000004 2 0.9705882 0.9705882 0.3181818 0.976190448 0.7619048 0.97058823529411764 0.97058823529411764 0.31818181818181818 0.97619047619047616 0.76190476190476186 1 1 0 1 1 1 1 0 1 1 1.30368423 1.30368423 0.908664644 1.60979486 1.40421236 1.3036842113978502 1.3036842113978502 0.908664664768977 1.6097949821942052 1.4042123582229431 0.9785672 0.9785672 0.296437562 0.8909001 0.808938 0.97856725002805034 0.97856725002805034 0.29643751019242903 0.89090007639933866 0.80893802463851161 +6.3 0.797468364 6.3 2.7 4.9 1.8 0.797468364 0.6136364 0.710144937 0.719999969 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.7000000000000002 4.9000000000000004 1.8 0.79746835443037956 0.61363636363636365 0.71014492753623193 0.72000000000000008 2 0.5882353 0.5882353 0.272727281 0.5952381 0.6666667 0.58823529411764708 0.58823529411764708 0.27272727272727271 0.59523809523809523 0.66666666666666663 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.876212358 1.17731273 1.26379108 1.0666507184164229 1.0666507184164229 0.87621235531294217 1.1773127481718815 1.2637911224006488 0.72531265 0.72531265 0.214467555 0.757097244 0.778455853 0.72531248018388961 0.72531248018388961 0.21446754872116464 0.75709721026728072 0.77845583371698512 +6.7 0.848101258 6.7 3.3 5.7 2.1 0.848101258 0.74999994 0.8260869 0.84 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.2999999999999998 5.7000000000000002 2.1000000000000001 0.84810126582278467 0.74999999999999989 0.82608695652173914 0.84000000000000008 2 0.7058824 
0.7058824 0.545454562 0.785714269 0.8095238 0.70588235294117652 0.70588235294117652 0.54545454545454541 0.7857142857142857 0.80952380952380953 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 1.07092619 1.369527 1.47442293 1.1343745735539736 1.1343745735539736 1.0709262120491514 1.3695270744040255 1.4744229761340903 0.84984225 0.84984225 0.733567655 0.8302718 0.8221373 0.84984228863096656 0.84984228863096656 0.73356785053506501 0.83027178393794032 0.82213728509573358 +7.2 0.9113924 7.2 3.2 6 1.8 0.9113924 0.7272727 0.8695652 0.719999969 7.2000000000000002 0.911392405063291 7.2000000000000002 3.2000000000000002 6 1.8 0.911392405063291 0.72727272727272729 0.86956521739130443 0.72000000000000008 2 0.852941155 0.852941155 0.5 0.857142866 0.6666667 0.8529411764705882 0.8529411764705882 0.5 0.8571428571428571 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 1.21902943 1.21902943 1.038474 1.44160748 1.26379108 1.2190293924759119 1.2190293924759119 1.0384739025931167 1.4416074467410793 1.2637911224006488 0.939083755 0.939083755 0.6578964 0.851488352 0.778455853 0.93908371694631576 0.93908371694631576 0.65789648182451921 0.85148833619462083 0.77845583371698512 +6.2 0.7848101 6.2 2.8 4.8 1.8 0.7848101 0.6363636 0.6956522 0.719999969 6.2000000000000002 0.78481012658227833 6.2000000000000002 2.7999999999999998 4.7999999999999998 1.8 0.78481012658227833 0.63636363636363635 0.69565217391304346 0.72000000000000008 2 0.5588235 0.5588235 0.3181818 0.5714286 0.6666667 0.55882352941176472 0.55882352941176472 0.31818181818181818 0.5714285714285714 0.66666666666666663 1 1 0 1 1 1 1 0 1 1 1.04971981 1.04971981 0.908664644 1.153286 1.26379108 1.0497197546320352 1.0497197546320352 0.908664664768977 1.1532859573928635 1.2637911224006488 0.6861942 0.6861942 0.296437562 0.7459458 0.778455853 0.6861941068147408 0.6861941068147408 0.29643751019242903 0.74594581373449664 0.77845583371698512 +6.1 0.7721519 6.1 3 4.9 1.8 0.7721519 0.6818181 0.710144937 0.719999969 6.0999999999999996 0.772151898734177 
6.0999999999999996 3 4.9000000000000004 1.8 0.772151898734177 0.68181818181818177 0.71014492753623193 0.72000000000000008 2 0.5294118 0.5294118 0.4090909 0.5952381 0.6666667 0.52941176470588236 0.52941176470588236 0.40909090909090912 0.59523809523809523 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 1.03278887 1.03278887 0.9735693 1.17731273 1.26379108 1.0327887908476474 1.0327887908476474 0.97356928368104678 1.1773127481718815 1.2637911224006488 0.644171059 0.644171059 0.480745673 0.757097244 0.778455853 0.64417093822767468 0.64417093822767468 0.4807456731235793 0.75709721026728072 0.77845583371698512 +6.4 0.8101266 6.4 2.8 5.6 2.1 0.8101266 0.6363636 0.8115942 0.84 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7999999999999998 5.5999999999999996 2.1000000000000001 0.81012658227848089 0.63636363636363635 0.81159420289855067 0.84000000000000008 2 0.617647052 0.617647052 0.3181818 0.7619048 0.8095238 0.61764705882352944 0.61764705882352944 0.31818181818181818 0.76190476190476186 0.80952380952380953 1 1 0 1 1 1 1 0 1 1 1.08358181 1.08358181 0.908664644 1.34550023 1.47442293 1.0835816822008106 1.0835816822008106 0.908664664768977 1.3455002836250074 1.4744229761340903 0.7613054 0.7613054 0.296437562 0.8225208 0.8221373 0.76130542616799635 0.76130542616799635 0.29643751019242903 0.82252078229590153 0.82213728509573358 +7.2 0.9113924 7.2 3 5.8 1.6 0.9113924 0.6818181 0.8405797 0.640000045 7.2000000000000002 0.911392405063291 7.2000000000000002 3 5.7999999999999998 1.6000000000000001 0.911392405063291 0.68181818181818177 0.84057971014492749 0.64000000000000012 2 0.852941155 0.852941155 0.4090909 0.8095238 0.5714286 0.8529411764705882 0.8529411764705882 0.40909090909090912 0.80952380952380953 0.5714285714285714 1 1 0.5 1 1 1 1 0.5 1 1 1.21902943 1.21902943 0.9735693 1.39355385 1.12336993 1.2190293924759119 1.2190293924759119 0.97356928368104678 1.3935538651830432 1.1233698865783546 0.939083755 0.939083755 0.480745673 0.837673366 0.7413043 0.93908371694631576 
0.93908371694631576 0.4807456731235793 0.83767331479677276 0.74130430666213609 +7.4 0.936708868 7.4 2.8 6.1 1.9 0.936708868 0.6363636 0.884057939 0.76 7.4000000000000004 0.93670886075949356 7.4000000000000004 2.7999999999999998 6.0999999999999996 1.8999999999999999 0.93670886075949356 0.63636363636363635 0.88405797101449268 0.76000000000000001 2 0.9117647 0.9117647 0.3181818 0.880952358 0.714285731 0.91176470588235292 0.91176470588235292 0.31818181818181818 0.88095238095238093 0.7142857142857143 1 1 0 1 1 1 1 0 1 1 1.25289142 1.25289142 0.908664644 1.46563423 1.33400178 1.2528913200446872 1.2528913200446872 0.908664664768977 1.4656342375200972 1.3340017403117959 0.959248662 0.959248662 0.296437562 0.8579307 0.794432342 0.95924858044400851 0.95924858044400851 0.29643751019242903 0.85793070191741871 0.7944323164067586 +7.9 1 7.9 3.8 6.4 2 1 0.8636363 0.9275362 0.8 7.9000000000000004 0.99999999999999989 7.9000000000000004 3.7999999999999998 6.4000000000000004 2 0.99999999999999989 0.86363636363636354 0.92753623188405809 0.80000000000000004 2 1 1 0.772727251 0.9285714 0.7619048 1 1 0.77272727272727271 0.9285714285714286 0.76190476190476186 1 1 1 1 1 1 1 1 1 1 1.33754623 1.33754623 1.23318768 1.5377146 1.40421236 1.3375461389666257 1.3375461389666257 1.2331877593293259 1.5377146098571515 1.4042123582229431 0.9863704 0.9863704 0.9472266 0.875559449 0.808938 0.98637034929396195 0.98637034929396195 0.94722658326235554 0.87555942778912454 0.80893802463851161 +6.4 0.8101266 6.4 2.8 5.6 2.2 0.8101266 0.6363636 0.8115942 0.880000055 6.4000000000000004 0.81012658227848089 6.4000000000000004 2.7999999999999998 5.5999999999999996 2.2000000000000002 0.81012658227848089 0.63636363636363635 0.81159420289855067 0.88000000000000012 2 0.617647052 0.617647052 0.3181818 0.7619048 0.857142866 0.61764705882352944 0.61764705882352944 0.31818181818181818 0.76190476190476186 0.8571428571428571 1 1 0 1 1 1 1 0 1 1 1.08358181 1.08358181 0.908664644 1.34550023 1.54463363 1.0835816822008106 
1.0835816822008106 0.908664664768977 1.3455002836250074 1.5446335940452376 0.7613054 0.7613054 0.296437562 0.8225208 0.834173143 0.76130542616799635 0.76130542616799635 0.29643751019242903 0.82252078229590153 0.83417310863648386 +6.3 0.797468364 6.3 2.8 5.1 1.5 0.797468364 0.6363636 0.7391304 0.6 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.7999999999999998 5.0999999999999996 1.5 0.79746835443037956 0.63636363636363635 0.73913043478260865 0.60000000000000009 2 0.5882353 0.5882353 0.3181818 0.642857134 0.523809552 0.58823529411764708 0.58823529411764708 0.31818181818181818 0.6428571428571429 0.52380952380952384 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.908664644 1.22536623 1.05315924 1.0666507184164229 1.0666507184164229 0.908664664768977 1.2253663297299173 1.0531592686672073 0.72531265 0.72531265 0.296437562 0.777955949 0.719658256 0.72531248018388961 0.72531248018388961 0.29643751019242903 0.77795595083214475 0.71965826470413374 +6.1 0.7721519 6.1 2.6 5.6 1.4 0.7721519 0.590909064 0.8115942 0.56 6.0999999999999996 0.772151898734177 6.0999999999999996 2.6000000000000001 5.5999999999999996 1.3999999999999999 0.772151898734177 0.59090909090909094 0.81159420289855067 0.55999999999999994 2 0.5294118 0.5294118 0.227272734 0.7619048 0.476190478 0.52941176470588236 0.52941176470588236 0.22727272727272727 0.76190476190476186 0.47619047619047616 1 1 0 1 1 1 1 0 1 1 1.03278887 1.03278887 0.84376 1.34550023 0.982948661 1.0327887908476474 1.0327887908476474 0.84376004585690734 1.3455002836250074 0.98294865075606008 0.644171059 0.644171059 0.145245746 0.8225208 0.6955889 0.64417093822767468 0.64417093822767468 0.14524588521591403 0.82252078229590153 0.69558889835906823 +7.7 0.9746835 7.7 3 6.1 2.3 0.9746835 0.6818181 0.884057939 0.92 7.7000000000000002 0.97468354430379733 7.7000000000000002 3 6.0999999999999996 2.2999999999999998 0.97468354430379733 0.68181818181818177 0.88405797101449268 0.91999999999999993 2 0.9705882 0.9705882 0.4090909 0.880952358 0.9047619 
0.97058823529411764 0.97058823529411764 0.40909090909090912 0.88095238095238093 0.90476190476190477 1 1 0.5 1 1 1 1 0.5 1 1 1.30368423 1.30368423 0.9735693 1.46563423 1.6148442 1.3036842113978502 1.3036842113978502 0.97356928368104678 1.4656342375200972 1.6148442119563844 0.9785672 0.9785672 0.480745673 0.8579307 0.845170259 0.97856725002805034 0.97856725002805034 0.4807456731235793 0.85793070191741871 0.84517026625737923 +6.3 0.797468364 6.3 3.4 5.6 2.4 0.797468364 0.772727251 0.8115942 0.960000038 6.2999999999999998 0.79746835443037956 6.2999999999999998 3.3999999999999999 5.5999999999999996 2.3999999999999999 0.79746835443037956 0.77272727272727271 0.81159420289855067 0.95999999999999996 2 0.5882353 0.5882353 0.590909064 0.7619048 0.952380955 0.58823529411764708 0.58823529411764708 0.59090909090909094 0.76190476190476186 0.95238095238095233 1 1 1 1 1 1 1 1 1 1 1.06665075 1.06665075 1.10337853 1.34550023 1.6850549 1.0666507184164229 1.0666507184164229 1.1033785215051863 1.3455002836250074 1.6850548298675316 0.72531265 0.72531265 0.797875941 0.8225208 0.8552379 0.72531248018388961 0.72531248018388961 0.79787580879856601 0.82252078229590153 0.85523789511433224 +6.4 0.8101266 6.4 3.1 5.5 1.8 0.8101266 0.7045454 0.797101438 0.719999969 6.4000000000000004 0.81012658227848089 6.4000000000000004 3.1000000000000001 5.5 1.8 0.81012658227848089 0.70454545454545459 0.79710144927536231 0.72000000000000008 2 0.617647052 0.617647052 0.454545468 0.7380952 0.6666667 0.61764705882352944 0.61764705882352944 0.45454545454545453 0.73809523809523814 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 1.08358181 1.08358181 1.0060215 1.32147348 1.26379108 1.0835816822008106 1.0835816822008106 1.0060215931370817 1.3214734928459895 1.2637911224006488 0.7613054 0.7613054 0.5725629 0.814404547 0.778455853 0.76130542616799635 0.76130542616799635 0.5725628341629212 0.81440457252843534 0.77845583371698512 +6 0.7594937 6 3 4.8 1.8 0.7594937 0.6818181 0.6956522 0.719999969 6 0.75949367088607578 6 3 
4.7999999999999998 1.8 0.75949367088607578 0.68181818181818177 0.69565217391304346 0.72000000000000008 2 0.5 0.5 0.4090909 0.5714286 0.6666667 0.5 0.5 0.40909090909090912 0.5714285714285714 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 1.01585793 1.01585793 0.9735693 1.153286 1.26379108 1.0158578270632599 1.0158578270632599 0.97356928368104678 1.1532859573928635 1.2637911224006488 0.5995771 0.5995771 0.480745673 0.7459458 0.778455853 0.59957706030964308 0.59957706030964308 0.4807456731235793 0.74594581373449664 0.77845583371698512 +6.9 0.873417735 6.9 3.1 5.4 2.1 0.873417735 0.7045454 0.7826087 0.84 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 5.4000000000000004 2.1000000000000001 0.87341772151898722 0.70454545454545459 0.78260869565217395 0.84000000000000008 2 0.7647059 0.7647059 0.454545468 0.714285731 0.8095238 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.7142857142857143 0.80952380952380953 1 1 0.5 1 1 1 1 0.5 1 1 1.16823661 1.16823661 1.0060215 1.29744673 1.47442293 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.2974467020669715 1.4744229761340903 0.8933714 0.8933714 0.5725629 0.805906951 0.8221373 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.80590691835886541 0.82213728509573358 +6.7 0.848101258 6.7 3.1 5.6 2.4 0.848101258 0.7045454 0.8115942 0.960000038 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.1000000000000001 5.5999999999999996 2.3999999999999999 0.84810126582278467 0.70454545454545459 0.81159420289855067 0.95999999999999996 2 0.7058824 0.7058824 0.454545468 0.7619048 0.952380955 0.70588235294117652 0.70588235294117652 0.45454545454545453 0.76190476190476186 0.95238095238095233 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 1.0060215 1.34550023 1.6850549 1.1343745735539736 1.1343745735539736 1.0060215931370817 1.3455002836250074 1.6850548298675316 0.84984225 0.84984225 0.5725629 0.8225208 0.8552379 0.84984228863096656 0.84984228863096656 0.5725628341629212 
0.82252078229590153 0.85523789511433224 +6.9 0.873417735 6.9 3.1 5.1 2.3 0.873417735 0.7045454 0.7391304 0.92 6.9000000000000004 0.87341772151898722 6.9000000000000004 3.1000000000000001 5.0999999999999996 2.2999999999999998 0.87341772151898722 0.70454545454545459 0.73913043478260865 0.91999999999999993 2 0.7647059 0.7647059 0.454545468 0.642857134 0.9047619 0.76470588235294112 0.76470588235294112 0.45454545454545453 0.6428571428571429 0.90476190476190477 1 1 0.5 1 1 1 1 0.5 1 1 1.16823661 1.16823661 1.0060215 1.22536623 1.6148442 1.1682365011227489 1.1682365011227489 1.0060215931370817 1.2253663297299173 1.6148442119563844 0.8933714 0.8933714 0.5725629 0.777955949 0.845170259 0.89337135404275458 0.89337135404275458 0.5725628341629212 0.77795595083214475 0.84517026625737923 +5.8 0.734177232 5.8 2.7 5.1 1.9 0.734177232 0.6136364 0.7391304 0.76 5.7999999999999998 0.73417721518987333 5.7999999999999998 2.7000000000000002 5.0999999999999996 1.8999999999999999 0.73417721518987333 0.61363636363636365 0.73913043478260865 0.76000000000000001 2 0.441176474 0.441176474 0.272727281 0.642857134 0.714285731 0.44117647058823528 0.44117647058823528 0.27272727272727271 0.6428571428571429 0.7142857142857143 0.6666667 0.6666667 0 1 1 0.66666666666666663 0.66666666666666663 0 1 1 0.981996 0.981996 0.876212358 1.22536623 1.33400178 0.98199589949448451 0.98199589949448451 0.87621235531294217 1.2253663297299173 1.3340017403117959 0.504585147 0.504585147 0.214467555 0.777955949 0.794432342 0.50458515122771275 0.50458515122771275 0.21446754872116464 0.77795595083214475 0.7944323164067586 +6.8 0.860759556 6.8 3.2 5.9 2.3 0.860759556 0.7272727 0.855072439 0.92 6.7999999999999998 0.86075949367088589 6.7999999999999998 3.2000000000000002 5.9000000000000004 2.2999999999999998 0.86075949367088589 0.72727272727272729 0.85507246376811608 0.91999999999999993 2 0.7352941 0.7352941 0.5 0.8333333 0.9047619 0.73529411764705888 0.73529411764705888 0.5 0.83333333333333337 0.90476190476190477 1 1 0.5 1 1 
1 1 0.5 1 1 1.15130568 1.15130568 1.038474 1.4175806 1.6148442 1.1513055373383612 1.1513055373383612 1.0384739025931167 1.4175806559620614 1.6148442119563844 0.873058 0.873058 0.6578964 0.8447406 0.845170259 0.87305788341059976 0.87305788341059976 0.65789648182451921 0.8447405985468005 0.84517026625737923 +6.7 0.848101258 6.7 3.3 5.7 2.5 0.848101258 0.74999994 0.8260869 1 6.7000000000000002 0.84810126582278467 6.7000000000000002 3.2999999999999998 5.7000000000000002 2.5 0.84810126582278467 0.74999999999999989 0.82608695652173914 1 2 0.7058824 0.7058824 0.545454562 0.785714269 1 0.70588235294117652 0.70588235294117652 0.54545454545454541 0.7857142857142857 1 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 1.07092619 1.369527 1.75526547 1.1343745735539736 1.1343745735539736 1.0709262120491514 1.3695270744040255 1.7552654477786789 0.84984225 0.84984225 0.733567655 0.8302718 0.8644717 0.84984228863096656 0.84984228863096656 0.73356785053506501 0.83027178393794032 0.86447170475057145 +6.7 0.848101258 6.7 3 5.2 2.3 0.848101258 0.6818181 0.7536231 0.92 6.7000000000000002 0.84810126582278467 6.7000000000000002 3 5.2000000000000002 2.2999999999999998 0.84810126582278467 0.68181818181818177 0.75362318840579712 0.91999999999999993 2 0.7058824 0.7058824 0.4090909 0.6666667 0.9047619 0.70588235294117652 0.70588235294117652 0.40909090909090912 0.66666666666666663 0.90476190476190477 1 1 0.5 1 1 1 1 0.5 1 1 1.13437462 1.13437462 0.9735693 1.24939311 1.6148442 1.1343745735539736 1.1343745735539736 0.97356928368104678 1.2493931205089355 1.6148442119563844 0.84984225 0.84984225 0.480745673 0.7877 0.845170259 0.84984228863096656 0.84984228863096656 0.4807456731235793 0.78769997391633806 0.84517026625737923 +6.3 0.797468364 6.3 2.5 5 1.9 0.797468364 0.5681818 0.7246376 0.76 6.2999999999999998 0.79746835443037956 6.2999999999999998 2.5 5 1.8999999999999999 0.79746835443037956 0.56818181818181812 0.72463768115942029 0.76000000000000001 2 0.5882353 0.5882353 0.181818187 0.619047642 
0.714285731 0.58823529411764708 0.58823529411764708 0.18181818181818182 0.61904761904761907 0.7142857142857143 1 1 0 1 1 1 1 0 1 1 1.06665075 1.06665075 0.8113077 1.20133948 1.33400178 1.0666507184164229 1.0666507184164229 0.81130773640087239 1.2013395389508994 1.3340017403117959 0.72531265 0.72531265 0.09116498 0.7677612 0.794432342 0.72531248018388961 0.72531248018388961 0.091164973250557446 0.76776110588492996 0.7944323164067586 +6.5 0.822784841 6.5 3 5.2 2 0.822784841 0.6818181 0.7536231 0.8 6.5 0.82278481012658211 6.5 3 5.2000000000000002 2 0.82278481012658211 0.68181818181818177 0.75362318840579712 0.80000000000000004 2 0.647058845 0.647058845 0.4090909 0.6666667 0.7619048 0.6470588235294118 0.6470588235294118 0.40909090909090912 0.66666666666666663 0.76190476190476186 1 1 0.5 1 1 1 1 0.5 1 1 1.10051274 1.10051274 0.9735693 1.24939311 1.40421236 1.1005126459851982 1.1005126459851982 0.97356928368104678 1.2493931205089355 1.4042123582229431 0.7940583 0.7940583 0.480745673 0.7877 0.808938 0.79405825408863862 0.79405825408863862 0.4807456731235793 0.78769997391633806 0.80893802463851161 +6.2 0.7848101 6.2 3.4 5.4 2.3 0.7848101 0.772727251 0.7826087 0.92 6.2000000000000002 0.78481012658227833 6.2000000000000002 3.3999999999999999 5.4000000000000004 2.2999999999999998 0.78481012658227833 0.77272727272727271 0.78260869565217395 0.91999999999999993 2 0.5588235 0.5588235 0.590909064 0.714285731 0.9047619 0.55882352941176472 0.55882352941176472 0.59090909090909094 0.7142857142857143 0.90476190476190477 1 1 1 1 1 1 1 1 1 1 1.04971981 1.04971981 1.10337853 1.29744673 1.6148442 1.0497197546320352 1.0497197546320352 1.1033785215051863 1.2974467020669715 1.6148442119563844 0.6861942 0.6861942 0.797875941 0.805906951 0.845170259 0.6861941068147408 0.6861941068147408 0.79787580879856601 0.80590691835886541 0.84517026625737923 +5.9 0.7468355 5.9 3 5.1 1.8 0.7468355 0.6818181 0.7391304 0.719999969 5.9000000000000004 0.74683544303797467 5.9000000000000004 3 5.0999999999999996 
1.8 0.74683544303797467 0.68181818181818177 0.73913043478260865 0.72000000000000008 2 0.470588237 0.470588237 0.4090909 0.642857134 0.6666667 0.47058823529411764 0.47058823529411764 0.40909090909090912 0.6428571428571429 0.66666666666666663 1 1 0.5 1 1 1 1 0.5 1 1 0.998926938 0.998926938 0.9735693 1.22536623 1.26379108 0.99892686327887226 0.99892686327887226 0.97356928368104678 1.2253663297299173 1.2637911224006488 0.5528621 0.5528621 0.480745673 0.777955949 0.778455853 0.55286190159866166 0.55286190159866166 0.4807456731235793 0.77795595083214475 0.77845583371698512 diff --git a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs index 258f9d6e4a..b523fe2327 100644 --- a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs @@ -11,7 +11,6 @@ using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Normalizers; using Microsoft.ML.Transforms.Projections; -using System; using System.Collections.Immutable; using System.IO; using Xunit; @@ -52,6 +51,10 @@ public void NormalizerWorkout() new NormalizingEstimator.BinningColumn("float4", "float4bin"), new NormalizingEstimator.BinningColumn("double1", "double1bin"), new NormalizingEstimator.BinningColumn("double4", "double4bin"), + new NormalizingEstimator.SupervisedBinningColumn("float1", "float1supervisedbin", labelColumn:"int1"), + new NormalizingEstimator.SupervisedBinningColumn("float4", "float4supervisedbin", labelColumn: "int1"), + new NormalizingEstimator.SupervisedBinningColumn("double1", "double1supervisedbin", labelColumn: "int1"), + new NormalizingEstimator.SupervisedBinningColumn("double4", "double4supervisedbin", labelColumn: "int1"), new NormalizingEstimator.MeanVarColumn("float1", "float1mv"), new NormalizingEstimator.MeanVarColumn("float4", "float4mv"), new NormalizingEstimator.MeanVarColumn("double1", "double1mv"), @@ -210,6 +213,7 @@ public void 
SimpleConstructorsAndExtensions() var loader = new TextLoader(Env, new TextLoader.Arguments { Column = new[] { + new TextLoader.Column("Label", DataKind.R4, 0), new TextLoader.Column("float4", DataKind.R4, new[]{new TextLoader.Range(1, 4) }), } }); @@ -237,6 +241,19 @@ public void SimpleConstructorsAndExtensions() CheckSameValues(data1, data4); CheckSameValues(data1, data5); + // Tests for SupervisedBinning + var est6 = new NormalizingEstimator(Env, NormalizingEstimator.NormalizerMode.SupervisedBinning, ("float4", "float4")); + var est7 = new NormalizingEstimator(Env, new NormalizingEstimator.SupervisedBinningColumn("float4")); + var est8 = ML.Transforms.Normalize(NormalizingEstimator.NormalizerMode.SupervisedBinning, ("float4", "float4")); + + var data6 = est6.Fit(data).Transform(data); + var data7 = est7.Fit(data).Transform(data); + var data8 = est8.Fit(data).Transform(data); + CheckSameSchemas(data6.Schema, data7.Schema); + CheckSameSchemas(data6.Schema, data8.Schema); + CheckSameValues(data6, data7); + CheckSameValues(data6, data8); + Done(); } From 017fb09ad7b18f6cee1d51d85f5c48cf8af89259 Mon Sep 17 00:00:00 2001 From: Pete Luferenko <41337831+Zruty0@users.noreply.github.com> Date: Mon, 17 Dec 2018 15:01:27 -0800 Subject: [PATCH 061/100] Internalizing MetadataUtils (#1893) * Internalizing MetadataUtils --- .../Dynamic/MatrixFactorization.cs | 1 - src/Microsoft.ML.Core/Data/IEstimator.cs | 2 +- src/Microsoft.ML.Core/Data/MetadataUtils.cs | 135 ++++++++++-------- .../Data/RoleMappedSchema.cs | 2 +- .../Commands/SaveDataCommand.cs | 4 +- .../Commands/ScoreCommand.cs | 2 +- .../Commands/TrainCommand.cs | 2 +- .../DataView/LambdaColumnMapper.cs | 1 + .../Depricated/Instances/HeaderSchema.cs | 12 +- .../EntryPoints/PredictorModelImpl.cs | 3 +- .../EntryPoints/ScoreColumnSelector.cs | 4 +- .../Evaluators/AnomalyDetectionEvaluator.cs | 1 + .../Evaluators/BinaryClassifierEvaluator.cs | 2 +- .../Evaluators/EvaluatorUtils.cs | 34 ++--- .../Evaluators/MamlEvaluator.cs | 3 
+- .../MultiClassClassifierEvaluator.cs | 6 +- .../MultiOutputRegressionEvaluator.cs | 2 +- .../Evaluators/RegressionEvaluatorBase.cs | 1 + .../Model/Pfa/BoundPfaContext.cs | 2 +- .../Model/Pfa/SavePfaCommand.cs | 2 +- .../Scorers/BinaryClassifierScorer.cs | 12 +- .../Scorers/ClusteringScorer.cs | 1 + ...FeatureContributionCalculationTransform.cs | 7 +- .../Scorers/QuantileRegressionScorer.cs | 1 + .../Scorers/ScoreMapperSchema.cs | 4 +- .../StaticPipe/PipelineColumn.cs | 1 + .../StaticPipe/SchemaAssertionContext.cs | 1 + .../Training/TrainerUtils.cs | 16 +-- .../ColumnConcatenatingTransformer.cs | 4 +- .../Transforms/ColumnSelecting.cs | 2 +- .../Transforms/DropSlotsTransform.cs | 4 +- .../Transforms/InvertHashUtils.cs | 2 +- .../Transforms/LabelConvertTransform.cs | 1 + .../Transforms/NormalizeUtils.cs | 8 +- src/Microsoft.ML.Ensemble/PipelineEnsemble.cs | 4 +- .../CrossValidationMacro.cs | 2 +- .../FeatureCombiner.cs | 2 +- src/Microsoft.ML.FastTree/GamTrainer.cs | 7 +- .../TreeEnsemble/TreeEnsemble.cs | 3 +- .../LearningPipelineDebugProxy.cs | 2 +- .../Models/ConfusionMatrix.cs | 1 + src/Microsoft.ML.Legacy/PredictionModel.cs | 2 +- .../LightGbmTrainerBase.cs | 1 + src/Microsoft.ML.Onnx/SaveOnnxCommand.cs | 2 +- .../MatrixFactorizationTrainer.cs | 1 + .../FactorizationMachineTrainer.cs | 3 +- .../Standard/LinearPredictorUtils.cs | 1 + .../LogisticRegression/LogisticRegression.cs | 2 +- .../MultiClass/MetaMulticlassTrainer.cs | 1 + .../Standard/MultiClass/Ova.cs | 2 +- .../Standard/MultiClass/Pkpd.cs | 2 +- .../Standard/Simple/SimpleTrainers.cs | 2 +- .../TensorFlow/TensorflowUtils.cs | 2 +- .../HashJoiningTransform.cs | 3 +- .../MissingValueIndicatorTransform.cs | 1 + .../PermutationFeatureImportance.cs | 2 +- .../Text/LdaTransform.cs | 2 +- .../Text/NgramTransform.cs | 4 +- .../UnitTests/TestCSharpApi.cs | 3 +- .../StaticPipeTests.cs | 6 +- .../DataPipe/TestDataPipeBase.cs | 2 +- .../Transformers/NAIndicatorTests.cs | 2 +- 62 files changed, 193 insertions(+), 
159 deletions(-) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs index 66ce70a322..f8603ea524 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/MatrixFactorization.cs @@ -1,5 +1,4 @@ using Microsoft.ML.Data; -using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using System; using System.Collections.Generic; diff --git a/src/Microsoft.ML.Core/Data/IEstimator.cs b/src/Microsoft.ML.Core/Data/IEstimator.cs index 30a0ae58f3..2c07dc7050 100644 --- a/src/Microsoft.ML.Core/Data/IEstimator.cs +++ b/src/Microsoft.ML.Core/Data/IEstimator.cs @@ -173,7 +173,7 @@ internal static SchemaShape Create(Schema schema) for (int iCol = 0; iCol < schema.Count; iCol++) { - if (!schema.IsHidden(iCol)) + if (!schema[iCol].IsHidden) { // First create the metadata. var mCols = new List(); diff --git a/src/Microsoft.ML.Core/Data/MetadataUtils.cs b/src/Microsoft.ML.Core/Data/MetadataUtils.cs index 06994a1acf..efe98762a4 100644 --- a/src/Microsoft.ML.Core/Data/MetadataUtils.cs +++ b/src/Microsoft.ML.Core/Data/MetadataUtils.cs @@ -9,10 +9,11 @@ using System.Linq; using System.Threading; using Microsoft.ML.Core.Data; -using Microsoft.ML.Data; +using Microsoft.ML.Runtime; +using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; -namespace Microsoft.ML.Runtime.Data +namespace Microsoft.ML.Data { /// /// Utilities for implementing and using the metadata API of . @@ -117,23 +118,20 @@ public static class ScoreValueKind /// /// Returns a standard exception for responding to an invalid call to GetMetadata. 
/// - public static Exception ExceptGetMetadata() - { - return Contracts.Except("Invalid call to GetMetadata"); - } + [BestFriend] + internal static Exception ExceptGetMetadata() => Contracts.Except("Invalid call to GetMetadata"); /// /// Returns a standard exception for responding to an invalid call to GetMetadata. /// - public static Exception ExceptGetMetadata(this IExceptionContext ctx) - { - return ctx.Except("Invalid call to GetMetadata"); - } + [BestFriend] + internal static Exception ExceptGetMetadata(this IExceptionContext ctx) => ctx.Except("Invalid call to GetMetadata"); /// /// Helper to marshal a call to GetMetadata{TValue} to a specific type. /// - public static void Marshal(this MetadataGetter getter, int col, ref TNeed dst) + [BestFriend] + internal static void Marshal(this MetadataGetter getter, int col, ref TNeed dst) { Contracts.CheckValue(getter, nameof(getter)); @@ -147,7 +145,8 @@ public static void Marshal(this MetadataGetter getter, int /// Returns a vector type with item type text and the given size. The size must be positive. /// This is a standard type for metadata consisting of multiple text values, eg SlotNames. /// - public static VectorType GetNamesType(int size) + [BestFriend] + internal static VectorType GetNamesType(int size) { Contracts.CheckParam(size > 0, nameof(size), "must be known size"); return new VectorType(TextType.Instance, size); @@ -159,7 +158,8 @@ public static VectorType GetNamesType(int size) /// This is a standard type for metadata consisting of multiple int values that represent /// categorical slot ranges with in a column. 
/// - public static VectorType GetCategoricalType(int rangeCount) + [BestFriend] + internal static VectorType GetCategoricalType(int rangeCount) { Contracts.CheckParam(rangeCount > 0, nameof(rangeCount), "must be known size"); return new VectorType(NumberType.I4, rangeCount, 2); @@ -170,7 +170,8 @@ public static VectorType GetCategoricalType(int rangeCount) /// /// The type of the ScoreColumnSetId metadata. /// - public static KeyType ScoreColumnSetIdType + [BestFriend] + internal static KeyType ScoreColumnSetIdType { get { @@ -186,7 +187,8 @@ public static KeyType ScoreColumnSetIdType /// /// Returns a key-value pair useful when implementing GetMetadataTypes(col). /// - public static KeyValuePair GetSlotNamesPair(int size) + [BestFriend] + internal static KeyValuePair GetSlotNamesPair(int size) { return GetNamesType(size).GetPair(Kinds.SlotNames); } @@ -195,7 +197,8 @@ public static KeyValuePair GetSlotNamesPair(int size) /// Returns a key-value pair useful when implementing GetMetadataTypes(col). This assumes /// that the values of the key type are Text. /// - public static KeyValuePair GetKeyNamesPair(int size) + [BestFriend] + internal static KeyValuePair GetKeyNamesPair(int size) { return GetNamesType(size).GetPair(Kinds.KeyValues); } @@ -204,7 +207,8 @@ public static KeyValuePair GetKeyNamesPair(int size) /// Given a type and metadata kind string, returns a key-value pair. This is useful when /// implementing GetMetadataTypes(col). /// - public static KeyValuePair GetPair(this ColumnType type, string kind) + [BestFriend] + internal static KeyValuePair GetPair(this ColumnType type, string kind) { Contracts.CheckValue(type, nameof(type)); return new KeyValuePair(kind, type); @@ -215,7 +219,8 @@ public static KeyValuePair GetPair(this ColumnType type, str /// /// Prepends a params array to an enumerable. Useful when implementing GetMetadataTypes. 
/// - public static IEnumerable Prepend(this IEnumerable tail, params T[] head) + [BestFriend] + internal static IEnumerable Prepend(this IEnumerable tail, params T[] head) { return head.Concat(tail); } @@ -254,7 +259,8 @@ public static uint GetMaxMetadataKind(this Schema schema, out int colMax, string /// Returns the set of column ids which match the value of specified metadata kind. /// The metadata type should be a KeyType with raw type U4. /// - public static IEnumerable GetColumnSet(this Schema schema, string metadataKind, uint value) + [BestFriend] + internal static IEnumerable GetColumnSet(this Schema schema, string metadataKind, uint value) { for (int col = 0; col < schema.Count; col++) { @@ -273,7 +279,8 @@ public static IEnumerable GetColumnSet(this Schema schema, string metadataK /// Returns the set of column ids which match the value of specified metadata kind. /// The metadata type should be of type text. /// - public static IEnumerable GetColumnSet(this Schema schema, string metadataKind, string value) + [BestFriend] + internal static IEnumerable GetColumnSet(this Schema schema, string metadataKind, string value) { for (int col = 0; col < schema.Count; col++) { @@ -290,30 +297,43 @@ public static IEnumerable GetColumnSet(this Schema schema, string metadataK /// /// Returns true if the specified column: - /// * is a vector of length N (including 0) + /// * is a vector of length N /// * has a SlotNames metadata /// * metadata type is VBuffer<ReadOnlyMemory<char>> of length N /// - public static bool HasSlotNames(this Schema schema, int col, int vectorSize) + public static bool HasSlotNames(this Schema.Column column) + => column.Type.IsKnownSizeVector && column.HasSlotNames(column.Type.VectorSize); + + /// + /// Returns true if the specified column: + /// * has a SlotNames metadata + /// * metadata type is VBuffer<ReadOnlyMemory<char>> of length . 
+ /// + [BestFriend] + internal static bool HasSlotNames(this Schema.Column column, int vectorSize) { if (vectorSize == 0) return false; - var type = schema.GetMetadataTypeOrNull(Kinds.SlotNames, col); + var metaColumn = column.Metadata.Schema.GetColumnOrNull(Kinds.SlotNames); return - type != null - && type.IsVector - && type.VectorSize == vectorSize - && type.ItemType.IsText; + metaColumn != null + && metaColumn.Value.Type.IsVector + && metaColumn.Value.Type.VectorSize == vectorSize + && metaColumn.Value.Type.ItemType.IsText; } - public static void GetSlotNames(RoleMappedSchema schema, RoleMappedSchema.ColumnRole role, int vectorSize, ref VBuffer> slotNames) + public static void GetSlotNames(this Schema.Column column, ref VBuffer> slotNames) + => column.Metadata.GetValue(Kinds.SlotNames, ref slotNames); + + [BestFriend] + internal static void GetSlotNames(RoleMappedSchema schema, RoleMappedSchema.ColumnRole role, int vectorSize, ref VBuffer> slotNames) { Contracts.CheckValueOrNull(schema); Contracts.CheckParam(vectorSize >= 0, nameof(vectorSize)); IReadOnlyList list; - if ((list = schema?.GetColumns(role)) == null || list.Count != 1 || !schema.Schema.HasSlotNames(list[0].Index, vectorSize)) + if ((list = schema?.GetColumns(role)) == null || list.Count != 1 || !schema.Schema[list[0].Index].HasSlotNames(vectorSize)) { VBufferUtils.Resize(ref slotNames, vectorSize, 0); } @@ -321,17 +341,18 @@ public static void GetSlotNames(RoleMappedSchema schema, RoleMappedSchema.Column schema.Schema.GetMetadata(Kinds.SlotNames, list[0].Index, ref slotNames); } - public static bool HasKeyValues(this Schema schema, int col, int keyCount) + [BestFriend] + internal static bool HasKeyValues(this Schema.Column column, int keyCount) { if (keyCount == 0) return false; - var type = schema.GetMetadataTypeOrNull(Kinds.KeyValues, col); + var metaColumn = column.Metadata.Schema.GetColumnOrNull(Kinds.KeyValues); return - type != null - && type.IsVector - && type.VectorSize == keyCount - && 
type.ItemType.IsText; + metaColumn != null + && metaColumn.Value.Type.IsVector + && metaColumn.Value.Type.VectorSize == keyCount + && metaColumn.Value.Type.ItemType.IsText; } [BestFriend] @@ -343,19 +364,17 @@ internal static bool HasKeyValues(this SchemaShape.Column col) } /// - /// Returns whether a column has the metadata set to true. - /// That metadata should be set when the data has undergone transforms that would render it - /// "normalized." + /// Returns true iff has IsNormalized metadata set to true. /// - /// The schema to query - /// Which column in the schema to query - /// True if and only if the column has the metadata - /// set to the scalar value true - public static bool IsNormalized(this Schema schema, int col) + public static bool IsNormalized(this Schema.Column column) { - Contracts.CheckValue(schema, nameof(schema)); - var value = default(bool); - return schema.TryGetMetadata(BoolType.Instance, Kinds.IsNormalized, col, ref value) && value; + var metaColumn = column.Metadata.Schema.GetColumnOrNull((Kinds.IsNormalized)); + if (metaColumn == null || !metaColumn.Value.Type.IsBool) + return false; + + bool value = default; + column.Metadata.GetValue(Kinds.IsNormalized, ref value); + return value; } /// @@ -399,23 +418,19 @@ public static bool HasSlotNames(this SchemaShape.Column col) /// The column /// The value to return, if successful /// True if the metadata of the right type exists, false otherwise - public static bool TryGetMetadata(this Schema schema, PrimitiveType type, string kind, int col, ref T value) + [BestFriend] + internal static bool TryGetMetadata(this Schema schema, PrimitiveType type, string kind, int col, ref T value) { Contracts.CheckValue(schema, nameof(schema)); Contracts.CheckValue(type, nameof(type)); - var metadataType = schema.GetMetadataTypeOrNull(kind, col); + var metadataType = schema[col].Metadata.Schema.GetColumnOrNull(kind)?.Type; if (!type.Equals(metadataType)) return false; - schema.GetMetadata(kind, col, ref 
value); + schema[col].Metadata.GetValue(kind, ref value); return true; } - /// - /// Return whether the given column index is hidden in the given schema. - /// - public static bool IsHidden(this Schema schema, int col) => schema[col].IsHidden; - /// /// The categoricalFeatures is a vector of the indices of categorical features slots. /// This vector should always have an even number of elements, and the elements should be parsed in groups of two consecutive numbers. @@ -424,7 +439,8 @@ public static bool TryGetMetadata(this Schema schema, PrimitiveType type, str /// The way to interpret that is: feature with indices 0, 1, and 2 are one categorical /// Features with indices 3 and 4 are another categorical. Features 5 and 6 don't appear there, so they are not categoricals. /// - public static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, out int[] categoricalFeatures) + [BestFriend] + internal static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, out int[] categoricalFeatures) { Contracts.CheckValue(schema, nameof(schema)); Contracts.Check(colIndex >= 0, nameof(colIndex)); @@ -471,7 +487,8 @@ public static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, /// Produces sequence of columns that are generated by trainer estimators. /// /// whether we should also append 'IsNormalized' (typically for probability column) - public static IEnumerable GetTrainerOutputMetadata(bool isNormalized = false) + [BestFriend] + internal static IEnumerable GetTrainerOutputMetadata(bool isNormalized = false) { var cols = new List(); cols.Add(new SchemaShape.Column(Kinds.ScoreColumnSetId, SchemaShape.Column.VectorKind.Scalar, NumberType.U4, true)); @@ -486,7 +503,8 @@ public static bool TryGetCategoricalFeatureIndices(Schema schema, int colIndex, /// Produces sequence of columns that are generated by multiclass trainer estimators. /// /// Label column. 
- public static IEnumerable MetadataForMulticlassScoreColumn(SchemaShape.Column labelColumn) + [BestFriend] + internal static IEnumerable MetadataForMulticlassScoreColumn(SchemaShape.Column labelColumn) { var cols = new List(); if (labelColumn.IsKey && HasKeyValues(labelColumn)) @@ -518,7 +536,8 @@ public MetadataRow(Schema.Metadata metadata) /// /// The metadata to wrap. /// A row that wraps an input metadata. - public static Row MetadataAsRow(Schema.Metadata metadata) + [BestFriend] + internal static Row MetadataAsRow(Schema.Metadata metadata) { Contracts.CheckValue(metadata, nameof(metadata)); return new MetadataRow(metadata); diff --git a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs index 15ba2c7586..5d614bc627 100644 --- a/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs +++ b/src/Microsoft.ML.Core/Data/RoleMappedSchema.cs @@ -229,7 +229,7 @@ private RoleMappedSchema(Schema schema, Dictionary /// Version information to be saved in binary format @@ -552,7 +552,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010001, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(OlsLinearRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(OlsLinearRegressionModelParameters).Assembly.FullName); } // The following will be null iff RSquaredAdjusted is NaN. @@ -565,16 +565,14 @@ private static VersionInfo GetVersionInfo() /// /// The coefficient of determination. /// - public Double RSquared - { get { return _rSquared; } } + public Double RSquared => _rSquared; /// /// The adjusted coefficient of determination. It is only possible to produce /// an adjusted R-squared if there are more examples than parameters in the model /// plus one. If this condition is not met, this value will be NaN. 
/// - public Double RSquaredAdjusted - { get { return _rSquaredAdjusted; } } + public Double RSquaredAdjusted => _rSquaredAdjusted; /// /// Whether the model has per parameter statistics. This is false iff @@ -585,33 +583,29 @@ public Double RSquaredAdjusted /// /// to false. /// - public bool HasStatistics - { get { return _standardErrors != null; } } + public bool HasStatistics => _standardErrors != null; /// /// The standard error per model parameter, where the first corresponds to the bias, /// and all subsequent correspond to each weight in turn. This is null if and /// only if is false. /// - public IReadOnlyCollection StandardErrors - { get { return _standardErrors.AsReadOnly(); } } + public IReadOnlyCollection StandardErrors => _standardErrors.AsReadOnly(); /// /// t-Statistic values corresponding to each of the model standard errors. This is /// null if and only if is false. /// - public IReadOnlyCollection TValues - { get { return _tValues.AsReadOnly(); } } + public IReadOnlyCollection TValues => _tValues.AsReadOnly(); /// /// p-values corresponding to each of the model standard errors. This is null /// if and only if is false. 
/// - public IReadOnlyCollection PValues - { get { return _pValues.AsReadOnly(); } } + public IReadOnlyCollection PValues => _pValues.AsReadOnly(); - internal OlsLinearRegressionPredictor(IHostEnvironment env, in VBuffer weights, float bias, - Double[] standardErrors, Double[] tValues, Double[] pValues, Double rSquared, Double rSquaredAdjusted) + public OlsLinearRegressionModelParameters(IHostEnvironment env, in VBuffer weights, float bias, + Double[] standardErrors = null, Double[] tValues = null, Double[] pValues = null, Double rSquared = 1, Double rSquaredAdjusted = float.NaN) : base(env, RegistrationName, in weights, bias) { Contracts.AssertValueOrNull(standardErrors); @@ -647,7 +641,7 @@ internal OlsLinearRegressionPredictor(IHostEnvironment env, in VBuffer we _rSquaredAdjusted = rSquaredAdjusted; } - private OlsLinearRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private OlsLinearRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { // *** Binary format *** @@ -731,12 +725,12 @@ private static void ProbCheckDecode(Double p) Contracts.CheckDecode(0 <= p && p <= 1); } - public static OlsLinearRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static OlsLinearRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new OlsLinearRegressionPredictor(env, ctx); + return new OlsLinearRegressionModelParameters(env, ctx); } private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) diff --git a/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs b/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs index c8118a0160..b56d6d1fc3 100644 --- a/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs +++ b/src/Microsoft.ML.HalLearners/SymSgdClassificationTrainer.cs @@ -141,8 +141,8 @@ 
private protected override TPredictor TrainModelCore(TrainContext context) { var preparedData = PrepareDataFromTrainingExamples(ch, context.TrainingSet, out int weightSetCount); var initPred = context.InitialPredictor; - var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearPredictor; - linInitPred = linInitPred ?? initPred as LinearPredictor; + var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearModelParameters; + linInitPred = linInitPred ?? initPred as LinearModelParameters; Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context), "Initial predictor was not a linear predictor."); return TrainCore(ch, preparedData, linInitPred, weightSetCount); @@ -195,7 +195,7 @@ private TPredictor CreatePredictor(VBuffer weights, float bias) VBuffer maybeSparseWeights = default; VBufferUtils.CreateMaybeSparseCopy(in weights, ref maybeSparseWeights, Conversions.Instance.GetIsDefaultPredicate(NumberType.R4)); - var predictor = new LinearBinaryPredictor(Host, in maybeSparseWeights, bias); + var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias); return new ParameterMixingCalibratedPredictor(Host, predictor, new PlattCalibrator(Host, -1, 0)); } @@ -635,7 +635,7 @@ public void Dispose() } } - private TPredictor TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount) + private TPredictor TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) { int numFeatures = data.Schema.Feature.Type.VectorSize; var cursorFactory = new FloatLabelCursor.Factory(data, CursOpt.Label | CursOpt.Features | CursOpt.Weight); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs similarity index 80% rename from src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs rename to 
src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs index b5e1271c4c..0b64f32ffb 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LinearPredictor.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs @@ -2,13 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Float = System.Single; - -using System; -using System.Collections; -using System.Collections.Generic; -using System.IO; -using System.Linq; +using Microsoft.ML.Data; using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Calibration; @@ -20,26 +14,30 @@ using Microsoft.ML.Runtime.Model.Pfa; using Microsoft.ML.Runtime.Numeric; using Newtonsoft.Json.Linq; -using Microsoft.ML.Data; +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Linq; // This is for deserialization from a model repository. -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(LinearBinaryPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(LinearBinaryModelParameters), null, typeof(SignatureLoadModel), "Linear Binary Executor", - LinearBinaryPredictor.LoaderSignature)] + LinearBinaryModelParameters.LoaderSignature)] // This is for deserialization from a model repository. -[assembly: LoadableClass(typeof(LinearRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(LinearRegressionModelParameters), null, typeof(SignatureLoadModel), "Linear Regression Executor", - LinearRegressionPredictor.LoaderSignature)] + LinearRegressionModelParameters.LoaderSignature)] // This is for deserialization from a model repository. 
-[assembly: LoadableClass(typeof(PoissonRegressionPredictor), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(PoissonRegressionModelParameters), null, typeof(SignatureLoadModel), "Poisson Regression Executor", - PoissonRegressionPredictor.LoaderSignature)] + PoissonRegressionModelParameters.LoaderSignature)] namespace Microsoft.ML.Runtime.Learners { - public abstract class LinearPredictor : PredictorBase, + public abstract class LinearModelParameters : PredictorBase, IValueMapper, ICanSaveInIniFormat, ICanSaveInTextFormat, @@ -47,41 +45,41 @@ public abstract class LinearPredictor : PredictorBase, ICanSaveModel, ICanGetSummaryAsIRow, ICanSaveSummary, - IPredictorWithFeatureWeights, + IPredictorWithFeatureWeights, IFeatureContributionMapper, ISingleCanSavePfa, ISingleCanSaveOnnx { - protected readonly VBuffer Weight; + protected readonly VBuffer Weight; // _weightsDense is not persisted and is used for performance when the input instance is sparse. - private VBuffer _weightsDense; + private VBuffer _weightsDense; private readonly object _weightsDenseLock; - private sealed class WeightsCollection : IReadOnlyList + private sealed class WeightsCollection : IReadOnlyList { - private readonly LinearPredictor _pred; + private readonly LinearModelParameters _pred; public int Count => _pred.Weight.Length; - public Float this[int index] + public float this[int index] { get { Contracts.CheckParam(0 <= index && index < Count, nameof(index), "Out of range"); - Float value = 0; + float value = 0; _pred.Weight.GetItemOrDefault(index, ref value); return value; } } - public WeightsCollection(LinearPredictor pred) + public WeightsCollection(LinearModelParameters pred) { Contracts.AssertValue(pred); _pred = pred; } - public IEnumerator GetEnumerator() + public IEnumerator GetEnumerator() { return _pred.Weight.Items(all: true).Select(iv => iv.Value).GetEnumerator(); } @@ -93,10 +91,10 @@ IEnumerator IEnumerable.GetEnumerator() } /// The predictor's feature weight 
coefficients. - public IReadOnlyList Weights2 => new WeightsCollection(this); + public IReadOnlyList Weights => new WeightsCollection(this); /// The predictor's bias term. - public Float Bias { get; protected set; } + public float Bias { get; protected set; } private readonly ColumnType _inputType; @@ -112,7 +110,7 @@ IEnumerator IEnumerable.GetEnumerator() /// The weights for the linear predictor. Note that this /// will take ownership of the . /// The bias added to every output score. - internal LinearPredictor(IHostEnvironment env, string name, in VBuffer weights, Float bias) + public LinearModelParameters(IHostEnvironment env, string name, in VBuffer weights, float bias) : base(env, name) { Host.CheckParam(FloatUtils.IsFinite(weights.GetValues()), nameof(weights), "Cannot initialize linear predictor with non-finite weights"); @@ -128,7 +126,7 @@ internal LinearPredictor(IHostEnvironment env, string name, in VBuffer we _weightsDenseLock = new object(); } - protected LinearPredictor(IHostEnvironment env, string name, ModelLoadContext ctx) + protected LinearModelParameters(IHostEnvironment env, string name, ModelLoadContext ctx) : base(env, name, ctx) { // *** Binary format *** @@ -171,9 +169,9 @@ protected LinearPredictor(IHostEnvironment env, string name, ModelLoadContext ct Host.CheckDecode(Utils.Size(weights) == 0 || weights.All(x => FloatUtils.IsFinite(x))); if (cwht == 0) - Weight = VBufferUtils.CreateEmpty(len); + Weight = VBufferUtils.CreateEmpty(len); else - Weight = new VBuffer(len, Utils.Size(weights), weights, indices); + Weight = new VBuffer(len, Utils.Size(weights), weights, indices); _inputType = new VectorType(NumberType.Float, Weight.Length); WarnOnOldNormalizer(ctx, GetType(), Host); @@ -248,7 +246,7 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] outputs, string fea } // Generate the score from the given values, assuming they have already been normalized. 
- protected virtual Float Score(in VBuffer src) + protected virtual float Score(in VBuffer src) { if (src.IsDense) { @@ -259,7 +257,7 @@ protected virtual Float Score(in VBuffer src) return Bias + VectorUtils.DotProduct(in _weightsDense, in src); } - protected virtual void GetFeatureContributions(in VBuffer features, ref VBuffer contributions, int top, int bottom, bool normalize) + protected virtual void GetFeatureContributions(in VBuffer features, ref VBuffer contributions, int top, int bottom, bool normalize) { if (features.Length != Weight.Length) throw Contracts.Except("Input is of length {0} does not match expected length of weights {1}", features.Length, Weight.Length); @@ -295,11 +293,11 @@ ColumnType IValueMapper.OutputType ValueMapper IValueMapper.GetMapper() { - Contracts.Check(typeof(TIn) == typeof(VBuffer)); - Contracts.Check(typeof(TOut) == typeof(Float)); + Contracts.Check(typeof(TIn) == typeof(VBuffer)); + Contracts.Check(typeof(TOut) == typeof(float)); - ValueMapper, Float> del = - (in VBuffer src, ref Float dst) => + ValueMapper, float> del = + (in VBuffer src, ref float dst) => { if (src.Length != Weight.Length) throw Contracts.Except("Input is of length {0}, but predictor expected length {1}", src.Length, Weight.Length); @@ -311,14 +309,14 @@ ValueMapper IValueMapper.GetMapper() /// /// Combine a bunch of models into one by averaging parameters /// - protected void CombineParameters(IList> models, out VBuffer weights, out Float bias) + internal void CombineParameters(IList> models, out VBuffer weights, out float bias) { Type type = GetType(); Contracts.Check(type == models[0].GetType(), "Submodel for parameter mixer has the wrong type"); - var first = (LinearPredictor)models[0]; + var first = (LinearModelParameters)models[0]; - weights = default(VBuffer); + weights = default(VBuffer); first.Weight.CopyTo(ref weights); bias = first.Bias; @@ -327,12 +325,12 @@ protected void CombineParameters(IList> models, out VBuff var m = models[i]; 
Contracts.Check(type == m.GetType(), "Submodel for parameter mixer has the wrong type"); - var sub = (LinearPredictor)m; + var sub = (LinearModelParameters)m; var subweights = sub.Weight; VectorUtils.Add(in subweights, ref weights); bias += sub.Bias; } - VectorUtils.ScaleBy(ref weights, (Float)1 / models.Count); + VectorUtils.ScaleBy(ref weights, (float)1 / models.Count); bias /= models.Count; } @@ -381,31 +379,31 @@ private protected virtual Row GetSummaryIRowOrNull(RoleMappedSchema schema) void ICanSaveInIniFormat.SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator) => SaveAsIni(writer, schema, calibrator); - public virtual void GetFeatureWeights(ref VBuffer weights) + public virtual void GetFeatureWeights(ref VBuffer weights) { Weight.CopyTo(ref weights); } - ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) + ValueMapper> IFeatureContributionMapper.GetFeatureContributionMapper(int top, int bottom, bool normalize) { - Contracts.Check(typeof(TSrc) == typeof(VBuffer)); - Contracts.Check(typeof(TDstContributions) == typeof(VBuffer)); + Contracts.Check(typeof(TSrc) == typeof(VBuffer)); + Contracts.Check(typeof(TDstContributions) == typeof(VBuffer)); - ValueMapper, VBuffer> del = - (in VBuffer src, ref VBuffer dstContributions) => + ValueMapper, VBuffer> del = + (in VBuffer src, ref VBuffer dstContributions) => { GetFeatureContributions(in src, ref dstContributions, top, bottom, normalize); }; - return (ValueMapper>)(Delegate)del; + return (ValueMapper>)(Delegate)del; } } - public sealed partial class LinearBinaryPredictor : LinearPredictor, + public sealed partial class LinearBinaryModelParameters : LinearModelParameters, ICanGetSummaryInKeyValuePairs, - IParameterMixer + IParameterMixer { - public const string LoaderSignature = "Linear2CExec"; - public const string RegistrationName = "LinearBinaryPredictor"; + internal const string LoaderSignature = "Linear2CExec"; + internal const 
string RegistrationName = "LinearBinaryPredictor"; private const string ModelStatsSubModelFilename = "ModelStats"; private readonly LinearModelStatistics _stats; @@ -422,7 +420,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00020001, verWeCanReadBack: 0x00020001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(LinearBinaryPredictor).Assembly.FullName); + loaderAssemblyName: typeof(LinearBinaryModelParameters).Assembly.FullName); } /// @@ -433,14 +431,14 @@ private static VersionInfo GetVersionInfo() /// will take ownership of the . /// The bias added to every output score. /// - public LinearBinaryPredictor(IHostEnvironment env, in VBuffer weights, Float bias, LinearModelStatistics stats = null) + public LinearBinaryModelParameters(IHostEnvironment env, in VBuffer weights, float bias, LinearModelStatistics stats = null) : base(env, RegistrationName, in weights, bias) { Contracts.AssertValueOrNull(stats); _stats = stats; } - private LinearBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) + private LinearBinaryModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { // For model version earlier than 0x00020001, there is no model statisitcs. 
@@ -454,12 +452,12 @@ private LinearBinaryPredictor(IHostEnvironment env, ModelLoadContext ctx) ctx.LoadModelOrNull(Host, out _stats, ModelStatsSubModelFilename); } - public static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) + private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new LinearBinaryPredictor(env, ctx); + var predictor = new LinearBinaryModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) @@ -488,12 +486,12 @@ private protected override void SaveCore(ModelSaveContext ctx) /// /// Combine a bunch of models into one by averaging parameters /// - public IParameterMixer CombineParameters(IList> models) + IParameterMixer IParameterMixer.CombineParameters(IList> models) { - VBuffer weights; - Float bias; + VBuffer weights; + float bias; CombineParameters(models, out weights, out bias); - return new LinearBinaryPredictor(Host, in weights, bias); + return new LinearBinaryModelParameters(Host, in weights, bias); } private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) @@ -542,14 +540,14 @@ private protected override void SaveAsIni(TextWriter writer, RoleMappedSchema sc } } - public abstract class RegressionPredictor : LinearPredictor + public abstract class RegressionModelParameters : LinearModelParameters { - protected RegressionPredictor(IHostEnvironment env, string name, in VBuffer weights, Float bias) + public RegressionModelParameters(IHostEnvironment env, string name, in VBuffer weights, float bias) : base(env, name, in weights, bias) { } - protected RegressionPredictor(IHostEnvironment env, string name, ModelLoadContext ctx) + protected RegressionModelParameters(IHostEnvironment env, string name, ModelLoadContext ctx) : base(env, name, ctx) { } @@ -576,12 
+574,12 @@ private protected override void SaveAsIni(TextWriter writer, RoleMappedSchema sc } } - public sealed class LinearRegressionPredictor : RegressionPredictor, - IParameterMixer, + public sealed class LinearRegressionModelParameters : RegressionModelParameters, + IParameterMixer, ICanGetSummaryInKeyValuePairs { - public const string LoaderSignature = "LinearRegressionExec"; - public const string RegistrationName = "LinearRegressionPredictor"; + internal const string LoaderSignature = "LinearRegressionExec"; + internal const string RegistrationName = "LinearRegressionPredictor"; private static VersionInfo GetVersionInfo() { @@ -592,7 +590,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00020001, verWeCanReadBack: 0x00020001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(LinearRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(LinearRegressionModelParameters).Assembly.FullName); } /// @@ -602,22 +600,22 @@ private static VersionInfo GetVersionInfo() /// The weights for the linear predictor. Note that this /// will take ownership of the . /// The bias added to every output score. 
- public LinearRegressionPredictor(IHostEnvironment env, in VBuffer weights, Float bias) + public LinearRegressionModelParameters(IHostEnvironment env, in VBuffer weights, float bias) : base(env, RegistrationName, in weights, bias) { } - private LinearRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private LinearRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { } - public static LinearRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static LinearRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new LinearRegressionPredictor(env, ctx); + return new LinearRegressionModelParameters(env, ctx); } private protected override void SaveCore(ModelSaveContext ctx) @@ -640,12 +638,12 @@ private protected override void SaveSummary(TextWriter writer, RoleMappedSchema /// /// Combine a bunch of models into one by averaging parameters /// - public IParameterMixer CombineParameters(IList> models) + IParameterMixer IParameterMixer.CombineParameters(IList> models) { - VBuffer weights; - Float bias; + VBuffer weights; + float bias; CombineParameters(models, out weights, out bias); - return new LinearRegressionPredictor(Host, in weights, bias); + return new LinearRegressionModelParameters(Host, in weights, bias); } /// @@ -661,10 +659,10 @@ IList> ICanGetSummaryInKeyValuePairs.GetSummaryInKe } } - public sealed class PoissonRegressionPredictor : RegressionPredictor, IParameterMixer + public sealed class PoissonRegressionModelParameters : RegressionModelParameters, IParameterMixer { - public const string LoaderSignature = "PoissonRegressionExec"; - public const string RegistrationName = "PoissonRegressionPredictor"; + internal const string LoaderSignature = "PoissonRegressionExec"; + internal const string RegistrationName = 
"PoissonRegressionPredictor"; private static VersionInfo GetVersionInfo() { @@ -675,25 +673,25 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00020001, verWeCanReadBack: 0x00020001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(PoissonRegressionPredictor).Assembly.FullName); + loaderAssemblyName: typeof(PoissonRegressionModelParameters).Assembly.FullName); } - internal PoissonRegressionPredictor(IHostEnvironment env, in VBuffer weights, Float bias) + public PoissonRegressionModelParameters(IHostEnvironment env, in VBuffer weights, float bias) : base(env, RegistrationName, in weights, bias) { } - private PoissonRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) + private PoissonRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { } - public static PoissonRegressionPredictor Create(IHostEnvironment env, ModelLoadContext ctx) + private static PoissonRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new PoissonRegressionPredictor(env, ctx); + return new PoissonRegressionModelParameters(env, ctx); } private protected override void SaveCore(ModelSaveContext ctx) @@ -702,7 +700,7 @@ private protected override void SaveCore(ModelSaveContext ctx) ctx.SetVersionInfo(GetVersionInfo()); } - protected override Float Score(in VBuffer src) + protected override float Score(in VBuffer src) { return MathUtils.ExpSlow(base.Score(in src)); } @@ -721,12 +719,12 @@ private protected override void SaveSummary(TextWriter writer, RoleMappedSchema /// /// Combine a bunch of models into one by averaging parameters /// - public IParameterMixer CombineParameters(IList> models) + IParameterMixer IParameterMixer.CombineParameters(IList> models) { - VBuffer weights; - Float bias; + VBuffer weights; + float bias; CombineParameters(models, out weights, 
out bias); - return new PoissonRegressionPredictor(Host, in weights, bias); + return new PoissonRegressionModelParameters(Host, in weights, bias); } } } \ No newline at end of file diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs index c4651628f8..76061cee71 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsStatic.cs @@ -103,7 +103,7 @@ public static Scalar PoissonRegression(this RegressionContext.RegressionT int memorySize = Arguments.Defaults.MemorySize, bool enoforceNoNegativity = Arguments.Defaults.EnforceNonNegativity, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { LbfgsStaticUtils.ValidateParams(label, features, weights, l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity, advancedSettings, onFit); diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs index 44857925cd..37d79e87c6 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs @@ -384,9 +384,9 @@ protected override VBuffer InitializeWeightsFromPredictor(ParameterMixing { Contracts.AssertValue(srcPredictor); - var pred = srcPredictor.SubPredictor as LinearBinaryPredictor; + var pred = srcPredictor.SubPredictor as LinearBinaryModelParameters; Contracts.AssertValue(pred); - return InitializeWeights(pred.Weights2, new[] { pred.Bias }); + return InitializeWeights(pred.Weights, new[] { pred.Bias }); } protected override ParameterMixingCalibratedPredictor CreatePredictor() @@ -400,7 +400,7 @@ protected override ParameterMixingCalibratedPredictor CreatePredictor() 
CurrentWeights.GetItemOrDefault(0, ref bias); CurrentWeights.CopyTo(ref weights, 1, CurrentWeights.Length - 1); return new ParameterMixingCalibratedPredictor(Host, - new LinearBinaryPredictor(Host, in weights, bias, _stats), + new LinearBinaryModelParameters(Host, in weights, bias, _stats), new PlattCalibrator(Host, -1, 0)); } diff --git a/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs b/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs index 4626ee5b31..0a75f3b948 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs @@ -283,7 +283,7 @@ private static void GetUnorderedCoefficientStatistics(LinearModelStatistics stat }; } - private List GetUnorderedCoefficientStatistics(LinearBinaryPredictor parent, RoleMappedSchema schema) + private List GetUnorderedCoefficientStatistics(LinearBinaryModelParameters parent, RoleMappedSchema schema) { Contracts.AssertValue(_env); _env.CheckValue(parent, nameof(parent)); @@ -291,7 +291,7 @@ private List GetUnorderedCoefficientStatistics(LinearBina if (!_coeffStdError.HasValue) return new List(); - var weights = parent.Weights2 as IReadOnlyList; + var weights = parent.Weights as IReadOnlyList; _env.Assert(_paramCount == 1 || weights != null); _env.Assert(_coeffStdError.Value.Length == weights.Count + 1); @@ -324,7 +324,7 @@ private List GetUnorderedCoefficientStatistics(LinearBina /// /// Gets the coefficient statistics as an object. 
/// - public CoefficientStatistics[] GetCoefficientStatistics(LinearBinaryPredictor parent, RoleMappedSchema schema, int paramCountCap) + public CoefficientStatistics[] GetCoefficientStatistics(LinearBinaryModelParameters parent, RoleMappedSchema schema, int paramCountCap) { Contracts.AssertValue(_env); _env.CheckValue(parent, nameof(parent)); @@ -345,7 +345,7 @@ public CoefficientStatistics[] GetCoefficientStatistics(LinearBinaryPredictor pa return order.Prepend(new[] { new CoefficientStatistics("(Bias)", bias, stdError, zScore, pValue) }).ToArray(); } - public void SaveText(TextWriter writer, LinearBinaryPredictor parent, RoleMappedSchema schema, int paramCountCap) + public void SaveText(TextWriter writer, LinearBinaryModelParameters parent, RoleMappedSchema schema, int paramCountCap) { Contracts.AssertValue(_env); _env.CheckValue(writer, nameof(writer)); @@ -383,7 +383,7 @@ public void SaveText(TextWriter writer, LinearBinaryPredictor parent, RoleMapped writer.WriteLine("Significance codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1"); } - public void SaveSummaryInKeyValuePairs(LinearBinaryPredictor parent, + public void SaveSummaryInKeyValuePairs(LinearBinaryModelParameters parent, RoleMappedSchema schema, int paramCountCap, List> resultCollection) { Contracts.AssertValue(_env); @@ -409,7 +409,7 @@ public void SaveSummaryInKeyValuePairs(LinearBinaryPredictor parent, } } - internal Schema.Metadata MakeStatisticsMetadata(LinearBinaryPredictor parent, RoleMappedSchema schema, in VBuffer> names) + internal Schema.Metadata MakeStatisticsMetadata(LinearBinaryModelParameters parent, RoleMappedSchema schema, in VBuffer> names) { _env.AssertValueOrNull(parent); _env.AssertValue(schema); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedLinear.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedLinear.cs index a5f0f60092..9e703db34d 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedLinear.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedLinear.cs @@ -93,7 +93,7 @@ private protected abstract class AveragedTrainStateBase : TrainStateBase private readonly AveragedLinearArguments _args; private readonly IScalarOutputLoss _loss; - private protected AveragedTrainStateBase(IChannel ch, int numFeatures, LinearPredictor predictor, AveragedLinearTrainer parent) + private protected AveragedTrainStateBase(IChannel ch, int numFeatures, LinearModelParameters predictor, AveragedLinearTrainer parent) : base(ch, numFeatures, predictor, parent) { // Do the other initializations by setting the setters as if user had set them diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs index c853aa7f79..6556a98dae 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs @@ -31,7 +31,7 @@ namespace Microsoft.ML.Trainers.Online // - Feature 
normalization. By default, rescaling between min and max values for every feature // - Prediction calibration to produce probabilities. Off by default, if on, uses exponential (aka Platt) calibration. /// - public sealed class AveragedPerceptronTrainer : AveragedLinearTrainer, LinearBinaryPredictor> + public sealed class AveragedPerceptronTrainer : AveragedLinearTrainer, LinearBinaryModelParameters> { public const string LoadNameValue = "AveragedPerceptron"; internal const string UserNameValue = "Averaged Perceptron"; @@ -56,12 +56,12 @@ public sealed class Arguments : AveragedLinearArguments private sealed class TrainState : AveragedTrainStateBase { - public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, AveragedPerceptronTrainer parent) + public TrainState(IChannel ch, int numFeatures, LinearModelParameters predictor, AveragedPerceptronTrainer parent) : base(ch, numFeatures, predictor, parent) { } - public override LinearBinaryPredictor CreatePredictor() + public override LinearBinaryModelParameters CreatePredictor() { Contracts.Assert(WeightsScale == 1); @@ -80,7 +80,7 @@ public override LinearBinaryPredictor CreatePredictor() bias = TotalBias / (float)NumWeightUpdates; } - return new LinearBinaryPredictor(ParentHost, in weights, bias); + return new LinearBinaryModelParameters(ParentHost, in weights, bias); } } @@ -175,15 +175,15 @@ protected override void CheckLabelCompatible(SchemaShape.Column labelCol) error(); } - private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor) + private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearModelParameters predictor) { return new TrainState(ch, numFeatures, predictor, this); } - protected override BinaryPredictionTransformer MakeTransformer(LinearBinaryPredictor model, Schema trainSchema) - => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override BinaryPredictionTransformer 
MakeTransformer(LinearBinaryModelParameters model, Schema trainSchema) + => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public BinaryPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) + public BinaryPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) => TrainTransformer(trainData, initPredictor: initialPredictor); [TlcModule.EntryPoint(Name = "Trainers.AveragedPerceptronBinaryClassifier", diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs index aceb6dd517..716f78d298 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs @@ -31,7 +31,7 @@ namespace Microsoft.ML.Trainers.Online /// /// Linear SVM that implements PEGASOS for training. See: http://ttic.uchicago.edu/~shai/papers/ShalevSiSr07.pdf /// - public sealed class LinearSvm : OnlineLinearTrainer, LinearBinaryPredictor> + public sealed class LinearSvm : OnlineLinearTrainer, LinearBinaryModelParameters> { internal const string LoadNameValue = "LinearSVM"; internal const string ShortName = "svm"; @@ -88,7 +88,7 @@ private sealed class TrainState : TrainStateBase private readonly bool _performProjection; private readonly float _lambda; - public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, LinearSvm parent) + public TrainState(IChannel ch, int numFeatures, LinearModelParameters predictor, LinearSvm parent) : base(ch, numFeatures, predictor, parent) { _batchSize = parent.Args.BatchSize; @@ -212,11 +212,11 @@ private void UpdateWeights(in VBuffer weightsUpdate, Float weightsUpdateS public override Float Margin(in VBuffer feat) => Bias + VectorUtils.DotProduct(in feat, in Weights) * WeightsScale; - public override LinearBinaryPredictor CreatePredictor() + public override LinearBinaryModelParameters CreatePredictor() { 
Contracts.Assert(WeightsScale == 1); // below should be `in Weights`, but can't because of https://github.com/dotnet/roslyn/issues/29371 - return new LinearBinaryPredictor(ParentHost, Weights, Bias); + return new LinearBinaryModelParameters(ParentHost, Weights, Bias); } } @@ -274,7 +274,7 @@ protected override void CheckLabels(RoleMappedData data) data.CheckBinaryLabel(); } - private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor) + private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearModelParameters predictor) { return new TrainState(ch, numFeatures, predictor, this); } @@ -298,7 +298,7 @@ public static CommonOutputs.BinaryClassificationOutput TrainLinearSvm(IHostEnvir calibrator: input.Calibrator, maxCalibrationExamples: input.MaxCalibrationExamples); } - protected override BinaryPredictionTransformer MakeTransformer(LinearBinaryPredictor model, Schema trainSchema) - => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override BinaryPredictionTransformer MakeTransformer(LinearBinaryModelParameters model, Schema trainSchema) + => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); } } diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs index f1d9469fca..70a53f6d49 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs @@ -28,7 +28,7 @@ namespace Microsoft.ML.Trainers.Online { /// - public sealed class OnlineGradientDescentTrainer : AveragedLinearTrainer, LinearRegressionPredictor> + public sealed class OnlineGradientDescentTrainer : AveragedLinearTrainer, LinearRegressionModelParameters> { internal const string LoadNameValue = "OnlineGradientDescent"; internal const string UserNameValue 
= "Stochastic Gradient Descent (Regression)"; @@ -62,12 +62,12 @@ internal class OgdDefaultArgs : AveragedDefaultArgs private sealed class TrainState : AveragedTrainStateBase { - public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, OnlineGradientDescentTrainer parent) + public TrainState(IChannel ch, int numFeatures, LinearModelParameters predictor, OnlineGradientDescentTrainer parent) : base(ch, numFeatures, predictor, parent) { } - public override LinearRegressionPredictor CreatePredictor() + public override LinearRegressionModelParameters CreatePredictor() { Contracts.Assert(WeightsScale == 1); VBuffer weights = default; @@ -84,7 +84,7 @@ public override LinearRegressionPredictor CreatePredictor() VectorUtils.ScaleBy(ref weights, 1 / (float)NumWeightUpdates); bias = TotalBias / (float)NumWeightUpdates; } - return new LinearRegressionPredictor(ParentHost, in weights, bias); + return new LinearRegressionModelParameters(ParentHost, in weights, bias); } } @@ -158,7 +158,7 @@ protected override void CheckLabels(RoleMappedData data) data.CheckRegressionLabel(); } - private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor) + private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearModelParameters predictor) { return new TrainState(ch, numFeatures, predictor, this); } @@ -181,10 +181,10 @@ public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment en () => LearnerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumn)); } - protected override RegressionPredictionTransformer MakeTransformer(LinearRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(LinearRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, 
FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) + public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) => TrainTransformer(trainData, initPredictor: initialPredictor); } } diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLearnerStatic.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLearnerStatic.cs index 8153d0f8cf..c37aab67a5 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLearnerStatic.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLearnerStatic.cs @@ -54,7 +54,7 @@ public static (Scalar score, Scalar predictedLabel) AveragedPercept float l2RegularizerWeight = AveragedLinearArguments.AveragedDefaultArgs.L2RegularizerWeight, int numIterations = AveragedLinearArguments.AveragedDefaultArgs.NumIterations, Action advancedSettings = null, - Action onFit = null + Action onFit = null ) { OnlineLinearStaticUtils.CheckUserParams(label, features, weights, learningRate, l2RegularizerWeight, numIterations, onFit, advancedSettings); @@ -116,7 +116,7 @@ public static Scalar OnlineGradientDescent(this RegressionContext.Regress float l2RegularizerWeight = OnlineGradientDescentTrainer.Arguments.OgdDefaultArgs.L2RegularizerWeight, int numIterations = OnlineLinearArguments.OnlineDefaultArgs.NumIterations, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { OnlineLinearStaticUtils.CheckUserParams(label, features, weights, learningRate, l2RegularizerWeight, numIterations, onFit, advancedSettings); Contracts.CheckValueOrNull(lossFunction); diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs index ebcf5d0ad5..dcdeeac236 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs @@ 
-15,7 +15,6 @@ using Microsoft.ML.Runtime.Training; using System; using System.Globalization; -using Float = System.Single; namespace Microsoft.ML.Trainers.Online { @@ -34,7 +33,7 @@ public abstract class OnlineLinearArguments : LearnerInputBaseWithLabel [Argument(ArgumentType.AtMostOnce, HelpText = "Init weights diameter", ShortName = "initwts", SortOrder = 140)] [TGUI(Label = "Initial Weights Scale", SuggestedSweeps = "0,0.1,0.5,1")] [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0.0f, 1.0f, numSteps: 5)] - public Float InitWtsDiameter = 0; + public float InitWtsDiameter = 0; [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to shuffle for each training iteration", ShortName = "shuf")] [TlcModule.SweepableDiscreteParamAttribute("Shuffle", new object[] { false, true })] @@ -58,7 +57,7 @@ public abstract class OnlineLinearTrainer : TrainerEstimat /// /// An object to hold the mutable updatable state for the online linear trainers. Specific algorithms should subclass - /// this, and return the instance via . + /// this, and return the instance via . /// private protected abstract class TrainStateBase { @@ -70,7 +69,7 @@ private protected abstract class TrainStateBase public int Iteration; /// - /// The number of examples in the current iteration. Incremented by , + /// The number of examples in the current iteration. Incremented by , /// and reset by . /// public long NumIterExamples; @@ -84,21 +83,21 @@ private protected abstract class TrainStateBase /// Current weights. The weights vector is considered to be scaled by . Storing this separately /// allows us to avoid the overhead of an explicit scaling, which some algorithms will attempt to do on each example's update. /// - public VBuffer Weights; + public VBuffer Weights; /// /// The implicit scaling factor for . Note that this does not affect . /// - public Float WeightsScale; + public float WeightsScale; /// /// The intercept term. 
/// - public Float Bias; + public float Bias; protected readonly IHost ParentHost; - protected TrainStateBase(IChannel ch, int numFeatures, LinearPredictor predictor, OnlineLinearTrainer parent) + protected TrainStateBase(IChannel ch, int numFeatures, LinearModelParameters predictor, OnlineLinearTrainer parent) { Contracts.CheckValue(ch, nameof(ch)); ch.Check(numFeatures > 0, "Cannot train with zero features!"); @@ -132,22 +131,22 @@ protected TrainStateBase(IChannel ch, int numFeatures, LinearPredictor predictor var weightValues = new float[numFeatures]; for (int i = 0; i < numFeatures; i++) - weightValues[i] = Float.Parse(weightStr[i], CultureInfo.InvariantCulture); + weightValues[i] = float.Parse(weightStr[i], CultureInfo.InvariantCulture); Weights = new VBuffer(numFeatures, weightValues); - Bias = Float.Parse(weightStr[numFeatures], CultureInfo.InvariantCulture); + Bias = float.Parse(weightStr[numFeatures], CultureInfo.InvariantCulture); } else if (parent.Args.InitWtsDiameter > 0) { var weightValues = new float[numFeatures]; for (int i = 0; i < numFeatures; i++) - weightValues[i] = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (Float)0.5); + weightValues[i] = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5); Weights = new VBuffer(numFeatures, weightValues); - Bias = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (Float)0.5); + Bias = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5); } else if (numFeatures <= 1000) - Weights = VBufferUtils.CreateDense(numFeatures); + Weights = VBufferUtils.CreateDense(numFeatures); else - Weights = VBufferUtils.CreateEmpty(numFeatures); + Weights = VBufferUtils.CreateEmpty(numFeatures); WeightsScale = 1; } @@ -170,7 +169,7 @@ private void ScaleWeights() /// public void ScaleWeightsIfNeeded() { - Float absWeightsScale = Math.Abs(WeightsScale); + float absWeightsScale = Math.Abs(WeightsScale); if (absWeightsScale < _minWeightScale || 
absWeightsScale > _maxWeightScale) ScaleWeights(); } @@ -202,7 +201,7 @@ public virtual void FinishIteration(IChannel ch) /// /// This should be overridden by derived classes. This implementation simply increments . /// - public virtual void ProcessDataInstance(IChannel ch, in VBuffer feat, Float label, Float weight) + public virtual void ProcessDataInstance(IChannel ch, in VBuffer feat, float label, float weight) { ch.Assert(FloatUtils.IsFinite(feat.GetValues())); ++NumIterExamples; @@ -211,23 +210,23 @@ public virtual void ProcessDataInstance(IChannel ch, in VBuffer feat, Flo /// /// Return the raw margin from the decision hyperplane /// - public Float CurrentMargin(in VBuffer feat) + public float CurrentMargin(in VBuffer feat) => Bias + VectorUtils.DotProduct(in feat, in Weights) * WeightsScale; /// - /// The default implementation just calls . + /// The default implementation just calls . /// /// /// - public virtual Float Margin(in VBuffer feat) + public virtual float Margin(in VBuffer feat) => CurrentMargin(in feat); public abstract TModel CreatePredictor(); } // Our tolerance for the error induced by the weight scale may depend on our precision. - private const Float _maxWeightScale = 1 << 10; // Exponent ranges 127 to -128, tolerate 10 being cut off that. - private const Float _minWeightScale = 1 / _maxWeightScale; + private const float _maxWeightScale = 1 << 10; // Exponent ranges 127 to -128, tolerate 10 being cut off that. + private const float _minWeightScale = 1 / _maxWeightScale; protected const string UserErrorPositive = "must be positive"; protected const string UserErrorNonNegative = "must be non-negative"; @@ -260,7 +259,7 @@ private protected sealed override TModel TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var initPredictor = context.InitialPredictor; - var initLinearPred = initPredictor as LinearPredictor ?? 
(initPredictor as CalibratedPredictorBase)?.SubPredictor as LinearPredictor; + var initLinearPred = initPredictor as LinearModelParameters ?? (initPredictor as CalibratedPredictorBase)?.SubPredictor as LinearModelParameters; Host.CheckParam(initPredictor == null || initLinearPred != null, nameof(context), "Not a linear predictor."); var data = context.TrainingSet; @@ -273,7 +272,7 @@ private protected sealed override TModel TrainModelCore(TrainContext context) TrainCore(ch, data, state); ch.Assert(state.WeightsScale == 1); - Float maxNorm = Math.Max(VectorUtils.MaxNorm(in state.Weights), Math.Abs(state.Bias)); + float maxNorm = Math.Max(VectorUtils.MaxNorm(in state.Weights), Math.Abs(state.Bias)); ch.Check(FloatUtils.IsFinite(maxNorm), "The weights/bias contain invalid values (NaN or Infinite). Potential causes: high learning rates, no normalization, high initial weights, etc."); return state.CreatePredictor(); @@ -316,6 +315,6 @@ private void TrainCore(IChannel ch, RoleMappedData data, TrainStateBase state) } } - private protected abstract TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor); + private protected abstract TrainStateBase MakeState(IChannel ch, int numFeatures, LinearModelParameters predictor); } } \ No newline at end of file diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs index f52c33ceaa..70956bdc71 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs @@ -28,7 +28,7 @@ namespace Microsoft.ML.Trainers { /// - public sealed class PoissonRegression : LbfgsTrainerBase, PoissonRegressionPredictor> + public sealed class PoissonRegression : LbfgsTrainerBase, PoissonRegressionModelParameters> { internal const string LoadNameValue = "PoissonRegression"; internal const 
string UserNameValue = "Poisson Regression"; @@ -95,16 +95,16 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override RegressionPredictionTransformer MakeTransformer(PoissonRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(PoissonRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); - public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) + public RegressionPredictionTransformer Train(IDataView trainData, IPredictor initialPredictor = null) => TrainTransformer(trainData, initPredictor: initialPredictor); - protected override VBuffer InitializeWeightsFromPredictor(PoissonRegressionPredictor srcPredictor) + protected override VBuffer InitializeWeightsFromPredictor(PoissonRegressionModelParameters srcPredictor) { Contracts.AssertValue(srcPredictor); - return InitializeWeights(srcPredictor.Weights2, new[] { srcPredictor.Bias }); + return InitializeWeights(srcPredictor.Weights, new[] { srcPredictor.Bias }); } protected override void PreTrainingProcessInstance(float label, in VBuffer feat, float weight) @@ -153,13 +153,13 @@ protected override float AccumulateOneGradient(in VBuffer feat, float lab return -(y * dot - lambda) * weight; } - protected override PoissonRegressionPredictor CreatePredictor() + protected override PoissonRegressionModelParameters CreatePredictor() { VBuffer weights = default(VBuffer); CurrentWeights.CopyTo(ref weights, 1, CurrentWeights.Length - 1); float bias = 0; CurrentWeights.GetItemOrDefault(0, ref bias); - return new PoissonRegressionPredictor(Host, in weights, bias); + return new PoissonRegressionModelParameters(Host, in weights, bias); } protected override void ComputeTrainingStatistics(IChannel ch, 
FloatLabelCursor.Factory factory, float loss, int numParams) diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs index 481eeb0b00..a286d63994 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaBinary.cs @@ -76,15 +76,15 @@ private protected override TModel TrainModelCore(TrainContext context) { var preparedData = PrepareDataFromTrainingExamples(ch, context.TrainingSet, out int weightSetCount); var initPred = context.InitialPredictor; - var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearPredictor; - linInitPred = linInitPred ?? initPred as LinearPredictor; + var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearModelParameters; + linInitPred = linInitPred ?? initPred as LinearModelParameters; Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context), "Initial predictor was not a linear predictor."); return TrainCore(ch, preparedData, linInitPred, weightSetCount); } } - protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount); + protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); /// /// This method ensures that the data meets the requirements of this trainer and its @@ -281,7 +281,7 @@ protected float WDot(in VBuffer features, in VBuffer weights, floa return VectorUtils.DotProduct(in weights, in features) + bias; } - protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount) + protected sealed override TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) { Contracts.Assert(predictor == null, "SDCA based trainers don't support continuous training."); Contracts.Assert(weightSetCount >= 1); @@ 
-1560,7 +1560,7 @@ protected override TScalarPredictor CreatePredictor(VBuffer[] weights, fl VBufferUtils.CreateMaybeSparseCopy(weights[0], ref maybeSparseWeights, Conversions.Instance.GetIsDefaultPredicate(NumberType.Float)); - var predictor = new LinearBinaryPredictor(Host, in maybeSparseWeights, bias[0]); + var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]); if (!(_loss is LogLoss)) return predictor; return new ParameterMixingCalibratedPredictor(Host, predictor, new PlattCalibrator(Host, -1, 0)); @@ -1748,7 +1748,7 @@ public BinaryPredictionTransformer Train(IDataView trainData, //For complexity analysis, we assume that // - The number of features is N // - Average number of non-zero per instance is k - protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount) + protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount) { Contracts.AssertValue(data); Contracts.Assert(weightSetCount == 1); @@ -1925,7 +1925,7 @@ protected override TScalarPredictor TrainCore(IChannel ch, RoleMappedData data, VBuffer maybeSparseWeights = default; VBufferUtils.CreateMaybeSparseCopy(in weights, ref maybeSparseWeights, Conversions.Instance.GetIsDefaultPredicate(NumberType.Float)); - var pred = new LinearBinaryPredictor(Host, in maybeSparseWeights, bias); + var pred = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias); if (!(_loss is LogLoss)) return pred; return new ParameterMixingCalibratedPredictor(Host, pred, new PlattCalibrator(Host, -1, 0)); diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs index a98cad45bf..0a50df7c31 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs @@ -15,7 +15,6 @@ using 
Microsoft.ML.Runtime.Training; using Microsoft.ML.Trainers; using System; -using Float = System.Single; [assembly: LoadableClass(SdcaRegressionTrainer.Summary, typeof(SdcaRegressionTrainer), typeof(SdcaRegressionTrainer.Arguments), new[] { typeof(SignatureRegressorTrainer), typeof(SignatureTrainer), typeof(SignatureFeatureScorerTrainer) }, @@ -26,7 +25,7 @@ namespace Microsoft.ML.Trainers { /// - public sealed class SdcaRegressionTrainer : SdcaTrainerBase, LinearRegressionPredictor> + public sealed class SdcaRegressionTrainer : SdcaTrainerBase, LinearRegressionModelParameters> { internal const string LoadNameValue = "SDCAR"; internal const string UserNameValue = "Fast Linear Regression (SA-SDCA)"; @@ -100,20 +99,21 @@ internal SdcaRegressionTrainer(IHostEnvironment env, Arguments args) { } - protected override LinearRegressionPredictor CreatePredictor(VBuffer[] weights, Float[] bias) + protected override LinearRegressionModelParameters CreatePredictor(VBuffer[] weights, float[] bias) { Host.CheckParam(Utils.Size(weights) == 1, nameof(weights)); Host.CheckParam(Utils.Size(bias) == 1, nameof(bias)); Host.CheckParam(weights[0].Length > 0, nameof(weights)); - VBuffer maybeSparseWeights = default; + VBuffer maybeSparseWeights = default; // below should be `in weights[0]`, but can't because of https://github.com/dotnet/roslyn/issues/29371 VBufferUtils.CreateMaybeSparseCopy(weights[0], ref maybeSparseWeights, - Conversions.Instance.GetIsDefaultPredicate(NumberType.Float)); - return new LinearRegressionPredictor(Host, in maybeSparseWeights, bias[0]); + Conversions.Instance.GetIsDefaultPredicate(NumberType.Float)); + + return new LinearRegressionModelParameters(Host, in maybeSparseWeights, bias[0]); } - protected override Float GetInstanceWeight(FloatLabelCursor cursor) + protected override float GetInstanceWeight(FloatLabelCursor cursor) { return cursor.Weight; } @@ -137,13 +137,13 @@ protected override int ComputeNumThreads(FloatLabelCursor.Factory cursorFactory) } // 
Using a different logic for default L2 parameter in regression. - protected override Float TuneDefaultL2(IChannel ch, int maxIterations, long rowCount, int numThreads) + protected override float TuneDefaultL2(IChannel ch, int maxIterations, long rowCount, int numThreads) { Contracts.AssertValue(ch); Contracts.Assert(maxIterations > 0); Contracts.Assert(rowCount > 0); Contracts.Assert(numThreads > 0); - Float l2; + float l2; if (rowCount > 10000) l2 = 1e-04f; @@ -164,8 +164,8 @@ protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSc }; } - protected override RegressionPredictionTransformer MakeTransformer(LinearRegressionPredictor model, Schema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + protected override RegressionPredictionTransformer MakeTransformer(LinearRegressionModelParameters model, Schema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); } /// diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaStatic.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaStatic.cs index e53617c6e9..2549206ec8 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/SdcaStatic.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaStatic.cs @@ -51,7 +51,7 @@ public static Scalar Sdca(this RegressionContext.RegressionTrainers ctx, int? maxIterations = null, ISupportSdcaRegressionLoss loss = null, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { Contracts.CheckValue(label, nameof(label)); Contracts.CheckValue(features, nameof(features)); @@ -109,7 +109,7 @@ public static (Scalar score, Scalar probability, Scalar pred float? l1Threshold = null, int? 
maxIterations = null, Action advancedSettings = null, - Action onFit = null) + Action onFit = null) { Contracts.CheckValue(label, nameof(label)); Contracts.CheckValue(features, nameof(features)); @@ -131,7 +131,7 @@ public static (Scalar score, Scalar probability, Scalar pred // Under the default log-loss we assume a calibrated predictor. var model = trans.Model; var cali = (ParameterMixingCalibratedPredictor)model; - var pred = (LinearBinaryPredictor)cali.SubPredictor; + var pred = (LinearBinaryModelParameters)cali.SubPredictor; onFit(pred, cali); }); } @@ -165,7 +165,7 @@ public static (Scalar score, Scalar probability, Scalar pred /// result in any way; it is only a way for the caller to be informed about what was learnt. /// The set of output columns including in order the predicted binary classification score (which will range /// from negative to positive infinity), and the predicted label. - /// + /// public static (Scalar score, Scalar predictedLabel) Sdca( this BinaryClassificationContext.BinaryClassificationTrainers ctx, Scalar label, Vector features, @@ -175,7 +175,7 @@ public static (Scalar score, Scalar predictedLabel) Sdca( float? l1Threshold = null, int? 
maxIterations = null, Action advancedSettings = null, - Action onFit = null + Action onFit = null ) { Contracts.CheckValue(label, nameof(label)); @@ -200,9 +200,9 @@ public static (Scalar score, Scalar predictedLabel) Sdca( { var model = trans.Model; if (model is ParameterMixingCalibratedPredictor cali) - onFit((LinearBinaryPredictor)cali.SubPredictor); + onFit((LinearBinaryModelParameters)cali.SubPredictor); else - onFit((LinearBinaryPredictor)model); + onFit((LinearBinaryModelParameters)model); }); } return trainer; diff --git a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs index 792844ef26..bb4a2965d9 100644 --- a/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs +++ b/src/Microsoft.ML.StandardLearners/Standard/StochasticTrainerBase.cs @@ -35,8 +35,8 @@ private protected override TModel TrainModelCore(TrainContext context) { var preparedData = PrepareDataFromTrainingExamples(ch, context.TrainingSet, out int weightSetCount); var initPred = context.InitialPredictor; - var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearPredictor; - linInitPred = linInitPred ?? initPred as LinearPredictor; + var linInitPred = (initPred as CalibratedPredictorBase)?.SubPredictor as LinearModelParameters; + linInitPred = linInitPred ?? 
initPred as LinearModelParameters; Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context), "Initial predictor was not a linear predictor."); return TrainCore(ch, preparedData, linInitPred, weightSetCount); @@ -95,7 +95,7 @@ protected RoleMappedData PrepareDataFromTrainingExamples(IChannel ch, RoleMapped return examplesToFeedTrain; } - protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount); + protected abstract TModel TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount); protected abstract void CheckLabel(RoleMappedData examples, out int weightSetCount); } diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index a5b4eb954f..1d43e5579e 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -43,7 +43,7 @@ public void SdcaRegression() c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)), separator: ';', hasHeader: true); - LinearRegressionPredictor pred = null; + LinearRegressionModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (r.label, score: ctx.Trainers.Sdca(r.label, r.features, maxIterations: 2, @@ -55,7 +55,7 @@ public void SdcaRegression() var model = pipe.Fit(dataSource); Assert.NotNull(pred); // 11 input features, so we ought to have 11 weights. 
- Assert.Equal(11, pred.Weights2.Count); + Assert.Equal(11, pred.Weights.Count); var data = model.Read(dataSource); @@ -114,7 +114,7 @@ public void SdcaBinaryClassification() var reader = TextLoader.CreateReader(env, c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9))); - LinearBinaryPredictor pred = null; + LinearBinaryModelParameters pred = null; ParameterMixingCalibratedPredictor cali = null; var est = reader.MakeNewEstimator() @@ -131,7 +131,7 @@ public void SdcaBinaryClassification() Assert.NotNull(pred); Assert.NotNull(cali); // 9 input features, so we ought to have 9 weights. - Assert.Equal(9, pred.Weights2.Count); + Assert.Equal(9, pred.Weights.Count); var data = model.Read(dataSource); @@ -160,7 +160,7 @@ public void SdcaBinaryClassificationNoCalibration() var reader = TextLoader.CreateReader(env, c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9))); - LinearBinaryPredictor pred = null; + LinearBinaryModelParameters pred = null; var loss = new HingeLoss(1); @@ -177,7 +177,7 @@ public void SdcaBinaryClassificationNoCalibration() var model = pipe.Fit(dataSource); Assert.NotNull(pred); // 9 input features, so we ought to have 9 weights. - Assert.Equal(9, pred.Weights2.Count); + Assert.Equal(9, pred.Weights.Count); var data = model.Read(dataSource); @@ -204,7 +204,7 @@ public void AveragePerceptronNoCalibration() var reader = TextLoader.CreateReader(env, c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9))); - LinearBinaryPredictor pred = null; + LinearBinaryModelParameters pred = null; var loss = new HingeLoss(1); @@ -218,7 +218,7 @@ public void AveragePerceptronNoCalibration() var model = pipe.Fit(dataSource); Assert.NotNull(pred); // 9 input features, so we ought to have 9 weights. 
- Assert.Equal(9, pred.Weights2.Count); + Assert.Equal(9, pred.Weights.Count); var data = model.Read(dataSource); @@ -240,7 +240,7 @@ public void AveragePerceptronCalibration() var reader = TextLoader.CreateReader(env, c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9))); - LinearBinaryPredictor pred = null; + LinearBinaryModelParameters pred = null; var loss = new HingeLoss(1); @@ -254,7 +254,7 @@ public void AveragePerceptronCalibration() var model = pipe.Fit(dataSource); Assert.NotNull(pred); // 9 input features, so we ought to have 9 weights. - Assert.Equal(9, pred.Weights2.Count); + Assert.Equal(9, pred.Weights.Count); var data = model.Read(dataSource); @@ -547,7 +547,7 @@ public void PoissonRegression() c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)), separator: ';', hasHeader: true); - PoissonRegressionPredictor pred = null; + PoissonRegressionModelParameters pred = null; var est = reader.MakeNewEstimator() .Append(r => (r.label, score: ctx.Trainers.PoissonRegression(r.label, r.features, @@ -673,7 +673,7 @@ public void OnlineGradientDescent() c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)), separator: ';', hasHeader: true); - LinearRegressionPredictor pred = null; + LinearRegressionModelParameters pred = null; var loss = new SquaredLoss(); diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index 1107a0795c..a07f805010 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -19,7 +19,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment where TEnvironment : IHostEnvironment { env.ComponentCatalog.RegisterAssembly(typeof(TextLoader).Assembly); // ML.Data - env.ComponentCatalog.RegisterAssembly(typeof(LinearPredictor).Assembly); // ML.StandardLearners + env.ComponentCatalog.RegisterAssembly(typeof(LinearModelParameters).Assembly); // ML.StandardLearners 
env.ComponentCatalog.RegisterAssembly(typeof(OneHotEncodingTransformer).Assembly); // ML.Transforms env.ComponentCatalog.RegisterAssembly(typeof(FastTreeBinaryModelParameters).Assembly); // ML.FastTree env.ComponentCatalog.RegisterAssembly(typeof(EnsemblePredictor).Assembly); // ML.Ensemble diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs index 0e489b763c..2a7f440ca0 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs @@ -62,7 +62,7 @@ public void TestLogisticRegressionStats() pipe = pipe.Append(new LogisticRegression(Env, "Label", "Features", advancedSettings: s => { s.ShowTrainingStats = true; })); var transformerChain = pipe.Fit(dataView) as TransformerChain>; - var linearModel = transformerChain.LastTransformer.Model.SubPredictor as LinearBinaryPredictor; + var linearModel = transformerChain.LastTransformer.Model.SubPredictor as LinearBinaryModelParameters; var stats = linearModel.Statistics; LinearModelStatistics.TryGetBiasStatistics(stats, 2, out float stdError, out float zScore, out float pValue); @@ -83,7 +83,7 @@ public void TestLogisticRegressionStats_MKL() var transformerChain = pipe.Fit(dataView) as TransformerChain>; - var linearModel = transformerChain.LastTransformer.Model.SubPredictor as LinearBinaryPredictor; + var linearModel = transformerChain.LastTransformer.Model.SubPredictor as LinearBinaryModelParameters; var stats = linearModel.Statistics; LinearModelStatistics.TryGetBiasStatistics(stats, 2, out float stdError, out float zScore, out float pValue); From 4ed009b816ab9dce81c7934a462d541cf1fcea66 Mon Sep 17 00:00:00 2001 From: Rogan Carr Date: Wed, 12 Dec 2018 12:33:09 -0800 Subject: [PATCH 048/100] Adding support for most learning tasks to PFI (#1832) * Adding support for Multiclass Classification, Clustering, and Ranking to PFI * Not supporting Recommendation for now. 
--- .../Evaluators/Metrics/ClusteringMetrics.cs | 7 + .../Metrics/MultiClassClassifierMetrics.cs | 13 + .../Evaluators/Metrics/RankerMetrics.cs | 8 + .../PermutationFeatureImportanceExtensions.cs | 259 ++++++++++++- .../PermutationFeatureImportanceTests.cs | 361 ++++++++++++++++-- 5 files changed, 616 insertions(+), 32 deletions(-) diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs index 62b3352b63..7537fc9c3a 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/ClusteringMetrics.cs @@ -45,5 +45,12 @@ internal ClusteringMetrics(IExceptionContext ectx, Row overallResult, bool calcu if (calculateDbi) Dbi = Fetch(ClusteringEvaluator.Dbi); } + + internal ClusteringMetrics(double nmi, double avgMinScore, double dbi) + { + Nmi = nmi; + AvgMinScore = avgMinScore; + Dbi = dbi; + } } } \ No newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs index 7acdce7025..a89502a33c 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/MultiClassClassifierMetrics.cs @@ -96,5 +96,18 @@ internal MultiClassClassifierMetrics(IExceptionContext ectx, Row overallResult, PerClassLogLoss = new double[perClassLogLoss.Length]; perClassLogLoss.CopyTo(PerClassLogLoss); } + + internal MultiClassClassifierMetrics(double accuracyMicro, double accuracyMacro, double logLoss, double logLossReduction, + int topK, double topKAccuracy, double[] perClassLogLoss) + { + AccuracyMicro = accuracyMicro; + AccuracyMacro = accuracyMacro; + LogLoss = logLoss; + LogLossReduction = logLossReduction; + TopK = topK; + TopKAccuracy = topKAccuracy; + PerClassLogLoss = new double[perClassLogLoss.Length]; + perClassLogLoss.CopyTo(PerClassLogLoss, 0); + } } } \ No 
newline at end of file diff --git a/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs index a3749f0ecc..50cb0ad23d 100644 --- a/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs +++ b/src/Microsoft.ML.Data/Evaluators/Metrics/RankerMetrics.cs @@ -40,5 +40,13 @@ internal RankerMetrics(IExceptionContext ectx, Row overallResult) Dcg = Fetch(RankerEvaluator.Dcg).GetValues().ToArray(); Ndcg = Fetch(RankerEvaluator.Ndcg).GetValues().ToArray(); } + + internal RankerMetrics(double[] dcg, double[] ndcg) + { + Dcg = new double[dcg.Length]; + dcg.CopyTo(Dcg, 0); + Ndcg = new double[ndcg.Length]; + ndcg.CopyTo(Ndcg, 0); + } } } \ No newline at end of file diff --git a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs index 35b7db1ad9..611379f320 100644 --- a/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs +++ b/src/Microsoft.ML.Transforms/PermutationFeatureImportanceExtensions.cs @@ -6,12 +6,14 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Transforms; +using System; using System.Collections.Immutable; namespace Microsoft.ML { public static class PermutationFeatureImportanceExtensions { + #region Regression /// /// Permutation Feature Importance (PFI) for Regression /// @@ -26,7 +28,7 @@ public static class PermutationFeatureImportanceExtensions /// /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values /// for that feature across all the examples, so that each example now has a random value for the feature and - /// the original values for all other features. The evalution metric (e.g. AUC or R-squared) is then calculated + /// the original values for all other features. The evalution metric (e.g. 
R-squared) is then calculated /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. /// The larger the change in the evaluation metric, the more important the feature is to the model. /// PFI works by performing this permutation analysis across all the features of a model, one after another. @@ -48,7 +50,7 @@ public static class PermutationFeatureImportanceExtensions /// The model to evaluate. /// The evaluation data set. /// Label column name. - /// Feature column names. + /// Feature column name. /// Use features weight to pre-filter features. /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. /// Array of per-feature 'contributions' to the score. @@ -83,7 +85,9 @@ private static RegressionMetrics RegressionDelta( lossFunction: a.LossFn - b.LossFn, rSquared: a.RSquared - b.RSquared); } + #endregion + #region Binary Classification /// /// Permutation Feature Importance (PFI) for Binary Classification /// @@ -98,7 +102,7 @@ private static RegressionMetrics RegressionDelta( /// /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values /// for that feature across all the examples, so that each example now has a random value for the feature and - /// the original values for all other features. The evalution metric (e.g. AUC or R-squared) is then calculated + /// the original values for all other features. The evalution metric (e.g. AUC) is then calculated /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. /// The larger the change in the evaluation metric, the more important the feature is to the model. /// PFI works by performing this permutation analysis across all the features of a model, one after another. @@ -120,7 +124,7 @@ private static RegressionMetrics RegressionDelta( /// The model to evaluate. /// The evaluation data set. /// Label column name. 
- /// Feature column names. + /// Feature column name. /// Use features weight to pre-filter features. /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. /// Array of per-feature 'contributions' to the score. @@ -158,5 +162,252 @@ private static BinaryClassificationMetrics BinaryClassifierDelta( f1Score: a.F1Score - b.F1Score, auprc: a.Auprc - b.Auprc); } + + #endregion Binary Classification + + #region Multiclass Classification + /// + /// Permutation Feature Importance (PFI) for MulticlassClassification + /// + /// + /// + /// Permutation feature importance (PFI) is a technique to determine the global importance of features in a trained + /// machine learning model. PFI is a simple yet powerful technique motivated by Breiman in his Random Forest paper, section 10 + /// (Breiman. "Random Forests." Machine Learning, 2001.) + /// The advantage of the PFI method is that it is model agnostic -- it works with any model that can be + /// evaluated -- and it can use any dataset, not just the training set, to compute feature importance metrics. + /// + /// + /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values + /// for that feature across all the examples, so that each example now has a random value for the feature and + /// the original values for all other features. The evalution metric (e.g. micro-accuracy) is then calculated + /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. + /// The larger the change in the evaluation metric, the more important the feature is to the model. + /// PFI works by performing this permutation analysis across all the features of a model, one after another. + /// + /// + /// In this implementation, PFI computes the change in all possible multiclass classification evaluation metrics for each feature, and an + /// ImmutableArray of MultiClassClassifierMetrics objects is returned. 
See the sample below for an + /// example of working with these results to analyze the feature importance of a model. + /// + /// + /// + /// + /// + /// + /// + /// The clustering context. + /// The model to evaluate. + /// The evaluation data set. + /// Label column name. + /// Feature column name. + /// Use features weight to pre-filter features. + /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. + /// Array of per-feature 'contributions' to the score. + public static ImmutableArray + PermutationFeatureImportance( + this MulticlassClassificationContext ctx, + IPredictionTransformer model, + IDataView data, + string label = DefaultColumnNames.Label, + string features = DefaultColumnNames.Features, + bool useFeatureWeightFilter = false, + int? topExamples = null) + { + return PermutationFeatureImportance.GetImportanceMetricsMatrix( + CatalogUtils.GetEnvironment(ctx), + model, + data, + idv => ctx.Evaluate(idv, label), + MulticlassClassificationDelta, + features, + useFeatureWeightFilter, + topExamples); + } + + private static MultiClassClassifierMetrics MulticlassClassificationDelta( + MultiClassClassifierMetrics a, MultiClassClassifierMetrics b) + { + if (a.TopK != b.TopK) + Contracts.Assert(a.TopK== b.TopK, "TopK to compare must be the same length."); + + var perClassLogLoss = ComputeArrayDeltas(a.PerClassLogLoss, b.PerClassLogLoss); + + return new MultiClassClassifierMetrics( + accuracyMicro: a.AccuracyMicro - b.AccuracyMicro, + accuracyMacro: a.AccuracyMacro - b.AccuracyMacro, + logLoss: a.LogLoss - b.LogLoss, + logLossReduction: a.LogLossReduction - b.LogLossReduction, + topK: a.TopK, + topKAccuracy: a.TopKAccuracy - b.TopKAccuracy, + perClassLogLoss: perClassLogLoss + ); + } + + #endregion + + #region Ranking + /// + /// Permutation Feature Importance (PFI) for Ranking + /// + /// + /// + /// Permutation feature importance (PFI) is a technique to determine the global importance of features in a 
trained + /// machine learning model. PFI is a simple yet powerful technique motivated by Breiman in his Random Forest paper, section 10 + /// (Breiman. "Random Forests." Machine Learning, 2001.) + /// The advantage of the PFI method is that it is model agnostic -- it works with any model that can be + /// evaluated -- and it can use any dataset, not just the training set, to compute feature importance metrics. + /// + /// + /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values + /// for that feature across all the examples, so that each example now has a random value for the feature and + /// the original values for all other features. The evalution metric (e.g. NDCG) is then calculated + /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. + /// The larger the change in the evaluation metric, the more important the feature is to the model. + /// PFI works by performing this permutation analysis across all the features of a model, one after another. + /// + /// + /// In this implementation, PFI computes the change in all possible ranking evaluation metrics for each feature, and an + /// ImmutableArray of RankingMetrics objects is returned. See the sample below for an + /// example of working with these results to analyze the feature importance of a model. + /// + /// + /// + /// + /// + /// + /// + /// The clustering context. + /// The model to evaluate. + /// The evaluation data set. + /// Label column name. + /// GroupId column name + /// Feature column name. + /// Use features weight to pre-filter features. + /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. + /// Array of per-feature 'contributions' to the score. 
+ public static ImmutableArray + PermutationFeatureImportance( + this RankingContext ctx, + IPredictionTransformer model, + IDataView data, + string label = DefaultColumnNames.Label, + string groupId = DefaultColumnNames.GroupId, + string features = DefaultColumnNames.Features, + bool useFeatureWeightFilter = false, + int? topExamples = null) + { + return PermutationFeatureImportance.GetImportanceMetricsMatrix( + CatalogUtils.GetEnvironment(ctx), + model, + data, + idv => ctx.Evaluate(idv, label, groupId), + RankingDelta, + features, + useFeatureWeightFilter, + topExamples); + } + + private static RankerMetrics RankingDelta( + RankerMetrics a, RankerMetrics b) + { + var dcg = ComputeArrayDeltas(a.Dcg, b.Dcg); + var ndcg = ComputeArrayDeltas(a.Ndcg, b.Ndcg); + + return new RankerMetrics(dcg: dcg, ndcg: ndcg); + } + + #endregion + + #region Clustering + /// + /// Permutation Feature Importance (PFI) for Clustering + /// + /// + /// + /// Permutation feature importance (PFI) is a technique to determine the global importance of features in a trained + /// machine learning model. PFI is a simple yet powerful technique motivated by Breiman in his Random Forest paper, section 10 + /// (Breiman. "Random Forests." Machine Learning, 2001.) + /// The advantage of the PFI method is that it is model agnostic -- it works with any model that can be + /// evaluated -- and it can use any dataset, not just the training set, to compute feature importance metrics. + /// + /// + /// PFI works by taking a labeled dataset, choosing a feature, and permuting the values + /// for that feature across all the examples, so that each example now has a random value for the feature and + /// the original values for all other features. The evalution metric (e.g. normalized mutual information) is then calculated + /// for this modified dataset, and the change in the evaluation metric from the original dataset is computed. 
+ /// The larger the change in the evaluation metric, the more important the feature is to the model. + /// PFI works by performing this permutation analysis across all the features of a model, one after another. + /// + /// + /// In this implementation, PFI computes the change in all possible clustering evaluation metrics for each feature, and an + /// ImmutableArray of ClusteringMetrics objects is returned. See the sample below for an + /// example of working with these results to analyze the feature importance of a model. + /// + /// + /// + /// + /// + /// + /// + /// The clustering context. + /// The model to evaluate. + /// The evaluation data set. + /// Label column name. + /// Feature column name. + /// Use features weight to pre-filter features. + /// Limit the number of examples to evaluate on. null means examples (up to ~ 2 bln) from input will be used. + /// Array of per-feature 'contributions' to the score. + public static ImmutableArray + PermutationFeatureImportance( + this ClusteringContext ctx, + IPredictionTransformer model, + IDataView data, + string label = DefaultColumnNames.Label, + string features = DefaultColumnNames.Features, + bool useFeatureWeightFilter = false, + int? 
topExamples = null) + { + return PermutationFeatureImportance.GetImportanceMetricsMatrix( + CatalogUtils.GetEnvironment(ctx), + model, + data, + idv => ctx.Evaluate(idv, label), + ClusteringDelta, + features, + useFeatureWeightFilter, + topExamples); + } + + private static ClusteringMetrics ClusteringDelta( + ClusteringMetrics a, ClusteringMetrics b) + { + return new ClusteringMetrics( + nmi: a.Nmi - b.Nmi, + avgMinScore: a.AvgMinScore- b.AvgMinScore, + dbi: a.Dbi - b.Dbi); + } + + #endregion Clustering + + #region Helpers + + private static double[] ComputeArrayDeltas(double[] a, double[] b) + { + Contracts.Assert(a.Length == b.Length, "Arrays to compare must be of the same length."); + + var delta = new double[a.Length]; + for (int i = 0; i < a.Length; i++) + delta[i] = a[i] - b[i]; + return delta; + } + + #endregion } } diff --git a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs index 3e3247cea3..b00b93ee6b 100644 --- a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs +++ b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs @@ -2,6 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.ML.Core.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.RunTests; @@ -19,10 +21,10 @@ public PermutationFeatureImportanceTests(ITestOutputHelper output) : base(output { } + #region Regression Tests + /// - /// Features: x1, x2, x3, xRand; y = 10*x1 + 20x2 + 5.5x3 + e, xRand- random and Label y is to dependant on xRand. - /// Test verifies that xRand has the least importance: L1, L2, RMS and Loss-Fn do not change a lot when xRand is permuted. - /// Also test checks that x2 has the biggest importance. 
+ /// Test PFI Regression for Dense Features /// [Fact] public void TestPfiRegressionOnDenseFeatures() @@ -55,11 +57,7 @@ public void TestPfiRegressionOnDenseFeatures() } /// - /// Features: x1, x2vBuff(sparce vector), x3. - /// y = 10x1 + 10x2vBuff + 30x3 + e. - /// Within xBuff feature 2nd slot will be sparse most of the time. - /// Test verifies that 2nd slot of xBuff has the least importance: L1, L2, RMS and Loss-Fn do not change a lot when this slot is permuted. - /// Also test checks that x2 has the biggest importance. + /// Test PFI Regression for Sparse Features /// [Fact] public void TestPfiRegressionOnSparseFeatures() @@ -92,6 +90,12 @@ public void TestPfiRegressionOnSparseFeatures() Assert.True(MinDeltaIndex(results, m => m.RSquared) == 5); } + #endregion + + #region Binary Classification Tests + /// + /// Test PFI Binary Classification for Dense Features + /// [Fact] public void TestPfiBinaryClassificationOnDenseFeatures() { @@ -127,11 +131,7 @@ public void TestPfiBinaryClassificationOnDenseFeatures() } /// - /// Features: x1, x2vBuff(sparce vector), x3. - /// y = 10x1 + 10x2vBuff + 30x3 + e. - /// Within xBuff feature 2nd slot will be sparse most of the time. - /// Test verifies that 2nd slot of xBuff has the least importance: L1, L2, RMS and Loss-Fn do not change a lot when this slot is permuted. - /// Also test checks that x2 has the biggest importance. 
+ /// Test PFI Binary Classification for Sparse Features /// [Fact] public void TestPfiBinaryClassificationOnSparseFeatures() @@ -168,9 +168,195 @@ public void TestPfiBinaryClassificationOnSparseFeatures() Done(); } + #endregion + + #region Multiclass Classification Tests + /// + /// Test PFI Multiclass Classification for Dense Features + /// + [Fact] + public void TestPfiMulticlassClassificationOnDenseFeatures() + { + var data = GetDenseDataset(TaskType.MulticlassClassification); + var model = ML.MulticlassClassification.Trainers.LogisticRegression().Fit(data); + var pfi = ML.MulticlassClassification.PermutationFeatureImportance(model, data); + + // Pfi Indices: + // X1: 0 + // X2Important: 1 + // X3: 2 + // X4Rand: 3 + + // For the following metrics higher is better, so minimum delta means more important feature, and vice versa + Assert.True(MaxDeltaIndex(pfi, m => m.AccuracyMicro) == 3); + Assert.True(MinDeltaIndex(pfi, m => m.AccuracyMicro) == 1); + Assert.True(MaxDeltaIndex(pfi, m => m.AccuracyMacro) == 3); + Assert.True(MinDeltaIndex(pfi, m => m.AccuracyMacro) == 1); + Assert.True(MaxDeltaIndex(pfi, m => m.LogLossReduction) == 3); + Assert.True(MinDeltaIndex(pfi, m => m.LogLossReduction) == 1); + + // For the following metrics-delta lower is better, so maximum delta means more important feature, and vice versa + // Because they are _negative_, the difference will be positive for worse classifiers. 
+ Assert.True(MaxDeltaIndex(pfi, m => m.LogLoss) == 1); + Assert.True(MinDeltaIndex(pfi, m => m.LogLoss) == 3); + for (int i = 0; i < pfi[0].PerClassLogLoss.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.PerClassLogLoss[i]) == 1); + Assert.True(MinDeltaIndex(pfi, m => m.PerClassLogLoss[i]) == 3); + } + + Done(); + } + + /// + /// Test PFI Multiclass Classification for Sparse Features + /// + [Fact] + public void TestPfiMulticlassClassificationOnSparseFeatures() + { + var data = GetSparseDataset(TaskType.MulticlassClassification); + var model = ML.MulticlassClassification.Trainers.LogisticRegression(advancedSettings: args => { args.MaxIterations = 1000; }).Fit(data); + var pfi = ML.MulticlassClassification.PermutationFeatureImportance(model, data); + + // Pfi Indices: + // X1: 0 + // X2VBuffer-Slot-0: 1 + // X2VBuffer-Slot-1: 2 // Least important + // X2VBuffer-Slot-2: 3 + // X2VBuffer-Slot-3: 4 + // X3Important: 5 // Most important + + // For the following metrics higher is better, so minimum delta means more important feature, and vice versa + Assert.True(MaxDeltaIndex(pfi, m => m.AccuracyMicro) == 2); + Assert.True(MinDeltaIndex(pfi, m => m.AccuracyMicro) == 5); + Assert.True(MaxDeltaIndex(pfi, m => m.AccuracyMacro) == 2); + Assert.True(MinDeltaIndex(pfi, m => m.AccuracyMacro) == 5); + Assert.True(MaxDeltaIndex(pfi, m => m.LogLossReduction) == 2); + Assert.True(MinDeltaIndex(pfi, m => m.LogLossReduction) == 5); + + // For the following metrics-delta lower is better, so maximum delta means more important feature, and vice versa + // Because they are negative metrics, the _difference_ will be positive for worse classifiers. 
+ Assert.True(MaxDeltaIndex(pfi, m => m.LogLoss) == 5); + Assert.True(MinDeltaIndex(pfi, m => m.LogLoss) == 2); + for (int i = 0; i < pfi[0].PerClassLogLoss.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.PerClassLogLoss[i]) == 5); + Assert.True(MinDeltaIndex(pfi, m => m.PerClassLogLoss[i]) == 2); + } + + Done(); + } + #endregion + + #region Ranking Tests + /// + /// Test PFI Multiclass Classification for Dense Features + /// + [Fact] + public void TestPfiRankingOnDenseFeatures() + { + var data = GetDenseDataset(TaskType.Ranking); + var model = ML.Ranking.Trainers.FastTree().Fit(data); + var pfi = ML.Ranking.PermutationFeatureImportance(model, data); + // Pfi Indices: + // X1: 0 // For Ranking, this column won't result in misorderings + // X2Important: 1 + // X3: 2 + // X4Rand: 3 + + // For the following metrics higher is better, so minimum delta means more important feature, and vice versa + for (int i = 0; i < pfi[0].Dcg.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.Dcg[i]) == 0); + Assert.True(MinDeltaIndex(pfi, m => m.Dcg[i]) == 1); + } + for (int i = 0; i < pfi[0].Ndcg.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.Ndcg[i]) == 0); + Assert.True(MinDeltaIndex(pfi, m => m.Ndcg[i]) == 1); + } + + Done(); + } + + /// + /// Test PFI Multiclass Classification for Sparse Features + /// + [Fact] + public void TestPfiRankingOnSparseFeatures() + { + var data = GetSparseDataset(TaskType.Ranking); + var model = ML.Ranking.Trainers.FastTree().Fit(data); + var pfi = ML.Ranking.PermutationFeatureImportance(model, data); + + // Pfi Indices: + // X1: 0 + // X2VBuffer-Slot-0: 1 + // X2VBuffer-Slot-1: 2 // Least important + // X2VBuffer-Slot-2: 3 + // X2VBuffer-Slot-3: 4 + // X3Important: 5 // Most important + + // For the following metrics higher is better, so minimum delta means more important feature, and vice versa + for (int i = 0; i < pfi[0].Dcg.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.Dcg[i]) == 2); + 
Assert.True(MinDeltaIndex(pfi, m => m.Dcg[i]) == 5); + } + for (int i = 0; i < pfi[0].Ndcg.Length; i++) + { + Assert.True(MaxDeltaIndex(pfi, m => m.Ndcg[i]) == 2); + Assert.True(MinDeltaIndex(pfi, m => m.Ndcg[i]) == 5); + } + + Done(); + } + #endregion + + #region Clustering Tests + /// + /// Test PFI Clustering for Dense Features + /// + [Fact] + public void TestPfiClusteringOnDenseFeatures() + { + var data = GetDenseClusteringDataset(); + + var preview = data.Preview(); + + var model = ML.Clustering.Trainers.KMeans("Features", clustersCount: 5, + advancedSettings: args =>{ args.NormalizeFeatures = NormalizeOption.No;}) + .Fit(data); + var pfi = ML.Clustering.PermutationFeatureImportance(model, data); + + // Pfi Indices: + // X1: 0 -- middling importance for clustering (middling range) + // X2Important: 1 -- most important for clustering (largest range) + // X3: 2 -- Least important for clustering (smallest range) + + // For the following metrics lower is better, so maximum delta means more important feature, and vice versa + Assert.True(MinDeltaIndex(pfi, m => m.AvgMinScore) == 0); + Assert.True(MaxDeltaIndex(pfi, m => m.AvgMinScore) == 2); + + // For the following metrics higher is better, so minimum delta means more important feature, and vice versa + Assert.True(MinDeltaIndex(pfi, m => m.Nmi) == 2); + Assert.True(MaxDeltaIndex(pfi, m => m.Nmi) == 0); + + Done(); + } + #endregion + + #region Helpers + /// + /// Features: x1, x2, x3, xRand; y = 10*x1 + 20x2 + 5.5x3 + e, xRand- random and Label y is to dependant on xRand. + /// xRand has the least importance: Evaluation metrics do not change a lot when xRand is permuted. + /// x2 has the biggest importance. + /// private IDataView GetDenseDataset(TaskType task = TaskType.Regression) { + Contracts.Assert(task != TaskType.Clustering, $"TaskType {nameof(TaskType.Clustering)} not supported."); + // Setup synthetic dataset. 
const int numberOfInstances = 1000; var rand = new Random(10); @@ -192,12 +378,16 @@ private IDataView GetDenseDataset(TaskType task = TaskType.Regression) x4RandArray[i] = x4Rand; var noise = rand.Next(50); + yArray[i] = (float)(10 * x1 + 20 * x2Important + 5.5 * x3 + noise); } // If binary classification, modify the labels - if (task == TaskType.BinaryClassification) - GetBinaryClassificationScores(yArray); + if (task == TaskType.BinaryClassification || + task == TaskType.MulticlassClassification) + GetBinaryClassificationLabels(yArray); + else if (task == TaskType.Ranking) + GetRankingLabels(yArray); // Create data view. var bldr = new ArrayDataViewBuilder(Env); @@ -206,15 +396,28 @@ private IDataView GetDenseDataset(TaskType task = TaskType.Regression) bldr.AddColumn("X3", NumberType.Float, x3Array); bldr.AddColumn("X4Rand", NumberType.Float, x4RandArray); bldr.AddColumn("Label", NumberType.Float, yArray); + if (task == TaskType.Ranking) + bldr.AddColumn("GroupId", NumberType.U4, CreateGroupIds(yArray.Length)); var srcDV = bldr.GetDataView(); var pipeline = ML.Transforms.Concatenate("Features", "X1", "X2Important", "X3", "X4Rand") .Append(ML.Transforms.Normalize("Features")); - var data = pipeline.Fit(srcDV).Transform(srcDV); - return data; + // Create a keytype for Ranking + if (task == TaskType.Ranking) + return pipeline.Append(ML.Transforms.Conversion.MapValueToKey("GroupId")) + .Fit(srcDV).Transform(srcDV); + + return pipeline.Fit(srcDV).Transform(srcDV); } + /// + /// Features: x1, x2vBuff(sparce vector), x3. + /// y = 10x1 + 10x2vBuff + 30x3 + e. + /// Within xBuff feature 2nd slot will be sparse most of the time. + /// 2nd slot of xBuff has the least importance: Evaluation metrics do not change a lot when this slot is permuted. + /// x2 has the biggest importance. + /// private IDataView GetSparseDataset(TaskType task = TaskType.Regression) { // Setup synthetic dataset. 
@@ -257,8 +460,11 @@ private IDataView GetSparseDataset(TaskType task = TaskType.Regression) } // If binary classification, modify the labels - if (task == TaskType.BinaryClassification) - GetBinaryClassificationScores(yArray); + if (task == TaskType.BinaryClassification || + task == TaskType.MulticlassClassification) + GetBinaryClassificationLabels(yArray); + else if (task == TaskType.Ranking) + GetRankingLabels(yArray); // Create data view. var bldr = new ArrayDataViewBuilder(Env); @@ -266,13 +472,72 @@ private IDataView GetSparseDataset(TaskType task = TaskType.Regression) bldr.AddColumn("X2VBuffer", NumberType.Float, vbArray); bldr.AddColumn("X3Important", NumberType.Float, x3Array); bldr.AddColumn("Label", NumberType.Float, yArray); + if (task == TaskType.Ranking) + bldr.AddColumn("GroupId", NumberType.U4, CreateGroupIds(yArray.Length)); var srcDV = bldr.GetDataView(); var pipeline = ML.Transforms.Concatenate("Features", "X1", "X2VBuffer", "X3Important") .Append(ML.Transforms.Normalize("Features")); - var data = pipeline.Fit(srcDV).Transform(srcDV); - return data; + // Create a keytype for Ranking + if (task == TaskType.Ranking) + return pipeline.Append(ML.Transforms.Conversion.MapValueToKey("GroupId")) + .Fit(srcDV).Transform(srcDV); + + return pipeline.Fit(srcDV).Transform(srcDV); + } + + /// + /// Features: x1, x2, x3, xRand; y = 10*x1 + 20x2 + 5.5x3 + e, xRand- random and Label y is to dependant on xRand. + /// xRand has the least importance: Evaluation metrics do not change a lot when xRand is permuted. + /// x2 has the biggest importance. 
+ /// + private IDataView GetDenseClusteringDataset() + { + // Define the cluster centers + const int clusterCount = 5; + float[][] clusterCenters = new float[clusterCount][]; + for (int i = 0; i < clusterCount; i++) + { + clusterCenters[i] = new float[3] { i, i, i }; + } + + // Create rows of data sampled from these clusters + const int numberOfInstances = 1000; + var rand = new Random(10); + + // The cluster spacing is 1 + float x1Scale = 0.01f; + float x2Scale = 0.1f; + float x3Scale = 1f; + + float[] yArray = new float[numberOfInstances]; + float[] x1Array = new float[numberOfInstances]; + float[] x2Array = new float[numberOfInstances]; + float[] x3Array = new float[numberOfInstances]; + + for (var i = 0; i < numberOfInstances; i++) + { + // Assign a cluster + var clusterLabel = rand.Next(clusterCount); + yArray[i] = clusterLabel; + + x1Array[i] = clusterCenters[clusterLabel][0] + x1Scale * (float)(rand.NextDouble() - 0.5); + x2Array[i] = clusterCenters[clusterLabel][1] + x2Scale * (float)(rand.NextDouble() - 0.5); + x3Array[i] = clusterCenters[clusterLabel][2] + x3Scale * (float)(rand.NextDouble() - 0.5); + } + + // Create data view. 
+ var bldr = new ArrayDataViewBuilder(Env); + bldr.AddColumn("Label", NumberType.Float, yArray); + bldr.AddColumn("X1", NumberType.Float, x1Array); + bldr.AddColumn("X2", NumberType.Float, x2Array); + bldr.AddColumn("X3", NumberType.Float, x3Array); + var srcDV = bldr.GetDataView(); + + var pipeline = ML.Transforms.Concatenate("Features", "X1", "X2", "X3"); + + return pipeline.Fit(srcDV).Transform(srcDV); } private int MinDeltaIndex( @@ -291,23 +556,63 @@ private int MaxDeltaIndex( return metricsDelta.IndexOf(max); } - private void GetBinaryClassificationScores(float[] rawScores) + private void GetBinaryClassificationLabels(float[] rawScores) { - // Compute the average so we can center the response - float averageScore = 0.0f; - for (int i = 0; i < rawScores.Length; i++) - averageScore += rawScores[i]; - averageScore /= rawScores.Length; + float averageScore = GetArrayAverage(rawScores); // Center the response and then take the sigmoid to generate the classes for (int i = 0; i < rawScores.Length; i++) rawScores[i] = MathUtils.Sigmoid(rawScores[i] - averageScore) > 0.5 ? 
1 : 0; } + private void GetRankingLabels(float[] rawScores) + { + var min = MathUtils.Min(rawScores); + var max = MathUtils.Max(rawScores); + + for (int i = 0; i < rawScores.Length; i++) + { + // Bin from [zero,one), then expand out to [0,5) and truncate + rawScores[i] = (int)(5 * (rawScores[i] - min) / (max - min)); + if (rawScores[i] == 5) + rawScores[i] = 4; + } + } + + private float GetArrayAverage(float[] scores) + { + // Compute the average so we can center the response + float averageScore = 0.0f; + for (int i = 0; i < scores.Length; i++) + averageScore += scores[i]; + averageScore /= scores.Length; + + return averageScore; + } + + private uint[] CreateGroupIds(int numRows, int rowsPerGroup = 5) + { + var groupIds = new uint[numRows]; + // Construct groups of rowsPerGroup using a modulo counter + uint group = 0; + for (int i = 0; i < groupIds.Length; i++) + { + if (i % rowsPerGroup == 0) + group++; + groupIds[i] = group; + } + + return groupIds; + } + private enum TaskType { Regression, - BinaryClassification + BinaryClassification, + MulticlassClassification, + Ranking, + Clustering } + #endregion } } From 20f0a43fbcb917ee8cf28f1bddff5773914ec3fa Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Wed, 12 Dec 2018 12:42:12 -0800 Subject: [PATCH 049/100] Remove ISchema in MultiClassClassifierScorer.cs (#1863) * Remove ISchema in MultiClassClassifierScorer.cs * Correct selector * Minor style changes --- .../Scorers/MultiClassClassifierScorer.cs | 141 +++++------------- 1 file changed, 38 insertions(+), 103 deletions(-) diff --git a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs index d5b7ff9669..c56c9ed1ab 100644 --- a/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs +++ b/src/Microsoft.ML.Data/Scorers/MultiClassClassifierScorer.cs @@ -138,10 +138,7 @@ private LabelNameBindableMapper(IHost host, ModelLoadContext ctx) ctx.LoadNonEmptyString() : MetadataUtils.Kinds.SlotNames; 
} - public ISchemaBindableMapper Clone(ISchemaBindableMapper inner) - { - return new LabelNameBindableMapper(_host, inner, _type, _getter, _metadataKind, _canWrap); - } + public ISchemaBindableMapper Clone(ISchemaBindableMapper inner) => new LabelNameBindableMapper(_host, inner, _type, _getter, _metadataKind, _canWrap); private Delegate DecodeInit(object value) { @@ -245,14 +242,13 @@ private sealed class Bound : ISchemaBoundRowMapper private readonly VectorType _labelNameType; private readonly string _metadataKind; private readonly ValueGetter> _labelNameGetter; - private readonly SchemaImpl _outSchema; // Lazily initialized by the property. private LabelNameBindableMapper _bindable; private readonly Func _canWrap; public RoleMappedSchema InputRoleMappedSchema => _mapper.InputRoleMappedSchema; public Schema InputSchema => _mapper.InputSchema; - public Schema OutputSchema => _outSchema.AsSchema; + public Schema OutputSchema { get; } public ISchemaBindableMapper Bindable { @@ -292,100 +288,51 @@ public Bound(IHostEnvironment env, ISchemaBoundRowMapper mapper, VectorType type _labelNameType = type; _labelNameGetter = getter; _metadataKind = metadataKind; - - _outSchema = new SchemaImpl(mapper.OutputSchema, scoreIdx, _labelNameType, _labelNameGetter, _metadataKind); _canWrap = canWrap; - } - public Func GetDependencies(Func predicate) - { - return _mapper.GetDependencies(predicate); + OutputSchema = DecorateOutputSchema(mapper.OutputSchema, scoreIdx, _labelNameType, _labelNameGetter, _metadataKind); } - public IEnumerable> GetInputColumnRoles() + /// + /// Append label names to score column as its metadata. 
+ /// + private Schema DecorateOutputSchema(Schema partialSchema, int scoreColumnIndex, VectorType labelNameType, + ValueGetter> labelNameGetter, string labelNameKind) { - return _mapper.GetInputColumnRoles(); + var builder = new SchemaBuilder(); + // Sequentially add columns so that the order of them is not changed comparing with the schema in the mapper + // that computes score column. + for (int i = 0; i < partialSchema.ColumnCount; ++i) + { + var meta = new MetadataBuilder(); + if (i == scoreColumnIndex) + { + // Add label names for score column. + meta.Add(partialSchema[i].Metadata, selector: s => s != labelNameKind); + meta.Add(labelNameKind, labelNameType, labelNameGetter); + } + else + { + // Copy all existing metadata because this transform only affects score column. + meta.Add(partialSchema[i].Metadata, selector: s => true); + } + // Instead of appending extra metadata to the existing score column, we create new one because + // metadata is read-only. + builder.AddColumn(partialSchema[i].Name, partialSchema[i].Type, meta.GetMetadata()); + } + return builder.GetSchema(); } + public Func GetDependencies(Func predicate) => _mapper.GetDependencies(predicate); + + public IEnumerable> GetInputColumnRoles() => _mapper.GetInputColumnRoles(); + public Row GetRow(Row input, Func predicate) { var innerRow = _mapper.GetRow(input, predicate); return new RowImpl(innerRow, OutputSchema); } - private sealed class SchemaImpl : ISchema - { - private readonly Schema _parent; - private readonly int _scoreCol; - private readonly VectorType _labelNameType; - private readonly MetadataUtils.MetadataGetter> _labelNameGetter; - private readonly string _metadataKind; - - public Schema AsSchema { get; } - - public int ColumnCount { get { return _parent.ColumnCount; } } - - public SchemaImpl(Schema parent, int col, VectorType type, ValueGetter> getter, string metadataKind) - { - Contracts.AssertValue(parent); - Contracts.Assert(0 <= col && col < parent.ColumnCount); - 
Contracts.AssertValue(type); - Contracts.AssertValue(getter); - Contracts.Assert(type.ItemType.RawType == typeof(T)); - Contracts.AssertNonEmpty(metadataKind); - Contracts.Assert(parent.GetMetadataTypeOrNull(metadataKind, col) == null); - - _parent = parent; - _scoreCol = col; - _labelNameType = type; - // We change to this metadata variant of the getter to enable the marshal call to work. - _labelNameGetter = (int c, ref VBuffer val) => getter(ref val); - _metadataKind = metadataKind; - - AsSchema = Schema.Create(this); - } - - public bool TryGetColumnIndex(string name, out int col) - { - return _parent.TryGetColumnIndex(name, out col); - } - - public string GetColumnName(int col) - { - return _parent.GetColumnName(col); - } - - public ColumnType GetColumnType(int col) - { - return _parent.GetColumnType(col); - } - - public IEnumerable> GetMetadataTypes(int col) - { - var result = _parent.GetMetadataTypes(col); - if (col == _scoreCol) - return result.Prepend(_labelNameType.GetPair(_metadataKind)); - return result; - } - - public ColumnType GetMetadataTypeOrNull(string kind, int col) - { - if (col == _scoreCol && kind == _metadataKind) - return _labelNameType; - return _parent.GetMetadataTypeOrNull(kind, col); - } - - public void GetMetadata(string kind, int col, ref TValue value) - { - if (col == _scoreCol && kind == _metadataKind) - { - _labelNameGetter.Marshal(0, ref value); - return; - } - _parent.GetMetadata(kind, col, ref value); - } - } - private sealed class RowImpl : WrappingRow { private readonly Schema _schema; @@ -402,15 +349,9 @@ public RowImpl(Row row, Schema schema) _schema = schema; } - public override bool IsColumnActive(int col) - { - return Input.IsColumnActive(col); - } + public override bool IsColumnActive(int col) => Input.IsColumnActive(col); - public override ValueGetter GetGetter(int col) - { - return Input.GetGetter(col); - } + public override ValueGetter GetGetter(int col) => Input.GetGetter(col); } } } @@ -584,14 +525,8 @@ protected 
override JToken PredictedLabelPfa(string[] mapperOutputs) return PfaUtils.Call("a.argmax", mapperOutputs[0]); } - private static ColumnType GetPredColType(ColumnType scoreType, ISchemaBoundRowMapper mapper) - { - return new KeyType(DataKind.U4, 0, scoreType.VectorSize); - } + private static ColumnType GetPredColType(ColumnType scoreType, ISchemaBoundRowMapper mapper) => new KeyType(DataKind.U4, 0, scoreType.VectorSize); - private static bool OutputTypeMatches(ColumnType scoreType) - { - return scoreType.IsKnownSizeVector && scoreType.ItemType == NumberType.Float; - } + private static bool OutputTypeMatches(ColumnType scoreType) => scoreType.IsKnownSizeVector && scoreType.ItemType == NumberType.Float; } } From 67c4da5e40f547529a93edbcbd3485dc6629a3f7 Mon Sep 17 00:00:00 2001 From: Shahab Moradi Date: Wed, 12 Dec 2018 17:07:07 -0500 Subject: [PATCH 050/100] Enabled feature contributions for GAM trainers (#1856) * Added IFeatureContributionMapper to GamPredictorBase * Refactored FeatureContributionTest for more similar tests. Current test passes. 
* Added FCC GAM test * Cosmetic changes * Addressed Rogan's comments * Addressed Ivan's comments * Small renaming --- src/Microsoft.ML.FastTree/GamTrainer.cs | 92 ++++++--------- .../FeatureContributionTests.cs | 106 +++++++++++------- 2 files changed, 102 insertions(+), 96 deletions(-) diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 15d599e237..74eb2a4f22 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -648,8 +648,8 @@ public Stump(uint splitPoint, double lteValue, double gtValue) } } - public abstract class GamPredictorBase : PredictorBase, - IValueMapper, ICanSaveModel, ICanSaveInTextFormat, ICanSaveSummary + public abstract class GamPredictorBase : PredictorBase, IValueMapper, + IFeatureContributionMapper, ICanSaveModel, ICanSaveInTextFormat, ICanSaveSummary { private readonly double[][] _binUpperBounds; private readonly double[][] _binEffects; @@ -857,60 +857,6 @@ private void Map(in VBuffer features, ref float response) response = (float)value; } - /// - /// Returns a vector of feature contributions for a given example. - /// is used as a buffer to accumulate the contributions across trees. - /// If is null, it will be created, otherwise it will be reused. 
- /// - internal void GetFeatureContributions(in VBuffer features, ref VBuffer contribs, ref BufferBuilder builder) - { - if (builder == null) - builder = new BufferBuilder(R4Adder.Instance); - - // The model is Intercept + Features - builder.Reset(features.Length + 1, false); - builder.AddFeature(0, (float)Intercept); - - var featuresValues = features.GetValues(); - if (features.IsDense) - { - for (int i = 0; i < featuresValues.Length; ++i) - { - if (_inputFeatureToDatasetFeatureMap.TryGetValue(i, out int j)) - builder.AddFeature(i+1, (float) GetBinEffect(j, featuresValues[i])); - } - } - else - { - int k = -1; - var featuresIndices = features.GetIndices(); - int index = featuresIndices[++k]; - for (int i = 0; i < _numFeatures; ++i) - { - if (_inputFeatureToDatasetFeatureMap.TryGetValue(i, out int j)) - { - double value; - if (i == index) - { - // Get the computed value - value = GetBinEffect(j, featuresValues[index]); - // Increment index to the next feature - if (k < featuresIndices.Length - 1) - index = featuresIndices[++k]; - } - else - // For features not defined, the impact is the impact at 0 - value = GetBinEffect(i, 0); - builder.AddFeature(i + 1, (float)value); - } - } - } - - builder.GetResult(ref contribs); - - return; - } - internal double GetFeatureBinsAndScore(in VBuffer features, int[] bins) { Host.CheckParam(features.Length == _inputLength, nameof(features)); @@ -1048,6 +994,40 @@ void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) ((ICanSaveInTextFormat)this).SaveAsText(writer, schema); } + public ValueMapper> GetFeatureContributionMapper + (int top, int bottom, bool normalize) + { + Contracts.Check(typeof(TSrc) == typeof(VBuffer)); + Contracts.Check(typeof(TDstContributions) == typeof(VBuffer)); + + ValueMapper, VBuffer> del = + (in VBuffer srcFeatures, ref VBuffer dstContributions) => + { + GetFeatureContributions(in srcFeatures, ref dstContributions, top, bottom, normalize); + }; + return (ValueMapper>)(Delegate)del; + 
} + + private void GetFeatureContributions(in VBuffer features, ref VBuffer contributions, + int top, int bottom, bool normalize) + { + var editor = VBufferEditor.Create(ref contributions, features.Length); + + // We need to use dense value of features, b/c the feature contributions could be significant + // even for features with value 0. + var featureIndex = 0; + foreach (var featureValue in features.DenseValues()) + { + float contribution = 0; + if (_inputFeatureToDatasetFeatureMap.TryGetValue(featureIndex, out int j)) + contribution = (float)GetBinEffect(j, featureValue); + editor.Values[featureIndex] = contribution; + featureIndex++; + } + contributions = editor.Commit(); + Runtime.Numeric.VectorUtils.SparsifyNormalize(ref contributions, top, bottom, normalize); + } + /// /// The GAM model visualization command. Because the data access commands must access private members of /// , it is convenient to have the command itself nested within the base diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs index 97b6d47ca1..21586b4af4 100644 --- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs +++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs @@ -3,8 +3,10 @@ // See the LICENSE file in the project root for more information. 
using Microsoft.ML.Data; +using Microsoft.ML.Runtime; using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.RunTests; +using Microsoft.ML.Runtime.Training; using System; using System.Collections.Generic; using System.Linq; @@ -30,43 +32,78 @@ public FeatureContributionTests(ITestOutputHelper output) : base(output) { } + [Fact] + public void TestOrdinaryLeastSquares() + { + var expectedValues = new List { + new float[4] { 0.06319684F, 1, 0.1386623F, 4.46209469E-06F }, + new float[4] { 0.03841561F, 1, 0.1633037F, 2.68303256E-06F }, + new float[4] { 0.12006103F, 1, 0.254072F, 1.18671605E-05F }, + new float[4] { 0.20861618F, 0.99999994F, 0.407312155F, 6.963478E-05F }, + new float[4] { 0.024050576F, 0.99999994F, 0.31106182F, 8.456762E-06F }, }; + + TestFeatureContribution(ML.Regression.Trainers.OrdinaryLeastSquares(), expectedValues); + } + + [Fact] + public void TestGam() + { + // Index 1: Most important feature + // Index 3: Random feature + var expectedValues = new List { + new float[4] { 0.08439296F, 1F, 0.1442171F, -0.001832674F }, + new float[4] { -0.07902145F, -1F, -0.01937493F, 0.02314214F }, + new float[4] { 0.04072217F, -1F, 0.01370963F, -0.00197823F }, + new float[4] { -0.02197981F, -1F, -0.1051985F, -0.004131221F }, + new float[4] { -0.1072952F, -1F, 0.1284171F, -0.002337188F }, }; + + TestFeatureContribution(ML.Regression.Trainers.GeneralizedAdditiveModels(), expectedValues, 5); + } + /// /// Features: x1, x2, x3, xRand; y = 10*x1 + 20x2 + 5.5x3 + e, xRand- random, Label y is dependant on xRand. /// Test verifies that feature contribution scores are outputted along with a score for predicted data. /// - [Fact] - public void TestFeatureImportance() + private void TestFeatureContribution( + ITrainerEstimator, IPredictor> trainer, + List expectedValues, + int precision = 6) { // Setup synthetic dataset. 
- const int numberOfInstances = 1000; + const int numInstances = 1000; + const int numFeatures = 4; + var rand = new Random(10); - float[] yArray = new float[numberOfInstances], - x1Array = new float[numberOfInstances], - x2Array = new float[numberOfInstances], - x3Array = new float[numberOfInstances], - x4RandArray = new float[numberOfInstances]; + float[] yArray = new float[numInstances]; + float[][] xArray = new float[numFeatures][]; + int[] xRangeArray = new[] { 1000, 10000, 5000, 1000 }; + float[] xWeightArray = new[] { + 10, + 20, // Most important feature with high weight. Should have the highest contribution. + 5.5f, + 0, // Least important feature. Should have the least contribution. + }; - for (var i = 0; i < numberOfInstances; i++) + for (var instanceIndex = 0; instanceIndex < numInstances; instanceIndex++) { - var x1 = rand.Next(1000); - x1Array[i] = x1; - var x2Important = rand.Next(10000); - x2Array[i] = x2Important; - var x3 = rand.Next(5000); - x3Array[i] = x3; - var x4Rand = rand.Next(1000); - x4RandArray[i] = x4Rand; + for (int featureIndex = 0; featureIndex < numFeatures; featureIndex++) + { + if (xArray[featureIndex] == null) + xArray[featureIndex] = new float[numInstances]; + xArray[featureIndex][instanceIndex] = rand.Next(xRangeArray[featureIndex]); + yArray[instanceIndex] += xArray[featureIndex][instanceIndex] * xWeightArray[featureIndex]; + } var noise = rand.Next(50); - yArray[i] = (float)(10 * x1 + 20 * x2Important + 5.5 * x3 + noise); + yArray[instanceIndex] += noise; } // Create data view. 
var bldr = new ArrayDataViewBuilder(Env); - bldr.AddColumn("X1", NumberType.Float, x1Array); - bldr.AddColumn("X2Important", NumberType.Float, x2Array); - bldr.AddColumn("X3", NumberType.Float, x3Array); - bldr.AddColumn("X4Rand", NumberType.Float, x4RandArray); + bldr.AddColumn("X1", NumberType.Float, xArray[0]); + bldr.AddColumn("X2Important", NumberType.Float, xArray[1]); + bldr.AddColumn("X3", NumberType.Float, xArray[2]); + bldr.AddColumn("X4Rand", NumberType.Float, xArray[3]); bldr.AddColumn("Label", NumberType.Float, yArray); var srcDV = bldr.GetDataView(); @@ -74,7 +111,7 @@ public void TestFeatureImportance() .AppendCacheCheckpoint(ML) .Append(ML.Transforms.Normalize("Features")); var data = pipeline.Fit(srcDV).Transform(srcDV); - var model = ML.Regression.Trainers.OrdinaryLeastSquares().Fit(data); + var model = trainer.Fit(data); var args = new FeatureContributionCalculationTransform.Arguments() { Bottom = 10, @@ -82,24 +119,13 @@ public void TestFeatureImportance() }; var output = FeatureContributionCalculationTransform.Create(Env, args, data, model.Model, model.FeatureColumn); - // Get prediction scores and contributions - var enumerator = output.AsEnumerable(Env, true).GetEnumerator(); - ScoreAndContribution row = null; - var expectedValues = new List(); - expectedValues.Add(new float[4] { 0.06319684F, 1, 0.1386623F, 4.46209469E-06F }); - expectedValues.Add(new float[4] { 0.03841561F, 1, 0.1633037F, 2.68303256E-06F }); - expectedValues.Add(new float[4] { 0.12006103F, 1, 0.254072F, 1.18671605E-05F }); - expectedValues.Add(new float[4] { 0.20861618F, 0.99999994F, 0.407312155F, 6.963478E-05F }); - expectedValues.Add(new float[4] { 0.024050576F, 0.99999994F, 0.31106182F, 8.456762E-06F }); - int index = 0; - while (enumerator.MoveNext() && index < expectedValues.Count) + var transformedOutput = output.AsEnumerable(Env, true); + int rowIndex = 0; + foreach (var row in transformedOutput.Take(expectedValues.Count)) { - row = enumerator.Current; - // We set 
predicion to 6 because the limit of floating-point numbers is 7. - Assert.Equal(expectedValues[index][0], row.FeatureContributions[0], 6); - Assert.Equal(expectedValues[index][1], row.FeatureContributions[1], 6); - Assert.Equal(expectedValues[index][2], row.FeatureContributions[2], 6); - Assert.Equal(expectedValues[index++][3], row.FeatureContributions[3], 6); + var expectedValue = expectedValues[rowIndex++]; + for (int i = 0; i < numFeatures; i++) + Assert.Equal(expectedValue[i], row.FeatureContributions[i], precision); } Done(); From ad57f02d058cf301a055c8a5b893b9d006b8349f Mon Sep 17 00:00:00 2001 From: Ivan Matantsev Date: Wed, 12 Dec 2018 16:06:57 -0800 Subject: [PATCH 051/100] Ngram hashing to estimator (#1811) --- .../ConversionsExtensionsCatalog.cs | 9 +- .../Transforms/DropSlotsTransform.cs | 4 +- src/Microsoft.ML.Data/Transforms/Hashing.cs | 10 +- .../Transforms/KeyToVector.cs | 270 +--- .../Transforms/ValueToKeyMappingEstimator.cs | 4 +- .../ValueToKeyMappingTransformer.cs | 100 +- .../VectorWhitening.cs | 6 +- src/Microsoft.ML.Legacy/CSharpApi.cs | 14 +- .../CategoricalCatalog.cs | 5 +- .../CountFeatureSelection.cs | 2 +- .../KeyToVectorMapping.cs | 184 +-- .../MissingValueReplacing.cs | 170 +-- ...codingTransformer.cs => OneHotEncoding.cs} | 30 +- ...ngTransformer.cs => OneHotHashEncoding.cs} | 44 +- .../Text/NgramHashingTransformer.cs | 1262 +++++++++-------- .../Text/NgramTransform.cs | 9 +- .../Text/StopWordsRemovingTransformer.cs | 2 +- .../Text/TextCatalog.cs | 102 +- .../Text/TextStaticExtensions.cs | 18 +- .../Text/TokenizingByCharacters.cs | 1 - .../Text/WordBagTransform.cs | 3 +- .../Text/WordHashBagProducingTransform.cs | 48 +- .../Text/WrappedTextTransformers.cs | 157 +- .../TransformsStatic.cs | 574 +++++++- .../Common/EntryPoints/core_ep-list.tsv | 2 +- .../Common/EntryPoints/core_manifest.json | 2 +- .../SavePipe/SavePipeInvertHash1-Data.txt | 14 + .../SavePipe/SavePipeInvertHash1-Schema.txt | 239 ++++ 
.../SavePipe/SavePipeInvertHash2-Data.txt | 15 + .../SavePipe/SavePipeInvertHash2-Schema.txt | 190 +++ .../SavePipe/SavePipeInvertHash3-Data.txt | 11 + .../SavePipe/SavePipeInvertHash3-Schema.txt | 48 + .../SavePipe/SavePipeInvertHash4-Data.txt | 13 + .../SavePipe/SavePipeInvertHash4-Schema.txt | 81 ++ .../SavePipe/SavePipeInvertHash5-Data.txt | 12 + .../SavePipe/SavePipeInvertHash5-Schema.txt | 53 + .../SavePipe/SavePipeInvertHash6-Data.txt | 9 + .../SavePipe/SavePipeInvertHash6-Schema.txt | 41 + .../SavePipeNgramHash-Convert-Schema.txt | 2 +- .../SavePipe/SavePipeNgramHash-Schema.txt | 12 +- .../Common/SavePipe/SavePipeWordBag-Data.txt | 113 ++ .../SavePipe/SavePipeWordBag-Schema.txt | 396 ++++++ .../SavePipeWordBagManyToOne-Data.txt | 108 ++ .../SavePipeWordBagManyToOne-Schema.txt | 82 ++ .../SavePipe/SavePipeWordBagTfIdf-Data.txt | 11 + .../SavePipe/SavePipeWordBagTfIdf-Schema.txt | 31 + .../SavePipe/SavePipeWordHash-Schema.txt | 2 +- .../SavePipeWordHashUnordered-Data.txt | 106 ++ .../SavePipeWordHashUnordered-Schema.txt | 52 + .../UnitTests/TestEntryPoints.cs | 6 +- .../DataPipe/TestDataPipe.cs | 197 +++ .../Transformers/KeyToVectorEstimatorTests.cs | 24 +- .../Transformers/NAReplaceTests.cs | 1 + .../Transformers/TextFeaturizerTests.cs | 2 +- 54 files changed, 3374 insertions(+), 1529 deletions(-) rename src/Microsoft.ML.Transforms/{OneHotEncodingTransformer.cs => OneHotEncoding.cs} (89%) rename src/Microsoft.ML.Transforms/{OneHotHashEncodingTransformer.cs => OneHotHashEncoding.cs} (84%) create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash1-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash2-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Data.txt create mode 100644 
test/BaselineOutput/Common/SavePipe/SavePipeInvertHash3-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash4-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash5-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeInvertHash6-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBag-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBagManyToOne-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordBagTfIdf-Schema.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Data.txt create mode 100644 test/BaselineOutput/Common/SavePipe/SavePipeWordHashUnordered-Schema.txt diff --git a/src/Microsoft.ML.Data/Transforms/ConversionsExtensionsCatalog.cs b/src/Microsoft.ML.Data/Transforms/ConversionsExtensionsCatalog.cs index f6b45af576..c6b4ab5800 100644 --- a/src/Microsoft.ML.Data/Transforms/ConversionsExtensionsCatalog.cs +++ b/src/Microsoft.ML.Data/Transforms/ConversionsExtensionsCatalog.cs @@ -23,7 +23,10 @@ public static class ConversionsExtensionsCatalog /// Name of the input column. /// Name of the column to be transformed. If this is null '' will be used. /// Number of bits to hash into. Must be between 1 and 31, inclusive. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. 
+ /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public static HashingEstimator Hash(this TransformsCatalog.ConversionTransforms catalog, string inputColumn, string outputColumn = null, int hashBits = HashDefaults.HashBits, int invertHash = HashDefaults.InvertHash) => new HashingEstimator(CatalogUtils.GetEnvironment(catalog), inputColumn, outputColumn, hashBits, invertHash); @@ -99,8 +102,8 @@ public static KeyToVectorMappingEstimator MapKeyToVector(this TransformsCatalog. /// Name of the column to be transformed. /// Name of the output column. If this is null '' will be used. /// Maximum number of keys to keep per column when auto-training. - /// How items should be ordered when vectorized. By default, they will be in the order encountered. - /// If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). + /// How items should be ordered when vectorized. If choosen they will be in the order encountered. + /// If , items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). 
public static ValueToKeyMappingEstimator MapValueToKey(this TransformsCatalog.ConversionTransforms catalog, string inputColumn, string outputColumn = null, diff --git a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs index 8e0f146b7c..50ade5bdfb 100644 --- a/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs +++ b/src/Microsoft.ML.Data/Transforms/DropSlotsTransform.cs @@ -200,7 +200,7 @@ public sealed class ColumnInfo /// Describes how the transformer handles one input-output column pair. /// /// Name of the input column. - /// Name of the column resulting from the transformation of . Null means is replaced. + /// Name of the column resulting from the transformation of . Null means is replaced. /// Ranges of indices in the input column to be dropped. Setting max in to null sets max to int.MaxValue. public ColumnInfo(string input, string output = null, params (int min, int? max)[] slots) { @@ -252,7 +252,7 @@ private static VersionInfo GetVersionInfo() /// /// The environment to use. /// Name of the input column. - /// Name of the column resulting from the transformation of . Null means is replaced. + /// Name of the column resulting from the transformation of . Null means is replaced. /// Specifies the lower bound of the range of slots to be dropped. The lower bound is inclusive. /// Specifies the upper bound of the range of slots to be dropped. The upper bound is exclusive. public SlotsDroppingTransformer(IHostEnvironment env, string input, string output = null, int min = default, int? max = null) diff --git a/src/Microsoft.ML.Data/Transforms/Hashing.cs b/src/Microsoft.ML.Data/Transforms/Hashing.cs index c68cbd5d48..b603edd741 100644 --- a/src/Microsoft.ML.Data/Transforms/Hashing.cs +++ b/src/Microsoft.ML.Data/Transforms/Hashing.cs @@ -132,7 +132,10 @@ public sealed class ColumnInfo /// Number of bits to hash into. Must be between 1 and 31, inclusive. /// Hashing seed. 
/// Whether the position of each term should be included in the hash. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public ColumnInfo(string input, string output, int hashBits = HashingEstimator.Defaults.HashBits, uint seed = HashingEstimator.Defaults.Seed, @@ -1211,7 +1214,10 @@ internal static bool IsColumnTypeValid(ColumnType type) /// Name of the column to be transformed. /// Name of the output column. If this is null '' will be used. /// Number of bits to hash into. Must be between 1 and 31, inclusive. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. + /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. public HashingEstimator(IHostEnvironment env, string inputColumn, string outputColumn = null, int hashBits = Defaults.HashBits, int invertHash = Defaults.InvertHash) : this(env, new HashingTransformer.ColumnInfo(inputColumn, outputColumn ?? 
inputColumn, hashBits: hashBits, invertHash: invertHash)) diff --git a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs index d406a30606..82bc7c8eda 100644 --- a/src/Microsoft.ML.Data/Transforms/KeyToVector.cs +++ b/src/Microsoft.ML.Data/Transforms/KeyToVector.cs @@ -11,8 +11,6 @@ using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.Model.Onnx; using Microsoft.ML.Runtime.Model.Pfa; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Transforms.Conversions; using Newtonsoft.Json.Linq; using System; @@ -88,16 +86,26 @@ public sealed class Arguments public bool Bag = KeyToVectorMappingEstimator.Defaults.Bag; } - public class ColumnInfo + /// + /// Describes how the transformer handles one column pair. + /// + public sealed class ColumnInfo { public readonly string Input; public readonly string Output; public readonly bool Bag; - public ColumnInfo(string input, string output, bool bag = KeyToVectorMappingEstimator.Defaults.Bag) + /// + /// Describes how the transformer handles one column pair. + /// + /// Name of input column. + /// Name of the column resulting from the transformation of . Null means is replaced. + /// Whether to combine multiple indicator vectors into a single bag vector instead of concatenating them. This is only relevant when the input column is a vector. + public ColumnInfo(string input, string output = null, bool bag = KeyToVectorMappingEstimator.Defaults.Bag) { + Contracts.CheckNonWhiteSpace(input, nameof(input)); Input = input; - Output = output; + Output = output ?? input; Bag = bag; } } @@ -769,256 +777,4 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) return new SchemaShape(result.Values); } } - - /// - /// Extension methods for the static-pipeline over objects. 
- /// - public static class KeyToVectorExtensions - { - private interface IColInput - { - PipelineColumn Input { get; } - bool Bag { get; } - } - - private sealed class OutVectorColumn : Vector, IColInput - { - public PipelineColumn Input { get; } - public bool Bag { get; } - - public OutVectorColumn(Key input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = false; - } - - public OutVectorColumn(Vector> input, bool bag) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = bag; - } - - public OutVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = true; - } - } - - private sealed class OutVarVectorColumn : VarVector, IColInput - { - public PipelineColumn Input { get; } - public bool Bag { get; } - - public OutVarVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = false; - } - } - - private sealed class OutVectorColumn : Vector, IColInput - { - public PipelineColumn Input { get; } - public bool Bag { get; } - - public OutVectorColumn(Key input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = false; - } - - public OutVectorColumn(Vector> input, bool bag) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = bag; - } - - public OutVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = true; - } - } - - private sealed class OutVarVectorColumn : VarVector, IColInput - { - public PipelineColumn Input { get; } - public bool Bag { get; } - - public OutVarVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - Bag = false; - } - } - - private sealed class Reconciler : EstimatorReconciler - { - public static Reconciler Inst = new Reconciler(); - - private Reconciler() { } - - public override IEstimator Reconcile(IHostEnvironment env, - PipelineColumn[] toOutput, - IReadOnlyDictionary inputNames, - IReadOnlyDictionary outputNames, - IReadOnlyCollection usedNames) - { - var infos = new 
KeyToVectorMappingTransformer.ColumnInfo[toOutput.Length]; - for (int i = 0; i < toOutput.Length; ++i) - { - var col = (IColInput)toOutput[i]; - infos[i] = new KeyToVectorMappingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]], col.Bag); - } - return new KeyToVectorMappingEstimator(env, infos); - } - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// - public static Vector ToVector(this Key input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// - public static Vector ToVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, false); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. 
- /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static VarVector ToVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static Vector ToBaggedVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, true); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. 
- /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static Vector ToBaggedVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// - public static Vector ToVector(this Key input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// - public static Vector ToVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, false); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. 
- /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static VarVector ToVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. - /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static Vector ToBaggedVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input, true); - } - - /// - /// Takes a column of key type of known cardinality and produces an indicator vector of floats. 
- /// Each key value of the input is used to create an indicator vector: the indicator vector is the length of the key cardinality, - /// where all values are 0, except for the entry corresponding to the value of the key, which is 1. - /// If the key value is missing, then all values are 0. Naturally this tends to generate very sparse vectors. - /// In this case then the indicator vectors for all values in the column will be simply added together, - /// to produce the final vector with type equal to the key cardinality; so, in all cases, whether vector or scalar, - /// the output column will be a vector type of length equal to that cardinality. - /// - public static Vector ToBaggedVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - } } diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs index 24ff1c6915..ab74396671 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingEstimator.cs @@ -36,8 +36,8 @@ public static class Defaults /// Name of the column to be transformed. /// Name of the output column. If this is null '' will be used. /// Maximum number of keys to keep per column when auto-training. - /// How items should be ordered when vectorized. By default, they will be in the order encountered. - /// If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). + /// How items should be ordered when vectorized. If choosen they will be in the order encountered. + /// If , items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). 
public ValueToKeyMappingEstimator(IHostEnvironment env, string inputColumn, string outputColumn = null, int maxNumTerms = Defaults.MaxNumTerms, ValueToKeyMappingTransformer.SortOrder sort = Defaults.Sort) : this(env, new [] { new ValueToKeyMappingTransformer.ColumnInfo(inputColumn, outputColumn ?? inputColumn, maxNumTerms, sort) }) { diff --git a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs index b179f358dc..8edd76dff6 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueToKeyMappingTransformer.cs @@ -158,18 +158,11 @@ public ColInfo(string name, string source, ColumnType type) } } + /// + /// Describes how the transformer handles one column pair. + /// public class ColumnInfo { - public ColumnInfo(string input, string output, int maxNumTerms = ValueToKeyMappingEstimator.Defaults.MaxNumTerms, SortOrder sort = ValueToKeyMappingEstimator.Defaults.Sort, string[] term = null, bool textKeyValues = false) - { - Input = input; - Output = output; - Sort = sort; - MaxNumTerms = maxNumTerms; - Term = term; - TextKeyValues = textKeyValues; - } - public readonly string Input; public readonly string Output; public readonly SortOrder Sort; @@ -178,13 +171,41 @@ public ColumnInfo(string input, string output, int maxNumTerms = ValueToKeyMappi public readonly bool TextKeyValues; protected internal string Terms { get; set; } - } - public const string Summary = "Converts input values (words, numbers, etc.) to index in a dictionary."; - public const string UserName = "Term Transform"; - public const string LoaderSignature = "TermTransform"; - - public const string FriendlyName = "To Key"; + /// + /// Describes how the transformer handles one column pair. + /// + /// Name of input column. + /// Name of the column resulting from the transformation of . Null means is replaced. 
+ /// Maximum number of terms to keep per column when auto-training. + /// How items should be ordered when vectorized. If choosen they will be in the order encountered. + /// If , items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). + /// List of terms. + /// Whether key value metadata should be text, regardless of the actual input type. + public ColumnInfo(string input, string output = null, + int maxNumTerms = ValueToKeyMappingEstimator.Defaults.MaxNumTerms, + SortOrder sort = ValueToKeyMappingEstimator.Defaults.Sort, + string[] term = null, + bool textKeyValues = false + ) + { + Contracts.CheckNonWhiteSpace(input, nameof(input)); + Input = input; + Output = output ?? input; + Sort = sort; + MaxNumTerms = maxNumTerms; + Term = term; + TextKeyValues = textKeyValues; + } + } + [BestFriend] + internal const string Summary = "Converts input values (words, numbers, etc.) to index in a dictionary."; + [BestFriend] + internal const string UserName = "Term Transform"; + [BestFriend] + internal const string LoaderSignature = "TermTransform"; + [BestFriend] + internal const string FriendlyName = "To Key"; private static VersionInfo GetVersionInfo() { @@ -202,7 +223,7 @@ private static VersionInfo GetVersionInfo() private const uint VerNonTextTypesSupported = 0x00010003; private const uint VerManagerNonTextTypesSupported = 0x00010002; - public const string TermManagerLoaderSignature = "TermManager"; + internal const string TermManagerLoaderSignature = "TermManager"; private static volatile MemoryStreamPool _codecFactoryPool; private volatile CodecFactory _codecFactory; @@ -289,8 +310,9 @@ internal ValueToKeyMappingTransformer(IHostEnvironment env, IDataView input, } } + [BestFriend] // Factory method for SignatureDataTransform. 
- public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) + internal static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(args, nameof(args)); @@ -391,47 +413,6 @@ private static IDataTransform Create(IHostEnvironment env, ModelLoadContext ctx, private static IRowMapper Create(IHostEnvironment env, ModelLoadContext ctx, Schema inputSchema) => Create(env, ctx).MakeRowMapper(inputSchema); - /// - /// Initializes a new instance of . - /// - /// Host Environment. - /// Input . This is the output from previous transform or loader. - /// Name of the output column. - /// Name of the column to be transformed. If this is null '' will be used. - /// Maximum number of terms to keep per column when auto-training. - /// How items should be ordered when vectorized. By default, they will be in the order encountered. - /// If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). - public static IDataView Create(IHostEnvironment env, - IDataView input, string name, string source = null, - int maxNumTerms = ValueToKeyMappingEstimator.Defaults.MaxNumTerms, SortOrder sort = ValueToKeyMappingEstimator.Defaults.Sort) => - new ValueToKeyMappingTransformer(env, input, new[] { new ColumnInfo(source ?? 
name, name, maxNumTerms, sort) }).MakeDataTransform(input); - - public static IDataTransform Create(IHostEnvironment env, ArgumentsBase args, ColumnBase[] column, IDataView input) - { - return Create(env, new Arguments() - { - Column = column.Select(x => new Column() - { - MaxNumTerms = x.MaxNumTerms, - Name = x.Name, - Sort = x.Sort, - Source = x.Source, - Term = x.Term, - Terms = x.Terms, - TextKeyValues = x.TextKeyValues - }).ToArray(), - Data = args.Data, - DataFile = args.DataFile, - Loader = args.Loader, - MaxNumTerms = args.MaxNumTerms, - Sort = args.Sort, - Term = args.Term, - Terms = args.Terms, - TermsColumn = args.TermsColumn, - TextKeyValues = args.TextKeyValues - }, input); - } - /// /// Utility method to create the file-based . /// @@ -718,7 +699,6 @@ private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx, ISaveAsPfa private readonly ColumnType[] _types; private readonly ValueToKeyMappingTransformer _parent; private readonly ColInfo[] _infos; - private readonly BoundTermMap[] _termMap; public bool CanSaveOnnx(OnnxContext ctx) => true; diff --git a/src/Microsoft.ML.HalLearners/VectorWhitening.cs b/src/Microsoft.ML.HalLearners/VectorWhitening.cs index 707e8c780f..5279ae54cd 100644 --- a/src/Microsoft.ML.HalLearners/VectorWhitening.cs +++ b/src/Microsoft.ML.HalLearners/VectorWhitening.cs @@ -11,8 +11,6 @@ using Microsoft.ML.Runtime.Internal.Internallearn; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Transforms.Projections; using System; using System.Collections.Generic; @@ -131,7 +129,7 @@ public sealed class ColumnInfo /// Describes how the transformer handles one input-output column pair. /// /// Name of the input column. - /// Name of the column resulting from the transformation of . Null means is replaced. + /// Name of the column resulting from the transformation of . Null means is replaced. 
/// Whitening kind (PCA/ZCA). /// Whitening constant, prevents division by zero. /// Maximum number of rows used to train the transform. @@ -626,7 +624,7 @@ public static unsafe void Gemv(Layout layout, Transpose trans, int m, int n, flo } // See: https://software.intel.com/en-us/node/520750 - [DllImport(MklPath , CallingConvention = CallingConvention.Cdecl, EntryPoint = "cblas_sgemv"), SuppressUnmanagedCodeSecurity] + [DllImport(MklPath, CallingConvention = CallingConvention.Cdecl, EntryPoint = "cblas_sgemv"), SuppressUnmanagedCodeSecurity] private static unsafe extern void Gemv(Layout layout, Transpose trans, int m, int n, float alpha, float* a, int lda, float* x, int incx, float beta, float* y, int incy); diff --git a/src/Microsoft.ML.Legacy/CSharpApi.cs b/src/Microsoft.ML.Legacy/CSharpApi.cs index 54f1a84807..f91479b881 100644 --- a/src/Microsoft.ML.Legacy/CSharpApi.cs +++ b/src/Microsoft.ML.Legacy/CSharpApi.cs @@ -12734,7 +12734,7 @@ public enum OneHotEncodingTransformerOutputKind : byte [Obsolete] - public sealed partial class OneHotHashEncodingTransformerColumn : OneToOneColumn, IOneToOneColumn + public sealed partial class OneHotHashEncodingColumn : OneToOneColumn, IOneToOneColumn { /// /// The number of bits to hash into. Must be between 1 and 30, inclusive. @@ -12814,15 +12814,15 @@ public CategoricalHashOneHotVectorizer(params (string inputColumn, string output public void AddColumn(string inputColumn) { - var list = Column == null ? new List() : new List(Column); - list.Add(OneToOneColumn.Create(inputColumn)); + var list = Column == null ? new List() : new List(Column); + list.Add(OneToOneColumn.Create(inputColumn)); Column = list.ToArray(); } public void AddColumn(string outputColumn, string inputColumn) { - var list = Column == null ? new List() : new List(Column); - list.Add(OneToOneColumn.Create(outputColumn, inputColumn)); + var list = Column == null ? 
new List() : new List(Column); + list.Add(OneToOneColumn.Create(outputColumn, inputColumn)); Column = list.ToArray(); } @@ -12831,7 +12831,7 @@ public void AddColumn(string outputColumn, string inputColumn) /// New column definition(s) (optional form: name:hashBits:src) /// [Obsolete] - public OneHotHashEncodingTransformerColumn[] Column { get; set; } + public OneHotHashEncodingColumn[] Column { get; set; } /// /// Number of bits to hash into. Must be between 1 and 30, inclusive. @@ -19197,7 +19197,7 @@ public sealed partial class TermLoaderArguments public string[] Term { get; set; } /// - /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). + /// How items should be ordered when vectorized. By default, they will be in the order encountered. If by value, items are sorted according to their default comparison, for example, text sorting will be case sensitive (for example, 'A' then 'Z' then 'a'). /// [Obsolete] public ValueToKeyMappingTransformerSortOrder Sort { get; set; } = ValueToKeyMappingTransformerSortOrder.Occurrence; diff --git a/src/Microsoft.ML.Transforms/CategoricalCatalog.cs b/src/Microsoft.ML.Transforms/CategoricalCatalog.cs index 9f3b3b6547..4cd908b41f 100644 --- a/src/Microsoft.ML.Transforms/CategoricalCatalog.cs +++ b/src/Microsoft.ML.Transforms/CategoricalCatalog.cs @@ -44,7 +44,10 @@ public static OneHotEncodingEstimator OneHotEncoding(this TransformsCatalog.Cate /// The input column /// The output column. If null, is used. /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit. + /// During hashing we constuct mappings between original values and the produced hash values. 
+ /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. + /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. + /// 0 does not retain any input values. -1 retains all input values mapping to each hash. /// The conversion mode. /// public static OneHotHashEncodingEstimator OneHotHashEncoding(this TransformsCatalog.CategoricalTransforms catalog, diff --git a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs index d37d6b76b4..cdf377b365 100644 --- a/src/Microsoft.ML.Transforms/CountFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/CountFeatureSelection.cs @@ -55,7 +55,7 @@ public sealed class ColumnInfo /// Describes the parameters of the feature selection process for a column pair. /// /// Name of the input column. - /// Name of the column resulting from the transformation of . Null means is replaced. + /// Name of the column resulting from the transformation of . Null means is replaced. /// If the count of non-default values for a slot is greater than or equal to this threshold in the training data, the slot is preserved. 
public ColumnInfo(string input, string output = null, long minCount = Defaults.Count) { diff --git a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs index 9b6c6bb81b..7c43e7d1c3 100644 --- a/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs +++ b/src/Microsoft.ML.Transforms/KeyToVectorMapping.cs @@ -9,8 +9,6 @@ using Microsoft.ML.Runtime.Data; using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Transforms.Conversions; using System; using System.Collections.Generic; @@ -39,21 +37,32 @@ public sealed class Arguments ShortName = "col", SortOrder = 1)] public KeyToVectorMappingTransformer.Column[] Column; } - public class ColumnInfo + + /// + /// Describes how the transformer handles one column pair. + /// + public sealed class ColumnInfo { public readonly string Input; public readonly string Output; - public ColumnInfo(string input, string output) + /// + /// Describes how the transformer handles one column pair. + /// + /// Name of input column. + /// Name of the column resulting from the transformation of . Null means is replaced. + + public ColumnInfo(string input, string output = null) { + Contracts.CheckNonWhiteSpace(input, nameof(input)); Input = input; - Output = output; + Output = output ?? 
input; } } internal const string Summary = "Converts a key column to a binary encoded vector."; - public const string UserName = "KeyToBinaryVectorTransform"; - public const string LoaderSignature = "KeyToBinaryTransform"; + internal const string UserName = "KeyToBinaryVectorTransform"; + internal const string LoaderSignature = "KeyToBinaryTransform"; private static VersionInfo GetVersionInfo() { @@ -131,11 +140,11 @@ private KeyToBinaryVectorMappingTransformer(IHost host, ModelLoadContext ctx) _columns[i] = new ColumnInfo(ColumnPairs[i].input, ColumnPairs[i].output); } - public static IDataTransform Create(IHostEnvironment env, IDataView input, params ColumnInfo[] columns) => + private static IDataTransform Create(IHostEnvironment env, IDataView input, params ColumnInfo[] columns) => new KeyToBinaryVectorMappingTransformer(env, columns).MakeDataTransform(input); // Factory method for SignatureDataTransform. - public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) + private static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(args, nameof(args)); @@ -485,162 +494,5 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) return new SchemaShape(result.Values); } } - /// - /// Extension methods for the static-pipeline over objects. 
- /// - public static class KeyToBinaryVectorExtensions - { - private interface IColInput - { - PipelineColumn Input { get; } - } - - private sealed class OutVectorColumn : Vector, IColInput - { - public PipelineColumn Input { get; } - - public OutVectorColumn(Vector> input) - : base(Reconciler.Inst, input) - { - Input = input; - } - - public OutVectorColumn(Key input) - : base(Reconciler.Inst, input) - { - Input = input; - } - } - - private sealed class OutVarVectorColumn : VarVector, IColInput - { - public PipelineColumn Input { get; } - public OutVarVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - } - } - - private sealed class OutVectorColumn : Vector, IColInput - { - public PipelineColumn Input { get; } - - public OutVectorColumn(Vector> input) - : base(Reconciler.Inst, input) - { - Input = input; - } - - public OutVectorColumn(Key input) - : base(Reconciler.Inst, input) - { - Input = input; - } - } - - private sealed class OutVarVectorColumn : VarVector, IColInput - { - public PipelineColumn Input { get; } - public OutVarVectorColumn(VarVector> input) - : base(Reconciler.Inst, input) - { - Input = input; - } - } - - private sealed class Reconciler : EstimatorReconciler - { - public static Reconciler Inst = new Reconciler(); - - private Reconciler() { } - - public override IEstimator Reconcile(IHostEnvironment env, - PipelineColumn[] toOutput, - IReadOnlyDictionary inputNames, - IReadOnlyDictionary outputNames, - IReadOnlyCollection usedNames) - { - var infos = new KeyToBinaryVectorMappingTransformer.ColumnInfo[toOutput.Length]; - for (int i = 0; i < toOutput.Length; ++i) - { - var col = (IColInput)toOutput[i]; - infos[i] = new KeyToBinaryVectorMappingTransformer.ColumnInfo(inputNames[col.Input], outputNames[toOutput[i]]); - } - return new KeyToBinaryVectorMappingEstimator(env, infos); - } - } - - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. 
- /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. - /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. - /// - public static Vector ToBinaryVector(this Key input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. - /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. - /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. - /// - public static Vector ToBinaryVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. - /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. - /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. - /// - public static VarVector ToBinaryVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. - /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. 
- /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. - /// - public static Vector ToBinaryVector(this Key input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. - /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. - /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. - /// - public static Vector ToBinaryVector(this Vector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVectorColumn(input); - } - - /// - /// Takes a column of key type of known cardinality and produces a vector of bits representing the key in binary form. - /// The first value is encoded as all zeros and missing values are encoded as all ones. - /// In the case where a vector has multiple keys, the encoded values are concatenated. - /// Number of bits per key is determined as the number of bits needed to represent the cardinality of the keys plus one. 
- /// - public static VarVector ToBinaryVector(this VarVector> input) - { - Contracts.CheckValue(input, nameof(input)); - return new OutVarVectorColumn(input); - } - } } diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs index a6579cb9b2..01519948a5 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacing.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacing.cs @@ -12,8 +12,6 @@ using Microsoft.ML.Runtime.Internal.Utilities; using Microsoft.ML.Runtime.Model; using Microsoft.ML.Runtime.Model.Onnx; -using Microsoft.ML.StaticPipe; -using Microsoft.ML.StaticPipe.Runtime; using Microsoft.ML.Transforms; using System; using System.Collections; @@ -175,7 +173,10 @@ private static string TestType(ColumnType type) return null; } - public class ColumnInfo + /// + /// Describes how the transformer handles one column pair. + /// + public sealed class ColumnInfo { public enum ReplacementMode : byte { @@ -194,16 +195,17 @@ public enum ReplacementMode : byte /// Describes how the transformer handles one column pair. ///