diff --git a/build/Dependencies.props b/build/Dependencies.props
index 2ed9835fb4..d63101377e 100644
--- a/build/Dependencies.props
+++ b/build/Dependencies.props
@@ -15,7 +15,7 @@
     3.5.1
     2.2.3
     2.1.0
-    0.3.0
+    0.4.0
     0.0.0.9
     2.1.3
     4.5.0
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/ResNet18Extension.cs b/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/ResNet18Extension.cs
index aa45783b0b..9572201cc1 100644
--- a/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/ResNet18Extension.cs
+++ b/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/ResNet18Extension.cs
@@ -30,8 +30,8 @@ public static EstimatorChain ResNet18(this DnnImageMod
         ///
         /// This allows a custom model location to be specified. This is useful is a custom model is specified,
-        /// or if the model is desired to be placed or shipped separately in a different folder from the main application. Note that because Onnx models
-        /// must be in a directory all by themsleves for the OnnxTransformer to work, this method appends a ResNet18Onnx/ResNetPrepOnnx subdirectory
+        /// or if the model is desired to be placed or shipped separately in a different folder from the main application. Note that because ONNX models
+        /// must be in a directory all by themselves for the OnnxTransformer to work, this method appends a ResNet18Onnx/ResNetPrepOnnx subdirectory
         /// to the passed in directory to prevent having to make that directory manually each time.
         ///
         public static EstimatorChain ResNet18(this DnnImageModelSelector dnnModelContext, IHostEnvironment env, string outputColumnName, string inputColumnName, string modelDir)
diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
index 90f5bcad45..6586ce9b91 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
@@ -46,7 +46,7 @@ namespace Microsoft.ML.Transforms.Onnx
     /// | Output column data type | The same data type as the input column |
     /// | Required NuGet in addition to Microsoft.ML | Microsoft.ML.OnnxTransformer |
     ///
-    /// Supports inferencing of models in ONNX 1.2 and 1.3 format (opset 7, 8 and 9), using the
+    /// Supports inferencing of models in ONNX 1.2, 1.3, 1.4, and 1.5 format (opset 7, 8, 9, and 10), using the
     /// [Microsoft.ML.OnnxRuntime](https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime/) library.
     /// Models are scored on CPU by default. If GPU execution is needed (optional), use the
     /// NuGet package available at [Microsoft.ML.OnnxRuntime.Gpu](https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime.Gpu/)
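
For context, a minimal sketch of how the transformer touched by this change is typically consumed through `MLContext`, assuming the public `ApplyOnnxModel` catalog extension from Microsoft.ML.OnnxTransformer; the model path and column names (`model.onnx`, `data_0`, `softmaxout_1`) are illustrative placeholders, not part of this diff.

```csharp
using Microsoft.ML;

var mlContext = new MLContext();

// "model.onnx" is a placeholder; per the updated documentation above, any model in
// ONNX 1.2-1.5 format (opset 7-10) should be scorable via OnnxTransformer.
var pipeline = mlContext.Transforms.ApplyOnnxModel(
    outputColumnName: "softmaxout_1",
    inputColumnName: "data_0",
    modelFile: "model.onnx");

// Scoring runs on CPU by default. For GPU execution, reference the
// Microsoft.ML.OnnxRuntime.Gpu package and pass a device id, e.g.:
// mlContext.Transforms.ApplyOnnxModel("softmaxout_1", "data_0", "model.onnx", gpuDeviceId: 0);
```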