diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
index 9b454fd41c..76ceb81ef1 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
@@ -39,7 +39,7 @@ namespace Microsoft.ML.Transforms.Onnx
/// Please refer to to learn more about the necessary dependencies,
/// and how to run it on a GPU.
///
- public sealed class OnnxTransformer : RowToRowTransformerBase
+ public sealed class OnnxTransformer : RowToRowTransformerBase, IDisposable
{
///
/// A class used for capturing shape information from command line.
@@ -353,6 +353,15 @@ internal int MapDataViewColumnToOnnxOutputTensor(int iinfo)
return Model.ModelInfo.OutputNames.IndexOf(Outputs[iinfo]);
}
+ private bool _isDisposed;
+ public void Dispose()
+ {
+ if (_isDisposed)
+ return;
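+ // Dispose the OnnxModel, releasing the native OnnxRuntime inference session it wraps.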
+ Model?.Dispose();
+ _isDisposed = true;
+ }
+
private sealed class Mapper : MapperBase
{
private readonly OnnxTransformer _parent;
@@ -735,6 +744,9 @@ public NamedOnnxValue GetNamedOnnxValue()
///
/// The inputs and outputs of the ONNX models must be Tensor type. Sequence and Maps are not yet supported.
///
+ /// Internally, OnnxTransformer (the return value of OnnxScoringEstimator.Fit()) holds a reference to an inference session that points to unmanaged memory owned by OnnxRuntime.dll.
+ /// Whenever [ApplyOnnxModel](xref:Microsoft.ML.OnnxCatalog.ApplyOnnxModel*) is used in a pipeline, it is advisable to cast the return value of the Fit() call to IDisposable and call Dispose() to avoid memory leaks.
+ ///
/// OnnxRuntime works on Windows, MacOS and Ubuntu 16.04 Linux 64-bit platforms.
/// Visit [ONNX Models](https://github.com/onnx/models) to see a list of readily available models to get started with.
/// Refer to [ONNX](http://onnx.ai) for more information.
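
For reviewers, a minimal sketch of the usage pattern this remark recommends; the model path, input shape, and column names below are placeholders (loosely modeled on the squeezenet test changes later in this diff), not part of this change:

```csharp
using System;
using System.Collections.Generic;
using Microsoft.ML;
using Microsoft.ML.Data;

// Hypothetical input type; the column name and vector size must match the ONNX model's input.
public class OnnxInput
{
    [ColumnName("data_0")]
    [VectorType(3 * 224 * 224)]
    public float[] Data { get; set; }
}

public static class OnnxDisposeSketch
{
    public static void Run()
    {
        var mlContext = new MLContext();
        var samples = new List<OnnxInput> { new OnnxInput { Data = new float[3 * 224 * 224] } };
        IDataView data = mlContext.Data.LoadFromEnumerable(samples);

        var pipeline = mlContext.Transforms.ApplyOnnxModel(
            outputColumnNames: new[] { "softmaxout_1" },
            inputColumnNames: new[] { "data_0" },
            modelFile: "squeezenet.onnx");

        var transformer = pipeline.Fit(data);
        try
        {
            IDataView scored = transformer.Transform(data);
            // ... consume 'scored' (cursor, CreateEnumerable, etc.) ...
        }
        finally
        {
            // OnnxTransformer holds a native OnnxRuntime inference session;
            // dispose it explicitly so the unmanaged memory is released.
            (transformer as IDisposable)?.Dispose();
        }
    }
}
```

The `as IDisposable` cast keeps call sites source-compatible with releases where OnnxTransformer did not implement IDisposable; the updated tests below use the same pattern.
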
diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
index ca9110260c..02c24b1ad9 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
@@ -339,10 +339,7 @@ public static OnnxModel CreateFromBytes(byte[] modelBytes)
public static OnnxModel CreateFromBytes(byte[] modelBytes, int? gpuDeviceId = null, bool fallbackToCpu = false,
IDictionary<string, int[]> shapeDictionary = null)
{
- var tempModelDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
- Directory.CreateDirectory(tempModelDir);
-
- var tempModelFile = Path.Combine(tempModelDir, "model.onnx");
+ var tempModelFile = Path.GetTempFileName();
File.WriteAllBytes(tempModelFile, modelBytes);
return new OnnxModel(tempModelFile, gpuDeviceId, fallbackToCpu,
ownModelFile: true, shapeDictionary: shapeDictionary);
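
Reviewer note: Path.GetTempFileName() creates a uniquely named, zero-byte file on disk and returns its full path, so the temporary-directory handling in the old code is no longer needed; File.WriteAllBytes then overwrites that file in place. A small self-contained sketch of this BCL behavior (the byte array is a placeholder, not a real ONNX model):

```csharp
using System;
using System.IO;

class TempModelFileSketch
{
    static void Main()
    {
        byte[] modelBytes = { 0x08, 0x01, 0x12, 0x00 }; // placeholder bytes, not a real model

        // Creates an empty temp file and returns its path, e.g. C:\...\Temp\tmpA1B2.tmp.
        string tempModelFile = Path.GetTempFileName();
        try
        {
            // Overwrites the empty file; no directory creation or naming is required.
            File.WriteAllBytes(tempModelFile, modelBytes);
            Console.WriteLine($"{tempModelFile}: {new FileInfo(tempModelFile).Length} bytes");
        }
        finally
        {
            // Here we delete it ourselves; in the PR, ownModelFile: true hands that
            // responsibility to the OnnxModel instance instead.
            File.Delete(tempModelFile);
        }
    }
}
```
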
diff --git a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
index 2a25c85909..df681a0ffb 100644
--- a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
+++ b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
@@ -145,7 +145,9 @@ public void TestSimpleCase()
pipe.GetOutputSchema(SchemaShape.Create(invalidDataWrongVectorSize.Schema));
try
{
- pipe.Fit(invalidDataWrongVectorSize);
+ var onnxTransformer = pipe.Fit(invalidDataWrongVectorSize);
+ (onnxTransformer as IDisposable)?.Dispose();
+
Assert.False(true);
}
catch (ArgumentOutOfRangeException) { }
@@ -206,6 +208,7 @@ public void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
}
Assert.InRange(sum, 0.99999, 1.00001);
}
+ (transformer as IDisposable)?.Dispose();
}
}
@@ -232,7 +235,15 @@ public void OnnxWorkout()
TestEstimatorCore(pipe, data);
- var result = pipe.Fit(data).Transform(data);
+ var model = pipe.Fit(data);
+ var result = model.Transform(data);
+
+ // save and reload the model
+ var tempPath = Path.GetTempFileName();
+ ML.Model.Save(model, data.Schema, tempPath);
+ var loadedModel = ML.Model.Load(tempPath, out DataViewSchema modelSchema);
+ (loadedModel as IDisposable)?.Dispose();
+
var softmaxOutCol = result.Schema["softmaxout_1"];
using (var cursor = result.GetRowCursor(softmaxOutCol))
@@ -248,6 +259,8 @@ public void OnnxWorkout()
}
Assert.Equal(4, numRows);
}
+ (model as IDisposable)?.Dispose();
+ File.Delete(tempPath);
}
[OnnxFact]
@@ -279,7 +292,9 @@ public void OnnxModelScenario()
}
});
- var onnx = ML.Transforms.ApplyOnnxModel("softmaxout_1", "data_0", modelFile).Fit(dataView).Transform(dataView);
+ var pipeline = ML.Transforms.ApplyOnnxModel("softmaxout_1", "data_0", modelFile);
+ var onnxTransformer = pipeline.Fit(dataView);
+ var onnx = onnxTransformer.Transform(dataView);
var scoreCol = onnx.Schema["softmaxout_1"];
using (var curs = onnx.GetRowCursor(scoreCol))
@@ -292,6 +307,7 @@ public void OnnxModelScenario()
Assert.Equal(1000, buffer.Length);
}
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
[OnnxFact]
@@ -309,7 +325,9 @@ public void OnnxModelMultiInput()
inb = new float[] {1,2,3,4,5}
}
});
- var onnx = ML.Transforms.ApplyOnnxModel(new[] { "outa", "outb" }, new[] { "ina", "inb" }, modelFile).Fit(dataView).Transform(dataView);
+ var pipeline = ML.Transforms.ApplyOnnxModel(new[] { "outa", "outb" }, new[] { "ina", "inb" }, modelFile);
+ var onnxTransformer = pipeline.Fit(dataView);
+ var onnx = onnxTransformer.Transform(dataView);
var outaCol = onnx.Schema["outa"];
var outbCol = onnx.Schema["outb"];
@@ -330,6 +348,7 @@ public void OnnxModelMultiInput()
Assert.Equal(30, bufferb.GetValues().ToArray().Sum());
}
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
[OnnxFact]
@@ -346,7 +365,9 @@ public void OnnxModelOutputDifferentOrder()
}
});
// The model returns the output columns in the order outa, outb. We are doing the opposite here, making sure the name mapping is correct.
- var onnx = ML.Transforms.ApplyOnnxModel(new[] { "outb", "outa" }, new[] { "ina", "inb" }, modelFile).Fit(dataView).Transform(dataView);
+ var pipeline = ML.Transforms.ApplyOnnxModel(new[] { "outb", "outa" }, new[] { "ina", "inb" }, modelFile);
+ var onnxTransformer = pipeline.Fit(dataView);
+ var onnx = onnxTransformer.Transform(dataView);
var outaCol = onnx.Schema["outa"];
var outbCol = onnx.Schema["outb"];
@@ -367,9 +388,12 @@ public void OnnxModelOutputDifferentOrder()
Assert.Equal(30, bufferb.GetValues().ToArray().Sum());
}
}
+ (onnxTransformer as IDisposable)?.Dispose();
// The model returns the output columns in the order outa, outb. We are doing only a subset, outb, to make sure the mapping works.
- onnx = ML.Transforms.ApplyOnnxModel(new[] { "outb" }, new[] { "ina", "inb" }, modelFile).Fit(dataView).Transform(dataView);
+ pipeline = ML.Transforms.ApplyOnnxModel(new[] { "outb" }, new[] { "ina", "inb" }, modelFile);
+ onnxTransformer = pipeline.Fit(dataView);
+ onnx = onnxTransformer.Transform(dataView);
outbCol = onnx.Schema["outb"];
using (var curs = onnx.GetRowCursor(outbCol))
@@ -384,6 +408,7 @@ public void OnnxModelOutputDifferentOrder()
Assert.Equal(30, bufferb.GetValues().ToArray().Sum());
}
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
[OnnxFact]
@@ -401,12 +426,15 @@ public void TestUnknownDimensions()
};
var idv = mlContext.Data.LoadFromEnumerable(data);
var pipeline = ML.Transforms.ApplyOnnxModel(modelFile);
- var transformedValues = pipeline.Fit(idv).Transform(idv);
+ var onnxTransformer = pipeline.Fit(idv);
+ var transformedValues = onnxTransformer.Transform(idv);
var predictions = mlContext.Data.CreateEnumerable(transformedValues, reuseRowObject: false).ToArray();
Assert.Equal(1, predictions[0].argmax[0]);
Assert.Equal(0, predictions[1].argmax[0]);
Assert.Equal(2, predictions[2].argmax[0]);
+
+ (onnxTransformer as IDisposable)?.Dispose();
}
[OnnxFact]
@@ -424,7 +452,8 @@ public void TestOnnxNoneDimValue()
};
var idv = mlContext.Data.LoadFromEnumerable(data);
var pipeline = ML.Transforms.ApplyOnnxModel(modelFile);
- var transformedValues = pipeline.Fit(idv).Transform(idv);
+ var onnxTransformer = pipeline.Fit(idv);
+ var transformedValues = onnxTransformer.Transform(idv);
var predictions = mlContext.Data.CreateEnumerable(transformedValues, reuseRowObject: false).ToArray();
Assert.Equal(-0.080, Math.Round(predictions[0].variable[0], 3));
@@ -510,6 +539,8 @@ public void OnnxModelInMemoryImage()
foreach (var dataPoint in transformedDataPoints)
foreach (var score in dataPoint.Scores)
Assert.True(score > 0);
+
+ (model as IDisposable)?.Dispose();
}
private class ZipMapInput
@@ -545,7 +576,9 @@ public void TestOnnxZipMapWithInt64Keys()
};
var dataView = ML.Data.LoadFromEnumerable(dataPoints);
- var transformedDataView = ML.Transforms.ApplyOnnxModel(new[] { "output" }, new[] { "input" }, modelFile).Fit(dataView).Transform(dataView);
+ var pipeline = ML.Transforms.ApplyOnnxModel(new[] { "output" }, new[] { "input" }, modelFile);
+ var onnxTransformer = pipeline.Fit(dataView);
+ var transformedDataView = onnxTransformer.Transform(dataView);
// Verify output column carried by an IDataView.
var outputColumn = transformedDataView.Schema["output"];
@@ -579,6 +612,7 @@ public void TestOnnxZipMapWithInt64Keys()
Assert.Equal(dataPoints[i].Input[1], dictionary[17]);
Assert.Equal(dataPoints[i].Input[2], dictionary[36]);
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
///
@@ -595,7 +629,9 @@ public void TestOnnxZipMapWithStringKeys()
};
var dataView = ML.Data.LoadFromEnumerable(dataPoints);
- var transformedDataView = ML.Transforms.ApplyOnnxModel(new[] { "output" }, new[] { "input" }, modelFile).Fit(dataView).Transform(dataView);
+ var pipeline = ML.Transforms.ApplyOnnxModel(new[] { "output" }, new[] { "input" }, modelFile);
+ var onnxTransformer = pipeline.Fit(dataView);
+ var transformedDataView = onnxTransformer.Transform(dataView);
// Verify output column carried by an IDataView.
var outputColumn = transformedDataView.Schema["output"];
@@ -629,6 +665,7 @@ public void TestOnnxZipMapWithStringKeys()
Assert.Equal(dataPoints[i].Input[1], dictionary["B"]);
Assert.Equal(dataPoints[i].Input[2], dictionary["C"]);
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
[OnnxFact]
@@ -748,23 +785,30 @@ public void TestOnnxTransformWithCustomShapes()
var dataView = ML.Data.LoadFromEnumerable(dataPoints);
+ var pipeline = new OnnxScoringEstimator[3];
+ var onnxTransformer = new OnnxTransformer[3];
var transformedDataViews = new IDataView[3];
// Test three public ONNX APIs with the custom shape.
// Test 1.
- transformedDataViews[0] = ML.Transforms.ApplyOnnxModel(
+ pipeline[0] = ML.Transforms.ApplyOnnxModel(
new[] { nameof(PredictionWithCustomShape.argmax) }, new[] { nameof(InputWithCustomShape.input) },
- modelFile, shapeDictionary).Fit(dataView).Transform(dataView);
+ modelFile, shapeDictionary);
+ onnxTransformer[0] = pipeline[0].Fit(dataView);
+ transformedDataViews[0] = onnxTransformer[0].Transform(dataView);
// Test 2.
- transformedDataViews[1] = ML.Transforms.ApplyOnnxModel(
+ pipeline[1] = ML.Transforms.ApplyOnnxModel(
nameof(PredictionWithCustomShape.argmax), nameof(InputWithCustomShape.input),
- modelFile, shapeDictionary).Fit(dataView).Transform(dataView);
+ modelFile, shapeDictionary);
+ onnxTransformer[1] = pipeline[1].Fit(dataView);
+ transformedDataViews[1] = onnxTransformer[1].Transform(dataView);
// Test 3.
- transformedDataViews[2] = ML.Transforms.ApplyOnnxModel(
- modelFile, shapeDictionary).Fit(dataView).Transform(dataView);
+ pipeline[2] = ML.Transforms.ApplyOnnxModel(modelFile, shapeDictionary);
+ onnxTransformer[2] = pipeline[2].Fit(dataView);
+ transformedDataViews[2] = onnxTransformer[2].Transform(dataView);
// Conduct the same check for all the 3 called public APIs.
foreach(var transformedDataView in transformedDataViews)
@@ -787,6 +831,8 @@ public void TestOnnxTransformWithCustomShapes()
for (int i = 0; i < transformedDataPoints.Count; ++i)
Assert.Equal(transformedDataPoints[i].argmax, expectedResults[i]);
}
+ for (int i = 0; i < 3; i++)
+ (onnxTransformer[i] as IDisposable)?.Dispose();
}
///
@@ -948,6 +994,9 @@ public void TestOnnxTransformSaveAndLoadWithCustomShapes()
for (int i = 0; i < transformedDataPoints.Count; ++i)
Assert.Equal(transformedDataPoints[i].argmax, expectedResults[i]);
+
+ (model as IDisposable)?.Dispose();
+ (loadedModel as IDisposable)?.Dispose();
}
}
}
diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs
index 3b8f589c0f..ddaf81c4fa 100644
--- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs
+++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs
@@ -147,6 +147,7 @@ protected void TestEstimatorCore(IEstimator estimator,
// Schema verification between estimator and transformer.
var scoredTrainSchemaShape = SchemaShape.Create(transformer.GetOutputSchema(validFitInput.Schema));
CheckSameSchemaShape(outSchemaShape, scoredTrainSchemaShape);
+ (loadedTransformer as IDisposable)?.Dispose();
}
private void CheckSameSchemaShape(SchemaShape promised, SchemaShape delivered)
diff --git a/test/Microsoft.ML.Tests/OnnxConversionTest.cs b/test/Microsoft.ML.Tests/OnnxConversionTest.cs
index 56d50ac702..1c4ef5608a 100644
--- a/test/Microsoft.ML.Tests/OnnxConversionTest.cs
+++ b/test/Microsoft.ML.Tests/OnnxConversionTest.cs
@@ -817,6 +817,7 @@ public void RemoveVariablesInPipelineTest()
CompareResults("Score", "Score", transformedData, onnxResult, isRightColumnOnnxScalar: true);
CompareResults("Probability", "Probability", transformedData, onnxResult, isRightColumnOnnxScalar: true);
CompareResults("PredictedLabel", "PredictedLabel", transformedData, onnxResult, isRightColumnOnnxScalar: true);
+ (onnxTransformer as IDisposable)?.Dispose();
}
CheckEquality(subDir, onnxTextName, digitsOfPrecision: 3);
}
@@ -976,6 +977,7 @@ public void PcaOnnxConversionTest(int customOpSetVersion)
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareResults("pca", "pca", transformedData, onnxResult);
+ (onnxTransformer as IDisposable)?.Dispose();
}
}
Done();
@@ -1351,6 +1353,7 @@ public void NgramOnnxConversionTest(
var onnxSlotNames = onnxSlots.DenseValues().ToList();
for (int j = 0; j < mlNetSlots.Length; j++)
Assert.Equal(mlNetSlotNames[j].ToString(), onnxSlotNames[j].ToString());
+ (onnxTransformer as IDisposable)?.Dispose();
}
}
Done();
@@ -1456,6 +1459,7 @@ public void OptionalColumnOnnxTest(DataKind dataKind)
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareResults("Label", "Label", outputData, onnxResult, isRightColumnOnnxScalar: true);
+ (onnxTransformer as IDisposable)?.Dispose();
}
Done();
}
@@ -1586,7 +1590,8 @@ public void UseKeyDataViewTypeAsUInt32InOnnxInput()
{
// Step 5: Apply Onnx Model
var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath);
- var onnxResult = onnxEstimator.Fit(reloadedData).Transform(reloadedData);
+ var onnxTransformer = onnxEstimator.Fit(reloadedData);
+ var onnxResult = onnxTransformer.Transform(reloadedData);
// Step 6: Compare results to an onnx model created using the mappedData IDataView
// Notice that this ONNX model would actually include the steps to do the ValueToKeyTransformer mapping,
@@ -1598,7 +1603,8 @@ public void UseKeyDataViewTypeAsUInt32InOnnxInput()
using (FileStream stream = new FileStream(onnxModelPath2, FileMode.Create))
mlContext.Model.ConvertToOnnx(model, mappedData, stream);
var onnxEstimator2 = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath2);
- var onnxResult2 = onnxEstimator2.Fit(originalData).Transform(originalData);
+ var onnxTransformer2 = onnxEstimator2.Fit(originalData);
+ var onnxResult2 = onnxTransformer2.Transform(originalData);
var stdSuffix = ".output";
foreach (var name in outputNames)
@@ -1607,6 +1613,8 @@ public void UseKeyDataViewTypeAsUInt32InOnnxInput()
var colName = name.Replace(stdSuffix, "");
CompareResults(colName, colName, onnxResult, onnxResult2);
}
+ (onnxTransformer as IDisposable)?.Dispose();
+ (onnxTransformer2 as IDisposable)?.Dispose();
}
Done();
@@ -2035,6 +2043,7 @@ private void TestPipeline(EstimatorChain pip
{
CompareResults(column.Name, column.Name, transformedData, onnxResult, column.Precision, true);
}
+ (onnxTransformer as IDisposable)?.Dispose();
}
}