public void TestBackAndForthConversionWithDifferentOrder()
{
    IHostEnvironment env = new MLContext();
    const int imageHeight = 100;
    const int imageWidth = 130;
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = TextLoader.Create(env, new TextLoader.Options()
    {
        Columns = new[]
        {
            new TextLoader.Column("ImagePath", DataKind.String, 0),
            new TextLoader.Column("Name", DataKind.String, 1),
        }
    }, new MultiFileSource(dataFile));
    var images = new ImageLoadingTransformer(env, imageFolder, ("ImageReal", "ImagePath")).Transform(data);
    var cropped = new ImageResizingTransformer(env, "ImageCropped", imageWidth, imageHeight, "ImageReal").Transform(images);
    var pixels = new ImagePixelExtractingTransformer(env, "ImagePixels", "ImageCropped",
        ImagePixelExtractingEstimator.ColorBits.All,
        orderOfExtraction: ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(cropped);
    IDataView backToBitmaps = new VectorToImageConvertingTransformer(env, "ImageRestored", imageHeight, imageWidth, "ImagePixels",
        ImagePixelExtractingEstimator.ColorBits.All,
        orderOfColors: ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(pixels);

    var fname = nameof(TestBackAndForthConversionWithDifferentOrder) + "_model.zip";
    var fh = env.CreateOutputFile(fname);
    using (var ch = env.Start("save"))
        TrainUtils.SaveModel(env, ch, fh, null, new RoleMappedData(backToBitmaps));

    backToBitmaps = ModelFileUtils.LoadPipeline(env, fh.OpenReadStream(), new MultiFileSource(dataFile));
    DeleteOutputPath(fname);

    using (var cursor = backToBitmaps.GetRowCursorForAllColumns())
    {
        var bitmapGetter = cursor.GetGetter<Bitmap>(backToBitmaps.Schema["ImageRestored"]);
        Bitmap restoredBitmap = default;
        var bitmapCropGetter = cursor.GetGetter<Bitmap>(backToBitmaps.Schema["ImageCropped"]);
        Bitmap croppedBitmap = default;
        while (cursor.MoveNext())
        {
            bitmapGetter(ref restoredBitmap);
            Assert.NotNull(restoredBitmap);
            bitmapCropGetter(ref croppedBitmap);
            Assert.NotNull(croppedBitmap);
            for (int x = 0; x < imageWidth; x++)
            {
                for (int y = 0; y < imageHeight; y++)
                {
                    var c = croppedBitmap.GetPixel(x, y);
                    var r = restoredBitmap.GetPixel(x, y);
                    // The restored pixel must match the original cropped pixel exactly.
                    Assert.True(c == r);
                }
            }
        }
    }
    Done();
}
public void TestOldSavingAndLoading()
{
    // Skip running for x86 as this test uses too much memory (over the 2GB limit on x86)
    // and is very likely to hit memory-related issues when running on CI.
    // TODO: optimize memory usage in the related code and enable the x86 run.
    if (!Environment.Is64BitProcess)
        return;

    var samplevector = GetSampleArrayData();
    var dataView = ML.Data.LoadFromEnumerable(
        new TestData[] { new TestData() { data_0 = samplevector } });

    var inputNames = "data_0";
    var outputNames = "output_1";
    var est = ML.Transforms.DnnFeaturizeImage(outputNames,
        m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), inputNames);
    var transformer = est.Fit(dataView);
    var result = transformer.Transform(dataView);
    var resultRoles = new RoleMappedData(result);
    using (var ms = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
        ms.Position = 0;
        var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
        using (var cursor = loadedView.GetRowCursor(loadedView.Schema[outputNames]))
        {
            VBuffer<float> softMaxValue = default;
            var softMaxGetter = cursor.GetGetter<VBuffer<float>>(loadedView.Schema[outputNames]);
            float sum = 0f;
            int i = 0;
            while (cursor.MoveNext())
            {
                softMaxGetter(ref softMaxValue);
                var values = softMaxValue.DenseValues();
                foreach (var val in values)
                {
                    sum += val;
                    if (i == 0)
                        Assert.InRange(val, 0.0, 0.00001);
                    if (i == 7)
                        Assert.InRange(val, 0.62935, 0.62940);
                    if (i == 500)
                        Assert.InRange(val, 0.15521, 0.155225);
                    i++;
                }
            }
            Assert.InRange(sum, 83.50, 84.50);
        }
    }
}
public void TestOldSavingAndLoading()
{
    var data = new[]
    {
        new TestClass() { A = 1, B = 3, C = new float[2] { 1, 2 }, D = new double[2] { 3, 4 } },
        new TestClass() { A = float.NaN, B = double.NaN, C = new float[2] { float.NaN, float.NaN }, D = new double[2] { double.NaN, double.NaN } },
        new TestClass() { A = float.NegativeInfinity, B = double.NegativeInfinity, C = new float[2] { float.NegativeInfinity, float.NegativeInfinity }, D = new double[2] { double.NegativeInfinity, double.NegativeInfinity } },
        new TestClass() { A = float.PositiveInfinity, B = double.PositiveInfinity, C = new float[2] { float.PositiveInfinity, float.PositiveInfinity }, D = new double[2] { double.PositiveInfinity, double.PositiveInfinity } },
        new TestClass() { A = 2, B = 1, C = new float[2] { 3, 4 }, D = new double[2] { 5, 6 } },
    };
    var dataView = ML.Data.LoadFromEnumerable(data);
    var pipe = ML.Transforms.ReplaceMissingValues(
        new MissingValueReplacingEstimator.ColumnOptions("NAA", "A", MissingValueReplacingEstimator.ReplacementMode.Mean),
        new MissingValueReplacingEstimator.ColumnOptions("NAB", "B", MissingValueReplacingEstimator.ReplacementMode.Mean),
        new MissingValueReplacingEstimator.ColumnOptions("NAC", "C", MissingValueReplacingEstimator.ReplacementMode.Mean),
        new MissingValueReplacingEstimator.ColumnOptions("NAD", "D", MissingValueReplacingEstimator.ReplacementMode.Mean));
    var result = pipe.Fit(dataView).Transform(dataView);
    var resultRoles = new RoleMappedData(result);
    using (var ms = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
        ms.Position = 0;
        var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
    }
}
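// A minimal illustrative sketch, not part of the original test: the same Mean
// replacement expressed with the simpler single-column ReplaceMissingValues
// overload. The TestClass type is reused from the test above; the output column
// name "AImputed" and the sample rows are assumptions made for this example.
public void SketchSingleColumnMeanReplacement()
{
    var mlContext = new MLContext();
    var data = mlContext.Data.LoadFromEnumerable(new[]
    {
        new TestClass() { A = 1, B = 1, C = new float[2] { 1, 2 }, D = new double[2] { 1, 2 } },
        new TestClass() { A = float.NaN, B = 2, C = new float[2] { 3, 4 }, D = new double[2] { 3, 4 } },
    });
    // Mean replacement computes the per-column (or per-slot) mean over the
    // non-missing values seen during Fit and substitutes it for NaN at Transform time.
    var pipeline = mlContext.Transforms.ReplaceMissingValues(
        "AImputed", "A", MissingValueReplacingEstimator.ReplacementMode.Mean);
    var transformed = pipeline.Fit(data).Transform(data);
}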
public void TestBackAndForthConversionWithoutAlphaNoInterleaveNoOffset()
{
    IHostEnvironment env = new MLContext();
    const int imageHeight = 100;
    const int imageWidth = 130;
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = TextLoader.Create(env, new TextLoader.Arguments()
    {
        Column = new[]
        {
            new TextLoader.Column("ImagePath", DataKind.TX, 0),
            new TextLoader.Column("Name", DataKind.TX, 1),
        }
    }, new MultiFileSource(dataFile));
    var images = new ImageLoaderTransformer(env, imageFolder, ("ImagePath", "ImageReal")).Transform(data);
    var cropped = new ImageResizerTransformer(env, "ImageReal", "ImageCropped", imageWidth, imageHeight).Transform(images);
    var pixels = new ImagePixelExtractorTransformer(env, "ImageCropped", "ImagePixels").Transform(cropped);
    IDataView backToBitmaps = new VectorToImageTransform(env, new VectorToImageTransform.Arguments()
    {
        InterleaveArgb = false,
        Column = new VectorToImageTransform.Column[1]
        {
            new VectorToImageTransform.Column()
            {
                Source = "ImagePixels",
                Name = "ImageRestored",
                ImageHeight = imageHeight,
                ImageWidth = imageWidth,
                ContainsAlpha = false
            }
        }
    }, pixels);

    var fname = nameof(TestBackAndForthConversionWithoutAlphaNoInterleaveNoOffset) + "_model.zip";
    var fh = env.CreateOutputFile(fname);
    using (var ch = env.Start("save"))
        TrainUtils.SaveModel(env, ch, fh, null, new RoleMappedData(backToBitmaps));

    backToBitmaps = ModelFileUtils.LoadPipeline(env, fh.OpenReadStream(), new MultiFileSource(dataFile));
    DeleteOutputPath(fname);

    backToBitmaps.Schema.TryGetColumnIndex("ImageRestored", out int bitmapColumn);
    backToBitmaps.Schema.TryGetColumnIndex("ImageCropped", out int cropBitmapColumn);
    using (var cursor = backToBitmaps.GetRowCursorForAllColumns())
    {
        var bitmapGetter = cursor.GetGetter<Bitmap>(bitmapColumn);
        Bitmap restoredBitmap = default;
        var bitmapCropGetter = cursor.GetGetter<Bitmap>(cropBitmapColumn);
        Bitmap croppedBitmap = default;
        while (cursor.MoveNext())
        {
            bitmapGetter(ref restoredBitmap);
            Assert.NotNull(restoredBitmap);
            bitmapCropGetter(ref croppedBitmap);
            Assert.NotNull(croppedBitmap);
            for (int x = 0; x < imageWidth; x++)
            {
                for (int y = 0; y < imageHeight; y++)
                {
                    var c = croppedBitmap.GetPixel(x, y);
                    var r = restoredBitmap.GetPixel(x, y);
                    // Alpha is not extracted, so only the R, G and B channels must match.
                    Assert.True(c.R == r.R && c.G == r.G && c.B == r.B);
                }
            }
        }
        Done();
    }
}
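// A minimal illustrative sketch, not part of the original tests: the same
// load/resize/extract/restore round trip written against the current MLContext
// estimator API, with MLContext.Model.Save/Load in place of
// TrainUtils.SaveModel/ModelFileUtils.LoadPipeline. Column names mirror the tests
// above; the "image-roundtrip.zip" file name is an assumption for this example.
public void SketchImageRoundTripWithEstimatorApi()
{
    var mlContext = new MLContext();
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = mlContext.Data.LoadFromTextFile(dataFile, new[]
    {
        new TextLoader.Column("ImagePath", DataKind.String, 0),
        new TextLoader.Column("Name", DataKind.String, 1),
    });

    // Load -> resize -> extract pixels -> convert the pixel vector back to an image.
    var pipeline = mlContext.Transforms.LoadImages("ImageReal", imageFolder, "ImagePath")
        .Append(mlContext.Transforms.ResizeImages("ImageCropped", 130, 100, "ImageReal"))
        .Append(mlContext.Transforms.ExtractPixels("ImagePixels", "ImageCropped"))
        .Append(mlContext.Transforms.ConvertToImage(100, 130, "ImageRestored", "ImagePixels"));

    var model = pipeline.Fit(data);
    mlContext.Model.Save(model, data.Schema, "image-roundtrip.zip");
    var reloaded = mlContext.Model.Load("image-roundtrip.zip", out _);
    var restored = reloaded.Transform(data);
}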
void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        return;

    var modelFile = "squeezenet/00000001/model.onnx";
    var samplevector = GetSampleArrayData();
    var dataView = ML.Data.ReadFromEnumerable(
        new TestData[] { new TestData() { data_0 = samplevector } });

    var inputNames = new[] { "data_0" };
    var outputNames = new[] { "softmaxout_1" };
    var est = new OnnxScoringEstimator(Env, outputNames, inputNames, modelFile, gpuDeviceId, fallbackToCpu);
    var transformer = est.Fit(dataView);
    var result = transformer.Transform(dataView);
    var resultRoles = new RoleMappedData(result);
    using (var ms = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
        ms.Position = 0;
        var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
        loadedView.Schema.TryGetColumnIndex(outputNames[0], out int softMaxOut1);
        using (var cursor = loadedView.GetRowCursor(loadedView.Schema[softMaxOut1]))
        {
            VBuffer<float> softMaxValue = default;
            var softMaxGetter = cursor.GetGetter<VBuffer<float>>(softMaxOut1);
            float sum = 0f;
            int i = 0;
            while (cursor.MoveNext())
            {
                softMaxGetter(ref softMaxValue);
                var values = softMaxValue.DenseValues();
                foreach (var val in values)
                {
                    sum += val;
                    if (i == 0)
                        Assert.InRange(val, 0.00004, 0.00005);
                    if (i == 1)
                        Assert.InRange(val, 0.003844, 0.003845);
                    if (i == 999)
                        Assert.InRange(val, 0.0029566, 0.0029567);
                    i++;
                }
            }
            Assert.InRange(sum, 1.0, 1.00001);
        }
    }
}
public void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
{
    var modelFile = "squeezenet/00000001/model.onnx";
    var samplevector = GetSampleArrayData();
    var dataView = ML.Data.LoadFromEnumerable(
        new TestData[] { new TestData() { data_0 = samplevector } });

    var inputNames = new[] { "data_0" };
    var outputNames = new[] { "softmaxout_1" };
    var est = ML.Transforms.ApplyOnnxModel(outputNames, inputNames, modelFile, gpuDeviceId, fallbackToCpu);
    var transformer = est.Fit(dataView);
    var result = transformer.Transform(dataView);
    var resultRoles = new RoleMappedData(result);
    using (var ms = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
        ms.Position = 0;
        var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
        var softMaxOut1Col = loadedView.Schema[outputNames[0]];
        using (var cursor = loadedView.GetRowCursor(softMaxOut1Col))
        {
            VBuffer<float> softMaxValue = default;
            var softMaxGetter = cursor.GetGetter<VBuffer<float>>(softMaxOut1Col);
            float sum = 0f;
            int i = 0;
            while (cursor.MoveNext())
            {
                softMaxGetter(ref softMaxValue);
                var values = softMaxValue.DenseValues();
                foreach (var val in values)
                {
                    sum += val;
                    if (i == 0)
                        Assert.InRange(val, 0.00004, 0.00005);
                    if (i == 1)
                        Assert.InRange(val, 0.003844, 0.003845);
                    if (i == 999)
                        Assert.InRange(val, 0.0029566, 0.0029567);
                    i++;
                }
            }
            Assert.InRange(sum, 0.99999, 1.00001);
        }
        (transformer as IDisposable)?.Dispose();
    }
}
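// A minimal illustrative sketch, not part of the original test: the same ONNX
// scoring pipeline saved and reloaded through MLContext.Model instead of
// TrainUtils.SaveModel/ModelFileUtils.LoadTransforms. The TestData type and
// GetSampleArrayData helper are reused from the test above; the
// "onnx-pipeline.zip" file name is an assumption for this example.
public void SketchOnnxPipelineSaveLoad()
{
    var mlContext = new MLContext();
    var dataView = mlContext.Data.LoadFromEnumerable(
        new[] { new TestData() { data_0 = GetSampleArrayData() } });

    var pipeline = mlContext.Transforms.ApplyOnnxModel(
        new[] { "softmaxout_1" }, new[] { "data_0" }, "squeezenet/00000001/model.onnx");
    var model = pipeline.Fit(dataView);

    mlContext.Model.Save(model, dataView.Schema, "onnx-pipeline.zip");
    var reloaded = mlContext.Model.Load("onnx-pipeline.zip", out var inputSchema);
    var scored = reloaded.Transform(dataView);
    // The ONNX transformer holds native resources, so dispose it when done.
    (model as IDisposable)?.Dispose();
}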
public void TestGreyscaleTransformImages()
{
    IHostEnvironment env = new MLContext();
    var imageHeight = 150;
    var imageWidth = 100;
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = TextLoader.Create(env, new TextLoader.Arguments()
    {
        Column = new[]
        {
            new TextLoader.Column("ImagePath", DataKind.TX, 0),
            new TextLoader.Column("Name", DataKind.TX, 1),
        }
    }, new MultiFileSource(dataFile));
    var images = ImageLoaderTransform.Create(env, new ImageLoaderTransform.Arguments()
    {
        Column = new ImageLoaderTransform.Column[1]
        {
            new ImageLoaderTransform.Column() { Source = "ImagePath", Name = "ImageReal" }
        },
        ImageFolder = imageFolder
    }, data);
    var cropped = ImageResizerTransform.Create(env, new ImageResizerTransform.Arguments()
    {
        Column = new ImageResizerTransform.Column[1]
        {
            new ImageResizerTransform.Column()
            {
                Name = "ImageCropped",
                Source = "ImageReal",
                ImageHeight = imageHeight,
                ImageWidth = imageWidth,
                Resizing = ImageResizerTransform.ResizingKind.IsoCrop
            }
        }
    }, images);
    IDataView grey = ImageGrayscaleTransform.Create(env, new ImageGrayscaleTransform.Arguments()
    {
        Column = new ImageGrayscaleTransform.Column[1]
        {
            new ImageGrayscaleTransform.Column() { Name = "ImageGrey", Source = "ImageCropped" }
        }
    }, cropped);

    var fname = nameof(TestGreyscaleTransformImages) + "_model.zip";
    var fh = env.CreateOutputFile(fname);
    using (var ch = env.Start("save"))
        TrainUtils.SaveModel(env, ch, fh, null, new RoleMappedData(grey));

    grey = ModelFileUtils.LoadPipeline(env, fh.OpenReadStream(), new MultiFileSource(dataFile));
    DeleteOutputPath(fname);

    grey.Schema.TryGetColumnIndex("ImageGrey", out int greyColumn);
    using (var cursor = grey.GetRowCursor((x) => true))
    {
        var bitmapGetter = cursor.GetGetter<Bitmap>(greyColumn);
        Bitmap bitmap = default;
        while (cursor.MoveNext())
        {
            bitmapGetter(ref bitmap);
            Assert.NotNull(bitmap);
            for (int x = 0; x < imageWidth; x++)
            {
                for (int y = 0; y < imageHeight; y++)
                {
                    var pixel = bitmap.GetPixel(x, y);
                    // A greyscale image has the same value for the R, G and B channels.
                    Assert.True(pixel.R == pixel.G && pixel.G == pixel.B);
                }
            }
        }
    }
    Done();
}
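// A minimal illustrative sketch, not part of the original test: the same
// greyscale conversion expressed with the current estimator API. Column names
// mirror the test above; the property being exercised is the one asserted there
// (equal R, G and B channel values).
public void SketchGrayscaleWithEstimatorApi()
{
    var mlContext = new MLContext();
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = mlContext.Data.LoadFromTextFile(dataFile, new[]
    {
        new TextLoader.Column("ImagePath", DataKind.String, 0),
        new TextLoader.Column("Name", DataKind.String, 1),
    });

    var pipeline = mlContext.Transforms.LoadImages("ImageReal", imageFolder, "ImagePath")
        .Append(mlContext.Transforms.ResizeImages("ImageCropped", 100, 150, "ImageReal"))
        .Append(mlContext.Transforms.ConvertToGrayscale("ImageGrey", "ImageCropped"));

    var grey = pipeline.Fit(data).Transform(data);
}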
public void TestOldSavingAndLoading()
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        return;

    var samplevector = getSampleArrayData();
    var dataView = ComponentCreation.CreateDataView(Env,
        new TestData[] { new TestData() { data_0 = samplevector } });

    var inputNames = "data_0";
    var outputNames = "output_1";
    var est = new DnnImageFeaturizerEstimator(Env,
        m => m.ModelSelector.ResNet18(m.Environment, m.InputColumn, m.OutputColumn), inputNames, outputNames);
    var transformer = est.Fit(dataView);
    var result = transformer.Transform(dataView);
    var resultRoles = new RoleMappedData(result);
    using (var ms = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
        ms.Position = 0;
        var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
        loadedView.Schema.TryGetColumnIndex(outputNames, out int softMaxOut1);
        using (var cursor = loadedView.GetRowCursor(col => col == softMaxOut1))
        {
            VBuffer<float> softMaxValue = default;
            var softMaxGetter = cursor.GetGetter<VBuffer<float>>(softMaxOut1);
            float sum = 0f;
            int i = 0;
            while (cursor.MoveNext())
            {
                softMaxGetter(ref softMaxValue);
                var values = softMaxValue.DenseValues();
                foreach (var val in values)
                {
                    sum += val;
                    if (i == 0)
                        Assert.InRange(val, 0.0, 0.00001);
                    if (i == 7)
                        Assert.InRange(val, 0.62935, 0.62940);
                    if (i == 500)
                        Assert.InRange(val, 0.15521, 0.155225);
                    i++;
                }
            }
            Assert.InRange(sum, 83.50, 84.50);
        }
    }
}