Example #1
        public void TestOldSavingAndLoading()
        {
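            // Input rows covering ordinary values, NaN, and both infinities for the scalar (A, B) and vector (C, D) columns.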
            var data = new[] {
                new TestClass()
                {
                    A = 1, B = 3, C = new float[2] {
                        1, 2
                    }, D = new double[2] {
                        3, 4
                    }
                },
                new TestClass()
                {
                    A = float.NaN, B = double.NaN, C = new float[2] {
                        float.NaN, float.NaN
                    }, D = new double[2] {
                        double.NaN, double.NaN
                    }
                },
                new TestClass()
                {
                    A = float.NegativeInfinity, B = double.NegativeInfinity, C = new float[2] {
                        float.NegativeInfinity, float.NegativeInfinity
                    }, D = new double[2] {
                        double.NegativeInfinity, double.NegativeInfinity
                    }
                },
                new TestClass()
                {
                    A = float.PositiveInfinity, B = double.PositiveInfinity, C = new float[2] {
                        float.PositiveInfinity, float.PositiveInfinity,
                    }, D = new double[2] {
                        double.PositiveInfinity, double.PositiveInfinity
                    }
                },
                new TestClass()
                {
                    A = 2, B = 1, C = new float[2] {
                        3, 4
                    }, D = new double[2] {
                        5, 6
                    }
                },
            };

            var dataView = ComponentCreation.CreateDataView(Env, data);
            var pipe     = new MissingValueReplacingEstimator(Env,
                                                              new NAReplaceTransform.ColumnInfo("A", "NAA", NAReplaceTransform.ColumnInfo.ReplacementMode.Mean),
                                                              new NAReplaceTransform.ColumnInfo("B", "NAB", NAReplaceTransform.ColumnInfo.ReplacementMode.Mean),
                                                              new NAReplaceTransform.ColumnInfo("C", "NAC", NAReplaceTransform.ColumnInfo.ReplacementMode.Mean),
                                                              new NAReplaceTransform.ColumnInfo("D", "NAD", NAReplaceTransform.ColumnInfo.ReplacementMode.Mean));

            var result      = pipe.Fit(dataView).Transform(dataView);
            var resultRoles = new RoleMappedData(result);

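            // Save the fitted transforms through the legacy model path and reload them from the stream.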
            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
            }
        }
Example #2
        public void TestOldSavingAndLoading()
        {
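            // Only run on Windows; the ONNX-based image featurizer is not exercised on other platforms here.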
            if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                return;
            }

            var samplevector = GetSampleArrayData();

            var dataView = ML.Data.ReadFromEnumerable(
                new TestData[] {
                new TestData()
                {
                    data_0 = samplevector
                }
            });

            var inputNames  = "data_0";
            var outputNames = "output_1";
            var est         = new DnnImageFeaturizerEstimator(Env, outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), inputNames);
            var transformer = est.Fit(dataView);
            var result      = transformer.Transform(dataView);
            var resultRoles = new RoleMappedData(result);

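            // Round-trip the fitted featurizer through the legacy save/load path and verify its output on the reloaded view.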
            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);

                loadedView.Schema.TryGetColumnIndex(outputNames, out int softMaxOut1);
                using (var cursor = loadedView.GetRowCursor(loadedView.Schema[outputNames]))
                {
                    VBuffer <float> softMaxValue  = default;
                    var             softMaxGetter = cursor.GetGetter <VBuffer <float> >(softMaxOut1);
                    float           sum           = 0f;
                    int             i             = 0;
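                    // Spot-check individual entries of the ResNet18 feature vector and accumulate their sum as a coarse sanity check.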
                    while (cursor.MoveNext())
                    {
                        softMaxGetter(ref softMaxValue);
                        var values = softMaxValue.DenseValues();
                        foreach (var val in values)
                        {
                            sum += val;
                            if (i == 0)
                            {
                                Assert.InRange(val, 0.0, 0.00001);
                            }
                            if (i == 7)
                            {
                                Assert.InRange(val, 0.62935, 0.62940);
                            }
                            if (i == 500)
                            {
                                Assert.InRange(val, 0.15521, 0.155225);
                            }
                            i++;
                        }
                    }
                    Assert.InRange(sum, 83.50, 84.50);
                }
            }
        }
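Example #3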
        void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
        {
            if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                return;
            }

            var modelFile    = "squeezenet/00000001/model.onnx";
            var samplevector = GetSampleArrayData();

            var dataView = ML.Data.ReadFromEnumerable(
                new TestData[] {
                new TestData()
                {
                    data_0 = samplevector
                }
            });

            var inputNames  = new[] { "data_0" };
            var outputNames = new[] { "softmaxout_1" };
            var est         = new OnnxScoringEstimator(Env, outputNames, inputNames, modelFile, gpuDeviceId, fallbackToCpu);
            var transformer = est.Fit(dataView);
            var result      = transformer.Transform(dataView);
            var resultRoles = new RoleMappedData(result);

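            // Save the fitted ONNX scorer with the legacy model format, reload it, and check the softmax output.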
            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);

                loadedView.Schema.TryGetColumnIndex(outputNames[0], out int softMaxOut1);

                using (var cursor = loadedView.GetRowCursor(loadedView.Schema[softMaxOut1]))
                {
                    VBuffer <float> softMaxValue  = default;
                    var             softMaxGetter = cursor.GetGetter <VBuffer <float> >(softMaxOut1);
                    float           sum           = 0f;
                    int             i             = 0;
                    while (cursor.MoveNext())
                    {
                        softMaxGetter(ref softMaxValue);
                        var values = softMaxValue.DenseValues();
                        foreach (var val in values)
                        {
                            sum += val;
                            if (i == 0)
                            {
                                Assert.InRange(val, 0.00004, 0.00005);
                            }
                            if (i == 1)
                            {
                                Assert.InRange(val, 0.003844, 0.003845);
                            }
                            if (i == 999)
                            {
                                Assert.InRange(val, 0.0029566, 0.0029567);
                            }
                            i++;
                        }
                    }
                    Assert.InRange(sum, 1.0, 1.00001);
                }
            }
        }
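Example #4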
        public void TestOldSavingAndLoading()
        {
            // Skip on x86: this test uses too much memory (over the 2GB limit on x86)
            // and is very likely to hit memory-related issues when running on CI.
            // TODO: optimize memory usage in the related code and enable the x86 run.
            if (!Environment.Is64BitProcess)
            {
                return;
            }

            var samplevector = GetSampleArrayData();

            var dataView = ML.Data.LoadFromEnumerable(
                new TestData[] {
                new TestData()
                {
                    data_0 = samplevector
                }
            });

            var inputNames  = "data_0";
            var outputNames = "output_1";
            var est         = ML.Transforms.DnnFeaturizeImage(outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), inputNames);
            var transformer = est.Fit(dataView);
            var result      = transformer.Transform(dataView);
            var resultRoles = new RoleMappedData(result);

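            // Reload the saved transforms and make sure the ResNet18 features fall in the expected ranges.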
            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);

                using (var cursor = loadedView.GetRowCursor(loadedView.Schema[outputNames]))
                {
                    VBuffer <float> softMaxValue  = default;
                    var             softMaxGetter = cursor.GetGetter <VBuffer <float> >(loadedView.Schema[outputNames]);
                    float           sum           = 0f;
                    int             i             = 0;
                    while (cursor.MoveNext())
                    {
                        softMaxGetter(ref softMaxValue);
                        var values = softMaxValue.DenseValues();
                        foreach (var val in values)
                        {
                            sum += val;
                            if (i == 0)
                            {
                                Assert.InRange(val, 0.0, 0.00001);
                            }
                            if (i == 7)
                            {
                                Assert.InRange(val, 0.62935, 0.62940);
                            }
                            if (i == 500)
                            {
                                Assert.InRange(val, 0.15521, 0.155225);
                            }
                            i++;
                        }
                    }
                    Assert.InRange(sum, 83.50, 84.50);
                }
            }
        }
Example #5
        public void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
        {
            var modelFile    = "squeezenet/00000001/model.onnx";
            var samplevector = GetSampleArrayData();

            var dataView = ML.Data.LoadFromEnumerable(
                new TestData[] {
                new TestData()
                {
                    data_0 = samplevector
                }
            });

            var inputNames  = new[] { "data_0" };
            var outputNames = new[] { "softmaxout_1" };
            var est         = ML.Transforms.ApplyOnnxModel(outputNames, inputNames, modelFile, gpuDeviceId, fallbackToCpu);
            var transformer = est.Fit(dataView);
            var result      = transformer.Transform(dataView);
            var resultRoles = new RoleMappedData(result);

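            // Reload the saved ONNX transform and verify that the softmax output still sums to approximately one.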
            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);

                var softMaxOut1Col = loadedView.Schema[outputNames[0]];

                using (var cursor = loadedView.GetRowCursor(softMaxOut1Col))
                {
                    VBuffer <float> softMaxValue  = default;
                    var             softMaxGetter = cursor.GetGetter <VBuffer <float> >(softMaxOut1Col);
                    float           sum           = 0f;
                    int             i             = 0;
                    while (cursor.MoveNext())
                    {
                        softMaxGetter(ref softMaxValue);
                        var values = softMaxValue.DenseValues();
                        foreach (var val in values)
                        {
                            sum += val;
                            if (i == 0)
                            {
                                Assert.InRange(val, 0.00004, 0.00005);
                            }
                            if (i == 1)
                            {
                                Assert.InRange(val, 0.003844, 0.003845);
                            }
                            if (i == 999)
                            {
                                Assert.InRange(val, 0.0029566, 0.0029567);
                            }
                            i++;
                        }
                    }
                    Assert.InRange(sum, 0.99999, 1.00001);
                }
                (transformer as IDisposable)?.Dispose();
            }
        }
Example #6
        public void TestOldSavingAndLoading()
        {
            var data = new[] {
                new TestClass()
                {
                    A = 1, B = 3, C = new float[2] {
                        1, 2
                    }, D = new double[2] {
                        3, 4
                    }
                },
                new TestClass()
                {
                    A = float.NaN, B = double.NaN, C = new float[2] {
                        float.NaN, float.NaN
                    }, D = new double[2] {
                        double.NaN, double.NaN
                    }
                },
                new TestClass()
                {
                    A = float.NegativeInfinity, B = double.NegativeInfinity, C = new float[2] {
                        float.NegativeInfinity, float.NegativeInfinity
                    }, D = new double[2] {
                        double.NegativeInfinity, double.NegativeInfinity
                    }
                },
                new TestClass()
                {
                    A = float.PositiveInfinity, B = double.PositiveInfinity, C = new float[2] {
                        float.PositiveInfinity, float.PositiveInfinity,
                    }, D = new double[2] {
                        double.PositiveInfinity, double.PositiveInfinity
                    }
                },
                new TestClass()
                {
                    A = 2, B = 1, C = new float[2] {
                        3, 4
                    }, D = new double[2] {
                        5, 6
                    }
                },
            };

            var dataView = ML.Data.LoadFromEnumerable(data);
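            // Add indicator columns that flag missing (NaN) entries in each of the four input columns.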
            var pipe     = ML.Transforms.IndicateMissingValues(new[] {
                new InputOutputColumnPair("NAA", "A"),
                new InputOutputColumnPair("NAB", "B"),
                new InputOutputColumnPair("NAC", "C"),
                new InputOutputColumnPair("NAD", "D")
            });
            var result      = pipe.Fit(dataView).Transform(dataView);
            var resultRoles = new RoleMappedData(result);

            using (var ms = new MemoryStream())
            {
                TrainUtils.SaveModel(Env, Env.Start("saving"), ms, null, resultRoles);
                ms.Position = 0;
                var loadedView = ModelFileUtils.LoadTransforms(Env, dataView, ms);
            }
        }