Example 1
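TrainRanking: trains a LambdaRank model on random grouped data and verifies that the managed predictor, its serialized copy, and the native booster all return the same outputs and feature weights.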
        public void TrainRanking()
        {
            var rand = new Random(Seed);

            for (int test = 0; test < 5; ++test)
            {
                int numColumns = rand.Next(1, 10);
                var pms        = GenerateParameters(rand, ObjectiveType.LambdaRank, numColumns);
                pms.Objective.EvalAt = new int[] { 5 };    // TODO: at most one eval position is allowed here, otherwise an 'Expected at most one metric' error is raised
                var numRanks = rand.Next(2, 4);

                Dictionary <int, int> categorical = null;
                var trainData = CreateRandomDenseClassifyData(rand, numRanks, ref categorical, pms.Dataset.UseMissing, numColumns);
                trainData.Groups = GenGroups(rand, trainData.NumRows);
                var validData = (pms.Learning.EarlyStoppingRound > 0 || rand.Next(2) == 0) ? CreateRandomDenseClassifyData(rand, numRanks, ref categorical, pms.Dataset.UseMissing, numColumns) : null;
                if (validData != null)
                {
                    validData.Groups = GenGroups(rand, validData.NumRows);
                }
                pms.Dataset.CategoricalFeature = categorical.Keys.ToArray();

                var learningRateSchedule = (rand.Next(2) == 0) ? (Func <int, double>)null : (iter => pms.Learning.LearningRate * Math.Pow(0.99, iter));

                try
                {
                    using (var datasets = (rand.Next(2) == 0) ? new Datasets(pms.Common, pms.Dataset, trainData, validData) :
                                          new Datasets(pms.Common, pms.Dataset, Dense2Sparse(trainData), Dense2Sparse(validData)))
                        using (var trainer = new RankingTrainer(pms.Learning, pms.Objective))
                        {
                            var model = trainer.Train(datasets, learningRateSchedule);
                            model.Managed.MaxThreads = rand.Next(1, Environment.ProcessorCount);

                            // possibly use subset of trees
                            var numIterations = -1;
                            if (rand.Next(2) == 0)
                            {
                                numIterations             = rand.Next(1, model.Managed.MaxNumTrees);
                                model.Managed.MaxNumTrees = numIterations;
                                model.Native.MaxNumTrees  = numIterations;
                            }

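                            // round-trip the managed predictor through binary serialization and reload it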
                            RankingPredictor model2 = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        PredictorPersist.Save(model.Managed, writer);
                                        ms.Position = 0;
                                        model2      = PredictorPersist.Load <double>(reader) as RankingPredictor;
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

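                            // same round-trip for the native predictor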
                            RankingNativePredictor model2native = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        NativePredictorPersist.Save(model.Native, writer);
                                        ms.Position  = 0;
                                        model2native = NativePredictorPersist.Load <double>(reader) as RankingNativePredictor;
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

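                            // batch predictions from the booster over the whole training set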
                            var output3s = trainer.Evaluate(Booster.PredictType.Normal, trainData.Features, numIterations);
                            Assert.Equal(trainData.Features.Length, output3s.GetLength(0));
                            Assert.Equal(1, output3s.GetLength(1));

                            var output3natives = model.Native.GetOutputs(trainData.Features);
                            Assert.Equal(trainData.Features.Length, output3natives.Length);

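                            // per-row checks: managed, reloaded, and booster outputs must agree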
                            for (int i = 0; i < trainData.Features.Length; i++)
                            {
                                var row = trainData.Features[i];

                                double output = 0;
                                var    input  = new VBuffer <float>(row.Length, row);
                                model.Managed.GetOutput(ref input, ref output);
                                // TODO: document what this output value represents

                                double output2 = 0;
                                model2.GetOutput(ref input, ref output2);
                                Compare(output, output2);

                                var output3 = trainer.Evaluate(Booster.PredictType.Normal, row, numIterations);
                                Assert.Single(output3);
                                Assert.Equal(output3[0], output3s[i, 0]);
                                Assert.Equal(output3[0], output3natives[i]);
                                Compare(output, output3[0]);
                                //Console.WriteLine(trainer.GetModelString());
                                //throw new Exception($"Output mismatch {output} vs {output3[0]} (error: {Math.Abs(output - output3[0])}) input: {String.Join(", ", row)}");
                            }

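                            // feature importances reported by the managed and native models should match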
                            var normalise   = rand.Next(2) == 0;
                            var getSplits   = rand.Next(2) == 0;
                            var gains       = model.Managed.GetFeatureWeights(normalise, getSplits);
                            var gainsNative = model.Native.GetFeatureWeights(normalise, getSplits);
                            Assert.Equal(gains.Count, gainsNative.Count);
                            foreach (var kv in gains)
                            {
                                Assert.True(0 <= kv.Key && kv.Key < trainData.NumColumns);
                                Assert.True(0.0 <= kv.Value);
                                Compare(kv.Value, gainsNative[kv.Key]);
                            }

                            if (!getSplits && !normalise)
                            {
                                var totGain1 = gains.Values.Sum();
                                var totGain2 = Enumerable.Range(0, trainData.NumColumns).SelectMany(i => model.Managed.GetFeatureGains(i)).Sum();
                                Compare(totGain1, totGain2);
                            }
                        }
                }
                catch (Exception e)
                {
                    throw new Exception($"Failed: {Seed} #{test} {pms}", e);
                }
            }
        }
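
All four examples share the same persistence check: the predictor is written to a MemoryStream with PredictorPersist.Save (NativePredictorPersist.Save for the native predictor), the stream is rewound, the predictor is read back with the matching Load call, and the test asserts that the whole stream was consumed. A minimal sketch of that round-trip, assuming a model as returned by trainer.Train(...) in the tests above:

        // Sketch only: reuses the PredictorPersist API exactly as exercised in the tests above.
        using (var ms = new System.IO.MemoryStream())
        using (var writer = new System.IO.BinaryWriter(ms))
        using (var reader = new System.IO.BinaryReader(ms))
        {
            PredictorPersist.Save(model.Managed, writer);          // serialize the managed predictor
            ms.Position = 0;                                       // rewind before reading back
            var reloaded = PredictorPersist.Load<double>(reader);  // deserialize (Load<double[]> for multiclass)
            Assert.Equal(ms.Position, ms.Length);                  // the stream must be fully consumed
        }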
Example 2
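TrainMultiClass: trains a MultiClass or MultiClassOva model on random data and checks class probabilities, raw scores, serialization round-trips, and feature weights against the native booster.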
        public void TrainMultiClass()
        {
            var rand = new Random(Seed);

            for (int test = 0; test < 5; ++test)
            {
                int numColumns = rand.Next(1, 10);
                var objective  = (rand.Next(2) == 0) ? ObjectiveType.MultiClass : ObjectiveType.MultiClassOva;
                var pms        = GenerateParameters(rand, objective, numColumns);

                Dictionary <int, int> categorical = null;
                var trainData = CreateRandomDenseClassifyData(rand, pms.Objective.NumClass, ref categorical, pms.Dataset.UseMissing, numColumns);
                var validData = (pms.Learning.EarlyStoppingRound > 0 || rand.Next(2) == 0) ? CreateRandomDenseClassifyData(rand, pms.Objective.NumClass, ref categorical, pms.Dataset.UseMissing, numColumns) : null;
                pms.Dataset.CategoricalFeature = categorical.Keys.ToArray();

                var learningRateSchedule = (rand.Next(2) == 0) ? (Func <int, double>)null : (iter => pms.Learning.LearningRate * Math.Pow(0.99, iter));

                //if (test != 1) continue;

                try
                {
                    using (var datasets = (rand.Next(2) == 0) ? new Datasets(pms.Common, pms.Dataset, trainData, validData) :
                                          new Datasets(pms.Common, pms.Dataset, Dense2Sparse(trainData), Dense2Sparse(validData)))
                        using (var trainer = new MulticlassTrainer(pms.Learning, pms.Objective))
                        {
                            //trainer.ToCommandLineFiles(datasets);

                            var model = trainer.Train(datasets, learningRateSchedule);
                            model.Managed.MaxThreads = rand.Next(1, Environment.ProcessorCount);

                            // possibly use subset of trees
                            var numIterations = -1;
                            if (rand.Next(2) == 0)
                            {
                                numIterations             = rand.Next(1, model.Managed.MaxNumTrees);
                                model.Managed.MaxNumTrees = numIterations;
                                model.Native.MaxNumTrees  = numIterations;
                            }

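                            // round-trip the managed OVA predictor through binary serialization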
                            OvaPredictor model2 = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        PredictorPersist.Save(model.Managed, writer);
                                        ms.Position = 0;
                                        model2      = PredictorPersist.Load <double[]>(reader) as OvaPredictor;
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

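                            // same round-trip for the native multiclass predictor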
                            MulticlassNativePredictor model2native = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        NativePredictorPersist.Save(model.Native, writer);
                                        ms.Position  = 0;
                                        model2native = NativePredictorPersist.Load <double[]>(reader) as MulticlassNativePredictor;
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

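                            // batch raw scores and probabilities from the booster over the training set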
                            var rawscore3s = trainer.Evaluate(Booster.PredictType.RawScore, trainData.Features, numIterations);
                            Assert.Equal(trainData.Features.Length, rawscore3s.GetLength(0));
                            Assert.Equal(pms.Objective.NumClass, rawscore3s.GetLength(1));

                            var output3s = trainer.Evaluate(Booster.PredictType.Normal, trainData.Features, numIterations);
                            Assert.Equal(trainData.Features.Length, output3s.GetLength(0));
                            Assert.Equal(pms.Objective.NumClass, output3s.GetLength(1));

                            var output3natives = model.Native.GetOutputs(trainData.Features);
                            Assert.Equal(trainData.Features.Length, output3natives.Length);

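                            // per-row checks against the serialized and native predictors and the booster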
                            for (var irow = 0; irow < trainData.Features.Length; irow++)
                            {
                                var row = trainData.Features[irow];
                                // check evaluation of managed model
                                double[] output = null;
                                var      input  = new VBuffer <float>(row.Length, row);
                                model.Managed.GetOutput(ref input, ref output);
                                foreach (var p in output)
                                {
                                    Assert.True(p >= 0);
                                    Assert.True(p <= 1);
                                }
                                Assert.Equal(1, output.Sum(), 5);
                                Assert.Equal(output.Length, pms.Objective.NumClass);

                                // compare with output of serialised model
                                double[] output2 = null;
                                model2.GetOutput(ref input, ref output2);
                                Assert.Equal(output.Length, output2.Length);
                                for (var i = 0; i < output.Length; i++)
                                {
                                    Compare(output[i], output2[i]);
                                }

                                // check raw scores against native booster object
                                var isRf      = (pms.Learning.Boosting == BoostingType.RandomForest);
                                var rawscores = (model.Managed as OvaPredictor).Predictors.Select(p =>
                                {
                                    double outputi = 0;
                                    if (p is CalibratedPredictor)
                                    {
                                        (p as CalibratedPredictor).SubPredictor.GetOutput(ref input, ref outputi);
                                    }
                                    else
                                    {
                                        p.GetOutput(ref input, ref outputi);
                                    }
                                    return(outputi, p.MaxNumTrees);
                                }).ToArray();
                                var rawscores3 = trainer.Evaluate(Booster.PredictType.RawScore, row, numIterations);
                                Assert.Equal(pms.Objective.NumClass, rawscores.Length);
                                Assert.Equal(pms.Objective.NumClass, rawscores3.Length);
                                for (var i = 0; i < rawscores.Length; i++)
                                {
                                    (var rawscore, var numTrees) = rawscores[i];
                                    Compare(isRf ? rawscore * numTrees : rawscore, rawscores3[i]);
                                    Assert.Equal(rawscores3[i], rawscore3s[irow, i]);
                                }
                                //Console.WriteLine(trainer.GetModelString());
                                //throw new Exception($"Raw score mismatch at row {irow}: {rawscores[i]} vs {rawscores3[i]} (error: {Math.Abs(rawscores[i] - rawscores3[i])}) input: {String.Join(", ", row)}");

                                double [] outputNative = null;
                                model.Native.GetOutput(ref input, ref outputNative);

                                double[] outputNative2 = null;
                                model2native.GetOutput(ref input, ref outputNative2);

                                // check probabilities against native booster object
                                var output3 = trainer.Evaluate(Booster.PredictType.Normal, row, numIterations);
                                for (var i = 0; i < output3.Length; i++)
                                {
                                    Assert.Equal(output3s[irow, i], output3[i]);
                                    Assert.Equal(output3natives[irow][i], output3[i]);
                                    Assert.Equal(outputNative[i], output3[i]);
                                    Assert.Equal(outputNative2[i], output3[i]);
                                }

                                if (objective == ObjectiveType.MultiClassOva)
                                {
                                    // booster object doesn't return normalised probabilities for OVA
                                    var sum = output3.Sum();
                                    for (var i = 0; i < output3.Length; i++)
                                    {
                                        output3[i] /= sum;
                                    }
                                }
                                Assert.Equal(pms.Objective.NumClass, output3.Length);
                                for (var i = 0; i < output3.Length; i++)
                                {
                                    Assert.Equal(output[i], output3[i], 3);
                                }
                            }

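                            // feature importances reported by the managed and native models should match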
                            var normalise   = rand.Next(2) == 0;
                            var getSplits   = rand.Next(2) == 0;
                            var gains       = model.Managed.GetFeatureWeights(normalise, getSplits);
                            var gainsNative = model.Native.GetFeatureWeights(normalise, getSplits);
                            Assert.Equal(gains.Count, gainsNative.Count);
                            foreach (var kv in gains)
                            {
                                Assert.True(0 <= kv.Key && kv.Key < trainData.NumColumns);
                                Assert.True(0.0 <= kv.Value);
                                Compare(kv.Value, gainsNative[kv.Key]);
                            }

                            if (!getSplits && !normalise)
                            {
                                var totGain1 = gains.Values.Sum();
                                var totGain2 = Enumerable.Range(0, trainData.NumColumns).SelectMany(i => model.Managed.GetFeatureGains(i)).Sum();
                                Compare(totGain1, totGain2);
                            }
                        }
                }
                catch (Exception e)
                {
                    throw new Exception($"Failed: {Seed} #{test} {pms}", e);
                }
            }
        }
Example 3
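TrainRegression: trains a regression model with a randomly chosen objective (L2, L1, Huber, Fair, Poisson, Quantile, MAPE, Gamma, or Tweedie) and compares managed, serialized, and native predictions.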
        public void TrainRegression()
        {
            var objectiveTypes =
                new ObjectiveType[] {
                ObjectiveType.Regression,
                ObjectiveType.RegressionL1,
                ObjectiveType.Huber,
                ObjectiveType.Fair,
                ObjectiveType.Poisson,
                ObjectiveType.Quantile,
                ObjectiveType.Mape,
                ObjectiveType.Gamma,
                ObjectiveType.Tweedie
            };

            var rand = new Random(Seed);

            for (int test = 0; test < 5; ++test)
            {
                int numColumns = rand.Next(1, 10);
                var objective  = objectiveTypes[rand.Next(objectiveTypes.Length)];
                var pms        = GenerateParameters(rand, objective, numColumns);
                if (rand.Next(2) == 0)
                {
                    pms.Objective.RegSqrt = true;
                }

                var learningRateSchedule = (rand.Next(2) == 0) ? (Func <int, double>)null : (iter => pms.Learning.LearningRate * Math.Pow(0.99, iter));

                try
                {
                    Dictionary <int, int> categorical = null;
                    var trainData = CreateRandomDenseRegressionData(rand, ref categorical, pms.Dataset.UseMissing, numColumns);
                    var validData = (pms.Learning.EarlyStoppingRound > 0 || rand.Next(2) == 0) ? CreateRandomDenseRegressionData(rand, ref categorical, pms.Dataset.UseMissing, numColumns) : null;
                    pms.Dataset.CategoricalFeature = categorical.Keys.ToArray();

                    // make labels positive for certain objective types
                    if (objective == ObjectiveType.Poisson ||
                        objective == ObjectiveType.Gamma ||
                        objective == ObjectiveType.Tweedie)
                    {
                        for (var i = 0; i < trainData.Labels.Length; i++)
                        {
                            trainData.Labels[i] = Math.Abs(trainData.Labels[i]);
                        }

                        if (validData != null)
                        {
                            for (var i = 0; i < validData.Labels.Length; i++)
                            {
                                validData.Labels[i] = Math.Abs(validData.Labels[i]);
                            }
                        }
                    }

                    // uncomment to select particular iteration
                    //if (test != 3)
                    //    continue;

                    using (var datasets = (rand.Next(2) == 0) ? new Datasets(pms.Common, pms.Dataset, trainData, validData) :
                                          new Datasets(pms.Common, pms.Dataset, Dense2Sparse(trainData), Dense2Sparse(validData)))
                        using (var trainer = new RegressionTrainer(pms.Learning, pms.Objective))
                        {
                            //if (true)
                            //    trainer.ToCommandLineFiles(datasets);

                            var model = trainer.Train(datasets, learningRateSchedule);
                            model.Managed.MaxThreads = rand.Next(1, Environment.ProcessorCount);

                            // possibly use subset of trees
                            var numIterations = -1;
                            if (rand.Next(2) == 0)
                            {
                                numIterations             = rand.Next(1, model.Managed.MaxNumTrees);
                                model.Managed.MaxNumTrees = numIterations;
                                model.Native.MaxNumTrees  = numIterations;
                            }

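                            // round-trip the managed predictor through binary serialization and reload it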
                            IPredictorWithFeatureWeights <double> model2 = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        PredictorPersist.Save(model.Managed, writer);
                                        ms.Position = 0;
                                        model2      = PredictorPersist.Load <double>(reader);
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

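                            // same round-trip for the native predictor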
                            IPredictorWithFeatureWeights <double> model2native = null;
                            using (var ms = new System.IO.MemoryStream())
                                using (var writer = new System.IO.BinaryWriter(ms))
                                    using (var reader = new System.IO.BinaryReader(ms))
                                    {
                                        NativePredictorPersist.Save(model.Native, writer);
                                        ms.Position  = 0;
                                        model2native = NativePredictorPersist.Load <double>(reader);
                                        Assert.Equal(ms.Position, ms.Length);
                                    }

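                            // batch predictions from the booster over the whole training set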
                            var output3s = trainer.Evaluate(Booster.PredictType.Normal, trainData.Features, numIterations);
                            Assert.Equal(trainData.Features.Length, output3s.GetLength(0));
                            Assert.Equal(1, output3s.GetLength(1));

                            var output3natives = model.Native.GetOutputs(trainData.Features);
                            Assert.Equal(trainData.Features.Length, output3natives.Length);

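                            // per-row checks: managed, reloaded, and booster outputs must agree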
                            for (int i = 0; i < trainData.Features.Length; i++)
                            {
                                var row = trainData.Features[i];

                                double output = 0;
                                var    input  = new VBuffer <float>(row.Length, row);
                                model.Managed.GetOutput(ref input, ref output);
                                Assert.False(double.IsNaN(output));

                                double output2 = 0;
                                model2.GetOutput(ref input, ref output2);
                                Compare(output, output2);

                                var output3 = trainer.Evaluate(Booster.PredictType.Normal, row, numIterations);
                                Assert.Single(output3);
                                Assert.Equal(output3[0], output3s[i, 0]);
                                Assert.Equal(output3[0], output3natives[i]);
                                Compare(output, output3[0]);
                                //Console.WriteLine(trainer.GetModelString());
                                //throw new Exception($"Output mismatch {output} vs {output3[0]} (error: {Math.Abs(output - output3[0])}) input: {String.Join(", ", row)}");

                                double outputNative = 0;
                                model.Native.GetOutput(ref input, ref outputNative);
                                Assert.Equal(outputNative, output3[0]);

                                model2native.GetOutput(ref input, ref outputNative);
                                Assert.Equal(outputNative, output3[0]);
                            }

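                            // feature importances reported by the managed and native models should match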
                            var normalise   = rand.Next(2) == 0;
                            var getSplits   = rand.Next(2) == 0;
                            var gains       = model.Managed.GetFeatureWeights(normalise, getSplits);
                            var gainsNative = model.Native.GetFeatureWeights(normalise, getSplits);
                            Assert.Equal(gains.Count, gainsNative.Count);
                            foreach (var kv in gains)
                            {
                                Assert.True(0 <= kv.Key && kv.Key < trainData.NumColumns);
                                Assert.True(0.0 <= kv.Value);
                                Compare(kv.Value, gainsNative[kv.Key]);
                            }

                            if (!getSplits && !normalise)
                            {
                                var totGain1 = gains.Values.Sum();
                                var totGain2 = Enumerable.Range(0, trainData.NumColumns).SelectMany(i => model.Managed.GetFeatureGains(i)).Sum();
                                Compare(totGain1, totGain2);
                            }
                        }
                }
                catch (Exception e)
                {
                    throw new Exception($"Failed: {Seed} #{test} {pms}", e);
                }
            }
        }
Example 4
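TrainBinary: trains a binary classifier, optionally continuing training on a subset of rows, and checks probabilities, raw scores, serialization round-trips, and feature weights.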
        public void TrainBinary()
        {
            var rand = new Random(Seed);

            for (int test = 0; test < 5; ++test)
            {
                int numColumns = rand.Next(1, 10);
                var pms        = GenerateParameters(rand, ObjectiveType.Binary, numColumns);
                Dictionary <int, int> categorical = null;
                var trainData = CreateRandomDenseClassifyData(rand, 2, ref categorical, pms.Dataset.UseMissing, numColumns);
                var validData = (pms.Learning.EarlyStoppingRound > 0 || rand.Next(2) == 0) ? CreateRandomDenseClassifyData(rand, 2, ref categorical, pms.Dataset.UseMissing, numColumns) : null;
                pms.Dataset.CategoricalFeature = categorical.Keys.ToArray();

                var learningRateSchedule = (rand.Next(2) == 0) ? (Func <int, double>)null : (iter => pms.Learning.LearningRate * Math.Pow(0.99, iter));

                try
                {
                    using (var datasets = (rand.Next(2) == 0) ? new Datasets(pms.Common, pms.Dataset, trainData, validData) :
                                          new Datasets(pms.Common, pms.Dataset, Dense2Sparse(trainData), Dense2Sparse(validData)))
                        using (var trainer = new BinaryTrainer(pms.Learning, pms.Objective))
                        {
                            //trainer.ToCommandLineFiles(datasets);

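                            // optionally build a subset of the training rows to exercise ContinueTraining below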
                            var datasets2 = (rand.Next(2) == 0) ? null : datasets.Training.GetSubset(Enumerable.Range(0, datasets.Training.NumRows / 2).ToArray());

                            var model = trainer.Train(datasets, learningRateSchedule);
                            {
                                if (datasets2 != null)
                                {
                                    model.Dispose();
                                    model = trainer.ContinueTraining(datasets2, learningRateSchedule);
                                }

                                model.Managed.MaxThreads = rand.Next(1, Environment.ProcessorCount);

                                // possibly use subset of trees
                                var numIterations = -1;
                                if (rand.Next(2) == 0)
                                {
                                    numIterations             = rand.Next(1, model.Managed.MaxNumTrees);
                                    model.Managed.MaxNumTrees = numIterations;
                                    model.Native.MaxNumTrees  = numIterations;
                                }

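                                // round-trip the managed (calibrated) predictor through binary serialization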
                                CalibratedPredictor model2 = null;
                                using (var ms = new System.IO.MemoryStream())
                                    using (var writer = new System.IO.BinaryWriter(ms))
                                        using (var reader = new System.IO.BinaryReader(ms))
                                        {
                                            PredictorPersist.Save(model.Managed, writer);
                                            ms.Position = 0;
                                            model2      = PredictorPersist.Load <double>(reader) as CalibratedPredictor;
                                            Assert.Equal(ms.Position, ms.Length);
                                        }

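                                // same round-trip for the native binary predictor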
                                BinaryNativePredictor model2native = null;
                                using (var ms = new System.IO.MemoryStream())
                                    using (var writer = new System.IO.BinaryWriter(ms))
                                        using (var reader = new System.IO.BinaryReader(ms))
                                        {
                                            NativePredictorPersist.Save(model.Native, writer);
                                            ms.Position  = 0;
                                            model2native = NativePredictorPersist.Load <double>(reader) as BinaryNativePredictor;
                                            Assert.Equal(ms.Position, ms.Length);
                                        }

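                                // batch raw scores and probabilities from the booster over the training set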
                                var rawscore2s = trainer.Evaluate(Booster.PredictType.RawScore, trainData.Features, numIterations);
                                Assert.Equal(trainData.Features.Length, rawscore2s.GetLength(0));
                                Assert.Equal(1, rawscore2s.GetLength(1));

                                var output3s = trainer.Evaluate(Booster.PredictType.Normal, trainData.Features, numIterations);
                                Assert.Equal(trainData.Features.Length, output3s.GetLength(0));
                                Assert.Equal(1, output3s.GetLength(1));

                                var output3natives = model.Native.GetOutputs(trainData.Features);
                                Assert.Equal(trainData.Features.Length, output3natives.Length);

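                                // per-row checks: managed, reloaded, and booster outputs must agree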
                                for (int i = 0; i < trainData.Features.Length; i++)
                                {
                                    var row = trainData.Features[i];

                                    double output = 0;
                                    var    input  = new VBuffer <float>(row.Length, row);
                                    model.Managed.GetOutput(ref input, ref output);
                                    Assert.True(output >= 0);
                                    Assert.True(output <= 1);

                                    double output2 = 0;
                                    model2.GetOutput(ref input, ref output2);
                                    Compare(output, output2);

                                    // check raw score against native booster object
                                    var rawscore = 0.0;
                                    (model.Managed as CalibratedPredictor).SubPredictor.GetOutput(ref input, ref rawscore);
                                    var rawscore2 = trainer.Evaluate(Booster.PredictType.RawScore, row, numIterations);
                                    Assert.Single(rawscore2);
                                    Assert.Equal(rawscore2[0], rawscore2s[i, 0]);
                                    var isRf = (pms.Learning.Boosting == BoostingType.RandomForest);
                                    Compare(isRf ? rawscore * model.Managed.MaxNumTrees : rawscore, rawscore2[0]);

                                    var output3 = trainer.Evaluate(Booster.PredictType.Normal, row, numIterations);
                                    Assert.Single(output3);
                                    Assert.Equal(output3[0], output3s[i, 0]);
                                    Assert.Equal(output3[0], output3natives[i]);
                                    Compare(output, output3[0]);

                                    double outputNative = 0;
                                    model.Native.GetOutput(ref input, ref outputNative);
                                    Assert.Equal(outputNative, output3[0]);

                                    model2native.GetOutput(ref input, ref outputNative);
                                    Assert.Equal(outputNative, output3[0]);

                                    //Console.WriteLine(trainer.GetModelString());
                                    //throw new Exception($"Output mismatch {output} vs {output3[0]} (error: {Math.Abs(output - output3[0])}) input: {String.Join(", ", row)}");
                                }

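                                // feature importances reported by the managed and native models should match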
                                var normalise   = rand.Next(2) == 0;
                                var getSplits   = rand.Next(2) == 0;
                                var gains       = model.Managed.GetFeatureWeights(normalise, getSplits);
                                var gainsNative = model.Native.GetFeatureWeights(normalise, getSplits);
                                Assert.Equal(gains.Count, gainsNative.Count);
                                foreach (var kv in gains)
                                {
                                    Assert.True(0 <= kv.Key && kv.Key < trainData.NumColumns);
                                    Assert.True(0.0 <= kv.Value);
                                    Compare(kv.Value, gainsNative[kv.Key]);
                                }

                                if (!getSplits && !normalise)
                                {
                                    var totGain1 = gains.Values.Sum();
                                    var totGain2 = Enumerable.Range(0, trainData.NumColumns).SelectMany(i => model.Managed.GetFeatureGains(i)).Sum();
                                    Compare(totGain1, totGain2);
                                }
                            }

                            if (datasets2 != null)
                            {
                                datasets2.Dispose();
                            }
                            if (model != null)
                            {
                                model.Dispose();
                            }
                        }
                }
                catch (Exception e)
                {
                    throw new Exception($"Failed: {Seed} #{test} {pms}", e);
                }
            }
        }