Example #1
        public override ConfusionMatrix Execute()
        {
            //Create a network with one layer and one neuron in that layer
            var network = new ActivationNetwork(new ThresholdFunction(), 3, 1);

            //Get a reference to the neuron
            var neuron = network.Layers[0].Neurons[0] as ActivationNeuron;

            //Create the Perceptron learning algorithm
            //The library's perceptron implements a single-layer linear classifier
            var teacher = new PerceptronLearning(network);

            teacher.LearningRate = 0.1;

            //Enrich the dimension of the vectors by padding a 1 to the end of each one
            var richTraining = AlgorithmHelpers.PaddDimension(trainingSet);
            var richTesting = AlgorithmHelpers.PaddDimension(testSet);

            //Train the network until the error is small enough
            //or 500 iterations have been computed
            int epochs = 0;
            while (true)
            {
                double error = teacher.RunEpoch(richTraining, trainingOutput); //total epoch error, not divided by trainingSet.Length
                ++epochs;
                if (error < 0.025 * trainingSet.Length || epochs == 500) break;
            }

            var predicted = richTesting
                   .Select(x => neuron.Compute(x))
                   .Select(x => Convert.ToInt32(x))
                   .ToArray();


            //Create a confusion matrix with the calculated parameters
            ConfusionMatrix cmatrix = new ConfusionMatrix(predicted, expected, POSITIVE, NEGATIVE);

            OnAlgorithmEnded(Enumerable.Repeat(neuron, 1), cmatrix);
            return cmatrix;
        }
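AlgorithmHelpers.PaddDimension is not shown in this example. Judging from the comment above ("padding a 1 to the end"), it most likely appends a constant 1 to every input vector so the threshold/bias can be learned as an ordinary weight, which also matches the 3-input network created above if the raw vectors carry two features. A minimal sketch of such a helper, with the name and signature taken from the call site rather than from the project's actual code:

        //Hypothetical padding helper (sketch, not the project's implementation)
        public static double[][] PaddDimension(double[][] vectors)
        {
            var padded = new double[vectors.Length][];
            for (int i = 0; i < vectors.Length; i++)
            {
                //Copy the original features and append a constant 1.0 as a bias input
                padded[i] = new double[vectors[i].Length + 1];
                Array.Copy(vectors[i], padded[i], vectors[i].Length);
                padded[i][vectors[i].Length] = 1.0;
            }
            return padded;
        }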
Example #2
        public void FisherExactTestConstructorTest1()
        {
            // Example from http://rfd.uoregon.edu/files/rfd/StatisticalResources/lec_05a.txt

            ConfusionMatrix matrix = new ConfusionMatrix
            (
                 14, 10,
                 21, 3
            );

            {
                var target = new FisherExactTest(matrix, OneSampleHypothesis.ValueIsSmallerThanHypothesis);
                Assert.AreEqual(OneSampleHypothesis.ValueIsSmallerThanHypothesis, target.Hypothesis);
                Assert.AreEqual(DistributionTail.OneLower, target.Tail);
                Assert.AreEqual(0.02450, target.PValue, 1e-5);
            }

            {
                var target = new FisherExactTest(matrix, OneSampleHypothesis.ValueIsDifferentFromHypothesis);
                Assert.AreEqual(OneSampleHypothesis.ValueIsDifferentFromHypothesis, target.Hypothesis);
                Assert.AreEqual(DistributionTail.TwoTail, target.Tail);
                Assert.AreEqual(0.04899, target.PValue, 1e-4);
            }

            {
                var target = new FisherExactTest(matrix, OneSampleHypothesis.ValueIsGreaterThanHypothesis);
                Assert.AreEqual(OneSampleHypothesis.ValueIsGreaterThanHypothesis, target.Hypothesis);
                Assert.AreEqual(DistributionTail.OneUpper, target.Tail);
                Assert.AreEqual(0.99607, target.PValue, 1e-4);
            }
        }
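The one-sided (lower-tail) p-value asserted above can be reproduced by hand. This is a sketch under the assumption that the constructor arguments lay the 2x2 table out with rows (14, 10) and (21, 3), giving row totals 24/24 and column totals 35/13; the lower-tail Fisher p-value is then the hypergeometric probability P(X <= 14), which indeed comes out near 0.0245 (requires System and System.Numerics):

        static double FisherLowerTailCheck()
        {
            //Exact binomial coefficient via BigInteger (the division is exact at every step)
            Func<int, int, System.Numerics.BigInteger> choose = (n, k) =>
            {
                System.Numerics.BigInteger r = 1;
                for (int i = 0; i < k; i++)
                    r = r * (n - i) / (i + 1);
                return r;
            };

            int total = 48, colTotal = 35, rowTotal = 24, a = 14;
            double p = 0;
            for (int x = Math.Max(0, rowTotal + colTotal - total); x <= a; x++)
                p += (double)(choose(colTotal, x) * choose(total - colTotal, rowTotal - x))
                   / (double)choose(total, rowTotal);

            return p; //~0.02450, matching the OneLower assertion
        }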
Example #3
        public virtual List<ConfusionMatrix> TestModel(TrainningData trainningData)
        {
            int[] expected = trainningData.ClassificationAttribute;
            int[] predicted = ComputeModel(trainningData.TrainningAttributes);
            int positiveValue = trainningData.PositiveValue;
            int negativeValue = trainningData.NegativeValue;

            ConfusionMatrix confusionMatrix = new ConfusionMatrix(predicted, expected, positiveValue, negativeValue);
            return new List<ConfusionMatrix> { confusionMatrix };
        }
        public override ConfusionMatrix Execute()
        {
            var richTesting = testSet;

            ConcurrentDictionary<double, ActivationNetwork> networks = new ConcurrentDictionary<double, ActivationNetwork>();

            Parallel.For(0, 2000, (int i) =>
            {
                //Create an activation network
                ThreadLocal<ActivationNetwork> network = new ThreadLocal<ActivationNetwork>(() =>
                {
                    return new ActivationNetwork(new SigmoidFunction(2), 2, 2, 1);
                });

                ThreadLocal<ResilientBackpropagationLearning> teacher = new ThreadLocal<ResilientBackpropagationLearning>(() =>
                {
                    return new ResilientBackpropagationLearning(network.Value);
                });

                ThreadLocal<int> iter = new ThreadLocal<int>(() => { return 0; });
                ThreadLocal<double> error = new ThreadLocal<double>(() => { return 0; });

                while (networks.IsEmpty)
                {
                    error.Value = teacher.Value.RunEpoch(trainingSet, trainingOutput);
                    iter.Value++;
                    if (iter.Value == 1000) break;
                }
                if (!networks.ContainsKey(error.Value)) networks.TryAdd(error.Value, network.Value);
            }
            );

            Dictionary<ConfusionMatrix, ActivationNetwork> cms = new Dictionary<ConfusionMatrix, ActivationNetwork>();
            foreach (var keyv in networks)
            {
                var p = richTesting
                   .Select(x => keyv.Value.Compute(x))
                   .Select(x => Convert.ToInt32(x[0]))
                   .ToArray();

                ConfusionMatrix cm = new ConfusionMatrix(p, expected, POSITIVE, NEGATIVE);
                cms.Add(cm, keyv.Value);
            }

            var kv = (from x in cms
                      orderby x.Key.Accuracy descending
                      select x).First();

            var neurons = from neuron in kv.Value.Layers[0].Neurons
                          select neuron as ActivationNeuron;

            OnAlgorithmEnded(neurons, kv.Key);
            return kv.Key;
        }
Example #5
 public void OnAlgorithmEnded(IEnumerable<ActivationNeuron> neurons, ConfusionMatrix matrix)
 {
     var handler = Finished;
     if (handler != null)
     {
         var e = new AlgorithmFinishedEventArgs()
         {
             Neurons = neurons,
             Matrix = matrix
         };
         handler(this, e);
     }
 }
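The event and argument type this raiser relies on are not shown in the example. A minimal sketch of the declarations it assumes, with the member names inferred from the call site rather than taken from any library:

 //Assumed declarations (sketch): an event plus a simple EventArgs carrier
 public event EventHandler<AlgorithmFinishedEventArgs> Finished;

 public class AlgorithmFinishedEventArgs : EventArgs
 {
     public IEnumerable<ActivationNeuron> Neurons { get; set; }
     public ConfusionMatrix Matrix { get; set; }
 }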
Example #6
        /// <summary>
        ///   Creates a new McNemar test.
        /// </summary>
        /// 
        /// <param name="matrix">The contingency table to test.</param>
        /// <param name="yatesCorrection">True to use Yates's correction for
        ///   continuity, false otherwise. Default is false.</param>
        /// 
        public McNemarTest(ConfusionMatrix matrix, bool yatesCorrection = false)
        {
            int a = matrix.TruePositives;
            int b = matrix.FalseNegatives;
            int c = matrix.FalsePositives;
            int d = matrix.TrueNegatives;

            double u = yatesCorrection ? Math.Abs(b - c) - 0.5 : b - c;

            double chiSquare = (u * u) / (b + c);
            int df = 1;

            Compute(chiSquare, df);
        }
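In standard notation the constructor above uses only the discordant cells b (false negatives) and c (false positives); the concordant cells a and d do not enter the statistic. The textbook form it implements, with one degree of freedom, is

\chi^2 = \frac{(b - c)^2}{b + c} without the correction, and \chi^2 = \frac{(\lvert b - c \rvert - 0.5)^2}{b + c} with Yates's correction.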
Example #7
        public void ConfusionMatrixConstructorTest()
        {
            // The correct and expected output values (as confirmed by a Gold
            //  standard rule, actual experiment or true verification)
            int[] expected = { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0 };

            // The values as predicted by the decision system or
            //  the test whose performance is being measured.
            int[] predicted = { 0, 0, 0, 1, 1, 0, 0, 0, 0, 1 };


            // In this test, 1 means positive, 0 means negative
            int positiveValue = 1;
            int negativeValue = 0;

            // Create a new confusion matrix using the given parameters
            ConfusionMatrix matrix = new ConfusionMatrix(predicted, expected,
                positiveValue, negativeValue);

            // At this point,
            //   True Positives should be equal to 1;
            //   True Negatives should be equal to 6;
            //   False Negatives should be equal to 1;
            //   False Positives should be equal to 2.


            int falseNegatives = 1;
            int falsePositives = 2;
            int truePositives = 1;
            int trueNegatives = 6;

            Assert.AreEqual(predicted.Length, matrix.Samples);
            Assert.AreEqual(8, matrix.ActualNegatives);
            Assert.AreEqual(2, matrix.ActualPositives);
            Assert.AreEqual(7, matrix.PredictedNegatives);
            Assert.AreEqual(3, matrix.PredictedPositives);

            Assert.AreEqual(falseNegatives, matrix.FalseNegatives);
            Assert.AreEqual(falsePositives, matrix.FalsePositives);
            Assert.AreEqual(truePositives, matrix.TruePositives);
            Assert.AreEqual(trueNegatives, matrix.TrueNegatives);

            Assert.AreEqual(0.7, matrix.Accuracy);
            Assert.AreEqual(0.5, matrix.Sensitivity);
            Assert.AreEqual(0.75, matrix.Specificity);
            Assert.AreEqual((0.5 + 0.75) / 2.0, matrix.Efficiency);

            Assert.AreEqual(0.21821789023599239, matrix.MatthewsCorrelationCoefficient);
        }
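The asserted statistics follow directly from the four counts; a quick hand-check using the standard definitions (computed here by hand, outside the library):

        double tp = 1, tn = 6, fp = 2, fn = 1, n = 10;
        double accuracy    = (tp + tn) / n;                    //7 / 10 = 0.7
        double sensitivity = tp / (tp + fn);                   //1 / 2  = 0.5
        double specificity = tn / (tn + fp);                   //6 / 8  = 0.75
        double efficiency  = (sensitivity + specificity) / 2;  //0.625
        double mcc = (tp * tn - fp * fn)
                   / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)); //4 / sqrt(336) ≈ 0.21821789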
Example #8
        public override List<ConfusionMatrix> TestModel(TrainningData trainningData)
        {
            ContinuousDataTableAdapter continuousDataTableAdapter = new ContinuousDataTableAdapter();

            DataTable continuousDataTable = continuousDataTableAdapter.GetData();
            DataTable dataTable = continuousDataTable.DefaultView.ToTable(false, TableMetaData.TestingAttributes);
            string[] columnNames;
            double[][] inputs = dataTable.ToArray(out columnNames);

            int[] expected = trainningData.ClassificationAttribute;
            int[] predicted = ComputeModel(inputs);
            int positiveValue = trainningData.PositiveValue;
            int negativeValue = trainningData.NegativeValue;

            ConfusionMatrix confusionMatrix = new ConfusionMatrix(predicted, expected, positiveValue, negativeValue);
            return new List<ConfusionMatrix> { confusionMatrix };
        }
Example #9
        /// <summary>
        /// Validates the Accord.Net DecisionTree or MulticlassSupportVectorMachine classifier on a test dataset
        /// </summary>
        /// <param name="test">The test dataset</param>
        /// <param name="obj">The Accord.Net DecisionTree or MulticlassSupportVectorMachine object</param>
        public static void Validate(DataTable test, object obj)
        {
            List<int> expected = new List<int>();
            List<int> predicted = new List<int>();

            foreach (DataRow row in test.Rows)
            {
                double gender = 1;
                if (string.Compare((string)row["Gender"], "F", true, CultureInfo.CurrentCulture) == 0)
                {
                    gender = 0;
                }

                double[] testQuery = new double[]
                                                 {
                                                    gender, Convert.ToDouble(row["YearOfBirth"], CultureInfo.CurrentCulture), Convert.ToDouble(row["SmokingEffectiveYear"], CultureInfo.CurrentCulture), Convert.ToDouble(row["NISTcode"], CultureInfo.CurrentCulture),
                                                    Convert.ToDouble(row["Height"], CultureInfo.CurrentCulture), Convert.ToDouble(row["Weight"], CultureInfo.CurrentCulture), Convert.ToDouble(row["BMI"], CultureInfo.CurrentCulture),
                                                    Convert.ToDouble(row["SystolicBP"], CultureInfo.CurrentCulture), Convert.ToDouble(row["DiastolicBP"], CultureInfo.CurrentCulture), Convert.ToDouble(row["RespiratoryRate"], CultureInfo.CurrentCulture),
                                                    Convert.ToDouble(row["Temperature"], CultureInfo.CurrentCulture)
                                                 };

                int output = -1;
                if (obj is DecisionTree)
                {
                    output = ((DecisionTree)obj).Compute(testQuery);
                }
                else if (obj is MulticlassSupportVectorMachine)
                {
                    output = ((MulticlassSupportVectorMachine)obj).Compute(testQuery);
                }
                else
                {
                    throw new ArgumentException("Unknown algorithm for validation.");
                }

                expected.Add(Convert.ToInt32(row["DMIndicator"], CultureInfo.CurrentCulture));
                predicted.Add(output);
            }

            var confusionMatrix = new ConfusionMatrix(predicted.ToArray(), expected.ToArray());
            Logger.Info("The following is the confusion matrix (aka truth table).  Look for TP (true positive), FP (false positive), etc...");
            Logger.Info("Accuracy :" + confusionMatrix.ToString());
            Logger.Info("Hit Enter to continue....");
            Console.ReadLine();
        }
        public override ConfusionMatrix Execute()
        {
            //Create a kNN classifier with 2 classes
            var knn = new KNearestNeighbors(k: k,
                classes: 2,
                inputs: trainingSet,
                outputs: trainingOutput);

            //Map the classifier over the merged training and test sets
            //This will return an array where index i is the classification of the i-th vector
            //of the merged set
            var predicted = AlgorithmHelpers
                .MergeArrays(trainingSet, testSet)
                .Select(x => knn.Compute(x))
                .ToArray();

            //Create a new confusion matrix with the calculated parameters
            var cmatrix = new ConfusionMatrix(predicted, AlgorithmHelpers.MergeArrays(trainingOutput, expected), POSITIVE, NEGATIVE);
            return cmatrix;
        }
Example #11
        public void McNemarTestConstructorTest()
        {
            int[,] matrix = 
            {
                { 101, 121 },
                {  59,  33 },
            };

            ConfusionMatrix a = new ConfusionMatrix(matrix);

            McNemarTest target = new McNemarTest(a, true);

            Assert.AreEqual(21.0125, target.Statistic);
            Assert.AreEqual(1, target.DegreesOfFreedom);

            McNemarTest target2 = new McNemarTest(a, false);

            Assert.AreEqual(21.355555, target2.Statistic, 1e-5);
            Assert.AreEqual(1, target2.DegreesOfFreedom);
        }
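Both asserted statistics can be checked by hand from the off-diagonal (discordant) cells of the table, 121 and 59, using the formulas from the McNemarTest constructor in Example #6:

        double b = 121, c = 59;
        double yates = Math.Pow(Math.Abs(b - c) - 0.5, 2) / (b + c); //61.5^2 / 180 = 21.0125
        double plain = Math.Pow(b - c, 2) / (b + c);                 //62^2   / 180 ≈ 21.355556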
Example #12
        public override ConfusionMatrix Execute()
        {
            //The Partial Least Squares algorithm
            //It uses a PartialLeastSquaresAnalysis library object with the non-linear iterative partial least squares (NIPALS) algorithm
            //and runs on the mean-centered and standardized data

            //Create an analysis
            var pls = new PartialLeastSquaresAnalysis(trainingSet,
                trainingOutput,
                AnalysisMethod.Standardize,
                PartialLeastSquaresAlgorithm.NIPALS);

            pls.Compute();

            //After computing the analysis
            //create a linear model to predict new variables
            MultivariateLinearRegression regression = pls.CreateRegression();

            //This will hold the result of the classifications
            var predictedLifted = new int[testSet.GetLength(0)][];

            for (int i = 0; i < predictedLifted.Length; ++i)
            {
                predictedLifted[i] = regression
                    .Compute(testSet.GetRow(i)) //Retrieve the row vector of the test set
                    .Select(x => Convert.ToInt32(x))// Convert the result to int
                    .ToArray();
            }

            //Unlift the prediction vector
            var predicted = predictedLifted
                .SelectMany(x => x)
                .ToArray();

            //Create a new confusion matrix with the calculated parameters
            ConfusionMatrix cmatrix = new ConfusionMatrix(predicted, expected, POSITIVE, NEGATIVE);
            return cmatrix;
        }
Example #13
        //This is the function that runs the demo application
        //It handles the file loading, the execution of the algorithms,
        //and the manipulation of the controls
        private void SearchSolution()
        {
            //Read the data from the files
            Double[][] file1DataRaw = UtilityProvider.ReadMatrixFromFile(@"class_1.dat");
            Double[][] file2DataRaw = UtilityProvider.ReadMatrixFromFile(@"class_2.dat");

            //Choose 2 features
            class_1 = UtilityProvider.ScaleDown(UtilityProvider.ChooseFeatures(file1DataRaw));
            class_2 = UtilityProvider.ScaleDown(UtilityProvider.ChooseFeatures(file2DataRaw));

            //Fill the charts with the data
            ShowTrainingData();
            //Clear the list view
            ClearListView();
            //Fill it with the confusion matrix for each algorithm per iteration
            ConfusionMatrix[,] statistics = new ConfusionMatrix[4, 5];
            //Merge the two data sets
            //and run kmeans
            RunKMeans(UtilityProvider.MergeArrays(file1DataRaw, file2DataRaw));

            this.Invoke(new Action(() => progressBar1.Step *= 2));
            //Partition 5 times and run the algorithms
            for (int i = 0; i < 5; ++i)
            {

                this.Invoke(new Action(() => progressBar1.PerformStep()));
                //Partition each class into training and testing sets
                var partitions = new DataPartition[] { UtilityProvider.Partition(class_1), UtilityProvider.Partition(class_2) };

                //Create the training data
                var trainingPair = UtilityProvider.CreateDataPair(partitions[0].Item1, partitions[1].Item1);
                var trainingSet = trainingPair.Item1;
                var trainingOutput = trainingPair.Item2;

                //Create the testing data
                var testingPair = UtilityProvider.CreateDataPair(partitions[0].Item2, partitions[1].Item2);
                var testingSet = testingPair.Item1;
                var testingOutput = testingPair.Item2;
                //Some functions need the training output to be a vector of doubles
                var doubleTO = trainingOutput
                                .Select(x =>
                                    new Double[] { Convert.ToDouble(x) })
                                    .ToArray();

                for (int k = 1; k < 3; ++k)
                {
                    if (BestKNN == null)
                    {
                        BestKNN = RunKNN(k, trainingSet, trainingOutput, testingSet, testingOutput);
                    }
                    else
                    {
                        var iter = RunKNN(k, trainingSet, trainingOutput, testingSet, testingOutput);
                        if (iter.Accuracy > BestKNN.Accuracy)
                            BestKNN = iter;
                    }
                }

               //Compute the confusion matrices for the four classifiers
                statistics[0, i] = RunPerceptron(trainingSet, doubleTO, testingSet, testingOutput);
                statistics[1, i] = RunLS(UtilityProvider.JaggedToMD(trainingSet), UtilityProvider.JaggedToMD(doubleTO), UtilityProvider.JaggedToMD(testingSet), testingOutput);
                //Use the most accurate K of KNN
                statistics[2, i] = BestKNN;
                statistics[3, i] = ParallelRunNN(trainingSet, doubleTO, testingSet, testingOutput);
                //RunAnotherNN(trainingSet, doubleTO, testingSet, testingOutput);
            }

            //Update the classifier lines in the charts
            //with the most accurate of the 5 iterations
            ChartUpdate(perceChart.Name, "classifier", MostAccuratePerceptron.Item1);
            ChartUpdate(nnChart.Name, "classifier1", MostAccurateNN.Item1);

            //Process the array with the Confusion Matrices
            //and update the list view
            var processed = UtilityProvider.ProcessStatistics(statistics);
            UpdateStatisticsListView(processed);
        }
        public void TransformTest()
        {
            var inputs = yinyang.Submatrix(null, 0, 1).ToArray();
            var labels = yinyang.GetColumn(2).ToInt32();
            
            ConfusionMatrix actual, expected;
            SequentialMinimalOptimization a, b;

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                a = new SequentialMinimalOptimization(machine, inputs, labels);
                a.UseComplexityHeuristic = true;
                a.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                    values[i] = Math.Sign(machine.Compute(inputs[i]));

                expected = new ConfusionMatrix(values, labels);
            }

            {
                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                b = new SequentialMinimalOptimization(machine, projection, labels);
                b.UseComplexityHeuristic = true;
                b.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                    values[i] = Math.Sign(machine.Compute(projection[i]));

                actual = new ConfusionMatrix(values, labels);
            }

            Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
            Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
            Assert.AreEqual(expected.TruePositives, actual.TruePositives);
            Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
            Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
        }
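TransformTest exercises the kernel identity K(x, z) = phi(x)·phi(z): a linear machine trained on the explicitly transformed inputs should agree with the kernel machine. Assuming Polynomial(2, 0) denotes the homogeneous degree-2 kernel K(x, z) = (x·z)^2, one explicit feature map for 2-D inputs is phi(x) = (x1^2, sqrt(2)·x1·x2, x2^2); the library's Transform may order or scale the features differently, but any map satisfying the identity yields the same decision function. A small numeric sanity check of the identity:

        double[] x = { 0.3, -1.2 }, z = { 0.7, 0.5 };
        double dot = x[0] * z[0] + x[1] * z[1];
        double k = dot * dot;                                    //kernel value (x·z)^2
        double[] phiX = { x[0] * x[0], Math.Sqrt(2) * x[0] * x[1], x[1] * x[1] };
        double[] phiZ = { z[0] * z[0], Math.Sqrt(2) * z[0] * z[1], z[1] * z[1] };
        double kExplicit = phiX[0] * phiZ[0] + phiX[1] * phiZ[1] + phiX[2] * phiZ[2];
        //k and kExplicit agree up to floating point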
Example #15
 public void MatthewsCorrelationCoefficientTest2()
 {
     ConfusionMatrix matrix = new ConfusionMatrix(100, 100, 200, 600);
     Assert.AreEqual(0.21821789023599236, matrix.MatthewsCorrelationCoefficient);
 }
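The asserted coefficient matches ConfusionMatrixConstructorTest above up to the last digit, consistent with this matrix holding the same 1/6/2/1 cell counts multiplied by 100. That is expected: the Matthews correlation coefficient

\text{MCC} = \frac{TP \cdot TN - FP \cdot FN}{\sqrt{(TP + FP)(TP + FN)(TN + FP)(TN + FN)}}

is unchanged when every cell is scaled by a common factor k, because the numerator and the denominator both grow as k^2.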
        public void WeightRatioTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs = dataset.Submatrix(null, 0, 3);
            var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            Gaussian kernel = Gaussian.Estimate(inputs);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;
                smo.WeightRatio = 10;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.1, smo.NegativeWeight);
                Assert.AreEqual(0.7142857142857143, error);
                Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
                Assert.AreEqual(39, machine.SupportVectors.Length);


                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(12, matrix.TruePositives); // has more importance
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
                Assert.AreEqual(30, matrix.FalsePositives);
                Assert.AreEqual(0, matrix.TrueNegatives);

                Assert.AreEqual(1.0, matrix.Sensitivity);
                Assert.AreEqual(0.0, matrix.Specificity);

                Assert.AreEqual(0.44444444444444448, matrix.FScore);
                Assert.AreEqual(0.0, matrix.MatthewsCorrelationCoefficient);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;
                smo.WeightRatio = 0.1;

                double error = smo.Run();

                Assert.AreEqual(0.1, smo.PositiveWeight);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(0.21428571428571427, error);
                Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
                Assert.AreEqual(18, machine.SupportVectors.Length);


                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(8, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives); // has more importance
                Assert.AreEqual(4, matrix.TruePositives);
                Assert.AreEqual(29, matrix.TrueNegatives); // has more importance

                Assert.AreEqual(0.33333333333333331, matrix.Sensitivity);
                Assert.AreEqual(0.96666666666666667, matrix.Specificity);

                Assert.AreEqual(0.47058823529411764, matrix.FScore);
                Assert.AreEqual(0.41849149947774944, matrix.MatthewsCorrelationCoefficient);
            }
        }
        public void FixedWeightsTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs = dataset.Submatrix(null, 0, 3);
            var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
                Gaussian.Estimate(inputs), inputs[0].Length);

            var smo = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity = 10;

            double error = smo.Run();

            Assert.AreEqual(0.19047619047619047, error);
            Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
            Assert.AreEqual(29, machine.SupportVectors.Length);

            double[] expectedWeights =
            {
                1.65717694716503, 1.20005456611466, -5.70824245415995, 10,
                10, -2.38755497916487, 10, -8.15723436363058, 10, -10, 10,
                10, -0.188634936781317, -5.4354281009458, -8.48341139483265,
                -5.91105702760141, -5.71489190049223, 10, -2.37289205235858,
                -3.33031262413522, -1.97545116517677, 10, -10, -9.563186799279,
                -3.917941544845, -0.532584110773336, 4.81951847548326, 0.343668292727091,
                -4.34159482731336
            };

            Assert.IsTrue(expectedWeights.IsEqual(machine.Weights, 1e-6));

            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = Math.Sign(machine.Compute(inputs[i]));

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

            Assert.AreEqual(8, matrix.FalseNegatives);
            Assert.AreEqual(0, matrix.FalsePositives);
            Assert.AreEqual(4, matrix.TruePositives);
            Assert.AreEqual(30, matrix.TrueNegatives);

            Assert.AreEqual(1 / 3.0, matrix.Sensitivity);
            Assert.AreEqual(1, matrix.Specificity);

            Assert.AreEqual(0.5, matrix.FScore);
            Assert.AreEqual(0.5129891760425771, matrix.MatthewsCorrelationCoefficient);
        }
        public void UseClassProportionsTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs = dataset.Submatrix(null, 0, 3);
            var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            Gaussian kernel = Gaussian.Estimate(inputs);
            var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
            var smo = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity = 1.0;
            smo.UseClassProportions = true;

            double error = smo.Run();

            Assert.AreEqual(1, smo.Complexity);
            Assert.AreEqual(0.4, smo.PositiveWeight);
            Assert.AreEqual(1.0, smo.NegativeWeight);
            Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
            Assert.AreEqual(0.2857142857142857, error);
            Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
            Assert.AreEqual(26, machine.SupportVectors.Length);


            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = Math.Sign(machine.Compute(inputs[i]));

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
            Assert.AreEqual(12, matrix.FalseNegatives);
            Assert.AreEqual(0, matrix.FalsePositives);
            Assert.AreEqual(0, matrix.TruePositives);
            Assert.AreEqual(30, matrix.TrueNegatives);
        }
Example #19
        public void ToGeneralMatrixTest1()
        {
            // Example from http://www.iph.ufrgs.br/corpodocente/marques/cd/rd/presabs.htm

            ConfusionMatrix matrix = new ConfusionMatrix
            (
                truePositives: 70,
                trueNegatives: 95,
                falsePositives: 5,
                falseNegatives: 30
            );

            Assert.AreEqual(70, matrix.TruePositives);
            Assert.AreEqual(5, matrix.FalsePositives);
            Assert.AreEqual(95, matrix.TrueNegatives);
            Assert.AreEqual(30, matrix.FalseNegatives);

            GeneralConfusionMatrix general = matrix.ToGeneralMatrix();

            Assert.AreEqual(matrix.Kappa, general.Kappa, 1e-10);
            Assert.AreEqual(matrix.Accuracy, general.OverallAgreement, 1e-10);
            Assert.AreEqual(matrix.StandardError, general.StandardError, 1e-10);
            Assert.AreEqual(matrix.StandardErrorUnderNull, general.StandardErrorUnderNull, 1e-10);
            Assert.AreEqual(matrix.Variance, general.Variance, 1e-10);
            Assert.AreEqual(matrix.VarianceUnderNull, general.VarianceUnderNull, 1e-10);
            Assert.AreEqual(matrix.Samples, general.Samples);

            Assert.IsFalse(double.IsNaN(general.Kappa));
            Assert.IsFalse(double.IsNaN(general.OverallAgreement));
            Assert.IsFalse(double.IsNaN(general.StandardError));
            Assert.IsFalse(double.IsNaN(general.StandardErrorUnderNull));
            Assert.IsFalse(double.IsNaN(general.Variance));
            Assert.IsFalse(double.IsNaN(general.VarianceUnderNull));
        }
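Neither assertion above pins down a numeric kappa, but the page cited in ConfusionMatrixConstructorTest4 (the same 70/95/5/30 table) lists 0.650, and Cohen's kappa can be checked by hand from the four cells (standard definition, computed outside the library):

        double tp = 70, tn = 95, fp = 5, fn = 30, n = 200;
        double po = (tp + tn) / n;                                              //observed agreement = 0.825
        double pe = ((tp + fn) * (tp + fp) + (tn + fp) * (tn + fn)) / (n * n);  //chance agreement = 0.5
        double kappa = (po - pe) / (1 - pe);                                    //0.65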
Example #20
        public void ConfusionMatrixConstructorTest3()
        {
            // System output
            int[] predicted = new int[] { 2, 0, 1 };

            // Correct output
            int[] expected = new int[] { 5, 2, 1 };

            // 1 means positive (the others shall be treated as negatives)
            int positiveValue = 1;


            ConfusionMatrix target = new ConfusionMatrix(predicted, expected, positiveValue);


            int falseNegatives = 0;
            int falsePositives = 0;
            int truePositives = 1;
            int trueNegatives = 2;

            Assert.AreEqual(predicted.Length, target.Observations);
            Assert.AreEqual(2, target.ActualNegatives);
            Assert.AreEqual(1, target.ActualPositives);
            Assert.AreEqual(2, target.PredictedNegatives);
            Assert.AreEqual(1, target.PredictedPositives);

            Assert.AreEqual(falseNegatives, target.FalseNegatives);
            Assert.AreEqual(falsePositives, target.FalsePositives);
            Assert.AreEqual(truePositives, target.TruePositives);
            Assert.AreEqual(trueNegatives, target.TrueNegatives);

            Assert.AreEqual(1.0, target.Accuracy);
            Assert.AreEqual(1.0, target.Sensitivity);
            Assert.AreEqual(1.0, target.Specificity);
            Assert.AreEqual(1.0, target.Efficiency);

            // Perfect prediction
            Assert.AreEqual(1.0, target.MatthewsCorrelationCoefficient);
        }
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[] labels = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 1);

            Accord.Math.Tools.SetupGenerator(0);
            var projection = inputs.Apply(kernel.Transform);
            var machine = new SupportVectorMachine(projection[0].Length);
            var smo = new LinearCoordinateDescent(machine, projection, labels)
            {
                Complexity = 1000000,
                Tolerance = 1e-15
            };

            double error = smo.Run();

            Assert.AreEqual(1000000.0, smo.Complexity, 1e-15);

            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = Math.Sign(machine.Compute(projection[i]));

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
            Assert.AreEqual(6, matrix.FalseNegatives);
            Assert.AreEqual(7, matrix.FalsePositives);
            Assert.AreEqual(44, matrix.TruePositives);
            Assert.AreEqual(43, matrix.TrueNegatives);
        }
Example #22
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (ann == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }

            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvTestingSource.DataSource as DataTable).ToMatrix();

            // Extract inputs
            double[][] inputs = new double[sourceMatrix.GetLength(0)][];
            for (int i = 0; i < inputs.Length; i++)
                inputs[i] = new double[] { sourceMatrix[i, 0], sourceMatrix[i, 1] };

            // Get only the label outputs
            int[] expected = new int[sourceMatrix.GetLength(0)];
            for (int i = 0; i < expected.Length; i++)
                expected[i] = (int)sourceMatrix[i, 2];

            // Compute the machine outputs
            int[] output = new int[expected.Length];
            for (int i = 0; i < expected.Length; i++)
                output[i] = System.Math.Sign(ann.Compute(inputs[i])[0]);

            double[] expectedd = new double[expected.Length];
            double[] outputd = new double[expected.Length];
            for (int i = 0; i < expected.Length; i++)
            {
                expectedd[i] = expected[i];
                outputd[i] = output[i];
            }

            // Use confusion matrix to compute some statistics.
            ConfusionMatrix confusionMatrix = new ConfusionMatrix(output, expected, 1, -1);
            dgvPerformance.DataSource = new List<ConfusionMatrix> { confusionMatrix };

            foreach (DataGridViewColumn col in dgvPerformance.Columns) col.Visible = true;
            Column1.Visible = Column2.Visible = false;

            // Create performance scatterplot
            CreateResultScatterplot(zedGraphControl1, inputs, expectedd, outputd);
        }
        public void ComputeTest5()
        {
            var dataset = yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[] labels = dataset.GetColumn(2).ToInt32();

            {
                Linear kernel = new Linear();
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.14, error);
                Assert.AreEqual(30, machine.SupportVectors.Length);

                double[] actualWeights = machine.Weights;
                double[] expectedWeights = { -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(7, matrix.FalseNegatives);
                Assert.AreEqual(7, matrix.FalsePositives);
                Assert.AreEqual(43, matrix.TruePositives);
                Assert.AreEqual(43, matrix.TrueNegatives);
            }

            {
                Linear kernel = new Linear();
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;
                smo.PositiveWeight = 0.3;
                smo.NegativeWeight = 1.0;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(0.3, smo.PositiveWeight);
                Assert.AreEqual(0.21, error);
                Assert.AreEqual(24, machine.SupportVectors.Length);

                double[] actualWeights = machine.Weights;
                //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
                double[] expectedWeights = { -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = (int)machine.Compute(inputs[i]);

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(0, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Linear kernel = new Linear();
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;
                smo.PositiveWeight = 1.0;
                smo.NegativeWeight = 0.3;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
                Assert.AreEqual(0.3, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.15, error);
                Assert.AreEqual(19, machine.SupportVectors.Length);

                double[] actualWeights = machine.Weights;
                double[] expectedWeights = new double[] { 1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1, 0.129080057278249, 1, 0.737797469918795 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(0, matrix.FalseNegatives);
                Assert.AreEqual(50, matrix.FalsePositives);
                Assert.AreEqual(50, matrix.TruePositives);
                Assert.AreEqual(0, matrix.TrueNegatives);
            }
        }
Example #24
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.yinyang;
            var inputs = dataset.Submatrix(null, 0, 1).ToArray();
            var labels = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.2, error);

                Assert.AreEqual(0.11714451552090824, smo.Complexity);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(20, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(30, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Accord.Math.Tools.SetupGenerator(0);

                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                var smo = new LinearNewtonMethod(machine, projection, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.18, error);

                Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(projection[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(17, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives);
                Assert.AreEqual(33, matrix.TruePositives);
                Assert.AreEqual(49, matrix.TrueNegatives);
            }

        }
Example #25
        public void Start()
        {
            //Read the data from the files
            var file1DataRaw = AlgorithmHelpers.ReadMatrixFromFile(@"class_1.dat");
            var file2DataRaw = AlgorithmHelpers.ReadMatrixFromFile(@"class_2.dat");

            ClassA = AlgorithmHelpers.ScaleDown(AlgorithmHelpers.ChooseFeatures(file1DataRaw));
            ClassB = AlgorithmHelpers.ScaleDown(AlgorithmHelpers.ChooseFeatures(file2DataRaw));

            m_maxX = ClassA.Max(0)[0] > ClassB.Max(0)[0] ? ClassA.Max(0)[0] : ClassB.Max(0)[0];
            m_minX = ClassA.Min(0)[0] < ClassB.Min(0)[0] ? ClassA.Min(0)[0] : ClassB.Min(0)[0];

            //Fill the charts with the data
            m_view.ShowTrainingData(ClassA, ClassB);
            //Clear the list view
            m_view.ClearListView();
            //Fill it with the confusion matrix for each algorithm per iteration
            var statistics = new ConfusionMatrix[4, 5];


            //Merge the two data sets
            //and run kmeans
            var kmeans = new KMeansClustering(AlgorithmHelpers.MergeArrays(file1DataRaw, file2DataRaw),
                m_view.Iterations,
                m_view.ThetaStep);

            var idx = kmeans.Classify();

            m_view.ClustersTextUpdate(idx.Distinct().Length.ToString());

            m_view.ZeroProgressBar();
            m_view.StepProgressBar();

            //Partition m_iterations times and run the algorithms
            for (int i = 0; i < m_iterations; ++i)
            {
                m_view.PerformStep();
                //Partition each class into training and testing sets
                var partitions = new DataPartition[] { AlgorithmHelpers.Partition(ClassA), AlgorithmHelpers.Partition(ClassB) };

                //Create the training data
                var trainingPair = AlgorithmHelpers.CreateDataPair(partitions[0].Item1, partitions[1].Item1);
                var trainingSet = trainingPair.Item1;
                var trainingOutput = trainingPair.Item2;

                //Create the testing data
                var testingPair = AlgorithmHelpers.CreateDataPair(partitions[0].Item2, partitions[1].Item2);
                var testingSet = testingPair.Item1;
                var testingOutput = testingPair.Item2;

                //Some functions need the training output to be a vector of doubles
                var doubleTO = trainingOutput
                    .Select(x => new[] { Convert.ToDouble(x) })
                    .ToArray();

                for (int k = 1; k < 3; ++k)
                {
                    var nn = new KNearestNeighboursRuntime(k, trainingSet, trainingOutput, testingSet, testingOutput);

                    if (BestKNN == null)
                    {

                        BestKNN = nn.Execute();
                    }
                    else
                    {
                        var iter = nn.Execute();
                        if (iter.Accuracy > BestKNN.Accuracy)
                            BestKNN = iter;
                    }
                }

                var perceptron = new PerceptronRuntime(trainingSet, doubleTO, testingSet, testingOutput);
                perceptron.Finished += perceptron_Finished;

                var leastSquare = new LeastSquaresRuntime(AlgorithmHelpers.JaggedToMD(trainingSet), AlgorithmHelpers.JaggedToMD(doubleTO), AlgorithmHelpers.JaggedToMD(testingSet), testingOutput);
                var neuralNetwork = new ParallelNeuralNetworkRuntime(trainingSet, doubleTO, testingSet, testingOutput);

                neuralNetwork.Finished += neuralNetwork_Finished;
                //Compute the confusion matrices for the four classifiers                 
                statistics[0, i] = perceptron.Execute();
                statistics[1, i] = leastSquare.Execute();
                //Use the most accurate K of KNN
                statistics[2, i] = BestKNN;
                statistics[3, i] = neuralNetwork.Execute();
            }

            //Update the classifier lines in the charts
            //with the most accurate of the 5 iterations
            m_view.ChartUpdate("", "classifier", MostAccuratePerceptron.Item1);
            m_view.ChartUpdate("", "classifier1", MostAccurateNN.Item1);

            //Process the array with the Confusion Matrices
            //and update the list view
            var processed = AlgorithmHelpers.ProcessStatistics(statistics);
            m_view.UpdateStatisticsListView(processed);
        }
Example #26
        /// <summary>
        ///   Tests the previously created tree on a new set of data.
        /// </summary>
        /// 
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (tree == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }


            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first two columns)
            double[][] inputs = table.GetColumns(0, 1).ToArray();

            // Get the expected output labels (last column)
            int[] expected = table.GetColumn(2).ToInt32();


            // Compute the actual tree outputs
            int[] actual = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
                actual[i] = tree.Compute(inputs[i]);


            // Use confusion matrix to compute some statistics.
            ConfusionMatrix confusionMatrix = new ConfusionMatrix(actual, expected, 1, 0);
            dgvPerformance.DataSource = new [] { confusionMatrix };

            // Create performance scatter plot
            CreateResultScatterplot(zedGraphControl1, inputs, expected.ToDouble(), actual.ToDouble());
        }
Example #27
        public void ConfusionMatrixConstructorTest2()
        {
            // The correct and expected output values (as confirmed by a Gold
            //  standard rule, actual experiment or true verification)
            bool[] expected = { false, false, true, false, true, false, false, false, false, false };

            // The values as predicted by the decision system or
            //  the test whose performance is being measured.
            bool[] predicted = { false, false, false, true, true, false, false, false, false, true };


            // Create a new confusion matrix using the given parameters
            ConfusionMatrix matrix = new ConfusionMatrix(predicted, expected);


            int falseNegatives = 1;
            int falsePositives = 2;
            int truePositives = 1;
            int trueNegatives = 6;

            Assert.AreEqual(predicted.Length, matrix.Observations);
            Assert.AreEqual(8, matrix.ActualNegatives);
            Assert.AreEqual(2, matrix.ActualPositives);
            Assert.AreEqual(7, matrix.PredictedNegatives);
            Assert.AreEqual(3, matrix.PredictedPositives);

            Assert.AreEqual(falseNegatives, matrix.FalseNegatives);
            Assert.AreEqual(falsePositives, matrix.FalsePositives);
            Assert.AreEqual(truePositives, matrix.TruePositives);
            Assert.AreEqual(trueNegatives, matrix.TrueNegatives);

        }
        public void WeightsTest1()
        {
            var dataset = yinyang;
            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[] labels = dataset.GetColumn(2).ToInt32();

            Accord.Math.Tools.SetupGenerator(0);

            var kernel = new Linear(1);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 1;
                smo.Tolerance = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(43, matrix.TruePositives); // both classes are
                Assert.AreEqual(43, matrix.TrueNegatives); // well equilibrated
                Assert.AreEqual(7, matrix.FalseNegatives);
                Assert.AreEqual(7, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.14, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(31, machine.SupportVectors.Length);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1;
                smo.PositiveWeight = 100;
                smo.NegativeWeight = 1;
                smo.Tolerance = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.TruePositives); // has more importance
                Assert.AreEqual(23, matrix.TrueNegatives);
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
                Assert.AreEqual(27, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(100, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(100, smo.PositiveWeight);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(0.27, error);
                Assert.AreEqual(41, machine.SupportVectors.Length);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 100;
                smo.Tolerance = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                var matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(25, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
                Assert.AreEqual(25, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);  // has more importance

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(0.01, smo.WeightRatio);
                Assert.AreEqual(100, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.25, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(40, machine.SupportVectors.Length);
            }
        }
Example #29
        public void ConfusionMatrixConstructorTest4()
        {
            // Example from http://www.iph.ufrgs.br/corpodocente/marques/cd/rd/presabs.htm

            ConfusionMatrix matrix = new ConfusionMatrix(70, 95, 5, 30);

            // Prevalence	    0.500	0.100	0.011
            // Overall Power	0.500	0.900	0.989
            // Sensitivity	    0.700	0.700	0.700
            // Specificity	    0.950	0.950	0.950
            // PPP	            0.933	0.610	0.130
            // NPP	            0.760	0.970	0.997
            // Misc. Rate	    0.175	0.075	0.053
            // Odds Ratio	    44.333	44.333	44.333
            // Kappa	        0.650	0.610	0.210
            // NMI	            0.371	0.360	0.264

            Assert.AreEqual(0.500, matrix.OverallDiagnosticPower, 1e-3);
            Assert.AreEqual(0.700, matrix.Sensitivity, 1e-3);
            Assert.AreEqual(0.950, matrix.Specificity, 1e-3);
            Assert.AreEqual(0.933, matrix.PositivePredictiveValue, 1e-3);
            Assert.AreEqual(0.760, matrix.NegativePredictiveValue, 1e-3);
            Assert.AreEqual(0.175, 1 - matrix.Accuracy, 1e-3);
            Assert.AreEqual(44.333, matrix.OddsRatio, 1e-3);
            Assert.AreEqual(0.650, matrix.Kappa, 1e-3);
            Assert.AreEqual(0.371, matrix.NormalizedMutualInformation, 1e-3);

            Assert.IsFalse(double.IsNaN(matrix.OverallDiagnosticPower));
            Assert.IsFalse(double.IsNaN(matrix.Sensitivity));
            Assert.IsFalse(double.IsNaN(matrix.Specificity));
            Assert.IsFalse(double.IsNaN(matrix.PositivePredictiveValue));
            Assert.IsFalse(double.IsNaN(matrix.NegativePredictiveValue));
            Assert.IsFalse(double.IsNaN(matrix.Accuracy));
            Assert.IsFalse(double.IsNaN(matrix.OddsRatio));
            Assert.IsFalse(double.IsNaN(matrix.Kappa));
            Assert.IsFalse(double.IsNaN(matrix.NormalizedMutualInformation));
        }
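The sensitivity, specificity, predictive values, misclassification rate and odds ratio asserted above can be reproduced by hand from the four cells of the cited table (TP = 70, TN = 95, FP = 5, FN = 30, n = 200); the kappa of 0.650 is derived after ToGeneralMatrixTest1 above:

        double tp = 70, tn = 95, fp = 5, fn = 30, n = 200;
        double sensitivity = tp / (tp + fn);        //70 / 100   = 0.700
        double specificity = tn / (tn + fp);        //95 / 100   = 0.950
        double ppv         = tp / (tp + fp);        //70 / 75    ≈ 0.933
        double npv         = tn / (tn + fn);        //95 / 125   = 0.760
        double miscRate    = (fp + fn) / n;         //35 / 200   = 0.175
        double oddsRatio   = (tp * tn) / (fp * fn); //6650 / 150 ≈ 44.333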
        private static void testWeights(double[][] inputs, int[] labels, IKernel kernel)
        {
            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.PositiveWeight = 100;
                smo.NegativeWeight = 1;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.TruePositives); // has more importance
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.PositiveWeight = 1;
                smo.NegativeWeight = 100;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                var matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
                Assert.AreEqual(0, matrix.FalsePositives);  // has more importance
            }
        }