public void large_transform_few_components()
        {
            // Regression check: transforming with an explicit component count
            // must match learning with NumberOfOutputs preset to that count.
            int n = 100;

            double[][] data   = Jagged.Random(n, n);
            int[]      labels = Vector.Random(n, 0, 10);

            // Renamed from "kda": this test exercises the plain *linear*
            // discriminant analysis, not a kernel variant.
            var lda    = new LinearDiscriminantAnalysis();
            var target = lda.Learn(data, labels);

            // Project onto only the first 2 discriminants.
            var expected = lda.Transform(data, 2);

            Assert.AreEqual(n, expected.Rows());
            Assert.AreEqual(2, expected.Columns());

            // Re-learning with NumberOfOutputs = 2 should yield a classifier
            // whose projection matches the explicit 2-component transform.
            lda.NumberOfOutputs = 2;
            target = lda.Learn(data, labels);

            var actual = target.First.Transform(data);

            Assert.AreEqual(n, actual.Rows());
            Assert.AreEqual(2, actual.Columns());

            Assert.IsTrue(actual.IsEqual(expected));
        }
        public void ComputeTest2()
        {
            // Verifies the per-class statistics (means, sample indices) and
            // the discriminants computed by the analysis.
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Two classes are expected.
            Assert.AreEqual(2, lda.Classes.Count);

            // Class means: compare with a small tolerance instead of exact
            // floating-point equality (== on computed doubles is fragile).
            Assert.AreEqual(3.0, lda.Classes[0].Mean[0], 1e-10);
            Assert.AreEqual(3.6, lda.Classes[0].Mean[1], 1e-10);
            Assert.AreEqual(5, lda.Classes[0].Indices.Length);

            // The first five samples belong to class 0 ...
            for (int i = 0; i < 5; i++)
                Assert.AreEqual(i, lda.Classes[0].Indices[i]);

            // ... and the last five to class 1.
            for (int i = 0; i < 5; i++)
                Assert.AreEqual(5 + i, lda.Classes[1].Indices[i]);

            Assert.AreEqual(2, lda.Discriminants.Count);

            // Use a tolerance on both eigenvalue checks; the original mixed
            // exact equality on the first with a 1e-15 delta on the second.
            Assert.AreEqual(15.65685019206146, lda.Discriminants[0].Eigenvalue, 1e-10);
            Assert.AreEqual(0.0, lda.Discriminants[1].Eigenvalue, 1e-15);

            // Overall means of the two input columns.
            Assert.AreEqual(5.7, lda.Means[0], 1e-10);
            Assert.AreEqual(5.6, lda.Means[1], 1e-10);
        }
        public void ProjectionTest()
        {
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Project the input data into discriminant space
            double[,] projection = lda.Transform(inputs);

            // Expected projection (rows = samples, cols = discriminants).
            double[,] expected =
            {
                {  4.4273255813953485,   1.9629629629629628  },
                {  3.7093023255813953,  -2.5185185185185186  },
                {  3.2819767441860463,  -1.5185185185185186  },
                {  5.5639534883720927,  -3.7777777777777777  },
                {  5.7093023255813957,  -1.0370370370370372  },
                { 13.273255813953488,   -3.3333333333333339  },
                {  9.4186046511627914,  -3.5555555555555554  },
                { 11.136627906976745,    1.6666666666666661  },
                { 10.991279069767442,   -1.0740740740740744  },
                { 13.418604651162791,   -0.59259259259259345 }
            };

            // Fixes two assertion defects in the original: the arguments were
            // passed as (actual, expected) — Assert.AreEqual takes the
            // expected value first — and doubles were compared with exact
            // equality; a tolerance is used instead.
            for (int i = 0; i < 10; i++)
                for (int j = 0; j < 2; j++)
                    Assert.AreEqual(expected[i, j], projection[i, j], 1e-10);

            // Assert the result equals the transformation of the input
            double[,] result = lda.Result;
            Assert.IsTrue(Matrix.IsEqual(result, projection));
        }
        public void ClassifyTest1()
        {
            // Sample instances from Gutierrez-Osuna's LDA example, available on:
            // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf

            double[][] samples =
            {
                // First five rows: class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Last five rows: class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] classes =
            {
                0, 0, 0, 0, 0, // the first five are from class 0
                1, 1, 1, 1, 1  // the last five are from class 1
            };

            // Build and compute the discriminant analysis.
            var lda = new LinearDiscriminantAnalysis(samples, classes);
            lda.Compute();

            // Project the data into discriminant space, and check that the
            // classifier's own projection agrees with the direct transform.
            double[][] projected = lda.Transform(samples);
            double[][] viaClassifier = lda.Classifier.First.Transform(samples);
            Assert.IsTrue(projected.IsEqual(viaClassifier));

            // Classify every sample and verify the predicted labels:
            // the first half must map to class 0, the second half to class 1.
            int[] predicted = lda.Classify(samples);

            for (int i = 0; i < predicted.Length; i++)
            {
                int expectedClass = i < 5 ? 0 : 1;
                Assert.AreEqual(expectedClass, predicted[i]);
            }
        }
        //Protected methods (called by the public methods in parent class)
        protected override void DoTrain(System.Drawing.Bitmap[] charImgs, char[] chars)
        {
            // Convert each character image into a thresholded feature vector,
            // then fit an LDA model on the vectors and their class labels.
            double[][] input = new double[charImgs.Length][];
            for (int i = 0; i < charImgs.Length; i++)
                input[i] = Converters.ThresholdedBitmapToDoubleArray(charImgs[i]);

            int[] labels = getClassLabels(chars);

            lda = new LinearDiscriminantAnalysis(input, labels);
            lda.Compute();
        }
Beispiel #6
0
        public void LDAIrisVariableImpactTest()
        {
            // Build an LDA classification solution for the iris problem and
            // verify its variable impacts against the stored expected values.
            IClassificationProblemData irisData = LoadIrisProblem();
            IClassificationSolution ldaSolution =
                LinearDiscriminantAnalysis.CreateLinearDiscriminantAnalysisSolution(irisData);

            ClassificationSolutionVariableImpactsCalculator.CalculateImpacts(ldaSolution);

            Dictionary<string, double> expectedImpacts = GetExpectedValuesForIrisLDAModel();
            CheckDefaultAsserts(ldaSolution, expectedImpacts);
        }
Beispiel #7
0
 private void buildModel()
 {
     // Make sure the data matrices have been extracted before fitting.
     if (independentVls == null)
         getMatrix();

     // Fit the discriminant analysis and cache the per-variable
     // statistics that the rest of the class reads from it.
     lda = new Accord.Statistics.Analysis.LinearDiscriminantAnalysis(independentVls, dependentVls);
     lda.Compute();

     meanValues = lda.Means;
     stdValues = lda.StandardDeviations;
 }
        public void ClassifyTest()
        {
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Every training sample must classify back into its own label.
            // Fix: Assert.AreEqual takes (expected, actual); the original
            // call passed them reversed, producing misleading failure text.
            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(output[i], lda.Classify(inputs.GetRow(i)));
            }
        }
Beispiel #9
0
        public IActionResult GetLinearDiscriminantAnalysisResult([FromBody] GetLinearDiscriminantAnalysisRequest request)
        {
            // Run the analysis on the posted input matrix and its labels.
            var lda = new LinearDiscriminantAnalysis();
            lda.Compute(request.Input, request.Output);

            // Project the inputs, then package the projection together with
            // the per-class details into the response DTO.
            var transformed = lda.Transform(request.Input);

            var response = new LinearDiscriminantAnalysisResponse
            {
                Transformed = transformed.DoubleValues,
                LinearDiscriminantClasses = lda.Result.Classes.Select(c => c.ToDto()).ToArray()
            };

            return new OkObjectResult(response);
        }
Beispiel #10
0
        public void Train(List<TrainingValue> trainingData)
        {
            // Build one continuous decision variable per feature dimension;
            // the decision tree is always constructed, matching the original.
            var variables = new List<DecisionVariable>();
            for (int i = 0; i < featureSize; i++)
                variables.Add(DecisionVariable.Continuous(i.ToString()));

            tree = new DecisionTree(inputs: variables, classes: 2);

            // Unpack the samples into parallel feature/label arrays.
            int count = trainingData.Count;
            double[][] features = new double[count][];
            int[] labels = new int[count];

            for (int i = 0; i < count; i++)
            {
                features[i] = trainingData[i].Features;
                labels[i] = Convert.ToInt32(trainingData[i].State);
            }

            // Fit whichever classifier this instance is configured for.
            switch (type)
            {
            case ClassifierType.DecisionTree:
                var c45 = new C45Learning(tree);
                c45.Learn(features, labels);
                break;

            case ClassifierType.LDA:
                var lda = new LinearDiscriminantAnalysis();
                pipeline = lda.Learn(features, labels);
                break;

            case ClassifierType.SVM:
                var svmTeacher = new LinearCoordinateDescent();
                svm = svmTeacher.Learn(features, labels);
                break;

            case ClassifierType.Bayes:
                var bayesTeacher = new NaiveBayesLearning<NormalDistribution>();
                bayes = bayesTeacher.Learn(features, labels);
                break;
            }

            Trained = true;
        }
        public void SerializeTest()
        {
            // Expected projection of the training inputs after learning.
            double[][] actual, expected = new double[][] {
                new double[] { 3.97283892300425, 1.19607843137255 },
                new double[] { 1.89135569201701, -2.90196078431373 },
                new double[] { 1.91851676901275, -1.90196078431373 },
                new double[] { 2.83703353802551, -4.35294117647059 },
                new double[] { 3.89135569201701, -1.80392156862745 },
                new double[] { 8.72838923004251, -5.05882352941177 },
                new double[] { 5.78271138403401, -4.70588235294118 },
                new double[] { 8.86419461502126, -0.0588235294117654 },
                new double[] { 7.80987246102976, -2.6078431372549 },
                new double[] { 9.78271138403401, -2.50980392156863 }
            };

            int[] output = { 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 };

            var target = new LinearDiscriminantAnalysis();

            target.Learn(inputs.ToJagged(), output);

            // (removed an unused "var str = actual.ToCSharp();" debug leftover)
            actual = target.Transform(inputs.ToJagged());

            Assert.IsTrue(Matrix.IsEqual(actual, expected, 0.01));

            // A deep-cloned copy must produce the same projection ...
            var copy = Serializer.DeepClone(target);

            actual = copy.Transform(inputs.ToJagged());
            Assert.IsTrue(Matrix.IsEqual(actual, expected, 0.01));

            // ... and all of the learned state must survive serialization.
            Assert.IsTrue(target.ScatterBetweenClass.IsEqual(copy.ScatterBetweenClass));
            Assert.IsTrue(target.ScatterMatrix.IsEqual(copy.ScatterMatrix));
            Assert.IsTrue(target.ScatterWithinClass.IsEqual(copy.ScatterWithinClass));
            Assert.IsTrue(target.StandardDeviations.IsEqual(copy.StandardDeviations));
            Assert.IsTrue(target.Classifications.IsEqual(copy.Classifications));
            Assert.IsTrue(target.Classifier.NumberOfInputs.IsEqual(copy.Classifier.NumberOfInputs));
            Assert.IsTrue(target.Classifier.NumberOfOutputs.IsEqual(copy.Classifier.NumberOfOutputs));
            Assert.IsTrue(target.Classifier.First.Weights.IsEqual(copy.Classifier.First.Weights));
            Assert.IsTrue(target.Classifier.Second.Function.Equals(copy.Classifier.Second.Function));
            Assert.IsTrue(target.Classifier.Second.Means.IsEqual(copy.Classifier.Second.Means));
            Assert.IsTrue(target.NumberOfClasses.IsEqual(copy.NumberOfClasses));
            Assert.IsTrue(target.NumberOfInputs.Equals(copy.NumberOfInputs));
            Assert.IsTrue(target.NumberOfOutputs.Equals(copy.NumberOfOutputs));
        }
Beispiel #12
0
        private IEnumerable<IClassificationSolution> GenerateClassificationSolutions(IClassificationProblemData problemData)
        {
            // Collect baseline solutions plus, when possible, an LDA solution.
            var solutions = new List<IClassificationSolution>();

            var zeroR = ZeroR.CreateZeroRSolution(problemData);
            zeroR.Name = "ZeroR Classification Solution";
            solutions.Add(zeroR);

            var oneR = OneR.CreateOneRSolution(problemData);
            oneR.Name = "OneR Classification Solution";
            solutions.Add(oneR);

            // LDA is best-effort: deliberately skipped (not fatal) when the
            // problem data is not supported by the analysis.
            try
            {
                var lda = LinearDiscriminantAnalysis.CreateLinearDiscriminantAnalysisSolution(problemData);
                lda.Name = "Linear Discriminant Analysis Solution";
                solutions.Add(lda);
            }
            catch (NotSupportedException) { }
            catch (ArgumentException) { }

            return solutions;
        }
        public void ComputeTest()
        {
            // Checks the scatter matrices produced by the analysis.
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            lda.Compute();

            // Scatter of each class' samples.
            double[,] class0Scatter =
            {
                {  0.80, -0.40 },
                { -0.40,  2.64 }
            };

            double[,] class1Scatter =
            {
                {  1.84, -0.04 },
                { -0.04,  2.64 }
            };

            // Expected between-class scatter matrix.
            double[,] betweenScatter =
            {
                { 29.16, 21.60 },
                { 21.60, 16.00 }
            };

            // Expected within-class scatter (the sum of the class scatters).
            double[,] withinScatter =
            {
                {  2.64, -0.44 },
                { -0.44,  5.28 }
            };

            Assert.IsTrue(Matrix.IsEqual(lda.Classes[0].Scatter, class0Scatter, 0.01));
            Assert.IsTrue(Matrix.IsEqual(lda.Classes[1].Scatter, class1Scatter, 0.01));
            Assert.IsTrue(Matrix.IsEqual(lda.ScatterBetweenClass, betweenScatter, 0.01));
            Assert.IsTrue(Matrix.IsEqual(lda.ScatterWithinClass, withinScatter, 0.01));
        }
        public void ComputeTest()
        {
            // Verifies the scatter matrices computed by the analysis:
            // per-class scatter plus between- and within-class scatter.
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Scatter of the first class' samples.
            double[,] expectedScatter1 =
            {
                {  0.80, -0.40 },
                { -0.40,  2.64 }
            };

            // Scatter of the second class' samples.
            double[,] expectedScatter2 =
            {
                {  1.84, -0.04 },
                { -0.04,  2.64 }
            };

            // Expected between-class scatter matrix.
            double[,] expectedBetween =
            {
                { 29.16, 21.60 },
                { 21.60, 16.00 },
            };

            // Expected within-class scatter (the sum of the class scatters).
            double[,] expectedWithin =
            {
                {  2.64, -0.44 },
                { -0.44,  5.28 }
            };

            Assert.IsTrue(Matrix.IsEqual(lda.Classes[0].Scatter, expectedScatter1, 0.01));
            Assert.IsTrue(Matrix.IsEqual(lda.Classes[1].Scatter, expectedScatter2, 0.01));

            Assert.IsTrue(Matrix.IsEqual(lda.ScatterBetweenClass, expectedBetween, 0.01));
            Assert.IsTrue(Matrix.IsEqual(lda.ScatterWithinClass, expectedWithin, 0.01));
        }
Beispiel #15
0
        /// <summary>
        ///   Launched when the user clicks the "Run analysis" button.
        ///   Runs a descriptive analysis followed by an LDA projection and
        ///   populates the result grids and plots.
        /// </summary>
        ///
        private void btnCompute_Click(object sender, EventArgs e)
        {
            // Save any pending changes
            dgvAnalysisSource.EndEdit();

            if (dgvAnalysisSource.DataSource == null)
            {
                MessageBox.Show("Please load some data using File > Open!");
                return;
            }

            // Create a matrix from the source data table
            double[,] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToMatrix(out columnNames);

            // (removed two unused locals that only held the matrix dimensions)

            // Create and compute a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(sourceMatrix, columnNames);
            sda.Compute();

            // Show the descriptive analysis on the screen
            dgvDistributionMeasures.DataSource = sda.Measures;

            // Get the input values (the two first columns)
            double[,] inputs = sourceMatrix.GetColumns(0, 1);

            // Get only the associated labels (last column)
            int[] outputs = sourceMatrix.GetColumn(2).ToInt32();

            // Create a Linear Discriminant Analysis for the data
            lda = new LinearDiscriminantAnalysis(inputs, outputs);

            lda.Compute(); // Finally, compute the analysis!

            // Perform the transformation of the data using two dimensions
            double[,] result = lda.Transform(inputs, 2);

            // Create a new plot with the original Z column
            double[,] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;

            // Create the output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, columnNames);

            // Populate discriminants overview with analysis data
            dgvFeatureVectors.DataSource      = new ArrayDataView(lda.DiscriminantMatrix);
            dgvScatterBetween.DataSource      = new ArrayDataView(lda.ScatterBetweenClass);
            dgvScatterWithin.DataSource       = new ArrayDataView(lda.ScatterWithinClass);
            dgvScatterTotal.DataSource        = new ArrayDataView(lda.ScatterMatrix);
            dgvPrincipalComponents.DataSource = lda.Discriminants;
            distributionView.DataSource       = lda.Discriminants;
            cumulativeView.DataSource         = lda.Discriminants;

            // Populate classes information
            dgvClasses.DataSource = lda.Classes;

            lbStatus.Text = "Good! Feel free to browse the other tabs to see what has been found.";
        }
        public void ClassifyTest()
        {
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Every training sample must classify back into its own label.
            // Fix: Assert.AreEqual expects (expected, actual); the original
            // call passed the arguments reversed.
            for (int i = 0; i < output.Length; i++)
                Assert.AreEqual(output[i], lda.Classify(inputs.GetRow(i)));
        }
        public void ProjectionTest()
        {
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Project the input data into discriminant space
            double[,] projection = lda.Transform(inputs);

            // Expected projection (rows = samples, cols = discriminants).
            double[,] expected =
            {
                {  4.4273255813953485,   1.9629629629629628  },
                {  3.7093023255813953,  -2.5185185185185186  },
                {  3.2819767441860463,  -1.5185185185185186  },
                {  5.5639534883720927,  -3.7777777777777777  },
                {  5.7093023255813957,  -1.0370370370370372  },
                { 13.273255813953488,   -3.3333333333333339  },
                {  9.4186046511627914,  -3.5555555555555554  },
                { 11.136627906976745,    1.6666666666666661  },
                { 10.991279069767442,   -1.0740740740740744  },
                { 13.418604651162791,   -0.59259259259259345 }
            };

            // Fixes two assertion defects in the original: the arguments were
            // passed as (actual, expected) — Assert.AreEqual takes the
            // expected value first — and doubles were compared with exact
            // equality; a tolerance is used instead.
            for (int i = 0; i < 10; i++)
                for (int j = 0; j < 2; j++)
                    Assert.AreEqual(expected[i, j], projection[i, j], 1e-10);

            // Assert the result equals the transformation of the input
            double[,] result = lda.Result;
            Assert.IsTrue(Matrix.IsEqual(result, projection));
        }
        public void ComputeTest2()
        {
            // Verifies the per-class statistics (means, sample indices) and
            // the discriminants computed by the analysis.
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(inputs, output);

            // Compute the analysis
            lda.Compute();

            // Two classes are expected.
            Assert.AreEqual(2, lda.Classes.Count);

            // Class means: compare with a small tolerance instead of exact
            // floating-point equality (== on computed doubles is fragile).
            Assert.AreEqual(3.0, lda.Classes[0].Mean[0], 1e-10);
            Assert.AreEqual(3.6, lda.Classes[0].Mean[1], 1e-10);
            Assert.AreEqual(5, lda.Classes[0].Indices.Length);

            // The first five samples belong to class 0 ...
            for (int i = 0; i < 5; i++)
                Assert.AreEqual(i, lda.Classes[0].Indices[i]);

            // ... and the last five to class 1.
            for (int i = 0; i < 5; i++)
                Assert.AreEqual(5 + i, lda.Classes[1].Indices[i]);

            Assert.AreEqual(2, lda.Discriminants.Count);

            // Use a tolerance on both eigenvalue checks; the original mixed
            // exact equality on the first with a 1e-15 delta on the second.
            Assert.AreEqual(15.65685019206146, lda.Discriminants[0].Eigenvalue, 1e-10);
            Assert.AreEqual(0.0, lda.Discriminants[1].Eigenvalue, 1e-15);

            // Overall means of the two input columns.
            Assert.AreEqual(5.7, lda.Means[0], 1e-10);
            Assert.AreEqual(5.6, lda.Means[1], 1e-10);
        }
Beispiel #19
0
        static void Main(string[] args)
        {
            // Demo: project 2-D samples with LDA, train a multilayer
            // perceptron (MLP) on the projected data, then classify a sample.
            Console.WriteLine("This is a demo application that combines Linear Discriminant Analysis (LDA) and Multilayer Perceptron(MLP).");
            double[,] inputs =
            {
              {  4,  1 },
              {  2,  4 },
              {  2,  3 },
              {  3,  6 },
              {  4,  4 },
              {  9, 10 },
              {  6,  8 },
              {  9,  5 },
              {  8,  7 },
              { 10,  8 }
            };

            int[] output =
            {
              1, 1, 2, 1, 1, 2, 2, 2, 1, 2
            };

            Console.WriteLine("\r\nProcessing sample data, pease wait...");

            //1.1 Create the analysis from the samples and their labels.
            var lda = new LinearDiscriminantAnalysis(inputs, output);

            //1.2 Compute the analysis
            lda.Compute();

            //1.3 Project the samples into discriminant space.
            double[,] projection = lda.Transform(inputs);

            // LDA and the MLP need slightly different representations,
            // e.g. double[,] vs. double[][]; LDA uses int classes while the
            // MLP's sigmoid output needs targets in the range [0..1].
            #region convertions
            int vector_count = projection.GetLength(0);
            int dimensions = projection.GetLength(1);

            //====================================================================

            // Convert the projected matrix into jagged arrays for the network.
            double[][] input2 = new double[vector_count][];
            double[][] output2 = new double[vector_count][];

            for (int i = 0; i < input2.Length; i++)
            {
                input2[i] = new double[projection.GetLength(1)];
                for (int j = 0; j < projection.GetLength(1); j++)
                {
                    input2[i][j] = projection[i, j];
                }

                output2[i] = new double[1];

                // Turn the int class labels into doubles in [0..1], because
                // the network uses a sigmoid output.
                output2[i][0] = Convert.ToDouble(output[i]) / 10;
            }
            #endregion

            //2.1 create neural network
            ActivationNetwork network = new ActivationNetwork(
                new SigmoidFunction(2),
                dimensions, // inputs neurons in the network
                dimensions, // neurons in the first layer
                1); // one neuron in the second layer

            //2.2 create teacher
            BackPropagationLearning teacher = new BackPropagationLearning(network);

            //2.3 Training loop: a fixed number of epochs.
            int p = 0;
            while (true)
            {
                // run epoch of learning procedure
                double error = teacher.RunEpoch(input2, output2);

                p++;
                if (p > 1000000) break;
                // instead of counting iterations we could watch the error value to decide when to stop
            }

            //====================================================================

            //3. Classify: project the new sample with the trained LDA first.
            double[,] sample = { { 10, 8 } };
            double[,] projectedSample = lda.Transform(sample);
            double[] projectedSample2 = new double[2];

            projectedSample2[0] = projectedSample[0, 0];
            projectedSample2[1] = projectedSample[0, 1];

            double[] classs = network.Compute(projectedSample2);

            Console.WriteLine("========================");

            // Convert back to an int class by rounding and multiplying by 10
            // (the labels were divided by 10 above). If the result is not as
            // expected, rounding may be the problem — or more training is needed.

            Console.WriteLine(Math.Round(classs[0], 1, MidpointRounding.AwayFromZero)*10);
            Console.ReadLine();
        }
Beispiel #20
0
        private void button1_Click(object sender, EventArgs e)
        {
            // Runs the analysis pipeline over the grid data: descriptive
            // statistics first, then an LDA projection shown in the UI.

            // Finishes and save any pending changes to the given data
            dgvAnalysisSource.EndEdit();

            if (dgvAnalysisSource.DataSource == null)
            {
                return;
            }

            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            // (removed two unused locals that only held the matrix dimensions)

            // Creates a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(sourceMatrix, sourceColumns);
            sda.Compute();

            // Populates statistics overview tab with analysis data
            dgvDistributionMeasures.DataSource = sda.Measures;

            // Get only the input values (exclude the class label indicator column)
            double[,] data = sourceMatrix.Submatrix(null, startColumn: 0, endColumn: 1);

            // Get only the associated labels
            int[] labels = sourceMatrix.GetColumn(2).ToInt32();

            // Creates the Linear Discriminant Analysis of the given source
            lda = new LinearDiscriminantAnalysis(data, labels);

            // Computes the analysis
            lda.Compute();

            // Performs the transformation of the data using two dimensions
            double[,] result = lda.Transform(data, 2);

            // Create a new plot with the original Z column
            double[,] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;

            // Create output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, sourceColumns);

            // Populates discriminants overview with analysis data
            dgvPrincipalComponents.DataSource = lda.Discriminants;
            dgvFeatureVectors.DataSource      = new ArrayDataView(lda.DiscriminantMatrix);
            dgvScatterBetween.DataSource      = new ArrayDataView(lda.ScatterBetweenClass);
            dgvScatterWithin.DataSource       = new ArrayDataView(lda.ScatterWithinClass);
            dgvScatterTotal.DataSource        = new ArrayDataView(lda.ScatterMatrix);

            // Populates classes information
            dgvClasses.DataSource = lda.Classes;

            CreateComponentCumulativeDistributionGraph(graphCurve);
            CreateComponentDistributionGraph(graphShare);
        }
Beispiel #21
0
        /// <summary>
        ///   Launched when the user clicks the "Run analysis" button.
        ///   Computes a descriptive analysis and a two-component LDA
        ///   projection, then populates the result grids and plots.
        /// </summary>
        /// 
        private void btnCompute_Click(object sender, EventArgs e)
        {
            // Save any pending changes 
            dgvAnalysisSource.EndEdit();

            if (dgvAnalysisSource.DataSource == null)
            {
                MessageBox.Show("Please load some data using File > Open!");
                return;
            }

            // Create a matrix from the source data table
            double[][] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToArray(out columnNames);

            // Create and compute a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(columnNames);

            sda.Learn(sourceMatrix);

            // Show the descriptive analysis on the screen
            dgvDistributionMeasures.DataSource = sda.Measures;


            // Get the input values (the two first columns)
            double[][] inputs = sourceMatrix.GetColumns(0, 1);

            // Get only the associated labels (last column)
            int[] outputs = sourceMatrix.GetColumn(2).ToInt32();
            outputs = outputs.Subtract(outputs.Min()); // shift labels so the smallest class becomes 0

            // Create a Linear Discriminant Analysis for the data,
            // keeping only the first two discriminant components.
            lda = new LinearDiscriminantAnalysis()
            {
                NumberOfOutputs = 2
            };

            // Compute the analysis!
            lda.Learn(inputs, outputs); 


            // Perform the transformation of the data
            double[][] result = lda.Transform(inputs);

            // Create a new plot with the original Z column
            double[][] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;

            // Create the output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, columnNames);

            // Populate discriminants overview with analysis data
            dgvFeatureVectors.DataSource = new ArrayDataView(lda.DiscriminantVectors);
            dgvScatterBetween.DataSource = new ArrayDataView(lda.ScatterBetweenClass);
            dgvScatterWithin.DataSource = new ArrayDataView(lda.ScatterWithinClass);
            dgvScatterTotal.DataSource = new ArrayDataView(lda.ScatterMatrix);
            dgvPrincipalComponents.DataSource = lda.Discriminants;
            distributionView.DataSource = lda.Discriminants;
            cumulativeView.DataSource = lda.Discriminants;

            // Populate classes information
            dgvClasses.DataSource = lda.Classes;

            lbStatus.Text = "Good! Feel free to browse the other tabs to see what has been found.";
        }
        /// <summary>
        /// Regression test for LinearDiscriminantAnalysis on the Schölkopf toy
        /// dataset: checks the 2-component projection against a stored baseline,
        /// verifies Result matches Transform, and checks classification output
        /// (batch and per-sample with scores).
        /// </summary>
        public void ComputeTest3()
        {
            // Schölkopf KPCA toy example
            double[][] inputs = KernelDiscriminantAnalysisTest.scholkopf().ToJagged();

            // 90 labels: classes 1, 2 and 3, each repeated 30 times.
            int[] output = Matrix.Expand(new int[, ] {
                { 1 }, { 2 }, { 3 }
            }, new int[] { 30, 30, 30 }).GetColumn(0);

            var target = new LinearDiscriminantAnalysis(inputs, output);

            target.Compute();


            // Project the samples onto the first two discriminants.
            double[][] actualOutput = target.Transform(inputs, 2);

            // Stored baseline of the expected 2-D projection (regression values).
            double[][] expectedOutput = new double[][]
            {
                new double[] { -0.538139989229878, -0.121488441426448 },
                new double[] { -0.520567977909383, -0.236347775257103 },
                new double[] { -0.613477771536265, -0.237553378277353 },
                new double[] { -0.881409292261883, 0.0935329540430248 },
                new double[] { -0.786030327227691, -0.194447244320605 },
                new double[] { -0.551442781305912, -0.0123559223681317 },
                new double[] { -0.654158684224005, -0.197674316111905 },
                new double[] { -0.559262527603992, 0.013941098843778 },
                new double[] { -0.66411263033584, -0.150490536781379 },
                new double[] { -0.450278115670319, -0.26635277047329 },
                new double[] { -0.754277919814726, -0.362102431498804 },
                new double[] { -0.734928584895253, -0.248980106866025 },
                new double[] { -0.653608644698921, 0.0143647201181394 },
                new double[] { -0.760931829205159, -0.210515053383166 },
                new double[] { -0.618516474044195, -0.142285367330635 },
                new double[] { -0.779342671809792, -0.141199637690287 },
                new double[] { -0.735924645881001, -0.146617711795974 },
                new double[] { -0.785744941649802, -0.31168984794763 },
                new double[] { -0.669124608334209, -0.420106774148463 },
                new double[] { -0.824474062918818, 0.147088211780247 },
                new double[] { -0.799320425464541, -0.0637527478684568 },
                new double[] { -0.663385572908364, -0.341675337652223 },
                new double[] { -0.711489490612721, -0.285076461900782 },
                new double[] { -0.629974516987287, -0.0793021800418604 },
                new double[] { -0.65653220838978, -0.215831476310217 },
                new double[] { -0.732028761895192, -0.0344445204239324 },
                new double[] { -0.747862524505661, -0.0387281405057906 },
                new double[] { -0.584471308297719, -0.146019839184912 },
                new double[] { -0.505999843470041, -0.292203766994798 },
                new double[] { -0.753145346001892, -0.344521076589941 },
                new double[] { 0.524001176904191, -0.64158358593467 },
                new double[] { 0.423231837049123, -0.286159521674357 },
                new double[] { 0.656426922526874, -0.734236743185728 },
                new double[] { 0.400687334850924, -0.55115062988607 },
                new double[] { 0.636240473795815, -0.748303834209756 },
                new double[] { 0.434843292522556, -0.566740271085617 },
                new double[] { 0.6104713046872, -0.678967931597066 },
                new double[] { 0.705262787734728, -0.640414054245901 },
                new double[] { 0.447832238019099, -0.661180602320438 },
                new double[] { 0.659661046824861, -0.630212303468225 },
                new double[] { 0.672147865129419, -0.503357319234685 },
                new double[] { 0.638711507323203, -0.644310115155753 },
                new double[] { 0.536863923134139, -0.438197907521421 },
                new double[] { 0.496141960347812, -0.530750925839334 },
                new double[] { 0.906503239478175, -0.59100400335581 },
                new double[] { 0.604370405460113, -0.46954478102178 },
                new double[] { 0.412131895699799, -0.758049666960606 },
                new double[] { 0.423464497686766, -0.438725534434289 },
                new double[] { 0.351983120391112, -0.693723302359591 },
                new double[] { 0.600453835286623, -0.446793343407863 },
                new double[] { 0.585438337076168, -0.544511883828685 },
                new double[] { 0.727841528212698, -0.650301108602448 },
                new double[] { 0.751448391254333, -0.633046233976002 },
                new double[] { 0.857558106835016, -0.587237152739008 },
                new double[] { 0.554131023905099, -0.639630778761857 },
                new double[] { 0.604769997035484, -0.660127547060936 },
                new double[] { 0.532120384569746, -0.448864888884797 },
                new double[] { 0.62587117635701, -0.482512841662285 },
                new double[] { 0.580333409415421, -0.80962907380129 },
                new double[] { 0.601495554392805, -0.730598326012776 },
                new double[] { 0.593941507609862, 0.350118652741363 },
                new double[] { 0.357712432226073, 0.2963287302749 },
                new double[] { 0.551383385237947, 0.374412117881701 },
                new double[] { 0.690356212604399, 0.240090830766309 },
                new double[] { 0.462549608533101, 0.337029321214765 },
                new double[] { 0.613846624949793, 0.302978372516851 },
                new double[] { 0.632960280224768, 0.690169219132759 },
                new double[] { 0.56675518056767, 0.218090431387548 },
                new double[] { 0.511872653377024, 0.0692203349420495 },
                new double[] { 0.177443905363662, 0.23100145864499 },
                new double[] { 0.327851974844022, 0.415060901754569 },
                new double[] { 0.341124386412447, 0.416335789100053 },
                new double[] { 0.44860383164398, 0.214369753920447 },
                new double[] { 0.63110091195233, 0.59664872441043 },
                new double[] { 0.587620021924801, 0.451661866983025 },
                new double[] { 0.433140254056975, 0.56057876616672 },
                new double[] { 0.640109409731833, 0.298279362477078 },
                new double[] { 0.140413240631302, 0.233509735221199 },
                new double[] { 0.751771638050688, 0.407674765260726 },
                new double[] { 0.57522328805595, 0.296203994397562 },
                new double[] { 0.394007233177402, 0.32004606890218 },
                new double[] { 0.323309388831049, 0.188114883322704 },
                new double[] { 0.478221796731402, 0.409092441378802 },
                new double[] { 0.673650933463591, 0.561639241955278 },
                new double[] { 0.645748558652938, 0.282496300419708 },
                new double[] { 0.588553164739597, 0.428759787951118 },
                new double[] { 0.377052961673182, 0.466388880012159 },
                new double[] { 0.752164965657736, 0.289900686186869 },
                new double[] { 0.247467021467445, 0.361971115290112 },
                new double[] { 0.636721385361009, 0.399430035006511 }
            };

            Assert.IsTrue(Matrix.IsEqual(actualOutput, expectedOutput, 1e-5));

            // Assert the result equals the transformation of the input
            double[][] result     = target.Result.ToJagged();
            double[][] projection = target.Transform(inputs);
            Assert.IsTrue(Matrix.IsEqual(result, projection));
            Assert.IsTrue(Matrix.IsEqual(result, expectedOutput, 1e-6));


            // Batch classification must recover the training labels exactly.
            int[] actual2 = target.Classify(inputs);
            Assert.IsTrue(Matrix.IsEqual(actual2, output));

            // Per-sample classification (with score output) must agree with
            // the batch Classify call above.
            double[][] scores  = new double[inputs.Length][];
            int[]      actual3 = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                actual3[i] = target.Classify(inputs[i], out scores[i]);
            }

            Assert.IsTrue(Matrix.IsEqual(actual3, output));
            var actualMeans = target.projectedMeans;

            // Compare only the first five score rows against the baseline.
            scores = scores.Get(0, 5, null);
            //var str2 = scores.ToCSharp();

            var expected = new double[][] {
                new double[] { -0.0213345342185279, -1.48626837046456, -1.31720201011333 },
                new double[] { -0.0295574706116435, -1.35158673700292, -1.40393954892816 },
                new double[] { -0.0092314990039484, -1.5648696800027, -1.60459093003653 },
                new double[] { -0.108880936496527, -2.6251849668169, -1.99175104959092 },
                new double[] { -0.0126381832555252, -2.04103325730257, -1.97099575187989 }
            };

            Assert.IsTrue(Matrix.IsEqual(scores, expected, 1e-6));
        }
Beispiel #23
0
        /// <summary>
        /// Trains the classification pipeline on the given EEG record: first an
        /// LDA projection of the feature vectors, then a multi-class SVM
        /// (linear kernel, SMO) on the projected data. Reports coarse progress
        /// through the <c>Progress</c> callback at 10%, 35% and 100%.
        /// </summary>
        /// <param name="record">Record whose feature vectors (label + features) are used for training.</param>
        /// <exception cref="InvalidRecordException">Thrown when the record fails validation.</exception>
        public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List <double[]> outputInput = record.FeatureVectorsOutputInput;

            // Split the combined (label, features) rows into separate arrays.
            double[,] inputs = null;
            int[] outputs = null;
            Converters.Convert(outputInput, ref inputs, ref outputs);

            //output classes must be consecutive: 1,2,3 ...
            _lda = new LinearDiscriminantAnalysis(inputs, outputs);

            if (this.Progress != null)
            {
                this.Progress(10);
            }

            // Compute the analysis
            _lda.Compute();

            if (this.Progress != null)
            {
                this.Progress(35);
            }

            // Project the training data onto the discriminant space.
            double[,] projection = _lda.Transform(inputs);

            // Convert to the jagged-array format expected by the SVM learner.
            double[][] input2  = null;
            int[]      output2 = null;
            Converters.Convert(projection, outputs, ref input2, ref output2);

            int dimensions   = projection.GetLength(1);
            // Labels are consecutive starting at 1 (see above), so the maximum
            // label equals the number of classes.
            int output_count = outputs.Max();

            // Create a new Linear kernel
            IKernel kernel = new Linear();

            // Create a new Multi-class Support Vector Machine with one input,
            //  using the linear kernel and for four disjoint classes.
            _machine = new MulticlassSupportVectorMachine(dimensions, kernel, output_count);

            // Create the Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(_machine, input2, output2);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Run the learning algorithm; the returned training error is
            // intentionally not used here.
            teacher.Run();

            if (this.Progress != null)
            {
                this.Progress(100);
            }
        }
        /// <summary>
        /// Verifies LDA projection and classification on the two-class sample
        /// data from Gutierrez-Osuna's lecture notes:
        /// http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf
        /// </summary>
        public void ClassifyTest1()
        {
            double[][] samples =
            {
                // Class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] classes =
            {
                0, 0, 0, 0, 0, // first five samples belong to class 0
                1, 1, 1, 1, 1  // last five samples belong to class 1
            };

            // Build and compute the discriminant analysis.
            var analysis = new LinearDiscriminantAnalysis(samples, classes);
            analysis.Compute();

            // Projection obtained directly must match the one obtained
            // through the classifier's transform pipeline.
            double[][] projected = analysis.Transform(samples);
            double[][] viaClassifier = analysis.Classifier.First.Transform(samples);
            Assert.IsTrue(projected.IsEqual(viaClassifier));

            int[] decisions = analysis.Classify(samples);

            // Expected discriminant-space coordinates for each sample.
            double[][] expectedProjection = new double[][]
            {
                new double[] { 4.42732558139535, 1.96296296296296 },
                new double[] { 3.7093023255814, -2.51851851851852 },
                new double[] { 3.28197674418605, -1.51851851851852 },
                new double[] { 5.56395348837209, -3.77777777777778 },
                new double[] { 5.7093023255814, -1.03703703703704 },
                new double[] { 13.2732558139535, -3.33333333333333 },
                new double[] { 9.41860465116279, -3.55555555555556 },
                new double[] { 11.1366279069767, 1.66666666666667 },
                new double[] { 10.9912790697674, -1.07407407407407 },
                new double[] { 13.4186046511628, -0.592592592592593 }
            };

            Assert.IsTrue(expectedProjection.IsEqual(projected, 1e-6));

            // Each sample must be classified back into its own class:
            // first half as 0, second half as 1.
            for (int i = 0; i < decisions.Length; i++)
            {
                Assert.AreEqual(i < 5 ? 0 : 1, decisions[i]);
            }
        }
        /// <summary>
        /// Checks that asking Transform for two components after a full Learn
        /// gives the same projection as re-learning with NumberOfOutputs = 2.
        /// </summary>
        public void large_transform_few_components()
        {
            const int n = 100;

            double[][] samples = Jagged.Random(n, n);
            int[] classLabels = Vector.Random(n, 0, 10);

            // Learn with the default number of outputs, then request
            // a 2-column projection directly from Transform.
            var analysis = new LinearDiscriminantAnalysis();
            var classifier = analysis.Learn(samples, classLabels);

            var reference = analysis.Transform(samples, 2);
            Assert.AreEqual(n, reference.Rows());
            Assert.AreEqual(2, reference.Columns());

            // Re-learn constrained to two outputs; the classifier's own
            // projection must match the reference exactly.
            analysis.NumberOfOutputs = 2;
            classifier = analysis.Learn(samples, classLabels);

            var constrained = classifier.First.Transform(samples);
            Assert.AreEqual(n, constrained.Rows());
            Assert.AreEqual(2, constrained.Columns());

            Assert.IsTrue(constrained.IsEqual(reference));
        }
Beispiel #26
0
        /// <summary>
        /// Demo: projects 2-D samples with Linear Discriminant Analysis (LDA),
        /// trains a Multilayer Perceptron (MLP) on the projected data, and
        /// finally classifies one new sample.
        /// </summary>
        /// <param name="args">Unused command-line arguments.</param>
        static void Main(string[] args)
        {
            Console.WriteLine("This is a demo application that combines Linear Discriminant Analysis (LDA) and Multilayer Perceptron (MLP).");
            double[,] inputs =
            {
                {  4,  1 },
                {  2,  4 },
                {  2,  3 },
                {  3,  6 },
                {  4,  4 },
                {  9, 10 },
                {  6,  8 },
                {  9,  5 },
                {  8,  7 },
                { 10,  8 }
            };

            int[] output =
            {
                1, 1, 2, 1, 1, 2, 2, 2, 1, 2
            };

            Console.WriteLine("\r\nProcessing sample data, please wait...");

            //1.1 Build the discriminant analysis from samples and labels.
            var lda = new LinearDiscriminantAnalysis(inputs, output);

            //1.2 Compute the analysis
            lda.Compute();

            //1.3 Project the samples onto the discriminant space.
            double[,] projection = lda.Transform(inputs);

            // LDA and the MLP use slightly different data formats:
            // double[,] vs. double[][], and the MLP (sigmoid output) needs
            // class targets scaled into the range [0..1].
            #region conversions
            int vector_count = projection.GetLength(0);
            int dimensions   = projection.GetLength(1);

            //====================================================================

            // convert for the neural network
            double[][] input2  = new double[vector_count][];
            double[][] output2 = new double[vector_count][];

            for (int i = 0; i < input2.Length; i++)
            {
                input2[i] = new double[dimensions];
                for (int j = 0; j < dimensions; j++)
                {
                    input2[i][j] = projection[i, j];
                }

                // Scale integer class labels into [0..1], because the network
                // uses a sigmoid output neuron.
                output2[i] = new double[] { Convert.ToDouble(output[i]) / 10 };
            }
            #endregion


            //2.1 create neural network
            ActivationNetwork network = new ActivationNetwork(
                new SigmoidFunction(2),
                dimensions, // inputs neurons in the network
                dimensions, // neurons in the first layer
                1);         // one neuron in the second layer

            //2.2 create teacher
            BackPropagationLearning teacher = new BackPropagationLearning(network);

            //2.3 train for a fixed number of epochs; the per-epoch error
            // returned by RunEpoch could instead be monitored as a stopping
            // criterion, but this demo simply runs a fixed budget.
            for (int epoch = 0; epoch <= 1000000; epoch++)
            {
                teacher.RunEpoch(input2, output2);
            }

            //====================================================================

            //3. Classify: project the new sample first, then feed the network.
            double[,] sample          = { { 10, 8 } };
            double[,] projectedSample = lda.Transform(sample);

            double[] projectedSample2 =
            {
                projectedSample[0, 0],
                projectedSample[0, 1]
            };

            double[] networkOutput = network.Compute(projectedSample2);

            Console.WriteLine("========================");

            // Undo the [0..1] scaling: round first, then multiply by 10
            // (the labels were divided by 10 before training).
            // If the expected result is not produced:
            // - rounding might be a problem
            // - try more training
            Console.WriteLine(Math.Round(networkOutput[0], 1, MidpointRounding.AwayFromZero) * 10);
            Console.ReadLine();
        }
Beispiel #27
0
        /// <summary>
        /// Generates baseline comparison solutions (OneR and LDA) trained on a
        /// reduced dataset containing only the variables actually used by the
        /// symbolic model, and appends them to the base class' solutions.
        /// </summary>
        protected override IEnumerable <IClassificationSolution> GenerateClassificationSolutions()
        {
            var solutionsBase = base.GenerateClassificationSolutions();
            var solutions     = new List <IClassificationSolution>();

            var symbolicSolution = Content;

            // does not support lagged variables
            if (symbolicSolution.Model.SymbolicExpressionTree.IterateNodesPrefix().OfType <LaggedVariableTreeNode>().Any())
            {
                return(solutionsBase);
            }

            var problemData = (IClassificationProblemData)symbolicSolution.ProblemData.Clone();

            if (!problemData.TrainingIndices.Any())
            {
                return(null);                              // don't create comparison models if the problem does not have a training set (e.g. loaded into an existing model)
            }
            // Collect every variable referenced anywhere in the symbolic model.
            var usedVariables = Content.Model.SymbolicExpressionTree.IterateNodesPostfix()
                                .OfType <IVariableTreeNode>()
                                .Select(node => node.VariableName).ToArray();

            var usedDoubleVariables = usedVariables
                                      .Where(name => problemData.Dataset.VariableHasType <double>(name))
                                      .Distinct();

            var usedFactorVariables = usedVariables
                                      .Where(name => problemData.Dataset.VariableHasType <string>(name))
                                      .Distinct();

            // gkronber: for binary factors we actually produce a binary variable in the new dataset
            // but only if the variable is not used as a full factor anyway (LR creates binary columns anyway)
            var usedBinaryFactors =
                Content.Model.SymbolicExpressionTree.IterateNodesPostfix().OfType <BinaryFactorVariableTreeNode>()
                .Where(node => !usedFactorVariables.Contains(node.VariableName))
                .Select(node => Tuple.Create(node.VariableValue, node.VariableValue));

            // create a new problem and dataset
            // NOTE: variableValues below must be built in the same order as
            // variableNames (doubles, factors, binary factors, then target).
            var variableNames =
                usedDoubleVariables
                .Concat(usedFactorVariables)
                .Concat(usedBinaryFactors.Select(t => t.Item1 + "=" + t.Item2))
                .Concat(new string[] { problemData.TargetVariable })
                .ToArray();
            var variableValues =
                usedDoubleVariables.Select(name => (IList)problemData.Dataset.GetDoubleValues(name).ToList())
                .Concat(usedFactorVariables.Select(name => problemData.Dataset.GetStringValues(name).ToList()))
                .Concat(
                    // create binary variable
                    usedBinaryFactors.Select(t => problemData.Dataset.GetReadOnlyStringValues(t.Item1).Select(val => val == t.Item2 ? 1.0 : 0.0).ToList())
                    )
                .Concat(new[] { problemData.Dataset.GetDoubleValues(problemData.TargetVariable).ToList() });

            var newDs          = new Dataset(variableNames, variableValues);
            var newProblemData = new ClassificationProblemData(newDs, variableNames.Take(variableNames.Length - 1), variableNames.Last());

            // Carry over the original partitioning so the baselines are
            // evaluated on exactly the same training/test split.
            newProblemData.PositiveClass           = problemData.PositiveClass;
            newProblemData.TrainingPartition.Start = problemData.TrainingPartition.Start;
            newProblemData.TrainingPartition.End   = problemData.TrainingPartition.End;
            newProblemData.TestPartition.Start     = problemData.TestPartition.Start;
            newProblemData.TestPartition.End       = problemData.TestPartition.End;

            // Baseline learners may reject the reduced problem (e.g. unsupported
            // variable types); those failures are deliberately ignored.
            try {
                var oneR = OneR.CreateOneRSolution(newProblemData);
                oneR.Name = "OneR Classification Solution (subset)";
                solutions.Add(oneR);
            } catch (NotSupportedException) { } catch (ArgumentException) { }
            try {
                var lda = LinearDiscriminantAnalysis.CreateLinearDiscriminantAnalysisSolution(newProblemData);
                lda.Name = "Linear Discriminant Analysis Solution (subset)";
                solutions.Add(lda);
            } catch (NotSupportedException) { } catch (ArgumentException) { }

            return(solutionsBase.Concat(solutions));
        }
        /// <summary>
        /// Verifies LDA classification on the two-class example from
        /// Gutierrez-Osuna's lecture notes:
        /// http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf
        /// </summary>
        public void ClassifyTest1()
        {
            double[][] points =
            {
                // Class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] labels =
            {
                0, 0, 0, 0, 0, // first five points belong to class 0
                1, 1, 1, 1, 1  // last five points belong to class 1
            };

            // Build and compute the analysis for the given instances.
            var lda = new LinearDiscriminantAnalysis(points, labels);
            lda.Compute();

            // Project the data into the discriminant space (the projection
            // itself is not asserted here; the call must simply succeed).
            double[][] projection = lda.Transform(points);

            // Classify every point and check it is assigned its own label.
            int[] predicted = lda.Classify(points);

            for (int i = 0; i < predicted.Length; i++)
            {
                Assert.AreEqual(labels[i], predicted[i]);
            }
        }
Beispiel #29
0
        /// <summary>
        /// Feeds one 4-second EEG segment into the training pipeline. Computes
        /// band power, accumulates open-eye / close-eye feature sets depending
        /// on the tag, and once both sets are complete, trains an LDA model
        /// over the merged data.
        /// </summary>
        /// <param name="data">Raw EEG samples for this segment.</param>
        /// <param name="tagForData">Tag per segment; tagForData[0] == 0 marks open-eye, 1 marks close-eye.</param>
        /// <returns>True once enough data has been collected and the LDA model is trained (READY_TO_ONLINE).</returns>
        public bool setData(double[][] data, int [] tagForData)
        {
            // Original argument validation, currently disabled:
            /*if (data.GetLength (0) != tagForData.Length)
             *  throw new ArgumentException (
             *      "The number of rows in the input array must match the number of given tagForData.");
             * if (data.GetLength (1) != 4 * Buffer.SRATE)
             *      throw new ArgumentException (
             *         "The number of rows in the input array must a 4sec-length data");
             */
            computePower(data);

            // Only the first tag is inspected; the whole segment is assumed to
            // share one label. Feature extraction runs until the corresponding
            // training set is full.
            if (tagForData[0] == 0 && !getTrainDataStateArray(TRAINING_OPENEYE))
            {
                extractFeatures_open_eye();
            }
            if (tagForData[0] == 1 && !getTrainDataStateArray(TRAINING_CLOSEEYE))
            {
                extractFeatures_close_eye();
            }
            if (getTrainDataStateArray(TRAINING_ALL))   //TRAINING_CLOSEEYE_FULL && TRAINING_OPENEYE_FULL && !READY_TO_ONLINE) {
            //form the dataset
            {
                // Assumes every linked-list node holds a block of the same
                // shape as the first one — TODO confirm.
                double[][] inputs1 = trainingDataOpenEye.First.Value;
                double[][] inputs2 = trainingDataCloseEye.First.Value;
                int        pts     = inputs1.GetLength(0) * trainingDataOpenEye.Count +
                                     inputs2.GetLength(0) * trainingDataCloseEye.Count;
                // Tag vector: 0 for open-eye rows, 1 for close-eye rows
                // (close-eye rows come after all open-eye rows).
                int[] tag = new int[pts];
                for (int i = 0; i < pts; i++)
                {
                    if (i >= ((inputs1.GetLength(0) * trainingDataOpenEye.Count)))
                    {
                        tag[i] = 1;
                    }
                }
                double[][] input = new double[pts][];
                double[][] temp;
                int        ptsIndex = 0;

                //merge into one array
                // NOTE(review): both loops re-read First.Value on every
                // iteration, so the FIRST block is copied Count times instead
                // of iterating over the list's nodes. If the lists hold more
                // than one distinct block this looks like a bug — confirm
                // whether nodes are dequeued elsewhere or iteration was intended.
                for (int i = 0; i < trainingDataOpenEye.Count; i++)
                {
                    temp = trainingDataOpenEye.First.Value;
                    for (int j = 0; j < temp.GetLength(0); j++, ptsIndex++)
                    {
                        input[ptsIndex] = temp[j];
                    }
                }
                for (int i = 0; i < trainingDataCloseEye.Count; i++)
                {
                    temp = trainingDataCloseEye.First.Value;
                    for (int j = 0; j < temp.GetLength(0); j++, ptsIndex++)
                    {
                        input[ptsIndex] = temp[j];
                    }
                }
                //debug

                /*Console.Write ("power in training: ");
                 * for (int i = 0; i < input.Length; i++) {
                 *  Console.Write (input[i].ToString()+",");
                 * }
                 * Console.WriteLine("");*/
                //-------------------------

                // Train the discriminant analysis on the merged dataset.
                lda = new LinearDiscriminantAnalysis(input, tag);
                lda.Compute();  // Compute the analysis

                READY_TO_ONLINE = true;
            }
            return(READY_TO_ONLINE);
        }
        public void ComputeTest3()
        {
            // Schölkopf KPCA toy example
            double[][] inputs = KernelDiscriminantAnalysisTest.scholkopf().ToJagged();

            int[] output = Matrix.Expand(new int[,] { { 1 }, { 2 }, { 3 } }, new int[] { 30, 30, 30 }).GetColumn(0);

            var target = new LinearDiscriminantAnalysis(inputs, output);

            target.Compute();


            double[][] actualOutput = target.Transform(inputs, 2);

            double[][] expectedOutput = new double[][] 
            {
                new double[] { -0.538139989229878, -0.121488441426448 },
                new double[] { -0.520567977909383, -0.236347775257103 },
                new double[] { -0.613477771536265, -0.237553378277353 },
                new double[] { -0.881409292261883, 0.0935329540430248 },
                new double[] { -0.786030327227691, -0.194447244320605 },
                new double[] { -0.551442781305912, -0.0123559223681317 },
                new double[] { -0.654158684224005, -0.197674316111905 },
                new double[] { -0.559262527603992, 0.013941098843778 },
                new double[] { -0.66411263033584, -0.150490536781379 },
                new double[] { -0.450278115670319, -0.26635277047329 },
                new double[] { -0.754277919814726, -0.362102431498804 },
                new double[] { -0.734928584895253, -0.248980106866025 },
                new double[] { -0.653608644698921, 0.0143647201181394 },
                new double[] { -0.760931829205159, -0.210515053383166 },
                new double[] { -0.618516474044195, -0.142285367330635 },
                new double[] { -0.779342671809792, -0.141199637690287 },
                new double[] { -0.735924645881001, -0.146617711795974 },
                new double[] { -0.785744941649802, -0.31168984794763 },
                new double[] { -0.669124608334209, -0.420106774148463 },
                new double[] { -0.824474062918818, 0.147088211780247 },
                new double[] { -0.799320425464541, -0.0637527478684568 },
                new double[] { -0.663385572908364, -0.341675337652223 },
                new double[] { -0.711489490612721, -0.285076461900782 },
                new double[] { -0.629974516987287, -0.0793021800418604 },
                new double[] { -0.65653220838978, -0.215831476310217 },
                new double[] { -0.732028761895192, -0.0344445204239324 },
                new double[] { -0.747862524505661, -0.0387281405057906 },
                new double[] { -0.584471308297719, -0.146019839184912 },
                new double[] { -0.505999843470041, -0.292203766994798 },
                new double[] { -0.753145346001892, -0.344521076589941 },
                new double[] { 0.524001176904191, -0.64158358593467 },
                new double[] { 0.423231837049123, -0.286159521674357 },
                new double[] { 0.656426922526874, -0.734236743185728 },
                new double[] { 0.400687334850924, -0.55115062988607 },
                new double[] { 0.636240473795815, -0.748303834209756 },
                new double[] { 0.434843292522556, -0.566740271085617 },
                new double[] { 0.6104713046872, -0.678967931597066 },
                new double[] { 0.705262787734728, -0.640414054245901 },
                new double[] { 0.447832238019099, -0.661180602320438 },
                new double[] { 0.659661046824861, -0.630212303468225 },
                new double[] { 0.672147865129419, -0.503357319234685 },
                new double[] { 0.638711507323203, -0.644310115155753 },
                new double[] { 0.536863923134139, -0.438197907521421 },
                new double[] { 0.496141960347812, -0.530750925839334 },
                new double[] { 0.906503239478175, -0.59100400335581 },
                new double[] { 0.604370405460113, -0.46954478102178 },
                new double[] { 0.412131895699799, -0.758049666960606 },
                new double[] { 0.423464497686766, -0.438725534434289 },
                new double[] { 0.351983120391112, -0.693723302359591 },
                new double[] { 0.600453835286623, -0.446793343407863 },
                new double[] { 0.585438337076168, -0.544511883828685 },
                new double[] { 0.727841528212698, -0.650301108602448 },
                new double[] { 0.751448391254333, -0.633046233976002 },
                new double[] { 0.857558106835016, -0.587237152739008 },
                new double[] { 0.554131023905099, -0.639630778761857 },
                new double[] { 0.604769997035484, -0.660127547060936 },
                new double[] { 0.532120384569746, -0.448864888884797 },
                new double[] { 0.62587117635701, -0.482512841662285 },
                new double[] { 0.580333409415421, -0.80962907380129 },
                new double[] { 0.601495554392805, -0.730598326012776 },
                new double[] { 0.593941507609862, 0.350118652741363 },
                new double[] { 0.357712432226073, 0.2963287302749 },
                new double[] { 0.551383385237947, 0.374412117881701 },
                new double[] { 0.690356212604399, 0.240090830766309 },
                new double[] { 0.462549608533101, 0.337029321214765 },
                new double[] { 0.613846624949793, 0.302978372516851 },
                new double[] { 0.632960280224768, 0.690169219132759 },
                new double[] { 0.56675518056767, 0.218090431387548 },
                new double[] { 0.511872653377024, 0.0692203349420495 },
                new double[] { 0.177443905363662, 0.23100145864499 },
                new double[] { 0.327851974844022, 0.415060901754569 },
                new double[] { 0.341124386412447, 0.416335789100053 },
                new double[] { 0.44860383164398, 0.214369753920447 },
                new double[] { 0.63110091195233, 0.59664872441043 },
                new double[] { 0.587620021924801, 0.451661866983025 },
                new double[] { 0.433140254056975, 0.56057876616672 },
                new double[] { 0.640109409731833, 0.298279362477078 },
                new double[] { 0.140413240631302, 0.233509735221199 },
                new double[] { 0.751771638050688, 0.407674765260726 },
                new double[] { 0.57522328805595, 0.296203994397562 },
                new double[] { 0.394007233177402, 0.32004606890218 },
                new double[] { 0.323309388831049, 0.188114883322704 },
                new double[] { 0.478221796731402, 0.409092441378802 },
                new double[] { 0.673650933463591, 0.561639241955278 },
                new double[] { 0.645748558652938, 0.282496300419708 },
                new double[] { 0.588553164739597, 0.428759787951118 },
                new double[] { 0.377052961673182, 0.466388880012159 },
                new double[] { 0.752164965657736, 0.289900686186869 },
                new double[] { 0.247467021467445, 0.361971115290112 },
                new double[] { 0.636721385361009, 0.399430035006511 } 
            };

            Assert.IsTrue(Matrix.IsEqual(actualOutput, expectedOutput, 1e-5));

            // Assert the result equals the transformation of the input
            double[][] result = target.Result.ToJagged();
            double[][] projection = target.Transform(inputs);
            Assert.IsTrue(Matrix.IsEqual(result, projection));
            Assert.IsTrue(Matrix.IsEqual(result, expectedOutput, 1e-6));


            int[] actual2 = target.Classify(inputs);
            Assert.IsTrue(Matrix.IsEqual(actual2, output));

            double[][] scores = new double[inputs.Length][];
            int[] actual3 = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
                actual3[i] = target.Classify(inputs[i], out scores[i]);

            Assert.IsTrue(Matrix.IsEqual(actual3, output));
            var actualMeans = target.projectedMeans;

            scores = scores.Get(0, 5, null);
            //var str2 = scores.ToCSharp();

            var expected = new double[][] {
                new double[] { -0.0213345342185279, -1.48626837046456, -1.31720201011333 },
                new double[] { -0.0295574706116435, -1.35158673700292, -1.40393954892816 },
                new double[] { -0.0092314990039484, -1.5648696800027, -1.60459093003653 },
                new double[] { -0.108880936496527, -2.6251849668169, -1.99175104959092 },
                new double[] { -0.0126381832555252, -2.04103325730257, -1.97099575187989 } 
            };

            Assert.IsTrue(Matrix.IsEqual(scores, expected, 1e-6));
        }
 /// <summary>
 /// Creates the classifier with a fresh LDA learning algorithm and then
 /// restores previously saved state from the given file.
 /// </summary>
 /// <param name="path">Path of the saved model to load.
 /// NOTE(review): assumes the file exists and matches the format written by
 /// the corresponding Save routine — confirm Load's failure behavior.</param>
 public LinearDiscriminantAnalysisClassifier(string path)
 {
     LearningAlgorithm = new LinearDiscriminantAnalysis();
     Load(path);
 }
        public void ClassifyTest1()
        {
            // Sample instances taken from Gutierrez-Osuna's LDA lecture example:
            // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf
            double[][] inputs =
            {
                // Class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] output =
            {
                0, 0, 0, 0, 0, // first five instances belong to class 0
                1, 1, 1, 1, 1  // last five instances belong to class 1
            };

            // Build and compute the discriminant analysis for these instances.
            var analysis = new LinearDiscriminantAnalysis(inputs, output);
            analysis.Compute();

            // Project the data into the discriminant space.
            double[][] projected = analysis.Transform(inputs);

            // The classifier's first stage must produce the same projection.
            double[][] viaClassifier = analysis.Classifier.First.Transform(inputs);
            Assert.IsTrue(projected.IsEqual(viaClassifier));

            // Classify every instance.
            int[] predicted = analysis.Classify(inputs);

            // Expected projection values for the ten instances above.
            double[][] expected = new double[][]
            {
                new double[] { 4.42732558139535, 1.96296296296296 },
                new double[] { 3.7093023255814, -2.51851851851852 },
                new double[] { 3.28197674418605, -1.51851851851852 },
                new double[] { 5.56395348837209, -3.77777777777778 },
                new double[] { 5.7093023255814, -1.03703703703704 },
                new double[] { 13.2732558139535, -3.33333333333333 },
                new double[] { 9.41860465116279, -3.55555555555556 },
                new double[] { 11.1366279069767, 1.66666666666667 },
                new double[] { 10.9912790697674, -1.07407407407407 },
                new double[] { 13.4186046511628, -0.592592592592593 }
            };

            Assert.IsTrue(expected.IsEqual(projected, 1e-6));

            // The first half must be assigned class 0 and the second half class 1.
            for (int i = 0; i < predicted.Length; i++)
            {
                Assert.AreEqual(i < 5 ? 0 : 1, predicted[i]);
            }
        }
Example #33
0
        /// <summary>
        /// Trains the combined LDA + neural-network model from the feature
        /// vectors of the given EEG record, reporting progress (10..100)
        /// through the <c>Progress</c> callback when one is attached.
        /// </summary>
        /// <param name="record">Record whose <c>FeatureVectorsOutputInput</c>
        /// rows are converted into input features and class labels.</param>
        /// <exception cref="InvalidRecordException">Thrown when the record
        /// fails <c>EEGRecordStorage.IsRecordValid</c>.</exception>
        public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List<double[]> outputInput = record.FeatureVectorsOutputInput;

            double[,] inputs = null;
            int[] outputs = null;
            Converters.Convert(outputInput, ref inputs, ref outputs);

            // output classes must be consecutive: 1,2,3 ...
            _lda = new LinearDiscriminantAnalysis(inputs, outputs);

            this.Progress?.Invoke(10);

            // Compute the analysis
            _lda.Compute();

            this.Progress?.Invoke(35);

            // Project the raw features into discriminant space; the network is
            // trained on this lower-dimensional representation.
            double[,] projection = _lda.Transform(inputs);

            // convert for NN format
            double[][] input2 = null;
            double[][] output2 = null;
            Converters.Convert(projection, outputs, ref input2, ref output2);

            // create neural network
            int dimensions = projection.GetLength(1);
            int output_count = outputs.Max(); // relies on labels being consecutive from 1

            _network = new ActivationNetwork(
                new SigmoidFunction(2),
                dimensions,    // inputs neurons in the network
                dimensions,    // neurons in the first layer
                output_count); // output neurons

            // create teacher
            BackPropagationLearning teacher = new BackPropagationLearning(_network);

            int ratio = 4;
            NNTrainDataIterator iter = new NNTrainDataIterator(ratio, input2, output2);

            // actual training: each pass splits the data into different
            // 'train' and 'validate' sets
            while (iter.HasMore)
            {
                #region get new data
                double[][] trainDataInput;
                double[][] trainDataOutput;
                double[][] validateDataInput;
                double[][] validateDataOutput;

                iter.NextData(out trainDataInput, out trainDataOutput, out validateDataInput, out validateDataOutput);
                #endregion

                // Sentinel values chosen so the early-stopping condition below
                // cannot trigger before real validation errors are measured.
                double old_val_error1 = 100002;
                double old_val_error2 = 100001;
                double new_val_error  = 100000;

                //We do the training over the 'train' set until the error of the 'validate' set start to increase.
                //This way we prevent overfitting.
                while (((old_val_error1 - new_val_error) > 0.001) || ((old_val_error2 - old_val_error1) > 0.001))
                {
                    RunEpoch(teacher, trainDataInput, trainDataOutput, true);

                    old_val_error2 = old_val_error1;
                    old_val_error1 = new_val_error;

                    new_val_error = CalculateError(validateDataInput, validateDataOutput);
                }

                // NOTE(review): 65 / ratio is integer division (16 for ratio=4),
                // so per-iteration progress tops out just below 100; the final
                // callback below reports 100 explicitly.
                this.Progress?.Invoke(35 + (iter.CurrentIterationIndex) * (65 / ratio));
            }

            //now we have a model of a NN+LDA which we can use for classification
            this.Progress?.Invoke(100);
        }
 /// <summary>
 /// Creates an untrained classifier backed by a fresh LDA learning algorithm.
 /// </summary>
 public LinearDiscriminantAnalysisClassifier()
 {
     LearningAlgorithm = new LinearDiscriminantAnalysis();
 }
Example #35
0
        /// <summary>
        /// Runs the full analysis pipeline on the grid's data source:
        /// descriptive statistics, then a two-dimensional LDA projection,
        /// and populates the result grids and charts.
        /// Fix: removed the unused <c>rows</c>/<c>cols</c> locals.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event data (unused).</param>
        private void button1_Click(object sender, EventArgs e)
        {
            // Finishes and save any pending changes to the given data
            dgvAnalysisSource.EndEdit();

            // Nothing to analyze without a bound data source.
            if (dgvAnalysisSource.DataSource == null) return;

            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            // Creates a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(sourceMatrix, sourceColumns);
            sda.Compute();

            // Populates statistics overview tab with analysis data
            dgvDistributionMeasures.DataSource = sda.Measures;

            // Get only the input values (exclude the class label indicator column)
            double[,] data = sourceMatrix.Submatrix(null, startColumn: 0, endColumn: 1);

            // Get only the associated labels (third column of the source)
            int[] labels = sourceMatrix.GetColumn(2).ToInt32();

            // Creates the Linear Discriminant Analysis of the given source
            lda = new LinearDiscriminantAnalysis(data, labels);

            // Computes the analysis
            lda.Compute();

            // Performs the transformation of the data using two dimensions
            double[,] result = lda.Transform(data, 2);

            // Create a new plot with the original Z column appended as the class
            double[,] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;

            // Create output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, sourceColumns);

            // Populates discriminants overview with analysis data
            dgvPrincipalComponents.DataSource = lda.Discriminants;
            dgvFeatureVectors.DataSource = new ArrayDataView(lda.DiscriminantMatrix);
            dgvScatterBetween.DataSource = new ArrayDataView(lda.ScatterBetweenClass);
            dgvScatterWithin.DataSource = new ArrayDataView(lda.ScatterWithinClass);
            dgvScatterTotal.DataSource = new ArrayDataView(lda.ScatterMatrix);

            // Populates classes information
            dgvClasses.DataSource = lda.Classes;

            CreateComponentCumulativeDistributionGraph(graphCurve);
            CreateComponentDistributionGraph(graphShare);
        }
 /// <summary>
 /// Fits the discriminant analysis on the current independent/dependent
 /// values and caches the per-variable statistics it produces.
 /// </summary>
 private void buildModel()
 {
     // Lazily build the design matrices on first use.
     if (independentVls is null)
     {
         getMatrix();
     }

     lda = new Accord.Statistics.Analysis.LinearDiscriminantAnalysis(independentVls, dependentVls);
     lda.Compute();

     // Cache the statistics computed by the analysis.
     meanValues = lda.Means;
     stdValues = lda.StandardDeviations;
 }