Example #1
        public void init(List<double[]> patterns,List<int> labels)
        {
            IKernel kernel = createKernel(3);
            double complexity = 0.00001;
            double tolerance = 0.2;
            int cacheSize = 500;
            SelectionStrategy strategy = SelectionStrategy.Sequential;

            // Create the Multi-class Support Vector Machine using the selected Kernel
            ksvm = new MulticlassSupportVectorMachine(128, kernel, 4);

            // Create the learning algorithm using the machine and the training data
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, patterns.ToArray(), labels.ToArray())
            {
                // Configure the learning algorithm
                Algorithm = (svm, classInputs, classOutputs, i, j) =>

                    // Use Platt's Sequential Minimal Optimization algorithm
                    new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                    {
                        Complexity = complexity,
                        Tolerance = tolerance,
                        CacheSize = cacheSize,
                        Strategy = strategy,
                        Compact = (kernel is Linear)
                    }
            };
            double error = ml.Run();
            Console.WriteLine(error);
        }
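A note on the example above: once init() has run, the trained ksvm field can label new patterns directly. A minimal usage sketch (Classify is a hypothetical helper, not part of the original code; the input must have the same length, 128, as the training patterns):

        public int Classify(double[] pattern)
        {
            // Returns the index of the predicted class (0..3 for this machine).
            return ksvm.Compute(pattern);
        }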
        public double v3_0_1()
        {
            var ksvm = new MulticlassSupportVectorMachine(784, new Polynomial(2), 10);
            var smo = new MulticlassSupportVectorLearning(ksvm, problem.Training.Inputs, problem.Training.Output);
            smo.Algorithm = (svm, x, y, i, j) => new SequentialMinimalOptimization(svm, x, y);

            return smo.Run(computeError: false);
        }
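Because Run(computeError: false) skips the error computation and returns zero, the training error has to be computed by hand if it is needed. A minimal sketch under the same assumptions as above (hypothetical method name; assumes ksvm was kept in a field rather than a local variable):

        public double v3_0_1_TrainingError()
        {
            double[][] x = problem.Training.Inputs;
            int[] y = problem.Training.Output;

            // Count how many training samples the machine now misclassifies.
            int misses = 0;
            for (int i = 0; i < x.Length; i++)
                if (ksvm.Compute(x[i]) != y[i])
                    misses++;

            return misses / (double)x.Length;
        }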
        // Train the machine on all existing training data
        public SVM(string TrainedDataInputFile)
        {
            _engine = new TesseractEngine(@"./tessdata3", "eng", EngineMode.TesseractAndCube);
            _engine.SetVariable("tessedit_char_whitelist", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ");
            _engine.SetVariable("tessedit_char_blacklist", "¢§+~»~`!@#$%^&*()_+-={}[]|\\:\";\'<>?,./");

            string[] TrainedData = Directory.GetFiles(TrainedDataInputFile, "*.png");
            double[][] inputs = new double[TrainedData.Length][];
            double[] InputArray = new double[784];
            int[] Outputs = new int[TrainedData.Length];

            for (int i = 0; i < TrainedData.Length; i++)
            {
                string filename = Path.GetFileNameWithoutExtension(TrainedData[i]);
                Bitmap TrainingImage = new Bitmap(TrainedData[i]);
                string[] split = filename.Split('.');
                for (int j = 0; j < 28; j++)
                {
                    for (int k = 0; k < 28; k++)
                    {
                        if (!TrainingImage.GetPixel(j, k).Name.Equals("ffffffff"))
                            InputArray[j * 28 + k] = 1;
                        else
                            InputArray[j * 28 + k] = 0;
                    }
                }

                inputs[i] = InputArray;
                Outputs[i] = Convert.ToInt32(split[0]);
                InputArray = new double[784];
            }

            IKernel kernel;
            kernel = new Polynomial(2, 0);
            ksvm = new MulticlassSupportVectorMachine(784, kernel, 2);
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, inputs, Outputs);

            double complexity = 1;   // TODO: tune these three parameters carefully
            double epsilon = 0.001;
            double tolerance = 0.2;

            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                smo.Complexity = complexity;  // Cost parameter (C) for the SVM
                smo.Epsilon = epsilon;
                smo.Tolerance = tolerance;
                return smo;
            };

            // Train the machines. It should take a while.
            double error = ml.Run();
        }
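The constructor above only trains the machine; recognizing a new glyph requires the same 28x28 binarization before calling Compute. A minimal companion sketch (Recognize is a hypothetical helper, not part of the original class):

        public int Recognize(Bitmap glyph)
        {
            // Binarize the glyph exactly like the training loop: 1 for any
            // non-white pixel, 0 for white.
            double[] input = new double[784];
            for (int j = 0; j < 28; j++)
                for (int k = 0; k < 28; k++)
                    input[j * 28 + k] =
                        glyph.GetPixel(j, k).Name.Equals("ffffffff") ? 0 : 1;

            return ksvm.Compute(input);
        }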
Example #4
File: Svm.cs Project: ejulio/signa
        public void Aprender(IDadosSinaisEstaticos dados)
        {
            var kernel = new Polynomial(degree: 3, constant: 1);
            svm = new MulticlassSupportVectorMachine(QuantidadeIndeterminadaDeCaracteristicas, kernel, dados.QuantidadeClasses);

            var teacher = new MulticlassSupportVectorLearning(svm, dados.CaracteristicasSinais, dados.IdentificadoresSinais)
            {
                Algorithm = (machine, classInputs, classOutputs, j, k) =>
                    new SequentialMinimalOptimization(machine, classInputs, classOutputs)
                    {
                        Complexity = 1
                    }
            };

            teacher.Run();
        }
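For dados.QuantidadeClasses classes, the one-vs-one machine above trains QuantidadeClasses * (QuantidadeClasses - 1) / 2 binary SVMs internally. A sketch of the matching prediction call (Reconhecer is a hypothetical counterpart to Aprender):

        public int Reconhecer(double[] caracteristicasSinal)
        {
            // Returns the identifier of the predicted sign class.
            return svm.Compute(caracteristicasSinal);
        }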
        public void RunTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Sample data
            //   The following is a simple auto-association function
            //   in which each input corresponds to its own class. This
            //   problem should be easily solved using a Linear kernel.

            // Sample input data
            double[][] inputs =
            {
                new double[] { 0 },
                new double[] { 3 },
                new double[] { 1 },
                new double[] { 2 },
            };

            // Output for each of the inputs
            int[] outputs = { 0, 3, 1, 2 };


            // Create a new Linear kernel
            IKernel kernel = new Linear();

            // Create a new Multi-class Support Vector Machine for one input,
            //  using the linear kernel and four disjoint classes.
            var machine = new MulticlassSupportVectorMachine(1, kernel, 4);

            // Create the Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Run the learning algorithm
            double error = teacher.Run();

            Assert.AreEqual(0, error);
            Assert.AreEqual(0, machine.Compute(inputs[0]));
            Assert.AreEqual(3, machine.Compute(inputs[1]));
            Assert.AreEqual(1, machine.Compute(inputs[2]));
            Assert.AreEqual(2, machine.Compute(inputs[3]));

        }
Example #6
        private void btnLearn_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            double[][] inputs = new double[samples.Count][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }


            svm = new MulticlassSupportVectorMachine(0, new DynamicTimeWarping(2), classes.Count);


            // Create the learning algorithm for the ensemble classifier
            var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs);

            teacher.Algorithm = (machine, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(machine, classInputs, classOutputs);

            // Run the learning algorithm
            double error = teacher.Run();


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = svm.Compute(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                    Color.LightGreen : Color.White;
            }
        }
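Because the machine above is created with 0 inputs and a DynamicTimeWarping kernel, it accepts variable-length sequences. A minimal classification sketch (ClassifyGesture is a hypothetical helper; the sequence must use the same per-sample encoding as the stored samples):

        private int ClassifyGesture(double[] sequence)
        {
            // The sequence may have any length; only its encoding must match
            // the training data (2D points for DynamicTimeWarping(2)).
            return svm.Compute(sequence);
        }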
Example #7
File: Svm.cs Project: ejulio/AMail
        public void Treinar(DadosTreinamento dadosTreinamento)
        {
            var kernel = new Linear(1);
            var quantidadeCaracteristicas = dadosTreinamento.Entradas[0].Length;
            var quantidadeClasses = dadosTreinamento.Saidas.Distinct().Count();
            svm = new MulticlassSupportVectorMachine(quantidadeCaracteristicas, kernel, quantidadeClasses);

            var learning = new MulticlassSupportVectorLearning(svm, dadosTreinamento.Entradas, dadosTreinamento.Saidas)
            {
                Algorithm = (machine, inputs, outputs, a, b) => new SequentialMinimalOptimization(machine, inputs, outputs)
                {
                    Complexity = 1.0
                }
            };

            learning.Run();
        }
Example #8
        /*************************** Primary Methods *******************************/

        public double learning(DataSet trainSet)
        {
            // Train Data Conversion
            var ingredient = convertToTrainIntputTable(trainSet);
            trainInputArray = (double[][])ingredient.Item1;
            trainOutputVector = (int[])ingredient.Item2;

            // Create the Multi-class learning algorithm for the SVM machine
            teacher = new MulticlassSupportVectorLearning(machine, trainInputArray, trainOutputVector);

            // Configure the learning algorithm to use SMO(Sequential Minimal Optimization)
            //  to train the underlying SVMs in each of the binary class subproblems
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Run the learning algorithm and return learning error            
            return teacher.Run();
        }
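A natural counterpart to learning() is a prediction method over the same feature encoding. A minimal sketch (classify is a hypothetical name; the input is assumed to be converted the same way as the training rows):

        public int classify(double[] testInput)
        {
            // Ask the trained one-vs-one machine for the predicted class index.
            return machine.Compute(testInput);
        }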
        public ClassificationAlgorithmBuildResult Build(TrainingExample[] trainingSet, ClassificationAlgorithmParams classificationAlgorithmParams)
        {
            var featuresDimensionality = trainingSet[0].Features.Length;
            var outputClassesCount = trainingSet.Max(x => x.ExpectedResult) + 1;

            var inputs = trainingSet.Select(example => example.Features.ToDoubleArray()).ToArray();
            var outputs = trainingSet.Select(example => example.ExpectedResult).ToArray();

            var classifier = new MulticlassSupportVectorMachine(featuresDimensionality, new Linear(), outputClassesCount);
            var teacher = new MulticlassSupportVectorLearning(classifier, inputs, outputs)
                {
                    Algorithm = (svm, classInputs, classOutputs, i, j) =>
                                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                };

            teacher.Run();
            var result = new SupportVectorMachineAlgorithm(classifier);

            return ClassificationAlgorithmBuildResult.Create(result, trainingSet);
        }
        public void RunTest2()
        {

            double[][] inputs =
            {
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 0, 0, 1, 0 }, // 0
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 1, 1, 1, 1 }, // 2
                new double[] { 1, 0, 1, 1 }, // 2
                new double[] { 1, 1, 0, 1 }, // 2
                new double[] { 0, 1, 1, 1 }, // 2
                new double[] { 1, 1, 1, 1 }, // 2
            };

            int[] outputs =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            IKernel kernel = new Linear();
            MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
            MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);


            double actual = target.Run();
            double expected = 0;

            Assert.AreEqual(expected, actual);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual = machine.Compute(inputs[i]);
                expected = outputs[i];
                Assert.AreEqual(expected, actual);
            }

        }
        public void ApplyTest2()
        {
            // Suppose we have a data table relating the age of
            // a person and its categorical classification, as 
            // in "child", "adult" or "elder".

            // The Codification filter is able to extract those
            // string labels and transform them into discrete
            // symbols, assigning integer labels to each of them
            // such as "child" = 0, "adult" = 1, and "elder" = 2.

            // Create the aforementioned sample table
            DataTable table = new DataTable("Sample data");
            table.Columns.Add("Age", typeof(int));
            table.Columns.Add("Label", typeof(string));

            //            age   label
            table.Rows.Add(10, "child");
            table.Rows.Add(07, "child");
            table.Rows.Add(04, "child");
            table.Rows.Add(21, "adult");
            table.Rows.Add(27, "adult");
            table.Rows.Add(12, "child");
            table.Rows.Add(79, "elder");
            table.Rows.Add(40, "adult");
            table.Rows.Add(30, "adult");


            // Now, let's say we need to translate those text labels
            // into integer symbols. Let's use a Codification filter:

            Codification codebook = new Codification(table);


            // After that, we can use the codebook to "translate"
            // the text labels into discrete symbols, such as:

            int a = codebook.Translate("Label", "child"); // returns 0
            int b = codebook.Translate("Label", "adult"); // returns 1
            int c = codebook.Translate("Label", "elder"); // returns 2

            // We can also do the reverse:
            string labela = codebook.Translate("Label", 0); // returns "child"
            string labelb = codebook.Translate("Label", 1); // returns "adult"
            string labelc = codebook.Translate("Label", 2); // returns "elder"


            // We can also process an entire data table at once:
            DataTable result = codebook.Apply(table);

            // The resulting table can be transformed to jagged array:
            double[][] matrix = Matrix.ToArray(result);

            // and the resulting matrix will be given by
            string str = matrix.ToString(CSharpJaggedMatrixFormatProvider.InvariantCulture);

            // str == new double[][] 
            // {
            //     new double[] { 10, 0 },
            //     new double[] {  7, 0 },
            //     new double[] {  4, 0 },
            //     new double[] { 21, 1 },
            //     new double[] { 27, 1 },
            //     new double[] { 12, 0 },
            //     new double[] { 79, 2 },
            //     new double[] { 40, 1 },
            //     new double[] { 30, 1 } 
            // };



            // Now we will be able to feed this matrix to any machine learning
            // algorithm without having to worry about text labels in our data:

            int classes = codebook["Label"].Symbols; // 3 classes (child, adult, elder)

            // Use the first column as input variables,
            // and the second column as output classes
            //
            double[][] inputs = matrix.GetColumns(0);
            int[] outputs = matrix.GetColumn(1).ToInt32();


            // Create a multi-class SVM for 1 input (Age) and 3 classes (Label)
            var machine = new MulticlassSupportVectorMachine(inputs: 1, classes: classes);

            // Create a Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };
            };

            // Run the learning algorithm
            double error = teacher.Run();


            // After we have learned the machine, we can use it to classify
            // new data points, and use the codebook to translate the machine
            // outputs to the original text labels:

            string result1 = codebook.Translate("Label", machine.Compute(new double[] { 10 })); // child
            string result2 = codebook.Translate("Label", machine.Compute(new double[] { 40 })); // adult
            string result3 = codebook.Translate("Label", machine.Compute(new double[] { 70 })); // elder


            Assert.AreEqual(0, a);
            Assert.AreEqual(1, b);
            Assert.AreEqual(2, c);
            Assert.AreEqual("child", labela);
            Assert.AreEqual("adult", labelb);
            Assert.AreEqual("elder", labelc);

            Assert.AreEqual("child", result1);
            Assert.AreEqual("adult", result2);
            Assert.AreEqual("elder", result3);

        }
Example #12
        /// <summary>
        ///   Creates a Support Vector Machine and estimate 
        ///   its parameters using a learning algorithm.
        /// </summary>
        /// 
        private void btnRunTraining_Click(object sender, EventArgs e)
        {
            if (dgvTrainingSource.Rows.Count == 0)
            {
                MessageBox.Show("Please load the training data before clicking this button");
                return;
            }

            lbStatus.Text = "Gathering data. This may take a while...";
            Application.DoEvents();



            // Extract inputs and outputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
                output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
            }

            // Create the chosen kernel function 
            // using the user interface parameters
            //
            IKernel kernel = createKernel();

            // Extract training parameters from the interface
            double complexity = (double)numComplexity.Value;
            double tolerance = (double)numTolerance.Value;
            int cacheSize = (int)numCache.Value;
            SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;


            // Create the Multi-class Support Vector Machine using the selected Kernel
            ksvm = new MulticlassSupportVectorMachine(1024, kernel, 10);

            // Create the learning algorithm using the machine and the training data
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, input, output)
            {
                // Configure the learning algorithm
                Algorithm = (svm, classInputs, classOutputs, i, j) =>

                    // Use Platt's Sequential Minimal Optimization algorithm
                    new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                    {
                        Complexity = complexity,
                        Tolerance = tolerance,
                        CacheSize = cacheSize,
                        Strategy = strategy,
                        Compact = (kernel is Linear)
                    }
            };


            lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            double error = ml.Run();

            sw.Stop();


            lbStatus.Text = String.Format(
                "Training complete ({0} ms, error: {1}). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            // Update the interface status
            btnClassifyVoting.Enabled = true;
            btnClassifyElimination.Enabled = true;
            btnCalibration.Enabled = true;


            // Populate the information tab with the machines
            dgvMachines.Rows.Clear();
            int k = 1;
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < i; j++, k++)
                {
                    var machine = ksvm[i, j];

                    int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;

                    int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
                    dgvMachines.Rows[c].Tag = machine;
                }
            }

            // approximate size in bytes = 
            //   number of support vectors * number of doubles in a support vector * size of double
            int bytes = ksvm.SupportVectorUniqueCount * 1024 * sizeof(double);
            float megabytes = bytes / (1024f * 1024f);
            lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
        }
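As a variant of the configuration above, the Complexity value can be estimated from the data instead of read from the interface, using the heuristic that also appears in a later example in this collection. A sketch of the replacement line inside btnRunTraining_Click:

            // Variant: estimate the SMO Complexity (C) from the kernel and the
            // training inputs instead of taking it from the numComplexity control.
            double complexity = SequentialMinimalOptimization.EstimateComplexity(kernel, input);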
Example #13
        /// <summary>
        ///   Calibrates the current Support Vector Machine to produce
        ///   probabilistic outputs using ProbabilisticOutputLearning.
        /// </summary>
        /// 
        private void btnRunCalibration_Click(object sender, EventArgs e)
        {
            if (ksvm == null)
            {
                MessageBox.Show("Please train the machines first.");
                return;
            }

            // Extract inputs and outputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
                output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
            }



            // Create the calibration algorithm using the training data
            var ml = new MulticlassSupportVectorLearning(ksvm, input, output)
            {
                // Configure the calibration algorithm
                Algorithm = (svm, classInputs, classOutputs, i, j) =>
                    new ProbabilisticOutputLearning(svm, classInputs, classOutputs)
            };


            lbStatus.Text = "Calibrating the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            double error = ml.Run();

            sw.Stop();

            lbStatus.Text = String.Format(
                "Calibration complete ({0} ms, error: {1}). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            btnClassifyVoting.Enabled = true;
        }
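After the calibration above, per-class responses can be inspected with the Compute overload used elsewhere in this collection. A minimal sketch, meant to run after ml.Run() with input and ksvm still in scope:

            // Sketch: retrieve the calibrated per-class responses for one sample.
            double[] responses;
            int decision = ksvm.Compute(input[0], MulticlassComputeMethod.Voting, out responses);
            // responses[k] now holds the machine's score for class k.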
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double error = smo.Run();

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
            }
        }
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[inputs.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i], 1);
        }
        public void RunTest2()
        {
            double[][] inputs =
            {
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 0, 0, 1, 0 }, // 0
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 1, 1, 1, 1 }, // 2
                new double[] { 1, 0, 1, 1 }, // 2
                new double[] { 1, 1, 0, 1 }, // 2
                new double[] { 0, 1, 1, 1 }, // 2
                new double[] { 1, 1, 1, 1 }, // 2
            };

            int[] outputs =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            IKernel kernel = new Linear();
            var machine = new MulticlassSupportVectorMachine(4, kernel, 3);
            var target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double error1 = target.Run();
            Assert.AreEqual(0, error1);

            int[] actual = new int[outputs.Length];
            var paths = new Decision[outputs.Length][];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = machine.Decide(inputs[i]);
                paths[i] = machine.GetLastDecisionPath();
                Assert.AreEqual(outputs[i], actual[i]);
            }

            var original = (MulticlassSupportVectorMachine)machine.Clone();

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new ProbabilisticOutputCalibration(svm, classInputs, classOutputs);

            double error2 = target.Run();
            Assert.AreEqual(0, error2);

            int[] actual2 = new int[outputs.Length];
            var paths2 = new Decision[outputs.Length][];
            for (int i = 0; i < actual.Length; i++)
            {
                actual2[i] = machine.Decide(inputs[i]);
                paths2[i] = machine.GetLastDecisionPath();
                Assert.AreEqual(outputs[i], actual2[i]);
            }

            var svm21 = machine[2, 1];
            var org21 = original[2, 1];
            var probe = inputs[12];
            var w21 = svm21.Weights;
            var o21 = org21.Weights;
            Assert.IsFalse(w21.IsEqual(o21, rtol: 1e-2));
            bool b = svm21.Decide(probe);
            bool a = org21.Decide(probe);
            Assert.AreEqual(a, b);

            double[][] probabilities = machine.Probabilities(inputs);

            //string str = probabilities.ToString(CSharpJaggedMatrixFormatProvider.InvariantCulture);

            double[][] expected = new double[][]
            {
                new double[] { 0.978013252309678, 0.00665988562670578, 0.015326862063616 },
                new double[] { 0.923373734751393, 0.0433240974867644, 0.033302167761843 },
                new double[] { 0.902265207121918, 0.0651939200306017, 0.0325408728474804 },
                new double[] { 0.978013252309678, 0.00665988562670578, 0.015326862063616 },
                new double[] { 0.923373734751393, 0.0433240974867644, 0.033302167761843 },
                new double[] { 0.0437508203303804, 0.79994737664453, 0.156301803025089 },
                new double[] { 0.0437508203303804, 0.79994737664453, 0.156301803025089 },
                new double[] { 0.0147601290467641, 0.948443224264852, 0.0367966466883842 },
                new double[] { 0.0920231845129213, 0.875878175972548, 0.0320986395145312 },
                new double[] { 0.0920231845129213, 0.875878175972548, 0.0320986395145312 },
                new double[] { 0.00868243281954335, 0.00491075178001821, 0.986406815400439 },
                new double[] { 0.0144769600209954, 0.0552754387307989, 0.930247601248206 },
                new double[] { 0.0144769600209954, 0.0552754387307989, 0.930247601248206 },
                new double[] { 0.0584631682316073, 0.0122104663095354, 0.929326365458857 },
                new double[] { 0.00868243281954335, 0.00491075178001821, 0.986406815400439 } 
            };

            Assert.IsTrue(probabilities.IsEqual(expected, rtol: 1e-8));
        }
        public void SerializeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Linear();
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double expected = smo.Run();


            MemoryStream stream = new MemoryStream();

            // Save the machines
            msvm.Save(stream);

            // Rewind
            stream.Seek(0, SeekOrigin.Begin);

            // Reload the machines
            var target = MulticlassSupportVectorMachine.Load(stream);

            double actual;

            int count = 0; // Compute errors
            for (int i = 0; i < inputs.Length; i++)
            {
                double y = target.Compute(inputs[i]);
                if (y != outputs[i]) count++;
            }

            actual = (double)count / inputs.Length;


            Assert.AreEqual(expected, actual);

            Assert.AreEqual(msvm.Inputs, target.Inputs);
            Assert.AreEqual(msvm.Classes, target.Classes);
            for (int i = 0; i < msvm.Classes; i++)
            {
                for (int j = 0; j < msvm.Classes; j++)
                {
                {
                    var a = msvm[i, j];
                    var b = target[i, j];

                    if (i != j)
                    {
                        Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
                    }
                    else
                    {
                        Assert.IsNull(a);
                        Assert.IsNull(b);
                    }
                }
            }
        }
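Outside of tests, the trained machine is usually persisted to disk rather than to a MemoryStream. A hedged sketch, assuming string-path overloads of Save and Load are available:

            // Sketch: file-based persistence of the trained multi-class machine.
            msvm.Save("machine.bin");
            var restored = MulticlassSupportVectorMachine.Load("machine.bin");
            int label = restored.Compute(inputs[0]);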
        public void LinearTest()
        {

            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a new multi-class linear support vector machine for 3 classes
            var machine = new MulticlassSupportVectorMachine(inputs: 4, classes: 3);

            // Create a one-vs-one learning algorithm using LIBLINEAR's L2-loss SVC dual
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs)
            {
                Algorithm = (svm, classInputs, classOutputs, i, j) =>
                    new LinearDualCoordinateDescent(svm, classInputs, classOutputs)
                    {
                        Loss = Loss.L2
                    }
            };

            // Teach the machine
            double error = teacher.Run(); // should be 0.

            Assert.AreEqual(0, error);
            for (int i = 0; i < inputs.Length; i++)
            {
                double actual = machine.Compute(inputs[i]);
                double expected = outputs[i];
                Assert.AreEqual(expected, actual);
            }
        }
        // Creates the learning function for the MC-SVM: LinearDualCoordinateDescent with a fixed Complexity of 0.007
        public double MCSVMLearn()
        {
            mcsvmLearning = new MulticlassSupportVectorLearning(mcsvm, Inputs, Outputs);
            mcsvmLearning.Algorithm = (machine, inputs, outputs, class1, class2) => new LinearDualCoordinateDescent(machine, inputs, outputs)
            {
                Complexity = 0.007
            };

            return mcsvmLearning.Run();
        }
        private void CreateAndTrainKSVM(IList<double[]> inputs, IList<int> outputs)
        {
            _ksvm = new MulticlassSupportVectorMachine(inputs[0].Length, Kernel, TargetDirectories.Count);
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(_ksvm, inputs.ToArray(), outputs.ToArray());
            
            double complexity = SequentialMinimalOptimization.EstimateComplexity(Kernel, inputs.ToArray());
            SelectionStrategy strategy = SelectionStrategy.Sequential;
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = complexity,
                    Tolerance = Tolerance,
                    CacheSize = CacheSize,
                    Strategy = strategy,
                };
            };

            Console.WriteLine("Starting SVM training");
            ml.Run();
            Console.WriteLine("SVM trained");
        }
Example #21
        /// <summary>
        /// Core machine learning method for parsing csv data, training the svm, and calculating the accuracy.
        /// </summary>
        /// <param name="path">string - path to csv file (training, csv, test).</param>
        /// <param name="count">int - max number of rows to process. This is useful for preparing learning curves, by using gradually increasing values. Use Int32.MaxValue to read all rows.</param>
        /// <param name="machine">MulticlassSupportVectorMachine - Leave null for initial training.</param>
        /// <returns>MulticlassSupportVectorMachine</returns>
        private static MulticlassSupportVectorMachine RunSvm(string path, int count, MulticlassSupportVectorMachine machine = null)
        {
            double[][] inputs;
            int[] outputs;

            // Parse the csv file to get inputs and outputs.
            ReadData(path, count, out inputs, out outputs, new FrontLabelParser());

            if (machine == null)
            {
                // Training.
                MulticlassSupportVectorLearning teacher = null;

                // Create the svm.
                machine = new MulticlassSupportVectorMachine(_pixelCount, new Gaussian(_sigma), _classCount);
                teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);
                teacher.Algorithm = (svm, classInputs, classOutputs, i, j) => new SequentialMinimalOptimization(svm, classInputs, classOutputs) { CacheSize = 0 };

                // Train the svm.
                Utility.ShowProgressFor(() => teacher.Run(), "Training");
            }

            // Calculate accuracy.
            double accuracy = Utility.ShowProgressFor<double>(() => Accuracy.CalculateAccuracy(machine, inputs, outputs), "Calculating Accuracy");
            Console.WriteLine("Accuracy: " + Math.Round(accuracy * 100, 2) + "%");

            return machine;
        }
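Accuracy.CalculateAccuracy is not shown above. A minimal sketch of what such a helper might look like, given the Compute calls used throughout these examples:

        // Hypothetical sketch of the accuracy helper: the fraction of samples
        // whose predicted class matches the known label.
        public static double CalculateAccuracy(
            MulticlassSupportVectorMachine machine, double[][] inputs, int[] outputs)
        {
            int correct = 0;
            for (int i = 0; i < inputs.Length; i++)
                if (machine.Compute(inputs[i]) == outputs[i])
                    correct++;

            return correct / (double)inputs.Length;
        }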
        public void RunTest3()
        {

            double[][] inputs =
            {
                // Tickets with the following structure should be assigned to location 0
                new double[] { 1, 4, 2, 0, 1 }, // should be assigned to location 0
                new double[] { 1, 3, 2, 0, 1 }, // should be assigned to location 0

                // Tickets with the following structure should be assigned to location 1
                new double[] { 3, 0, 1, 1, 1 }, // should be assigned to location 1
                new double[] { 3, 0, 1, 0, 1 }, // should be assigned to location 1

                // Tickets with the following structure should be assigned to location 2
                new double[] { 0, 5, 5, 5, 5 }, // should be assigned to location 2
                new double[] { 1, 5, 5, 5, 5 }, // should be assigned to location 2

                // Tickets with the following structure should be assigned to location 3
                new double[] { 1, 0, 0, 0, 0 }, // should be assigned to location 3
                new double[] { 1, 0, 0, 0, 0 }, // should be assigned to location 3
            };

            int[] outputs =
            {
                0, 0, // Those are the locations for the first two vectors above
                1, 1, // Those are the locations for the next two vectors above
                2, 2, // Those are the locations for the next two vectors above
                3, 3, // Those are the locations for the last two vectors above
            };

            // Since this is a simplification, a linear machine will suffice:
            IKernel kernel = new Linear();

            // Create the machine for feature vectors of length 5, for 4 possible locations
            MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(5, kernel, 4);

            // Create a new learning algorithm to train the machine
            MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Use the standard SMO algorithm
            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Train the machines
            double actual = target.Run();


            // Compute the answer for all training samples
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] answersWeights;

                double answer = machine.Compute(inputs[i], MulticlassComputeMethod.Voting, out answersWeights);

                // Assert it has been classified correctly
                Assert.AreEqual(outputs[i], answer);

                // Assert the most probable answer is indeed the correct one
                int imax; Matrix.Max(answersWeights, out imax);
                Assert.AreEqual(answer, imax);
            }

        }
Example #23
        /// <summary>
        ///   Loads the stored gestures and learns a SVM using those data.
        /// </summary>
        /// 
        private void btnLearn_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            double[][] inputs = new double[samples.Count][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }


            // Creates a new learning machine. Please note how the number of inputs is given
            // as zero: this means the machine will accept variable-length sequences as input.
            //
            svm = new MulticlassSupportVectorMachine(inputs: 0, 
                kernel: new DynamicTimeWarping(2), classes: classes.Count);


            // Create the learning algorithm to teach the multiple class classifier
            var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs)
            {
                // Set up the learning algorithm for each 1-vs-1 subproblem
                Algorithm = (machine, classInputs, classOutputs, i, j) =>
                    new SequentialMinimalOptimization(machine, classInputs, classOutputs)
            };


            // Run the learning algorithm
            double error = teacher.Run();


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = svm.Compute(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                    Color.LightGreen : Color.White;
            }
        }
        public void RunTest2()
        {
            double[][] inputs =
            {
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 0, 0, 1, 0 }, // 0
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 1, 1, 1, 1 }, // 2
                new double[] { 1, 0, 1, 1 }, // 2
                new double[] { 1, 1, 0, 1 }, // 2
                new double[] { 0, 1, 1, 1 }, // 2
                new double[] { 1, 1, 1, 1 }, // 2
            };

            int[] outputs =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            IKernel kernel = new Linear();
            MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
            MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double error1 = target.Run();
            Assert.AreEqual(0, error1);

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new ProbabilisticOutputCalibration(svm, classInputs, classOutputs);

            double error2 = target.Run();
            Assert.AreEqual(0, error2);


        }
        public void ComputeTest2()
        {
            double[][] input =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] output =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            int classes = 4;
            int inputs = 5;


            // Create the Multi-class Support Vector Machine using the selected Kernel
            var msvm = new MulticlassSupportVectorMachine(inputs, kernel, classes);

            // Create the learning algorithm using the machine and the training data
            var ml = new MulticlassSupportVectorLearning(msvm, input, output);

            // Configure the learning algorithm
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };
                return smo;
            };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            // Executes the training algorithm
            double error = ml.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[input.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Elimination, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Voting, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
        }
        public void LinearComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            var msvm = new MulticlassSupportVectorMachine(5, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new LinearCoordinateDescent(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };

            msvm.ParallelOptions.MaxDegreeOfParallelism = 1;
            smo.ParallelOptions.MaxDegreeOfParallelism = 1;

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            // Linear machines in compact form do not require kernel evaluations
            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }
        }
Example #27
        /// <summary>
        ///   Creates the Support Vector Machines that will identify images based on
        ///   their Bag-of-Visual-Words feature vector representation.
        /// </summary>
        /// 
        private void btnCreateVectorMachines_Click(object sender, EventArgs e)
        {
            double[][] inputs;
            int[] outputs;

            getData(out inputs, out outputs);

            int classes = outputs.Distinct().Count();

            var kernel = getKernel();

            // Create the Multi-class Support Vector Machine using the selected Kernel
            ksvm = new MulticlassSupportVectorMachine(inputs[0].Length, kernel, classes);

            // Create the learning algorithm using the machine and the training data
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, inputs, outputs);

            // Extract training parameters from the interface
            double complexity = (double)numComplexity.Value;
            double tolerance = (double)numTolerance.Value;
            int cacheSize = (int)numCache.Value;
            SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;

            // Configure the learning algorithm
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = complexity,
                    Tolerance = tolerance,
                    CacheSize = cacheSize,
                    Strategy = strategy,
                };
            };


            lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            double error = ml.Run();

            sw.Stop();

            lbStatus.Text = String.Format(
                "Training complete ({0} ms, error: {1}). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            btnClassifyElimination.Enabled = true;

            // Populate the information tab with the machines
            dgvMachines.Rows.Clear();
            int k = 1;
            for (int i = 0; i < classes; i++)
            {
                for (int j = 0; j < i; j++, k++)
                {
                    var machine = ksvm[i, j];

                    int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;

                    int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
                    dgvMachines.Rows[c].Tag = machine;
                }
            }

            // approximate size in bytes = 
            //   number of support vectors *
            //   number of doubles in a support vector *
            //   size of double
            int bytes = ksvm.SupportVectorUniqueCount * ksvm.Inputs * sizeof(double);
            float megabytes = bytes / (1024f * 1024f);
            lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
        }
Example #28
        /// <summary>
        /// The main entry point for the program
        /// </summary>
        public static void Main()
        {
            try
            {
                #region Exploratory Data Analysis Explanation
                /*
                    John Tukey coined the term Exploratory Data Analysis in his seminal book of the same name.  There really is not a prescribed way to do an EDA.
                    Tools I use for EDA include Microsoft Excel, plots and visual inspection of the data.  Provided they do not introduce an early bias, gut feelings also play a role in a good EDA.
                    Some objectives of EDA are to:
                        •	Identify the types of data in the dataset
                        •	Examine the statistical properties of the data
                        •	Look for invalid data (may need Domain or Subject Matter experts)
                        •	Understand the provenance of the data
                        •	Aide in the selection of appropriate statistical tools and techniques

                    For our diabetes dataset, notice that there is both quantitative and qualitative data.  Note that the result or outcome variable (which indicates if the person has
                    diabetes) is nominal data with only two states.  This is called dichotomous or binary categorical data, which rules out some machine learning algorithms and directs
                    us to others.
                */
                #endregion
                // Because of time constraints, the loading of the DataTables and the EDA have already been completed.
                XmlConfigurator.Configure();

                Logger.Info("Exploratory Data Analysis");

                FileInfo fi = new FileInfo("training.csv");
                DataTable training = DataTableCsvConvertor.GetDataTableFromCsv(fi);

                fi = new FileInfo("test.csv");
                DataTable test = DataTableCsvConvertor.GetDataTableFromCsv(fi);

                // Print out the first few table rows.
                Head.PrintHead(training);

                //Logger.Info(string.Empty);
                //BasicStatistics.BasicStats(training); // For most EDA's Basic Descriptive statistics are important, but this outputs a lot of information
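                // A minimal sketch of the kind of descriptive statistics BasicStats reports,
                // assuming the column parses as numeric (the column name and parsing below are
                // illustrative only):
                //double[] weights = training.AsEnumerable()
                //    .Select(row => Double.Parse(row["Weight"].ToString()))
                //    .ToArray();
                //Logger.Info("Weight mean: " + Accord.Statistics.Tools.Mean(weights));
                //Logger.Info("Weight std. dev.: " + Accord.Statistics.Tools.StandardDeviation(weights));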

                #region Data Imputation & Cleanup Explanation
                /*
                    Keep in mind that Machine Learning algorithms operate on numerical data only, so something will have to be done when the data is text or NULL.  Also, predictor
                    variables (aka features or columns of data) that do not vary will not be predictive and may need to be removed.  Due to time constraints, the EDA, ETL (Extract, Transform and Load)
                    and data cleaning are already completed in the solution.  For this analysis, remove the HeartRate column because it is all NULL, and remove any rows of data that contain NULLs.
                */
                #endregion
                // Delete any columns that are not needed.
                training.Columns.Remove("HeartRate");
                test.Columns.Remove("HeartRate");

                // How to handle rows containing missing or NA data - data imputation or deletion?
                training = DataImputation.RemoveMissing(training);
                test = DataImputation.RemoveMissing(test);
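                // For illustration only: a minimal inline sketch of what a helper like
                // DataImputation.RemoveMissing might do, assuming "missing" means DBNull or an
                // empty string in any column (the real helper may differ):
                //training = training.AsEnumerable()
                //    .Where(row => !row.ItemArray.Any(v => v is DBNull || String.IsNullOrEmpty(v.ToString())))
                //    .CopyToDataTable();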

                Codification codebook = new Codification(training);
                int outputClasses = 2;

                string[] inputColumns =
                {
                    "Gender", "YearOfBirth", "SmokingEffectiveYear", "NISTcode", "Height", "Weight", "BMI", "SystolicBP", "DiastolicBP", "RespiratoryRate", "Temperature"
                };

                string outputColumn = "DMIndicator";

                // Translate our training data into integer symbols using our codebook:
                DataTable symbols = codebook.Apply(training);
                double[][] inputs = symbols.ToArray(inputColumns);
                int[] outputs = Matrix.ToArray<int>(training, outputColumn);
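                // Codification builds a per-column dictionary of symbols; for example, a text
                // column such as Gender would typically be translated to integer codes like
                // "Female" -> 0 and "Male" -> 1 (in first-seen order), while columns that are
                // already numeric pass through unchanged.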

                #region Decision Tree Overview
                /*
                    Decision Trees are very powerful, especially with a binary classification model, and are somewhat resistant to over-fitting the data.
                    Additionally, they are intuitive to explain to stakeholders.
                */
                #endregion
                Logger.Info(string.Empty);
                Logger.Info("Decision Tree");

                DecisionVariable[] attributes =
                {
                    new DecisionVariable("Gender", 2), // 2 possible values (Male, Female)
                    new DecisionVariable("YearOfBirth", DecisionVariableKind.Continuous),
                    new DecisionVariable("SmokingEffectiveYear", DecisionVariableKind.Continuous),
                    new DecisionVariable("NISTcode", DecisionVariableKind.Continuous),
                    new DecisionVariable("Height", DecisionVariableKind.Continuous),
                    new DecisionVariable("Weight", DecisionVariableKind.Continuous),
                    new DecisionVariable("BMI", DecisionVariableKind.Continuous),
                    new DecisionVariable("SystolicBP", DecisionVariableKind.Continuous),
                    new DecisionVariable("DiastolicBP", DecisionVariableKind.Continuous),
                    new DecisionVariable("RespiratoryRate", DecisionVariableKind.Continuous),
                    new DecisionVariable("Temperature", DecisionVariableKind.Continuous)
                };

                DecisionTree tree = new DecisionTree(attributes, outputClasses);

                C45Learning c45learning = new C45Learning(tree);

                // Learn the training instances!
                c45learning.Run(inputs, outputs);
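                // Run also returns the misclassification rate over the training instances,
                // which could be captured for a quick sanity check, e.g.:
                //double treeError = c45learning.Run(inputs, outputs);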

                // The next two lines are optional to save the model into IL for future use.
                // Convert to an expression tree
                var expression = tree.ToExpression();
                // Compiles the expression to IL
                var func = expression.Compile();
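                // The compiled delegate can then classify a single observation directly, e.g.
                // (using the first training row purely as an illustration):
                //int predicted = func(inputs[0]);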

                #region Evaluation Explanation
                /*
                    To evaluate the model, use each row of the test dataset to predict the output variable (DMIndicator) with the DecisionTree’s Compute method, passing in the same
                    variables that were used to train the model.  Store the test dataset’s value of DMIndicator and the predicted value in a DataTable and an integer collection for
                    later validation of the model.
                */
                #endregion
                Evaluator.Evaluate(test, tree);

                #region Validation Explanation
                /*
                    There are many ways to validate models, but we will use a confusion matrix because it is intuitive and a widely accepted way to validate binary classification models.
                    Conveniently, Accord.NET has a ConfusionMatrix class to create this matrix for you.  Pass the collections of predicted and actual values stored earlier to the
                    ConfusionMatrix class, then output the matrix and the accuracy.
                */
                #endregion
                Validator.Validate(test, tree);
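                // A minimal sketch of the confusion-matrix step described above, assuming the
                // predicted and actual DMIndicator values were collected into int arrays while
                // evaluating the test set (the array names here are illustrative):
                //var matrix = new Accord.Statistics.Analysis.ConfusionMatrix(predicted, actual, 1);
                //Logger.Info("Accuracy: " + matrix.Accuracy);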

                #region Support Vector Machine Overview
                /*
                    Support Vector Machines are powerful classification machine learning algorithms with very few knobs to turn.  The kernel of the SVM can be exchanged to use
                    a number of different mathematical functions, including polynomial, sigmoid (neural network) and Gaussian kernels.
                */
                #endregion
                Logger.Info(string.Empty);
                Logger.Info("Support Vector Machine");

                // Build the Support Vector Machine, starting with a linear kernel.
                IKernel kernel = new Linear();
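                // Per the overview above, other kernels could be swapped in here, e.g.
                // (the degree and sigma values are illustrative):
                //IKernel kernel = new Polynomial(2);
                //IKernel kernel = new Gaussian(4.2);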

                // Create the Multi-class Support Vector Machine using the selected Kernel
                int inputDimension = inputs[0].Length;
                var ksvm = new MulticlassSupportVectorMachine(inputDimension, kernel, outputClasses);

                // Create the learning algorithm using the machine and the training data
                var ml = new MulticlassSupportVectorLearning(ksvm, inputs, outputs)
                {
                    Algorithm = (svm, classInputs, classOutputs, i, j) =>
                    {
                        return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                        {
                            CacheSize = 0
                        };
                    }
                };

                double svmError = ml.Run();
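                // Run returns the classification error measured over the training set; log it
                // so the SVM can be compared against the decision tree above.
                Logger.Info("SVM training error: " + svmError);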

                #region Evaluation Explanation
                /*
                    As with the decision tree, use each row of the test dataset to predict the output variable (DMIndicator), this time with the multi-class SVM’s Compute method,
                    passing in the same variables that were used to train the model.  Store the test dataset’s value of DMIndicator and the predicted value for later validation
                    of the model.
                */
                #endregion
                Evaluator.Evaluate(test, ksvm);

                #region Validation Explanation
                /*
                    As before, validate the model with a confusion matrix: pass the collections of predicted and actual values to Accord.NET’s ConfusionMatrix class, then output
                    the matrix and the accuracy, so the SVM’s performance can be compared directly against the decision tree’s.
                */
                #endregion
                Validator.Validate(test, ksvm);
            }
            catch (Exception ex)
            {
                Logger.Error(ex.ToString());
            }
        }