Example #1
        private static void XorLearn()
        {
            double[][] inputs =
            {
                new double[] { 0, 0 },
                new double[] { 1, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 1 }
            };

            int[] outputs =
            {
                0, 1, 1, 0
            };

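            // Create the learning algorithm with the chosen kernel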
            var smo = new SequentialMinimalOptimization<Gaussian>()
            {
                Complexity = 100
            };

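            // Use the algorithm to learn the svm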
            var svm = smo.Learn(inputs, outputs);

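            // Compute the machine's answers for the given inputs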
            bool[] prediction = svm.Decide(inputs);

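            // Compute the classification error between the expected and predicted values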
            double error = new AccuracyLoss(outputs).Loss(prediction);

            Console.WriteLine("Error: " + error);

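            // Show results on screen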
            ScatterplotBox.Show("Training data", inputs, outputs);
            ScatterplotBox.Show("SVM results", inputs, prediction.ToZeroOne());
        }
Example #2
        public double Predict(double[][] observations, int[] labels)
        {
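            // Classify the samples using the model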
            int[]  predicted = machine.Decide(observations);
            double error     = new AccuracyLoss(labels).Loss(predicted);

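            // Return one minus the error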
            return(1 - error);
        }
Example #3
        public void Accuracy_Loss_2()
        {
            var sut     = new AccuracyLoss();
            var targets = Matrix<float>.Build.Dense(3, 2, new float[] { 1, 0, 1, 0, 1, 0 });

            var predictions = Matrix<float>.Build.Dense(3, 2, new float[] { 0.9f, 0.8f, 0.3f, 0.1f, 0.2f, 0.7f });

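            // Compute the loss for the given targets and predictions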
            var actual = sut.Loss(targets, predictions);

            Assert.AreEqual(0.6666, actual, 0.001);
        }
Example #4
File: Test1.cs Project: kiba518/MvpCode
        public static void Excute()
        {
            double[][] inputs =
            {
                /* 1.*/ new double[] { 0, 0 },
                /* 2.*/ new double[] { 1, 0 },
                /* 3.*/ new double[] { 0, 1 },
                /* 4.*/ new double[] { 1, 1 },
            };
            double[][] inputs2 =
            {
                /* 1.*/ new double[] { 0, 0 },
                /* 2.*/ new double[] { 1, 0 },
                /* 3.*/ new double[] { 1, 1 },
                /* 4.*/ new double[] { 1, 1 },
            };
            int[] outputs =
            {
                /* 1. 0 xor 0 = 0: */ 0,
                /* 2. 1 xor 0 = 1: */ 1,
                /* 3. 0 xor 1 = 1: */ 1,
                /* 4. 1 xor 1 = 0: */ 0,
            };

            // Create the learning algorithm with the chosen kernel
            var smo = new SequentialMinimalOptimization<Gaussian>()
            {
                Complexity = 100 // Create a hard-margin SVM
            };

            // Use the algorithm to learn the svm
            var svm = smo.Learn(inputs, outputs);

            // Compute the machine's answers for the given inputs
            bool[] prediction = svm.Decide(inputs2);

            // Compute the classification error between the expected
            // values and the values actually predicted by the machine:
            double error = new AccuracyLoss(outputs).Loss(prediction);

            Console.WriteLine("Error: " + error);

            // Show results on screen
            ScatterplotBox.Show("Training data", inputs, outputs);
            ScatterplotBox.Show("SVM results", inputs, prediction.ToZeroOne());

            Console.ReadKey();
        }
Example #5
        public double Learn(double[][] observations, int[] labels)
        {
            var teacher = new MulticlassSupportVectorLearning<Gaussian>()
            {
                Learner = (param) => new SequentialMinimalOptimization<Gaussian>()
                {
                    UseKernelEstimation = true,
                }
            };

            machine = teacher.Learn(observations, labels);

            int[] predicted = machine.Decide(observations);

            double error = new AccuracyLoss(labels).Loss(predicted);

            return(1 - error);
        }
Example #6
        public double Learn(double[][] observations, int[] labels)
        {
            int max = observations[0].Length;

            DecisionVariable[] a = new DecisionVariable[max];
            for (int i = 0; i < max; i++)
            {
                a[i] = DecisionVariable.Continuous(i.ToString());
            }
            C45Learning teacher = new C45Learning(a);

            // Use the learning algorithm to induce the tree
            machine = teacher.Learn(observations, labels);

            // Classify the samples using the model
            int[]  predicted = machine.Decide(observations);
            double error     = new AccuracyLoss(labels).Loss(predicted);

            return(1 - error);
        }
Example #7
        public double Learn(double[][] observations, int[] labels)
        {
            var teacher = new NaiveBayesLearning<CauchyDistribution>();

            //teacher.Options.InnerOption = new NormalOptions()
            //{
            //    Regularization = 1e-5
            //};


            // Use the learning algorithm to learn
            machine = teacher.Learn(observations, labels);

            // Classify the samples using the model
            int[] predicted = machine.Decide(observations);

            double error = new AccuracyLoss(labels).Loss(predicted);


            return(1 - error);
        }
Example #8
        static void Main(string[] args)
        {
            double[][] inputs =
            {
                /* 1. */ new double[] { 0, 0 },
                /* 2. */ new double[] { 1, 0 },
                /* 3. */ new double[] { 0, 1 },
                /* 4. */ new double[] { 1, 1 }
            };

            int[] outputs =
            {
                /* 1. 0 xor 0 = 0: */ 0,
                /* 2. 1 xor 0 = 1: */ 1,
                /* 3. 0 xor 1 = 1: */ 1,
                /* 4. 1 xor 1 = 0: */ 0,
            };

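            // Create the learning algorithm with the chosen kernel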
            var smo = new SequentialMinimalOptimization<Gaussian>()
            {
                Complexity = 100
            };

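            // Use the algorithm to learn the svm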
            var svm = smo.Learn(inputs, outputs);

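            // Compute the machine's answers for the given inputs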
            bool[] prediction = svm.Decide(inputs);

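            // Compute the classification error between the expected and predicted values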
            double error = new AccuracyLoss(outputs).Loss(prediction);

            Console.WriteLine("Error: " + error);

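            // Show results on screen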
            ScatterplotBox.Show("Training data", inputs, outputs);
            ScatterplotBox.Show("SVM results", inputs, prediction.ToZeroOne());

            Console.ReadKey();
        }
Example #9
        private static void multinomial(double[][] inputs, int[] outputs)
        {
            var lbnr = new LowerBoundNewtonRaphson()
            {
                MaxIterations = 100,
                Tolerance     = 1e-6
            };

            // Learn a multinomial logistic regression using the teacher:
            MultinomialLogisticRegression mlr = lbnr.Learn(inputs, outputs);

            // We can compute the model answers
            int[] answers = mlr.Decide(inputs);

            // And also the probability of each of the answers
            double[][] probabilities = mlr.Probabilities(inputs);

            // Now we can check how good our model is at predicting
            double error = new AccuracyLoss(outputs).Loss(answers);

            // We can also verify the classes with highest
            // probability are the ones being decided for:
            int[] argmax = probabilities.ArgMax(dimension: 1); // should be same as 'answers'
        }
Example #10
        [MTAThread] // Declares this program's threading model as a multi-threaded apartment; use STAThread for a single-threaded apartment

        public void linnerSVM(DataSet dataSet)
        {
            {
                int      i      = 0;
                string[] output = new string[dataSet.Tables[0].Rows.Count]; // array of 434 rows

                int[] outputInt = new int[dataSet.Tables[0].Rows.Count];
                foreach (DataRow mDr in dataSet.Tables[0].Rows)
                {
                    output[i++] = mDr[dataSet.Tables[0].Columns.Count - 1].ToString();
                    if (output[i - 1] == "差气层")
                    {
                        outputInt[i - 1] = 0;
                    }
                    else if (output[i - 1] == "气层")
                    {
                        outputInt[i - 1] = 1;
                    }
                    else if (output[i - 1] == "干层")
                    {
                        outputInt[i - 1] = -1;
                    }
                    else if (output[i - 1] == "水层")
                    {
                        outputInt[i - 1] = -1;
                    }
                    Console.WriteLine("{0},{1},{2}", output[i - 1], outputInt[i - 1], i - 1);//验证点
                }

                double[][] inputs = new double[dataSet.Tables[0].Rows.Count][];

                Console.WriteLine(dataSet.Tables[0].Rows.Count);
                for (int j = 0; j < dataSet.Tables[0].Rows.Count; j++)
                {
                    inputs[j] = new double[11];                                   // dataSet.Tables[0].Columns.Count - 7
                    for (int k = 6; k < dataSet.Tables[0].Columns.Count - 1; k++) // k = 6: features start at the 7th column; Columns.Count - 1 skips the last (label) column
                    {
                        Console.WriteLine(dataSet.Tables[0].Rows[j][k].ToString());
                        inputs[j][k - 6] = Convert.ToDouble(dataSet.Tables[0].Rows[j][k].ToString());
                        Console.WriteLine("{0},{1}", j, k - 6);//验证点
                    }
                }
                //var ksvm = new SupportVectorMachine(new Gaussian(), 2);
                //var ksvm = new SupportVectorMachine(11);

                // Create the learning algorithm with the given inputs and outputs
                var smm = new SequentialMinimalOptimization<Gaussian>()
                {
                    Complexity = 100
                };

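                // Learn the SVM, predict on the training inputs, and compute the error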
                var    svm        = smm.Learn(inputs, outputInt);
                bool[] prediction = svm.Decide(inputs);
                double error      = new AccuracyLoss(outputInt).Loss(prediction);
                Console.WriteLine("error:" + error);
                // Show results on screen
                Console.WriteLine(prediction.ToZeroOne());

                //ScatterplotBox.Show("SVM results", inputs, prediction.ToZeroOne());
                //svm.Decide(); // test



                Console.ReadKey();
            }
        }