Code Example #1
        /// <summary>
        /// Receives a county, trains an SVR model on that county's data, and returns the trained model.
        /// </summary>
        /// <param name="county"></param>
        /// <returns>Trained SVR object</returns>
        public SupportVectorMachine <IKernel> TrainSVR(County county)
        {
            //Creates an optimization object and sets its parameters.
            var teacher = new SequentialMinimalOptimizationRegression()
            {
                //Radial basis function kernel is used.
                Kernel = new Gaussian()
                {
                    Gamma = 0.01
                },
                Complexity = 350,
                Epsilon    = 0.25
            };
            //Inputs to train SVR. The inputs are the month and the year.
            var trainingInputs = new double[county.Records.Count][];
            //Training outputs of SVR. They are the number of crimes divided by 1000.
            var trainingOutputs = new double[county.Records.Count];

            //Fill the input and output arrays with values.
            var records = county.Records.ToList();
            for (var index = 0; index < records.Count; index++)
            {
                trainingInputs[index]  = new double[] { records[index].Date.Month, records[index].Date.Year };
                trainingOutputs[index] = records[index].AllCrimes / 1000.0;
            }
            //Trains the algorithm.
            var svr = teacher.Learn(trainingInputs, trainingOutputs);

            return(svr);
        }
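The machine returned by TrainSVR scores a single { month, year } pair; because the outputs were divided by 1000 during training, the score has to be scaled back up. A minimal usage sketch, assuming a County instance named county as in the example above (the June 2019 date is only an illustration):

        var model = TrainSVR(county);
        // Score one { month, year } input and undo the /1000 scaling applied to the training outputs.
        double scaledPrediction = model.Score(new double[] { 6, 2019 });
        double predictedCrimes  = scaledPrediction * 1000.0;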
Code Example #2
File: Window2.cs Project: erinwl/AbnormalDetection
        double[] GenerateDataFromData(double[] outputs) // Augment a small sample set
        {
            Accord.Math.Random.Generator.Seed = 0;
            double[][] inputs = new double[outputs.Count()][];
            for (int index = 0; index < outputs.Count(); index++)
            {
                inputs[index] = new double[] { (double)index / outputs.Count() };
            }

            var learn = new SequentialMinimalOptimizationRegression <Polynomial>()
            {
                Kernel     = new Polynomial(2), // Polynomial Kernel of 2nd degree
                Complexity = 100
            };

            // Run the learning algorithm
            SupportVectorMachine <Polynomial> svm = learn.Learn(inputs, outputs);

            int mCount = 1000; // Number of output samples

            double[][] tempinputs = new double[mCount][];
            for (int index = 0; index < mCount; index++)
            {
                tempinputs[index] = new double[] { (double)index / mCount };
            }
            // Compute the predicted scores at the resampled inputs
            double[] predicted = svm.Score(tempinputs);
            return(predicted);
        }
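GenerateDataFromData fits a 2nd-degree polynomial SVR to the input series (indexed on a normalized 0-1 axis) and re-samples it at 1000 evenly spaced points. A usage sketch, with a made-up input series:

        // Expand a short measurement series into a 1000-point series (the values are placeholders).
        double[] raw      = { 0.8, 1.1, 0.9, 1.4, 1.2 };
        double[] expanded = GenerateDataFromData(raw); // expanded.Length == 1000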
Code Example #3
        public void Learn()
        {
            if (Equals(LearningInputs, null))
            {
                return;
            }
            if (Equals(LearningOutputs, null))
            {
                return;
            }

            //Set kernel params:
            UseKernel = KernelEnum.Gaussian;
            InitilizeKernel();

            // Creates a new SMO for regression learning algorithm
            var teacher = new SequentialMinimalOptimizationRegression()
            {
                // Set learning parameters
                Complexity = Param_Complexity,
                Tolerance  = Param_Tolerance,
                Epsilon    = Param_Epsilon,
                Kernel     = kernel
            };

            // Use the teacher to create a machine
            svm = teacher.Learn(LearningInputs, LearningOutputs);

            // Check if we got support vectors
            if (svm.SupportVectors.Length == 0)
            {
                Console.WriteLine("Sorry, No SVMs.");
                return;
            }

            // Compute results for learning and testing data
            _Computed_LearningOutputs = svm.Score(LearningInputs);

            //foreach (double[] itm in TestingInputs)
            //{
            //    foreach (double value in itm)
            //    {
            //        Console.Write(value);
            //    }
            //    Console.WriteLine("");
            //}

            _Computed_TestingOutputs = svm.Score(TestingInputs);

            // foreach (double value in _Computed_TestingOutputs)
            //{
            //   Console.WriteLine(value);
            //}

            // Compute statistical results


            BestLearningScore = Statistics.Compute_DeterminationCoeff_R2(LearningOutputs, _Computed_LearningOutputs);
            BestTestingScore  = Statistics.Compute_DeterminationCoeff_R2(TestingOutputs, _Computed_TestingOutputs);
        }
Code Example #4
        public void Train(MathN::Matrix <double> inputs, MathN::Matrix <double> outputs)
        {
            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputArray = new double[inputs.RowCount][];

            for (int i = 0; i < inputs.RowCount; ++i)
            {
                inputArray[i] = new double[inputs.ColumnCount];

                for (int j = 0; j < inputs.ColumnCount; ++j)
                {
                    inputArray[i][j] = inputs[i, j];
                }
            }

            // Train one machine per output column; results are stored by column index
            // so the parallel loop is thread-safe and the ordering is deterministic.
            var machineArray = new KernelSupportVectorMachine[outputs.ColumnCount];

            Parallel.ForEach(outputs.ColumnEnumerator(), col =>
            {
                int colID            = col.Item1;
                double[] outputArray = col.Item2.ToArray();

                KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: inputs.ColumnCount);
                machineArray[colID] = machine;

                var learn        = new SequentialMinimalOptimizationRegression(machine, inputArray, outputArray);
                learn.Complexity = C;
                learn.Epsilon    = epsilon;
                learn.Run(false);
            });

            machines = new List <KernelSupportVectorMachine>(machineArray);
        }
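Since Train builds one KernelSupportVectorMachine per output column, predicting for a new input means evaluating every machine on the same vector. A sketch using the same Compute call style as the examples below; the input values are placeholders and the vector must have inputs.ColumnCount elements:

        double[] input      = new double[] { 1.0, 2.0 }; // placeholder values, length = number of input columns
        double[] prediction = new double[machines.Count];
        for (int k = 0; k < machines.Count; k++)
        {
            // Each machine predicts one output column.
            prediction[k] = machines[k].Compute(input);
        }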
Code Example #5
        public void TrainTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2);

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
            {
                Complexity = 100
            };

            // Run the learning algorithm
            double error = learn.Run();

            // Compute the answer for one particular example
            double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

            // Check for correct answers
            double[] answers = new double[inputs.Length];
            for (int i = 0; i < answers.Length; i++)
            {
                answers[i] = machine.Compute(inputs[i]);
            }

            Assert.AreEqual(1.0, fxy, 1e-2);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], answers[i], 1e-2);
            }
        }
Code Example #6
        public void learn_test()
        {
            #region doc_learn
            Accord.Math.Random.Generator.Seed = 0;

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimizationRegression <Polynomial>()
            {
                Kernel     = new Polynomial(2), // Polynomial Kernel of 2nd degree
                Complexity = 100
            };

            // Run the learning algorithm
            SupportVectorMachine <Polynomial> svm = learn.Learn(inputs, outputs);

            // Compute the predicted scores
            double[] predicted = svm.Score(inputs);

            // Compute the error between the expected and predicted
            double error = new SquareLoss(outputs).Loss(predicted);

            // Compute the answer for one particular example
            double fxy = svm.Score(inputs[0]); // 1.0003849827673186
            #endregion

            Assert.AreEqual(1.0, fxy, 1e-2);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], predicted[i], 1e-2);
            }
        }
Code Example #7
        public void LearnEO()
        {
            if (Equals(LearningInputs, null))
            {
                return;
            }
            if (Equals(LearningOutputs, null))
            {
                return;
            }

            //Set kernel params:
            UseKernel = KernelEnum.Gaussian;
            if (Equals(kernel, null))
            {
                kernelG = new Gaussian(sigmaKernel);
            }
            else
            {
                kernelG.Sigma = sigmaKernel;
            }

            teacherSMOR        = new SequentialMinimalOptimizationRegression();
            teacherSMOR.Kernel = kernelG;
            teacherSMOR.UseComplexityHeuristic = true;
            teacherSMOR.UseKernelEstimation    = false;

            // Space dimension: must be 4.
            int D = 4;

            List <Interval> intervals = new List <Interval>();

            intervals.Add(new Interval(0.9, 1.2));     //Sigma of Gaussian
            intervals.Add(new Interval(25, 40));       // Complexity
            intervals.Add(new Interval(0.001, 0.001)); // Tolerance
            intervals.Add(new Interval(0.001, 0.001)); // Epsilon

            Optimizer = new PSOGSA_Optimizer(PopulationSize, D, intervals, MaxIterations);
            Optimizer.ObjectiveFunction += Optimizer_ObjectiveFunction;

            Optimizer.LuanchComputation();

            _BestScore    = Optimizer.BestScore;
            _BestSolution = Optimizer.BestSolution;
        }
Code Example #8
            public void LearnEO()
            {
                if (Equals(LearningInputs, null))
                {
                    return;
                }
                if (Equals(LearningOutputs, null))
                {
                    return;
                }

                //Set kernel params:
                UseKernel = KernelEnum.Gaussian;
                if (Equals(kernel, null))
                {
                    kernelG = new Gaussian(sigmaKernel);
                }
                else
                {
                    kernelG.Sigma = sigmaKernel;
                }

                teacherSMOR        = new SequentialMinimalOptimizationRegression();
                teacherSMOR.Kernel = kernelG;
                teacherSMOR.UseComplexityHeuristic = true;
                teacherSMOR.UseKernelEstimation    = false;

                // Space dimension: must be 4.
                int D = 4;

                List <MonoObjectiveEOALib.Range> ranges = new List <MonoObjectiveEOALib.Range>();

                ranges.Add(new MonoObjectiveEOALib.Range(0.1, 10));      //Sigma of Gaussian
                ranges.Add(new MonoObjectiveEOALib.Range(1, 500));       // Complexity
                ranges.Add(new MonoObjectiveEOALib.Range(0.001, 0.001)); // Tolerance
                ranges.Add(new MonoObjectiveEOALib.Range(0.001, 0.05));  // Epsilon

                Optimizer = new PSOGSA_Optimizer(PopulationSize, D, ranges, MaxIterations);
                Optimizer.ObjectiveFunction += Optimizer_ObjectiveFunction;

                Optimizer.Compute();

                _BestScore    = Optimizer.BestScore;
                _BestSolution = Optimizer.BestSolution;
            }
Code Example #9
        private static void kernelSvm2(double[][] inputs, double[] outputs)
        {
            // Create a new Sequential Minimal Optimization (SMO) learning
            // algorithm and estimate the complexity parameter C from data
            var teacher = new SequentialMinimalOptimizationRegression <Gaussian>()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = true // estimate the kernel from the data
            };

            // Teach the vector machine
            var svm = teacher.Learn(inputs, outputs);

            // Classify the samples using the model
            double[] answers = svm.Score(inputs);

            double error = new SquareLoss(outputs).Loss(answers); // training error between expected and predicted
        }
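With UseComplexityHeuristic and UseKernelEstimation enabled, both C and the Gaussian kernel parameter are estimated from the data before learning. For comparison, a sketch of the explicit-parameter form used by other examples on this page; the sigma and Complexity values are placeholders, not what the heuristics would actually choose for a given data set:

            var explicitTeacher = new SequentialMinimalOptimizationRegression <Gaussian>()
            {
                Kernel     = new Gaussian(1.0), // placeholder sigma
                Complexity = 100                // placeholder C
            };
            var explicitSvm = explicitTeacher.Learn(inputs, outputs);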
Code Example #10
        /// <summary>
        /// Train the model.
        /// </summary>
        /// <remarks>
        /// Input is a list of Song objects which are the expected outputs.
        /// This function will pull the bextract values and use the class's bextract subset for training.
        /// </remarks>
        /// <param name="expectedOutputs">List of Song objects to use for training.</param>
        public void Train(List <Song> expectedOutputs)
        {
            //Pull out the expected outputs
            string[] songPaths     = new string[expectedOutputs.Count];
            double[] posOutputs    = new double[expectedOutputs.Count];
            double[] energyOutputs = new double[expectedOutputs.Count];
            for (int i = 0; i < expectedOutputs.Count; i++)
            {
                Song song = expectedOutputs[i];
                songPaths[i]     = song.title;
                posOutputs[i]    = song.positivity;
                energyOutputs[i] = song.energy;
            }

            //Get bextract values
            System.Console.WriteLine(System.DateTime.Now.ToString() + " Extracting features...");
            List <SongDataDTO> songFeatures = getFeatures(songPaths);

            //Stick them in double arrays
            double[][] posInputs    = new double[songFeatures.Count][];
            double[][] energyInputs = new double[songFeatures.Count][];
            for (int i = 0; i < songFeatures.Count; i++)
            {
                ConvertSongDataDtoToDoubleArrays(songFeatures[i], ref posInputs[i], ref energyInputs[i]);
            }

            //Train
            System.Console.WriteLine(System.DateTime.Now.ToString() + " Training positivity.");
            var learn = new SequentialMinimalOptimizationRegression()
            {
                Kernel = new Gaussian(0.25),
                UseComplexityHeuristic = true
            };

            posSvm = learn.Learn(posInputs, posOutputs);

            System.Console.WriteLine(System.DateTime.Now.ToString() + " Training energy.");
            learn = new SequentialMinimalOptimizationRegression()
            {
                Kernel = new Gaussian(0.5),
                UseComplexityHeuristic = true
            };
            energySvm = learn.Learn(energyInputs, energyOutputs);
        }
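Once Train has run, the two machines can score a new song's feature vectors. A sketch of a helper that is not part of the original class (the Predict name and the tuple return type are illustrative only):

        public (double positivity, double energy) Predict(double[] posFeatures, double[] energyFeatures)
        {
            // Score each feature vector with its corresponding trained machine.
            return (posSvm.Score(posFeatures), energySvm.Score(energyFeatures));
        }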
Code Example #11
File: Program.cs Project: jthornca/accord-framework
        private static void linearSvm1()
        {
            // Declare a very simple regression problem
            // with only 2 input variables (x and y):
            double[][] inputs =
            {
                new[] { 3.0, 1.0 },
                new[] { 7.0, 1.0 },
                new[] { 3.0, 1.0 },
                new[] { 3.0, 2.0 },
                new[] { 6.0, 1.0 },
            };

            // The task is to output a weighted sum of those numbers
            // plus an independent constant term: 7.4x + 1.1y + 42
            double[] outputs =
            {
                7.4 * 3.0 + 1.1 * 1.0 + 42.0,
                7.4 * 7.0 + 1.1 * 1.0 + 42.0,
                7.4 * 3.0 + 1.1 * 1.0 + 42.0,
                7.4 * 3.0 + 1.1 * 2.0 + 42.0,
                7.4 * 6.0 + 1.1 * 1.0 + 42.0,
            };

            // Create a new Sequential Minimal Optimization (SMO) learning
            // algorithm and estimate the complexity parameter C from data
            var teacher = new SequentialMinimalOptimizationRegression <Linear>()
            {
                UseComplexityHeuristic = true,
                Complexity             = 100000.0 // Note: do not do this in an actual application!
                                                  // Setting the Complexity property to a very high value forces the SVM
                                                  // to "believe literally" in whatever the data says. Normally, the SVM
                                                  // would be more cautious under the (valid) assumption that the data
                                                  // might actually contain noise and/or incorrect measurements.
            };

            // Teach the vector machine
            var svm = teacher.Learn(inputs, outputs);

            // Classify the samples using the model
            double[] answers = svm.Score(inputs);

            double error = new SquareLoss(outputs).Loss(answers); // should be 0.0
        }
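A quick sanity-check sketch against the generating formula 7.4x + 1.1y + 42, on a point that is not in the training set (the point itself is arbitrary):

            double[] point    = { 5.0, 2.0 };
            double   expected = 7.4 * 5.0 + 1.1 * 2.0 + 42.0; // 81.2
            double   actual   = svm.Score(point);             // should land close to 81.2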
Code Example #12
File: AI.cs Project: MiFuciy/Projects
        public void create_regression(double[,] arr)
        {
            try
            {
                // Creates a matrix from the entire source data table
                double[][] table = arr.ToJagged();

                // Get only the input vector values (first column)
                double[][] inputs = table.GetColumns(0);

                // Get only the outputs (last column)
                double[] outputs = table.GetColumn(1);


                // Create the specified Kernel
                IKernel kernel = createKernel(arr);

                // Creates a new SMO for regression learning algorithm
                var teacher = new SequentialMinimalOptimizationRegression()
                {
                    // Set learning parameters
                    Complexity = 1.0000000,
                    Tolerance  = 0.0010000,
                    Epsilon    = 0.0010000,
                    Kernel     = kernel
                };

                svm = teacher.Learn(inputs, outputs);

                // Show the support vector labels on the scatter plot
                var supportVectorLabels = new double[svm.SupportVectors.Length];
                for (int i = 0; i < supportVectorLabels.Length; i++)
                {
                    int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0];
                    supportVectorLabels[i] = outputs[j];
                }

                testing(arr);
            }
            catch { MessageBox.Show("Kernel create error."); }
        }
Code Example #13
        private void btnCreate_Click(object sender, EventArgs e)
        {
            if (dgvLearningSource.DataSource == null)
            {
                MessageBox.Show("Please load some data first.");
                return;
            }

            // Finishes and save any pending changes to the given data
            dgvLearningSource.EndEdit();



            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first two columns)
            double[][] inputs = table.GetColumns(0).ToArray();

            // Get only the outputs (last column)
            double[] outputs = table.GetColumn(1);


            // Create the specified Kernel
            IKernel kernel = createKernel();


            // Create the Support Vector Machine for 1 input variable
            svm = new KernelSupportVectorMachine(kernel, inputs: 1);

            // Creates a new instance of the SMO for regression learning algorithm
            var smo = new SequentialMinimalOptimizationRegression(svm, inputs, outputs)
            {
                // Set learning parameters
                Complexity = (double)numC.Value,
                Tolerance  = (double)numT.Value,
                Epsilon    = (double)numEpsilon.Value
            };



            try
            {
                // Run
                double error = smo.Run();

                lbStatus.Text = "Training complete!";
            }
            catch (ConvergenceException)
            {
                lbStatus.Text = "Convergence could not be attained. " +
                                "The learned machine might still be usable.";
            }



            // Check if we got support vectors
            if (svm.SupportVectors.Length == 0)
            {
                dgvSupportVectors.DataSource = null;
                graphSupportVectors.GraphPane.CurveList.Clear();
                return;
            }



            // Show support vectors on the Support Vectors tab page
            double[][] supportVectorsWeights = svm.SupportVectors.InsertColumn(svm.Weights);

            string[] supportVectorNames = columnNames.RemoveAt(columnNames.Length - 1).Concatenate("Weight");
            dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights, supportVectorNames);



            // Show the support vector labels on the scatter plot
            double[] supportVectorLabels = new double[svm.SupportVectors.Length];
            for (int i = 0; i < supportVectorLabels.Length; i++)
            {
                int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0];
                supportVectorLabels[i] = outputs[j];
            }

            double[][] graph = svm.SupportVectors.InsertColumn(supportVectorLabels);

            CreateScatterplot(graphSupportVectors, graph.ToMatrix());



            // Get the ranges for each variable (X and Y)
            DoubleRange range = table.GetColumn(0).GetRange();

            double[][] map = Vector.Interval(range, 0.05).ToArray();

            // Classify each point in the Cartesian coordinate system
            double[] result = map.Apply(svm.Compute);
            double[,] surface = map.ToMatrix().InsertColumn(result);

            CreateScatterplot(zedGraphControl2, surface);
        }
Code Example #14
        public void TestSvrAccuracy(int monthsToPredict)
        {
            //Total number of months predicted correctly
            double correctGuesses = 0;
            //Total number of attempts
            double totalGuesses = 0;
            //Actual Low , predicted Low
            double aLpL = 0;
            double aLpN = 0;
            double aLpH = 0;
            //Actual Normal , predicted Low
            double aNpL = 0;
            double aNpN = 0;
            double aNpH = 0;
            //Actual High , predicted Low
            double aHpL = 0;
            double aHpN = 0;
            double aHpH = 0;
            //Precision and recall lists.
            List <double> Precisions = new List <double>();
            List <double> Recalls    = new List <double>();
            //Variable to measure the sum of all deviations to be used in MAE, MSE , RMSE
            double summedDeviations = 0;
            //Variable to measure the sum of all squared deviations to be used in MAE, MSE , RMSE
            double summedSquaredDeviations = 0;
            // Mean Absolute Error, Mean Squared Error, and Root Mean Squared Error
            double MAE      = 0;
            double MSE      = 0;
            double RMSE     = 0;
            var    counties = _db.Counties.Where(x => x.Id != 97 && x.Id != 111 && x.Id != 125 && x.Id != 130 && x.Id != 131).ToList();

            foreach (var county in counties)
            {
                var totalRecords    = county.Records.OrderBy(x => x.Date).ToList();
                var trainingInputs  = new double[totalRecords.Count - monthsToPredict][];
                var trainingOutputs = new double[totalRecords.Count - monthsToPredict];
                var testingInputs   = new double[monthsToPredict][];
                var testingOutputs  = new double[monthsToPredict];
                for (var i = 0; i < totalRecords.Count; i++)
                {
                    if (i < trainingInputs.Length)
                    {
                        trainingInputs[i]  = new double[] { totalRecords[i].Date.Month, totalRecords[i].Date.Year };
                        trainingOutputs[i] = totalRecords[i].AllCrimes / 1000.0;
                    }
                    else
                    {
                        testingInputs[i - trainingInputs.Length]  = new double[] { totalRecords[i].Date.Month, totalRecords[i].Date.Year };
                        testingOutputs[i - trainingInputs.Length] = totalRecords[i].AllCrimes / 1000.0;
                    }
                }
                var teacher = new SequentialMinimalOptimizationRegression()
                {
                    Kernel = new Gaussian()
                    {
                        Gamma = 0.01
                    },
                    Complexity = 280,
                    Epsilon    = 0.25
                };
                var svr    = teacher.Learn(trainingInputs, trainingOutputs);
                var output = svr.Score(testingInputs);

                for (var i = 0; i < output.Length; i++)
                {
                    double prediction     = output[i] * 1000;
                    int    predictionRank = _regression.ReturnPredictedRank(county, output[i] * 1000).Rank.Value;
                    double actual         = testingOutputs[i] * 1000;
                    int    actualRank     = _regression.ReturnPredictedRank(county, testingOutputs[i] * 1000).Rank.Value;
                    if (actualRank == 2 && predictionRank == 2)
                    {
                        aLpL++; correctGuesses++;
                    }
                    if (actualRank == 2 && predictionRank == 3)
                    {
                        aLpN++;
                    }
                    if (actualRank == 2 && predictionRank == 4)
                    {
                        aLpH++;
                    }
                    if (actualRank == 3 && predictionRank == 2)
                    {
                        aNpL++;
                    }
                    if (actualRank == 3 && predictionRank == 3)
                    {
                        aNpN++; correctGuesses++;
                    }
                    if (actualRank == 3 && predictionRank == 4)
                    {
                        aNpH++;
                    }
                    if (actualRank == 4 && predictionRank == 2)
                    {
                        aHpL++;
                    }
                    if (actualRank == 4 && predictionRank == 3)
                    {
                        aHpN++;
                    }
                    if (actualRank == 4 && predictionRank == 4)
                    {
                        aHpH++; correctGuesses++;
                    }
                    summedDeviations        += Math.Abs(prediction - actual);
                    summedSquaredDeviations += (prediction - actual) * (prediction - actual);
                    Debug.WriteLine(actual + "        " + actualRank + "        " + prediction + "        " + predictionRank);
                }
                totalGuesses += testingInputs.Length;
            }
            var accuracy   = (aLpL + aNpN + aHpH) / totalGuesses;
            //Per-class ratios: correct predictions divided by all cases with that actual class.
            var precisionL = aLpL / (aLpL + aLpN + aLpH);
            var precisionN = aNpN / (aNpL + aNpN + aNpH);
            var precisionH = aHpH / (aHpL + aHpN + aHpH);

            MAE  = summedDeviations / totalGuesses;
            MSE  = summedSquaredDeviations / totalGuesses;
            RMSE = Math.Sqrt(MSE);
        }
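The confusion counts collected above can also be turned into per-class precision and recall directly. A sketch for the "Low" class only (plain arithmetic; real code should guard against zero denominators):

            double precisionLow = aLpL / (aLpL + aNpL + aHpL); // of everything predicted Low, the share that really was Low
            double recallLow    = aLpL / (aLpL + aLpN + aLpH); // of everything actually Low, the share that was found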
Code Example #15
        /// <summary>
        /// Compares Linear and SVR models
        /// </summary>
        /// <param name="monthsToPredict"></param>
        public void CompareModels(int monthsToPredict)
        {
            List <double> SlrMAEs  = new List <double>();
            List <double> SlrRMSEs = new List <double>();
            List <double> SvrMAEs  = new List <double>();
            List <double> SvrRMSEs = new List <double>();

            foreach (var county in _db.Counties)
            {
                double SlrAbsoluteErrors = 0;
                double SvrAbsoluteErrors = 0;
                double SlrSquaredErrors  = 0;
                double SvrSquaredErrors  = 0;
                //All records of the county ordered by date.
                List <Record> totalRecords = county.Records.OrderBy(x => x.Date).ToList();
                //Training inputs and outputs.
                var trainingInputs  = new double[totalRecords.Count - monthsToPredict][];
                var trainingOutputs = new double[totalRecords.Count - monthsToPredict];
                //Testing inputs and outputs.
                var testingInputs  = new double[monthsToPredict][];
                var testingOutputs = new double[monthsToPredict];
                //Fill the arrays trainning and testing arrays with inputs and outputs.
                for (var i = 0; i < totalRecords.Count; i++)
                {
                    if (i < trainingInputs.Length)
                    {
                        trainingInputs[i] = new double[] { totalRecords[i].Date.Month, totalRecords[i].Date.Year };
                        //Values are downscaled dividing by 1000
                        trainingOutputs[i] = totalRecords[i].AllCrimes / 1000.0;
                    }
                    else
                    {
                        testingInputs[i - trainingInputs.Length] = new double[] { totalRecords[i].Date.Month, totalRecords[i].Date.Year };
                        //Values are downscaled dividing by 1000
                        testingOutputs[i - trainingInputs.Length] = totalRecords[i].AllCrimes / 1000.0;
                    }
                }
                var slr       = Ols.Learn(trainingInputs, trainingOutputs);
                var outputSlr = slr.Transform(testingInputs);
                var teacher   = new SequentialMinimalOptimizationRegression()
                {
                    Kernel = new Gaussian()
                    {
                        Gamma = 0.01
                    },
                    Complexity = 280,
                    Epsilon    = 0.25
                };
                var svr       = teacher.Learn(trainingInputs, trainingOutputs);
                var outputSvr = svr.Score(testingInputs);
                for (int i = 0; i < testingOutputs.Length; i++)
                {
                    SvrAbsoluteErrors += Math.Abs(testingOutputs[i] - outputSvr[i]);
                    SlrAbsoluteErrors += Math.Abs(testingOutputs[i] - outputSlr[i]);
                    SvrSquaredErrors  += Math.Abs(testingOutputs[i] - outputSvr[i]) * Math.Abs(testingOutputs[i] - outputSvr[i]);
                    SlrSquaredErrors  += Math.Abs(testingOutputs[i] - outputSlr[i]) * Math.Abs(testingOutputs[i] - outputSlr[i]);
                }
                SlrMAEs.Add(SlrAbsoluteErrors / (double)testingOutputs.Length);
                SvrMAEs.Add(SvrAbsoluteErrors / (double)testingOutputs.Length);
                SlrRMSEs.Add(Math.Sqrt(SlrSquaredErrors / (double)testingOutputs.Length));
                SvrRMSEs.Add(Math.Sqrt(SvrSquaredErrors / (double)testingOutputs.Length));
            }
        }
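CompareModels fills the per-county MAE and RMSE lists but never aggregates them. Inside the method, after the county loop, they could be summarized like this (Average is the LINQ extension method; the variable names are illustrative):

            double avgSlrMae  = SlrMAEs.Average();
            double avgSvrMae  = SvrMAEs.Average();
            double avgSlrRmse = SlrRMSEs.Average();
            double avgSvrRmse = SvrRMSEs.Average();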
Code Example #16
        void TestSVM() // Verify that the SVM works correctly
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            //            double[][] inputs = // (x, y)
            //            {
            //    new double[] { 0,  1 }, // 2*0 + 1 =  1
            //    new double[] { 4,  3 }, // 2*4 + 3 = 11
            //    new double[] { 8, -8 }, // 2*8 - 8 =  8
            //    new double[] { 2,  2 }, // 2*2 + 2 =  6
            //    new double[] { 6,  1 }, // 2*6 + 1 = 13
            //    new double[] { 5,  4 }, // 2*5 + 4 = 14
            //    new double[] { 9,  1 }, // 2*9 + 1 = 19
            //    new double[] { 1,  6 }, // 2*1 + 6 =  8
            //};
            double[][] inputs = new double[8][];
            for (int i = 0; i < 8; i++)
            {
                double[] temp = new double[1];
                temp[0]   = Math.PI / 2 / 8 * i;
                inputs[i] = temp;
            }
            //            double[] outputs = // f(x, y)
            //            {
            //    1, 11, 8, 6, 13, 14, 19, 8
            //};
            double[] outputs = new double[8];
            for (int i = 0; i < 8; i++)
            {
                outputs[i] = Math.Sin(Math.PI / 2 / 8 * i);
            }

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimizationRegression <Gaussian>()
            {
                //Kernel = new Polynomial(3), // Polynomial kernel of 3rd degree
                //Kernel = new Gaussian(2),
                Complexity = 100
            };

            // Run the learning algorithm
            SupportVectorMachine <Gaussian> svm = learn.Learn(inputs, outputs);

            // Compute the predicted scores
            double[] predicted = svm.Score(inputs);

            // Compute the error between the expected and predicted
            double error = new SquareLoss(outputs).Loss(predicted);

            // Compute the answer for one particular example
            double fxy = svm.Score(new double[] { 2 }); // note: 2 lies outside the training range [0, 7π/16]

            //double[] myerror = new double[8];
            //for(int i=0;i<8;i++)
            //{
            //    myerror[i]=Math.Abs(predicted[i]-outputs[i])/
            //}
        }
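The commented-out loop at the end can be completed as a per-point absolute error check. A sketch (relative error would divide by zero at the first sample, since sin(0) = 0):

            double[] myerror = new double[8];
            for (int i = 0; i < 8; i++)
            {
                // Absolute difference between the SVM prediction and the true sine value.
                myerror[i] = Math.Abs(predicted[i] - outputs[i]);
            }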