static void Main(string[] args)
        {
            // sample input and output
            double[] inputs  = { 10, 20, 30, 40, 50 };
            double[] outputs = { 1, 2, 3, 4, 5 };

            // 1. Linear Regression
            var learner = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };
            var model = learner.Learn(inputs, outputs);
            var preds = model.Transform(inputs);

            Console.WriteLine("\n\n* Linear Regression Preds: {0}", String.Join(", ", preds));

            // 2. Linear SVM
            var learner2 = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 2.1,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

            var svmInputs = inputs.Select(x => new double[] { x, x }).ToArray();

            var model2 = learner2.Learn(svmInputs, outputs);
            var preds2 = model2.Score(svmInputs);

            Console.WriteLine("\n\n* Linear SVM Preds: {0}", String.Join(", ", preds2));

            // 3. Polynomial SVM
            var learner3 = new FanChenLinSupportVectorRegression<Polynomial>()
            {
                Kernel = new Polynomial(3)
            };
            var model3 = learner3.Learn(svmInputs, outputs);

            var preds3 = model3.Score(svmInputs);

            Console.WriteLine("\n\n* Polynomial SVM Preds: {0}", String.Join(", ", preds3));

            // 4. Gaussian SVM
            var learner4 = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Kernel = new Gaussian()
            };
            var model4 = learner4.Learn(svmInputs, outputs);

            var preds4 = model4.Score(svmInputs);

            Console.WriteLine("\n\n* Gaussian SVM Preds: {0}", String.Join(", ", preds4));


            Console.WriteLine("\n\n\n\nDONE!!");
            Console.ReadKey();
        }
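The snippets on this page are built on Accord.NET (plus Deedle for the data-frame example further down). As a reference, these are roughly the using directives they assume; exact namespaces can vary slightly between Accord.NET versions, so treat this as a sketch rather than a verified header:

using System;
using System.IO;
using System.Linq;
using Accord.Math;                                     // GetColumn, Pow, Shuffle extensions
using Accord.Math.Optimization.Losses;                 // SquareLoss
using Accord.MachineLearning.VectorMachines;           // SupportVectorMachine
using Accord.MachineLearning.VectorMachines.Learning;  // FanChenLinSupportVectorRegression, LinearRegressionNewtonMethod
using Accord.Statistics.Kernels;                       // Gaussian, Polynomial
using Accord.Statistics.Models.Regression.Linear;      // OrdinaryLeastSquares, MultipleLinearRegression
using Deedle;                                          // Frame.ReadCsv
using NUnit.Framework;                                 // Assert (used by the test methods below)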
Example #2
        public void learn_test()
        {
            #region doc_learn
            Accord.Math.Random.Generator.Seed = 0;

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create the sequential minimal optimization teacher
            var learn = new FanChenLinSupportVectorRegression()
            {
                Complexity = 100
            };

            // Run the learning algorithm
            SupportVectorMachine svm = learn.Learn(inputs, outputs);

            // Compute the predicted scores
            double[] predicted = svm.Score(inputs);

            // Compute the error between the expected and predicted
            double error = new SquareLoss(outputs).Loss(predicted);

            // Compute the answer for one particular example
            double fxy = svm.Score(inputs[0]); // 1.000776033448912
            #endregion

            Assert.AreEqual(1.0, fxy, 1e-3);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], predicted[i], 2e-3);
            }
        }
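The test above only checks the training points. Inside the same test one could also score an unseen input; a small sketch (the point (3, 5) is an assumption, not part of the original example, and an epsilon-SVR is not guaranteed to hit the exact value):

// Score a point outside the training set; the data follows f(x, y) = 2x + y,
// so the prediction should land near 2*3 + 5 = 11.
double unseen = svm.Score(new double[] { 3, 5 });
Console.WriteLine(unseen);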
Example #3
        public void Learn(IList<XYtoZ> dsLearn)
        {
            double [][] inputs  = dsLearn.Select(i => new double[] { i.X, i.Y }).ToArray();
            double []   outputs = dsLearn.Select(i => i.Z).ToArray();

            var fclsvr = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Tolerance              = _tolerance,
                UseKernelEstimation    = _useKernelEstimation,
                UseComplexityHeuristic = _useComplexityHeuristic,
                Complexity             = _complexity,
                Kernel = new Gaussian()
            };

            _supportVectorMachine = fclsvr.Learn(inputs, outputs);
        }
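The class above only stores the trained machine in _supportVectorMachine. A minimal companion method (hypothetical, not part of the original snippet) shows how that field could then be used to predict Z for a new (X, Y) pair:

// Hypothetical companion to Learn(): score a single (x, y) point with the
// machine trained by FanChenLinSupportVectorRegression<Gaussian>.
public double Predict(double x, double y)
{
    return _supportVectorMachine.Score(new double[] { x, y });
}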
Example #4
        private static void kernelSvm1(double[][] inputs, double[] outputs)
        {
            // Create a LibSVM-based support vector regression algorithm
            var teacher = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Tolerance  = 1e-5,
                Complexity = 10000,
                Kernel     = new Gaussian(0.1)
            };

            // Use the algorithm to learn the machine
            var svm = teacher.Learn(inputs, outputs);

            // Get machine's predictions for inputs
            double[] prediction = svm.Score(inputs);

            // Compute the error in the prediction (should be 0.0)
            double error = new SquareLoss(outputs).Loss(prediction);

            Console.WriteLine(error);
        }
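kernelSvm1 expects its data to be passed in. A possible call site, reusing the f(x, y) = 2x + y data from Example #2 (this invocation is an assumption, not part of the original excerpt):

// Hypothetical call site for kernelSvm1 using the data from Example #2.
double[][] inputs =
{
    new double[] { 0,  1 }, new double[] { 4,  3 },
    new double[] { 8, -8 }, new double[] { 2,  2 },
    new double[] { 6,  1 }, new double[] { 5,  4 },
    new double[] { 9,  1 }, new double[] { 1,  6 },
};
double[] outputs = { 1, 11, 8, 6, 13, 14, 19, 8 };

kernelSvm1(inputs, outputs); // with Complexity = 10000 the printed training error should be close to 0.0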
Example #5
        public void learn_test_square_polynomial()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x) = x * x

            double[][] inputs = // (x)
            {
                new double[] {  -1 },
                new double[] {   4 },
                new double[] {   8 },
                new double[] {   2 },
                new double[] {   6 },
                new double[] {   5 },
                new double[] {   9 },
                new double[] {   1 },
                new double[] {   6 },
                new double[] {  -5 },
                new double[] {  -2 },
                new double[] {  -3 },
                new double[] {   5 },
                new double[] {   4 },
                new double[] {   1 },
                new double[] {   2 },
                new double[] {   0 },
                new double[] {   4 },
                new double[] {   8 },
                new double[] {   2 },
                new double[] {   6 },
                new double[] {  52 },
                new double[] {  95 },
                new double[] {   1 },
                new double[] {   6 },
                new double[] {   5 },
                new double[] {  -1 },
                new double[] {   2 },
                new double[] {   5 },
                new double[] {   4 },
                new double[] {  -4 },
                new double[] { -50 },
            };

            double[] outputs = inputs.GetColumn(0).Pow(2);

            // Create the sequential minimal optimization teacher
            var learn = new FanChenLinSupportVectorRegression<Polynomial>()
            {
                Kernel     = new Polynomial(degree: 2, constant: 0),
                Complexity = 100
            };

            // Run the learning algorithm
            SupportVectorMachine<Polynomial> svm = learn.Learn(inputs, outputs);

            // Compute the predicted scores
            double[] predicted = svm.Score(inputs);

            // Compute the error between the expected and predicted
            double error = new SquareLoss(outputs).Loss(predicted);

            // Compute the answer for one particular example
            double fxy = svm.Score(inputs[0]); // should be close to 1.0, since f(-1) = (-1)^2 = 1

            Assert.AreEqual(1.0, fxy, 1e-3);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], predicted[i], 2e-3);
            }
        }
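Example #6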
        static void Main(string[] args)
        {
            Console.SetWindowSize(100, 50);

            // Read in the file we created in the previous step
            // TODO: change the path to point to your data directory
            string dataDirPath = @"\\Mac\Home\Documents\c-sharp-machine-learning\ch.5\input-data";

            // Load the data into a data frame
            Console.WriteLine("Loading data...");
            var featuresDF = Frame.ReadCsv(
                Path.Combine(dataDirPath, "features.csv"),
                hasHeaders: true,
                inferTypes: true
                ).FillMissing(0.0);

            // Split the sample set into train and test sets
            double trainProportion = 0.8;

            int[] shuffledIndexes = featuresDF.RowKeys.ToArray();
            shuffledIndexes.Shuffle();

            int trainSetIndexMax = (int)(featuresDF.RowCount * trainProportion);

            // Take the first 80% of the shuffled row keys for training and the rest for testing
            int[] trainIndexes = shuffledIndexes.Take(trainSetIndexMax).ToArray();
            int[] testIndexes  = shuffledIndexes.Skip(trainSetIndexMax).ToArray();

            var trainSet = featuresDF.Where(x => trainIndexes.Contains(x.Key));
            var testSet  = featuresDF.Where(x => testIndexes.Contains(x.Key));

            Console.WriteLine("\nTrain Set Shape: ({0}, {1})", trainSet.RowCount, trainSet.ColumnCount);
            Console.WriteLine("Test Set Shape: ({0}, {1})", testSet.RowCount, testSet.ColumnCount);

            string targetVar = "LogSalePrice";

            string[] features = featuresDF.ColumnKeys.Where(
                x => !x.Equals("Id") && !x.Equals(targetVar) && !x.Equals("SalePrice")
                ).ToArray();

            double[][] trainX = BuildJaggedArray(
                trainSet.Columns[features].ToArray2D<double>(),
                trainSet.RowCount,
                features.Length
                );
            double[][] testX = BuildJaggedArray(
                testSet.Columns[features].ToArray2D<double>(),
                testSet.RowCount,
                features.Length
                );

            double[] trainY = trainSet[targetVar].ValuesAll.ToArray();
            double[] testY  = testSet[targetVar].ValuesAll.ToArray();

            Console.WriteLine("\n**** Linear Regression Model ****");

            // OLS learning algorithm
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true,
                IsRobust     = true
            };

            // Fit a linear regression model
            MultipleLinearRegression regFit = ols.Learn(
                trainX,
                trainY
                );

            // in-sample predictions
            double[] regInSamplePreds = regFit.Transform(trainX);
            // out-of-sample predictions
            double[] regOutSamplePreds = regFit.Transform(testX);

            ValidateModelResults("Linear Regression", regInSamplePreds, regOutSamplePreds, trainX, trainY, testX, testY);

            //Console.WriteLine("\n* Linear Regression Coefficients:");
            //for (int i = 0; i < features.Length; i++)
            //{
            //    Console.WriteLine("\t{0}: {1:0.0000}", features[i], regFit.Weights[i]);
            //}

            //Console.WriteLine("\tIntercept: {0:0.0000}", regFit.Intercept);


            Console.WriteLine("\n**** Linear Support Vector Machine ****");
            // Linear SVM Learning Algorithm
            var teacher = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 0.5,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

            // Train SVM
            var svm = teacher.Learn(trainX, trainY);

            // in-sample predictions
            double[] linSVMInSamplePreds = svm.Score(trainX);
            // out-of-sample predictions
            double[] linSVMOutSamplePreds = svm.Score(testX);

            ValidateModelResults("Linear SVM", linSVMInSamplePreds, linSVMOutSamplePreds, trainX, trainY, testX, testY);

            Console.WriteLine("\n**** Support Vector Machine with Polynomial Kernel ****");
            // SVM with Polynomial Kernel
            var polySVMLearner = new FanChenLinSupportVectorRegression<Polynomial>()
            {
                Epsilon                = 0.1,
                Tolerance              = 1e-5,
                UseKernelEstimation    = true,
                UseComplexityHeuristic = true,
                Kernel = new Polynomial(3)
            };

            // Train SVM with Polynomial Kernel
            var polySvm = polySVMLearner.Learn(trainX, trainY);

            // in-sample predictions
            double[] polySVMInSamplePreds = polySvm.Score(trainX);
            // out-of-sample predictions
            double[] polySVMOutSamplePreds = polySvm.Score(testX);

            ValidateModelResults("Polynomial SVM", polySVMInSamplePreds, polySVMOutSamplePreds, trainX, trainY, testX, testY);


            Console.WriteLine("\n**** Support Vector Machine with Gaussian Kernel ****");
            // SVM with Gaussian Kernel
            var gaussianSVMLearner = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Epsilon             = 0.1,
                Tolerance           = 1e-5,
                Complexity          = 1e-4,
                UseKernelEstimation = true,
                Kernel = new Gaussian()
            };

            // Train SVM with Gaussian Kernel
            var gaussianSvm = gaussianSVMLearner.Learn(trainX, trainY);

            // in-sample predictions
            double[] gaussianSVMInSamplePreds = gaussianSvm.Score(trainX);
            // out-of-sample predictions
            double[] gaussianSVMOutSamplePreds = gaussianSvm.Score(testX);

            ValidateModelResults("Gaussian SVM", gaussianSVMInSamplePreds, gaussianSVMOutSamplePreds, trainX, trainY, testX, testY);


            Console.WriteLine("\n\n\nDONE!!");
            Console.ReadKey();
        }
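The data-frame example above calls two helpers, BuildJaggedArray and ValidateModelResults, that are not included in this excerpt. Minimal sketches of what they might look like (assumptions: BuildJaggedArray converts Deedle's 2-D array into a jagged array, and ValidateModelResults reports in-sample versus out-of-sample squared loss; the original implementations may differ):

// Convert the double[,] returned by Deedle's ToArray2D<double>() into a jagged array.
private static double[][] BuildJaggedArray(double[,] matrix, int rowCount, int columnCount)
{
    var jagged = new double[rowCount][];
    for (int i = 0; i < rowCount; i++)
    {
        jagged[i] = new double[columnCount];
        for (int j = 0; j < columnCount; j++)
        {
            // Guard against missing values that survived FillMissing
            jagged[i][j] = double.IsNaN(matrix[i, j]) ? 0.0 : matrix[i, j];
        }
    }
    return jagged;
}

// Print in-sample and out-of-sample squared loss for a model's predictions.
// trainX and testX are accepted to match the call sites above but are not
// needed for the loss computation in this sketch.
private static void ValidateModelResults(string modelName,
    double[] inSamplePreds, double[] outSamplePreds,
    double[][] trainX, double[] trainY, double[][] testX, double[] testY)
{
    double inSampleError  = new SquareLoss(trainY).Loss(inSamplePreds);
    double outSampleError = new SquareLoss(testY).Loss(outSamplePreds);

    Console.WriteLine("* {0} in-sample error:     {1:0.0000}", modelName, inSampleError);
    Console.WriteLine("* {0} out-of-sample error: {1:0.0000}", modelName, outSampleError);
}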