static void Main(string[] args)
        {
            // sample input and output
            double[] inputs  = { 10, 20, 30, 40, 50 };
            double[] outputs = { 1, 2, 3, 4, 5 };

            // 1. Linear Regression
            var learner = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };
            var model = learner.Learn(inputs, outputs);
            var preds = model.Transform(inputs);

            Console.WriteLine("\n\n* Linear Regression Preds: {0}", String.Join(", ", preds));

            // 2. Linear SVM
            var learner2 = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 2.1,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

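            // Note (added for clarity): the SVM regression teachers below expect
            // jagged multi-dimensional inputs, so the single feature is duplicated
            // into a two-column vector here.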
            var svmInputs = inputs.Select(x => new double[] { x, x }).ToArray();

            var model2 = learner2.Learn(svmInputs, outputs);
            var preds2 = model2.Score(svmInputs);

            Console.WriteLine("\n\n* Linear SVM Preds: {0}", String.Join(", ", preds2));

            // 3. Polynomial SVM
            var learner3 = new FanChenLinSupportVectorRegression<Polynomial>()
            {
                Kernel = new Polynomial(3)
            };
            var model3 = learner3.Learn(svmInputs, outputs);

            var preds3 = model3.Score(svmInputs);

            Console.WriteLine("\n\n* Polynomial SVM Preds: {0}", String.Join(", ", preds3));

            // 4. Gaussian SVM
            var learner4 = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Kernel = new Gaussian()
            };
            var model4 = learner4.Learn(svmInputs, outputs);

            var preds4 = model4.Score(svmInputs);

            Console.WriteLine("\n\n* Gaussian SVM Preds: {0}", String.Join(", ", preds4));


            Console.WriteLine("\n\n\n\nDONE!!");
            Console.ReadKey();
        }
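
These snippets are method bodies lifted out of larger projects. A minimal, approximate set of using directives they rely on (assuming Accord.NET 3.x, Deedle for the data-frame examples, and a unit-test framework such as NUnit for the Assert calls) would sit at the top of the file; helpers such as BuildJaggedArray, ValidateModelResults and the Shuffle extension are project-level code, not Accord API:

using System;
using System.IO;
using System.Linq;
using Accord.MachineLearning.VectorMachines;          // SupportVectorMachine
using Accord.MachineLearning.VectorMachines.Learning; // LinearRegressionNewtonMethod, FanChenLinSupportVectorRegression
using Accord.Math.Optimization.Losses;                // SquareLoss
using Accord.Statistics.Kernels;                      // Linear, Polynomial, Gaussian
using Accord.Statistics.Models.Regression.Linear;     // OrdinaryLeastSquares, MultipleLinearRegression
using Deedle;                                         // Frame
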
        public void RunTest()
        {
            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create a new linear Support Vector Machine
            var machine = new SupportVectorMachine(inputs: 2);

            // Create the Newton-based linear regression teacher
            var learn = new LinearRegressionNewtonMethod(machine, inputs, outputs)
            {
                Complexity = 100000000,
                Epsilon    = 1e-15,
                Tolerance  = 1e-15,
            };

            // Run the learning algorithm
            double error = learn.Run();

            Assert.AreEqual(0.0000000000000000030183002120114053, error);

            // Compute the answer for one particular example
            double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

            // Check for correct answers
            double[] answers = new double[inputs.Length];
            for (int i = 0; i < answers.Length; i++)
            {
                answers[i] = machine.Compute(inputs[i]);
            }

            Assert.AreEqual(1.0, fxy, 1e-5);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], answers[i], 1e-5);
            }
        }
Example #3
        public void learn_test()
        {
            Accord.Math.Random.Generator.Seed = 0;

            #region doc_learn
            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create the Newton-based linear regression teacher
            var learn = new LinearRegressionNewtonMethod()
            {
                Complexity = 100000000,
                Epsilon    = 1e-15,
                Tolerance  = 1e-15,
            };

            // Run the learning algorithm
            var svm = learn.Learn(inputs, outputs);

            // Compute the answer for one particular example
            double fxy = svm.Score(inputs[0]); // 1.000

            // Check for correct answers
            double[] answers = svm.Score(inputs);
            #endregion

            Assert.AreEqual(1.0, fxy, 1e-5);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], answers[i], 1e-2);
            }

            Assert.AreEqual(1, svm.Weights[0]);
            Assert.AreEqual(2, svm.SupportVectors[0][0], 1e-8);
            Assert.AreEqual(1, svm.SupportVectors[0][1], 1e-8);
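
            // Illustrative check (not part of the original test, property names follow
            // Accord.NET 3.x): for a linear machine the score can be reproduced by hand
            // from the compressed support vectors, weights and threshold,
            // i.e. f(x) = sum_i w_i * <sv_i, x> + b.
            double manual = svm.Threshold;
            for (int j = 0; j < svm.SupportVectors.Length; j++)
            {
                double dot = 0;
                for (int d = 0; d < svm.SupportVectors[j].Length; d++)
                    dot += svm.SupportVectors[j][d] * inputs[0][d];
                manual += svm.Weights[j] * dot;
            }
            // manual should be close to fxy (about 1.0)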
        }
        public void RunTest()
        {
            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs = // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                    1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create a new linear Support Vector Machine 
            var machine = new SupportVectorMachine(inputs: 2);

            // Create the Newton-based linear regression teacher
            var learn = new LinearRegressionNewtonMethod(machine, inputs, outputs)
            {
                Complexity = 100000000,
                Epsilon = 1e-15,
                Tolerance = 1e-15,
            };

            // Run the learning algorithm
            double error = learn.Run();
            Assert.AreEqual(860.0, error);

            // Compute the answer for one particular example
            double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

            // Check for correct answers
            double[] answers = new double[inputs.Length];
            for (int i = 0; i < answers.Length; i++)
                answers[i] = machine.Compute(inputs[i]);

            Assert.AreEqual(1.0, fxy, 1e-5);
            for (int i = 0; i < outputs.Length; i++)
                Assert.AreEqual(outputs[i], answers[i], 1e-5);
        }
Example #5
        private SupportVectorMachine<Linear> getSVMRegression(GeoWave geoWave, int labelIdx, bool[] Dim2TakeNode, ref double[] svmApprox)
        {
            SupportVectorMachine<Linear> svmRegression = null;

            double[][] dataForRegression  = new double[geoWave.pointsIdArray.Count][];
            double[]   labelForRegression = new double[geoWave.pointsIdArray.Count];
            int        amountOfFeatures   = training_dt[0].Length;

            for (int i = 0; i < geoWave.pointsIdArray.Count; i++)
            {
                int index = geoWave.pointsIdArray[i];
                dataForRegression[i] = new double[userConfig.nFeatures];
                int k = 0;
                for (int j = 0; j < amountOfFeatures; j++)
                {
                    if (Dim2TakeNode[j])
                    {
                        dataForRegression[i][k] = training_dt[index][j];
                        k++;
                    }
                }
                labelForRegression[i] = training_label[index][labelIdx];
            }

            LinearRegressionNewtonMethod tmpSvmRegression = new LinearRegressionNewtonMethod()
            {
                UseComplexityHeuristic = true
            };

            try
            {
                svmRegression = tmpSvmRegression.Learn(dataForRegression, labelForRegression);
                svmApprox     = svmRegression.Score(dataForRegression);
            }
            catch (Exception)
            {
                return null;
            }
            if (svmApprox.Contains(double.NaN))
            {
                return null;
            }
            return svmRegression;
        }
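
The feature-masking loop above keeps only the columns flagged in Dim2TakeNode. For reference, the same selection can be written with LINQ; a small standalone sketch (the helper name is illustrative and not part of the original class):

        private static double[] MaskFeatures(double[] row, bool[] mask)
        {
            // Keep only the entries whose mask flag is true (requires System.Linq).
            return row.Where((value, j) => mask[j]).ToArray();
        }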
Example #6
        private static void linearSvm2()
        {
            // Declare a very simple regression problem
            // with only 2 input variables (x and y):
            double[][] inputs =
            {
                new[] { 3.0, 1.0 },
                new[] { 7.0, 1.0 },
                new[] { 3.0, 1.0 },
                new[] { 3.0, 2.0 },
                new[] { 6.0, 1.0 },
            };

            // The task is to output a weighted sum of those numbers
            // plus an independent constant term: 7.4x + 1.1y + 42
            double[] outputs =
            {
                7.4 * 3.0 + 1.1 * 1.0 + 42.0,
                7.4 * 7.0 + 1.1 * 1.0 + 42.0,
                7.4 * 3.0 + 1.1 * 1.0 + 42.0,
                7.4 * 3.0 + 1.1 * 2.0 + 42.0,
                7.4 * 6.0 + 1.1 * 1.0 + 42.0,
            };

            // Create Newton-based support vector regression
            var teacher = new LinearRegressionNewtonMethod()
            {
                Tolerance  = 1e-5,
                Complexity = 10000
            };

            // Use the algorithm to learn the machine
            var svm = teacher.Learn(inputs, outputs);

            // Get machine's predictions for inputs
            double[] prediction = svm.Score(inputs);

            // Compute the error in the prediction (should be 0.0)
            double error = new SquareLoss(outputs).Loss(prediction);

            Console.WriteLine(error);
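
            // Follow-up check (the point below is arbitrary and not part of the
            // original sample): since the machine was trained on the exact relation
            // 7.4x + 1.1y + 42, it can also be queried outside the training set.
            double predicted = svm.Score(new[] { 5.0, 2.0 });
            double expected  = 7.4 * 5.0 + 1.1 * 2.0 + 42.0; // = 81.2; prediction should be close
            Console.WriteLine("{0} vs. {1}", predicted, expected);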
        }
        static void Main(string[] args)
        {
            Console.SetWindowSize(100, 50);

            // Read in the file we created in the previous step
            // TODO: change the path to point to your data directory
            string dataDirPath = @"<path-to-your-dir>";

            // Load the data into a data frame
            Console.WriteLine("Loading data...");
            var featuresDF = Frame.ReadCsv(
                Path.Combine(dataDirPath, "eurusd-features.csv"),
                hasHeaders: true,
                inferTypes: true
                );

            // Split the sample set into train and test sets
            double trainProportion  = 0.9;
            int    trainSetIndexMax = (int)(featuresDF.RowCount * trainProportion);

            var trainSet = featuresDF.Where(x => x.Key < trainSetIndexMax);
            var testSet  = featuresDF.Where(x => x.Key >= trainSetIndexMax);

            Console.WriteLine("\nTrain Set Shape: ({0}, {1})", trainSet.RowCount, trainSet.ColumnCount);
            Console.WriteLine("Test Set Shape: ({0}, {1})", testSet.RowCount, testSet.ColumnCount);

            string[] features = new string[] {
                "DailyReturn", "Close_minus_10_MA", "Close_minus_20_MA", "Close_minus_50_MA",
                "Close_minus_200_MA", "20_day_std", "Close_minus_BollingerUpperBound",
                "Close_minus_BollingerLowerBound", "DailyReturn_T-1", "DailyReturn_T-2",
                "DailyReturn_T-3", "DailyReturn_T-4", "DailyReturn_T-5",
                "Close_minus_10_MA_T-1",
                "Close_minus_10_MA_T-2", "Close_minus_10_MA_T-3", "Close_minus_10_MA_T-4",
                "Close_minus_10_MA_T-5", "Close_minus_20_MA_T-1", "Close_minus_20_MA_T-2",
                "Close_minus_20_MA_T-3", "Close_minus_20_MA_T-4", "Close_minus_20_MA_T-5",
                "Close_minus_50_MA_T-1", "Close_minus_50_MA_T-2", "Close_minus_50_MA_T-3",
                "Close_minus_50_MA_T-4", "Close_minus_50_MA_T-5", "Close_minus_200_MA_T-1",
                "Close_minus_200_MA_T-2", "Close_minus_200_MA_T-3", "Close_minus_200_MA_T-4",
                "Close_minus_200_MA_T-5",
                "Close_minus_BollingerUpperBound_T-1",
                "Close_minus_BollingerUpperBound_T-2", "Close_minus_BollingerUpperBound_T-3",
                "Close_minus_BollingerUpperBound_T-4", "Close_minus_BollingerUpperBound_T-5"
            };

            double[][] trainX = BuildJaggedArray(
                trainSet.Columns[features].ToArray2D<double>(),
                trainSet.RowCount,
                features.Length
                );
            double[][] testX = BuildJaggedArray(
                testSet.Columns[features].ToArray2D<double>(),
                testSet.RowCount,
                features.Length
                );

            double[] trainY = trainSet["Target"].ValuesAll.ToArray();
            double[] testY  = testSet["Target"].ValuesAll.ToArray();

            Console.WriteLine("\n**** Linear Regression Model ****");

            // OLS learning algorithm
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Fit a linear regression model
            MultipleLinearRegression regFit = ols.Learn(trainX, trainY);

            // in-sample predictions
            double[] regInSamplePreds = regFit.Transform(trainX);
            // out-of-sample predictions
            double[] regOutSamplePreds = regFit.Transform(testX);

            ValidateModelResults("Linear Regression", regInSamplePreds, regOutSamplePreds, trainX, trainY, testX, testY);

            Console.WriteLine("\n* Linear Regression Coefficients:");
            for (int i = 0; i < features.Length; i++)
            {
                Console.WriteLine("\t{0}: {1:0.0000}", features[i], regFit.Weights[i]);
            }

            Console.WriteLine("\tIntercept: {0:0.0000}", regFit.Intercept);


            Console.WriteLine("\n**** Linear Support Vector Machine ****");
            // Linear SVM Learning Algorithm
            var teacher = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 2.1,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

            // Train SVM
            var svm = teacher.Learn(trainX, trainY);

            // in-sample predictions
            double[] linSVMInSamplePreds = svm.Score(trainX);
            // out-of-sample predictions
            double[] linSVMOutSamplePreds = svm.Score(testX);

            ValidateModelResults("Linear SVM", linSVMInSamplePreds, linSVMOutSamplePreds, trainX, trainY, testX, testY);

            Console.WriteLine("\n\n\nDONE!!");
            Console.ReadKey();
        }
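
The helper BuildJaggedArray referenced above is not shown in these snippets. A plausible minimal sketch, assuming it only copies Deedle's 2D export into a jagged array and maps missing values (NaN) to 0; the original project helper may differ:

        private static double[][] BuildJaggedArray(double[,] ary2d, int rowCount, int columnCount)
        {
            double[][] matrix = new double[rowCount][];
            for (int i = 0; i < rowCount; i++)
            {
                matrix[i] = new double[columnCount];
                for (int j = 0; j < columnCount; j++)
                {
                    // ToArray2D<double>() yields NaN for missing cells
                    matrix[i][j] = double.IsNaN(ary2d[i, j]) ? 0.0 : ary2d[i, j];
                }
            }
            return matrix;
        }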
        static void Main(string[] args)
        {
            Console.SetWindowSize(100, 50);

            // Read in the file we created in the previous step
            // TODO: change the path to point to your data directory
            string dataDirPath = @"\\Mac\Home\Documents\c-sharp-machine-learning\ch.5\input-data";

            // Load the data into a data frame
            Console.WriteLine("Loading data...");
            var featuresDF = Frame.ReadCsv(
                Path.Combine(dataDirPath, "features.csv"),
                hasHeaders: true,
                inferTypes: true
                ).FillMissing(0.0);

            // Split the sample set into train and test sets
            double trainProportion = 0.8;

            int[] shuffledIndexes = featuresDF.RowKeys.ToArray();
            shuffledIndexes.Shuffle();

            int trainSetIndexMax = (int)(featuresDF.RowCount * trainProportion);

            int[] trainIndexes = shuffledIndexes.Where(i => i < trainSetIndexMax).ToArray();
            int[] testIndexes  = shuffledIndexes.Where(i => i >= trainSetIndexMax).ToArray();

            var trainSet = featuresDF.Where(x => trainIndexes.Contains(x.Key));
            var testSet  = featuresDF.Where(x => testIndexes.Contains(x.Key));

            Console.WriteLine("\nTrain Set Shape: ({0}, {1})", trainSet.RowCount, trainSet.ColumnCount);
            Console.WriteLine("Test Set Shape: ({0}, {1})", testSet.RowCount, testSet.ColumnCount);

            string targetVar = "LogSalePrice";

            string[] features = featuresDF.ColumnKeys.Where(
                x => !x.Equals("Id") && !x.Equals(targetVar) && !x.Equals("SalePrice")
                ).ToArray();

            double[][] trainX = BuildJaggedArray(
                trainSet.Columns[features].ToArray2D<double>(),
                trainSet.RowCount,
                features.Length
                );
            double[][] testX = BuildJaggedArray(
                testSet.Columns[features].ToArray2D<double>(),
                testSet.RowCount,
                features.Length
                );

            double[] trainY = trainSet[targetVar].ValuesAll.ToArray();
            double[] testY  = testSet[targetVar].ValuesAll.ToArray();

            Console.WriteLine("\n**** Linear Regression Model ****");

            // OLS learning algorithm
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true,
                IsRobust     = true
            };

            // Fit a linear regression model
            MultipleLinearRegression regFit = ols.Learn(
                trainX,
                trainY
                );

            // in-sample predictions
            double[] regInSamplePreds = regFit.Transform(trainX);
            // out-of-sample predictions
            double[] regOutSamplePreds = regFit.Transform(testX);

            ValidateModelResults("Linear Regression", regInSamplePreds, regOutSamplePreds, trainX, trainY, testX, testY);

            //Console.WriteLine("\n* Linear Regression Coefficients:");
            //for (int i = 0; i < features.Length; i++)
            //{
            //    Console.WriteLine("\t{0}: {1:0.0000}", features[i], regFit.Weights[i]);
            //}

            //Console.WriteLine("\tIntercept: {0:0.0000}", regFit.Intercept);


            Console.WriteLine("\n**** Linear Support Vector Machine ****");
            // Linear SVM Learning Algorithm
            var teacher = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 0.5,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

            // Train SVM
            var svm = teacher.Learn(trainX, trainY);

            // in-sample predictions
            double[] linSVMInSamplePreds = svm.Score(trainX);
            // out-of-sample predictions
            double[] linSVMOutSamplePreds = svm.Score(testX);

            ValidateModelResults("Linear SVM", linSVMInSamplePreds, linSVMOutSamplePreds, trainX, trainY, testX, testY);

            Console.WriteLine("\n**** Support Vector Machine with Polynomial Kernel ****");
            // SVM with Polynomial Kernel
            var polySVMLearner = new FanChenLinSupportVectorRegression<Polynomial>()
            {
                Epsilon                = 0.1,
                Tolerance              = 1e-5,
                UseKernelEstimation    = true,
                UseComplexityHeuristic = true,
                Kernel = new Polynomial(3)
            };

            // Train SVM with Polynomial Kernel
            var polySvm = polySVMLearner.Learn(trainX, trainY);

            // in-sample predictions
            double[] polySVMInSamplePreds = polySvm.Score(trainX);
            // out-of-sample predictions
            double[] polySVMOutSamplePreds = polySvm.Score(testX);

            ValidateModelResults("Polynomial SVM", polySVMInSamplePreds, polySVMOutSamplePreds, trainX, trainY, testX, testY);


            Console.WriteLine("\n**** Support Vector Machine with Gaussian Kernel ****");
            // SVM with Gaussian Kernel
            var gaussianSVMLearner = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Epsilon             = 0.1,
                Tolerance           = 1e-5,
                Complexity          = 1e-4,
                UseKernelEstimation = true,
                Kernel = new Gaussian()
            };

            // Train SVM with Gaussian Kernel
            var gaussianSvm = gaussianSVMLearner.Learn(trainX, trainY);

            // in-sample predictions
            double[] gaussianSVMInSamplePreds = gaussianSvm.Score(trainX);
            // out-of-sample predictions
            double[] gaussianSVMOutSamplePreds = gaussianSvm.Score(testX);

            ValidateModelResults("Gaussian SVM", gaussianSVMInSamplePreds, gaussianSVMOutSamplePreds, trainX, trainY, testX, testY);


            Console.WriteLine("\n\n\nDONE!!");
            Console.ReadKey();
        }
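
ValidateModelResults is another project-level helper referenced but not defined in these snippets. A minimal sketch of what such a helper might report, computing RMSE by hand for the in-sample and out-of-sample predictions (the metric choice is an assumption; the trainX/testX arguments are kept only to match the call sites above):

        private static void ValidateModelResults(string modelName,
            double[] inSamplePreds, double[] outSamplePreds,
            double[][] trainX, double[] trainY,
            double[][] testX, double[] testY)
        {
            // Mean squared error, computed directly
            double inSampleMSE = 0.0, outSampleMSE = 0.0;
            for (int i = 0; i < trainY.Length; i++)
                inSampleMSE += Math.Pow(trainY[i] - inSamplePreds[i], 2) / trainY.Length;
            for (int i = 0; i < testY.Length; i++)
                outSampleMSE += Math.Pow(testY[i] - outSamplePreds[i], 2) / testY.Length;

            Console.WriteLine("* {0} - in-sample RMSE: {1:0.0000}", modelName, Math.Sqrt(inSampleMSE));
            Console.WriteLine("* {0} - out-of-sample RMSE: {1:0.0000}", modelName, Math.Sqrt(outSampleMSE));
        }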