示例#1
0
        public void TestFitIntercept()
        {
            // Two-feature and three-feature designs, two samples each.
            var designTwoFeatures = DenseMatrix.OfArray(new[, ]
            {
                { 0.38349978, 0.61650022 },
                { 0.58853682, 0.41146318 }
            });
            var designThreeFeatures = DenseMatrix.OfArray(new[, ]
            {
                { 0.27677969, 0.70693172, 0.01628859 },
                { 0.08385139, 0.20692515, 0.70922346 }
            });
            var target = DenseVector.OfEnumerable(new double[] { 1, 1 });

            // Fit each design both with and without an intercept term.
            var lr2WithoutIntercept = new LinearRegression(fitIntercept: false);
            lr2WithoutIntercept.Fit(designTwoFeatures, target);

            var lr2WithIntercept = new LinearRegression(fitIntercept: true);
            lr2WithIntercept.Fit(designTwoFeatures, target);

            var lr3WithoutIntercept = new LinearRegression(fitIntercept: false);
            lr3WithoutIntercept.Fit(designThreeFeatures, target);

            var lr3WithIntercept = new LinearRegression(fitIntercept: true);
            lr3WithIntercept.Fit(designThreeFeatures, target);

            // The intercept setting must not change the coefficient count.
            Assert.AreEqual(lr2WithIntercept.Coef.Column(0).Count,
                            lr2WithoutIntercept.Coef.Column(0).Count);
            Assert.AreEqual(lr3WithIntercept.Coef.Column(0).Count,
                            lr3WithoutIntercept.Coef.Column(0).Count);
        }
示例#2
0
        public void TestLinearRegression()
        {
            // Control.LinearAlgebraProvider = new MklLinearAlgebraProvider();
            // Simple one-feature dataset where y == x, so the slope must be 1.
            var samples = DenseMatrix.OfArray(new double[, ] {
                { 1 }, { 2 }
            });
            var targets = DenseVector.OfEnumerable(new double[] { 1, 2 });

            var regression = new LinearRegression();
            regression.Fit(samples, targets);

            Assert.AreEqual(1.0, regression.Coef.Column(0)[0], 1E-5);
            //Assert.AreEqual(0.0, clf.Intercept[0]);
            Assert.IsTrue(DenseVector.OfEnumerable(new double[] { 1, 2 }).AlmostEquals(regression.Predict(samples).Column(0)));

            // Degenerate input: a single sample with target 0 and no intercept
            // must produce a zero coefficient and a zero prediction.
            samples = DenseMatrix.OfArray(new double[, ] {
                { 1 }
            });
            targets = DenseVector.OfEnumerable(new double[] { 0 });

            regression = new LinearRegression(fitIntercept: false);
            regression.Fit(samples, targets);
            Assert.AreEqual(0.0, regression.Coef.Column(0)[0]);
            //assert_array_almost_equal(clf.intercept_, [0])
            Assert.AreEqual(0.0, regression.Predict(samples).Column(0)[0]);
        }
        private static void TestLinearRegression(FeatureVector training, FeatureVector test)
        {
            // Train a linear regression on the training vector, score the
            // held-out vector, then print and evaluate the predictions.
            var regression = new LinearRegression();
            var model      = (LinearRegressionModel)regression.Fit(training);
            var predicted  = model.transform(test);

            PrintPredictionsAndEvaluate(predicted);
        }
示例#4
0
        public void TestLinearRegressionSparse()
        {
            // With an identity design matrix, the fitted coefficients plus the
            // intercept must reproduce the random beta used to generate y.
            const int size = 100;

            Matrix features     = SparseMatrix.Identity(size);
            var    coefficients = DenseVector.CreateRandom(size, new Normal());
            var    targets      = features * coefficients;

            var ols = new LinearRegression(fitIntercept: true);
            ols.Fit(features, targets);

            Assert.IsTrue((ols.Coef.Row(0) + ols.Intercept[0]).AlmostEquals(coefficients));
        }
示例#5
0
        public void TestLinearRegressionSparseMultipleOutcome()
        {
            // Build a sparse design matrix and stack the same target column
            // twice, so both outcomes must yield identical predictions.
            var             random    = new Random(0);
            var             r         = SampleGenerator.MakeSparseUncorrelated(random: random);
            Matrix          x         = SparseMatrix.OfMatrix(r.X);
            Vector <double> y         = r.Y.Column(0);
            Matrix          y1        = DenseMatrix.OfColumns(y.Count, 2, new[] { y, y });
            int             nFeatures = x.ColumnCount;

            var ols = new LinearRegression();

            ols.Fit(x, y1);
            // One coefficient row per outcome, one column per feature.
            // (The original asserted this twice on consecutive lines — a
            // copy-paste duplicate; the redundant assertion was removed.)
            Assert.AreEqual(Tuple.Create(2, nFeatures), ols.Coef.Shape());
            Matrix <double> yPred = ols.Predict(x);

            // Refitting on the single-column target must match both stacked outcomes.
            ols.Fit(x, y);
            Matrix <double> yPred1 = ols.Predict(x);

            Assert.IsTrue(yPred1.Column(0).AlmostEquals(yPred.Column(0)));
            Assert.IsTrue(yPred1.Column(0).AlmostEquals(yPred.Column(1)));
        }
示例#6
0
        public void TestLinearRegressionMultipleOutcome()
        {
            var result = SampleGenerator.MakeRegression(shuffle: false, random: new Random(0));

            // Stack the same target column twice so both outcomes are identical.
            Matrix y = DenseMatrix.OfColumns(
                result.Y.RowCount,
                2,
                new[] { result.Y.Column(0), result.Y.Column(0) });

            // BUG FIX: the feature count is the number of COLUMNS of X, not rows
            // (the sibling sparse test uses x.ColumnCount). The original
            // `result.X.RowCount` presumably only passed when MakeRegression
            // produced a square design matrix.
            var numFeatures = result.X.ColumnCount;

            var clf = new LinearRegression(fitIntercept: true);

            clf.Fit(result.X, y);
            // One coefficient row per outcome, one column per feature.
            Assert.AreEqual(Tuple.Create(2, numFeatures), clf.Coef.Shape());

            Matrix <double> yPred = clf.Predict(result.X);

            // Refitting on the original target must give matching predictions.
            clf.Fit(result.X, result.Y);
            Matrix <double> yPred1 = clf.Predict(result.X);

            Assert.IsTrue(yPred1.Column(0).AlmostEquals(yPred.Column(0)));
            Assert.IsTrue(yPred1.Column(0).AlmostEquals(yPred.Column(1)));
        }
示例#7
0
        /// <summary>
        /// Ridge regression with alpha == 0 reduces to ordinary least squares,
        /// so both estimators must learn the same coefficients.
        /// </summary>
        public void TestRidgeVsLstsq()
        {
            var random = new Random(0);
            // we need more samples than features
            const int nSamples  = 5;
            const int nFeatures = 4;
            var       y         = DenseVector.CreateRandom(nSamples, new Normal {
                RandomSource = random
            });
            var x = DenseMatrix.CreateRandom(nSamples, nFeatures, new Normal {
                RandomSource = random
            });

            var ridge = new RidgeRegression(alpha: 0.0, fitIntercept: false);
            var ols   = new LinearRegression(fitIntercept: false);

            ridge.Fit(x, y);
            ols.Fit(x, y);
            Assert.IsTrue(ridge.Coef.AlmostEquals(ols.Coef));

            // NOTE(review): this second fit/assert pair is byte-identical to the
            // first. In the upstream scikit-learn test the second round refits on
            // a sparse matrix — presumably that variant was dropped in the port.
            // Confirm whether the repeat is intentional (refit stability) or dead
            // duplication.
            ridge.Fit(x, y);
            ols.Fit(x, y);
            Assert.IsTrue(ridge.Coef.AlmostEquals(ols.Coef));
        }
示例#8
0
        /// <summary>
        /// Divergence detector: finds local extrema (swing highs/lows) of a
        /// moving average and of a secondary indicator, fits a regression line
        /// through each extrema set, and derives a signal from the slopes.
        /// Returns NaN until the moving average has enough data.
        /// </summary>
        public override double HandleNextTick(Tick t)
        {
            double value = double.NaN;

            // Feed the new tick into both rolling series first.
            MA.HandleNextTick(t);
            indicator.HandleNextTick(t);

            // Only proceed once the moving average head is populated (non-NaN).
            if (!MA[0].Equals(double.NaN))
            {
                double[] MAArray  = MA.ToArray();
                double[] indArray = indicator.ToArray();
                // Keyed by bar index so extrema stay ordered by position.
                SortedList <double, double> tickHighs = new SortedList <double, double>();
                SortedList <double, double> tickLows  = new SortedList <double, double>();
                SortedList <double, double> indHighs  = new SortedList <double, double>();
                SortedList <double, double> indLows   = new SortedList <double, double>();
                // Point i-1 is a local high when both neighbours are lower,
                // and a local low when both neighbours are higher.
                for (int i = 2; i < MAArray.Length; i++)
                {
                    //high
                    if (MAArray[i] < MAArray[i - 1] && MAArray[i - 1] > MAArray[i - 2])
                    {
                        tickHighs.Add(i - 1, MAArray[i - 1]);
                    }
                    if (indArray[i] < indArray[i - 1] && indArray[i - 1] > indArray[i - 2])
                    {
                        indHighs.Add(i - 1, indArray[i - 1]);
                    }

                    //low
                    if (MAArray[i] > MAArray[i - 1] && MAArray[i - 1] < MAArray[i - 2])
                    {
                        tickLows.Add(i - 1, MAArray[i - 1]);
                    }
                    if (indArray[i] > indArray[i - 1] && indArray[i - 1] < indArray[i - 2])
                    {
                        indLows.Add(i - 1, indArray[i - 1]);
                    }
                }

                // Need at least one extremum of each kind on both series before
                // a regression line can be fitted.
                if (tickHighs.Count > 0 && indHighs.Count > 0 &&
                    tickLows.Count > 0 && indLows.Count > 0)
                {
                    LinearRegression lrTickHighs = new LinearRegression();
                    LinearRegression lrTickLows  = new LinearRegression();
                    LinearRegression lrindkHighs = new LinearRegression();
                    LinearRegression lrindLows   = new LinearRegression();

                    // Fit a line through each extrema set (bar index -> value).
                    lrTickHighs.Model(tickHighs.Keys.ToArray(), tickHighs.Values.ToArray());
                    lrTickLows.Model(tickLows.Keys.ToArray(), tickLows.Values.ToArray());
                    lrindkHighs.Model(indHighs.Keys.ToArray(), indHighs.Values.ToArray());
                    lrindLows.Model(indLows.Keys.ToArray(), indLows.Values.ToArray());

                    // Opposing slopes between price highs and indicator highs
                    // (X2 is presumably the fitted slope — TODO confirm) signal
                    // a divergence: +1 bearish-side, -1 bullish-side, else 0.
                    if (lrTickHighs.X2 > 0 && lrindkHighs.X2 < 0)
                    {
                        value = 1;
                    }
                    else if (lrTickLows.X2 < 0 && lrindLows.X2 > 0)
                    {
                        value = -1;
                    }
                    else
                    {
                        value = 0;
                    }

                    // NOTE(review): this unconditionally overwrites the 1/-1/0
                    // signal computed just above with the indicator-highs line
                    // evaluated at indArray[0] — looks like leftover debugging or
                    // a bug; confirm which value downstream consumers expect.
                    value = lrindkHighs.Fit(indArray[0]);
                }
            }

            indicatorData.Enqueue(value);
            return(value);
        }
示例#9
0
        /// <summary>
        /// Builds a feature matrix from the indicators the user ticked, splits
        /// it chronologically into training/test sets, trains linear regression,
        /// logistic regression and naive Bayes (cross-validated when
        /// numFolds > 0), and displays the three accuracies.
        /// </summary>
        private void buttonForDataSplitNext_Click(object sender, EventArgs e)
        {
            trainingSetPercentage = (double)numericUpDownForTrainingSetPercent.Value / 100.0;
            numFolds = (int)numericUpDownForNumFolds.Value;

            double[] smaOut         = null;
            double[] wmaOut         = null;
            double[] emaOut         = null;
            double[] macdOut        = null;
            double[] stochasticsOut = null;
            double[] williamsROut   = null;
            double[] rsiOut         = null;
            double[] closesOut      = null;

            var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

            // Compute only the indicator series the user selected.
            if (isSMAChecked)
            {
                smaOut = IndicatorDataPreprocessor.GetSMAOut(MovingAverage.Simple(code, targetDate, smaPeriod, numberOfData));
            }
            if (isWMAChecked)
            {
                wmaOut = IndicatorDataPreprocessor.GetWMAOut(MovingAverage.Weighted(code, targetDate, wmaPeriod, numberOfData));
            }
            if (isEMAChecked)
            {
                emaOut = IndicatorDataPreprocessor.GetEMAOut(MovingAverage.Exponential(code, targetDate, emaPeriod, numberOfData));
            }
            if (isMACDChecked)
            {
                macdOut = IndicatorDataPreprocessor.GetMACDOut(new MovingAverageConvergenceDivergence(code, targetDate, firstPeriod, secondPeriod, triggerPeriod, numberOfData));
            }
            if (isStochasticsChecked)
            {
                stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(new Stochastics(code, targetDate, fastKPeriod, fastDPeriod, slowDPeriod, numberOfData));
            }
            if (isWilliamsRChecked)
            {
                williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(WilliamsR.Wsr(code, targetDate, williamsRPeriod, numberOfData));
            }
            if (isRSIChecked)
            {
                rsiOut = IndicatorDataPreprocessor.GetRSIOut(RelativeStrengthIndex.Rsi(code, targetDate, rsiPeriod, numberOfData));
            }
            closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);

            // All series are truncated to the shortest one so the feature
            // columns line up row-for-row. The large sentinel is only used when
            // smaOut is null; otherwise the first branch seeds the minimum.
            int minRowCount = 1000000;

            if (smaOut != null)
            {
                minRowCount = smaOut.Length;
            }
            if (wmaOut != null)
            {
                minRowCount = minRowCount < wmaOut.Length ? minRowCount : wmaOut.Length;
            }
            if (emaOut != null)
            {
                minRowCount = minRowCount < emaOut.Length ? minRowCount : emaOut.Length;
            }
            if (macdOut != null)
            {
                minRowCount = minRowCount < macdOut.Length ? minRowCount : macdOut.Length;
            }
            if (rsiOut != null)
            {
                minRowCount = minRowCount < rsiOut.Length ? minRowCount : rsiOut.Length;
            }
            if (williamsROut != null)
            {
                minRowCount = minRowCount < williamsROut.Length ? minRowCount : williamsROut.Length;
            }
            if (stochasticsOut != null)
            {
                minRowCount = minRowCount < stochasticsOut.Length ? minRowCount : stochasticsOut.Length;
            }
            if (closesOut != null)
            {
                minRowCount = minRowCount < closesOut.Length ? minRowCount : closesOut.Length;
            }

            // Assemble the feature vector; values are stored as invariant-culture
            // strings, with closing prices as the "label" column.
            var fv = new FeatureVector();

            if (isSMAChecked)
            {
                fv.AddColumn("SMA", smaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isWMAChecked)
            {
                fv.AddColumn("WMA", wmaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isEMAChecked)
            {
                fv.AddColumn("EMA", emaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isMACDChecked)
            {
                fv.AddColumn("MACD", macdOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isRSIChecked)
            {
                fv.AddColumn("RSI", rsiOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isWilliamsRChecked)
            {
                fv.AddColumn("WilliamsR", williamsROut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (isStochasticsChecked)
            {
                fv.AddColumn("Stochastics", stochasticsOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            fv.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p).ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());

            // Chronological split: the first trainingSetPercentage of rows go to
            // training, the remainder to test.
            var training = new FeatureVector();
            var test     = new FeatureVector();
            int count    = fv.Values[0].Length;

            for (int i = 0; i < fv.ColumnName.Count; i++)
            {
                training.AddColumn(fv.ColumnName[i], fv.Values[i].Take((int)(count * trainingSetPercentage)).ToArray());
            }

            for (int i = 0; i < fv.ColumnName.Count; i++)
            {
                test.AddColumn(fv.ColumnName[i], fv.Values[i].Skip((int)(count * trainingSetPercentage)).Take(count).ToArray()); // Take(count) means take the rest of all elements, number of the rest of the elements is smaller than count.
            }

            // Train all three models, cross-validated when folds were requested.
            if (numFolds > 0)
            {
                BinaryClassificationEvaluator bce1    = new BinaryClassificationEvaluator();
                LinearRegression    linearRegression  = new LinearRegression();
                CrossValidator      cvLinReg          = new CrossValidator(linearRegression, bce1, numFolds);
                CrossValidatorModel cvLinRegModel     = (CrossValidatorModel)cvLinReg.Fit(training);
                FeatureVector       linRegPredictions = cvLinRegModel.transform(test);
                bce1.evaluate(linRegPredictions);
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                LogisticRegression            logisticRegression = new LogisticRegression();
                CrossValidator      cvLogReg          = new CrossValidator(logisticRegression, bce2, numFolds);
                CrossValidatorModel cvLogRegModel     = (CrossValidatorModel)cvLogReg.Fit(training);
                FeatureVector       logRegPredictions = cvLogRegModel.transform(test);
                bce2.evaluate(logRegPredictions);
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3    = new BinaryClassificationEvaluator();
                NaiveBayes          naiveBayes        = new NaiveBayes();
                CrossValidator      cvNaiBay          = new CrossValidator(naiveBayes, bce3, numFolds);
                CrossValidatorModel cvNaiBayModel     = (CrossValidatorModel)cvNaiBay.Fit(training);
                FeatureVector       naiBayPredictions = cvNaiBayModel.transform(test);
                bce3.evaluate(naiBayPredictions);
                naiBayAcc = bce3.Accuracy;
            }
            else
            {
                BinaryClassificationEvaluator bce1          = new BinaryClassificationEvaluator();
                LinearRegression      linearRegression      = new LinearRegression();
                LinearRegressionModel linearRegressionModel = (LinearRegressionModel)linearRegression.Fit(training);
                FeatureVector         linRegPredictions     = linearRegressionModel.transform(test);
                bce1.evaluate(linRegPredictions);
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                LogisticRegression            logicticRegression      = new LogisticRegression();
                LogisticRegressionModel       logisticRegressionModel = (LogisticRegressionModel)logicticRegression.Fit(training);
                FeatureVector logRegPredictions = logisticRegressionModel.transform(test);
                bce2.evaluate(logRegPredictions);
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
                NaiveBayes      naiveBayes         = new NaiveBayes();
                NaiveBayesModel naiveBayesModel    = (NaiveBayesModel)naiveBayes.Fit(training);
                FeatureVector   naiBayPredictions  = naiveBayesModel.transform(test);
                bce3.evaluate(naiBayPredictions);
                naiBayAcc = bce3.Accuracy;
            }

            // Show the resulting accuracies on the results panel.
            labelForLinRegAcc.Text = linRegAcc.ToString();
            labelForLogRegAcc.Text = logRegAcc.ToString();
            labelForNaiBayAcc.Text = naiBayAcc.ToString();

            panelForResults.BringToFront();
        }
        /// <summary>
        /// Builds a feature vector from the pre-computed indicator series named
        /// in <paramref name="indicators"/>, splits it chronologically into
        /// training/test sets, trains the requested algorithm (optionally with
        /// 10-fold cross-validation), and returns its binary-classification
        /// accuracy on the test set. Also dumps the feature vector to a
        /// hard-coded CSV path as a side effect.
        /// </summary>
        private static double CalculateAccuracy(List <int> indicators, int mlAlgorithm, bool isCrossValidationEnabled, int minRowCount, double trainingSetPercentage, double[] smaOut, double[] wmaOut, double[] emaOut, double[] macdOut, double[] rsiOut, double[] williamsROut, double[] stochasticsOut, double[] closesOut)
        {
            FeatureVector vector = new FeatureVector();

            // Add one column per selected indicator; values are stored as
            // invariant-culture strings, truncated to minRowCount so rows align.
            if (indicators.Contains(IndicatorService.SMA))
            {
                vector.AddColumn("SMA", smaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.WMA))
            {
                vector.AddColumn("WMA", wmaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.EMA))
            {
                vector.AddColumn("EMA", emaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.MACD))
            {
                vector.AddColumn("MACD", macdOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.RSI))
            {
                vector.AddColumn("RSI", rsiOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.WilliamsR))
            {
                vector.AddColumn("WilliamsR", williamsROut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.Stochastics))
            {
                vector.AddColumn("Stochastics", stochasticsOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            // Closing prices, rounded to one decimal place, become the label.
            vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p).ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());

            // NOTE(review): hard-coded, user-specific output path — debugging
            // leftover; confirm whether this export should remain.
            new CSVExporter(vector).Export("c:\\users\\yasin\\indicatorOutput.csv");
            int           count    = vector.Values[0].Length;
            FeatureVector training = new FeatureVector();

            // Chronological split: first trainingSetPercentage of rows train.
            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                training.AddColumn(vector.ColumnName[i], vector.Values[i].Take((int)(count * trainingSetPercentage)).ToArray());
            }

            FeatureVector test = new FeatureVector();

            // Take(count) here just means "the rest of the elements".
            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                test.AddColumn(vector.ColumnName[i], vector.Values[i].Skip((int)(count * trainingSetPercentage)).Take(count).ToArray());
            }

            double accuracy = 0;

            // Dispatch on the requested algorithm; each branch either
            // cross-validates (10 folds) or does a single train/test fit.
            if (mlAlgorithm == MLAService.LIN_REG)
            {
                var linReg = new LinearRegression();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(linReg, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var linRegModel = (LinearRegressionModel)linReg.Fit(training);
                    var predictions = linRegModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            else if (mlAlgorithm == MLAService.LOG_REG)
            {
                var logReg = new LogisticRegression();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(logReg, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var logRegModel = (LogisticRegressionModel)logReg.Fit(training);
                    var predictions = logRegModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            else if (mlAlgorithm == MLAService.NAI_BAY)
            {
                var naiBay = new NaiveBayes();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(naiBay, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var naiBayModel = (NaiveBayesModel)naiBay.Fit(training);
                    var predictions = naiBayModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            // Unknown algorithm ids fall through and return 0.
            return(accuracy);
        }
示例#11
0
        public void TestRidgeVsLstsq()
        {
            // Ridge with alpha == 0 degenerates to ordinary least squares, so
            // both estimators must learn identical coefficients.
            var rng = new Random(0);
            // Keep the system over-determined: more samples than features.
            const int sampleCount  = 5;
            const int featureCount = 4;
            var targets  = DenseVector.CreateRandom(sampleCount, new Normal { RandomSource = rng });
            var features = DenseMatrix.CreateRandom(sampleCount, featureCount, new Normal { RandomSource = rng });

            var ridge = new RidgeRegression(alpha: 0.0, fitIntercept: false);
            var ols   = new LinearRegression(fitIntercept: false);

            // Fit and compare twice, confirming refitting is stable.
            for (var pass = 0; pass < 2; pass++)
            {
                ridge.Fit(features, targets);
                ols.Fit(features, targets);
                Assert.IsTrue(ridge.Coef.AlmostEquals(ols.Coef));
            }
        }