private static void TestLogisticRegression(FeatureVector training, FeatureVector test)
        {
            // Train a logistic-regression model (learning rate 0.1, 3000 iterations)
            // on the training set, then score the test set and print the evaluation.
            var trainer = new LogisticRegression(0.1, 3000);
            var model   = (LogisticRegressionModel)trainer.Fit(training);

            PrintPredictionsAndEvaluate(model.transform(test));
        }
// Esempio n. 2 (example separator from the snippet aggregator; votes: 0)
        // ********************************************************************

        #region Constructor

        public ViewModel()
        {
            // Initialize the observable state the view binds to.
            Measurements = new ObservableCollection<Measurement>();
            LineData = new LineModel();
            LogisticRegression = new LogisticRegressionModel();

            // Wire up the view's commands before exposing defaults.
            CreateCommands();

            // Default significance level used by the regression analysis.
            Alpha = 0.05;
        }
// Esempio n. 3 (example separator from the snippet aggregator; votes: 0)
        public void Logistic_Regression_Test_Prediction()
        {
            // Arrange: a model with a fixed sigmoid link and pre-set weights.
            // (A 3x3 Matrix fixture was declared here previously but never used; removed.)
            var model = new LogisticRegressionModel()
            {
                LogisticFunction = new Math.Functions.Logistic(),
                Theta            = new Vector(new double[] { 1, 2, 1, -9 })
            };

            // Act: predict for the feature vector (8, 1, 6).
            var p = model.Predict(new Vector(new double[] { 8, 1, 6 }));

            // Assert: with these weights the sigmoid saturates, so the
            // classified prediction is expected to be exactly 1.
            Assert.AreEqual(1d, p);
        }
// Esempio n. 4 (example separator from the snippet aggregator; votes: 0)
 public LogisticRegressionPresenter(DataSetManagerPresenter dataSetPresenter)
 {
     // Start with a fresh model and remember the data-set presenter
     // this presenter collaborates with.
     this.model = new LogisticRegressionModel();
     this.dataSetPresenter = dataSetPresenter;
 }
// Esempio n. 5 (example separator from the snippet aggregator; votes: 0)
        // Builds a feature vector from the indicators selected in the UI, splits it into
        // training/test sets, trains linear regression, logistic regression and naive Bayes
        // (with k-fold cross-validation when numFolds > 0), and shows the accuracies.
        private void buttonForDataSplitNext_Click(object sender, EventArgs e)
        {
            // Read the split configuration from the UI.
            trainingSetPercentage = (double)numericUpDownForTrainingSetPercent.Value / 100.0;
            numFolds = (int)numericUpDownForNumFolds.Value;

            var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

            // Compute only the indicators the user selected; unselected ones stay null.
            double[] smaOut = isSMAChecked ? IndicatorDataPreprocessor.GetSMAOut(MovingAverage.Simple(code, targetDate, smaPeriod, numberOfData)) : null;
            double[] wmaOut = isWMAChecked ? IndicatorDataPreprocessor.GetWMAOut(MovingAverage.Weighted(code, targetDate, wmaPeriod, numberOfData)) : null;
            double[] emaOut = isEMAChecked ? IndicatorDataPreprocessor.GetEMAOut(MovingAverage.Exponential(code, targetDate, emaPeriod, numberOfData)) : null;
            double[] macdOut = isMACDChecked ? IndicatorDataPreprocessor.GetMACDOut(new MovingAverageConvergenceDivergence(code, targetDate, firstPeriod, secondPeriod, triggerPeriod, numberOfData)) : null;
            double[] stochasticsOut = isStochasticsChecked ? IndicatorDataPreprocessor.GetStochasticsOut(new Stochastics(code, targetDate, fastKPeriod, fastDPeriod, slowDPeriod, numberOfData)) : null;
            double[] williamsROut = isWilliamsRChecked ? IndicatorDataPreprocessor.GetWilliamsROut(WilliamsR.Wsr(code, targetDate, williamsRPeriod, numberOfData)) : null;
            double[] rsiOut = isRSIChecked ? IndicatorDataPreprocessor.GetRSIOut(RelativeStrengthIndex.Rsi(code, targetDate, rsiPeriod, numberOfData)) : null;
            double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);

            // Every column must have the same length, so truncate to the shortest
            // computed series. closesOut is always non-null and caps the sentinel.
            int minRowCount = 1000000;
            minRowCount = MinLength(minRowCount, smaOut);
            minRowCount = MinLength(minRowCount, wmaOut);
            minRowCount = MinLength(minRowCount, emaOut);
            minRowCount = MinLength(minRowCount, macdOut);
            minRowCount = MinLength(minRowCount, rsiOut);
            minRowCount = MinLength(minRowCount, williamsROut);
            minRowCount = MinLength(minRowCount, stochasticsOut);
            minRowCount = MinLength(minRowCount, closesOut);

            var fv = new FeatureVector();

            if (isSMAChecked)
            {
                AddIndicatorColumn(fv, "SMA", smaOut, minRowCount);
            }
            if (isWMAChecked)
            {
                AddIndicatorColumn(fv, "WMA", wmaOut, minRowCount);
            }
            if (isEMAChecked)
            {
                AddIndicatorColumn(fv, "EMA", emaOut, minRowCount);
            }
            if (isMACDChecked)
            {
                AddIndicatorColumn(fv, "MACD", macdOut, minRowCount);
            }
            if (isRSIChecked)
            {
                AddIndicatorColumn(fv, "RSI", rsiOut, minRowCount);
            }
            if (isWilliamsRChecked)
            {
                AddIndicatorColumn(fv, "WilliamsR", williamsROut, minRowCount);
            }
            if (isStochasticsChecked)
            {
                AddIndicatorColumn(fv, "Stochastics", stochasticsOut, minRowCount);
            }
            // Label column, formatted to one decimal place.
            // FIX: format with InvariantCulture. The original formatted with the current
            // culture and then called string.ToString(IFormatProvider), which is a no-op
            // on strings — so labels came out as e.g. "0,5" under comma-decimal locales.
            fv.AddColumn("label", closesOut.Select(p => (object)string.Format(CultureInfo.InvariantCulture, "{0:0.0}", p)).Take(minRowCount).ToArray());

            // Split each column into training / test by trainingSetPercentage.
            var training = new FeatureVector();
            var test = new FeatureVector();
            int count = fv.Values[0].Length;
            int trainingCount = (int)(count * trainingSetPercentage);

            for (int i = 0; i < fv.ColumnName.Count; i++)
            {
                training.AddColumn(fv.ColumnName[i], fv.Values[i].Take(trainingCount).ToArray());
                // Skip(...).ToArray() takes all remaining rows, however few there are.
                test.AddColumn(fv.ColumnName[i], fv.Values[i].Skip(trainingCount).ToArray());
            }

            if (numFolds > 0)
            {
                // k-fold cross-validation for each learner.
                BinaryClassificationEvaluator bce1 = new BinaryClassificationEvaluator();
                CrossValidator cvLinReg = new CrossValidator(new LinearRegression(), bce1, numFolds);
                CrossValidatorModel cvLinRegModel = (CrossValidatorModel)cvLinReg.Fit(training);
                bce1.evaluate(cvLinRegModel.transform(test));
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                CrossValidator cvLogReg = new CrossValidator(new LogisticRegression(), bce2, numFolds);
                CrossValidatorModel cvLogRegModel = (CrossValidatorModel)cvLogReg.Fit(training);
                bce2.evaluate(cvLogRegModel.transform(test));
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
                CrossValidator cvNaiBay = new CrossValidator(new NaiveBayes(), bce3, numFolds);
                CrossValidatorModel cvNaiBayModel = (CrossValidatorModel)cvNaiBay.Fit(training);
                bce3.evaluate(cvNaiBayModel.transform(test));
                naiBayAcc = bce3.Accuracy;
            }
            else
            {
                // No cross-validation: fit once on the training split.
                BinaryClassificationEvaluator bce1 = new BinaryClassificationEvaluator();
                LinearRegressionModel linearRegressionModel = (LinearRegressionModel)new LinearRegression().Fit(training);
                bce1.evaluate(linearRegressionModel.transform(test));
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                LogisticRegressionModel logisticRegressionModel = (LogisticRegressionModel)new LogisticRegression().Fit(training);
                bce2.evaluate(logisticRegressionModel.transform(test));
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
                NaiveBayesModel naiveBayesModel = (NaiveBayesModel)new NaiveBayes().Fit(training);
                bce3.evaluate(naiveBayesModel.transform(test));
                naiBayAcc = bce3.Accuracy;
            }

            labelForLinRegAcc.Text = linRegAcc.ToString();
            labelForLogRegAcc.Text = logRegAcc.ToString();
            labelForNaiBayAcc.Text = naiBayAcc.ToString();

            panelForResults.BringToFront();
        }

        // Returns current when the series was not computed (null),
        // otherwise the smaller of current and the series length.
        private static int MinLength(int current, double[] series)
        {
            // System.Math is fully qualified: the project also declares a
            // Math.Functions namespace that would otherwise shadow it.
            return series == null ? current : System.Math.Min(current, series.Length);
        }

        // Adds an indicator series to the feature vector as invariant-culture
        // strings, truncated to rowCount rows.
        private static void AddIndicatorColumn(FeatureVector fv, string name, double[] values, int rowCount)
        {
            fv.AddColumn(name, values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(rowCount).ToArray());
        }