private static void ExportFeaturesAndLabel(string code)
{
    // Exports the full indicator feature matrix plus the one-decimal close
    // price ("label") for the given stock code to <home>/indicatorOutput.txt.
    bool isUnixLike = Environment.OSVersion.Platform == PlatformID.Unix
                   || Environment.OSVersion.Platform == PlatformID.MacOSX;
    string homeDirectory = isUnixLike
        ? Environment.GetEnvironmentVariable("HOME")
        : Environment.ExpandEnvironmentVariables("%HOMEDRIVE%%HOMEPATH%");
    // Path.Combine picks the platform separator; the original appended "\\",
    // which produces an invalid path on the Unix/macOS branch selected above.
    string filePath = System.IO.Path.Combine(homeDirectory, "indicatorOutput.txt");

    DateTime targetDate = IndicatorService.LastDate(code);
    int numberOfData = 4000;
    var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

    // Raw indicator series, all computed over the same window.
    double[] sma = MovingAverage.Simple(code, targetDate, 14, numberOfData);
    double[] wma = MovingAverage.Weighted(code, targetDate, 14, numberOfData);
    double[] ema = MovingAverage.Exponential(code, targetDate, 14, numberOfData);
    MovingAverageConvergenceDivergence macd = new MovingAverageConvergenceDivergence(code, targetDate, 12, 26, 9, numberOfData);
    double[] rsi = RelativeStrengthIndex.Rsi(code, targetDate, 14, numberOfData);
    double[] williams = WilliamsR.Wsr(code, targetDate, 14, numberOfData);
    Stochastics stochastics = new Stochastics(code, targetDate, 14, 3, 3, numberOfData);

    // Feature-ready series; lengths may differ per indicator.
    double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);
    double[] smaOut = IndicatorDataPreprocessor.GetSMAOut(sma);
    double[] wmaOut = IndicatorDataPreprocessor.GetWMAOut(wma);
    double[] emaOut = IndicatorDataPreprocessor.GetEMAOut(ema);
    double[] macdOut = IndicatorDataPreprocessor.GetMACDOut(macd);
    double[] rsiOut = IndicatorDataPreprocessor.GetRSIOut(rsi);
    double[] williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(williams);
    double[] stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(stochastics);

    // Truncate every column to the shortest series so all exported rows are complete.
    int minRowCount = smaOut.Length;
    minRowCount = Math.Min(minRowCount, wmaOut.Length);
    minRowCount = Math.Min(minRowCount, emaOut.Length);
    minRowCount = Math.Min(minRowCount, macdOut.Length);
    minRowCount = Math.Min(minRowCount, rsiOut.Length);
    minRowCount = Math.Min(minRowCount, williamsROut.Length);
    minRowCount = Math.Min(minRowCount, stochasticsOut.Length);
    minRowCount = Math.Min(minRowCount, closesOut.Length);

    // Invariant culture keeps the decimal separator stable regardless of OS locale.
    Func<double[], object[]> toColumn = values =>
        values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray();

    FeatureVector vector = new FeatureVector();
    vector.AddColumn("SMA", toColumn(smaOut));
    vector.AddColumn("WMA", toColumn(wmaOut));
    vector.AddColumn("EMA", toColumn(emaOut));
    vector.AddColumn("MACD", toColumn(macdOut));
    vector.AddColumn("RSI", toColumn(rsiOut));
    vector.AddColumn("WilliamsR", toColumn(williamsROut));
    vector.AddColumn("Stochastics", toColumn(stochasticsOut));
    // NOTE(review): "{0:0.0}" formats with the current culture, unlike the
    // feature columns above; the original's trailing
    // .ToString(CultureInfo.InvariantCulture) on the resulting string was a
    // no-op and has been dropped. Model predictions are formatted the same
    // way, so confirm before switching this to the invariant culture.
    vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p)).Take(minRowCount).ToArray());

    new CSVExporter(vector).Export(filePath);
    Console.WriteLine("Operations completed.");
}
public FeatureVector transform(FeatureVector vectorToBePredicted)
{
    // Predicts a label for every row of the input vector using the trained
    // weights and appends the results as a "prediction" column.
    int rowCount = vectorToBePredicted.Values[0].Length;
    object[] predictions = new object[rowCount];
    for (int i = 0; i < rowCount; i++)
    {
        // features[0] is the bias term; the remaining slots mirror the
        // input's feature columns (column j-1 feeds weight j).
        double[] features = new double[weights.Length];
        features[0] = 1;
        for (int j = 1; j < weights.Length; j++)
        {
            features[j] = Convert.ToDouble(vectorToBePredicted.Values[j - 1][i], CultureInfo.InvariantCulture);
        }
        // Classify once per row, AFTER the feature vector is fully built.
        // The original called Classify inside the inner loop, recomputing the
        // prediction on a partially filled feature array each pass (and never
        // assigning predictions[i] at all when weights.Length == 1).
        // NOTE(review): "{0:0.0}" uses the current culture, matching how the
        // "label" column is formatted elsewhere — keep them consistent.
        predictions[i] = (object)string.Format("{0:0.0}", Classify(weights, features));
    }
    vectorToBePredicted.AddColumn("prediction", predictions);
    return (vectorToBePredicted);
}
public FeatureVector transform(FeatureVector vectorToBePredicted)
{
    // For each row, picks the label maximizing the naive-Bayes score
    // P(label) * Π P(feature_j | label) and emits it in a "prediction" column.
    int rowCount = vectorToBePredicted.Values[0].Length;
    object[] predictions = new object[rowCount];
    for (int row = 0; row < rowCount; row++)
    {
        object bestLabel = null;
        double bestProbability = 0;
        foreach (var label in labelCounts)
        {
            // Likelihood product over every feature column (last column is the label).
            double posterior = 1;
            for (int column = 0; column < vectorToBePredicted.Values.Count - 1; column++)
            {
                object featureValue = vectorToBePredicted.Values[column][row];
                object key = Tuple.Create(column, featureValue, label.Key);
                // Unseen (column, value, label) triples get a pseudo-count of 1 so
                // the product never collapses to zero; note this writes back into
                // the model's featureCounts table.
                if (!featureCounts.ContainsKey(key))
                {
                    featureCounts[key] = 1;
                }
                posterior *= (double)featureCounts[key] / label.Value;
            }
            // Multiply in the class prior.
            posterior *= (double)label.Value / numberOfTrainingData;
            if (posterior > bestProbability)
            {
                bestProbability = posterior;
                bestLabel = label.Key;
            }
        }
        predictions[row] = bestLabel;
    }
    vectorToBePredicted.AddColumn("prediction", predictions);
    return (vectorToBePredicted);
}
private void PreprocessIndicators()
{
    // Builds the feature matrix from the indicators enabled on this instance,
    // appends the one-decimal close price as the label, and splits the rows
    // chronologically into the training / test members by trainingPercentage.
    var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

    // Raw indicator series — only the enabled ones are computed; the rest stay
    // null and are skipped everywhere below.
    double[] sma = null, wma = null, ema = null, rsi = null, williamsR = null;
    MovingAverageConvergenceDivergence macd = null;
    Stochastics stochastics = null;
    if (isSMA) { sma = MovingAverage.Simple(code, targetDate, 14, numberOfData); }
    if (isWMA) { wma = MovingAverage.Weighted(code, targetDate, 14, numberOfData); }
    if (isEMA) { ema = MovingAverage.Exponential(code, targetDate, 14, numberOfData); }
    if (isMACD) { macd = new MovingAverageConvergenceDivergence(code, targetDate, 12, 26, 9, numberOfData); }
    if (isRSI) { rsi = RelativeStrengthIndex.Rsi(code, targetDate, 14, numberOfData); }
    if (isWilliamsR) { williamsR = WilliamsR.Wsr(code, targetDate, 14, numberOfData); }
    if (isStochastics) { stochastics = new Stochastics(code, targetDate, 14, 3, 3, numberOfData); }

    // Feature-ready series; lengths may differ per indicator.
    double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);
    double[] smaOut = null, wmaOut = null, emaOut = null, macdOut = null;
    double[] rsiOut = null, williamsROut = null, stochasticsOut = null;
    if (isSMA) { smaOut = IndicatorDataPreprocessor.GetSMAOut(sma); }
    if (isWMA) { wmaOut = IndicatorDataPreprocessor.GetWMAOut(wma); }
    if (isEMA) { emaOut = IndicatorDataPreprocessor.GetEMAOut(ema); }
    if (isMACD) { macdOut = IndicatorDataPreprocessor.GetMACDOut(macd); }
    if (isRSI) { rsiOut = IndicatorDataPreprocessor.GetRSIOut(rsi, false); }
    if (isWilliamsR) { williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(williamsR, false); }
    if (isStochastics) { stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(stochastics, false); }

    // Truncate every column to the shortest enabled series so all rows are complete.
    minRowCount = closesOut.Length;
    if (isSMA) { minRowCount = Math.Min(minRowCount, smaOut.Length); }
    if (isWMA) { minRowCount = Math.Min(minRowCount, wmaOut.Length); }
    if (isEMA) { minRowCount = Math.Min(minRowCount, emaOut.Length); }
    if (isMACD) { minRowCount = Math.Min(minRowCount, macdOut.Length); }
    if (isRSI) { minRowCount = Math.Min(minRowCount, rsiOut.Length); }
    if (isWilliamsR) { minRowCount = Math.Min(minRowCount, williamsROut.Length); }
    if (isStochastics) { minRowCount = Math.Min(minRowCount, stochasticsOut.Length); }

    // Invariant culture keeps the numeric text locale-independent.
    Func<double[], object[]> toColumn = values =>
        values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray();

    FeatureVector featureVector = new FeatureVector();
    if (isSMA) { featureVector.AddColumn("SMA", toColumn(smaOut)); }
    if (isWMA) { featureVector.AddColumn("WMA", toColumn(wmaOut)); }
    if (isEMA) { featureVector.AddColumn("EMA", toColumn(emaOut)); }
    if (isMACD) { featureVector.AddColumn("MACD", toColumn(macdOut)); }
    if (isRSI) { featureVector.AddColumn("RSI", toColumn(rsiOut)); }
    if (isWilliamsR) { featureVector.AddColumn("WilliamsR", toColumn(williamsROut)); }
    if (isStochastics) { featureVector.AddColumn("Stochastics", toColumn(stochasticsOut)); }
    // NOTE(review): "{0:0.0}" formats with the current culture, unlike the
    // feature columns above; the original's trailing
    // .ToString(CultureInfo.InvariantCulture) on the resulting string was a
    // no-op and has been dropped. Model predictions are formatted the same
    // way, so keep them consistent.
    featureVector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p)).Take(minRowCount).ToArray());

    // Chronological split: the first trainingPercentage% of rows train, the
    // remainder test.
    int count = featureVector.Values[0].Length;
    int trainingCount = (int)(count * trainingPercentage / 100.0);
    training = new FeatureVector();
    test = new FeatureVector();
    for (int i = 0; i < featureVector.ColumnName.Count; i++)
    {
        training.AddColumn(featureVector.ColumnName[i], featureVector.Values[i].Take(trainingCount).ToArray());
        // Skip() already yields every remaining row; the original's trailing
        // Take(count) was a no-op.
        test.AddColumn(featureVector.ColumnName[i], featureVector.Values[i].Skip(trainingCount).ToArray());
    }
}
private void buttonForDataSplitNext_Click(object sender, EventArgs e)
{
    // Reads split settings from the UI, builds the feature matrix from the
    // checked indicators, splits it chronologically into training/test sets,
    // evaluates the three classifiers (optionally with k-fold cross
    // validation) and displays the accuracies.
    trainingSetPercentage = (double)numericUpDownForTrainingSetPercent.Value / 100.0;
    numFolds = (int)numericUpDownForNumFolds.Value;

    var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

    // Only the checked indicators are computed; unchecked ones stay null and
    // are skipped below.
    double[] smaOut = null, wmaOut = null, emaOut = null, macdOut = null;
    double[] stochasticsOut = null, williamsROut = null, rsiOut = null;
    if (isSMAChecked) { smaOut = IndicatorDataPreprocessor.GetSMAOut(MovingAverage.Simple(code, targetDate, smaPeriod, numberOfData)); }
    if (isWMAChecked) { wmaOut = IndicatorDataPreprocessor.GetWMAOut(MovingAverage.Weighted(code, targetDate, wmaPeriod, numberOfData)); }
    if (isEMAChecked) { emaOut = IndicatorDataPreprocessor.GetEMAOut(MovingAverage.Exponential(code, targetDate, emaPeriod, numberOfData)); }
    if (isMACDChecked) { macdOut = IndicatorDataPreprocessor.GetMACDOut(new MovingAverageConvergenceDivergence(code, targetDate, firstPeriod, secondPeriod, triggerPeriod, numberOfData)); }
    if (isStochasticsChecked) { stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(new Stochastics(code, targetDate, fastKPeriod, fastDPeriod, slowDPeriod, numberOfData)); }
    if (isWilliamsRChecked) { williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(WilliamsR.Wsr(code, targetDate, williamsRPeriod, numberOfData)); }
    if (isRSIChecked) { rsiOut = IndicatorDataPreprocessor.GetRSIOut(RelativeStrengthIndex.Rsi(code, targetDate, rsiPeriod, numberOfData)); }
    double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);

    // Truncate every column to the shortest series so all rows are complete.
    // closesOut is always computed, so start from it instead of the original's
    // magic 1000000 sentinel (and take a proper min for SMA rather than a
    // direct assignment).
    int minRowCount = closesOut.Length;
    if (smaOut != null) { minRowCount = Math.Min(minRowCount, smaOut.Length); }
    if (wmaOut != null) { minRowCount = Math.Min(minRowCount, wmaOut.Length); }
    if (emaOut != null) { minRowCount = Math.Min(minRowCount, emaOut.Length); }
    if (macdOut != null) { minRowCount = Math.Min(minRowCount, macdOut.Length); }
    if (rsiOut != null) { minRowCount = Math.Min(minRowCount, rsiOut.Length); }
    if (williamsROut != null) { minRowCount = Math.Min(minRowCount, williamsROut.Length); }
    if (stochasticsOut != null) { minRowCount = Math.Min(minRowCount, stochasticsOut.Length); }

    // Invariant culture keeps the numeric text locale-independent.
    Func<double[], object[]> toColumn = values =>
        values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray();

    var fv = new FeatureVector();
    if (isSMAChecked) { fv.AddColumn("SMA", toColumn(smaOut)); }
    if (isWMAChecked) { fv.AddColumn("WMA", toColumn(wmaOut)); }
    if (isEMAChecked) { fv.AddColumn("EMA", toColumn(emaOut)); }
    if (isMACDChecked) { fv.AddColumn("MACD", toColumn(macdOut)); }
    if (isRSIChecked) { fv.AddColumn("RSI", toColumn(rsiOut)); }
    if (isWilliamsRChecked) { fv.AddColumn("WilliamsR", toColumn(williamsROut)); }
    if (isStochasticsChecked) { fv.AddColumn("Stochastics", toColumn(stochasticsOut)); }
    // NOTE(review): "{0:0.0}" formats with the current culture, unlike the
    // feature columns above; the original's trailing
    // .ToString(CultureInfo.InvariantCulture) on the resulting string was a
    // no-op and has been dropped. Model predictions are formatted the same
    // way, so keep them consistent.
    fv.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p)).Take(minRowCount).ToArray());

    // Chronological split: first part trains, the remainder tests.
    int count = fv.Values[0].Length;
    int trainingCount = (int)(count * trainingSetPercentage);
    var training = new FeatureVector();
    var test = new FeatureVector();
    for (int i = 0; i < fv.ColumnName.Count; i++)
    {
        training.AddColumn(fv.ColumnName[i], fv.Values[i].Take(trainingCount).ToArray());
        // Skip() already yields every remaining row; the original's trailing
        // Take(count) was a no-op.
        test.AddColumn(fv.ColumnName[i], fv.Values[i].Skip(trainingCount).ToArray());
    }

    if (numFolds > 0)
    {
        // k-fold cross validation for each algorithm.
        BinaryClassificationEvaluator bce1 = new BinaryClassificationEvaluator();
        LinearRegression linearRegression = new LinearRegression();
        CrossValidator cvLinReg = new CrossValidator(linearRegression, bce1, numFolds);
        CrossValidatorModel cvLinRegModel = (CrossValidatorModel)cvLinReg.Fit(training);
        bce1.evaluate(cvLinRegModel.transform(test));
        linRegAcc = bce1.Accuracy;

        BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
        LogisticRegression logisticRegression = new LogisticRegression();
        CrossValidator cvLogReg = new CrossValidator(logisticRegression, bce2, numFolds);
        CrossValidatorModel cvLogRegModel = (CrossValidatorModel)cvLogReg.Fit(training);
        bce2.evaluate(cvLogRegModel.transform(test));
        logRegAcc = bce2.Accuracy;

        BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
        NaiveBayes naiveBayes = new NaiveBayes();
        CrossValidator cvNaiBay = new CrossValidator(naiveBayes, bce3, numFolds);
        CrossValidatorModel cvNaiBayModel = (CrossValidatorModel)cvNaiBay.Fit(training);
        bce3.evaluate(cvNaiBayModel.transform(test));
        naiBayAcc = bce3.Accuracy;
    }
    else
    {
        // Single train/test evaluation. (Local-variable typo
        // "logicticRegression" fixed.)
        BinaryClassificationEvaluator bce1 = new BinaryClassificationEvaluator();
        LinearRegression linearRegression = new LinearRegression();
        LinearRegressionModel linearRegressionModel = (LinearRegressionModel)linearRegression.Fit(training);
        bce1.evaluate(linearRegressionModel.transform(test));
        linRegAcc = bce1.Accuracy;

        BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
        LogisticRegression logisticRegression = new LogisticRegression();
        LogisticRegressionModel logisticRegressionModel = (LogisticRegressionModel)logisticRegression.Fit(training);
        bce2.evaluate(logisticRegressionModel.transform(test));
        logRegAcc = bce2.Accuracy;

        BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
        NaiveBayes naiveBayes = new NaiveBayes();
        NaiveBayesModel naiveBayesModel = (NaiveBayesModel)naiveBayes.Fit(training);
        bce3.evaluate(naiveBayesModel.transform(test));
        naiBayAcc = bce3.Accuracy;
    }

    labelForLinRegAcc.Text = linRegAcc.ToString();
    labelForLogRegAcc.Text = logRegAcc.ToString();
    labelForNaiBayAcc.Text = naiBayAcc.ToString();
    panelForResults.BringToFront();
}
private static void RunConsoleApplication()
{
    // Console smoke-test entry point: builds the feature matrix for a fixed
    // code/date window, exports it as labeled points to
    // <home>/indicatorOutput.txt, and prepares a 50/50 train/test split for
    // the (currently disabled) algorithm tests below.
    bool isUnixLike = Environment.OSVersion.Platform == PlatformID.Unix
                   || Environment.OSVersion.Platform == PlatformID.MacOSX;
    string homeDirectory = isUnixLike
        ? Environment.GetEnvironmentVariable("HOME")
        : Environment.ExpandEnvironmentVariables("%HOMEDRIVE%%HOMEPATH%");
    // Path.Combine picks the platform separator; the original appended "\\",
    // which produces an invalid path on the Unix/macOS branch selected above.
    string filePath = System.IO.Path.Combine(homeDirectory, "indicatorOutput.txt");

    string code = "AKBNK";
    // NOTE(review): ToLocalTime() on an Unspecified-kind DateTime treats the
    // value as UTC before converting — confirm this shift is intended.
    DateTime targetDate = new DateTime(2018, 11, 1).ToLocalTime();
    int numberOfData = 1000;
    var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

    // Raw indicator series, all computed over the same window.
    double[] sma = MovingAverage.Simple(code, targetDate, 14, numberOfData);
    double[] wma = MovingAverage.Weighted(code, targetDate, 14, numberOfData);
    double[] ema = MovingAverage.Exponential(code, targetDate, 14, numberOfData);
    MovingAverageConvergenceDivergence macd = new MovingAverageConvergenceDivergence(code, targetDate, 12, 26, 9, numberOfData);
    double[] rsi = RelativeStrengthIndex.Rsi(code, targetDate, 14, numberOfData);
    double[] williams = WilliamsR.Wsr(code, targetDate, 14, numberOfData);
    Stochastics stochastics = new Stochastics(code, targetDate, 14, 3, 3, numberOfData);

    // Feature-ready series; lengths may differ per indicator.
    double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);
    double[] smaOut = IndicatorDataPreprocessor.GetSMAOut(sma);
    double[] wmaOut = IndicatorDataPreprocessor.GetWMAOut(wma);
    double[] emaOut = IndicatorDataPreprocessor.GetEMAOut(ema);
    double[] macdOut = IndicatorDataPreprocessor.GetMACDOut(macd);
    double[] rsiOut = IndicatorDataPreprocessor.GetRSIOut(rsi);
    double[] williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(williams);
    double[] stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(stochastics);

    // Truncate every column to the shortest series so all rows are complete.
    int minRowCount = smaOut.Length;
    minRowCount = Math.Min(minRowCount, wmaOut.Length);
    minRowCount = Math.Min(minRowCount, emaOut.Length);
    minRowCount = Math.Min(minRowCount, macdOut.Length);
    minRowCount = Math.Min(minRowCount, rsiOut.Length);
    minRowCount = Math.Min(minRowCount, williamsROut.Length);
    minRowCount = Math.Min(minRowCount, stochasticsOut.Length);
    minRowCount = Math.Min(minRowCount, closesOut.Length);

    // Invariant culture keeps the decimal separator stable regardless of OS locale.
    Func<double[], object[]> toColumn = values =>
        values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray();

    FeatureVector vector = new FeatureVector();
    vector.AddColumn("SMA", toColumn(smaOut));
    vector.AddColumn("WMA", toColumn(wmaOut));
    vector.AddColumn("EMA", toColumn(emaOut));
    vector.AddColumn("MACD", toColumn(macdOut));
    vector.AddColumn("RSI", toColumn(rsiOut));
    vector.AddColumn("WilliamsR", toColumn(williamsROut));
    vector.AddColumn("Stochastics", toColumn(stochasticsOut));
    // NOTE(review): "{0:0.0}" formats with the current culture, unlike the
    // feature columns above; the original's trailing
    // .ToString(CultureInfo.InvariantCulture) on the resulting string was a
    // no-op and has been dropped. Model predictions are formatted the same
    // way, so keep them consistent.
    vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p)).Take(minRowCount).ToArray());

    new LabeledPointExporter(vector).Export(filePath);

    // 50/50 chronological split for the manual algorithm tests below.
    int count = vector.Values[0].Length;
    FeatureVector training = new FeatureVector();
    for (int i = 0; i < vector.ColumnName.Count; i++)
    {
        training.AddColumn(vector.ColumnName[i], vector.Values[i].Take(count / 2).ToArray());
    }
    FeatureVector test = new FeatureVector();
    for (int i = 0; i < vector.ColumnName.Count; i++)
    {
        test.AddColumn(vector.ColumnName[i], vector.Values[i].Skip(count / 2).Take(count / 2).ToArray());
    }

    // Manual smoke tests — enable one at a time as needed.
    //TestNaiveBayes(training, test);
    //TestNaiveBayesUsingCrossValidation(training, test);
    //TestLinearRegression(training, test);
    //TestLinearRegressionUsingCrossValidation(training, test);
    //TestLogisticRegression(training, test);
    //TestLogisticRegressionUsingCrossValidation(training, test);
}
private static double CalculateAccuracy(List <int> indicators, int mlAlgorithm, bool isCrossValidationEnabled, int minRowCount, double trainingSetPercentage, double[] smaOut, double[] wmaOut, double[] emaOut, double[] macdOut, double[] rsiOut, double[] williamsROut, double[] stochasticsOut, double[] closesOut)
{
    // Builds a feature vector from the selected indicator columns, splits it
    // chronologically by trainingSetPercentage, trains the requested
    // algorithm (optionally with 10-fold cross validation) and returns the
    // evaluator's accuracy on the test split.

    // Invariant culture keeps the numeric text locale-independent.
    Func<double[], object[]> toColumn = values =>
        values.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray();

    FeatureVector vector = new FeatureVector();
    if (indicators.Contains(IndicatorService.SMA)) { vector.AddColumn("SMA", toColumn(smaOut)); }
    if (indicators.Contains(IndicatorService.WMA)) { vector.AddColumn("WMA", toColumn(wmaOut)); }
    if (indicators.Contains(IndicatorService.EMA)) { vector.AddColumn("EMA", toColumn(emaOut)); }
    if (indicators.Contains(IndicatorService.MACD)) { vector.AddColumn("MACD", toColumn(macdOut)); }
    if (indicators.Contains(IndicatorService.RSI)) { vector.AddColumn("RSI", toColumn(rsiOut)); }
    if (indicators.Contains(IndicatorService.WilliamsR)) { vector.AddColumn("WilliamsR", toColumn(williamsROut)); }
    if (indicators.Contains(IndicatorService.Stochastics)) { vector.AddColumn("Stochastics", toColumn(stochasticsOut)); }
    // NOTE(review): "{0:0.0}" formats with the current culture, unlike the
    // feature columns above; the original's trailing
    // .ToString(CultureInfo.InvariantCulture) on the resulting string was a
    // no-op and has been dropped. Model predictions are formatted the same
    // way, so keep them consistent.
    vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p)).Take(minRowCount).ToArray());

    // The original exported to a hard-coded developer path
    // ("c:\users\yasin\..."), which throws on any other machine; export to the
    // current user's profile directory instead, matching the rest of the app.
    string exportPath = System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), "indicatorOutput.csv");
    new CSVExporter(vector).Export(exportPath);

    // Chronological split: first part trains, the remainder tests.
    int count = vector.Values[0].Length;
    int trainingCount = (int)(count * trainingSetPercentage);
    FeatureVector training = new FeatureVector();
    for (int i = 0; i < vector.ColumnName.Count; i++)
    {
        training.AddColumn(vector.ColumnName[i], vector.Values[i].Take(trainingCount).ToArray());
    }
    FeatureVector test = new FeatureVector();
    for (int i = 0; i < vector.ColumnName.Count; i++)
    {
        // Skip() already yields every remaining row; the original's trailing
        // Take(count) was a no-op.
        test.AddColumn(vector.ColumnName[i], vector.Values[i].Skip(trainingCount).ToArray());
    }

    double accuracy = 0;
    if (mlAlgorithm == MLAService.LIN_REG)
    {
        var linReg = new LinearRegression();
        var bce = new BinaryClassificationEvaluator();
        if (isCrossValidationEnabled)
        {
            var cv = new CrossValidator(linReg, bce, 10);
            var cvModel = (CrossValidatorModel)cv.Fit(training);
            bce.evaluate(cvModel.transform(test));
            accuracy = bce.Accuracy;
        }
        else
        {
            var linRegModel = (LinearRegressionModel)linReg.Fit(training);
            bce.evaluate(linRegModel.transform(test));
            accuracy = bce.Accuracy;
        }
    }
    else if (mlAlgorithm == MLAService.LOG_REG)
    {
        var logReg = new LogisticRegression();
        var bce = new BinaryClassificationEvaluator();
        if (isCrossValidationEnabled)
        {
            var cv = new CrossValidator(logReg, bce, 10);
            var cvModel = (CrossValidatorModel)cv.Fit(training);
            bce.evaluate(cvModel.transform(test));
            accuracy = bce.Accuracy;
        }
        else
        {
            var logRegModel = (LogisticRegressionModel)logReg.Fit(training);
            bce.evaluate(logRegModel.transform(test));
            accuracy = bce.Accuracy;
        }
    }
    else if (mlAlgorithm == MLAService.NAI_BAY)
    {
        var naiBay = new NaiveBayes();
        var bce = new BinaryClassificationEvaluator();
        if (isCrossValidationEnabled)
        {
            var cv = new CrossValidator(naiBay, bce, 10);
            var cvModel = (CrossValidatorModel)cv.Fit(training);
            bce.evaluate(cvModel.transform(test));
            accuracy = bce.Accuracy;
        }
        else
        {
            var naiBayModel = (NaiveBayesModel)naiBay.Fit(training);
            bce.evaluate(naiBayModel.transform(test));
            accuracy = bce.Accuracy;
        }
    }
    return (accuracy);
}