/// <summary>
/// Evaluation-only constructor: loads the train/test data from the two
/// readers and scores a previously trained weight/bias vector on the test set.
/// </summary>
/// <param name="r1">Reader for the training examples.</param>
/// <param name="r2">Reader for the test examples.</param>
/// <param name="learning_rate">Learning rate handed to the perceptron (not used for scoring).</param>
/// <param name="bestWB">Trained weights and bias to evaluate.</param>
public Data(StreamReader r1, StreamReader r2, double learning_rate, WeightBias bestWB)
{
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();

    SetData(r1, r2);

    // Plain perceptron configuration: no dynamic learning rate, zero margin,
    // no averaged-weights seed, not aggressive.
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, false, 0, null, false);
    Accuracy = perceptron.GetAccuracy(Test_Data, bestWB);
}
/// <summary>
/// Trains the sparse-feature model (margin perceptron / SVM / logistic
/// regression, selected via the Perceptron overload) for a fixed number of
/// epochs, recording the test accuracy after every epoch, and keeps the
/// weights from the epoch with the best test accuracy (ties go to the later
/// epoch).
/// </summary>
/// <param name="r1">Reader for the training examples.</param>
/// <param name="r2">Reader for the test examples.</param>
/// <param name="r">Randomness source for initialization and shuffling.</param>
/// <param name="epochs">Number of training epochs.</param>
/// <param name="learning_rate">Initial learning rate.</param>
/// <param name="margin">Margin for margin-based updates.</param>
/// <param name="c">Regularization constant (stored in <c>C</c>).</param>
/// <param name="logistic_regression">Train as logistic regression instead of SVM/perceptron.</param>
/// <param name="tradeoff">Regularization/loss tradeoff (stored in <c>Tradeoff</c>).</param>
public Data(StreamReader r1, StreamReader r2, Random r, int epochs, double learning_rate, double margin, double c, bool logistic_regression, double tradeoff)
{
    C = c;
    Tradeoff = tradeoff;
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();
    SetData(r1, r2);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);

    // Sparse weight vector: feature ids absent from the dictionary are
    // implicitly zero, so training starts from the zero vector plus a small
    // random bias. (A commented-out dense random initializer was removed.)
    Dictionary<int, double> w = new Dictionary<int, double>();
    double b = (r.NextDouble() * (0.01 + 0.01) - 0.01); // uniform in [-0.01, 0.01)
    WeightBias wb = new WeightBias(w, b, 0);

    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(Test_Data, wb), wb));
        perceptron.ShuffleTraining_Data(r);
    }

    // Highest test accuracy wins; ties are broken in favor of the later epoch.
    AccuracyWB bestAccuracy = AccuracyWeightB.OrderByDescending(x => x.Value.Accuracy).ThenByDescending(y => y.Key).Select(z => z.Value).First();
    Training_Accuracy = perceptron.GetAccuracy(Training_Data, bestAccuracy.Weight_Bias); // train accuracy
    Accuracy = bestAccuracy.Accuracy;                                                    // test accuracy
    BestWeightBias = bestAccuracy.Weight_Bias;
    Learning_Rate = learning_rate;
}
/// <summary>
/// Five-fold cross-validation for the dense (68-feature) perceptron variants.
/// Each fold trains for <paramref name="epochs"/> epochs on four of the five
/// input files and scores the held-out one; <c>Accuracy</c> is the mean of
/// the five fold accuracies.
/// </summary>
/// <param name="r1">First data fold.</param>
/// <param name="r2">Second data fold.</param>
/// <param name="r3">Third data fold.</param>
/// <param name="r4">Fourth data fold.</param>
/// <param name="r5">Fifth data fold.</param>
/// <param name="epochs">Training epochs per fold.</param>
/// <param name="learning_rate">Initial learning rate.</param>
/// <param name="r">Randomness source for initialization and shuffling.</param>
/// <param name="DymanicLearningRate">Decay the learning rate as training proceeds.</param>
/// <param name="margin">Margin for margin-based updates.</param>
/// <param name="Average">Score with the averaged-perceptron weights instead of the final ones.</param>
/// <param name="Aggressive">Use aggressive (per-example step size) updates.</param>
public Data(StreamReader r1, StreamReader r2, StreamReader r3, StreamReader r4, StreamReader r5, int epochs, double learning_rate, Random r, bool DymanicLearningRate, double margin, bool Average, bool Aggressive)
{
    // Optional seed vector for the averaged perceptron; stays null otherwise.
    double[] w_average = new double[68];
    WeightBias wb_average = null;
    if (Average)
    {
        for (int j = 0; j < 68; j++)
        {
            w_average[j] = (r.NextDouble() * (0.01 + 0.01) - 0.01); // uniform in [-0.01, 0.01)
        }
        double b_average = (r.NextDouble() * (0.01 + 0.01) - 0.01);
        wb_average = new WeightBias(w_average, b_average, 0);
    }

    Learning_Rate = learning_rate;
    Margin = margin;

    double[] foldAccuracy = new double[5];

    #region First Fold
    // NOTE(review): the five readers are re-read on every fold; SetData
    // presumably rewinds them — confirm against its implementation.
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r5);
    SetData(r2);
    SetData(r3);
    SetData(r4);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive);

    // Random starting point shared by every fold (bias drawn first, then the
    // 68 weights — draw order matters for reproducibility with a seeded r).
    double[] w = new double[68];
    double b = (r.NextDouble() * (0.01 + 0.01) - 0.01);
    for (int j = 0; j < 68; j++)
    {
        w[j] = (r.NextDouble() * (0.01 + 0.01) - 0.01);
    }

    WeightBias wb = new WeightBias(w, b, 0);
    for (int epoch = 0; epoch < epochs; epoch++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    // The plain evaluation always runs; the averaged evaluation replaces
    // (rather than skips) it so any internal bookkeeping inside GetAccuracy
    // happens exactly as before.
    foldAccuracy[0] = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        foldAccuracy[0] = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Second Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r4);
    SetData(r2);
    SetData(r3);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive);
    wb = new WeightBias(w, b, 0);
    for (int epoch = 0; epoch < epochs; epoch++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    foldAccuracy[1] = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        foldAccuracy[1] = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Third Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r3);
    SetData(r2);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive);
    wb = new WeightBias(w, b, 0);
    for (int epoch = 0; epoch < epochs; epoch++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    foldAccuracy[2] = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        foldAccuracy[2] = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Fourth Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r2);
    SetData(r3);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive);
    wb = new WeightBias(w, b, 0);
    for (int epoch = 0; epoch < epochs; epoch++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    foldAccuracy[3] = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        foldAccuracy[3] = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Fifth Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r2, r1);
    SetData(r3);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive);
    wb = new WeightBias(w, b, 0);
    for (int epoch = 0; epoch < epochs; epoch++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    foldAccuracy[4] = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        foldAccuracy[4] = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    Accuracy = (foldAccuracy[0] + foldAccuracy[1] + foldAccuracy[2] + foldAccuracy[3] + foldAccuracy[4]) / 5;
}
/// <summary>
/// Five-fold cross-validation over a single shuffled data set for the dense
/// 16-feature variants (margin/averaged/aggressive perceptron, SVM, or
/// logistic regression). <c>SetValidateData</c> splits the combined input
/// into <c>Cross_1</c>..<c>Cross_5</c>; each fold trains on four parts and
/// scores the fifth. <c>Accuracy</c> is the mean fold accuracy.
/// </summary>
/// <param name="epochs">Training epochs per fold.</param>
/// <param name="learning_rate">Initial learning rate.</param>
/// <param name="r">Randomness source for initialization and shuffling.</param>
/// <param name="DymanicLearningRate">Decay the learning rate as training proceeds.</param>
/// <param name="margin">Margin for margin-based updates.</param>
/// <param name="Average">Score with the averaged-perceptron weights instead of the final ones.</param>
/// <param name="Aggressive">Use aggressive (per-example step size) updates.</param>
/// <param name="c">Regularization constant.</param>
/// <param name="svm">Train as an SVM.</param>
/// <param name="tradeoff">Regularization/loss tradeoff.</param>
/// <param name="logistic_regression">Train as logistic regression.</param>
/// <param name="train">Reader for the training examples fed to SetValidateData.</param>
/// <param name="test">Reader for the test examples fed to SetValidateData.</param>
public Data(int epochs, double learning_rate, Random r, bool DymanicLearningRate, double margin, bool Average, bool Aggressive, double c, bool svm, double tradeoff, bool logistic_regression, StreamReader train, StreamReader test)
{
    C = c;
    SVM = svm;
    Tradeoff = tradeoff;
    Logistic_Regression = logistic_regression;

    // Optional seed vector for the averaged perceptron; stays null otherwise.
    double[] w_average = new double[16];
    double b_average;
    WeightBias wb_average = null;
    if (Average)
    {
        for (int i = 0; i < 16; i++)
        {
            double randomNumber = (r.NextDouble() * (0.01 + 0.01) - 0.01); // uniform in [-0.01, 0.01)
            w_average[i] = randomNumber;
        }
        b_average = (r.NextDouble() * (0.01 + 0.01) - 0.01);
        wb_average = new WeightBias(w_average, b_average, 0);
    }

    double temp_accuracy1;
    double temp_accuracy2;
    double temp_accuracy3;
    double temp_accuracy4;
    double temp_accuracy5;
    Learning_Rate = learning_rate;
    Margin = margin;
    Cross_Validate_Data = new List<Entry>();
    Cross_1 = new List<Entry>();
    Cross_2 = new List<Entry>();
    Cross_3 = new List<Entry>();
    Cross_4 = new List<Entry>();
    Cross_5 = new List<Entry>();
    SetValidateData(train, test, r);

    #region First Fold
    // (Dead `new List<Entry>()` placeholders that were immediately overwritten
    // by these assignments, and commented-out SetData calls, were removed.)
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_4))).ToList();
    Test_Data = Cross_5;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);

    // Random starting point shared by every fold (bias drawn first, then the
    // 16 weights — draw order matters for reproducibility with a seeded r).
    // NOTE(review): each fold rebuilds wb from this same `w` array; if
    // CalculateWB mutates it in place, later folds do not start from the
    // original random point — confirm and clone if isolation is intended.
    double[] w = new double[16];
    double b = (r.NextDouble() * (0.01 + 0.01) - 0.01);
    for (int i = 0; i < 16; i++)
    {
        double randomNumber = (r.NextDouble() * (0.01 + 0.01) - 0.01);
        w[i] = randomNumber;
    }
    WeightBias wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy1 = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        temp_accuracy1 = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Second Fold
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_5))).ToList();
    Test_Data = Cross_4;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy2 = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        temp_accuracy2 = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Third Fold
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_3;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy3 = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        temp_accuracy3 = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Fourth Fold
    Training_Data = Cross_1.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_2;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy4 = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        temp_accuracy4 = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    #region Fifth Fold
    Training_Data = Cross_2.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_1;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy5 = perceptron.GetAccuracy(Test_Data, wb);
    if (Average)
    {
        temp_accuracy5 = perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average);
    }
    #endregion

    Accuracy = (temp_accuracy1 + temp_accuracy2 + temp_accuracy3 + temp_accuracy4 + temp_accuracy5) / 5;
}
/// <summary>
/// Trains the dense 16-feature model on one train/test file pair for a fixed
/// number of epochs, recording the test accuracy after every epoch, and keeps
/// the weights from the epoch with the best test accuracy (ties go to the
/// later epoch). When <paramref name="Average"/> is set, the averaged
/// perceptron weights are evaluated and stored instead of the per-epoch ones.
/// </summary>
/// <param name="r1">Reader for the training examples.</param>
/// <param name="r2">Reader for the test examples.</param>
/// <param name="epochs">Number of training epochs.</param>
/// <param name="learning_rate">Initial learning rate.</param>
/// <param name="r">Randomness source for initialization and shuffling.</param>
/// <param name="DymanicLearningRate">Decay the learning rate as training proceeds.</param>
/// <param name="margin">Margin for margin-based updates.</param>
/// <param name="Average">Evaluate/store the averaged-perceptron weights.</param>
/// <param name="Aggressive">Use aggressive (per-example step size) updates.</param>
/// <param name="c">Regularization constant.</param>
/// <param name="svm">Train as an SVM.</param>
/// <param name="tradeoff">Regularization/loss tradeoff.</param>
/// <param name="logistic_regression">Train as logistic regression.</param>
public Data(StreamReader r1, StreamReader r2, int epochs, double learning_rate, Random r, bool DymanicLearningRate, double margin, bool Average, bool Aggressive, double c, bool svm, double tradeoff, bool logistic_regression)
{
    C = c;
    SVM = svm;
    Tradeoff = tradeoff;
    Logistic_Regression = logistic_regression;

    // Optional seed vector for the averaged perceptron; stays null otherwise.
    double[] w_average = new double[16];
    double b_average;
    WeightBias wb_average = null;
    if (Average)
    {
        for (int i = 0; i < 16; i++)
        {
            double randomNumber = (r.NextDouble() * (0.01 + 0.01) - 0.01); // uniform in [-0.01, 0.01)
            w_average[i] = randomNumber;
        }
        b_average = (r.NextDouble() * (0.01 + 0.01) - 0.01);
        wb_average = new WeightBias(w_average, b_average, 0);
    }

    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();
    SetData(r1, r2);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression);

    // Random starting point: bias drawn first, then the 16 weights.
    double[] w = new double[16];
    double b = (r.NextDouble() * (0.01 + 0.01) - 0.01);
    for (int i = 0; i < 16; i++)
    {
        double randomNumber = (r.NextDouble() * (0.01 + 0.01) - 0.01);
        w[i] = randomNumber;
    }
    WeightBias wb = new WeightBias(w, b, 0);

    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        if (Average)
        {
            perceptron.WeightBias_Average.Updates = wb.Updates;
            // NOTE(review): every epoch stores the same WeightBias_Average
            // reference, so all dictionary entries alias one object and the
            // "best epoch" snapshot may only reflect the final state —
            // confirm whether a defensive copy is intended here.
            AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(Test_Data, perceptron.WeightBias_Average), perceptron.WeightBias_Average));
        }
        else
        {
            AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(Test_Data, wb), wb));
        }
        perceptron.ShuffleTraining_Data(r);
    }

    // Highest test accuracy wins; ties are broken in favor of the later epoch.
    // (A commented-out debug dump of the per-epoch accuracies was removed.)
    AccuracyWB bestAccuracy = AccuracyWeightB.OrderByDescending(x => x.Value.Accuracy).ThenByDescending(y => y.Key).Select(z => z.Value).First();
    Accuracy = bestAccuracy.Accuracy;
    BestWeightBias = bestAccuracy.Weight_Bias;
    Learning_Rate = learning_rate;

    // Labels appears to accumulate predictions inside GetAccuracy (hence this
    // swap): score the training set against a scratch list so the recorded
    // test-set labels are not polluted, then restore the original list.
    List<int> temp = perceptron.Labels;
    perceptron.Labels = new List<int>();
    Train_Accuracy = perceptron.GetAccuracy(Training_Data, bestAccuracy.Weight_Bias);
    perceptron.Labels = temp;
}
/// <summary>
/// Five-fold cross-validation for the sparse-feature model (margin
/// perceptron / SVM / logistic regression): each rotation trains for
/// <paramref name="epochs"/> epochs on four of the five pre-split files and
/// scores the held-out one. <c>Accuracy</c> is the mean fold accuracy.
/// </summary>
/// <param name="epochs">Training epochs per fold.</param>
/// <param name="learning_rate">Initial learning rate.</param>
/// <param name="margin">Margin for margin-based updates.</param>
/// <param name="c">Regularization constant (stored in <c>C</c>).</param>
/// <param name="logistic_regression">Train as logistic regression.</param>
/// <param name="tradeoff">Regularization/loss tradeoff (stored in <c>Tradeoff</c>).</param>
/// <param name="r">Randomness source for initialization and shuffling.</param>
/// <param name="train">Not used by this overload; kept for signature compatibility.</param>
/// <param name="test">Not used by this overload; kept for signature compatibility.</param>
/// <param name="r1">First data fold.</param>
/// <param name="r2">Second data fold.</param>
/// <param name="r3">Third data fold.</param>
/// <param name="r4">Fourth data fold.</param>
/// <param name="r5">Fifth data fold.</param>
public Data(int epochs, double learning_rate, double margin, double c, bool logistic_regression, double tradeoff, Random r, StreamReader train, StreamReader test, StreamReader r1, StreamReader r2, StreamReader r3, StreamReader r4, StreamReader r5)
{
    double temp_accuracy1;
    double temp_accuracy2;
    double temp_accuracy3;
    double temp_accuracy4;
    double temp_accuracy5;
    Learning_Rate = learning_rate;
    Margin = margin;
    C = c;
    Tradeoff = tradeoff;

    #region First Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r5);
    SetData(r2);
    SetData(r3);
    SetData(r4);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);

    // Sparse weights: feature ids absent from the dictionary are implicitly
    // zero, so training starts from the zero vector plus a small random bias.
    // (A commented-out dense random initializer was removed.)
    Dictionary<int, double> w = new Dictionary<int, double>();
    double b = (r.NextDouble() * (0.01 + 0.01) - 0.01); // uniform in [-0.01, 0.01)
    WeightBias wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy1 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Second Fold
    // NOTE(review): each fold rebuilds wb from the same `w` dictionary — if
    // CalculateWB populates it in place, later folds do not start from zero
    // weights; confirm and clone if isolation is intended. Likewise the five
    // readers are re-read each fold; SetData presumably rewinds them.
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r4);
    SetData(r2);
    SetData(r3);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy2 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Third Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r3);
    SetData(r2);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy3 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Fourth Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r1, r2);
    SetData(r3);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy4 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Fifth Fold
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    SetData(r2, r1);
    SetData(r3);
    SetData(r4);
    SetData(r5);
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy5 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    Accuracy = (temp_accuracy1 + temp_accuracy2 + temp_accuracy3 + temp_accuracy4 + temp_accuracy5) / 5;
}