public Perceptron(List<Entry> train, List<Entry> test, double learning_rate, bool dymanicLearningRate, double margin, WeightBias wb_average, bool aggressive, double c, bool svm, double tradeoff, bool logistic_regression, int forestSize)
{
    Training_Data = train;
    Test_Data = test;
    Learning_Rate = learning_rate;
    Initial_Learning_Rate = learning_rate;
    DymanicLearningRate = dymanicLearningRate;
    if (DymanicLearningRate)
    {
        T_Count = 1; // example counter for the decaying learning-rate schedule
    }
    Margin = margin;
    if (wb_average != null)
    {
        WeightBias_Average = wb_average; // running accumulator for the averaged perceptron
    }
    Aggressive = aggressive;
    Labels = new List<int>();
    C = c;
    SVM = svm;
    Tradeoff = tradeoff;
    Logistic_Regression = logistic_regression;
    ForestSize = forestSize;
}
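// Hedged usage sketch (hypothetical helper, not part of the class): the
// plain-perceptron configuration of this constructor, with every variant
// flag switched off. `train`, `test`, and `forestSize` come from the caller.
public static Perceptron MakePlainPerceptron(List<Entry> train, List<Entry> test, double rate, int forestSize)
{
    return new Perceptron(train, test, rate,
        dymanicLearningRate: false, margin: 0, wb_average: null,
        aggressive: false, c: 0, svm: false, tradeoff: 0,
        logistic_regression: false, forestSize: forestSize);
}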
public Data(List<Entry> r1, List<Entry> r2, Random r, int epochs, double learning_rate, double margin, double c, bool logistic_regression, double tradeoff)
{
    C = c;
    Tradeoff = tradeoff;
    Training_Data = r1;
    Test_Data = r2;
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);

    Dictionary<int, double> w = new Dictionary<int, double>();
    double b = (r.NextDouble() * (0.01 + 0.01)) - 0.01; // uniform in [-0.01, 0.01)
    WeightBias wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        // snapshot: CalculateWB mutates the weight dictionary in place, so each
        // epoch's stored entry needs its own copy of the weights
        WeightBias snapshot = new WeightBias(new Dictionary<int, double>(wb.Weight), wb.Bias, wb.Updates);
        AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(Test_Data, snapshot), snapshot));
        perceptron.ShuffleTraining_Data(r);
    }

    // pick the epoch with the best test accuracy; ties go to the later epoch
    AccuracyWB bestAccuracy = AccuracyWeightB
        .OrderByDescending(x => x.Value.Accuracy)
        .ThenByDescending(y => y.Key)
        .Select(z => z.Value)
        .First();
    Train_Accuracy = perceptron.GetAccuracy(Training_Data, bestAccuracy.Weight_Bias);
    Test_Accuracy = bestAccuracy.Accuracy;
    BestWeightBias = bestAccuracy.Weight_Bias;
    Learning_Rate = learning_rate;
}
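// Hedged usage sketch (hypothetical helper; assumes Train_Accuracy and
// Test_Accuracy are publicly readable): train for a fixed number of epochs
// and report the accuracies this constructor stores.
public static void RunSimplePerceptron(List<Entry> train, List<Entry> test)
{
    var data = new Data(train, test, new Random(42),
        epochs: 20, learning_rate: 0.01, margin: 0, c: 1.0,
        logistic_regression: false, tradeoff: 1.0);
    Console.WriteLine($"train = {data.Train_Accuracy:F2}%  test = {data.Test_Accuracy:F2}%");
}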
public double GetAccuracy(List<Entry> test_Data, WeightBias wb)
{
    double[] w = wb.Weight;
    double b = wb.Bias;
    double TotalErrors = 0;
    foreach (var item in test_Data)
    {
        int y = item.Sign;
        int yguess;
        double[] x = item.Vector;

        // dense dot product w.x + b over the ForestSize features
        double xw = 0;
        for (int i = 0; i < ForestSize; i++)
        {
            xw += x[i] * w[i];
        }
        xw += b;

        if (xw >= 0)
        {
            yguess = +1;
            Labels.Add(yguess);
        }
        else
        {
            yguess = -1;
            Labels.Add(0); // note: negative predictions are recorded as 0, not -1, in Labels
        }
        if (y != yguess)
        {
            TotalErrors++;
        }
    }
    return 100 - ((TotalErrors / test_Data.Count) * 100);
}
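// The method above returns 100 * (1 - errors/N). A minimal standalone
// restatement of that rule (hypothetical helper, not part of the class):
// with 5 examples and 1 mismatch it returns 80.0.
public static double AccuracyPercent(int[] labels, double[] scores)
{
    int errors = 0;
    for (int i = 0; i < labels.Length; i++)
    {
        int yguess = scores[i] >= 0 ? +1 : -1; // same decision rule as above
        if (yguess != labels[i])
        {
            errors++;
        }
    }
    return 100.0 - (100.0 * errors / labels.Length);
}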
public Data(List<Entry> r, StreamReader r2, double learning_rate, WeightBias bestWB, int forestSize)
{
    ForestSize = forestSize;
    data_1 = r;
    data_2 = new List<Entry>();
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();
    Predictions = new List<Prediction>();
    // all variant flags off: only GetAccuracy (and the Labels it fills) is used here
    perceptron = new Perceptron(data_1, null, learning_rate, false, 0, null, false, 0, false, 0, false, ForestSize);
    Test_Accuracy = perceptron.GetAccuracy(data_1, bestWB);
    SetAccountIDs(r2, perceptron.Labels); // attach the predicted labels to the IDs read from r2
}
public double GetAccuracy(List<Entry> test_Data, WeightBias wb)
{
    Dictionary<int, double> w = wb.Weight;
    double b = wb.Bias;
    double TotalErrors = 0;
    foreach (var item in test_Data)
    {
        int y = item.Sign;
        Dictionary<int, double> x = item.Vector;

        // sparse dot product w.x + b; features with no learned weight contribute 0
        double xw = 0;
        foreach (var xi in x)
        {
            if (w.ContainsKey(xi.Key))
            {
                xw += w[xi.Key] * xi.Value;
            }
        }
        xw += b;

        int yguess = xw >= 0 ? +1 : -1;
        Labels.Add(yguess);
        if (y != yguess)
        {
            TotalErrors++;
        }
    }
    return 100 - ((TotalErrors / test_Data.Count) * 100);
}
public WeightBias CalculateWB(WeightBias wb)
{
    Dictionary<int, double> w = wb.Weight;
    double b = wb.Bias;
    int updates = wb.Updates;
    foreach (var item in Training_Data)
    {
        int y = item.Sign; // true label, +1 or -1
        Dictionary<int, double> x = item.Vector;

        // sparse dot product w.x + b; features with no learned weight contribute 0
        double xw = 0;
        foreach (var xi in x)
        {
            if (w.ContainsKey(xi.Key))
            {
                xw += w[xi.Key] * xi.Value;
            }
        }
        xw += b;

        if (Logistic_Regression) // SGD on the L2-regularized logistic loss; every example updates
        {
            foreach (var xi in x)
            {
                if (w.ContainsKey(xi.Key))
                {
                    w[xi.Key] = ((1 - (2 * Learning_Rate / Tradeoff)) * w[xi.Key]) + ((Learning_Rate * y * xi.Value) / (Math.Exp(y * xw) + 1));
                }
                else
                {
                    // unseen feature: a small random value stands in for the previous weight
                    w[xi.Key] = ((1 - (2 * Learning_Rate / Tradeoff)) * RandomNumber()) + ((Learning_Rate * y * xi.Value) / (Math.Exp(y * xw) + 1));
                }
            }
            // the bias gradient uses the full margin y * (w.x + b), i.e. y * xw
            b = ((1 - (2 * Learning_Rate / Tradeoff)) * b) + ((Learning_Rate * y) / (Math.Exp(y * xw) + 1));
            updates++;
        }
        else // Support Vector Machine (SVM): SGD on the L2-regularized hinge loss
        {
            if (y * xw <= 1) // inside the margin: weight decay plus loss gradient
            {
                foreach (var xi in x)
                {
                    if (w.ContainsKey(xi.Key))
                    {
                        w[xi.Key] = ((1 - Learning_Rate) * w[xi.Key]) + (Learning_Rate * C * y * xi.Value);
                    }
                    else
                    {
                        // unseen feature: a small random value stands in for the previous weight
                        w.Add(xi.Key, ((1 - Learning_Rate) * RandomNumber()) + (Learning_Rate * C * y * xi.Value));
                    }
                }
                b = ((1 - Learning_Rate) * b) + (Learning_Rate * C * y);
                updates++;
            }
            else // outside the margin: weight decay only
            {
                foreach (var xi in x)
                {
                    if (w.ContainsKey(xi.Key))
                    {
                        w[xi.Key] = (1 - Learning_Rate) * w[xi.Key];
                    }
                    else
                    {
                        w.Add(xi.Key, (1 - Learning_Rate) * RandomNumber());
                    }
                }
                b = (1 - Learning_Rate) * b;
            }
        }
    }
    return new WeightBias(w, b, updates);
}
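// A hedged dense restatement of the SVM branch above (hypothetical helper;
// `gamma` plays the role of Learning_Rate): one SGD step on the
// L2-regularized hinge loss. Inside the margin, y * (w.x + b) <= 1, both the
// decay and the loss gradient apply; outside it, only the decay does.
public static void SvmStep(double[] w, ref double b, double[] x, int y, double gamma, double c)
{
    double xw = b;
    for (int i = 0; i < w.Length; i++)
    {
        xw += w[i] * x[i];
    }
    if (y * xw <= 1)
    {
        for (int i = 0; i < w.Length; i++)
        {
            w[i] = ((1 - gamma) * w[i]) + (gamma * c * y * x[i]);
        }
        b = ((1 - gamma) * b) + (gamma * c * y);
    }
    else
    {
        for (int i = 0; i < w.Length; i++)
        {
            w[i] = (1 - gamma) * w[i];
        }
        b = (1 - gamma) * b;
    }
}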
public Data(int epochs, double learning_rate, double margin, double c, bool logistic_regression, double tradeoff, Random r, List<Entry> train, List<Entry> test)
{
    double temp_accuracy1;
    double temp_accuracy2;
    double temp_accuracy3;
    double temp_accuracy4;
    double temp_accuracy5;
    Learning_Rate = learning_rate;
    C = c;
    Tradeoff = tradeoff;
    Training_Data = new List<Entry>();
    Test_Data = new List<Entry>();
    Cross_Validate_Data = train.Concat(test).ToList();
    Cross_1 = new List<Entry>();
    Cross_2 = new List<Entry>();
    Cross_3 = new List<Entry>();
    Cross_4 = new List<Entry>();
    Cross_5 = new List<Entry>();
    SetValidateData(r); // split Cross_Validate_Data into the five folds

    double b = (r.NextDouble() * (0.01 + 0.01)) - 0.01; // uniform in [-0.01, 0.01)

    #region First Fold
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_4))).ToList();
    Test_Data = Cross_5;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    // each fold starts from fresh weights: CalculateWB mutates the dictionary in place
    WeightBias wb = new WeightBias(new Dictionary<int, double>(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy1 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Second Fold
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_5))).ToList();
    Test_Data = Cross_4;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(new Dictionary<int, double>(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy2 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Third Fold
    Training_Data = Cross_1.Concat(Cross_2.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_3;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(new Dictionary<int, double>(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy3 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Fourth Fold
    Training_Data = Cross_1.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_2;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(new Dictionary<int, double>(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy4 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    #region Fifth Fold
    Training_Data = Cross_2.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    Test_Data = Cross_1;
    perceptron = new Perceptron(Training_Data, Test_Data, learning_rate, margin, C, logistic_regression, Tradeoff, r);
    wb = new WeightBias(new Dictionary<int, double>(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy5 = perceptron.GetAccuracy(Test_Data, wb);
    #endregion

    // mean held-out accuracy across the five folds
    Test_Accuracy = (temp_accuracy1 + temp_accuracy2 + temp_accuracy3 + temp_accuracy4 + temp_accuracy5) / 5;
}
public WeightBias CalculateWB(WeightBias wb)
{
    double[] w = wb.Weight;
    double b = wb.Bias;
    int updates = wb.Updates;
    foreach (var item in Training_Data)
    {
        if (DymanicLearningRate)
        {
            Learning_Rate = Initial_Learning_Rate / T_Count; // decaying rate schedule
        }
        int y = item.Sign; // true label, +1 or -1
        double[] x = item.Vector;

        // dense dot product w.x + b over the ForestSize features
        double xw = 0;
        for (int i = 0; i < ForestSize; i++)
        {
            xw += x[i] * w[i];
        }
        xw += b;

        if (Logistic_Regression) // SGD on the L2-regularized logistic loss; every example updates
        {
            for (int i = 0; i < ForestSize; i++)
            {
                if (x[i] != 0) // zero features leave their weights untouched
                {
                    w[i] = ((1 - (2 * Learning_Rate / Tradeoff)) * w[i]) + ((Learning_Rate * y * x[i]) / (Math.Exp(y * xw) + 1));
                }
            }
            // the bias gradient uses the full margin y * (w.x + b), i.e. y * xw
            b = ((1 - (2 * Learning_Rate / Tradeoff)) * b) + ((Learning_Rate * y) / (Math.Exp(y * xw) + 1));
            updates++;
        }
        else if (SVM) // SGD on the L2-regularized hinge loss
        {
            if (y * xw <= 1) // inside the margin: weight decay plus loss gradient
            {
                for (int i = 0; i < ForestSize; i++)
                {
                    if (x[i] != 0)
                    {
                        w[i] = ((1 - Learning_Rate) * w[i]) + (Learning_Rate * C * y * x[i]);
                    }
                }
                b = ((1 - Learning_Rate) * b) + (Learning_Rate * C * y);
                updates++;
            }
            else // outside the margin: weight decay only
            {
                for (int i = 0; i < ForestSize; i++)
                {
                    if (x[i] != 0)
                    {
                        w[i] = (1 - Learning_Rate) * w[i];
                    }
                }
                b = (1 - Learning_Rate) * b;
            }
        }
        else // Perceptron (plain, margin, aggressive, and/or averaged)
        {
            int yguess = xw >= Margin ? +1 : -1;
            if (y != yguess)
            {
                if (Aggressive)
                {
                    // aggressive step: pick the rate that lands the example on the margin,
                    // eta = (Margin - y * xw) / (x.x + 1); the +1 is the bias feature
                    double xx = 1;
                    for (int i = 0; i < ForestSize; i++)
                    {
                        xx += x[i] * x[i];
                    }
                    Learning_Rate = (Margin - (y * xw)) / xx;
                }
                for (int i = 0; i < ForestSize; i++)
                {
                    w[i] += Learning_Rate * y * x[i];
                }
                b += Learning_Rate * y;
                updates++;
            }
            if (DymanicLearningRate)
            {
                T_Count++;
            }
            if (WeightBias_Average != null) // averaged perceptron: accumulate after every example
            {
                for (int i = 0; i < ForestSize; i++)
                {
                    WeightBias_Average.Weight[i] += w[i];
                }
                WeightBias_Average.Bias += b;
            }
        }
    }
    return new WeightBias(w, b, updates);
}
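// The aggressive branch above picks the step size that lands a mistake
// exactly on the margin: eta = (Margin - y * (w.x + b)) / (x.x + 1), where
// the +1 is the implicit bias feature. A standalone restatement (hypothetical
// helper; `yTimesScore` is y * (w.x + b)):
public static double AggressiveRate(double[] x, double margin, double yTimesScore)
{
    double xx = 1; // the bias feature contributes 1 to x.x
    foreach (double xi in x)
    {
        xx += xi * xi;
    }
    return (margin - yTimesScore) / xx;
}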
public AccuracyWB(double accuracy, WeightBias weight_Bias)
{
    Weight_Bias = weight_Bias;
    Accuracy = accuracy;
}
public Data(List<Entry> train, List<Entry> test, int epochs, double learning_rate, Random r, bool DymanicLearningRate, double margin, bool Average, bool Aggressive, double c, bool svm, double tradeoff, bool logistic_regression, int forestSize)
{
    C = c;
    SVM = svm;
    Tradeoff = tradeoff;
    Logistic_Regression = logistic_regression;
    ForestSize = forestSize;

    // optional running accumulator for the averaged perceptron
    double[] w_average = new double[ForestSize];
    double b_average;
    WeightBias wb_average = null;
    if (Average)
    {
        for (int i = 0; i < ForestSize; i++)
        {
            w_average[i] = (r.NextDouble() * (0.01 + 0.01)) - 0.01; // uniform in [-0.01, 0.01)
        }
        b_average = (r.NextDouble() * (0.01 + 0.01)) - 0.01;
        wb_average = new WeightBias(w_average, b_average, 0);
    }

    data_1 = train;
    data_2 = test;
    AccuracyWeightB = new Dictionary<int, AccuracyWB>();
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);

    // small random initial weights and bias
    double[] w = new double[ForestSize];
    double b = (r.NextDouble() * (0.01 + 0.01)) - 0.01;
    for (int i = 0; i < ForestSize; i++)
    {
        w[i] = (r.NextDouble() * (0.01 + 0.01)) - 0.01;
    }
    WeightBias wb = new WeightBias(w, b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        if (Average)
        {
            perceptron.WeightBias_Average.Updates = wb.Updates;
            // snapshot the running average: CalculateWB keeps mutating it, so each
            // epoch's stored entry needs its own copy of the weights
            WeightBias snapshot = new WeightBias((double[])perceptron.WeightBias_Average.Weight.Clone(), perceptron.WeightBias_Average.Bias, wb.Updates);
            AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(data_2, snapshot), snapshot));
        }
        else
        {
            // snapshot for the same reason: the weight array inside wb is updated in place
            WeightBias snapshot = new WeightBias((double[])wb.Weight.Clone(), wb.Bias, wb.Updates);
            AccuracyWeightB.Add(i + 1, new AccuracyWB(perceptron.GetAccuracy(data_2, snapshot), snapshot));
        }
        perceptron.ShuffleTraining_Data(r);
    }

    // pick the epoch with the best test accuracy; ties go to the later epoch
    AccuracyWB bestAccuracy = AccuracyWeightB
        .OrderByDescending(x => x.Value.Accuracy)
        .ThenByDescending(y => y.Key)
        .Select(z => z.Value)
        .First();
    Test_Accuracy = bestAccuracy.Accuracy;
    BestWeightBias = bestAccuracy.Weight_Bias;
    Learning_Rate = learning_rate;
}
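// Hedged usage sketch (hypothetical helper): the averaged-perceptron
// configuration of this constructor, with every other variant flag off.
// `forestSize` must match the dense width of the Entry vectors.
public static Data TrainAveragedPerceptron(List<Entry> train, List<Entry> test, int forestSize)
{
    return new Data(train, test, epochs: 20, learning_rate: 0.1, r: new Random(7),
        DymanicLearningRate: false, margin: 0, Average: true, Aggressive: false,
        c: 0, svm: false, tradeoff: 0, logistic_regression: false, forestSize: forestSize);
}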
public Data(int epochs, double learning_rate, Random r, bool DymanicLearningRate, double margin, bool Average, bool Aggressive, double c, bool svm, double tradeoff, bool logistic_regression, List<Entry> train, List<Entry> test, int forestSize)
{
    double temp_accuracy1;
    double temp_accuracy2;
    double temp_accuracy3;
    double temp_accuracy4;
    double temp_accuracy5;
    data_1 = new List<Entry>();
    data_2 = new List<Entry>();
    Cross_Validate_Data = train.Concat(test).ToList();
    Cross_1 = new List<Entry>();
    Cross_2 = new List<Entry>();
    Cross_3 = new List<Entry>();
    Cross_4 = new List<Entry>();
    Cross_5 = new List<Entry>();
    SetValidateData(null, null, r); // split Cross_Validate_Data into the five folds
    C = c;
    SVM = svm;
    Tradeoff = tradeoff;
    Logistic_Regression = logistic_regression;
    Learning_Rate = learning_rate;
    Margin = margin;
    ForestSize = forestSize;

    // optional running accumulator for the averaged perceptron; note that the
    // same accumulator is shared by all five folds
    double[] w_average = new double[ForestSize];
    double b_average;
    WeightBias wb_average = null;
    if (Average)
    {
        for (int i = 0; i < ForestSize; i++)
        {
            w_average[i] = (r.NextDouble() * (0.01 + 0.01)) - 0.01; // uniform in [-0.01, 0.01)
        }
        b_average = (r.NextDouble() * (0.01 + 0.01)) - 0.01;
        wb_average = new WeightBias(w_average, b_average, 0);
    }

    double b = (r.NextDouble() * (0.01 + 0.01)) - 0.01;

    // fresh small random weights for each fold: CalculateWB mutates the array
    // in place, so folds must not share one array
    double[] NewWeights()
    {
        double[] nw = new double[ForestSize];
        for (int i = 0; i < ForestSize; i++)
        {
            nw[i] = (r.NextDouble() * (0.01 + 0.01)) - 0.01;
        }
        return nw;
    }

    #region First Fold
    data_1 = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_4))).ToList();
    data_2 = Cross_5;
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);
    WeightBias wb = new WeightBias(NewWeights(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy1 = Average
        ? perceptron.GetAccuracy(data_2, perceptron.WeightBias_Average)
        : perceptron.GetAccuracy(data_2, wb);
    #endregion

    #region Second Fold
    data_1 = Cross_1.Concat(Cross_2.Concat(Cross_3.Concat(Cross_5))).ToList();
    data_2 = Cross_4;
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);
    wb = new WeightBias(NewWeights(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy2 = Average
        ? perceptron.GetAccuracy(data_2, perceptron.WeightBias_Average)
        : perceptron.GetAccuracy(data_2, wb);
    #endregion

    #region Third Fold
    data_1 = Cross_1.Concat(Cross_2.Concat(Cross_4.Concat(Cross_5))).ToList();
    data_2 = Cross_3;
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);
    wb = new WeightBias(NewWeights(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy3 = Average
        ? perceptron.GetAccuracy(data_2, perceptron.WeightBias_Average)
        : perceptron.GetAccuracy(data_2, wb);
    #endregion

    #region Fourth Fold
    data_1 = Cross_1.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    data_2 = Cross_2;
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);
    wb = new WeightBias(NewWeights(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy4 = Average
        ? perceptron.GetAccuracy(data_2, perceptron.WeightBias_Average)
        : perceptron.GetAccuracy(data_2, wb);
    #endregion

    #region Fifth Fold
    data_1 = Cross_2.Concat(Cross_3.Concat(Cross_4.Concat(Cross_5))).ToList();
    data_2 = Cross_1;
    perceptron = new Perceptron(data_1, data_2, learning_rate, DymanicLearningRate, margin, wb_average, Aggressive, C, SVM, Tradeoff, Logistic_Regression, ForestSize);
    wb = new WeightBias(NewWeights(), b, 0);
    for (int i = 0; i < epochs; i++)
    {
        wb = perceptron.CalculateWB(wb);
        perceptron.ShuffleTraining_Data(r);
    }
    temp_accuracy5 = Average
        ? perceptron.GetAccuracy(data_2, perceptron.WeightBias_Average)
        : perceptron.GetAccuracy(data_2, wb);
    #endregion

    // mean held-out accuracy across the five folds
    Test_Accuracy = (temp_accuracy1 + temp_accuracy2 + temp_accuracy3 + temp_accuracy4 + temp_accuracy5) / 5;
}
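// The five #region blocks above differ only in which fold is held out. A
// hedged sketch of the same scheme as a loop (hypothetical helper;
// `trainAndScore` stands in for the per-fold train-then-evaluate work):
public static double CrossValidate(List<List<Entry>> folds, Func<List<Entry>, List<Entry>, double> trainAndScore)
{
    double sum = 0;
    for (int k = 0; k < folds.Count; k++)
    {
        List<Entry> test = folds[k];
        List<Entry> train = folds.Where((fold, i) => i != k).SelectMany(fold => fold).ToList();
        sum += trainAndScore(train, test);
    }
    return sum / folds.Count; // mean held-out accuracy, as computed above
}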