/// <summary>
/// Trains a logistic-regression classifier via Iteratively Reweighted Least
/// Squares (IRLS), saves the fitted model next to the input CSV, and returns
/// the hard class decisions for the training data.
/// </summary>
/// <param name="input1">Training samples, one feature vector per row.</param>
/// <param name="output1">Binary class labels (0/1) for each sample.</param>
/// <param name="fName">CSV file name; the model is saved as "*.IRLS.save".</param>
/// <returns>Predicted labels (0/1) for <paramref name="input1"/>.</returns>
public static int[] IterativeLeastSquares(double[][] input1, int[] output1, string fName)
{
    var learner = new IterativeReweightedLeastSquares<LogisticRegression>()
    {
        // Convergence tolerance for the IRLS updates.
        Tolerance = 1e-4,
        // Maximum number of IRLS iterations to perform.
        // NOTE(review): 10 is low compared to the 100 used elsewhere in this
        // file — confirm this was intentional.
        MaxIterations = 10,
        // No L2 ridge term.
        Regularization = 0
    };

    // Estimate the model from the labeled samples.
    LogisticRegression regression = learner.Learn(input1, output1);

    // Persist the fitted model alongside the source CSV.
    regression.Save(fName.Replace(".csv", ".IRLS.save"), compression: SerializerCompression.None);

    // Decide() maps the per-sample probabilities (0..1) to true/false,
    // which we convert back to 0/1 integers for the caller.
    return Funcs.Utility.BoolToInt(regression.Decide(input1));
}
/// <summary>
/// Trains a logistic-regression classifier on the training data, reports the
/// confusion-matrix error on the test data, and saves the model to disk.
/// </summary>
/// <param name="train_data">Training samples, one feature vector per row.</param>
/// <param name="test_data">Test samples used to estimate the error.</param>
/// <param name="train_label">Labels of the training data.</param>
/// <param name="test_label">Labels of the test data.</param>
/// <param name="Classifier_Path">Directory where the classifier is saved.</param>
/// <param name="Classifier_Name">File name of the saved classifier.</param>
public void LogisticRegression(double[][] train_data, double[][] test_data, int[] train_label, int[] test_label, string Classifier_Path, string Classifier_Name)
{
    var learner = new IterativeReweightedLeastSquares<LogisticRegression>()
    {
        // Convergence tolerance for the IRLS updates.
        Tolerance = 1e-4,
        // Maximum number of IRLS iterations to perform.
        MaxIterations = 100,
        // No L2 ridge term.
        Regularization = 0
    };

    // Fit the model on the training set.
    LogisticRegression regression = learner.Learn(train_data, train_label);

    // Estimate the classification error on the held-out test set and
    // report it on the console.
    var cm = GeneralConfusionMatrix.Estimate(regression, test_data, test_label);
    double error = cm.Error;
    Console.WriteLine(error);

    // Persist the fitted model to the requested location.
    regression.Save(Path.Combine(Classifier_Path, Classifier_Name));
}
/// <summary>
/// Trains a classifier of the requested kind on the mined rule features and
/// saves the trained model to "&lt;algorithm&gt;.model" in the current directory.
/// </summary>
/// <param name="train_docrule">Training samples (documents x rule features).</param>
/// <param name="label">Class label for each training sample.</param>
/// <param name="algorithm">"Tree", "SVM", "Logistic", or "GA".</param>
public void Classification_Train(double[,] train_docrule, int[] label, string algorithm)
{
    string classmodelpath;
    int attrSize = eclatlitems.Count;

    // Every mined item becomes one discrete decision variable, named by its
    // 1-based column index.
    DecisionVariable[] variables = new DecisionVariable[attrSize];
    for (int i = 0; i < attrSize; i++)
    {
        variables[i] = new DecisionVariable((i + 1).ToString(), DecisionVariableKind.Discrete);
    }

    if (algorithm == "Tree")
    {
        classmodelpath = algorithm + ".model";
        DecisionTree tree = new DecisionTree(variables, 2);
        C45Learning teacher = new C45Learning(tree);
        var model = teacher.Learn(train_docrule.ToJagged(), label);
        // BUGFIX: save the trained tree (the model), not the C45Learning
        // trainer object — matches how the SVM branch saves its machine.
        model.Save(Path.Combine("", classmodelpath));
    }
    else if (algorithm == "SVM")
    {
        classmodelpath = algorithm + ".model";
        var learn = new SequentialMinimalOptimization()
        {
            UseComplexityHeuristic = true,
            UseKernelEstimation = false
        };
        // Learn() returns the trained support vector machine.
        SupportVectorMachine machine = learn.Learn(train_docrule.ToJagged(), label);
        machine.Save(Path.Combine("", classmodelpath));
    }
    else if (algorithm == "Logistic")
    {
        classmodelpath = algorithm + ".model";
        var learner = new IterativeReweightedLeastSquares<LogisticRegression>()
        {
            // Convergence tolerance for the IRLS updates.
            Tolerance = 1e-4,
            // Maximum number of IRLS iterations; MaxIterations replaces the
            // obsolete Iterations property used elsewhere in this codebase.
            // NOTE(review): 1 iteration seems far too low compared to the
            // 100 used by the other training methods — confirm intent.
            MaxIterations = 1,
            // No L2 ridge term.
            Regularization = 0
        };
        LogisticRegression model = learner.Learn(train_docrule.ToJagged(), label);
        model.Save(Path.Combine("", classmodelpath));
    }
    else if (algorithm == "GA")
    {
        // Genetic-algorithm weights are computed externally (MATLAB).
        weights_ga_matlab();
    }
}