/// <summary>
/// Builds a classifier by learning a bag-of-words codebook from the host file
/// and training a neural network on the transformed samples.
/// </summary>
/// <param name="fileName">Path to the host file containing training phrases and labels.</param>
/// <param name="countLayers">Number of layers for the neural network.</param>
/// <param name="countEpoch">Number of training epochs.</param>
public LogClassifier(string fileName, int countLayers, int countEpoch)
{
    Codebook = new BagOfWords() { MaximumOccurance = 1 };

    int samples = 0;
    var dictionary = Utilities.ReadHostFile(fileName, ref samples);
    Samples = samples;

    // Train only when the file actually yielded both input phrases and labels.
    if (dictionary.Item1.Length != 0 && dictionary.Item2.Length != 0)
    {
        Codebook.Learn(dictionary.Item1);
        double[][] inputs = Codebook.Transform(dictionary.Item1);

        // Array.Length instead of LINQ Count(): O(1), no enumerator allocation.
        int count = inputs.Length;

        double[][] outputs = Utilities.BoolToDouble(dictionary.Item2);
        classifier = new SimpleClassifierNN(inputs, outputs, count, countLayers, countEpoch);

        // Train() returns (error, training time) as a tuple.
        var trainingResult = classifier.Train(inputs, outputs);
        Error = trainingResult.Item1;
        TrainingTime = trainingResult.Item2;
    }
}
/// <summary>
/// Restores a classifier: rebuilds the bag-of-words codebook from the dictionary
/// file and reloads a previously trained network from disk.
/// </summary>
/// <param name="networkFileName">Path to the serialized neural network.</param>
/// <param name="dictionaryFileName">Path to the host file used to relearn the codebook vocabulary.</param>
public LogClassifier(string networkFileName, string dictionaryFileName)
{
    Codebook = new BagOfWords() { MaximumOccurance = 1 };

    int sampleCount = 0;
    var hostData = Utilities.ReadHostFile(dictionaryFileName, ref sampleCount);

    // Relearn the vocabulary only when the dictionary file produced any phrases.
    if (hostData.Item1.Length != 0)
    {
        Codebook.Learn(hostData.Item1);
    }

    classifier = new SimpleClassifierNN(networkFileName);
}