private void Predict()
{
    TrainingValue tmp = new TrainingValue();
    tmp.Features = new double[FEATURE_COUNT];
    FillTrainingValue(ref tmp, fftResults);

    int result = trainer.Predict(tmp);
    predictedMuscleState = (MuscleState)result;
}
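Both snippets in this section pass features around in a TrainingValue object that is not shown here. The minimal sketch below is only an assumption reconstructed from how the type is used above (a State label, a Features array, and a (state, featureCount) constructor); the project's real declaration may differ.

// Assumed shape of TrainingValue, inferred from its usage in Predict() and RunFFT().
public class TrainingValue
{
    public int State;          // class label, e.g. (int)MuscleState or (int)EyesStatus
    public double[] Features;  // one entry per feature (FEATURE_COUNT values)

    public TrainingValue() { }

    public TrainingValue(int state, int featureCount)
    {
        State = state;
        Features = new double[featureCount];
    }
}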
private void RunFFT()
{
    // Average the four electrode channels sample by sample before running the FFT.
    // (Parentheses added: the original divided only af8[i] by FEATURE_COUNT.)
    readingsMean.Clear();
    for (int i = 0; i < af7.Count; i++)
    {
        readingsMean.Add((af7[i] + tp9[i] + tp10[i] + af8[i]) / FEATURE_COUNT);
    }

    CalculateFFT(readingsMean, FFTResults);
    CalculateFFT(af7, AF7FFT);
    CalculateFFT(af8, AF8FFT);
    CalculateFFT(tp9, TP9FFT);
    CalculateFFT(tp10, TP10FFT);

    if (!Training || (Training && ignore == 0))
    {
        // One feature per channel: the power spectral density of its FFT.
        TrainingValue trainingValue = new TrainingValue((int)Status, FEATURE_COUNT);
        trainingValue.Features[0] = PSD(TP9FFT, FREQ_STEP);
        trainingValue.Features[1] = PSD(AF7FFT, FREQ_STEP);
        trainingValue.Features[2] = PSD(AF8FFT, FREQ_STEP);
        trainingValue.Features[3] = PSD(TP10FFT, FREQ_STEP);

        if (!Training && trainer != null && trainer.Trained)
        {
            Status = (EyesStatus)trainer.Predict(trainingValue);
        }

        if (training || keepTrainingData)
        {
            TrainingValues.Add(trainingValue);
        }
    }
    else if (Training && ignore != 0)
    {
        // Still inside the warm-up window after a state change; skip this sample.
        ignore--;
    }
}
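RunFFT reduces each channel's FFT to a single band-power feature through a PSD helper that is not shown. One common definition integrates the squared bin magnitudes over frequency; the sketch below assumes that interpretation and List<double> buffers of FFT magnitudes, so the project's actual PSD implementation may well differ.

// Hypothetical PSD helper: 'fftMagnitudes' is assumed to hold the magnitude of each
// FFT bin and 'freqStep' the frequency resolution in Hz.
private static double PSD(List<double> fftMagnitudes, double freqStep)
{
    double power = 0;
    foreach (double magnitude in fftMagnitudes)
    {
        power += magnitude * magnitude * freqStep; // integrate |X(f)|^2 over frequency
    }
    return power;
}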
public static void CrossValidate(List<List<TrainingValue>> dataSets, List<int> ks, int featureSize, List<string> filenames)
{
    List<ClassifierType> types = new List<ClassifierType>
    {
        ClassifierType.Bayes,
        ClassifierType.DecisionTree,
        ClassifierType.LDA,
        ClassifierType.SVM,
    };

    double[] typeAvgs = new double[types.Count];
    int dataSetIndex = 0;

    foreach (List<TrainingValue> data in dataSets)
    {
        Console.WriteLine("\nFile: " + filenames[dataSetIndex++]);
        int typeIndex = 0;

        foreach (ClassifierType type in types)
        {
            Console.WriteLine("\nClassifier: " + type);
            Console.WriteLine("========================================\n");
            double typeAvg = 0;

            foreach (int k in ks)
            {
                // Drop leftover samples so the data splits evenly into k folds.
                int toDiscard = data.Count % k;
                data.RemoveRange(data.Count - toDiscard, toDiscard);
                int sampleSize = data.Count / k;

                List<TrainingValue> workingCopy = new List<TrainingValue>();
                double avg = 0;

                for (int index = 0; index < k; index++)
                {
                    // Hold out fold 'index' for testing and train on the remaining folds.
                    workingCopy.AddRange(data);
                    List<TrainingValue> sample = workingCopy.GetRange(index * sampleSize, sampleSize);
                    workingCopy.RemoveRange(index * sampleSize, sampleSize);

                    Trainer trainer = new Trainer(featureSize, type);
                    trainer.Train(workingCopy);

                    // 2x2 confusion matrix: rows = actual class, columns = predicted class.
                    int[,] confMat = new int[2, 2];
                    foreach (TrainingValue predValue in sample)
                    {
                        int result = trainer.Predict(predValue);
                        int i = (predValue.State == 1) ? 1 : 0;
                        int j = (result == 1) ? 1 : 0;
                        confMat[i, j]++;
                    }

                    // Fold accuracy: correct predictions divided by fold size.
                    avg += (confMat[0, 0] + confMat[1, 1]) / (double)sampleSize;
                    workingCopy.Clear();
                }

                avg /= k;
                typeAvg += avg;
                Console.WriteLine("k = " + k + ": " + avg);
            }

            typeAvgs[typeIndex++] += typeAvg / ks.Count;
            Console.WriteLine("Total average: " + typeAvg / ks.Count);
            Console.WriteLine("");
        }
    }

    Console.WriteLine("\n\nClassifiers precision");
    Console.WriteLine("========================================\n");
    int aux = 0;
    foreach (ClassifierType type in types)
    {
        Console.WriteLine(type + ": " + typeAvgs[aux++] / dataSets.Count);
    }
}
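A call to CrossValidate might look like the following; the file names, k values, and the LoadTrainingValues loader are hypothetical, included only to show the expected argument shapes (featureSize matches the four PSD features computed in RunFFT).

// Illustrative only: file names, k values, and the loader are assumptions.
List<string> filenames = new List<string> { "session1.csv", "session2.csv" };
List<List<TrainingValue>> dataSets = new List<List<TrainingValue>>();
foreach (string file in filenames)
{
    dataSets.Add(LoadTrainingValues(file)); // hypothetical loader, not part of the original code
}

List<int> ks = new List<int> { 5, 10 };
CrossValidate(dataSets, ks, 4, filenames); // featureSize = 4, one PSD per electrode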