public List<PredictionResult> DoStacking(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
{
    List<PredictionResult> classifiers = new List<PredictionResult>();
    // For each base classifier, run a cross-validation and keep the result with the highest average F-score
    foreach (StdClassifier classifier in standardClassifiers)
    {
        List<PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
        classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First());
    }

    // Build one feature vector per data point, consisting of each base classifier's guess for that point
    List<List<double>> featureList = new List<List<double>>();
    for (int i = 0; i < samData.dataPoints.Count; i++)
    {
        List<double> featuresToDataPoint = new List<double>();
        foreach (PredictionResult classifier in classifiers)
        {
            featuresToDataPoint.Add(classifier.guesses[i]);
        }
        featureList.Add(featuresToDataPoint);
    }

    // Split into nFold problems
    List<Tuple<SVMProblem, SVMProblem>> problems = featureList.GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);

    // Get the correct answers
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

    List<PredictionResult> finalResults = new List<PredictionResult>();
    // Run for each parameter setting
    int cnt = 1;
    foreach (SVMParameter SVMpara in Parameters)
    {
        if (UpdateCallback != null)
        {
            UpdateCallback(cnt++, Parameters.Count);
        }

        // Train on each training set and predict the matching prediction set
        List<double> guesses = new List<double>();
        foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
        {
            guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
        }

        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        // Calculate scoring results
        double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, new List<Feature>(), answers.ToList(), guesses.ConvertAll(x => (int)x));
        finalResults.Add(pR);
    }
    return finalResults;
}
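// Usage sketch (not part of this code base): the base classifiers, SVM parameters and feature lists below
// are hypothetical placeholders; only MetaClassifier, StdClassifier, DoStacking and GetAverageFScore come from this code.
// List<StdClassifier> baseClassifiers = new List<StdClassifier>
// {
//     new StdClassifier("GSR", someGsrParameter, someGsrFeatures, samData),
//     new StdClassifier("HR", someHrParameter, someHrFeatures, samData),
// };
// MetaClassifier stacker = new MetaClassifier("Stacking", new SVMParameter(), samData, baseClassifiers);
// List<PredictionResult> stacked = stacker.DoStacking(feelingsmodel, 1);
// PredictionResult best = stacked.OrderByDescending(r => r.GetAverageFScore()).First();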
public Classifier(string Name, List<SVMParameter> Parameters, SAMData samData) { this.Name = Name; this.Parameters = new List<SVMParameter>(); Parameters.ForEach((x) => { this.Parameters.Add(x); }); this.samData = samData; }
public List<double> GetAllValues(SAMData samd) { if (dataReadings == null) { throw new Exception("ERROR: You need to call SetData before this function is usable"); } List<double> values = new List<double>(); foreach (SAMDataPoint sd in samd.dataPoints) { values.Add(GetValue(sd)); } return values; }
public static SAMData LoadFromPath(string path)
{
    SAMData data = new SAMData();
    try
    {
        System.Web.Script.Serialization.JavaScriptSerializer hej = new System.Web.Script.Serialization.JavaScriptSerializer();
        string jsonTxt = File.ReadAllText(path);
        hej.MaxJsonLength = int.MaxValue;
        var JObj = hej.Deserialize<dynamic>(jsonTxt);
        data.startTime = DateTimeFromUnixTime((long)JObj["startTime"]);
        // Read data points until indexing past the end of the JSON array throws
        int i = 0;
        while (true)
        {
            try
            {
                data.dataPoints.Add(new SAMDataPoint(
                    JObj["data"][i]["time_image_shown"],
                    JObj["data"][i]["time_clicked_next"],
                    int.Parse(JObj["data"][i]["arousal"]),
                    int.Parse(JObj["data"][i]["valence"]),
                    (double)JObj["data"][i]["control_arousal"],
                    (double)JObj["data"][i]["control_valence"],
                    JObj["data"][i]["image_type"]));
                i++;
            }
            catch
            {
                if (i == 0)
                {
                    //Log.LogMessage(innerE.Message);
                }
                break;
            }
        }
    }
    catch
    {
        //Log.LogMessage("[ERROR] SAM data is corrupt!" + "\n" + e.Message);
    }
    return data;
}
public List<PredictionResult> CrossValidateWithBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, double[] answersFromPrevious, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
{
    List<PredictionResult> predictedResults = new List<PredictionResult>();

    List<List<double>> tempFeatures = GetFeatureValues(features, samData);
    if (answersFromPrevious.Length != tempFeatures.Count)
    {
        // The guesses from the previous machine must match the data points in this one
        Log.LogMessage("The number of guesses from the previous machine does not match the number of data points in this one");
        return null;
    }

    // Normalize the features, append the previous machine's guess as an extra feature, and split into cross-validation parts
    tempFeatures = tempFeatures.NormalizeFeatureList<double>(normalizationType).ToList();
    for (int i = 0; i < tempFeatures.Count; i++)
    {
        tempFeatures[i].Add(answersFromPrevious[i]);
    }
    List<Tuple<SVMProblem, SVMProblem>> problems = tempFeatures.GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);

    // Get the correct answers
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

    foreach (SVMParameter SVMpara in Parameters)
    {
        // Train on each training set and predict the matching prediction set
        List<double> guesses = new List<double>();
        foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
        {
            guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
        }

        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        // Calculate scoring results
        double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x));
        predictedResults.Add(pR);
    }
    return predictedResults;
}
public MetaClassifier(string Name, SVMParameter Parameter, SAMData SamData, List<StdClassifier> Classifiers) : base(Name, Parameter, SamData) { standardClassifiers = Classifiers; }
bool LoadData(string path, FusionData fd)
{
    currentPath = path;
    Log.LogMessage("Selected folder: " + path);

    // Load SAM and fusion data
    samData = SAMData.LoadFromPath(path + @"\SAM.json");
    string temp = samData.ShouldSkip();
    if (temp != "")
    {
        Log.LogMessage(temp);
        return false;
    }
    shouldRun = fd.LoadFromFile(new string[] { path + @"\EEG.dat", path + @"\GSR.dat", path + @"\HR.dat", path + @"\KINECT.dat" }, samData.startTime);

    // Slicing: discard SAM data points that have no readings in one of the modalities
    List<SAMDataPoint> throwaway = new List<SAMDataPoint>();
    foreach (SAMDataPoint samD in samData.dataPoints)
    {
        if (FeatureCreator.EEGDataSlice(fd.eegData.ToList<DataReading>(), samD).Count == 0 ||
            FeatureCreator.GSRDataSlice(fd.gsrData.ToList<DataReading>(), samD).Count == 0 ||
            FeatureCreator.HRDataSlice(fd.hrData.ToList<DataReading>(), samD).Count == 0 ||
            FeatureCreator.FaceDataSlice(fd.faceData.ToList<DataReading>(), samD).Count == 0)
        {
            throwaway.Add(samD);
        }
    }
    if (throwaway.Count > 5)
    {
        Log.LogMessage("Too many data points thrown away (" + throwaway.Count + ")");
        return false;
    }
    for (int i = 0; i < throwaway.Count; i++)
    {
        Log.LogMessage("Threw away a sam data point");
        samData.dataPoints.Remove(throwaway[i]);
    }

    Log.LogMessage("Fusion Data loaded!");
    Log.LogMessage("Applying data to features..");
    FeatureCreator.GSRArousalOptimizationFeatures.ForEach(x => x.SetData(fd.gsrData.ToList<DataReading>()));
    FeatureCreator.HRArousalOptimizationFeatures.ForEach(x => x.SetData(fd.hrData.ToList<DataReading>()));
    FeatureCreator.HRValenceOptimizationFeatures.ForEach(x => x.SetData(fd.hrData.ToList<DataReading>()));
    FeatureCreator.EEGArousalOptimizationFeatures.ForEach(x => x.SetData(fd.eegData.ToList<DataReading>()));
    FeatureCreator.EEGValenceOptimizationFeatures.ForEach(x => x.SetData(fd.eegData.ToList<DataReading>()));
    FeatureCreator.FACEArousalOptimizationFeatures.ForEach(x => x.SetData(fd.faceData.ToList<DataReading>()));
    FeatureCreator.FACEValenceOptimizationFeatures.ForEach(x => x.SetData(fd.faceData.ToList<DataReading>()));

    Log.LogMessage("Looking for configurations...");
    svmConfs.Clear();
    if (Directory.Exists(path + @"\STD"))
    {
        var files = Directory.GetFiles(path + @"\STD");
        Log.LogMessage("Found STD! Contains " + files.Length + " configurations.");
        foreach (var item in files)
        {
            svmConfs.Add(SVMConfiguration.Deserialize(File.ReadAllText(item)));
        }
    }
    if (Directory.Exists(path + @"\META"))
    {
        var files = Directory.GetFiles(path + @"\META");
        Log.LogMessage("Found META! Contains " + files.Length + " configurations.");
        foreach (var item in files)
        {
            metaConfs.Add(MetaSVMConfiguration.Deserialize(File.ReadAllText(item)));
        }
    }
    if (svmConfs.Count == 0 && metaConfs.Count == 0)
    {
        Log.LogMessage("No configurations found, maybe you should run some optimizations on some features.");
    }
    return true;
}
public StdClassifier(string Name, SVMParameter Parameter, List<Feature> Features, SAMData samData) : base(Name, Parameter, samData) { features = new List<Feature>(Features); }
public StdClassifier(SVMConfiguration conf, SAMData samData) : base(conf.Name, conf.GetParameter(), samData) { features = conf.GetFeautres(); }
/// <summary>
/// Run crossvalidation for the feature setup of this machine
/// </summary>
/// <param name="feelingsmodel"></param>
/// <param name="nFold"></param>
/// <param name="useIAPSratings"></param>
public List<PredictionResult> OldCrossValidate(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
{
    List<PredictionResult> predictedResults = new List<PredictionResult>();
    // Split into crossvalidation parts
    List<Tuple<SVMProblem, SVMProblem>> problems = GetFeatureValues(features, samData).NormalizeFeatureList<double>(normalizationType).GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);
    // Get the correct answers
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
    int progressCounter = 0;
    bool postedOneClassError = false;

    // If the answers only contain a single class, report the trivial result and skip the SVM entirely
    if (answers.Distinct().Count() <= 1)
    {
        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        // Calculate scoring results
        double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList());
        predictedResults.Add(pR);
        progressCounter++;
        Log.LogMessage(ONLY_ONE_CLASS);
        Log.LogMessage("");
        return predictedResults;
    }

    foreach (SVMParameter SVMpara in Parameters)
    {
        if (UpdateCallback != null)
        {
            UpdateCallback(progressCounter, Parameters.Count);
        }
        List<double> guesses = new List<double>();
        // Train and predict each nfold
        try
        {
            foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
            {
                SVMModel trainingModel = tupleProblem.Item1.Train(SVMpara);
                if (trainingModel.ClassCount <= 1)
                {
                    // The training set only contains one class, so answer with the training labels instead of predicting
                    if (!postedOneClassError)
                    {
                        Log.LogMessage(ONLY_ONE_CLASS_IN_TRAINING);
                        postedOneClassError = true;
                    }
                    guesses.AddRange(tupleProblem.Item1.Y.ToList().Take(tupleProblem.Item2.Y.Count()).ToList());
                }
                else
                {
                    double[] d = tupleProblem.Item2.Predict(trainingModel);
                    guesses.AddRange(d);
                }
            }
        }
        catch (Exception)
        {
            // Mark every guess as invalid if training or prediction fails
            guesses.Clear();
            for (int i = 0; i < samData.dataPoints.Count; i++)
            {
                guesses.Add(-1);
            }
        }
        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        // Calculate scoring results
        double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x));
        predictedResults.Add(pR);
        progressCounter++;
        if (UpdateCallback != null)
        {
            UpdateCallback(progressCounter, Parameters.Count);
        }
    }
    return predictedResults;
}
/// <summary>
/// Get the values for each feature
/// </summary>
/// <param name="Features"></param>
/// <returns>A list per data point containing the feature values for that data point</returns>
private List<List<double>> GetFeatureValues(List<Feature> Features, SAMData samd)
{
    List<List<double>> temp = new List<List<double>>();
    for (int j = 0; j < samd.dataPoints.Count; j++)
    {
        temp.Add(new List<double>());
    }
    foreach (Feature f in Features)
    {
        List<double> values = f.GetAllValues(samd);
        for (int i = 0; i < values.Count; i++)
        {
            temp[i].Add(values[i]);
        }
    }
    return temp;
}
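// Illustration of the layout produced (hypothetical values): with two features over three data points,
// F1.GetAllValues -> [1, 2, 3] and F2.GetAllValues -> [9, 8, 7],
// GetFeatureValues returns [[1, 9], [2, 8], [3, 7]], i.e. one inner list per data point.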
public PredictionResult DoVoting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
{
    List<PredictionResult> classifiers = new List<PredictionResult>();
    // For each base classifier, run a cross-validation and keep the result with the highest average F-score
    int prg = 0;
    foreach (StdClassifier classifier in standardClassifiers)
    {
        if (UpdateCallback != null)
        {
            UpdateCallback(prg++, standardClassifiers.Count);
        }
        List<PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
        classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First());
    }
    if (UpdateCallback != null)
    {
        UpdateCallback(standardClassifiers.Count, standardClassifiers.Count);
    }

    int labelCount = SAMData.GetNumberOfLabels(feelingsmodel);

    // Full list of indices
    List<int> counter = new List<int>();
    for (int k = 0; k < samData.dataPoints.Count(); k++)
    {
        counter.Add(k);
    }
    // Divide the indices into the correct nFold sets
    List<List<int>> trainIndicies = new List<List<int>>();
    List<List<int>> predictIndicies = new List<List<int>>();
    for (int i = 0; i < samData.dataPoints.Count(); i += nFold)
    {
        var temp = counter.Skip(i).Take(nFold).ToList();
        predictIndicies.Add(temp);
        trainIndicies.Add(counter.Except(temp).ToList());
    }

    // One weight per class for each data point
    List<Dictionary<int, double>> weightedGuesses = new List<Dictionary<int, double>>();
    for (int nGuesses = 0; nGuesses < samData.dataPoints.Count; nGuesses++)
    {
        Dictionary<int, double> tempGuess = new Dictionary<int, double>();
        for (int indexClass = 0; indexClass < labelCount; indexClass++)
        {
            tempGuess.Add(indexClass, 0);
        }
        weightedGuesses.Add(tempGuess);
    }

    // For each fold, weight every classifier's guess by its accuracy on the training indices
    for (int i = 0; i < trainIndicies.Count; i++)
    {
        foreach (PredictionResult predictResult in classifiers)
        {
            double correct = 0;
            // Calculate weights
            for (int trainingIndex = 0; trainingIndex < trainIndicies[i].Count; trainingIndex++)
            {
                if (predictResult.guesses[trainIndicies[i][trainingIndex]] == samData.dataPoints[trainIndicies[i][trainingIndex]].ToAVCoordinate(feelingsmodel))
                {
                    correct++;
                }
            }
            // Add the weight from the training set to each of the guesses
            weightedGuesses[i][predictResult.guesses[i]] += (correct / trainIndicies.Count);
        }
    }

    // The final answer for each data point is the class with the highest accumulated weight
    List<double> guesses = new List<double>();
    foreach (Dictionary<int, double> answer in weightedGuesses)
    {
        int tempKey = -1;
        double tempMax = -1;
        foreach (int key in answer.Keys)
        {
            if (answer[key] > tempMax)
            {
                tempKey = key;
                tempMax = answer[key];
            }
        }
        guesses.Add(tempKey);
    }

    // Get the correct answers
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
    int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
    // Calculate scoring results
    double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
    List<double> pres = CalculatePrecision(confus, numberOfLabels);
    List<double> recall = CalculateRecall(confus, numberOfLabels);
    List<double> fscore = CalculateFScore(pres, recall);
    return new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), new List<Feature>(), answers.ToList(), guesses.ConvertAll(x => (int)x));
}
public List<PredictionResult> CrossValidate(SAMDataPoint.FeelingModel feelingsmodel, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
{
    List<PredictionResult> predictedResults = new List<PredictionResult>();
    //Build one complete problem; libsvm handles the crossvalidation split
    SVMProblem problems = GetFeatureValues(features, samData).NormalizeFeatureList<double>(normalizationType).CreateCompleteProblem(samData, feelingsmodel);
    //Get correct results
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
    int progressCounter = 0;
    if (answers.Distinct().Count() <= 1)
    {
        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        //Calculate scoring results
        double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList());
        predictedResults.Add(pR);
        progressCounter++;
        Log.LogMessage(ONLY_ONE_CLASS);
        Log.LogMessage("");
        return predictedResults;
    }
    else if (problems.X.Count == 0)
    {
        Log.LogMessage("Empty problem in " + Name);
        return null;
    }

    foreach (SVMParameter SVMpara in Parameters)
    {
        if (UpdateCallback != null)
        {
            UpdateCallback(progressCounter, Parameters.Count);
        }
        double[] guesses = new double[samData.dataPoints.Count];
        //Cross-validate with one fold per data point
        try
        {
            problems.CrossValidation(SVMpara, samData.dataPoints.Count, out guesses);
        }
        catch (Exception)
        {
            for (int i = 0; i < samData.dataPoints.Count; i++)
            {
                guesses[i] = -1;
            }
        }
        int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
        //Calculate scoring results
        double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
        List<double> pres = CalculatePrecision(confus, numberOfLabels);
        List<double> recall = CalculateRecall(confus, numberOfLabels);
        List<double> fscore = CalculateFScore(pres, recall);
        PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), Array.ConvertAll(guesses, x => (int)x).ToList());
        predictedResults.Add(pR);
        progressCounter++;
        if (UpdateCallback != null)
        {
            UpdateCallback(progressCounter, Parameters.Count);
        }
    }
    return predictedResults;
}
/// <summary>
/// Item1 is the training set, Item2 is the prediction set.
/// Note that nFold is the size of each prediction set, so nFold == 1 gives leave-one-out cross-validation.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="original"></param>
/// <param name="nFold"></param>
/// <returns>Returns null if the collection can't be split into nFold-sized sets</returns>
public static List<Tuple<SVMProblem, SVMProblem>> GetCrossValidationSets<T>(this IEnumerable<List<double>> original, SAMData samData, SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool UseIAPSRatings = false)
{
    //TODO: Needs to be tested, can't test before data can be loaded into the program
    List<Tuple<SVMProblem, SVMProblem>> allSets = new List<Tuple<SVMProblem, SVMProblem>>();
    if (original.Count() % nFold != 0)
    {
        return null;
    }
    //Full list of indices
    List<int> counter = new List<int>();
    for (int k = 0; k < original.Count(); k++)
    {
        counter.Add(k);
    }
    //Divide the indices into the correct nFold sets
    List<List<int>> trainIndicies = new List<List<int>>();
    List<List<int>> predictIndicies = new List<List<int>>();
    for (int i = 0; i < original.Count(); i += nFold)
    {
        var temp = counter.Skip(i).Take(nFold).ToList();
        predictIndicies.Add(temp);
        trainIndicies.Add(counter.Except(temp).ToList());
    }
    //Build one (train, predict) problem pair per fold
    for (int j = 0; j < predictIndicies.Count; j++)
    {
        //Create training problem: for each training index, add its feature vector
        SVMProblem trainSVMProblem = new SVMProblem();
        foreach (int trainIndex in trainIndicies[j])
        {
            SVMNode[] featureVector = new SVMNode[original.ElementAt(trainIndex).Count];
            for (int w = 0; w < original.ElementAt(trainIndex).Count; w++)
            {
                featureVector[w] = new SVMNode(w + 1, original.ElementAt(trainIndex)[w]);
            }
            trainSVMProblem.Add(featureVector, samData.dataPoints[trainIndex].ToAVCoordinate(feelingsmodel, UseIAPSRatings));
        }
        //Create predict problem: for each predict index, add its feature vector
        SVMProblem predictSVMProblem = new SVMProblem();
        foreach (int predictIndex in predictIndicies[j])
        {
            SVMNode[] featureVector = new SVMNode[original.ElementAt(predictIndex).Count];
            for (int w = 0; w < original.ElementAt(predictIndex).Count; w++)
            {
                featureVector[w] = new SVMNode(w + 1, original.ElementAt(predictIndex)[w]);
            }
            predictSVMProblem.Add(featureVector, samData.dataPoints[predictIndex].ToAVCoordinate(feelingsmodel, UseIAPSRatings));
        }
        allSets.Add(new Tuple<SVMProblem, SVMProblem>(trainSVMProblem, predictSVMProblem));
    }
    return allSets;
}
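// Usage sketch (hypothetical; featureRows is assumed to be a List<List<double>> with one row per SAM data point):
// var folds = featureRows.GetCrossValidationSets<double>(samData, feelingsmodel, 1);
// With nFold == 1 each tuple trains on N - 1 points and predicts the remaining one (leave-one-out),
// so folds.Count == featureRows.Count; the call returns null whenever N % nFold != 0.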
public static SVMProblem CreateCompleteProblem(this IEnumerable<List<double>> original, SAMData sam, SAMDataPoint.FeelingModel feelingModel)
{
    SVMProblem completeProblem = new SVMProblem();
    for (int i = 0; i < original.Count(); i++)
    {
        //Build the feature vector for data point i; SVMNode indices are 1-based
        SVMNode[] nodeSet = new SVMNode[original.ElementAt(i).Count];
        for (int j = 0; j < original.ElementAt(i).Count; j++)
        {
            SVMNode currentNode = new SVMNode();
            currentNode.Index = j + 1;
            currentNode.Value = original.ElementAt(i)[j];
            nodeSet[j] = currentNode;
        }
        completeProblem.Add(nodeSet, sam.dataPoints[i].ToAVCoordinate(feelingModel));
    }
    return completeProblem;
}
/// <summary>
/// Run crossvalidation for each combination of the features for this machine
/// </summary>
/// <param name="feelingsmodel"></param>
/// <param name="nFold"></param>
/// <param name="useIAPSratings"></param>
public List<PredictionResult> CrossValidateCombinations(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
{
    List<List<bool>> combinations = CalculateCombinations(new List<bool>() { }, features.Count);

    //Get different combination of problems
    List<Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>> featureCombinationProblems = new List<Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>>();
    for (int i = 0; i < combinations.Count; i++)
    {
        List<Feature> tempFeatures = new List<Feature>();
        for (int j = 0; j < combinations[i].Count; j++)
        {
            // For each feature combination save the different problems for crossvalidation
            if (combinations[i][j] == true)
            {
                tempFeatures.Add(features[j]);
            }
        }
        featureCombinationProblems.Add(
            new Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>(
                GetFeatureValues(tempFeatures, samData).NormalizeFeatureList<double>(normalizationType).GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings),
                tempFeatures));
    }

    //Get correct results
    int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

    int progressCounter = 0;
    List<PredictionResult> predictionResults = new List<PredictionResult>();
    foreach (SVMParameter SVMpara in Parameters)
    {
        //For each feature setup
        for (int n = 0; n < featureCombinationProblems.Count; n++)
        {
            if (UpdateCallback != null)
            {
                UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
            }
            //PrintProgress(progressCounter, featureCombinationProblems.Count);
            List<double> guesses = new List<double>();
            //model and predict each nfold
            foreach (var tupleProblem in featureCombinationProblems[n].Item1)
            {
                guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
            }
            int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
            //Calculate scoring results
            double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
            List<double> pres = CalculatePrecision(confus, numberOfLabels);
            List<double> recall = CalculateRecall(confus, numberOfLabels);
            List<double> fscore = CalculateFScore(pres, recall);
            PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, featureCombinationProblems[n].Item2, answers.ToList(), guesses.ConvertAll(x => (int)x));
            predictionResults.Add(pR);
            progressCounter++;
        }
    }
    if (UpdateCallback != null)
    {
        UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
    }
    return predictionResults;
}
public Classifier(string Name, SVMParameter Parameter, SAMData samData) { this.Name = Name; this.Parameters = new List<SVMParameter>() { Parameter }; this.samData = samData; }