bool LoadData(string path, FusionData fd)
{
    currentPath = path;
    Log.LogMessage("Selected folder: " + path);

    // Load the SAM ratings and bail out early if this subject should be skipped.
    samData = SAMData.LoadFromPath(path + @"\SAM.json");
    string skipReason = samData.ShouldSkip();
    if (skipReason != "")
    {
        Log.LogMessage(skipReason);
        return false;
    }

    // Load the four sensor streams, aligned to the SAM start time.
    shouldRun = fd.LoadFromFile(new string[]
    {
        path + @"\EEG.dat",
        path + @"\GSR.dat",
        path + @"\HR.dat",
        path + @"\KINECT.dat"
    }, samData.startTime);

    // Slicing: discard any SAM data point that has no readings in at least one modality.
    List<SAMDataPoint> throwaway = new List<SAMDataPoint>();
    foreach (SAMDataPoint samD in samData.dataPoints)
    {
        if (FeatureCreator.EEGDataSlice(fd.eegData.ToList<DataReading>(), samD).Count == 0
            || FeatureCreator.GSRDataSlice(fd.gsrData.ToList<DataReading>(), samD).Count == 0
            || FeatureCreator.HRDataSlice(fd.hrData.ToList<DataReading>(), samD).Count == 0
            || FeatureCreator.FaceDataSlice(fd.faceData.ToList<DataReading>(), samD).Count == 0)
        {
            throwaway.Add(samD);
        }
    }

    // Give up on the subject if too many data points were thrown away.
    // (Previously this check appeared twice; the second occurrence was unreachable
    // because the first had already returned, so the log message never fired.)
    if (throwaway.Count > 5)
    {
        Log.LogMessage("Too many data points thrown away (" + throwaway.Count + ")");
        return false;
    }
    for (int i = 0; i < throwaway.Count; i++)
    {
        Log.LogMessage("Threw away a SAM data point");
        samData.dataPoints.Remove(throwaway[i]);
    }

    Log.LogMessage("Fusion Data loaded!");
    Log.LogMessage("Applying data to features..");
    FeatureCreator.GSRArousalOptimizationFeatures.ForEach(x => x.SetData(fd.gsrData.ToList<DataReading>()));
    FeatureCreator.HRArousalOptimizationFeatures.ForEach(x => x.SetData(fd.hrData.ToList<DataReading>()));
    FeatureCreator.HRValenceOptimizationFeatures.ForEach(x => x.SetData(fd.hrData.ToList<DataReading>()));
    FeatureCreator.EEGArousalOptimizationFeatures.ForEach(x => x.SetData(fd.eegData.ToList<DataReading>()));
    FeatureCreator.EEGValenceOptimizationFeatures.ForEach(x => x.SetData(fd.eegData.ToList<DataReading>()));
    FeatureCreator.FACEArousalOptimizationFeatures.ForEach(x => x.SetData(fd.faceData.ToList<DataReading>()));
    FeatureCreator.FACEValenceOptimizationFeatures.ForEach(x => x.SetData(fd.faceData.ToList<DataReading>()));

    Log.LogMessage("Looking for configurations...");
    svmConfs.Clear();
    metaConfs.Clear(); // clear alongside svmConfs so META configurations don't accumulate across loads
    if (Directory.Exists(path + @"\STD"))
    {
        var files = Directory.GetFiles(path + @"\STD");
        Log.LogMessage("Found STD! Contains " + files.Length + " configurations.");
        foreach (var item in files)
        {
            svmConfs.Add(SVMConfiguration.Deserialize(File.ReadAllText(item)));
        }
    }
    if (Directory.Exists(path + @"\META"))
    {
        var files = Directory.GetFiles(path + @"\META");
        Log.LogMessage("Found META! Contains " + files.Length + " configurations.");
        // Same procedure as STD, but deserializing meta configurations.
        foreach (var item in files)
        {
            metaConfs.Add(MetaSVMConfiguration.Deserialize(File.ReadAllText(item)));
        }
    }
    if (svmConfs.Count == 0 && metaConfs.Count == 0)
    {
        Log.LogMessage("No configurations found, maybe you should run some optimizations on some features.");
    }
    return true;
}
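// The FeatureCreator.*DataSlice helpers used in LoadData are defined elsewhere in
// the project. As an illustration only, here is a minimal sketch of the slicing
// idea, assuming readings are kept when their timestamp falls inside a SAM data
// point's time window, and that DataReading exposes a timestamp field as it does
// elsewhere in this file. windowStart/windowEnd are hypothetical stand-ins for
// whatever window SAMDataPoint actually defines; this is not the real API.
private static List<DataReading> SliceByWindowSketch(List<DataReading> readings, long windowStart, long windowEnd)
{
    // An empty result is what LoadData treats as "no data for this modality",
    // which marks the corresponding SAM data point as a throwaway candidate.
    return readings.Where(r => r.timestamp >= windowStart && r.timestamp < windowEnd).ToList();
}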
private void btn_ExportDataCompare_Click(object sender, EventArgs e)
{
    FolderBrowserDialog fbd = new FolderBrowserDialog();
    if (fbd.ShowDialog() != DialogResult.OK) return;

    int counterino = 1;
    var dirs = Directory.GetDirectories(fbd.SelectedPath);
    foreach (var dirPath in dirs)
    {
        // Compare the folder name, not the full path (dirPath is absolute).
        if (dirPath.Split('\\').Last() == "results") continue;

        // Modalities to export; uncomment entries to include more streams.
        List<string> files = new List<string>()
        {
            "EEG.dat",
            //"GSR.dat",
            //"HR.dat",
            //"KINECT.dat"
        };
        bool runIndividualTasks = true;

        // Only keep modalities recorded in both the test and the recall session.
        files.RemoveAll(f => !File.Exists($"{dirPath}/test/{f}") || !File.Exists($"{dirPath}/recall/{f}"));

        string subject = dirPath.Split('\\').Last();
        statusLabel.Text = $"{counterino++} / {dirs.Length}";
        var metaLines = File.ReadAllLines(dirPath + "/meta.txt");

        fdTest = new FusionData();
        fdRecall = new FusionData();
        var fdTestStatus = fdTest.LoadFromFile(files.Select(f => dirPath + "/test/" + f).ToArray());
        if (!fdTest.Loaded)
        {
            Log.LogMessage("ERROR in " + dirPath + ": couldn't load test data");
            continue;
        }
        var fdRecallStatus = fdRecall.LoadFromFile(files.Select(x => dirPath + "/recall/" + x).ToArray());
        if (!fdRecall.Loaded)
        {
            Log.LogMessage("ERROR in " + dirPath + ": couldn't load recall data");
            continue;
        }

        var testEvents = File.ReadAllLines(dirPath + "/test/SecondTest.dat");
        // Fix the waiting-period offset to be the first event in testEvents.
        int offset = int.Parse(metaLines.ToList().First(x => x.StartsWith("sync")).Split(':', '=').Last());
        string stimul = metaLines[1].Split('=').Last();
        string time = metaLines[0].Split('=').Last();
        int waitPeriodDone = int.Parse(testEvents[0].Split('#')[0]);
        int wholePeriodDone = int.Parse(testEvents[testEvents.Length - 1].Split('#')[0]);

        List<TaskStartEnd> tasks = new List<TaskStartEnd>();
        if (!runIndividualTasks)
        {
            // Treat everything after the waiting period as one task.
            tasks.Add(new TaskStartEnd(waitPeriodDone, wholePeriodDone, "full"));
        }
        else
        {
            // Split the session into individual tasks at each TaskWizard event.
            List<string> taskEvents = testEvents.Where(x => x.Contains("TaskWizard - ")).ToList();
            int start = waitPeriodDone;
            for (int s = 0; s < taskEvents.Count; s++)
            {
                string[] currentEvent = taskEvents[s].Split(new char[] { '-', ' ', '#' }, StringSplitOptions.RemoveEmptyEntries);
                int end = int.Parse(currentEvent[0]);
                tasks.Add(new TaskStartEnd(start, end, currentEvent[currentEvent.Length - 1]));
                start = end;
            }
        }

        for (int ta = 0; ta < tasks.Count; ta++)
        {
            string csvTimePath = "csv/Time " + time + "/" + subject + "_" + tasks[ta].filenameAppend + "_";
            string csvStimuliPath = "csv/Stimuli " + (stimul == "neu" ? "low" : "high") + "/" + subject + "_" + tasks[ta].filenameAppend + "_";
            Directory.CreateDirectory("csv/Time " + time);
            Directory.CreateDirectory("csv/Stimuli " + (stimul == "neu" ? "low" : "high"));

            if (files.Contains("GSR.dat"))
            {
                Log.LogMessage("Starting GSR");
                var fdTestGsr = fdTest.gsrData
                    .SkipWhile(x => x.timestamp < tasks[ta].start)
                    .TakeWhile(x => x.timestamp < tasks[ta].end)
                    .Select(x => Tuple.Create(x.timestamp, (double)x.resistance)).ToList();
                var fdRecallGsr = fdRecall.gsrData
                    .SkipWhile(x => x.timestamp - offset < tasks[ta].start)
                    .TakeWhile(x => x.timestamp - offset < tasks[ta].end)
                    .Select(x => Tuple.Create(x.timestamp - offset, (double)x.resistance)).ToList();
                var gsr = FilterData(fdTestGsr, fdRecallGsr);
                // Both filtered series must be non-empty before correlating.
                if (gsr.Item2.Count != 0 && gsr.Item3.Count != 0)
                {
                    var gsrNorm = NormalizeFilterData(gsr);
                    int min = Math.Min(gsrNorm.Item1.Count, gsrNorm.Item2.Count);
                    var pearsCorr = MathNet.Numerics.Statistics.Correlation.Pearson(gsrNorm.Item1.GetRange(0, min), gsrNorm.Item2.GetRange(0, min));
                    SavePng(csvTimePath + "GSR.png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", gsrNorm.Item1, gsrNorm.Item2);
                    SaveZip(csvTimePath + "GSR.csv", gsrNorm.Item1, gsrNorm.Item2);
                    int t;
                    if (int.TryParse(time, out t) && t != 0)
                    {
                        SavePng(csvStimuliPath + "GSR.png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", gsrNorm.Item1, gsrNorm.Item2);
                        SaveZip(csvStimuliPath + "GSR.csv", gsrNorm.Item1, gsrNorm.Item2);
                    }
                }
                Log.LogMessage("GSR done, data filtered: " + gsr.Item1.ToString("0.0") + "%");
            }

            if (files.Contains("HR.dat"))
            {
                Log.LogMessage("Starting HR");
                var hr = FilterData(
                    fdTest.hrData
                        .SkipWhile(x => x.timestamp < tasks[ta].start)
                        .TakeWhile(x => x.timestamp < tasks[ta].end)
                        .Select(x => Tuple.Create(x.timestamp, (double)x.BPM)).ToList(),
                    fdRecall.hrData
                        .SkipWhile(x => x.timestamp - offset < tasks[ta].start)
                        .TakeWhile(x => x.timestamp - offset < tasks[ta].end)
                        .Select(x => Tuple.Create(x.timestamp - offset, (double)x.BPM)).ToList(),
                    20);
                if (hr.Item2.Count != 0 && hr.Item3.Count != 0)
                {
                    var hrNorm = NormalizeFilterData(hr);
                    var setA = hrNorm.Item1;
                    var setB = hrNorm.Item2;
                    int min = Math.Min(setA.Count, setB.Count);
                    var pearsCorr = MathNet.Numerics.Statistics.Correlation.Pearson(setA.GetRange(0, min), setB.GetRange(0, min));
                    SavePng(csvTimePath + "HR.png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", hrNorm.Item1, hrNorm.Item2);
                    SaveZip(csvTimePath + "HR.csv", setA, setB);
                    int t;
                    if (int.TryParse(time, out t) && t != 0)
                    {
                        SavePng(csvStimuliPath + "HR.png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", hrNorm.Item1, hrNorm.Item2);
                        SaveZip(csvStimuliPath + "HR.csv", hrNorm.Item1, hrNorm.Item2);
                    }
                }
                Log.LogMessage($"HR done, data filtered: {hr.Item1.ToString("0.0")}%");
            }

            if (files.Contains("EEG.dat"))
            {
                Log.LogMessage("Starting EEG");
                foreach (var item in Enum.GetNames(typeof(EEGDataReading.ELECTRODE)))
                {
                    var eeg = FilterData(
                        fdTest.eegData
                            .SkipWhile(x => x.timestamp < tasks[ta].start)
                            .TakeWhile(x => x.timestamp < tasks[ta].end)
                            .Select(x => Tuple.Create(x.timestamp, (double)x.data[item])).ToList(),
                        fdRecall.eegData
                            .SkipWhile(x => x.timestamp - offset < tasks[ta].start)
                            .TakeWhile(x => x.timestamp - offset < tasks[ta].end)
                            .Select(x => Tuple.Create(x.timestamp - offset, (double)x.data[item])).ToList(),
                        8);
                    if (eeg.Item2.Count == 0 || eeg.Item3.Count == 0) continue;

                    // Variance-filter the raw series first, then normalize.
                    var setA = eeg.Item2.VarianceFilter(64).CalculateNormalized();
                    var setB = eeg.Item3.VarianceFilter(64).CalculateNormalized();
                    var min = Math.Min(setA.Count, setB.Count);
                    Log.LogMessage($"{item} done, data filtered: {eeg.Item1.ToString("0.0")}%");
                    var pearsCorr = MathNet.Numerics.Statistics.Correlation.Pearson(setA.GetRange(0, min), setB.GetRange(0, min));
                    SavePng(csvTimePath + "EEG_" + item + ".png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", setA, setB);
                    SaveZip(csvTimePath + "EEG_" + item + ".csv", setA, setB);
                    int t;
                    if (int.TryParse(time, out t) && t != 0)
                    {
                        SavePng(csvStimuliPath + "EEG_" + item + ".png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", setA, setB);
                        SaveZip(csvStimuliPath + "EEG_" + item + ".csv", setA, setB);
                    }
                }
                Log.LogMessage("EEG done");
            }

            if (files.Contains("KINECT.dat"))
            {
                Log.LogMessage("Starting Kinect");
                foreach (Microsoft.Kinect.Face.FaceShapeAnimations item in Enum.GetValues(typeof(Microsoft.Kinect.Face.FaceShapeAnimations)))
                {
                    // Skip the sentinel enum member; it is not a real animation unit.
                    if (item == Microsoft.Kinect.Face.FaceShapeAnimations.Count) continue;
                    var kinect = FilterData(
                        fdTest.faceData
                            .SkipWhile(x => x.timestamp < tasks[ta].start)
                            .TakeWhile(x => x.timestamp < tasks[ta].end)
                            .Select(x => Tuple.Create(x.timestamp, (double)x.data[item])).ToList(),
                        fdRecall.faceData
                            .SkipWhile(x => x.timestamp - offset < tasks[ta].start)
                            .TakeWhile(x => x.timestamp - offset < tasks[ta].end)
                            .Select(x => Tuple.Create(x.timestamp - offset, (double)x.data[item])).ToList(),
                        34);
                    if (kinect.Item2.Count == 0 || kinect.Item3.Count == 0) continue;

                    var kiNorm = NormalizeFilterData(kinect);
                    var setA = kiNorm.Item1.MovingAverageFilter(25);
                    var setB = kiNorm.Item2.MovingAverageFilter(25);
                    var min = Math.Min(setA.Count, setB.Count);
                    Log.LogMessage($"{item} done, data filtered: {kinect.Item1.ToString("0.0")}%");
                    var pearsCorr = MathNet.Numerics.Statistics.Correlation.Pearson(setA.GetRange(0, min), setB.GetRange(0, min));
                    SavePng(csvTimePath + "FACE_" + item + ".png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", setA, setB);
                    SaveZip(csvTimePath + "FACE_" + item + ".csv", setA, setB);
                    int t;
                    if (int.TryParse(time, out t) && t != 0)
                    {
                        SavePng(csvStimuliPath + "FACE_" + item + ".png", $"{subject} (Time: {time}, Stim: {stimul}, Corr: {pearsCorr.ToString("0.000")}) - Red = test, blue = recall", setA, setB);
                        SaveZip(csvStimuliPath + "FACE_" + item + ".csv", setA, setB);
                    }
                }
                Log.LogMessage("Kinect done");
            }
        }
    }
    Log.LogMessage("DonnoDK!");
}
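// TaskStartEnd is defined elsewhere in the project. For reference, this is a
// minimal shape consistent with how it is used above (a sketch with assumed
// field types, kept commented out so it does not clash with the real definition):
//
//   class TaskStartEnd
//   {
//       public int start;              // task window start timestamp
//       public int end;                // task window end timestamp
//       public string filenameAppend;  // task tag appended to exported file names
//
//       public TaskStartEnd(int start, int end, string filenameAppend)
//       {
//           this.start = start;
//           this.end = end;
//           this.filenameAppend = filenameAppend;
//       }
//   }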