private void PredictionNuThread(ref int count, SENSOR sensor, int start, int end, ref ConcurrentStack<SVMParameter> svmParams, List<SVMNode[]> data, int svmCount, ref NoveltyResult bestCoveredResult, ref Mutex mutex, ref ConcurrentBag<string> nuResults)
{
    // Note: bestCoveredResult is unused in the nu sweep; it is kept for signature parity with PredictionThread.
    OneClassClassifier occ = new OneClassClassifier(data);
    List<Events> eventResult = new List<Events>();
    // Work on copies so the shared event list is not mutated across worker threads.
    foreach (Events p in events)
    {
        eventResult.Add(p.Copy());
    }

    // Consume parameter sets until the shared stack is drained.
    while (!svmParams.IsEmpty)
    {
        SVMParameter svmParam;
        svmParams.TryPop(out svmParam);
        if (svmParam == null)
        {
            break;
        }

        occ.CreateModel(svmParam);
        List<OneClassFV> anomalies = new List<OneClassFV>();
        anomalies.AddRange(occ.PredictOutliers(featureVectors[sensor].Where(x => start < x.TimeStamp && x.TimeStamp < end).ToList()));

        PointsOfInterest dPointsOfInterest = new PointsOfInterest(anomalies);
        foreach (Events evt in eventResult)
        {
            evt.SetPointOfInterest(dPointsOfInterest);
        }

        NoveltyResult tempResult = new NoveltyResult(dPointsOfInterest, eventResult, start, end, svmParam, anomalies);

        // Fraction of the normal area flagged as anomalous, clamped to 1.
        double areaCovered = Math.Min(1.0, tempResult.FlaggedAreaSize() / (double)tempResult.CalculateTotalNormalArea());

        // Record "nu : eventHitRate ; hitRate ; areaCovered" for this parameter set.
        var hitResult = tempResult.CalculateHitResult();
        nuResults.Add($"{tempResult.parameter.Nu}:" +
                      $"{hitResult.eventHits / (double)hitResult.eventsTotal};" +
                      $"{hitResult.hits / (double)(hitResult.hits + hitResult.misses)};" +
                      $"{areaCovered}");

        // The progress counter is shared between worker threads.
        mutex.WaitOne();
        count++;
        SetProgress(count, sensor);
        mutex.ReleaseMutex();
    }
    Log.LogMessage(sensor + " done!");
}
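// A minimal sketch (not from the original code) of how the "nu:eventRate;hitRate;areaCovered"
// strings collected in nuResults above could be parsed back for analysis or plotting.
// ParseNuResult is a hypothetical name; parsing assumes the same culture the values were
// formatted with when PredictionNuThread built the strings.
private static Tuple<double, double, double, double> ParseNuResult(string line)
{
    string[] nuAndRates = line.Split(':');     // "nu" : "eventRate;hitRate;areaCovered"
    string[] rates = nuAndRates[1].Split(';');
    return Tuple.Create(
        double.Parse(nuAndRates[0]),  // nu used for this model
        double.Parse(rates[0]),       // fraction of events hit
        double.Parse(rates[1]),       // hit rate among flagged points
        double.Parse(rates[2]));      // fraction of the normal area covered
}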
private void CreateSVM(SENSOR machine, List<OneClassFV> trainingSet)
{
    // Train a one-class SVM on this sensor's training windows and register it per sensor.
    var data = trainingSet.Select(x => x.Features).ToList();
    OneClassClassifier occ = new OneClassClassifier(data);
    SVMParameter svmP = new SVMParameter
    {
        Kernel = SVMKernelType.RBF,
        C = 100,
        Gamma = 0.01,
        Nu = 0.01,
        Type = SVMType.ONE_CLASS
    };
    occ.CreateModel(svmP);
    machines.Add(machine, occ);
}
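// Hedged usage sketch (not from the original code): once CreateSVM has registered a model per
// sensor, outlier prediction presumably follows the pattern used in PredictionThread below.
// DetectAnomalies is a hypothetical name; it assumes machines maps SENSOR to OneClassClassifier.
private List<OneClassFV> DetectAnomalies(SENSOR sensor, List<OneClassFV> windows)
{
    // Look up the per-sensor one-class model and collect the windows it flags as outliers,
    // mirroring the AddRange(PredictOutliers(...)) usage in the prediction threads.
    List<OneClassFV> outliers = new List<OneClassFV>();
    outliers.AddRange(machines[sensor].PredictOutliers(windows));
    return outliers;
}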
private void PredictionThread(ref int count, SENSOR sensor, int start, int end, ref ConcurrentStack<SVMParameter> svmParams, List<SVMNode[]> data, int svmCount, ref NoveltyResult bestCoveredResult, ref Mutex mutex)
{
    OneClassClassifier occ = new OneClassClassifier(data);
    List<Events> eventResult = new List<Events>();
    // Work on copies so the shared event list is not mutated across worker threads.
    foreach (Events p in events)
    {
        eventResult.Add(p.Copy());
    }

    // Consume parameter sets until the shared stack is drained.
    while (!svmParams.IsEmpty)
    {
        SVMParameter svmParam;
        svmParams.TryPop(out svmParam);
        if (svmParam == null)
        {
            break;
        }

        occ.CreateModel(svmParam);
        List<OneClassFV> anomalies = new List<OneClassFV>();
        anomalies.AddRange(occ.PredictOutliers(featureVectors[sensor].Where(x => start < x.TimeStamp && x.TimeStamp < end).ToList()));

        PointsOfInterest dPointsOfInterest = new PointsOfInterest(anomalies);
        foreach (Events evt in eventResult)
        {
            evt.SetPointOfInterest(dPointsOfInterest);
        }

        NoveltyResult tempResult = new NoveltyResult(dPointsOfInterest, eventResult, start, end, svmParam, anomalies);

        // Keep the parameter set with the best covered score; guarded because
        // bestCoveredResult and the progress counter are shared between worker threads.
        mutex.WaitOne();
        if (bestCoveredResult == null || tempResult.CalculateCoveredScore() > bestCoveredResult.CalculateCoveredScore())
        {
            bestCoveredResult = tempResult;
        }
        count++;
        SetProgress(count, sensor);
        mutex.ReleaseMutex();
    }
    Log.LogMessage(sensor + " done!");
}
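// A minimal driver sketch (not part of the original code; the grid bounds and the name RunNuSweep
// are illustrative assumptions) showing how PredictionThread could be fed: a nu grid is pushed
// onto the shared ConcurrentStack and the worker drains it.
private void RunNuSweep(SENSOR sensor, int start, int end, List<SVMNode[]> data)
{
    var svmParams = new ConcurrentStack<SVMParameter>();
    for (int i = 1; i <= 100; i++)
    {
        // nu from 0.001 to 0.1; kernel settings mirror CreateSVM above.
        svmParams.Push(new SVMParameter { Kernel = SVMKernelType.RBF, Type = SVMType.ONE_CLASS, C = 100, Gamma = 0.01, Nu = i / 1000.0 });
    }
    int count = 0;
    int svmCount = svmParams.Count;
    NoveltyResult bestCoveredResult = null;
    Mutex mutex = new Mutex();
    // Run the worker synchronously here; ref parameters cannot cross a lambda boundary,
    // so a threaded version would need shared fields instead.
    PredictionThread(ref count, sensor, start, end, ref svmParams, data, svmCount, ref bestCoveredResult, ref mutex);
}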
private void button1_Click(object sender, EventArgs e)
{
    noveltyChart.ChartAreas.First().BackColor = Color.DarkGray;

    FolderBrowserDialog fbd = new FolderBrowserDialog();
    if (fbd.ShowDialog() == DialogResult.OK)
    {
        string path = fbd.SelectedPath;
        string testSubjectId = path.Split('\\')[path.Split('\\').Length - 2];
        fdNovelty.LoadFromFile(new string[] { path + @"\EEG.dat", path + @"\GSR.dat", path + @"\HR.dat", path + @"\KINECT.dat" }, DateTime.Now, false);
        events = File.ReadAllLines(path + @"\SecondTest.dat");
        string[] tmpSevents = File.ReadAllLines(path + @"\sam.dat");
        foreach (string ev in tmpSevents)
        {
            string[] parts = ev.Split(':');
            sEvents.Add(new samEvents(int.Parse(parts[0]), int.Parse(parts[1]), int.Parse(parts[2])));
        }
    }

    // Bail out if the second-test events were never loaded (e.g. the dialog was cancelled).
    if (events == null || events.Length == 0)
    {
        return;
    }

    int start = useRestInTraining.Checked ? 180000 : 0;
    int trainingEnd = int.Parse(events[2].Split('#')[0]);
    int windowSize = 5000;
    int stepSize = 100;

    // Slide a window over the GSR signal and extract mean/max/min/standard deviation per window.
    List<List<double>> featureVectors = new List<List<double>>();
    List<int> timeStamps = new List<int>();
    int firstTimestamp = fdNovelty.gsrData.First().timestamp;
    for (int i = 0; i < fdNovelty.gsrData.Last().timestamp - firstTimestamp - windowSize; i += stepSize)
    {
        List<double> data = fdNovelty.gsrData
            .SkipWhile(x => (x.timestamp - firstTimestamp) < i)
            .TakeWhile(x => i + windowSize > (x.timestamp - firstTimestamp))
            .Select(x => (double)x.resistance)
            .ToList();
        if (data.Count == 0)
        {
            continue;
        }

        double avg = data.Average();
        double sd = Math.Sqrt(data.Average(x => Math.Pow(x - avg, 2)));
        featureVectors.Add(new List<double> { avg, data.Max(), data.Min(), sd });
        timeStamps.Add(i);
    }
    featureVectors = featureVectors.NormalizeFeatureList<double>(Normalize.OneMinusOne).ToList();

    // Split into training and prediction sets at the end of the training period.
    var dataSet = featureVectors.Zip(timeStamps, (features, timestamp) => Tuple.Create(features, timestamp));
    var trainingSet = dataSet.SkipWhile(x => x.Item2 < start).TakeWhile(x => x.Item2 < trainingEnd);
    var predictionSet = dataSet.SkipWhile(x => x.Item2 < trainingEnd);
    int firstPrediction = predictionSet.First().Item2;

    OneClassClassifier occ = new OneClassClassifier(trainingSet.Select(x => x.Item1).ToList());
    SVMParameter svmP = new SVMParameter
    {
        Kernel = SVMKernelType.RBF,
        C = 100,
        Gamma = 0.01,
        Nu = 0.01,
        Type = SVMType.ONE_CLASS
    };
    occ.CreateModel(svmP);

    /* Outlier prediction left disabled in the original:
    List<int> indexes = occ.PredictOutliers(predictionSet.Select(x => x.Item1).ToList());
    foreach (int index in indexes)
    {
        timestampsOutliers.Add(predictionSet.ElementAt(index).Item2 - firstPrediction + 180000 + 4000);
    }
    */

    updateChart();
}
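// Hedged helper sketch (not in the original): the per-window statistics computed inline in
// button1_Click, factored out for reuse. WindowFeatures is a hypothetical name; it returns
// mean, max, min, and population standard deviation in the same order the handler appends them.
private static List<double> WindowFeatures(List<double> window)
{
    double avg = window.Average();
    // Population standard deviation, matching the inline Math.Sqrt(Average(...)) computation above.
    double sd = Math.Sqrt(window.Average(x => Math.Pow(x - avg, 2)));
    return new List<double> { avg, window.Max(), window.Min(), sd };
}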