/// <summary>
/// Rebuilds the cluster node tree from the hierarchical clustering of the
/// application protocol models, refreshing each cluster's statistics from
/// the supplied meter. Does nothing when the meter has not changed.
/// </summary>
/// <param name="precMeasure">Statistics meter to push into every cluster.</param>
private void InitializeClusters(ApplicationProtocolClassificationStatisticsMeter precMeasure)
{
    // Same meter as last time: the tree is already up to date.
    if (this.PrecMeasure == precMeasure)
    {
        return;
    }

    var clusters = this.AppIdentMainVm.AppProtoModelEval.ApplicationProtocolModelsHierachivalClustering();
    this.Nodes.Clear();
    if (clusters == null)
    {
        return;
    }

    foreach (var cluster in clusters)
    {
        cluster.UpdateStatistics(precMeasure);
        var node = new ClusterNodeModel { Cluster = cluster };
        // NOTE(review): each node is set as its own parent (self-reference);
        // presumably this marks top-level nodes as roots — confirm intent.
        node.ParrentCluster = node;
        this.Nodes.Add(node);
    }
}
/// <summary>
/// Serializes the given statistics meter to an XML file via
/// DataContractSerializer. Failures are logged to the console and swallowed
/// (best-effort save, matching the original contract).
/// </summary>
/// <param name="fileName">Destination path of the XML file.</param>
/// <param name="applicationStaticticsMeter">Statistics meter to serialize.</param>
public void SaveStatisticsToxml(string fileName, ApplicationProtocolClassificationStatisticsMeter applicationStaticticsMeter)
{
    try
    {
        var serializer = new DataContractSerializer(typeof(ApplicationProtocolClassificationStatisticsMeter));
        // using-statement guarantees the writer is flushed and closed even
        // when WriteObject throws; the original leaked the writer on failure.
        using (var writer = XmlWriter.Create(fileName))
        {
            serializer.WriteObject(writer, applicationStaticticsMeter);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("EXCEPTION during serialization\n" + ex);
    }
}
/// <summary>
/// Aggregates per-application statistics from all leaf cluster nodes into a
/// fresh meter and publishes it both to this view model and to the main
/// view model's EpiPrecMeasure.
/// </summary>
private void UpdatePrecMeter()
{
    var meter = new ApplicationProtocolClassificationStatisticsMeter();
    foreach (var leaf in this.Nodes.Where(node => node.IsLiefNode))
    {
        // On a key collision the stored statistics win (the update lambda
        // returns the existing value unchanged).
        meter.AppStatistics.AddOrUpdate(
            leaf.Cluster.ClusterAppTags,
            leaf.Cluster.ApplicationProtocolClassificationStatistics,
            (key, existing) => existing);
    }

    this.PrecMeasure = meter;
    this.AppIdentMainVm.EpiPrecMeasure = meter;
}
// TODO: not tested yet
/// <summary>
/// Writes predicted vs. actual application tags (and the prediction
/// precision) for every conversation in the test set to the debug output.
/// Skipped entirely when OmmitClassifiedModelDetails is set.
/// </summary>
/// <param name="precMeasure">Meter holding per-conversation classifications.</param>
/// <param name="testSet">Conversations that were classified.</param>
private void PrintConversationClassificationPrecision(ApplicationProtocolClassificationStatisticsMeter precMeasure, List<L7Conversation> testSet)
{
    if (this.OmmitClassifiedModelDetails)
    {
        return;
    }

    foreach (var conversation in testSet)
    {
        var classification = precMeasure.ConversationsWithClassification[conversation];
        Debug.WriteLine(
            "Application predict: " + classification.ApplicationProtocolName
            + "/" + conversation.AppTag
            + " precision: " + classification.Precision);
    }
}
/// <summary>
/// Recursively refreshes classification statistics for this cluster subtree.
/// Leaves copy their statistics straight from the meter (keyed by the first
/// member's label); inner clusters update all children first, then
/// recalculate their own aggregate.
/// </summary>
/// <param name="applicationProtocolClassificationStatisticsMeter">Source of per-label statistics.</param>
public void UpdateStatistics(ApplicationProtocolClassificationStatisticsMeter applicationProtocolClassificationStatisticsMeter)
{
    if (this.Children.IsNullOrEmpty())
    {
        // Leaf node: look up this cluster's statistics by its label.
        this.ApplicationProtocolClassificationStatistics =
            applicationProtocolClassificationStatisticsMeter[this.Members.First().Label];
        return;
    }

    // Inner node: drill down into every child, then aggregate the results.
    foreach (var child in this.Children)
    {
        child.UpdateStatistics(applicationProtocolClassificationStatisticsMeter);
    }

    this.ApplicationProtocolClassificationStatistics =
        this.RecalculateApplicationProtocolClassificationStatisticsForCluster();
}
/// <summary>
/// Processes a capture, builds an AppIdent data source from the resulting
/// L7 conversations, then runs both EPI and Bayesian (ML) classification.
/// Returns the EPI evaluator; the two out-parameters receive the respective
/// precision meters.
/// </summary>
/// <param name="pcapFilePath">
/// NOTE(review): never used — the capture is always taken from the
/// hard-coded SnoopersPcaps.Default.app_identification_testM2_cap resource
/// (unlike DecisionTreeTestBase, which processes its path argument).
/// Confirm whether this parameter should drive the capture instead.
/// </param>
/// <param name="trainingToClassifyingRatio">Train/verify split ratio for the data source.</param>
/// <param name="epiprecMeasure">Receives the EPI classification statistics.</param>
/// <param name="mlprecMeasure">Receives the Bayesian classification statistics.</param>
/// <param name="precisionTrashHold">Precision threshold passed to the Bayesian classifier.</param>
/// <param name="minFlows">Minimum flow count for a conversation to enter the data source.</param>
/// <returns>The EPI evaluator produced by the EPI classification run.</returns>
public EPIEvaluator MlEpiTestBase(
    string pcapFilePath,
    double trainingToClassifyingRatio,
    out ApplicationProtocolClassificationStatisticsMeter epiprecMeasure,
    out ApplicationProtocolClassificationStatisticsMeter mlprecMeasure,
    double precisionTrashHold,
    int minFlows = 1)
{
    this.FrameworkController.ProcessCapture(this.PrepareCaptureForProcessing(SnoopersPcaps.Default.app_identification_testM2_cap));
    var appIdentDataSource = this.AppIdentService.CreateAppIdentDataSource(this.L7Conversations, minFlows, trainingToClassifyingRatio);
    EPIEvaluator epiEvaluator;
    epiprecMeasure = this.AppIdentService.EpiClasify(appIdentDataSource, new FeatureSelector(), out epiEvaluator);
    // NOTE(review): 0.7 is a hard-coded training/verification ratio — confirm
    // whether trainingToClassifyingRatio should be forwarded here instead.
    mlprecMeasure = this.AppIdentService.BayesianClassify(appIdentDataSource, 0.7, precisionTrashHold);
    return(epiEvaluator);
}
// DECISION TREE CLF
/// <summary>
/// Processes the given capture, builds application protocol models with a
/// train/test split, classifies the held-out test set with a decision tree
/// and prints per-conversation classification results.
/// </summary>
/// <param name="pcapFilePath">Capture file to process.</param>
/// <param name="trainingToClassifyingRatio">Fraction of conversations used for training.</param>
/// <param name="precMeasure">Receives the decision-tree classification statistics.</param>
/// <param name="precisionTrashHold">Minimum precision for a prediction to be counted.</param>
/// <param name="minFlows">NOTE(review): unused in this method; kept for signature compatibility.</param>
/// <returns>The evaluator holding the trained application protocol models.</returns>
private EPIEvaluator DecisionTreeTestBase(
    string pcapFilePath,
    double trainingToClassifyingRatio,
    out ApplicationProtocolClassificationStatisticsMeter precMeasure,
    double precisionTrashHold = 0.99,
    int minFlows = 1)
{
    this.ProcessPcapFile(pcapFilePath);

    List<L7Conversation> testSet;
    var applicationProtocolModelEvaluator = this.AppIdentService.CreateApplicationProtocolModels(this.L7Conversations, trainingToClassifyingRatio, out testSet);
    // Removed a discarded "TrainingFeatureVectors.ToList()" call — its result
    // was never used, so it only materialized the sequence for nothing.
    precMeasure = this.AppIdentService.DecisionTreeClassify(precisionTrashHold, applicationProtocolModelEvaluator, testSet);
    this.PrintConversationClassificationPrecision(precMeasure, testSet);
    return applicationProtocolModelEvaluator;
}
/// <summary>
/// Classifies the verification set with an already-trained Accord multiclass
/// model, records predicted vs. true labels into a statistics meter, saves
/// the meter via the test context and returns it.
/// </summary>
/// <param name="appIdentDataSource">Source whose VerificationSet is classified.</param>
/// <param name="model">Trained Accord multiclass classifier.</param>
/// <param name="featureSelector">Feature selection applied when building the Accord source.</param>
/// <param name="appIdentTestContext">Context used to persist the resulting statistics.</param>
public ApplicationProtocolClassificationStatisticsMeter AccordClassify(
    AppIdentDataSource appIdentDataSource,
    MulticlassClassifierBase model,
    FeatureSelector featureSelector,
    AppIdentTestContext appIdentTestContext)
{
    var statistics = new ApplicationProtocolClassificationStatisticsMeter();
    var accordSource = this.GetAppIdentAcordSource(appIdentDataSource.VerificationSet, featureSelector);
    var predictions = model.Decide(accordSource.Samples);

    for (var i = 0; i < predictions.Length; i++)
    {
        // Predicted label comes from the integer->label map; the true label
        // is taken positionally from the source labels.
        statistics.UpdateStatistics(accordSource.LabelsFromInteges[predictions[i]], accordSource.Labels[i]);
    }

    appIdentTestContext.Save(statistics);
    return statistics;
}
/// <summary>
/// Runs k-fold cross-validation of a random-forest model over the training
/// set, prints training/validation error statistics, persists the best model
/// (smallest validation error) and records its validation-set predictions in
/// a statistics meter.
/// </summary>
/// <param name="appIdentDataSource">Source whose TrainingSet is cross-validated.</param>
/// <param name="featureSelector">Feature selection applied when building the Accord source.</param>
/// <param name="bestParameters">Grid-search parameters for the random forest.</param>
/// <param name="folds">Number of cross-validation folds.</param>
/// <param name="appIdentTestContext">Context used to persist the model and statistics.</param>
/// <returns>Statistics of predicted vs. true labels on the best fold's validation data.</returns>
public ApplicationProtocolClassificationStatisticsMeter RandomForestCrossValidation(
    AppIdentDataSource appIdentDataSource,
    FeatureSelector featureSelector,
    GridSearchParameterCollection bestParameters,
    int folds,
    AppIdentTestContext appIdentTestContext)
{
    var precMeasure = new ApplicationProtocolClassificationStatisticsMeter();
    var accordAppIdent = new AccordAppIdent();
    var appIdentAcordSource = this.GetAppIdentAcordSource(appIdentDataSource.TrainingSet, featureSelector);
    var cvResults = accordAppIdent.GetCrossValidationResultsOfRandomForestModel(appIdentAcordSource, bestParameters, folds);

    Console.WriteLine("### CV Results ###");
    Console.WriteLine("\n### Training stats ###");
    Console.WriteLine(">> model error mean: {0}\n>> model std: {1}", Math.Round(cvResults.Training.Mean, 6), Math.Round(cvResults.Training.StandardDeviation, 6));
    Console.WriteLine("\n### Validation stats ###");
    Console.WriteLine(">> model error mean: {0}\n>> model std: {1}", Math.Round(cvResults.Validation.Mean, 6), Math.Round(cvResults.Validation.StandardDeviation, 6));

    // Select the fold with the smallest validation error as the best model.
    var minError = cvResults.Validation.Values.Min();
    var bestIndex = cvResults.Validation.Values.IndexOf(minError);
    var classifier = cvResults.Models[bestIndex];
    var labels = appIdentAcordSource.Labels.Distinct();
    appIdentTestContext.Save(classifier.Model, labels);

    // Robustness fix: fail with a clear message when Tag does not carry the
    // expected validation data, instead of a NullReferenceException below
    // (the original used an unchecked "as" cast).
    var validationDataSource = classifier.Tag as AccordAppIdent.ValidationDataSource;
    if (validationDataSource == null)
    {
        throw new InvalidOperationException("Best cross-validation model does not carry its validation data source in Tag.");
    }

    var predictedValues = classifier.Model.Decide(validationDataSource.ValidationInputs);
    for (var j = 0; j < predictedValues.Length; j++)
    {
        precMeasure.UpdateStatistics(
            appIdentAcordSource.LabelsFromInteges[predictedValues[j]],
            appIdentAcordSource.LabelsFromInteges[validationDataSource.ValidationOutputs[j]]);
    }

    appIdentTestContext.SaveCrossValidation(precMeasure);
    return precMeasure;
}
/// <summary>
/// Trains a naive Bayes classifier on the training set, classifies every
/// vector in the verification set and records predictions whose precision
/// exceeds the threshold. Statistics are optionally saved via the context.
/// </summary>
/// <param name="appIdentDataSource">Pre-split training/verification data.</param>
/// <param name="trainingToVerificationRatio">Not used here; the data source is already split.</param>
/// <param name="precisionTrashHold">Minimum precision for a prediction to be counted.</param>
/// <param name="appIdentTestContext">Optional context used to persist the statistics.</param>
public ApplicationProtocolClassificationStatisticsMeter BayesianClassify(AppIdentDataSource appIdentDataSource, double trainingToVerificationRatio, double precisionTrashHold, AppIdentTestContext appIdentTestContext = null)
{
    var statistics = new ApplicationProtocolClassificationStatisticsMeter();
    var classifier = new NaiveBayesClassifier(appIdentDataSource.TrainingSet);

    foreach (var vector in appIdentDataSource.VerificationSet)
    {
        // Remember the ground-truth tag (separators stripped), then blank the
        // label so the classifier cannot see it.
        var expectedTag = vector.Label.Replace("_", "").Replace("-", "");
        vector.Label = "Unknown";
        classifier.Normalizator.Normalize(vector);

        var prediction = classifier.ClassifierModel.Predict(vector, true);
        if (prediction.Precision > precisionTrashHold)
        {
            statistics.UpdateStatistics(prediction.Label, expectedTag);
        }
    }

    appIdentTestContext?.Save(statistics);
    return statistics;
}
/// <summary>
/// Deserializes an ApplicationProtocolClassificationStatisticsMeter from an
/// XML file written with DataContractSerializer. On failure the error is
/// logged to the console and the exception is rethrown to the caller.
/// </summary>
/// <param name="fileName">Path of the XML file to load.</param>
/// <returns>The deserialized statistics meter.</returns>
public ApplicationProtocolClassificationStatisticsMeter LoadStatisticsFromXml(string fileName)
{
    try
    {
        var serializer = new DataContractSerializer(typeof(ApplicationProtocolClassificationStatisticsMeter));
        // using-statements close the stream and reader even when
        // deserialization throws; the original leaked both on failure and
        // also created a throwaway meter instance that was always overwritten.
        using (var stream = new FileStream(fileName, FileMode.Open))
        using (var reader = XmlDictionaryReader.CreateTextReader(stream, new XmlDictionaryReaderQuotas()))
        {
            return (ApplicationProtocolClassificationStatisticsMeter)serializer.ReadObject(reader);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Cannot Load model " + fileName);
        Console.WriteLine(ex);
        throw;
    }
}
/// <summary>
/// Shows the classification-results window on a dedicated STA thread with
/// its own WPF dispatcher and blocks the caller until the window is closed.
/// Needed because WPF windows must live on an STA thread that pumps messages.
/// </summary>
/// <param name="appProtoModelEval">Evaluator displayed by the view model.</param>
/// <param name="epiprecMeasure">EPI statistics displayed by the view model.</param>
/// <param name="mlprecMeasure">ML statistics displayed by the view model.</param>
private static void ShowResultsInApp(EPIEvaluator appProtoModelEval, ApplicationProtocolClassificationStatisticsMeter epiprecMeasure, ApplicationProtocolClassificationStatisticsMeter mlprecMeasure)
{
    WrapperWindow mainView = null;
    var t = new Thread(() =>
    {
        mainView = new WrapperWindow
        {
            DataContext = new AppIdentMainVm(appProtoModelEval, epiprecMeasure, mlprecMeasure)
        };
        // Initiates the dispatcher thread shutdown when the mainView closes
        mainView.Closed += (s, e) => mainView.Dispatcher.InvokeShutdown();
        mainView.Show();
        // Makes the thread support message pumping (runs until InvokeShutdown)
        System.Windows.Threading.Dispatcher.Run();
    });
    // Configure the thread: WPF requires a single-threaded apartment.
    t.SetApartmentState(ApartmentState.STA);
    t.Start();
    // Block the calling thread until the UI thread finishes.
    t.Join();
}
// NOTE: a commented-out CreateDatasetAndTestset helper was removed here —
// dataset splitting is now provided by AppIdentDataSource (TrainingSet /
// VerificationSet).

/// <summary>
/// Trains a decision-tree classifier on the training set, classifies every
/// vector in the verification set and records predictions whose precision
/// exceeds the threshold.
/// </summary>
/// <param name="appIdentDataSource">Pre-split training/verification data.</param>
/// <param name="trainingToVerificationRatio">Not used here; the data source is already split. Kept for signature compatibility.</param>
/// <param name="precisionTrashHold">Minimum precision for a prediction to be counted.</param>
/// <returns>Statistics of predicted vs. actual application tags.</returns>
public ApplicationProtocolClassificationStatisticsMeter DecisionTreeClassify(
    AppIdentDataSource appIdentDataSource,
    double trainingToVerificationRatio,
    double precisionTrashHold)
{
    var precMeasure = new ApplicationProtocolClassificationStatisticsMeter();
    var classifier = new DecisionTreeClassifier(appIdentDataSource.TrainingSet);
    foreach (var feature in appIdentDataSource.VerificationSet)
    {
        // Remember the ground-truth tag, then blank the label so the
        // classifier cannot see it.
        var appTag = feature.Label;
        feature.Label = "Unknown";
        classifier.Normalizator.Normalize(feature);
        var cl = classifier.ClassifierModel.Predict(feature);
        if (cl.Precision > precisionTrashHold)
        {
            // BUG FIX: record the classifier's predicted label, not
            // feature.Label — that was just overwritten with "Unknown", so
            // every hit was logged as "Unknown" (BayesianClassify correctly
            // uses the prediction's Label here).
            precMeasure.UpdateStatistics(cl.Label, appTag);
        }
    }
    return precMeasure;
}
/// <summary>
/// Compares freshly computed classification statistics against an older run
/// loaded from XML, printing per-application differences and a final
/// new-vs-old score. Applications missing from the old statistics are
/// reported and skipped.
/// </summary>
/// <param name="newStatistics">Statistics of the current run.</param>
/// <param name="fileNameOldStatsXml">XML file holding the previous run's statistics.</param>
private void CompareStatistics(ApplicationProtocolClassificationStatisticsMeter newStatistics, string fileNameOldStatsXml)
{
    var oldStatistics = this.AppIdentService.LoadStatisticsFromXml(fileNameOldStatsXml);
    var newScore = 0;
    var oldScore = 0;
    Console.WriteLine("##############Compare statistics##############");
    foreach (var applicationStatistic in newStatistics.AppStatistics)
    {
        if (!oldStatistics.AppStatistics.ContainsKey(applicationStatistic.Key))
        {
            Console.WriteLine("Missing application " + applicationStatistic.Key + " in old statistics.\n");
            continue;
        }

        var oldStat = oldStatistics.AppStatistics[applicationStatistic.Key];
        var newPrec = applicationStatistic.Value.Precission;
        var oldPrec = oldStat.Precission;
        var newRec = applicationStatistic.Value.Recall;
        var oldRec = oldStat.Recall;
        var newFmes = applicationStatistic.Value.FMeasure;
        var oldFmes = oldStat.FMeasure;

        // Tally all three metrics (the non-short-circuiting |= guarantees
        // every metric is scored); "change" is true when any metric differs.
        var change = TallyMetric(newPrec, oldPrec, ref newScore, ref oldScore);
        change |= TallyMetric(newRec, oldRec, ref newScore, ref oldScore);
        change |= TallyMetric(newFmes, oldFmes, ref newScore, ref oldScore);

        if (change)
        {
            Console.WriteLine(applicationStatistic.Key);
            Console.WriteLine("New model TP: " + applicationStatistic.Value.TP + " FP: " + applicationStatistic.Value.FP + " FN: " + applicationStatistic.Value.FN);
            Console.WriteLine("Old model TP: " + oldStat.TP + " FP: " + oldStat.FP + " FN: " + oldStat.FN);
            Console.WriteLine("New model/Old model Precision: " + newPrec + "/" + oldPrec);
            Console.WriteLine("New model/Old model Recall: " + newRec + "/" + oldRec);
            Console.WriteLine("New model/Old model F-Measure: " + newFmes + "/" + oldFmes);
            Console.WriteLine();
        }
    }
    Console.WriteLine("Score new/old: " + newScore + "/" + oldScore);
}

/// <summary>
/// Scores a single metric comparison: increments newScore when the new value
/// is larger, oldScore when the old value is larger. Returns true when the
/// two values differ. Extracted from three copy-pasted compare blocks.
/// </summary>
private static bool TallyMetric<T>(T newValue, T oldValue, ref int newScore, ref int oldScore) where T : IComparable<T>
{
    var comparison = newValue.CompareTo(oldValue);
    if (comparison > 0)
    {
        newScore++;
        return true;
    }
    if (comparison < 0)
    {
        oldScore++;
        return true;
    }
    return false;
}