/// <summary>
/// Trains the classifier on the training set and evaluates it against the test set,
/// deploying an in-fold truth table from the context when none is supplied.
/// </summary>
/// <param name="context">Operation context holding the feature space, space model, train/test sets and receiving the test results.</param>
/// <param name="truthTable">Truth table to use; when null an in-fold table is deployed from <paramref name="context"/>.</param>
/// <param name="distributionRule">Rule used to split the feature space into training and test sets.</param>
/// <param name="log">Log builder receiving progress messages.</param>
public void PerformClassification(OperationContext context, ExperimentTruthTable truthTable, ClassificationDatasetSeparationEnum distributionRule, ILogBuilder log)
{
    log.log("Performing classification");

    if (truthTable == null)
    {
        // No fold-level truth table supplied: build one from the current feature space.
        truthTable = new ExperimentTruthTable();
        notes.log(":: DEPLOYING IN-FOLD TRUTH TABLE ::");
        truthTable.Deploy(context.featureSpace, context.spaceModel.labels.Select(x => x.name).ToList(), log);
    }

    DistributeTrainingAndTestSets(distributionRule, truthTable, context.featureSpace, log, context.testSet, context.trainingSet);

    if (!context.trainingSet.Any())
    {
        // NOTE(review): this branch only logs; the 1:1 (train == test) scheme is
        // presumably realized by DistributeTrainingAndTestSets — confirm.
        // FIX: "SHEME" -> "SCHEME" (typo in log message).
        notes.log("TRAINING SET EMPTY ---- APPLYING 1:1 EXPERIMENT SCHEME: training and test set are the same");
    }
    else
    {
        notes.log("Training [" + classifier.name + "] with [" + context.trainingSet.Count + "] feature vectors.");
        classifier.DoTraining(context.trainingSet, log);
        log.log("Training [" + classifier.name + "] completed.");

        notes.log("Testing [" + classifier.name + "] with [" + context.testSet.Count + "] feature vectors.");

        // Presized to the number of test vectors to avoid repeated reallocation.
        context.testResults = new List<FeatureVectorWithLabelID>(context.testSet.Count);
        var testVectors = context.testSet.Select(x => x.vector);
        foreach (FeatureVector fv in testVectors)
        {
            Int32 result = classifier.DoSelect(fv, log);
            context.testResults.Add(new FeatureVectorWithLabelID(fv, result));
        }

        log.log("Testing [" + classifier.name + "] completed.");
    }
}
/// <summary>
/// Trains the classifier on the training set, evaluates it against the test set,
/// and logs diagnostics when the classifier fails to assign one or more known classes.
/// Deploys an in-fold truth table from the context when none is supplied.
/// </summary>
/// <param name="context">Operation context holding the feature space, space model, train/test sets and receiving the test results.</param>
/// <param name="truthTable">Truth table to use; when null an in-fold table is deployed from <paramref name="context"/>.</param>
/// <param name="distributionRule">Rule used to split the feature space into training and test sets.</param>
/// <param name="log">Log builder receiving progress and warning messages.</param>
public void PerformClassification(OperationContext context, ExperimentTruthTable truthTable, ClassificationDatasetSeparationEnum distributionRule, ILogBuilder log)
{
    log.log("Performing classification");

    if (truthTable == null)
    {
        // No fold-level truth table supplied: build one from the current feature space.
        truthTable = new ExperimentTruthTable();
        notes.log(":: DEPLOYING IN-FOLD TRUTH TABLE ::");
        log.log(":: DEPLOYING IN-FOLD TRUTH TABLE ::");
        truthTable.Deploy(context.featureSpace, context.spaceModel.labels.Select(x => x.name).ToList(), log);
    }

    DistributeTrainingAndTestSets(distributionRule, truthTable, context.featureSpace, log, context.testSet, context.trainingSet);

    if (!context.trainingSet.Any())
    {
        // NOTE(review): this branch only logs; the 1:1 (train == test) scheme is
        // presumably realized by DistributeTrainingAndTestSets — confirm.
        // FIX: "SHEME" -> "SCHEME" (typo in log message).
        notes.log("TRAINING SET EMPTY ---- APPLYING 1:1 EXPERIMENT SCHEME: training and test set are the same");
    }
    else
    {
        notes.log("Training [" + classifier.name + "] with [" + context.trainingSet.Count + "] feature vectors.");
        classifier.DoTraining(context.trainingSet, log);
        log.log("Training [" + classifier.name + "] completed.");

        notes.log("Testing [" + classifier.name + "] with [" + context.testSet.Count + "] feature vectors.");

        // Presized to the number of test vectors to avoid repeated reallocation.
        context.testResults = new List<FeatureVectorWithLabelID>(context.testSet.Count);
        var testVectors = context.testSet.Select(x => x.vector);

        // FIX: HashSet gives O(1) membership instead of the original
        // List.Contains scan per test vector (accidental O(n^2)).
        HashSet<Int32> distinctResults = new HashSet<Int32>();

        foreach (FeatureVector fv in testVectors)
        {
            Int32 result = classifier.DoSelect(fv, log);
            distinctResults.Add(result);
            context.testResults.Add(new FeatureVectorWithLabelID(fv, result));
        }

        if (distinctResults.Count < truthTable.labels_without_unknown.Count)
        {
            // Determine which known classes never appeared among the classifier's outputs.
            List<String> no_match_labels = truthTable.labels_without_unknown.ToList();
            foreach (Int32 d in distinctResults)
            {
                // FIX: guard the index — DoSelect may return an id outside
                // [0, labels_without_unknown.Count) (e.g. -1 for "unknown"),
                // which previously threw ArgumentOutOfRangeException here.
                if (d >= 0 && d < truthTable.labels_without_unknown.Count)
                {
                    no_match_labels.Remove(truthTable.labels_without_unknown[d]);
                }
            }

            log.log("WARNING --- [" + classifier.name + "] ONLY [" + distinctResults.Count + "] of [" + truthTable.labels_without_unknown.Count + "] were assigned by the classifier");
            foreach (String l in no_match_labels)
            {
                // FIX: "assigment" -> "assignment" (typo in log message).
                log.log("Class [" + l + "] received no assignment");
            }

            // Diagnostics: flag near-constant vectors (fewer than 2 distinct values)
            // that may explain why some classes were never assigned.
            foreach (var v in context.testSet)
            {
                var dist = v.GetDistinctValuesAtVector();
                if (dist.Count < 2)
                {
                    log.log("Test vector [" + v.name + "] has [" + dist.Count + "] distinct values at [" + v.dimensions.Length + "] dimensions!");
                }
            }
            foreach (var v in context.trainingSet)
            {
                var dist = v.GetDistinctValuesAtVector();
                if (dist.Count < 2)
                {
                    log.log("Training vector [" + v.name + "] has [" + dist.Count + "] distinct values at [" + v.dimensions.Length + "] dimensions!");
                }
            }
        }

        log.log("Testing [" + classifier.name + "] completed.");
    }
}