public void TrainTest1()
{
    const int LearnCount = 100;
    const int TestCount = 1000;
    const int Length = 300;
    const double PositiveRatio = 0.1;

    // Generate labeled samples and narrow features/weights to single precision.
    List<(float[] x, bool y, float weight)> samples = SupportVectorMachineTest
        .GenerateSamples(LearnCount + TestCount, Length, PositiveRatio)
        .Select(s => (s.x.Select(v => (float)v).ToArray(), s.y, (float)s.weight))
        .ToList();

    // Train an SVM on the first LearnCount samples using LibSVM-style SMO.
    SequentualMinimalOptimization smo = new SequentualMinimalOptimization(new ChiSquare())
    {
        Algorithm = SMOAlgorithm.LibSVM,
        Tolerance = 0.01f,
    };

    List<(float[] x, bool y, float weight)> trainingSet = samples.Take(LearnCount).ToList();

    SupportVectorMachine machine = SupportVectorMachine.Learn(
        smo,
        trainingSet.Select(s => s.x).ToList(),
        trainingSet.Select(s => s.y).ToList(),
        trainingSet.Select(s => s.weight).ToList(),
        CancellationToken.None);

    // Classify the held-out samples (score > 0.5 means positive) and build a report.
    List<ClassificationResult<bool?>> results = new List<ClassificationResult<bool?>>();
    foreach ((float[] x, bool y, float weight) sample in samples.Skip(LearnCount))
    {
        results.Add(new ClassificationResult<bool?>(
            null,
            machine.Classify(sample.x) > 0.5f,
            sample.y,
            1.0f,
            true));
    }

    ClassificationReport<bool?> report = new ClassificationReport<bool?>(results);
}
/// <summary>
/// Completes training: clusters the accumulated feature vectors with k-means,
/// re-encodes each sample against the learned clusters, and trains a
/// one-vs-all SVM over the encoded vectors.
/// </summary>
/// <param name="vectorLength">The dimensionality of the feature vectors passed to k-means.</param>
/// <param name="cancellationToken">The token used to cancel the (potentially long) learning phases.</param>
/// <returns>The sorted class labels, the learned k-means model, and the learned SVM.</returns>
/// <exception cref="ArgumentException">Fewer than two distinct classes were accumulated.</exception>
public (List<string> classes, KMeans kmeans, OneVsAllSupportVectorMachine svm) FinishLearning(
    int vectorLength,
    CancellationToken cancellationToken)
{
    // count classes
    List<string> classes = new List<string>(this.features.Select(x => x.truth).ToLookup(x => x).Select(x => x.Key));
    if (classes.Count < 2)
    {
        // Give callers an actionable message instead of a bare ArgumentException.
        throw new ArgumentException("At least two distinct classes are required to finish learning.");
    }

    classes.Sort();

    // count vectors
    int numberOfVectors = this.features.Sum(x => x.features.Count);

    // copy vectors, accumulating a weight (occurrence count) per unique vector
    Dictionary<IVector<float>, float> vectors = new Dictionary<IVector<float>, float>(numberOfVectors);
    for (int i = 0, ii = this.features.Count; i < ii; i++)
    {
        FeatureDetectors.Features f = this.features[i].features;
        for (int j = 0, jj = f.Count, len = f.Length, off = 0; j < jj; j++, off += len)
        {
            ////DenseVectorF vector = new DenseVectorF(len, f.X, off);
            SparseVectorF vector = SparseVectorF.FromDense(len, f.X, off);
            vectors[vector] = vectors.TryGetValue(vector, out float weight) ? weight + 1.0f : 1.0f;
        }
    }

    cancellationToken.ThrowIfCancellationRequested();

    // learn k-means
    KMeans kmeans = KMeans.Learn(
        vectorLength,
        KMeansSeeding.Random,
        2,
        default(EuclideanDistance),
        vectors.Keys.ToList(),
        vectors.Values.ToList(),
        cancellationToken);

    cancellationToken.ThrowIfCancellationRequested();

    // learn svm
    // Map each class label to its index in the sorted list.
    // NOTE: System.Linq has no index-taking ToDictionary overload, so pair each
    // element with its index via Select((x, i) => ...) first.
    Dictionary<string, int> classesLookup = classes
        .Select((name, index) => (name, index))
        .ToDictionary(x => x.name, x => x.index);

    SequentualMinimalOptimization smo = new SequentualMinimalOptimization(new ChiSquare())
    {
        Algorithm = SMOAlgorithm.LibSVM,
        Tolerance = 0.01f,
    };

    List<float[]> svmx = new List<float[]>(this.features.Count);
    List<int> svmy = new List<int>(this.features.Count);
    for (int i = 0, ii = this.features.Count; i < ii; i++)
    {
        (FeatureDetectors.Features features, string truth) = this.features[i];
        svmx.Add(PointsOfInterestClassifier.PrepareVector(kmeans, features, cancellationToken));
        svmy.Add(classesLookup[truth]);
    }

    cancellationToken.ThrowIfCancellationRequested();

    OneVsAllSupportVectorMachine svm = OneVsAllSupportVectorMachine.Learn(
        smo,
        classes.Count,
        svmx,
        svmy,
        null,
        cancellationToken);

    cancellationToken.ThrowIfCancellationRequested();

    return (classes, kmeans, svm);
}