Example #1
        public void TestDecisionFunction()
        {
            // multi class:
            var clf = new Svc <int>(kernel: Kernel.Linear, c: 0.1);

            clf.Fit(iris.Data, iris.Target);

            var dec = (iris.Data * clf.Coef.Transpose()).AddRowVector(clf.Intercept);

            Assert.IsTrue(dec.AlmostEquals(clf.DecisionFunction(iris.Data)));

            // binary:
            clf.Fit(X, Y);
            dec = (X * clf.Coef.Transpose()).AddRowVector(clf.Intercept);
            int[] prediction = clf.Predict(X);
            Assert.IsTrue(dec.AlmostEquals(clf.DecisionFunction(X)));

            var b = clf.DecisionFunction(X).Column(0).Select(v => clf.Classes[v > 0 ? 1 : 0]);

            Assert.IsTrue(prediction.SequenceEqual(b));

            var expected = DenseMatrix.OfArray(new[, ] {
                { -1.0 }, { -0.66 }, { -1.0 }, { 0.66 }, { 1.0 }, { 1.0 }
            });

            Assert.IsTrue(clf.DecisionFunction(X).AlmostEquals(expected, 1E-2));
        }
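A small extra check that could follow the multiclass fit above (not in the original test, and assuming the usual libsvm one-vs-one layout for multiclass decision values): with the three iris classes the decision function should expose one column per pair of classes.

            // hypothetical assertion, assuming a one-vs-one decision layout for multiclass SVC:
            // 3 iris classes give 3 * (3 - 1) / 2 = 3 pairwise decision columns
            Assert.AreEqual(3, clf.DecisionFunction(iris.Data).ColumnCount);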
Example #2
        public void TestSvc()
        {
            var clf = new Svc <int>(kernel: Kernel.Linear, probability: true);

            clf.Fit(X, Y);

            var spClf = new Svc <int>(kernel: Kernel.Linear, probability: true);

            spClf.Fit(SparseMatrix.OfMatrix(X), Y);

            Assert.IsTrue(spClf.Predict(T).SequenceEqual(true_result));

            Assert.IsTrue(spClf.SupportVectors is SparseMatrix);
            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));

            Assert.IsTrue(spClf.DualCoef is SparseMatrix);
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));

            Assert.IsTrue(spClf.Coef is SparseMatrix);
            Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
            Assert.IsTrue(clf.Support.SequenceEqual(spClf.Support));
            Assert.IsTrue(clf.Predict(T).SequenceEqual(spClf.Predict(T)));

            // refit with a different dataset

            clf.Fit(X2, Y2);
            spClf.Fit(SparseMatrix.OfMatrix(X2), Y2);
            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
            Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
            Assert.IsTrue(clf.Support.SequenceEqual(spClf.Support));
            Assert.IsTrue(clf.Predict(T2).SequenceEqual(spClf.Predict(T2)));
            Assert.IsTrue(clf.PredictProba(T2).AlmostEquals(spClf.PredictProba(T2), 0.001));
        }
Example #3
        public void TestSparseRealdata()
        {
            var x = new SparseMatrix(80, 36);

            x[7, 6]   = 0.03771744;
            x[39, 5]  = 0.1003567;
            x[77, 35] = 0.01174647;
            x[77, 31] = 0.027069;

            var y = new[]
            {
                1.0, 0.0, 2.0, 2.0, 1.0, 1.0, 1.0, 2.0, 2.0, 0.0, 1.0, 2.0, 2.0,
                0.0, 2.0, 0.0, 3.0, 0.0, 3.0, 0.0, 1.0, 1.0, 3.0, 2.0, 3.0, 2.0,
                0.0, 3.0, 1.0, 0.0, 2.0, 1.0, 2.0, 0.0, 1.0, 0.0, 2.0, 3.0, 1.0,
                3.0, 0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 2.0, 2.0, 2.0, 3.0, 2.0,
                0.0, 3.0, 2.0, 1.0, 2.0, 3.0, 2.0, 2.0, 0.0, 1.0, 0.0, 1.0, 2.0,
                3.0, 0.0, 0.0, 2.0, 2.0, 1.0, 3.0, 1.0, 1.0, 0.0, 1.0, 2.0, 1.0,
                1.0, 3.0
            };

            var clf = new Svc <double>(kernel: Kernel.Linear);

            clf.Fit(DenseMatrix.OfMatrix(x), y);
            var spClf = new Svc <double>(kernel: Kernel.Linear);

            spClf.Fit(x, y);

            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
        }
Example #4
        /// <summary>
        /// Make some classification predictions on a toy dataset using an SVC.
        ///
        /// If binary is true, restrict to a binary classification problem instead of a
        /// multiclass classification problem.
        /// </summary>
        /// <param name="x">Feature matrix; if null (together with y), the iris data is loaded.</param>
        /// <param name="y">Target labels; if null (together with x), the iris targets are used.</param>
        /// <param name="binary">If true, restrict the task to the first two classes.</param>
        private static Tuple <int[], int[], Matrix <double> > MakePrediction(
            Matrix <double> x = null,
            int[] y           = null,
            bool binary       = false)
        {
            if (x == null && y == null)
            {
                // import some data to play with
                var dataset = IrisDataset.Load();
                x = dataset.Data;
                y = dataset.Target;
            }

            if (binary)
            {
                // restrict to a binary classification task
                x = x.RowsAt(y.Indices(v => v < 2));
                y = y.Where(v => v < 2).ToArray();
            }

            int nSamples  = x.RowCount;
            int nFeatures = x.ColumnCount;
            var rng       = new Random(37);

            int[] p = Shuffle(rng, Enumerable.Range(0, nSamples).ToArray());
            x = x.RowsAt(p);
            y = y.ElementsAt(p);
            var half = nSamples / 2;

            // add noisy features to make the problem harder and avoid perfect results
            rng = new Random(0);
            x   = x.HStack(DenseMatrix.CreateRandom(nSamples, 200, new Normal {
                RandomSource = rng
            }));

            // run classifier, get class probabilities and label predictions
            var clf = new Svc <int>(kernel: Kernel.Linear, probability: true);

            clf.Fit(x.SubMatrix(0, half, 0, x.ColumnCount), y.Take(half).ToArray());
            Matrix <double> probasPred = clf.PredictProba(x.SubMatrix(half, x.RowCount - half, 0, x.ColumnCount));

            if (binary)
            {
                // only interested in probabilities of the positive case
                // XXX: do we really want a special API for the binary case?
                probasPred = probasPred.SubMatrix(0, probasPred.RowCount, 1, 1);
            }

            var yPred = clf.Predict(x.SubMatrix(half, x.RowCount - half, 0, x.ColumnCount));
            var yTrue = y.Skip(half).ToArray();

            return(Tuple.Create(yTrue, yPred, probasPred));
        }
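A minimal, hypothetical usage of the helper above (not part of the original listing): compute held-out accuracy from the returned tuple with plain LINQ. The 0.8 threshold is illustrative only.

            var result = MakePrediction(binary: false);
            int[] yTrue = result.Item1;
            int[] yPred = result.Item2;
            // fraction of matching labels on the held-out half
            double accuracy = yTrue.Zip(yPred, (t, p) => t == p ? 1.0 : 0.0).Average();
            Assert.IsTrue(accuracy > 0.8);   // illustrative threshold, not taken from the original tests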
Example #5
        public void TestSvcWithCustomKernel()
        {
            var clfLin = new Svc <int>(kernel: Kernel.Linear);

            clfLin.Fit(SparseMatrix.OfMatrix(X), Y);
            var clfMylin =
                new Svc <int>(kernel: Kernel.FromFunction((x, y) => x * y.Transpose()));

            clfMylin.Fit(SparseMatrix.OfMatrix(X), Y);
            Assert.IsTrue(
                clfLin.Predict(SparseMatrix.OfMatrix(X)).SequenceEqual(clfMylin.Predict(SparseMatrix.OfMatrix(X))));
        }
Example #6
 public void TestLibsvmIris()
 {
     // shuffle the dataset so that labels are not ordered
     foreach (var k in new[] { Kernel.Linear, Kernel.Rbf })
     {
         var clf = new Svc <int>(kernel: k);
         clf.Fit(iris.Data, iris.Target);
         var pred      = clf.Predict(iris.Data);
         var matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
         Assert.IsTrue(1.0 * matchingN / pred.Length > 0.9);
         Assert.IsTrue(clf.Classes.SequenceEqual(clf.Classes.OrderBy(v => v)));
     }
 }
Example #7
        public void TestSingleSample_1D()
        {
            var clf = new Svc <int>();

            clf.Fit(X, Y);
            var p = clf.Predict(X.Row(0).ToRowMatrix());

            Assert.AreEqual(Y[0], p[0]);

            //todo:
            //clf = svm.LinearSVC(random_state=0).fit(X, Y)
            //clf.predict(X[0])
        }
Example #8
        public void TestLibsvmParameters()
        {
            var clf = new Svc<int>(kernel: Kernel.Linear);
            clf.Fit(X, Y);
            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[,] {{0.25, -.25}})));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] {1, 3}));
            Assert.IsTrue(
                clf.SupportVectors.AlmostEquals(
                DenseMatrix.OfRows(2, X.ColumnCount, new[] {X.Row(1), X.Row(3)})));

            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] {0.0}));
            Assert.IsTrue(clf.Predict(X).SequenceEqual(Y));
        }
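For a linear kernel the primal weights are recoverable from the dual solution as Coef = DualCoef * SupportVectors. A hedged extra assertion that could follow the fit above (not in the original test):

            // assumes the standard identity coef = dual_coef * support_vectors for linear kernels
            Assert.IsTrue(clf.Coef.AlmostEquals(clf.DualCoef * clf.SupportVectors));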
Example #9
        /// <summary>
        /// Make some classification predictions on a toy dataset using an SVC.
        ///
        /// If binary is true, restrict to a binary classification problem instead of a
        /// multiclass classification problem.
        /// </summary>
        /// <param name="x">Feature matrix; if null (together with y), the iris data is loaded.</param>
        /// <param name="y">Target labels; if null (together with x), the iris targets are used.</param>
        /// <param name="binary">If true, restrict the task to the first two classes.</param>
        private static Tuple<int[], int[], Matrix<double>> MakePrediction(
            Matrix<double> x = null,
            int[] y = null,
            bool binary = false)
        {
            if (x == null && y == null)
            {
                // import some data to play with
                var dataset = IrisDataset.Load();
                x = dataset.Data;
                y = dataset.Target;
            }

            if (binary)
            {
                // restrict to a binary classification task
                x = x.RowsAt(y.Indices(v => v < 2));
                y = y.Where(v => v < 2).ToArray();
            }

            int nSamples = x.RowCount;
            int nFeatures = x.ColumnCount;
            var rng = new Random(37);
            int[] p = Shuffle(rng, Enumerable.Range(0, nSamples).ToArray());
            x = x.RowsAt(p);
            y = y.ElementsAt(p);
            var half = nSamples/2;

            // add noisy features to make the problem harder and avoid perfect results
            rng = new Random(0);
            x = x.HStack(DenseMatrix.CreateRandom(nSamples, 200, new Normal{RandomSource = rng}));

            // run classifier, get class probabilities and label predictions
            var clf = new Svc<int>(kernel: Kernel.Linear, probability: true);
            clf.Fit(x.SubMatrix(0, half, 0, x.ColumnCount), y.Take(half).ToArray());
            Matrix<double> probasPred = clf.PredictProba(x.SubMatrix(half, x.RowCount - half, 0, x.ColumnCount));

            if (binary)
            {
                // only interested in probabilities of the positive case
                // XXX: do we really want a special API for the binary case?
                probasPred = probasPred.SubMatrix(0, probasPred.RowCount, 1, 1);
            }

            var yPred = clf.Predict(x.SubMatrix(half, x.RowCount - half, 0, x.ColumnCount));
            var yTrue = y.Skip(half).ToArray();
            return Tuple.Create(yTrue, yPred, probasPred);
        }
Example #10
        public void TestLibsvmParameters()
        {
            var clf = new Svc <int>(kernel: Kernel.Linear);

            clf.Fit(X, Y);
            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[, ] {
                { 0.25, -.25 }
            })));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] { 1, 3 }));
            Assert.IsTrue(
                clf.SupportVectors.AlmostEquals(
                    DenseMatrix.OfRows(2, X.ColumnCount, new[] { X.Row(1), X.Row(3) })));

            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] { 0.0 }));
            Assert.IsTrue(clf.Predict(X).SequenceEqual(Y));
        }
Example #11
        /// <summary>
        /// Make sure some tweaking of parameters works.
        ///
        /// We change clf.DualCoef at run time and expect Predict() to change
        /// accordingly. Notice that this is not trivial, since it involves
        /// copying data between the classifier and the libsvm bindings.
        ///
        /// The success of this test ensures that the mapping between libsvm and
        /// the classifier is complete.
        /// </summary>
        public void TestTweakParams()
        {
            var clf = new Svc <int>(kernel: Kernel.Linear, c: 1.0);

            clf.Fit(X, Y);
            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[, ] {
                { 0.25, -0.25 }
            })));
            Assert.IsTrue(clf.Predict(DenseMatrix.OfArray(new[, ] {
                { -.1, -.1 }
            })).SequenceEqual(new[] { 1 }));
            clf.DualCoef = DenseMatrix.OfArray(new[, ] {
                { 0.0, 1.0 }
            });
            Assert.IsTrue(clf.Predict(DenseMatrix.OfArray(new[, ] {
                { -.1, -.1 }
            })).SequenceEqual(new[] { 2 }));
        }
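Why overwriting DualCoef flips the prediction: for a binary SVC the decision value is f(x) = sum_i DualCoef[0, i] * K(sv_i, x) + Intercept, so zeroing the first coefficient leaves only the second support vector's contribution. A hedged sketch of that computation, assuming the standard libsvm dual form and the linear kernel used above:

            var xNew = DenseMatrix.OfArray(new[, ] {
                { -.1, -.1 }
            });
            // K(sv_i, x) for a linear kernel is just the dot product sv_i . x
            var kernelColumn = clf.SupportVectors * xNew.Transpose();              // (nSV x 1)
            double decision  = (clf.DualCoef * kernelColumn)[0, 0] + clf.Intercept[0];
            // the sign of decision selects the predicted class (cf. TestDecisionFunction above)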
Example #12
        public void TestSvcWithCallableKernel()
        {
            // create SVM with callable linear kernel, check that results are the same
            // as with built-in linear kernel
            var svmCallable = new Svc <int>(kernel: Kernel.FromFunction((x, y) => x * (y.Transpose())),
                                            probability: true);

            svmCallable.Fit(X, Y);
            var svmBuiltin = new Svc <int>(kernel: Kernel.Linear, probability: true);

            svmBuiltin.Fit(X, Y);

            Assert.IsTrue(svmCallable.DualCoef.AlmostEquals(svmBuiltin.DualCoef));
            Assert.IsTrue(svmCallable.Intercept.AlmostEquals(svmBuiltin.Intercept));
            Assert.IsTrue(svmCallable.Predict(X).SequenceEqual(svmBuiltin.Predict(X)));

            Assert.IsTrue(svmCallable.PredictProba(X).AlmostEquals(svmBuiltin.PredictProba(X), 1));
            Assert.IsTrue(svmCallable.DecisionFunction(X).AlmostEquals(svmBuiltin.DecisionFunction(X), 2));
        }
Example #13
        public void TestSvcIris()
        {
            foreach (var k in new[] { Kernel.Linear, Kernel.Poly, Kernel.Rbf })
            {
                var spClf = new Svc <int>(kernel: k);
                spClf.Fit(SparseMatrix.OfMatrix(iris.Data), iris.Target);
                var clf = new Svc <int>(kernel: k);
                clf.Fit(DenseMatrix.OfMatrix(iris.Data), iris.Target);

                Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
                Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
                Assert.IsTrue(
                    clf.Predict(DenseMatrix.OfMatrix(iris.Data)).SequenceEqual(
                        spClf.Predict(SparseMatrix.OfMatrix(iris.Data))));

                if (k == Kernel.Linear)
                {
                    Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
                }
            }
        }
Example #14
        public void test_weight()
        {
            var clf = new Svc <int>(classWeightEstimator: ClassWeightEstimator <int> .Explicit(new Dictionary <int, double> {
                { 1, 0.1 }
            }));

            // we give a small weight to class 1
            clf.Fit(X, Y);
            // so all predicted values belong to class 2
            Assert.IsTrue(clf.Predict(X).SequenceEqual(Enumerable.Repeat(2, 6)));

            /*
             * X_, y_ = make_classification(n_samples=200, n_features=10,
             *                   weights=[0.833, 0.167], random_state=2)
             *
             * for clf in (linear_model.LogisticRegression(),
             *  svm.LinearSVC(random_state=0), svm.SVC()):
             * clf.set_params(class_weight={0: .1, 1: 10})
             * clf.fit(X_[:100], y_[:100])
             * y_pred = clf.predict(X_[100:])
             * assert_true(f1_score(y_[100:], y_pred) > .3)
             * */
        }
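A hypothetical mirror of the check above (not in the original test), assuming the same symmetric toy X and Y used throughout these tests: giving the small weight to class 2 instead should push every prediction to class 1. The weight value is illustrative only.

            var clfFlipped = new Svc <int>(classWeightEstimator: ClassWeightEstimator <int> .Explicit(new Dictionary <int, double> {
                { 2, 0.1 }
            }));

            // now class 2 carries the small weight, so the symmetric toy problem flips the other way
            clfFlipped.Fit(X, Y);
            Assert.IsTrue(clfFlipped.Predict(X).SequenceEqual(Enumerable.Repeat(1, 6)));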
Example #15
        public void TestSingleSample_1D()
        {
            var clf = new Svc<int>();
            clf.Fit(X, Y);
            var p = clf.Predict(X.Row(0).ToRowMatrix());
            Assert.AreEqual(Y[0], p[0]);

            //todo:
            //clf = svm.LinearSVC(random_state=0).fit(X, Y)
            //clf.predict(X[0])
        }
Example #16
        public void TestSparseRealdata()
        {
            var x = new SparseMatrix(80, 36);
            x[7, 6] = 0.03771744;
            x[39, 5] = 0.1003567;
            x[77, 35] = 0.01174647;
            x[77, 31] = 0.027069;

            var y = new[]
                             {
                                 1.0, 0.0, 2.0, 2.0, 1.0, 1.0, 1.0, 2.0, 2.0, 0.0, 1.0, 2.0, 2.0,
                                 0.0, 2.0, 0.0, 3.0, 0.0, 3.0, 0.0, 1.0, 1.0, 3.0, 2.0, 3.0, 2.0,
                                 0.0, 3.0, 1.0, 0.0, 2.0, 1.0, 2.0, 0.0, 1.0, 0.0, 2.0, 3.0, 1.0,
                                 3.0, 0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 2.0, 2.0, 2.0, 3.0, 2.0,
                                 0.0, 3.0, 2.0, 1.0, 2.0, 3.0, 2.0, 2.0, 0.0, 1.0, 0.0, 1.0, 2.0,
                                 3.0, 0.0, 0.0, 2.0, 2.0, 1.0, 3.0, 1.0, 1.0, 0.0, 1.0, 2.0, 1.0,
                                 1.0, 3.0
                             };

            var clf = new Svc<double>(kernel: Kernel.Linear);
            clf.Fit(DenseMatrix.OfMatrix(x), y);
            var spClf = new Svc<double>(kernel: Kernel.Linear);
            spClf.Fit(x, y);

            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
        }
Example #17
        public void TestSvcIris()
        {
            foreach (var k in new[] {Kernel.Linear, Kernel.Poly, Kernel.Rbf})
            {
                var spClf = new Svc<int>(kernel: k);
                spClf.Fit(SparseMatrix.OfMatrix(iris.Data), iris.Target);
                var clf = new Svc<int>(kernel: k);
                clf.Fit(DenseMatrix.OfMatrix(iris.Data), iris.Target);

                Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
                Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
                Assert.IsTrue(
                    clf.Predict(DenseMatrix.OfMatrix(iris.Data)).SequenceEqual(
                        spClf.Predict(SparseMatrix.OfMatrix(iris.Data))));

                if (k == Kernel.Linear)
                {
                    Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
                }
            }
        }
Example #18
 public void TestSvcWithCustomKernel()
 {
     var clfLin = new Svc<int>(kernel: Kernel.Linear);
     clfLin.Fit(SparseMatrix.OfMatrix(X), Y);
     var clfMylin =
         new Svc<int>(kernel: Kernel.FromFunction((x, y) => x*y.Transpose()));
     clfMylin.Fit(SparseMatrix.OfMatrix(X), Y);
     Assert.IsTrue(
         clfLin.Predict(SparseMatrix.OfMatrix(X)).SequenceEqual(clfMylin.Predict(SparseMatrix.OfMatrix(X))));
 }
Example #19
 public void TestSvcBadKernel()
 {
      // a callable "kernel" that returns its first argument instead of a Gram matrix
      var svc = new Svc<int>(kernel: Kernel.FromFunction((x, y) => x));
     svc.Fit(X, Y);
 }
Example #20
        public void TestSvcWithCallableKernel()
        {
            // create SVM with callable linear kernel, check that results are the same
            // as with built-in linear kernel
            var svmCallable = new Svc<int>(kernel: Kernel.FromFunction((x, y) => x*(y.Transpose())),
                                            probability: true);

            svmCallable.Fit(X, Y);
            var svmBuiltin = new Svc<int>(kernel: Kernel.Linear, probability: true);
            svmBuiltin.Fit(X, Y);

            Assert.IsTrue(svmCallable.DualCoef.AlmostEquals(svmBuiltin.DualCoef));
            Assert.IsTrue(svmCallable.Intercept.AlmostEquals(svmBuiltin.Intercept));
            Assert.IsTrue(svmCallable.Predict(X).SequenceEqual(svmBuiltin.Predict(X)));

            Assert.IsTrue(svmCallable.PredictProba(X).AlmostEquals(svmBuiltin.PredictProba(X), 1));
            Assert.IsTrue(svmCallable.DecisionFunction(X).AlmostEquals(svmBuiltin.DecisionFunction(X), 2));
        }
Example #21
        public void test_weight()
        {
            var clf = new Svc<int>(classWeightEstimator: ClassWeightEstimator<int>.Explicit(new Dictionary<int, double> {{1, 0.1}}));

            // we give a small weight to class 1
            clf.Fit(X, Y);
            // so all predicted values belong to class 2
            Assert.IsTrue(clf.Predict(X).SequenceEqual(Enumerable.Repeat(2, 6)));
            /*
    X_, y_ = make_classification(n_samples=200, n_features=10,
                                 weights=[0.833, 0.167], random_state=2)

    for clf in (linear_model.LogisticRegression(),
                svm.LinearSVC(random_state=0), svm.SVC()):
        clf.set_params(class_weight={0: .1, 1: 10})
        clf.fit(X_[:100], y_[:100])
        y_pred = clf.predict(X_[100:])
        assert_true(f1_score(y_[100:], y_pred) > .3)
             * */
        }
Example #22
        public void TestDecisionFunction()
        {
            // multi class:
            var clf = new Svc<int>(kernel: Kernel.Linear, c: 0.1);
            clf.Fit(iris.Data, iris.Target);

            var dec = (iris.Data*clf.Coef.Transpose()).AddRowVector(clf.Intercept);

            Assert.IsTrue(dec.AlmostEquals(clf.DecisionFunction(iris.Data)));

            // binary:
            clf.Fit(X, Y);
            dec = (X*clf.Coef.Transpose()).AddRowVector(clf.Intercept);
            int[] prediction = clf.Predict(X);
            Assert.IsTrue(dec.AlmostEquals(clf.DecisionFunction(X)));

            var b = clf.DecisionFunction(X).Column(0).Select(v => clf.Classes[v > 0 ? 1 : 0]);
            Assert.IsTrue(prediction.SequenceEqual(b));

            var expected = DenseMatrix.OfArray(new[,] {{-1.0}, {-0.66}, {-1.0}, {0.66}, {1.0}, {1.0}});
            Assert.IsTrue(clf.DecisionFunction(X).AlmostEquals(expected, 1E-2));
        }
Example #23
 /// <summary>
 /// Make sure some tweaking of parameters works.
 ///
 /// We change clf.DualCoef at run time and expect Predict() to change
 /// accordingly. Notice that this is not trivial, since it involves
 /// copying data between the classifier and the libsvm bindings.
 ///
 /// The success of this test ensures that the mapping between libsvm and
 /// the classifier is complete.
 /// </summary>
 public void TestTweakParams()
 {
     var clf = new Svc<int>(kernel: Kernel.Linear, c: 1.0);
     clf.Fit(X, Y);
     Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[,]{{0.25, -0.25}})));
     Assert.IsTrue(clf.Predict(DenseMatrix.OfArray(new[,] {{-.1, -.1}})).SequenceEqual(new[] {1}));
     clf.DualCoef = DenseMatrix.OfArray(new[,] {{0.0, 1.0}});
     Assert.IsTrue(clf.Predict(DenseMatrix.OfArray(new[,] {{-.1, -.1}})).SequenceEqual(new[] {2}));
 }
Example #24
        public void TestPrecomputed()
        {
            var clf = new Svc <int>(kernel: Kernel.Precomputed);
            // Gram matrix for train data (square matrix)
            // (we use just a linear kernel)
            var k = X * (X.Transpose());

            clf.Fit(k, Y);
            // Gram matrix for test data (rectangular matrix)
            var kt   = T * X.Transpose();
            var pred = clf.Predict(kt);

            try
            {
                clf.Predict(kt.Transpose());
                Assert.Fail();
            }
            catch (ArgumentException)
            {
            }

            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[, ] {
                { 0.25, -.25 }
            })));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] { 1, 3 }));
            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] { 0.0 }));
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // Gram matrix for test data but compute KT[i,j]
            // for support vectors j only.
            kt = kt.CreateMatrix(kt.RowCount, kt.ColumnCount);
            for (int i = 0; i < T.RowCount; i++)
            {
                foreach (var j in clf.Support)
                {
                    kt[i, j] = T.Row(i) * X.Row(j);
                }
            }

            pred = clf.Predict(kt);
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // same as before, but using a callable function instead of the kernel
            // matrix. kernel is just a linear kernel

            clf = new Svc <int>(kernel: Kernel.FromFunction((x, y) => x * y.Transpose()));
            clf.Fit(X, Y);
            pred = clf.Predict(T);

            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[, ] {
                { 0.25, -.25 }
            })));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] { 1, 3 }));
            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] { 0.0 }));
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // test a precomputed kernel with the iris dataset
            // and check parameters against a linear SVC
            clf = new Svc <int>(kernel: Kernel.Precomputed);
            var clf2 = new Svc <int>(kernel: Kernel.Linear);

            k = iris.Data * iris.Data.Transpose();
            clf.Fit(k, iris.Target);
            clf2.Fit(iris.Data, iris.Target);
            pred = clf.Predict(k);
            Assert.IsTrue(clf.Support.SequenceEqual(clf2.Support));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(clf2.DualCoef));
            Assert.IsTrue(clf.Intercept.AlmostEquals(clf2.Intercept));

            var matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();

            Assert.IsTrue(1.0 * matchingN / pred.Length > 0.99);

            // Gram matrix for test data but compute KT[i,j]
            // for support vectors j only.
            k = k.CreateMatrix(k.RowCount, k.ColumnCount);
            for (int i = 0; i < iris.Data.RowCount; i++)
            {
                foreach (var j in clf.Support)
                {
                    k[i, j] = iris.Data.Row(i) * iris.Data.Row(j);
                }
            }

            pred      = clf.Predict(k);
            matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
            Assert.IsTrue(1.0 * matchingN / pred.Length > 0.99);

            // same check again, this time fitting with a callable linear kernel
            clf = new Svc <int>(kernel: Kernel.FromFunction((x, y) => x * y.Transpose()));
            clf.Fit(iris.Data, iris.Target);
            pred      = clf.Predict(iris.Data);
            matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
            Assert.IsTrue(1.0 * matchingN / pred.Length > 0.99);
        }
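The reason the kt.Transpose() call above must fail: with a precomputed kernel, Predict expects a Gram matrix with one row per test sample and one column per training sample. A hedged sanity check that could sit next to the try/catch:

            // kt is (n_test x n_train); its transpose has the wrong number of columns
            Assert.AreEqual(T.RowCount, kt.RowCount);
            Assert.AreEqual(X.RowCount, kt.ColumnCount);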
Example #25
        public void TestSvc()
        {
            var clf = new Svc<int>(kernel: Kernel.Linear, probability: true);
            clf.Fit(X, Y);

            var spClf = new Svc<int>(kernel: Kernel.Linear, probability: true);
            spClf.Fit(SparseMatrix.OfMatrix(X), Y);

            Assert.IsTrue(spClf.Predict(T).SequenceEqual(true_result));

            Assert.IsTrue(spClf.SupportVectors is SparseMatrix);
            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));

            Assert.IsTrue(spClf.DualCoef is SparseMatrix);
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));

            Assert.IsTrue(spClf.Coef is SparseMatrix);
            Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
            Assert.IsTrue(clf.Support.SequenceEqual(spClf.Support));
            Assert.IsTrue(clf.Predict(T).SequenceEqual(spClf.Predict(T)));

            // refit with a different dataset

            clf.Fit(X2, Y2);
            spClf.Fit(SparseMatrix.OfMatrix(X2), Y2);
            Assert.IsTrue(clf.SupportVectors.AlmostEquals(spClf.SupportVectors));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(spClf.DualCoef));
            Assert.IsTrue(clf.Coef.AlmostEquals(spClf.Coef));
            Assert.IsTrue(clf.Support.SequenceEqual(spClf.Support));
            Assert.IsTrue(clf.Predict(T2).SequenceEqual(spClf.Predict(T2)));
            Assert.IsTrue(clf.PredictProba(T2).AlmostEquals(spClf.PredictProba(T2), 0.001));
        }
Example #26
        public void TestSvcBadKernel()
        {
            // a callable "kernel" that returns its first argument instead of a Gram matrix
            var svc = new Svc <int>(kernel: Kernel.FromFunction((x, y) => x));

            svc.Fit(X, Y);
        }
Example #27
 public void TestLibsvmIris()
 {
     // shuffle the dataset so that labels are not ordered
     foreach (var k in new[] {Kernel.Linear, Kernel.Rbf})
     {
         var clf = new Svc<int>(kernel: k);
         clf.Fit(iris.Data, iris.Target);
         var pred = clf.Predict(iris.Data);
         var matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
         Assert.IsTrue(1.0*matchingN/pred.Length > 0.9);
         Assert.IsTrue(clf.Classes.SequenceEqual(clf.Classes.OrderBy(v => v)));
     }
 }
Example #28
        public void TestPrecomputed()
        {
            var clf = new Svc<int>(kernel: Kernel.Precomputed);
            // Gram matrix for train data (square matrix)
            // (we use just a linear kernel)
            var k = X*(X.Transpose());
            clf.Fit(k, Y);
            // Gram matrix for test data (rectangular matrix)
            var kt = T*X.Transpose();
            var pred = clf.Predict(kt);
            try
            {
                clf.Predict(kt.Transpose());
                Assert.Fail();
            }
            catch (ArgumentException)
            {
            }

            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[,] {{0.25, -.25}})));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] {1, 3}));
            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] {0.0}));
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // Gram matrix for test data but compute KT[i,j]
            // for support vectors j only.
            kt = kt.CreateMatrix(kt.RowCount, kt.ColumnCount);
            for (int i = 0; i < T.RowCount; i++)
            {
                foreach (var j in clf.Support)
                {
                    kt[i, j] = T.Row(i)*X.Row(j);
                }
            }

            pred = clf.Predict(kt);
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // same as before, but using a callable function instead of the kernel
            // matrix. kernel is just a linear kernel

            clf = new Svc<int>(kernel: Kernel.FromFunction((x, y) => x*y.Transpose()));
            clf.Fit(X, Y);
            pred = clf.Predict(T);

            Assert.IsTrue(clf.DualCoef.AlmostEquals(DenseMatrix.OfArray(new[,] {{0.25, -.25}})));
            Assert.IsTrue(clf.Support.SequenceEqual(new[] {1, 3}));
            Assert.IsTrue(clf.Intercept.SequenceEqual(new[] {0.0}));
            Assert.IsTrue(pred.SequenceEqual(true_result));

            // test a precomputed kernel with the iris dataset
            // and check parameters against a linear SVC
            clf = new Svc<int>(kernel: Kernel.Precomputed);
            var clf2 = new Svc<int>(kernel: Kernel.Linear);
            k = iris.Data*iris.Data.Transpose();
            clf.Fit(k, iris.Target);
            clf2.Fit(iris.Data, iris.Target);
            pred = clf.Predict(k);
            Assert.IsTrue(clf.Support.SequenceEqual(clf2.Support));
            Assert.IsTrue(clf.DualCoef.AlmostEquals(clf2.DualCoef));
            Assert.IsTrue(clf.Intercept.AlmostEquals(clf2.Intercept));

            var matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
            Assert.IsTrue(1.0*matchingN/pred.Length > 0.99);

            // Gram matrix for test data but compute KT[i,j]
            // for support vectors j only.
            k = k.CreateMatrix(k.RowCount, k.ColumnCount);
            for (int i = 0; i < iris.Data.RowCount; i++)
            {
                foreach (var j in clf.Support)
                    k[i, j] = iris.Data.Row(i)*iris.Data.Row(j);
            }

            pred = clf.Predict(k);
            matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
            Assert.IsTrue(1.0*matchingN/pred.Length > 0.99);

            // same check again, this time fitting with a callable linear kernel
            clf = new Svc<int>(kernel: Kernel.FromFunction((x, y) => x*y.Transpose()));
            clf.Fit(iris.Data, iris.Target);
            pred = clf.Predict(iris.Data);
            matchingN = pred.Zip(iris.Target, Tuple.Create).Where(t => t.Item1 == t.Item2).Count();
            Assert.IsTrue(1.0*matchingN/pred.Length > 0.99);
        }