public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 0.8;
        double tau = 1e-6;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel= new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        KRR krr = new KRR(tau, kernel, labels);
        krr.train(feats_train);

        kernel.init(feats_train, feats_test);
        double[] out_labels = krr.apply().get_labels();

        foreach(double item in out_labels) {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
	public static void Main() {
		modshogun.init_shogun_with_defaults();
		double width = 2.1;
		double epsilon = 1e-5;
		double C = 1.0;

		double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

		double[] trainlab = Load.load_labels("../data/label_train_multiclass.dat");

		RealFeatures feats_train = new RealFeatures();
		feats_train.set_feature_matrix(traindata_real);
		RealFeatures feats_test = new RealFeatures();
		feats_test.set_feature_matrix(testdata_real);

		GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

		MulticlassLabels labels = new MulticlassLabels(trainlab);

		LaRank svm = new LaRank(C, kernel, labels);
		svm.set_batch_mode(false);
		svm.set_epsilon(epsilon);
		svm.train();
		double[] out_labels = LabelsFactory.to_multiclass(svm.apply(feats_train)).get_labels();

		foreach(double item in out_labels) {
			Console.Write(item);
		}

	}
Example #3
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.2;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_test, width);
        DoubleMatrix km_train = kernel.get_kernel_matrix();
        AsciiFile f = new AsciiFile("gaussian_train.ascii", 'w');
        kernel.save(f);

        kernel.init(feats_train, feats_test);
        DoubleMatrix km_test = kernel.get_kernel_matrix();
        AsciiFile f_test = new AsciiFile("gaussian_test.ascii", 'w');
        kernel.save(f_test);

        Console.WriteLine(km_train.ToString());
        Console.WriteLine(km_test.ToString());

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 0.8;
        int C = 1;
        double epsilon = 1e-5;
        double tube_epsilon = 1e-2;
        int num_threads = 3;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel= new GaussianKernel(feats_train, feats_train, width);

        RegressionLabels labels = new RegressionLabels(trainlab);

        SVRLight svr = new SVRLight(C, epsilon, kernel, labels);
        svr.set_tube_epsilon(tube_epsilon);
        //svr.parallel.set_num_threads(num_threads);
        svr.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = RegressionLabels.obtain_from_generic(svr.apply()).get_labels();

        foreach (double item in out_labels)
            Console.Write(item);

        modshogun.exit_shogun();
    }
Example #5
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 0.8;
        double tau   = 1e-6;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures   feats_train = new RealFeatures(traindata_real);
        RealFeatures   feats_test  = new RealFeatures(testdata_real);
        GaussianKernel kernel      = new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        KRR krr = new KRR(tau, kernel, labels);

        krr.train(feats_train);

        kernel.init(feats_train, feats_test);
        double[] out_labels = krr.apply().get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
        public void SanFranciscoCrimeSVMClassificationDataSetTest()
        {
            DataSetLoader dataSetLoader = new DataSetLoader();

            Console.WriteLine(" Reading DataSet.. ");
            var           crimes        = dataSetLoader.SelectCrimes();
            Kernel        kernel        = new GaussianKernel(0.9);
            SVMClassifier svmClassifier =
                new SVMClassifier(crimes, kernel);

            svmClassifier.Train();
            var crimeTests  = dataSetLoader.SelectCrimes();
            var trueCounter = 0;
            var counter     = 0;

            foreach (var item in crimeTests)
            {
                var outputValue = svmClassifier.Classify(item.Item1);
                if (outputValue == item.Item2)
                {
                    trueCounter++;
                }
                Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                                              item.Item2, outputValue, (outputValue == item.Item2) ? "true" : "false"));
                counter++;
            }
            Debug.WriteLine(string.Format("Data {0} - True {1} Ratio: {2}",
                                          counter.ToString(), trueCounter.ToString(), (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
        }
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        int num = 1000;
        double dist = 1.0;
        double width = 2.1;
        double C = 1.0;

        DoubleMatrix offs =ones(2, num).mmul(dist);
        DoubleMatrix x = randn(2, num).sub(offs);
        DoubleMatrix y = randn(2, num).add(offs);
        DoubleMatrix traindata_real = concatHorizontally(x, y);

        DoubleMatrix m = randn(2, num).sub(offs);
        DoubleMatrix n = randn(2, num).add(offs);
        DoubleMatrix testdata_real = concatHorizontally(m, n);

        DoubleMatrix o = ones(1,num);
        DoubleMatrix trainlab = concatHorizontally(o.neg(), o);
        DoubleMatrix testlab = concatHorizontally(o.neg(), o);

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
        Labels labels = new Labels(trainlab);
        LibSVM svm = new LibSVM(C, kernel, labels);
        svm.train();

        DoubleMatrix @out = svm.apply(feats_test).get_labels();

        Console.WriteLine("Mean Error = " + signum(@out).ne(testlab).mean());
        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.3;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        double[,] km_train = kernel.get_kernel_matrix();
        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        foreach(double item in km_train) {
            Console.Write(item);
        }

        foreach(double item in km_test) {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
	public static void Main() {
		modshogun.init_shogun_with_defaults();
		double width = 2.1;
		double epsilon = 1e-5;
		double C = 1.0;

		double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

		RealFeatures feats_train = new RealFeatures();
		feats_train.set_feature_matrix(traindata_real);
		RealFeatures feats_test = new RealFeatures();
		feats_test.set_feature_matrix(testdata_real);

		GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

		LibSVMOneClass svm = new LibSVMOneClass(C, kernel);
		svm.set_epsilon(epsilon);
		svm.train();

		kernel.init(feats_train, feats_test);
		double[] out_labels = svm.apply().get_labels();

		foreach (double item in out_labels)
		    Console.Write(item);

		modshogun.exit_shogun();
	}
Example #10
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_multiclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        LaRank svm = new LaRank(C, kernel, labels);
        svm.set_batch_mode(false);
        svm.set_epsilon(epsilon);
        svm.train();
        DoubleMatrix out_labels = svm.apply(feats_train).get_labels();
        Console.WriteLine(out_labels.ToString());

        modshogun.exit_shogun();
    }
        public void fit_gaussian_test()
        {
            #region doc_fit_gaussian
            // Suppose we have the following data, and we would
            // like to estimate a distribution from this data

            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 6, 1 },
                new double[] { 5, 7 },
                new double[] { 2, 1 },
            };

            // Start by specifying a density kernel
            IDensityKernel kernel = new GaussianKernel(dimension: 2);

            // The density kernel gives a window function centered in a particular sample.
            // By creating one of those windows for each sample, we can achieve an empirical
            // multivariate distribution function. An output example for a single Gaussian
            // kernel would be:
            double z = kernel.Function(new double[] { 0, 1 }); // should be 0.096532352630053914


            // Create a multivariate Empirical distribution from the samples
            var dist = new MultivariateEmpiricalDistribution(kernel, samples);

            // Common measures
            double[] mean   = dist.Mean;     // { 3.71, 2.00 }
            double[] median = dist.Median;   // { 3.71, 2.00 }
            double[] var    = dist.Variance; // { 7.23, 5.00 } (diagonal from cov)
            double[,] cov = dist.Covariance; // { { 7.23, 0.83 }, { 0.83, 5.00 } }

            // Probability density functions
            double pdf1 = dist.ProbabilityDensityFunction(new double[] { 2, 1 });    // 0.017657515909330332
            double pdf2 = dist.ProbabilityDensityFunction(new double[] { 4, 2 });    // 0.011581172997320841
            double pdf3 = dist.ProbabilityDensityFunction(new double[] { 5, 7 });    // 0.0072297668067630525
            double lpdf = dist.LogProbabilityDensityFunction(new double[] { 5, 7 }); // -4.929548496891365
            #endregion

            Assert.AreEqual(0.096532352630053914, z);

            Assert.AreEqual(3.7142857142857144, mean[0]);
            Assert.AreEqual(2.0, mean[1]);
            Assert.AreEqual(3.7142857142857144, median[0]);
            Assert.AreEqual(2.0, median[1]);
            Assert.AreEqual(7.2380952380952381, var[0]);
            Assert.AreEqual(5.0, var[1]);
            Assert.AreEqual(7.2380952380952381, cov[0, 0]);
            Assert.AreEqual(0.83333333333333337, cov[0, 1]);
            Assert.AreEqual(0.83333333333333337, cov[1, 0]);
            Assert.AreEqual(5.0, cov[1, 1]);
            Assert.AreEqual(0.017657515909330332, pdf1);
            Assert.AreEqual(0.011581172997320841, pdf2);
            Assert.AreEqual(0.0072297668067630525, pdf3);
            Assert.AreEqual(-4.929548496891365, lpdf);
        }
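As a quick sanity check on the z value asserted above (a sketch only, assuming the density kernel is the standard multivariate normal density K(x) = (2π)^(-d/2) · exp(-‖x‖²/2)):

            // Hand check of the kernel value asserted above; assumes the standard
            // multivariate normal density K(x) = (2*pi)^(-d/2) * exp(-||x||^2 / 2) with d = 2.
            double dCheck = 2.0;
            double normSq = 0.0 * 0.0 + 1.0 * 1.0;            // squared norm of the point { 0, 1 }
            double zCheck = Math.Pow(2 * Math.PI, -dCheck / 2) * Math.Exp(-0.5 * normSq);
            Console.WriteLine(zCheck);                        // ≈ 0.096532352630053914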
	public static void Main() {

		modshogun.init_shogun_with_defaults();
		double width = 0.8;
		int C = 1;
		double epsilon = 1e-5;
		double tube_epsilon = 1e-2;

		double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

		double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

		RealFeatures feats_train = new RealFeatures(traindata_real);
		RealFeatures feats_test = new RealFeatures(testdata_real);
		GaussianKernel kernel= new GaussianKernel(feats_train, feats_train, width);

		Labels labels = new Labels(trainlab);

		LibSVR svr = new LibSVR(C, epsilon, kernel, labels);
		svr.set_tube_epsilon(tube_epsilon);
		svr.train();

		kernel.init(feats_train, feats_test);
		double[] out_labels = svr.apply().get_labels();
		
		foreach (double item in out_labels)
		    Console.Write(item);

		modshogun.exit_shogun();

	}
	public static void Main() {
		modshogun.init_shogun_with_defaults();

		int num = 1000;
		double dist = 1.0;
		double width = 2.1;
		double C = 1.0;

		Random RandomNumber = new Random();

		double[,] traindata_real = new double[2, num * 2];
		for (int i = 0; i < num; i ++) {
			traindata_real[0, i] = RandomNumber.NextDouble() - dist;
			traindata_real[0, i + num] = RandomNumber.NextDouble() + dist;
			traindata_real[1, i] = RandomNumber.NextDouble() - dist;
			traindata_real[1, i + num] = RandomNumber.NextDouble() + dist;
		}

		double[,] testdata_real = new double[2, num * 2];
		for (int i = 0; i < num; i ++) {
			testdata_real[0, i] = RandomNumber.NextDouble() - dist;
			testdata_real[0, i + num] = RandomNumber.NextDouble() + dist;
			testdata_real[1, i] = RandomNumber.NextDouble() - dist;
			testdata_real[1, i + num] = RandomNumber.NextDouble() + dist;
		}

		double[] trainlab = new double[num * 2];
		for (int i = 0; i < num; i ++) {
			trainlab[i] = -1;
			trainlab[i + num] = 1;
		}

		double[] testlab = new double[num * 2];
		for (int i = 0; i < num; i ++) {
			testlab[i] = -1;
			testlab[i + num] = 1;
		}

		RealFeatures feats_train = new RealFeatures(traindata_real);
		RealFeatures feats_test = new RealFeatures(testdata_real);
		GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
		BinaryLabels labels = new BinaryLabels(trainlab);
		LibSVM svm = new LibSVM(C, kernel, labels);
		svm.train();

		double[] result = LabelsFactory.to_binary(svm.apply(feats_test)).get_labels();

		int err_num = 0;
		for (int i = 0; i < num; i++) {
			if (result[i] > 0) {
				err_num += 1;
			}
			if (result[i+num] < 0) {
				err_num += 1;
			}
		}

		double testerr = (double)err_num / (2 * num);
		Console.WriteLine(testerr);
	}
Example #14
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width        = 0.8;
        int    C            = 1;
        double epsilon      = 1e-5;
        double tube_epsilon = 1e-2;
        int    num_threads  = 3;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures   feats_train = new RealFeatures(traindata_real);
        RealFeatures   feats_test  = new RealFeatures(testdata_real);
        GaussianKernel kernel      = new GaussianKernel(feats_train, feats_train, width);

        RegressionLabels labels = new RegressionLabels(trainlab);

        SVRLight svr = new SVRLight(C, epsilon, kernel, labels);

        svr.set_tube_epsilon(tube_epsilon);
        //svr.parallel.set_num_threads(num_threads);
        svr.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = LabelsFactory.to_regression(svr.apply()).get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }
    }
	public static void Main() {
		modshogun.init_shogun_with_defaults();
		double width = 2.1;
		double epsilon = 1e-5;
		double C = 1.0;

		double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

		//  already tried double[,]
		double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

		RealFeatures feats_train = new RealFeatures();
		feats_train.set_feature_matrix(traindata_real);
		RealFeatures feats_test = new RealFeatures();
		feats_test.set_feature_matrix(testdata_real);

		GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

		BinaryLabels labels = new BinaryLabels(trainlab);

		MPDSVM svm = new MPDSVM(C, kernel, labels);
		svm.set_epsilon(epsilon);
		svm.train();

		kernel.init(feats_train, feats_test);
		//  already tried double[,]
		double[] out_labels = LabelsFactory.to_binary(svm.apply()).get_labels();

		foreach (double item in out_labels)
		      Console.Write(item);

	}
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        LibSVMOneClass svm = new LibSVMOneClass(C, kernel);
        svm.set_epsilon(epsilon);
        svm.train();

        kernel.init(feats_train, feats_test);
        DoubleMatrix out_labels = svm.apply().get_labels();
        Console.WriteLine(out_labels.ToString());

        modshogun.exit_shogun();
    }
Example #17
    public static void Main(string[] args)
    {
        Library.init_shogun();
        GaussianKernel k = new GaussianKernel();

        Console.WriteLine(k.get_width());
    }
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        int num = 1000;
        double dist = 1.0;
        double width = 2.1;
        double C = 1.0;

        DoubleMatrix offs =ones(2, num).mmul(dist);
        DoubleMatrix x = randn(2, num).sub(offs);
        DoubleMatrix y = randn(2, num).add(offs);
        DoubleMatrix traindata_real = concatHorizontally(x, y);

        DoubleMatrix o = ones(1,num);
        DoubleMatrix trainlab = concatHorizontally(o.neg(), o);
        DoubleMatrix testlab = concatHorizontally(o.neg(), o);

        RealFeatures feats = new RealFeatures(traindata_real);
        GaussianKernel kernel = new GaussianKernel(feats, feats, width);
        Labels labels = new Labels(trainlab);
        GMNPSVM svm = new GMNPSVM(C, kernel, labels);
        feats.add_preprocessor(new NormOne());
        feats.add_preprocessor(new LogPlusOne());
        feats.set_preprocessed(1);
        svm.train(feats);

        SerializableAsciiFile fstream = new SerializableAsciiFile("blaah.asc", 'w');
        //svm.save_serializable(fstream);

        modshogun.exit_shogun();
    }
Example #19
        static int[] Cluster(IEnumerable <int> integers, int bandwidth)
        {
            #if DEBUG
            var stopwatch = new Stopwatch();
            stopwatch.Start();
            #endif

            var kernel    = new GaussianKernel(1);
            var meanshift = new MeanShift(1, kernel, bandwidth);
            meanshift.UseParallelProcessing = false;

            var points = integers.Select(i => new[] { Convert.ToDouble(i) }).ToArray();

            // Compute the cluster assignments; MeanShift populates Clusters/Modes as a
            // side effect, so the returned label array is not needed here.
            meanshift.Compute(points);

            #if DEBUG
            stopwatch.Stop();
            Console.WriteLine($"Performed meanshift on {points.Length} points in {stopwatch.ElapsedMilliseconds}ms");
            #endif

            return(meanshift.Clusters.Modes.Select(m => Convert.ToInt32(m[0])).ToArray());
        }
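A minimal, hypothetical call of the Cluster helper above; the input values and bandwidth are made up purely for illustration:

            // Hypothetical usage of the Cluster helper above; inputs and bandwidth are illustrative only.
            int[] modes = Cluster(new[] { 1, 2, 3, 48, 50, 52 }, bandwidth: 5);
            foreach (int mode in modes)
                Console.WriteLine(mode);    // roughly one approximate mode per detected cluster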
Example #20
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.6;

        double[,] train_real = Load.load_numbers("../data/fm_train_real.dat");
        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures   feats_train = new RealFeatures(train_real);
        GaussianKernel subkernel   = new GaussianKernel(feats_train, feats_train, width);

        BinaryLabels labels = new BinaryLabels(trainlab);

        AUCKernel kernel = new AUCKernel(0, subkernel);

        kernel.setup_auc_maximization(labels);

        double[,] km_train = kernel.get_kernel_matrix();

        int numRows = km_train.GetLength(0);
        int numCols = km_train.GetLength(1);

        Console.Write("km_train:\n");

        for (int i = 0; i < numRows; i++)
        {
            for (int j = 0; j < numCols; j++)
            {
                Console.Write(km_train[i, j] + " ");
            }
            Console.Write("\n");
        }
    }
Example #21
    public static void Main(string[] args)
    {
        modshogun.init_shogun_with_defaults();
        GaussianKernel k = new GaussianKernel();

        Console.WriteLine(k.get_width());
    }
Example #22
	public static void Main() {
		modshogun.init_shogun_with_defaults();
		double width = 1.6;

		double[,] train_real = Load.load_numbers("../data/fm_train_real.dat");
		double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

		RealFeatures feats_train = new RealFeatures(train_real);
		GaussianKernel subkernel = new GaussianKernel(feats_train, feats_train, width);

		Labels labels = new Labels(trainlab);

		AUCKernel kernel = new AUCKernel(0, subkernel);
		kernel.setup_auc_maximization(labels);

		double[,] km_train = kernel.get_kernel_matrix();
		
		int numRows = km_train.GetLength(0);
		int numCols = km_train.GetLength(1);
		
		Console.Write("km_train:\n");
		
		for(int i = 0; i < numRows; i++){
			for(int j = 0; j < numCols; j++){
				Console.Write(km_train[i,j] +" ");
			}
			Console.Write("\n");
		}
		
		modshogun.exit_shogun();
	}
Example #23
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width   = 2.1;
        double epsilon = 1e-5;
        double C       = 1.0;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures();

        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();

        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        LibSVMOneClass svm = new LibSVMOneClass(C, kernel);

        svm.set_epsilon(epsilon);
        svm.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = BinaryLabels.obtain_from_generic(svm.apply()).get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
        private void DualPerceptron(List <Tuple <double[], double> > data)
        {
            Kernel kernel = new LinearKernel();

            foreach (var item in netMLObject.Options)
            {
                if (item == "linearkernel")
                {
                    kernel = new LinearKernel();
                }
                else if (item == "gaussiankernel")
                {
                    kernel = new GaussianKernel(1.0);
                }
                else if (item == "polynomialkernel")
                {
                    kernel = new PolynomialKernel(1);
                }
                else if (item == "logitkernel")
                {
                    kernel = new LogitKernel();
                }
                else if (item == "tanhkernel")
                {
                    kernel = new TanhKernel();
                }
            }
            classification = new DualPerceptronClassifier(data, kernel);
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.3;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test  = new RealFeatures(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        double[,] km_train = kernel.get_kernel_matrix();
        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        foreach (double item in km_train)
        {
            Console.Write(item);
        }

        foreach (double item in km_test)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
Example #26
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 0.8;
        double tau = 1e-6;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        KRR krr = new KRR(tau, kernel, labels);
        krr.train(feats_train);

        kernel.init(feats_train, feats_test);
        DoubleMatrix out_labels = krr.apply().get_labels();
        Console.WriteLine(out_labels.ToString());

        modshogun.exit_shogun();
    }
Example #27
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width        = 0.8;
        int    C            = 1;
        double epsilon      = 1e-5;
        double tube_epsilon = 1e-2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures   feats_train = new RealFeatures(traindata_real);
        RealFeatures   feats_test  = new RealFeatures(testdata_real);
        GaussianKernel kernel      = new GaussianKernel(feats_train, feats_train, width);

        RegressionLabels labels = new RegressionLabels(trainlab);

        LibSVR svr = new LibSVR(C, epsilon, kernel, labels);

        svr.set_tube_epsilon(tube_epsilon);
        svr.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = RegressionLabels.obtain_from_generic(svr.apply()).get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_multiclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        MulticlassLabels labels = new MulticlassLabels(trainlab);

        MulticlassLibSVM svm = new MulticlassLibSVM(C, kernel, labels);
        svm.set_epsilon(epsilon);
        svm.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = MulticlassLabels.obtain_from_generic(svm.apply()).get_labels();

        foreach (double item in out_labels)
            Console.Write(item);

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width   = 2.1;
        double epsilon = 1e-5;
        double C       = 1.0;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();

        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();

        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        BinaryLabels labels = new BinaryLabels(trainlab);

        GPBTSVM svm = new GPBTSVM(C, kernel, labels);

        svm.set_epsilon(epsilon);
        svm.train();
        kernel.init(feats_train, feats_test);
        double[] out_labels = LabelsFactory.to_binary(svm.apply()).get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }
    }
Example #30
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width    = 2.1;
        double epsilon  = 1e-5;
        double C        = 1.0;
        int    mkl_norm = 2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_multiclass.dat");

        CombinedKernel   kernel      = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test  = new CombinedFeatures();

        RealFeatures subkfeats1_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats1_test  = new RealFeatures(testdata_real);

        GaussianKernel subkernel = new GaussianKernel(10, width);

        feats_train.append_feature_obj(subkfeats1_train);
        feats_test.append_feature_obj(subkfeats1_test);
        kernel.append_kernel(subkernel);

        RealFeatures subkfeats2_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats2_test  = new RealFeatures(testdata_real);

        LinearKernel subkernel2 = new LinearKernel();

        feats_train.append_feature_obj(subkfeats2_train);
        feats_test.append_feature_obj(subkfeats2_test);
        kernel.append_kernel(subkernel2);

        RealFeatures subkfeats3_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats3_test  = new RealFeatures(testdata_real);

        PolyKernel subkernel3 = new PolyKernel(10, 2);

        feats_train.append_feature_obj(subkfeats3_train);
        feats_test.append_feature_obj(subkfeats3_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);

        MulticlassLabels labels = new MulticlassLabels(trainlab);

        MKLMulticlass mkl = new MKLMulticlass(C, kernel, labels);

        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mkl_norm);

        mkl.train();

        kernel.init(feats_train, feats_test);
        double[] outMatrix = LabelsFactory.to_multiclass(mkl.apply()).get_labels();
    }
Example #31
        private void doBayesianParzenAlgorithmTest()
        {
            var metric = new EuclideanMetric();
            var kernel = new GaussianKernel();
            var alg    = new BayesianParzenAlgorithm(metric, kernel, 1.0F);

            alg.Train(Data.TrainingSample);

            // LOO
            var hmin = 0.01D;
            var hmax = 5.0D;
            var step = 0.05D;

            StatUtils.OptimizeLOO(alg, hmin, hmax, step);
            var optH = alg.H;

            Console.WriteLine("Bayesian: optimal h is {0}", optH);
            Console.WriteLine();

            // Margins
            Console.WriteLine("Margins:");
            calculateMargin(alg);
            Console.WriteLine();

            //Error distribution
            var message = string.Empty;

            Console.WriteLine("Errors:");
            for (double h1 = hmin; h1 <= hmax; h1 = Math.Round(h1 + step, 8))
            {
                var h = h1;
                if (h <= optH && h + step > optH)
                {
                    h = optH;
                }

                alg.H = h;
                var errors = alg.GetErrors(Data.Data, 0, true);
                var ec     = errors.Count();
                var dc     = Data.Data.Count;
                var pct    = Math.Round(100.0F * ec / dc, 2);
                var mes    = string.Format("{0}:\t{1} of {2}\t({3}%) {4}", Math.Round(h, 2), ec, dc, pct, h == optH ? "<-LOO optimal" : string.Empty);
                Console.WriteLine(mes);

                if (h == optH)
                {
                    message = mes;
                }
            }
            Console.WriteLine();
            Console.WriteLine("-----------------------------------------");
            Console.WriteLine("Bayesian: optimal h is {0}", optH);
            Console.WriteLine(message);

            alg.H = optH;
            Visualizer.Run(alg);
        }
Example #32
        public void SetUp()
        {
            var kernel = new GaussianKernel(1, 1);

            _gp = new GaussianProcess(kernel);
            _gp.AddDataPoint(new DataPoint(1.02, 0.79));
            _gp.AddDataPoint(new DataPoint(1.99, 0.94));
            _gp.AddDataPoint(new DataPoint(4.04, 0.65));
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;
        int mkl_norm = 2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_multiclass.dat");

        CombinedKernel kernel = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test = new CombinedFeatures();

        RealFeatures subkfeats1_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats1_test = new RealFeatures(testdata_real);

        GaussianKernel subkernel = new GaussianKernel(10, width);
        feats_train.append_feature_obj(subkfeats1_train);
        feats_test.append_feature_obj(subkfeats1_test);
        kernel.append_kernel(subkernel);

        RealFeatures subkfeats2_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats2_test = new RealFeatures(testdata_real);

        LinearKernel subkernel2 = new LinearKernel();
        feats_train.append_feature_obj(subkfeats2_train);
        feats_test.append_feature_obj(subkfeats2_test);
        kernel.append_kernel(subkernel2);

        RealFeatures subkfeats3_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats3_test = new RealFeatures(testdata_real);

        PolyKernel subkernel3 = new PolyKernel(10, 2);
        feats_train.append_feature_obj(subkfeats3_train);
        feats_test.append_feature_obj(subkfeats3_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);

        MulticlassLabels labels = new MulticlassLabels(trainlab);

        MKLMulticlass mkl = new MKLMulticlass(C, kernel, labels);
        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mkl_norm);

        mkl.train();

        kernel.init(feats_train, feats_test);
        double[] outMatrix =  MulticlassLabels.obtain_from_generic(mkl.apply()).get_labels();

        modshogun.exit_shogun();
    }
Example #34
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var       kernel    = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 3);

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = 0;
            int b = 1;

            if (0.2358896594197982.IsRelativelyEqual(meanShift.Clusters.Modes[1][0], 1e-10))
            {
                a = 1;
                b = 0;
            }

            for (int i = 0; i < 5; i++)
            {
                Assert.AreEqual(a, labels[i]);
            }

            for (int i = 5; i < samples.Length; i++)
            {
                Assert.AreEqual(b, labels[i]);
            }

            Assert.AreEqual(0.2358896594197982, meanShift.Clusters.Modes[a][0], 1e-10);
            Assert.AreEqual(1.0010865560750339, meanShift.Clusters.Modes[a][1], 1e-10);

            Assert.AreEqual(6.7284908155626031, meanShift.Clusters.Modes[b][0], 1e-10);
            Assert.AreEqual(1.2713970467590967, meanShift.Clusters.Modes[b][1], 1e-10);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test  = new RealFeatures(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_test, width);

        double[,] km_train = kernel.get_kernel_matrix();
        AsciiFile f = new AsciiFile("gaussian_train.ascii", 'w');

        kernel.save(f);

        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();
        AsciiFile f_test = new AsciiFile("gaussian_test.ascii", 'w');

        kernel.save(f_test);

        //  Parse and Display km_train
        Console.Write("km_train:\n");
        int numRows = km_train.GetLength(0);
        int numCols = km_train.GetLength(1);

        for (int i = 0; i < numRows; i++)
        {
            for (int j = 0; j < numCols; j++)
            {
                Console.Write(km_train[i, j] + " ");
            }
            Console.Write("\n");
        }

        //  Parse and Display km_test
        Console.Write("\nkm_test:\n");
        numRows = km_test.GetLength(0);
        numCols = km_test.GetLength(1);

        for (int i = 0; i < numRows; i++)
        {
            for (int j = 0; j < numCols; j++)
            {
                Console.Write(km_test[i, j] + " ");
            }
            Console.Write("\n");
        }

        modshogun.exit_shogun();
    }
Example #36
 void Start()
 {
     _outlineMaterial           = new Material(Outline);
     TempCam                    = new GameObject().AddComponent <Camera>();
     TempCam.name               = "Outliner Camera";
     TempCam.transform.position = gameObject.transform.position;                             //Setting outliner camera in same coordinates as the main camera before setting it...
     TempCam.transform.rotation = gameObject.transform.rotation;                             //.. as its parent, so that when Unity parents it, it stays aligned where we want it
     TempCam.transform.SetParent(gameObject.transform);                                      //We set it as a child so it follows the main camera and takes advantage of the already implemented code
     TempCam.depth = 2;                                                                      //Cameras with a lower depth value render earlier (e.g. the main camera is at depth 0)
     kernel        = GaussianKernel.Calculate(5, 21);
 }
Example #37
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var       kernel    = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 2.0);

            meanShift.UseParallelProcessing = false;

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = labels[0];
            int b = (a == 0) ? 1 : 0;

            for (int i = 0; i < 5; i++)
            {
                Assert.AreEqual(a, labels[i]);
            }

            for (int i = 5; i < samples.Length; i++)
            {
                Assert.AreEqual(b, labels[i]);
            }

            Assert.AreEqual(1.1922811512028066, meanShift.Clusters.Modes[a][0], 1e-3);
            Assert.AreEqual(1.2567196159235963, meanShift.Clusters.Modes[a][1], 1e-3);

            Assert.AreEqual(5.2696337859175868, meanShift.Clusters.Modes[b][0], 1e-3);
            Assert.AreEqual(1.4380326532534968, meanShift.Clusters.Modes[b][1], 1e-3);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);

            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[0]);
            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[1]);
        }
Example #38
        private void doParzenFixedAlgorithmTest()
        {
            var timer = new System.Diagnostics.Stopwatch();

            timer.Start();

            var metric = new EuclideanMetric();
            var kernel = new GaussianKernel();
            var alg    = new ParzenFixedAlgorithm(metric, kernel, 1.0F);

            alg.Train(Data.TrainingSample);

            // LOO
            StatUtils.OptimizeLOO(alg, 0.1F, 20.0F, 0.2F);
            var optH = alg.H;

            Console.WriteLine("Parzen Fixed: optimal h is {0}", optH);
            Console.WriteLine();

            // Margins
            Console.WriteLine("Margins:");
            calculateMargin(alg);
            Console.WriteLine();

            //var x = algorithm.Classify(new Point(new double[] { -3, 0 }));

            //Error distribution
            Console.WriteLine("Errors:");
            var step = 0.1F;

            for (double h1 = step; h1 < 5; h1 += step)
            {
                var h = h1;
                if (h <= optH && h + step > optH)
                {
                    h = optH;
                }

                alg.H = h;
                var errors = alg.GetErrors(Data.Data, 0, true);
                var ec     = errors.Count();
                var dc     = Data.Data.Count;
                var pct    = Math.Round(100.0F * ec / dc, 2);
                Console.WriteLine("{0}:\t{1} of {2}\t({3}%) {4}", Math.Round(h, 2), ec, dc, pct, h == optH ? "<-LOO optimal" : string.Empty);
            }
            Console.WriteLine();

            Visualizer.Run(alg);

            timer.Stop();
            Console.WriteLine(timer.ElapsedMilliseconds / 1000.0F);
        }
        private void SupportVectorMachine(List <Tuple <double[], double> > data)
        {
            Kernel kernel   = new LinearKernel();
            double n        = 0.0;
            double C        = 0.0;
            bool   nAndCSet = false;

            foreach (var item in netMLObject.Options)
            {
                if (item == "linearkernel")
                {
                    kernel = new LinearKernel();
                }
                else if (item == "gaussiankernel")
                {
                    kernel = new GaussianKernel(1.0);
                }
                else if (item == "polynomialkernel")
                {
                    kernel = new PolynomialKernel(1);
                }
                else if (item == "logitkernel")
                {
                    kernel = new LogitKernel();
                }
                else if (item == "tanhkernel")
                {
                    kernel = new TanhKernel();
                }
            }
            foreach (var value in netMLObject.DoubleValues)
            {
                if (value.Key == "n")
                {
                    n        = value.Value;
                    nAndCSet = true;
                }
                else if (value.Key == "c")
                {
                    C        = value.Value;
                    nAndCSet = true;
                }
            }
            if (nAndCSet)
            {
                classification = new SVMClassifier(data, kernel, n, C);
            }
            else
            {
                classification = new SVMClassifier(data, kernel);
            }
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        int cardinality = 2;
        int cache       = 10;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");
        String[] fm_train_dna = Load.load_dna("../data/fm_train_dna.dat");
        String[] fm_test_dna  = Load.load_dna("../data/fm_test_dna.dat");

        RealFeatures subfeats_train = new RealFeatures(traindata_real);
        RealFeatures subfeats_test  = new RealFeatures(testdata_real);

        CombinedKernel   kernel      = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test  = new CombinedFeatures();

        GaussianKernel subkernel = new GaussianKernel(cache, 1.1);

        feats_train.append_feature_obj(subfeats_train);
        feats_test.append_feature_obj(subfeats_test);
        kernel.append_kernel(subkernel);

        StringCharFeatures subkfeats_train = new StringCharFeatures(fm_train_dna, EAlphabet.DNA);
        StringCharFeatures subkfeats_test  = new StringCharFeatures(fm_test_dna, EAlphabet.DNA);

        int degree = 3;

        FixedDegreeStringKernel subkernel2 = new FixedDegreeStringKernel(10, degree);

        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel2);

        subkfeats_train = new StringCharFeatures(fm_train_dna, EAlphabet.DNA);
        subkfeats_test  = new StringCharFeatures(fm_test_dna, EAlphabet.DNA);
        LocalAlignmentStringKernel subkernel3 = new LocalAlignmentStringKernel(10);

        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);
        double[,] km_train = kernel.get_kernel_matrix();

        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        modshogun.exit_shogun();
    }
Example #41
    public virtual object run(IList para)
    {
        modshogun.init_shogun_with_defaults();
        int cardinality = (int)((int?)para[0]);
        int size_cache = (int)((int?)para[1]);

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");
        string[] fm_train_dna = Load.load_dna("../data/fm_train_dna.dat");
        string[] fm_test_dna = Load.load_dna("../data/fm_test_dna.dat");

        RealFeatures subfeats_train = new RealFeatures(traindata_real);
        RealFeatures subfeats_test = new RealFeatures(testdata_real);

        CombinedKernel kernel = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test = new CombinedFeatures();

        GaussianKernel subkernel = new GaussianKernel(10, 1.1);
        feats_train.append_feature_obj(subfeats_train);
        feats_test.append_feature_obj(subfeats_test);
        kernel.append_kernel(subkernel);

        StringCharFeatures subkfeats_train = new StringCharFeatures(fm_train_dna, DNA);
        StringCharFeatures subkfeats_test = new StringCharFeatures(fm_test_dna, DNA);
        int degree = 3;
        FixedDegreeStringKernel subkernel2 = new FixedDegreeStringKernel(10, degree);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel2);

        subkfeats_train = new StringCharFeatures(fm_train_dna, DNA);
        subkfeats_test = new StringCharFeatures(fm_test_dna, DNA);
        LocalAlignmentStringKernel subkernel3 = new LocalAlignmentStringKernel(10);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);
        DoubleMatrix km_train =kernel.get_kernel_matrix();
        kernel.init(feats_train, feats_test);
        DoubleMatrix km_test =kernel.get_kernel_matrix();

        ArrayList result = new ArrayList();
        result.Add(km_train);
        result.Add(km_test);
        result.Add(kernel);

        modshogun.exit_shogun();
        return (object)result;
    }
Example #42
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;
        int mkl_norm = 2;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        CombinedKernel kernel = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test = new CombinedFeatures();

        RealFeatures subkfeats_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats_test = new RealFeatures(testdata_real);

        GaussianKernel subkernel = new GaussianKernel(10, width);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel);

        LinearKernel subkernel2 = new LinearKernel();
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel2);

        PolyKernel subkernel3 = new PolyKernel(10, 2);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);

        Labels labels = new Labels(trainlab);

        MKLMultiClass mkl = new MKLMultiClass(C, kernel, labels);
        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mkl_norm);

        mkl.train();

        kernel.init(feats_train, feats_test);
        DoubleMatrix @out = mkl.apply().get_labels();

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.0;
        double threshold = 0.05;

        double[,] data = Load.load_numbers("../data/fm_train_real.dat");
        RealFeatures features = new RealFeatures(data);

        GaussianKernel kernel = new GaussianKernel(features, features, width);

        KernelPCA preprocessor = new KernelPCA(kernel);
        preprocessor.init(features);
        preprocessor.apply_to_feature_matrix(features);
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        int cardinality = 2;
        int cache = 10;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");
        String[] fm_train_dna = Load.load_dna("../data/fm_train_dna.dat");
        String[] fm_test_dna = Load.load_dna("../data/fm_test_dna.dat");

        RealFeatures subfeats_train = new RealFeatures(traindata_real);
        RealFeatures subfeats_test = new RealFeatures(testdata_real);

        CombinedKernel kernel= new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test = new CombinedFeatures();

        GaussianKernel subkernel = new GaussianKernel(cache, 1.1);
        feats_train.append_feature_obj(subfeats_train);
        feats_test.append_feature_obj(subfeats_test);
        kernel.append_kernel(subkernel);

        StringCharFeatures subkfeats_train = new StringCharFeatures(fm_train_dna, EAlphabet.DNA);
        StringCharFeatures subkfeats_test = new StringCharFeatures(fm_test_dna, EAlphabet.DNA);

        int degree = 3;

        FixedDegreeStringKernel subkernel2= new FixedDegreeStringKernel(10, degree);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel2);

        subkfeats_train = new StringCharFeatures(fm_train_dna, EAlphabet.DNA);
        subkfeats_test = new StringCharFeatures(fm_test_dna, EAlphabet.DNA);
        LocalAlignmentStringKernel subkernel3 = new LocalAlignmentStringKernel(10);
        feats_train.append_feature_obj(subkfeats_train);
        feats_test.append_feature_obj(subkfeats_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);
        double[,] km_train = kernel.get_kernel_matrix();

        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width     = 2.0;
        double threshold = 0.05;

        double[,] data = Load.load_numbers("../data/fm_train_real.dat");
        RealFeatures features = new RealFeatures(data);

        GaussianKernel kernel = new GaussianKernel(features, features, width);

        KernelPCA preprocessor = new KernelPCA(kernel);

        preprocessor.init(features);
        preprocessor.apply_to_feature_matrix(features);
    }
Example #46
        public void GaussianKernel_Value()
        {
            var kernel = new GaussianKernel();

            Assert.AreEqual(0.00193045413F, kernel.Value(-2.5F), EPS);
            Assert.AreEqual(0.01831563888F, kernel.Value(-2.0F), EPS);
            Assert.AreEqual(0.10539922456F, kernel.Value(-1.5F), EPS);
            Assert.AreEqual(0.36787944117F, kernel.Value(-1.0F), EPS);
            Assert.AreEqual(0.77880078307F, kernel.Value(-0.5F), EPS);
            Assert.AreEqual(1.0F, kernel.Value(0.0F), EPS);
            Assert.AreEqual(0.77880078307F, kernel.Value(0.5F), EPS);
            Assert.AreEqual(0.36787944117F, kernel.Value(1.0F), EPS);
            Assert.AreEqual(0.10539922456F, kernel.Value(1.5F), EPS);
            Assert.AreEqual(0.01831563888F, kernel.Value(2.0F), EPS);
            Assert.AreEqual(0.00193045413F, kernel.Value(2.5F), EPS);
        }
Example #47
        public void GaussianKernel_Value()
        {
            var kernel = new GaussianKernel();

            Assert.AreEqual(0.01752830049F, kernel.Value(-2.5F), EPS);
            Assert.AreEqual(0.05399096651F, kernel.Value(-2.0F), EPS);
            Assert.AreEqual(0.12951759566F, kernel.Value(-1.5F), EPS);
            Assert.AreEqual(0.24197072451F, kernel.Value(-1.0F), EPS);
            Assert.AreEqual(0.35206532676F, kernel.Value(-0.5F), EPS);
            Assert.AreEqual(0.3989422804F, kernel.Value(0.0F), EPS);
            Assert.AreEqual(0.35206532676F, kernel.Value(0.5F), EPS);
            Assert.AreEqual(0.24197072451F, kernel.Value(1.0F), EPS);
            Assert.AreEqual(0.12951759566F, kernel.Value(1.5F), EPS);
            Assert.AreEqual(0.05399096651F, kernel.Value(2.0F), EPS);
            Assert.AreEqual(0.01752830049F, kernel.Value(2.5F), EPS);
        }
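The two GaussianKernel_Value fixtures above appear to exercise different conventions: the expected values in the first match the unnormalized form exp(-x^2) (e.g. 0.36788 at x = ±1), while those in the second match the standard normal density exp(-x^2/2)/sqrt(2*pi) (e.g. 0.39894 at x = 0). The snippet below is a minimal, library-free sketch that reproduces both columns of expected values; the class and method names are illustrative only and not part of any library used in these examples.

using System;

// Illustrative reference calculation only (not part of the tested libraries).
static class GaussianKernelReference
{
    // Unnormalized form matching the first fixture: K(x) = exp(-x^2)
    static double Unnormalized(double x) { return Math.Exp(-x * x); }

    // Standard normal density matching the second fixture: K(x) = exp(-x^2/2) / sqrt(2*pi)
    static double Density(double x) { return Math.Exp(-x * x / 2.0) / Math.Sqrt(2.0 * Math.PI); }

    static void Main()
    {
        // Prints e.g. Unnormalized(1.0) = 0.36787944 and Density(0.0) = 0.39894228,
        // matching the values asserted in the two tests above.
        for (int i = -5; i <= 5; i++)
        {
            double x = i * 0.5;
            Console.WriteLine("{0,5:F1}  {1:F8}  {2:F8}", x, Unnormalized(x), Density(x));
        }
    }
}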
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
        double[,] km_train = kernel.get_kernel_matrix();
        AsciiFile f = new AsciiFile("gaussian_train.ascii", 'w');
        kernel.save(f);

        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();
        AsciiFile f_test = new AsciiFile("gaussian_test.ascii", 'w');
        kernel.save(f_test);

        // Display km_train
        Console.Write("km_train:\n");
        int numRows = km_train.GetLength(0);
        int numCols = km_train.GetLength(1);

        for(int i = 0; i < numRows; i++){
            for(int j = 0; j < numCols; j++){
                Console.Write(km_train[i,j] +" ");
            }
            Console.Write("\n");
        }

        // Display km_test
        Console.Write("\nkm_test:\n");
        numRows = km_test.GetLength(0);
        numCols = km_test.GetLength(1);

        for(int i = 0; i < numRows; i++){
            for(int j = 0; j < numCols; j++){
                Console.Write(km_test[i,j] +" ");
            }
            Console.Write("\n");
        }

        modshogun.exit_shogun();
    }
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.0;
        double threshold = 0.05;

        DoubleMatrix data = Load.load_numbers("../data/fm_train_real.dat");
        RealFeatures features = new RealFeatures(data);

        GaussianKernel kernel = new GaussianKernel(features, features, width);

        KernelPCACut preprocessor = new KernelPCACut(kernel, threshold);
        preprocessor.init(features);
        preprocessor.apply_to_feature_matrix(features);

        modshogun.exit_shogun();
    }
Example #50
        /// <summary>
        ///   Runs the Mean-Shift algorithm.
        /// </summary>
        ///
        private void runMeanShift()
        {
            int pixelSize = 3;

            // Retrieve the kernel bandwidth
            double sigma = (double)numBandwidth.Value;

            // Load original image
            Bitmap image = Properties.Resources.leaf;

            // Create converters
            ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
            ArrayToImage arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

            // Transform the image into an array of pixel values
            double[][] pixels; imageToArray.Convert(image, out pixels);


            // Create a MeanShift algorithm using the given bandwidth
            // and a Gaussian density kernel as the kernel function:

            IRadiallySymmetricKernel kernel = new GaussianKernel(pixelSize);

            var meanShift = new MeanShift(pixelSize, kernel, sigma)
            {
                Tolerance     = 0.05,
                MaxIterations = 10
            };


            // Compute the mean-shift algorithm until the difference
            // in shift vectors between two iterations is below 0.05

            int[] idx = meanShift.Compute(pixels);


            // Replace every pixel with its corresponding centroid
            pixels.ApplyInPlace((x, i) => meanShift.Clusters.Modes[idx[i]]);

            // Show resulting image in the picture box
            Bitmap result; arrayToImage.Convert(pixels, out result);

            pictureBox.Image = result;
        }
Example #51
        private void doPotentialFixedAlgorithmTest()
        {
            var metric = new EuclideanMetric();
            var kernel = new GaussianKernel();

            var eqps = new PotentialFunctionAlgorithm.KernelEquipment[Data.TrainingSample.Count];

            for (int i = 0; i < Data.TrainingSample.Count; i++)
            {
                eqps[i] = new PotentialFunctionAlgorithm.KernelEquipment(1.0F, 1.5F);
            }
            var alg = new PotentialFunctionAlgorithm(Data.TrainingSample, metric, kernel, eqps);

            Console.WriteLine("Margin:");
            calculateMargin(alg);

            outputError(alg);

            Visualizer.Run(alg);
        }
Example #52
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 1.6;

        DoubleMatrix train_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures(train_real);
        GaussianKernel subkernel = new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        AUCKernel kernel = new AUCKernel(0, subkernel);
        kernel.setup_auc_maximization(labels);

        DoubleMatrix km_train = kernel.get_kernel_matrix();
        Console.WriteLine(km_train.ToString());

        modshogun.exit_shogun();
    }
Example #53
        public override void Initialize()
        {
            SetStartDate(2016, 1, 1);
            SetEndDate(2016, 7, 1);
            SetCash(10000);

            AddSecurity(SecurityType.Equity, symbol, Resolution.Hour);

            var tradeBarHistory = History<TradeBar>(symbol, TimeSpan.FromDays(7), Resolution.Hour);

            // We can loop over the returned history to get TradeBars,
            // which can be used to initialize indicators or perform other calculations.
            var closes = new double[][] { tradeBarHistory.Select((tb) => tb.Close).ToDoubleArray() };

            IRadiallySymmetricKernel kernel = new GaussianKernel(1);

            var meanShift = new MeanShift(kernel, 1)
            {
                //Tolerance = 0.05,
                //MaxIterations = 10
            };


            // Run mean-shift clustering on the close prices
            // (tolerance and iteration limit are left at their defaults here)

            int[] idx = meanShift.Learn(closes).Decide(closes);


            // Replace every sample with its corresponding cluster mode
            result = closes.Apply((x, i) => meanShift.Clusters.Modes[idx[i]], result: closes);

            foreach (var rr in result)
            {
                foreach (var r in rr)
                {
                    Debug("" + r);
                }
            }
        }
Example #54
        private static void Run(int queries)
        {
            var kernel = new GaussianKernel(0.25, 1);
            var model  = new Model(kernel, 0, 8, 800, ObjectiveFunction);
            var output = model.Explore(queries);

            var er = output.EstimationValues
                     .Select(q => new double[] { q.Mean, q.UpperBound, q.LowerBound, q.X })
                     .ToArray();

            var qr = output.QueryValues
                     .Select(q => new double[] { q.X, q.FX })
                     .ToArray();

            var af = output.AquisitionValues
                     .Select(q => new double[] { q.X, q.FX })
                     .ToArray();

            var json1 = JsonConvert.SerializeObject(er, Formatting.Indented);

            File.WriteAllText("predicted_test.json", json1);

            var json2 = JsonConvert.SerializeObject(qr, Formatting.Indented);

            File.WriteAllText("observed_test.json", json2);

            var json3 = JsonConvert.SerializeObject(af, Formatting.Indented);

            File.WriteAllText("aquisition_test.json", json3);

            RunCmd("script.py", new string[]
            {
                "predicted_test.json",
                "observed_test.json",
                "aquisition_test.json",
                $"{quries}.png"
            });
        }
Example #55
 public static void Main(string[] args)
 {
     Library.init_shogun_with_defaults();
     GaussianKernel k = new GaussianKernel();
     Console.WriteLine(k.get_width());
 }
Example #56
 internal static HandleRef getCPtr(GaussianKernel obj) {
   return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
 }
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        int num = 1000;
        double dist = 1.0;
        double width = 2.1;
        double C = 1.0;

        DoubleMatrix offs = ones(2, num).mmul(dist);
        DoubleMatrix x = randn(2, num).sub(offs);
        DoubleMatrix y = randn(2, num).add(offs);
        DoubleMatrix traindata_real = concatHorizontally(x, y);

        DoubleMatrix m = randn(2, num).sub(offs);
        DoubleMatrix n = randn(2, num).add(offs);
        DoubleMatrix testdata_real = concatHorizontally(m, n);

        DoubleMatrix o = ones(1,num);
        DoubleMatrix trainlab = concatHorizontally(o.neg(), o);
        DoubleMatrix testlab = concatHorizontally(o.neg(), o);

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
        Labels labels = new Labels(trainlab);
        SVMLight svm = new SVMLight(C, kernel, labels);
        svm.train();

        ArrayList result = new ArrayList();
        result.Add(svm);
        string fname = "out.txt";
        //save(fname, (Serializable)result);
        //ArrayList r = (ArrayList)load(fname);
        //SVMLight svm2 = (SVMLight)r.get(0);

        modshogun.exit_shogun();
    }
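The commented-out save/load calls in the example above look like leftovers from the Java version (an ArrayList plus a generic save/load helper). If persisting the trained SVM is needed, Shogun's modular interface provides serializable file objects; the lines below are a rough sketch only, assuming the SerializableAsciiFile class and the save_serializable/load_serializable methods are exposed with these signatures in the C# bindings.

        // Hypothetical sketch -- verify the exact signatures in the generated C# bindings.
        SerializableAsciiFile fout = new SerializableAsciiFile(fname, 'w');
        svm.save_serializable(fout);

        SerializableAsciiFile fin = new SerializableAsciiFile(fname, 'r');
        SVMLight svm2 = new SVMLight();
        svm2.load_serializable(fin);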