Пример #1
0
        /// <summary>
        ///   Trains a linear-kernel SVM on the iris data set and logs the
        ///   classification accuracy against a freshly loaded copy of the data.
        /// </summary>
        public void IrisSupportVectorMachineClassifierTest()
        {
            DataSetLoader dataSetLoader = new DataSetLoader();

            Console.WriteLine(" Reading DataSet.. ");
            var irises = dataSetLoader.SelectIrises();

            // Fixed: integer loop counter instead of the original double;
            // floating-point loop counters are analyzer-flagged and invite
            // accumulation errors. The loop still runs exactly once and the
            // log line below still prints "i = 0".
            for (int i = 0; i < 1; i++)
            {
                Kernel        kernel = new LinearKernel();
                SVMClassifier animalSVMClassifier = new SVMClassifier(irises, kernel, 0.001, 10.0);
                animalSVMClassifier.Train();

                // Evaluate on a re-loaded copy of the same data set.
                var irisesTest  = dataSetLoader.SelectIrises();
                var trueCounter = 0;
                var counter     = 0;
                foreach (var item in irisesTest)
                {
                    var outputValue = animalSVMClassifier.Classify(item.Item1);
                    if (outputValue == item.Item2)
                    {
                        trueCounter++;
                    }
                    Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                                                  item.Item2, outputValue, (outputValue == item.Item2) ? "true" : "false"));
                    counter++;
                }
                Debug.WriteLine(string.Format(" i = {0} Data {1} - True {2} Verhältnis: {3}", i,
                                              counter.ToString(), trueCounter.ToString(), (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
            }
        }
Пример #2
0
        /// <summary>
        ///   Trains a dual perceptron with a linear kernel on the animal data
        ///   set and logs the classification accuracy against a freshly loaded
        ///   copy of the data.
        /// </summary>
        public void AnimalDualPerceptronTest()
        {
            DataSetLoader dataSetLoader = new DataSetLoader();

            Console.WriteLine(" Reading DataSet.. ");
            var animals = dataSetLoader.SelectAnimals();

            // Fixed: integer loop counter instead of the original double;
            // floating-point loop counters are analyzer-flagged and invite
            // accumulation errors. The loop still runs exactly once and the
            // log line below still prints "i = 0".
            for (int i = 0; i < 1; i++)
            {
                Kernel kernel = new LinearKernel();
                DualPerceptronClassifier dualPerceptronClassifier = new DualPerceptronClassifier(animals, kernel);
                dualPerceptronClassifier.Train();

                // Evaluate on a re-loaded copy of the same data set.
                var animalsTest = dataSetLoader.SelectAnimals();
                var trueCounter = 0;
                var counter     = 0;
                foreach (var item in animalsTest)
                {
                    var outputValue = dualPerceptronClassifier.Classify(item.Item1);
                    if (outputValue == item.Item2)
                    {
                        trueCounter++;
                    }
                    Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                                                  item.Item2, outputValue, (outputValue == item.Item2) ? "true" : "false"));
                    counter++;
                }
                Debug.WriteLine(string.Format(" i = {0} Data {1} - True {2} Verhältnis: {3}", i,
                                              counter.ToString(), trueCounter.ToString(), (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
            }
        }
Пример #3
0
        /// <summary>
        ///   Creates the DualPerceptronClassifier for the given data, selecting
        ///   the kernel from the recognised entries in netMLObject.Options
        ///   (the last match wins; defaults to a linear kernel).
        /// </summary>
        private void DualPerceptron(List <Tuple <double[], double> > data)
        {
            Kernel kernel = new LinearKernel();

            foreach (var option in netMLObject.Options)
            {
                switch (option)
                {
                    case "linearkernel":
                        kernel = new LinearKernel();
                        break;
                    case "gaussiankernel":
                        kernel = new GaussianKernel(1.0);
                        break;
                    case "polynomialkernel":
                        kernel = new PolynomialKernel(1);
                        break;
                    case "logitkernel":
                        kernel = new LogitKernel();
                        break;
                    case "tanhkernel":
                        kernel = new TanhKernel();
                        break;
                }
            }

            classification = new DualPerceptronClassifier(data, kernel);
        }
Пример #4
0
        /// <summary>
        ///   Compares classifiers on the credit data set: decision tree, naive
        ///   Bayes and SVM are passed to Classifier.Classify over creditData.
        ///   A neuronal network classifier is constructed but deliberately not
        ///   included in the comparison list.
        /// </summary>
        public void CreditDataClassifyMethod()
        {
            var loader     = new DataSetLoader();
            var creditData = loader.SelectCreditData();
            var data       = loader.CalculatePercent(100, creditData);

            var decisionTree = new DecisionTreeClassifier(data.Item1, new ShannonEntropySplitter());
            var naiveBayes   = new NaiveBayesClassifier(data.Item1);

            Kernel kernel        = new LinearKernel();
            var    svmClassifier = new SVMClassifier(creditData, kernel, 0.001, 10.0);

            // Built for parity with the other tests but intentionally kept out
            // of the list handed to Classify (matches the original code).
            var neuronalCreditData = loader.SelectNeuronalNetworksCreditData();
            var neuronalNetworkClassifier =
                new NeuronalNetworkClassifier(neuronalCreditData, 20, 2, 20, 5000, 0.1);

            var list = new List <NetML.Classification>
            {
                decisionTree,
                naiveBayes,
                svmClassifier
            };

            var classifier = new Classifier();
            classifier.Classify(list, creditData);
        }
Пример #5
0
        /// <summary>
        ///   Compares four classifiers (decision tree, naive Bayes, SVM,
        ///   neuronal network) on the animal data set; Classifier.Classify is
        ///   invoked with data.Item2 from the 50% CalculatePercent split.
        /// </summary>
        public void AnimalClassifyMethod()
        {
            var loader  = new DataSetLoader();
            var animals = loader.SelectAnimals();
            var data    = loader.CalculatePercent(50, animals);

            var decisionTree = new DecisionTreeClassifier(data.Item1, new ShannonEntropySplitter());
            var naiveBayes   = new NaiveBayesClassifier(data.Item1);

            Kernel kernel        = new LinearKernel();
            var    svmClassifier = new SVMClassifier(animals, kernel, 0.001, 10.0);

            var neuronalAnimals = loader.SelectNeuronalNetworkAnimals();
            var neuronalNetworkClassifier =
                new NeuronalNetworkClassifier(neuronalAnimals, 16, 7, 16, 500, 0.1);

            var list = new List <NetML.Classification>
            {
                decisionTree,
                naiveBayes,
                svmClassifier,
                neuronalNetworkClassifier
            };

            var classifier = new Classifier();
            classifier.Classify(list, data.Item2);
        }
Пример #6
0
    /// <summary>
    ///   Multi-class MKL example: combines Gaussian, linear and polynomial
    ///   sub-kernels over the same real-valued data, trains MKLMulticlass and
    ///   applies it to the test features.
    /// </summary>
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width    = 2.1;
        double epsilon  = 1e-5;
        double C        = 1.0;
        int    mkl_norm = 2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_multiclass.dat");

        CombinedKernel   kernel      = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();
        CombinedFeatures feats_test  = new CombinedFeatures();

        // Sub-kernel 1: Gaussian.
        RealFeatures subkfeats1_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats1_test  = new RealFeatures(testdata_real);

        GaussianKernel subkernel = new GaussianKernel(10, width);

        feats_train.append_feature_obj(subkfeats1_train);
        feats_test.append_feature_obj(subkfeats1_test);
        kernel.append_kernel(subkernel);

        // Sub-kernel 2: linear.
        RealFeatures subkfeats2_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats2_test  = new RealFeatures(testdata_real);

        LinearKernel subkernel2 = new LinearKernel();

        feats_train.append_feature_obj(subkfeats2_train);
        feats_test.append_feature_obj(subkfeats2_test);
        kernel.append_kernel(subkernel2);

        // Sub-kernel 3: polynomial of degree 2.
        RealFeatures subkfeats3_train = new RealFeatures(traindata_real);
        RealFeatures subkfeats3_test  = new RealFeatures(testdata_real);

        PolyKernel subkernel3 = new PolyKernel(10, 2);

        feats_train.append_feature_obj(subkfeats3_train);
        feats_test.append_feature_obj(subkfeats3_test);
        kernel.append_kernel(subkernel3);

        kernel.init(feats_train, feats_train);

        MulticlassLabels labels = new MulticlassLabels(trainlab);

        MKLMulticlass mkl = new MKLMulticlass(C, kernel, labels);

        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mkl_norm);

        mkl.train();

        // Re-initialise against the test features and classify.
        kernel.init(feats_train, feats_test);
        double[] outMatrix = LabelsFactory.to_multiclass(mkl.apply()).get_labels();

        // Fixed: release shogun's global state on exit — the sibling examples
        // in this file call exit_shogun(), but it was missing here.
        modshogun.exit_shogun();
    }
Пример #7
0
        /// <summary>
        ///   Sets up the CSR-format RBF CUDA kernel wrapper: stores gamma,
        ///   creates the internal LinearKernel and selects the CUDA
        ///   function/module names.
        /// </summary>
        public CuRBFCSRKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the CSR RBF product.
            cudaProductKernelName = "rbfCsrFormatKernel";
            cudaModuleName        = "KernelsCSR.cubin";
        }
        /// <summary>
        ///   Sets up the sliced-Ellpack RBF kernel wrapper: stores gamma,
        ///   creates the internal LinearKernel and fixes the launch geometry.
        /// </summary>
        public CudafyRBFSlicedEllpackKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // Sliced-Ellpack geometry: 4 threads per row, slices of 64 rows.
            threadsPerRow = 4;
            sliceSize     = 64;
        }
Пример #9
0
 /// <summary>
 ///   Sets up the Ellpack-format RBF CUDA kernel wrapper.
 /// </summary>
 public CuRBFEllpackKernel(float gamma)
 {
     Gamma     = gamma;
     linKernel = new LinearKernel();

     // CUDA function and module implementing the Ellpack RBF product;
     // dense-vector construction stays on the host side.
     cudaProductKernelName = "rbfEllpackFormatKernel";
     cudaModuleName        = "KernelsEllpack.cubin";
     MakeDenseVectorOnGPU  = false;
 }
Пример #10
0
 /// <summary>
 ///   Sets up the Ellpack-format RBF CUDA kernel wrapper (duplicate snippet).
 /// </summary>
 public CuRBFEllpackKernel(float gamma)
 {
     Gamma     = gamma;
     linKernel = new LinearKernel();

     // CUDA function and module implementing the Ellpack RBF product;
     // dense-vector construction stays on the host side.
     cudaProductKernelName = "rbfEllpackFormatKernel";
     cudaModuleName        = "KernelsEllpack.cubin";
     MakeDenseVectorOnGPU  = false;
 }
Пример #11
0
        /// <summary>
        ///   Sets up the CSR-format RBF CUDA kernel wrapper (duplicate snippet).
        /// </summary>
        public CuRBFCSRKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the CSR RBF product.
            cudaProductKernelName = "rbfCsrFormatKernel";
            cudaModuleName        = "KernelsCSR.cubin";
        }
    /// <summary>
    ///   Multi-class MKL example: three sub-kernels (Gaussian, linear,
    ///   polynomial) over the same real-valued data are combined, trained with
    ///   MKLMulticlass, and applied to the test features.
    /// </summary>
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();

        double width   = 2.1;
        double epsilon = 1e-5;
        double C       = 1.0;
        int    mklNorm = 2;

        double[,] trainData = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testData  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainLabelValues = Load.load_labels("../data/label_train_multiclass.dat");

        CombinedKernel   combined   = new CombinedKernel();
        CombinedFeatures trainFeats = new CombinedFeatures();
        CombinedFeatures testFeats  = new CombinedFeatures();

        // Sub-kernel 1: Gaussian.
        RealFeatures gaussTrain = new RealFeatures(trainData);
        RealFeatures gaussTest  = new RealFeatures(testData);

        GaussianKernel gauss = new GaussianKernel(10, width);
        trainFeats.append_feature_obj(gaussTrain);
        testFeats.append_feature_obj(gaussTest);
        combined.append_kernel(gauss);

        // Sub-kernel 2: linear.
        RealFeatures linTrain = new RealFeatures(trainData);
        RealFeatures linTest  = new RealFeatures(testData);

        LinearKernel linear = new LinearKernel();
        trainFeats.append_feature_obj(linTrain);
        testFeats.append_feature_obj(linTest);
        combined.append_kernel(linear);

        // Sub-kernel 3: polynomial of degree 2.
        RealFeatures polyTrain = new RealFeatures(trainData);
        RealFeatures polyTest  = new RealFeatures(testData);

        PolyKernel poly = new PolyKernel(10, 2);
        trainFeats.append_feature_obj(polyTrain);
        testFeats.append_feature_obj(polyTest);
        combined.append_kernel(poly);

        combined.init(trainFeats, trainFeats);

        MulticlassLabels labels = new MulticlassLabels(trainLabelValues);

        MKLMulticlass mkl = new MKLMulticlass(C, combined, labels);
        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mklNorm);

        mkl.train();

        // Re-initialise against the test features and classify.
        combined.init(trainFeats, testFeats);
        double[] predictions = MulticlassLabels.obtain_from_generic(mkl.apply()).get_labels();

        modshogun.exit_shogun();
    }
Пример #13
0
        /// <summary>
        ///   Creates the SVMClassifier for the given data. The kernel is chosen
        ///   from the recognised entries in netMLObject.Options (last match
        ///   wins; defaults to linear). When a "n" or "c" entry appears in
        ///   netMLObject.DoubleValues, the (n, C) constructor overload is used.
        /// </summary>
        private void SupportVectorMachine(List <Tuple <double[], double> > data)
        {
            Kernel kernel   = new LinearKernel();
            double n        = 0.0;
            double C        = 0.0;
            bool   nAndCSet = false;

            foreach (var option in netMLObject.Options)
            {
                switch (option)
                {
                    case "linearkernel":
                        kernel = new LinearKernel();
                        break;
                    case "gaussiankernel":
                        kernel = new GaussianKernel(1.0);
                        break;
                    case "polynomialkernel":
                        kernel = new PolynomialKernel(1);
                        break;
                    case "logitkernel":
                        kernel = new LogitKernel();
                        break;
                    case "tanhkernel":
                        kernel = new TanhKernel();
                        break;
                }
            }

            // Either key alone switches to the parameterised constructor,
            // matching the original behaviour (the other value stays 0.0).
            foreach (var value in netMLObject.DoubleValues)
            {
                switch (value.Key)
                {
                    case "n":
                        n        = value.Value;
                        nAndCSet = true;
                        break;
                    case "c":
                        C        = value.Value;
                        nAndCSet = true;
                        break;
                }
            }

            classification = nAndCSet
                ? new SVMClassifier(data, kernel, n, C)
                : new SVMClassifier(data, kernel);
        }
Пример #14
0
    /// <summary>
    ///   Linear kernel with average-diagonal normalisation over real-valued
    ///   features; prints the train and test kernel matrices row by row.
    /// </summary>
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double scale = 1.2;

        double[,] trainData = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testData  = Load.load_numbers("../data/fm_test_real.dat");

        RealFeatures trainFeatures = new RealFeatures(trainData);
        RealFeatures testFeatures  = new RealFeatures(testData);

        LinearKernel kernel = new LinearKernel(trainFeatures, testFeatures);

        kernel.set_normalizer(new AvgDiagKernelNormalizer(scale));
        kernel.init(trainFeatures, trainFeatures);

        double[,] km_train = kernel.get_kernel_matrix();
        kernel.init(trainFeatures, testFeatures);
        double[,] km_test = kernel.get_kernel_matrix();

        // Prints a matrix one space-terminated value per column and one line
        // per row — identical output to the original nested loops.
        void PrintMatrix(double[,] matrix)
        {
            int rows = matrix.GetLength(0);
            int cols = matrix.GetLength(1);

            for (int row = 0; row < rows; row++)
            {
                for (int col = 0; col < cols; col++)
                {
                    Console.Write(matrix[row, col] + " ");
                }
                Console.Write("\n");
            }
        }

        Console.Write("km_train:\n");
        PrintMatrix(km_train);

        Console.Write("\nkm_test:\n");
        PrintMatrix(km_test);

        modshogun.exit_shogun();
    }
Пример #15
0
        /// <summary>
        ///   Builds a summation kernel (squared exponential + linear + white
        ///   noise) and round-trips it through TestReadWrite.
        /// </summary>
        public void TestSummmationReadWrite()
        {
            var logLength   = System.Math.Log(1.234);
            var logSignalSd = System.Math.Log(2.345);
            var logNoiseSd  = System.Math.Log(3.456);

            var logVariances = new[] { System.Math.Log(0.987), System.Math.Log(0.876), System.Math.Log(0.765) };

            // Compose the kernel in the same order as before: SE base, then
            // linear, then white noise.
            var kf = new SummationKernel(new SquaredExponential(logLength, logSignalSd));
            kf += new LinearKernel(logVariances);
            kf += new WhiteNoise(logNoiseSd);

            TestReadWrite(kf);
        }
Пример #16
0
    /// <summary>
    ///   Two-class MKL example (DoubleMatrix variant): Gaussian, linear and
    ///   polynomial sub-kernels over one shared pair of feature objects,
    ///   trained with MKLMultiClass.
    /// </summary>
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        double width   = 2.1;
        double epsilon = 1e-5;
        double C       = 1.0;
        int    mklNorm = 2;

        DoubleMatrix trainData = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testData  = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainLabelValues = Load.load_labels("../data/label_train_twoclass.dat");

        CombinedKernel   combined   = new CombinedKernel();
        CombinedFeatures trainFeats = new CombinedFeatures();
        CombinedFeatures testFeats  = new CombinedFeatures();

        // NOTE: unlike the multi-class example, the SAME feature objects are
        // appended once per sub-kernel here (as in the original).
        RealFeatures subTrain = new RealFeatures(trainData);
        RealFeatures subTest  = new RealFeatures(testData);

        GaussianKernel gauss = new GaussianKernel(10, width);
        trainFeats.append_feature_obj(subTrain);
        testFeats.append_feature_obj(subTest);
        combined.append_kernel(gauss);

        LinearKernel linear = new LinearKernel();
        trainFeats.append_feature_obj(subTrain);
        testFeats.append_feature_obj(subTest);
        combined.append_kernel(linear);

        PolyKernel poly = new PolyKernel(10, 2);
        trainFeats.append_feature_obj(subTrain);
        testFeats.append_feature_obj(subTest);
        combined.append_kernel(poly);

        combined.init(trainFeats, trainFeats);

        Labels labels = new Labels(trainLabelValues);

        MKLMultiClass mkl = new MKLMultiClass(C, combined, labels);
        mkl.set_epsilon(epsilon);
        mkl.set_mkl_epsilon(epsilon);
        mkl.set_mkl_norm(mklNorm);

        mkl.train();

        combined.init(trainFeats, testFeats);
        DoubleMatrix predictions = mkl.apply().get_labels();

        modshogun.exit_shogun();
    }
Пример #17
0
        /// <summary>
        ///   Initialises a SVMGenerator object
        /// </summary>
        public SVMGenerator()
        {
            Bias              = 0d;
            C                 = 1d;
            Epsilon           = 0.001;
            MaxIterations     = 10;
            KernelFunction    = new LinearKernel();
            NormalizeFeatures = true;

            if (SelectionFunction == null)
            {
                SelectionFunction = new WorkingSetSelection3();
            }
        }
Пример #18
0
        /// <summary>
        ///   Sets up the ERTILP-format RBF CUDA kernel wrapper.
        /// </summary>
        public CuRBFERTILPKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the ERTILP RBF product.
            cudaProductKernelName = "rbfERTILP";
            //cudaProductKernelName = "rbfEllRTILP_old";
            cudaModuleName = "KernelsEllpack.cubin";

            MakeDenseVectorOnGPU = false;

            // Launch geometry: 4 threads per row, prefetch factor 2.
            ThreadsPerRow = 4;
            Prefetch      = 2;
        }
    /// <summary>
    ///   Linear kernel over word (16-bit) features with average-diagonal
    ///   normalisation; prints every entry of the train and test kernel
    ///   matrices.
    /// </summary>
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double scale = 1.2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_word.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_word.dat");

        // Element-wise narrowing copy: WordFeatures takes a short[,] matrix.
        short[,] ToWordMatrix(double[,] source)
        {
            var result = new short[source.GetLength(0), source.GetLength(1)];
            for (int i = 0; i < source.GetLength(0); i++)
            {
                for (int j = 0; j < source.GetLength(1); j++)
                {
                    result[i, j] = (short)source[i, j];
                }
            }
            return result;
        }

        short[,] traindata_word = ToWordMatrix(traindata_real);
        short[,] testdata_word  = ToWordMatrix(testdata_real);

        WordFeatures feats_train = new WordFeatures(traindata_word);
        WordFeatures feats_test  = new WordFeatures(testdata_word);

        LinearKernel kernel = new LinearKernel(feats_train, feats_test);

        kernel.set_normalizer(new AvgDiagKernelNormalizer(scale));
        kernel.init(feats_train, feats_train);

        double[,] km_train = kernel.get_kernel_matrix();
        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        foreach (double item in km_train)
        {
            Console.Write(item);
        }

        foreach (double item in km_test)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
Пример #20
0
        /// <summary>
        ///   Sets up the ERTILP-format RBF CUDA kernel wrapper (duplicate
        ///   snippet).
        /// </summary>
        public CuRBFERTILPKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the ERTILP RBF product.
            cudaProductKernelName = "rbfERTILP";
            //cudaProductKernelName = "rbfEllRTILP_old";
            cudaModuleName = "KernelsEllpack.cubin";

            MakeDenseVectorOnGPU = false;

            // Launch geometry: 4 threads per row, prefetch factor 2.
            ThreadsPerRow = 4;
            Prefetch      = 2;
        }
Пример #21
0
        /// <summary>
        ///   Sets up the sliced-Ellpack RBF CUDA kernel wrapper.
        /// </summary>
        public CuRBFSlEllKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the sliced-Ellpack product.
            cudaProductKernelName = "rbfSlicedEllpackKernel";
            cudaModuleName        = "KernelsSlicedEllpack.cubin";

            // Launch geometry: 4 threads per row, slices of 64 rows.
            threadsPerRow = 4;
            sliceSize     = 64;

            //threadsPerRow = 2;
            //sliceSize = 4;
        }
Пример #22
0
        /// <summary>
        ///   Sets up the sliced-Ellpack RBF CUDA kernel wrapper (duplicate
        ///   snippet).
        /// </summary>
        public CuRBFSlEllKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the sliced-Ellpack product.
            cudaProductKernelName = "rbfSlicedEllpackKernel";
            cudaModuleName        = "KernelsSlicedEllpack.cubin";

            // Launch geometry: 4 threads per row, slices of 64 rows.
            threadsPerRow = 4;
            sliceSize     = 64;

            //threadsPerRow = 2;
            //sliceSize = 4;
        }
Пример #23
0
        /// <summary>
        ///   Sets up the Ellpack-ILP (column-2) RBF CUDA kernel wrapper.
        /// </summary>
        public CuRBFEllILPKernelCol2(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the Ellpack ILP product.
            cudaProductKernelName = "rbfEllpackILPcol2";
            //cudaProductKernelName = "rbfEllpackILPcol2_Prefetch2";
            cudaModuleName = "KernelsEllpackCol2.cubin";

            MakeDenseVectorOnGPU = false;

            // Prefetch factor used by the ILP kernel.
            preFetch = 2;

            //threadsPerBlock = 128;
        }
Пример #24
0
        /// <summary>
        ///   Sets up the Ellpack-ILP (column-2) RBF CUDA kernel wrapper
        ///   (duplicate snippet).
        /// </summary>
        public CuRBFEllILPKernelCol2(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the Ellpack ILP product.
            cudaProductKernelName = "rbfEllpackILPcol2";
            //cudaProductKernelName = "rbfEllpackILPcol2_Prefetch2";
            cudaModuleName = "KernelsEllpackCol2.cubin";

            MakeDenseVectorOnGPU = false;

            // Prefetch factor used by the ILP kernel.
            preFetch = 2;

            //threadsPerBlock = 128;
        }
Пример #25
0
	/// <summary>
	///   Linear kernel with average-diagonal normalisation over real-valued
	///   features; prints the train and test kernel matrices row by row.
	/// </summary>
	public static void Main() {
		modshogun.init_shogun_with_defaults();
		double scale = 1.2;

		double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

		RealFeatures feats_train = new RealFeatures(traindata_real);
		RealFeatures feats_test = new RealFeatures(testdata_real);

		LinearKernel kernel = new LinearKernel(feats_train, feats_test);
		kernel.set_normalizer(new AvgDiagKernelNormalizer(scale));
		kernel.init(feats_train, feats_train);

		double[,] km_train = kernel.get_kernel_matrix();
		kernel.init(feats_train, feats_test);
		double[,] km_test = kernel.get_kernel_matrix();

		// Prints a matrix one space-terminated value per column and one line
		// per row — identical output to the original nested loops.
		void PrintMatrix(double[,] matrix) {
			int rows = matrix.GetLength(0);
			int cols = matrix.GetLength(1);

			for (int row = 0; row < rows; row++) {
				for (int col = 0; col < cols; col++) {
					Console.Write(matrix[row, col] + " ");
				}
				Console.Write("\n");
			}
		}

		Console.Write("km_train:\n");
		PrintMatrix(km_train);

		Console.Write("\nkm_test:\n");
		PrintMatrix(km_test);

		modshogun.exit_shogun();
	}
Пример #26
0
        /// <summary>
        ///   Checks derivatives of a summation kernel (ARD + linear + white
        ///   noise) at a point against itself and against a second point.
        /// </summary>
        public void TestSumKDerivs()
        {
            var logLengths   = new[] { System.Math.Log(0.543), System.Math.Log(0.432), System.Math.Log(0.321) };
            var logSignalSd  = System.Math.Log(2.345);
            var logVariances = new[] { System.Math.Log(0.987), System.Math.Log(0.876), System.Math.Log(0.765) };
            var logNoiseSd   = System.Math.Log(3.456);

            // Compose the kernel in the same order as before: ARD base, then
            // linear, then white noise.
            var kf = new SummationKernel(new ARD(logLengths, logSignalSd));
            kf += new LinearKernel(logVariances);
            kf += new WhiteNoise(logNoiseSd);

            var x1Vec = Vector.FromArray(new double[] { 0.1, 0.2, 0.3 });
            var x2Vec = Vector.FromArray(new double[] { 0.9, 0.7, 0.5 });

            TestDerivatives(kf, x1Vec, x1Vec);
            TestDerivatives(kf, x1Vec, x2Vec);
        }
Пример #27
0
        /// <summary>
        ///   Sets up the SERTILP-format RBF CUDA kernel wrapper.
        /// </summary>
        public CuRBFSERTILPKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the SERTILP product.
            cudaProductKernelName = "rbfSERTILP";
            //cudaProductKernelName = "rbfSlicedEllpackKernel_shared";
            cudaModuleName = "KernelsSlicedEllpack.cubin";

            // Launch geometry: 4 threads per row, slices of 64 rows,
            // prefetch size 2.
            threadsPerRow = 4;
            sliceSize     = 64;
            preFechSize   = 2;

            //threadsPerRow = 2;
            //sliceSize = 4;
        }
Пример #28
0
        /// <summary>
        ///   Sets up the SERTILP-format RBF CUDA kernel wrapper (duplicate
        ///   snippet).
        /// </summary>
        public CuRBFSERTILPKernel(float gamma)
        {
            Gamma     = gamma;
            linKernel = new LinearKernel();

            // CUDA function and module implementing the SERTILP product.
            cudaProductKernelName = "rbfSERTILP";
            //cudaProductKernelName = "rbfSlicedEllpackKernel_shared";
            cudaModuleName = "KernelsSlicedEllpack.cubin";

            // Launch geometry: 4 threads per row, slices of 64 rows,
            // prefetch size 2.
            threadsPerRow = 4;
            sliceSize     = 64;
            preFechSize   = 2;

            //threadsPerRow = 2;
            //sliceSize = 4;
        }
Пример #29
0
        /// <summary>
        ///   Checks derivatives of the linear kernel after initialising it
        ///   from a two-point data set.
        /// </summary>
        public void TestLinearKernelDerivs()
        {
            var logVariances = new double[] { -0.123, 0.456, 1.789 };
            var lk           = new LinearKernel(logVariances);

            double[] x1 = { 0.1, 0.2, 0.3 };
            double[] x2 = { 0.9, 0.7, 0.5 };

            // Initialise the kernel from the two data points.
            var xlist = new List <Vector>(2) { Vector.FromArray(x1), Vector.FromArray(x2) };
            lk.InitialiseFromData(xlist);

            // Fresh vector instances for the derivative checks — deliberately
            // not the same instances held by xlist (matches the original).
            var x1Vec = Vector.FromArray(x1);
            var x2Vec = Vector.FromArray(x2);

            TestDerivatives(lk, x1Vec, x1Vec);
            TestDerivatives(lk, x1Vec, x2Vec);
        }
Пример #30
0
    /// <summary>
    ///   Linear kernel over word (16-bit) features with average-diagonal
    ///   normalisation; prints every entry of the train and test kernel
    ///   matrices (duplicate snippet).
    /// </summary>
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double scale = 1.2;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_word.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_word.dat");

        // Element-wise narrowing copy: WordFeatures takes a short[,] matrix.
        short[,] ToWordMatrix(double[,] source)
        {
            var result = new short[source.GetLength(0), source.GetLength(1)];
            for (int i = 0; i < source.GetLength(0); i++)
            {
                for (int j = 0; j < source.GetLength(1); j++)
                {
                    result[i, j] = (short)source[i, j];
                }
            }
            return result;
        }

        short[,] traindata_word = ToWordMatrix(traindata_real);
        short[,] testdata_word  = ToWordMatrix(testdata_real);

        WordFeatures feats_train = new WordFeatures(traindata_word);
        WordFeatures feats_test = new WordFeatures(testdata_word);

        LinearKernel kernel = new LinearKernel(feats_train, feats_test);
        kernel.set_normalizer(new AvgDiagKernelNormalizer(scale));
        kernel.init(feats_train, feats_train);

        double[,] km_train = kernel.get_kernel_matrix();
        kernel.init(feats_train, feats_test);
        double[,] km_test = kernel.get_kernel_matrix();

        foreach (double item in km_train)
        {
            Console.Write(item);
        }

        foreach (double item in km_test)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
Пример #31
0
    /// <summary>
    ///   Linear kernel over word features (DoubleMatrix variant) with
    ///   average-diagonal normalisation; prints both kernel matrices.
    /// </summary>
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        double scale = 1.2;

        DoubleMatrix trainData = Load.load_numbers("../data/fm_train_word.dat");
        DoubleMatrix testData  = Load.load_numbers("../data/fm_test_word.dat");

        WordFeatures trainFeats = new WordFeatures(trainData);
        WordFeatures testFeats  = new WordFeatures(testData);

        LinearKernel kernel = new LinearKernel(trainFeats, testFeats);
        kernel.set_normalizer(new AvgDiagKernelNormalizer(scale));

        kernel.init(trainFeats, trainFeats);
        DoubleMatrix kmTrain = kernel.get_kernel_matrix();

        kernel.init(trainFeats, testFeats);
        DoubleMatrix kmTest = kernel.get_kernel_matrix();

        Console.WriteLine(kmTrain.ToString());
        Console.WriteLine(kmTest.ToString());

        modshogun.exit_shogun();
    }
Пример #32
0
        //const string cudaKernelName = "linearCsrFormatKernel";


        /// <summary>
        ///   Sets up the CSR-format linear CUDA kernel wrapper.
        /// </summary>
        public CuLinearKernel()
        {
            linKernel = new LinearKernel();

            // CUDA function implementing the CSR linear product.
            cudaProductKernelName = "linearCsrFormatKernel";
        }
Пример #33
0
 /// <summary>
 ///   Stores gamma and creates the internal LinearKernel used by the
 ///   RBF dual evaluator.
 /// </summary>
 public RBFDualEvaluator(float gamma)
 {
     this.gamma = gamma;
     linKernel  = new LinearKernel();
 }
Пример #34
0
 // SWIG plumbing: extracts the native pointer wrapped by a LinearKernel,
 // mapping a null wrapper to a null HandleRef.
 internal static HandleRef getCPtr(LinearKernel obj) {
   if (obj == null)
   {
     return new HandleRef(null, IntPtr.Zero);
   }

   return obj.swigCPtr;
 }
Пример #35
0
 // SWIG plumbing: extracts the native pointer wrapped by a LinearKernel,
 // mapping a null wrapper to a null HandleRef (duplicate snippet).
 internal static HandleRef getCPtr(LinearKernel obj)
 {
     if (obj == null)
     {
         return new HandleRef(null, IntPtr.Zero);
     }

     return obj.swigCPtr;
 }
Пример #36
0
        /// <summary>
        ///   Clusters the feature vectors by angle: centres the data, projects
        ///   each vector onto the unit sphere, runs angular k-means, and returns
        ///   one Assignment (nearest centre, distance, index) per input vector.
        ///   NOTE(review): this method mutates <paramref name="features"/> in
        ///   place (each entry is replaced by its centred, normalised copy and
        ///   the original is disposed) — callers must not reuse the originals.
        /// </summary>
        /// <param name="features">Feature vectors; all must have the same length.</param>
        /// <param name="numberClusters">Number of clusters to form.</param>
        /// <exception cref="ArgumentNullException">features is null.</exception>
        /// <exception cref="ArgumentException">features is empty or lengths differ.</exception>
        private static IEnumerable <Assignment> AngularCluster(IList <Matrix <double> > features, ulong numberClusters)
        {
            if (features == null)
            {
                throw new ArgumentNullException(nameof(features));
            }

            var size = features.Count;

            if (size == 0)
            {
                throw new ArgumentException("The dataset can't be empty", nameof(features));
            }

            var featureSize = features[0].Size;

            // Every vector must match the length of the first one.
            for (var index = 0; index < size; ++index)
            {
                if (features[index].Size != featureSize)
                {
                    throw new ArgumentException("All feature vectors must have the same length.", nameof(features));
                }
            }

            // find the centroid of feats
            // Each arithmetic op allocates a new native Matrix, so the previous
            // intermediate is disposed at every step to avoid leaking.
            Matrix <double> tmp;
            var             m = Matrix <double> .CreateTemplateParameterizeMatrix(0, 1);

            for (var index = 0; index < size; ++index)
            {
                tmp = m + features[index];
                m.Dispose();
                m = tmp;
            }

            // Divide the accumulated sum by the count to get the mean vector.
            tmp = m;
            m   = tmp / size;
            tmp.Dispose();

            // Now center feats and then project onto the unit sphere.  The reason for projecting
            // onto the unit sphere is so pick_initial_centers() works in a sensible way.
            for (var index = 0; index < size; ++index)
            {
                tmp = features[index] - m;
                features[index].Dispose();
                features[index] = tmp;

                // Skip normalisation for (near-)zero-length vectors.
                var length = Dlib.Length(features[index]);
                if (Math.Abs(length) > double.Epsilon)
                {
                    tmp = features[index] / length;
                    features[index].Dispose();
                    features[index] = tmp;
                }
            }

            // now do angular clustering of the points
            var linearKernel = new LinearKernel <Matrix <double> >(0, 1);
            var tempCenters  = Dlib.PickInitialCenters((int)numberClusters, features, linearKernel, 0.05).ToArray();
            var centers      = Dlib.FindClustersUsingAngularKMeans(features, tempCenters).ToArray();

            // The seed centres are only needed to produce the final centres.
            foreach (var center in tempCenters)
            {
                center.Dispose();
            }
            linearKernel.Dispose();

            // and then report the resulting assignments
            var assignments = new List <Assignment>(size);

            for (var index = 0; index < size; ++index)
            {
                var temp = new Assignment();
                temp.C = Dlib.NearestCenter(centers, features[index]);
                // Distance from the vector to its assigned centre.
                using (var temp2 = features[index] - centers[temp.C])
                    temp.Distance = Dlib.Length(temp2);
                temp.Index = (ulong)index;
                assignments.Add(temp);
            }

            return(assignments);
        }
Пример #37
0
        //const string cudaKernelName = "linearCsrFormatKernel";
       

        /// <summary>
        ///   Sets up the CSR-format linear CUDA kernel wrapper (duplicate
        ///   snippet).
        /// </summary>
        public CuLinearKernel()
        {
            linKernel = new LinearKernel();

            // CUDA function implementing the CSR linear product.
            cudaProductKernelName = "linearCsrFormatKernel";
        }