static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        int num = 1000;
        double dist = 1.0;
        double width = 2.1;
        double C = 1.0;

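        // Two 2-D Gaussian clouds, shifted by -dist (class -1) and +dist (class +1).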
        DoubleMatrix offs = ones(2, num).mmul(dist);
        DoubleMatrix x = randn(2, num).sub(offs);
        DoubleMatrix y = randn(2, num).add(offs);
        DoubleMatrix traindata_real = concatHorizontally(x, y);

        DoubleMatrix m = randn(2, num).sub(offs);
        DoubleMatrix n = randn(2, num).add(offs);
        DoubleMatrix testdata_real = concatHorizontally(m, n);

        DoubleMatrix o = ones(1,num);
        DoubleMatrix trainlab = concatHorizontally(o.neg(), o);
        DoubleMatrix testlab = concatHorizontally(o.neg(), o);

        RealFeatures feats_train = new RealFeatures(traindata_real);
        RealFeatures feats_test = new RealFeatures(testdata_real);
        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
        Labels labels = new Labels(trainlab);
        LibSVM svm = new LibSVM(C, kernel, labels);
        svm.train();

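        // Classify the test points; the sign of each output is compared against the true labels.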
        DoubleMatrix @out = svm.apply(feats_test).get_labels();

        Console.WriteLine("Mean Error = " + signum(@out).ne(testlab).mean());
        modshogun.exit_shogun();
    }
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

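        // Load the dense train/test feature matrices and the two-class training labels.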
        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        Labels labels = new Labels(trainlab);

        LibSVM svm = new LibSVM(C, kernel, labels);
        svm.set_epsilon(epsilon);
        svm.train();

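        // Re-initialize the kernel with the test features so apply() produces predictions for the test set.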
        kernel.init(feats_train, feats_test);
        DoubleMatrix out_labels = svm.apply().get_labels();
        Console.WriteLine(out_labels.ToString());

        modshogun.exit_shogun();
    }
        public double[] ShapeClassification()
        {
            //Get single Test Instance from CSV file
            CSVLoader loader = new CSVLoader();

            loader.setSource(new java.io.File("GetOrientation.csv"));
            Instances testinstances = loader.getDataSet();

            testinstances.setClassIndex(testinstances.numAttributes() - 1);
            Instance sekarang = testinstances.lastInstance();

            //Load the saved orientation and shape models
            LibSVM modelOri   = (LibSVM)SerializationHelper.read("OrientationModel.model");
            LibSVM modelShape = (LibSVM)SerializationHelper.read("ShapeModel.model");

            //Classify actual test instance
            double valueShape = modelShape.classifyInstance(sekarang);
            double valueOri   = modelOri.classifyInstance(sekarang);

            Console.WriteLine(valueOri);
            Console.WriteLine(valueShape);
            double[] value = new double[] { valueShape, valueOri };

            return(value);
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        BinaryLabels labels = new BinaryLabels(trainlab);

        LibSVM svm = new LibSVM(C, kernel, labels);
        svm.set_epsilon(epsilon);
        svm.train();

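        // Swap in the test features before apply() so the trained SVM is evaluated on the test data.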
        kernel.init(feats_train, feats_test);
        double[] out_labels = LabelsFactory.to_binary(svm.apply()).get_labels();

        foreach(double item in out_labels) {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
	public static void Main() {
		modshogun.init_shogun_with_defaults();

		int num = 1000;
		double dist = 1.0;
		double width = 2.1;
		double C = 1.0;

		Random RandomNumber = new Random();

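		// Generate uniformly distributed points offset by -dist (class -1) and +dist (class +1).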
		double[,] traindata_real = new double[2, num * 2];
		for (int i = 0; i < num; i ++) {
			traindata_real[0, i] = RandomNumber.NextDouble() - dist;
			traindata_real[0, i + num] = RandomNumber.NextDouble() + dist;
			traindata_real[1, i] = RandomNumber.NextDouble() - dist;
			traindata_real[1, i + num] = RandomNumber.NextDouble() + dist;
		}

		double[,] testdata_real = new double[2, num * 2];
		for (int i = 0; i < num; i ++) {
			testdata_real[0, i] = RandomNumber.NextDouble() - dist;
			testdata_real[0, i + num] = RandomNumber.NextDouble() + dist;
			testdata_real[1, i] = RandomNumber.NextDouble() - dist;
			testdata_real[1, i + num] = RandomNumber.NextDouble() + dist;
		}

		double[] trainlab = new double[num * 2];
		for (int i = 0; i < num; i ++) {
			trainlab[i] = -1;
			trainlab[i + num] = 1;
		}

		double[] testlab = new double[num * 2];
		for (int i = 0; i < num; i ++) {
			testlab[i] = -1;
			testlab[i + num] = 1;
		}

		RealFeatures feats_train = new RealFeatures(traindata_real);
		RealFeatures feats_test = new RealFeatures(testdata_real);
		GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);
		BinaryLabels labels = new BinaryLabels(trainlab);
		LibSVM svm = new LibSVM(C, kernel, labels);
		svm.train();

		double[] result = LabelsFactory.to_binary(svm.apply(feats_test)).get_labels();

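		// Count test points predicted on the wrong side of the decision boundary.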
		int err_num = 0;
		for (int i = 0; i < num; i++) {
			if (result[i] > 0) {
				err_num += 1;
			}
			if (result[i+num] < 0) {
				err_num += 1;
			}
		}

		double testerr = err_num / (2.0 * num);
		Console.WriteLine(testerr);
		modshogun.exit_shogun();
	}
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double width = 2.1;
        double epsilon = 1e-5;
        double C = 1.0;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

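        // Training side: a precomputed polynomial kernel matrix (wrapped as a CustomKernel) is combined with a second polynomial kernel on the raw features.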
        CombinedKernel kernel = new CombinedKernel();
        CombinedFeatures feats_train = new CombinedFeatures();

        RealFeatures tfeats = new RealFeatures(traindata_real);
        PolyKernel tkernel = new PolyKernel(10,3);
        tkernel.init(tfeats, tfeats);
        DoubleMatrix K = tkernel.get_kernel_matrix();
        kernel.append_kernel(new CustomKernel(K));

        RealFeatures subkfeats_train = new RealFeatures(traindata_real);
        feats_train.append_feature_obj(subkfeats_train);
        PolyKernel subkernel = new PolyKernel(10,2);
        kernel.append_kernel(subkernel);

        kernel.init(feats_train, feats_train);

        Labels labels = new Labels(trainlab);

        LibSVM svm = new LibSVM(C, kernel, labels);
        svm.train();

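        // Prediction side: rebuild the combined kernel, this time between the training and test data.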
        CombinedKernel kernel_pred = new CombinedKernel();
        CombinedFeatures feats_pred = new CombinedFeatures();

        RealFeatures pfeats = new RealFeatures(testdata_real);
        PolyKernel tkernel_pred = new PolyKernel(10,3);
        tkernel_pred.init(tfeats, pfeats);
        DoubleMatrix KK = tkernel_pred.get_kernel_matrix();
        kernel_pred.append_kernel(new CustomKernel(KK));

        RealFeatures subkfeats_test = new RealFeatures(testdata_real);
        feats_pred.append_feature_obj(subkfeats_test);
        PolyKernel subkernel_pred = new PolyKernel(10,2);
        kernel_pred.append_kernel(subkernel_pred);

        kernel_pred.init(feats_train, feats_pred);

        svm.set_kernel(kernel_pred);
        svm.apply();
        DoubleMatrix km_train = kernel.get_kernel_matrix();
        Console.WriteLine(km_train.ToString());

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double width   = 2.1;
        double epsilon = 1e-5;
        double C       = 1.0;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real  = Load.load_numbers("../data/fm_test_real.dat");

        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();

        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();

        feats_test.set_feature_matrix(testdata_real);

        GaussianKernel kernel = new GaussianKernel(feats_train, feats_train, width);

        BinaryLabels labels = new BinaryLabels(trainlab);

        LibSVM svm = new LibSVM(C, kernel, labels);

        svm.set_epsilon(epsilon);
        svm.train();

        kernel.init(feats_train, feats_test);
        double[] out_labels = BinaryLabels.obtain_from_generic(svm.apply()).get_labels();

        foreach (double item in out_labels)
        {
            Console.Write(item);
        }

        modshogun.exit_shogun();
    }
        public double MarginClassification()
        {
            //Get single Test Instance from CSV file
            CSVLoader loader = new CSVLoader();

            loader.setSource(new java.io.File("GetMargin.csv"));
            Instances testinstances = loader.getDataSet();

            testinstances.setClassIndex(testinstances.numAttributes() - 1);
            Instance sekarang = testinstances.lastInstance();

            //Load the saved margin model
            LibSVM svm = (LibSVM)SerializationHelper.read("MarginModel.model");

            //Classify actual test instance
            double clsValue = svm.classifyInstance(sekarang);

            Console.WriteLine(clsValue);

            return(clsValue);
        }
        public Outcome MachineLearning(Instances instances)
        {
            Outcome outcome = new Outcome();

            //load model
            string projectPath = AppDomain.CurrentDomain.BaseDirectory;
            LibSVM cls         = (LibSVM)SerializationHelper.read(projectPath + "/MachineLearning/svmModel.model");

            //predict outcome
            instances.setClassIndex(19);
            double[] values = cls.distributionForInstance(instances.instance(0));

            outcome.Success = values[0] >= values[1];

            return(outcome);
        }
        public static void testSVMPredictOnlyPackage()
        {
            string data_folder = @"D:\My_Dropbox\UProjects\OriBrainLearner\TestData\";
            string data_file   = data_folder + "svmguide1.t";
            string range_file  = data_folder + "svmguide1.range";
            string svm_file    = data_folder + "svmguide1.scale.model";

            GuiPreferences.Instance.setLog("THIS PACKAGE ONLY PREDICTS!");

            //Read in SVM model and feature range data
            LibSVM svm = new LibSVM(); svm.LoadModel(svm_file);
            double target_min, target_max; double[] features_min, features_max;

            LibSVM.ReadRange(range_file, out target_min, out target_max, out features_min, out features_max);

            //Read in Ground Truth data
            List <int>          labels_g = new List <int>();
            List <LibSVMNode[]> samples = new List <LibSVMNode[]>();

            using (StreamReader sr = new StreamReader(data_file))
            {
                string line; LibSVMNode[] sample; double label;
                while ((line = sr.ReadLine()) != null)
                {
                    LibSVM.ToLibSVMFormat(line, out sample, out label);
                    samples.Add(sample);
                    labels_g.Add((int)label);
                }
            }

            //Convert to Array and List format
            List <double[]>       samples_array = new List <double[]>();
            List <List <double> > samples_list = new List <List <double> >();

            for (int i = 0; i < samples.Count; i++)
            {
                LibSVMNode[]  sample      = samples[i];
                List <double> sample_list = new List <double>();
                for (int k = 0, l = 0; k < sample.Length; k++)
                {
                    int index = sample[k].index; if (index == -1)
                    {
                        break;
                    }
                    double value = sample[k].value;
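                    // Pad feature indices missing from the sparse sample with NaN so the dense positions line up.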
                    for (int m = l; m < index - 1; m++)
                    {
                        sample_list.Add(double.NaN);
                    }
                    sample_list.Add(value); l = index;
                }
                samples_list.Add(sample_list);
                samples_array.Add(sample_list.ToArray());
            }

            int count; double[] probs, dec_values;

            //Scale the original data
            LibSVMNode[][] samples_scaled = new LibSVMNode[samples.Count][];
            GuiPreferences.Instance.setLog("Test scale data from SVMNode[] to SVMNode[]");
            for (int i = 0; i < samples.Count; i++)
            {
                samples_scaled[i] = LibSVM.ScaleData(samples[i], target_min, target_max, features_min, features_max);
            }
            GuiPreferences.Instance.setLog("Test Predict functions. ");
            count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test scale data from double[] to SVMNode[]");
            for (int i = 0; i < samples_array.Count; i++)
            {
                samples_scaled[i] = LibSVM.ScaleData(samples_array[i], target_min, target_max, features_min, features_max);
            }
            GuiPreferences.Instance.setLog("Test Predict functions. ");
            count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test scale data from list<double> to SVMNode[]");
            for (int i = 0; i < samples_list.Count; i++)
            {
                samples_scaled[i] = LibSVM.ScaleData(samples_list[i], target_min, target_max, features_min, features_max);
            }
            GuiPreferences.Instance.setLog("Test Predict functions. ");
            count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test scale data from double[][] to SVMNode[][]");
            samples_scaled = LibSVM.ScaleData(samples_array.ToArray(), target_min, target_max, features_min, features_max);
            GuiPreferences.Instance.setLog("Test Predict functions. "); count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test scale data from List<List<double>> to SVMNode[][]");
            samples_scaled = LibSVM.ScaleData(samples_list, target_min, target_max, features_min, features_max);
            GuiPreferences.Instance.setLog("Test Predict functions. "); count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            //Convert scaled data to double[] and list<double> format
            List <double[]>       samples_scaled_array = new List <double[]>();
            List <List <double> > samples_scaled_list = new List <List <double> >();

            for (int i = 0; i < samples_scaled.Length; i++)
            {
                LibSVMNode[]  sample      = samples_scaled[i];
                List <double> sample_list = new List <double>();
                for (int k = 0, l = 0; k < sample.Length; k++)
                {
                    int index = sample[k].index; if (index == -1)
                    {
                        break;
                    }
                    double value = sample[k].value;
                    for (int m = l; m < index - 1; m++)
                    {
                        sample_list.Add(double.NaN);
                    }
                    sample_list.Add(value); l = index;
                }
                samples_scaled_list.Add(sample_list);
                samples_scaled_array.Add(sample_list.ToArray());
            }

            GuiPreferences.Instance.setLog("Test Predict(SVMNode[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.Predict(samples_scaled[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test Predict(List<double>) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_list.Count; i++)
            {
                double label = svm.Predict(samples_scaled_list[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test Predict(double[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_array.Count; i++)
            {
                double label = svm.Predict(samples_scaled_array[i]);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());

            GuiPreferences.Instance.setLog("Test PredictProb(SVMNode[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.PredictProb(samples_scaled[i], out probs);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test PredictProb(List<double>) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_list.Count; i++)
            {
                double label = svm.PredictProb(samples_scaled_list[i], out probs);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test PredictProb(double[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_array.Count; i++)
            {
                double label = svm.PredictProb(samples_scaled_array[i], out probs);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test PredictValues(SVMNode[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled.GetLength(0); i++)
            {
                double label = svm.PredictValues(samples_scaled[i], out dec_values);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test PredictValues(List<double>) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_list.Count; i++)
            {
                double label = svm.PredictValues(samples_scaled_list[i], out dec_values);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());


            GuiPreferences.Instance.setLog("Test PredictValues(double[]) functions. "); count = 0;
            for (int i = 0; i < samples_scaled_array.Count; i++)
            {
                double label = svm.PredictValues(samples_scaled_array[i], out dec_values);
                if ((int)label == labels_g[i])
                {
                    count++;
                }
            }
            GuiPreferences.Instance.setLog("Accuracy = " + ((double)count / labels_g.Count).ToString());
        }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();

        int    num   = 1000;
        double dist  = 1.0;
        double width = 2.1;
        double C     = 1.0;

        Random RandomNumber = new Random();

        double[,] traindata_real = new double[2, num * 2];
        for (int i = 0; i < num; i++)
        {
            traindata_real[0, i]       = RandomNumber.NextDouble() - dist;
            traindata_real[0, i + num] = RandomNumber.NextDouble() + dist;
            traindata_real[1, i]       = RandomNumber.NextDouble() - dist;
            traindata_real[1, i + num] = RandomNumber.NextDouble() + dist;
        }

        double[,] testdata_real = new double[2, num * 2];
        for (int i = 0; i < num; i++)
        {
            testdata_real[0, i]       = RandomNumber.NextDouble() - dist;
            testdata_real[0, i + num] = RandomNumber.NextDouble() + dist;
            testdata_real[1, i]       = RandomNumber.NextDouble() - dist;
            testdata_real[1, i + num] = RandomNumber.NextDouble() + dist;
        }

        double[] trainlab = new double[num * 2];
        for (int i = 0; i < num; i++)
        {
            trainlab[i]       = -1;
            trainlab[i + num] = 1;
        }

        double[] testlab = new double[num * 2];
        for (int i = 0; i < num; i++)
        {
            testlab[i]       = -1;
            testlab[i + num] = 1;
        }

        RealFeatures   feats_train = new RealFeatures(traindata_real);
        RealFeatures   feats_test  = new RealFeatures(testdata_real);
        GaussianKernel kernel      = new GaussianKernel(feats_train, feats_train, width);
        BinaryLabels   labels      = new BinaryLabels(trainlab);
        LibSVM         svm         = new LibSVM(C, kernel, labels);

        svm.train();

        double[] result = BinaryLabels.obtain_from_generic(svm.apply(feats_test)).get_labels();

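        // Count test points predicted with the wrong sign.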
        int err_num = 0;

        for (int i = 0; i < num; i++)
        {
            if (result[i] > 0)
            {
                err_num += 1;
            }
            if (result[i + num] < 0)
            {
                err_num += 1;
            }
        }

        double testerr = err_num / (2.0 * num);

        Console.WriteLine(testerr);
        modshogun.exit_shogun();
    }
 internal static HandleRef getCPtr(LibSVM obj)
 {
     return((obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr);
 }