Example #1
        /// <summary>
        /// Trains on 100% of the input data.
        /// </summary>
        /// <param name="trainingSet">Fully populated problem containing every sample and label.</param>
        public void TrainOnAllData(libSVM_ExtendedProblem trainingSet)
        {
            Preferences.Instance.svmWrapper = new libSVM_Extension();
            GuiPreferences.Instance.setLog("Training 100% started");
            Preferences.Instance.svmWrapper.Train(trainingSet, Parameter);
            GuiPreferences.Instance.setLog("Training 100% finished");
        }
Example #2
        /// <summary>
        /// Tests the current model on the full testing set and logs the per-sample predictions.
        /// </summary>
        /// <param name="testingSet">Populated problem containing the testing samples and labels.</param>
        public void TestOnAllData(libSVM_ExtendedProblem testingSet)
        {
            GuiPreferences.Instance.setLog("Testing started");
            Preferences.Instance.svmWrapper.GetAccuracyFromTest(testingSet);
            double accuracy = Preferences.Instance.svmWrapper.output.accuracy;

            GuiPreferences.Instance.setLog("Testing finished");
            GuiPreferences.Instance.setLog("Predicted accuracy from testing set: " + accuracy.ToString());
            for (int i = 0; i < testingSet.samples.Length; i++)
            {
                GuiPreferences.Instance.setLog(i.ToString() + ": " + testingSet.labels[i].ToString() + " => " + Preferences.Instance.svmWrapper.output.getValue(i).ToString());
            }
        }
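The two methods above are meant to be driven from whatever class owns the training pipeline; both go through the shared Preferences.Instance.svmWrapper. A minimal usage sketch follows, assuming a trainer instance and already-populated problems (the names trainer, trainingProblem, testingProblem and the Build* helpers are hypothetical, not part of the original code):

        // hypothetical driver code, not part of the original listing
        libSVM_ExtendedProblem trainingProblem = BuildTrainingProblem();   // hypothetical helper
        libSVM_ExtendedProblem testingProblem  = BuildTestingProblem();    // hypothetical helper

        trainer.TrainOnAllData(trainingProblem);   // creates and trains Preferences.Instance.svmWrapper
        trainer.TestOnAllData(testingProblem);     // logs overall accuracy and per-sample predictions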
Example #3
        /*public IntPtr getModel()
         * {
         *  if (_model_ptr == IntPtr.Zero) throw new Exception("model neither loaded nor trained");
         * return _model_ptr;
         * }
         *
         * public void  setModel(IntPtr model)
         * {
         *  _model_ptr = model;
         * }
         *
         * /// <summary>
         * /// Reload libSVM model
         * /// added externally and is a duplication of the internal libsvm function
         * /// </summary>
         * /// <param name="filename">name of model file</param>
         * public new void Reload(string filename)
         * {
         *  Dispose();
         *
         *  _model_ptr = svm_load_model(filename);
         *
         *  if (_model_ptr == IntPtr.Zero) throw new Exception("bad model file");
         * }
         *
         * /// <summary>
         * ///  Load model from file
         * /// added externally and is a duplication of the internal libsvm function
         * /// </summary>
         * /// <param name="filename">Model file</param>
         * /// <returns>libSVM</returns>
         * public new static libSVM_Extension Load(string filename)
         * {
         *  libSVM_Extension svm = new libSVM_Extension();
         *
         *  svm.Reload(filename);
         *
         *  return svm;
         * }*/

        // private void ptr_to_array(int len, ref double[] array, IntPtr array_ptr)
        /// <summary>
        /// Runs nr_fold cross validation through the native svm_cross_validation call and stores the
        /// per-sample predictions in output. For EPSILON_SVR/NU_SVR it computes the mean squared error
        /// and squared correlation coefficient; for classification it computes the accuracy.
        /// </summary>
        public void CrossValidate(libSVM_ExtendedProblem problem, libSVM_Parameter parameter, int nr_fold)
        {
            int    i;
            int    total_correct = 0;
            double total_error = 0;
            double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;

            double[] predictedLabels = new double[problem.labels.Length];

            IntPtr _target_ptr    = array_to_ptr(predictedLabels);
            IntPtr _problem_ptr   = libSVM_Problem_to_svm_problem_ptr(problem);
            IntPtr _parameter_ptr = libSVM_Parameter_to_svm_parameter_ptr(parameter);

            svm_cross_validation(_problem_ptr, _parameter_ptr, nr_fold, _target_ptr);
            ptr_to_array(problem.labels.Length, ref predictedLabels, _target_ptr);
            output.setDoubleArray(predictedLabels);
            predictedLabels = null;

            if (parameter.svm_type == SVM_TYPE.EPSILON_SVR || parameter.svm_type == SVM_TYPE.NU_SVR)
            {
                for (i = 0; i < problem.labels.Length; i++)
                {
                    double y = problem.labels[i];
                    double v = output.getValue(i);
                    total_error += (v - y) * (v - y);
                    sumv        += v;
                    sumy        += y;
                    sumvv       += v * v;
                    sumyy       += y * y;
                    sumvy       += v * y;
                }
                output.meanSquaredError = total_error / problem.labels.Length;
                output.squaredCorrelationCoefficient = ((problem.labels.Length * sumvy - sumv * sumy) * (problem.labels.Length * sumvy - sumv * sumy)) /
                                                       ((problem.labels.Length * sumvv - sumv * sumv) * (problem.labels.Length * sumyy - sumy * sumy));
            }
            else
            {
                for (i = 0; i < problem.labels.Length; i++)
                {
                    if (output.getValue(i) == problem.labels[i])
                    {
                        ++total_correct;
                    }
                }
                //Console.Write("Cross Validation Accuracy = %g%%\n", 100.0 * total_correct / problem.labels.Length);
                output.accuracy = 100.0 * total_correct / problem.labels.Length;
            }
        }
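CrossValidate marshals the managed problem and parameters into native libsvm structures, calls svm_cross_validation, and copies the per-sample predictions back into output. A hedged usage sketch; problem (libSVM_ExtendedProblem) and parameter (libSVM_Parameter) are assumed to be populated elsewhere:

        // minimal sketch; `problem` and `parameter` are assumptions, not shown here
        libSVM_Extension svm = new libSVM_Extension();
        svm.CrossValidate(problem, parameter, 10);   // 10-fold cross validation

        // classification types fill output.accuracy; EPSILON_SVR / NU_SVR instead fill
        // output.meanSquaredError and output.squaredCorrelationCoefficient
        GuiPreferences.Instance.setLog("CV accuracy: " + svm.output.accuracy.ToString());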
Example #4
        /// <summary>
        /// Trains on the 90% split and tests on the remaining 10%, logging each prediction
        /// against its original TR index.
        /// </summary>
        /// <param name="test_indices">Original TR indices of the testing samples, used only for logging.</param>
        public void TrainTestSplit(libSVM_ExtendedProblem trainingSet, libSVM_ExtendedProblem testingSet, int[] test_indices)
        {
            Preferences.Instance.svmWrapper = new libSVM_Extension();
            GuiPreferences.Instance.setLog("Training 90% started");
            Preferences.Instance.svmWrapper.Train(trainingSet, Parameter);
            GuiPreferences.Instance.setLog("Training 90% finished");
            GuiPreferences.Instance.setLog("Testing 10% started");
            Preferences.Instance.svmWrapper.GetAccuracyFromTest(testingSet);
            double accuracy = Preferences.Instance.svmWrapper.output.accuracy;

            GuiPreferences.Instance.setLog("Testing 10% finished");
            GuiPreferences.Instance.setLog("Train90/Test10 Split, Predicted accuracy from testing set: " + accuracy.ToString());
            for (int i = 0; i < testingSet.samples.Length; i++)
            {
                GuiPreferences.Instance.setLog((GuiPreferences.Instance.FromTR + test_indices[i]).ToString() + ": " + testingSet.labels[i].ToString() + " => " + Preferences.Instance.svmWrapper.output.getValue(i).ToString());
            }
            GuiPreferences.Instance.setLog("TODO: NEED PROGRESSIVE ANALYSIS OF TRS");
        }
Example #5
        /// <summary>
        /// Logs prediction accuracy per TR position, aggregated over the protocol events (rest blocks excluded).
        /// </summary>
        public void printTRStatistics(libSVM_ExtendedProblem problem)
        {
            GuiPreferences.Instance.setLog("Prediction only at TR:");
            string line = "";

            List<IntIntStr> protocolWORests = Preferences.Instance.prot.getHDREvents();

            //possibly Preferences.Instance.maxKvectorsToWaitBeforeClassification is not the right value here and we still need to use GLM.K somehow (it is no longer static, so an instance needs to be passed)
            int[,] stats = new int[Preferences.Instance.maxKvectorsToWaitBeforeClassification, 2];
            //for each full event
            for (int i = 0; i < protocolWORests.Count() - 1; i++)
            {
                //check, for each predicted key, whether the predicted label matches the original label
                foreach (int key in Preferences.Instance.svmWrapper.output.getKeys())
                {
                    if ((key <= protocolWORests[i].var2) && (key >= protocolWORests[i].var1))
                    {
                        //get TR position within the event
                        int TR = key - protocolWORests[i].var1;
                        //only TRs inside the stats window are tallied; see the note above about maxKvectorsToWaitBeforeClassification
                        if (TR < Preferences.Instance.maxKvectorsToWaitBeforeClassification)
                        {
                            //TRs with correct predictions
                            if (Preferences.Instance.svmWrapper.output.getValue(key) == problem.labels[key])
                            {
                                stats[TR, 1]++;
                            }
                            //total count of TRs (correct and wrong)
                            stats[TR, 0]++;
                        }
                    }
                }
            }

            //see the note above about maxKvectorsToWaitBeforeClassification vs. GLM.K
            //print tr statistics
            for (int i = 0; i < Preferences.Instance.maxKvectorsToWaitBeforeClassification - 1; i++)
            {
                int percent = stats[i, 0] > 0 ? (int)((double)stats[i, 1] / stats[i, 0] * 100) : 0;
                line += (i + 1).ToString() + ": " + percent.ToString() + " ";
            }
            GuiPreferences.Instance.setLog(line);
        }
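For reference, a self-contained sketch of the per-TR accuracy line built above, separated from the Preferences and protocol singletons (the method name and parameters are illustrative only): stats[tr, 0] holds the number of predictions made at TR offset tr inside an event, and stats[tr, 1] the number of those that were correct.

        // illustrative stand-alone version of the tally printed by printTRStatistics
        private static string PerTrAccuracyLine(int[,] stats, int maxTr)
        {
            var line = new System.Text.StringBuilder();
            for (int tr = 0; tr < maxTr; tr++)
            {
                int total   = stats[tr, 0];
                int percent = total > 0 ? (int)(100.0 * stats[tr, 1] / total) : 0;
                line.Append((tr + 1).ToString() + ": " + percent.ToString() + " ");
            }
            return line.ToString();
        }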
Example #6
        /// <summary>
        /// Trains with N-fold cross validation using the mode selected by type:
        /// best (TrainAuto), bestTotal (TrainAutoBestTotal) or cfv (plain cross validation).
        /// </summary>
        public void TrainFolds(int N, libSVM_ExtendedProblem problem, trainingType type)
        {
            folds = N;
            Preferences.Instance.svmWrapper = new libSVM_Extension();
            //svm.TrainAuto(10, Problem, Parameter, libSVM_Grid.C(), libSVM_Grid.gamma(), libSVM_Grid.p(), libSVM_Grid.nu(), libSVM_Grid.coef0(), libSVM_Grid.degree());


            //used without grids. only folds
            libSVM_Grid grid = new libSVM_Grid();

            GuiPreferences.Instance.setLog("Training + Cross Validation Started");
            double accuracy = -1;

            if (type == trainingType.best)
            {
                accuracy = Preferences.Instance.svmWrapper.TrainAuto(N, problem, Parameter, grid, null, null, null, null, null);
            }
            else if (type == trainingType.bestTotal)
            {
                Preferences.Instance.svmWrapper.TrainAutoBestTotal(N, problem, Parameter, grid, null, null, null, null, null);
                accuracy = Preferences.Instance.svmWrapper.output.accuracy;
            }
            else if (type == trainingType.cfv)
            {
                Preferences.Instance.svmWrapper.CrossValidate(problem, Parameter, N);
                accuracy = Preferences.Instance.svmWrapper.output.accuracy;
            }



            //used with default grids + only folds
            //double accuracy = Preferences.Instance.svm.TrainAuto(N, problem, Parameter);//, grid, null, null, null, null, null);

            /*
             * //used to compare against libsvm with grids and folds. when grids have the default minimum and max = min +1 and step = 2
             * libSVM_Grid gridc = libSVM_Grid.C();
             * gridc.max = gridc.min + 1;
             * gridc.step = 2;
             * libSVM_Grid gridgamma= libSVM_Grid.gamma();
             * gridgamma.max = gridgamma.min + 1;
             * gridgamma.step = 2;
             * libSVM_Grid gridp = libSVM_Grid.p();
             * gridp.max = gridp.min + 1;
             * gridp.step = 2;
             * libSVM_Grid gridnu = libSVM_Grid.nu();
             * gridnu.max = gridnu.min + 1;
             * gridnu.step = 2;
             * libSVM_Grid gridcoef = libSVM_Grid.coef0();
             * gridcoef.max = gridcoef.min + 1;
             * gridcoef.step = 2;
             * libSVM_Grid griddegree = libSVM_Grid.degree();
             * griddegree.max = griddegree.min + 1;
             * griddegree.step = 2;
             * double accuracy = Preferences.Instance.svm.TrainAuto(N, problem, Parameter, gridc, gridgamma, gridp, gridnu, gridcoef, griddegree);
             */

            GuiPreferences.Instance.setLog("Training + Cross Validation Finished");
            //calculate tr statistics
            if (type == trainingType.bestTotal)
            {
                int i = 1;
                foreach (int key in Preferences.Instance.svmWrapper.output.getKeys())
                {
                    GuiPreferences.Instance.setLog(i.ToString() + ". " + key.ToString() +
                                                   ": " + problem.labels[key].ToString() +
                                                   " => " + Preferences.Instance.svmWrapper.output.getValue(key).ToString());
                    i++;
                }
                GuiPreferences.Instance.setLog("Best Model Predicted accuracy from " + folds.ToString() + " cross fold validation: " + accuracy.ToString());

                printTRStatistics(problem);
            }
            else
            {
                GuiPreferences.Instance.setLog("Predicted accuracy from " + folds.ToString() + " cross fold validation: " + accuracy.ToString());
            }
        }
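The trainingType argument selects which wrapper call actually runs. A hedged sketch of the three modes; trainer stands for the owning instance and problem for a populated libSVM_ExtendedProblem, both assumptions here:

        // hypothetical calls; only the enum values used in the method above are shown
        trainer.TrainFolds(10, problem, trainingType.cfv);        // plain 10-fold cross validation
        trainer.TrainFolds(10, problem, trainingType.best);       // TrainAuto with a default grid, folds only
        trainer.TrainFolds(10, problem, trainingType.bestTotal);  // TrainAutoBestTotal plus per-TR statistics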