Example #1
        // Train the face classifier.
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            double[] crossValidationResults;
            int      nFold = 10;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = trainingSet.Train(parameter);


            double[] testResults = testSet.Predict(model);
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.Y.Count;
            Training_result.FontSize   = 14;
            Training_result.FontStyle  = FontStyles.Normal;
            Training_result.Foreground = Brushes.Red;
            Training_result.Background = Brushes.Black;
            index++;
        }
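Not part of the original example: a minimal sketch of how the value returned by EvaluateClassificationProblem could be double-checked by hand, assuming the accuracy is reported as a percentage and that crossValidationResults holds one predicted label per training instance (ManualAccuracy is a hypothetical helper).

        // Hypothetical helper: recompute the accuracy by comparing predicted
        // labels with the ground-truth labels stored in the problem.
        private static double ManualAccuracy(SVMProblem problem, double[] predictedLabels)
        {
            int correct = 0;
            for (int i = 0; i < problem.Y.Count; i++)
            {
                if (predictedLabels[i] == problem.Y[i])
                {
                    correct++;
                }
            }
            return 100.0 * correct / problem.Y.Count; // percentage, like EvaluateClassificationProblem
        }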
Example #2
        private static void Train(string prefix)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(MnistDataPath + prefix + ".txt");

            trainingSet = trainingSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            double[] crossValidationResults;
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);

            SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, MnistDataPath + "model.txt");
            Console.WriteLine("\n\nModel ok!");
        }
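A possible companion to Train (not in the original): load the saved model back and score a held-out file. The "<prefix>-test.txt" file name is an assumption; SVM.LoadModel, Predict and EvaluateClassificationProblem are used the same way as elsewhere in these examples.

        // Sketch only: evaluate the model saved by Train() on a held-out file.
        private static void Test(string prefix)
        {
            SVMProblem testSet = SVMProblemHelper.Load(MnistDataPath + prefix + "-test.txt"); // hypothetical file name
            testSet = testSet.Normalize(SVMNormType.L2);

            SVMModel model = SVM.LoadModel(MnistDataPath + "model.txt");

            double[] testResults = testSet.Predict(model);
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            Console.WriteLine("Test accuracy: " + testAccuracy);
        }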
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            // f_training.Save(@"C:\Users\temp\Desktop\1005f.txt");
            //  trainingSet.Insert(index, f_training.X[0], 2);
            trainingSet.Add(f_training.X[0], 1);
            trainingSet.Save(@"C:\Users\temp\Desktop\flag.txt");
            //   trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            // Console.WriteLine();
            //   SVMNode node = new SVMNode();
            //  node.Index = Convert.ToInt32(o);
            //  node.Value = Convert.ToDouble(f_training.X);
            //  nodes.Add(node);
            //  trainingSet.Add(nodes.ToArray(), 1);
            //  int number = random.Next(0, trainingSet.X.Count);
            //  int trainingsample = Convert.ToInt32(trainingSet.X.Count * 2 / 3);
            //  int testingsample = Convert.ToInt32(trainingSet.X.Count / 3);

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            int        nFold = 10;
            MainWindow main  = new MainWindow();

            double[] crossValidationResults; // output labels
            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = SVM.Train(trainingSet, parameter);

            // SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, @"C:\Users\temp\Desktop\1005.txt");

            double[] testResults = testSet.Predict(model);
            //     Console.WriteLine("");
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            //  Console.WriteLine("testAccuracy:" + testAccuracy);
            //  Console.WriteLine(Convert.ToString(trainingSet.X.Count));
            main.Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.X.Count;
            main.Training_result.FontSize   = 14;
            main.Training_result.FontStyle  = FontStyles.Normal;
            main.Training_result.Foreground = Brushes.Red;
            main.Training_result.Background = Brushes.Black;
            // Console.WriteLine(trainingSet1.Length);
            //  trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            index++;
        }
        public static SVMParameter FindBestHyperparameters(SVMProblem problem, SVMParameter parameter)
        {
            int nFold   = int.Parse(Configuration.Get("nFold"));
            int logTo   = int.Parse(Configuration.Get("logTo"));
            int logFrom = int.Parse(Configuration.Get("logFrom"));

            BlockingCollection <ParameterResult> results = new BlockingCollection <ParameterResult>();
            List <Task> tasks = new List <Task>();

            for (double cLog = logFrom; cLog <= logTo; cLog++)
            {
                double c = Math.Pow(2, cLog);
                tasks.Add(Task.Factory.StartNew(() =>
                {
                    for (double gammaLog = logFrom; gammaLog <= logTo; gammaLog++)
                    {
                        SVMParameter parameterUnderTest = parameter.Clone();
                        parameterUnderTest.C            = c;
                        parameterUnderTest.Gamma        = Math.Pow(2, gammaLog);
                        problem.CrossValidation(parameterUnderTest, nFold, out var crossValidationResults);
                        double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                        results.Add(new ParameterResult()
                        {
                            Accuracy = crossValidationAccuracy, C = parameterUnderTest.C,
                            Gamma    = parameterUnderTest.Gamma
                        });
                    }
                }));
            }
            Task.WaitAll(tasks.ToArray());

            var resultList = results.ToList();

            resultList.Sort();

            ParameterResult bestParameter = HighestScore(resultList);

            SaveToCsv(results, "svmData.txt");
            SVMParameter returnValue = parameter.Clone();

            returnValue.C     = bestParameter.C;
            returnValue.Gamma = bestParameter.Gamma;
            return(returnValue);
        }
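A hypothetical caller (not in the original) showing how FindBestHyperparameters might be used to tune C and Gamma before training; the data path is a placeholder, and the Configuration keys "nFold", "logFrom" and "logTo" are assumed to be set.

        // Sketch: tune C/Gamma with the grid search above, then train with the result.
        public static SVMModel TrainWithBestParameters(string dataPath)
        {
            SVMProblem problem = SVMProblemHelper.Load(dataPath); // placeholder path
            problem = problem.Normalize(SVMNormType.L2);

            SVMParameter baseParameter = new SVMParameter();
            baseParameter.Type   = SVMType.C_SVC;
            baseParameter.Kernel = SVMKernelType.RBF;

            SVMParameter tuned = FindBestHyperparameters(problem, baseParameter);
            return problem.Train(tuned);
        }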
Example #5
        // Create the training model
        public static SVMModel SVM_GenModel(String dataset)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(dataset); // open the training dataset

            trainingSet = trainingSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            double[] crossValidationResults;
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            SVMModel model = trainingSet.Train(parameter);  // train the model

            SVM.SaveModel(model, "model_" + dataset);       // save the model
            return(model);
        }
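Not from the original: a sketch of how the model returned by SVM_GenModel could classify a single, hand-built feature vector via SVM.Predict; the two feature indices and their values are placeholders.

        // Sketch: classify one feature vector with the trained model.
        public static double SVM_PredictOne(SVMModel model)
        {
            SVMNode[] nodes = new SVMNode[2];
            nodes[0] = new SVMNode(1, 0.25); // placeholder feature 1
            nodes[1] = new SVMNode(2, 0.75); // placeholder feature 2
            return SVM.Predict(model, nodes); // returns the predicted class label
        }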
Example #6
        static void Main(string[] args)
        {
            // Load the datasets: in this example the same datasets are used for training and testing, which is not recommended
            SVMProblem trainingSet = SVMProblemHelper.Load(@"Dataset\wine.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"Dataset\wine2.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            // Do cross-validation to check whether this parameter set suits the dataset
            double[] crossValidationResults; // output labels
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model if the parameter set gives a good result on cross-validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, @"Model\wine_model.txt");

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            Console.WriteLine("aaa:" + testResults[0] + "\n");

            /*
             * // Evaluate the test results
             * int[,] confusionMatrix;
             * double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);
             *
             *
             *
             *
             * // Print the results
             * Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
             * Console.WriteLine("\nTest accuracy: " + testAccuracy);
             * Console.WriteLine("\nConfusion matrix:\n");
             *
             * // Print formatted confusion matrix
             * Console.Write(String.Format("{0,6}", ""));
             * for (int i = 0; i < model.Labels.Length; i++)
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             * Console.WriteLine();
             * for (int i = 0; i < confusionMatrix.GetLength(0); i++)
             * {
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             *  for (int j = 0; j < confusionMatrix.GetLength(1); j++)
             *      Console.Write(String.Format("{0,5}", confusionMatrix[i,j]));
             *  Console.WriteLine();
             * }
             *
             * Console.WriteLine("\n\nPress any key to quit...");
             * Console.ReadLine();*/
        }
Example #7
        public static bool trainProblem()
        {
            if (checkExistingDataset())
            {
                SVMProblem                problem       = SVMProblemHelper.Load(Constants.DATA_PATH);
                SVMProblem                randdata      = SVMProblemHelper.Load(Constants.RAND_PATH);
                List <string>             resultsstring = new List <string>();
                List <SVMClass.SVMResult> ResultsList   = new List <SVMClass.SVMResult>();

                double C, gammasq;
                double Cmin = 1, Cmax = 10000, Cstep = 10;
                double gmin = 0.0001, gmax = 1000, gstep = 10;
                bool   satisfied = false;
                while (!satisfied)
                {
                    for (C = Cmin; C <= Cmax; C = C * Cstep)
                    {
                        for (gammasq = gmin; gammasq <= gmax; gammasq = gammasq * gstep)
                        {
                            SVMParameter tempparameter = new SVMParameter();
                            tempparameter.Type   = SVMType.C_SVC;
                            tempparameter.Kernel = SVMKernelType.RBF;
                            tempparameter.C      = C;
                            tempparameter.Gamma  = gammasq;

                            SVMModel tempmodel = SVM.Train(problem, tempparameter);

                            SVMProblem testData = SVMProblemHelper.Load(Constants.RAND_PATH);
                            double[]   results  = testData.Predict(tempmodel);
                            int[,] confusionMatrix;
                            double testAccuracy = testData.EvaluateClassificationProblem(results, tempmodel.Labels, out confusionMatrix);

                            // Do cross-validation to check whether this parameter set suits the dataset
                            double[] crossValidationResults; // output labels
                            int      nFold = 10;
                            problem.CrossValidation(tempparameter, nFold, out crossValidationResults);

                            // Evaluate the cross validation result
                            // If it is not good enough, select the parameter set again
                            double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                            SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                            compiled.C             = C;
                            compiled.gamma         = gammasq;
                            compiled.testAcc       = testAccuracy;
                            compiled.crossValidAcc = crossValidationAccuracy;
                            ResultsList.Add(compiled);
                        }
                    }

                    // Evaluate the test results
                    double maxTestAcc = ResultsList.Max(resultdata => resultdata.testAcc);
                    //int maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                    double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
                    //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                    if (maxTestAcc < 95 || maxValidAcc < 95)
                    {
                        satisfied = false;
                        // Refine the search, but keep the multiplicative steps above 1,
                        // otherwise the for-loops above would never terminate.
                        if (Cstep > 2) Cstep--;
                        if (gstep > 2) gstep--;
                    }
                    else
                    {
                        satisfied = true;

                        List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                        List <SVMClass.SVMResult> topValid   = ResultsList.FindAll(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                        while (topResults.Count > topValid.Count)
                        {
                            // Trim topResults by removing its entry with the lowest cross-validation
                            // accuracy (the index must come from topResults itself, not from ResultsList).
                            topResults.RemoveAt(topResults.FindIndex(resultsdata => resultsdata.crossValidAcc.Equals(topResults.Min(resultdata => resultdata.crossValidAcc))));
                        }

                        double maxC      = topResults.Max(resultdata => resultdata.C);
                        int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));
                        double bestgamma = topResults[maxCIndex].gamma;
                        // maxC or not???
                        //double bestC = topResults[topResults.Count - 2].C; //topResults[maxCIndex].C;
                        //double bestgamma = topResults[topResults.Count - 2].gamma;//topResults[maxCIndex].gamma;
                        Console.WriteLine("Best C: " + maxC + "  Best gammasq: " + bestgamma);
                        Constants.C       = maxC;
                        Constants.gammasq = bestgamma;

                        foreach (SVMClass.SVMResult resultdata in topResults)
                        {
                            Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
                        }
                    }
                }

                SVMParameter parameter = new SVMParameter();
                parameter.Type   = SVMType.C_SVC;
                parameter.Kernel = SVMKernelType.RBF;
                parameter.C      = Constants.C;
                parameter.Gamma  = Constants.gammasq;

                Variables.model = SVM.Train(problem, parameter);
                //File.WriteAllText(Constants.MODEL_PATH, String.Empty);
                //SVM.SaveModel(Variables.model, Constants.MODEL_PATH);
                Console.WriteLine("Trained and saved model.\n");
                //return Variables.model;
                return(true);
            }
            else
            {
                MessageBox.Show("Invalid training data!");
                return(false);
            }
        }
Example #8
        private void testSVM()
        {
            if (!holdCommandListener)
            {
                holdCommandListener = true;
            }
            string        parentpath    = System.AppDomain.CurrentDomain.BaseDirectory;
            string        DATA_PATH     = parentpath + "Datasets\\dataset - Copy (2).txt";
            string        MODEL_PATH    = parentpath + "Model\\testmodel.txt";
            string        NEWDATA_PATH  = parentpath + "Datasets\\testdata.txt";
            string        RESULTS_PATH  = parentpath + "Datasets\\results.txt";
            List <string> resultsstring = new List <string>();

            SVMProblem   testSet       = SVMProblemHelper.Load(NEWDATA_PATH);
            SVMParameter testparameter = new SVMParameter();

            testparameter.Type   = SVMType.C_SVC;
            testparameter.Kernel = SVMKernelType.RBF;
            testparameter.C      = 0.1;   //Constants.C;
            testparameter.Gamma  = 0.001; // Constants.gammasq;

            List <SVMClass.SVMResult> ResultsList = new List <SVMClass.SVMResult>();

            SVMProblem problem = SVMProblemHelper.Load(DATA_PATH);
            double     C       = 0.001;
            double     gammasq = 0.001;

            for (C = 1; C <= 1000; C = C * 10)
            {
                for (gammasq = 0.001; gammasq <= 1000; gammasq = gammasq * 10)
                {
                    SVMParameter parameter = new SVMParameter();
                    parameter.Type   = SVMType.C_SVC;
                    parameter.Kernel = SVMKernelType.RBF;
                    parameter.C      = C;
                    parameter.Gamma  = gammasq;

                    SVMModel model = SVM.Train(problem, parameter);
                    //File.WriteAllText(MODEL_PATH, String.Empty);
                    //SVM.SaveModel(model, MODEL_PATH);
                    //Console.WriteLine("Trained and saved model.\n");

                    //model = SVM.LoadModel(MODEL_PATH);

                    SVMProblem newData = SVMProblemHelper.Load(NEWDATA_PATH);
                    //Console.Write("Predicted Result:\n");
                    double[] results = newData.Predict(model);
                    //Console.Write(results[0]);
                    int[,] confusionMatrix;
                    double testAccuracy = newData.EvaluateClassificationProblem(results, model.Labels, out confusionMatrix);

                    // Do cross-validation to check whether this parameter set suits the dataset
                    double[] crossValidationResults; // output labels
                    int      nFold = 10;
                    problem.CrossValidation(parameter, nFold, out crossValidationResults);

                    // Evaluate the cross validation result
                    // If it is not good enough, select the parameter set again
                    double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);
                    //Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);

                    string temp = "";

                    string resultstring = "Predict accuracy: " + testAccuracy + " C: " + C + " gamma: " + gammasq + " Cross validation accuracy: " + crossValidationAccuracy;
                    resultsstring.Add(resultstring);

                    if (parameter.C == testparameter.C && parameter.Gamma == testparameter.Gamma)
                    {
                        resultsstring.Add("This one is same as separate test.");
                    }

                    foreach (double res in results)
                    {
                        temp += res.ToString() + " ";
                    }
                    resultsstring.Add(temp);

                    SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                    compiled.C             = C;
                    compiled.gamma         = gammasq;
                    compiled.testAcc       = testAccuracy;
                    compiled.crossValidAcc = crossValidationAccuracy;
                    ResultsList.Add(compiled);
                }
            }
            File.WriteAllLines(RESULTS_PATH, resultsstring);


            SVMModel testmodel = SVM.Train(problem, testparameter);

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(testmodel);
            foreach (double result in testResults)
            {
                Console.WriteLine(result);
            }

            // Evaluate the test results

            double maxTestAcc      = ResultsList.Max(resultdata => resultdata.testAcc);
            int    maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            //double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
            //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
            List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            double maxC      = topResults.Max(resultdata => resultdata.C);
            int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));

            double bestC     = topResults[topResults.Count - 2].C;     //topResults[maxCIndex].C;
            double bestgamma = topResults[topResults.Count - 2].gamma; //topResults[maxCIndex].gamma;

            Console.WriteLine("Best C: " + bestC + "  Best gammasq: " + bestgamma);

            foreach (SVMClass.SVMResult resultdata in topResults)
            {
                Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
            }
            //int[,] confusionMatrix;
            //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, testmodel.Labels, out confusionMatrix);
            //Console.WriteLine("\n\nTest accuracy: " + testAccuracy);
        }
Example #9
        public List <PredictionResult> CrossValidate(SAMDataPoint.FeelingModel feelingsmodel, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List <PredictionResult> predictedResults = new List <PredictionResult>();
            //Split into crossvalidation parts
            SVMProblem problems = GetFeatureValues(features, samData).NormalizeFeatureList <double>(normalizationType).CreateCompleteProblem(samData, feelingsmodel);

            //Get correct results
            int[] answers         = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int   progressCounter = 0;

            if (answers.Distinct().Count() <= 1)
            {
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList().ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                Log.LogMessage(ONLY_ONE_CLASS);
                Log.LogMessage("");
                return(predictedResults);
            }
            else if (problems.X.Count == 0)
            {
                Log.LogMessage("Empty problem in " + Name);
                return(null);
            }
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
                double[] guesses = new double[samData.dataPoints.Count];
                //model and predict each nfold
                try
                {
                    problems.CrossValidation(SVMpara, samData.dataPoints.Count, out guesses);
                }
                catch (Exception e)
                {
                    for (int i = 0; i < samData.dataPoints.Count; i++)
                    {
                        guesses[i] = -1;
                    }
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), Array.ConvertAll(guesses, (x => (int)x)).ToList());
                predictedResults.Add(pR);
                progressCounter++;
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
            }

            return(predictedResults);
        }
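For reference (not part of the example): one common way per-class precision, recall and F-score can be read off a confusion matrix, assuming rows index the true labels and columns the predicted labels; the CalculatePrecision/CalculateRecall/CalculateFScore helpers above may use a different convention.

        // Assumed convention: confus[trueLabel, predictedLabel].
        private static void ScoreFromConfusion(double[,] confus, int numberOfLabels)
        {
            for (int i = 0; i < numberOfLabels; i++)
            {
                double rowSum = 0, colSum = 0;
                for (int j = 0; j < numberOfLabels; j++)
                {
                    rowSum += confus[i, j]; // instances whose true label is i
                    colSum += confus[j, i]; // instances predicted as i
                }
                double recall    = rowSum > 0 ? confus[i, i] / rowSum : 0;
                double precision = colSum > 0 ? confus[i, i] / colSum : 0;
                double fscore    = (precision + recall) > 0 ? 2 * precision * recall / (precision + recall) : 0;
                Console.WriteLine("class " + i + ": precision=" + precision + " recall=" + recall + " F=" + fscore);
            }
        }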
Example #10
        private void button2_Click(object sender, EventArgs e)
        {
            SVMProblem trainingSet = new SVMProblem();
            SVMProblem testSet     = trainingSet; // the "test set" aliases the training set, so test accuracy is measured on the training data

            foreach (DataInfo info in mList)
            {
                SVMNode[] node = new SVMNode[2];
                node[0] = new SVMNode(1, info.X / mWidth);
                node[1] = new SVMNode(2, info.Y / mHeight);
                trainingSet.Add(node, info.Group);
            }


            // Normalize the datasets if you want: L2 Norm => x / ||x||
            //trainingSet = trainingSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 4;
            parameter.Coef0  = hScrollBar1.Value;
            parameter.Degree = 3;

            // Do cross-validation to check whether this parameter set suits the dataset
            double[] crossValidationResults; // output labels
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model if the parameter set gives a good result on cross-validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, FILE_MODEL);

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            // Evaluate the test results
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Print the results
            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            Console.Write(String.Format("{0,6}", ""));
            for (int i = 0; i < model.Labels.Length; i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            }
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                {
                    Console.Write(String.Format("{0,5}", confusionMatrix[i, j]));
                }
                Console.WriteLine();
            }

            Pen[] pen = new Pen[4];
            pen[0] = new Pen(Color.Black, 1);
            pen[1] = new Pen(Color.Red, 1);
            pen[2] = new Pen(Color.LightGreen, 1);
            pen[3] = new Pen(Color.Blue, 1);

            Pen[] pen2 = new Pen[4];
            pen2[0] = new Pen(Color.LightGray, 1);
            pen2[1] = new Pen(Color.DarkRed, 1);
            pen2[2] = new Pen(Color.DarkGreen, 1);
            pen2[3] = new Pen(Color.DarkBlue, 1);

            Bitmap canvas = new Bitmap(pictureBox1.ClientSize.Width, pictureBox1.ClientSize.Height);

            using (Graphics g = Graphics.FromImage(canvas))
            {
                for (int i = 0; i < pictureBox1.ClientSize.Width; i++)
                {
                    for (int j = 0; j < pictureBox1.ClientSize.Height; j++)
                    {
                        SVMNode[] node = new SVMNode[2];
                        node[0] = new SVMNode(1, (double)i / (double)mWidth);
                        node[1] = new SVMNode(2, (double)j / (double)mHeight);

                        double result = SVM.Predict(model, node);
                        g.DrawRectangle(pen2[(int)result], i, j, 1, 1);
                    }
                }

                foreach (DataInfo info in mList)
                {
                    g.DrawEllipse(pen[(int)info.Group], (float)info.X - 5, (float)info.Y - 5, 5, 5);
                }
            }

            Bitmap image = new Bitmap(pictureBox1.ClientSize.Width, pictureBox1.ClientSize.Height);

            pictureBox1.BackgroundImage = canvas; // set as the background layer
            pictureBox1.Refresh();
            pictureBox1.CreateGraphics().DrawImage(canvas, 0, 0);
        }