Example No. 1
        // For training the face model
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            double[] crossValidationResults;
            int      nFold = 10;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = trainingSet.Train(parameter);


            double[] testResults = testSet.Predict(model);
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.Y.Count;
            Training_result.FontSize   = 14;
            Training_result.FontStyle  = FontStyles.Normal;
            Training_result.Foreground = Brushes.Red;
            Training_result.Background = Brushes.Black;
            index++;
        }
Example No. 2
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            // f_training.Save(@"C:\Users\temp\Desktop\1005f.txt");
            //  trainingSet.Insert(index, f_training.X[0], 2);
            trainingSet.Add(f_training.X[0], 1);
            trainingSet.Save(@"C:\Users\temp\Desktop\flag.txt");
            //   trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            // Console.WriteLine();
            //   SVMNode node = new SVMNode();
            //  node.Index = Convert.ToInt32(o);
            //  node.Value = Convert.ToDouble(f_training.X);
            //  nodes.Add(node);
            //  trainingSet.Add(nodes.ToArray(), 1);
            //  int number = randon.Next(0, trainingSet.X.Count);
            //  int trainingsample = Convert.ToInt32(trainingSet.X.Count * 2 / 3);
            //  int testingsample = Convert.ToInt32(trainingSet.X.Count / 3);

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            int        nFold = 10;
            MainWindow main  = new MainWindow();

            double[] crossValidationResults; // output labels
            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = SVM.Train(trainingSet, parameter);

            // SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, @"C:\Users\temp\Desktop\1005.txt");

            double[] testResults = testSet.Predict(model);
            //     Console.WriteLine("");
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            //  Console.WriteLine("testAccuracy:" + testAccuracy);
            //  Console.WriteLine(Convert.ToString(trainingSet.X.Count));
            main.Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.X.Count;
            main.Training_result.FontSize   = 14;
            main.Training_result.FontStyle  = FontStyles.Normal;
            main.Training_result.Foreground = Brushes.Red;
            main.Training_result.Background = Brushes.Black;
            // Console.WriteLine(trainingSet1.Length);
            //  trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            index++;
        }
Example No. 3
        private static void TestOne(string prefix)
        {
            SVMModel   model   = SVM.LoadModel(MnistDataPath + "model.txt");
            SVMProblem testSet = SVMProblemHelper.Load(MnistDataPath + prefix + ".txt");

            testSet = testSet.Normalize(SVMNormType.L2);
            double[] testResults = testSet.Predict(model);
            Console.WriteLine("\nTest result: " + testResults[0].ToString());
        }
Example No. 4
        private static void Test(string prefix)
        {
            SVMModel   model   = SVM.LoadModel(MnistDataPath + "model.txt");
            SVMProblem testSet = SVMProblemHelper.Load(MnistDataPath + prefix + ".txt");

            testSet = testSet.Normalize(SVMNormType.L2);
            double[] testResults = testSet.Predict(model);
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            Console.WriteLine("\nTest accuracy: " + testAccuracy);
        }
Example No. 5
        // Run classification
        public static int SVM_Classification(SVMModel md)
        {
            int result = 0;

            SVMProblem testSet = SVMProblemHelper.Load("tmp.txt");    // Load the recognition dataset

            testSet = testSet.Normalize(SVMNormType.L2);
            double[] testResults = testSet.Predict(md);

            result = (int)testResults[0];

            return(result);
        }
Example No. 6
        public void ClassifyGesture(string pathModel, string pathTest, string pathResult)
        { 
            testProblem = SVMProblemHelper.Load(pathTest);
            model = SVM.LoadModel(pathModel);

            double[] testResults = testProblem.Predict(model);

            using (StreamWriter file = new StreamWriter(pathResult, true))
            {
                foreach (double element in testResults)
                {
                    file.WriteLine(element.ToString());
                }
            }
        }//end ClassifyGesture 
Example No. 7
        public static int predictSVM()
        {
            double[] results = { 99 };
            //Variables.model = getExistingModel();
            if (!Variables.newdata.Contains("null"))
            {
                SVMProblem newData = SVMProblemHelper.Load(Constants.NEWDATA_PATH);

                Console.Write("Predicted command:\n");
                results = newData.Predict(Variables.model);

                /*foreach (var item in results)
                 * {
                 *  Console.WriteLine(item.ToString());
                 * }*/
                Console.WriteLine(results[0]);
            }
            else
            {
                Console.WriteLine("invalid new data");
            }
            return((int)results[0]);
        }
Example No. 8
        private void listBox2_SelectedIndexChanged(object sender, EventArgs e)
        {
            File.Create(parameter["hog_test_file"]).Dispose();
            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
            if (listBox2.SelectedIndex < 0)
            {
                return;
            }
            else
            {
                try
                {
                    List <Mat> ListImage   = new List <Mat>();
                    Mat        img_display = new Mat();
                    string     dir         = listBox2.SelectedItem.ToString();
                    img_display = Cv2.ImRead(dir);

                    pictureBox2.Image = img_display.ToBitmap();
                    // Process predict
                    sw.Start();
                    Cv2.Resize(img_display, img_display, sizes2);
                    ListImage.Add(img_display);
                    FeatureExtraction.compute_hog_test(ListImage, sizes2, 100, parameter["hog_test_file"]);
                    SVMProblem Test   = SVMProblemHelper.Load(parameter["hog_test_file"]);
                    double[]   Target = Test.Predict(model_load);
                    sw.Stop();
                    label13.Text = MAPPING[(int)(Target[0])].ToString();
                    label10.Text = sw.ElapsedMilliseconds.ToString() + " (ms)";
                    string time = sw.ElapsedMilliseconds.ToString();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
        }
Example No. 9
        public static SvmResult TrainAndTestSvm(SVMProblem trainingSet, SVMProblem testSet)
        {
            // find the ratio of malignant:benign cases:
            double mbTrainRatio = trainingSet.Y.Where(x => x == 0).ToArray().Length * 1F / trainingSet.Y.Count;

            Console.WriteLine($"MB TRAIN RATIO: {mbTrainRatio}");
            double mbTestRatio = testSet.Y.Where(x => x == 0).ToArray().Length * 1F / testSet.Y.Count;

            Console.WriteLine($"MB TEST RATIO: {mbTestRatio}");

            SVMParameter parameter = new SVMParameter
            {
                Type         = SVMType.C_SVC,
                Kernel       = SVMKernelType.RBF,
                C            = double.Parse(Configuration.Get("C")),
                Gamma        = double.Parse(Configuration.Get("Gamma")),
                Probability  = true,
                WeightLabels = new[] { 0, 1 },
                Weights      = new[] { (1 - mbTrainRatio) / mbTrainRatio, 1 }
            };

            //parameter = TrainingHelper.FindBestHyperparameters(trainingSet, parameter);
            Console.WriteLine($"Found best parameters: c={parameter.C},gamma={parameter.Gamma}");

            SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, Configuration.Get("ModelLocation"));

            // The following evaluation has code from:
            // https://csharp.hotexamples.com/examples/LibSVMsharp/SVMParameter/-/php-svmparameter-class-examples.html

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);


            // Evaluate the test results
            double testAccuracy =
                testSet.EvaluateClassificationProblem(testResults, model.Labels, out var confusionMatrix);

            // Print the results
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");


            // Print formatted confusion matrix

            Console.Write($"{"",6}");
            for (int i = 0; i < model.Labels.Length; i++)
            {
                Console.Write($"{"(" + model.Labels[i] + ")",5}");
            }
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write($"{"(" + model.Labels[i] + ")",5}");
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                {
                    Console.Write($"{confusionMatrix[i, j],5}");
                }
                Console.WriteLine();
            }

            double sensitivity = confusionMatrix[0, 0] * 1.0 /
                                 (confusionMatrix[0, 1] + confusionMatrix[0, 0]);
            double specificity = confusionMatrix[1, 1] * 1.0 /
                                 (confusionMatrix[1, 1] + confusionMatrix[1, 0]);



            double[] results = testSet.PredictProbability(model, out var probabilities);
            for (int i = 0; i < probabilities.Count; i++)
            {
                // ReSharper disable once CompareOfFloatsByEqualityOperator
                String x = results[i] != testSet.Y[i] ? "MISPREDICTION" : "";
                Console.WriteLine($"{results[i]} | {probabilities[i][0]} | {probabilities[i][1]} | {testSet.Y[i]} | {x}");
            }

            return(new SvmResult()
            {
                C = parameter.C, Gamma = parameter.Gamma, TestAccuracy = testAccuracy, Sensitivity = sensitivity,
                Specificity = specificity
            });
        }
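The class weight passed for label 0 above, (1 - mbTrainRatio) / mbTrainRatio, and the sensitivity/specificity read off the confusion matrix are simple arithmetic and can be checked by hand. The snippet below is a stand-alone illustration of those formulas with made-up counts; it is not part of the example and does not touch LibSVMsharp.

        using System;

        class WeightAndMetricsDemo
        {
            static void Main()
            {
                // Hypothetical training set: 150 malignant (label 0) rows out of 500.
                double mbTrainRatio = 150.0 / 500.0;                       // 0.30
                double weightLabel0 = (1 - mbTrainRatio) / mbTrainRatio;   // ~2.33; label 1 keeps weight 1

                // Hypothetical 2x2 confusion matrix, rows = actual, columns = predicted:
                //            pred 0   pred 1
                // actual 0     40        5
                // actual 1      3       72
                int[,] cm = { { 40, 5 }, { 3, 72 } };

                double sensitivity = cm[0, 0] * 1.0 / (cm[0, 0] + cm[0, 1]);   // 40 / 45 ≈ 0.889
                double specificity = cm[1, 1] * 1.0 / (cm[1, 1] + cm[1, 0]);   // 72 / 75 = 0.960

                Console.WriteLine($"weight(0)={weightLabel0:F2}  sensitivity={sensitivity:F3}  specificity={specificity:F3}");
            }
        }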
Example No. 10
        private static void Main(string[] args)
        {
            var           report        = new MakeReport();
            List <string> consoleOutput = new List <string>();

            consoleOutput.Add($"{System.DateTime.Now} - Loading");

            var train = ConvertHelper.CSVToDataConstrutor(File.ReadAllLines("train.csv"), ',');
            var test  = ConvertHelper.CSVToDataConstrutor(File.ReadAllLines("test.csv"), ',');

            var fs = new DataProcessing(train, test, "SalePrice", "Id");

            fs.GetFeature("SalePrice").Transform((value) => Math.Log(1 + value));

            fs.SetInactive(new List <string> {
                "Id", "PoolArea", "LandContour", "PoolQC", "LotConfig", "Utilities", "Alley",
                "Street", "BsmtHalfBath", "LowQualFinSF", "3SsnPorch", "LandSlope", "YrSold", "Condition1", "BsmtFinType2", "RoofMatl",
                "MiscVal", "MiscFeature", "BsmtFinSF2", "Condition2", "BldgType", "ScreenPorch", "MoSold", "Functional"
            });

            fs.SetInactive(new List <string> {
                "BsmtCond", "BsmtUnfSF", "GarageCars", "PavedDrive", "SaleType", "SaleCondition",
                "BsmtExposure", "GarageCond", "Fence", "Heating", "BsmtQual",
            });

            //fs.SetInactive(new List<string> { "EnclosedPorch" });

            fs.SetTrainRowsInactiveByIds(new List <string> {
                "1299", "186", "198", "636", "1032", "1183", "1153", "1174"
            });
            //fs.SetTrainRowsInactiveByIds(new List<string> { "130", "188", "199", "268", "305", "497", "524", "530", "692", "770", "884", "1025", "1231", "1371", "1387", "1424", "1441" });
            consoleOutput.Add($"{System.DateTime.Now} - Transforming model");

            fs.GetFeature("LotFrontage").ReplaceValues("NA", "0");
            fs.GetFeature("MasVnrArea").ReplaceValues("NA", "-1");
            fs.GetFeature("GarageYrBlt").ReplaceValues("NA", "-1");

            fs.Features.Where(f => !f.IsNumeric() && !f.IsClass).All(n => { n.TransformEnumToInt(); return(true); });
            //fs.Features.Select(f => new OutlierLine { FeatureName = f.Name, Outliers = string.Join(", ", f.GetOutliers()) }).ToList().ForEach(o => consoleOutput.Add($"{o.FeatureName} => {o.Outliers}"));
            //TODO: This somehow did not help and even made things worse; a dedicated function should be added instead of computing it here, and the GarageYrBlt variable should be included
            //var oldestYearBuild = fs.GetFeature("YearBuilt").Values.Min(v => double.Parse(v.NewValue));
            //fs.GetFeature("YearBuilt").Transform((value) => (double.Parse(value) - oldestYearBuild).ToString());

            //fs.Features.ToList().ForEach(f => report.AddScatterPlot(f.Name, f.Values.Where(v => !v.IsTest).Select(v => new Point() { X = double.Parse(v.NewValue), Y = double.Parse(fs.GetClassForTrainId(v.RowId)) }).ToList()));
            //report.CreatePDF("Report.pdf");

            //var oldestYearRemodAdd = fs.GetFeature("YearRemodAdd").Values.Min(v => double.Parse(v.NewValue));
            //fs.GetFeature("YearRemodAdd").Transform((value) => (double.Parse(value) - oldestYearRemodAdd).ToString());

            //OUTLIERS
            //foreach (var feature in fs.Features.Where(f => f.IsActive && !f.IsClass && !f.IsId && new List<string> { "EnclosedPorch", "BsmtFinSF2", "GarageYrBlt", "OpenPorchSF", "ScreenPorch", "MasVnrArea", "LotArea", "Condition1", "MSSubClass", "MiscVal" }.IndexOf(f.Name) < 0))
            //{
            //    feature.MarkOutliers();
            //}
            //fs.PrintOutliersAmount();
            consoleOutput.Add($"{System.DateTime.Now} - Gathering data");

            var dataModel = fs.GetDataModel();

            File.WriteAllLines("output-train.csv", ConvertHelper.DataSetToCSV(dataModel.HeadersWithClass, dataModel.OutputTrain, ","));
            File.WriteAllLines("output-test.csv", ConvertHelper.DataSetToCSV(dataModel.HeadersWithoutClass, dataModel.OutputTest, ","));

            consoleOutput.Add($"{System.DateTime.Now} - Preparing SVM problem");

            //if (false)
            {
                //SVM.SetPrintStringFunction(null);
                SVMProblem testSet     = SVMLoadHelper.Load(dataModel.OutputTest, isWithClass: false);
                SVMProblem trainingSet = SVMLoadHelper.Load(dataModel.OutputTrain, isWithClass: true);

                SVMParameter parameter = new SVMParameter()
                {
                    Type   = SVMType.EPSILON_SVR,
                    Kernel = SVMKernelType.RBF,
                    //C = 10,
                    Gamma     = 0.01,
                    CacheSize = 2000,
                    Eps       = 0.1,// * Math.Pow(10, i);
                    //parameter.Probability = true;
                };

                //List<Tuple<string, double>> rmseAfterRemoveFeature = new List<Tuple<string, double>>();
                //var activeFeatures = fs.Features.Where(f => f.IsActive || !f.IsClass);
                //foreach (var feature in activeFeatures)
                //{
                //    feature.IsActive = false;
                //    var outputTrain = fs.GetTransfomedTrain();
                //    SVMProblem trainingSet = SVMLoadHelper.Load(outputTrain, isWithClass: true);

                //    consoleOutput.Add("=====================");
                //    //SVMModel model = trainingSet.Train(parameter);
                //    //double[] trainingResults = trainingSet.Predict(model);
                //    double[] crossvalidationResults;
                //    trainingSet.CrossValidation(parameter, 3, out crossvalidationResults);
                //    //consoleOutput.Add(parameter.GetOutput());
                //    var rmselog = EvaulationHelper.RMSELog(trainingSet.Y.ToArray(), crossvalidationResults);
                //    rmseAfterRemoveFeature.Add(new Tuple<string, double>(feature.Name, rmselog));
                //    consoleOutput.Add($"{System.DateTime.Now} - {feature.Name} - {rmselog}");
                //    feature.IsActive = true;
                //}

                //consoleOutput.Add($"{System.DateTime.Now} - {bestToRemove.Item1} - {bestToRemove.Item2}");
                consoleOutput.Add("=====================");
                consoleOutput.Add("Ordered");
                //consoleOutput.AddRange(rmseAfterRemoveFeature.OrderBy(t => t.Item2).Select(t => $"{t.Item1} - {t.Item2}"));
                SVMModel model        = trainingSet.Train(parameter);
                double[] trainResults = trainingSet.Predict(model);
                double[] testResults  = testSet.Predict(model);

                //double meanSquaredErr = testSet.EvaluateRegressionProblem(testResults, out correlationCoef);

                trainingSet.Y
                .Select((y, index) => Math.Abs(y - trainResults[index]))
                .Select((v, index) => new { index, v })
                .OrderByDescending(v => v.v)
                .Take(15)
                .ToList()
                .ForEach(e => consoleOutput.Add($"{e.index}:{e.v}"));

                var rmselog = EvaulationHelper.RMSELog(trainingSet.Y.ToArray(), trainResults);
                consoleOutput.Add($"{System.DateTime.Now} - {rmselog}");

                File.WriteAllLines("submission_fs.csv", ConvertHelper.ResultToCSV("Id,SalePrice", testResults.Select(v => Math.Exp(v) - 1).ToArray(), dataModel.TestIds));
                consoleOutput.Add($"{System.DateTime.Now} - Finished");
                SVM.SaveModel(model, "model.txt");
            }

            //report.CreatePDF("Report.pdf");
            consoleOutput.ForEach(s => System.Console.WriteLine(s));
            File.WriteAllLines("consoleOutput.txt", consoleOutput);

            System.Console.ReadLine();
        }
Example No. 11
        public int SVM_face_recognition()
        {
            SVMProblem face_data = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\Face_feature.txt");

            face_data = face_data.Normalize(SVMNormType.L2);


            // Using the LibSVM package, which provides an API to calculate the probability
            face_data.PredictProbability(face_recognition_model, out prolist);

            var ok = prolist.ToArray();
            var v  = ok[0];
            // we have 13 people

            int    maxconfidenceindex = 0;
            double maxconfidence      = v[maxconfidenceindex];
            double threshold          = 0.25;

            for (int i = 0; i < v.Count(); i++)
            {
                if (v[i] > maxconfidence)
                {
                    maxconfidenceindex = i;
                    maxconfidence      = v[i];
                }
            }
            if (threshold < maxconfidence)
            {
                f1 = v[0];
                f2 = v[1];
                f3 = v[2];
                f4 = v[3];
                f5 = v[4];

                /*
                 *    f6 = v[5];
                 *    f7 = v[6];
                 *    f8 = v[7];
                 *    f9 = v[8];
                 *    f10 = v[9];
                 *    f11 = v[10];
                 *    f12 = v[11];
                 *    f13 = v[12];
                 */
                double[] faceresult = face_data.Predict(face_recognition_model);
                facename = Convert.ToInt16(faceresult[0]);
                //  facename =facemodel.Labels[maxconfidenceindex];
                faceshow++;
            }

            int labelnum = face_recognition_model.Labels[maxconfidenceindex];

            if (threshold > maxconfidence)
            {
                // Console.WriteLine("Unknown");
                facename        = 0;
                display.Content = "Unknown";
                facefail++;
            }



            return(facename);
        }
Example No. 12
        private void btnTrain_Click(object sender, EventArgs e)
        {
            System.Diagnostics.Stopwatch time = new System.Diagnostics.Stopwatch();
            parameter = load_json_file(parameter_file);
            try
            {
                File.Create(parameter["path_model"] + textBox3.Text + ".txt").Dispose();
                File.Create(parameter["result_file"]).Dispose();
                if (textBox3.Text == "")
                {
                    MessageBox.Show("Enter a model name");
                }
                //if(SHOWRESULT.ContainsKey(txtModelName.Text))
                //{
                //    MessageBox.Show("Model name already exits");
                //}
                else
                {
                    time.Start();
                    #region Create model file
                    // Create SVM parameters
                    SVMProblem   FileTrain = SVMProblemHelper.Load(parameter["hog_train_file"]);
                    SVMParameter param     = new SVMParameter();
                    param.Type = SVMType.C_SVC;
                    if (parameter["kernel_svm"] == "RBF")
                    {
                        param.Kernel = SVMKernelType.RBF;
                    }
                    if (parameter["kernel_svm"] == "Linear")
                    {
                        param.Kernel = SVMKernelType.LINEAR;
                    }
                    if (parameter["kernel_svm"] == "Poly")
                    {
                        param.Kernel = SVMKernelType.POLY;
                    }
                    if (parameter["kernel_svm"] == "Sigmoid")
                    {
                        param.Kernel = SVMKernelType.SIGMOID;
                    }
                    // param.C = Convert.ToDouble(parameter["c"]);
                    param.C      = double.Parse(parameter["c"], CultureInfo.InvariantCulture);
                    param.P      = double.Parse(parameter["p"], CultureInfo.InvariantCulture);
                    param.Gamma  = double.Parse(parameter["gamma"], CultureInfo.InvariantCulture);
                    param.Degree = Convert.ToInt16(parameter["degree"]);
                    param.Nu     = double.Parse(parameter["nu"], CultureInfo.InvariantCulture);
                    param.Coef0  = double.Parse(parameter["coef0"], CultureInfo.InvariantCulture);
                    param.Eps    = double.Parse(parameter["eps"], CultureInfo.InvariantCulture);
                    //Train model
                    model = LibSVMsharp.SVM.Train(FileTrain, param);
                    LibSVMsharp.SVM.SaveModel(model, parameter["path_model"] + textBox3.Text + ".txt");

                    time.Stop();
                    double train_time = time.ElapsedMilliseconds;
                    #endregion

                    #region Validation data
                    SVMProblem   Validation        = SVMProblemHelper.Load(parameter["hog_val_file"]);
                    double[]     Target_validation = Validation.Predict(model);
                    StreamWriter sw = new StreamWriter(parameter["result_file"], true, Encoding.UTF8);
                    for (int i = 0; i < Target_validation.Length; i++)
                    {
                        string lines = Target_validation[i].ToString();
                        sw.WriteLine(lines);
                    }
                    sw.Close();
                    Accuracy = SVMHelper.EvaluateClassificationProblem(Validation, Target_validation);
                    Accuracy = Math.Round(Accuracy, 3);


                    // show result training
                    textBox4.Text = (train_time / 1000).ToString();
                    textBox5.Text = Accuracy.ToString();
                    MessageBox.Show("Training successful");

                    #endregion
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
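The chain of if statements above maps the "kernel_svm" string from the JSON parameter file onto an SVMKernelType. As a design note, the same mapping can be expressed as a lookup table; the sketch below is only an illustration under the assumption that the types live in the LibSVMsharp namespace (as used elsewhere on this page), uses the same four kernel names, and falls back to RBF for anything unrecognized (the original code simply leaves the kernel unset in that case).

        using System.Collections.Generic;
        using LibSVMsharp;

        static class KernelMap
        {
            // "kernel_svm" strings from the parameter file -> LibSVMsharp kernel types.
            private static readonly Dictionary<string, SVMKernelType> Kernels =
                new Dictionary<string, SVMKernelType>
                {
                    { "RBF",     SVMKernelType.RBF },
                    { "Linear",  SVMKernelType.LINEAR },
                    { "Poly",    SVMKernelType.POLY },
                    { "Sigmoid", SVMKernelType.SIGMOID }
                };

            public static SVMKernelType Resolve(string name)
            {
                // Unknown names fall back to RBF; adjust if a different default is preferred.
                return Kernels.TryGetValue(name, out SVMKernelType kernel) ? kernel : SVMKernelType.RBF;
            }
        }

        // Usage inside btnTrain_Click would then collapse to a single line:
        // param.Kernel = KernelMap.Resolve(parameter["kernel_svm"]);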
Example No. 13
        // activity classification
        public void SVM_Classification()
        {
            testSet1 = SVMProblemHelper.Load(@"Dataset\ADLfall_test1.txt");

            testSet1 = testSet1.Normalize(SVMNormType.L2);

            float sum;

            if (testSet1.Length != 0)
            {
                try
                {
                    //var resut = model.Predict(testSet1.X[testSet1.Length - 1]);
                    //  p = Convert.ToInt16(resut);
                    //predict the result using model, return result
                    var result = testSet1.Predict(activity_model);
                    p = Convert.ToInt16(result[0]);
                    //put the result into enqueue
                    myq.Enqueue(p);

                    switch (p)
                    {
                    case 1:
                        q++;

                        break;

                    case 2:
                        w++;

                        break;

                    case 3:
                        e++;

                        break;

                    case 4:
                        r++;

                        break;
                    }
                }
                catch
                {
                }
                // if the collected data is larger than 30
                if (myq.Count > 30)
                {
                    //  dequeue the old one
                    myq.TryDequeue(out p);
                    switch (p)
                    {
                    case 1:
                        q--;

                        break;

                    case 2:
                        w--;

                        break;

                    case 3:
                        e--;

                        break;

                    case 4:
                        r--;

                        break;
                    }
                    // proportional
                    sum = q + w + e + r;

                    //   activity.Content = ("Sit down:" + sit_down + "\n" + "Walking" + walkig + "\n" + "Standing" + standing + "\n" + "Fall event" + fallevent);
                    activity.Content = ("Sit down: " + Math.Round(e / sum, 2) * 100 + "%" + "\n" + "Walking: " + Math.Round(q / sum, 2) * 100 + "%" + "\n" + "Standing: " + Math.Round(w / sum, 2) * 100 + "%" + "\n" + "Fall event: " + Math.Round(r / sum, 2) * 100 + "%");
                    //  activity.Content = ("Sit down:" + Math.Round(h / sum, 2) + "\n" + "Walking" + Math.Round(w / sum, 2) + "\n" + "Standing" + Math.Round(q / sum, 2) + "\n" + "Fall event" + Math.Round(r / sum, 2));
                    if (e / sum > 0.5)
                    {
                        label.Content = ("You have sat down"); label.Foreground = Brushes.Red;
                    }
                    else if (q / sum > 0.5)
                    {
                        label.Content = "You are walking"; label.Foreground = Brushes.Red;
                    }
                    else if (w / sum > 0.5)
                    {
                        label.Content = "You are standing"; label.Foreground = Brushes.Red;
                    }
                    else if (r / sum > 0.5)
                    {
                        label.Content = "You fell down"; label.Foreground = Brushes.Red;
                    }

                    activity.FontSize   = 20;
                    activity.FontStyle  = FontStyles.Normal;
                    activity.Foreground = Brushes.Red;
                    activity.Background = Brushes.Black;
                }
            }
        }
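The queue bookkeeping above (enqueue the newest label, keep per-class counters q, w, e and r, and dequeue once more than 30 samples are held) is a sliding-window vote. Below is a minimal, self-contained sketch of the same idea, independent of the SVM code and assuming only integer class labels; it is an illustration, not the author's implementation.

        using System.Collections.Generic;

        class SlidingWindowVote
        {
            private readonly Queue<int> window = new Queue<int>();
            private readonly Dictionary<int, int> counts = new Dictionary<int, int>();
            private readonly int capacity;

            public SlidingWindowVote(int capacity) { this.capacity = capacity; }

            // Add the newest prediction; once the window is full, drop the oldest one.
            public void Add(int label)
            {
                window.Enqueue(label);
                counts[label] = counts.TryGetValue(label, out int c) ? c + 1 : 1;
                if (window.Count > capacity)
                {
                    int oldest = window.Dequeue();
                    counts[oldest]--;
                }
            }

            // Fraction of the window occupied by the given label (e.g. > 0.5 means a majority).
            public double Proportion(int label)
            {
                if (window.Count == 0) return 0;
                return (counts.TryGetValue(label, out int c) ? c : 0) / (double)window.Count;
            }
        }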
Example No. 14
        static void Main(string[] args)
        {
            // Load the training and test datasets
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\ADLfall_train.txt");
            //    SVMProblem testSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\ADLfall_test.txt");
            SVMProblem testSet1 = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\ADLfall_test1.txt");

            // SVMProblem testSet1 = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\result.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            //   testSet = testSet.Normalize(SVMNormType.L2);
            testSet1 = testSet1.Normalize(SVMNormType.L2);
            // Select the parameter set

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 32768.0;
            parameter.Gamma  = 8.0;


            // Do cross validation to check this parameter set is correct for the dataset or not
            double[] crossValidationResults; // output labels
            int      nFold = 5;
            //  trainingSet1.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            //  double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model, If your parameter set gives good result on cross validation
            //   SVMModel model = trainingSet.Train(parameter);


            // Save the model
            //   SVM.SaveModel(model, @"Model\activity_recognition.txt");
            SVMModel model = SVM.LoadModel(@"Model\activity_recognition.txt");

            int    p, q, w, e, r, ok = 0;
            double sum;

            q = 0;
            w = 0;
            e = 0;
            r = 0;
            // Predict the instances in the test set
            double[] testResults = testSet1.Predict(model);

            while (ok < testSet1.Length)
            {
                var result = model.Predict(testSet1.X[ok]);
                //    Console.WriteLine("result: " + result);
                p = Convert.ToInt16(result);
                switch (p)

                {
                case 1:
                    q++;
                    break;

                case 2:
                    w++;
                    break;

                case 3:
                    e++;
                    break;

                case 4:
                    r++;
                    break;
                }

                ok++;
            }
            sum = q + w + e + r;


            Console.WriteLine("result:" + Math.Round(q / sum, 2) + "," + Math.Round(w / sum, 2) + "," + Math.Round(e / sum, 2) + "," + Math.Round(r / sum, 2));
            // Evaluate the test results

            int[,] confusionMatrix;
            double testAccuracy = testSet1.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Print the results
            //  Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            Console.Write(String.Format("{0,6}", ""));
            for (int i = 0; i < model.Labels.Length; i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            }
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                {
                    Console.Write(String.Format("{0,5}", confusionMatrix[i, j]));
                }
                Console.WriteLine();
            }

            Console.WriteLine("\n\nPress any key to quit...");
            Console.ReadLine();
        }
Example No. 15
        static void Main(string[] args)
        {
            SVMProblem testSet = SVMProblemHelper.Load(@"Dataset\wine.txt"); // Same as the training set
            SVMModel   model   = SVM.LoadModel(@"Model\wine_model.txt");

            Console.WriteLine("Feature count in one instance: " + model.SV[0].Length + "\n\n");

            // Test 1: Predict instances with SVMProblem's Predict extension method.

            sw.Start();

            double[] target = testSet.Predict(model);

            sw.Stop();
            double elapsedTimeInTest1 = (double)sw.ElapsedMilliseconds / (double)testSet.Length;

            Console.WriteLine("> Test 1: \nPredict instances with SVMProblem's Predict extension method.\n");
            Console.WriteLine("\tAverage elapsed time of one prediction: " + elapsedTimeInTest1 + " ms\n");

            // Test 2: Predict instances with the RapidPredictor class, which is an explicit implementation of the method used in Test 1.

            using (RapidPredictor predictor = new RapidPredictor(model)) // It needs to be Disposed
            {
                sw.Start();

                target = new double[testSet.Length];
                for (int i = 0; i < testSet.Length; i++)
                {
                    target[i] = predictor.Predict(testSet.X[i]);
                }

                sw.Stop();
            }
            double elapsedTimeInTest2 = (double)sw.ElapsedMilliseconds / (double)testSet.Length;

            Console.WriteLine("> Test 2: \nPredict instances with the RapidPredictor class, which is an explicit implementation of the method used in Test 1.\n");
            Console.WriteLine("\tAverage elapsed time of one prediction: " + elapsedTimeInTest2 + " ms\n");

            // Test 3: Predict instances with standard SVM.Predict method or SVMNode[]'s predict extension method.

            sw.Start();

            target = new double[testSet.Length];
            for (int i = 0; i < testSet.Length; i++)
            {
                target[i] = SVM.Predict(model, testSet.X[i]);
            }

            sw.Stop();
            double elapsedTimeInTest3 = (double)sw.ElapsedMilliseconds / (double)testSet.Length;

            Console.WriteLine("> Test 3: \nPredict instances with standard SVM.Predict method or SVMNode[]'s Predict extension method.\n");
            Console.WriteLine("\tAverage elapsed time of one prediction: " + elapsedTimeInTest3 + " ms\n");

            // Print the results
            Console.WriteLine("\nExplanation:\n");
            Console.WriteLine(
                "In the standard SVM.Predict method, the SVMModel object is allocated and deallocated every time the method is called. " +
                "The SVMNode[]'s Predict extension method also calls SVM.Predict directly. " +
                "However, in SVMProblem's Predict extension method the model is allocated once and its pointer is reused to predict all instances, " +
                "as implemented in the RapidPredictor class. You can take or modify this class for use in your applications " +
                "if you have performance concerns. " +
                "I am not suggesting that SVMProblem's Predict extension method be used in real-time, because the model is still allocated " +
                "in every method call.");

            Console.WriteLine("\n\nPress any key to quit...");
            Console.ReadLine();
        }
Example No. 16
        public static bool trainProblem()
        {
            if (checkExistingDataset())
            {
                SVMProblem                problem       = SVMProblemHelper.Load(Constants.DATA_PATH);
                SVMProblem                randdata      = SVMProblemHelper.Load(Constants.RAND_PATH);
                List <string>             resultsstring = new List <string>();
                List <SVMClass.SVMResult> ResultsList   = new List <SVMClass.SVMResult>();

                double C, gammasq;
                double Cmin = 1, Cmax = 10000, Cstep = 10;
                double gmin = 0.0001, gmax = 1000, gstep = 10;
                bool   satisfied = false;
                while (!satisfied)
                {
                    for (C = Cmin; C <= Cmax; C = C * Cstep)
                    {
                        for (gammasq = gmin; gammasq <= gmax; gammasq = gammasq * gstep)
                        {
                            SVMParameter tempparameter = new SVMParameter();
                            tempparameter.Type   = SVMType.C_SVC;
                            tempparameter.Kernel = SVMKernelType.RBF;
                            tempparameter.C      = C;
                            tempparameter.Gamma  = gammasq;

                            SVMModel tempmodel = SVM.Train(problem, tempparameter);

                            SVMProblem testData = SVMProblemHelper.Load(Constants.RAND_PATH);
                            double[]   results  = testData.Predict(tempmodel);
                            int[,] confusionMatrix;
                            double testAccuracy = testData.EvaluateClassificationProblem(results, tempmodel.Labels, out confusionMatrix);

                            // Do cross validation to check this parameter set is correct for the dataset or not
                            double[] crossValidationResults; // output labels
                            int      nFold = 10;
                            problem.CrossValidation(tempparameter, nFold, out crossValidationResults);

                            // Evaluate the cross validation result
                            // If it is not good enough, select the parameter set again
                            double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                            SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                            compiled.C             = C;
                            compiled.gamma         = gammasq;
                            compiled.testAcc       = testAccuracy;
                            compiled.crossValidAcc = crossValidationAccuracy;
                            ResultsList.Add(compiled);
                        }
                    }

                    // Evaluate the test results
                    double maxTestAcc = ResultsList.Max(resultdata => resultdata.testAcc);
                    //int maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                    double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
                    //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                    if (maxTestAcc < 95 || maxValidAcc < 95)
                    {
                        satisfied = false;
                        Cstep--;
                        gstep--;
                    }
                    else
                    {
                        satisfied = true;

                        List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                        List <SVMClass.SVMResult> topValid   = ResultsList.FindAll(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                        while (topResults.Count > topValid.Count)
                        {
                            topResults.RemoveAt(ResultsList.FindIndex(resultsdata => resultsdata.crossValidAcc.Equals(ResultsList.Min(resultdata => resultdata.crossValidAcc))));
                        }

                        double maxC      = topResults.Max(resultdata => resultdata.C);
                        int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));
                        double bestgamma = topResults[maxCIndex].gamma;
                        // maxC or not???
                        //double bestC = topResults[topResults.Count - 2].C; //topResults[maxCIndex].C;
                        //double bestgamma = topResults[topResults.Count - 2].gamma;//topResults[maxCIndex].gamma;
                        Console.WriteLine("Best C: " + maxC + "  Best gammasq: " + bestgamma);
                        Constants.C       = maxC;
                        Constants.gammasq = bestgamma;

                        foreach (SVMClass.SVMResult resultdata in topResults)
                        {
                            Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
                        }
                    }
                }

                SVMParameter parameter = new SVMParameter();
                parameter.Type   = SVMType.C_SVC;
                parameter.Kernel = SVMKernelType.RBF;
                parameter.C      = Constants.C;
                parameter.Gamma  = Constants.gammasq;

                Variables.model = SVM.Train(problem, parameter);
                //File.WriteAllText(Constants.MODEL_PATH, String.Empty);
                //SVM.SaveModel(Variables.model, Constants.MODEL_PATH);
                Console.WriteLine("Trained and saved model.\n");
                //return Variables.model;
                return(true);
            }
            else
            {
                MessageBox.Show("Invalid training data!");
                return(false);
            }
        }
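Both this method and the testSVM method below sweep C and gamma on a multiplicative (log-scale) grid by multiplying the loop variable by 10 each pass. A small helper that enumerates such a grid up front makes the bounds easier to tweak; the sketch below is an illustration rather than an exact replacement for the loops above (fractional starting points can drift slightly due to floating-point rounding, just as in the original loops).

        using System.Collections.Generic;

        static class GridSearch
        {
            // Multiplicative sequence: start, start*step, start*step^2, ..., up to max (inclusive).
            public static IEnumerable<double> LogRange(double start, double max, double step)
            {
                for (double v = start; v <= max; v *= step)
                {
                    yield return v;
                }
            }
        }

        // Example: the same (C, gamma) pairs as the nested loops above.
        // foreach (double c in GridSearch.LogRange(Cmin, Cmax, Cstep))
        //     foreach (double g in GridSearch.LogRange(gmin, gmax, gstep))
        //     {
        //         /* build an SVMParameter with C = c, Gamma = g, then train and evaluate */
        //     }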
Example No. 17
        private void testSVM()
        {
            if (!holdCommandListener)
            {
                holdCommandListener = true;
            }
            string        parentpath    = System.AppDomain.CurrentDomain.BaseDirectory;
            string        DATA_PATH     = parentpath + "Datasets\\dataset - Copy (2).txt";
            string        MODEL_PATH    = parentpath + "Model\\testmodel.txt";
            string        NEWDATA_PATH  = parentpath + "Datasets\\testdata.txt";
            string        RESULTS_PATH  = parentpath + "Datasets\\results.txt";
            List <string> resultsstring = new List <string>();

            SVMProblem   testSet       = SVMProblemHelper.Load(NEWDATA_PATH);
            SVMParameter testparameter = new SVMParameter();

            testparameter.Type   = SVMType.C_SVC;
            testparameter.Kernel = SVMKernelType.RBF;
            testparameter.C      = 0.1;   //Constants.C;
            testparameter.Gamma  = 0.001; // Constants.gammasq;

            List <SVMClass.SVMResult> ResultsList = new List <SVMClass.SVMResult>();

            SVMProblem problem = SVMProblemHelper.Load(DATA_PATH);
            double     C       = 0.001;
            double     gammasq = 0.001;

            for (C = 1; C <= 1000; C = C * 10)
            {
                for (gammasq = 0.001; gammasq <= 1000; gammasq = gammasq * 10)
                {
                    SVMParameter parameter = new SVMParameter();
                    parameter.Type   = SVMType.C_SVC;
                    parameter.Kernel = SVMKernelType.RBF;
                    parameter.C      = C;
                    parameter.Gamma  = gammasq;

                    SVMModel model = SVM.Train(problem, parameter);
                    //File.WriteAllText(MODEL_PATH, String.Empty);
                    //SVM.SaveModel(model, MODEL_PATH);
                    //Console.WriteLine("Trained and saved model.\n");

                    //model = SVM.LoadModel(MODEL_PATH);

                    SVMProblem newData = SVMProblemHelper.Load(NEWDATA_PATH);
                    //Console.Write("Predicted Result:\n");
                    double[] results = newData.Predict(model);
                    //Console.Write(results[0]);
                    int[,] confusionMatrix;
                    double testAccuracy = newData.EvaluateClassificationProblem(results, model.Labels, out confusionMatrix);

                    // Do cross validation to check this parameter set is correct for the dataset or not
                    double[] crossValidationResults; // output labels
                    int      nFold = 10;
                    problem.CrossValidation(parameter, nFold, out crossValidationResults);

                    // Evaluate the cross validation result
                    // If it is not good enough, select the parameter set again
                    double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);
                    //Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);

                    string temp = "";

                    string resultstring = "Predict accuracy: " + testAccuracy + " C: " + C + " gamma: " + gammasq + " Cross validation accuracy: " + crossValidationAccuracy;
                    resultsstring.Add(resultstring);

                    if (parameter.C == testparameter.C && parameter.Gamma == testparameter.Gamma)
                    {
                        resultsstring.Add("This one is same as separate test.");
                    }

                    foreach (double res in results)
                    {
                        temp += res.ToString() + " ";
                    }
                    resultsstring.Add(temp);

                    SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                    compiled.C             = C;
                    compiled.gamma         = gammasq;
                    compiled.testAcc       = testAccuracy;
                    compiled.crossValidAcc = crossValidationAccuracy;
                    ResultsList.Add(compiled);
                }
            }
            File.WriteAllLines(RESULTS_PATH, resultsstring);


            SVMModel testmodel = SVM.Train(problem, testparameter);

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(testmodel);
            foreach (double result in testResults)
            {
                Console.WriteLine(result);
            }

            // Evaluate the test results

            double maxTestAcc      = ResultsList.Max(resultdata => resultdata.testAcc);
            int    maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            //double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
            //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
            List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            double maxC      = topResults.Max(resultdata => resultdata.C);
            int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));

            double bestC     = topResults[topResults.Count - 2].C;     //topResults[maxCIndex].C;
            double bestgamma = topResults[topResults.Count - 2].gamma; //topResults[maxCIndex].gamma;

            Console.WriteLine("Best C: " + bestC + "  Best gammasq: " + bestgamma);

            foreach (SVMClass.SVMResult resultdata in topResults)
            {
                Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
            }
            //int[,] confusionMatrix;
            //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, testmodel.Labels, out confusionMatrix);
            //Console.WriteLine("\n\nTest accuracy: " + testAccuracy);
        }
Example No. 18
        static void Main(string[] args)
        {
            // Load the datasets: In this example I use the same datasets for training and testing which is not suggested
            SVMProblem trainingSet = SVMProblemHelper.Load(@"Dataset\wine.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"Dataset\wine2.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            // Do cross validation to check this parameter set is correct for the dataset or not
            double[] crossValidationResults; // output labels
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model, If your parameter set gives good result on cross validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, @"Model\wine_model.txt");

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            Console.WriteLine("First prediction: " + testResults[0] + "\n");

            /*
             * // Evaluate the test results
             * int[,] confusionMatrix;
             * double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);
             *
             *
             *
             *
             * // Print the resutls
             * Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
             * Console.WriteLine("\nTest accuracy: " + testAccuracy);
             * Console.WriteLine("\nConfusion matrix:\n");
             *
             * // Print formatted confusion matrix
             * Console.Write(String.Format("{0,6}", ""));
             * for (int i = 0; i < model.Labels.Length; i++)
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             * Console.WriteLine();
             * for (int i = 0; i < confusionMatrix.GetLength(0); i++)
             * {
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             *  for (int j = 0; j < confusionMatrix.GetLength(1); j++)
             *      Console.Write(String.Format("{0,5}", confusionMatrix[i,j]));
             *  Console.WriteLine();
             * }
             *
             * Console.WriteLine("\n\nPress any key to quit...");
             * Console.ReadLine();*/
        }
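The "L2 Norm => x / ||x||" comment above is all that Normalize(SVMNormType.L2) is described as doing in these examples: each feature vector is divided by its Euclidean length. The sketch below reproduces that operation by hand on a single SVMNode[] vector; it assumes SVMNode exposes the Index/Value members and the SVMNode(index, value) constructor used elsewhere on this page, and is meant only as an illustration.

        using System;
        using System.Linq;
        using LibSVMsharp;

        static class ManualNormalize
        {
            // Returns a copy of the vector scaled to unit Euclidean length; indices are left untouched.
            public static SVMNode[] L2(SVMNode[] vector)
            {
                double norm = Math.Sqrt(vector.Sum(n => n.Value * n.Value));
                if (norm == 0)
                {
                    return vector; // an all-zero vector cannot be normalized
                }
                return vector.Select(n => new SVMNode(n.Index, n.Value / norm)).ToArray();
            }
        }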
Example No. 19
        private void button2_Click(object sender, EventArgs e)
        {
            SVMProblem trainingSet = new SVMProblem();
            SVMProblem testSet     = trainingSet;

            foreach (DataInfo info in mList)
            {
                SVMNode[] node = new SVMNode[2];
                node[0] = new SVMNode(1, info.X / mWidth);
                node[1] = new SVMNode(2, info.Y / mHeight);
                trainingSet.Add(node, info.Group);
            }


            // Normalize the datasets if you want: L2 Norm => x / ||x||
            //trainingSet = trainingSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 4;
            parameter.Coef0  = hScrollBar1.Value;
            parameter.Degree = 3;

            // Do cross validation to check this parameter set is correct for the dataset or not
            double[] crossValidationResults; // output labels
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model, If your parameter set gives good result on cross validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, FILE_MODEL);

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            // Evaluate the test results
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Print the results
            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            Console.Write(String.Format("{0,6}", ""));
            for (int i = 0; i < model.Labels.Length; i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            }
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                {
                    Console.Write(String.Format("{0,5}", confusionMatrix[i, j]));
                }
                Console.WriteLine();
            }

            Pen[] pen = new Pen[4];
            pen[0] = new Pen(Color.Black, 1);
            pen[1] = new Pen(Color.Red, 1);
            pen[2] = new Pen(Color.LightGreen, 1);
            pen[3] = new Pen(Color.Blue, 1);

            Pen[] pen2 = new Pen[4];
            pen2[0] = new Pen(Color.LightGray, 1);
            pen2[1] = new Pen(Color.DarkRed, 1);
            pen2[2] = new Pen(Color.DarkGreen, 1);
            pen2[3] = new Pen(Color.DarkBlue, 1);

            Bitmap canvas = new Bitmap(pictureBox1.ClientSize.Width, pictureBox1.ClientSize.Height);

            using (Graphics g = Graphics.FromImage(canvas))
            {
                for (int i = 0; i < pictureBox1.ClientSize.Width; i++)
                {
                    for (int j = 0; j < pictureBox1.ClientSize.Height; j++)
                    {
                        SVMNode[] node = new SVMNode[2];
                        node[0] = new SVMNode(1, (double)i / (double)mWidth);
                        node[1] = new SVMNode(2, (double)j / (double)mHeight);

                        double result = SVM.Predict(model, node);
                        g.DrawRectangle(pen2[(int)result], i, j, 1, 1);
                    }
                }

                foreach (DataInfo info in mList)
                {
                    g.DrawEllipse(pen[(int)info.Group], (float)info.X - 5, (float)info.Y - 5, 5, 5);
                }
            }

            Bitmap image = new Bitmap(pictureBox1.ClientSize.Width, pictureBox1.ClientSize.Height);

            pictureBox1.BackgroundImage = canvas; // Set as the background layer
            pictureBox1.Refresh();
            pictureBox1.CreateGraphics().DrawImage(canvas, 0, 0);
        }