Code Example #1
File: Program.cs Project: rennisa/cozy
        private static void Train(string prefix)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(MnistDataPath + prefix + ".txt");

            trainingSet = trainingSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            double[] crossValidationResults;
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);

            SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, MnistDataPath + "model.txt");
            Console.WriteLine("\n\nModel ok!");
        }
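
Example #1 only trains and saves the model. As a hedged companion, the sketch below (not part of the rennisa/cozy project) shows how the saved model could be reloaded and applied to a held-out file, using only calls that appear in the other examples on this page (SVMProblemHelper.Load, Normalize, SVM.LoadModel, Predict); the test file naming is an assumption.

        // Hypothetical companion to Train(): reload the saved model and score a test file.
        private static void Test(string prefix)
        {
            // Assumption: the test file follows the same naming scheme as the training file.
            SVMProblem testSet = SVMProblemHelper.Load(MnistDataPath + prefix + ".txt");
            testSet = testSet.Normalize(SVMNormType.L2);

            SVMModel model = SVM.LoadModel(MnistDataPath + "model.txt");

            double[] testResults = testSet.Predict(model);
            Console.WriteLine("First prediction: " + testResults[0]);
        }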
Code Example #2
File: Program.cs Project: CVML/LibSVMsharp
        static void Main(string[] args)
        {
            // Load the datasets: in this example the same dataset is used for training and testing, which is not recommended
            SVMProblem trainingSet = SVMProblemHelper.Load(@"Dataset\wine.txt");
            SVMProblem testSet = SVMProblemHelper.Load(@"Dataset\wine.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet = testSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();
            parameter.Type = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C = 1;
            parameter.Gamma = 1;

            // Do cross validation to check whether this parameter set is suitable for the dataset
            double[] crossValidationResults; // output labels
            int nFold = 5;
            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model if your parameter set gives a good result on cross validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, @"Model\wine_model.txt");

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            // Evaluate the test results
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Print the results
            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            Console.Write(String.Format("{0,6}", ""));
            for (int i = 0; i < model.Labels.Length; i++)
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                    Console.Write(String.Format("{0,5}", confusionMatrix[i,j]));
                Console.WriteLine();
            }

            Console.WriteLine("\n\nPress any key to quit...");
            Console.ReadLine();
        }
Code Example #3
        // For training the face classifier
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            double[] crossValidationResults;
            int      nFold = 10;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = trainingSet.Train(parameter);


            double[] testResults = testSet.Predict(model);
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.Y.Count;
            Training_result.FontSize   = 14;
            Training_result.FontStyle  = FontStyles.Normal;
            Training_result.Foreground = Brushes.Red;
            Training_result.Background = Brushes.Black;
            index++;
        }
Code Example #4
        public void Training(IResults results, int start, int end)
        {
            for (int scanNum = start; scanNum <= end; scanNum++)
            {
                if (results.Contains(scanNum))
                {
                    List <IScore> scores = results.GetResult(scanNum);
                    foreach (IScore score in scores)
                    {
                        double         y = (score as FDRScoreProxy).IsDecoy() ? 0 : 1;
                        List <SVMNode> X = new List <SVMNode>();
                        // store score value in X
                        int idx = 0;
                        foreach (MassType type in types)
                        {
                            SVMNode node = new SVMNode();
                            node.Index = idx;
                            node.Value = score.GetScore(type);
                            X.Add(node);
                            idx++;
                        }
                        problem.Add(X.ToArray(), y);
                    }
                }
            }

            // training
            SVMParameter parameter = new SVMParameter();

            parameter.Probability = true;
            model = problem.Train(parameter);
        }
Code Example #5
        public void TestLibsvmClassify()
        {
            var advancedClassify = new AdvancedClassify();
            var numericalset     = advancedClassify.LoadNumerical();
            var result           = advancedClassify.ScaleData(numericalset);
            var scaledSet        = result.Item1;
            var scalef           = result.Item2;
            var prob             = new SVMProblem();

            foreach (var matchRow in scaledSet)
            {
                prob.Add(matchRow.NumData.Select((v, i) => new SVMNode(i + 1, v)).ToArray(), matchRow.Match);
            }
            var param = new SVMParameter()
            {
                Kernel = SVMKernelType.RBF
            };
            var m = prob.Train(param);

            m.SaveModel("trainModel");
            Func <double[], SVMNode[]> makeInput = ma => scalef(ma).Select((v, i) => new SVMNode(i + 1, v)).ToArray();
            var newrow = new[] { 28, -1, -1, 26, -1, 1, 2, 0.8 }; // the man does not want children, but the woman does

            TestOutput(m.Predict(makeInput(newrow)));
            newrow = new[] { 28, -1, 1, 26, -1, 1, 2, 0.8 }; // both want children
            TestOutput(m.Predict(makeInput(newrow)));
        }
Code Example #6
        public void face_training(SVMProblem f_training)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towp.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"C:\Users\temp\Desktop\0921_towpt.txt");

            // f_training.Save(@"C:\Users\temp\Desktop\1005f.txt");
            //  trainingSet.Insert(index, f_training.X[0], 2);
            trainingSet.Add(f_training.X[0], 1);
            trainingSet.Save(@"C:\Users\temp\Desktop\flag.txt");
            //   trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            // Console.WriteLine();
            //   SVMNode node = new SVMNode();
            //  node.Index = Convert.ToInt32(o);
            //  node.Value = Convert.ToDouble(f_training.X);
            //  nodes.Add(node);
            //  trainingSet.Add(nodes.ToArray(), 1);
            //  int number = randon.Next(0, trainingSet.X.Count);
            //  int trainingsample = Convert.ToInt32(trainingSet.X.Count * 2 / 3);
            //  int testingsample = Convert.ToInt32(trainingSet.X.Count / 3);

            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.NU_SVC;
            parameter.Kernel      = SVMKernelType.SIGMOID;
            parameter.C           = 1;
            parameter.Gamma       = 1;
            parameter.Probability = true;
            int        nFold = 10;
            MainWindow main  = new MainWindow();

            double[] crossValidationResults; // output labels
            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double   crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);
            SVMModel model = SVM.Train(trainingSet, parameter);

            // SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, @"C:\Users\temp\Desktop\1005.txt");

            double[] testResults = testSet.Predict(model);
            //     Console.WriteLine("");
            int[,] confusionMatrix;
            double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            //  Console.WriteLine("testAccuracy:" + testAccuracy);
            //  Console.WriteLine(Convert.ToString(trainingSet.X.Count));
            main.Training_result.Content    = "testAccuracy:" + testAccuracy + "\nCross validation accuracy: " + crossValidationAccuracy + "\nCount " + trainingSet.X.Count;
            main.Training_result.FontSize   = 14;
            main.Training_result.FontStyle  = FontStyles.Normal;
            main.Training_result.Foreground = Brushes.Red;
            main.Training_result.Background = Brushes.Black;
            // Console.WriteLine(trainingSet1.Length);
            //  trainingSet.Save(@"C:\Users\temp\Desktop\1005.txt");
            index++;
        }
Code Example #7
File: Classifier.cs Project: Malakahh/BesterUI
 public Classifier(string Name, SVMParameter Parameter, SAMData samData)
 {
     this.Name       = Name;
     this.Parameters = new List <SVMParameter>()
     {
         Parameter
     };
     this.samData = samData;
 }
Code Example #8
        // Coin SVM button click event
        private void button8_Click(object sender, EventArgs e)
        {
            StreamWriter Train_txt = new StreamWriter(@"train.txt");
            StreamWriter Test_txt  = new StreamWriter(@"test.txt");

            for (int i = 0; i < 2000; i++)
            {
                if (CoinTrainingSet[i, 0] == 1)
                {
                    Train_txt.WriteLine("1" + " 1:" + CoinTrainingSet[i, 1]);
                }
                else
                {
                    Train_txt.WriteLine("-1" + " 1:" + CoinTrainingSet[i, 1]);
                }
            }

            for (int i = 0; i < 20000; i++)
            {
                if (i < 10000)
                {
                    Test_txt.WriteLine("1" + " 1:" + RV_XY[i]);
                }
                else
                {
                    Test_txt.WriteLine("-1" + " 1:" + RV_XY[i]);
                }
            }

            Train_txt.Close();
            Test_txt.Close();

            SVMProblem problem     = SVMProblemHelper.Load(@"train.txt");
            SVMProblem testProblem = SVMProblemHelper.Load(@"test.txt");

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 0.0001;

            SVMModel model = SVM.Train(problem, parameter);

            double[] target = new double[testProblem.Length];
            for (int i = 0; i < testProblem.Length; i++)
            {
                target[i] = SVM.Predict(model, testProblem.X[i]);
            }

            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);

            label6.Text = accuracy.ToString();
        }
Code Example #9
 public PredictionResult(double[,] ConfusionMatrix, List <double> Recalls, List <double> Precisions, List <double> Fscores, SVMParameter SVMParam, List <Feature> Features, List <int> Answers, List <int> Guesses)
 {
     confusionMatrix = ConfusionMatrix;
     recalls         = Recalls;
     precisions      = Precisions;
     fscores         = Fscores;
     svmParams       = SVMParam;
     features        = Features;
     guesses         = Guesses;
     correct         = Answers;
 }
Code Example #10
        /// <summary>
        /// Create a model from the training data, has to be called before predicting
        /// </summary>
        /// <param name="parameter">The SVMParameter is converted to the ONE_CLASS type</param>
        /// <returns>returns true if the model was created correctly</returns>
        public bool CreateModel(SVMParameter parameter)
        {
            parameter.Type = SVMType.ONE_CLASS;

            try
            {
                _model = _trainingData.Train(parameter);
                return(true);
            }
            catch
            {
                return(false);
            }
        }
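
As a hedged follow-up to the one-class example above, the sketch below shows how the resulting _model might be queried; the IsInlier name is hypothetical, and the +1/-1 convention follows libsvm's one-class output.

        // Hypothetical companion method: score a new sample against the one-class model.
        // libsvm's one-class SVM predicts +1 for inliers and -1 for outliers.
        public bool IsInlier(SVMNode[] sample)
        {
            if (_model == null)
            {
                return false;
            }
            return SVM.Predict(_model, sample) > 0;
        }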
Code Example #11
 public static string GetOutput(this SVMParameter parameter)
 {
     return($"C:{parameter.C}\n" +
            $"CacheSize:{parameter.CacheSize}\n" +
            $"Coef0:{parameter.Coef0}\n" +
            $"Degree:{parameter.Degree}\n" +
            $"Eps:{parameter.Eps}\n" +
            $"Gamma:{parameter.Gamma}\n" +
            $"Kernel:{parameter.Kernel}\n" +
            $"Nu:{parameter.Nu}\n" +
            $"P:{parameter.P}\n" +
            $"Probability:{parameter.Probability}\n" +
            $"Shrinking:{parameter.Shrinking}\n" +
            $"Type:{parameter.Type}\n" +
            $"Weights:{string.Join(",", parameter.Weights)}\n");
 }
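
A minimal, assumed usage of this extension is simply dumping the current parameter set before training:

     // Assumed usage: print every field of an SVMParameter for logging/debugging.
     SVMParameter parameter = new SVMParameter();
     Console.WriteLine(parameter.GetOutput());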
Code Example #12
        public static SVMParameter FindBestHyperparameters(SVMProblem problem, SVMParameter parameter)
        {
            int nFold   = int.Parse(Configuration.Get("nFold"));
            int logTo   = int.Parse(Configuration.Get("logTo"));
            int logFrom = int.Parse(Configuration.Get("logFrom"));

            BlockingCollection <ParameterResult> results = new BlockingCollection <ParameterResult>();
            List <Task> tasks = new List <Task>();

            for (double cLog = logFrom; cLog <= logTo; cLog++)
            {
                double c = Math.Pow(2, cLog);
                tasks.Add(Task.Factory.StartNew(() =>
                {
                    for (double gammaLog = logFrom; gammaLog <= logTo; gammaLog++)
                    {
                        SVMParameter parameterUnderTest = parameter.Clone();
                        parameterUnderTest.C            = c;
                        parameterUnderTest.Gamma        = Math.Pow(2, gammaLog);
                        problem.CrossValidation(parameterUnderTest, nFold, out var crossValidationResults);
                        double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                        results.Add(new ParameterResult()
                        {
                            Accuracy = crossValidationAccuracy, C = parameterUnderTest.C,
                            Gamma    = parameterUnderTest.Gamma
                        });
                    }
                }));
            }
            Task.WaitAll(tasks.ToArray());

            var resultList = results.ToList();

            resultList.Sort();

            ParameterResult bestParameter =
                HighestScore(resultList);

            SaveToCsv(results, "svmData.txt");
            SVMParameter returnValue = parameter.Clone();

            returnValue.C     = bestParameter.C;
            returnValue.Gamma = bestParameter.Gamma;
            return(returnValue);
        }
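
The grid search above relies on a ParameterResult type and a HighestScore helper that are not shown in this snippet. A minimal sketch of what they might look like, assuming results are compared by cross-validation accuracy (the actual project code may differ):

        // Hypothetical definitions assumed by FindBestHyperparameters above.
        public class ParameterResult : IComparable<ParameterResult>
        {
            public double Accuracy { get; set; }
            public double C { get; set; }
            public double Gamma { get; set; }

            public int CompareTo(ParameterResult other)
            {
                // Sort ascending by accuracy so better parameter sets come later in the sorted list.
                return Accuracy.CompareTo(other.Accuracy);
            }
        }

        private static ParameterResult HighestScore(List<ParameterResult> results)
        {
            // Pick the parameter set with the highest cross-validation accuracy.
            return results.OrderByDescending(r => r.Accuracy).First();
        }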
Code Example #13
        public SVMClassifier(int folds = 5)
        {
            Folds         = folds;
            DataFile      = @"Data\data.csv";
            Data          = new DataLoader(Folds, DataFile);
            TrainProblems = new SVMProblem[folds];
            TestProblems  = new SVMProblem[folds];

            for (int i = 0; i < folds; i++)
            {
                TrainProblems[i] = new SVMProblem();
                TestProblems[i]  = new SVMProblem();
            }

            for (int i = 0; i < folds; i++)
            {
                for (int j = 0; j < folds; j++)
                {
                    for (int k = 0; k < Data.InstancesPerFold; k++)
                    {
                        var nodes = new SVMNode[Data.FeatureCount];
                        var label = (double)Data.Labels[j, k];
                        for (int x = 0; x < Data.FeatureCount; x++)
                        {
                            nodes[x] = new SVMNode(x + 1, Data.Data[j, k, x]);
                        }
                        if (i != j)
                        {
                            TrainProblems[i].Add(nodes, label);
                        }
                        else
                        {
                            TestProblems[i].Add(nodes, label);
                        }
                    }
                }
            }
            Parameters        = new SVMParameter();
            Parameters.Type   = SVMType.C_SVC;
            Parameters.Kernel = SVMKernelType.RBF;
            Parameters.C      = 1000000;
            Parameters.Degree = 3;
            Parameters.Coef0  = 0;
            Parameters.Gamma  = 0.001;
        }
Code Example #14
        public void LibsvmFirstLook()
        {
            var prob = new SVMProblem();

            prob.Add(new[] { new SVMNode(1, 1), new SVMNode(2, 0), new SVMNode(3, 1) }, 1);
            prob.Add(new[] { new SVMNode(1, -1), new SVMNode(2, 0), new SVMNode(3, -1) }, -1);
            var param = new SVMParameter();

            param.Kernel = SVMKernelType.LINEAR;
            param.C      = 10;
            var m = prob.Train(param);

            TestOutput(m.Predict(new [] { new SVMNode(1, 1), new SVMNode(2, 1), new SVMNode(3, 1) }));
            m.SaveModel("trainModel");
            var ml = SVM.LoadModel("trainModel");

            TestOutput(ml.Predict(new[] { new SVMNode(1, 1), new SVMNode(2, 1), new SVMNode(3, 1) }));
        }
Code Example #15
File: SVMManage.cs Project: tome-beta/FaceJudge
        //--------------------------------------------------------------------------------------
        // private
        //---------------------------------------------------------------------------------------

        /// <summary>
        /// Create the dictionary (model) file
        /// </summary>
        /// <param name="input_learing_file"></param>
        /// <param name="gammma"></param>
        /// <param name="cost"></param>
        private void Training(string input_learing_file, float gammma, float cost)
        {
            // LibSVM test
            // Load the training data
            SVMProblem problem = SVMProblemHelper.Load(input_learing_file);

            // SVM parameters
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = LibSVMsharp.SVMType.C_SVC;
            parameter.Kernel = LibSVMsharp.SVMKernelType.RBF;
            parameter.C      = cost;
            parameter.Gamma  = gammma;

            // Is the SVM model not being built correctly? The labels are not being assigned!!

            libSVM_model = SVM.Train(problem, parameter);
            // Output the dictionary file (an XML file)
            string xml_name = @"model_" + input_learing_file;

            xml_name = xml_name.Replace(@".csv", @".xml");
            SVM.SaveModel(libSVM_model, xml_name);

            // Write the prediction results to a file
            SVMProblem testProblem = SVMProblemHelper.Load(input_learing_file);

            double[] target         = new double[testProblem.Length];
            string   debug_file_str = @"debug_" + input_learing_file;

            using (StreamWriter w = new StreamWriter(debug_file_str))
            {
                for (int i = 0; i < testProblem.Length; i++)
                {
                    target[i] = SVM.Predict(libSVM_model, testProblem.X[i]);
                    w.Write(target[i] + "\n");
                    Console.Out.WriteLine(@"{0} : {1}", i, target[i]);
                }
            }
            // Compute the accuracy.
            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);
        }
Code Example #16
        static void Main(string[] args)
        {
            SVMProblem problem = SVMProblemHelper.Load(@"Datasets\wine.txt");

            problem = SVMProblemHelper.Normalize(problem, SVMNormType.L2); // Optional

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            // Do 10-fold cross validation
            double[] target;
            SVM.CrossValidation(problem, parameter, 10, out target);

            double crossValidationAccuracy = SVMHelper.EvaluateClassificationProblem(problem, target);

            // Train the model
            SVMModel model = SVM.Train(problem, parameter);

            double correct = 0;

            for (int i = 0; i < problem.Length; i++)
            {
                double y = SVM.Predict(model, problem.X[i]);
                if (y == problem.Y[i])
                {
                    correct++;
                }
            }

            double trainingAccuracy = correct / (double)problem.Length;

            Console.WriteLine("\nCross validation accuracy: " + crossValidationAccuracy);
            Console.WriteLine("\nTraining accuracy: " + trainingAccuracy);

            Console.ReadLine();
        }
Code Example #17
 public override void Setup(IEvolutionState state, IParameter paramBase)
 {
     imageTransformer = new ImageTransformer(200, 200, @"F:\Gesty\bin", @"F:\Gesty\rescaled");
     currentImage     = new Image <Gray, Byte> [4];
     originalImage    = new Image <Gray, Byte> [4];
     for (int i = 0; i < 4; i++)
     {
         currentImage[i]  = new Image <Gray, Byte>(200, 200);
         originalImage[i] = new Image <Gray, Byte>(200, 200);
     }
     Parameter        = new SVMParameter();
     Parameter.Type   = SVMType.C_SVC;
     Parameter.Kernel = SVMKernelType.POLY;
     base.Setup(state, paramBase);
     Input = (ProblemData)state.Parameters.GetInstanceForParameterEq(
         paramBase.Push(P_DATA), null, typeof(ProblemData));
     Input.Setup(state, paramBase.Push(P_DATA));
     // imageTransformer.RescaleAndSaveImages();
     // imageList = Directory.GetFiles(@"F:\Gesty\rescaled");
     imageTransformer.RescaleAndRotate();
     imageList = Directory.GetFiles(@"F:\Gesty\rotated");
 }
Code Example #18
File: Program.cs Project: src8655/sku
        // Create the trained model
        public static SVMModel SVM_GenModel(String dataset)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(dataset); // Open the training dataset

            trainingSet = trainingSet.Normalize(SVMNormType.L2);

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            double[] crossValidationResults;
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            SVMModel model = trainingSet.Train(parameter);  // Build the trained model

            SVM.SaveModel(model, "model_" + dataset);       // Save the model
            return(model);
        }
Code Example #19
 public static string CheckParameter(this SVMProblem problem, SVMParameter parameter)
 {
     return SVM.CheckParameter(problem, parameter);
 }
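
A hedged usage sketch for this wrapper (Code Example #29 below calls it the same way); per libsvm's svm_check_parameter, the returned string is expected to be null when the parameter set is valid, otherwise an error message:

     // Assumed usage: validate the parameter set before training.
     string error = problem.CheckParameter(parameter);
     if (!string.IsNullOrEmpty(error))
     {
         Console.WriteLine("Invalid SVM parameter: " + error);
     }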
Code Example #20
File: Program.cs Project: Coft/FeatureSelection2
        private static void Main(string[] args)
        {
            var           report        = new MakeReport();
            List <string> consoleOutput = new List <string>();

            consoleOutput.Add($"{System.DateTime.Now} - Loading");

            var train = ConvertHelper.CSVToDataConstrutor(File.ReadAllLines("train.csv"), ',');
            var test  = ConvertHelper.CSVToDataConstrutor(File.ReadAllLines("test.csv"), ',');

            var fs = new DataProcessing(train, test, "SalePrice", "Id");

            fs.GetFeature("SalePrice").Transform((value) => Math.Log(1 + value));

            fs.SetInactive(new List <string> {
                "Id", "PoolArea", "LandContour", "PoolQC", "LotConfig", "Utilities", "Alley",
                "Street", "BsmtHalfBath", "LowQualFinSF", "3SsnPorch", "LandSlope", "YrSold", "Condition1", "BsmtFinType2", "RoofMatl",
                "MiscVal", "MiscFeature", "BsmtFinSF2", "Condition2", "BldgType", "ScreenPorch", "MoSold", "Functional"
            });

            fs.SetInactive(new List <string> {
                "BsmtCond", "BsmtUnfSF", "GarageCars", "PavedDrive", "SaleType", "SaleCondition",
                "BsmtExposure", "GarageCond", "Fence", "Heating", "BsmtQual",
            });

            //fs.SetInactive(new List<string> { "EnclosedPorch" });

            fs.SetTrainRowsInactiveByIds(new List <string> {
                "1299", "186", "198", "636", "1032", "1183", "1153", "1174"
            });
            //fs.SetTrainRowsInactiveByIds(new List<string> { "130", "188", "199", "268", "305", "497", "524", "530", "692", "770", "884", "1025", "1231", "1371", "1387", "1424", "1441" });
            consoleOutput.Add($"{System.DateTime.Now} - Transforming model");

            fs.GetFeature("LotFrontage").ReplaceValues("NA", "0");
            fs.GetFeature("MasVnrArea").ReplaceValues("NA", "-1");
            fs.GetFeature("GarageYrBlt").ReplaceValues("NA", "-1");

            fs.Features.Where(f => !f.IsNumeric() && !f.IsClass).All(n => { n.TransformEnumToInt(); return(true); });
            //fs.Features.Select(f => new OutlierLine { FeatureName = f.Name, Outliers = string.Join(", ", f.GetOutliers()) }).ToList().ForEach(o => consoleOutput.Add($"{o.FeatureName} => {o.Outliers}"));
            //TODO: This somehow did not improve things and even made them worse; a function like this should be added instead of computing it here, and the GarageYrBlt variable should be included
            //var oldestYearBuild = fs.GetFeature("YearBuilt").Values.Min(v => double.Parse(v.NewValue));
            //fs.GetFeature("YearBuilt").Transform((value) => (double.Parse(value) - oldestYearBuild).ToString());

            //fs.Features.ToList().ForEach(f => report.AddScatterPlot(f.Name, f.Values.Where(v => !v.IsTest).Select(v => new Point() { X = double.Parse(v.NewValue), Y = double.Parse(fs.GetClassForTrainId(v.RowId)) }).ToList()));
            //report.CreatePDF("Report.pdf");

            //var oldestYearRemodAdd = fs.GetFeature("YearRemodAdd").Values.Min(v => double.Parse(v.NewValue));
            //fs.GetFeature("YearRemodAdd").Transform((value) => (double.Parse(value) - oldestYearRemodAdd).ToString());

            //OUTLIERS
            //foreach (var feature in fs.Features.Where(f => f.IsActive && !f.IsClass && !f.IsId && new List<string> { "EnclosedPorch", "BsmtFinSF2", "GarageYrBlt", "OpenPorchSF", "ScreenPorch", "MasVnrArea", "LotArea", "Condition1", "MSSubClass", "MiscVal" }.IndexOf(f.Name) < 0))
            //{
            //    feature.MarkOutliers();
            //}
            //fs.PrintOutliersAmount();
            consoleOutput.Add($"{System.DateTime.Now} - Gathering data");

            var dataModel = fs.GetDataModel();

            File.WriteAllLines("output-train.csv", ConvertHelper.DataSetToCSV(dataModel.HeadersWithClass, dataModel.OutputTrain, ","));
            File.WriteAllLines("output-test.csv", ConvertHelper.DataSetToCSV(dataModel.HeadersWithoutClass, dataModel.OutputTest, ","));

            consoleOutput.Add($"{System.DateTime.Now} - Preparing SVM problem");

            //if (false)
            {
                //SVM.SetPrintStringFunction(null);
                SVMProblem testSet     = SVMLoadHelper.Load(dataModel.OutputTest, isWithClass: false);
                SVMProblem trainingSet = SVMLoadHelper.Load(dataModel.OutputTrain, isWithClass: true);

                SVMParameter parameter = new SVMParameter()
                {
                    Type   = SVMType.EPSILON_SVR,
                    Kernel = SVMKernelType.RBF,
                    //C = 10,
                    Gamma     = 0.01,
                    CacheSize = 2000,
                    Eps       = 0.1,// * Math.Pow(10, i);
                    //parameter.Probability = true;
                };

                //List<Tuple<string, double>> rmseAfterRemoveFeature = new List<Tuple<string, double>>();
                //var activeFeatures = fs.Features.Where(f => f.IsActive || !f.IsClass);
                //foreach (var feature in activeFeatures)
                //{
                //    feature.IsActive = false;
                //    var outputTrain = fs.GetTransfomedTrain();
                //    SVMProblem trainingSet = SVMLoadHelper.Load(outputTrain, isWithClass: true);

                //    consoleOutput.Add("=====================");
                //    //SVMModel model = trainingSet.Train(parameter);
                //    //double[] trainingResults = trainingSet.Predict(model);
                //    double[] crossvalidationResults;
                //    trainingSet.CrossValidation(parameter, 3, out crossvalidationResults);
                //    //consoleOutput.Add(parameter.GetOutput());
                //    var rmselog = EvaulationHelper.RMSELog(trainingSet.Y.ToArray(), crossvalidationResults);
                //    rmseAfterRemoveFeature.Add(new Tuple<string, double>(feature.Name, rmselog));
                //    consoleOutput.Add($"{System.DateTime.Now} - {feature.Name} - {rmselog}");
                //    feature.IsActive = true;
                //}

                //consoleOutput.Add($"{System.DateTime.Now} - {bestToRemove.Item1} - {bestToRemove.Item2}");
                consoleOutput.Add("=====================");
                consoleOutput.Add("Ordered");
                //consoleOutput.AddRange(rmseAfterRemoveFeature.OrderBy(t => t.Item2).Select(t => $"{t.Item1} - {t.Item2}"));
                SVMModel model        = trainingSet.Train(parameter);
                double[] trainResults = trainingSet.Predict(model);
                double[] testResults  = testSet.Predict(model);

                //double meanSquaredErr = testSet.EvaluateRegressionProblem(testResults, out correlationCoef);

                trainingSet.Y
                .Select((y, index) => Math.Abs(y - trainResults[index]))
                .Select((v, index) => new { index, v })
                .OrderByDescending(v => v.v)
                .Take(15)
                .ToList()
                .ForEach(e => consoleOutput.Add($"{e.index}:{e.v}"));

                var rmselog = EvaulationHelper.RMSELog(trainingSet.Y.ToArray(), trainResults);
                consoleOutput.Add($"{System.DateTime.Now} - {rmselog}");

                File.WriteAllLines("submission_fs.csv", ConvertHelper.ResultToCSV("Id,SalePrice", testResults.Select(v => Math.Exp(v) - 1).ToArray(), dataModel.TestIds));
                consoleOutput.Add($"{System.DateTime.Now} - finished");
                SVM.SaveModel(model, "model.txt");
            }

            //report.CreatePDF("Report.pdf");
            consoleOutput.ForEach(s => System.Console.WriteLine(s));
            File.WriteAllLines("consoleOutput.txt", consoleOutput);

            System.Console.ReadLine();
        }
Code Example #21
File: SVMClass.cs Project: pinkiepanda/MouseApp
        public static bool trainProblem()
        {
            if (checkExistingDataset())
            {
                SVMProblem                problem       = SVMProblemHelper.Load(Constants.DATA_PATH);
                SVMProblem                randdata      = SVMProblemHelper.Load(Constants.RAND_PATH);
                List <string>             resultsstring = new List <string>();
                List <SVMClass.SVMResult> ResultsList   = new List <SVMClass.SVMResult>();

                double C, gammasq;
                double Cmin = 1, Cmax = 10000, Cstep = 10;
                double gmin = 0.0001, gmax = 1000, gstep = 10;
                bool   satisfied = false;
                while (!satisfied)
                {
                    for (C = Cmin; C <= Cmax; C = C * Cstep)
                    {
                        for (gammasq = gmin; gammasq <= gmax; gammasq = gammasq * gstep)
                        {
                            SVMParameter tempparameter = new SVMParameter();
                            tempparameter.Type   = SVMType.C_SVC;
                            tempparameter.Kernel = SVMKernelType.RBF;
                            tempparameter.C      = C;
                            tempparameter.Gamma  = gammasq;

                            SVMModel tempmodel = SVM.Train(problem, tempparameter);

                            SVMProblem testData = SVMProblemHelper.Load(Constants.RAND_PATH);
                            double[]   results  = testData.Predict(tempmodel);
                            int[,] confusionMatrix;
                            double testAccuracy = testData.EvaluateClassificationProblem(results, tempmodel.Labels, out confusionMatrix);

                            // Do cross validation to check whether this parameter set is suitable for the dataset
                            double[] crossValidationResults; // output labels
                            int      nFold = 10;
                            problem.CrossValidation(tempparameter, nFold, out crossValidationResults);

                            // Evaluate the cross validation result
                            // If it is not good enough, select the parameter set again
                            double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                            SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                            compiled.C             = C;
                            compiled.gamma         = gammasq;
                            compiled.testAcc       = testAccuracy;
                            compiled.crossValidAcc = crossValidationAccuracy;
                            ResultsList.Add(compiled);
                        }
                    }

                    // Evaluate the test results
                    double maxTestAcc = ResultsList.Max(resultdata => resultdata.testAcc);
                    //int maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                    double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
                    //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                    if (maxTestAcc < 95 || maxValidAcc < 95)
                    {
                        satisfied = false;
                        Cstep--;
                        gstep--;
                    }
                    else
                    {
                        satisfied = true;

                        List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
                        List <SVMClass.SVMResult> topValid   = ResultsList.FindAll(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
                        while (topResults.Count > topValid.Count)
                        {
                            topResults.RemoveAt(ResultsList.FindIndex(resultsdata => resultsdata.crossValidAcc.Equals(ResultsList.Min(resultdata => resultdata.crossValidAcc))));
                        }

                        double maxC      = topResults.Max(resultdata => resultdata.C);
                        int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));
                        double bestgamma = topResults[maxCIndex].gamma;
                        // maxC or not???
                        //double bestC = topResults[topResults.Count - 2].C; //topResults[maxCIndex].C;
                        //double bestgamma = topResults[topResults.Count - 2].gamma;//topResults[maxCIndex].gamma;
                        Console.WriteLine("Best C: " + maxC + "  Best gammasq: " + bestgamma);
                        Constants.C       = maxC;
                        Constants.gammasq = bestgamma;

                        foreach (SVMClass.SVMResult resultdata in topResults)
                        {
                            Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
                        }
                    }
                }

                SVMParameter parameter = new SVMParameter();
                parameter.Type   = SVMType.C_SVC;
                parameter.Kernel = SVMKernelType.RBF;
                parameter.C      = Constants.C;
                parameter.Gamma  = Constants.gammasq;

                Variables.model = SVM.Train(problem, parameter);
                //File.WriteAllText(Constants.MODEL_PATH, String.Empty);
                //SVM.SaveModel(Variables.model, Constants.MODEL_PATH);
                Console.WriteLine("Trained and saved model.\n");
                //return Variables.model;
                return(true);
            }
            else
            {
                MessageBox.Show("Invalid training data!");
                return(false);
            }
        }
Code Example #22
 public static void CrossValidation(this SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
 {
     SVM.CrossValidation(problem, parameter, nFolds, out target);
 }
Code Example #23
        // SVM button click event
        private void button5_Click(object sender, EventArgs e)
        {
            StreamWriter Train_txt = new StreamWriter(@"train.txt");
            StreamWriter Test_txt  = new StreamWriter(@"test.txt");

            int[] get;
            for (int i = 0; i < TrainingSet.Count; i++)
            {
                get = TrainingSet[i];
                if (get[0] == 1)
                {
                    Train_txt.WriteLine("1" + " 1:" + get[1] + " 2:" + get[2] + " 3:" + get[3]);
                }
                else
                {
                    Train_txt.WriteLine("-1" + " 1:" + get[1] + " 2:" + get[2] + " 3:" + get[3]);
                }
            }

            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < wide; j++)
                {
                    Test_txt.WriteLine("1" + " 1:" + Image[0, j, i] + " 2:" + Image[1, j, i] + " 3:" + Image[2, j, i]);
                }
            }

            Train_txt.Close();
            Test_txt.Close();

            SVMProblem problem     = SVMProblemHelper.Load(@"train.txt");
            SVMProblem testProblem = SVMProblemHelper.Load(@"test.txt");

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 0.0001;

            SVMModel model = SVM.Train(problem, parameter);

            double[] target = new double[testProblem.Length];
            for (int i = 0; i < testProblem.Length; i++)
            {
                target[i] = SVM.Predict(model, testProblem.X[i]);
            }

            // Modify the image
            Rectangle recta = new Rectangle(0, 0, wide, height);

            BmData = copy.LockBits(recta, ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);
            IntPtr Scan   = BmData.Scan0;
            int    Offset = BmData.Stride - wide * 3;

            unsafe
            {
                byte *P = (byte *)(void *)Scan;
                for (int y = 0; y < height; y++, P += Offset)
                {
                    for (int x = 0; x < wide; x++, P += 3)
                    {
                        if (target[y * wide + x] > 0)
                        {
                            P[2] = 255;
                            P[1] = 255;
                            P[0] = 255;
                        }
                        else
                        {
                            P[2] = 0;
                            P[1] = 0;
                            P[0] = 0;
                        }
                    }
                }
            }
            copy.UnlockBits(BmData);
            pictureBox3.Image = copy;
        }
Code Example #24
        public static SvmResult TrainAndTestSvm(SVMProblem trainingSet, SVMProblem testSet)
        {
            // find the ratio of malignant:benign cases:
            double mbTrainRatio = trainingSet.Y.Where(x => x == 0).ToArray().Length * 1F / trainingSet.Y.Count;

            Console.WriteLine($"MB TRAIN RATIO: {mbTrainRatio}");
            double mbTestRatio = testSet.Y.Where(x => x == 0).ToArray().Length * 1F / testSet.Y.Count;

            Console.WriteLine($"MB TEST RATIO: {mbTestRatio}");

            SVMParameter parameter = new SVMParameter
            {
                Type         = SVMType.C_SVC,
                Kernel       = SVMKernelType.RBF,
                C            = double.Parse(Configuration.Get("C")),
                Gamma        = double.Parse(Configuration.Get("Gamma")),
                Probability  = true,
                WeightLabels = new[] { 0, 1 },
                Weights      = new[] { (1 - mbTrainRatio) / mbTrainRatio, 1 }
            };

            //parameter = TrainingHelper.FindBestHyperparameters(trainingSet, parameter);
            Console.WriteLine($"Found best parameters: c={parameter.C},gamma={parameter.Gamma}");

            SVMModel model = trainingSet.Train(parameter);

            SVM.SaveModel(model, Configuration.Get("ModelLocation"));

            // The following evaluation has code from:
            // https://csharp.hotexamples.com/examples/LibSVMsharp/SVMParameter/-/php-svmparameter-class-examples.html

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);


            // Evaluate the test results
            double testAccuracy =
                testSet.EvaluateClassificationProblem(testResults, model.Labels, out var confusionMatrix);

            // Print the results
            Console.WriteLine("\nTest accuracy: " + testAccuracy);
            Console.WriteLine("\nConfusion matrix:\n");


            // Print formatted confusion matrix

            Console.Write($"{"",6}");
            for (int i = 0; i < model.Labels.Length; i++)
            {
                Console.Write($"{"(" + model.Labels[i] + ")",5}");
            }
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write($"{"(" + model.Labels[i] + ")",5}");
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                {
                    Console.Write($"{confusionMatrix[i, j],5}");
                }
                Console.WriteLine();
            }

            double sensitivity = confusionMatrix[0, 0] * 1.0 /
                                 (confusionMatrix[0, 1] + confusionMatrix[0, 0]);
            double specificity = confusionMatrix[1, 1] * 1.0 /
                                 (confusionMatrix[1, 1] + confusionMatrix[1, 0]);



            double[] results = testSet.PredictProbability(model, out var probabilities);
            for (int i = 0; i < probabilities.Count; i++)
            {
                // ReSharper disable once CompareOfFloatsByEqualityOperator
                String x = results[i] != testSet.Y[i] ? "MISPREDICTION" : "";
                Console.WriteLine($"{results[i]} | {probabilities[i][0]} | {probabilities[i][1]} | {testSet.Y[i]} | {x}");
            }

            return(new SvmResult()
            {
                C = parameter.C, Gamma = parameter.Gamma, TestAccuracy = testAccuracy, Sensitivity = sensitivity,
                Specificity = specificity
            });
        }
Code Example #25
        /// <summary>
        /// Function which will run forever, continuously classifying histogram batches into key events.
        /// </summary>
        public void run()
        {
            List <Histogram> temp = null;
            List <Histogram> hb;

            SVMProblem problem = SVMProblemHelper.Load(PipelineConstants.SVMFeaturesFile);

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 13.9;
            parameter.Gamma  = .029;
            SVMModel model = SVM.Train(problem, parameter);

            string[]        eventTrigger = { "standing", "leftShoulder", "rightShoulder", "leftHip", "rightHip" };
            ScanCodeShort[] keyEvents    = { ScanCodeShort.KEY_S, ScanCodeShort.KEY_I, ScanCodeShort.KEY_O, ScanCodeShort.KEY_K, ScanCodeShort.KEY_L };

            // Continuously scan the histogram share point for new histogram data
            while (true)
            {
                hb = hsp.histBatch;
                // Compare references -- if the share point has a different reference, we're out of date
                if (temp != hb && hb != null)
                {
                    temp = hb;
                    int count = 1;
                    // Convert histogram bins into SVM feature vectors
                    List <SVMNode> nodes = new List <SVMNode>();
                    for (int i = 0; i < temp.Count; i++)
                    {
                        Histogram histObject = temp[i];
                        for (int j = 0; j < histObject.BucketCount; j++)
                        {
                            SVMNode node = new SVMNode();
                            node.Index = count++;
                            node.Value = histObject[j].Count / SkeletonFrameWindowProcessor.WindowSize;
                            nodes.Add(node);
                        }
                    }
                    // Get a prediction
                    double y = SVM.Predict(model, nodes.ToArray());
                    // Use a sliding window of votes to filter out brief moments of misclassification
                    votingWindow.Add(y);
                    while (votingWindow.Count > VotingWindowSize)
                    {
                        votingWindow.RemoveAt(0);
                    }
                    // Neat one-liner taken from http://stackoverflow.com/a/8260598
                    // Group the votes, sort by group size, select the largest, select the associated vote value
                    double vote = votingWindow.GroupBy(v => v).OrderByDescending(g => g.Count()).First().Key;

                    // Change the console title to make it clear what the classifier is seeing
                    System.Console.Title = eventTrigger[(int)vote];

                    // Only trigger a keypress when the voted value changes
                    // This has the result of holding a pose being equivalent to quickly dropping it
                    // i.e., the gesture is invariant to duration
                    if (vote != 0 && vote != previousVote)
                    {
                        SendInputWithAPI(keyEvents[(int)vote]);
                    }
                    previousVote = vote;
                }
            }
        }
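
The run() loop above references several members of the surrounding class that are not shown (hsp, votingWindow, VotingWindowSize, previousVote, SendInputWithAPI). For readability only, a hypothetical sketch of the assumed field declarations; the types and the window size are guesses, not the project's actual values:

        // Hypothetical fields assumed by run() above.
        private readonly HistogramSharePoint hsp;                         // shared source of histogram batches (type name assumed)
        private readonly List<double> votingWindow = new List<double>();  // sliding window of recent predictions
        private const int VotingWindowSize = 15;                          // window length (value assumed)
        private double previousVote = 0;                                  // last emitted vote, used to trigger keypresses only on change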
Code Example #26
 public static void CrossValidation(this SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
 {
     SVM.CrossValidation(problem, parameter, nFolds, out target);
 }
Code Example #27
File: Program.cs Project: src8655/sku
        static void Main(string[] args)
        {
            // Load the training and test datasets
            SVMProblem trainingSet = SVMProblemHelper.Load(@"Dataset\wine.txt");
            SVMProblem testSet     = SVMProblemHelper.Load(@"Dataset\wine2.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet     = testSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();

            parameter.Type   = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            // Do cross validation to check whether this parameter set is suitable for the dataset
            double[] crossValidationResults; // output labels
            int      nFold = 5;

            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model if your parameter set gives a good result on cross validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, @"Model\wine_model.txt");

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            Console.WriteLine("aaa:" + testResults[0] + "\n");

            /*
             * // Evaluate the test results
             * int[,] confusionMatrix;
             * double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);
             *
             *
             *
             *
             * // Print the results
             * Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
             * Console.WriteLine("\nTest accuracy: " + testAccuracy);
             * Console.WriteLine("\nConfusion matrix:\n");
             *
             * // Print formatted confusion matrix
             * Console.Write(String.Format("{0,6}", ""));
             * for (int i = 0; i < model.Labels.Length; i++)
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             * Console.WriteLine();
             * for (int i = 0; i < confusionMatrix.GetLength(0); i++)
             * {
             *  Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
             *  for (int j = 0; j < confusionMatrix.GetLength(1); j++)
             *      Console.Write(String.Format("{0,5}", confusionMatrix[i,j]));
             *  Console.WriteLine();
             * }
             *
             * Console.WriteLine("\n\nPress any key to quit...");
             * Console.ReadLine();*/
        }
Code Example #28
 public MetaClassifier(string Name, SVMParameter Parameter, SAMData SamData, List <StdClassifier> Classifiers) : base(Name, Parameter, SamData)
 {
     standardClassifiers = Classifiers;
 }
Code Example #29
File: Program.cs Project: Tanson/SVM-Neuro-Matching
        static void wordsSvm()
        {//http://www.svm-tutorial.com/2014/10/svm-tutorial-classify-text-csharp/
            var index   = 11;
            var rooms11 = File.ReadAllText(string.Format("../../room{0}.txt", index));
            var rooms22 = File.ReadAllText(string.Format("../../room{0}.txt", ++index));

            var rooms1 = rooms11.Split(';');
            var rooms2 = rooms22.Split(';');

            var termFreqWeight = new PanGuTermFreqWeight();

            termFreqWeight.computerFW(rooms1, rooms2);

            var problem = new SVMProblem(); //SVM.SetPrintStringFunction(mySVMPrintFunction);

            var lableFeaturesBuilder = new LableFeaturesBuilder();
            var segment = new PanGuSegment();

            foreach (var room in rooms2)
            {
                var words = termFreqWeight.TermsByDoc[1, room];
                words = words.Where(t => t.Length > 1 && termFreqWeight.TermFWByGlobal.ContainsKey(t) && termFreqWeight.TermFWByType.ContainsKey(0, t)).ToArray();
                //lableFeaturesBuilder.AddToProblem(problem, room, words.Select(t => new KeyValuePair<string, double>(t,termFreqWeight.TermFWByDoc[1, room, t].Freq*GetW(t))));
                lableFeaturesBuilder.AddToProblem(problem, room, words.Select(t => new KeyValuePair <string, double>(t, Math.Max(termFreqWeight.TermFWByType[0, t].Freq, termFreqWeight.TermFWByType[1, t].Freq))));
            }

            SVMParameter parameter = new SVMParameter();

            parameter.Type        = SVMType.C_SVC;
            parameter.Kernel      = SVMKernelType.LINEAR;
            parameter.C           = 1;
            parameter.Probability = true;
            //parameter.
            //parameter.WeightLabels = lableFeaturesBuilder.CreateWeightFeatures("大", "双", "套", "大床", "双床").ToArray();
            //parameter.Weights = new double[] { 1.90, 1.90, 1.90, 1.99, 1.99 };

            //parameter.WeightLabels = lableFeaturesBuilder.CreateWeightLables<int>("行政湖景双床房").ToArray();
            //parameter.Weights = new double[] { 1.90, 1.90, 1.90, 1.99, 1.99 };

            problem = problem.Normalize(SVMNormType.L1);
            problem.CheckParameter(parameter);

            var model2 = LibSVMsharp.SVM.Train(problem, parameter);

            model2.SaveModel("roomMatching.model");

            var model = LibSVMsharp.SVM.LoadModel("roomMatching.model");

            foreach (var room in rooms1)
            {
                var words = termFreqWeight.TermsByDoc[0, room];
                words = words.Where(t => t.Length > 1 && termFreqWeight.TermFWByGlobal.ContainsKey(t) && termFreqWeight.TermFWByType.ContainsKey(1, t)).ToArray();

                //var nodes = lableFeaturesBuilder.CreateNodes(words.Select(t => new KeyValuePair<string, double>(t, termFreqWeight.TermFWByDoc[0, room, t].Freq * GetW(t))));
                var nodes = lableFeaturesBuilder.CreateNodes(words.Select(t => new KeyValuePair <string, double>(t, Math.Max(termFreqWeight.TermFWByType[0, t].Freq, termFreqWeight.TermFWByType[1, t].Freq))));
                if (nodes.Length > 0)
                {
                    nodes = nodes.Normalize(SVMNormType.L1);

                    double predictedY = 0;
                    predictedY = LibSVMsharp.SVM.Predict(model, nodes);

                    double[] values = null; double probabilityValue = 0;
                    probabilityValue = LibSVMsharp.SVM.PredictValues(model, nodes, out values);

                    double[] est = null; double probability = 0;
                    probability = LibSVMsharp.SVM.PredictProbability(model, nodes, out est);

                    Console.WriteLine("{0,22}\t{1},{2},{3},{4},{5}", room, lableFeaturesBuilder.GetLable(predictedY), predictedY, probabilityValue, probability, string.Empty);
                }
            }

            Console.WriteLine(new string('=', 80));
        }
Code Example #30
        public void FindBestHyperparametersGivenAnswersTest()
        {
            SVMProblem problem = new SVMProblem();

            problem.Add(new SVMNode[]
            {
                new SVMNode(1, 1),
                new SVMNode(2, 10),
                new SVMNode(3, 72),
                new SVMNode(4, 55),
                new SVMNode(5, 1),
            }, 1);

            problem.Add(new SVMNode[]
            {
                new SVMNode(1, 1),
                new SVMNode(2, 10),
                new SVMNode(3, 2),
                new SVMNode(4, 95),
                new SVMNode(5, 16),
            }, 1);

            problem.Add(new SVMNode[]
            {
                new SVMNode(1, 1),
                new SVMNode(2, 12),
                new SVMNode(3, 13),
                new SVMNode(4, 14),
                new SVMNode(5, 15),
            }, 1);

            problem.Add(new SVMNode[]
            {
                new SVMNode(1, 0),
                new SVMNode(2, 13),
                new SVMNode(3, 37),
                new SVMNode(4, 4),
                new SVMNode(5, 18),
            }, 0);

            problem.Add(new SVMNode[]
            {
                new SVMNode(1, 0),
                new SVMNode(2, 100),
                new SVMNode(3, 720),
                new SVMNode(4, 550),
                new SVMNode(5, 10),
            }, 0);


            //Setup test SVMParameter
            SVMParameter parameter = new SVMParameter
            {
                Type        = SVMType.C_SVC,
                Kernel      = SVMKernelType.RBF,
                C           = 2,
                Gamma       = 1,
                Probability = true,
            };


            var actualParameter = TrainingHelper.FindBestHyperparameters(problem, parameter);

            Assert.AreEqual(0.015625d, actualParameter.C);
            Assert.AreEqual(0.125d, actualParameter.Gamma);
        }
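The test above asserts against TrainingHelper.FindBestHyperparameters, which is not included in this listing. A minimal sketch of such a helper, assuming it runs a cross-validation grid search over C and Gamma in powers of two and returns the best-scoring parameter set (the method shape, grid ranges, and fold count here are assumptions, not the project's actual implementation), could look like this:

        // Hypothetical sketch only: the real TrainingHelper.FindBestHyperparameters is not shown here,
        // and the grid ranges and fold count below are assumptions.
        public static SVMParameter FindBestHyperparameters(SVMProblem problem, SVMParameter baseParameter)
        {
            double bestAccuracy = double.MinValue;
            SVMParameter bestParameter = baseParameter;

            // Sweep C and Gamma over powers of two, a common choice for RBF kernels.
            for (int cExp = -10; cExp <= 10; cExp++)
            {
                for (int gExp = -10; gExp <= 10; gExp++)
                {
                    // Copy only the fields used by this test's parameter set.
                    SVMParameter candidate = new SVMParameter
                    {
                        Type        = baseParameter.Type,
                        Kernel      = baseParameter.Kernel,
                        Probability = baseParameter.Probability,
                        C           = Math.Pow(2, cExp),
                        Gamma       = Math.Pow(2, gExp),
                    };

                    // 5-fold cross validation; keep the parameter set with the best accuracy.
                    double[] crossValidationResults;
                    problem.CrossValidation(candidate, 5, out crossValidationResults);
                    double accuracy = problem.EvaluateClassificationProblem(crossValidationResults);

                    if (accuracy > bestAccuracy)
                    {
                        bestAccuracy  = accuracy;
                        bestParameter = candidate;
                    }
                }
            }

            return bestParameter;
        }

A power-of-two grid is at least consistent with the values the test expects (0.015625 = 2^-6 and 0.125 = 2^-3), but the real helper may search and tie-break differently.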
Code example #31
 public static string CheckParameter(this SVMProblem problem, SVMParameter parameter)
 {
     return(SVM.CheckParameter(problem, parameter));
 }
Code example #32
 public static SVMModel Train(this SVMProblem problem, SVMParameter parameter)
 {
     return SVM.Train(problem, parameter);
 }
Code example #33
 public static SVMModel Train(this SVMProblem problem, SVMParameter parameter)
 {
     return(SVM.Train(problem, parameter));
 }
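Examples #31-#33 are thin extension-method wrappers over the static SVM class, letting training code read as problem.Train(parameter) instead of SVM.Train(problem, parameter). A short usage sketch (the helper name is hypothetical, and treating a null or empty CheckParameter result as "valid" is an assumption rather than a documented contract) might look like this:

        // Hypothetical usage of the wrappers above; the helper name and the null/empty-means-valid
        // convention for CheckParameter are assumptions.
        public static SVMModel TrainWithParameterCheck(SVMProblem problem, SVMParameter parameter)
        {
            string error = problem.CheckParameter(parameter);   // forwards to SVM.CheckParameter(problem, parameter)
            if (!string.IsNullOrEmpty(error))
            {
                throw new ArgumentException("Invalid SVM parameters: " + error);
            }

            return problem.Train(parameter);                    // forwards to SVM.Train(problem, parameter)
        }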
Code example #34
        private void testSVM()
        {
            holdCommandListener = true;
            string        parentpath    = System.AppDomain.CurrentDomain.BaseDirectory;
            string        DATA_PATH     = parentpath + "Datasets\\dataset - Copy (2).txt";
            string        MODEL_PATH    = parentpath + "Model\\testmodel.txt";
            string        NEWDATA_PATH  = parentpath + "Datasets\\testdata.txt";
            string        RESULTS_PATH  = parentpath + "Datasets\\results.txt";
            List <string> resultsstring = new List <string>();

            SVMProblem   testSet       = SVMProblemHelper.Load(NEWDATA_PATH);
            SVMParameter testparameter = new SVMParameter();

            testparameter.Type   = SVMType.C_SVC;
            testparameter.Kernel = SVMKernelType.RBF;
            testparameter.C      = 0.1;   //Constants.C;
            testparameter.Gamma  = 0.001; // Constants.gammasq;

            List <SVMClass.SVMResult> ResultsList = new List <SVMClass.SVMResult>();

            SVMProblem problem = SVMProblemHelper.Load(DATA_PATH);
            double     C       = 0.001;
            double     gammasq = 0.001;

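            // Grid search: sweep C from 1 to 1000 and gamma from 0.001 to 1000, each by factors of 10,
            // scoring every (C, gamma) pair on the test file and with 10-fold cross validation.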
            for (C = 1; C <= 1000; C = C * 10)
            {
                for (gammasq = 0.001; gammasq <= 1000; gammasq = gammasq * 10)
                {
                    SVMParameter parameter = new SVMParameter();
                    parameter.Type   = SVMType.C_SVC;
                    parameter.Kernel = SVMKernelType.RBF;
                    parameter.C      = C;
                    parameter.Gamma  = gammasq;

                    SVMModel model = SVM.Train(problem, parameter);
                    //File.WriteAllText(MODEL_PATH, String.Empty);
                    //SVM.SaveModel(model, MODEL_PATH);
                    //Console.WriteLine("Trained and saved model.\n");

                    //model = SVM.LoadModel(MODEL_PATH);

                    SVMProblem newData = SVMProblemHelper.Load(NEWDATA_PATH);
                    //Console.Write("Predicted Result:\n");
                    double[] results = newData.Predict(model);
                    //Console.Write(results[0]);
                    int[,] confusionMatrix;
                    double testAccuracy = newData.EvaluateClassificationProblem(results, model.Labels, out confusionMatrix);

                    // Do cross validation to check this parameter set is correct for the dataset or not
                    double[] crossValidationResults; // output labels
                    int      nFold = 10;
                    problem.CrossValidation(parameter, nFold, out crossValidationResults);

                    // Evaluate the cross validation result
                    // If it is not good enough, select the parameter set again
                    double crossValidationAccuracy = problem.EvaluateClassificationProblem(crossValidationResults);
                    //Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);

                    string temp = "";

                    string resultstring = "Predict accuracy: " + testAccuracy + " C: " + C + " gamma: " + gammasq + " Cross validation accuracy: " + crossValidationAccuracy;
                    resultsstring.Add(resultstring);

                    if (parameter.C == testparameter.C && parameter.Gamma == testparameter.Gamma)
                    {
                        resultsstring.Add("This one is same as separate test.");
                    }

                    foreach (double res in results)
                    {
                        temp += res.ToString() + " ";
                    }
                    resultsstring.Add(temp);

                    SVMClass.SVMResult compiled = new SVMClass.SVMResult();
                    compiled.C             = C;
                    compiled.gamma         = gammasq;
                    compiled.testAcc       = testAccuracy;
                    compiled.crossValidAcc = crossValidationAccuracy;
                    ResultsList.Add(compiled);
                }
            }
            File.WriteAllLines(RESULTS_PATH, resultsstring);


            SVMModel testmodel = SVM.Train(problem, testparameter);

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(testmodel);
            foreach (double result in testResults)
            {
                Console.WriteLine(result);
            }

            // Evaluate the test results

            double maxTestAcc      = ResultsList.Max(resultdata => resultdata.testAcc);
            int    maxTestAccIndex = ResultsList.FindIndex(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            //double maxValidAcc = ResultsList.Max(resultdata => resultdata.crossValidAcc);
            //int maxValidAccIndex = ResultsList.FindIndex(resultdata => resultdata.crossValidAcc.Equals(maxValidAcc));
            List <SVMClass.SVMResult> topResults = ResultsList.FindAll(resultdata => resultdata.testAcc.Equals(maxTestAcc));
            double maxC      = topResults.Max(resultdata => resultdata.C);
            int    maxCIndex = topResults.FindIndex(resultdata => resultdata.C.Equals(maxC));

            // Note: picks the second-to-last entry among the top-accuracy results; this assumes topResults has at least two elements.
            double bestC     = topResults[topResults.Count - 2].C;     //topResults[maxCIndex].C;
            double bestgamma = topResults[topResults.Count - 2].gamma; //topResults[maxCIndex].gamma;

            Console.WriteLine("Best C: " + bestC + "  Best gammasq: " + bestgamma);

            foreach (SVMClass.SVMResult resultdata in topResults)
            {
                Console.WriteLine(resultdata.C.ToString() + " " + resultdata.gamma.ToString());
            }
            //int[,] confusionMatrix;
            //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, testmodel.Labels, out confusionMatrix);
            //Console.WriteLine("\n\nTest accuracy: " + testAccuracy);
        }