Example #1
        /// <summary>
        /// Performs n-fold cross validation on the given training data with the given parameter set.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <param name="nFolds">Number of folds; must be at least 2.</param>
        /// <param name="target">Cross-validation predictions, one per instance in the problem.</param>
        public static void CrossValidation(SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            if (nFolds < 2)
            {
                throw new ArgumentOutOfRangeException("nFolds");
            }

            IntPtr ptr_target    = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * problem.Length);
            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            libsvm.svm_cross_validation(ptr_problem, ptr_parameter, nFolds, ptr_target);

            target = new double[problem.Length];
            Marshal.Copy(ptr_target, target, 0, target.Length);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            Marshal.FreeHGlobal(ptr_target);
            ptr_target = IntPtr.Zero;
        }
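A minimal usage sketch for the wrapper above. The dataset path is a placeholder, and it is assumed that the method is exposed as SVM.CrossValidation with SVMProblemHelper.Load available; Example #4 below calls the equivalent extension-method form on SVMProblem.

        // Hypothetical call of the CrossValidation wrapper shown above
        SVMProblem problem = SVMProblemHelper.Load(@"dataset.txt"); // placeholder path

        SVMParameter parameter = new SVMParameter();
        parameter.Type = SVMType.C_SVC;
        parameter.Kernel = SVMKernelType.RBF;
        parameter.C = 1;
        parameter.Gamma = 0.5;

        double[] target;
        SVM.CrossValidation(problem, parameter, 5, out target); // 5-fold cross-validation

        // target[i] holds the cross-validation prediction for problem.X[i]
        double accuracy = problem.EvaluateClassificationProblem(target);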
Example #2
        /*public Classification(string training, string test)
        {
            problem = SVMProblemHelper.Load(training);
            testProblem = SVMProblemHelper.Load(test);
        }*/

        public void ClasificationHand()
        {

            System.Diagnostics.Debug.WriteLine("comienza a clasificar");
            parameter = new SVMParameter();

            parameter.Type = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C = 1;
            parameter.Gamma = 1;

            System.Diagnostics.Debug.WriteLine("cargo los valores");
            // System.Windows.MessageBox.Show("Algo real");
            SVMModel model = SVM.Train(problem, parameter);
            //System.Windows.MessageBox.Show("Oscar");
            System.Diagnostics.Debug.WriteLine("creo el modelo");
            double[] target;
            target = new double[testProblem.Length];
            int a = 3;
            for (int i = 0; i < testProblem.Length; i++)
                target[i] = SVM.Predict(model, testProblem.X[i]);
            System.Diagnostics.Debug.WriteLine("aqui ya clasifico");
            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);
            System.Diagnostics.Debug.WriteLine("termino");
        }
Example #3
        /// <summary>
        /// Checks whether the given parameter set is feasible for the given training data.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>An error message if the parameter set is invalid; otherwise null.</returns>
        public static string CheckParameter(SVMProblem problem, SVMParameter parameter)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_output = libsvm.svm_check_parameter(ptr_problem, ptr_parameter);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);

            string output = Marshal.PtrToStringAnsi(ptr_output);

            Marshal.FreeHGlobal(ptr_output);
            ptr_output = IntPtr.Zero;

            return(output);
        }
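In the underlying libsvm, svm_check_parameter returns NULL when the parameter set is feasible, so this wrapper returns null on success. A hedged pre-flight sketch, assuming problem and parameter are constructed as in the other examples:

        string error = SVM.CheckParameter(problem, parameter);
        if (error == null)
        {
            SVMModel model = SVM.Train(problem, parameter); // parameters are valid, safe to train
        }
        else
        {
            Console.WriteLine("Invalid parameter set: " + error);
        }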
Example #4
        static void Main(string[] args)
        {
            // Load the datasets. In this example the same dataset is used for training and testing, which is not recommended.
            SVMProblem trainingSet = SVMProblemHelper.Load(@"C:\Users\lozzr\Documents\Visual Studio 2015\Projects\MM\MM\Dataset\rt-1.txt");
            SVMProblem testSet = SVMProblemHelper.Load(@"C:\Users\lozzr\Documents\Visual Studio 2015\Projects\MM\MM\Dataset\rt-1.txt");

            // Normalize the datasets if you want: L2 Norm => x / ||x||
            trainingSet = trainingSet.Normalize(SVMNormType.L2);
            testSet = testSet.Normalize(SVMNormType.L2);

            // Select the parameter set
            SVMParameter parameter = new SVMParameter();
            parameter.Type = SVMType.ONE_CLASS;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C = 8.0;
            parameter.Gamma = 0.0078125;

            // Do cross-validation to check whether this parameter set is appropriate for the dataset
            double[] crossValidationResults; // output labels
            int nFold = 5;
            trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

            // Evaluate the cross validation result
            // If it is not good enough, select the parameter set again
            double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

            // Train the model if the parameter set gives a good result in cross-validation
            SVMModel model = trainingSet.Train(parameter);

            // Save the model
            SVM.SaveModel(model, @"C:\Users\lozzr\Documents\Visual Studio 2015\Projects\MM\MM\Model\rt-1_model.txt");

            // Predict the instances in the test set
            double[] testResults = testSet.Predict(model);

            // Evaluate the test results
            // int[,] confusionMatrix;
            //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);

            // Print the results
            Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            // Console.WriteLine("\nTest accuracy: " + testAccuracy);
            //Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            //Console.Write(String.Format("{0,6}", ""));
            /*for (int i = 0; i < model.Labels.Length; i++)
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            Console.WriteLine();
               /* for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                    Console.Write(String.Format("{0,5}", confusionMatrix[i, j]));
                Console.WriteLine();
            }*/

            Console.WriteLine("\n\nPress any key to quit...");
            Console.ReadLine();
        }
Example #5
        public static SVMParameter Convert(svm_parameter x)
        {
            SVMParameter y = new SVMParameter();

            y.Type        = (SVMType)x.svm_type;
            y.Kernel      = (SVMKernelType)x.kernel_type;
            y.Degree      = x.degree;
            y.Gamma       = x.gamma;
            y.Coef0       = x.coef0;
            y.CacheSize   = x.cache_size;
            y.Eps         = x.eps;
            y.C           = x.C;
            y.Nu          = x.nu;
            y.P           = x.p;
            y.Shrinking   = x.shrinking != 0;
            y.Probability = x.probability != 0;

            int length = x.nr_weight;

            y.WeightLabels = new int[length];
            if (length > 0)
            {
                Marshal.Copy(x.weight_label, y.WeightLabels, 0, length);
            }

            y.Weights = new double[length];
            if (length > 0)
            {
                Marshal.Copy(x.weight, y.Weights, 0, length);
            }

            return(y);
        }
Example #6
        public static SVMParameter Convert(IntPtr ptr)
        {
            if (ptr == IntPtr.Zero)
            {
                return(null);
            }

            svm_parameter x = (svm_parameter)Marshal.PtrToStructure(ptr, typeof(svm_parameter));

            return(SVMParameter.Convert(x));
        }
Example #7
 public PredictionResult(double[,] ConfusionMatrix, List<double> Recalls, List<double> Precisions, List<double> Fscores, SVMParameter SVMParam, List<Feature> Features, List<int> Answers, List<int> Guesses)
 {
     confusionMatrix = ConfusionMatrix;
     recalls = Recalls;
     precisions = Precisions;
     fscores = Fscores;
     svmParams = SVMParam;
     features = Features;
     guesses = Guesses;
     correct = Answers;
 }
Example #8
        /// <summary>
        /// This function constructs and returns an SVM model according to the given training data and parameters.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>SVM model according to the given training data and parameters.</returns>
        public static SVMModel Train(SVMProblem problem, SVMParameter parameter)
        {
            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_model = libsvm.svm_train(ptr_problem, ptr_parameter);
            SVMModel model = SVMModel.Convert(ptr_model);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            libsvm.svm_free_model_content(ptr_model);

            return model;
        }
Example #9
        /// <summary>
        /// Create a model from the training data, has to be called before predicting
        /// </summary>
        /// <param name="paramater">The SvmParameter is converted to OneClass type</param>
        /// <returns>returns true if the model was created correctly</returns>
        public bool CreateModel(SVMParameter parameter)
        {
            parameter.Type = SVMType.ONE_CLASS;

            try
            {
                _model = _trainingData.Train(parameter);
                return true;
            }
            catch
            {
                return false;
            }
        }
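A sketch of how CreateModel might be called. The OneClassClassifier constructor follows Example #29 below; PredictOutliers and the feature lists are assumptions taken from commented-out code in that example.

        // trainingFeatures / predictionFeatures: List<List<double>> of feature vectors (placeholders)
        OneClassClassifier occ = new OneClassClassifier(trainingFeatures);

        SVMParameter p = new SVMParameter();
        p.Kernel = SVMKernelType.RBF;
        p.Gamma = 0.01;
        p.Nu = 0.01;

        if (occ.CreateModel(p)) // Type is forced to ONE_CLASS inside CreateModel
        {
            List<int> outlierIndexes = occ.PredictOutliers(predictionFeatures);
        }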
Example #10
        /// <summary>
        /// This function constructs and returns an SVM model according to the given training data and parameters.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>SVM model according to the given training data and parameters.</returns>
        public static SVMModel Train(SVMProblem problem, SVMParameter parameter)
        {
            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr   ptr_model = libsvm.svm_train(ptr_problem, ptr_parameter);
            SVMModel model     = SVMModel.Convert(ptr_model);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            libsvm.svm_free_model_content(ptr_model);

            return(model);
        }
Example #11
        public static void Free(svm_model x)
        {
            Marshal.FreeHGlobal(x.rho);
            x.rho = IntPtr.Zero;

            Marshal.FreeHGlobal(x.probA);
            x.probA = IntPtr.Zero;

            Marshal.FreeHGlobal(x.probB);
            x.probB = IntPtr.Zero;

            Marshal.FreeHGlobal(x.sv_indices);
            x.sv_indices = IntPtr.Zero;

            Marshal.FreeHGlobal(x.label);
            x.label = IntPtr.Zero;

            Marshal.FreeHGlobal(x.nSV);
            x.nSV = IntPtr.Zero;

            SVMParameter.Free(x.param);

            IntPtr i_ptr_sv = x.SV;

            for (int i = 0; i < x.l; i++)
            {
                IntPtr ptr_nodes = (IntPtr)Marshal.PtrToStructure(i_ptr_sv, typeof(IntPtr));
                SVMNode.Free(ptr_nodes);

                i_ptr_sv = IntPtr.Add(i_ptr_sv, Marshal.SizeOf(typeof(IntPtr)));
            }

            Marshal.FreeHGlobal(x.SV);
            x.SV = IntPtr.Zero;

            IntPtr i_ptr_svcoef = x.sv_coef;

            for (int i = 0; i < x.nr_class - 1; i++)
            {
                IntPtr temp = (IntPtr)Marshal.PtrToStructure(i_ptr_svcoef, typeof(IntPtr));
                Marshal.FreeHGlobal(temp);
                temp = IntPtr.Zero;

                i_ptr_svcoef = IntPtr.Add(i_ptr_svcoef, Marshal.SizeOf(typeof(IntPtr)));
            }

            Marshal.FreeHGlobal(x.sv_coef);
            x.sv_coef = IntPtr.Zero;
        }
Example #12
        public static void Free(IntPtr ptr)
        {
            if (ptr == IntPtr.Zero)
            {
                return;
            }

            svm_parameter x = (svm_parameter)Marshal.PtrToStructure(ptr, typeof(svm_parameter));

            SVMParameter.Free(x);

            Marshal.DestroyStructure(ptr, typeof(svm_parameter));
            Marshal.FreeHGlobal(ptr);
            ptr = IntPtr.Zero;
        }
Example #13
 public SVMModel()
 {
     Parameter    = new SVMParameter();
     ClassCount   = 0;
     TotalSVCount = 0;
     SV           = null;
     SVCoefs      = null;
     Rho          = null;
     ProbabilityA = null;
     ProbabilityB = null;
     SVIndices    = null;
     Labels       = null;
     SVCounts     = null;
     Creation     = CreationType.LOAD_MODEL;
 }
Example #14
 public SVMModel()
 {
     Parameter = new SVMParameter();
     ClassCount = 0;
     TotalSVCount = 0;
     SV = null;
     SVCoefs = null;
     Rho = null;
     ProbabilityA = null;
     ProbabilityB = null;
     SVIndices = null;
     Labels = null;
     SVCounts = null;
     Creation = CreationType.LOAD_MODEL;
 }
Example #15
        /// <summary>
        /// Performs n-fold cross validation on the given training data with the given parameter set.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <param name="nFolds">Number of folds.</param>
        /// <param name="target">Cross-validation predictions, one per instance in the problem.</param>
        public static void CrossValidation(SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
        {
            IntPtr ptr_target    = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * problem.Length);
            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            libsvm.svm_cross_validation(ptr_problem, ptr_parameter, nFolds, ptr_target);

            target = new double[problem.Length];
            Marshal.Copy(ptr_target, target, 0, target.Length);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            Marshal.FreeHGlobal(ptr_target);
            ptr_target = IntPtr.Zero;
        }
Example #16
        /// <summary>
        /// Checks whether the given parameter set is feasible for the given training data.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>An error message if the parameter set is invalid; otherwise null.</returns>
        public static string CheckParameter(SVMProblem problem, SVMParameter parameter)
        {
            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_output = libsvm.svm_check_parameter(ptr_problem, ptr_parameter);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);

            string output = Marshal.PtrToStringAnsi(ptr_output);
            Marshal.FreeHGlobal(ptr_output);
            ptr_output = IntPtr.Zero;

            return output;
        }
Example #17
        /// <summary>
        /// Checks whether the given parameter set is feasible for the given training data.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>An error message if the parameter set is invalid; otherwise null.</returns>
        public static string CheckParameter(SVMProblem problem, SVMParameter parameter)
        {
            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_output = libsvm.svm_check_parameter(ptr_problem, ptr_parameter);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);

            string output = Marshal.PtrToStringAnsi(ptr_output);

            Marshal.FreeHGlobal(ptr_output);
            ptr_output = IntPtr.Zero;

            return(output);
        }
Example #18
        public static IntPtr Allocate(SVMParameter x)
        {
            if (x == null)
            {
                return(IntPtr.Zero);
            }

            svm_parameter y = new svm_parameter();

            y.svm_type    = (int)x.Type;
            y.kernel_type = (int)x.Kernel;
            y.degree      = x.Degree;
            y.gamma       = x.Gamma;
            y.coef0       = x.Coef0;
            y.cache_size  = x.CacheSize;
            y.eps         = x.Eps;
            y.C           = x.C;
            y.nu          = x.Nu;
            y.p           = x.P;
            y.shrinking   = x.Shrinking ? 1 : 0;
            y.probability = x.Probability ? 1 : 0;
            y.nr_weight   = x.WeightLabels.Length;

            y.weight_label = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight_label = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.WeightLabels.Length);
                Marshal.Copy(x.WeightLabels, 0, y.weight_label, x.WeightLabels.Length);
            }

            y.weight = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.Weights.Length);
                Marshal.Copy(x.Weights, 0, y.weight, x.Weights.Length);
            }

            int    size = Marshal.SizeOf(y);
            IntPtr ptr  = Marshal.AllocHGlobal(size);

            Marshal.StructureToPtr(y, ptr, true);

            return(ptr);
        }
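Allocate copies the managed parameter into unmanaged memory, so callers are expected to release it again. A minimal sketch of the intended pairing with SVMParameter.Free (shown in Example #12):

        IntPtr ptr_parameter = SVMParameter.Allocate(parameter);
        try
        {
            // ... pass ptr_parameter to a native libsvm call ...
        }
        finally
        {
            SVMParameter.Free(ptr_parameter); // releases the weight arrays and the structure itself
        }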
Example #19
        public SVMParameter Clone()
        {
            SVMParameter y = new SVMParameter();

            y.Type         = Type;
            y.Kernel       = Kernel;
            y.Degree       = Degree;
            y.Gamma        = Gamma;
            y.Coef0        = Coef0;
            y.C            = C;
            y.Nu           = Nu;
            y.P            = P;
            y.CacheSize    = CacheSize;
            y.Eps          = Eps;
            y.Shrinking    = Shrinking;
            y.Probability  = Probability;
            y.WeightLabels = WeightLabels.Select(a => a).ToArray();
            y.Weights      = Weights.Select(a => a).ToArray();
            return(y);
        }
Example #20
        /*public Classification(string training, string test)
        {
            problem = SVMProblemHelper.Load(training);
            testProblem = SVMProblemHelper.Load(test);
        }*/

        public void ClasificationHand()
        {

            parameter = new SVMParameter();

            parameter.Type = SVMType.C_SVC;
            parameter.Kernel = SVMKernelType.RBF;
            parameter.C = 1;
            parameter.Gamma = 1;


           // System.Windows.MessageBox.Show("Algo real");
            SVMModel model = SVM.Train(problem, parameter);
            //System.Windows.MessageBox.Show("Oscar");

            double[] target;
            target = new double[testProblem.Length];
            int a = 3;
            for (int i = 0; i < testProblem.Length; i++)
                target[i] = SVM.Predict(model, testProblem.X[i]);

            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);
        }
Example #21
        /// <summary>
        /// This function constructs and returns an SVM model according to the given training data and parameters.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>SVM model according to the given training data and parameters.</returns>
        public static SVMModel Train(SVMProblem problem, SVMParameter parameter)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            IntPtr ptr_problem   = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr   ptr_model = libsvm.svm_train(ptr_problem, ptr_parameter);
            SVMModel model     = SVMModel.Convert(ptr_model);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            libsvm.svm_free_model_content(ptr_model);

            return(model);
        }
Example #22
        /// <summary>
        /// This function constructs and returns an SVM model according to the given training data and parameters.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <returns>SVM model according to the given training data and parameters.</returns>
        public static SVMModel Train(SVMProblem problem, SVMParameter parameter)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_model = libsvm.svm_train(ptr_problem, ptr_parameter);
            SVMModel model = SVMModel.Convert(ptr_model);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            libsvm.svm_free_model_content(ptr_model);

            return model;
        }
Example #23
        /// <summary>
        /// Performs n-fold cross validation on the given training data with the given parameter set.
        /// </summary>
        /// <param name="problem">Training data.</param>
        /// <param name="parameter">Parameter set.</param>
        /// <param name="nFolds">Number of folds.</param>
        /// <param name="target">Cross-validation predictions, one per instance in the problem.</param>
        public static void CrossValidation(SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
        {
            IntPtr ptr_target = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * problem.Length);
            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            libsvm.svm_cross_validation(ptr_problem, ptr_parameter, nFolds, ptr_target);

            target = new double[problem.Length];
            Marshal.Copy(ptr_target, target, 0, target.Length);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            Marshal.FreeHGlobal(ptr_target);
            ptr_target = IntPtr.Zero;
        }
Example #24
 public MetaClassifier(string Name, SVMParameter Parameter, SAMData SamData, List<StdClassifier> Classifiers)
     : base(Name, Parameter, SamData)
 {
     standardClassifiers = Classifiers;
 }
Example #25
 public Classifier(string Name, SVMParameter Parameter, SAMData samData)
 {
     this.Name = Name;
     this.Parameters = new List<SVMParameter>() { Parameter };
     this.samData = samData;
 }
Example #26
       // private int numberOfTasks = 10;
        private async Task<NoveltyResult> DoNoveltyDetection(SENSOR sensor, int start, int end)
        {
            string sensorPath = path + "/" + sensor.ToString();
            var data = featureVectors[sensor].Select(x => x.Features).ToList();
            ConcurrentStack<SVMParameter> svmParams = new ConcurrentStack<SVMParameter>();
            //Debug purpose
            /*for (int i = 0; i < 10; i++)
            {
                SVMParameter s = new SVMParameter();
                s.C = 100;
                s.Gamma = 0.01;
                s.Kernel = SVMKernelType.RBF;
                s.Type = SVMType.ONE_CLASS;
                s.Nu = 0.01;
                svmParams.Push(s);
            }*/
            svmParams.PushRange(GenerateOneClassSVMParameters().ToArray());
            SetProgressMax(svmParams.Count + 1);
            NoveltyResult bestCoveredResult = null;
            Mutex bestResultMu = new Mutex(false, sensor.ToString());
            int count = 1;
            List<Task> tasks = new List<Task>();
            for (int i = 0; i < threadMAX.Value; i++)
            {
                Task task = Task.Run(() => PredictionThread(ref count, sensor, start, end, ref svmParams, data, svmParams.Count, ref bestCoveredResult, ref bestResultMu));
                tasks.Add(task);
            }
            await Task.WhenAll(tasks);
            List<int> integers = Enumerable.Range(1, 100).ToList();
            List <double> nus = integers.Select(x => ((double)x) / 100).ToList();
            // Sweep nu from 0.01 to 1.00, keeping the best kernel and gamma found so far
            foreach (double d in nus)
            {
                SVMParameter para = new SVMParameter();
                para.Gamma = bestCoveredResult.parameter.Gamma;
                para.Nu = d;
                para.Kernel = bestCoveredResult.parameter.Kernel;
                svmParams.Push(para);
            }

            count = 1;
            SetProgressMax(svmParams.Count + 1);
            bestResultMu.Dispose();
            bestResultMu = new Mutex(false, sensor.ToString());
            List<Task> nuCov = new List<Task>();
            ConcurrentBag<string> covNu = new ConcurrentBag<string>();
            for (int i = 0; i < threadMAX.Value; i++)
            {
                Task task = Task.Run(() => PredictionNuThread(ref count, sensor, start, end, ref svmParams, data, svmParams.Count, ref bestCoveredResult, ref bestResultMu, ref covNu));
                tasks.Add(task);
            }
            await Task.WhenAll(tasks);
            File.WriteAllLines(path + "/Params.txt", new List<string> { bestCoveredResult.parameter.Gamma.ToString(), bestCoveredResult.parameter.Kernel.ToString() });
            File.WriteAllLines(path + $"/{sensor.ToString()}CovNu.txt", covNu.ToList());
            
            bestResultMu.Dispose();
            return bestCoveredResult;
        }
Example #27
 public SVMConfiguration(string Name, SVMParameter Parameters, List<Feature> Features)
     : this(Parameters, Features)
 {
     this.Name = Name;
 }
Example #28
        public static IntPtr Allocate(SVMParameter x)
        {
            if (x == null)
                return IntPtr.Zero;

            svm_parameter y = new svm_parameter();
            y.svm_type = (int)x.Type;
            y.kernel_type = (int)x.Kernel;
            y.degree = x.Degree;
            y.gamma = x.Gamma;
            y.coef0 = x.Coef0;
            y.cache_size = x.CacheSize;
            y.eps = x.Eps;
            y.C = x.C;
            y.nu = x.Nu;
            y.p = x.P;
            y.shrinking = x.Shrinking ? 1 : 0;
            y.probability = x.Probability ? 1 : 0;
            y.nr_weight = x.WeightLabels.Length;

            y.weight_label = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight_label = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.WeightLabels.Length);
                Marshal.Copy(x.WeightLabels, 0, y.weight_label, x.WeightLabels.Length);
            }

            y.weight = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.Weights.Length);
                Marshal.Copy(x.Weights, 0, y.weight, x.Weights.Length);
            }

            int size = Marshal.SizeOf(y);
            IntPtr ptr = Marshal.AllocHGlobal(size);
            Marshal.StructureToPtr(y, ptr, true);

            return ptr;
        }
Example #29
        private void button1_Click(object sender, EventArgs e)
        {
            noveltyChart.ChartAreas.First().BackColor = Color.DarkGray;
            FolderBrowserDialog fbd = new FolderBrowserDialog();
            if (fbd.ShowDialog() == DialogResult.OK)
            {
                string path = fbd.SelectedPath;
                string testSubjectId = path.Split('\\')[path.Split('\\').Length - 2];

                fdNovelty.LoadFromFile(new string[] { path + @"\EEG.dat", path + @"\GSR.dat", path + @"\HR.dat", path + @"\KINECT.dat" }, DateTime.Now, false);
                events = File.ReadAllLines(path + @"\SecondTest.dat");

                string[] tmpSevents = File.ReadAllLines(path + @"\sam.dat");
                foreach (string ev in tmpSevents)
                {
                    sEvents.Add(new samEvents(int.Parse(ev.Split(':')[0]), int.Parse(ev.Split(':')[1]), int.Parse(ev.Split(':')[2])));
                }

            }
            if (events.Length == 0)
            {
                // events were not loaded from the second test data
                return;
            }
            int start = (useRestInTraining.Checked) ? 180000 : 0;
            int trainingEnd = int.Parse(events[2].Split('#')[0]);
            int windowSize = 5000;
            int stepSize = 100;
            int delay = 2000;

            //Split into training & prediction set
            List<List<double>> featureVectors = new List<List<double>>();
            List<int> timeStamps = new List<int>();

            for (int i = 0; i < fdNovelty.gsrData.Last().timestamp - fdNovelty.gsrData.First().timestamp - windowSize; i += stepSize)
            {
                List<double> featureVector = new List<double>();
                List<double> data = fdNovelty.gsrData.SkipWhile(x => (x.timestamp - fdNovelty.gsrData.First().timestamp) < i).TakeWhile(x => i + windowSize > (x.timestamp - fdNovelty.gsrData.First().timestamp)).Select(x => (double)x.resistance).ToList();
                if (data.Count == 0) continue;
                featureVector.Add(data.Average());
                featureVector.Add(data.Max());
                featureVector.Add(data.Min());
                double avg = data.Average();
                double sd = Math.Sqrt(data.Average(x => Math.Pow(x - avg, 2)));
                featureVector.Add(sd);
                featureVectors.Add(featureVector);
                timeStamps.Add(i);
            }

            featureVectors = featureVectors.NormalizeFeatureList<double>(Normalize.OneMinusOne).ToList();
            var dataSet = featureVectors.Zip(timeStamps, (first, second) => { return Tuple.Create(first, second); });

            var trainingSet = dataSet.SkipWhile(x => x.Item2 < start).TakeWhile(x => x.Item2 < trainingEnd);
            var predictionSet = dataSet.SkipWhile(x => x.Item2 < trainingEnd);

            int count = predictionSet.Count();
            int firstPredcition = predictionSet.First().Item2;
            OneClassClassifier occ = new OneClassClassifier(trainingSet.Select(x => x.Item1).ToList());
            SVMParameter svmP = new SVMParameter();
            svmP.Kernel = SVMKernelType.RBF;
            svmP.C = 100;
            svmP.Gamma = 0.01;
            svmP.Nu = 0.01;
            svmP.Type = SVMType.ONE_CLASS;
            occ.CreateModel(svmP);
            /* List<int> indexes = occ.PredictOutliers(predictionSet.Select(x => x.Item1).ToList());

             foreach (int index in indexes)
             {
                 timestampsOutliers.Add(predictionSet.ElementAt(index).Item2 - firstPredcition + 180000 + 4000);
             }
             */

            updateChart();

            int k = 0;
        }
Example #30
 private List<SVMParameter> GenerateSVMParameters()
 {
     List<double> cTypes = new List<double>() { };
     List<double> gammaTypes = new List<double>() { };
     List<SVMKernelType> kernels = new List<SVMKernelType> { SVMKernelType.LINEAR, SVMKernelType.POLY, SVMKernelType.RBF, SVMKernelType.SIGMOID };
     for (int t = -5; t <= 15; t++)
     {
         cTypes.Add(Math.Pow(2, t));
     }
     for (int t = -15; t <= 3; t++)
     {
         gammaTypes.Add(Math.Pow(2, t));
     }
     //Generate SVMParams
     List<SVMParameter> svmParams = new List<SVMParameter>();
     foreach (SVMKernelType kernel in kernels)
     {
         foreach (double c in cTypes)
         {
             for (int i = 0; (kernel != SVMKernelType.LINEAR) ? i < gammaTypes.Count : i < 1; i++)
             {
                 SVMParameter t = new SVMParameter();
                 t.Kernel = kernel;
                 t.C = c;
                 t.Gamma = gammaTypes[i];
                 svmParams.Add(t);
             }
         }
     }
     return svmParams;
 }
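A sketch of how such a grid is typically consumed: run cross-validation for each candidate and keep the best one. trainingSet and the extension methods follow the pattern of Example #4; none of this is taken from the surrounding project.

        SVMParameter best = null;
        double bestAccuracy = double.MinValue;
        foreach (SVMParameter candidate in GenerateSVMParameters())
        {
            double[] cvResults;
            trainingSet.CrossValidation(candidate, 5, out cvResults);  // 5-fold CV per candidate
            double accuracy = trainingSet.EvaluateClassificationProblem(cvResults);
            if (accuracy > bestAccuracy)
            {
                bestAccuracy = accuracy;
                best = candidate;
            }
        }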
Example #31
        // Create the training file
        public void TrainingExec(List<FaceFeature.FeatureValue> FeatureList)
        {
            //Transfer the feature values into a Mat; two values make one point
            //Array of (2 floats * list size)
            double[] feature_array = new double[2 * FeatureList.Count];

            //Convert the feature values into an array so the SVM can handle them
            SetFeatureListToArray(FeatureList,ref feature_array);
            CvPoint2D32f[] feature_points = new CvPoint2D32f[feature_array.Length/2];
            int id = 0;
            for (int i = 0; i < feature_array.Length / 2; i++)
            {
                feature_points[id].X = (float)feature_array[i * 2];
                feature_points[id].Y = (float)feature_array[i * 2 + 1];
                id++;
            }
            CvMat dataMat = new CvMat(feature_points.Length, 2, MatrixType.F32C1, feature_points, true);

            //These are the label numbers
            int[] id_array = new int[FeatureList.Count];
            for(int i = 0; i < id_array.Length;i++)
            {
                id_array[i] = FeatureList[i].ID;
            }
            CvMat resMat = new CvMat(id_array.Length, 1, MatrixType.S32C1, id_array, true);


            // Draw how the data and responses look
            CvPoint2D32f[] points = new CvPoint2D32f[id_array.Length];
            int idx = 0;
            for (int i = 0; i < id_array.Length; i++)
            {
                points[idx].X = (float)feature_array[i * 2];
                points[idx].Y = (float)feature_array[i * 2 + 1];
                idx++;
            }

            //Plot the training data
            Debug_DrawInputFeature(points, id_array);

            //For debugging: output the feature values used for training
            OutPut_FeatureAndID(points, id_array);

            //LibSVM test
            //Load the training data
            SVMProblem problem = SVMProblemHelper.Load(@"wine.txt");
            SVMProblem testProblem = SVMProblemHelper.Load(@"wine.txt");

            SVMParameter parameter = new SVMParameter();
            parameter.Type = LibSVMsharp.SVMType.C_SVC;
            parameter.Kernel = LibSVMsharp.SVMKernelType.RBF;
            parameter.C = 1;
            parameter.Gamma = 1;

            SVMModel model = SVM.Train(problem, parameter);
            double[] target = new double[testProblem.Length];


             for (int i = 0; i < testProblem.Length; i++)
            {
                target[i] = SVM.Predict(model, testProblem.X[i]);
            }
            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);

            
            //Prepare the SVM
            CvTermCriteria criteria = new CvTermCriteria(1000, 0.000001);
            CvSVMParams param = new CvSVMParams(
                OpenCvSharp.CPlusPlus.SVMType.CSvc,
                OpenCvSharp.CPlusPlus.SVMKernelType.Rbf,
                10.0,   // degree
                100.0,  // gamma (tune)
                1.0,    // coef0
                10.0,   // c (tune)
                0.5,    // nu
                0.1,    // p
                null,
                criteria);

            //Run training
            svm.Train(dataMat, resMat, null, null, param);

            Debug_DispPredict();

        }
Example #32
        private void btn_CalculateResults_Click(object sender, EventArgs e)
        {
            FolderBrowserDialog fbd = new FolderBrowserDialog() { Description = "Select folder to load test subjects from" };

            if (fbd.ShowDialog() == DialogResult.OK)
            {
                FolderBrowserDialog sfd = new FolderBrowserDialog() { Description = "Select folder to load excel sheets from" };
                if (sfd.ShowDialog() == DialogResult.OK)
                {
                    Excel.Application exc = new Excel.Application() { Visible = false };

                    List<string> singles = new List<string>()
                    {
                        "EEG",
                        "FACE",
                        "GSR",
                        "HR"
                    };

                    List<string> fusion = new List<string>()
                    {
                        "Stacking",
                        "Voting"
                    };

                    Dictionary<SAMDataPoint.FeelingModel, int> cellOffsets = new Dictionary<SAMDataPoint.FeelingModel, int>()
                    {
                        [SAMDataPoint.FeelingModel.Arousal2High] = 11,
                        [SAMDataPoint.FeelingModel.Arousal2Low] = 24,
                        [SAMDataPoint.FeelingModel.Arousal3] = 40,
                        [SAMDataPoint.FeelingModel.Valence2High] = 53,
                        [SAMDataPoint.FeelingModel.Valence2Low] = 66,
                        [SAMDataPoint.FeelingModel.Valence3] = 82
                    };

                    //test subject[SAMModel[machineType]]
                    //items: C, Gamma, Kernel
                    var configs = new Dictionary<string, Dictionary<SAMDataPoint.FeelingModel, Dictionary<string, Tuple<double, double, int>>>>();

                    foreach (var configBook in singles.Concat(fusion))
                    {
                        Log.LogMessage($"Reading {configBook}...");
                        Excel.Workbook dataBook = exc.Workbooks.Open(sfd.SelectedPath + $"/{configBook}.xlsx", ExcelHandler.missingValue,
                                                                    false,
                                                                    ExcelHandler.missingValue,
                                                                    ExcelHandler.missingValue,
                                                                    ExcelHandler.missingValue,
                                                                    true,
                                                                    ExcelHandler.missingValue,
                                                                    ExcelHandler.missingValue,
                                                                    true,
                                                                    ExcelHandler.missingValue,
                                                                    ExcelHandler.missingValue,
                                                                    ExcelHandler.missingValue);

                        foreach (Excel.Worksheet sheet in dataBook.Sheets)
                        {
                            if (sheet.Name == "Overview" || sheet.Name == "First" || sheet.Name == "Last") continue;

                            if (!configs.ContainsKey(sheet.Name))
                            {
                                configs.Add(sheet.Name, new Dictionary<SAMDataPoint.FeelingModel, Dictionary<string, Tuple<double, double, int>>>());
                            }

                            if (configs[sheet.Name].Count == 0)
                            {
                                foreach (SAMDataPoint.FeelingModel feel in Enum.GetValues(typeof(SAMDataPoint.FeelingModel)))
                                {
                                    configs[sheet.Name].Add(feel, new Dictionary<string, Tuple<double, double, int>>());

                                    singles.ForEach(x => configs[sheet.Name][feel].Add(x, null));
                                    fusion.ForEach(x => configs[sheet.Name][feel].Add(x, null));

                                    if (feel.ToString().Contains("Valence"))
                                    {
                                        configs[sheet.Name][feel].Remove("GSR");
                                    }
                                }
                            }

                            foreach (SAMDataPoint.FeelingModel feel in Enum.GetValues(typeof(SAMDataPoint.FeelingModel)))
                            {
                                if (configBook == "GSR" && feel.ToString().Contains("Valence")) continue;

                                Excel.Range c = (Excel.Range)sheet.Cells[cellOffsets[feel], 3];
                                Excel.Range gamma = (Excel.Range)sheet.Cells[cellOffsets[feel] + 1, 3];
                                Excel.Range kernel = (Excel.Range)sheet.Cells[cellOffsets[feel] + 2, 3];

                                if (c.Value != null && gamma.Value != null && kernel.Value != null)
                                {
                                    configs[sheet.Name][feel][configBook] = Tuple.Create(c.Value, gamma.Value, (int)kernel.Value);
                                }

                            }

                        }

                        dataBook.Close();
                    }

                    List<string> skippedList = new List<string>();
                    foreach (var subject in configs)
                    {
                        foreach (var feel in subject.Value)
                        {
                            foreach (var machine in feel.Value)
                            {
                                if (machine.Value == null)
                                {
                                    skippedList.Add(subject.Key);
                                }
                            }
                        }
                    }

                    foreach (var skipped in skippedList)
                    {
                        configs.Remove(skipped);
                        Log.LogMessage($"Skipped {skipped}");
                    }

                    var resultsList = new Dictionary<string, Dictionary<string, List<int>>>();
                    resultsList.Add("SAM", new Dictionary<string, List<int>>());
                    foreach (string feel in Enum.GetNames(typeof(SAMDataPoint.FeelingModel)))
                    {
                        resultsList.Add(feel, new Dictionary<string, List<int>>());
                        resultsList["SAM"].Add(feel, new List<int>());
                        foreach (var item in singles.Concat(fusion))
                        {
                            resultsList[feel].Add(item, new List<int>());
                        }
                    }

                    foreach (var subject in configs)
                    {
                        if (LoadData(fbd.SelectedPath + $"/{subject.Key}", _fd))
                        {
                            foreach (var feel in subject.Value)
                            {
                                resultsList["SAM"][feel.Key.ToString()].AddRange(samData.dataPoints.Select(x => x.ToAVCoordinate(feel.Key)));

                                foreach (var machine in feel.Value)
                                {
                                    Log.LogMessage($"Calculating {subject.Key} / {feel} / {machine}");

                                    var param = new SVMParameter()
                                    {
                                        C = machine.Value.Item1,
                                        Gamma = machine.Value.Item2,
                                        Kernel = (SVMKernelType)machine.Value.Item3
                                    };

                                    if (singles.Contains(machine.Key))
                                    {
                                        var classifier = new StdClassifier(new SVMConfiguration(param, FeatureCreator.GetFeatures(machine.Key, feel.Key)), samData);

                                        var results = classifier.OldCrossValidate(feel.Key, 1);
                                        foreach (var res in results[0].guesses)
                                        {
                                            resultsList[feel.Key.ToString()][machine.Key].Add(res);
                                        }
                                    }
                                    else
                                    {
                                        //var classifier = new MetaClassifier(machine.Key, param, samData)
                                    }
                                }
                            }
                        }
                        else
                        {
                            Log.LogMessage($"Skipped {subject.Key} due to bad data");
                            skippedList.Add(subject.Key);
                        }
                    }

                    Excel.Workbook resultBook = exc.Workbooks.Add(ExcelHandler.missingValue);

                    Excel.Worksheet metaSheet = resultBook.Sheets.Add();
                    metaSheet.Name = "Meta";

                    metaSheet.Cells[1, 1] = "Skipped " + skippedList.Count;
                    for (int i = 0; i < skippedList.Count; i++)
                    {
                        metaSheet.Cells[i + 2, 1] = skippedList[i];
                    }

                    foreach (var feel in Enum.GetNames(typeof(SAMDataPoint.FeelingModel)))
                    {
                        Excel.Worksheet feelSheet = resultBook.Sheets.Add();
                        feelSheet.Name = feel;
                    }

                    exc.Quit();
                }
            }
        }
Example #33
        private List<SVMParameter> GenerateOneClassSVMParameters()
        {
            List<double> gammaTypes = new List<double>() { };
            List<SVMKernelType> kernels = new List<SVMKernelType> { SVMKernelType.RBF, SVMKernelType.SIGMOID };

            for (int t = -14; t <= 2; t += 1)
            {
                gammaTypes.Add(Math.Pow(2, t));
            }
            //Generate SVMParams
            List<SVMParameter> svmParams = new List<SVMParameter>();
            foreach (SVMKernelType kernel in kernels)
            {
                for (int i = 0; i < gammaTypes.Count; i++)
                {
                    SVMParameter t = new SVMParameter();
                    t.Kernel = kernel;
                    t.Nu = 0.05;
                    t.Gamma = gammaTypes[i];
                    svmParams.Add(t);
                }
            }
            return svmParams;
        }
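These one-class candidates are normally pushed onto a ConcurrentStack and drained by worker threads, as Example #26 above does. A simplified single-threaded sketch follows; the ONE_CLASS type assignment is an assumption, mirroring Example #9.

        ConcurrentStack<SVMParameter> svmParams = new ConcurrentStack<SVMParameter>();
        svmParams.PushRange(GenerateOneClassSVMParameters().ToArray());

        SVMParameter next;
        while (svmParams.TryPop(out next))
        {
            next.Type = SVMType.ONE_CLASS;
            // ... train and score a one-class model with 'next' ...
        }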
Example #34
        public static IntPtr Allocate(SVMModel x)
        {
            if (x == null || x.ClassCount < 1 || x.Labels == null || x.Labels.Length < 1 || x.Parameter == null ||
                x.Rho == null || x.Rho.Length < 1 || x.SVCoefs == null || x.SVCoefs.Count < 1 || x.TotalSVCount < 1 ||
                x.SVCounts == null || x.SVCounts.Length < 1)
            {
                return(IntPtr.Zero);
            }

            svm_model y = new svm_model();

            y.nr_class = x.ClassCount;
            y.l        = x.TotalSVCount;
            y.free_sv  = (int)x.Creation;

            // Allocate model.parameter
            IntPtr ptr_param = SVMParameter.Allocate(x.Parameter);

            y.param = (svm_parameter)Marshal.PtrToStructure(ptr_param, typeof(svm_parameter));

            // Allocate model.rho
            y.rho = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.Rho.Length);
            Marshal.Copy(x.Rho, 0, y.rho, x.Rho.Length);

            // Allocate model.probA
            y.probA = IntPtr.Zero;
            if (x.ProbabilityA != null)
            {
                y.probA = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.ProbabilityA.Length);
                Marshal.Copy(x.ProbabilityA, 0, y.probA, x.ProbabilityA.Length);
            }

            // Allocate model.probB
            y.probB = IntPtr.Zero;
            if (x.ProbabilityB != null)
            {
                y.probB = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.ProbabilityB.Length);
                Marshal.Copy(x.ProbabilityB, 0, y.probB, x.ProbabilityB.Length);
            }

            // Allocate model.label
            y.label = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.Labels.Length);
            Marshal.Copy(x.Labels, 0, y.label, x.Labels.Length);

            // Allocate model.nSV
            y.nSV = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.SVCounts.Length);
            Marshal.Copy(x.SVCounts, 0, y.nSV, x.SVCounts.Length);

            // Allocate model.sv_coef
            y.sv_coef = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(IntPtr)) * x.SVCoefs.Count);
            IntPtr i_ptr_svcoef = y.sv_coef;

            for (int i = 0; i < x.SVCoefs.Count; i++)
            {
                IntPtr temp = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.SVCoefs[i].Length);
                Marshal.Copy(x.SVCoefs[i], 0, temp, x.SVCoefs[i].Length);
                Marshal.StructureToPtr(temp, i_ptr_svcoef, true);
                i_ptr_svcoef = IntPtr.Add(i_ptr_svcoef, Marshal.SizeOf(typeof(IntPtr)));
            }

            // Allocate model.sv_indices
            y.sv_indices = IntPtr.Zero;
            if (x.SVIndices != null)
            {
                y.sv_indices = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.SVIndices.Length);
                Marshal.Copy(x.SVIndices, 0, y.sv_indices, x.SVIndices.Length);
            }

            // Allocate model.SV
            y.SV = IntPtr.Zero;
            if (x.SV != null)
            {
                y.SV = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(IntPtr)) * x.SV.Count);
                IntPtr i_ptr_sv = y.SV;
                for (int i = 0; i < x.SV.Count; i++)
                {
                    // Prepare each node array
                    // 1) All nodes containing zero value is removed
                    // 2) A node which index is -1 is added to the end
                    List <SVMNode> temp = x.SV[i].Where(a => a.Value != 0).ToList();
                    temp.Add(new SVMNode(-1, 0));
                    SVMNode[] nodes = temp.ToArray();

                    // Allocate node array
                    IntPtr ptr_nodes = SVMNode.Allocate(nodes);
                    Marshal.StructureToPtr(ptr_nodes, i_ptr_sv, true);

                    i_ptr_sv = IntPtr.Add(i_ptr_sv, Marshal.SizeOf(typeof(IntPtr)));
                }
            }

            // Allocate the model
            int    size = Marshal.SizeOf(y);
            IntPtr ptr  = Marshal.AllocHGlobal(size);

            Marshal.StructureToPtr(y, ptr, true);

            return(ptr);
        }
Example #35
        public static void Libsvm(string user)
        {
            Thread.Sleep(5000);
            int Y = 4;
            string word = "east";
            while (Y > 0)
            {
                if (Y == 3)
                {

                    word = "Ground";

                }
                if (Y == 2)
                {
                    word = "sea";

                }

                if (Y == 1)
                {

                    word = "station";

                }

                if (File.Exists(@"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_" + word + ".txt"))
                {

                    // Load the datasets. In this example the same dataset is used for training and testing, which is not recommended.
                    SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_" + word + ".txt");
                    //SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_east.txt");
                    //SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_temp_east.txt");
                    SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_" + word + ".txt");
                    //SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\1012964-Ground-train.txt");
                    //SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\1012964-Ground-train.txt");

                    //Normalize the datasets if you want: L2 Norm => x / ||x||
                    trainingSet = trainingSet.Normalize(SVMNormType.L2);
                    testSet = testSet.Normalize(SVMNormType.L2);

                    // Select the parameter set
                    SVMParameter parameter = new SVMParameter();
                    parameter.Type = SVMType.ONE_CLASS;
                    parameter.Kernel = SVMKernelType.RBF;
                    parameter.C = 8;
                    parameter.Gamma = 0.0078125;
                    parameter.Nu = 0.3;

                    // Do cross-validation to check whether this parameter set is appropriate for the dataset
                    //double[] crossValidationResults; // output labels
                    //int nFold = 5;
                    //trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

                    // Evaluate the cross validation result
                    // If it is not good enough, select the parameter set again
                    //double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

                    // Train the model if the parameter set gives a good result in cross-validation
                    SVMModel model = SVM.Train(trainingSet, parameter);

                    // Save the model
                    SVM.SaveModel(model, @"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_" + word + "_model.txt");
                    //SVM.SaveModel(model, @"..\..\..\keyloggerattack\Dataset\1012964-Ground-train_model.txt");

                    // Predict the instances in the test set
                    double[] testResults = testSet.Predict(model);
                    //for (int i = 0; i < testSet.Length; i++)
                       // MessageBox.Show("results"+testResults[i]);
                    double[] target = new double[testSet.Length];
                    // Evaluate the test results
                    //int[,] confusionMatrix;
                    //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);
                    for (int i = 0; i < testSet.Length; i++)
                    {
                        target[i] = SVM.Predict(model, testSet.X[i]);

                    }

                    double accuracy = SVMHelper.EvaluateClassificationProblem(testSet, testResults);

                    // Print the results
                    //Console.WriteLine("\n\nThe accuracy: " + accuracy);
                    MessageBox.Show("(認知因子)精確率為:" + accuracy); // "(Cognitive factor) accuracy: ..."
                    //MessageBox.Show("test精確率為:" + testAccuracy);
                    //SVM.Free(trainingSet);
                }
                if (File.Exists(@"..\..\..\keyloggerattack\Dataset\SVM_Inertia_" + user + "_" + word + ".txt"))
                {

                    // Load the datasets. In this example the same dataset is used for training and testing, which is not recommended.
                    SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_Inertia_" + user + "_" + word + ".txt");
                    //SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_" + user + "_east.txt");
                    //SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_temp_east.txt");
                    SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\SVM_Inertia_" + user + "_" + word + ".txt");
                    //SVMProblem trainingSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\1012964-Ground-train.txt");
                    //SVMProblem testSet = SVMProblemHelper.Load(@"..\..\..\keyloggerattack\Dataset\1012964-Ground-train.txt");

                    //Normalize the datasets if you want: L2 Norm => x / ||x||
                    trainingSet = trainingSet.Normalize(SVMNormType.L2);
                    testSet = testSet.Normalize(SVMNormType.L2);

                    // Select the parameter set
                    SVMParameter parameter = new SVMParameter();
                    parameter.Type = SVMType.ONE_CLASS;
                    parameter.Kernel = SVMKernelType.RBF;
                    parameter.C = 8;
                    parameter.Gamma = 0.0078125;
                    parameter.Nu = 0.3;

                    // Do cross-validation to check whether this parameter set is appropriate for the dataset
                    //double[] crossValidationResults; // output labels
                    //int nFold = 5;
                    //trainingSet.CrossValidation(parameter, nFold, out crossValidationResults);

                    // Evaluate the cross validation result
                    // If it is not good enough, select the parameter set again
                    //double crossValidationAccuracy = trainingSet.EvaluateClassificationProblem(crossValidationResults);

                    // Train the model if your parameter set gives good results on cross validation
                    SVMModel model = SVM.Train(trainingSet, parameter);

                    // Save the model
                    SVM.SaveModel(model, @"..\..\..\keyloggerattack\Dataset\SVM_Inertia_" + user + "_" + word + "_model.txt");
                    //SVM.SaveModel(model, @"..\..\..\keyloggerattack\Dataset\1012964-Ground-train_model.txt");

                    // Predict the instances in the test set
                    double[] testResults = testSet.Predict(model);
                    //for (int i = 0; i < testSet.Length; i++)
                    // MessageBox.Show("results"+testResults[i]);
                    double[] target = new double[testSet.Length];
                    // Evaluate the test results
                    //int[,] confusionMatrix;
                    //double testAccuracy = testSet.EvaluateClassificationProblem(testResults, model.Labels, out confusionMatrix);
                    for (int i = 0; i < testSet.Length; i++)
                    {
                        target[i] = SVM.Predict(model, testSet.X[i]);

                    }

                    double accuracy = SVMHelper.EvaluateClassificationProblem(testSet, testResults);
                    // Print the results
                    //Console.WriteLine("\n\nThe accuracy: " + accuracy);
                    //MessageBox.Show("(Inertial factor) accuracy: " + accuracy);
                    //MessageBox.Show("Test accuracy: " + testAccuracy);
                    //SVM.Free(trainingSet);
                }
                Y--;

            }
            // Console.WriteLine("\n\nCross validation accuracy: " + crossValidationAccuracy);
            // Console.WriteLine("\nTest accuracy: " + testAccuracy);
            //Console.WriteLine("\nConfusion matrix:\n");

            // Print formatted confusion matrix
            //Console.Write(String.Format("{0,6}", ""));
            /*for (int i = 0; i < model.Labels.Length; i++)
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
            Console.WriteLine();
            for (int i = 0; i < confusionMatrix.GetLength(0); i++)
            {
                Console.Write(String.Format("{0,5}", "(" + model.Labels[i] + ")"));
                for (int j = 0; j < confusionMatrix.GetLength(1); j++)
                    Console.Write(String.Format("{0,5}", confusionMatrix[i, j]));
                Console.WriteLine();
            }*/
        }
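The commented-out cross-validation block above is where a better (C, Gamma) pair would normally be chosen instead of hard-coding one. A minimal selection sketch, assuming the same LibSVMsharp extension methods used in this listing (Load, Normalize, CrossValidation, EvaluateClassificationProblem) and a hypothetical dataset path:

        // Minimal grid-search sketch; the candidate grids and the dataset path are illustrative assumptions.
        private static SVMParameter SelectParameter(string datasetPath)
        {
            SVMProblem trainingSet = SVMProblemHelper.Load(datasetPath).Normalize(SVMNormType.L2);

            double bestAccuracy = 0;
            SVMParameter bestParameter = null;

            foreach (double c in new[] { 1.0, 8.0, 64.0 })
            {
                foreach (double gamma in new[] { 0.0078125, 0.03125, 0.125 })
                {
                    SVMParameter candidate = new SVMParameter();
                    candidate.Type = SVMType.C_SVC;
                    candidate.Kernel = SVMKernelType.RBF;
                    candidate.C = c;
                    candidate.Gamma = gamma;

                    // 5-fold cross validation on the training set only
                    double[] cvResults;
                    trainingSet.CrossValidation(candidate, 5, out cvResults);
                    double accuracy = trainingSet.EvaluateClassificationProblem(cvResults);

                    if (accuracy > bestAccuracy)
                    {
                        bestAccuracy = accuracy;
                        bestParameter = candidate;
                    }
                }
            }

            return bestParameter;
        }

The best-scoring candidate would then be passed to SVM.Train in place of the fixed parameter object.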
Ejemplo n.º 36
0
        /// <summary>
        /// 
        /// </summary>
        /// <param name="problem"></param>
        /// <param name="parameter"></param>
        /// <returns></returns>
        public static string CheckParameter(SVMProblem problem, SVMParameter parameter)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            IntPtr ptr_output = libsvm.svm_check_parameter(ptr_problem, ptr_parameter);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);

            string output = Marshal.PtrToStringAnsi(ptr_output);
            Marshal.FreeHGlobal(ptr_output);
            ptr_output = IntPtr.Zero;

            return output;
        }
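A short usage sketch for the check above: validate the parameter set before training. It assumes the method is exposed as a static member of the SVM class, that a null or empty return string means the parameters are valid (libsvm returns NULL in that case), and a hypothetical dataset path:

        SVMProblem problem = SVMProblemHelper.Load(@"dataset.txt");   // hypothetical path
        SVMParameter parameter = new SVMParameter();
        parameter.Type = SVMType.C_SVC;
        parameter.Kernel = SVMKernelType.RBF;
        parameter.C = 1;
        parameter.Gamma = 0.5;

        string error = SVM.CheckParameter(problem, parameter);
        if (string.IsNullOrEmpty(error))
        {
            // Parameters look valid; proceed with training
            SVMModel model = SVM.Train(problem, parameter);
        }
        else
        {
            Console.WriteLine("Invalid parameter set: " + error);
        }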
Ejemplo n.º 37
0
        public static SVMParameter Convert(svm_parameter x)
        {
            SVMParameter y = new SVMParameter();
            y.Type = (SVMType)x.svm_type;
            y.Kernel = (SVMKernelType)x.kernel_type;
            y.Degree = x.degree;
            y.Gamma = x.gamma;
            y.Coef0 = x.coef0;
            y.CacheSize = x.cache_size;
            y.Eps = x.eps;
            y.C = x.C;
            y.Nu = x.nu;
            y.P = x.p;
            y.Shrinking = x.shrinking != 0;
            y.Probability = x.probability != 0;

            int length = x.nr_weight;
            y.WeightLabels = new int[length];
            if (length > 0)
                Marshal.Copy(x.weight_label, y.WeightLabels, 0, length);

            y.Weights = new double[length];
            if (length > 0)
                Marshal.Copy(x.weight, y.Weights, 0, length);

            return y;
        }
Ejemplo n.º 38
0
 public SVMConfiguration(SVMParameter Parameters, List<Feature> Features)
 {
     parameters = Parameters;
     features = Features;
 }
Ejemplo n.º 39
0
 public SVMParameter Clone()
 {
     SVMParameter y = new SVMParameter();
     y.Type = Type;
     y.Kernel = Kernel;
     y.Degree = Degree;
     y.Gamma = Gamma;
     y.Coef0 = Coef0;
     y.C = C;
     y.Nu = Nu;
     y.P = P;
     y.CacheSize = CacheSize;
     y.Eps = Eps;
     y.Shrinking = Shrinking;
     y.Probability = Probability;
     y.WeightLabels = WeightLabels.Select(a => a).ToArray();
     y.Weights = Weights.Select(a => a).ToArray();
     return y;
 }
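A small illustration of why Clone is useful: a shared base parameter set can be copied and varied per candidate run without mutating the original. The Gamma grid below is purely illustrative:

 SVMParameter baseParameter = new SVMParameter();
 baseParameter.Type = SVMType.C_SVC;
 baseParameter.Kernel = SVMKernelType.RBF;
 baseParameter.C = 8;

 foreach (double gamma in new[] { 0.001, 0.01, 0.1 })
 {
     SVMParameter candidate = baseParameter.Clone();
     candidate.Gamma = gamma;   // baseParameter itself is left untouched
     // ... train / cross-validate with candidate ...
 }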
Ejemplo n.º 40
0
        public static SVMModel Convert(svm_model x)
        {
            SVMModel y = new SVMModel();

            y.Creation     = (CreationType)x.free_sv;
            y.ClassCount   = x.nr_class;
            y.TotalSVCount = x.l;

            if (y.Creation == CreationType.LOAD_MODEL)
            {
                y.Parameter        = new SVMParameter();
                y.Parameter.Type   = (SVMType)x.param.svm_type;
                y.Parameter.Kernel = (SVMKernelType)x.param.kernel_type;
                switch (y.Parameter.Kernel)
                {
                case SVMKernelType.LINEAR:
                    break;

                case SVMKernelType.POLY:
                    y.Parameter.Gamma  = x.param.gamma;
                    y.Parameter.Coef0  = x.param.coef0;
                    y.Parameter.Degree = x.param.degree;
                    break;

                case SVMKernelType.RBF:
                    y.Parameter.Gamma = x.param.gamma;
                    break;

                case SVMKernelType.SIGMOID:
                    y.Parameter.Gamma = x.param.gamma;
                    y.Parameter.Coef0 = x.param.coef0;
                    break;
                }
            }
            else
            {
                y.Parameter = SVMParameter.Convert(x.param);
            }

            int problemCount = (int)(y.ClassCount * (y.ClassCount - 1) * 0.5);

            y.Rho = new double[problemCount];
            Marshal.Copy(x.rho, y.Rho, 0, y.Rho.Length);

            y.ProbabilityA = null;
            if (x.probA != IntPtr.Zero)
            {
                y.ProbabilityA = new double[problemCount];
                Marshal.Copy(x.probA, y.ProbabilityA, 0, y.ProbabilityA.Length);
            }

            y.ProbabilityB = null;
            if (x.probB != IntPtr.Zero)
            {
                y.ProbabilityB = new double[problemCount];
                Marshal.Copy(x.probB, y.ProbabilityB, 0, y.ProbabilityB.Length);
            }

            y.SVCounts = new int[y.ClassCount];
            Marshal.Copy(x.nSV, y.SVCounts, 0, y.SVCounts.Length);

            y.Labels = new int[y.ClassCount];
            Marshal.Copy(x.label, y.Labels, 0, y.Labels.Length);

            y.SVCoefs = new List <double[]>(y.ClassCount - 1);
            IntPtr i_ptr_svcoef = x.sv_coef;

            for (int i = 0; i < y.ClassCount - 1; i++)
            {
                y.SVCoefs.Add(new double[y.TotalSVCount]);
                IntPtr coef_ptr = (IntPtr)Marshal.PtrToStructure(i_ptr_svcoef, typeof(IntPtr));
                Marshal.Copy(coef_ptr, y.SVCoefs[i], 0, y.SVCoefs[i].Length);
                i_ptr_svcoef = IntPtr.Add(i_ptr_svcoef, Marshal.SizeOf(typeof(IntPtr)));
            }

            y.SVIndices = null;
            if (x.sv_indices != IntPtr.Zero)
            {
                y.SVIndices = new int[y.TotalSVCount];
                Marshal.Copy(x.sv_indices, y.SVIndices, 0, y.SVIndices.Length);
            }

            y.SV = new List <SVMNode[]>();
            IntPtr i_ptr_sv = x.SV;

            for (int i = 0; i < x.l; i++)
            {
                IntPtr    ptr_nodes = (IntPtr)Marshal.PtrToStructure(i_ptr_sv, typeof(IntPtr));
                SVMNode[] nodes     = SVMNode.Convert(ptr_nodes);
                y.SV.Add(nodes);
                i_ptr_sv = IntPtr.Add(i_ptr_sv, Marshal.SizeOf(typeof(IntPtr)));
            }

            return(y);
        }
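The problemCount expression above reflects LIBSVM's one-vs-one decomposition: a k-class model stores one binary sub-problem per unordered class pair, so rho (and probA/probB when probability estimates exist) holds k*(k-1)/2 entries. For example:

            int classCount = 3;                                     // e.g. a 3-class model
            int problemCount = classCount * (classCount - 1) / 2;   // = 3 pairwise sub-problems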
Ejemplo n.º 41
0
 public StdClassifier(string Name, SVMParameter Parameter, List<Feature> Features, SAMData samData)
     : base(Name, Parameter, samData)
 {
     features = new List<Feature>(Features);
 }
Ejemplo n.º 42
0
        private void CreateSVM(SENSOR machine, List<OneClassFV> trainingSet)
        {

            var data = trainingSet.Select(x => x.Features).ToList();

            OneClassClassifier occ = new OneClassClassifier(data);
            SVMParameter svmP = new SVMParameter();
            svmP.Kernel = SVMKernelType.RBF;
            svmP.C = 100;
            svmP.Gamma = 0.01;
            svmP.Nu = 0.01;
            svmP.Type = SVMType.ONE_CLASS;
            occ.CreateModel(svmP);

            machines.Add(machine, occ);
        }
Ejemplo n.º 43
0
        //--------------------------------------------------------------------------------------
        // private 
        //---------------------------------------------------------------------------------------

        /// <summary>
        /// Create the dictionary (model) file
        /// </summary>
        /// <param name="input_learing_file"></param>
        /// <param name="gammma"></param>
        /// <param name="cost"></param>
        private void Training(string input_learing_file, float gamma, float cost)
        {
            //LibSVM test
            //Load the training data
            SVMProblem problem = SVMProblemHelper.Load(input_learing_file);

            //SVM parameters
            SVMParameter parameter = new SVMParameter();
            parameter.Type = LibSVMsharp.SVMType.C_SVC;
            parameter.Kernel = LibSVMsharp.SVMKernelType.RBF;
            parameter.C = cost;
            parameter.Gamma = gamma;

            libSVM_model = SVM.Train(problem, parameter);
            //Output the dictionary (model) file (xml)
            string xml_name = @"model_" + input_learing_file;
            xml_name = xml_name.Replace(@".csv", @".xml");
            SVM.SaveModel(libSVM_model, xml_name);

            //Write the prediction results to a file for inspection
            SVMProblem testProblem = SVMProblemHelper.Load(input_learing_file);
            double[] target = new double[testProblem.Length];
            string debug_file_str = @"debug_" + input_learing_file;
            using (StreamWriter w = new StreamWriter(debug_file_str))
            {
                for (int i = 0; i < testProblem.Length; i++)
                {
                    target[i] = SVM.Predict(libSVM_model, testProblem.X[i]);
                    w.Write(target[i] + "\n");
                    Console.Out.WriteLine(@"{0} : {1}", i, target[i]);
                }
            }
            //Compute the accuracy.
            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);
        }
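A hypothetical caller for the Training method above, sweeping a few (gamma, cost) pairs; the CSV file name and the grids are assumptions, and each call overwrites libSVM_model and writes a fresh model_/debug_ file pair:

        private void TrainAll()
        {
            // Hypothetical grid; in practice these values would come from cross validation.
            float[] gammas = { 0.01f, 0.1f, 1f };
            float[] costs = { 1f, 10f, 100f };

            foreach (float gamma in gammas)
            {
                foreach (float cost in costs)
                {
                    Training(@"gesture_features.csv", gamma, cost);
                }
            }
        }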
Ejemplo n.º 44
0
        /// <summary>
        /// 
        /// </summary>
        /// <param name="problem"></param>
        /// <param name="parameter"></param>
        /// <param name="nFolds"></param>
        /// <param name="target"></param>
        public static void CrossValidation(SVMProblem problem, SVMParameter parameter, int nFolds, out double[] target)
        {
            if (problem == null)
            {
                throw new ArgumentNullException("problem");
            }

            if (parameter == null)
            {
                throw new ArgumentNullException("parameter");
            }

            if (nFolds < 2)
            {
                throw new ArgumentOutOfRangeException("nFolds");
            }

            IntPtr ptr_target = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * problem.Length);
            IntPtr ptr_problem = SVMProblem.Allocate(problem);
            IntPtr ptr_parameter = SVMParameter.Allocate(parameter);

            libsvm.svm_cross_validation(ptr_problem, ptr_parameter, nFolds, ptr_target);

            target = new double[problem.Length];
            Marshal.Copy(ptr_target, target, 0, target.Length);

            SVMProblem.Free(ptr_problem);
            SVMParameter.Free(ptr_parameter);
            Marshal.FreeHGlobal(ptr_target);
            ptr_target = IntPtr.Zero;
        }
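A brief usage sketch for this low-level overload, assuming it is exposed as a static member of the SVM class and reusing SVMProblemHelper and SVMHelper from the earlier listings; the dataset path is hypothetical:

        SVMProblem problem = SVMProblemHelper.Load(@"dataset.txt");   // hypothetical path
        SVMParameter parameter = new SVMParameter();
        parameter.Type = SVMType.C_SVC;
        parameter.Kernel = SVMKernelType.RBF;
        parameter.C = 1;
        parameter.Gamma = 0.25;

        double[] foldPredictions;
        SVM.CrossValidation(problem, parameter, 5, out foldPredictions);

        // Fraction of instances whose held-out fold prediction matches the true label
        double cvAccuracy = SVMHelper.EvaluateClassificationProblem(problem, foldPredictions);
        Console.WriteLine("5-fold CV accuracy: " + cvAccuracy);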