Example no. 1
0
        /// <summary>
        /// Performs a validation run: trains an SVM on <paramref name="TrainingProblem"/>
        /// and tests it on <paramref name="TestProblem"/>.
        /// </summary>
        /// <param name="TrainingProblem">Training <see cref="Problem{TProblemElement}">Problem</see>.</param>
        /// <param name="TestProblem">Problem used for testing.</param>
        /// <returns>Accuracy on the test set, in the range [0, 1].</returns>
        public double TrainAndTestValidation(
            Problem <TProblemElement> TrainingProblem, Problem <TProblemElement> TestProblem)
        {
            // Kernel, C and Evaluator are fields of the enclosing class.
            CSVM <TProblemElement> svm = new CSVM <TProblemElement>(TrainingProblem, Kernel, C, Evaluator);

            Stopwatch t = Stopwatch.StartNew();

            svm.Init();

            //todo: change to Debug.WriteLine in production code
            Console.WriteLine("SVM init time {0}", t.Elapsed);

            svm.Train();

            Console.WriteLine("Svm train takes {0}", svm.model.ModelTimeMs);

            Debug.WriteLine("Start Predict");
            t.Restart();

            // Batch prediction over the whole test set (faster than per-element calls).
            var predictions = svm.Predict(TestProblem.Elements);

            t.Stop();

            int correct = 0;
            for (int i = 0; i < TestProblem.ElementsCount; i++)
            {
                float predictedLabel = predictions[i];

                // NOTE(review): exact float equality is relied upon here; this only
                // works if Predict returns the discrete class-label values unmodified
                // — confirm against CSVM.Predict.
                if (predictedLabel == TestProblem.Y[i])
                {
                    ++correct;
                }
            }

            // bugfix: cast to double (not float) so the double result does not
            // lose precision in the intermediate division.
            double accuracy = (double)correct / TestProblem.ElementsCount;

            // Console.WriteLine formats its arguments itself; the extra
            // string.Format wrapper was redundant.
            Console.WriteLine("init, dispose and prediction on {0} elements takes {1}, correct={2}", TestProblem.ElementsCount, t.Elapsed, correct);
            return accuracy;
        }
Example no. 2
0
        /// <summary>
        /// Performs cross validation on the specified folds: for each fold i, trains
        /// an SVM on all other folds and tests on fold i.
        /// </summary>
        /// <param name="probSize">Total number of elements across all folds (accuracy denominator).</param>
        /// <param name="foldsElements">Array of folds; each list contains the elements belonging to that fold.</param>
        /// <param name="foldsLabels">Array of folds; each list contains the labels of the corresponding elements.</param>
        /// <returns>Accuracy over all folds, in the range [0, 1].</returns>
        public double CrossValidateOnFolds(
            int probSize,
            List <TProblemElement>[] foldsElements,
            List <float>[] foldsLabels)
        {
            Debug.Assert(foldsElements.Length == foldsLabels.Length, "both fold arrays should have the same length");
            int  nrFolds = foldsElements.Length;
            long correct = 0L;

            for (int i = 0; i < nrFolds; i++)
            {
                // Indexes of every fold except the current test fold i.
                // Materialized once: the original deferred LINQ query was enumerated
                // here and again inside CreateSubProblem, re-running the filter on
                // each enumeration.
                var trainFoldIndexes = Enumerable.Range(0, nrFolds)
                                                 .Where(t => t != i)
                                                 .ToArray();

                // Size of the training sub-problem (all elements outside fold i).
                int subProbSize = trainFoldIndexes.Sum(s => foldsElements[s].Count);

                float[]           subLabels;
                TProblemElement[] subProbElem = CreateSubProblem(foldsElements, foldsLabels, trainFoldIndexes, subProbSize, out subLabels);

                //create sub problem based on previous subProblemElements and subProbLabels
                Problem <TProblemElement> trainSubprob = new Problem <TProblemElement>(subProbElem, subLabels);

                // Kernel, C and Evaluator are fields of the enclosing class.
                CSVM <TProblemElement> svm = new CSVM <TProblemElement>(trainSubprob, Kernel, C, Evaluator);

                svm.Init();
                svm.Train();

                // Test on the held-out fold i.
                for (int j = 0; j < foldsElements[i].Count; j++)
                {
                    var element = foldsElements[i][j];

                    var prediction = svm.Predict(element);
                    if (prediction == foldsLabels[i][j])
                    {
                        ++correct;
                    }
                }
            }

            // bugfix: cast to double (not float) so the double return value does
            // not lose precision for large problem sizes.
            return (double)correct / probSize;
        }