        public void ProcessInput(string line)
        {
            // Each input line carries 64 comma-separated pixel values followed by the class label.
            int i = TrainingData.Count;

            TrainingData.Add(i, new List<int>(64));

            string[] items = line.Split(',');
            for (int j = 0; j < 64; j++)
            {
                TrainingData[i].Add(int.Parse(items[j]));
            }

            // The 65th field is the expected label for this sample.
            ExpectedResults.Add(int.Parse(items[64]));
            //Console.WriteLine(line);
        }
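
A minimal caller sketch for ProcessInput (not part of the original example): it assumes the surrounding class exposes TrainingData as a Dictionary<int, List<int>> and ExpectedResults as a List<int>; the LoadTrainingFile name and its path parameter are hypothetical.

        // Hypothetical driver: feed every 65-field record of an optdigits-style CSV into ProcessInput.
        public void LoadTrainingFile(string path)
        {
            foreach (var line in System.IO.File.ReadLines(path))
            {
                ProcessInput(line);
            }
        }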
Example #2
        public void Learn_Clicked(object sender, EventArgs args)
        {
            // Train off the UI thread so the window stays responsive.
            Task.Factory.StartNew(() =>
            {
                // Extract bag-of-visual-words features for every training image.
                var bow = CreateBow();
                foreach (var image in Images)
                {
                    TrainingData.Add(GetData(image, bow));
                }

                // One-vs-one multiclass SVM trained with SMO and a polynomial kernel.
                var kernel  = new Polynomial();
                var teacher = new MulticlassSupportVectorLearning<IKernel>()
                {
                    Kernel  = kernel,
                    Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                    {
                        Kernel = kernel
                    }
                };

                var svm = teacher.Learn(TrainingData.ToArray(), Tags.ToArray());

                // Resubstitution (training) error, reported back on the UI thread.
                var error = new ZeroOneLoss(Tags.ToArray()).Loss(svm.Decide(TrainingData.ToArray()));
                Error.Dispatcher.Invoke(() => Error.Text = error.ToString());

                //var kernel = new Polynomial(16, 5);
                //var complexity = CalculateComplexity(kernel);
                //var ml = new MulticlassSupportVectorLearning<IKernel>()
                //{
                //    Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                //    {
                //        Complexity = complexity,
                //        Kernel = kernel
                //    }
                //};

                //machine = ml.Learn(TrainingData.ToArray(), Tags.ToArray());
                //var result = machine.Decide(TrainingData.ToArray());
                //var error = new ZeroOneLoss(Tags.ToArray())
                //{
                //    Mean = true
                //}.Loss(result);

                //Error.Dispatcher.Invoke(() => Error.Text = error.ToString());
            });
        }
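
Once teacher.Learn returns, the resulting machine can classify new samples through Decide. A minimal follow-up sketch under the same assumptions as the handler above (GetData, bow and svm come from that snippet; newImage is a hypothetical input, not part of the original):

                // Hypothetical follow-up: classify a single new image with the trained machine.
                var features  = GetData(newImage, bow);   // same bag-of-words features used for training
                var predicted = svm.Decide(features);     // class index drawn from the same label set as Tags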
Example #3
        /// <summary>
        /// Generates a training set consisting of both feasible and infeasible examples.
        /// </summary>
        public void GenerateTrainingData()
        {
            Feasibles = GenerateFeasibleExamples(GlobalVariables.FeasibleExamplesCount);

            // Fill the training data: each feasible example gets an appended column with value 1.0
            TrainingData = Feasibles.AddColumnWithValues(1.0);

            // Fill the output list: every feasible example is labelled 1
            Output.AddRange(Enumerable.Repeat(1, TrainingData.Count));

            _dal.TrainingFeasibleExamples = Feasibles.ToArray();

            // Configure the Gaussian Mixture Model that will model the density of the feasible region.
            var gmm = new GaussianMixtureModel(GlobalVariables.Components)
            {
                Initializations = 100,
                MaxIterations   = 10000,
                ParallelOptions = new System.Threading.Tasks.ParallelOptions()
                {
                    MaxDegreeOfParallelism = 1
                },
                Tolerance = 10E-11,
                Options   = new Accord.Statistics.Distributions.Fitting.NormalOptions()
                {
                    Regularization = double.Epsilon
                }
            };

            // Estimate the Gaussian Mixture
            gmm.Learn(_dal.TrainingFeasibleExamples);
            var iterations   = gmm.Iterations;
            var distribution = gmm.ToMixtureDistribution();

            // Smallest probability density value among the feasible examples (the 0th percentile);
            // it becomes the rejection threshold for infeasible candidates.
            var minimalProbability = _dal.TrainingFeasibleExamples
                                     .Select(item => distribution.ProbabilityDensityFunction(item))
                                     .Min();

            // Rescale data range for infeasible example creation
            NewBoundries = BoundryRescaler.Rescale(Feasibles);

            // Generate infeasible examples
            var infeasibles = new List <double[]>();

            while (infeasibles.Count < GlobalVariables.InfeasibleExamplesCount)
            {
                // Generate a candidate point within the rescaled boundary
                var x = GenerateLimitedInputs(GlobalVariables.Dimensions, NewBoundries);

                // Evaluate the mixture's probability density at the candidate
                var probability = distribution.ProbabilityDensityFunction(x);

                // Reject the candidate unless its density is at most the smallest density
                // observed among the feasible examples
                if (probability > minimalProbability)
                {
                    continue;
                }

                infeasibles.Add(x);

                // Infeasible examples get an appended column value of 0.0 and the output label 0
                TrainingData.Add(x.ExtendArrayWithValue(0.0));
                Output.Add(0);
            }

            _dal.TrainingInfeasibleExamples = infeasibles.ToArray();
            _dal.TrainingData = TrainingData.ToArray();
        }
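
GenerateLimitedInputs is referenced above but not shown. A plausible sketch, assuming NewBoundries holds one [min, max] pair per dimension (the actual layout produced by BoundryRescaler.Rescale may differ):

        // Sketch of a uniform sampler over per-dimension [min, max] boundaries.
        // The signature and the jagged-array layout of "boundries" are assumptions, not the original code.
        private static readonly Random _rng = new Random();

        private double[] GenerateLimitedInputs(int dimensions, double[][] boundries)
        {
            var point = new double[dimensions];
            for (int d = 0; d < dimensions; d++)
            {
                double min = boundries[d][0];
                double max = boundries[d][1];
                point[d] = min + _rng.NextDouble() * (max - min);
            }
            return point;
        }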