public bool Initialize(Header header)
        {
            // Ensure we have reproducible results
            Generator.Seed = 0;

            // Create the Naive Bayes learning algorithm with its default settings
            learner = new B.NaiveBayesLearning();

            Initialized = true;
            return Initialized;
        }
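For context, a minimal usage sketch of how a learner like the one above is typically applied in Accord.NET; the training arrays below are hypothetical placeholders, not part of the original snippet:

using Accord.MachineLearning.Bayes;

// Hypothetical symbol-encoded training data: two discrete features, two classes.
int[][] inputs =
{
    new[] { 0, 0 }, new[] { 0, 1 },
    new[] { 1, 0 }, new[] { 1, 1 }
};
int[] outputs = { 0, 0, 1, 1 };

var learner = new NaiveBayesLearning();
NaiveBayes nb = learner.Learn(inputs, outputs);   // fit class priors and per-feature distributions
int predicted = nb.Decide(new[] { 1, 0 });        // classify a new sample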
Example #2
 public void run_shit()
 {
     // Create the Naive Bayes learner, load the feature dictionary and any
     // saved state, then build the model from the loaded training data.
     naiveBayes = new Accord.MachineLearning.Bayes.NaiveBayesLearning();
     load_dictionary();
     //load_files();
     //load_ball_training();
     //load_empty_training();
     //load_error_training();
     //Arffgen arffgen = new Arffgen(trainingInput, trainingOutput, arrayCounter);
     load_model();
     build_model();
     //save_model();
 }
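The helper methods are not shown here, but build_model() presumably trains the learner on the loaded data. Assuming trainingInput is an int[][] of dictionary-encoded features and trainingOutput an int[] of class labels (names taken from the commented-out Arffgen call), it might boil down to something like:

// Hypothetical sketch only: train the learner created in run_shit() on the loaded arrays.
var model = naiveBayes.Learn(trainingInput, trainingOutput);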
Example #3
        public double Estimate(double[][] inputs, int[] outputs, bool empirical = true)
        {
            // Train a new model in place using the teacher API, then copy the
            // learned distributions and priors back into this instance.
            var teacher = new NaiveBayesLearning<TDistribution>()
            {
                Model = this
            };

#if DEBUG
            // Keep learning single-threaded in debug builds for reproducibility.
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif
            teacher.Empirical = empirical;
            NaiveBayes<TDistribution, double> result = teacher.Learn(inputs, outputs);
            base.Distributions = result.Distributions;
            this.Priors = result.Priors;

            // Return the mean zero-one loss (misclassification rate) on the training set.
            return new ZeroOneLoss(outputs) { Mean = true }.Loss(Decide(inputs));
        }
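The final line reports training error: with Mean = true, ZeroOneLoss returns the fraction of misclassified samples rather than their count. A small standalone illustration with hypothetical arrays:

using Accord.Math.Optimization.Losses;

int[] expected  = { 0, 1, 1, 0 };
int[] predicted = { 0, 1, 0, 0 };
// One mismatch out of four samples => 0.25
double error = new ZeroOneLoss(expected) { Mean = true }.Loss(predicted);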
Example #4
        public double Estimate<TOptions>(double[][] inputs, int[] outputs,
                                         bool empirical = true, TOptions options = null)
            where TOptions : class, IFittingOptions, new()
        {
            // Same as the overload above, but also forwards distribution-specific
            // fitting options to the learner.
            var teacher = new NaiveBayesLearning<TDistribution, TOptions>()
            {
                Model = this
            };

#if DEBUG
            // Keep learning single-threaded in debug builds for reproducibility.
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif
            teacher.Empirical = empirical;
            teacher.Options.InnerOption = options;
            NaiveBayes<TDistribution, double> result = teacher.Learn(inputs, outputs);
            base.Distributions = result.Distributions;
            this.Priors = result.Priors;

            // Return the mean zero-one loss (misclassification rate) on the training set.
            return new ZeroOneLoss(outputs) { Mean = true }.Loss(Decide(inputs));
        }
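A hypothetical call to this generic overload, assuming a Gaussian NaiveBayes<NormalDistribution> model named bayes and Accord's NormalOptions fitting options (the data arrays are placeholders):

// Sketch only: bayes, inputs and outputs are assumed to exist already.
double error = bayes.Estimate(inputs, outputs,
    empirical: true,
    options: new NormalOptions { Regularization = 1e-6 });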
Example #5
        public double Estimate(int[][] inputs, int[] outputs,
                               bool empirical = true, double regularization = 1e-5)
        {
            // Train on symbol-encoded (discrete) inputs, applying the requested
            // regularization when fitting the per-feature discrete distributions.
            var teacher = new NaiveBayesLearning()
            {
                Model = this
            };

            teacher.Empirical = empirical;
            teacher.Options.InnerOption.Regularization = regularization;
            teacher.Options.InnerOption.Minimum = 0;
#if DEBUG
            // Keep learning single-threaded in debug builds for reproducibility.
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif
            var result = teacher.Learn(inputs, outputs);
            var b = result as NaiveBayes<GeneralDiscreteDistribution, int>;
            base.Distributions = b.Distributions;
            this.Priors = b.Priors;
            this.symbols = result.symbols;

            // Return the mean zero-one loss (misclassification rate) on the training set.
            return new ZeroOneLoss(outputs) { Mean = true }.Loss(Decide(inputs));
        }
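And a hypothetical call to this discrete overload, assuming an already-constructed NaiveBayes model named bayes and symbol-encoded training arrays:

// Sketch only: a small regularization keeps symbols never seen for a class
// from being assigned exactly zero probability.
double error = bayes.Estimate(trainingInputs, trainingOutputs,
    empirical: true, regularization: 1e-5);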