Example #1
0
        /// <summary>
        /// Tunes the supplied fuzzy system: initialises the algorithm from the
        /// configuration, runs one iteration per rule, finalises, and trims the
        /// resulting term set before returning the same system instance.
        /// </summary>
        /// <param name="Approx">Fuzzy system to tune; modified in place.</param>
        /// <param name="conf">Algorithm configuration passed to Init.</param>
        /// <returns>The tuned fuzzy system (same reference as <paramref name="Approx"/>).</returns>
        public override FS TuneUpFuzzySystem(FS Approx, ILearnAlgorithmConf conf)
        {
            theFuzzySystem = Approx;
            Init(conf);

            int iteration = 0;
            while (iteration < countRules)
            {
                oneIterate(theFuzzySystem);
                iteration++;
            }

            Final();
            Approx.RulesDatabaseSet[0].TermsSet.Trim();
            return Approx;
        }
Example #2
0
        // Builds the consequent for the candidate rule described by forClass and
        // stores the finished rule in theRule. Which consequent is produced is a
        // compile-time choice via preprocessor symbols: PClass -> nearest class
        // label, SApprox -> nearest singleton value, TSApprox -> linear
        // coefficients from recursive LSM; the default branch mirrors SApprox.
        void makeKons(FS CurrentFS, List <Term> forClass)
        {
#if PClass
            // Pittsburgh classifier build: consequent is the nearest class name.
            string Kons = FuzzySystem.PittsburghClassifier.KNNClassName.NearestClass(CurrentFS, forClass);
            theRule = new Rule(thePositionOfBee.TermsSet, numOfTerms, Kons);
#elif SApprox
            // Singleton approximator build: consequent is the nearest singleton value.
            double Kons = FuzzySystem.SingletoneApproximate.KNNConsequent.NearestApprox(CurrentFS, forClass);
            theRule = new Rule(thePositionOfBee.TermsSet, numOfTerms, Kons);
#elif TSApprox
            // Takagi-Sugeno build: consequent is a constant plus a coefficient
            // vector fitted by the recursive LSM helper.
            double[] koefs;
            double   val = FuzzySystem.TakagiSugenoApproximate.LSMWeghtReqursiveSimple.EvaluteConsiquent(CurrentFS, forClass, out koefs);
            theRule = new Rule(thePositionOfBee.TermsSet, numOfTerms, val, koefs);
#else
            // No symbol defined: fall back to the singleton-approximator behaviour.
            double Kons = FuzzySystem.SingletoneApproximate.KNNConsequent.NearestApprox(CurrentFS, forClass);
            theRule = new Rule(thePositionOfBee.TermsSet, numOfTerms, Kons);
#endif
        }
Example #3
0
        /// <summary>
        /// One iteration of the bee-colony search: scouts each try (up to 100
        /// attempts) to generate a rule that improves on the baseline error,
        /// workers then refine the best scout's position, and the best overall
        /// knowledge base is written back into the fuzzy system.
        /// </summary>
        /// <param name="result">Fuzzy system whose first rules database is optimised in place.</param>
        public virtual void oneIterate(FS result)
        {
            theScouts.Clear();
            theWorkers.Clear();
            baseLine  = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            Best      = result.RulesDatabaseSet[0];
            theScout  = null;
            theWorker = null;
            for (int s = 0; s < countScouts; s++)
            {
                double goodsLine = -1;
                int    ztryes    = 0;
                while ((goodsLine <= 0) && (ztryes < 100))
                {
                    theScout = new Scout(Best, theFuzzySystem);
                    theScout.generateNewRule(typeTerm, rand);
                    goodsLine = theScout.getGoodsImproove(baseLine);
                    // BUG FIX: ztryes was never incremented, so a scout that never
                    // found an improvement spun forever; the parallel variant of
                    // this method already increments it.
                    ztryes++;
                }
                theScouts.Add(theScout);
            }
            BeeComparer toBeeSort = new BeeComparer();

            // Sort ascending by goods; the best bee ends up at the last index.
            theScouts.Sort(toBeeSort);
            KnowlegeBaseRules ScoutBest = theScouts[theScouts.Count - 1].PositionOfBee;
            int ScoutBestNumRule        = theScouts[theScouts.Count - 1].NumOFRule;

            // Workers all start from the best scout's position and perturb its rule.
            for (int w = 0; w < countWorkers; w++)
            {
                theWorker = new Worker(ScoutBest, theFuzzySystem);
                theWorkers.Add(theWorker);
                theWorkers[theWorkers.Count - 1].WorkerFly(ScoutBestNumRule, rand);
                theWorkers[theWorkers.Count - 1].getGoodsImproove(baseLine);
            }

            theWorkers.Sort(toBeeSort);

            // Keep whichever of best scout / best worker improved more.
            if (theScouts[theScouts.Count - 1].Goods > theWorkers[theWorkers.Count - 1].Goods)
            {
                Best = theScouts[theScouts.Count - 1].PositionOfBee;
            }
            else
            {
                Best = theWorkers[theWorkers.Count - 1].PositionOfBee;
            }
            theFuzzySystem.RulesDatabaseSet[0] = Best;
        }
Example #4
0
        /// <summary>
        /// Parallel variant of one bee-colony iteration: scouts are generated
        /// concurrently, then three waves of workers refine the three best scout
        /// positions, and the best overall knowledge base is written back.
        /// </summary>
        /// <param name="result">Fuzzy system whose first rules database is optimised in place.</param>
        public virtual void oneIterate(FS result)
        {
            baseLine = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            Best     = result.RulesDatabaseSet[0];

            // Generate all scouts in parallel; each slot retries up to 100 times
            // until it produces a rule that improves on the baseline error.
            Parallel.For(0, countScouts, new ParallelOptions {
                MaxDegreeOfParallelism = countScouts, TaskScheduler = null
            }, s =>
            {
                double goodsLine = -1;
                int ztryes       = 0;
                while ((goodsLine <= 0) && (ztryes < 100))
                {
                    theScouts[s] = new Scout(Best, theFuzzySystem);
                    // rand.Value: presumably a thread-local Random — confirm.
                    theScouts[s].generateNewRule(typeTerm, rand.Value);
                    goodsLine = theScouts[s].getGoodsImproove(baseLine);
                    ztryes++;
                }
            }
                         );

            BeeComparer toBeeSort = new BeeComparer();

            // Sort ascending by goods; the best scout ends up at the last index.
            Array.Sort(theScouts, toBeeSort);
            int ScoutBestNumRule = theScouts[countScouts - 1].NumOFRule;

            // Three worker waves: wave a starts from the (a+1)-th best scout.
            // NOTE(review): every wave flies toward ScoutBestNumRule — the rule
            // index of the single best scout — even when starting from another
            // scout's position; confirm this is intended.
            for (int a = 0; a < 3; a++)
            {
                Parallel.For(0, countWorkers, new ParallelOptions {
                    MaxDegreeOfParallelism = countWorkers, TaskScheduler = null
                }, w =>
                {
                    theWorkers[countWorkers * a + w] = new Worker(theScouts[theScouts.Length - 1 - a].PositionOfBee, theFuzzySystem);
                    theWorkers[countWorkers * a + w].WorkerFly(ScoutBestNumRule, rand.Value);
                    theWorkers[countWorkers * a + w].getGoodsImproove(baseLine);
                }
                             );
            }

            Array.Sort(theWorkers, toBeeSort);

            // Keep whichever of best scout / best worker improved more.
            if (theScouts[countScouts - 1].Goods > theWorkers[countWorkers * 3 - 1].Goods)
            {
                Best = theScouts[countScouts - 1].PositionOfBee;
            }
            else
            {
                Best = theWorkers[countWorkers * 3 - 1].PositionOfBee;
            }
            theFuzzySystem.RulesDatabaseSet[0] = Best;
        }
Example #5
0
 // Worker bee: constructed from a scout's knowledge-base position; all
 // behaviour is inherited from the Bee base class.
 public Worker(KnowlegeBaseRules theSource, FS parrent)
     : base(theSource, parrent)
 {
 }
        /// <summary>
        /// Cuckoo-search tuning of the classifier: each iteration perturbs the
        /// cuckoo (X[0]) with a Lévy flight, compares it against a randomly
        /// chosen nest, and with probability (1 - p) rebuilds the worst nest
        /// around the cuckoo; the best solution found replaces the first rules
        /// database. Higher scores are treated as better — ClassifyLearnSamples
        /// presumably returns classification accuracy (confirm).
        /// </summary>
        /// <param name="Classifier">Classifier to tune; modified in place.</param>
        /// <param name="conf">Must be a CuckooConf carrying the algorithm parameters.</param>
        /// <returns>The tuned classifier (same reference as <paramref name="Classifier"/>).</returns>
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            PCFuzzySystem result = Classifier;

            count_iteration = ((CuckooConf)conf).CuckooCountIterate;
            count_particle  = ((CuckooConf)conf).CuckooPopulationSize;
            m    = ((CuckooConf)conf).CuckooWorse;
            p    = ((CuckooConf)conf).CuckooLifeChance;
            beta = ((CuckooConf)conf).CuckooBeta;

            // X[0] is the cuckoo; X[1..count_particle] are the nests.
            // (Removed the unused Er[] array the original allocated.)
            KnowlegeBasePCRules[] X = new KnowlegeBasePCRules[count_particle + 1];
            double[] Errors         = new double[count_particle + 1];

            Random rnd  = new Random();
            int    best = 0;

            // Seed every slot with a copy of the current knowledge base and its score.
            for (int i = 0; i < count_particle + 1; i++)
            {
                X[i]      = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
                Errors[i] = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);
            }

            for (int i = 0; i < count_iteration; i++)
            {
                // Lévy flight: perturb every term parameter of the cuckoo.
                // (Removed the unused local `b`, which burned one rnd.Next draw
                // per parameter for nothing.)
                X[0] = new KnowlegeBasePCRules(X[0]);
                for (int k = 0; k < X[0].TermsSet.Count; k++)
                {
                    for (int q = 0; q < X[0].TermsSet[k].CountParams; q++)
                    {
                        X[0].TermsSet[k].Parametrs[q] = X[0].TermsSet[k].Parametrs[q] + Levi(BM(sigu(beta)), BM(1.0), beta);
                    }
                }

                for (int k = 0; k < X[0].Weigths.Length; k++)
                {
                    X[0].Weigths[k] = rnd.NextDouble() / 200;
                }

                // Score the cuckoo by temporarily appending it to the database set.
                result.RulesDatabaseSet.Add(X[0]);
                int temp_index = result.RulesDatabaseSet.Count - 1;
                Errors[0] = result.ClassifyLearnSamples(result.RulesDatabaseSet[temp_index]);
                result.RulesDatabaseSet.RemoveAt(temp_index);

                // Compare against a random nest; the better solution survives in
                // both slots.
                int s = rnd.Next(1, count_particle + 1);

                if (Errors[0] > Errors[s])
                {
                    X[s]      = X[0];
                    Errors[s] = Errors[0];
                }
                else
                {
                    X[0]      = X[s];
                    Errors[0] = Errors[s];
                }

                // Abandonment step: m times, find the worst-scoring nest and,
                // with probability roughly (1 - p), rebuild it around the cuckoo.
                for (int v = 0; v < m; v++)
                {
                    // The original named this `max` but it tracks the MINIMUM
                    // score, i.e. the worst nest under higher-is-better scoring.
                    double worstScore = Errors[1];
                    int    worstIndex = 1;
                    for (int r = 2; r < count_particle + 1; r++)
                    {
                        if (Errors[r] < worstScore)
                        {
                            worstScore = Errors[r];
                            worstIndex = r;
                        }
                    }
                    double h = (rnd.Next(1, 1000) / Convert.ToDouble(1000));
                    if (h > p)
                    {
                        X[worstIndex] = new KnowlegeBasePCRules(X[worstIndex]);
                        for (int j = 0; j < X[worstIndex].TermsSet.Count; j++)
                        {
                            for (int k = 0; k < X[worstIndex].TermsSet[j].CountParams; k++)
                            {
                                X[worstIndex].TermsSet[j].Parametrs[k] = X[0].TermsSet[j].Parametrs[k] + (rnd.Next(-1000, 1000) / Convert.ToDouble(1000));
                            }
                            for (int k = 0; k < X[worstIndex].Weigths.Length; k++)
                            {
                                X[worstIndex].Weigths[k] = X[0].Weigths[k] + (rnd.Next(1, 1000) / Convert.ToDouble(10000));
                            }
                        }
                        result.RulesDatabaseSet.Add(X[worstIndex]);
                        temp_index         = result.RulesDatabaseSet.Count - 1;
                        Errors[worstIndex] = result.ClassifyLearnSamples(result.RulesDatabaseSet[temp_index]);
                        result.RulesDatabaseSet.RemoveAt(temp_index);
                    }
                }
            }

            // Select the highest-scoring solution found. (The original named
            // this `min`, but the loop finds the maximum score.)
            double bestScore = Errors[0];

            best = 0;
            for (int g = 1; g < count_particle + 1; g++)
            {
                if (Errors[g] > bestScore)
                {
                    bestScore = Errors[g];
                    best      = g;
                }
            }

            X[0] = X[best];

            // Re-score the chosen solution so Errors[0] reflects it.
            result.RulesDatabaseSet.Add(X[0]);
            int t_index = result.RulesDatabaseSet.Count - 1;

            Errors[0] = result.ClassifyLearnSamples(result.RulesDatabaseSet[t_index]);
            result.RulesDatabaseSet.RemoveAt(t_index);

            result.RulesDatabaseSet[0] = X[0];
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
        /// <summary>
        /// Measures false-positive and false-negative rates (as percentages,
        /// relative to the "normal." class label) over the learn and test sample
        /// sets, storing them in the corresponding fields. The classifier's
        /// rules are not changed; only its term set is trimmed.
        /// </summary>
        /// <param name="Classifier">Classifier to evaluate.</param>
        /// <param name="conf">Unused by this implementation.</param>
        /// <returns>The same classifier instance.</returns>
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            PCFuzzySystem result = Classifier;

            FalsePositiveLearn = 0;
            FalseNegativeLearn = 0;
            FalsePositiveTest  = 0;
            FalseNegativeTest  = 0;

            string normalClass = "normal.";

            // Learn-set pass: "normal" expected but not predicted -> false
            // positive; "normal" predicted but not expected -> false negative.
            for (int i = 0; i < Classifier.LearnSamplesSet.CountSamples; i++)
            {
                string predicted       = Classifier.classifyBase(Classifier.LearnSamplesSet[i].InputAttributeValue, Classifier.RulesDatabaseSet[0]);
                bool   expectedNormal  = Classifier.LearnSamplesSet.DataRows[i].StringOutput.Contains(normalClass);
                bool   predictedNormal = predicted.Contains(normalClass);

                if (expectedNormal && !predictedNormal)
                {
                    FalsePositiveLearn++;
                }
                else if (predictedNormal && !expectedNormal)
                {
                    FalseNegativeLearn++;
                }
            }
            FalsePositiveLearn = FalsePositiveLearn / Classifier.LearnSamplesSet.CountSamples * 100;
            FalseNegativeLearn = FalseNegativeLearn / Classifier.LearnSamplesSet.CountSamples * 100;

            // Test-set pass: identical bookkeeping against the test samples.
            for (int i = 0; i < Classifier.TestSamplesSet.CountSamples; i++)
            {
                string predicted       = Classifier.classifyBase(Classifier.TestSamplesSet[i].InputAttributeValue, Classifier.RulesDatabaseSet[0]);
                bool   expectedNormal  = Classifier.TestSamplesSet.DataRows[i].StringOutput.Contains(normalClass);
                bool   predictedNormal = predicted.Contains(normalClass);

                if (expectedNormal && !predictedNormal)
                {
                    FalsePositiveTest++;
                }
                else if (predictedNormal && !expectedNormal)
                {
                    FalseNegativeTest++;
                }
            }
            FalsePositiveTest = FalsePositiveTest / Classifier.TestSamplesSet.CountSamples * 100;
            FalseNegativeTest = FalseNegativeTest / Classifier.TestSamplesSet.CountSamples * 100;

            Classifier.RulesDatabaseSet[0].TermsSet.Trim();
            return(Classifier);
        }
Example #8
0
 // Scout bee: constructed from an existing knowledge-base position; all
 // behaviour is inherited from the Bee base class.
 public Scout(KnowlegeBaseRules theSource, FS parrent)
     : base(theSource, parrent)
 {
 }
Example #9
0
        /// <summary>
        /// Tunes the classifier: initialises from the configuration, runs the
        /// configured number of iterations, then finalises and returns it.
        /// </summary>
        /// <param name="Classifier">Classifier to tune; modified in place.</param>
        /// <param name="conf">Algorithm configuration passed to Init.</param>
        /// <returns>The tuned classifier (same reference as <paramref name="Classifier"/>).</returns>
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            result = Classifier;
            Init(conf);

            int iteration = 0;
            while (iteration < count_iteration)
            {
                oneIterate(result);
                iteration++;
            }
            Final();
            return result;
        }
Example #10
0
        // Base bee: stores a copy of the source knowledge base (via the
        // KnowlegeBaseRules copy constructor) as this bee's position, and keeps
        // a reference to the parent fuzzy system.
        public Bee(KnowlegeBaseRules theSource, FS parrent)
        {
            thePositionOfBee = new KnowlegeBaseRules(theSource);

            Parrent = parrent;
        }
        /// <summary>
        /// Generates count_rules random rules for the classifier: for every
        /// feature a random term of the configured membership-function type is
        /// drawn from the feature's [Min, Max] range, and the rule's consequent
        /// class is chosen by nearest-class lookup over the drawn terms.
        /// </summary>
        /// <param name="Classifier">Classifier whose first rules database is populated.</param>
        /// <param name="config">Must be a GeneratorRullesSimpleRandomConfig.</param>
        /// <returns>The same classifier instance, after UnlaidProtectionFix.</returns>
        public override PCFuzzySystem Generate(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, IGeneratorConf config)
        {
            Random        rand   = new Random();
            PCFuzzySystem result = Classifier;

            // Make sure there is a rules database to populate.
            if (result.RulesDatabaseSet.Count == 0)
            {
                KnowlegeBasePCRules temp_rules = new KnowlegeBasePCRules();
                result.RulesDatabaseSet.Add(temp_rules);
            }

            type_term    = ((GeneratorRullesSimpleRandomConfig)config).RSRTypeFunc;
            stable_terms = (int)((GeneratorRullesSimpleRandomConfig)config).RSRConstant;
            count_rules  = ((GeneratorRullesSimpleRandomConfig)config).RSRCountRules;

            for (int j = 0; j < count_rules; j++)
            {
                int[]            order = new int[result.CountFeatures];
                TypeTermFuncEnum temp_type_term;
                if (stable_terms == 0)
                {
                    // Zero constant: use the single configured term type.
                    temp_type_term = type_term;
                }
                else
                {
                    // Otherwise pick a random term type for this rule.
                    temp_type_term = Generator_type_term();
                }

                List <Term> temp_term_list = new List <Term>();
                for (int k = 0; k < result.CountFeatures; k++)
                {
                    double[] parametrs = new double[Term.CountParamsinSelectedTermType(temp_type_term)];

                    // Hoisted: the original repeated this Min/Max expression for
                    // every parameter draw. RNG call count and order are unchanged.
                    double min   = result.LearnSamplesSet.InputAttributes[k].Min;
                    double range = result.LearnSamplesSet.InputAttributes[k].Max - min;

                    switch (temp_type_term)
                    {
                    case TypeTermFuncEnum.Треугольник: // triangle: three sorted points
                        parametrs[0] = min + rand.NextDouble() * range;
                        parametrs[1] = min + rand.NextDouble() * range;
                        parametrs[2] = min + rand.NextDouble() * range;
                        Array.Sort(parametrs);
                        break;

                    case TypeTermFuncEnum.Гауссоида: // Gaussian: random centre, width up to half the range
                        parametrs[0] = min + rand.NextDouble() * range;
                        parametrs[1] = (rand.NextDouble() + 0.01) * 0.5 * range;
                        break;

                    case TypeTermFuncEnum.Парабола: // parabola: two sorted points
                        parametrs[0] = min + rand.NextDouble() * range;
                        parametrs[1] = min + rand.NextDouble() * range;
                        Array.Sort(parametrs);
                        break;

                    case TypeTermFuncEnum.Трапеция: // trapezoid: four sorted points
                        parametrs[0] = min + rand.NextDouble() * range;
                        parametrs[1] = min + rand.NextDouble() * range;
                        parametrs[2] = min + rand.NextDouble() * range;
                        parametrs[3] = min + rand.NextDouble() * range;
                        Array.Sort(parametrs);
                        break;
                    }
                    Term temp_term = new Term(parametrs, temp_type_term, k);
                    result.RulesDatabaseSet[0].TermsSet.Add(temp_term);
                    temp_term_list.Add(temp_term);
                    // Remember the index of the term just added for the rule's ordering.
                    order[k] = result.RulesDatabaseSet[0].TermsSet.Count - 1;
                }
                string class_label = KNNClassName.NearestClass(result, temp_term_list);
                PCRule temp_Rule   = new PCRule(result.RulesDatabaseSet[0].TermsSet, order, class_label, 1.0);
                result.RulesDatabaseSet[0].RulesDatabase.Add(temp_Rule);
            }

            result.UnlaidProtectionFix(result.RulesDatabaseSet[0]);
            return(result);
        }