Example #1
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            BacteryAlgorithmConfig Config = conf as BacteryAlgorithmConfig;

            sendBactery    = Config.BFOCountSolution;
            interPSOtoSend = Config.BFOCountIteration;
            result         = Classifier;


            if (result.RulesDatabaseSet.Count < 1)
            {
                throw new InvalidDataException("Нечеткая система не проинициализированна");
            }
            KnowlegeBasePCRules backSave = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            double backResult            = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);

            savetoUFS(result.RulesDatabaseSet, 0, 0, 0);
            BacteryRunner();
            KnowlegeBasePCRules[] solutions = loadDatabase();
            solutions = sortSolution(solutions);
            if (solutions.Count() < 1)
            {
                result.RulesDatabaseSet[0] = backSave; return(result);
            }
            result.RulesDatabaseSet[0] = solutions[0];
            double newResult = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);

            if (newResult > backResult)
            {
                result.RulesDatabaseSet[0] = backSave;
            }
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
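
A minimal standalone sketch of the accept-only-if-improved rule applied above; the numbers and the string stand-in are hypothetical, not library values:

using System;

class KeepIfImprovedSketch
{
    static void Main()
    {
        double backResult = 12.5;   // error of the saved knowledge base (hypothetical)
        double newResult  = 14.0;   // error after tuning (hypothetical)
        string active     = "tuned knowledge base";

        // Same acceptance rule as TuneUpFuzzySystem above: roll back when the tuned base is worse.
        if (newResult > backResult)
        {
            active = "saved backup";
        }
        Console.WriteLine(active);  // saved backup
    }
}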
Example #2
 private KnowlegeBasePCRules[] SortRules(KnowlegeBasePCRules[] Source)
 {
     double[] keys = new double[Source.Count()];
     KnowlegeBasePCRules[] tempSol = Source.Clone() as KnowlegeBasePCRules[];
     for (int i = 0; i < Source.Count(); i++)
     {
         keys[i] = result.ErrorLearnSamples(Source[i]);
     }
     Array.Sort(keys, tempSol);
     return(tempSol); // return the sorted copy
 }
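
The sort relies on the two-array overload of Array.Sort, which reorders the second array in lockstep with the keys; a minimal standalone illustration with hypothetical values:

using System;

class ParallelSortSketch
{
    static void Main()
    {
        // Hypothetical error values and names for the solutions they belong to.
        double[] keys = { 0.42, 0.17, 0.88 };
        string[] solutions = { "ruleBaseA", "ruleBaseB", "ruleBaseC" };

        // Array.Sort(keys, items) reorders both arrays by the keys, so
        // solutions[0] ends up being the lowest-error candidate.
        Array.Sort(keys, solutions);

        Console.WriteLine(solutions[0]); // ruleBaseB
    }
}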
Example #3
        public void preIterate(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            result = Classify;
            rand   = new Random();
            // Determine the name of the data folder
            string path_name   = "../../OLD/Data/Keel/Classifier/KEEL-10/";
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            Init(conf);
            // Create new learning and test sample sets and remove a number of random elements from them
            results = new List <PCFuzzySystem>();
            for (int i = 0; i < numberOfPopulations; i++)
            {
                SampleSet new_learn = new SampleSet(path_name + folder_name + "/" + result.LearnSamplesSet.FileName);
                SampleSet new_test  = new SampleSet(path_name + folder_name + "/" + result.TestSamplesSet.FileName);
                results.Add(new PCFuzzySystem(new_learn, new_test));
                int ground = (int)Math.Round(results[i].LearnSamplesSet.DataRows.Count * 0.1);
                for (int j = 0; j < ground; j++)
                {
                    results[i].LearnSamplesSet.DataRows.RemoveAt(rand.Next(0, results[i].LearnSamplesSet.DataRows.Count));
                }
            }
            // Create the populations and the archive of each particle's best positions
            Populations = new List <List <KnowlegeBasePCRules> >();
            for (int i = 0; i < numberOfPopulations; i++)
            {
                Populations.Add(SetPopulation(new List <KnowlegeBasePCRules>()));
            }
            BEST      = new List <KnowlegeBasePCRules>();
            bestError = new double[numberOfPopulations];
            for (int i = 0; i < numberOfPopulations; i++)
            {
                BEST.Add(new KnowlegeBasePCRules(result.RulesDatabaseSet[0]));
                bestError[i] = result.ErrorLearnSamples(BEST[i]);
                if (double.IsNaN(bestError[i]) || double.IsInfinity(bestError[i]) || bestError[i] == 100)
                {
                    result.UnlaidProtectionFix(BEST[i]);
                    bestError[i] = result.ErrorLearnSamples(BEST[i]);
                }
            }
        }
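
A minimal sketch of the 10% random thinning that preIterate applies to each copied learning sample set; a List<int> stands in for DataRows here (hypothetical data):

using System;
using System.Collections.Generic;
using System.Linq;

class SubsampleSketch
{
    static void Main()
    {
        var rand = new Random();
        // Stand-in for a copy of LearnSamplesSet.DataRows.
        List<int> rows = Enumerable.Range(0, 100).ToList();

        // Drop roughly 10% of the rows at random, as in preIterate above.
        int ground = (int)Math.Round(rows.Count * 0.1);
        for (int j = 0; j < ground; j++)
        {
            rows.RemoveAt(rand.Next(0, rows.Count));
        }

        Console.WriteLine(rows.Count); // 90
    }
}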
Example #4
        public virtual void Final()
        {
            KnowlegeBasePCRules best = main_pop.get_best_database();
            double errorBest         = result.ErrorLearnSamples(best);
            double currentError      = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);

            if (currentError > errorBest)
            {
                result.RulesDatabaseSet[0] = best; // adopt the population's best only when it improves on the stored base
            }
            GC.Collect();
        }
Example #5
 public virtual void oneWatchJump(PCFuzzySystem result)
 {
     for (int j = 0; j < population_count; j++)
     {
         WJVector_gen(j);
         if (result.ErrorLearnSamples(WJVector) <= result.ErrorLearnSamples(monkey[j]))
         {
             monkey[j] = WJVector;
             // delete
             //double testval = result.ErrorLearnSamples(monkey[j]);
             //Console.WriteLine(j.ToString() + " " + iter.ToString() + " " + testval.ToString() + " - Success at WatchJump");
         }
     }
 }
        public override PCFuzzySystem Generate(PCFuzzySystem Classifier, IGeneratorConf config)
        {
            PCFuzzySystem result = Classifier;

            if (result.RulesDatabaseSet.Count == 0)
            {
                AbstractNotSafeGenerator tempGen = new GeneratorRulesEveryoneWithEveryone();
                result = tempGen.Generate(result, config);
                GC.Collect();
            }

            count_shrink = ((TermShrinkAndRotateConf)config).TSARCShrinkVars;
            size_shrink  = ((TermShrinkAndRotateConf)config).TSARCShrinkTerm;
            type_func    = ((TermShrinkAndRotateConf)config).IEWEFuncType;
            count_slices = ((TermShrinkAndRotateConf)config).IEWECountSlice;



            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Classifier.CountFeatures; i++)
            {
                int count_terms_for_var = Classifier.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;
                if (i < count_shrink)
                {
                    Varians_of_run_system.Add(count_terms_for_var - size_shrink);
                }
                else
                {
                    Varians_of_run_system.Add(count_terms_for_var);
                }
            }

            Varians_of_run_system.Sort();
            TypeTermFuncEnum type_of_term = Classifier.RulesDatabaseSet[0].TermsSet[0].TermFuncType;

            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Classifier.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(Classifier, type_of_term, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Classifier.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            Console.WriteLine(Pull_of_systems.Count());



            GC.Collect();
//            result.UnlaidProtectionFix();
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
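
Generate picks the winning variant by taking the index of the smallest learn error; a standalone sketch of that argmin idiom with hypothetical error values:

using System;
using System.Collections.Generic;
using System.Linq;

class ArgMinSketch
{
    static void Main()
    {
        // Hypothetical learn errors of the candidate systems.
        var errors = new List<double> { 7.5, 3.25, 3.25, 9.0 };

        // IndexOf(Min()) gives the first position of the smallest error,
        // which is how the best variant is chosen above.
        int bestIndex = errors.IndexOf(errors.Min());

        Console.WriteLine(bestIndex); // 1
    }
}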
Example #7
        public void eliteSelection()
        {
            double[] currentError = new double[childrenMassive.Count()];
            for (int i = 0; i < childrenMassive.Count(); i++)
            //Parallel.For(0, childrenMassive.Count(), i =>
            {
                fullFuzzySystem.RulesDatabaseSet.Add(childrenMassive[i]);
                fullFuzzySystem.UnlaidProtectionFix(childrenMassive[i]);

                currentError[i] = (fullFuzzySystem.ErrorLearnSamples(fullFuzzySystem.RulesDatabaseSet[i + 1]));
            }
            //});
            Array.Sort(currentError, childrenMassive);
            populationMassive = childrenMassive.ToList().GetRange(0, populationMassive.Count()).ToArray();
            fullFuzzySystem.RulesDatabaseSet.RemoveRange(1, childrenMassive.Count());
        }
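
A standalone sketch of the elite-selection step: sort the children by error and keep the best slice (errors and identifiers here are hypothetical):

using System;
using System.Linq;

class EliteSelectionSketch
{
    static void Main()
    {
        // Hypothetical child errors and identifiers.
        double[] childErrors = { 3.2, 1.1, 2.7, 0.9 };
        int[] children = { 0, 1, 2, 3 };
        int eliteCount = 2;

        // Same idea as eliteSelection: sort children by error, keep the best slice.
        Array.Sort(childErrors, children);
        int[] elite = children.Take(eliteCount).ToArray();

        Console.WriteLine(string.Join(", ", elite)); // 3, 1
    }
}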
Example #8
 public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
 {
     result = Classifier;
     groups = new List <int[]>();
     Init(conf);
     SetPopulation();
     Population = SortRules(Population);
     NS         = new int[m];
     for (int i = 0; i < m; i++)
     {
         NS[i] = (N - 1) / m;
     }
     cur_iter = 0;
     while (cur_iter < iter)
     {
         groups = GroupStream();
         if (p_one > rand.NextDouble())
         {
             ChooseOneCluster();
         }
         else
         {
             ChooseTwoClusters();
         }
         Population = ListPittsburgClassifierTool.SortRules(Population, result);
         Console.WriteLine(cur_iter + " - Итерация");
         Console.WriteLine("Обуч. выборка = " + result.ErrorLearnSamples(Population[0]));
         Console.WriteLine("Тест. выборка = " + result.ErrorTestSamples(Population[0]));
         cur_iter++;
     }
     Population = ListPittsburgClassifierTool.SortRules(Population, result);
     result.RulesDatabaseSet[0] = Population[0];
     return(result);
 }
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf config)
        {
            PCFuzzySystem result = Classifier;

            if (result.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }
            OptimizeTermShrinkAndRotateConf Config = config as OptimizeTermShrinkAndRotateConf;

            count_shrink = Config.OTSARCountShrinkVars;
            size_shrink  = Config.OTSARCountShrinkTerm;



            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Classifier.CountFeatures; i++)
            {
                int count_terms_for_var = Classifier.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;
                if (i < count_shrink)
                {
                    Varians_of_run_system.Add(count_terms_for_var - size_shrink);
                }
                else
                {
                    Varians_of_run_system.Add(count_terms_for_var);
                }
            }

            Varians_of_run_system.Sort();
            TypeTermFuncEnum type_of_term = Classifier.RulesDatabaseSet[0].TermsSet[0].TermFuncType;

            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Classifier.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(Classifier, type_of_term, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Classifier.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            Console.WriteLine(Pull_of_systems.Count());



            result.RulesDatabaseSet[0].TermsSet.Trim();
//            result.UnlaidProtectionFix();
            return(result);
        }
Example #10
        public override PCFuzzySystem Generate(PCFuzzySystem Classifier, IGeneratorConf config)
        {
            start_add_rules = Classifier.RulesDatabaseSet.Count;
            PCFuzzySystem result = Classifier;

            if (result.RulesDatabaseSet.Count == 0)
            {
                AbstractNotSafeGenerator tempGen = new GeneratorRulesEveryoneWithEveryone();
                result = tempGen.Generate(result, config);
                GC.Collect();
            }



            Request_count_rules = ((RullesShrinkConf)config).RSCCountRules;
            max_count_rules     = ((RullesShrinkConf)config).RSCMaxRules;
            count_slices        = ((RullesShrinkConf)config).IEWECountSlice;
            min_count_rules     = ((RullesShrinkConf)config).RSCMinRules;
            type_term           = ((RullesShrinkConf)config).IEWEFuncType;

            int         count_of_swith_off    = ((RullesShrinkConf)config).RSCMaxRules - Request_count_rules;
            List <byte> Varians_of_run_system = new List <byte>();

            for (int i = 0; i < Classifier.RulesDatabaseSet[0].RulesDatabase.Count; i++)
            {
                Varians_of_run_system.Add(1);
            }
            for (int i = 0; i < count_of_swith_off; i++)
            {
                Varians_of_run_system[i] = 0;
            }
            Generate_all_variant_in_pool(Varians_of_run_system);
            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                KnowlegeBasePCRules temp_rules = new  KnowlegeBasePCRules(result.RulesDatabaseSet[0], Pull_of_systems[i]);
                temp_rules.TrimTerms();

                result.RulesDatabaseSet.Add(temp_rules);
                result.UnlaidProtectionFix(result.RulesDatabaseSet[start_add_rules + i]);
                errors_of_systems.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[start_add_rules + i]));
            }

            int best_index           = errors_of_systems.IndexOf(errors_of_systems.Min());
            KnowlegeBasePCRules best = result.RulesDatabaseSet[start_add_rules + best_index];

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(best);
            Console.WriteLine(Pull_of_systems.Count());



            GC.Collect();
//            result.UnlaidProtectionFix();
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
 public static KnowlegeBasePCRules[] SortRules(this KnowlegeBasePCRules[] Source, PCFuzzySystem Classifier)
 {
     double[] keys = new double[Source.Count()];
     KnowlegeBasePCRules[] tempSol = Source.Clone() as KnowlegeBasePCRules[];
     for (int i = 0; i < Source.Count(); i++)
     {
         keys[i] = Classifier.ErrorLearnSamples(Source[i]);
     }
     Array.Sort(keys, tempSol);
     return(tempSol);
 }
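
A generic counterpart of this extension method, written against plain .NET only (no library types): candidates are ordered by an externally supplied error function and the source array is left untouched.

using System;
using System.Linq;

static class SortByKeyExtensions
{
    // Order candidates by an externally supplied error function without mutating the source array.
    public static T[] SortByError<T>(this T[] source, Func<T, double> error)
    {
        return source.OrderBy(error).ToArray();
    }
}

class Demo
{
    static void Main()
    {
        string[] candidates = { "a", "bbb", "cc" };
        // Here the "error" of a candidate is just its length, for illustration.
        string[] sorted = candidates.SortByError(s => (double)s.Length);
        Console.WriteLine(string.Join(", ", sorted)); // a, cc, bbb
    }
}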
Example #12
 public virtual void oneGlobalJump(PCFuzzySystem result)
 {
     for (int j = 0; j < population_count; j++)
     {
         // SSVector_gen();
         SSVector           = bestsolution;
         IndividualSSVector = new KnowlegeBasePCRules(monkey[j]);
         for (int k = 0; k < monkey[j].TermsSet.Count; k++)
         {
             for (int q = 0; q < monkey[j].TermsSet[k].CountParams; q++)
             {
                 IndividualSSVector.TermsSet[k].Parametrs[q] += (somersault_interval_left + (somersault_interval_right - somersault_interval_left) * StaticRandom.NextDouble()) * (SSVector.TermsSet[k].Parametrs[q] - monkey[j].TermsSet[k].Parametrs[q]);
             }
         }
         if (result.ErrorLearnSamples(IndividualSSVector) <= result.ErrorLearnSamples(monkey[j]))
         {
             monkey[j] = IndividualSSVector;
             // delete
             //double testval = result.ErrorLearnSamples(monkey[j]);
             //Console.WriteLine(j.ToString() + " " + iter.ToString() + " " + testval.ToString() + " - Success at Global Jump");
         }
     }
 }
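
A minimal sketch of the somersault update on plain doubles: each parameter moves toward (or past) the pivot by a random factor drawn from the somersault interval; all values are hypothetical:

using System;

class SomersaultSketch
{
    static void Main()
    {
        var rand = new Random();
        double left = -1.0, right = 1.0;      // somersault interval (hypothetical bounds)
        double[] monkey = { 0.2, 0.5, 0.9 };  // stand-in for one monkey's term parameters
        double[] pivot  = { 0.4, 0.4, 0.4 };  // stand-in for the best solution (SSVector)

        // parameter += (left + (right - left) * U) * (pivot - parameter), as in oneGlobalJump above.
        for (int q = 0; q < monkey.Length; q++)
        {
            double factor = left + (right - left) * rand.NextDouble();
            monkey[q] += factor * (pivot[q] - monkey[q]);
        }

        Console.WriteLine(string.Join(", ", monkey));
    }
}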
Example #13
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf config)
        {
            start_add_rules = Classifier.RulesDatabaseSet.Count;
            PCFuzzySystem result = Classifier;

            if (result.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }



            OptimizeRullesShrinkConf Config = config as OptimizeRullesShrinkConf;

            Request_shrink_Rule = Config.ORSCCountShrinkRules;
            int         count_of_swith_off    = Request_shrink_Rule;
            List <byte> Varians_of_run_system = new List <byte>();

            for (int i = 0; i < Classifier.RulesDatabaseSet[0].RulesDatabase.Count; i++)
            {
                Varians_of_run_system.Add(1);
            }
            for (int i = 0; i < count_of_swith_off; i++)
            {
                Varians_of_run_system[i] = 0;
            }
            Generate_all_variant_in_pool(Varians_of_run_system);
            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                KnowlegeBasePCRules temp_rules = new  KnowlegeBasePCRules(result.RulesDatabaseSet[0], Pull_of_systems[i]);
                temp_rules.TrimTerms();

                result.RulesDatabaseSet.Add(temp_rules);
                result.UnlaidProtectionFix(result.RulesDatabaseSet[start_add_rules + i]);
                errors_of_systems.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[start_add_rules + i]));
            }

            int best_index           = errors_of_systems.IndexOf(errors_of_systems.Min());
            KnowlegeBasePCRules best = result.RulesDatabaseSet[start_add_rules + best_index];

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(best);
            Console.WriteLine(Pull_of_systems.Count());



            result.RulesDatabaseSet[0].TermsSet.Trim();
//            result.UnlaidProtectionFix();
            return(result);
        }
Example #14
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Class, ILearnAlgorithmConf Conf)
        {
            result = Class;
            List <int[]> groups = new List <int[]>();

            Init(Conf);
            SetPopulation();
            Population = SortRules(Population);
            NS         = new int[Nsr];
            NS         = SetNS(Population, Nsr);
            groups     = GroupStream();
            double BestMSETest  = result.ErrorTestSamples(Population[0]);
            double BestMSELearn = result.ErrorLearnSamples(Population[0]);
            int    BestIter     = 0;

            for (int i = 1; i <= MaxIter; i++)
            {
                Console.Clear();
                Console.WriteLine((double)i * 100 / MaxIter + "%");
                Population = SetNextPosition(groups, Population);
                Population = Replacement(groups, Population);
                if (flag)
                {
                    Evaporation(groups.Last()); // Evaporation
                }
                if (BestMSETest > result.ErrorTestSamples(Population[0]))
                {
                    BestMSETest  = result.ErrorTestSamples(Population[0]);
                    BestMSELearn = result.ErrorLearnSamples(Population[0]);
                    BestIter     = i;
                }
            }
            Console.WriteLine(ToString(true));
            Console.WriteLine("Итер - " + BestIter + " MSET - " + BestMSETest + " MSEL - " + BestMSELearn);
            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
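
A standalone sketch of the bookkeeping in the loop above: remember the iteration with the lowest test error seen so far (the errors here are random stand-ins):

using System;

class BestIterationSketch
{
    static void Main()
    {
        var rand = new Random(42);
        double bestMseTest = double.MaxValue;
        int bestIter = 0;

        // Keep the iteration whose (stand-in) test error is the lowest so far.
        for (int i = 1; i <= 10; i++)
        {
            double mseTest = rand.NextDouble();
            if (mseTest < bestMseTest)
            {
                bestMseTest = mseTest;
                bestIter = i;
            }
        }
        Console.WriteLine("Iter - " + bestIter + " MSET - " + bestMseTest);
    }
}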
Example #15
        public override PCFuzzySystem Generate(PCFuzzySystem Classifier, IGeneratorConf config)
        {
            PCFuzzySystem result = Classifier;

            Systems_ready_to_test = new List <KnowlegeBasePCRules>();
            errors_of_systems     = new List <double>();


            InitEveryoneWithEveryone config1 = config as InitEveryoneWithEveryone;

            type_func        = config1.IEWEFuncType;
            count_slice_vars = config1.IEWECountSlice;


            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Classifier.CountFeatures; i++)
            {
                int count_terms_for_var = count_slice_vars[i];
                Varians_of_run_system.Add(count_terms_for_var);
            }

            Varians_of_run_system.Sort();
            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Classifier.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(result, type_func, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Classifier.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            for (int i = 0; i < count_slice_vars.Count(); i++)
            {
                count_slice_vars[i] = result.RulesDatabaseSet[0].TermsSet.Count(x => x.NumVar == i);
            }
            Console.WriteLine(Pull_of_systems.Count());

            GC.Collect();
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Example #16
        private string ErrorInfoPC(IFuzzySystem FS)
        {
            PCFuzzySystem IFS = FS as PCFuzzySystem;

            if (IFS.RulesDatabaseSet.Count < 1)
            {
                return("Точность нечеткой системы недоступна");
            }
            classLearnResult.Add(IFS.ClassifyLearnSamples(IFS.RulesDatabaseSet[0]));
            classTestResult.Add(IFS.ClassifyTestSamples(IFS.RulesDatabaseSet[0]));
            classErLearn.Add(IFS.ErrorLearnSamples(IFS.RulesDatabaseSet[0]));
            classErTest.Add(IFS.ErrorTestSamples(IFS.RulesDatabaseSet[0]));

            return("Точностью на обучающей выборке  " + classLearnResult[classLearnResult.Count - 1].ToString() + " , Точность на тестовой выборке  " + classTestResult[classTestResult.Count - 1].ToString() + " " + Environment.NewLine +
                   "Ошибкой на обучающей выборке  " + classErLearn[classErLearn.Count - 1].ToString() + " , Ошибкой на тестовой выборке  " + classErTest[classErTest.Count - 1].ToString() + " " + Environment.NewLine);
        }
Example #17
        protected KnowlegeBasePCRules[] sortSolution(KnowlegeBasePCRules[] Source)
        {
            KnowlegeBasePCRules temp = result.RulesDatabaseSet[0];

            double[] keys = new double[Source.Count()];

            KnowlegeBasePCRules[] tempSol = Source.Clone() as KnowlegeBasePCRules[];
            for (int i = 0; i < Source.Count(); i++)
            {
                result.RulesDatabaseSet[0] = Source[i];
                keys[i] = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            }

            Array.Sort(keys, tempSol);

            result.RulesDatabaseSet[0] = temp;
            return(tempSol);
        }
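
sortSolution temporarily installs each candidate into RulesDatabaseSet[0], scores it, and restores the original base afterwards. A sketch of the same install/score/restore idea on plain arrays, with a try/finally added so the original slot is restored even if scoring throws; the error measure and types here are hypothetical stand-ins, not the library's:

using System;
using System.Collections.Generic;
using System.Linq;

class SwapEvaluateSketch
{
    // Stand-in for RulesDatabaseSet[0]: a single "active" slot that the error measure implicitly reads.
    static List<double[]> activeSlot = new List<double[]> { new[] { 1.0 } };

    static double Error() => activeSlot[0].Sum();   // hypothetical error measure

    static double[][] SortByError(double[][] candidates)
    {
        double[] backup = activeSlot[0];
        double[] keys = new double[candidates.Length];
        double[][] sorted = (double[][])candidates.Clone();
        try
        {
            for (int i = 0; i < candidates.Length; i++)
            {
                activeSlot[0] = candidates[i];      // install the candidate
                keys[i] = Error();                  // score it
            }
            Array.Sort(keys, sorted);
            return sorted;
        }
        finally
        {
            activeSlot[0] = backup;                 // always restore the original base
        }
    }

    static void Main()
    {
        double[][] sorted = SortByError(new[] { new[] { 3.0 }, new[] { 1.0 }, new[] { 2.0 } });
        Console.WriteLine(sorted[0][0]);            // 1
    }
}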
Example #18
        public virtual void CheckForBest(PCFuzzySystem result)
        {
            KnowlegeBasePCRules temp = monkey.SelectBest(result, 1)[0];
            double tempnumber        = result.ErrorLearnSamples(temp);

            if (bestsolutionnumber > tempnumber)
            {
                bestsolution       = new KnowlegeBasePCRules(temp);
                bestsolutionnumber = tempnumber;
                // delete
                Console.WriteLine("NEWBEST " + bestsolutionnumber);
                final_iter = 0;
            }
            else
            {
                final_iter++;
            }
        }
Example #19
        public override int Run(string[] args)
        {
            Console.WriteLine("Start");
            fill_params(args);


            foreach (string filenametra in System.IO.Directory.GetFiles(file_learn, "*tra.dat", System.IO.SearchOption.AllDirectories))
            {
                string filenameTST = filenametra.Replace("tra.dat", "tst.dat");

                Console.WriteLine("Params get \nfile tra {0} \nfile name tst {1} ", filenametra, filenameTST);
                Class_learn_set = new SampleSet(filenametra);
                Console.WriteLine("Tra create");
                Class_test_set = new SampleSet(filenameTST);
                Console.WriteLine("Tst create");
                conf = new InitBySamplesConfig();
                conf.Init(Class_learn_set.CountVars);

                // fill_conf();
                conf.loadParams(confParams);

                file_out = filenametra.Replace("tra.dat", ((InitBySamplesConfig)conf).IBSTypeFunc.ToString() + "_out.ufs");
                Console.WriteLine("Conf Filed");
                Class_Pittsburg = new PCFuzzySystem(Class_learn_set, Class_test_set);
                Console.WriteLine("Classifier created");
                generator = new GeneratorRulesBySamples();

                Class_Pittsburg = generator.Generate(Class_Pittsburg, conf);
                Console.WriteLine("Generation complite");

                PCFSUFSWriter.saveToUFS(Class_Pittsburg, file_out);

                StreamWriter sw = new StreamWriter(Path.Combine(file_learn, ((InitBySamplesConfig)conf).IBSTypeFunc.ToString() + "_log.txt"), true);
                sw.WriteLine(filenametra + "\t" + Class_Pittsburg.ErrorLearnSamples(Class_Pittsburg.RulesDatabaseSet[0]));
                sw.WriteLine(filenameTST + "\t" + Class_Pittsburg.ErrorTestSamples(Class_Pittsburg.RulesDatabaseSet[0]));
                sw.Close();

                Console.WriteLine("Saved");
            }
            return(1);
        }
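
Run pairs each KEEL training file with its test and output files by suffix replacement; a minimal standalone sketch (file names are hypothetical):

using System;
using System.IO;

class KeelPathSketch
{
    static void Main()
    {
        // Hypothetical KEEL training file; Run() derives the matching test and
        // output file names by replacing the "tra.dat" suffix.
        string filenameTra = Path.Combine("KEEL-10", "iris-10-1tra.dat");
        string filenameTst = filenameTra.Replace("tra.dat", "tst.dat");
        string fileOut     = filenameTra.Replace("tra.dat", "_out.ufs");

        Console.WriteLine(filenameTst);  // KEEL-10/iris-10-1tst.dat (separator depends on OS)
        Console.WriteLine(fileOut);      // KEEL-10/iris-10-1_out.ufs
    }
}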
Example #20
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Class, ILearnAlgorithmConf conf) // Optimization by your algorithm is performed here
        {
            theFuzzySystem = Class;
            //    Console.WriteLine(theFuzzySystem.RulesDatabaseSet[0].TermsSet.Count);
            iterMax = ((gsa_conf)conf).Количество_итераций;
            MCount  = ((gsa_conf)conf).Количество_частиц;
            G0      = ((gsa_conf)conf).Гравитационная_постоянная;
            alpha   = ((gsa_conf)conf).Коэффициент_уменьшения;
            epsilon = ((gsa_conf)conf).Малая_константа;
            X       = new KnowlegeBasePCRules[MCount];
            Errors  = new double[MCount];
            mass    = new double[MCount];

            temp_c_Rule = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);
            X[0]        = temp_c_Rule;
            Errors[0]   = theFuzzySystem.ErrorLearnSamples(X[0]);

            ErrorZero    = Errors[0];
            ErrorBest    = Errors[0];
            BestSolution = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);

            R     = new double[MCount][, , ];
            speed = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];
            for (int i = 0; i < MCount; i++)
            {
                R[i] = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];
            }
            RR = new double[MCount, MCount];
            a  = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];

            for (int i = 1; i < MCount; i++)
            {
                temp_c_Rule = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);
                X[i]        = temp_c_Rule;
                for (int j = 0; j < X[i].TermsSet.Count; j++)
                {
                    for (int k = 0; k < X[i].TermsSet[j].Parametrs.Count(); k++)
                    {
                        X[i].TermsSet[j].Parametrs[k] = GaussRandom.Random_gaussian(rand, X[i].TermsSet[j].Parametrs[k], 0.1 * (X[i].TermsSet[j].Parametrs[k])) + theFuzzySystem.LearnSamplesSet.InputAttributes[X[i].TermsSet[j].NumVar].Scatter * 0.05;
                    }
                }

                //theFuzzySystem.RulesDatabaseSet.Add(X[i]);
                //theFuzzySystem.UnlaidProtectionFix(theFuzzySystem.RulesDatabaseSet.Count - 1);
                //Errors[i] = theFuzzySystem.ErrorLearnSamples(X[i]);
                //X[i] = theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1];
                //theFuzzySystem.RulesDatabaseSet.Remove(X[i]);
                theFuzzySystem.UnlaidProtectionFix(X[i]);
                Errors[i] = theFuzzySystem.ErrorLearnSamples(X[i]);
            }

            for (int iter = 0; iter < iterMax; iter++)
            {
                //g(t) = G(0)*e^(-a*t/T);
                G = G0 * Math.Pow(Math.E, ((-1) * alpha * iter / iterMax));

                algorithm();

                for (int r = 0; r < MCount; r++)
                {
                    theFuzzySystem.UnlaidProtectionFix(X[r]);
                    Errors[r] = theFuzzySystem.ErrorLearnSamples(X[r]);
                }

                minValue  = Errors.Min();
                iminIndex = Errors.ToList().IndexOf(minValue);

                if (minValue < ErrorBest)
                {
                    ErrorBest    = minValue;
                    BestSolution = new KnowlegeBasePCRules(X[iminIndex]);
                }
            }
            //theFuzzySystem.RulesDatabaseSet[0] = BestSolution;

            if (ErrorBest < ErrorZero)
            {
                theFuzzySystem.RulesDatabaseSet[0] = BestSolution;
            }

            return(theFuzzySystem);
        }
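
A standalone sketch of the gravitational-constant schedule G(t) = G0 * e^(-alpha * t / T) used in the loop above; the constants are hypothetical:

using System;

class GravityDecaySketch
{
    static void Main()
    {
        double G0 = 100.0, alpha = 20.0;   // hypothetical constants
        int iterMax = 5;

        // Same schedule as above: G decays exponentially over the iterations.
        for (int iter = 0; iter < iterMax; iter++)
        {
            double G = G0 * Math.Exp(-alpha * iter / (double)iterMax);
            Console.WriteLine("iter " + iter + ": G = " + G);
        }
    }
}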
Example #21
        private void weight()
        {
            double sum   = 0;
            double best  = mass[0];
            double worst = mass[0];

            int[] index = new Int32[MCount];
            int   count = 0;

            for (int i = 1; i < MCount; i++)
            {
                mass[i] = Errors[i];
                if (mass[i] > best)
                {
                    best = mass[i];
                }
                if (mass[i] < worst)
                {
                    worst = mass[i];
                }
            }
            for (int i = 0; i < MCount; i++)
            {
                if (mass[i] == best)
                {
                    count++;
                    index[count - 1] = i;
                }
            }
            if (count > 1)
            {
                for (int i = 1; i < count; i++)
                {
                    int f = index[i];
                    // Re-seed this duplicate of the best agent from the current knowledge base.
                    KnowlegeBasePCRules temp_c_Rule = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);
                    X[f] = temp_c_Rule;
                    for (int j = 0; j < X[f].TermsSet.Count; j++)
                    {
                        for (int k = 0; k < X[f].TermsSet[j].Parametrs.Count(); k++)
                        {
                            X[f].TermsSet[j].Parametrs[k] = GaussRandom.Random_gaussian(rand, X[f].TermsSet[j].Parametrs[k], 0.1 * (X[f].TermsSet[j].Parametrs[k])) + theFuzzySystem.LearnSamplesSet.InputAttributes[X[f].TermsSet[j].NumVar].Scatter * 0.05;
                        }
                    }
                    theFuzzySystem.UnlaidProtectionFix(X[f]);
                    Errors[f] = theFuzzySystem.ErrorLearnSamples(X[f]);
                    mass[f]   = Errors[f];
                    if (mass[f] > best)
                    {
                        i--;
                    }
                }
            }
            for (int i = 0; i < MCount; i++)
            {
                mass[i] = (mass[i] - worst) / (best - worst);
                sum     = sum + mass[i];
            }
            for (int i = 0; i < MCount; i++)
            {
                mass[i] = mass[i] / sum;
            }
        }
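
weight() rescales the raw errors with a min-max step and then normalizes them to sum to one; a standalone sketch with hypothetical values (it assumes best and worst differ):

using System;
using System.Linq;

class MassNormalizationSketch
{
    static void Main()
    {
        // Hypothetical raw fitness values (errors) of the agents.
        double[] mass = { 5.0, 2.0, 8.0, 2.0 };

        double best = mass.Max();   // in weight() "best" is the largest value
        double worst = mass.Min();

        // Min-max scale, then normalize so the masses sum to 1, as in weight().
        for (int i = 0; i < mass.Length; i++)
            mass[i] = (mass[i] - worst) / (best - worst);
        double sum = mass.Sum();
        for (int i = 0; i < mass.Length; i++)
            mass[i] /= sum;

        Console.WriteLine(string.Join(", ", mass.Select(m => m.ToString("F3"))));
    }
}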
Example #22
        // main computations
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            result = Classify;
            rand   = new Random();
            Init(conf);
            SetPopulation();
            bool[] BEST      = result.AcceptedFeatures;
            double bestError = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            // clear the console
            Dictionary <bool[], double> PopulationWithAccuracy = new Dictionary <bool[], double>();
            double accuracy = 0;

            // run the iterations
            for (int it = 0; it < iter; it++)
            {
                // compute the fitness function value
                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    accuracy = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
                    PopulationWithAccuracy.Add(Population[i], accuracy);
                }
                Population.Clear();
                foreach (var pair in PopulationWithAccuracy.OrderByDescending(pair => pair.Value))
                {
                    Population.Add(pair.Key);
                }
                PopulationWithAccuracy.Clear();
                double[] K    = new double[Population.Count];
                double   sumK = 0;
                double   avK  = 0;
                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    K[i]  = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
                    sumK += K[i];
                }
                avK = sumK / K.Length;
                bool[] KDis = new bool[result.CountFeatures];
                for (int i = 0; i < KDis.Length; i++)
                {
                    if (rand.Next(0, 2) == 0)
                    {
                        KDis[i] = false;
                    }
                    else
                    {
                        KDis[i] = true;
                    }
                }

                // iterate over the fitness function values
                // compute the value of D
                double dit;
                dit = it;
                double diter;
                diter = iter;

                bool D = true;

                // compute the value of Xfood
                var Xfood = new bool[result.CountFeatures];
                for (int t = 0; t < Xfood.Length; t++)
                {
                    Xfood[t] = false;

                    for (int i = 0; i < Population.Count; i++)
                    {
                        Xfood[t] = merge(Population[i][t], KDis[t]);
                    }
                }
                // compute the value of Cfood
                double Cfood = 2 * (1 - (dit / diter));
                bool   CDisfood;
                if (Cfood <= 1)
                {
                    CDisfood = false;
                }
                else
                {
                    CDisfood = true;
                }

                // compute the value of Bfood
                List <bool[]> Bfood = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    Bfood.Add(new bool[Population[i].Length]);
                    if (rand.Next(0, 2) == 0)
                    {
                        Kroofifood = false;
                    }
                    else
                    {
                        Kroofifood = true;
                    }
                    for (int t = 0; t < Bfood[i].Length; t++)
                    {
                        Bfood[i][t] = merge(merge(CDisfood, Kroofifood), Xfood[t]);
                    }
                }

                // compute the value of Bbest
                List <bool[]> Bbest = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    Bbest.Add(new bool[Population[i].Length]);
                    if (rand.Next(0, 2) == 0)
                    {
                        Kroofifood = false;
                    }
                    else
                    {
                        Kroofifood = true;
                    }
                    for (int t = 0; t < Bbest[i].Length; t++)
                    {
                        Bbest[i][t] = merge(Kroofifood, Xfood[t]);
                    }
                }

                // compute the value of B
                List <bool[]> B = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    B.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < B[i].Length; t++)
                    {
                        B[i][t] = merge(Bfood[i][t], Bbest[i][t]);
                    }
                }

                // compute the value of F
                List <bool[]> F = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    if (i == 0)
                    {
                        F.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < F[i].Length; t++)
                        {
                            F[i][t] = merge(true, B[i][t]);
                        }
                    }
                    else
                    {
                        F.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < F[i].Length; t++)
                        {
                            F[i][t] = merge(merge(true, B[i][t]), merge(false, F[i - 1][t]));
                        }
                    }
                }

                List <int>[] neihbors = new List <int> [Population.Count];
                // compute the value of alocal
                List <bool[]> alocal = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    alocal.Add(new bool[Population[i].Length]);
                    neihbors[i] = countneihbors(Population[i]);

                    for (int t = 0; t < alocal[i].Length; t++)
                    {
                        alocal[i][t] = false;
                        for (int j = 0; j < neihbors[i].Count; j++)
                        {
                            bool   KRoofij = merge(KDis[t], Population[neihbors[i][j]][t]);
                            bool[] XRoofij = new bool[Population.Count];
                            XRoofij = CalcXroof(Population[i], Population[neihbors[i][j]]);

                            alocal[i][t] = merge(KRoofij, XRoofij[t]);
                        }
                    }
                }

                // compute the value of Cbest
                bool Cbest;
                if (rand.Next(0, 2) == 0)
                {
                    Cbest = false;
                }
                else
                {
                    Cbest = true;
                }


                // compute the value of atarget
                List <bool[]> atarget = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    atarget.Add(new bool[Population[i].Length]);
                    bool[] XRoofibest = new bool[Population.Count];
                    XRoofibest = CalcXroof(Population[i], Population[0]);
                    for (int t = 0; t < alocal[i].Length; t++)
                    {
                        bool KRoofibest = KDis[t];
                        atarget[i][t] = merge(merge(Cbest, KRoofibest), XRoofibest[t]);
                    }
                }

                // compute the value of a
                List <bool[]> a = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    a.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < a[i].Length; t++)
                    {
                        a[i][t] = merge(atarget[i][t], alocal[i][t]);
                    }
                }

                // compute the value of N
                List <bool[]> N = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    if (i == 0)
                    {
                        N.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < N[i].Length; t++)
                        {
                            N[i][t] = merge(true, a[i][t]);
                        }
                    }
                    else
                    {
                        N.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < F[i].Length; t++)
                        {
                            N[i][t] = merge(a[i][t], N[i - 1][t]);
                        }
                    }
                }
                // compute the value of dX
                List <bool[]> dX = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    dX.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < a[i].Length; t++)
                    {
                        dX[i][t] = merge(merge(F[i][t], N[i][t]), D);
                    }
                }

                // print the value of BEST
                //   Console.Write("Значение BEST_ = ");
                //  Console.WriteLine(BEST);


                // compute the value of X(t+dt)
                for (int i = 0; i < Population.Count; i++)
                {
                    Population[i] = new bool[Population[i].Length];
                    for (int t = 0; t < Population[i].Length; t++)
                    {
                        Population[i][t] = merge(Population[i][t], dX[i][t]);
                    }
                }

                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    double temp = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);

                    if (temp < bestError)
                    {
                        BEST      = Population[i];
                        bestError = temp;
                    }
                }

                double y = it;
                if (y % 10 == 0 & y != 0)
                {
                    Console.WriteLine(it);
                    Console.WriteLine(bestError);
                }
            }
            result.AcceptedFeatures = BEST;
            for (int i = 0; i < result.AcceptedFeatures.Length; i++)
            {
                if (result.AcceptedFeatures[i] == false)
                {
                    Console.Write("0 ");
                }
                else
                {
                    Console.Write("1 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            Console.WriteLine(result.ErrorTestSamples(result.RulesDatabaseSet[0]));
            return(result);
        }
Example #23
 public PittsburgElementofStorage(PCFuzzySystem Checker, KnowlegeBasePCRules SourceElem, string algName) : base(algName)
 {
     Element    = new KnowlegeBasePCRules(SourceElem);
     LearnError = Checker.ErrorLearnSamples(SourceElem);
     TestError  = Checker.ErrorTestSamples(SourceElem);
 }
Example #24
        // main computations
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            rand   = new Random();
            result = Classify;
            Init(conf);
            SetPopulation();
            KnowlegeBasePCRules BEST = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            double bestError         = result.ErrorLearnSamples(BEST);

            // clear the console

#if debug
            Console.Clear();
#endif
            // run the iterations
            for (int it = 0; it < iter; it++)
            {
#if debug
                // print the iteration number
                Console.Write("Итерация __№__ = ");
                Console.WriteLine(it);
#endif
                // compute the fitness function value
                Population = ListPittsburgClassifierTool.SortRules(Population, result);
                double[] K = new double[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    K[i] = result.ErrorLearnSamples(Population[i]);
#if debug
                    Console.Write("Значние  K[i1] = ");
                    Console.WriteLine(K[i]);
#endif
                    if (double.IsNaN(K[i]) || double.IsInfinity(K[i]))
                    {
                        result.UnlaidProtectionFix(Population[i]);
                        K[i] = result.ErrorLearnSamples(Population[i]);

#if debug
                        Console.Write("Значние  K[i2] = ");
                        Console.WriteLine(K[i]);
#endif
                    }
                }
                Kworst = K.Max();
                if (double.IsNaN(Kworst) || double.IsInfinity(Kworst))
                {
                    int iworst = K.ToList().IndexOf(Kworst);
 #if debug
                    Console.Write("Значние iworst = ");
                    Console.WriteLine(iworst);
#endif
                }

#if debug
                // print Kworst
                Console.Write("Значние KWorst = ");
                Console.WriteLine(Kworst);
#endif
                Kbest = K.Min();
#if debug
                // print Kbest
                Console.Write("Значние Kbest = ");
                Console.WriteLine(Kbest);
#endif
                int ibest = K.ToList().IndexOf(Kbest);
#if debug
                // print ibest
                Console.Write("Значние ibest = ");
                Console.WriteLine(ibest);
#endif
                // iterate over the fitness function values
                // compute the value of D
                double dit;
                dit = it;
                double diter;
                diter = iter;

                double D = (dmax * (rand.NextDouble() * 2 - 1) * (dit / diter));

                // compute the value of rand1 for D
                double rand1;
                rand1 = D / (dmax * (it) / iter);
#if debug
                // print the value of rand1 for D
                Console.Write("Значение Drand = ");
                Console.WriteLine(rand1);

                // print the value of D
                Console.Write("Значение __D__ = ");
                Console.WriteLine(D);
#endif
                // compute the value of Xfood
                double divide = K.Select(x => 1 / x).ToList().Sum();
                var    Xfood  = new KnowlegeBasePCRules(Population[0]);
                for (int t = 0; t < Xfood.TermsSet.Count; t++)
                {
                    for (int p = 0; p < Xfood.TermsSet[t].CountParams; p++)
                    {
                        Xfood.TermsSet[t].Parametrs[p] = 0;

                        for (int i = 0; i < Population.Length; i++)
                        {
                            Xfood.TermsSet[t].Parametrs[p] += Population[i].TermsSet[t].Parametrs[p] / K[i];
#if debug
                            // print the value of Xfood
                            Console.Write("Значение Xfood = ");
                            Console.WriteLine(Xfood.TermsSet[t].Parametrs[p]);
#endif
                        }
                        Xfood.TermsSet[t].Parametrs[p] /= divide;
                    }
                }
#if debug
                // print divide
                Console.Write("Значние divide = ");
                Console.WriteLine(divide);
#endif
                // compute the value of Kfood
                double Kfood = result.ErrorLearnSamples(Xfood);
                if (double.IsNaN(Kfood) || double.IsInfinity(Kfood))
                {
                    result.UnlaidProtectionFix(Xfood);
                    Kfood = result.ErrorLearnSamples(Xfood);
                }
#if debug
                // print the value of Kfood
                Console.Write("Значение Kfood = ");
                Console.WriteLine(Kfood);
#endif
                // compute the value of Cfood
                double Cfood = 2 * (1 - (dit / diter));
#if debug
                // print the value of Cfood
                Console.Write("Значение Cfood = ");
                Console.WriteLine(Cfood);
#endif
                // compute the value of Bfood
                KnowlegeBasePCRules[] Bfood = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    Bfood[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofifood = CalcKroof(K[i], Kfood);
                    KnowlegeBasePCRules Xroofifood = new KnowlegeBasePCRules(CalcXroof(Population[i], Xfood));
                    for (int t = 0; t < Bfood[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < Bfood[i].TermsSet[t].CountParams; p++)
                        {
                            Bfood[i].TermsSet[t].Parametrs[p] = Cfood * KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                            // print the value of Bfood
                            Console.Write("Значение Bfood = ");
                            Console.WriteLine(Bfood[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute the value of Bbest
                KnowlegeBasePCRules[] Bbest = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    Bbest[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofifood = CalcKroof(K[i], K[ibest]);
                    KnowlegeBasePCRules Xroofifood = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[ibest]));
                    for (int t = 0; t < Bbest[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < Bbest[i].TermsSet[t].CountParams; p++)
                        {
                            Bbest[i].TermsSet[t].Parametrs[p] = KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                            // print the value of Bbest
                            Console.Write("Значение Bbest = ");
                            Console.WriteLine(Bbest[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute the value of B
                KnowlegeBasePCRules[] B = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    B[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < B[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < B[i].TermsSet[t].CountParams; p++)
                        {
                            B[i].TermsSet[t].Parametrs[p] = Bfood[i].TermsSet[t].Parametrs[p] + Bbest[i].TermsSet[t].Parametrs[p];
#if debug
                            // print the value of B
                            Console.Write("Значение __B__ = ");
                            Console.WriteLine(B[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute the value of F
                KnowlegeBasePCRules[] F = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    if (i == 0)
                    {
                        F[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < F[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                            {
                                F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p];
#if debug
                                // print the value of F
                                Console.Write("Значение __F__ = ");
                                Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                    else
                    {
                        F[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < F[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                            {
                                F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p] + wf * F[i - 1].TermsSet[t].Parametrs[p];
#if debug
                                // print the value of F
                                Console.Write("Значение __F__ = ");
                                Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }
                List <int> [] neihbors = new List <int> [Population.Length];
                // compute the value of alocal
                KnowlegeBasePCRules[] alocal = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    alocal[i]   = new KnowlegeBasePCRules(Population[i]);
                    neihbors[i] = countneihbors(Population[i]);

/*
 #if debug
 *                  // print the neighbor counts
 *                  for (int g = 0; g < Population.Length; g++)
 *                  {
 *                      Console.Write("Знаение countneihbors = ");
 *                      Console.WriteLine(countneihbors(Population[g]));
 *                  }
 #endif
 */

                    for (int t = 0; t < alocal[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < alocal[i].TermsSet[t].CountParams; p++)
                        {
                            alocal[i].TermsSet[t].Parametrs[p] = 0;
                            for (int j = 0; j < neihbors[i].Count; j++)
                            {
                                double KRoofij = CalcKroof(K[i], K[neihbors[i][j]]);
                                KnowlegeBasePCRules XRoofij = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[neihbors[i][j]]));

                                alocal[i].TermsSet[t].Parametrs[p] += KRoofij * XRoofij.TermsSet[t].Parametrs[p];

#if debug
                                // print the value of alocal
                                Console.Write("Знаение alocal = ");
                                Console.WriteLine(alocal[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }

                // compute the value of Cbest
                double Cbest = 2 * (rand.NextDouble() - (dit / diter));
#if debug
                // print the value of Cbest
                Console.Write("Значение Сbest = ");
                Console.WriteLine(Cbest);
#endif
                // compute the value of rand for Cbest
                double rand2;
                rand2 = dit / diter - Cbest / 2;
#if debug
                // print the value of rand2 for Cbest
                Console.Write("Значение Crand = ");
                Console.WriteLine(rand2);
#endif
                // compute the value of atarget
                KnowlegeBasePCRules[] atarget = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    atarget[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofibest = CalcKroof(K[i], K[ibest]);
                    KnowlegeBasePCRules XRoofibest = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[ibest]));
                    for (int t = 0; t < alocal[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < atarget[i].TermsSet[t].CountParams; p++)
                        {
                            atarget[i].TermsSet[t].Parametrs[p] = Cbest * KRoofibest * XRoofibest.TermsSet[t].Parametrs[p];
#if debug
                            // print the value of atarget
                            Console.Write("Знание atarget = ");
                            Console.WriteLine(atarget[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute the value of a
                KnowlegeBasePCRules[] a = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    a[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < a[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                        {
                            a[i].TermsSet[t].Parametrs[p] = atarget[i].TermsSet[t].Parametrs[p] + alocal[i].TermsSet[t].Parametrs[p];
#if debug
                            // print the a value
                            Console.Write("__a__ value = ");
                            Console.WriteLine(a[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute N
                KnowlegeBasePCRules[] N = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    if (i == 0)
                    {
                        N[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < N[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < N[i].TermsSet[t].CountParams; p++)
                            {
                                N[i].TermsSet[t].Parametrs[p] = Vf * a[i].TermsSet[t].Parametrs[p];
#if debug
                                // print the N value
                                Console.Write("__N__ value = ");
                                Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                    else
                    {
                        N[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < N[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < N[i].TermsSet[t].CountParams; p++)
                            {
                                N[i].TermsSet[t].Parametrs[p] = nmax * a[i].TermsSet[t].Parametrs[p] + wn * N[i - 1].TermsSet[t].Parametrs[p];
#if debug
                                // print the N value
                                Console.Write("__N__ value = ");
                                Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }

                // compute dX
                KnowlegeBasePCRules[] dX = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    dX[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < a[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                        {
                            dX[i].TermsSet[t].Parametrs[p] = F[i].TermsSet[t].Parametrs[p] + N[i].TermsSet[t].Parametrs[p] + D;
#if debug
                            // print the dX value
                            Console.Write("_dX__ value = ");
                            Console.WriteLine(dX[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }



                // print the BEST value
                //   Console.Write("BEST_ value = ");
                //  Console.WriteLine(BEST);


                // compute X(t+dt)
                for (int i = 0; i < Population.Length; i++)
                {
                    Population[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < Population[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < Population[i].TermsSet[t].CountParams; p++)
                        {
                            Population[i].TermsSet[t].Parametrs[p] = Population[i].TermsSet[t].Parametrs[p] + calcdeltat(ct) * dX[i].TermsSet[t].Parametrs[p];
#if debug
                            // print the new position X(t+dt)
                            Console.Write("X(t+dt) value = ");
                            Console.WriteLine(Population[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }



                for (int i = 0; i < Population.Length; i++)
                {
                    double temp = result.ErrorLearnSamples(Population[i]);
                    if (double.IsNaN(temp) || double.IsInfinity(temp))
                    {
                        result.UnlaidProtectionFix(Population[i]); // repair the candidate that produced the invalid error
                        temp = result.ErrorLearnSamples(Population[i]);
                    }

                    if (temp < bestError)
                    {
                        BEST      = new KnowlegeBasePCRules(Population[i]);
                        bestError = temp;
                    }
                }


                if (it % 10 == 0 && it != 0)
                {
                    Console.WriteLine(it);
                    Console.WriteLine(bestError);
                }
#if debug
                // print the best error found so far
                Console.Write("BestError value = ");
                Console.WriteLine(bestError);

                Console.WriteLine(".");
#endif
            }
            result.RulesDatabaseSet[0] = BEST;
            return(result);
        }
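
For reference, the position update implemented above follows X(t+dt) = X(t) + dt * dX with dX = F + N + D. The sketch below is a simplified, stand-alone illustration of that per-parameter update; the method and parameter names are assumptions and are not part of the library above.

        // Simplified sketch (illustrative names only) of the update X(t+dt) = X(t) + dt * (F + N + D)
        // applied to a flat parameter vector instead of a KnowlegeBasePCRules object.
        static double[] UpdatePosition(double[] x, double[] foraging, double[] induced, double diffusion, double dt)
        {
            double[] next = new double[x.Length];
            for (int p = 0; p < x.Length; p++)
            {
                double dX = foraging[p] + induced[p] + diffusion; // same structure as dX[i] above
                next[p] = x[p] + dt * dX;                         // X(t+dt) = X(t) + dt * dX
            }
            return next;
        }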
예제 #25
0
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            Random        rand   = new Random(DateTime.Now.Millisecond);
            PCFuzzySystem result = Classifier;

            BW.DoWork             += new DoWorkEventHandler(BW_DoWork);
            BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
            BW.RunWorkerAsync();



            MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
            string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";

            config.Init2(PathAlg, Classifier.LearnSamplesSet.FileName);

            countFuzzySystem = config.Итераций_алгоритма;

            allowSqare  = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
            allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
            int seedPath = rand.Next();

            sizePercent      = config.Размер_шага_по_точности;
            sizeComplexity   = config.Размер_шага_по_сложности;
            sizeInteraply    = config.Размер_шага_по_интерпретируемости;
            diviver          = config.Уменьшать_шаги_в;
            trysBeforeDivide = config.Уменьшать_шаг_после;
            path             = config.path;
            dataSetName      = config.dataSetName;
            toMany           = config.Разрешено_похожих_систем;
            isPSO            = config.toBool(config.Использовать_АРЧ);
            isANT            = config.toBool(config.Использовать_НАМК);
            isBEE            = config.toBool(config.Использовать_САПК);
            isES             = config.toBool(config.Использовать_ЕС);
            isGA             = config.toBool(config.Использовать_ГА);
            //   isBFO = config.toBool(config.Использовать_АПБ);
            isTermShrink     = config.toBool(config.Удалять_термы);
            isRuleShrink     = config.toBool(config.Удалять_правила);
            isUnionTerm      = config.toBool(config.Объединять_термы);
            isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
            countANT         = config.Использовать_НАМК_раз_за_такт;
            countPSO         = config.Использовать_за_такт_АРЧ_раз;
            countBEE         = config.Использовать_САПК_раз_за_такт;
            countES          = config.Использовать_ЕС_раз_за_такт;
            countGA          = config.Использовать_ГА_раз_за_такт;
            //  countBFO = config.Использовать_за_такт_АПБ_раз;
            typeComplexity   = (int)config.Критерий_сложности;
            typeInterpreting = (int)config.Критерий_интерпретируемости;

            List <AbstractNotSafeLearnAlgorithm> learnAlgorithms       = initAlgoritms();
            List <ILearnAlgorithmConf>           learnAlgorithmsconfig = initAlgoritmsConfigs(Classifier.CountFeatures);
            List <double> ValueLPercent        = new List <double>();
            List <double> ValueTPercent        = new List <double>();
            List <double> ValueComplexity      = new List <double>();
            List <double> ValueInterability    = new List <double>();
            List <double> SummaryGoods         = new List <double>();
            List <KnowlegeBasePCRules> Storage = new List <KnowlegeBasePCRules>();
            List <int> candidate = new List <int>();

            KnowlegeBasePCRules Best = result.RulesDatabaseSet[0];

            // result.RulesDatabaseSet[0] = Best;
            baseLearn = (result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            ValueLPercent.Add(baseLearn);
            ValueTPercent.Add(result.ErrorTestSamples(result.RulesDatabaseSet[0])); // ClassifyTestSamples());
            baseComplexity = getComplexity(result);
            ValueComplexity.Add(baseComplexity);
            baseIntebility = getInterpreting(result, allowBorder, allowSqare);
            ValueInterability.Add(baseIntebility);


            Storage.Add(Best);
            int NSCount = 0;
            int deleted = 0;

            for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++)
            {
                bool mustToDivide = true;
                int  usedAlg      = 0;
                for (int tr = 0; tr < trysBeforeDivide; tr++)
                {
                    deleted = 0;

                    // Parallel.For(0, learnAlgorithms.Count(), i =>
                    usedAlg = 0;
                    for (int i = 0; i < learnAlgorithms.Count(); i++)
                    {
                        Console.WriteLine("F****d in Storage.Add(new c_Rules(Best))");
                        Storage.Add(new KnowlegeBasePCRules(Best));
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                        result.RulesDatabaseSet.Clear();
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                        result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                        usedAlg++;
                        bool before_VAlue = true;
                        try
                        {
                            learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                            GC.Collect();
                            before_VAlue = false;
                            ValueLPercent.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[0])); // ClassifyLearnSamples());
                            ValueTPercent.Add(result.ErrorTestSamples(result.RulesDatabaseSet[0]));  //ClassifyTestSamples());
                            ValueComplexity.Add(getComplexity(result));
                            ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                            double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                            Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];

                            if (double.IsNaN(temp))
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");



                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                                usedAlg--;
                            }
                        }
                        catch (Exception)
                        {
                            if (before_VAlue)
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                            }
                            else
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");

                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                            }
                        }

                        NSCount++;
                        Console.WriteLine("F****d in ResultShow");
                        ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() +
                                      "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                        //     i++;
                    }
                    //);
                    Console.WriteLine("F****d in deleted");

                    deleted  = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
                    usedAlg -= deleted;
                    Console.WriteLine("F****d in candidate");

                    candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);

                    if (candidate.Count() > 0)
                    {
                        mustToDivide = false; break;
                    }
                }

                if (mustToDivide)
                {
                    MessageBox.Show("Divided happend ");

                    sizePercent    = sizePercent / diviver;
                    sizeComplexity = sizeComplexity / diviver;
                    sizeInteraply  = sizeInteraply / diviver;
                    continue;
                }

                Console.WriteLine("F****d in SummaryGoods");

                SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);

                Console.WriteLine("F****d in indexofBest");
                int indexofBest = getNewBest(candidate, SummaryGoods);
                if (usedAsNext.ContainsKey(indexofBest))
                {
                    usedAsNext[indexofBest]++;
                }
                else
                {
                    usedAsNext.Add(indexofBest, 1);
                }

                Console.WriteLine("Best");
                Best = Storage[indexofBest];

                Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
                int toSaveCounter = NSCount - usedAlg;
                for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++)
                {
                    result.RulesDatabaseSet[0] = Storage[i];
                    saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
                    toSaveCounter++;
                }

                Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
                result.RulesDatabaseSet[0] = Best;

                Console.WriteLine("F****d in End");
                baseLearn      = (result.ErrorLearnSamples(result.RulesDatabaseSet[0]));// ClassifyLearnSamples();
                baseComplexity = getComplexity(result);
                baseIntebility = getInterpreting(result, allowBorder, allowSqare);
                candidate.Clear();
                GC.Collect();
            }
            isEnd = true;
            Thread.Sleep(1000);
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
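
One detail worth noting in the method above: the four metric lists and Storage are appended optimistically and trimmed again whenever a candidate yields NaN or throws after evaluation, which keeps all parallel lists the same length. A minimal sketch of that rollback, with an assumed helper name that is not part of the library:

        // Hypothetical helper (illustrative only) expressing the rollback pattern used above.
        static void RollbackLast(params System.Collections.IList[] lists)
        {
            foreach (var list in lists)
            {
                list.RemoveAt(list.Count - 1); // drop the entry added for the failed candidate
            }
        }
        // usage: RollbackLast(ValueLPercent, ValueTPercent, ValueComplexity, ValueInterability, Storage);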
예제 #26
0
 public void calc_Error(PCFuzzySystem error_checker)
 {
     error_checker.UnlaidProtectionFix(hrom_vector.Core_Check);
     Error = error_checker.ErrorLearnSamples(hrom_vector.Core_Check);
 }
예제 #27
0
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Approximate, ILearnAlgorithmConf conf) // + override
        {
            result = Approximate;


            List <KnowlegeBasePCRules> Archive = new List <KnowlegeBasePCRules>();
            List <double> ErrorsArchive        = new List <double>();

            var config = (DynamicTuneConf)conf;

            maxError  = config.MaxError;
            RuleCount = config.RulesCount;
            TryCount  = config.TryCount;
            double error        = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            var    kbToOptimize = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            var    kbBest       = new KnowlegeBasePCRules(kbToOptimize);
            double errorBefore  = Double.MaxValue;

            result.UnlaidProtectionFix(kbToOptimize);

            List <input_space> variable_spaces = new List <input_space>();

            for (int i = 0; i < result.LearnSamplesSet.InputAttributes.Count; i++)
            {
                List <Term> terms_of_variable = new List <Term>();
                terms_of_variable = kbToOptimize.TermsSet.Where(term => term.NumVar == i).ToList();
                variable_spaces.Add(new input_space(terms_of_variable, i));
            }

            int indexRegion = -1,
                indexVar    = -1,
                number_of_input_variables = variable_spaces.Count;

            int tryCount = 0;



            while (error > maxError)
            {
                if (Double.IsInfinity(error))
                {
                    throw new Exception("Something went wrong, error is Infinity, region: " + indexRegion);
                }
                if (Double.IsNaN(error))
                {
                    throw new Exception("Something went wrong, error is NaN, region: " + indexRegion);
                }

                region_side[][] sides = new region_side[number_of_input_variables][];
                for (int i = 0; i < number_of_input_variables; i++)
                {
                    sides[i] = variable_spaces[i].get_region_sides();
                }
                var cartresult = CartesianProduct.Get(sides);

                List <region2> regions = new List <region2>();

                foreach (var x in cartresult)
                {
                    regions.Add(new region2(x.ToList(), result, variable_spaces));
                }

                List <double> region_errors = regions.Select(x => x.region_error()).ToList();
                indexRegion = region_errors.IndexOf(region_errors.Max());

                for (int i = 0; i < region_errors.Count; i++)
                {
                    if (Double.IsNaN(region_errors[i]) || Double.IsInfinity(region_errors[i]) ||
                        Double.IsNegativeInfinity(region_errors[i]) || Double.IsPositiveInfinity(region_errors[i]))
                    {
                        region_errors[i] = 0;
                    }
                }

                List <double> variable_errors = regions[indexRegion].variable_errors();
                bool          check1          = false;
                for (int i = 1; i < variable_errors.Count; i++)
                {
                    if (variable_errors[i - 1] != variable_errors[i])
                    {
                        check1 = true;
                        break;
                    }
                }
                if (!check1)
                {
                    indexVar = StaticRandom.Next(variable_errors.Count); // all errors equal: pick any variable uniformly
                }
                else
                {
                    indexVar = variable_errors.IndexOf(variable_errors.Max());
                }

                Term new_term = regions[indexRegion].new_term(indexVar);
                result.RulesDatabaseSet[0] = kbToOptimize;
                kbToOptimize.TermsSet.Add(new_term);

                // Rules (CHECK REFERENCE TYPES)
                int @var = indexVar;

                var rulesLeft = kbToOptimize.RulesDatabase.Where(
                    rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].left)).ToList();
                var rulesRight = kbToOptimize.RulesDatabase.Where(
                    rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].right)).ToList();
                for (int j = 0; j < rulesLeft.Count; j++)
                {
                    int[] order = new int[rulesLeft[j].ListTermsInRule.Count];
                    for (int k = 0; k < rulesLeft[j].ListTermsInRule.Count; k++)
                    {
                        Term temp_term = rulesLeft[j].ListTermsInRule[k];
                        if (temp_term == regions[indexRegion].sides[indexVar].left)
                        {
                            temp_term = new_term;
                        }
                        order[k] = kbToOptimize.TermsSet.FindIndex(x => x == temp_term);
                    }
///!!!!
                    string temp_approx_Values = kbToOptimize.RulesDatabase[j].LabelOfClass;

                    /*        double[] temp_approx_RegressionConstantConsequent =
                     *          kbToOptimize.RulesDatabase[j].RegressionConstantConsequent.Clone() as double[];
                     */


                    PCRule temp_rule = new PCRule(
                        kbToOptimize.TermsSet, order, temp_approx_Values);

                    // double[] dC = null;
//!!!
                    temp_rule.LabelOfClass = KNNClassName.NearestClass(result, temp_rule.ListTermsInRule.ToList());



                    kbToOptimize.RulesDatabase.Add(temp_rule);


//!!!
                    rulesLeft[j].LabelOfClass = KNNClassName.NearestClass(result, rulesLeft[j].ListTermsInRule.ToList());
                    //           rulesLeft[j].RegressionConstantConsequent = (double[])dC.Clone();
                }

                foreach (var rule in rulesRight)
                {
//!!!

                    rule.LabelOfClass = KNNClassName.NearestClass(
                        result, rule.ListTermsInRule.ToList());
                    //               rule.RegressionConstantConsequent = dC;
                }

                variable_spaces[indexVar].terms.Add(new_term);
                variable_spaces[indexVar].terms.Sort(new CompararerByPick());

                // Re-evaluate the system's error
//!!!!
                error = result.ErrorLearnSamples(kbToOptimize);

                if ((kbToOptimize.RulesDatabase.Count > config.RulesCount))
                {
                    break;
                }

#if Console
                Console.WriteLine(error + " " + kbToOptimize.TermsSet.Count + " terms\n");
                for (int i = 0; i < variable_spaces.Count; i++)
                {
                    Console.WriteLine(variable_spaces[i].terms.Count + " terms for variable " + i + "\n");
                }
#endif
                result.RulesDatabaseSet[0] = kbToOptimize;
                // Keep the best knowledge base found so far in first place
                if (error < errorBefore)
                {
                    kbBest      = new KnowlegeBasePCRules(kbToOptimize);
                    errorBefore = error;
                    tryCount    = 0;
                }
                else
                {
                    tryCount++;
                }
                if (tryCount > TryCount)
                {
                    break;
                }
            }


            result.RulesDatabaseSet[0] = kbBest;
            RuleCount = kbBest.RulesDatabase.Count;
            TryCount  = tryCount;

            return(result);
        }
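
The stopping logic above keeps the best knowledge base found so far and terminates when either the rule count exceeds config.RulesCount or no improvement is observed for more than TryCount consecutive term additions. Below is a condensed, illustrative sketch of that bookkeeping; the method name is an assumption, the variables mirror errorBefore, tryCount and TryCount in the code.

        // Illustrative sketch of the patience-based stopping rule used above.
        static bool ShouldStop(double error, ref double errorBefore, ref int tryCount, int maxTries)
        {
            if (error < errorBefore)
            {
                errorBefore = error; // new best, reset the patience counter
                tryCount = 0;
                return false;
            }
            tryCount++;
            return tryCount > maxTries; // stop after too many non-improving additions
        }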
예제 #28
0
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            Init(conf);
            KnowlegeBasePCRules temp_c_Rule = new KnowlegeBasePCRules(Classifier.RulesDatabaseSet[0]);
            PCFuzzySystem       result      = Classifier;
            string file_string        = @"..\logs_" + result.TestSamplesSet.FileName + ".txt";
            string file_string_to_txt = @"..\result_" + result.TestSamplesSet.FileName + ".txt";

            for (int t = 0; t < population_count; t++)
            {
                monkey[t] = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
                if (t > 3)
                {
                    for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++)
                    {
                        for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++)
                        {
                            //monkey[t].TermsSet[k].Parametrs[q] = StaticRandom.NextDouble() * (result.RulesDatabaseSet[0].TermsSet[k].Max - result.RulesDatabaseSet[0].TermsSet[k].Min);
                            monkey[t].TermsSet[k].Parametrs[q] = GaussRandom.Random_gaussian(rand, monkey[t].TermsSet[k].Parametrs[q], monkey[t].TermsSet[k].Parametrs[q] * 0.05);
                        }
                    }
                }

                result.UnlaidProtectionFix(monkey[t]);

                // delete
                testvals[t] = result.ErrorLearnSamples(monkey[t]);
                Console.WriteLine("Begin: " + t.ToString() + " " + iter.ToString() + " " + testvals[t].ToString());
            }
            bestsolution       = new KnowlegeBasePCRules(monkey.SelectBest(result, 1)[0]);
            bestsolutionnumber = result.ErrorLearnSamples(bestsolution);
            deltaLength        = result.RulesDatabaseSet[0].TermsSet.Sum(x => x.Parametrs.Length);
            if (debug)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                {
                    file.WriteLine(DateTime.Today.ToString() + "\t" + result.LearnSamplesSet.FileName);
                    file.WriteLine("Parameters:");
                    file.WriteLine("Population\t" + population_count.ToString());
                    file.WriteLine("Iteration count\t" + iter_amount.ToString());
                    file.WriteLine("Crawl count\t" + crawl_iter.ToString());
                    file.WriteLine("Jump count\t" + jump_iter.ToString());
                    file.WriteLine("Somersault count\t" + somersault_iter.ToString());
                    file.WriteLine("Crawl step\t" + step.ToString());    // crawl step
                    file.WriteLine("Jump step\t" + watch_jump_parameter.ToString());
                    file.WriteLine("Somersault left border\t" + somersault_interval_left.ToString());
                    file.WriteLine("Somersault right border\t" + somersault_interval_right.ToString());
                    file.WriteLine("\t\tMonkeys");
                    file.Write("Iterations\t");
                    for (int t = 0; t < population_count; t++)
                    {
                        file.Write("\t" + t);
                    }
                    file.WriteLine();
                    file.Write("0\tbegin");
                    for (int t = 0; t < population_count; t++)
                    {
                        file.Write("\t" + testvals[t].ToString());
                    }

                    // Excel-friendly inserts
                    // the largest value in the table
                    file.WriteLine();
                }
            }


            //iter_amount = somersault_iter * (1 + jump_iter * (1 + crawl_iter));
            iter_amount = (((crawl_iter + jump_iter) * jump_iter) + somersault_iter) * somersault_iter;
            for (int r = 0; r < somersault_iter; r++)
            {
                for (int t = 0; t < jump_iter; t++)
                {
                    for (int e = 0; e < crawl_iter; e++)
                    {
                        iter++;
                        oneClimb(result, deltaLength, step);
                        CheckForBest(result);
                        //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());

                        // debug
                        if (debug)
                        {
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                            {
                                file.Write(iter.ToString() + "\tcrawl");
                                for (int p = 0; p < population_count; p++)
                                {
                                    file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                                }
                                file.WriteLine();
                            }
                        }
                    }
                    for (int e = 0; e < jump_iter; e++)
                    {
                        iter++;
                        oneWatchJump(result);
                        //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
                        CheckForBest(result);
                        // debug
                        if (debug)
                        {
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                            {
                                file.Write(iter.ToString() + "\tlocaljump");
                                for (int p = 0; p < population_count; p++)
                                {
                                    file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                                }
                                file.WriteLine();
                            }
                        }
                    }
                }
                for (int e = 0; e < somersault_iter; e++)
                {
                    iter++;
                    oneGlobalJump(result);
                    CheckForBest(result);
                    // debug
                    if (debug)
                    {
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                        {
                            file.Write(iter.ToString() + "\tglobaljump");
                            for (int p = 0; p < population_count; p++)
                            {
                                file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                            }
                            file.WriteLine();
                        }
                    }
                    Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
                }
            }
            //Console.WriteLine(final_iter.ToString() + "/" + final_counter.ToString());
            // final refinement phase: shrink the step sizes and keep searching
            while (final_iter < final_counter)
            {
                step *= 0.9;
                watch_jump_parameter      *= 0.9;
                somersault_interval_left  *= 0.9;
                somersault_interval_right *= 0.9;
                for (int r = 0; r < somersault_iter; r++)
                {
                    oneClimb(result, deltaLength, step);
                    CheckForBest(result);
                    iter++;
                }
                for (int t = 0; t < jump_iter; t++)
                {
                    oneWatchJump(result);
                    CheckForBest(result);
                    iter++;
                }
                for (int e = 0; e < crawl_iter; e++)
                {
                    oneGlobalJump(result);
                    CheckForBest(result);
                    iter++;
                }
                Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
            }

            /*  for (int t = 0; t < population_count; t++)
             *    if (result.ErrorLearnSamples(monkey[best]) < result.ErrorLearnSamples(monkey[t]))
             *        best = t; */
            CheckForBest(result);
            if (bestsolutionnumber <= result.ErrorLearnSamples(result.RulesDatabaseSet[0]))
            {
                result.RulesDatabaseSet[0] = bestsolution;
            }
            iter = 0;
            if (debug)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                {
                    file.WriteLine("Results\t" + result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
                }
            }
            if (totxt)
            {
                using (System.IO.StreamWriter file_result = new System.IO.StreamWriter(file_string_to_txt, true))
                {
                    file_result.WriteLine(result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
                }
            }
            return(result);
        }
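
In the initialization above, every monkey except the first few is seeded by perturbing each term parameter with Gaussian noise whose standard deviation is 5% of the parameter value. The sketch below reproduces that perturbation on a plain array; the Box-Muller helper merely stands in for GaussRandom.Random_gaussian and, like the method names, is an assumption rather than the library implementation (assumes using System).

        // Illustrative 5% Gaussian perturbation, as used when seeding monkey[t] above.
        static void Perturb(double[] parameters, Random rand)
        {
            for (int q = 0; q < parameters.Length; q++)
            {
                // mean = current value, sigma = 5% of the current value, as in the code above
                parameters[q] = Gaussian(rand, parameters[q], parameters[q] * 0.05);
            }
        }

        // Box-Muller transform; stands in for GaussRandom.Random_gaussian.
        static double Gaussian(Random rand, double mean, double sigma)
        {
            double u1 = 1.0 - rand.NextDouble();
            double u2 = rand.NextDouble();
            return mean + sigma * Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);
        }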
예제 #29
0
 private static void writeAboutEstimates(XmlWriter writer, PCFuzzySystem Classifier)
 {
     writer.WriteStartElement("Estimates");
     if (Classifier.TestSamplesSet != null)
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(22));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyTestSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorTestSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
     }
     else
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(20));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
     }
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBSumStrait()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICSumStraigth()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumReverce");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisSumStraight()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "NormalIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getNormalIndex()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "RealIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexReal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumStraigthIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumReverseIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "ComplexitIt");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getComplexit()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "CountRules");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getRulesCount()));
     writer.WriteEndElement();
     writer.WriteEndElement();
 }
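
A hedged usage sketch for the writer above: wrapping the call in a standard System.Xml.XmlWriter. The output file name, the writer settings, and the enclosing root element are illustrative assumptions; only writeAboutEstimates and PCFuzzySystem come from the code itself, and the call is assumed to be made from within the same class since the method is private.

     // Usage sketch (assumed file name and root element).
     using (var writer = System.Xml.XmlWriter.Create("estimates.xml",
            new System.Xml.XmlWriterSettings { Indent = true }))
     {
         writer.WriteStartElement("Report");
         writeAboutEstimates(writer, Classifier); // Classifier: an initialized PCFuzzySystem
         writer.WriteEndElement();
     }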
예제 #30
0
        private void OriginalOperator(List <KnowlegeBasePCRules> Population, int cluster_index)
        {
            NewPopulation = new KnowlegeBasePCRules[groups[cluster_index].Length];
            double epsi_newstep = rand.NextDouble() * Math.Exp(1 - (iter / (iter - cur_iter + 1)));

            for (int i = 0; i < groups[cluster_index].Length; i++)
            {
                NewPopulation[i] = new KnowlegeBasePCRules(Population[groups[cluster_index][i]]); // deep copy so the original is not mutated during the comparison below
            }
            for (int i = 0; i < groups[cluster_index].Length; i++)
            {
                int number = groups[cluster_index][i];

                for (int j = 0; j < NewPopulation[i].TermsSet.Count; j++)
                {
                    for (int k = 0; k < NewPopulation[i].TermsSet[j].Parametrs.Length; k++)
                    {
                        NewPopulation[i].TermsSet[j].Parametrs[k] += epsi_newstep * rand.NextDouble();
                    }
                }
                double errornew = result.ErrorLearnSamples(NewPopulation[i]);
                double errorold = result.ErrorLearnSamples(Population[number]);
                if (errornew < errorold)
                {
                    Population[number] = NewPopulation[i];
                }
            }
        }
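
The acceptance rule above is greedy: a perturbed copy replaces the original group member only if its learning error improves. A minimal generic sketch of that rule, with illustrative names (Evaluate stands in for result.ErrorLearnSamples):

        // Generic greedy-accept sketch; keep the candidate only when it scores strictly better.
        static T AcceptIfBetter<T>(T current, T candidate, System.Func<T, double> evaluate)
        {
            return evaluate(candidate) < evaluate(current) ? candidate : current;
        }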