Example #1
        public override FuzzySystem.SingletoneApproximate.SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf conf)
        {
            Mnk_lib.Mnk_class Mnk_me = new Mnk_lib.Mnk_class();

            double [,,] Extracted_rules  = extract_Rules(Approximate.RulesDatabaseSet[0]);
            double [,] Extracted_Samples = extract_Sample_table(Approximate.LearnSamplesSet);
            double [] Extracted_Samples_out = extract_Sample_table_Out(Approximate.LearnSamplesSet);
            int       count_rules           = Approximate.RulesDatabaseSet[0].RulesDatabase.Count;
            int       count_samples         = Approximate.LearnSamplesSet.CountSamples;
            int       count_Vars            = Approximate.LearnSamplesSet.CountVars;

            double []        New_consq = new double[count_rules];
            TypeTermFuncEnum type_Func = Approximate.RulesDatabaseSet[0].TermsSet[0].TermFuncType;
            int type_func = (int)type_Func;

            Mnk_me.mnk_R(Extracted_rules, count_rules, type_func, Extracted_Samples, Extracted_Samples_out, count_samples, count_Vars, out New_consq);

            SAFuzzySystem Result = Approximate;

            double result_before = Result.approxLearnSamples(Result.RulesDatabaseSet[0]);

            double [] Back_consq = Result.RulesDatabaseSet[0].all_conq_of_rules;
            Result.RulesDatabaseSet[0].all_conq_of_rules = New_consq;
            double result_after = Result.approxLearnSamples(Result.RulesDatabaseSet[0]);

            if (result_before < result_after)
            {
                Result.RulesDatabaseSet[0].all_conq_of_rules = Back_consq;
            }
            GC.Collect();
            Result.RulesDatabaseSet[0].TermsSet.Trim();
            return(Result);
        }
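The accept-or-revert pattern in Example #1 (measure approxLearnSamples before and after swapping in the new consequents, then roll back if the error grew) recurs throughout these examples. A minimal sketch of that pattern, assuming only the SAFuzzySystem and KnowlegeBaseSARules API already shown above (the helper name TryAcceptCandidate is hypothetical, not part of the library):

        // Sketch only: keep a candidate knowledge base in slot 0 only if it lowers
        // the learn-sample error returned by approxLearnSamples (lower is better).
        public static bool TryAcceptCandidate(SAFuzzySystem system, KnowlegeBaseSARules candidate)
        {
            KnowlegeBaseSARules backup = system.RulesDatabaseSet[0];
            double errorBefore = system.approxLearnSamples(backup);

            system.RulesDatabaseSet[0] = candidate;
            double errorAfter = system.approxLearnSamples(candidate);

            if (errorAfter >= errorBefore)
            {
                system.RulesDatabaseSet[0] = backup; // no improvement, restore the original
                return false;
            }
            return true;
        }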
Example #2
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approximate, ILearnAlgorithmConf conf)
        {
            BacteryAlgorithmConfig Config = conf as BacteryAlgorithmConfig;

            sendBactery    = Config.BFOCountSolution;
            interPSOtoSend = Config.BFOCountIteration;
            result         = Approximate;


            if (result.RulesDatabaseSet.Count < 1)
            {
                throw new InvalidDataException("Нечеткая система не проинициализированна");
            }
            KnowlegeBaseSARules backSave = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            double backResult            = result.approxLearnSamples(result.RulesDatabaseSet[0]);

            savetoUFS(result.RulesDatabaseSet, 0, 0, 0);
            BacteryRunner();
            KnowlegeBaseSARules[] solutions = loadDatabase();
            solutions = sortSolution(solutions);
            if (solutions.Count() < 1)
            {
                result.RulesDatabaseSet[0] = backSave; return(result);
            }
            result.RulesDatabaseSet[0] = solutions[0];
            double newResult = result.approxLearnSamples(result.RulesDatabaseSet[0]);

            if (newResult > backResult)
            {
                result.RulesDatabaseSet[0] = backSave;
            }

            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Example #3
        public virtual void Init(ILearnAlgorithmConf conf)
        {
            currentConf     = conf as GeneticConf;
            fullFuzzySystem = result;
            step            = 0;
            errorAfter      = 0;
            errorBefore     = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            backUp          = result.RulesDatabaseSet[0];

            initFunc = new initFuncType(localInit);
            if (currentConf.GENCTypeInit == GeneticConf.Alg_Init_Type.Глобальный)
            {
                initFunc = new initFuncType(globalInit);
            }

            crossoverFunc = new crossoverFuncType(unifiedCrossover);
            if (currentConf.GENCTypeCrossover == GeneticConf.Alg_Crossover_Type.Многоточечный)
            {
                crossoverFunc = new crossoverFuncType(pointsCrossover);
            }

            selectionFunc = new selectionFuncType(rouletteSelection);
            if (currentConf.GENCTypeSelection == GeneticConf.Alg_Selection_Type.Случайный)
            {
                selectionFunc = new selectionFuncType(randomSelection);
            }
            if (currentConf.GENCTypeSelection == GeneticConf.Alg_Selection_Type.Элитарный)
            {
                selectionFunc = new selectionFuncType(eliteSelection);
            }
            fullInit(); // Initialization happens here
        }
Example #4
        /// <summary>
        /// Last step
        /// </summary>
        protected virtual void prepareFinalFuzzySystem()
        {
            double startError = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            double afterError = result.approxLearnSamples(newSolution);

            if (startError > afterError)
            {
                result.RulesDatabaseSet[0] = newSolution;
            }
            result.RulesDatabaseSet.RemoveRange(1, result.RulesDatabaseSet.Count - 1);
        }
Example #5
        public void eliteSelection()
        {
            double[] currentError = new double[childrenMassive.Count()];
            for (int i = 0; i < childrenMassive.Count(); i++)
            //Parallel.For(0, childrenMassive.Count(), i =>
            {
                fullFuzzySystem.RulesDatabaseSet.Add(childrenMassive[i]);
                fullFuzzySystem.UnlaidProtectionFix(childrenMassive[i]);

                currentError[i] = fullFuzzySystem.approxLearnSamples(fullFuzzySystem.RulesDatabaseSet[i + 1]);
            }
            //});
            Array.Sort(currentError, childrenMassive);
            populationMassive = childrenMassive.ToList().GetRange(0, populationMassive.Count()).ToArray();
            fullFuzzySystem.RulesDatabaseSet.RemoveRange(1, childrenMassive.Count());
        }
Example #6
        public override SAFuzzySystem Generate(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, IGeneratorConf config)
        {
            SAFuzzySystem result = Approximate;

            if (result.RulesDatabaseSet.Count == 0)
            {
                AbstractNotSafeGenerator tempGen = new GeneratorRulesEveryoneWithEveryone();
                result = tempGen.Generate(result, config);
                GC.Collect();
            }

            count_shrink = ((TermShrinkAndRotateConf)config).TSARCShrinkVars;
            size_shrink  = ((TermShrinkAndRotateConf)config).TSARCShrinkTerm;
            type_func    = ((TermShrinkAndRotateConf)config).IEWEFuncType;
            count_slices = ((TermShrinkAndRotateConf)config).IEWECountSlice;



            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = Approximate.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;
                if (i < count_shrink)
                {
                    Varians_of_run_system.Add(count_terms_for_var - size_shrink);
                }
                else
                {
                    Varians_of_run_system.Add(count_terms_for_var);
                }
            }

            Varians_of_run_system.Sort();
            TypeTermFuncEnum type_of_term = Approximate.RulesDatabaseSet[0].TermsSet[0].TermFuncType;

            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Approximate.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(Approximate, type_of_term, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Approximate.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            Console.WriteLine(Pull_of_systems.Count());



            GC.Collect();
//            result.UnlaidProtectionFix();
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Example #7
        private int[] SetNS(KnowlegeBaseSARules[] Populationt, int Nsrt)
        {
            double Sum = 0;

            int[] NSt = new int[Nsrt];
            for (int i = 0; i < Nsrt; i++)
            {
                Sum += result.approxLearnSamples(Populationt[i]);
            }
            for (int i = 0; i < Nsrt; i++)
            {
                double tmp = result.approxLearnSamples(Populationt[i]);
                NSt[i] = (int)Math.Round((tmp / Sum) * Nraindrops);
            }
            NSt[Nsrt - 1] += (Nraindrops - NSt.Sum());
            return(NSt);
        }
Example #8
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            result = Approx;
            Init(conf);
            HeadLeader       = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVector   = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVectorLL = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVectorHL = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            for (int i = 0; i < VelocityVector.TermsSet.Count; i++)
            {
                for (int j = 0; j < VelocityVector.TermsSet[i].Parametrs.Length; j++)
                {
                    VelocityVector.TermsSet[i].Parametrs[j]   = 0;
                    VelocityVectorLL.TermsSet[i].Parametrs[j] = 0;
                    VelocityVectorHL.TermsSet[i].Parametrs[j] = 0;
                }
            }
            SetPopulation();
            ParticlesBest = new Dictionary <KnowlegeBaseSARules, KnowlegeBaseSARules>();
            foreach (var Particle in Population)
            {
                ParticlesBest.Add(Particle, Universal);
            }
            LocalLeaders = new KnowlegeBaseSARules[numberOfLocalLeaders];
            Console.WriteLine(LocalLeaders.Length);
            ExplorerParticles = new KnowlegeBaseSARules[numberOfAllParts - numberOfAimlessParts - numberOfLocalLeaders - 1];
            Console.WriteLine(ExplorerParticles.Length);
            AimlessParticles = new KnowlegeBaseSARules[numberOfAimlessParts];
            Console.WriteLine(AimlessParticles.Length);
            while (iter < MaxIter)
            {
                Population = ListSingletonApproximateTool.SortRules(Population, result);
                SetRoles();
                ChangeExplorersPositions();
                ChangeAimlessPositions();
                DiscardRoles();
                iter++;
                Console.WriteLine("Iteration: " + iter.ToString());
                Console.WriteLine(result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0])));
                Console.WriteLine(result.RMSEtoMSEforTest(result.approxTestSamples(Population[0])));
                Console.WriteLine(result.approxLearnSamples(Population[numberOfLocalLeaders + 1]));
            }

            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
Example #9
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            result = Approx;
            List <int[]> groups = new List <int[]>();

            Init(conf);
            SetPopulation();
            Population = ListSingletonApproximateTool.SortRules(Population, result);
            NS         = new int[Nsr];
            NS         = SetNS(Population, Nsr);
            groups     = GroupStream();
            double BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
            double BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
            int    BestIter     = 0;

            /*StringBuilder sb = new StringBuilder();
             * sb.AppendLine("sep=.");*/
            for (int i = 1; i <= MaxIter; i++)
            {
                Console.Clear();
                Console.WriteLine((double)i * 100 / MaxIter + "%");
                Population = SetNextPosition(groups, Population);
                Population = Replacement(groups, Population);
                if (flag)
                {
                    Evaporation(groups.Last()); // Evaporation
                }
                if (BestMSETest > result.RMSEtoMSEforTest(result.approxTestSamples(Population[0])))
                {
                    BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
                    BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
                    BestIter     = i;
                }
                //sb.AppendLine((result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]))).ToString() + "." + (result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]))).ToString());
            }

            /*FileStream file1 = new FileStream("F:\\Table.scv", FileMode.Create);
             * StreamWriter writer = new StreamWriter(file1);
             * writer.Write(sb);
             * writer.Close();
             * file1.Close();*/
            Console.WriteLine(ToString(true));
            Console.WriteLine("Итер - " + BestIter + " MSET - " + BestMSETest + " MSEL - " + BestMSELearn);
            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
Example #10
        public override SAFuzzySystem Generate(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, IGeneratorConf config)
        {
            start_add_rules = Approximate.RulesDatabaseSet.Count;
            SAFuzzySystem result = Approximate;

            if (result.RulesDatabaseSet.Count == 0)
            {
                AbstractNotSafeGenerator tempGen = new GeneratorRulesEveryoneWithEveryone();
                result = tempGen.Generate(result, config);

                GC.Collect();
            }



            Request_count_rules = ((RullesShrinkConf)config).RSCCountRules;
            max_count_rules     = ((RullesShrinkConf)config).RSCMaxRules;
            count_slices        = ((RullesShrinkConf)config).IEWECountSlice;
            min_count_rules     = ((RullesShrinkConf)config).RSCMinRules;
            type_term           = ((RullesShrinkConf)config).IEWEFuncType;

            int         count_of_swith_off    = ((RullesShrinkConf)config).RSCMaxRules - Request_count_rules;
            List <byte> Varians_of_run_system = new List <byte>();

            for (int i = 0; i < Approximate.RulesDatabaseSet[0].RulesDatabase.Count; i++)
            {
                Varians_of_run_system.Add(1);
            }
            for (int i = 0; i < count_of_swith_off; i++)
            {
                Varians_of_run_system[i] = 0;
            }
            Generate_all_variant_in_pool(Varians_of_run_system);
            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                KnowlegeBaseSARules temp_rules = new  KnowlegeBaseSARules(result.RulesDatabaseSet[0], Pull_of_systems[i]);
                temp_rules.TrimTerms();

                result.RulesDatabaseSet.Add(temp_rules);
                result.UnlaidProtectionFix(result.RulesDatabaseSet[start_add_rules + i]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[start_add_rules + i]));
            }

            int best_index           = errors_of_systems.IndexOf(errors_of_systems.Min());
            KnowlegeBaseSARules best = result.RulesDatabaseSet[start_add_rules + best_index];

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(best);
            Console.WriteLine(Pull_of_systems.Count());



            GC.Collect();
//            result.UnlaidProtectionFix();
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Example #11
        public override SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf config)
        {
            SAFuzzySystem result = Approximate;

            if (result.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }
            OptimizeTermShrinkAndRotateConf Config = config as OptimizeTermShrinkAndRotateConf;

            count_shrink = Config.OTSARCountShrinkVars;
            size_shrink  = Config.OTSARCountShrinkTerm;



            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = Approximate.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;
                if (i < count_shrink)
                {
                    Varians_of_run_system.Add(count_terms_for_var - size_shrink);
                }
                else
                {
                    Varians_of_run_system.Add(count_terms_for_var);
                }
            }

            Varians_of_run_system.Sort();
            TypeTermFuncEnum type_of_term = Approximate.RulesDatabaseSet[0].TermsSet[0].TermFuncType;

            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Approximate.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(result, type_of_term, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Approximate.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            Console.WriteLine(Pull_of_systems.Count());



            result.RulesDatabaseSet[0].TermsSet.Trim();
//            result.UnlaidProtectionFix();
            return(result);
        }
Example #12
 public static KnowlegeBaseSARules[] SortRules(this KnowlegeBaseSARules[] Source, SAFuzzySystem Approx)
 {
     double[] keys = new double[Source.Count()];
     KnowlegeBaseSARules[] tempSol = Source.Clone() as KnowlegeBaseSARules[];
     for (int i = 0; i < Source.Count(); i++)
     {
         keys[i] = Approx.approxLearnSamples(Source[i]);
     }
     Array.Sort(keys, tempSol);
     return(tempSol);
 }
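The SortRules extension above is what Examples #8 and #9 call through ListSingletonApproximateTool. A hypothetical call site, sketched under the same assumptions (the KeepBestOf wrapper is illustrative, not part of the library):

        // Sketch only: order a candidate population by learn-sample error
        // (SortRules sorts ascending, so the lowest error comes first) and
        // install the best knowledge base into slot 0.
        public static void KeepBestOf(SAFuzzySystem system, KnowlegeBaseSARules[] population)
        {
            KnowlegeBaseSARules[] sorted = population.SortRules(system);
            system.RulesDatabaseSet[0] = sorted[0];
            system.RulesDatabaseSet[0].TermsSet.Trim();
        }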
Example #13
        public virtual void oneIterate(SAFuzzySystem result)
        {
            for (int j = 0; j < count_particle; j++)
            {
                w = 1 / (1 + Math.Exp(-(Errors[j] - OldErrors[j]) / 0.01));
                for (int k = 0; k < X[j].TermsSet.Count; k++)
                {
                    for (int q = 0; q < X[j].TermsSet[k].CountParams; q++)
                    {
                        double bp = Pi[j].TermsSet[k].Parametrs[q];
                        V[j].TermsSet[k].Parametrs[q] = V[j].TermsSet[k].Parametrs[q] * w + c1 * rnd.NextDouble() * (bp - X[j].TermsSet[k].Parametrs[q]) +
                                                        c2 * rnd.NextDouble() * (Pg.TermsSet[k].Parametrs[q] - X[j].TermsSet[k].Parametrs[q]);
                        X[j].TermsSet[k].Parametrs[q] += V[j].TermsSet[k].Parametrs[q];
                    }
                }
                double[] bf  = new double[V[j].all_conq_of_rules.Length];
                double[] bfw = new double[V[j].all_conq_of_rules.Length];
                for (int k = 0; k < V[j].all_conq_of_rules.Length; k++)
                {
                    bfw[k] = V[j].all_conq_of_rules[k] * w + c1 * rnd.NextDouble() * (Pi[j].all_conq_of_rules[k] - X[j].all_conq_of_rules[k]) +
                             c2 * rnd.NextDouble() * (Pg.all_conq_of_rules[k] - X[j].all_conq_of_rules[k]);
                    double sw = X[j].all_conq_of_rules[k] + bfw[k];
                    bf[k] = sw;
                }
                X[j].all_conq_of_rules = bf;
                V[j].all_conq_of_rules = bfw;
                double newError = 0;
                result.RulesDatabaseSet.Add(X[j]);
                int  temp_index = result.RulesDatabaseSet.Count - 1;
                bool success    = true;
                try
                {
                    newError = result.approxLearnSamples(result.RulesDatabaseSet[temp_index]);
                }
                catch (Exception)
                {
                    success = false;
                }
                result.RulesDatabaseSet.RemoveAt(temp_index);
                if (success && (newError < Errors[j]))
                {
                    OldErrors[j] = Errors[j];
                    Errors[j]    = newError;

                    Pi[j] = new KnowlegeBaseSARules(X[j]);
                }
                if (minError > newError)
                {
                    minError = newError;
                    Pg       = new KnowlegeBaseSARules(X[j]);
                }
            }
        }
Example #14
        public override SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf config)
        {
            start_add_rules = Approximate.RulesDatabaseSet.Count;
            SAFuzzySystem result = Approximate;

            if (result.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }



            OptimizeRullesShrinkConf Config = config as OptimizeRullesShrinkConf;

            count_Shrink_rule = Config.ORSCCountShrinkRules;

            int         count_of_swith_off    = count_Shrink_rule;
            List <byte> Varians_of_run_system = new List <byte>();

            for (int i = 0; i < Approximate.RulesDatabaseSet[0].RulesDatabase.Count; i++)
            {
                Varians_of_run_system.Add(1);
            }
            for (int i = 0; i < count_of_swith_off; i++)
            {
                Varians_of_run_system[i] = 0;
            }
            Generate_all_variant_in_pool(Varians_of_run_system);
            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                KnowlegeBaseSARules temp_rules = new  KnowlegeBaseSARules(result.RulesDatabaseSet[0], Pull_of_systems[i]);
                temp_rules.TrimTerms();

                result.RulesDatabaseSet.Add(temp_rules);
                result.UnlaidProtectionFix(result.RulesDatabaseSet[start_add_rules + i]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[start_add_rules + i]));
            }

            int best_index           = errors_of_systems.IndexOf(errors_of_systems.Min());
            KnowlegeBaseSARules best = result.RulesDatabaseSet[start_add_rules + best_index];

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(best);
            Console.WriteLine(Pull_of_systems.Count());



            result.RulesDatabaseSet[0].TermsSet.Trim();
//            result.UnlaidProtectionFix();
            return(result);
        }
Example #15
        public override SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf config)
        {
            theFuzzySystem = Approximate;
            if (theFuzzySystem.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }
            OptimizeTermShrinkHardCoreConf Config = config as OptimizeTermShrinkHardCoreConf;

            count_shrink = Config.OTSHCCountShrinkTerm;


            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = Approximate.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;

                if (count_terms_for_var >= count_shrink)
                {
                    int        shrinkcounter  = count_shrink;
                    List <int> Varians_of_cut = new List <int>();
                    for (int j = 0; j < count_terms_for_var; j++)
                    {
                        if (shrinkcounter > 0)
                        {
                            Varians_of_cut.Add(0);
                        }
                        else
                        {
                            Varians_of_cut.Add(1);
                        }
                        shrinkcounter--;
                    }
                    Generate_all_variant_in_pool(Varians_of_cut);


                    for (int j = 0; j < Pull_of_systems.Count; j++)
                    {
                        KnowlegeBaseSARules current = MakeCut(Approximate.RulesDatabaseSet[0], Pull_of_systems[j], i);
                        Systems_ready_to_test.Add(current);
                        errors_of_systems.Add(theFuzzySystem.approxLearnSamples(current));
                    }
                    Pull_of_systems.Clear();
                }
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            theFuzzySystem.RulesDatabaseSet[0] = Systems_ready_to_test[best_index];

            return(theFuzzySystem);
        }
Example #16
        public double DB(double[] shrapnel, int count_terms, int type, SAFuzzySystem Approx)
        {
            int k = 1;

            for (int l = 0; l < count_terms; l++)
            {
                for (int p = 0; p < type; p++)
                {
                    Approx.RulesDatabaseSet[0].TermsSet[l].Parametrs[p] = shrapnel[k];
                    k++;
                }
            }
            double MSE = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);

            return(MSE);
        }
Example #17
        /// <summary>
        /// Step 1
        /// </summary>
        /// <param name="Classifier"></param>
        /// <param name="config"></param>

        protected virtual void init(SAFuzzySystem Approx, ACOSearchConf config)
        {
            ACO_iterationCount       = config.ACOCountIteration;
            ACO_antCount             = config.ACOCountAnt;
            ACO_decisionArchiveCount = config.ACODescisionArchiveSize;
            ACO_q       = config.ACOQ;
            ACO_xi      = config.ACOXi;
            result      = Approx;
            colonyCount = result.RulesDatabaseSet[0].TermsSet.Count;
            colonyList  = new List <Colony>();
            newSolution = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);

            //    current_database = result.RulesDatabaseSet.Count -1;

            baseError = result.approxLearnSamples(newSolution);
        }
Example #18
        public override SAFuzzySystem Generate(SAFuzzySystem Approximate, IGeneratorConf config)
        {
            SAFuzzySystem result = Approximate;

            Systems_ready_to_test = new List <KnowlegeBaseSARules>();
            errors_of_systems     = new List <double>();

            InitEveryoneWithOptimal config1 = config as InitEveryoneWithOptimal;

            type_func        = config1.IEWOTypeFunc;
            count_slice_vars = config1.count_terms;


            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = count_slice_vars[i];
                Varians_of_run_system.Add(count_terms_for_var);
            }

            Varians_of_run_system.Sort();
            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Approximate.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(Approximate, type_func, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Approximate.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            for (int i = 0; i < count_slice_vars.Count(); i++)
            {
                count_slice_vars[i] = result.RulesDatabaseSet[0].TermsSet.Count(x => x.NumVar == i);
            }
            Console.WriteLine(Pull_of_systems.Count());
            result.RulesDatabaseSet[0].TermsSet.Trim();
            GC.Collect();

            return(result);
        }
Example #19
        private void preIterate(SAFuzzySystem result)
        {
            for (int i = 0; i < count_particle; i++)
            {
                KnowlegeBaseSARules temp_c_Rule = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
                X[i]         = temp_c_Rule;
                Errors[i]    = result.approxLearnSamples(result.RulesDatabaseSet[0]);
                OldErrors[i] = Errors[i];
                Pi[i]        = new KnowlegeBaseSARules(X[i]);
                V[i]         = new KnowlegeBaseSARules(X[i]);

                for (int j = 0; j < V[i].TermsSet.Count; j++)
                {
                    for (int k = 0; k < Term.CountParamsinSelectedTermType(V[i].TermsSet[j].TermFuncType); k++)
                    {
                        if (i == 0)
                        {
                            V[i].TermsSet[j].Parametrs[k] = 0;
                        }
                        else
                        {
                            V[i].TermsSet[j].Parametrs[k] = rnd.NextDouble() - 0.5;
                        }
                    }
                    double[] bf = new double[V[i].all_conq_of_rules.Length];
                    for (int k = 0; k < V[i].all_conq_of_rules.Length; k++)
                    {
                        if (i == 0)
                        {
                            bf[k] = V[i].all_conq_of_rules[k];
                        }
                        else
                        {
                            bf[k] = GaussRandom.Random_gaussian(rand, V[i].all_conq_of_rules[k], V[i].all_conq_of_rules[k] * 0.01);
                        }
                    }
                    V[i].all_conq_of_rules = bf;
                }
            }
            Pg        = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            minError  = Errors[0];
            best_new  = minError;
            best_old  = minError;
            worst_new = minError;
        }
Example #20
        protected KnowlegeBaseSARules[] sortSolution(KnowlegeBaseSARules[] Source)
        {
            KnowlegeBaseSARules temp = result.RulesDatabaseSet[0];

            double[] keys = new double[Source.Count()];

            KnowlegeBaseSARules[] tempSol = Source.Clone() as KnowlegeBaseSARules[];
            for (int i = 0; i < Source.Count(); i++)
            {
                result.RulesDatabaseSet[0] = Source[i];
                keys[i] = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            }

            Array.Sort(keys, tempSol);

            result.RulesDatabaseSet[0] = temp;
            return(tempSol);
        }
Example #21
        private void addApproxValue(SAFuzzySystem Approx)
        {
            double Value = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);

            ValueLGoodsRMSE.Add(Value);
            ValueLGoodsMSE.Add(Approx.RMSEtoMSEforLearn(Value));

            Value = Approx.approxTestSamples(Approx.RulesDatabaseSet[0]);
            ValueTGoodsRMSE.Add(Value);
            ValueTGoodsMSE.Add(Approx.RMSEtoMSEforTest(Value));

            Value = Approx.getComplexit();
            ValueComplexityFull.Add(Value);
            Value = Approx.getRulesCount();
            ValueComplexityRules.Add(Value);

            Value = Approx.getNormalIndex();
            ValueInterpretyNominal.Add(Value);
            Value = Approx.getIndexReal();
            ValueInterpretyReal.Add(Value);
        }
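Example #21 relies on approxLearnSamples/approxTestSamples returning RMSE and on the RMSEtoMSEforLearn/RMSEtoMSEforTest converters. A minimal reporting sketch using only those calls (DescribeAccuracy is a hypothetical name):

        // Sketch only: report learn/test accuracy of the knowledge base in slot 0,
        // converting the RMSE values returned by the approx* methods into MSE.
        public static string DescribeAccuracy(SAFuzzySystem system)
        {
            double learnRmse = system.approxLearnSamples(system.RulesDatabaseSet[0]);
            double testRmse  = system.approxTestSamples(system.RulesDatabaseSet[0]);
            return string.Format("Learn RMSE {0} (MSE {1}); Test RMSE {2} (MSE {3})",
                                 learnRmse, system.RMSEtoMSEforLearn(learnRmse),
                                 testRmse,  system.RMSEtoMSEforTest(testRmse));
        }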
Example #22
        private string ErrorInfoSA(IFuzzySystem FS)
        {
            SAFuzzySystem IFS = FS as SAFuzzySystem;

            if (IFS.RulesDatabaseSet.Count < 1)
            {
                return("Точность нечеткой системы недоступна");
            }


            approxLearnResult.Add(IFS.approxLearnSamples(IFS.RulesDatabaseSet[0]));
            approxTestResult.Add(IFS.approxTestSamples(IFS.RulesDatabaseSet[0]));

            approxLearnResultMSE.Add(IFS.RMSEtoMSEforLearn(approxLearnResult[approxLearnResult.Count - 1]));
            approxTestResultMSE.Add(IFS.RMSEtoMSEforTest(approxTestResult[approxTestResult.Count - 1]));

            approxLearnResultMSEdiv2.Add(IFS.RMSEtoMSEdiv2forLearn(approxLearnResult[approxLearnResult.Count - 1]));
            approxTestResultMSEdiv2.Add(IFS.RMSEtoMSEdiv2forTest(approxTestResult[approxTestResult.Count - 1]));


            return("Точностью на обучающей выборке(RSME)  " + approxLearnResult [approxLearnResult.Count - 1].ToString() + " , Точность на тестовой выборке(RMSE)  " + approxTestResult[approxTestResult.Count - 1].ToString() + " " + Environment.NewLine +
                   "Точностью на обучающей выборке(MSE)  " + approxLearnResultMSE[approxLearnResultMSE.Count - 1].ToString() + " , Точность на тестовой выборке(MSE)  " + approxTestResultMSE[approxTestResultMSE.Count - 1].ToString() + " " + Environment.NewLine +
                   "Точностью на обучающей выборке(MSE/2)  " + approxLearnResultMSEdiv2[approxLearnResultMSEdiv2.Count - 1].ToString() + " , Точность на тестовой выборке(MSE/2)  " + approxTestResultMSEdiv2[approxTestResultMSEdiv2.Count - 1].ToString() + " " + Environment.NewLine);
        }
Example #23
        private void weight()
        {
            double sum   = 0;
            double worst = mass[0];
            double best  = mass[0];

            int[] index = new Int32[MCount];
            int   count = 0;

            for (int i = 1; i < MCount; i++)
            {
                mass[i] = Errors[i];
                if (mass[i] > worst)
                {
                    worst = mass[i];
                }
                if (mass[i] < best)
                {
                    best = mass[i];
                }
            }
            for (int i = 0; i < MCount; i++)
            {
                if (mass[i] == worst)
                {
                    count++;
                    index[count - 1] = i;
                }
            }
            if (count > 1)
            {
                for (int i = 1; i < count; i++)
                {
                    ///X[index[i]] = ;
                    int f = index[i];
                    KnowlegeBaseSARules temp_c_Rule = new KnowlegeBaseSARules(theFuzzySystem.RulesDatabaseSet[0]);
                    temp_c_Rule = new KnowlegeBaseSARules(theFuzzySystem.RulesDatabaseSet[0]);
                    X[f]        = temp_c_Rule;
                    for (int j = 0; j < X[f].TermsSet.Count; j++)
                    {
                        for (int k = 0; k < X[f].TermsSet[j].Parametrs.Count(); k++)
                        {
                            X[f].TermsSet[j].Parametrs[k] = GaussRandom.Random_gaussian(rand, X[f].TermsSet[j].Parametrs[k], 0.1 * (X[f].TermsSet[j].Parametrs[k])) + theFuzzySystem.LearnSamplesSet.InputAttributes[X[f].TermsSet[j].NumVar].Scatter * 0.05;
                        }
                    }
                    theFuzzySystem.RulesDatabaseSet.Add(X[f]);
                    theFuzzySystem.UnlaidProtectionFix(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]);
                    Errors[f] = theFuzzySystem.RMSEtoMSEforLearn(theFuzzySystem.approxLearnSamples(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]));
                    X[f]      = theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1];
                    theFuzzySystem.RulesDatabaseSet.Remove(X[f]);
                    mass[f] = Errors[f];
                    if (mass[f] > worst)
                    {
                        i--;
                    }
                }
            }
            for (int i = 0; i < MCount; i++)
            {
                mass[i] = (mass[i] - worst) / (best - worst);
                sum     = sum + mass[i];
            }
            for (int i = 0; i < MCount; i++)
            {
                mass[i] = mass[i] / sum;
            }
        }
Example #24
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            Random        rand   = new Random(DateTime.Now.Millisecond);
            SAFuzzySystem result = Approx;

            BW.DoWork             += new DoWorkEventHandler(BW_DoWork);
            BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
            BW.RunWorkerAsync();



            MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
            string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";

            config.Init2(PathAlg, Approx.LearnSamplesSet.FileName);

            countFuzzySystem = config.Итераций_алгоритма;

            allowSqare  = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
            allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
            int seedPath = rand.Next();

            sizePercent      = config.Размер_шага_по_точности;
            sizeComplexity   = config.Размер_шага_по_сложности;
            sizeInteraply    = config.Размер_шага_по_интерпретируемости;
            diviver          = config.Уменьшать_шаги_в;
            trysBeforeDivide = config.Уменьшать_шаг_после;
            path             = config.path;
            dataSetName      = config.dataSetName;
            toMany           = config.Разрешено_похожих_систем;
            isPSO            = config.toBool(config.Использовать_АРЧ);
            //  isBFO = config.toBool(config.Использовать_АПБ);
            isANT            = config.toBool(config.Использовать_НАМК);
            isBEE            = config.toBool(config.Использовать_САПК);
            isES             = config.toBool(config.Использовать_ЕС);
            isGA             = config.toBool(config.Использовать_ГА);
            isTermShrink     = config.toBool(config.Удалять_термы);
            isRuleShrink     = config.toBool(config.Удалять_правила);
            isUnionTerm      = config.toBool(config.Объединять_термы);
            isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
            countANT         = config.Использовать_НАМК_раз_за_такт;
            // countBFO = config.Использовать_за_такт_АПБ_раз;
            countPSO         = config.Использовать_за_такт_АРЧ_раз;
            countBEE         = config.Использовать_САПК_раз_за_такт;
            countES          = config.Использовать_ЕС_раз_за_такт;
            countGA          = config.Использовать_ГА_раз_за_такт;
            typeComplexity   = (int)config.Критерий_сложности;
            typeInterpreting = (int)config.Критерий_интерпретируемости;

            List <IAbstractLearnAlgorithm> learnAlgorithms       = initAlgoritms();
            List <ILearnAlgorithmConf>     learnAlgorithmsconfig = initAlgoritmsConfigs(Approx.CountFeatures);
            List <double> ValueLPercent        = new List <double>();
            List <double> ValueTPercent        = new List <double>();
            List <double> ValueComplexity      = new List <double>();
            List <double> ValueInterability    = new List <double>();
            List <double> SummaryGoods         = new List <double>();
            List <KnowlegeBaseSARules> Storage = new List <KnowlegeBaseSARules>();
            List <int> candidate = new List <int>();

            KnowlegeBaseSARules Best = result.RulesDatabaseSet[0];


            baseLearn = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            ValueLPercent.Add(baseLearn);
            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
            baseComplexity = getComplexity(result);
            ValueComplexity.Add(baseComplexity);
            baseIntebility = getInterpreting(result, allowBorder, allowSqare);
            ValueInterability.Add(baseIntebility);


            Storage.Add(Best);
            int NSCount = 0;
            int deleted = 0;

            for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++)
            {
                bool mustToDivide = true;
                int  usedAlg      = 0;
                for (int tr = 0; tr < trysBeforeDivide; tr++)
                {
                    deleted = 0;

                    // Parallel.For(0, learnAlgorithms.Count(), i =>
                    usedAlg = 0;
                    for (int i = 0; i < learnAlgorithms.Count(); i++)
                    {
                        Console.WriteLine("F****d in Storage.Add(new a_Rules(Best))");
                        Storage.Add(new KnowlegeBaseSARules(Best));
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                        result.RulesDatabaseSet.Clear();
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                        result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                        usedAlg++;
                        bool before_VAlue = true;
                        try
                        {
                            learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                            GC.Collect();
                            before_VAlue = false;
                            ValueLPercent.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
                            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
                            ValueComplexity.Add(getComplexity(result));
                            ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                            double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                            Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];

                            if (double.IsNaN(temp))
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");



                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                                usedAlg--;
                            }
                        }
                        catch (Exception)
                        {
                            if (before_VAlue)
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                            }
                            else
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");

                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                            }
                        }

                        NSCount++;
                        Console.WriteLine("F****d in ResultShow");
                        ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() +
                                      "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                        //     i++;
                    }
                    //);
                    Console.WriteLine("F****d in deleted");

                    deleted  = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
                    usedAlg -= deleted;
                    Console.WriteLine("F****d in candidate");

                    candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);

                    if (candidate.Count() > 0)
                    {
                        mustToDivide = false; break;
                    }
                }

                if (mustToDivide)
                {
                    MessageBox.Show("Divided happend ");

                    sizePercent    = sizePercent / diviver;
                    sizeComplexity = sizeComplexity / diviver;
                    sizeInteraply  = sizeInteraply / diviver;
                    continue;
                }

                Console.WriteLine("F****d in SummaryGoods");

                SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);

                Console.WriteLine("F****d in indexofBest");
                int indexofBest = getNewBest(candidate, SummaryGoods);
                if (usedAsNext.ContainsKey(indexofBest))
                {
                    usedAsNext[indexofBest]++;
                }
                else
                {
                    usedAsNext.Add(indexofBest, 1);
                }

                Console.WriteLine("Best");
                Best = Storage[indexofBest];

                Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
                int toSaveCounter = NSCount - usedAlg;
                for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++)
                {
                    result.RulesDatabaseSet[0] = Storage[i];
                    saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
                    toSaveCounter++;
                }

                Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
                result.RulesDatabaseSet[0] = Best;

                Console.WriteLine("F****d in End");
                baseLearn      = result.approxLearnSamples(result.RulesDatabaseSet[0]);// ClassifyLearnSamples();
                baseComplexity = getComplexity(result);
                baseIntebility = getInterpreting(result, allowBorder, allowSqare);
                candidate.Clear();
                GC.Collect();
            }
            isEnd = true;
            Thread.Sleep(10000);
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Example #25
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            iskl_prizn      = "";
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;
            priznaki_usech  = ((Param)conf).Усечённые_признаки;
            iter_descrete   = ((Param)conf).Итерации_дискр_алг;

            int           iter = 0, iter2, i, j, count_terms, count_iter = 0;
            int           count_cons, count_best2 = 0, best_pred = 0;
            double        RMSE_best, cosFi, RMSE_best2;
            int           Nd, variables, k = 1, best2 = 0;
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            RMSE_best2 = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_all  = new double[iter];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            variables   = Approx.LearnSamplesSet.CountVars;
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            int[] terms = new int[variables];

            double[] X_best2 = new double[variables];
            double[,] d3      = new double[count_populate, variables];
            double[,] priznak = new double[count_populate, variables];
            for (i = 0; i < variables; i++)
            {
                priznak[0, i] = 1;
                X_best2[i]    = 1;
            }
            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }

            for (iter2 = 0; iter2 < iter_descrete; iter2++)
            {
                best2 = 0;
                //if (count_best2 < 10)
                //{
                if (iter == 0)
                {
                    for (k = 0; k < variables; k++)
                    {
                        d3[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[k].Min, Approx.LearnSamplesSet.InputAttributes[k].Max);
                    }
                }
                for (i = 0; i < variables; i++)
                {
                    for (j = 1; j < count_populate; j++)
                    {
generate:
                        d3[j, i]      = d3[j - 1, i] * randn();
                        priznak[j, i] = d3[j, i] * cosFi;

                        if ((priznak[j, i] < Approx.LearnSamplesSet.InputAttributes[i].Min) || (priznak[j, i] > Approx.LearnSamplesSet.InputAttributes[i].Max))
                        {
                            goto generate;
                        }
                        Random random = new Random();
                        if (random.NextDouble() < descret(priznak[j, i]))
                        {
                            priznak[j, i] = 1;
                        }
                        else
                        {
                            priznak[j, i] = 0;
                        }
                    }
                }


                for (j = 1; j < count_populate; j++)
                {
                    for (int h = 0; h < variables; h++)
                    {
                        if (priznak[j, h] == 1)
                        {
                            Approx.AcceptedFeatures[h] = true;
                        }
                        else
                        {
                            Approx.AcceptedFeatures[h] = false;
                        }
                    }
                    RMSE2[j] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    if (RMSE2[j] > RMSE_best2)
                    {
                        RMSE_best2 = RMSE2[j];
                        best2      = j;
                    }
                    for (int h = 0; h < variables; h++)
                    {
                        X_best2[h] = priznak[best2, h];
                    }
                }
                if (best_pred == best2)
                {
                    count_best2++;
                }
                else
                {
                    count_best2 = 0;
                }
                for (k = 0; k < variables; k++)
                {
                    priznak[0, k] = priznak[best2, k];
                }
                count_iter++;
                //}
            }

            for (k = 0; k < variables; k++)
            {
                if (priznak[best2, k] == 1)
                {
                    Approx.AcceptedFeatures[k] = true;
                }
                else
                {
                    Approx.AcceptedFeatures[k] = false;
                    iskl_prizn += (k + 1).ToString() + " ";
                }
            }

            return(result);
        }
Example #26
 public double CalcNewProfit(KnowlegeBaseSARules Solution)
 {
     Tempory.Add(Solution);
     theFuzzySystem.UnlaidProtectionFix(Solution);
     return(theFuzzySystem.approxLearnSamples(Solution));
 }
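Example #26 applies UnlaidProtectionFix before scoring a candidate, while Examples #13 and #27 guard approxLearnSamples with a try/catch. A combined evaluation sketch under the same assumed API (TryEvaluate is a hypothetical helper):

        // Sketch only: repair the candidate with UnlaidProtectionFix, then treat any
        // exception from approxLearnSamples as a failed evaluation (null result).
        public static double? TryEvaluate(SAFuzzySystem system, KnowlegeBaseSARules candidate)
        {
            system.UnlaidProtectionFix(candidate);
            try
            {
                return system.approxLearnSamples(candidate);
            }
            catch (Exception)
            {
                return null; // caller should discard this candidate
            }
        }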
Example #27
        public void oneIterate()
        {
            best_old  = best_new;
            best_new  = Errors[0];
            worst_new = Errors[0];

            for (int j = 0; j < count_particle; j++)
            {
                w = 1 / (1 + Math.Exp(-(Errors[j] - OldErrors[j]) / 0.01));
                for (int k = 0; k < X[j].TermsSet.Count; k++)
                {
                    for (int q = 0; q < X[j].TermsSet[k].CountParams; q++)
                    {
                        double bp = Pi[j].TermsSet[k].Parametrs[q];
                        V[j].TermsSet[k].Parametrs[q] = V[j].TermsSet[k].Parametrs[q] * w + c1 * rnd.NextDouble() * (bp - X[j].TermsSet[k].Parametrs[q]) +
                                                        c2 * rnd.NextDouble() * (Pg.TermsSet[k].Parametrs[q] - X[j].TermsSet[k].Parametrs[q]);
                        X[j].TermsSet[k].Parametrs[q] += V[j].TermsSet[k].Parametrs[q];
                    }
                }
                double[] bf  = new double[V[j].all_conq_of_rules.Length];
                double[] bfw = new double[V[j].all_conq_of_rules.Length];
                for (int k = 0; k < V[j].all_conq_of_rules.Length; k++)
                {
                    bfw[k] = V[j].all_conq_of_rules[k] * w + c1 * rnd.NextDouble() * (Pi[j].all_conq_of_rules[k] - X[j].all_conq_of_rules[k]) +
                             c2 * rnd.NextDouble() * (Pg.all_conq_of_rules[k] - X[j].all_conq_of_rules[k]);
                    double sw = X[j].all_conq_of_rules[k] + bfw[k];
                    bf[k] = sw;
                }
                X[j].all_conq_of_rules = bf;
                V[j].all_conq_of_rules = bfw;
                double newError = 0;
                bool   success  = true;
                try
                {
                    newError = theFuzzySystem.approxLearnSamples(X[j]);
                }
                catch (Exception)
                {
                    success = false;
                }
                if (success && (newError < Errors[j]))
                {
                    OldErrors[j] = Errors[j];
                    Errors[j]    = newError;

                    Pi[j] = new KnowlegeBaseSARules(X[j]);
                }
                if (minError > newError)
                {
                    minError = newError;
                    Pg       = new KnowlegeBaseSARules(X[j]);
                }
                if (best_new > Errors[j])
                {
                    best_new = Errors[j];
                }
                if (worst_new < Errors[j])
                {
                    worst_new = Errors[j];
                }
            }
        }
Example #28
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;

            int           iter = 0, i, j, count_terms, var = 0;
            int           count_cons;
            double        RMSE_best, cosFi, MSEbefore, MSEafter;
            int           Nd, variables, k = 1, best = 0;
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(0);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Approx.LearnSamplesSet.CountVars;
            int[] terms = new int[variables];

            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }
            RMSE_best = Approx.approxLearnSamples(0);
            for (int h = 0; h < count_terms; h++)
            {
                if (Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar == var)
                {
                    terms[var]++;
                }
                else
                {
                    terms[var + 1]++;
                    var++;
                }
            }
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Approx.approxLearnSamples(0);
                    RMSE_pred[1] = Approx.approxLearnSamples(0);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                            k++;
                        }
                    }
                }
                for (i = 1; i <= Nd; i++)
                {
                    if (exploration > iter)
                    {
                        for (j = 1; j < count_populate; j++)
                        {
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            //distance formula corrected

                            d[j, i] = d[j - 1, i] * randn();

                            //double sluch = randn();
                            //if (sluch < 0) d[j, i] = d[j - 1, i] * (-1) * Math.Pow(sluch, 2);
                            //else d[j, i] = d[j - 1, i] * Math.Pow(sluch, 2);
                            explosion[j, i] = d[j, i] * cosFi;
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        d[0, i] = d2(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);

                        for (j = 1; j < count_populate; j++)
                        {
                            if ((X_pred[1, i] - X_pred[0, i]) != 0)
                            {
                                direction[j, i] = m(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);
                            }
                            else
                            {
                                direction[j, i] = 1;
                            }
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            double random;
                            random = randn();
                            if (random < 0)
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi * (-1);
                            }
                            else
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi;
                            }
                            if (sum2 > 50)
                            {
                                sum2 = 0;
                            }

                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]);
                            }

                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                            d[j, i] = d[j - 1, i] / Math.Pow(Math.E, (double)iter / (double)reduce_koef);
                        }
                    }
                }

                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Approx.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j]     = Approx.approxLearnSamples(j);
                    RMSE_tst[j] = Approx.approxTestSamples(j);
                    if (RMSE[j] < RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }
                if ((iter != 0) && (iter % 1000 == 0))
                {
                    Adaptive_LSM LSM = new Adaptive_LSM();
                    MSEbefore = RMSE[best];
                    KnowlegeBaseSARules zeroSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    KnowlegeBaseSARules tempSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    Approx   = LSM.TuneUpFuzzySystem(Approx, new NullConfForAll()) as SAFuzzySystem;
                    MSEafter = Approx.approxLearnSamples(0);
                    if (MSEafter > MSEbefore)
                    {
                        Approx.RulesDatabaseSet[0] = tempSolution;
                        RMSE2[best] = MSEbefore;
                    }
                    else
                    {
                        RMSE2[best] = MSEafter;
                        for (int p = 0; p < count_cons; p++)
                        {
                            cons_best[p] = Approx.RulesDatabaseSet[0].all_conq_of_rules[p];
                        }
                    }
                    if (RMSE2[best] < RMSE_best)
                    {
                        RMSE_best = RMSE2[best];
                    }
                    Approx.RulesDatabaseSet[best] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0]    = new KnowlegeBaseSARules(zeroSolution);
                    for (int z = 0; z < count_populate; z++)
                    {
                        for (int p = 0; p < count_cons; p++)
                        {
                            Approx.RulesDatabaseSet[z].RulesDatabase[p].Cons_DoubleOutput = cons_best[p];
                        }
                    }
                }
                k = 1; // reset the flat parameter index before copying the best fragment back into database 0
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                        }
                        Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }

                if (iter % 10 == 0)
                {
                    if (RMSE_pred[1] > RMSE2[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE2[best];
                    }
                }
                else
                {
                    if (RMSE_pred[1] > RMSE[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE[best];
                    }
                }
            }

            return(result);
        }
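Two numeric schedules drive Example #28: the fragment spread cosFi = cos(2*pi/count_populate) and, once the exploration phase ends, the radius decay d = d_prev / e^(iter / reduce_koef). The sketch below only shows the shape of that schedule; countPopulate, reduceKoef and the iteration range are made-up values, not the algorithm's Param settings.

using System;

public static class ShrapnelRadiusDemo
{
    public static void Main()
    {
        int countPopulate = 40;                               // number of fragments (illustrative)
        double cosFi = Math.Cos(2 * Math.PI / countPopulate); // same spread factor as cosFi in Example #28
        double reduceKoef = 50.0;                             // decay constant (the "Уменьшающий_коэффициент" setting)
        double d0 = 1.0;                                      // initial search radius for one coordinate

        Console.WriteLine($"cosFi = {cosFi:F4}");
        for (int iter = 0; iter <= 200; iter += 50)
        {
            // Same shape as d[j, i] = d[j - 1, i] / Math.Pow(Math.E, (double)iter / (double)reduce_koef)
            double radius = d0 / Math.Exp(iter / reduceKoef);
            Console.WriteLine($"iter = {iter,3}: radius = {radius:F4}");
        }
    }
}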
Example #29
0
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf) // optimization with your algorithm is performed here
        {
            theFuzzySystem = Approx;

            iterMax = ((gsa_conf)conf).Количество_итераций;
            MCount  = ((gsa_conf)conf).Количество_частиц;
            G0      = ((gsa_conf)conf).Гравитационная_постоянная;
            alpha   = ((gsa_conf)conf).Коэффициент_уменьшения;
            epsilon = ((gsa_conf)conf).Малая_константа;
            X       = new KnowlegeBaseSARules[MCount];
            Errors  = new double[MCount];
            mass    = new double[MCount];
            double ErrorBest;
            KnowlegeBaseSARules BestSolution;
            double minValue;
            int    iminIndex;
            KnowlegeBaseSARules temp_c_Rule = new KnowlegeBaseSARules(theFuzzySystem.RulesDatabaseSet[0]);

            X[0]      = temp_c_Rule;
            Errors[0] = theFuzzySystem.RMSEtoMSEforLearn(theFuzzySystem.approxLearnSamples(X[0]));
            double ErrorZero = Errors[0];

            ErrorBest    = ErrorZero;
            BestSolution = temp_c_Rule;
            //number = X[0].TermsSet.Count * X[0].TermsSet[0].Parametrs.Count();

            R     = new double[MCount][, , ];
            speed = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];

            for (int i = 0; i < MCount; i++)
            {
                R[i] = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];
            }
            RR = new double[MCount, MCount];
            a  = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];

            for (int i = 1; i < MCount; i++)
            {
                temp_c_Rule = new KnowlegeBaseSARules(theFuzzySystem.RulesDatabaseSet[0]);
                X[i]        = temp_c_Rule;
                for (int j = 0; j < X[i].TermsSet.Count; j++)
                {
                    for (int k = 0; k < X[i].TermsSet[j].Parametrs.Count(); k++)
                    {
                        X[i].TermsSet[j].Parametrs[k] = GaussRandom.Random_gaussian(rand, X[i].TermsSet[j].Parametrs[k], 0.1 * (X[i].TermsSet[j].Parametrs[k])) + theFuzzySystem.LearnSamplesSet.InputAttributes[X[i].TermsSet[j].NumVar].Scatter * 0.05;
                    }
                }
                theFuzzySystem.RulesDatabaseSet.Add(X[i]);
                theFuzzySystem.UnlaidProtectionFix(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]);
                Errors[i] = theFuzzySystem.RMSEtoMSEforLearn(theFuzzySystem.approxLearnSamples(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]));

                X[i] = theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1];

                theFuzzySystem.RulesDatabaseSet.Remove(X[i]);
            }

            for (int iter = 0; iter < iterMax; iter++)
            {
                //g(t) = G(0)*e^(-a*t/T);
                G = G0 * Math.Pow(Math.E, ((-1) * alpha * iter / iterMax));

                /*  if (iter >= 100) {
                 *    Console.WriteLine("Wait");
                 * }*/
                algorithm();
                for (int r = 0; r < MCount; r++)
                {
                    theFuzzySystem.RulesDatabaseSet.Add(X[r]);
                    theFuzzySystem.UnlaidProtectionFix(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]);
                    Errors[r] = theFuzzySystem.RMSEtoMSEforLearn(theFuzzySystem.approxLearnSamples(theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1]));
                    X[r]      = theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1];
                    theFuzzySystem.RulesDatabaseSet.Remove(X[r]);
                }
                minValue  = Errors.Min();
                iminIndex = Errors.ToList().IndexOf(minValue);
                if (minValue < ErrorBest)
                {
                    ErrorBest    = minValue;
                    BestSolution = new KnowlegeBaseSARules(X[iminIndex]);
                }
            }

            if (ErrorBest < ErrorZero)
            {
                theFuzzySystem.RulesDatabaseSet[0] = BestSolution;
            }

            return(theFuzzySystem);
        }
        protected override void make_Log(Log_line EventCall, SAFuzzySystem FS = null, string name_Alg = "", DateTime TimerValue = new DateTime(), TimeSpan TimerSpan = new TimeSpan())
        {
            switch (EventCall)
            {
            case Log_line.Start:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начало построения системы" + Environment.NewLine;
                break;
            }

            case Log_line.StartGenerate:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начата генерация системы" + Environment.NewLine;

                break;
            }

            case Log_line.StartOptimaze:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Начата оптимизация системы" + Environment.NewLine;
                break;
            }


            case Log_line.PreGenerate_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Генерация алгоритмом " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PostGenerate_log:
            {
                double LearnResult = FS.approxLearnSamples();
                double TestResult  = FS.approxTestSamples();

                double LearnResultMSE = FS.RMSEtoMSEforLearn(LearnResult);
                double TestResultMSE  = FS.RMSEtoMSEforTest(TestResult);

                double LearnResultMSEdiv2 = FS.RMSEtoMSEdiv2forLearn(LearnResult);
                double TestResultMSEdiv2  = FS.RMSEtoMSEdiv2forTest(TestResult);


                LOG += "(" + DateTime.Now.ToString() + ")" + " Сгенерирована система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(RSME) " + LearnResult.ToString() + ", Точность на тестовой выборке(RMSE) " + TestResult.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE) " + LearnResultMSE.ToString() + ", Точность на тестовой выборке(MSE) " + TestResultMSE.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE/2) " + LearnResultMSEdiv2.ToString() + ", Точность на тестовой выборке(MSE/2) " + TestResultMSEdiv2.ToString() + Environment.NewLine;

                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PreOptimaze_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Оптимизация алгоритмом " + name_Alg.ToString() + Environment.NewLine;

                break;
            }

            case Log_line.PostOptimaze_log:
            {
                double LearnResult = FS.approxLearnSamples();
                double TestResult  = FS.approxTestSamples();


                double LearnResultMSE = FS.RMSEtoMSEforLearn(LearnResult);
                double TestResultMSE  = FS.RMSEtoMSEforTest(TestResult);

                double LearnResultMSEdiv2 = FS.RMSEtoMSEdiv2forLearn(LearnResult);
                double TestResultMSEdiv2  = FS.RMSEtoMSEdiv2forTest(TestResult);

                LOG += "(" + DateTime.Now.ToString() + ")" + " оптимизированная система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(RMSE) " + LearnResult.ToString() + ", Точность на тестовой выборке(RMSE) " + TestResult.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE) " + LearnResultMSE.ToString() + ", Точность на тестовой выборке(MSE) " + TestResultMSE.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE/2) " + LearnResultMSEdiv2.ToString() + ", Точность на тестовой выборке(MSE/2) " + TestResultMSEdiv2.ToString() + Environment.NewLine;


                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;

                break;
            }


            case Log_line.EndCircle:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения системы" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            case Log_line.End:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения всех систем" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            default: { LOG += "Invalid log call" + Environment.NewLine; break; }
            }
        }
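Example #29's gravitational search anneals its gravitational constant as G(t) = G0 * e^(-alpha * t / T), which is the line G = G0 * Math.Pow(Math.E, ((-1) * alpha * iter / iterMax)) in the tuning method above. The short sketch below prints that schedule; G0, alpha and iterMax here are illustrative values, not the gsa_conf settings.

using System;

public static class GsaScheduleDemo
{
    public static void Main()
    {
        double G0 = 100.0;   // initial gravitational constant (illustrative)
        double alpha = 20.0; // decay coefficient (illustrative)
        int iterMax = 100;   // iteration budget (illustrative)

        for (int iter = 0; iter <= iterMax; iter += 25)
        {
            // G(t) = G0 * e^(-alpha * t / T)
            double G = G0 * Math.Exp(-alpha * iter / (double)iterMax);
            Console.WriteLine($"iter = {iter,3}: G = {G:F4}");
        }
    }
}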