Example #1
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            result = Approx;
            List <int[]> groups = new List <int[]>();

            Init(conf);
            SetPopulation();
            Population = ListSingletonApproximateTool.SortRules(Population, result);
            NS         = new int[Nsr];
            NS         = SetNS(Population, Nsr);
            groups     = GroupStream();
            double BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
            double BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
            int    BestIter     = 0;

            /*StringBuilder sb = new StringBuilder();
             * sb.AppendLine("sep=.");*/
            for (int i = 1; i <= MaxIter; i++)
            {
                Console.Clear();
                Console.WriteLine((double)i * 100 / MaxIter + "%");
                Population = SetNextPosition(groups, Population);
                Population = Replacement(groups, Population);
                if (flag)
                {
                    Evaporation(groups.Last()); // evaporation step
                }
                if (BestMSETest > result.RMSEtoMSEforTest(result.approxTestSamples(Population[0])))
                {
                    BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
                    BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
                    BestIter     = i;
                }
                //sb.AppendLine((result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]))).ToString() + "." + (result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]))).ToString());
            }

            /*FileStream file1 = new FileStream("F:\\Table.scv", FileMode.Create);
             * StreamWriter writer = new StreamWriter(file1);
             * writer.Write(sb);
             * writer.Close();
             * file1.Close();*/
            Console.WriteLine(ToString(true));
            Console.WriteLine("Итер - " + BestIter + " MSET - " + BestMSETest + " MSEL - " + BestMSELearn); // best iteration ("Итер"), test MSE, learn MSE
            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
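
Example #1 reduces to one pattern: after each iteration the sorted population's first rule base is evaluated on the learn and test samples, and the smallest test MSE seen so far (plus the iteration it occurred on) is remembered. Below is a minimal sketch of that bookkeeping, assuming only the SAFuzzySystem / KnowlegeBaseSARules calls that appear in the example above; the updateStep delegate is a hypothetical stand-in for the algorithm-specific move (SetNextPosition, Replacement, Evaporation).

        // Sketch: keep the best test-MSE candidate across iterations.
        // Requires: using System; using System.Collections.Generic;
        // updateStep is a hypothetical placeholder for one optimization step.
        static KnowlegeBaseSARules TrackBest(SAFuzzySystem fs,
                                             List<KnowlegeBaseSARules> population,
                                             int maxIter,
                                             Func<List<KnowlegeBaseSARules>, List<KnowlegeBaseSARules>> updateStep)
        {
            KnowlegeBaseSARules best = population[0];
            double bestMseTest = fs.RMSEtoMSEforTest(fs.approxTestSamples(best));
            for (int i = 1; i <= maxIter; i++)
            {
                population = updateStep(population);      // algorithm-specific move
                double mseTest = fs.RMSEtoMSEforTest(fs.approxTestSamples(population[0]));
                if (mseTest < bestMseTest)                // smaller MSE is better
                {
                    bestMseTest = mseTest;
                    best        = population[0];
                }
            }
            return best;                                  // caller stores it in RulesDatabaseSet[0]
        }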
Example #2
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            result = Approx;
            Init(conf);
            HeadLeader       = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVector   = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVectorLL = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            VelocityVectorHL = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            for (int i = 0; i < VelocityVector.TermsSet.Count; i++)
            {
                for (int j = 0; j < VelocityVector.TermsSet[i].Parametrs.Length; j++)
                {
                    VelocityVector.TermsSet[i].Parametrs[j]   = 0;
                    VelocityVectorLL.TermsSet[i].Parametrs[j] = 0;
                    VelocityVectorHL.TermsSet[i].Parametrs[j] = 0;
                }
            }
            SetPopulation();
            ParticlesBest = new Dictionary <KnowlegeBaseSARules, KnowlegeBaseSARules>();
            foreach (var Particle in Population)
            {
                ParticlesBest.Add(Particle, Universal);
            }
            LocalLeaders = new KnowlegeBaseSARules[numberOfLocalLeaders];
            Console.WriteLine(LocalLeaders.Length);
            ExplorerParticles = new KnowlegeBaseSARules[numberOfAllParts - numberOfAimlessParts - numberOfLocalLeaders - 1];
            Console.WriteLine(ExplorerParticles.Length);
            AimlessParticles = new KnowlegeBaseSARules[numberOfAimlessParts];
            Console.WriteLine(AimlessParticles.Length);
            while (iter < MaxIter)
            {
                Population = ListSingletonApproximateTool.SortRules(Population, result);
                SetRoles();
                ChangeExplorersPositions();
                ChangeAimlessPositions();
                DiscardRoles();
                iter++;
                Console.WriteLine("Iteration: " + iter.ToString());
                Console.WriteLine(result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0])));
                Console.WriteLine(result.RMSEtoMSEforTest(result.approxTestSamples(Population[0])));
                Console.WriteLine(result.approxLearnSamples(Population[numberOfLocalLeaders + 1]));
            }

            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
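
The nested loops at the start of Example #2 only zero every term parameter of the copied velocity rule bases. Factoring that into a helper keeps the setup readable; the sketch below assumes the same TermsSet[i].Parametrs layout used in the example.

        // Zero all term parameters of a rule base (the velocity vectors in Example #2).
        static void ZeroTermParameters(KnowlegeBaseSARules kb)
        {
            for (int i = 0; i < kb.TermsSet.Count; i++)
            {
                for (int j = 0; j < kb.TermsSet[i].Parametrs.Length; j++)
                {
                    kb.TermsSet[i].Parametrs[j] = 0;
                }
            }
        }

        // Usage: ZeroTermParameters(VelocityVector); ZeroTermParameters(VelocityVectorLL); ZeroTermParameters(VelocityVectorHL);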
Example #3
        private void addApproxValue(SAFuzzySystem Approx)
        {
            double Value = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);

            ValueLGoodsRMSE.Add(Value);
            ValueLGoodsMSE.Add(Approx.RMSEtoMSEforLearn(Value));

            Value = Approx.approxTestSamples(Approx.RulesDatabaseSet[0]);
            ValueTGoodsRMSE.Add(Value);
            ValueTGoodsMSE.Add(Approx.RMSEtoMSEforTest(Value));

            Value = Approx.getComplexit();
            ValueComplexityFull.Add(Value);
            Value = Approx.getRulesCount();
            ValueComplexityRules.Add(Value);

            Value = Approx.getNormalIndex();
            ValueInterpretyNominal.Add(Value);
            Value = Approx.getIndexReal();
            ValueInterpretyReal.Add(Value);
        }
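
Example #3 pushes eight related measurements into parallel lists. An alternative is to capture one snapshot object per call so the values cannot drift out of sync; this sketch assumes only the SAFuzzySystem getters already used above.

        // One snapshot of the metrics gathered in Example #3.
        sealed class ApproxSnapshot
        {
            public double LearnRMSE, LearnMSE, TestRMSE, TestMSE;
            public double Complexity, RulesCount, NominalInterpretability, RealInterpretability;

            public static ApproxSnapshot From(SAFuzzySystem approx)
            {
                double learn = approx.approxLearnSamples(approx.RulesDatabaseSet[0]);
                double test  = approx.approxTestSamples(approx.RulesDatabaseSet[0]);
                return new ApproxSnapshot
                {
                    LearnRMSE               = learn,
                    LearnMSE                = approx.RMSEtoMSEforLearn(learn),
                    TestRMSE                = test,
                    TestMSE                 = approx.RMSEtoMSEforTest(test),
                    Complexity              = approx.getComplexit(),
                    RulesCount              = approx.getRulesCount(),
                    NominalInterpretability = approx.getNormalIndex(),
                    RealInterpretability    = approx.getIndexReal()
                };
            }
        }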
Example #4
        private string ErrorInfoSA(IFuzzySystem FS)
        {
            SAFuzzySystem IFS = FS as SAFuzzySystem;

            if (IFS.RulesDatabaseSet.Count < 1)
            {
                return("Точность нечеткой системы недоступна"); // "fuzzy system accuracy is not available"
            }


            approxLearnResult.Add(IFS.approxLearnSamples(IFS.RulesDatabaseSet[0]));
            approxTestResult.Add(IFS.approxTestSamples(IFS.RulesDatabaseSet[0]));

            approxLearnResultMSE.Add(IFS.RMSEtoMSEforLearn(approxLearnResult[approxLearnResult.Count - 1]));
            approxTestResultMSE.Add(IFS.RMSEtoMSEforTest(approxTestResult[approxTestResult.Count - 1]));

            approxLearnResultMSEdiv2.Add(IFS.RMSEtoMSEdiv2forLearn(approxLearnResult[approxLearnResult.Count - 1]));
            approxTestResultMSEdiv2.Add(IFS.RMSEtoMSEdiv2forTest(approxTestResult[approxTestResult.Count - 1]));


            return("Точностью на обучающей выборке(RMSE)  " + approxLearnResult[approxLearnResult.Count - 1].ToString() + " , Точность на тестовой выборке(RMSE)  " + approxTestResult[approxTestResult.Count - 1].ToString() + " " + Environment.NewLine +
                   "Точностью на обучающей выборке(MSE)  " + approxLearnResultMSE[approxLearnResultMSE.Count - 1].ToString() + " , Точность на тестовой выборке(MSE)  " + approxTestResultMSE[approxTestResultMSE.Count - 1].ToString() + " " + Environment.NewLine +
                   "Точностью на обучающей выборке(MSE/2)  " + approxLearnResultMSEdiv2[approxLearnResultMSEdiv2.Count - 1].ToString() + " , Точность на тестовой выборке(MSE/2)  " + approxTestResultMSEdiv2[approxTestResultMSEdiv2.Count - 1].ToString() + " " + Environment.NewLine);
        }
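
The concatenations in Example #4 are easier to audit when the three error pairs come from one formatter. A sketch, assuming the RMSE-to-MSE conversion methods shown above (labels translated to English):

        // Report train/test RMSE, MSE and MSE/2 for the first rule base.
        static string FormatErrors(SAFuzzySystem fs)
        {
            double learn = fs.approxLearnSamples(fs.RulesDatabaseSet[0]);
            double test  = fs.approxTestSamples(fs.RulesDatabaseSet[0]);
            return $"Train RMSE {learn}, test RMSE {test}" + Environment.NewLine +
                   $"Train MSE {fs.RMSEtoMSEforLearn(learn)}, test MSE {fs.RMSEtoMSEforTest(test)}" + Environment.NewLine +
                   $"Train MSE/2 {fs.RMSEtoMSEdiv2forLearn(learn)}, test MSE/2 {fs.RMSEtoMSEdiv2forTest(test)}" + Environment.NewLine;
        }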
Example #5
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;

            int           iter = 0, i, j, count_terms, var = 0;
            int           count_cons;
            double        RMSE_best, cosFi, MSEbefore, MSEafter;
            int           Nd, variables, k = 1, best = 0;
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(0);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Approx.LearnSamplesSet.CountVars;
            int[] terms = new int[variables];

            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }
            RMSE_best = Approx.approxLearnSamples(0);
            for (int h = 0; h < count_terms; h++)
            {
                if (Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar == var)
                {
                    terms[var]++;
                }
                else
                {
                    terms[var + 1]++;
                    var++;
                }
            }
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Approx.approxLearnSamples(0);
                    RMSE_pred[1] = Approx.approxLearnSamples(0);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                            k++;
                        }
                    }
                }
                for (i = 1; i <= Nd; i++)
                {
                    if (exploration > iter)
                    {
                        for (j = 1; j < count_populate; j++)
                        {
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            // distance formula (corrected)

                            d[j, i] = d[j - 1, i] * randn();

                            //double sluch = randn();
                            //if (sluch < 0) d[j, i] = d[j - 1, i] * (-1) * Math.Pow(sluch, 2);
                            //else d[j, i] = d[j - 1, i] * Math.Pow(sluch, 2);
                            explosion[j, i] = d[j, i] * cosFi;
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        d[0, i] = d2(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);

                        for (j = 1; j < count_populate; j++)
                        {
                            if ((X_pred[1, i] - X_pred[0, i]) != 0)
                            {
                                direction[j, i] = m(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);
                            }
                            else
                            {
                                direction[j, i] = 1;
                            }
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            double random;
                            random = randn();
                            if (random < 0)
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi * (-1);
                            }
                            else
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi;
                            }
                            if (sum2 > 50)
                            {
                                sum2 = 0;
                            }

                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]);
                            }

                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                            d[j, i] = d[j - 1, i] / Math.Pow(Math.E, (double)iter / (double)reduce_koef);
                        }
                    }
                }

                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Approx.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j]     = Approx.approxLearnSamples(j);
                    RMSE_tst[j] = Approx.approxTestSamples(j);
                    if (RMSE[j] < RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }
                if ((iter != 0) && (iter % 1000 == 0))
                {
                    Adaptive_LSM LSM = new Adaptive_LSM();
                    MSEbefore = RMSE[best];
                    KnowlegeBaseSARules zeroSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    KnowlegeBaseSARules tempSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    Approx   = LSM.TuneUpFuzzySystem(Approx, new NullConfForAll()) as SAFuzzySystem;
                    MSEafter = Approx.approxLearnSamples(0);
                    if (MSEafter > MSEbefore)
                    {
                        Approx.RulesDatabaseSet[0] = tempSolution;
                        RMSE2[best] = MSEbefore;
                    }
                    else
                    {
                        RMSE2[best] = MSEafter;
                        for (int p = 0; p < count_cons; p++)
                        {
                            cons_best[p] = Approx.RulesDatabaseSet[0].all_conq_of_rules[p];
                        }
                    }
                    if (RMSE2[best] < RMSE_best)
                    {
                        RMSE_best = RMSE2[best];
                    }
                    Approx.RulesDatabaseSet[best] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0]    = new KnowlegeBaseSARules(zeroSolution);
                    for (int z = 0; z < count_populate; z++)
                    {
                        for (int p = 0; p < count_cons; p++)
                        {
                            Approx.RulesDatabaseSet[z].RulesDatabase[p].Cons_DoubleOutput = cons_best[p];
                        }
                    }
                }
                k = 1;
                if (iter % 100 == 0)
                {
                    k = 1;
                }
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                        }
                        Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }

                if (iter % 10 == 0)
                {
                    if (RMSE_pred[1] > RMSE2[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE2[best];
                    }
                }
                else
                {
                    if (RMSE_pred[1] > RMSE[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE[best];
                    }
                }
            }

            return(result);
        }
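
Examples #5 and #10 flatten every term parameter into a 1-based vector (index k runs over term h and parameter p, i.e. k = h*type + p + 1) and copy the vector back into the rule bases after each explosion. That round trip is the part worth isolating; a sketch assuming the TermsSet/Parametrs layout used above:

        // Copy term parameters into a 1-based flat vector and back
        // (the "shrapnel" encoding of Examples #5 and #10). type = parameters per term.
        static void TermsToVector(KnowlegeBaseSARules kb, int type, double[] vector)
        {
            int k = 1;
            for (int h = 0; h < kb.TermsSet.Count; h++)
                for (int p = 0; p < type; p++)
                    vector[k++] = kb.TermsSet[h].Parametrs[p];
        }

        static void VectorToTerms(double[] vector, int type, KnowlegeBaseSARules kb)
        {
            int k = 1;
            for (int h = 0; h < kb.TermsSet.Count; h++)
                for (int p = 0; p < type; p++)
                    kb.TermsSet[h].Parametrs[p] = vector[k++];
        }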
Example #6
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            Random        rand   = new Random(DateTime.Now.Millisecond);
            SAFuzzySystem result = Approx;

            BW.DoWork             += new DoWorkEventHandler(BW_DoWork);
            BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
            BW.RunWorkerAsync();



            MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
            string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";

            config.Init2(PathAlg, Approx.LearnSamplesSet.FileName);

            countFuzzySystem = config.Итераций_алгоритма;

            allowSqare  = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
            allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
            int seedPath = rand.Next();

            sizePercent      = config.Размер_шага_по_точности;
            sizeComplexity   = config.Размер_шага_по_сложности;
            sizeInteraply    = config.Размер_шага_по_интерпретируемости;
            diviver          = config.Уменьшать_шаги_в;
            trysBeforeDivide = config.Уменьшать_шаг_после;
            path             = config.path;
            dataSetName      = config.dataSetName;
            toMany           = config.Разрешено_похожих_систем;
            isPSO            = config.toBool(config.Использовать_АРЧ);
            //  isBFO = config.toBool(config.Использовать_АПБ);
            isANT            = config.toBool(config.Использовать_НАМК);
            isBEE            = config.toBool(config.Использовать_САПК);
            isES             = config.toBool(config.Использовать_ЕС);
            isGA             = config.toBool(config.Использовать_ГА);
            isTermShrink     = config.toBool(config.Удалять_термы);
            isRuleShrink     = config.toBool(config.Удалять_правила);
            isUnionTerm      = config.toBool(config.Объединять_термы);
            isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
            countANT         = config.Использовать_НАМК_раз_за_такт;
            // countBFO = config.Использовать_за_такт_АПБ_раз;
            countPSO         = config.Использовать_за_такт_АРЧ_раз;
            countBEE         = config.Использовать_САПК_раз_за_такт;
            countES          = config.Использовать_ЕС_раз_за_такт;
            countGA          = config.Использовать_ГА_раз_за_такт;
            typeComplexity   = (int)config.Критерий_сложности;
            typeInterpreting = (int)config.Критерий_интерпретируемости;

            List <IAbstractLearnAlgorithm> learnAlgorithms       = initAlgoritms();
            List <ILearnAlgorithmConf>     learnAlgorithmsconfig = initAlgoritmsConfigs(Approx.CountFeatures);
            List <double> ValueLPercent        = new List <double>();
            List <double> ValueTPercent        = new List <double>();
            List <double> ValueComplexity      = new List <double>();
            List <double> ValueInterability    = new List <double>();
            List <double> SummaryGoods         = new List <double>();
            List <KnowlegeBaseSARules> Storage = new List <KnowlegeBaseSARules>();
            List <int> candidate = new List <int>();

            KnowlegeBaseSARules Best = result.RulesDatabaseSet[0];


            baseLearn = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            ValueLPercent.Add(baseLearn);
            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
            baseComplexity = getComplexity(result);
            ValueComplexity.Add(baseComplexity);
            baseIntebility = getInterpreting(result, allowBorder, allowSqare);
            ValueInterability.Add(baseIntebility);


            Storage.Add(Best);
            int NSCount = 0;
            int deleted = 0;

            for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++)
            {
                bool mustToDivide = true;
                int  usedAlg      = 0;
                for (int tr = 0; tr < trysBeforeDivide; tr++)
                {
                    deleted = 0;

                    // Parallel.For(0, learnAlgorithms.Count(), i =>
                    usedAlg = 0;
                    for (int i = 0; i < learnAlgorithms.Count(); i++)
                    {
                        Console.WriteLine("F****d in Storage.Add(new a_Rules(Best))");
                        Storage.Add(new KnowlegeBaseSARules(Best));
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                        result.RulesDatabaseSet.Clear();
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                        result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                        usedAlg++;
                        bool before_VAlue = true;
                        try
                        {
                            learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                            GC.Collect();
                            before_VAlue = false;
                            ValueLPercent.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
                            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
                            ValueComplexity.Add(getComplexity(result));
                            ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                            double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                            Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];

                            if (double.IsNaN(temp))
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");



                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                                usedAlg--;
                            }
                        }
                        catch (Exception)
                        {
                            if (before_VAlue)
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                            }
                            else
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");

                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                            }
                        }

                        NSCount++;
                        Console.WriteLine("F****d in ResultShow");
                        ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() +
                                      "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                        //     i++;
                    }
                    //);
                    Console.WriteLine("F****d in deleted");

                    deleted  = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
                    usedAlg -= deleted;
                    Console.WriteLine("F****d in candidate");

                    candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);

                    if (candidate.Count() > 0)
                    {
                        mustToDivide = false; break;
                    }
                }

                if (mustToDivide)
                {
                    MessageBox.Show("Divided happened");

                    sizePercent    = sizePercent / diviver;
                    sizeComplexity = sizeComplexity / diviver;
                    sizeInteraply  = sizeInteraply / diviver;
                    continue;
                }

                Console.WriteLine("F****d in SummaryGoods");

                SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);

                Console.WriteLine("F****d in indexofBest");
                int indexofBest = getNewBest(candidate, SummaryGoods);
                if (usedAsNext.ContainsKey(indexofBest))
                {
                    usedAsNext[indexofBest]++;
                }
                else
                {
                    usedAsNext.Add(indexofBest, 1);
                }

                Console.WriteLine("Best");
                Best = Storage[indexofBest];

                Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
                int toSaveCounter = NSCount - usedAlg;
                for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++)
                {
                    result.RulesDatabaseSet[0] = Storage[i];
                    saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
                    toSaveCounter++;
                }

                Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
                result.RulesDatabaseSet[0] = Best;

                Console.WriteLine("F****d in End");
                baseLearn      = result.approxLearnSamples(result.RulesDatabaseSet[0]);// ClassifyLearnSamples();
                baseComplexity = getComplexity(result);
                baseIntebility = getInterpreting(result, allowBorder, allowSqare);
                candidate.Clear();
                GC.Collect();
            }
            isEnd = true;
            Thread.Sleep(10000);
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
        protected override void make_Log(Log_line EventCall, SAFuzzySystem FS = null, string name_Alg = "", DateTime TimerValue = new DateTime(), TimeSpan TimerSpan = new TimeSpan())
        {
            switch (EventCall)
            {
            case Log_line.Start:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начало построения системы" + Environment.NewLine;
                break;
            }

            case Log_line.StartGenerate:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начата генерация системы" + Environment.NewLine;

                break;
            }

            case Log_line.StartOptimaze:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Начата оптимизация системы" + Environment.NewLine;
                break;
            }


            case Log_line.PreGenerate_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Генерация алгоритмом " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PostGenerate_log:
            {
                double LearnResult = FS.approxLearnSamples();
                double TestResult  = FS.approxTestSamples();

                double LearnResultMSE = FS.RMSEtoMSEforLearn(LearnResult);
                double TestResultMSE  = FS.RMSEtoMSEforTest(TestResult);

                double LearnResultMSEdiv2 = FS.RMSEtoMSEdiv2forLearn(LearnResult);
                double TestResultMSEdiv2  = FS.RMSEtoMSEdiv2forTest(TestResult);


                LOG += "(" + DateTime.Now.ToString() + ")" + " Сгенерирована система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(RMSE) " + LearnResult.ToString() + ", Точность на тестовой выборке(RMSE) " + TestResult.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE) " + LearnResultMSE.ToString() + ", Точность на тестовой выборке(MSE) " + TestResultMSE.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE/2) " + LearnResultMSEdiv2.ToString() + ", Точность на тестовой выборке(MSE/2) " + TestResultMSEdiv2.ToString() + Environment.NewLine;

                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PreOptimaze_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Оптимизация алгоритмом " + name_Alg.ToString() + Environment.NewLine;

                break;
            }

            case Log_line.PostOptimaze_log:
            {
                double LearnResult = FS.approxLearnSamples();
                double TestResult  = FS.approxTestSamples();


                double LearnResultMSE = FS.RMSEtoMSEforLearn(LearnResult);
                double TestResultMSE  = FS.RMSEtoMSEforTest(TestResult);

                double LearnResultMSEdiv2 = FS.RMSEtoMSEdiv2forLearn(LearnResult);
                double TestResultMSEdiv2  = FS.RMSEtoMSEdiv2forTest(TestResult);

                LOG += "(" + DateTime.Now.ToString() + ")" + " оптимизированная система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(RMSE) " + LearnResult.ToString() + ", Точность на тестовой выборке(RMSE) " + TestResult.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE) " + LearnResultMSE.ToString() + ", Точность на тестовой выборке(MSE) " + TestResultMSE.ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке(MSE/2) " + LearnResultMSEdiv2.ToString() + ", Точность на тестовой выборке(MSE/2) " + TestResultMSEdiv2.ToString() + Environment.NewLine;


                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;

                break;
            }


            case Log_line.EndCircle:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения системы" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            case Log_line.End:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения всех систем" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            default: { LOG += "Не верный вызов" + Environment.NewLine; break; }
            }
        }
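
The inner loop of Example #6 appends four metrics and a stored rule base after every tuning algorithm, then removes them all again if the algorithm throws or yields NaN. That roll-back disappears when the metrics are validated before anything is appended; below is a sketch of the guard, assuming the IAbstractLearnAlgorithm interface used above (the complexity and interpretability calculations are passed in as delegates because their exact signatures are not shown here).

        // Run one tuning algorithm and record its metrics only if they are finite.
        // Requires: using System; using System.Collections.Generic;
        static bool TryRunAndRecord(IAbstractLearnAlgorithm alg, ILearnAlgorithmConf conf,
                                    SAFuzzySystem fs,
                                    List<double> learnErrors, List<double> complexities, List<double> interpretabilities,
                                    Func<SAFuzzySystem, double> complexityOf,
                                    Func<SAFuzzySystem, double> interpretabilityOf)
        {
            try
            {
                alg.TuneUpFuzzySystem(fs, conf);
                double l = fs.approxLearnSamples(fs.RulesDatabaseSet[0]);
                double c = complexityOf(fs);
                double i = interpretabilityOf(fs);
                if (double.IsNaN(l + c + i))
                {
                    return false;                // reject NaN results, as Example #6 does
                }
                learnErrors.Add(l);
                complexities.Add(c);
                interpretabilities.Add(i);
                return true;                     // nothing was appended on the failure paths above
            }
            catch (Exception)
            {
                return false;                    // algorithm failed before producing a usable result
            }
        }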
Example #8
 public SingletonElementofStorage(SAFuzzySystem Checker, KnowlegeBaseSARules SourceElem, string algName) : base(algName)
 {
     element    = new KnowlegeBaseSARules(SourceElem);
     LearnError = Checker.approxLearnSamples(SourceElem);
     TestError  = Checker.approxTestSamples(SourceElem);
 }
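
Example #8's constructor copies the source rule base and caches its learn and test errors once, so later comparisons do not have to re-evaluate the system. A hedged usage sketch (the storage list and the "MyAlgorithm" name are hypothetical, and it assumes LearnError is readable from outside the class):

        // Hypothetical usage: snapshot every candidate rule base together with its errors.
        var storage = new List<SingletonElementofStorage>();
        foreach (KnowlegeBaseSARules candidate in result.RulesDatabaseSet)
        {
            storage.Add(new SingletonElementofStorage(result, candidate, "MyAlgorithm"));
        }
        // Errors were computed once, in the constructor, so ranking is cheap:
        storage.Sort((a, b) => a.LearnError.CompareTo(b.LearnError));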
Example #9
 protected static void writeAboutEstimates(XmlWriter writer, SAFuzzySystem Approximate)
 {
     writer.WriteStartElement("Estimates");
     if (Approximate.TestSamplesSet != null)
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(22));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "RMSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.approxLearnSamples(Approximate.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "MSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.RMSEtoMSEforLearn(Approximate.approxLearnSamples(Approximate.RulesDatabaseSet[0]))));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "RMSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.approxTestSamples(Approximate.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "MSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.RMSEtoMSEforTest(Approximate.approxTestSamples(Approximate.RulesDatabaseSet[0]))));
         writer.WriteEndElement();
     }
     else
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(20));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "RMSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.approxLearnSamples(Approximate.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Approximate.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "MSE");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.RMSEtoMSEforLearn(Approximate.approxLearnSamples(Approximate.RulesDatabaseSet[0]))));
         writer.WriteEndElement();
     }
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGIBNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGIBSumStrait()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGIBSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGICNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGICSumStraigth()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGICSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGISNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGISSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumReverce");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getGISSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getLindisNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getLindisSumStraight()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getLindisSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "NormalIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getNormalIndex()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "RealIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getIndexReal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumStraigthIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getIndexSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumReverseIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getIndexSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "ComplexitIt");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getComplexit()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "CountRules");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Approximate.getRulesCount()));
     writer.WriteEndElement();
     writer.WriteEndElement();
 }
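
Every block in Example #9 repeats the same XmlWriter calls: WriteStartElement("Estimate"), one or two WriteAttributeString calls, WriteEndElement. A small helper removes the duplication; this sketch uses only the standard System.Xml API, with the metric value supplied by the caller.

     // Write one <Estimate Type="..." Value="..."/> element, optionally with a Table attribute.
     // Requires: using System.Xml;
     static void WriteEstimate(XmlWriter writer, string type, double value, string table = null)
     {
         writer.WriteStartElement("Estimate");
         if (table != null)
         {
             writer.WriteAttributeString("Table", table);
         }
         writer.WriteAttributeString("Type", type);
         writer.WriteAttributeString("Value", XmlConvert.ToString(value));
         writer.WriteEndElement();
     }

     // Usage, mirroring the start of Example #9:
     // WriteEstimate(writer, "RMSE", Approximate.approxLearnSamples(Approximate.RulesDatabaseSet[0]),
     //               Approximate.LearnSamplesSet.FileName);
     // WriteEstimate(writer, "NormalIndex", Approximate.getNormalIndex());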
Example #10
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            iskl_prizn      = "";
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;
            priznaki_usech  = ((Param)conf).Усечённые_признаки;

            int    iter = 0, /* iter2,*/ i, j, count_terms /*, count_iter*/;
            int    count_cons;                     //, count_best2 , best_pred ;
            double RMSE_best, cosFi, RMSE_best2, MSEbefore, MSEafter;
            int    Nd, variables, k = 1, best = 0; // best2 = 0;

            string[] buf;
            buf = priznaki_usech.Split(' ');
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            RMSE_best2 = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_all  = new double[iter];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Approx.LearnSamplesSet.CountVars;
            int[] terms = new int[variables];

            double[] X_best2 = new double[variables];
            double[,] d3      = new double[count_populate, variables];
            double[,] priznak = new double[count_populate, variables];
            for (i = 0; i < variables; i++)
            {
                priznak[0, i] = 1;
                X_best2[i]    = 1;
            }
            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }


            if (buf[0] != "")
            {
                for (k = 0; k < buf.Count(); k++)
                {
                    Approx.AcceptedFeatures[int.Parse(buf[k]) - 1] = false;
                    priznak[0, int.Parse(buf[k]) - 1] = 0;
                    iskl_prizn += buf[k] + " ";
                }
            }

            RMSE_best = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
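            // Main loop: while iter < exploration the shrapnel positions are regenerated around the
            // current best solution; all candidates are then evaluated and, every 1000 iterations,
            // the best one is additionally refined with an Adaptive_LSM pass.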
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
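                    // First iteration: seed shrapnel[0, *] with the current rule-base parameters,
                    // remember them as X_best / X_pred, and draw the initial step sizes d[0, *]
                    // from each variable's [Min, Max] range.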
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    RMSE_pred[1] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Min, Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Max);
                            k++;
                        }
                    }
                }
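                // Exploration phase: every parameter i of every shrapnel j is offset from the best
                // solution by explosion[j, i]; the goto-driven "generate" loop re-samples the offset
                // until term-parameter ordering and the variable's [Min, Max] bounds are satisfied.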
                for (i = 1; i <= Nd; i++)
                {
                    if (exploration > iter)
                    {
                        for (j = 1; j < count_populate; j++)
                        {
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            // distance formula corrected (original Russian comment: "формула расстояния исправлена")

                            d[j, i] = d[j - 1, i] * randn();

                            //double sluch = randn();
                            //if (sluch < 0) d[j, i] = d[j - 1, i] * (-1) * Math.Pow(sluch, 2);
                            //else d[j, i] = d[j - 1, i] * Math.Pow(sluch, 2);
                            explosion[j, i] = d[j, i] * cosFi;
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar == (variables - 1))
                            {
                                if ((i == (Nd - 1)) || (i == Nd)) // original hard-coded count_terms * 3, assuming three-parameter terms; Nd == count_terms * type
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
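                        // Exploitation phase: disabled in the original source (the directed-search
                        // variant below is fully commented out), so after `exploration` iterations
                        // the shrapnel positions are no longer regenerated.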
                        //d[0, i] = d2(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);

                        //for (j = 1; j < count_populate; j++)
                        //{
                        //    if ((X_pred[1, i] - X_pred[0, i]) != 0)
                        //    {
                        //        direction[j, i] = m(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);
                        //    }
                        //    else direction[j, i] = 1;
                        //    int sum = 0, sum2 = 0;
                        //generate:
                        //    sum++;
                        //    sum2++;
                        //    double random;
                        //    random = randn();
                        //    if(random<0) explosion[j, i] = d[j-1, i]*rand.NextDouble() * cosFi*(-1);
                        //    else explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi;
                        //    if (sum2 > 50) sum2 = 0;

                        //    if (sum > 20)
                        //    {
                        //        if ((i + (type - 2)) % type == 0)
                        //        {
                        //            shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                        //            if (sum2 > 2)
                        //            {
                        //                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                        //                sum = 19;
                        //            }
                        //            if (sum2 > 3)
                        //            {
                        //                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                        //                sum = 19;
                        //                sum2 = 0;
                        //            }
                        //        }
                        //        else
                        //        {
                        //            shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                        //            sum = 19;
                        //        }
                        //    }
                        //    else shrapnel[j, i] = Shrapnel(explosion[j,i],shrapnel[0,i],d[j-1,i],direction[j,i]);

                        //    if ((i == 2) || (i == 1))
                        //    {
                        //        shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar);
                        //    }
                        //    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                        //    {
                        //        shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar); goto exit;
                        //    }

                        //    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                        //    {
                        //        shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar); goto exit;
                        //    }
                        //    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                        //    {
                        //        if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                        //        {
                        //            shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMax(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar); goto exit;
                        //        }
                        //        if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                        //        {
                        //            shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMax(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar); goto exit;
                        //        }
                        //    }
                        //    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                        //    {
                        //        if((i==(count_terms*3-1))||(i==(count_terms*3)))
                        //        shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributeMax(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar);
                        //    }

                        //    if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                        //    {
                        //        if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar)) i--;
                        //        goto generate;
                        //    }
                        //    if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                        //    {
                        //        goto generate;
                        //    }
                        //    if (i != 1)
                        //    {
                        //        if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributeMax(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar)) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar))))
                        //        {
                        //            goto generate;
                        //        }
                        //    }
                        //    if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributeMin(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar)) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributeMax(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar))))
                        //    {
                        //        goto generate;
                        //    }
                        //exit:
                        //    if (i > type)
                        //    {
                        //        if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                        //        {
                        //            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                        //            {
                        //                if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                        //                {
                        //                    goto generate;
                        //                }
                        //            }
                        //        }
                        //    }
                        //    d[j, i] = d[j-1,i] / Math.Pow(Math.E, (double)iter/(double)reduce_koef);
                        //}
                    }
                }
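                // Write each shrapnel's parameter vector back into its knowledge-base copy.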

                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Approx.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
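                // Evaluate every candidate on the learn and test samples and remember the best one.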
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j]     = Approx.approxLearnSamples(Approx.RulesDatabaseSet[j]);
                    RMSE_tst[j] = Approx.approxTestSamples(Approx.RulesDatabaseSet[j]);
                    if (RMSE[j] < RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }
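                // Every 1000 iterations: re-estimate the best candidate's consequents with Adaptive_LSM,
                // keep the LSM result only if it does not worsen the learn-set error, and propagate the
                // best consequents to every candidate. RMSE2 and cons_best are written only in this block,
                // so cons_best stays at its initial zeros until the first improving LSM pass.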
                if ((iter != 0) && (iter % 1000 == 0))
                {
                    Adaptive_LSM LSM = new Adaptive_LSM();
                    MSEbefore = RMSE[best];
                    KnowlegeBaseSARules zeroSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    KnowlegeBaseSARules tempSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    Approx   = LSM.TuneUpFuzzySystem(Approx, new NullConfForAll()) as SAFuzzySystem;
                    MSEafter = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    if (MSEafter > MSEbefore)
                    {
                        Approx.RulesDatabaseSet[0] = tempSolution;
                        RMSE2[best] = MSEbefore;
                    }
                    else
                    {
                        RMSE2[best] = MSEafter;
                        for (int p = 0; p < count_cons; p++)
                        {
                            cons_best[p] = Approx.RulesDatabaseSet[0].all_conq_of_rules[p];
                        }
                    }
                    if (RMSE2[best] < RMSE_best)
                    {
                        RMSE_best = RMSE2[best];
                    }
                    Approx.RulesDatabaseSet[best] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0]    = new KnowlegeBaseSARules(zeroSolution);
                    for (int z = 0; z < count_populate; z++)
                    {
                        for (int p = 0; p < count_cons; p++)
                        {
                            Approx.RulesDatabaseSet[z].RulesDatabase[p].IndependentConstantConsequent = cons_best[p];
                        }
                    }
                }
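                // Promote the best shrapnel to position 0 (the working solution); during the
                // exploration phase the base step sizes d[0, *] are also re-randomized.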
                k = 1;
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Min, Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Max);
                        }
                        Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }
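                // Keep the two previous best positions and errors (X_pred, RMSE_pred) for the disabled
                // directed exploitation phase; on every 10th iteration RMSE2[best] is compared instead of
                // RMSE[best], even though RMSE2 is only updated during the LSM refinement above.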

                if (iter % 10 == 0)
                {
                    if (RMSE_pred[1] > RMSE2[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE2[best];
                    }
                }
                else
                {
                    if (RMSE_pred[1] > RMSE[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE[best];
                    }
                }
            }

            return(result);
        }