Code example #1
File: WCATSAprox.cs Project: CDMMKY/fuzzy_core
        public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approximate, ILearnAlgorithmConf Conf)
        {
            result = Approximate;
            List <int[]> groups = new List <int[]>();

            Init(Conf);
            SetPopulation();
            Population = SortRules(Population);
            NS         = SetNS(Population, Nsr);
            groups     = GroupStream();
            double BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
            double BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
            int    BestIter     = 0;

            for (int i = 1; i <= MaxIter; i++)
            {
                Console.WriteLine(i + " - Итерация");
                Population = SetNextPosition(groups, Population);
                Population = Replacement(groups, Population);
                if (flag)
                {
                    Evaporation(groups.Last()); // evaporation
                }
                if (BestMSETest > result.RMSEtoMSEforTest(result.approxTestSamples(Population[0])))
                {
                    BestMSETest  = result.RMSEtoMSEforTest(result.approxTestSamples(Population[0]));
                    BestMSELearn = result.RMSEtoMSEforLearn(result.approxLearnSamples(Population[0]));
                    BestIter     = i;
                }
            }
            Console.WriteLine(ToString(true));
            Console.WriteLine("Итер - " + BestIter + " MSET - " + BestMSETest + " MSEL - " + BestMSELearn);
            result.RulesDatabaseSet[0] = Population[0];
            return(result);
        }
Code example #2
File: Term_Config_PSO.cs Project: CDMMKY/fuzzy_core
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            result = Classifier;
            Init(conf);

            //OneIteration
            for (int i = 0; i < count_iteration; i++)
            {
                oneIterate(result);
            }
            Final();
            return(result);
        }
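Every snippet on this page implements the same TuneUpFuzzySystem entry point and differs only in the metaheuristic inside. Below is a minimal, hypothetical usage sketch; the wrapper class TuneUpRunner and the delegate-based wiring are illustrative assumptions and are not part of fuzzy_core:

using System;

public static class TuneUpRunner
{
    // Forwards to whichever concrete TuneUpFuzzySystem override the caller
    // supplies. PCFuzzySystem and ILearnAlgorithmConf are the types used in
    // the examples on this page; the wrapper itself is hypothetical.
    public static PCFuzzySystem RunTuneUp(
        Func<PCFuzzySystem, ILearnAlgorithmConf, PCFuzzySystem> tuneUp,
        PCFuzzySystem classifier,
        ILearnAlgorithmConf conf)
    {
        // Each implementation above both mutates and returns the tuned system.
        return tuneUp(classifier, conf);
    }
}

Passing the override as a delegate keeps the sketch independent of any concrete learner class; a method group such as new Term_Config_PSO().TuneUpFuzzySystem should convert to this delegate if that class name matches the file above, though the class name itself is an assumption.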
Code example #3
File: DicrKrillHerd.cs Project: CDMMKY/fuzzy_core
        // main computations
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            result = Classify;
            rand   = new Random();
            Init(conf);
            SetPopulation();
            bool[] BEST      = result.AcceptedFeatures;
            double bestError = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            // clear the console
            Dictionary <bool[], double> PopulationWithAccuracy = new Dictionary <bool[], double>();
            double accuracy = 0;

            // run the iterations
            for (int it = 0; it < iter; it++)
            {
                // compute the fitness-function value
                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    accuracy = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
                    PopulationWithAccuracy.Add(Population[i], accuracy);
                }
                Population.Clear();
                foreach (var pair in PopulationWithAccuracy.OrderByDescending(pair => pair.Value))
                {
                    Population.Add(pair.Key);
                }
                PopulationWithAccuracy.Clear();
                double[] K    = new double[Population.Count];
                double   sumK = 0;
                double   avK  = 0;
                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    K[i]  = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
                    sumK += K[i];
                }
                avK = sumK / K.Length;
                bool[] KDis = new bool[result.CountFeatures];
                for (int i = 0; i < KDis.Length; i++)
                {
                    if (rand.Next(0, 2) == 0)
                    {
                        KDis[i] = false;
                    }
                    else
                    {
                        KDis[i] = true;
                    }
                }

                // iterate over the fitness-function values
                // compute the D value
                double dit;
                dit = it;
                double diter;
                diter = iter;

                bool D = true;

                // compute the Xfood value
                var Xfood = new bool[result.CountFeatures];
                for (int t = 0; t < Xfood.Length; t++)
                {
                    Xfood[t] = false;

                    for (int i = 0; i < Population.Count; i++)
                    {
                        Xfood[t] = merge(Population[i][t], KDis[t]);
                    }
                }
                // compute the Cfood value
                double Cfood = 2 * (1 - (dit / diter));
                bool   CDisfood;
                if (Cfood <= 1)
                {
                    CDisfood = false;
                }
                else
                {
                    CDisfood = true;
                }

                // compute the Bfood value
                List <bool[]> Bfood = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    Bfood.Add(new bool[Population[i].Length]);
                    if (rand.Next(0, 2) == 0)
                    {
                        Kroofifood = false;
                    }
                    else
                    {
                        Kroofifood = true;
                    }
                    for (int t = 0; t < Bfood[i].Length; t++)
                    {
                        Bfood[i][t] = merge(merge(CDisfood, Kroofifood), Xfood[t]);
                    }
                }

                // compute the Bbest value
                List <bool[]> Bbest = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    Bbest.Add(new bool[Population[i].Length]);
                    if (rand.Next(0, 2) == 0)
                    {
                        Kroofifood = false;
                    }
                    else
                    {
                        Kroofifood = true;
                    }
                    for (int t = 0; t < Bbest[i].Length; t++)
                    {
                        Bbest[i][t] = merge(Kroofifood, Xfood[t]);
                    }
                }

                // compute the B value
                List <bool[]> B = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    B.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < B[i].Length; t++)
                    {
                        B[i][t] = merge(Bfood[i][t], Bbest[i][t]);
                    }
                }

                // compute the F value
                List <bool[]> F = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    if (i == 0)
                    {
                        F.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < F[i].Length; t++)
                        {
                            F[i][t] = merge(true, B[i][t]);
                        }
                    }
                    else
                    {
                        F.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < F[i].Length; t++)
                        {
                            F[i][t] = merge(merge(true, B[i][t]), merge(false, F[i - 1][t]));
                        }
                    }
                }

                List <int>[] neihbors = new List <int> [Population.Count];
                // compute the alocal value
                List <bool[]> alocal = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    alocal.Add(new bool[Population[i].Length]);
                    neihbors[i] = countneihbors(Population[i]);

                    for (int t = 0; t < alocal[i].Length; t++)
                    {
                        alocal[i][t] = false;
                        for (int j = 0; j < neihbors[i].Count; j++)
                        {
                            bool   KRoofij = merge(KDis[t], Population[neihbors[i][j]][t]);
                            bool[] XRoofij = CalcXroof(Population[i], Population[neihbors[i][j]]);

                            alocal[i][t] = merge(KRoofij, XRoofij[t]);
                        }
                    }
                }

                // compute the Cbest value
                bool Cbest;
                if (rand.Next(0, 2) == 0)
                {
                    Cbest = false;
                }
                else
                {
                    Cbest = true;
                }


                // compute the atarget value
                List <bool[]> atarget = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    atarget.Add(new bool[Population[i].Length]);
                    bool[] XRoofibest = CalcXroof(Population[i], Population[0]);
                    for (int t = 0; t < alocal[i].Length; t++)
                    {
                        bool KRoofibest = KDis[t];
                        atarget[i][t] = merge(merge(Cbest, KRoofibest), XRoofibest[t]);
                    }
                }

                // compute the a value
                List <bool[]> a = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    a.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < a[i].Length; t++)
                    {
                        a[i][t] = merge(atarget[i][t], alocal[i][t]);
                    }
                }

                // compute the N value
                List <bool[]> N = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    if (i == 0)
                    {
                        N.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < N[i].Length; t++)
                        {
                            N[i][t] = merge(true, a[i][t]);
                        }
                    }
                    else
                    {
                        N.Add(new bool[Population[i].Length]);
                        for (int t = 0; t < N[i].Length; t++)
                        {
                            N[i][t] = merge(a[i][t], N[i - 1][t]);
                        }
                    }
                }
                // compute the dX value
                List <bool[]> dX = new List <bool[]>(Population.Count);
                for (int i = 0; i < Population.Count; i++)
                {
                    dX.Add(new bool[Population[i].Length]);
                    for (int t = 0; t < dX[i].Length; t++)
                    {
                        dX[i][t] = merge(merge(F[i][t], N[i][t]), D);
                    }
                }

                // print the BEST value
                //   Console.Write("Значение BEST_ = ");
                //  Console.WriteLine(BEST);


                // compute the X(t+dt) value: merge the previous position with dX
                for (int i = 0; i < Population.Count; i++)
                {
                    bool[] previousPosition = Population[i];
                    Population[i] = new bool[previousPosition.Length];
                    for (int t = 0; t < Population[i].Length; t++)
                    {
                        Population[i][t] = merge(previousPosition[t], dX[i][t]);
                    }
                }

                for (int i = 0; i < Population.Count; i++)
                {
                    result.AcceptedFeatures = Population[i];
                    double temp = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);

                    if (temp < bestError)
                    {
                        BEST      = Population[i];
                        bestError = temp;
                    }
                }

                if (it % 10 == 0 && it != 0)
                {
                    Console.WriteLine(it);
                    Console.WriteLine(bestError);
                }
            }
            result.AcceptedFeatures = BEST;
            for (int i = 0; i < result.AcceptedFeatures.Length; i++)
            {
                if (result.AcceptedFeatures[i] == false)
                {
                    Console.Write("0 ");
                }
                else
                {
                    Console.Write("1 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
            Console.WriteLine(result.ErrorTestSamples(result.RulesDatabaseSet[0]));
            return(result);
        }
Code example #4
File: ChoosePlus.cs Project: CDMMKY/fuzzy_core
 public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem FSystem, ILearnAlgorithmConf conf)
 {
     isClass = true;
     return(UniversalMethod(FSystem, conf) as PCFuzzySystem);
 }
Code example #5
 public PCFuzzySystem TuneUpFuzzySystem(PittsburgHybride Ocean, PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
 {
     HybrideOcean = Ocean;
     base.TuneUpFuzzySystem(Classifier, conf);
     Ocean.Store(chooseDiscovers(1), this.ToString());
     return(fullFuzzySystem);
 }
Code example #6
File: Approx_discret.cs Project: CDMMKY/fuzzy_core
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            iskl_prizn      = "";
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;
            priznaki_usech  = ((Param)conf).Усечённые_признаки;
            iter_descrete   = ((Param)conf).Итерации_дискр_алг;

            int           iter = 0, iter2, i, j, count_terms, count_iter = 0;
            int           count_cons, count_best2 = 0, best_pred = 0;
            double        RMSE_best, cosFi, RMSE_best2;
            int           Nd, variables, k = 1, best2 = 0;
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            RMSE_best2 = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_all  = new double[iter];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            variables   = Approx.LearnSamplesSet.CountVars;
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            int[] terms = new int[variables];

            double[] X_best2 = new double[variables];
            double[,] d3      = new double[count_populate, variables];
            double[,] priznak = new double[count_populate, variables];
            for (i = 0; i < variables; i++)
            {
                priznak[0, i] = 1;
                X_best2[i]    = 1;
            }
            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }

            for (iter2 = 0; iter2 < iter_descrete; iter2++)
            {
                best2 = 0;
                //if (count_best2 < 10)
                //{
                if (iter == 0)
                {
                    for (k = 0; k < variables; k++)
                    {
                        d3[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[k].Min, Approx.LearnSamplesSet.InputAttributes[k].Max);
                    }
                }
                Random random = new Random(); // single Random instance for the feature-generation loops
                for (i = 0; i < variables; i++)
                {
                    for (j = 1; j < count_populate; j++)
                    {
generate:
                        d3[j, i]      = d3[j - 1, i] * randn();
                        priznak[j, i] = d3[j, i] * cosFi;

                        if ((priznak[j, i] < Approx.LearnSamplesSet.InputAttributes[i].Min) || (priznak[j, i] > Approx.LearnSamplesSet.InputAttributes[i].Max))
                        {
                            goto generate;
                        }
                        if (random.NextDouble() < descret(priznak[j, i]))
                        {
                            priznak[j, i] = 1;
                        }
                        else
                        {
                            priznak[j, i] = 0;
                        }
                    }
                }


                for (j = 1; j < count_populate; j++)
                {
                    for (int h = 0; h < variables; h++)
                    {
                        if (priznak[j, h] == 1)
                        {
                            Approx.AcceptedFeatures[h] = true;
                        }
                        else
                        {
                            Approx.AcceptedFeatures[h] = false;
                        }
                    }
                    RMSE2[j] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    if (RMSE2[j] > RMSE_best2)
                    {
                        RMSE_best2 = RMSE2[j];
                        best2      = j;
                    }
                    for (int h = 0; h < variables; h++)
                    {
                        X_best2[h] = priznak[best2, h];
                    }
                }
                if (best_pred == best2)
                {
                    count_best2++;
                }
                else
                {
                    count_best2 = 0;
                }
                for (k = 0; k < variables; k++)
                {
                    priznak[0, k] = priznak[best2, k];
                }
                count_iter++;
                //}
            }

            for (k = 0; k < variables; k++)
            {
                if (priznak[best2, k] == 1)
                {
                    Approx.AcceptedFeatures[k] = true;
                }
                else
                {
                    Approx.AcceptedFeatures[k] = false;
                    iskl_prizn += (k + 1).ToString() + " ";
                }
            }

            return(result);
        }
Code example #7
        public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approx, ILearnAlgorithmConf conf)
        // Optimization is performed here by your algorithm
        {
            var    first        = true;
            var    WeedOriginal = new TSAFuzzySystemWithErrorKnowledgeBase(Approx);
            double KLIRange     = ((WeedsCut_conf)conf).Граница;

            maxiter = ((WeedsCut_conf)conf).Количество_итераций;
            var    MaxUsingRule = ((WeedsCut_conf)conf).Максимально_потомков;
            var    MinUsingRule = ((WeedsCut_conf)conf).Минимально_потомков;
            Random r            = new Random();

            while (first || Double.IsNaN((WeedOriginal.RulesDatabaseSet[0] as KnowlegeBaseTSARules).error))
            {
                int allUsingRuleCount = ((WeedsCut_conf)conf).Начальное_количество;
                WeedOriginal = new TSAFuzzySystemWithErrorKnowledgeBase(Approx);

                first = false;
                List <int> list    = new List <int>();
                int        counter = 0;
                for (int i = 0; i < allUsingRuleCount; i++)
                {
                    list.Add(counter);
                    counter++;
                }
                for (int i = 0; i < WeedOriginal.AcceptedFeatures.Count(); i++)
                {
                    WeedOriginal.AcceptedFeatures[i] = false;
                }
                for (int i = 0; i < allUsingRuleCount; i++)
                {
                    int index = r.Next(0, allUsingRuleCount - i);
                    int ddd   = list[index];
                    WeedOriginal.AcceptedFeatures[ddd] = true;
                    list.RemoveAt(index);
                }

                global::KLI.KLI s = new global::KLI.KLI();
                WeedOriginal = new TSAFuzzySystemWithErrorKnowledgeBase(s.Generate(WeedOriginal as TSAFuzzySystem, new KLI_conf()
                {
                    MaxValue = KLIRange
                }));
                WeedOriginal.reinit();
                (WeedOriginal.RulesDatabaseSet[0] as KnowlegeBaseTSARules).error = WeedOriginal.ErrorLearnSamples(WeedOriginal.RulesDatabaseSet[0]);
                (WeedOriginal.RulesDatabaseSet[0] as KnowlegeBaseTSARules).RulesAcceptedFeatures = new List <bool>(WeedOriginal.AcceptedFeatures.ToList());
            }
            string added2 = (WeedOriginal.RulesDatabaseSet.Last() as  KnowlegeBaseTSARules).RulesAcceptedFeatures.Aggregate(String.Empty, (current, v) => current + (" " + (v ? "1" : "0")));

            File.AppendAllLines("./out2.txt", new List <string>()
            {
                WeedOriginal.RulesDatabaseSet.Last().RulesDatabase.Count + " " +
                (WeedOriginal.RulesDatabaseSet.Last() as KnowlegeBaseTSARules).RulesAcceptedFeatures.Count(x => x) + " " + WeedOriginal.RMSEtoMSEforLearn(WeedOriginal.ErrorLearnSamples(WeedOriginal.RulesDatabaseSet.Last())) / 2 + " " + WeedOriginal.RMSEtoMSEforTest(WeedOriginal.ErrorTestSamples(WeedOriginal.RulesDatabaseSet.Last())) / 2 + " " + added2
            });

            Random rand = new Random();

            for (int iter = 0; iter < maxiter; iter++)
            {
                List <KnowlegeBaseTSARules> newtstmp = new List <KnowlegeBaseTSARules>();
                for (int k = 0; k < WeedOriginal.RulesDatabaseSet.Count; k++)
                {
                    double fiBest = 1;

                    double fiWorst = (1 - (WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules).error);
                    if (!Double.IsNaN((WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules).error))
                    {
                        if (!Double.IsInfinity((WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules).error))
                        {
                            var CurrertChild =
                                Convert.ToInt32((MaxChild - MinChild) / (fiBest - fiWorst) * (1 - (WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules).error) +
                                                (fiBest * MinChild - fiWorst * MaxChild) / (fiBest - fiWorst));
                            double delta = originalDelta * (maxiter - iter) / maxiter;


                            newtstmp.AddRange(WeedsCut.WeedsRegenerateIteration(WeedOriginal, (WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules), delta, rand, CurrertChild));

                            //
                            var item = newtstmp.Last();
                            item.RulesAcceptedFeatures = new List <bool>((WeedOriginal.RulesDatabaseSet[k] as KnowlegeBaseTSARules).RulesAcceptedFeatures);
                            for (int index = 0; index < item.RulesAcceptedFeatures.Count; index++)
                            {
                                if (Math.Sqrt(-2 * Math.Log(rand.NextDouble())) * Math.Cos(2 * Math.PI * rand.NextDouble()) > delta)
                                {
                                    item.RulesAcceptedFeatures[index] = !item.RulesAcceptedFeatures[index];
                                }
                            }

                            while (item.RulesAcceptedFeatures.Count(x => x) > MaxUsingRule)
                            {
                                item.RulesAcceptedFeatures[(rand.Next(item.RulesAcceptedFeatures.Count))] = false;
                            }

                            while (item.RulesAcceptedFeatures.Count(x => x) < MinUsingRule)
                            {
                                item.RulesAcceptedFeatures[(rand.Next(item.RulesAcceptedFeatures.Count))] = true;
                            }
                        }
                    }
                }

                foreach (var weedBase in newtstmp)
                {
                    RLS rls = new RLS();
                    var tmp = new TSAFuzzySystemWithErrorKnowledgeBase(WeedOriginal);

                    tmp.RulesDatabaseSet.Clear();
                    tmp.AcceptedFeatures = weedBase.RulesAcceptedFeatures.ToArray();
                    global::KLI.KLI s = new global::KLI.KLI();
                    tmp = s.Generate(tmp, new KLI_conf()
                    {
                        MaxValue = KLIRange
                    }) as TSAFuzzySystemWithErrorKnowledgeBase;
                    tmp.reinit();
                    (tmp.RulesDatabaseSet[0] as KnowlegeBaseTSARules).error = tmp.ErrorLearnSamples(tmp.RulesDatabaseSet[0]);
                    (tmp.RulesDatabaseSet[0] as KnowlegeBaseTSARules).RulesAcceptedFeatures = new List <bool>(tmp.AcceptedFeatures.ToList());
                    if (!double.IsNaN((tmp.RulesDatabaseSet[0] as KnowlegeBaseTSARules).error))
                    {
                        WeedOriginal.RulesDatabaseSet.Add(tmp.RulesDatabaseSet[0]);
                        string added = (tmp.RulesDatabaseSet[0] as KnowlegeBaseTSARules).RulesAcceptedFeatures.Aggregate(String.Empty, (current, v) => current + (" " + (v ? "1" : "0")));
                        File.AppendAllLines("./out2.txt", new List <string>()
                        {
                            tmp.RulesDatabaseSet[0].RulesDatabase.Count + " " +
                            (tmp.RulesDatabaseSet[0] as KnowlegeBaseTSARules).RulesAcceptedFeatures.Count(x => x) + " " + tmp.RMSEtoMSEforLearn(tmp.ErrorLearnSamples(tmp.RulesDatabaseSet[0])) / 2 + " " + tmp.RMSEtoMSEforTest(tmp.ErrorTestSamples(tmp.RulesDatabaseSet[0])) / 2 + " " + added
                        });
                    }
                }
                WeedOriginal.RulesDatabaseSet.RemoveAll(x => Double.IsNaN((x as KnowlegeBaseTSARules).error) || Double.IsInfinity((x as KnowlegeBaseTSARules).error));
                WeedOriginal.RulesDatabaseSet.Sort((c1, c2) => (c1 as KnowlegeBaseTSARules).error.CompareTo((c2 as KnowlegeBaseTSARules).error));
                if (WeedOriginal.RulesDatabaseSet.Count > 6)
                {
                    WeedOriginal.RulesDatabaseSet.RemoveRange(6, WeedOriginal.RulesDatabaseSet.Count - 6);
                }
            }
            WeedOriginal.AcceptedFeatures = (WeedOriginal.RulesDatabaseSet[0] as  KnowlegeBaseTSARules).RulesAcceptedFeatures.ToArray();

            return(WeedOriginal);
        }
Code example #8
 void Init(ILearnAlgorithmConf Conf)
 {
     Config = Conf as IslandsConfig;
 }
Code example #9
File: CuckooClassifier.cs Project: CDMMKY/fuzzy_core
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            PCFuzzySystem result = Classifier;

            count_iteration = ((CuckooConf)conf).CuckooCountIterate;
            count_particle  = ((CuckooConf)conf).CuckooPopulationSize;
            m    = ((CuckooConf)conf).CuckooWorse;
            p    = ((CuckooConf)conf).CuckooLifeChance;
            beta = ((CuckooConf)conf).CuckooBeta;


            KnowlegeBasePCRules[] X = new KnowlegeBasePCRules[count_particle + 1];
            double[] Errors         = new double[count_particle + 1];
            double[] Er             = new double[count_particle + 1];

            Random rnd  = new Random();
            int    best = 0;

            for (int i = 0; i < count_particle + 1; i++)
            {
                KnowlegeBasePCRules temp_c_Rule = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
                X[i]      = temp_c_Rule;
                Errors[i] = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);
            }
            ///////////
            for (int i = 0; i < count_iteration; i++)
            {
                X[0] = new  KnowlegeBasePCRules(X[0]);
                for (int k = 0; k < X[0].TermsSet.Count; k++)
                {
                    for (int q = 0; q < X[0].TermsSet[k].CountParams; q++)
                    {
                        double b = (rnd.Next(1000, 2000) / Convert.ToDouble(1000));
                        X[0].TermsSet[k].Parametrs[q] = X[0].TermsSet[k].Parametrs[q] + Levi(BM(sigu(beta)), BM(1.0), beta);
                    }
                }

                for (int k = 0; k < X[0].Weigths.Length; k++)
                {
                    X[0].Weigths[k] = rnd.NextDouble() / 200;
                }

                result.RulesDatabaseSet.Add(X[0]);
                int temp_index = result.RulesDatabaseSet.Count - 1;
                Errors[0] = result.ClassifyLearnSamples(result.RulesDatabaseSet[temp_index]);
                result.RulesDatabaseSet.RemoveAt(temp_index);

                int s = rnd.Next(1, count_particle + 1);

                if (Errors[0] > Errors[s])
                {
                    X[s]      = X[0];
                    Errors[s] = Errors[0];
                }
                else
                {
                    X[0]      = X[s];
                    Errors[0] = Errors[s];
                }

                for (int v = 0; v < m; v++)
                {
                    double max = Errors[1];
                    int    ind = 1;
                    for (int r = 2; r < count_particle + 1; r++)
                    {
                        if (Errors[r] < max)
                        {
                            max = Errors[r];
                            ind = r;
                        }
                    }
                    double h = (rnd.Next(1, 1000) / Convert.ToDouble(1000));
                    if (h > p)
                    {
                        X[ind] = new KnowlegeBasePCRules(X[ind]);
                        for (int j = 0; j < X[ind].TermsSet.Count; j++)
                        {
                            for (int k = 0; k < X[ind].TermsSet[j].CountParams; k++)
                            {
                                X[ind].TermsSet[j].Parametrs[k] = X[0].TermsSet[j].Parametrs[k] + (rnd.Next(-1000, 1000) / Convert.ToDouble(1000));
                            }
                            for (int k = 0; k < X[ind].Weigths.Length; k++)
                            {
                                X[ind].Weigths[k] = X[0].Weigths[k] + (rnd.Next(1, 1000) / Convert.ToDouble(10000));
                            }
                        }
                        result.RulesDatabaseSet.Add(X[ind]);
                        temp_index  = result.RulesDatabaseSet.Count - 1;
                        Errors[ind] = result.ClassifyLearnSamples(result.RulesDatabaseSet[temp_index]);
                        result.RulesDatabaseSet.RemoveAt(temp_index);
                    }
                }
            }

            double min = Errors[0];

            best = 0;
            for (int g = 1; g < count_particle + 1; g++)
            {
                if (Errors[g] > min)
                {
                    min  = Errors[g];
                    best = g;
                }
            }

            X[0] = X[best];

            result.RulesDatabaseSet.Add(X[0]);
            int t_index = result.RulesDatabaseSet.Count - 1;

            Errors[0] = result.ClassifyLearnSamples(result.RulesDatabaseSet[t_index]);
            result.RulesDatabaseSet.RemoveAt(t_index);


            result.RulesDatabaseSet[0] = X[0];
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
Code example #10
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approximate, ILearnAlgorithmConf conf)
        {
            SAFuzzySystem result = Approximate;
            var           config = (RLSconfig)conf;

            numberOfIterations = config.NumberOfIterantions;
            lambda             = config.ForgettingFactor;

            var knowledgeBaseToOptimize = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);

            // r[0]
            double[] consequents = new double[knowledgeBaseToOptimize.all_conq_of_rules.Length];
            knowledgeBaseToOptimize.all_conq_of_rules.CopyTo(consequents, 0);
            dividerXi = new double[result.LearnSamplesSet.CountSamples];
            xiValue   = new double[result.LearnSamplesSet.CountSamples][];

            Vector r = new Vector(consequents);

            // p[0]
            double[][] p = new double[consequents.Length][];
            for (int i = 0; i < consequents.Length; i++)
            {
                p[i] = new double[consequents.Length];
                for (int j = 0; j < consequents.Length; j++)
                {
                    p[i][j] = (i == j) ? 2000 : 0;
                }
            }

            Matrix P = new Matrix(p);

            // x, y
            double[][] x = new double[result.LearnSamplesSet.CountSamples][];
            double[]   y = new double[result.LearnSamplesSet.CountSamples];
            for (int i = 0; i < result.LearnSamplesSet.CountSamples; i++)
            {
                x[i] = result.LearnSamplesSet.DataRows[i].InputAttributeValue;
                y[i] = result.LearnSamplesSet.DataRows[i].DoubleOutput;

                xiValue[i] = new double[knowledgeBaseToOptimize.all_conq_of_rules.Length];
            }

            EvalXi(x, y, knowledgeBaseToOptimize);

            // Main cycle
            for (int i = 0; i < numberOfIterations; i++)
            {
                for (int j = 0; j < result.LearnSamplesSet.CountSamples; j++)
                {
                    var temp1 = -1d * P;
                    var temp2 = new Matrix(Xi(j), false);
                    var temp3 = temp1 * temp2;

                    var temp4 = new Matrix(Xi(j), true);
                    var temp5 = temp4 * P * temp2;
                    var temp6 = lambda + temp5.Elements[0][0]; //// 1 >> lambda
                    var temp7 = temp3 / temp6;
                    var temp8 = temp4 * P;
                    var temp9 = temp7 * temp8;
                    P += temp9;
                    P /= lambda;

                    P = (P + -1 * P * temp2 / (lambda + (temp4 * P * temp2).Elements[0][0]) * temp4 * P) / lambda;

                    r += P * Xi(j) * (y[j] - Xi(j) * r);
                }
            }

            knowledgeBaseToOptimize.all_conq_of_rules = r.Elements;

            // Get the best knowledge base on the 1st place
            result.RulesDatabaseSet.Add(knowledgeBaseToOptimize);
            double errorBefore = result.approxLearnSamples(result.RulesDatabaseSet[0]),
                   errorAfter  = result.approxLearnSamples(result.RulesDatabaseSet[result.RulesDatabaseSet.Count - 1]);

            if (errorAfter < errorBefore)
            {
                result.RulesDatabaseSet.Remove(knowledgeBaseToOptimize);
                result.RulesDatabaseSet.Insert(0, knowledgeBaseToOptimize);
            }

            return(result);
        }
Code example #11
        public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approximate, ILearnAlgorithmConf conf)
        {
            TSAFuzzySystem result = Approximate;

            double errorBefore = result.approxLearnSamples(result.RulesDatabaseSet[0]);

            var a = 2000;

            #region Basic initialization
            var config = (RLSconfig)conf;
            numberOfIterations = config.NumberOfIterantions;
            lambda             = config.ForgettingFactor;

            var knowledgeBaseToOptimize = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
            var kbToOptimize            = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);

            R = knowledgeBaseToOptimize.RulesDatabase.Count;                                  // Number of rules
            m = result.LearnSamplesSet.CountSamples;                                          // Number of samples
            n = knowledgeBaseToOptimize.RulesDatabase[0].RegressionConstantConsequent.Length; // Number of variables
            #endregion

            #region x, y
            double[][] x = new double[m][];
            double[]   y = new double[m];
            for (int i = 0; i < m; i++)
            {
                x[i] = result.LearnSamplesSet.DataRows[i].InputAttributeValue;
                y[i] = result.LearnSamplesSet.DataRows[i].DoubleOutput;
            }
            #endregion

            #region B
            double[][] consequents = new double[n + 1][];

            // B[0]
            consequents[0] = new double[R];
            for (int i = 0; i < R; i++)
            {
                consequents[0][i] = knowledgeBaseToOptimize.RulesDatabase[i].IndependentConstantConsequent;
            }

            // B[1..n+1]
            for (int i = 1; i < n + 1; i++)
            {
                consequents[i] = new double[R];
                for (int j = 0; j < R; j++)
                {
                    consequents[i][j] = knowledgeBaseToOptimize.RulesDatabase[j].RegressionConstantConsequent[i - 1];
                }
            }

            HyperVector B = new HyperVector(consequents);
            #endregion

            #region P
            double[][] p = new double[n + 1][];
            for (int i = 0; i < n + 1; i++)
            {
                p[i] = new double[n + 1];
                for (int j = 0; j < n + 1; j++)
                {
                    p[i][j] = (i == j) ? a : 0;
                }
            }

            Matrix P = new Matrix(p);
            #endregion

            #region Xi
            dividerXi = new double[m];
            xiValue   = new double[m][];
            for (int i = 0; i < m; i++)
            {
                xiValue[i] = new double[R];
            }
            EvalXi(x, y, knowledgeBaseToOptimize);

            // XiBold
            xiBoldValue = new HyperVector[m];
            EvalXiBold(x);
            #endregion

            GC.Collect();

            //double[][] aDoubles = new[]
            //                          {
            //                              new[] { 0.1, 0.2, 0.123 },
            //                              new[] { 1.213, 2.1, 1.2 },
            //                              new[] { 13.3, 0.1231, 31.1 }
            //                          },
            //           bDoubles = new[]
            //                          {
            //                              new[] { 0.1, 12.2, 5.445 },
            //                              new[] { 5.3, 4.553, 1.545 },
            //                              new[] { 3.4, 87.545, 0.255 }
            //                          };
            //Matrix aMatrix = new Matrix(aDoubles);
            //HyperVector bVector = new HyperVector(bDoubles);
            //HyperVector cVector = new HyperVector(aDoubles);

            //var c = aMatrix * bVector;
            //var cc = c;
            //var d = bVector * cVector;
            //var dd = d;
            //var e = bVector ^ cVector;
            //var ee = e;
            //var f = cVector * aMatrix;
            //var ff = f;



            #region The Cycle
            for (int i = 0; i < numberOfIterations; i++)
            {
                for (int j = 0; j < m; j++)
                {
                    var temp1 = -1d * P;
                    var temp3 = temp1 * xiBoldValue[j];

                    var temp5 = xiBoldValue[j] * P * xiBoldValue[j];
                    var temp6 = lambda + temp5;
                    var temp7 = temp3 * (1d / temp6);
                    var temp8 = xiBoldValue[j] * P;
                    var temp9 = temp7 ^ temp8;
                    P += temp9;
                    P /= lambda;

                    ////P = (P + (-1d * P * xiBoldValue[j] * (1d / (lambda + (xiBoldValue[j] * P * xiBoldValue[j]))) ^ xiBoldValue[j] * P)) / lambda;

                    B += P * xiBoldValue[j] * (y[j] - xiBoldValue[j] * B);
                }
            }
            #endregion

            #region Comparison
            // Get consequents into the KB
            for (int i = 0; i < R; i++)
            {
                knowledgeBaseToOptimize.RulesDatabase[i].IndependentConstantConsequent = B.Elements[0].Elements[i];

                for (int j = 1; j < n + 1; j++)
                {
                    knowledgeBaseToOptimize.RulesDatabase[i].RegressionConstantConsequent[j - 1] = B.Elements[j].Elements[i]; // NOT WORKING!!!
                }
            }

            // Get the best knowledge base on the 1st place
            double errorAfter = result.approxLearnSamples(kbToOptimize);

            if (errorAfter < errorBefore)
            {
                result.RulesDatabaseSet.Insert(0, knowledgeBaseToOptimize);
            }
            else
            {
                result.RulesDatabaseSet.Insert(0, kbToOptimize);
                //    result.RulesDatabaseSet.Add(kbToOptimize);
            }

            return(result);

            #endregion
        }
Code example #12
        public override FuzzySystem.PittsburghClassifier.PCFuzzySystem TuneUpFuzzySystem(FuzzySystem.PittsburghClassifier.PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            PCFuzzySystem result = Classifier;



            FalsePositiveLearn = 0;
            FalseNegativeLearn = 0;

            FalsePositiveTest = 0;
            FalseNegativeTest = 0;

            string normalClass = "normal.";

            for (int i = 0; i < Classifier.LearnSamplesSet.CountSamples; i++)
            {
                string ClassifierResult = Classifier.classifyBase(Classifier.LearnSamplesSet[i].InputAttributeValue, Classifier.RulesDatabaseSet[0]);
                if (Classifier.LearnSamplesSet.DataRows[i].StringOutput.Contains(normalClass))
                {
                    if (!ClassifierResult.Contains(normalClass))
                    {
                        FalsePositiveLearn++;
                        continue;
                    }
                }

                if (ClassifierResult.Contains(normalClass))
                {
                    if (!Classifier.LearnSamplesSet.DataRows[i].StringOutput.Contains(normalClass))
                    {
                        FalseNegativeLearn++;
                    }
                }
            }
            FalsePositiveLearn = FalsePositiveLearn / Classifier.LearnSamplesSet.CountSamples * 100;
            FalseNegativeLearn = FalseNegativeLearn / Classifier.LearnSamplesSet.CountSamples * 100;

            for (int i = 0; i < Classifier.TestSamplesSet.CountSamples; i++)
            {
                string ClassifierResult = Classifier.classifyBase(Classifier.TestSamplesSet[i].InputAttributeValue, Classifier.RulesDatabaseSet[0]);
                if (Classifier.TestSamplesSet.DataRows[i].StringOutput.Contains(normalClass))
                {
                    if (!ClassifierResult.Contains(normalClass))
                    {
                        FalsePositiveTest++;
                        continue;
                    }
                }

                if (ClassifierResult.Contains(normalClass))
                {
                    if (!Classifier.TestSamplesSet.DataRows[i].StringOutput.Contains(normalClass))
                    {
                        FalseNegativeTest++;
                    }
                }
            }
            FalsePositiveTest = FalsePositiveTest / Classifier.TestSamplesSet.CountSamples * 100;
            FalseNegativeTest = FalseNegativeTest / Classifier.TestSamplesSet.CountSamples * 100;



            Classifier.RulesDatabaseSet[0].TermsSet.Trim();
            return(Classifier);
        }
Code example #13
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            result = Classify;
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            numberOfFeatures = result.CountFeatures;
            Init(conf);
            rand       = new Random();
            HeadLeader = new bool[numberOfFeatures];
            SetPopulation();
            Population[0].CopyTo(HeadLeader, 0);
            result.AcceptedFeatures = HeadLeader;
            double HLAcc = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);

            iter = 0;
            while (iter < MaxIter)
            {
                ChangePositions();
                SortPopulation();
                result.AcceptedFeatures = Population[0];
                if (result.ClassifyLearnSamples(result.RulesDatabaseSet[0]) > HLAcc)
                {
                    HLAcc = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);
                    Population[0].CopyTo(HeadLeader, 0);
                }
                iter++;
            }
            int count_ones = 0;

            result.AcceptedFeatures = HeadLeader;
            for (int j = 0; j < HeadLeader.Length; j++)
            {
                if (HeadLeader[j])
                {
                    Console.Write("1 ");
                    count_ones++;
                }
                else
                {
                    Console.Write("0 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Признаки: " + count_ones + Environment.NewLine);
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2) + Environment.NewLine);
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Время: " + Environment.NewLine);
            return(result);
        }
Code example #14
 public virtual void Init(ILearnAlgorithmConf Conf)
 {
     Config           = Conf as RandomSearchConf;
     MaxIter          = ((RandomSearchConf)Conf).TRSCCountIteration;
     numberOfAllParts = ((RandomSearchConf)Conf).TRSCCountparticles;
 }
Code example #15
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            result = Classifier;
            // Determine the name of the data folder
            string path_name   = "../../OLD/Data/Keel/Classifier/KEEL-10/";
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            groups = new List <int[]>();
            Init(conf);
            // Create new training and test sample sets and remove a number of random elements from them
            List <PCFuzzySystem> results = new List <PCFuzzySystem>();

            for (int i = 0; i < numberOfPopulations; i++)
            {
                SampleSet new_learn = new SampleSet(path_name + folder_name + "/" + result.LearnSamplesSet.FileName);
                SampleSet new_test  = new SampleSet(path_name + folder_name + "/" + result.TestSamplesSet.FileName);
                results.Add(new PCFuzzySystem(new_learn, new_test));
                int ground = (int)Math.Round(results[i].LearnSamplesSet.DataRows.Count * 0.25);
                for (int j = 0; j < ground; j++)
                {
                    results[i].LearnSamplesSet.DataRows.RemoveAt(rand.Next(0, results[i].LearnSamplesSet.DataRows.Count));
                }
            }
            Populations = new List <List <KnowlegeBasePCRules> >();
            for (int i = 0; i < numberOfPopulations; i++)
            {
                Populations.Add(SetPopulation(new List <KnowlegeBasePCRules>()));
                Populations[i] = ListPittsburgClassifierTool.SortRules(Populations[i], result);
            }
            NS = new int[m];
            for (int i = 0; i < m; i++)
            {
                NS[i] = (N - 1) / m;
            }
            cur_iter = 0;
            while (cur_iter < iter)
            {
                for (int p_i = 0; p_i < Populations.Count; p_i++)
                {
                    groups = GroupStream(Populations[p_i]);
                    if (p_one > rand.NextDouble())
                    {
                        ChooseOneCluster(Populations[p_i]);
                    }
                    else
                    {
                        ChooseTwoClusters(Populations[p_i]);
                    }
                    Populations[p_i] = ListPittsburgClassifierTool.SortRules(Populations[p_i], results[p_i]);
                    //Console.WriteLine(cur_iter + " - Итерация");
                    //Console.WriteLine("Обуч. выборка = " + result.ErrorLearnSamples(Populations[p_i][0]));
                    //Console.WriteLine("Тест. выборка = " + result.ErrorTestSamples(Populations[p_i][0]));
                }
                cur_iter++;
            }
            // Print the classification accuracy of the best particle from each population
            for (int j = 0; j < Populations.Count; j++)
            {
                Populations[j] = ListPittsburgClassifierTool.SortRules(Populations[j], results[j]);
                Console.WriteLine("Популяция №" + j + ":");
                Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(Populations[j][0]), 2));
                Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(Populations[j][0]), 2));
            }
            // Add the best solutions to the rule base
            if (result.RulesDatabaseSet.Count == 1)
            {
                result.RulesDatabaseSet.Clear();
            }
            for (int i = 0; i < Populations.Count; i++)
            {
                result.RulesDatabaseSet.Add(Populations[i][0]);
            }
            // Return the result
            return(result);
        }
Code example #16
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            Random        rand   = new Random(DateTime.Now.Millisecond);
            SAFuzzySystem result = Approx;

            BW.DoWork             += new DoWorkEventHandler(BW_DoWork);
            BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
            BW.RunWorkerAsync();



            MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
            string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";

            config.Init2(PathAlg, Approx.LearnSamplesSet.FileName);

            countFuzzySystem = config.Итераций_алгоритма;

            allowSqare  = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
            allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
            int seedPath = rand.Next();

            sizePercent      = config.азмер_шага_по_точности;
            sizeComplexity   = config.азмер_шага_по_сложности;
            sizeInteraply    = config.азмер_шага_по_интерпретируемости;
            diviver          = config.Уменьшать_шаги_в;
            trysBeforeDivide = config.Уменьшать_шаг_после;
            path             = config.path;
            dataSetName      = config.dataSetName;
            toMany           = config.азрешено_похожих_систем;
            isPSO            = config.toBool(config.Использовать_АРЧ);
            //  isBFO = config.toBool(config.Использовать_АПБ);
            isANT            = config.toBool(config.Использовать_НАМК);
            isBEE            = config.toBool(config.Использовать_САПК);
            isES             = config.toBool(config.Использовать_ЕС);
            isGA             = config.toBool(config.Использовать_ГА);
            isTermShrink     = config.toBool(config.Удалять_термы);
            isRuleShrink     = config.toBool(config.Удалять_правила);
            isUnionTerm      = config.toBool(config.Объединять_термы);
            isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
            countANT         = config.Использовать_НАМК_раз_за_такт;
            // countBFO = config.Использовать_за_такт_АПБ_раз;
            countPSO         = config.Использовать_за_такт_АРЧ_раз;
            countBEE         = config.Использовать_САПК_раз_за_такт;
            countES          = config.Использовать_ЕС_раз_за_такт;
            countGA          = config.Использовать_ГА_раз_за_такт;
            typeComplexity   = (int)config.Критерий_сложности;
            typeInterpreting = (int)config.Критерий_интерпретируемости;

            List <IAbstractLearnAlgorithm> learnAlgorithms       = initAlgoritms();
            List <ILearnAlgorithmConf>     learnAlgorithmsconfig = initAlgoritmsConfigs(Approx.CountFeatures);
            List <double> ValueLPercent        = new List <double>();
            List <double> ValueTPercent        = new List <double>();
            List <double> ValueComplexity      = new List <double>();
            List <double> ValueInterability    = new List <double>();
            List <double> SummaryGoods         = new List <double>();
            List <KnowlegeBaseSARules> Storage = new List <KnowlegeBaseSARules>();
            List <int> candidate = new List <int>();

            KnowlegeBaseSARules Best = result.RulesDatabaseSet[0];


            baseLearn = result.approxLearnSamples(result.RulesDatabaseSet[0]);
            ValueLPercent.Add(baseLearn);
            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
            baseComplexity = getComplexity(result);
            ValueComplexity.Add(baseComplexity);
            baseIntebility = getInterpreting(result, allowBorder, allowSqare);
            ValueInterability.Add(baseIntebility);


            Storage.Add(Best);
            int NSCount = 0;
            int deleted = 0;

            for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++)
            {
                bool mustToDivide = true;
                int  usedAlg      = 0;
                for (int tr = 0; tr < trysBeforeDivide; tr++)
                {
                    deleted = 0;

                    // Parallel.For(0, learnAlgorithms.Count(), i =>
                    usedAlg = 0;
                    for (int i = 0; i < learnAlgorithms.Count(); i++)
                    {
                        Console.WriteLine("F****d in Storage.Add(new a_Rules(Best))");
                        Storage.Add(new KnowlegeBaseSARules(Best));
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                        result.RulesDatabaseSet.Clear();
                        Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                        result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                        usedAlg++;
                        bool before_VAlue = true;
                        try
                        {
                            learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                            GC.Collect();
                            before_VAlue = false;
                            ValueLPercent.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
                            ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
                            ValueComplexity.Add(getComplexity(result));
                            ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                            double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                            Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];

                            if (double.IsNaN(temp))
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");



                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                                usedAlg--;
                            }
                        }
                        catch (Exception)
                        {
                            if (before_VAlue)
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                            }
                            else
                            {
                                Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");

                                ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                                ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                                ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                                ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                                Storage.RemoveAt(Storage.Count() - 1);
                            }
                        }

                        NSCount++;
                        Console.WriteLine("F****d in ResultShow");
                        ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() +
                                      "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                        //     i++;
                    }
                    //);
                    Console.WriteLine("F****d in deleted");

                    deleted  = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
                    usedAlg -= deleted;
                    Console.WriteLine("F****d in candidate");

                    candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);

                    if (candidate.Count() > 0)
                    {
                        mustToDivide = false; break;
                    }
                }

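                // No candidate survived after trysBeforeDivide attempts: shrink the acceptance step sizes by diviver and move on without accepting a new base.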
                if (mustToDivide)
                {
                    MessageBox.Show("Divided happend ");

                    sizePercent    = sizePercent / diviver;
                    sizeComplexity = sizeComplexity / diviver;
                    sizeInteraply  = sizeInteraply / diviver;
                    continue;
                }

                Console.WriteLine("F****d in SummaryGoods");

                SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);

                Console.WriteLine("F****d in indexofBest");
                int indexofBest = getNewBest(candidate, SummaryGoods);
                if (usedAsNext.ContainsKey(indexofBest))
                {
                    usedAsNext[indexofBest]++;
                }
                else
                {
                    usedAsNext.Add(indexofBest, 1);
                }

                Console.WriteLine("Best");
                Best = Storage[indexofBest];

                Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
                int toSaveCounter = NSCount - usedAlg;
                for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++)
                {
                    result.RulesDatabaseSet[0] = Storage[i];
                    saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
                    toSaveCounter++;
                }

                Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
                result.RulesDatabaseSet[0] = Best;

                Console.WriteLine("F****d in End");
                baseLearn      = result.approxLearnSamples(result.RulesDatabaseSet[0]);// ClassifyLearnSamples();
                baseComplexity = getComplexity(result);
                baseIntebility = getInterpreting(result, allowBorder, allowSqare);
                candidate.Clear();
                GC.Collect();
            }
            isEnd = true;
            Thread.Sleep(10000);
            result.RulesDatabaseSet[0].TermsSet.Trim();
            return(result);
        }
コード例 #17
0
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;

            int           iter = 0, i, j, count_terms, var = 0;
            int           count_cons;
            double        RMSE_best, cosFi, MSEbefore, MSEafter;
            int           Nd, variables, k = 1, best = 0;
            SAFuzzySystem result = Approx;
            int           type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Approx.approxLearnSamples(0);
            count_cons = Approx.RulesDatabaseSet[0].all_conq_of_rules.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Approx.LearnSamplesSet.CountVars;
            int[] terms = new int[variables];

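            // Clone the base rule set so every fragment has its own entry in RulesDatabaseSet and can be evaluated independently.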
            KnowlegeBaseSARules[] X = new KnowlegeBaseSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }
            RMSE_best = Approx.approxLearnSamples(0);
            for (int h = 0; h < count_terms; h++)
            {
                if (Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar == var)
                {
                    terms[var]++;
                }
                else
                {
                    terms[var + 1]++;
                    var++;
                }
            }
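            // Main search loop: while iter < exploration the fragments are scattered at random distances (exploration phase); afterwards distance and direction are derived from the two previously stored best positions (X_pred, RMSE_pred).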
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Approx.approxLearnSamples(0);
                    RMSE_pred[1] = Approx.approxLearnSamples(0);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                            k++;
                        }
                    }
                }
                for (i = 1; i <= Nd; i++)
                {
                    if (exploration > iter)
                    {
                        for (j = 1; j < count_populate; j++)
                        {
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            // distance formula corrected

                            d[j, i] = d[j - 1, i] * randn();

                            //double sluch = randn();
                            //if (sluch < 0) d[j, i] = d[j - 1, i] * (-1) * Math.Pow(sluch, 2);
                            //else d[j, i] = d[j - 1, i] * Math.Pow(sluch, 2);
                            explosion[j, i] = d[j, i] * cosFi;
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        d[0, i] = d2(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);

                        for (j = 1; j < count_populate; j++)
                        {
                            if ((X_pred[1, i] - X_pred[0, i]) != 0)
                            {
                                direction[j, i] = m(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);
                            }
                            else
                            {
                                direction[j, i] = 1;
                            }
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            double random;
                            random = randn();
                            if (random < 0)
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi * (-1);
                            }
                            else
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi;
                            }
                            if (sum2 > 50)
                            {
                                sum2 = 0;
                            }

                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]);
                            }

                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                            d[j, i] = d[j - 1, i] / Math.Pow(Math.E, (double)iter / (double)reduce_koef);
                        }
                    }
                }

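                // Copy each fragment's coordinates back into its rule base, then evaluate learn/test error and track the best fragment.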
                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Approx.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j]     = Approx.approxLearnSamples(j);
                    RMSE_tst[j] = Approx.approxTestSamples(j);
                    if (RMSE[j] < RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }
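                // Every 1000 iterations: tune the consequents of the best fragment with the adaptive least-squares method and keep the result only if the error does not get worse.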
                if ((iter != 0) && (iter % 1000 == 0))
                {
                    Adaptive_LSM LSM = new Adaptive_LSM();
                    MSEbefore = RMSE[best];
                    KnowlegeBaseSARules zeroSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    KnowlegeBaseSARules tempSolution = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[best]);
                    Approx   = LSM.TuneUpFuzzySystem(Approx, new NullConfForAll()) as SAFuzzySystem;
                    MSEafter = Approx.approxLearnSamples(0);
                    if (MSEafter > MSEbefore)
                    {
                        Approx.RulesDatabaseSet[0] = tempSolution;
                        RMSE2[best] = MSEbefore;
                    }
                    else
                    {
                        RMSE2[best] = MSEafter;
                        for (int p = 0; p < count_cons; p++)
                        {
                            cons_best[p] = Approx.RulesDatabaseSet[0].all_conq_of_rules[p];
                        }
                    }
                    if (RMSE2[best] < RMSE_best)
                    {
                        RMSE_best = RMSE2[best];
                    }
                    Approx.RulesDatabaseSet[best] = new KnowlegeBaseSARules(Approx.RulesDatabaseSet[0]);
                    Approx.RulesDatabaseSet[0]    = new KnowlegeBaseSARules(zeroSolution);
                    for (int z = 0; z < count_populate; z++)
                    {
                        for (int p = 0; p < count_cons; p++)
                        {
                            Approx.RulesDatabaseSet[z].RulesDatabase[p].Cons_DoubleOutput = cons_best[p];
                        }
                    }
                }
                k = 1;
                if (iter % 100 == 0)
                {
                    k = 1;
                }
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                        }
                        Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }

                if (iter % 10 == 0)
                {
                    if (RMSE_pred[1] > RMSE2[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE2[best];
                    }
                }
                else
                {
                    if (RMSE_pred[1] > RMSE[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE[best];
                    }
                }
            }

            return(result);
        }
コード例 #18
0
        public override SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf config)
        {
            SAFuzzySystem result = Approximate;

            if (result.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Something is wrong with the input data");
            }
            OptimizeTermShrinkAndRotateConf Config = config as OptimizeTermShrinkAndRotateConf;

            count_shrink = Config.OTSARCountShrinkVars;
            size_shrink  = Config.OTSARCountShrinkTerm;



            List <int> Varians_of_run_system = new List <int>();

            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = Approximate.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;
                if (i < count_shrink)
                {
                    Varians_of_run_system.Add(count_terms_for_var - size_shrink);
                }
                else
                {
                    Varians_of_run_system.Add(count_terms_for_var);
                }
            }

            Varians_of_run_system.Sort();
            TypeTermFuncEnum type_of_term = Approximate.RulesDatabaseSet[0].TermsSet[0].TermFuncType;

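            // Build the pool of candidate term-count layouts from the shrunk counts, generate a rule base for each and keep the one with the lowest learn error.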
            Generate_all_variant_in_pool(Varians_of_run_system);

            for (int i = 0; i < Pull_of_systems.Count; i++)
            {
                Approximate.RulesDatabaseSet.Clear();

                GeneratorRulesEveryoneWithEveryone.InitRulesEveryoneWithEveryone(result, type_of_term, Pull_of_systems[i].ToArray());
                Systems_ready_to_test.Add(Approximate.RulesDatabaseSet[0]);
                errors_of_systems.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            result.RulesDatabaseSet.Clear();
            result.RulesDatabaseSet.Add(Systems_ready_to_test[best_index]);
            Console.WriteLine(Pull_of_systems.Count());



            result.RulesDatabaseSet[0].TermsSet.Trim();
//            result.UnlaidProtectionFix();
            return(result);
        }
コード例 #19
0
        public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            Init(conf);
            KnowlegeBaseTSARules temp_c_Rule = new KnowlegeBaseTSARules(Classifier.RulesDatabaseSet[0]);
            TSAFuzzySystem       result      = Classifier;
            string file_string        = @"..\logs_" + result.TestSamplesSet.FileName + ".txt";
            string file_string_to_txt = @"..\result_" + result.TestSamplesSet.FileName + ".txt";

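            // Initialise the monkey population: the first four copies keep the original term parameters, the rest are perturbed with Gaussian noise (sigma = 5% of each parameter).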
            for (int t = 0; t < population_count; t++)
            {
                monkey[t] = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
                if (t > 3)
                {
                    for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++)
                    {
                        for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++)
                        {
                            //monkey[t].TermsSet[k].Parametrs[q] = StaticRandom.NextDouble() * (result.RulesDatabaseSet[0].TermsSet[k].Max - result.RulesDatabaseSet[0].TermsSet[k].Min);
                            monkey[t].TermsSet[k].Parametrs[q] = GaussRandom.Random_gaussian(rand, monkey[t].TermsSet[k].Parametrs[q], monkey[t].TermsSet[k].Parametrs[q] * 0.05);
                        }
                    }
                }

                double unlaidtest = result.ErrorLearnSamples(monkey[t]);
                //result.UnlaidProtectionFix(monkey[t]);
                //Console.WriteLine("Unlaid: " + result.ErrorLearnSamples(monkey[0]).ToString());
                if (double.IsNaN(unlaidtest) || double.IsInfinity(unlaidtest))
                {
                    result.UnlaidProtectionFix(monkey[t]);
                }
                // delete
                testvals[t] = result.ErrorLearnSamples(monkey[t]);
                Console.WriteLine("Begin: " + t.ToString() + " " + iter.ToString() + " " + testvals[t].ToString());
            }
            bestsolution       = new KnowlegeBaseTSARules(monkey.SelectBest(result, 1)[0]);
            bestsolutionnumber = result.ErrorLearnSamples(bestsolution);
            deltaLength        = result.RulesDatabaseSet[0].TermsSet.Sum(x => x.Parametrs.Length);
            if (debug)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                {
                    file.WriteLine(DateTime.Today.ToString() + "\t" + result.LearnSamplesSet.FileName);
                    file.WriteLine("Parameters:");
                    file.WriteLine("Population\t" + population_count.ToString());
                    file.WriteLine("Iteration count\t" + iter_amount.ToString());
                    file.WriteLine("Crawl count\t" + crawl_iter.ToString());
                    file.WriteLine("Jump count\t" + jump_iter.ToString());
                    file.WriteLine("Somersault count\t" + somersault_iter.ToString());
                    file.WriteLine("Crawl step\t" + step.ToString());    // crawl step
                    file.WriteLine("Jump step\t" + watch_jump_parameter.ToString());
                    file.WriteLine("Somersault left border\t" + somersault_interval_left.ToString());
                    file.WriteLine("Somersault right border\t" + somersault_interval_right.ToString());
                    file.WriteLine("\t\tMonkeys");
                    file.Write("Iterations\t");
                    for (int t = 0; t < population_count; t++)
                    {
                        file.Write("\t" + t);
                    }
                    file.WriteLine();
                    file.Write("0\tbegin");
                    for (int t = 0; t < population_count; t++)
                    {
                        file.Write("\t" + testvals[t].ToString());
                    }

                    // Excel insertions
                    // largest value in the table
                    file.WriteLine();
                }
            }


            //iter_amount = somersault_iter * (1 + jump_iter * (1 + crawl_iter));
            iter_amount = (((crawl_iter + jump_iter) * jump_iter) + somersault_iter) * somersault_iter;
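            // Main cycle: crawl (climb), local watch-jump and global somersault phases; the best solution is re-checked after every move.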
            for (int r = 0; r < somersault_iter; r++)
            {
                for (int t = 0; t < jump_iter; t++)
                {
                    for (int e = 0; e < crawl_iter; e++)
                    {
                        iter++;
                        oneClimb(result, deltaLength, step);
                        CheckForBest(result);
                        //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());

                        // debug
                        if (debug)
                        {
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                            {
                                file.Write(iter.ToString() + "\tcrawl");
                                for (int p = 0; p < population_count; p++)
                                {
                                    file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                                }
                                file.WriteLine();
                            }
                        }
                    }
                    for (int e = 0; e < jump_iter; e++)
                    {
                        iter++;
                        oneWatchJump(result);
                        //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
                        CheckForBest(result);
                        // debug
                        if (debug)
                        {
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                            {
                                file.Write(iter.ToString() + "\tlocaljump");
                                for (int p = 0; p < population_count; p++)
                                {
                                    file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                                }
                                file.WriteLine();
                            }
                        }
                    }
                }
                for (int e = 0; e < somersault_iter; e++)
                {
                    iter++;
                    oneGlobalJump(result);
                    CheckForBest(result);
                    // debug
                    if (debug)
                    {
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                        {
                            file.Write(iter.ToString() + "\tglobaljump");
                            for (int p = 0; p < population_count; p++)
                            {
                                file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                            }
                            file.WriteLine();
                        }
                    }
                    Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
                }
            }
            //Console.WriteLine(final_iter.ToString() + "/" + final_counter.ToString());
            //FOR VICTORY!!!
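            // Final refinement: repeat the three phases while shrinking the crawl, jump and somersault parameters by 10% per pass.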
            while ((final_iter < final_counter) && (last == true))
            {
                step *= 0.9;
                watch_jump_parameter      *= 0.9;
                somersault_interval_left  *= 0.9;
                somersault_interval_right *= 0.9;
                for (int r = 0; r < somersault_iter; r++)
                {
                    oneClimb(result, deltaLength, step);
                    CheckForBest(result);
                    iter++;
                }
                for (int t = 0; t < jump_iter; t++)
                {
                    oneWatchJump(result);
                    CheckForBest(result);
                    iter++;
                }
                for (int e = 0; e < crawl_iter; e++)
                {
                    oneGlobalJump(result);
                    CheckForBest(result);
                    iter++;
                }
                Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
            }

            /*  for (int t = 0; t < population_count; t++)
             *    if (result.ErrorLearnSamples(monkey[best]) < result.ErrorLearnSamples(monkey[t]))
             *        best = t; */
            CheckForBest(result);
            if (bestsolutionnumber <= result.ErrorLearnSamples(result.RulesDatabaseSet[0]))
            {
                result.RulesDatabaseSet[0] = bestsolution;
            }
            iter = 0;
            if (debug)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                {
                    file.WriteLine("Results\t" + result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
                }
            }
            if (totxt)
            {
                using (System.IO.StreamWriter file_result = new System.IO.StreamWriter(file_string_to_txt, true))
                {
                    file_result.WriteLine(result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
                }
            }
            return(result);
        }
コード例 #20
0
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
            iskl_prizn      = "";
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            priznaki_usech  = ((Param)conf).Усечённые_признаки;
            iter_descrete   = ((Param)conf).Итерации_дискр_алг;

            int iter = 0, iter2, i, j, count_terms;//, count_iter ;
            //     int count_best2 = 0;//, best_pred ;
            double        RMSE_best, cosFi, RMSE_best2;
            int           Nd, variables, k = 1, best2 = 0;
            PCFuzzySystem result = Classifier;
            int           type   = Classifier.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Classifier.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0]);
            RMSE_best2 = Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0]);
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            variables   = Classifier.LearnSamplesSet.CountVars;
            count_terms = Classifier.RulesDatabaseSet[0].TermsSet.Count;
            int[] terms = new int[variables];

            double[] X_best2 = new double[variables];
            double[,] d3      = new double[count_populate, variables];
            double[,] priznak = new double[count_populate, variables];
            Random random = new Random(); // one shared generator: a new time-seeded Random per iteration would repeat values
            for (i = 0; i < variables; i++)
            {
                if (random.NextDouble() < 0.5)
                {
                    priznak[0, i] = 1;
                }
                else
                {
                    priznak[0, i] = 0;
                }
            }
            KnowlegeBasePCRules[] X = new KnowlegeBasePCRules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBasePCRules(Classifier.RulesDatabaseSet[0]);
                Classifier.RulesDatabaseSet.Add(X[s]);
            }

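            // Discrete phase: each fragment carries a binary feature mask (priznak); descret() converts the generated coordinate into the probability of enabling a feature, and the best mask by classification accuracy is propagated.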
            for (iter2 = 0; iter2 < iter_descrete; iter2++)
            {
                best2 = 0;
                if (iter == 0)
                {
                    for (k = 0; k < variables; k++)
                    {
                        d3[0, k] = RandomNext(Classifier.LearnSamplesSet.InputAttributes[k].Min, Classifier.LearnSamplesSet.InputAttributes[k].Max);
                    }
                }
                for (i = 0; i < variables; i++)
                {
                    for (j = 1; j < count_populate; j++)
                    {
generate:
                        d3[j, i]      = d3[j - 1, i] * randn();
                        priznak[j, i] = d3[j, i] * cosFi;

                        if ((priznak[j, i] < Classifier.LearnSamplesSet.InputAttributes[i].Min) || (priznak[j, i] > Classifier.LearnSamplesSet.InputAttributes[i].Max))
                        {
                            goto generate;
                        }
                        if (random.NextDouble() < descret(priznak[j, i]))
                        {
                            priznak[j, i] = 1;
                        }
                        else
                        {
                            priznak[j, i] = 0;
                        }
                    }
                }

                for (j = 1; j < count_populate; j++)
                {
                    for (int h = 0; h < variables; h++)
                    {
                        if (priznak[j, h] == 1)
                        {
                            Classifier.AcceptedFeatures[h] = true;
                        }
                        else
                        {
                            Classifier.AcceptedFeatures[h] = false;
                        }
                    }
                    RMSE2[j] = Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[j]);
                    if (RMSE2[j] > RMSE_best2)
                    {
                        RMSE_best2 = RMSE2[j];
                        best2      = j;
                    }
                }

                for (k = 0; k < variables; k++)
                {
                    priznak[0, k] = priznak[best2, k];
                }
            }

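            // Apply the best mask found and record the indices of the excluded features in iskl_prizn.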
            for (k = 0; k < variables; k++)
            {
                if (priznak[best2, k] == 1)
                {
                    Classifier.AcceptedFeatures[k] = true;
                }
                else
                {
                    Classifier.AcceptedFeatures[k] = false;
                    iskl_prizn += (k + 1).ToString() + " ";
                }
            }

            return(result);
        }
コード例 #21
0
ファイル: PSOHybrideOcean.cs プロジェクト: CDMMKY/fuzzy_core
 public PCFuzzySystem TuneUpFuzzySystem(PittsburgHybride Ocean, PCFuzzySystem Approximate, ILearnAlgorithmConf conf)
 {
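     // Delegate the tuning to the base algorithm, then store the chosen discoverer in the shared hybrid ocean.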
     HybrideOcean = Ocean;
     base.TuneUpFuzzySystem(Approximate, conf);
     Ocean.Store(chooseDiscovers(1), this.ToString());
     result.RulesDatabaseSet[0].TermsSet.Trim();
     return(result);
 }
コード例 #22
0
 public SAFuzzySystem TuneUpFuzzySystem(SingletonHybride Ocean, SAFuzzySystem Approximate, ILearnAlgorithmConf conf)
 {
     HybrideOcean = Ocean;
     base.TuneUpFuzzySystem(Approximate, conf);
     Ocean.Store(chooseDiscovers(1), this.ToString());
     return(result);
 }
コード例 #23
0
ファイル: Takagi_approx.cs プロジェクト: CDMMKY/fuzzy_core
        public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approx, ILearnAlgorithmConf conf)
        {
            iskl_prizn      = "";
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;
            priznaki_usech  = ((Param)conf).Усечённые_признаки;

            int    iter = 0, i, j, count_terms;
            double cosFi;
            int    Nd, variables, k = 1, best = 0;

            string[] buf;
            buf = priznaki_usech.Split(' ');
            TSAFuzzySystem result = Approx;
            int            type   = Approx.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Approx.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi = Math.Cos(2 * Math.PI / count_populate);
            double RMSE_best = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);

            double[] RMSE      = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            count_terms = Approx.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Approx.LearnSamplesSet.CountVars;
            double[] X_best2 = new double[variables];
            double[,] priznak = new double[count_populate, variables];
            for (i = 0; i < variables; i++)
            {
                priznak[0, i] = 1;
                X_best2[i]    = 1;
            }
            KnowlegeBaseTSARules[] X = new KnowlegeBaseTSARules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBaseTSARules(Approx.RulesDatabaseSet[0]);
                Approx.RulesDatabaseSet.Add(X[s]);
            }

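            // Disable the features whose 1-based indices are listed in the truncated-features parameter and record every currently disabled feature in iskl_prizn.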
            if (buf[0] != "")
            {
                for (k = 0; k < buf.Count(); k++)
                {
                    Approx.AcceptedFeatures[int.Parse(buf[k]) - 1] = false;
                    priznak[0, int.Parse(buf[k]) - 1] = 0;
                    //iskl_prizn += buf[k] + " ";
                }
            }
            for (k = 0; k < variables; k++)
            {
                if (Approx.AcceptedFeatures[k] == false)
                {
                    iskl_prizn += (k + 1).ToString() + " ";
                }
            }
            RMSE_best2 = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
            //for (j = 0; j < count_populate; j++)
            //{
            //    for (int h = 0; h < variables; h++)
            //    {
            //        if (priznak[0, h] == 1) Approx.AcceptedFeatures[h] = true;
            //        else
            //        {
            //            Approx.AcceptedFeatures[h] = false;
            //            for (int h1 = 0; h1 < Approx.RulesDatabaseSet[0].RulesDatabase.Count(); h1++)
            //            {
            //                Approx.RulesDatabaseSet[j].RulesDatabase[h1].RegressionConstantConsequent[h] = 0;
            //            }
            //        }
            //    }
            //}
            countRules = Approx.RulesDatabaseSet[0].RulesDatabase.Count();
            RMSE_best  = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
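            // Shrapnel search over the term parameters: the first iteration seeds the fragments from the current rule base, later iterations scatter new coordinates around the best fragment.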
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    RMSE_pred[1] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[0]);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Min, Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Max);
                            k++;
                        }
                    }
                }
                for (i = 1; i <= Nd; i++)
                {
                    for (j = 1; j < count_populate; j++)
                    {
                        int sum = 0, sum2 = 0;
generate:
                        sum++;
                        sum2++;

                        d[j, i] = d[j - 1, i] * randn();

                        explosion[j, i] = d[j, i] * cosFi;
                        if (type == 2)
                        {
                            if (sum > 20)
                            {
                                if ((i + 1) % type == 0)
                                {
                                    if (i != 1)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i] - shrapnel[j, i - 2]);
                                    }
                                }
                                if (sum2 > 1000)
                                {
                                    sum = 0; sum2 = 0;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                            if (i != 1)
                            {
                                if (((i + 1) % 2 == 0) && (shrapnel[j, i] < shrapnel[j, i - 2]) && (Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + 1) % 2 == 0) && (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i) / type].NumVar].Min))
                            {
                                goto generate;
                            }
                            if (((i + 1) % 2 == 0) && (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i) / type].NumVar].Max))
                            {
                                goto generate;
                            }
                            if ((i % 2 == 0) && (shrapnel[j, i] < 0))
                            {
                                goto generate;
                            }
                        }
                        if (type != 2)
                        {
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }
                        }

                        if (type != 2)
                        {
                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min; goto exit;
                            }

                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumVar)
                            {
                                shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Min; goto exit;
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != (variables - 1))
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max; goto exit;
                                }
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumVar)
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max; goto exit;
                                }
                            }
                            if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumVar].Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Max) || (shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Min) || (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumVar].Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar != 0)
                                {
                                    if (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }

                        //    else
                        //    {
                        //    if (i > 1)
                        //    {
                        //        if ((i%2!=0) && (shrapnel[j, i] < shrapnel[j, i - 2]) && (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / 2)].NumberOfInputVar == Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / 2)].NumberOfInputVar))
                        //        {
                        //            goto generate;
                        //        }
                        //        if((i%2!=0) && (shrapnel[j, i] > Approx.LearnSamplesSet.InputAttribute(Approx.RulesDatabaseSet[0].TermsSet[(int)(i / 2)].NumberOfInputVar).Max) && (Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / 2)].NumberOfInputVar != Approx.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / 2)].NumberOfInputVar))
                        //        {
                        //            goto generate;
                        //        }
                        //    }
                        //}
                    }
                }

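                // Write each shrapnel position back into the term parameters of its knowledge base.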
                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Approx.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
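                // Evaluate every candidate and remember the one with the lowest learning RMSE.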
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j] = Approx.approxLearnSamples(Approx.RulesDatabaseSet[j]);
                    if (RMSE[j] < RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }

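                // Promote the best shrapnel to slot 0 and copy it into the base rule database;
                // the direction vector is re-randomised when the exploration counter exceeds iter.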
                k = 1;
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Min, Approx.LearnSamplesSet.InputAttributes[Approx.RulesDatabaseSet[0].TermsSet[h].NumVar].Max);
                        }
                        Approx.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }

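                // Keep a two-entry history of the best positions and errors (X_pred / RMSE_pred),
                // comparing against RMSE2 when iter is a multiple of 10 and against RMSE otherwise.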
                if (iter % 10 == 0)
                {
                    if (RMSE_pred[1] > RMSE2[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE2[best];
                    }
                }
                else
                {
                    if (RMSE_pred[1] > RMSE[best])
                    {
                        for (k = 1; k <= Nd; k++)
                        {
                            X_pred[0, k] = X_pred[1, k];
                            X_pred[1, k] = shrapnel[best, k];
                        }
                        RMSE_pred[0] = RMSE_pred[1];
                        RMSE_pred[1] = RMSE[best];
                    }
                }
            }

            return(result);
        }
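The generation step above is essentially rejection sampling: each new offset d[j, i] is the previous offset scaled by a normal deviate, the explosion component is that offset projected through cosFi, and the candidate shrapnel coordinate is the centre (the best solution held in row 0) plus the explosion; candidates that leave the attribute range or break the ordering constraints are redrawn via the goto generate jumps. A minimal, self-contained sketch of that idea (class and helper names are illustrative, not from the repository):

using System;

static class ShrapnelSketch
{
    static readonly Random Rand = new Random();

    // Box-Muller standard normal deviate, standing in for the algorithm's randn().
    static double Randn()
    {
        double u1 = 1.0 - Rand.NextDouble();
        double u2 = Rand.NextDouble();
        return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
    }

    // One coordinate of a new shrapnel piece scattered around the current best value.
    public static double NextCoordinate(double centre, double prevOffset, double cosFi,
                                        double min, double max, out double newOffset)
    {
        double candidate;
        do
        {
            newOffset = prevOffset * Randn();          // d[j, i] = d[j - 1, i] * randn()
            candidate = centre + newOffset * cosFi;    // shrapnel = centre + explosion
        } while (candidate < min || candidate > max);  // redraw values outside the attribute range
        return candidate;
    }
}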
コード例 #24
0
ファイル: DynamicTune.cs プロジェクト: CDMMKY/fuzzy_core
        public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approximate, ILearnAlgorithmConf conf) // + override
        {
            result = Approximate;


            List <KnowlegeBaseSARules> Archive = new List <KnowlegeBaseSARules>();
            List <double> ErrorsArchive        = new List <double>();

            var config = (DynamicTuneConf)conf;

            maxError  = config.MaxError;
            RuleCount = config.RulesCount;
            TryCount  = config.TryCount;
            double error        = result.RMSEtoMSEdiv2forLearn(result.approxLearnSamples(result.RulesDatabaseSet[0]));
            var    kbToOptimize = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
            var    kbBest       = new KnowlegeBaseSARules(kbToOptimize);
            double errorBefore  = Double.MaxValue;

            result.UnlaidProtectionFix(kbToOptimize);

            List <input_space> variable_spaces = new List <input_space>();

            for (int i = 0; i < result.LearnSamplesSet.InputAttributes.Count; i++)
            {
                List <Term> terms_of_variable = new List <Term>();
                terms_of_variable = kbToOptimize.TermsSet.Where(term => term.NumVar == i).ToList();
                variable_spaces.Add(new input_space(terms_of_variable, i));
            }

            int indexRegion = -1,
                indexVar    = -1,
                number_of_input_variables = variable_spaces.Count;

            int tryCount = 0;



            while (error > maxError)
            {
                if (Double.IsInfinity(error))
                {
                    throw new Exception("Something went wrong, error is Infinity, region: " + indexRegion);
                }
                if (Double.IsNaN(error))
                {
                    throw new Exception("Something went wrong, error is NaN, region: " + indexRegion);
                }

                region_side[][] sides = new region_side[number_of_input_variables][];
                for (int i = 0; i < number_of_input_variables; i++)
                {
                    sides[i] = variable_spaces[i].get_region_sides();
                }
                var cartresult = CartesianProduct.Get(sides);

                List <region2> regions = new List <region2>();

                foreach (var x in cartresult)
                {
                    regions.Add(new region2(x.ToList(), result, variable_spaces));
                }

                List <double> region_errors = regions.Select(x => x.region_error()).ToList();
                indexRegion = region_errors.IndexOf(region_errors.Max());

                for (int i = 0; i < region_errors.Count; i++)
                {
                    if (Double.IsNaN(region_errors[i]) || Double.IsInfinity(region_errors[i]) ||
                        Double.IsNegativeInfinity(region_errors[i]) || Double.IsPositiveInfinity(region_errors[i]))
                    {
                        region_errors[i] = 0;
                    }
                }

                List <double> variable_errors = regions[indexRegion].variable_errors();
                bool          check1          = false;
                for (int i = 1; i < variable_errors.Count; i++)
                {
                    if (variable_errors[i - 1] != variable_errors[i])
                    {
                        check1 = true;
                        break;
                    }
                }
                if (!check1)
                {
                    indexVar = StaticRandom.Next(variable_errors.Count - 1);
                }
                else
                {
                    indexVar = variable_errors.IndexOf(variable_errors.Max());
                }

                Term new_term = regions[indexRegion].new_term(indexVar);
                result.RulesDatabaseSet[0] = kbToOptimize;
                kbToOptimize.TermsSet.Add(new_term);

                // Rules (CHECK REFERENCE TYPES)
                int @var = indexVar;

                var rulesLeft = kbToOptimize.RulesDatabase.Where(
                    rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].left)).ToList();
                var rulesRight = kbToOptimize.RulesDatabase.Where(
                    rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].right)).ToList();
                for (int j = 0; j < rulesLeft.Count; j++)
                {
                    int[] order = new int[rulesLeft[j].ListTermsInRule.Count];
                    for (int k = 0; k < rulesLeft[j].ListTermsInRule.Count; k++)
                    {
                        Term temp_term = rulesLeft[j].ListTermsInRule[k];
                        if (temp_term == regions[indexRegion].sides[indexVar].left)
                        {
                            temp_term = new_term;
                        }
                        order[k] = kbToOptimize.TermsSet.FindIndex(x => x == temp_term);
                    }
///!!!!
                    double temp_approx_Values = kbToOptimize.RulesDatabase[j].IndependentConstantConsequent;

                    /*        double[] temp_approx_RegressionConstantConsequent =
                     *          kbToOptimize.RulesDatabase[j].RegressionConstantConsequent.Clone() as double[];
                     */


                    SARule temp_rule = new SARule(
                        kbToOptimize.TermsSet, order, temp_approx_Values);

                    // double[] dC = null;
//!!!
                    temp_rule.IndependentConstantConsequent = KNNConsequent.NearestApprox(result, temp_rule.ListTermsInRule.ToList());



                    kbToOptimize.RulesDatabase.Add(temp_rule);


//!!!
                    rulesLeft[j].IndependentConstantConsequent = KNNConsequent.NearestApprox(result, rulesLeft[j].ListTermsInRule.ToList());
                    //           rulesLeft[j].RegressionConstantConsequent = (double[])dC.Clone();
                }

                foreach (var rule in rulesRight)
                {
//!!!

                    rule.IndependentConstantConsequent = KNNConsequent.NearestApprox(
                        result, rule.ListTermsInRule.ToList());
                    //               rule.RegressionConstantConsequent = dC;
                }

                variable_spaces[indexVar].terms.Add(new_term);
                variable_spaces[indexVar].terms.Sort(new CompararerByPick());

                // Re-evaluate the system's error
                error = result.RMSEtoMSEdiv2forLearn(result.ErrorLearnSamples(kbToOptimize));

                if ((kbToOptimize.RulesDatabase.Count > config.RulesCount))
                {
                    break;
                }

#if Console
                Console.WriteLine(error + " " + kbToOptimize.TermsSet.Count + " terms\n");
                for (int i = 0; i < variable_spaces.Count; i++)
                {
                    Console.WriteLine(variable_spaces[i].terms.Count + " термов по " + i + "му параметру\n");
                }
#endif
                result.RulesDatabaseSet[0] = kbToOptimize;
                // Keep the best knowledge base in the first slot
                if (error < errorBefore)
                {
                    kbBest      = new KnowlegeBaseSARules(kbToOptimize);
                    errorBefore = error;
                    tryCount    = 0;
                }
                else
                {
                    tryCount++;
                }
                if (tryCount > TryCount)
                {
                    break;
                }
            }


            result.RulesDatabaseSet[0] = kbBest;
            RuleCount = kbBest.RulesDatabase.Count;
            TryCount  = tryCount;

            return(result);
        }
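DynamicTune grows the rule base region by region: every input variable contributes a list of interval sides, the Cartesian product of those lists enumerates all candidate regions, the region with the largest error is split by adding a term on its worst variable, and KNNConsequent.NearestApprox re-estimates the consequents of the rules touching the split. A generic Cartesian-product helper in the spirit of CartesianProduct.Get (a sketch, not the repository implementation):

using System.Collections.Generic;
using System.Linq;

static class CartesianSketch
{
    // Combines one list of region sides per input variable into every possible region.
    public static IEnumerable<IEnumerable<T>> Get<T>(IEnumerable<IEnumerable<T>> sequences)
    {
        IEnumerable<IEnumerable<T>> seed = new[] { Enumerable.Empty<T>() };
        return sequences.Aggregate(seed,
            (acc, seq) => acc.SelectMany(partial => seq,
                                         (partial, item) => partial.Concat(new[] { item })));
    }
}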
コード例 #25
0
        public override SAFuzzySystem TuneUpFuzzySystem(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, ILearnAlgorithmConf config)
        {
            theFuzzySystem = Approximate;
            if (theFuzzySystem.RulesDatabaseSet.Count == 0)
            {
                throw new System.FormatException("Что то не то с входными данными");
            }
            OptimizeTermShrinkHardCoreConf Config = config as OptimizeTermShrinkHardCoreConf;

            count_shrink = Config.OTSHCCountShrinkTerm;


            for (int i = 0; i < Approximate.CountFeatures; i++)
            {
                int count_terms_for_var = Approximate.RulesDatabaseSet[0].TermsSet.FindAll(x => x.NumVar == i).Count;

                if (count_terms_for_var >= count_shrink)
                {
                    int        shrinkcounter  = count_shrink;
                    List <int> Varians_of_cut = new List <int>();
                    for (int j = 0; j < count_terms_for_var; j++)
                    {
                        if (shrinkcounter > 0)
                        {
                            Varians_of_cut.Add(0);
                        }
                        else
                        {
                            Varians_of_cut.Add(1);
                        }
                        shrinkcounter--;
                    }
                    Generate_all_variant_in_pool(Varians_of_cut);


                    for (int j = 0; j < Pull_of_systems.Count; j++)
                    {
                        KnowlegeBaseSARules current = MakeCut(Approximate.RulesDatabaseSet[0], Pull_of_systems[j], i);
                        Systems_ready_to_test.Add(current);
                        errors_of_systems.Add(theFuzzySystem.approxLearnSamples(current));
                    }
                    Pull_of_systems.Clear();
                }
            }

            int best_index = errors_of_systems.IndexOf(errors_of_systems.Min());

            theFuzzySystem.RulesDatabaseSet[0] = Systems_ready_to_test[best_index];

            return(theFuzzySystem);
        }
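OptimizeTermShrinkHardCore enumerates, for every variable that still has at least count_shrink terms, all ways of marking count_shrink of those terms for removal (a 0 in Varians_of_cut), builds each reduced knowledge base with MakeCut, and finally keeps the candidate with the smallest learning error. Assuming Generate_all_variant_in_pool simply produces every distinct placement of the removals, an equivalent mask enumerator could look like this sketch:

using System.Collections.Generic;

static class CutMaskSketch
{
    // Every 0/1 mask of length n with exactly 'cut' zeros; a 0 marks a term scheduled for removal.
    public static IEnumerable<int[]> Masks(int n, int cut)
    {
        return Build(new int[n], 0, cut);
    }

    static IEnumerable<int[]> Build(int[] mask, int pos, int zerosLeft)
    {
        if (pos == mask.Length)
        {
            if (zerosLeft == 0) yield return (int[])mask.Clone();
            yield break;
        }
        if (zerosLeft > 0)                        // place a removal at this position
        {
            mask[pos] = 0;
            foreach (var m in Build(mask, pos + 1, zerosLeft - 1)) yield return m;
        }
        if (mask.Length - pos - 1 >= zerosLeft)   // keep the term and still fit the remaining removals
        {
            mask[pos] = 1;
            foreach (var m in Build(mask, pos + 1, zerosLeft)) yield return m;
        }
    }
}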
コード例 #26
0
 public SAFuzzySystem TuneUpFuzzySystem(SingletonHybride Ocean, SAFuzzySystem Approximate, ILearnAlgorithmConf conf)
 {
     HybrideOcean = Ocean;
     base.TuneUpFuzzySystem(Approximate, conf);
     Ocean.Store(chooseDiscovers(1), this.ToString());
     theFuzzySystem.RulesDatabaseSet[0].TermsSet.Trim();
     return(theFuzzySystem);
 }
コード例 #27
0
ファイル: ChoosePlus.cs プロジェクト: CDMMKY/fuzzy_core
 public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem FSystem, ILearnAlgorithmConf conf)
 {
     return(UniversalMethod(FSystem, conf) as TSAFuzzySystem);
 }
コード例 #28
0
ファイル: SSODiscret.cs プロジェクト: CDMMKY/fuzzy_core
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            result = Classify;
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            numberOfFeatures = result.CountFeatures;
            Init(conf);
            HeadLeader       = new bool[numberOfFeatures];
            VelocityVector   = new bool[numberOfFeatures];
            VelocityVectorLL = new bool[numberOfFeatures];
            VelocityVectorHL = new bool[numberOfFeatures];

            SetPopulation();

            LocalLeaders      = new List <bool[]>();
            ExplorerParticles = new List <bool[]>();
            AimlessParticles  = new List <bool[]>();

            iter = 0;
            while (iter < MaxIter)
            {
                SortPopulation();

                SetRoles();

                ChangeExplorersPositions();
                ChangeAimlessPositions();

                DiscardRoles();

                iter++;
            }

            SortPopulation();

            result.AcceptedFeatures = Population[0];
            for (int j = 0; j < Population[0].Length; j++)
            {
                if (Population[0][j])
                {
                    Console.Write("1 ");
                }
                else
                {
                    Console.Write("0 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            return(result);
        }
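The discrete SSO variant treats each particle as a Boolean feature mask; leaders, explorers and aimless particles are reassigned on every iteration, and the best mask ends up in AcceptedFeatures before the learn/test rates are printed. ChangeExplorersPositions itself is not shown here; a common binary-move rule (an assumption given only to illustrate the idea, not the repository's code) copies differing bits from a leader with some probability:

using System;

static class BinaryMoveSketch
{
    static readonly Random Rand = new Random();

    public static bool[] MoveTowards(bool[] particle, bool[] leader, double copyProbability)
    {
        var next = (bool[])particle.Clone();
        for (int j = 0; j < next.Length; j++)
        {
            // Only bits where the particle differs from the leader are allowed to flip.
            if (next[j] != leader[j] && Rand.NextDouble() < copyProbability)
            {
                next[j] = leader[j];
            }
        }
        return next;
    }
}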
コード例 #29
0
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Class, ILearnAlgorithmConf conf) // this is where your algorithm performs the optimization
        {
            theFuzzySystem = Class;
            //    Console.WriteLine(theFuzzySystem.RulesDatabaseSet[0].TermsSet.Count);
            iterMax = ((gsa_conf)conf).Количество_итераций;
            MCount  = ((gsa_conf)conf).Количество_частиц;
            G0      = ((gsa_conf)conf).Гравитационная_постоянная;
            alpha   = ((gsa_conf)conf).Коэффициент_уменьшения;
            epsilon = ((gsa_conf)conf).Малая_константа;
            X       = new KnowlegeBasePCRules[MCount];
            Errors  = new double[MCount];
            mass    = new double[MCount];

            temp_c_Rule = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);
            X[0]        = temp_c_Rule;
            Errors[0]   = theFuzzySystem.ErrorLearnSamples(X[0]);

            ErrorZero    = Errors[0];
            ErrorBest    = Errors[0];
            BestSolution = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);

            R     = new double[MCount][, , ];
            speed = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];
            for (int i = 0; i < MCount; i++)
            {
                R[i] = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];
            }
            RR = new double[MCount, MCount];
            a  = new double[MCount, X[0].TermsSet.Count, X[0].TermsSet[0].Parametrs.Count()];

            for (int i = 1; i < MCount; i++)
            {
                temp_c_Rule = new KnowlegeBasePCRules(theFuzzySystem.RulesDatabaseSet[0]);
                X[i]        = temp_c_Rule;
                for (int j = 0; j < X[i].TermsSet.Count; j++)
                {
                    for (int k = 0; k < X[i].TermsSet[j].Parametrs.Count(); k++)
                    {
                        X[i].TermsSet[j].Parametrs[k] = GaussRandom.Random_gaussian(rand, X[i].TermsSet[j].Parametrs[k], 0.1 * (X[i].TermsSet[j].Parametrs[k])) + theFuzzySystem.LearnSamplesSet.InputAttributes[X[i].TermsSet[j].NumVar].Scatter * 0.05;
                    }
                }

                //theFuzzySystem.RulesDatabaseSet.Add(X[i]);
                //theFuzzySystem.UnlaidProtectionFix(theFuzzySystem.RulesDatabaseSet.Count - 1);
                //Errors[i] = theFuzzySystem.ErrorLearnSamples(X[i]);
                //X[i] = theFuzzySystem.RulesDatabaseSet[theFuzzySystem.RulesDatabaseSet.Count - 1];
                //theFuzzySystem.RulesDatabaseSet.Remove(X[i]);
                theFuzzySystem.UnlaidProtectionFix(X[i]);
                Errors[i] = theFuzzySystem.ErrorLearnSamples(X[i]);
            }

            for (int iter = 0; iter < iterMax; iter++)
            {
                //g(t) = G(0)*e^(-a*t/T);
                G = G0 * Math.Pow(Math.E, ((-1) * alpha * iter / iterMax));

                algorithm();

                for (int r = 0; r < MCount; r++)
                {
                    theFuzzySystem.UnlaidProtectionFix(X[r]);
                    Errors[r] = theFuzzySystem.ErrorLearnSamples(X[r]);
                }

                minValue  = Errors.Min();
                iminIndex = Errors.ToList().IndexOf(minValue);

                if (minValue < ErrorBest)
                {
                    ErrorBest    = minValue;
                    BestSolution = new KnowlegeBasePCRules(X[iminIndex]);
                }
            }
            //theFuzzySystem.RulesDatabaseSet[0] = BestSolution;

            if (ErrorBest < ErrorZero)
            {
                theFuzzySystem.RulesDatabaseSet[0] = BestSolution;
            }

            return(theFuzzySystem);
        }
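The gravitational search above decays the gravitational constant as G(t) = G0 * e^(-alpha * t / T) and, inside algorithm(), derives particle masses from the learning errors before computing forces, accelerations and velocities. The decay plus the textbook GSA mass formula (the mass part is an assumption about algorithm()'s internals) can be sketched as:

using System;
using System.Linq;

static class GsaSketch
{
    // G(t) = G0 * e^(-alpha * t / T)
    public static double Gravity(double g0, double alpha, int iter, int iterMax)
    {
        return g0 * Math.Exp(-alpha * iter / (double)iterMax);
    }

    // Lower learning error -> larger normalised mass (standard GSA formulation).
    public static double[] Masses(double[] errors)
    {
        double worst = errors.Max(), best = errors.Min();
        var raw = errors.Select(e => worst == best ? 1.0 : (worst - e) / (worst - best)).ToArray();
        double sum = raw.Sum();
        return raw.Select(m => m / sum).ToArray();
    }
}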
コード例 #30
0
        // main computations
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
            rand   = new Random();
            result = Classify;
            Init(conf);
            SetPopulation();
            KnowlegeBasePCRules BEST = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            double bestError         = result.ErrorLearnSamples(BEST);

            // clear the console

#if debug
            Console.Clear();
#endif
            // run the iterations
            for (int it = 0; it < iter; it++)
            {
#if debug
                // print the iteration number
                Console.Write("Итерация __№__ = ");
                Console.WriteLine(it);
#endif
                // compute the fitness values
                Population = ListPittsburgClassifierTool.SortRules(Population, result);
                double[] K = new double[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    K[i] = result.ErrorLearnSamples(Population[i]);
#if debug
                    Console.Write("Значние  K[i1] = ");
                    Console.WriteLine(K[i]);
#endif
                    if (double.IsNaN(K[i]) || double.IsInfinity(K[i]))
                    {
                        result.UnlaidProtectionFix(Population[i]);
                        K[i] = result.ErrorLearnSamples(Population[i]);

#if debug
                        Console.Write("Значние  K[i2] = ");
                        Console.WriteLine(K[i]);
#endif
                    }
                }
                Kworst = K.Max();
                if (double.IsNaN(Kworst) || double.IsInfinity(Kworst))
                {
                    int iworst = K.ToList().IndexOf(Kworst);
 #if debug
                    Console.Write("Значние iworst = ");
                    Console.WriteLine(iworst);
#endif
                }

#if debug
                // print Kworst
                Console.Write("Значние KWorst = ");
                Console.WriteLine(Kworst);
#endif
                Kbest = K.Min();
#if debug
                // print Kbest
                Console.Write("Значние Kbest = ");
                Console.WriteLine(Kbest);
#endif
                int ibest = K.ToList().IndexOf(Kbest);
#if debug
                // print ibest
                Console.Write("Значние ibest = ");
                Console.WriteLine(ibest);
#endif
                // iterate over the fitness values
                // compute D
                double dit;
                dit = it;
                double diter;
                diter = iter;

                double D = (dmax * (rand.NextDouble() * 2 - 1) * (dit / diter));

                // compute rand1 for D
                double rand1;
                rand1 = D / (dmax * (it) / iter);
#if debug
                // print rand1 for D
                Console.Write("Значение Drand = ");
                Console.WriteLine(rand1);

                // print D
                Console.Write("Значение __D__ = ");
                Console.WriteLine(D);
#endif
                // compute Xfood
                double divide = K.Select(x => 1 / x).ToList().Sum();
                var    Xfood  = new KnowlegeBasePCRules(Population[0]);
                for (int t = 0; t < Xfood.TermsSet.Count; t++)
                {
                    for (int p = 0; p < Xfood.TermsSet[t].CountParams; p++)
                    {
                        Xfood.TermsSet[t].Parametrs[p] = 0;

                        for (int i = 0; i < Population.Length; i++)
                        {
                            Xfood.TermsSet[t].Parametrs[p] += Population[i].TermsSet[t].Parametrs[p] / K[i];
#if debug
                            // print Xfood
                            Console.Write("Значение Xfood = ");
                            Console.WriteLine(Xfood.TermsSet[t].Parametrs[p]);
#endif
                        }
                        Xfood.TermsSet[t].Parametrs[p] /= divide;
                    }
                }
#if debug
                // print divide
                Console.Write("Значние divide = ");
                Console.WriteLine(divide);
#endif
                // compute Kfood
                double Kfood = result.ErrorLearnSamples(Xfood);
                if (double.IsNaN(Kfood) || double.IsInfinity(Kfood))
                {
                    result.UnlaidProtectionFix(Xfood);
                    Kfood = result.ErrorLearnSamples(Xfood);
                }
#if debug
                // print Kfood
                Console.Write("Значение Kfood = ");
                Console.WriteLine(Kfood);
#endif
                // compute Cfood
                double Cfood = 2 * (1 - (dit / diter));
#if debug
                // print Cfood
                Console.Write("Значение Cfood = ");
                Console.WriteLine(Cfood);
#endif
                // compute Bfood
                KnowlegeBasePCRules[] Bfood = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    Bfood[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofifood = CalcKroof(K[i], Kfood);
                    KnowlegeBasePCRules Xroofifood = new KnowlegeBasePCRules(CalcXroof(Population[i], Xfood));
                    for (int t = 0; t < Bfood[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < Bfood[i].TermsSet[t].CountParams; p++)
                        {
                            Bfood[i].TermsSet[t].Parametrs[p] = Cfood * KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                            // print Bfood
                            Console.Write("Значение Bfood = ");
                            Console.WriteLine(Bfood[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute Bbest
                KnowlegeBasePCRules[] Bbest = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    Bbest[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofifood = CalcKroof(K[i], K[ibest]);
                    KnowlegeBasePCRules Xroofifood = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[ibest]));
                    for (int t = 0; t < Bbest[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < Bbest[i].TermsSet[t].CountParams; p++)
                        {
                            Bbest[i].TermsSet[t].Parametrs[p] = KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                            // print Bbest
                            Console.Write("Значение Bbest = ");
                            Console.WriteLine(Bbest[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute B
                KnowlegeBasePCRules[] B = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    B[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < B[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < B[i].TermsSet[t].CountParams; p++)
                        {
                            B[i].TermsSet[t].Parametrs[p] = Bfood[i].TermsSet[t].Parametrs[p] + Bbest[i].TermsSet[t].Parametrs[p];
#if debug
                            // print B
                            Console.Write("Значение __B__ = ");
                            Console.WriteLine(B[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute F
                KnowlegeBasePCRules[] F = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    if (i == 0)
                    {
                        F[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < F[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                            {
                                F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p];
#if debug
                                // print F
                                Console.Write("Значение __F__ = ");
                                Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                    else
                    {
                        F[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < F[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                            {
                                F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p] + wf * F[i - 1].TermsSet[t].Parametrs[p];
#if debug
                                // print F
                                Console.Write("Значение __F__ = ");
                                Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }
                List <int> [] neihbors = new List <int> [Population.Length];
                // compute alocal
                KnowlegeBasePCRules[] alocal = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    alocal[i]   = new KnowlegeBasePCRules(Population[i]);
                    neihbors[i] = countneihbors(Population[i]);

/*
 #if debug
 *                  // print the neighbour counts
 *                  for (int g = 0; g < Population.Length; g++)
 *                  {
 *                      Console.Write("Знаение countneihbors = ");
 *                      Console.WriteLine(countneihbors(Population[g]));
 *                  }
 #endif
 */

                    for (int t = 0; t < alocal[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < alocal[i].TermsSet[t].CountParams; p++)
                        {
                            alocal[i].TermsSet[t].Parametrs[p] = 0;
                            for (int j = 0; j < neihbors[i].Count; j++)
                            {
                                double KRoofij = CalcKroof(K[i], K[neihbors[i][j]]);
                                KnowlegeBasePCRules XRoofij = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[neihbors[i][j]]));

                                alocal[i].TermsSet[t].Parametrs[p] += KRoofij * XRoofij.TermsSet[t].Parametrs[p];

#if debug
                                // print alocal
                                Console.Write("Знаение alocal = ");
                                Console.WriteLine(alocal[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }

                // compute Cbest
                double Cbest = 2 * (rand.NextDouble() - (dit / diter));
#if debug
                // print Cbest
                Console.Write("Значение Сbest = ");
                Console.WriteLine(Cbest);
#endif
                // compute rand for Cbest
                double rand2;
                rand2 = dit / diter - Cbest / 2; // use the double copies to avoid integer division
#if debug
                // print rand2 for Cbest
                Console.Write("Значение Crand = ");
                Console.WriteLine(rand2);
#endif
                // compute atarget
                KnowlegeBasePCRules[] atarget = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    atarget[i] = new KnowlegeBasePCRules(Population[i]);
                    double KRoofibest = CalcKroof(K[i], K[ibest]);
                    KnowlegeBasePCRules XRoofibest = new KnowlegeBasePCRules(CalcXroof(Population[i], Population[ibest]));
                    for (int t = 0; t < alocal[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < atarget[i].TermsSet[t].CountParams; p++)
                        {
                            atarget[i].TermsSet[t].Parametrs[p] = Cbest * KRoofibest * XRoofibest.TermsSet[t].Parametrs[p];
#if debug
                            // print atarget
                            Console.Write("Знание atarget = ");
                            Console.WriteLine(atarget[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute a
                KnowlegeBasePCRules[] a = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    a[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < a[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                        {
                            a[i].TermsSet[t].Parametrs[p] = atarget[i].TermsSet[t].Parametrs[p] + alocal[i].TermsSet[t].Parametrs[p];
#if debug
                            // print a
                            Console.Write("Значение __a__ = ");
                            Console.WriteLine(a[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }

                // compute N
                KnowlegeBasePCRules[] N = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    if (i == 0)
                    {
                        N[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < N[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                            {
                                N[i].TermsSet[t].Parametrs[p] = Vf * a[i].TermsSet[t].Parametrs[p];
#if debug
                                // print N
                                Console.Write("Значение __N__ = ");
                                Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                    else
                    {
                        N[i] = new KnowlegeBasePCRules(Population[i]);
                        for (int t = 0; t < F[i].TermsSet.Count; t++)
                        {
                            for (int p = 0; p < N[i].TermsSet[t].CountParams; p++)
                            {
                                N[i].TermsSet[t].Parametrs[p] = nmax * a[i].TermsSet[t].Parametrs[p] + wn * N[i - 1].TermsSet[t].Parametrs[p];
#if debug
                                // print N
                                Console.Write("Значение __N__ = ");
                                Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                            }
                        }
                    }
                }

                // compute dX
                KnowlegeBasePCRules[] dX = new KnowlegeBasePCRules[Population.Length];
                for (int i = 0; i < Population.Length; i++)
                {
                    dX[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < a[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                        {
                            dX[i].TermsSet[t].Parametrs[p] = F[i].TermsSet[t].Parametrs[p] + N[i].TermsSet[t].Parametrs[p] + D;
#if debug
                            // print dX
                            Console.Write("Значение _dX__ = ");
                            Console.WriteLine(dX[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }



                // print BEST
                //   Console.Write("Значение BEST_ = ");
                //  Console.WriteLine(BEST);


                // compute X(t+dt)
                for (int i = 0; i < Population.Length; i++)
                {
                    Population[i] = new KnowlegeBasePCRules(Population[i]);
                    for (int t = 0; t < Population[i].TermsSet.Count; t++)
                    {
                        for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                        {
                            Population[i].TermsSet[t].Parametrs[p] = Population[i].TermsSet[t].Parametrs[p] + calcdeltat(ct) * dX[i].TermsSet[t].Parametrs[p];
#if debug
                            // print Xnew
                            Console.Write("Знание X(t+dt) = ");
                            Console.WriteLine(Population[i].TermsSet[t].Parametrs[p]);
#endif
                        }
                    }
                }



                for (int i = 0; i < Population.Length; i++)
                {
                    double temp = result.ErrorLearnSamples(Population[i]);
                    if (double.IsNaN(temp) || double.IsInfinity(temp))
                    {
                        result.UnlaidProtectionFix(Population[i]); // repair the candidate being re-evaluated, not Xfood
                        temp = result.ErrorLearnSamples(Population[i]);
                    }

                    if (temp < bestError)
                    {
                        BEST      = new KnowlegeBasePCRules(Population[i]);
                        bestError = temp;
                    }
                }


                double y = it;
                if (y % 10 == 0 && y != 0)
                {
                    Console.WriteLine(it);
                    Console.WriteLine(bestError);
                }
#if debug
                // print the best error so far
                Console.Write("Значние BestError = ");
                Console.WriteLine(bestError);

                Console.WriteLine(".");
#endif
            }
            result.RulesDatabaseSet[0] = BEST;
            return(result);
        }
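The krill-herd update above builds the virtual food source as the error-weighted centre of mass of the population, Xfood = sum(X_i / K_i) / sum(1 / K_i), and then combines the foraging term F, the induced movement N and the random diffusion D into each krill's displacement dX. The food computation in isolation, as a sketch over plain parameter vectors:

using System.Linq;

static class FoodCentreSketch
{
    public static double[] FoodPosition(double[][] positions, double[] errors)
    {
        int dim = positions[0].Length;
        double divide = errors.Sum(k => 1.0 / k);          // sum of 1 / K_i
        var food = new double[dim];
        for (int p = 0; p < dim; p++)
        {
            for (int i = 0; i < positions.Length; i++)
            {
                food[p] += positions[i][p] / errors[i];    // sum of X_i / K_i
            }
            food[p] /= divide;
        }
        return food;
    }
}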