Example No. 1
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
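            // Multi-population swarm tuning of Pittsburgh-type classifier rule bases:
            // swapParticles() is invoked every changeParts iterations, and the best rule
            // base over all populations ends up in RulesDatabaseSet[0].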
            preIterate(Classify, conf);
            //Optimize the parameters
            iter = 1;
            while (iter <= MaxIter)
            {
                if (iter % changeParts == 0)
                {
                    swapParticles();
                }
                oneIterate();
            }
            //Print the classification accuracy of the best particle in each population
            for (int j = 0; j < Populations.Count; j++)
            {
                Populations[j] = ListPittsburgClassifierTool.SortRules(Populations[j], result);
                Console.WriteLine("Популяция №" + j + ":");
                Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(Populations[j][0]), 2));
                Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(Populations[j][0]), 2));
            }
            //Find the best solution across all populations
            List <KnowlegeBasePCRules> BestPopulation = new List <KnowlegeBasePCRules>();

            for (int i = 0; i < numberOfPopulations; i++)
            {
                BestPopulation.Add(Populations[i][0]);
            }
            BestPopulation             = ListPittsburgClassifierTool.SortRules(BestPopulation, result);
            result.RulesDatabaseSet[0] = BestPopulation[0];
            //Return the result
            return(result);
        }
Example No. 2
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classif, ILearnAlgorithmConf conf)
        {
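            // Discrete feature-selection swarm: every particle is a 0/1 mask over the features.
            // BestParticles[i] counts how often the iteration leader dropped (index 0) or kept
            // (index 1) feature i; the final leader mask is written to AcceptedFeatures.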
            result           = Classif;
            numberOfFeatures = result.CountFeatures;
            Init(conf);

            HeadLeader    = new int[numberOfFeatures];
            BestParticles = new List <int[]>();
            for (int i = 0; i < numberOfFeatures; i++)
            {
                BestParticles.Add(new int[2]);
                BestParticles[i][0] = 0;
                BestParticles[i][1] = 0;
            }

            SetPopulation();

            iter = 0;
            while (iter < MaxIter)
            {
                SortPopulation();

                for (int i = 0; i < Population[0].Length; i++)
                {
                    BestParticles[i][Population[0][i]] += 1;
                }

                HeadLeader = Population[0];

                ChangeParticles();

                iter++;
            }

            SortPopulation();

            for (int j = 0; j < Population[0].Length; j++)
            {
                Console.Write(Convert.ToString(Population[0][j]) + ' ');
                if (Population[0][j] == 0)
                {
                    result.AcceptedFeatures[j] = false;
                }
                else
                {
                    result.AcceptedFeatures[j] = true;
                }
            }
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            return(result);
        }
Example No. 3
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
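            // Group-based feature selection: the population is split into m groups of (N - 1) / m
            // particles; on each iteration either one cluster (with probability p_one) or two
            // clusters are updated, and the best boolean mask is written to AcceptedFeatures.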
            result           = Classifier;
            numberOfFeatures = result.CountFeatures;
            groups           = new List <int[][]>();
            Init(conf);
            SetPopulation();
            NS = new int[m];
            for (int i = 0; i < m; i++)
            {
                NS[i] = (N - 1) / m;
            }
            cur_iter = 0;
            double featurecount = 0;

            while (cur_iter < iter)
            {
                //Console.WriteLine("\nИТЕРАЦИЯ " + cur_iter);

                SortPopulation();

                groups = GroupStream();
                if (p_one > rand.NextDouble())
                {
                    ChooseOneCluster();
                }
                else
                {
                    ChooseTwoClusters();
                }

                SortPopulation();
                cur_iter++;
            }
            for (int j = 0; j < Population[0].Length; j++)
            {
                if (Population[0][j] == true)
                {
                    featurecount += 1;
                }
            }
            result.AcceptedFeatures = Population[0];
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Признаки:" + featurecount);
            featurecount = 0;
            return(result);
        }
Example No. 4
 public virtual void oneIterate()
 {
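     // One iteration of the role-based swarm: assign roles, move the explorer and aimless
     // particles, drop the roles, re-sort the population and, on the final iteration,
     // print the accuracy of the best rule base.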
     SetRoles();
     ChangeExplorersPositions();
     ChangeAimlessPositions();
     DiscardRoles();
     Population = ListPittsburgClassifierTool.SortRules(Population, result);
     iter++;
     if (iter == MaxIter)
     {
         Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(Population[0]), 2));
         Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(Population[0]), 2));
         //Console.WriteLine();
     }
 }
Example No. 5
        private string ErrorInfoPC(IFuzzySystem FS)
        {
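            // Records the classification accuracy and error of the first rule base on the
            // learn and test samples and returns them as a formatted report line.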
            PCFuzzySystem IFS = FS as PCFuzzySystem;

            if (IFS.RulesDatabaseSet.Count < 1)
            {
                return("Точность нечеткой системы недоступна");
            }
            classLearnResult.Add(IFS.ClassifyLearnSamples(IFS.RulesDatabaseSet[0]));
            classTestResult.Add(IFS.ClassifyTestSamples(IFS.RulesDatabaseSet[0]));
            classErLearn.Add(IFS.ErrorLearnSamples(IFS.RulesDatabaseSet[0]));
            classErTest.Add(IFS.ErrorTestSamples(IFS.RulesDatabaseSet[0]));

            return("Точностью на обучающей выборке  " + classLearnResult[classLearnResult.Count - 1].ToString() + " , Точность на тестовой выборке  " + classTestResult[classTestResult.Count - 1].ToString() + " " + Environment.NewLine +
                   "Ошибкой на обучающей выборке  " + classErLearn[classErLearn.Count - 1].ToString() + " , Ошибкой на тестовой выборке  " + classErTest[classErTest.Count - 1].ToString() + " " + Environment.NewLine);
        }
Example No. 6
        public override string ToString(bool with_param = false)
        {
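            // When with_param is set, the report lists the scout and worker counts and, for the
            // last countBestBase recorded bee positions, the learn/test accuracy together with
            // the indices of the kept and of the discarded features.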
            if (with_param)
            {
                string result = "Алгоритм пчелинной колонии дискретный {";
                result += "Количество разведчиков= " + countScouts.ToString() + " ;" + Environment.NewLine;
                result += "Количество рабочих пчел= " + countWorkers.ToString() + " ;" + Environment.NewLine;
                for (int z = solutionInfo.Count - countBestBase; z < solutionInfo.Count; z++)
                {
                    int saved = solutionInfo[z].PositionOfBee.Where(x => x == true).Count();
                    int lost  = solutionInfo[z].PositionOfBee.Length - saved;
                    theFuzzySystem.AcceptedFeatures = solutionInfo[z].PositionOfBee;
                    double accuracyLearn = theFuzzySystem.ClassifyLearnSamples(theFuzzySystem.RulesDatabaseSet[0]);
                    double accuracyTest  = theFuzzySystem.ClassifyTestSamples(theFuzzySystem.RulesDatabaseSet[0]);
                    result += $"Оставшиеся признаки {saved}:{accuracyLearn}||{accuracyTest} [ ";
                    for (int i = 0; i < solutionInfo[z].PositionOfBee.Length; i++)
                    {
                        if (solutionInfo[z].PositionOfBee[i] == true)
                        {
                            result += (i + 1).ToString() + ", ";
                        }
                    }
                    result += "]" + Environment.NewLine;
                    result += $"Усеченные признаки {lost}:{accuracyLearn}||{accuracyTest} [ ";
                    for (int i = 0; i < solutionInfo[z].PositionOfBee.Length; i++)
                    {
                        if (solutionInfo[z].PositionOfBee[i] == false)
                        {
                            result += (i + 1).ToString() + ", ";
                        }
                    }

                    result += "]" + Environment.NewLine;
                }

                result += "}";
                return(result);
            }
            return("Алгоритм пчелинной колонии дискретный");
        }
Example No. 7
        private void addClassifierValue(PCFuzzySystem Classifier)
        {
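            // Appends the classifier's current quality measures to the result series:
            // learn/test accuracy and the complementary error (100 - accuracy), full
            // complexity and rule count, and the nominal and real interpretability indices.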
            double Value = Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0]);

            ValueLGoodsPercent.Add(Value);
            ValueLGoodsError.Add(100 - Value);

            Value = Classifier.ClassifyTestSamples(Classifier.RulesDatabaseSet[0]);
            ValueTGoodsPercent.Add(Value);
            ValueTGoodsError.Add(100 - Value);


            Value = Classifier.getComplexit();
            ValueComplexityFull.Add(Value);
            Value = Classifier.getRulesCount();
            ValueComplexityRules.Add(Value);

            Value = Classifier.getNormalIndex();
            ValueInterpretyNominal.Add(Value);
            Value = Classifier.getIndexReal();
            ValueInterpretyReal.Add(Value);
        }
Example No. 8
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
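            // Discrete role-based feature-selection swarm over boolean masks: after MaxIter
            // iterations the leader mask is printed as 0/1 flags and stored in AcceptedFeatures.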
            result = Classify;
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            numberOfFeatures = result.CountFeatures;
            Init(conf);
            HeadLeader       = new bool[numberOfFeatures];
            VelocityVector   = new bool[numberOfFeatures];
            VelocityVectorLL = new bool[numberOfFeatures];
            VelocityVectorHL = new bool[numberOfFeatures];

            SetPopulation();

            LocalLeaders      = new List <bool[]>();
            ExplorerParticles = new List <bool[]>();
            AimlessParticles  = new List <bool[]>();

            iter = 0;
            while (iter < MaxIter)
            {
                SortPopulation();

                SetRoles();

                ChangeExplorersPositions();
                ChangeAimlessPositions();

                DiscardRoles();

                iter++;
            }

            SortPopulation();

            result.AcceptedFeatures = Population[0];
            for (int j = 0; j < Population[0].Length; j++)
            {
                if (Population[0][j])
                {
                    Console.Write("1 ");
                }
                else
                {
                    Console.Write("0 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            return(result);
        }
Example No. 9
        private void make_Log(Log_line EventCall, PCFuzzySystem FS = null, string name_Alg = "", DateTime TimerValue = new DateTime(), TimeSpan TimerSpan = new TimeSpan())
        {
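            // Appends a time-stamped line to LOG for the given build stage: start of the run,
            // begin/end of generation and of optimization (with the algorithm name, the system
            // complexity and the learn/test accuracy), and the per-system / total build time.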
            switch (EventCall)
            {
            case Log_line.Start:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начало построения системы" + Environment.NewLine;
                break;
            }

            case Log_line.StartGenerate:
            {
                LOG += "(" + TimerValue.ToString() + ")" + " Начата генерация системы" + Environment.NewLine;

                break;
            }

            case Log_line.StartOptimaze:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Начата оптимизация системы" + Environment.NewLine;
                break;
            }


            case Log_line.PreGenerate_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Генерация алгоритмом " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PostGenerate_log:
            {
                double LearnResult = FS.ClassifyLearnSamples();
                double TestResult  = FS.ClassifyTestSamples();


                LOG += "(" + DateTime.Now.ToString() + ")" + " Сгенерирована система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке " + LearnResult.ToString() + ", Точность на тестовой выборке " + TestResult.ToString() + Environment.NewLine;
                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;
                break;
            }

            case Log_line.PreOptimaze_log:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Оптимизация алгоритмом " + name_Alg.ToString() + Environment.NewLine;

                break;
            }

            case Log_line.PostOptimaze_log:
            {
                double LearnResult = FS.ClassifyLearnSamples();
                double TestResult  = FS.ClassifyTestSamples();


                LOG += "(" + DateTime.Now.ToString() + ")" + " оптимизированная система сложностью " + FS.ValueComplexity().ToString() + Environment.NewLine +
                       "Точностью на обучающей выборке " + LearnResult.ToString() + ", Точность на тестовой выборке " + TestResult.ToString() + Environment.NewLine;
                LOG += "Использован " + name_Alg.ToString() + Environment.NewLine;

                break;
            }


            case Log_line.EndCircle:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения системы" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            case Log_line.End:
            {
                LOG += "(" + DateTime.Now.ToString() + ")" + " Время построения всех систем" + TimerSpan.TotalSeconds.ToString() + Environment.NewLine; break;
            }

            default: { LOG += "Не верный вызов" + Environment.NewLine; break; }
            }
        }
Example No. 10
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
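            // Feature-selection search over boolean masks: HeadLeader keeps the mask with the
            // best learn accuracy seen so far; the result is printed and also appended to a
            // hard-coded experiment report file.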
            result = Classify;
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            numberOfFeatures = result.CountFeatures;
            Init(conf);
            rand       = new Random();
            HeadLeader = new bool[numberOfFeatures];
            SetPopulation();
            Population[0].CopyTo(HeadLeader, 0);
            result.AcceptedFeatures = HeadLeader;
            double HLAcc = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);

            iter = 0;
            while (iter < MaxIter)
            {
                ChangePositions();
                SortPopulation();
                result.AcceptedFeatures = Population[0];
                if (result.ClassifyLearnSamples(result.RulesDatabaseSet[0]) > HLAcc)
                {
                    HLAcc = result.ClassifyLearnSamples(result.RulesDatabaseSet[0]);
                    Population[0].CopyTo(HeadLeader, 0);
                }
                iter++;
            }
            int count_ones = 0;

            result.AcceptedFeatures = HeadLeader;
            for (int j = 0; j < HeadLeader.Length; j++)
            {
                if (HeadLeader[j])
                {
                    Console.Write("1 ");
                    count_ones++;
                }
                else
                {
                    Console.Write("0 ");
                }
            }
            Console.WriteLine();
            Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(result.RulesDatabaseSet[0]), 2));
            Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2));
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Признаки: " + count_ones + Environment.NewLine);
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Тест: " + Math.Round(result.ClassifyTestSamples(result.RulesDatabaseSet[0]), 2) + Environment.NewLine);
            File.AppendAllText("E:/TUSUR/GPO/Эксперименты/Behavior/RS" + folder_name + ".txt", "Время: " + Environment.NewLine);
            return(result);
        }
Example No. 11
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
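            // Multi-population cluster-based tuning: each population is scored against its own
            // resampled copy of the data (25% of the learn rows removed at random), and the
            // best rule base of every population is added to the final RulesDatabaseSet.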
            result = Classifier;
            //Derive the name of the data folder
            string path_name   = "../../OLD/Data/Keel/Classifier/KEEL-10/";
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            groups = new List <int[]>();
            Init(conf);
            //Create new learn and test sample sets and remove a number of random rows from them
            List <PCFuzzySystem> results = new List <PCFuzzySystem>();

            for (int i = 0; i < numberOfPopulations; i++)
            {
                SampleSet new_learn = new SampleSet(path_name + folder_name + "/" + result.LearnSamplesSet.FileName);
                SampleSet new_test  = new SampleSet(path_name + folder_name + "/" + result.TestSamplesSet.FileName);
                results.Add(new PCFuzzySystem(new_learn, new_test));
                int ground = (int)Math.Round(results[i].LearnSamplesSet.DataRows.Count * 0.25);
                for (int j = 0; j < ground; j++)
                {
                    results[i].LearnSamplesSet.DataRows.RemoveAt(rand.Next(0, results[i].LearnSamplesSet.DataRows.Count));
                }
            }
            Populations = new List <List <KnowlegeBasePCRules> >();
            for (int i = 0; i < numberOfPopulations; i++)
            {
                Populations.Add(SetPopulation(new List <KnowlegeBasePCRules>()));
                Populations[i] = ListPittsburgClassifierTool.SortRules(Populations[i], result);
            }
            NS = new int[m];
            for (int i = 0; i < m; i++)
            {
                NS[i] = (N - 1) / m;
            }
            cur_iter = 0;
            while (cur_iter < iter)
            {
                for (int p_i = 0; p_i < Populations.Count; p_i++)
                {
                    groups = GroupStream(Populations[p_i]);
                    if (p_one > rand.NextDouble())
                    {
                        ChooseOneCluster(Populations[p_i]);
                    }
                    else
                    {
                        ChooseTwoClusters(Populations[p_i]);
                    }
                    Populations[p_i] = ListPittsburgClassifierTool.SortRules(Populations[p_i], results[p_i]);
                    //Console.WriteLine(cur_iter + " - Итерация");
                    //Console.WriteLine("Обуч. выборка = " + result.ErrorLearnSamples(Populations[p_i][0]));
                    //Console.WriteLine("Тест. выборка = " + result.ErrorTestSamples(Populations[p_i][0]));
                }
                cur_iter++;
            }
            //Print the classification accuracy of the best particle in each population
            for (int j = 0; j < Populations.Count; j++)
            {
                Populations[j] = ListPittsburgClassifierTool.SortRules(Populations[j], results[j]);
                Console.WriteLine("Популяция №" + j + ":");
                Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(Populations[j][0]), 2));
                Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(Populations[j][0]), 2));
            }
            //Add the best solutions to the rule database
            if (result.RulesDatabaseSet.Count == 1)
            {
                result.RulesDatabaseSet.Clear();
            }
            for (int i = 0; i < Populations.Count; i++)
            {
                result.RulesDatabaseSet.Add(Populations[i][0]);
            }
            //Return the result
            return(result);
        }
Example No. 12
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf)
        {
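            // Multi-population role-based swarm (local leaders, explorers, aimless particles)
            // tuning the rule-base parameters: each population is scored against its own
            // resampled learn set, and the best rule base of every population is added to
            // RulesDatabaseSet.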
            result = Classify;
            //Derive the name of the data folder
            string path_name   = "../../OLD/Data/Keel/Classifier/KEEL-10/";
            string folder_name = "";

            foreach (var letter in result.LearnSamplesSet.FileName)
            {
                if (letter != '-')
                {
                    folder_name += letter;
                }
                else
                {
                    break;
                }
            }
            //Initialize the parameters
            Init(conf);
            //Create new learn and test sample sets and remove a number of random rows from them
            List <PCFuzzySystem> results = new List <PCFuzzySystem>();

            for (int i = 0; i < numberOfPopulations; i++)
            {
                SampleSet new_learn = new SampleSet(path_name + folder_name + "/" + result.LearnSamplesSet.FileName);
                SampleSet new_test  = new SampleSet(path_name + folder_name + "/" + result.TestSamplesSet.FileName);
                results.Add(new PCFuzzySystem(new_learn, new_test));
                int ground = (int)Math.Round(results[i].LearnSamplesSet.DataRows.Count * 0.25);
                for (int j = 0; j < ground; j++)
                {
                    results[i].LearnSamplesSet.DataRows.RemoveAt(rand.Next(0, results[i].LearnSamplesSet.DataRows.Count));
                }
            }
            //Initialize the algorithm's velocity vectors and zero them
            HeadLeader       = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            VelocityVector   = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            VelocityVectorLL = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            VelocityVectorHL = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
            for (int i = 0; i < VelocityVector.TermsSet.Count; i++)
            {
                for (int j = 0; j < VelocityVector.TermsSet[i].Parametrs.Length; j++)
                {
                    VelocityVector.TermsSet[i].Parametrs[j]   = 0;
                    VelocityVectorLL.TermsSet[i].Parametrs[j] = 0;
                    VelocityVectorHL.TermsSet[i].Parametrs[j] = 0;
                }
            }
            //Create the populations and the archive of each particle's best position
            Populations = new List <List <KnowlegeBasePCRules> >();
            for (int i = 0; i < numberOfPopulations; i++)
            {
                Populations.Add(SetPopulation(new List <KnowlegeBasePCRules>()));
            }
            ParticlesBest = new Dictionary <KnowlegeBasePCRules, KnowlegeBasePCRules>();
            foreach (var Population in Populations)
            {
                foreach (var Particle in Population)
                {
                    ParticlesBest.Add(Particle, Universal);
                }
            }
            //Initialize the particle roles
            LocalLeaders      = new KnowlegeBasePCRules[numberOfLocalLeaders];
            ExplorerParticles = new KnowlegeBasePCRules[numberOfAllParts - numberOfAimlessParts - numberOfLocalLeaders - 1];
            AimlessParticles  = new KnowlegeBasePCRules[numberOfAimlessParts];
            //Optimize the parameters
            iter = 1;
            while (iter < MaxIter)
            {
                for (int p_i = 0; p_i < Populations.Count; p_i++)
                {
                    Populations[p_i] = ListPittsburgClassifierTool.SortRules(Populations[p_i], results[p_i]);
                    SetRoles(Populations[p_i]);
                    ChangeExplorersPositions();
                    ChangeAimlessPositions();
                    Populations[p_i] = DiscardRoles(Populations[p_i]);
                }
                iter++;
            }
            //Print the classification accuracy of the best particle in each population
            for (int j = 0; j < Populations.Count; j++)
            {
                Populations[j] = ListPittsburgClassifierTool.SortRules(Populations[j], results[j]);
                Console.WriteLine("Популяция №" + j + ":");
                Console.WriteLine("Обуч: " + Math.Round(result.ClassifyLearnSamples(Populations[j][0]), 2));
                Console.WriteLine("Тест: " + Math.Round(result.ClassifyTestSamples(Populations[j][0]), 2));
            }
            //Add the best solutions to the rule database
            if (result.RulesDatabaseSet.Count == 1)
            {
                result.RulesDatabaseSet.Clear();
            }
            for (int i = 0; i < Populations.Count; i++)
            {
                result.RulesDatabaseSet.Add(Populations[i][0]);
            }
            //Return the result
            return(result);
        }
Example No. 13
 private static void writeAboutEstimates(XmlWriter writer, PCFuzzySystem Classifier)
 {
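     // Writes the <Estimates> element: precision/error percentages for the learn (and, when a
     // test set is present, the test) samples, followed by the interpretability indices
     // (GIB/GIC/GIS/Lindis in normal and summed variants), the complexity and the rule count.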
     writer.WriteStartElement("Estimates");
     if (Classifier.TestSamplesSet != null)
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(22));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyTestSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.TestSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorTestSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
     }
     else
     {
         writer.WriteAttributeString("Count", XmlConvert.ToString(20));
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "PrecisionPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
         writer.WriteStartElement("Estimate");
         writer.WriteAttributeString("Table", Classifier.LearnSamplesSet.FileName);
         writer.WriteAttributeString("Type", "ErrorPercent");
         writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])));
         writer.WriteEndElement();
     }
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBSumStrait()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GIBSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGIBSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICSumStraigth()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GICSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGICSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "GISSumReverce");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getGISSumReverce()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisNormal");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisNormal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumStraigh");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisSumStraight()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "LindisSumReverse");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getLindisSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "NormalIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getNormalIndex()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "RealIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexReal()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumStraigthIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexSumStraigt()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "SumReverseIndex");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getIndexSumReverse()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "ComplexitIt");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getComplexit()));
     writer.WriteEndElement();
     writer.WriteStartElement("Estimate");
     writer.WriteAttributeString("Type", "CountRules");
     writer.WriteAttributeString("Value", XmlConvert.ToString(Classifier.getRulesCount()));
     writer.WriteEndElement();
     writer.WriteEndElement();
 }
Example No. 14
        public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
        {
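            // Explosion ("shrapnel") search over the term parameters: candidate positions are
            // scattered around the current best solution, subject to ordering and range
            // constraints on each term's parameters (enforced via the generate/exit labels),
            // and the best shrapnel found replaces rule base 0.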
            count_iteration = ((Param)conf).Количество_итераций;
            count_populate  = ((Param)conf).Число_осколков;
            exploration     = ((Param)conf).Фактор_исследования;
            reduce_koef     = ((Param)conf).Уменьшающий_коэффициент;

            int           iter = 0, i, j, count_terms, var = 0;
            int           count_cons;
            double        RMSE_best, cosFi;
            int           Nd, variables, k = 1, best = 0;
            PCFuzzySystem result = Classifier;
            int           type   = Classifier.RulesDatabaseSet[0].TermsSet[0].CountParams;

            Nd = Classifier.RulesDatabaseSet[0].TermsSet.Count * type;
            double[] X_best = new double[Nd + 1];
            double[,] X_pred    = new double[2, Nd + 1];
            double[,] direction = new double[count_populate, Nd + 1];
            double[,] d         = new double[count_populate, Nd + 1];
            double[,] explosion = new double[count_populate, Nd + 1];
            double[,] shrapnel  = new double[count_populate, Nd + 1];
            cosFi      = Math.Cos(2 * Math.PI / count_populate);
            RMSE_best  = Classifier.ClassifyLearnSamples(0);
            count_cons = Classifier.RulesDatabaseSet[0].Weigths.Count();
            double[] RMSE      = new double[count_populate];
            double[] RMSE_tst  = new double[count_populate];
            double[] RMSE2     = new double[count_populate];
            double[] RMSE_pred = new double[2];
            double[] cons_best = new double[count_cons];
            count_terms = Classifier.RulesDatabaseSet[0].TermsSet.Count;
            variables   = Classifier.LearnSamplesSet.CountVars;
            int[] terms = new int[variables];


            KnowlegeBasePCRules[] X = new KnowlegeBasePCRules[count_populate];
            for (int s = 0; s < count_populate - 1; s++)
            {
                X[s] = new KnowlegeBasePCRules(Classifier.RulesDatabaseSet[0]);
                Classifier.RulesDatabaseSet.Add(X[s]);
            }
            RMSE_best = Classifier.ClassifyLearnSamples(0);
            for (int h = 0; h < count_terms; h++)
            {
                if (Classifier.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar == var)
                {
                    terms[var]++;
                }
                else
                {
                    terms[var + 1]++;
                    var++;
                }
            }
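            // Main loop: iteration 0 seeds shrapnel[0, *] from the current term parameters and
            // draws random initial distances; later iterations regenerate every coordinate either
            // by pure exploration (while iter < exploration) or by a directed step built from the
            // two most recent best positions (X_pred / RMSE_pred).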
            for (iter = 0; iter <= count_iteration; iter++)
            {
                best = 0;
                if (iter == 0)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            shrapnel[0, k] = Classifier.RulesDatabaseSet[0].TermsSet[h].Parametrs[p];
                            X_best[k]      = shrapnel[0, k];
                            X_pred[0, k]   = shrapnel[0, k];
                            X_pred[1, k]   = shrapnel[0, k];
                            k++;
                        }
                    }
                    RMSE_pred[0] = Classifier.ClassifyLearnSamples(0);
                    RMSE_pred[1] = Classifier.ClassifyLearnSamples(0);
                    k            = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            d[0, k] = RandomNext(Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                            k++;
                        }
                    }
                }
                for (i = 1; i <= Nd; i++)
                {
                    if (exploration > iter)
                    {
                        for (j = 1; j < count_populate; j++)
                        {
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            //the distance formula has been corrected

                            d[j, i] = d[j - 1, i] * randn();

                            //double sluch = randn();
                            //if (sluch < 0) d[j, i] = d[j - 1, i] * (-1) * Math.Pow(sluch, 2);
                            //else d[j, i] = d[j - 1, i] * Math.Pow(sluch, 2);
                            explosion[j, i] = d[j, i] * cosFi;
                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = (shrapnel[0, i] + explosion[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = shrapnel[0, i] + explosion[j, i];
                            }

                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min; goto exit;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max; goto exit;
                                }
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
exit:
                            if (i > type)
                            {
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        d[0, i] = d2(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);

                        for (j = 1; j < count_populate; j++)
                        {
                            if ((X_pred[1, i] - X_pred[0, i]) != 0)
                            {
                                direction[j, i] = m(X_pred[0, i], X_pred[1, i], RMSE_pred[0], RMSE_pred[1]);
                            }
                            else
                            {
                                direction[j, i] = 1;
                            }
                            int sum = 0, sum2 = 0;
generate:
                            sum++;
                            sum2++;
                            double random;
                            random = randn();
                            if (random < 0)
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi * (-1);
                            }
                            else
                            {
                                explosion[j, i] = d[j - 1, i] * rand.NextDouble() * cosFi;
                            }


                            if (sum > 20)
                            {
                                if ((i + (type - 2)) % type == 0)
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    if (sum2 > 2)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]);
                                        sum            = 19;
                                    }
                                    if (sum2 > 3)
                                    {
                                        shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - type] - shrapnel[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                        sum            = 19;
                                        sum2           = 0;
                                    }
                                }
                                else
                                {
                                    shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]) + (shrapnel[j, i - 1] - shrapnel[j, i]);
                                    sum            = 19;
                                }
                            }
                            else
                            {
                                shrapnel[j, i] = Shrapnel(explosion[j, i], shrapnel[0, i], d[j - 1, i], direction[j, i]);
                            }

                            if ((i == 2) || (i == 1))
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 2) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - type) / type)].NumberOfInputVar)
                            {
                                shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Min;
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != (variables - 1))
                            {
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i + 1) / type)].NumberOfInputVar)
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }
                            if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar == (variables - 1))
                            {
                                if ((i == (count_terms * 3 - 1)) || (i == (count_terms * 3)))
                                {
                                    shrapnel[j, i] = Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i - 1) / type].NumberOfInputVar).Max;
                                }
                            }

                            if (((i + (type - 2)) % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                if (shrapnel[j, i] == Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)
                                {
                                    i--;
                                }
                                goto generate;
                            }
                            if ((i % type == 0) && (shrapnel[j, i] < shrapnel[j, i - 1]))
                            {
                                goto generate;
                            }
                            if (i != 1)
                            {
                                if (((i - (type - 2)) % type == 0) && ((shrapnel[j, i] > shrapnel[j, i - 1]) || (shrapnel[j, i] > Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max) || (shrapnel[j, i] < Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min)))
                                {
                                    goto generate;
                                }
                            }
                            if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Min) || (shrapnel[j, i] > Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[(int)(i / type)].NumberOfInputVar).Max)))
                            {
                                goto generate;
                            }
                            if (i > type)
                            {
                                if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar != 0)
                                {
                                    if (Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1 - type) / type)].NumberOfInputVar != Classifier.RulesDatabaseSet[0].TermsSet[(int)((i - 1) / type)].NumberOfInputVar - 1)
                                    {
                                        if (((i + (type - 2)) % type == 0) && ((shrapnel[j, i] < shrapnel[j, i - type])))
                                        {
                                            goto generate;
                                        }
                                    }
                                }
                            }
                            d[j, i] = d[j - 1, i] / Math.Pow(Math.E, (double)iter / (double)reduce_koef);
                        }
                    }
                }

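                // Copy the shrapnel coordinates into the rule-base copies, score each copy on the
                // learn samples, promote the best one back into rule base 0, and keep the two most
                // recent best positions in X_pred / RMSE_pred for the directed phase.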
                for (int z = 0; z < count_populate; z++)
                {
                    k = 1;
                    for (int h = 0; h < count_terms; h++)
                    {
                        for (int p = 0; p < type; p++)
                        {
                            Classifier.RulesDatabaseSet[z].TermsSet[h].Parametrs[p] = shrapnel[z, k];
                            k++;
                        }
                    }
                }
                for (j = 0; j < count_populate; j++)
                {
                    RMSE[j]     = Classifier.ClassifyLearnSamples(j);
                    RMSE_tst[j] = Classifier.ClassifyTestSamples(j);
                    if (RMSE[j] > RMSE_best)
                    {
                        RMSE_best = RMSE[j];
                        best      = j;
                    }
                }

                k = 1;
                if (iter % 10 == 0)
                {
                    k = 1;
                }
                for (int h = 0; h < count_terms; h++)
                {
                    for (int p = 0; p < type; p++)
                    {
                        shrapnel[0, k] = shrapnel[best, k];
                        if (exploration > iter)
                        {
                            d[0, k] = RandomNext(Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Min, Classifier.LearnSamplesSet.InputAttribute(Classifier.RulesDatabaseSet[0].TermsSet[h].NumberOfInputVar).Max);
                        }
                        Classifier.RulesDatabaseSet[0].TermsSet[h].Parametrs[p] = shrapnel[0, k];
                        k++;
                    }
                }


                if (RMSE_pred[1] < RMSE[best])
                {
                    for (k = 1; k <= Nd; k++)
                    {
                        X_pred[0, k] = X_pred[1, k];
                        X_pred[1, k] = shrapnel[best, k];
                    }
                    RMSE_pred[0] = RMSE_pred[1];
                    RMSE_pred[1] = RMSE[best];
                }
            }

            return(result);
        }