public virtual void oneWatchJump(TSAFuzzySystem result)
{
    // Local (watch-jump) phase: each monkey generates a jump candidate and
    // adopts it when it is not worse on the learn samples.
    for (int idx = 0; idx < population_count; idx++)
    {
        WJVector_gen(idx);
        double candidateError = result.ErrorLearnSamples(WJVector);
        double currentError = result.ErrorLearnSamples(monkey[idx]);
        if (candidateError <= currentError)
        {
            monkey[idx] = WJVector;
        }
    }
}
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approx, ILearnAlgorithmConf conf)
{
    // Driver: initialise the population, then on every iteration regroup the
    // individuals and apply either the one-cluster or two-cluster move,
    // chosen with probability p_one.  The best rule base ends up in slot 0.
    result = Approx;
    groups = new List<int[]>();
    Init(conf);
    SetPopulation();
    Population = ListTakagiSugenoApproximateTool.SortRules(Population, result);

    // Evenly sized groups (integer division of N-1 over m groups).
    NS = new int[m];
    for (int i = 0; i < m; i++)
    {
        NS[i] = (N - 1) / m;
    }

    for (cur_iter = 0; cur_iter < iter; cur_iter++)
    {
        groups = GroupStream();
        if (p_one > rand.NextDouble())
        {
            ChooseOneCluster();
        }
        else
        {
            ChooseTwoClusters();
        }
        // Progress trace: current iteration plus learn/test error of the leader.
        Console.WriteLine(cur_iter + " - Итерация");
        Console.WriteLine("Обуч. выборка = " + result.ErrorLearnSamples(Population[0]));
        Console.WriteLine("Тест. выборка = " + result.ErrorTestSamples(Population[0]));
    }

    Population = ListTakagiSugenoApproximateTool.SortRules(Population, result);
    result.RulesDatabaseSet[0] = Population[0];
    return (result);
}
internal static List <KnowlegeBaseTSARules> WeedsRegenerateIteration(TSAFuzzySystem fuzzy, KnowlegeBaseTSARules b, double delta, Random rand, int count)
{
    // Produces `count` copies of the base rule set `b`, each tagged with its
    // learn-sample error.  `delta` and `rand` are unused in this variant but
    // kept so the signature matches the perturbing variant elsewhere.
    var offspring = new List<KnowlegeBaseTSARules>(count);
    for (int i = 0; i < count; i++)
    {
        var clone = new KnowlegeBaseTSARules(b);
        clone.error = fuzzy.ErrorLearnSamples(clone);
        offspring.Add(clone);
    }
    return (offspring);
}
public virtual void oneGlobalJump(TSAFuzzySystem result)
{
    // Global (somersault) phase: each monkey jumps relative to the pivot
    // (current best solution) with a random factor drawn from
    // [somersault_interval_left, somersault_interval_right]; the move is
    // kept only when it does not worsen the learn-sample error.
    for (int j = 0; j < population_count; j++)
    {
        SSVector = bestsolution; // pivot of the somersault
        IndividualSSVector = new KnowlegeBaseTSARules(monkey[j]);
        for (int k = 0; k < monkey[j].TermsSet.Count; k++)
        {
            for (int q = 0; q < monkey[j].TermsSet[k].CountParams; q++)
            {
                double factor = somersault_interval_left
                                + (somersault_interval_right - somersault_interval_left) * StaticRandom.NextDouble();
                IndividualSSVector.TermsSet[k].Parametrs[q] +=
                    factor * (SSVector.TermsSet[k].Parametrs[q] - monkey[j].TermsSet[k].Parametrs[q]);
            }
        }
        if (result.ErrorLearnSamples(IndividualSSVector) <= result.ErrorLearnSamples(monkey[j]))
        {
            monkey[j] = IndividualSSVector;
        }
    }
}
public virtual void CheckForBest(TSAFuzzySystem result)
{
    // Refresh the global best: take the current best monkey and, when it
    // beats the stored record, copy it in and reset the stagnation counter;
    // otherwise count one more stagnant call.
    KnowlegeBaseTSARules candidate = monkey.SelectBest(result, 1)[0];
    double candidateError = result.ErrorLearnSamples(candidate);
    if (candidateError < bestsolutionnumber)
    {
        bestsolution = new KnowlegeBaseTSARules(candidate);
        bestsolutionnumber = candidateError;
        Console.WriteLine("NEWBEST " + bestsolutionnumber);
        final_iter = 0;
    }
    else
    {
        final_iter++;
    }
}
internal static List <KnowlegeBaseTSARules> WeedsRegenerateIteration(TSAFuzzySystem fuzzy, KnowlegeBaseTSARules b, double delta, Random rand, int count)
{
    // Produces `count` perturbed copies of the base rule set `b`.  Each term
    // parameter is shifted by scatter * delta * z, where z is a standard
    // normal deviate from the Box-Muller transform.  Each copy is tagged
    // with its learn-sample error.
    var rez = new List<KnowlegeBaseTSARules>();
    for (int k = 0; k < count; k++)
    {
        var bforedit = new KnowlegeBaseTSARulesWithError(b);
        foreach (var rule in bforedit.RulesDatabase)
        {
            for (int i = 0; i < rule.ListTermsInRule.Count; i++)
            {
                var term = rule.ListTermsInRule[i];
                for (int index = 0; index < term.Parametrs.Length; index++)
                {
                    // BUG FIX: Random.NextDouble() returns a value in [0, 1),
                    // so it can be exactly 0, making Log(0) = -Infinity and
                    // the whole perturbation Infinity/NaN.  Using 1 - u keeps
                    // the Log argument in (0, 1] with the same distribution.
                    double gauss = Math.Sqrt(-2 * Math.Log(1 - rand.NextDouble()))
                                   * Math.Cos(2 * Math.PI * rand.NextDouble());
                    term.Parametrs[index] += fuzzy.LearnSamplesSet.InputAttributes[i].Scatter * delta * gauss;
                }
            }
        }
        bforedit.error = fuzzy.ErrorLearnSamples(bforedit);
        rez.Add(bforedit);
    }
    return (rez);
}
// Generates a rule base with the KLI family of generators.  Starting from an
// initial max-error guess derived from the mean learn-sample output, it runs
// two independent golden-section-like searches (100 iterations each) over the
// max-error parameter — one with the static KLI.kliGenerate, one with the
// local kliGenerate — tracking every finite-error candidate, then installs
// whichever of the two searches produced the lowest learn error.
public override FuzzySystem.TakagiSugenoApproximate.TSAFuzzySystem Generate(FuzzySystem.TakagiSugenoApproximate.TSAFuzzySystem Approximate, FuzzySystem.FuzzyAbstract.conf.IGeneratorConf config)
{
    TSAFuzzySystem result = Approximate;
    type_func = TypeTermFuncEnum.Гауссоида;
    ResultSystem = Approximate;
    var kliConf = config as KLI_conf;
    // Runs only when the supplied config is actually a KLI_conf; otherwise
    // the input system is returned untouched.
    if (kliConf != null)
    {
        // Initial center of the search: configured factor times the mean output.
        double meanValue = result.LearnSamplesSet.DataRows.Select(x => x.DoubleOutput).Average();
        var mayError = kliConf.MaxValue * meanValue;
        double centerValue = mayError;
        // [0] = left probe, [1] = center, [2] = right probe for each search.
        ScoreListElem[] ResultsKLI = new ScoreListElem[3];
        ScoreListElem[] ResultsKLI2 = new ScoreListElem[3];
        // All candidates with a finite error, across all iterations.
        List <ScoreListElem> ResultsKLIFull = new List <ScoreListElem>();
        List <ScoreListElem> ResultsKLI2Full = new List <ScoreListElem>();
        // Relative half-width of the probing bracket; shrunk by 0.7 when the
        // center already beats both probes.
        double magic = 0.25;

        // --- Search 1: static KLI.kliGenerate ("Кли") ---
        // Evaluate the initial center point.
        KLI.kliGenerate(result, type_func, centerValue);
        ScoreListElem tempRes = new ScoreListElem();
        tempRes.MaxError = centerValue;
        tempRes.Alg = "Кли";
        tempRes.Res = result.RulesDatabaseSet[0];
        tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
        ResultsKLI[1] = new ScoreListElem(tempRes);
        if (!double.IsNaN(tempRes.Error))
        {
            ResultsKLIFull.Add(new ScoreListElem(tempRes));
        }
        for (int i = 0; i < 100; i++)
        {
            double leftValue = centerValue * (1 - magic);
            double rigthValue = centerValue * (1 + magic);
            // Left probe.
            tempRes = new ScoreListElem();
            KLI.kliGenerate(result, type_func, leftValue);
            tempRes.Alg = "Кли";
            tempRes.MaxError = leftValue;
            tempRes.Res = result.RulesDatabaseSet[0];
            tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
            ResultsKLI[0] = new ScoreListElem(tempRes);
            if (!double.IsNaN(tempRes.Error))
            {
                ResultsKLIFull.Add(new ScoreListElem(tempRes));
            }
            // The generator repopulates RulesDatabaseSet; clear between probes.
            result.RulesDatabaseSet.Clear();
            // Right probe.
            tempRes = new ScoreListElem();
            KLI.kliGenerate(result, type_func, rigthValue);
            tempRes.Alg = "Кли";
            tempRes.MaxError = rigthValue;
            tempRes.Res = result.RulesDatabaseSet[0];
            tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
            ResultsKLI[2] = tempRes;
            if (!double.IsNaN(tempRes.Error))
            {
                ResultsKLIFull.Add(new ScoreListElem(tempRes));
            }
            result.RulesDatabaseSet.Clear();
            // Shrink the bracket when the center is at least as good as both
            // probes (NaN probes count as "worse").
            if (((ResultsKLI[0].Error >= ResultsKLI[1].Error) || double.IsNaN(ResultsKLI[0].Error)) && ((ResultsKLI[1].Error <= ResultsKLI[2].Error) || (double.IsNaN(ResultsKLI[2].Error))))
            {
                magic *= 0.7;
            }
            // Recenter the search on the best candidate seen so far.
            double err = ResultsKLIFull.Min(y => y.Error);
            ResultsKLI[1].Error = err;
            centerValue = ResultsKLIFull.First(x => x.Error == err).MaxError;
        }

        // --- Search 2: local kliGenerate ("Кли2"), same procedure from the
        // same starting center. ---
        centerValue = mayError;
        tempRes = new ScoreListElem();
        kliGenerate(result, type_func, centerValue);
        tempRes.Alg = "Кли2";
        tempRes.MaxError = centerValue;
        tempRes.Res = result.RulesDatabaseSet[0];
        tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
        ResultsKLI2[1] = new ScoreListElem(tempRes);
        if (!double.IsNaN(tempRes.Error))
        {
            ResultsKLI2Full.Add(new ScoreListElem(tempRes));
        }
        result.RulesDatabaseSet.Clear();
        magic = 0.25;
        for (int i = 0; i < 100; i++)
        {
            double leftValue = centerValue * (1 - magic);
            double rigthValue = centerValue * (1 + magic);
            // Left probe.
            tempRes = new ScoreListElem();
            kliGenerate(result, type_func, leftValue);
            tempRes.Alg = "Кли2";
            tempRes.MaxError = leftValue;
            tempRes.Res = result.RulesDatabaseSet[0];
            tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
            ResultsKLI2[0] = new ScoreListElem(tempRes);
            if (!double.IsNaN(tempRes.Error))
            {
                ResultsKLI2Full.Add(new ScoreListElem(tempRes));
            }
            result.RulesDatabaseSet.Clear();
            // Right probe.
            tempRes = new ScoreListElem();
            kliGenerate(result, type_func, rigthValue);
            tempRes.Alg = "Кли2";
            tempRes.MaxError = rigthValue;
            tempRes.Res = result.RulesDatabaseSet[0];
            tempRes.Error = result.ErrorLearnSamples(tempRes.Res);
            ResultsKLI2[2] = new ScoreListElem(tempRes);
            if (!double.IsNaN(tempRes.Error))
            {
                ResultsKLI2Full.Add(new ScoreListElem(tempRes));
            }
            result.RulesDatabaseSet.Clear();
            if (((ResultsKLI2[0].Error >= ResultsKLI2[1].Error) || double.IsNaN(ResultsKLI2[0].Error)) && ((ResultsKLI2[1].Error <= ResultsKLI2[2].Error) || (double.IsNaN(ResultsKLI2[2].Error))))
            {
                magic *= 0.7;
            }
            double err = ResultsKLI2Full.Min(y => y.Error);
            ResultsKLI2[1].Error = err;
            centerValue = ResultsKLI2Full.First(x => x.Error == err).MaxError;
        }

        // Pick the overall winner of the two searches and install its rule base.
        ScoreListElem BestKli = ResultsKLIFull.Where(x => x.Error == ResultsKLIFull.Min(y => y.Error)).First();
        ScoreListElem BestKli2 = ResultsKLI2Full.Where(x => x.Error == ResultsKLI2Full.Min(y => y.Error)).First();
        if (BestKli.Error > BestKli2.Error)
        {
            Best = BestKli2;
        }
        else
        {
            Best = BestKli;
        }
        result.RulesDatabaseSet.Add(Best.Res);
    }
    return (result);
}
// Monkey-algorithm tuner.  Seeds a population from the current rule base
// (individuals beyond index 3 get Gaussian-jittered parameters), then runs
// nested climb / watch-jump / somersault phases, tracking the best solution
// via CheckForBest.  Optional tab-separated logging to ..\logs_* and
// ..\result_* files when `debug`/`totxt` are set.
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Classifier, ILearnAlgorithmConf conf)
{
    Init(conf);
    KnowlegeBaseTSARules temp_c_Rule = new KnowlegeBaseTSARules(Classifier.RulesDatabaseSet[0]);
    TSAFuzzySystem result = Classifier;
    // Log files are named after the test-sample file.
    string file_string = @"..\logs_" + result.TestSamplesSet.FileName + ".txt";
    string file_string_to_txt = @"..\result_" + result.TestSamplesSet.FileName + ".txt";

    // --- Population seeding ---
    for (int t = 0; t < population_count; t++)
    {
        monkey[t] = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
        // The first four monkeys stay exact copies; the rest are jittered
        // with sigma = 5% of each parameter value.
        if (t > 3)
        {
            for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++)
            {
                for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++)
                {
                    monkey[t].TermsSet[k].Parametrs[q] = GaussRandom.Random_gaussian(rand, monkey[t].TermsSet[k].Parametrs[q], monkey[t].TermsSet[k].Parametrs[q] * 0.05);
                }
            }
        }
        // Repair individuals whose error is NaN/Infinity before use.
        double unlaidtest = result.ErrorLearnSamples(monkey[t]);
        if (double.IsNaN(unlaidtest) || double.IsInfinity(unlaidtest))
        {
            result.UnlaidProtectionFix(monkey[t]);
        }
        testvals[t] = result.ErrorLearnSamples(monkey[t]);
        Console.WriteLine("Begin: " + t.ToString() + " " + iter.ToString() + " " + testvals[t].ToString());
    }
    bestsolution = new KnowlegeBaseTSARules(monkey.SelectBest(result, 1)[0]);
    bestsolutionnumber = result.ErrorLearnSamples(bestsolution);
    // Total number of tunable parameters across all terms.
    deltaLength = result.RulesDatabaseSet[0].TermsSet.Sum(x => x.Parametrs.Length);

    // --- Debug log header ---
    if (debug)
    {
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
        {
            file.WriteLine(DateTime.Today.ToString() + "\t" + result.LearnSamplesSet.FileName);
            file.WriteLine("Parameters:");
            file.WriteLine("Population\t" + population_count.ToString());
            file.WriteLine("Iteration count\t" + iter_amount.ToString());
            file.WriteLine("Crawl count\t" + crawl_iter.ToString());
            file.WriteLine("Jump count\t" + jump_iter.ToString());
            file.WriteLine("Somersault count\t" + somersault_iter.ToString());
            file.WriteLine("Crawl step\t" + step.ToString());
            file.WriteLine("Jump step\t" + watch_jump_parameter.ToString());
            file.WriteLine("Somersault left border\t" + somersault_interval_left.ToString());
            file.WriteLine("Somersault right border\t" + somersault_interval_right.ToString());
            file.WriteLine("\t\tMonkeys");
            file.Write("Iterations\t");
            for (int t = 0; t < population_count; t++)
            {
                file.Write("\t" + t);
            }
            file.WriteLine();
            file.Write("0\tbegin");
            for (int t = 0; t < population_count; t++)
            {
                file.Write("\t" + testvals[t].ToString());
            }
            file.WriteLine();
        }
    }

    // Total work estimate used only for progress reporting.
    iter_amount = (((crawl_iter + jump_iter) * jump_iter) + somersault_iter) * somersault_iter;

    // --- Main phase loops: climb (crawl), local watch-jump, global somersault ---
    for (int r = 0; r < somersault_iter; r++)
    {
        for (int t = 0; t < jump_iter; t++)
        {
            for (int e = 0; e < crawl_iter; e++)
            {
                iter++;
                oneClimb(result, deltaLength, step);
                CheckForBest(result);
                if (debug)
                {
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                    {
                        file.Write(iter.ToString() + "\tcrawl");
                        for (int p = 0; p < population_count; p++)
                        {
                            file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                        }
                        file.WriteLine();
                    }
                }
            }
            for (int e = 0; e < jump_iter; e++)
            {
                iter++;
                oneWatchJump(result);
                CheckForBest(result);
                if (debug)
                {
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                    {
                        file.Write(iter.ToString() + "\tlocaljump");
                        for (int p = 0; p < population_count; p++)
                        {
                            file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                        }
                        file.WriteLine();
                    }
                }
            }
        }
        for (int e = 0; e < somersault_iter; e++)
        {
            iter++;
            oneGlobalJump(result);
            CheckForBest(result);
            if (debug)
            {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
                {
                    file.Write(iter.ToString() + "\tglobaljump");
                    for (int p = 0; p < population_count; p++)
                    {
                        file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString());
                    }
                    file.WriteLine();
                }
            }
            Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
        }
    }

    // --- Final refinement: keep iterating with shrinking steps until the
    // best solution stagnates for final_counter checks (only when `last`).
    // NOTE(review): these loop bounds pair somersault_iter with oneClimb and
    // crawl_iter with oneGlobalJump — the reverse of the main loop above.
    // Confirm whether that swap is intentional.
    while ((final_iter < final_counter) && (last == true))
    {
        step *= 0.9;
        watch_jump_parameter *= 0.9;
        somersault_interval_left *= 0.9;
        somersault_interval_right *= 0.9;
        for (int r = 0; r < somersault_iter; r++)
        {
            oneClimb(result, deltaLength, step);
            CheckForBest(result);
            iter++;
        }
        for (int t = 0; t < jump_iter; t++)
        {
            oneWatchJump(result);
            CheckForBest(result);
            iter++;
        }
        for (int e = 0; e < crawl_iter; e++)
        {
            oneGlobalJump(result);
            CheckForBest(result);
            iter++;
        }
        Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
    }

    // Install the best solution if it is at least as good as the current base.
    CheckForBest(result);
    if (bestsolutionnumber <= result.ErrorLearnSamples(result.RulesDatabaseSet[0]))
    {
        result.RulesDatabaseSet[0] = bestsolution;
    }
    iter = 0;
    if (debug)
    {
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true))
        {
            file.WriteLine("Results\t" + result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
        }
    }
    if (totxt)
    {
        using (System.IO.StreamWriter file_result = new System.IO.StreamWriter(file_string_to_txt, true))
        {
            file_result.WriteLine(result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
        }
    }
    return (result);
}
public virtual void oneClimb(TSAFuzzySystem result, int length, double st)
{
    // Climb (crawl) phase.  For every monkey a pseudo-gradient direction is
    // probed by evaluating a +delta and a -delta neighbour; the monkey then
    // moves toward the better probe when that improves its learn error.
    //
    // length - total number of term parameters (size of the delta vector);
    // st     - nominal step size, jittered per call with sigma = 5% of st.
    double step = GaussRandom.Random_gaussian(rand, st, st * 0.05);
    for (int j = 0; j < population_count; j++)
    {
        // Alternate between the two delta-vector generators in pairs.
        double[] delta = (j % 4 < 2) ? ClimbVector(length, step) : ClimbVectorR(length, step);

        monkeysum = new KnowlegeBaseTSARules(monkey[j]);
        monkeysub = new KnowlegeBaseTSARules(monkey[j]);
        KnowlegeBaseTSARules temp = new KnowlegeBaseTSARules(monkey[j]);

        // Build the two probe points: current position +/- delta.
        int i = 0;
        for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++)
        {
            for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++, i++)
            {
                monkeysum.TermsSet[k].Parametrs[q] += delta[i];
                monkeysub.TermsSet[k].Parametrs[q] -= delta[i];
            }
        }

        // PERF FIX: evaluate each probe exactly once.  The original code
        // recomputed both ErrorLearnSamples calls in the odd-j branch,
        // doubling the most expensive operation of the loop.
        double errSum = result.ErrorLearnSamples(monkeysum);
        double errSub = result.ErrorLearnSamples(monkeysub);
        bool sumIsBetter = errSum < errSub;

        if (j % 2 == 0)
        {
            // Pseudo-gradient move: step from the current point in the
            // direction of the better probe.
            int sign_num = sumIsBetter ? 1 : -1;
            i = 0;
            for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++)
            {
                for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++, i++)
                {
                    temp.TermsSet[k].Parametrs[q] = monkey[j].TermsSet[k].Parametrs[q] + sign_num * delta[i];
                }
            }
        }
        else
        {
            // Direct move: simply adopt the better of the two probes.
            temp = sumIsBetter ? monkeysum : monkeysub;
        }

        // Accept the move only if it strictly improves the monkey.
        if (result.ErrorLearnSamples(temp) < result.ErrorLearnSamples(monkey[j]))
        {
            monkey[j] = temp;
        }

        // Diagnostic bookkeeping: best error seen per monkey.
        double testval = result.ErrorLearnSamples(monkey[j]);
        if (testval < testvals[j])
        {
            testvals[j] = testval;
        }
    }
}
// Krill-herd tuner.  Each iteration evaluates the population (K[i] = learn
// error), derives a virtual "food" position Xfood as the error-weighted mean
// of the population, then builds per-individual motion components:
// B = Bfood + Bbest (food- and best-induced drift, scaled by Vf with inertia
// wf), N (neighbour-induced + target-induced movement with inertia wn), and a
// random diffusion D.  Positions are updated by calcdeltat(ct) * dX and the
// best rule base seen is installed into slot 0 at the end.
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approx, ILearnAlgorithmConf conf)
{
    result = Approx;
    Init(conf);
    SetPopulation();
    KnowlegeBaseTSARules BEST = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
    double bestError = result.ErrorLearnSamples(BEST);
    // Clear the console when compiled with the `debug` symbol.
#if debug
    Console.Clear();
#endif
    // Main iteration loop.
    for (int it = 0; it < iter; it++)
    {
#if debug
        Console.Write("Итерация __№__ = ");
        Console.WriteLine(it);
#endif
        // Fitness evaluation; NaN/Infinity individuals get repaired in place.
        Population = ListTakagiSugenoApproximateTool.SortRules(Population, result);
        double[] K = new double[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            K[i] = result.ErrorLearnSamples(Population[i]);
#if debug
            Console.Write("Значние K[i1] = ");
            Console.WriteLine(K[i]);
#endif
            if (double.IsNaN(K[i]) || double.IsInfinity(K[i]))
            {
                result.UnlaidProtectionFix(Population[i]);
                K[i] = result.ErrorLearnSamples(Population[i]);
#if debug
                Console.Write("Значние K[i2] = ");
                Console.WriteLine(K[i]);
#endif
            }
        }
        Kworst = K.Max();
        // iworst is only computed for the debug printout below.
        if (double.IsNaN(Kworst) || double.IsInfinity(Kworst))
        {
            int iworst = K.ToList().IndexOf(Kworst);
#if debug
            Console.Write("Значние iworst = ");
            Console.WriteLine(iworst);
#endif
        }
#if debug
        Console.Write("Значние KWorst = ");
        Console.WriteLine(Kworst);
#endif
        Kbest = K.Min();
#if debug
        Console.Write("Значние Kbest = ");
        Console.WriteLine(Kbest);
#endif
        int ibest = K.ToList().IndexOf(Kbest);
#if debug
        Console.Write("Значние ibest = ");
        Console.WriteLine(ibest);
#endif
        // Random diffusion term D, linearly scaled up with iteration progress.
        double dit;
        dit = it;
        double diter;
        diter = iter;
        double D = (dmax * (rand.NextDouble() * 2 - 1) * (dit / diter));
        // rand1 is diagnostic only.  NOTE(review): at it == 0 this is 0/0 =
        // NaN; it never feeds back into the algorithm.
        double rand1;
        rand1 = D / (dmax * (it) / iter);
#if debug
        Console.Write("Значение Drand = ");
        Console.WriteLine(rand1);
        Console.Write("Значение __D__ = ");
        Console.WriteLine(D);
#endif
        // Virtual food position: parameter-wise mean of the population
        // weighted by 1/K[i] (better individuals pull harder).
        double divide = K.Select(x => 1 / x).ToList().Sum();
        var Xfood = new KnowlegeBaseTSARules(Population[0]);
        for (int t = 0; t < Xfood.TermsSet.Count; t++)
        {
            for (int p = 0; p < Xfood.TermsSet[t].CountParams; p++)
            {
                Xfood.TermsSet[t].Parametrs[p] = 0;
                for (int i = 0; i < Population.Length; i++)
                {
                    Xfood.TermsSet[t].Parametrs[p] += Population[i].TermsSet[t].Parametrs[p] / K[i];
#if debug
                    Console.Write("Значение Xfood = ");
                    Console.WriteLine(Xfood.TermsSet[t].Parametrs[p]);
#endif
                }
                Xfood.TermsSet[t].Parametrs[p] /= divide;
            }
        }
#if debug
        Console.Write("Значние divide = ");
        Console.WriteLine(divide);
#endif
        // Fitness of the food position (repaired when non-finite).
        double Kfood = result.ErrorLearnSamples(Xfood);
        if (double.IsNaN(Kfood) || double.IsInfinity(Kfood))
        {
            result.UnlaidProtectionFix(Xfood);
            Kfood = result.ErrorLearnSamples(Xfood);
        }
#if debug
        Console.Write("Значение Kfood = ");
        Console.WriteLine(Kfood);
#endif
        // Food attraction coefficient decays linearly over the run.
        double Cfood = 2 * (1 - (dit / diter));
#if debug
        Console.Write("Значение Cfood = ");
        Console.WriteLine(Cfood);
#endif
        // Bfood: food-induced drift per individual.
        KnowlegeBaseTSARules[] Bfood = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            Bfood[i] = new KnowlegeBaseTSARules(Population[i]);
            double KRoofifood = CalcKroof(K[i], Kfood);
            KnowlegeBaseTSARules Xroofifood = new KnowlegeBaseTSARules(CalcXroof(Population[i], Xfood));
            for (int t = 0; t < Bfood[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < Bfood[i].TermsSet[t].CountParams; p++)
                {
                    Bfood[i].TermsSet[t].Parametrs[p] = Cfood * KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Значение Bfood = ");
                    Console.WriteLine(Bfood[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // Bbest: best-individual-induced drift per individual.
        KnowlegeBaseTSARules[] Bbest = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            Bbest[i] = new KnowlegeBaseTSARules(Population[i]);
            double KRoofifood = CalcKroof(K[i], K[ibest]);
            KnowlegeBaseTSARules Xroofifood = new KnowlegeBaseTSARules(CalcXroof(Population[i], Population[ibest]));
            for (int t = 0; t < Bbest[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < Bbest[i].TermsSet[t].CountParams; p++)
                {
                    Bbest[i].TermsSet[t].Parametrs[p] = KRoofifood * Xroofifood.TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Значение Bbest = ");
                    Console.WriteLine(Bbest[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // B = Bfood + Bbest.
        KnowlegeBaseTSARules[] B = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            B[i] = new KnowlegeBaseTSARules(Population[i]);
            for (int t = 0; t < B[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < B[i].TermsSet[t].CountParams; p++)
                {
                    B[i].TermsSet[t].Parametrs[p] = Bfood[i].TermsSet[t].Parametrs[p] + Bbest[i].TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Значение __B__ = ");
                    Console.WriteLine(B[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // F: foraging motion with inertia wf chained from the previous
        // individual (F[i-1]), scaled by Vf.
        KnowlegeBaseTSARules[] F = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            if (i == 0)
            {
                F[i] = new KnowlegeBaseTSARules(Population[i]);
                for (int t = 0; t < F[i].TermsSet.Count; t++)
                {
                    for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                    {
                        F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p];
#if debug
                        Console.Write("Значение __F__ = ");
                        Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                    }
                }
            }
            else
            {
                F[i] = new KnowlegeBaseTSARules(Population[i]);
                for (int t = 0; t < F[i].TermsSet.Count; t++)
                {
                    for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                    {
                        F[i].TermsSet[t].Parametrs[p] = Vf * B[i].TermsSet[t].Parametrs[p] + wf * F[i - 1].TermsSet[t].Parametrs[p];
#if debug
                        Console.Write("Значение __F__ = ");
                        Console.WriteLine(F[i].TermsSet[t].Parametrs[p]);
#endif
                    }
                }
            }
        }
        // alocal: neighbour-induced movement, summed over each individual's
        // neighbour list.
        List <int> [] neihbors = new List <int> [Population.Length];
        KnowlegeBaseTSARules[] alocal = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            alocal[i] = new KnowlegeBaseTSARules(Population[i]);
            neihbors[i] = countneihbors(Population[i]);
            for (int t = 0; t < alocal[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < alocal[i].TermsSet[t].CountParams; p++)
                {
                    alocal[i].TermsSet[t].Parametrs[p] = 0;
                    for (int j = 0; j < neihbors[i].Count; j++)
                    {
                        double KRoofij = CalcKroof(K[i], K[neihbors[i][j]]);
                        KnowlegeBaseTSARules XRoofij = new KnowlegeBaseTSARules(CalcXroof(Population[i], Population[neihbors[i][j]]));
                        alocal[i].TermsSet[t].Parametrs[p] += KRoofij * XRoofij.TermsSet[t].Parametrs[p];
#if debug
                        Console.Write("Знаение alocal = ");
                        Console.WriteLine(alocal[i].TermsSet[t].Parametrs[p]);
#endif
                    }
                }
            }
        }
        // Best-attraction coefficient for the target component.
        double Cbest = 2 * (rand.NextDouble() - (dit / diter));
#if debug
        Console.Write("Значение Сbest = ");
        Console.WriteLine(Cbest);
#endif
        // rand2 is diagnostic only.  NOTE(review): `it / iter` here is
        // integer division (0 for it < iter); it never feeds the update.
        double rand2;
        rand2 = it / iter - Cbest / 2;
#if debug
        Console.Write("Значение Crand = ");
        Console.WriteLine(rand2);
#endif
        // atarget: movement induced by the best individual.
        KnowlegeBaseTSARules[] atarget = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            atarget[i] = new KnowlegeBaseTSARules(Population[i]);
            double KRoofibest = CalcKroof(K[i], K[ibest]);
            KnowlegeBaseTSARules XRoofibest = new KnowlegeBaseTSARules(CalcXroof(Population[i], Population[ibest]));
            for (int t = 0; t < alocal[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < atarget[i].TermsSet[t].CountParams; p++)
                {
                    atarget[i].TermsSet[t].Parametrs[p] = Cbest * KRoofibest * XRoofibest.TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Знание atarget = ");
                    Console.WriteLine(atarget[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // a = atarget + alocal.
        KnowlegeBaseTSARules[] a = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            a[i] = new KnowlegeBaseTSARules(Population[i]);
            for (int t = 0; t < a[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                {
                    a[i].TermsSet[t].Parametrs[p] = atarget[i].TermsSet[t].Parametrs[p] + alocal[i].TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Значение __a__ = ");
                    Console.WriteLine(a[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // N: induced motion with inertia wn chained from N[i-1].
        // NOTE(review): the i == 0 branch scales by Vf while the others use
        // nmax — confirm whether that asymmetry is intentional.
        KnowlegeBaseTSARules[] N = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            if (i == 0)
            {
                N[i] = new KnowlegeBaseTSARules(Population[i]);
                for (int t = 0; t < N[i].TermsSet.Count; t++)
                {
                    for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                    {
                        N[i].TermsSet[t].Parametrs[p] = Vf * a[i].TermsSet[t].Parametrs[p];
#if debug
                        Console.Write("Значение __N__ = ");
                        Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                    }
                }
            }
            else
            {
                N[i] = new KnowlegeBaseTSARules(Population[i]);
                for (int t = 0; t < F[i].TermsSet.Count; t++)
                {
                    for (int p = 0; p < N[i].TermsSet[t].CountParams; p++)
                    {
                        N[i].TermsSet[t].Parametrs[p] = nmax * a[i].TermsSet[t].Parametrs[p] + wn * N[i - 1].TermsSet[t].Parametrs[p];
#if debug
                        Console.Write("Значение __N__ = ");
                        Console.WriteLine(N[i].TermsSet[t].Parametrs[p]);
#endif
                    }
                }
            }
        }
        // dX = F + N + D: the full displacement per parameter.
        KnowlegeBaseTSARules[] dX = new KnowlegeBaseTSARules[Population.Length];
        for (int i = 0; i < Population.Length; i++)
        {
            dX[i] = new KnowlegeBaseTSARules(Population[i]);
            for (int t = 0; t < a[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < a[i].TermsSet[t].CountParams; p++)
                {
                    dX[i].TermsSet[t].Parametrs[p] = F[i].TermsSet[t].Parametrs[p] + N[i].TermsSet[t].Parametrs[p] + D;
#if debug
                    Console.Write("Значение _dX__ = ");
                    Console.WriteLine(dX[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // Position update: X(t+dt) = X(t) + calcdeltat(ct) * dX.
        for (int i = 0; i < Population.Length; i++)
        {
            Population[i] = new KnowlegeBaseTSARules(Population[i]);
            for (int t = 0; t < Population[i].TermsSet.Count; t++)
            {
                for (int p = 0; p < F[i].TermsSet[t].CountParams; p++)
                {
                    Population[i].TermsSet[t].Parametrs[p] = Population[i].TermsSet[t].Parametrs[p] + calcdeltat(ct) * dX[i].TermsSet[t].Parametrs[p];
#if debug
                    Console.Write("Знание X(t+dt) = ");
                    Console.WriteLine(Population[i].TermsSet[t].Parametrs[p]);
#endif
                }
            }
        }
        // Track the best individual seen over the whole run.
        for (int i = 0; i < Population.Length; i++)
        {
            double temp = result.ErrorLearnSamples(Population[i]);
            if (double.IsNaN(temp) || double.IsInfinity(temp))
            {
                // NOTE(review): the repair targets Xfood but the re-evaluated
                // base is Population[i] — this looks like a copy-paste of the
                // earlier Kfood repair; confirm whether Population[i] was
                // intended here.
                result.UnlaidProtectionFix(Xfood);
                temp = result.ErrorLearnSamples(Population[i]);
            }
            if (temp < bestError)
            {
                BEST = new KnowlegeBaseTSARules(Population[i]);
                bestError = temp;
            }
        }
        // Console progress every 10 iterations.
        double y = it;
        if (y % 10 == 0 & y != 0)
        {
            Console.WriteLine(it);
            Console.WriteLine(bestError);
        }
#if debug
        Console.Write("Значние BestError = ");
        Console.WriteLine(bestError);
        Console.WriteLine(".");
#endif
    }
    result.RulesDatabaseSet[0] = BEST;
    return (result);
}
// Dynamic term-adding tuner.  While the (MSE/2-scaled) learn error exceeds
// config.MaxError, it finds the worst-error region of the input space, splits
// the most error-contributing variable there by inserting a new term, rebuilds
// the affected rules' consequents by recursive LSM, and keeps the best rule
// base seen.  Stops when the rule budget or the no-improvement try budget is
// exhausted.
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approximate, ILearnAlgorithmConf conf)
{
    result = Approximate;
    // NOTE(review): Archive/ErrorsArchive are never used in this method.
    List <KnowlegeBaseTSARules> Archive = new List <KnowlegeBaseTSARules>();
    List <double> ErrorsArchive = new List <double>();
    var config = (DynamicTuneConf)conf;
    maxError = config.MaxError;
    RuleCount = config.RulesCount;
    TryCount = config.TryCount;
    double error = result.RMSEtoMSEdiv2forLearn(result.approxLearnSamples(result.RulesDatabaseSet[0]));
    var kbToOptimize = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
    var kbBest = new KnowlegeBaseTSARules(kbToOptimize);
    double errorBefore = Double.MaxValue;
    result.UnlaidProtectionFix(kbToOptimize);
    // One input_space per input variable, holding that variable's terms.
    List <input_space> variable_spaces = new List <input_space>();
    for (int i = 0; i < result.LearnSamplesSet.InputAttributes.Count; i++)
    {
        List <Term> terms_of_variable = new List <Term>();
        terms_of_variable = kbToOptimize.TermsSet.Where(term => term.NumVar == i).ToList();
        variable_spaces.Add(new input_space(terms_of_variable, i));
    }
    int indexRegion = -1, indexVar = -1, number_of_input_variables = variable_spaces.Count;
    int tryCount = 0;
    while (error > maxError)
    {
        // Non-finite error means the system is broken — abort loudly.
        if (Double.IsInfinity(error))
        {
            throw new Exception("Something went wrong, error is Infinity, region: " + indexRegion);
        }
        if (Double.IsNaN(error))
        {
            throw new Exception("Something went wrong, error is NaN, region: " + indexRegion);
        }
        // Build all regions as the cartesian product of per-variable sides.
        region_side[][] sides = new region_side[number_of_input_variables][];
        for (int i = 0; i < number_of_input_variables; i++)
        {
            sides[i] = variable_spaces[i].get_region_sides();
        }
        var cartresult = CartesianProduct.Get(sides);
        List <region2> regions = new List <region2>();
        foreach (var x in cartresult)
        {
            regions.Add(new region2(x.ToList(), result, variable_spaces));
        }
        // Pick the region with the largest error.
        // NOTE(review): indexRegion is chosen from the raw errors BEFORE the
        // NaN/Infinity entries below are zeroed — confirm the intended order.
        List <double> region_errors = regions.Select(x => x.region_error()).ToList();
        indexRegion = region_errors.IndexOf(region_errors.Max());
        for (int i = 0; i < region_errors.Count; i++)
        {
            if (Double.IsNaN(region_errors[i]) || Double.IsInfinity(region_errors[i]) || Double.IsNegativeInfinity(region_errors[i]) || Double.IsPositiveInfinity(region_errors[i]))
            {
                region_errors[i] = 0;
            }
        }
        // Inside the worst region, pick the variable to split: the one with
        // the largest error, or a random one when all errors are equal.
        List <double> variable_errors = regions[indexRegion].variable_errors();
        bool check1 = false;
        for (int i = 1; i < variable_errors.Count; i++)
        {
            if (variable_errors[i - 1] != variable_errors[i])
            {
                check1 = true;
                break;
            }
        }
        if (!check1)
        {
            indexVar = StaticRandom.Next(variable_errors.Count - 1);
        }
        else
        {
            indexVar = variable_errors.IndexOf(variable_errors.Max());
        }
        // Insert the new term for the chosen variable.
        Term new_term = regions[indexRegion].new_term(indexVar);
        result.RulesDatabaseSet[0] = kbToOptimize;
        kbToOptimize.TermsSet.Add(new_term);
        // Rules touching the split region's left side are cloned with the new
        // term; rules touching the right side keep their terms but get their
        // consequents re-estimated.  (Reference identity is relied on here.)
        int @var = indexVar;
        var rulesLeft = kbToOptimize.RulesDatabase.Where(rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].left)).ToList();
        var rulesRight = kbToOptimize.RulesDatabase.Where(rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].right)).ToList();
        for (int j = 0; j < rulesLeft.Count; j++)
        {
            // Map the rule's terms to indices in TermsSet, swapping the left
            // side term for the freshly added one.
            int[] order = new int[rulesLeft[j].ListTermsInRule.Count];
            for (int k = 0; k < rulesLeft[j].ListTermsInRule.Count; k++)
            {
                Term temp_term = rulesLeft[j].ListTermsInRule[k];
                if (temp_term == regions[indexRegion].sides[indexVar].left)
                {
                    temp_term = new_term;
                }
                order[k] = kbToOptimize.TermsSet.FindIndex(x => x == temp_term);
            }
            // NOTE(review): the consequents are copied from RulesDatabase[j],
            // i.e. indexed by the position in rulesLeft, not the rule's own
            // position in RulesDatabase — confirm this is intended.
            double temp_approx_Values = kbToOptimize.RulesDatabase[j].IndependentConstantConsequent;
            double[] temp_approx_RegressionConstantConsequent = kbToOptimize.RulesDatabase[j].RegressionConstantConsequent.Clone() as double[];
            TSARule temp_rule = new TSARule(kbToOptimize.TermsSet, order, temp_approx_Values, temp_approx_RegressionConstantConsequent);
            // Re-estimate the new rule's consequent by recursive LSM.
            double[] dC = null;
            temp_rule.IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(result, temp_rule.ListTermsInRule.ToList(), out dC);
            temp_rule.RegressionConstantConsequent = (double[])dC.Clone();
            kbToOptimize.RulesDatabase.Add(temp_rule);
            // Also refresh the consequent of the original (left-side) rule.
            rulesLeft[j].IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(result, rulesLeft[j].ListTermsInRule.ToList(), out dC);
            rulesLeft[j].RegressionConstantConsequent = (double[])dC.Clone();
        }
        foreach (var rule in rulesRight)
        {
            double[] dC = null;
            rule.IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(result, rule.ListTermsInRule.ToList(), out dC);
            rule.RegressionConstantConsequent = dC;
        }
        // Register the new term in its variable's space, kept sorted by pick.
        variable_spaces[indexVar].terms.Add(new_term);
        variable_spaces[indexVar].terms.Sort(new CompararerByPick());
        // Re-evaluate the system's error.
        error = result.RMSEtoMSEdiv2forLearn(result.ErrorLearnSamples(kbToOptimize));
        // Stop when the rule budget is exceeded.
        if ((kbToOptimize.RulesDatabase.Count > config.RulesCount))
        {
            break;
        }
#if Console
        Console.WriteLine(error + " " + kbToOptimize.TermsSet.Count + " terms\n");
        for (int i = 0; i < variable_spaces.Count; i++)
        {
            Console.WriteLine(variable_spaces[i].terms.Count + " термов по " + i + "му параметру\n");
        }
#endif
        result.RulesDatabaseSet[0] = kbToOptimize;
        // Keep the best knowledge base; count fruitless tries otherwise.
        if (error < errorBefore)
        {
            kbBest = new KnowlegeBaseTSARules(kbToOptimize);
            errorBefore = error;
            tryCount = 0;
        }
        else
        {
            tryCount++;
        }
        if (tryCount > TryCount)
        {
            break;
        }
    }
    // Publish the best base and report the final counters via the fields.
    result.RulesDatabaseSet[0] = kbBest;
    RuleCount = kbBest.RulesDatabase.Count;
    TryCount = tryCount;
    return (result);
}