/// <summary>
/// Removes duplicate rules from <paramref name="Source"/> in place. When a later
/// rule equals an earlier one, the later copy is dropped and the surviving rule's
/// consequent is re-estimated via <c>KNNConsequent.NearestApprox</c>.
/// </summary>
/// <param name="Approximate">System used to re-estimate the surviving rule's consequent.</param>
/// <param name="Source">Rule list to deduplicate; mutated in place.</param>
/// <returns>The same (now deduplicated) list instance that was passed in.</returns>
private List<SARule> RemoveDuplicate(SAFuzzySystem Approximate, List<SARule> Source)
{
    // Walk backwards so RemoveAt(current) never disturbs indices still to be visited.
    for (int current = Source.Count - 1; current > 0; current--)
    {
        for (int earlier = current - 1; earlier >= 0; earlier--)
        {
            if (!Equals(Source[current], Source[earlier]))
            {
                continue;
            }

            // Drop the later duplicate, then refresh the consequent of the kept rule.
            Source.RemoveAt(current);
            Source[earlier].IndependentConstantConsequent =
                KNNConsequent.NearestApprox(Approximate, Source[earlier].ListTermsInRule);
            break;
        }
    }

    // The list was modified in place; callers receive the same object back.
    return Source;
}
/// <summary>
/// Iteratively refines the first knowledge base of <paramref name="Approximate"/> by
/// repeatedly locating the input-space region with the largest error, splitting one of
/// its variables with a new term, and rebuilding the affected rules, until the learn
/// error drops below <c>MaxError</c>, the rule budget is exceeded, or <c>TryCount</c>
/// consecutive iterations fail to improve the best error seen so far.
/// </summary>
/// <param name="Approximate">System to tune; its RulesDatabaseSet[0] is replaced with the best base found.</param>
/// <param name="conf">Must be a <c>DynamicTuneConf</c> carrying MaxError, RulesCount and TryCount.</param>
/// <returns>The tuned system (same instance as <paramref name="Approximate"/>).</returns>
public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approximate, ILearnAlgorithmConf conf) // + override
{
    result = Approximate;
    // NOTE(review): Archive/ErrorsArchive are allocated but never used in this method.
    List<KnowlegeBaseSARules> Archive = new List<KnowlegeBaseSARules>();
    List<double> ErrorsArchive = new List<double>();

    // Pull the stop criteria out of the configuration.
    var config = (DynamicTuneConf)conf;
    maxError = config.MaxError;
    RuleCount = config.RulesCount;
    TryCount = config.TryCount;

    // Initial learn error of the current (first) knowledge base.
    double error = result.RMSEtoMSEdiv2forLearn(result.approxLearnSamples(result.RulesDatabaseSet[0]));

    // Work on a copy; kbBest tracks the best base seen so far.
    var kbToOptimize = new KnowlegeBaseSARules(result.RulesDatabaseSet[0]);
    var kbBest = new KnowlegeBaseSARules(kbToOptimize);
    double errorBefore = Double.MaxValue;
    result.UnlaidProtectionFix(kbToOptimize);

    // Group the terms of the working base by input variable.
    List<input_space> variable_spaces = new List<input_space>();
    for (int i = 0; i < result.LearnSamplesSet.InputAttributes.Count; i++)
    {
        List<Term> terms_of_variable = new List<Term>();
        terms_of_variable = kbToOptimize.TermsSet.Where(term => term.NumVar == i).ToList();
        variable_spaces.Add(new input_space(terms_of_variable, i));
    }

    int indexRegion = -1, indexVar = -1, number_of_input_variables = variable_spaces.Count;
    int tryCount = 0; // consecutive iterations without improvement

    while (error > maxError)
    {
        // Fail fast on a numerically broken error value.
        if (Double.IsInfinity(error))
        {
            throw new Exception("Something went wrong, error is Infinity, region: " + indexRegion);
        }
        if (Double.IsNaN(error))
        {
            throw new Exception("Something went wrong, error is NaN, region: " + indexRegion);
        }

        // Build the cartesian product of per-variable region sides -> candidate regions.
        region_side[][] sides = new region_side[number_of_input_variables][];
        for (int i = 0; i < number_of_input_variables; i++)
        {
            sides[i] = variable_spaces[i].get_region_sides();
        }
        var cartresult = CartesianProduct.Get(sides);
        List<region2> regions = new List<region2>();
        foreach (var x in cartresult)
        {
            regions.Add(new region2(x.ToList(), result, variable_spaces));
        }

        // Pick the region with the largest error.
        // NOTE(review): indexRegion is chosen BEFORE NaN/Inf entries are zeroed below,
        // so a NaN/Inf region can still be selected — TODO confirm this is intended.
        List<double> region_errors = regions.Select(x => x.region_error()).ToList();
        indexRegion = region_errors.IndexOf(region_errors.Max());
        for (int i = 0; i < region_errors.Count; i++)
        {
            if (Double.IsNaN(region_errors[i]) ||
                Double.IsInfinity(region_errors[i]) || Double.IsNegativeInfinity(region_errors[i]) || Double.IsPositiveInfinity(region_errors[i]))
            {
                region_errors[i] = 0;
            }
        }

        // Within the chosen region, pick the variable to split: the one with the
        // largest error, or a random one if all variable errors are equal.
        List<double> variable_errors = regions[indexRegion].variable_errors();
        bool check1 = false;
        for (int i = 1; i < variable_errors.Count; i++)
        {
            if (variable_errors[i - 1] != variable_errors[i])
            {
                check1 = true;
                break;
            }
        }
        if (!check1)
        {
            indexVar = StaticRandom.Next(variable_errors.Count - 1);
        }
        else
        {
            indexVar = variable_errors.IndexOf(variable_errors.Max());
        }

        // Create the splitting term and publish the working base.
        Term new_term = regions[indexRegion].new_term(indexVar);
        result.RulesDatabaseSet[0] = kbToOptimize;
        kbToOptimize.TermsSet.Add(new_term);

        // Rules (CHECK REFERENCE TYPES)
        int @var = indexVar; // NOTE(review): unused local kept from the original.

        // Rules touching the left side of the split get cloned with the new term;
        // rules touching the right side only get their consequent re-estimated.
        var rulesLeft = kbToOptimize.RulesDatabase.Where(
            rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].left)).ToList();
        var rulesRight = kbToOptimize.RulesDatabase.Where(
            rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].right)).ToList();

        for (int j = 0; j < rulesLeft.Count; j++)
        {
            // Rebuild the term-index order for the clone, substituting the new term
            // in place of the split region's left side term.
            int[] order = new int[rulesLeft[j].ListTermsInRule.Count];
            for (int k = 0; k < rulesLeft[j].ListTermsInRule.Count; k++)
            {
                Term temp_term = rulesLeft[j].ListTermsInRule[k];
                if (temp_term == regions[indexRegion].sides[indexVar].left)
                {
                    temp_term = new_term;
                }
                order[k] = kbToOptimize.TermsSet.FindIndex(x => x == temp_term);
            }
            ///!!!!
            // NOTE(review): indexes RulesDatabase[j] with the rulesLeft index j — these
            // are different collections; looks like a latent mismatch, TODO confirm.
            double temp_approx_Values = kbToOptimize.RulesDatabase[j].IndependentConstantConsequent;
            /* double[] temp_approx_RegressionConstantConsequent =
             * kbToOptimize.RulesDatabase[j].RegressionConstantConsequent.Clone() as double[]; */
            SARule temp_rule = new SARule(
                kbToOptimize.TermsSet, order, temp_approx_Values);
            // double[] dC = null;
            //!!!
            temp_rule.IndependentConstantConsequent = KNNConsequent.NearestApprox(result,
                                                                                  temp_rule.ListTermsInRule.ToList());
            kbToOptimize.RulesDatabase.Add(temp_rule);
            //!!!
            rulesLeft[j].IndependentConstantConsequent = KNNConsequent.NearestApprox(result,
                                                                                     rulesLeft[j].ListTermsInRule.ToList());
            // rulesLeft[j].RegressionConstantConsequent = (double[])dC.Clone();
        }

        foreach (var rule in rulesRight)
        {
            //!!!
            rule.IndependentConstantConsequent = KNNConsequent.NearestApprox(
                result, rule.ListTermsInRule.ToList());
            // rule.RegressionConstantConsequent = dC;
        }

        // Register the new term with its variable space, keeping the terms ordered.
        variable_spaces[indexVar].terms.Add(new_term);
        variable_spaces[indexVar].terms.Sort(new CompararerByPick());

        // Re-evaluate the system's error
        error = result.RMSEtoMSEdiv2forLearn(result.ErrorLearnSamples(kbToOptimize));

        // Stop once the rule budget is exhausted.
        if ((kbToOptimize.RulesDatabase.Count > config.RulesCount))
        {
            break;
        }
#if Console
        Console.WriteLine(error + " " + kbToOptimize.TermsSet.Count + " terms\n");
        for (int i = 0; i < variable_spaces.Count; i++)
        {
            Console.WriteLine(variable_spaces[i].terms.Count + " термов по " + i + "му параметру\n");
        }
#endif
        result.RulesDatabaseSet[0] = kbToOptimize;

        // Get the best knowledge base on the 1st place
        if (error < errorBefore)
        {
            kbBest = new KnowlegeBaseSARules(kbToOptimize);
            errorBefore = error;
            tryCount = 0;
        }
        else
        {
            tryCount++;
        }
        // Give up after TryCount consecutive non-improving iterations.
        if (tryCount > TryCount)
        {
            break;
        }
    }

    // Publish the best base found; expose the final counters through the fields.
    result.RulesDatabaseSet[0] = kbBest;
    RuleCount = kbBest.RulesDatabase.Count;
    TryCount = tryCount;
    return (result);
}
/// <summary>
/// Populates the system's first knowledge base with <c>RSRCountRules</c> randomly
/// generated rules. For each rule, one random term per input feature is created
/// (shape fixed by configuration, or drawn per-rule when <c>RSRConstant != 0</c>)
/// with parameters sampled uniformly from that feature's [Min, Max] range; the
/// rule consequent is then estimated with <c>KNNConsequent.NearestApprox</c>.
/// </summary>
/// <param name="Approximate">System to fill; returned as-is after generation.</param>
/// <param name="config">Must be a <c>GeneratorRullesSimpleRandomConfig</c>.</param>
/// <returns>The same system instance with rules generated.</returns>
/// <remarks>
/// Fixes vs. the previous revision: the 9 copies of the "Min + NextDouble()*(Max-Min)"
/// expression are factored into helpers, the configuration is cast once, the
/// loop-invariant InputAttributes[k] lookup is hoisted, and the production
/// <c>GC.Collect()</c> call (a documented anti-pattern: it only stalls the caller)
/// is removed. The sampling distribution is unchanged.
/// </remarks>
public override SAFuzzySystem Generate(FuzzySystem.SingletoneApproximate.SAFuzzySystem Approximate, IGeneratorConf config)
{
    Random rand = new Random();
    SAFuzzySystem result = Approximate;

    // Make sure there is a knowledge base to fill.
    if (result.RulesDatabaseSet.Count == 0)
    {
        KnowlegeBaseSARules temp_rules = new KnowlegeBaseSARules();
        result.RulesDatabaseSet.Add(temp_rules);
    }

    // Cast the configuration once instead of once per property.
    var generatorConfig = (GeneratorRullesSimpleRandomConfig)config;
    type_term = generatorConfig.RSRTypeFunc;
    stable_terms = (int)generatorConfig.RSRConstant;
    count_rules = generatorConfig.RSRCountRules;

    for (int j = 0; j < count_rules; j++)
    {
        int[] order = new int[result.CountFeatures];
        // stable_terms == 0: every term uses the configured shape;
        // otherwise each rule draws a random shape.
        TypeTermFuncEnum temp_type_term = (stable_terms == 0) ? type_term : Generator_type_term();

        List<Term> temp_term_list = new List<Term>();
        for (int k = 0; k < result.CountFeatures; k++)
        {
            // Hoist the loop-invariant attribute lookup for this feature.
            var attribute = result.LearnSamplesSet.InputAttributes[k];
            double[] parametrs = new double[Term.CountParamsinSelectedTermType(temp_type_term)];
            switch (temp_type_term)
            {
                case TypeTermFuncEnum.Треугольник:
                    // Triangle: 3 uniform points in range, sorted ascending.
                    FillSortedUniform(rand, parametrs, attribute.Min, attribute.Max);
                    break;
                case TypeTermFuncEnum.Гауссоида:
                    // Gaussian: random centre; width kept strictly positive
                    // by the +0.01 offset, scaled to half the attribute range.
                    parametrs[0] = RandomInRange(rand, attribute.Min, attribute.Max);
                    parametrs[1] = (rand.NextDouble() + 0.01) * 0.5 * (attribute.Max - attribute.Min);
                    break;
                case TypeTermFuncEnum.Парабола:
                    // Parabola: 2 uniform points in range, sorted ascending.
                    FillSortedUniform(rand, parametrs, attribute.Min, attribute.Max);
                    break;
                case TypeTermFuncEnum.Трапеция:
                    // Trapezoid: 4 uniform points in range, sorted ascending.
                    FillSortedUniform(rand, parametrs, attribute.Min, attribute.Max);
                    break;
            }

            Term temp_term = new Term(parametrs, temp_type_term, k);
            result.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            temp_term_list.Add(temp_term);
            // The rule references the term by its index in the shared term set.
            order[k] = result.RulesDatabaseSet[0].TermsSet.Count - 1;
        }

        double DoubleOutput = KNNConsequent.NearestApprox(result, temp_term_list);
        SARule temp_Rule = new SARule(result.RulesDatabaseSet[0].TermsSet, order, DoubleOutput);
        result.RulesDatabaseSet[0].RulesDatabase.Add(temp_Rule);
    }

    result.RulesDatabaseSet[0].TermsSet.Trim();
    result.UnlaidProtectionFix(result.RulesDatabaseSet[0]);
    return (result);
}

// Draws a single value uniformly from [min, max].
private static double RandomInRange(Random rand, double min, double max)
{
    return min + rand.NextDouble() * (max - min);
}

// Fills the whole array with uniform draws from [min, max], then sorts ascending
// (matches the original per-case "draw N, Array.Sort" pattern).
private static void FillSortedUniform(Random rand, double[] values, double min, double max)
{
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = RandomInRange(rand, min, max);
    }
    Array.Sort(values);
}
/// <summary>
/// Builds an initial rule base as the cartesian product "everyone with everyone":
/// each accepted feature is partitioned into <c>countSliceForVar[i]</c> evenly spaced
/// terms across its [Min, Min+Scatter] range, then one rule is created for every
/// combination of one term per feature, with its consequent estimated by
/// <c>KNNConsequent.NearestApprox</c>.
/// </summary>
/// <param name="Approximate">System whose first knowledge base is populated (created if missing).</param>
/// <param name="typeFunc">Membership-function shape used for every generated term.</param>
/// <param name="countSliceForVar">Terms per feature; entries for unaccepted features are zeroed in place.</param>
public static void InitRulesEveryoneWithEveryone(SAFuzzySystem Approximate, TypeTermFuncEnum typeFunc, int[] countSliceForVar)
{
    // Make sure there is a knowledge base to fill.
    if ((Approximate.RulesDatabaseSet == null) || (Approximate.RulesDatabaseSet.Count == 0))
    {
        KnowlegeBaseSARules temp_rules = new KnowlegeBaseSARules();
        Approximate.RulesDatabaseSet.Add(temp_rules);
    }

    // position_of_terms[i][j] = index (in the shared TermsSet) of the j-th term of feature i.
    int[][] position_of_terms = new int[Approximate.CountFeatures][];
    for (int i = 0; i < Approximate.CountFeatures; i++)
    {
        // Skipped features get zero slices (note: mutates the caller's array).
        if (Approximate.AcceptedFeatures[i] == false)
        {
            countSliceForVar[i] = 0;
            continue;
        }
        position_of_terms[i] = new int[countSliceForVar[i]];

        // Evenly space term centres across the feature's range; coeef is both
        // the step between centres and the half/full width fed to the term shape.
        double current_value = Approximate.LearnSamplesSet.InputAttributes[i].Min;
        double coeef = (Approximate.LearnSamplesSet.InputAttributes[i].Scatter);
        if (countSliceForVar[i] > 1)
        {
            coeef = coeef / (countSliceForVar[i] - 1);
        }
        // Single-term case: centre the lone term and halve its width
        // (the 1.000000001 factor presumably guards the range edges — TODO confirm).
        if (countSliceForVar[i] <= 1)
        {
            current_value = current_value + coeef * 0.5;
            coeef *= 1.000000001 / 2;
        }

        for (int j = 0; j < countSliceForVar[i]; j++)
        {
            double[] parametrs = new double[Term.CountParamsinSelectedTermType(typeFunc)];
            switch (typeFunc)
            {
                case TypeTermFuncEnum.Треугольник:
                    // Triangle: peak at the centre, feet one step away on each side.
                    parametrs[1] = current_value;
                    parametrs[0] = parametrs[1] - coeef;
                    parametrs[2] = parametrs[1] + coeef;
                    break;
                case TypeTermFuncEnum.Гауссоида:
                    // Gaussian: centre plus sigma = step/3.
                    parametrs[0] = current_value;
                    parametrs[1] = coeef / 3;
                    break;
                case TypeTermFuncEnum.Парабола:
                    // Parabola: support one step either side of the centre.
                    parametrs[0] = current_value - coeef;
                    parametrs[1] = current_value + coeef;
                    break;
                case TypeTermFuncEnum.Трапеция:
                    // Trapezoid: feet one step out, plateau over the middle 20% of the support.
                    parametrs[0] = current_value - coeef;
                    parametrs[3] = current_value + coeef;
                    parametrs[1] = parametrs[0] + 0.4 * (parametrs[3] - parametrs[0]);
                    parametrs[2] = parametrs[0] + 0.6 * (parametrs[3] - parametrs[0]);
                    break;
            }
            Term temp_term = new Term(parametrs, typeFunc, i);
            // Nudge the outermost terms slightly past the data range so boundary
            // samples still fall inside a term's support (not needed for Gaussians).
            if (countSliceForVar[i] > 1)
            {
                if ((j == 0) && (typeFunc != TypeTermFuncEnum.Гауссоида))
                {
                    temp_term.Min -= 0.00000001 * (temp_term.Max - temp_term.Min);
                }
                if ((j == countSliceForVar[i] - 1) && (typeFunc != TypeTermFuncEnum.Гауссоида))
                {
                    temp_term.Max += 0.0000001 * (temp_term.Max - temp_term.Min);
                }
            }
            Approximate.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            position_of_terms[i][j] = Approximate.RulesDatabaseSet[0].TermsSet.Count - 1;
            current_value += coeef;
        }
    }

    // Odometer over term indices: counter[i] counts down from the last term of
    // feature i; dec_count (defined elsewhere) advances it between rules.
    int first_notNull = -1;
    int[] counter = new int[Approximate.CountFeatures];
    for (int i = 0; i < Approximate.CountFeatures; i++)
    {
        if (Approximate.AcceptedFeatures[i] == false)
        {
            continue;
        }
        counter[i] = countSliceForVar[i] - 1;
        // Remember the first feature that actually has terms.
        if ((counter[i] != -1) && first_notNull == -1)
        {
            first_notNull = i;
        }
    }

    // NOTE(review): if no accepted feature has any terms, first_notNull stays -1 and
    // counter[first_notNull] throws IndexOutOfRangeException — verify callers never
    // pass an all-zero/all-unaccepted configuration.
    while (counter[first_notNull] >= 0 && counter[0] >= -1)
    {
        // Collect one term per feature for the current combination; features
        // without terms are marked with order[i] = -1.
        List<Term> temp_term_set = new List<Term>();
        int[] order = new int[Approximate.CountFeatures];
        for (int i = 0; i < Approximate.CountFeatures; i++)
        {
            if ((counter[i] == -1) || (Approximate.AcceptedFeatures[i] == false))
            {
                order[i] = -1;
                continue;
            }
            temp_term_set.Add(Approximate.RulesDatabaseSet[0].TermsSet[position_of_terms[i][counter[i]]]);
            order[i] = position_of_terms[i][counter[i]];
        }
        double approx_Values = KNNConsequent.NearestApprox(Approximate, temp_term_set);
        SARule temp_rule = new SARule(Approximate.RulesDatabaseSet[0].TermsSet, order, approx_Values);
        Approximate.RulesDatabaseSet[0].RulesDatabase.Add(temp_rule);
        // Advance to the next combination.
        counter = dec_count(counter, countSliceForVar, Approximate.CountFeatures);
    }
    Approximate.RulesDatabaseSet[0].TermsSet.Trim();
}