/// <summary>
/// Re-fits every rule's TSK consequent (independent constant + regression
/// coefficients) via recursive weighted LSM on a copy of the first knowledge
/// base, and keeps the copy only if the learn-sample error improved.
/// </summary>
/// <param name="Approximate">System whose first knowledge base is tuned; mutated in place on improvement.</param>
/// <param name="conf">Algorithm configuration (unused by this tuner).</param>
/// <returns>The same <paramref name="Approximate"/> instance.</returns>
/// <exception cref="InvalidOperationException">If the system has no knowledge base.</exception>
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approximate, ILearnAlgorithmConf conf)
{
    if (Approximate.RulesDatabaseSet.Count == 0)
    {
        throw new InvalidOperationException("Нечеткая система не была корректно инициализированна");
    }

    // Work on a copy so the original base survives if the refit makes things worse.
    KnowlegeBaseTSARules newBase = new KnowlegeBaseTSARules(Approximate.RulesDatabaseSet[0]);
    double result_before = Approximate.approxLearnSamples(newBase);

    foreach (TSARule Rule in newBase.RulesDatabase)
    {
        double[] coefficient = null;
        double Value = LSMWeghtReqursiveSimple.EvaluteConsiquent(Approximate, Rule.ListTermsInRule, out coefficient);
        // Fix: the original assigned IndependentConstantConsequent twice in a row.
        Rule.IndependentConstantConsequent = Value;
        Rule.RegressionConstantConsequent = coefficient;
    }

    double result_after = Approximate.approxLearnSamples(newBase);
    // Lower error value means a better fit; only then adopt the refitted base.
    if (result_before > result_after)
    {
        Approximate.RulesDatabaseSet[0] = newBase;
    }

    // Fix: removed the explicit GC.Collect() — forcing a collection here has no
    // functional effect and hurts throughput.
    Approximate.RulesDatabaseSet[0].TermsSet.Trim();
    return (Approximate);
}
/// <summary>
/// Removes later duplicates of identical rules from <paramref name="Source"/>
/// (in place) and refreshes the surviving earlier rule's regression coefficients.
/// </summary>
/// <param name="Approximate">System used to re-evaluate the survivor's consequent.</param>
/// <param name="Source">Rule list; mutated in place and also returned.</param>
/// <returns>The same (mutated) list instance that was passed in.</returns>
private List <TSARule> RemoveDuplicate(TSAFuzzySystem Approximate, List <TSARule> Source)
{
    // Walk backwards so RemoveAt never disturbs indices still to be visited.
    for (int later = Source.Count - 1; later > 0; later--)
    {
        for (int earlier = later - 1; earlier >= 0; earlier--)
        {
            if (!Equals(Source[later], Source[earlier]))
            {
                continue;
            }

            // Drop the later copy, then re-fit the survivor's regression part.
            Source.RemoveAt(later);
            double[] regressionCoeffs = null;
            // NOTE(review): the returned independent constant is discarded here;
            // the original code had the corresponding assignment commented out
            // (a KNN-based alternative) — confirm this is intended.
            LSMWeghtReqursiveSimple.EvaluteConsiquent(Approximate, Source[earlier].ListTermsInRule, out regressionCoeffs);
            Source[earlier].RegressionConstantConsequent = regressionCoeffs;
            break;
        }
    }

    return Source;
}
/// <summary>
/// Builds an initial rule base by greedy clustering of the learn samples:
/// each cluster is seeded with the sample farthest from a reference point,
/// grown by nearest neighbours while its local linear model (fitted by
/// recursive least squares) stays under <paramref name="mayError"/>, and then
/// converted into one term per feature plus one TSK rule.
/// </summary>
/// <param name="Approximate">System whose first knowledge base receives the terms and rules.</param>
/// <param name="typeFunc">Membership-function shape for the generated terms.</param>
/// <param name="mayError">Local-model error threshold that stops cluster growth.</param>
public void kliGenerate(TSAFuzzySystem Approximate, TypeTermFuncEnum typeFunc, double mayError)
{
    // Make sure there is a knowledge base to append terms/rules to.
    if ((Approximate.RulesDatabaseSet == null) || (Approximate.RulesDatabaseSet.Count == 0))
    {
        Approximate.RulesDatabaseSet.Add(new KnowlegeBaseTSARules());
    }

    // Working copy of the learn samples plus their distances to a far point
    // (distances filled by CalcFarPoint into originalSimpleSetDistanteToPoint).
    originalSimpleSet = new List <SampleSet.RowSample>(Approximate.LearnSamplesSet.DataRows);
    originalSimpleSetDistanteToPoint = new List <double>();
    CalcFarPoint();

    int ruleIndex = 0;
    while (originalSimpleSet.Count > 0)
    {
        // Seed the next cluster with the sample farthest from the reference point.
        int IndexOfMax = originalSimpleSetDistanteToPoint.IndexOf(originalSimpleSetDistanteToPoint.Max());
        var maxPoint = originalSimpleSet[IndexOfMax];
        originalSimpleSet.Remove(maxPoint);
        originalSimpleSetDistanteToPoint.RemoveAt(IndexOfMax);
        var iterClasterPoints = new List <SampleSet.RowSample>() { maxPoint };

        // Recursive least-squares state for the cluster's local linear model.
        var mnk = new MNK() { n = maxPoint.InputAttributeValue.Count() };
        var P = new Matrix(new[] { new double[] { 0 } });
        var B = new HyperVector(0, 0);
        mnk.mnkIter(iterClasterPoints.Last().InputAttributeValue.ToList(), iterClasterPoints.Last().DoubleOutput, ref P, ref B, true);
        // var errorIter = errorsMnk(B, iterClasterPoints);

        // Bootstrap: absorb CountFeatures nearest neighbours unconditionally so the
        // linear model is over-determined.
        // NOTE(review): points removed from originalSimpleSet here are NOT mirrored
        // in originalSimpleSetDistanteToPoint, so the two lists go out of sync for
        // the later IndexOf/RemoveAt calls — confirm this is intended.
        for (int z = 0; z < Approximate.CountFeatures; z++)
        {
            int NextIndex2 = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
            var nextPoint2 = originalSimpleSet[NextIndex2];
            mnk.mnkIter(nextPoint2.InputAttributeValue.ToList(), nextPoint2.DoubleOutput, ref P, ref B, false);
            iterClasterPoints.Add(nextPoint2);
            originalSimpleSet.Remove(nextPoint2);
        }

        var errorIter = errorsMnk(B, iterClasterPoints);

        // Grow the cluster while the local model error stays below the threshold.
        while (errorIter < mayError)
        {
            if (originalSimpleSet.Count < Approximate.CountFeatures)
            {
                // Too few samples left to seed another cluster: absorb the remainder.
                while (originalSimpleSet.Count > 0)
                {
                    int NextIndex2 = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
                    var nextPoint2 = originalSimpleSet[NextIndex2];
                    mnk.mnkIter(nextPoint2.InputAttributeValue.ToList(), nextPoint2.DoubleOutput, ref P, ref B, false);
                    iterClasterPoints.Add(nextPoint2);
                    originalSimpleSet.Remove(nextPoint2);
                    originalSimpleSetDistanteToPoint.RemoveAt(NextIndex2);
                }
                break;
            }

            // Tentatively add the nearest remaining point and re-check the error
            // on a copy of the cluster; only accept it if still under threshold.
            int NextIndex = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
            var nextPoint = originalSimpleSet[NextIndex];
            mnk.mnkIter(nextPoint.InputAttributeValue.ToList(), nextPoint.DoubleOutput, ref P, ref B, false);
            errorIter = errorsMnk(B, new List <SampleSet.RowSample>(iterClasterPoints) { nextPoint });
            if (errorIter < mayError)
            {
                iterClasterPoints.Add(nextPoint);
                maxPoint = nextPoint;
                originalSimpleSet.Remove(nextPoint);
                originalSimpleSetDistanteToPoint.RemoveAt(NextIndex);
            }
        }

        // Turn the finished cluster into one term per feature: center = cluster
        // mean, width = cluster standard deviation of that feature.
        var numbersRule = new List <int>();
        List <Term> forRWLSM = new List <Term>();
        for (int i = 0; i < Approximate.CountFeatures; i++)
        {
            var parametrs = new double[Term.CountParamsinSelectedTermType(typeFunc)];
            parametrs[0] = iterClasterPoints.Sum(x => x.InputAttributeValue[i]) / iterClasterPoints.Count;
            parametrs[1] = Math.Sqrt(
                iterClasterPoints.Sum(x => Math.Pow(x.InputAttributeValue[i] - parametrs[0], 2.0)) / iterClasterPoints.Count);
            if (parametrs[1] < Math.Pow(10, -300))
            {
                // Degenerate (zero-width) term: fall back to 1% of the attribute scatter.
                parametrs[1] = Approximate.LearnSamplesSet.InputAttributes[i].Scatter / 100.0;
            }
            var temp_term = new Term(parametrs, typeFunc, i);
            forRWLSM.Add(temp_term);
            Approximate.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            numbersRule.Add(Approximate.RulesDatabaseSet[0].TermsSet.Count - 1);
        }

        ruleIndex++;
        // Fit the new rule's TSK consequent by recursive weighted LSM.
        double[] coeffs = null;
        double coef = LSMWeghtReqursiveSimple.EvaluteConsiquent(ResultSystem, forRWLSM, out coeffs);
        var temp_rule = new TSARule(Approximate.RulesDatabaseSet[0].TermsSet, numbersRule.ToArray(), coef, coeffs);
        Approximate.RulesDatabaseSet[0].RulesDatabase.Add(temp_rule);
    }
}
/// <summary>
/// Iteratively refines the knowledge base: each pass finds the worst
/// (highest-error) input-space region, picks the variable contributing most to
/// that error, inserts a new term splitting it, and re-fits the consequents of
/// all affected rules — until the error target, the rule budget
/// (<c>config.RulesCount</c>) or the no-improvement retry budget is hit.
/// </summary>
/// <param name="Approximate">System tuned in place (stored in the <c>result</c> field).</param>
/// <param name="conf">Must be a <c>DynamicTuneConf</c> (cast below).</param>
/// <returns>The tuned system with the best knowledge base found in slot 0.</returns>
public override TSAFuzzySystem TuneUpFuzzySystem(TSAFuzzySystem Approximate, ILearnAlgorithmConf conf) // + override
{
    result = Approximate;
    // NOTE(review): Archive / ErrorsArchive are never written or read below — dead locals.
    List <KnowlegeBaseTSARules> Archive = new List <KnowlegeBaseTSARules>();
    List <double> ErrorsArchive = new List <double>();

    var config = (DynamicTuneConf)conf;
    maxError = config.MaxError;
    RuleCount = config.RulesCount;
    TryCount = config.TryCount;

    double error = result.RMSEtoMSEdiv2forLearn(result.approxLearnSamples(result.RulesDatabaseSet[0]));
    var kbToOptimize = new KnowlegeBaseTSARules(result.RulesDatabaseSet[0]);
    var kbBest = new KnowlegeBaseTSARules(kbToOptimize);
    double errorBefore = Double.MaxValue;
    result.UnlaidProtectionFix(kbToOptimize);

    // Group the knowledge-base terms per input variable.
    List <input_space> variable_spaces = new List <input_space>();
    for (int i = 0; i < result.LearnSamplesSet.InputAttributes.Count; i++)
    {
        List <Term> terms_of_variable = new List <Term>();
        terms_of_variable = kbToOptimize.TermsSet.Where(term => term.NumVar == i).ToList();
        variable_spaces.Add(new input_space(terms_of_variable, i));
    }

    int indexRegion = -1, indexVar = -1, number_of_input_variables = variable_spaces.Count;
    int tryCount = 0;

    while (error > maxError)
    {
        if (Double.IsInfinity(error))
        {
            throw new Exception("Something went wrong, error is Infinity, region: " + indexRegion);
        }
        if (Double.IsNaN(error))
        {
            throw new Exception("Something went wrong, error is NaN, region: " + indexRegion);
        }

        // Build candidate regions: cartesian product of per-variable region sides.
        region_side[][] sides = new region_side[number_of_input_variables][];
        for (int i = 0; i < number_of_input_variables; i++)
        {
            sides[i] = variable_spaces[i].get_region_sides();
        }
        var cartresult = CartesianProduct.Get(sides);
        List <region2> regions = new List <region2>();
        foreach (var x in cartresult)
        {
            regions.Add(new region2(x.ToList(), result, variable_spaces));
        }

        List <double> region_errors = regions.Select(x => x.region_error()).ToList();
        // NOTE(review): the worst region is selected BEFORE NaN/Infinity entries are
        // zeroed below, so a NaN/Inf region error can still win Max() — confirm order.
        indexRegion = region_errors.IndexOf(region_errors.Max());
        for (int i = 0; i < region_errors.Count; i++)
        {
            // NOTE(review): IsInfinity already covers both signed variants checked after it.
            if (Double.IsNaN(region_errors[i]) || Double.IsInfinity(region_errors[i]) || Double.IsNegativeInfinity(region_errors[i]) || Double.IsPositiveInfinity(region_errors[i]))
            {
                region_errors[i] = 0;
            }
        }

        // Pick the variable to split: largest per-variable error, or a random one
        // when all per-variable errors are equal.
        List <double> variable_errors = regions[indexRegion].variable_errors();
        bool check1 = false;
        for (int i = 1; i < variable_errors.Count; i++)
        {
            if (variable_errors[i - 1] != variable_errors[i])
            {
                check1 = true;
                break;
            }
        }
        if (!check1)
        {
            // NOTE(review): Next(Count - 1) can never return the last index — confirm intent.
            indexVar = StaticRandom.Next(variable_errors.Count - 1);
        }
        else
        {
            indexVar = variable_errors.IndexOf(variable_errors.Max());
        }

        Term new_term = regions[indexRegion].new_term(indexVar);
        result.RulesDatabaseSet[0] = kbToOptimize;
        kbToOptimize.TermsSet.Add(new_term);

        // Rules (CHECK REFERENCE TYPES)
        int @var = indexVar; // NOTE(review): unused local.
        var rulesLeft = kbToOptimize.RulesDatabase.Where(
            rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].left)).ToList();
        var rulesRight = kbToOptimize.RulesDatabase.Where(
            rule => rule.ListTermsInRule.Contains(regions[indexRegion].sides[indexVar].right)).ToList();

        // For every rule using the split region's left-side term: clone it with the
        // new term substituted, then re-fit consequents of both clone and original.
        for (int j = 0; j < rulesLeft.Count; j++)
        {
            int[] order = new int[rulesLeft[j].ListTermsInRule.Count];
            for (int k = 0; k < rulesLeft[j].ListTermsInRule.Count; k++)
            {
                Term temp_term = rulesLeft[j].ListTermsInRule[k];
                if (temp_term == regions[indexRegion].sides[indexVar].left)
                {
                    temp_term = new_term;
                }
                order[k] = kbToOptimize.TermsSet.FindIndex(x => x == temp_term);
            }
            // NOTE(review): indexes RulesDatabase[j], not rulesLeft[j] — these only
            // coincide when the filtered list is aligned with the database; confirm.
            double temp_approx_Values = kbToOptimize.RulesDatabase[j].IndependentConstantConsequent;
            double[] temp_approx_RegressionConstantConsequent = kbToOptimize.RulesDatabase[j].RegressionConstantConsequent.Clone() as double[];
            TSARule temp_rule = new TSARule(
                kbToOptimize.TermsSet, order, temp_approx_Values, temp_approx_RegressionConstantConsequent);
            double[] dC = null;
            temp_rule.IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(
                result, temp_rule.ListTermsInRule.ToList(), out dC);
            temp_rule.RegressionConstantConsequent = (double[])dC.Clone();
            kbToOptimize.RulesDatabase.Add(temp_rule);
            rulesLeft[j].IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(
                result, rulesLeft[j].ListTermsInRule.ToList(), out dC);
            rulesLeft[j].RegressionConstantConsequent = (double[])dC.Clone();
        }

        // Rules touching only the right side get their consequents re-fitted in place.
        foreach (var rule in rulesRight)
        {
            double[] dC = null;
            rule.IndependentConstantConsequent = LSMWeghtReqursiveSimple.EvaluteConsiquent(
                result, rule.ListTermsInRule.ToList(), out dC);
            rule.RegressionConstantConsequent = dC;
        }

        variable_spaces[indexVar].terms.Add(new_term);
        variable_spaces[indexVar].terms.Sort(new CompararerByPick());

        // Re-evaluate the system's error
        error = result.RMSEtoMSEdiv2forLearn(result.ErrorLearnSamples(kbToOptimize));
        if ((kbToOptimize.RulesDatabase.Count > config.RulesCount))
        {
            break;
        }
#if Console
        Console.WriteLine(error + " " + kbToOptimize.TermsSet.Count + " terms\n");
        for (int i = 0; i < variable_spaces.Count; i++)
        {
            Console.WriteLine(variable_spaces[i].terms.Count + " термов по " + i + "му параметру\n");
        }
#endif
        result.RulesDatabaseSet[0] = kbToOptimize;

        // Get the best knowledge base on the 1st place
        if (error < errorBefore)
        {
            kbBest = new KnowlegeBaseTSARules(kbToOptimize);
            errorBefore = error;
            tryCount = 0;
        }
        else
        {
            tryCount++;
        }
        if (tryCount > TryCount)
        {
            break;
        }
    }

    // Publish the best base found and report the final rule/retry counts.
    result.RulesDatabaseSet[0] = kbBest;
    RuleCount = kbBest.RulesDatabase.Count;
    TryCount = tryCount;
    return(result);
}
/// <summary>
/// Populates the first knowledge base with a uniform grid of terms per input
/// variable (countSliceForVar[i] terms for feature i, skipping non-accepted
/// features) and creates one rule for every combination of terms
/// ("everyone with everyone"), fitting each rule's TSK consequent by
/// recursive weighted LSM.
/// </summary>
/// <param name="Approximate">System whose first knowledge base is (re)built.</param>
/// <param name="typeFunc">Membership-function shape for the generated terms.</param>
/// <param name="countSliceForVar">Slices per variable; entries for rejected features are zeroed in place.</param>
public static void InitRulesEveryoneWithEveryone(TSAFuzzySystem Approximate, TypeTermFuncEnum typeFunc, int[] countSliceForVar)
{
    // i - row index
    // j - input parameter index
    // Input value (i, j): Approximate.LearnSamplesSet.DataRows[i].InputAttributeValue[j]
    // Output value of row i: Approximate.LearnSamplesSet.DataRows[i].Approx_Value
    if ((Approximate.RulesDatabaseSet == null) || (Approximate.RulesDatabaseSet.Count == 0))
    {
        KnowlegeBaseTSARules temp_rules = new KnowlegeBaseTSARules();
        Approximate.RulesDatabaseSet.Add(temp_rules);
    }

    // position_of_terms[i][j] = index (in the shared TermsSet) of the j-th term of feature i.
    int[][] position_of_terms = new int[Approximate.CountFeatures][];
    for (int i = 0; i < Approximate.CountFeatures; i++)
    {
        if (Approximate.AcceptedFeatures[i] == false)
        {
            countSliceForVar[i] = 0;
            continue;
        }

        position_of_terms[i] = new int[countSliceForVar[i]];
        // Slice the feature's range: centers start at Min, step = scatter/(slices-1).
        double current_value = Approximate.LearnSamplesSet.InputAttributes[i].Min;
        double coeef = (Approximate.LearnSamplesSet.InputAttributes[i].Scatter);
        if (countSliceForVar[i] > 1)
        {
            coeef = coeef / (countSliceForVar[i] - 1);
        }
        if (countSliceForVar[i] <= 1)
        {
            // Single term: center it in the range and halve the width (tiny overshoot).
            current_value = current_value + coeef * 0.5;
            coeef *= 1.000000001 / 2;
        }

        for (int j = 0; j < countSliceForVar[i]; j++)
        {
            double[] parametrs = new double[Term.CountParamsinSelectedTermType(typeFunc)];
            // Parameter layout depends on the membership-function shape
            // (enum members: triangle, Gaussian, parabola, trapezoid).
            switch (typeFunc)
            {
                case TypeTermFuncEnum.Треугольник:
                    parametrs[1] = current_value;
                    parametrs[0] = parametrs[1] - coeef;
                    parametrs[2] = parametrs[1] + coeef;
                    break;
                case TypeTermFuncEnum.Гауссоида:
                    parametrs[0] = current_value;
                    parametrs[1] = coeef / 3;
                    break;
                case TypeTermFuncEnum.Парабола:
                    parametrs[0] = current_value - coeef;
                    parametrs[1] = current_value + coeef;
                    break;
                case TypeTermFuncEnum.Трапеция:
                    parametrs[0] = current_value - coeef;
                    parametrs[3] = current_value + coeef;
                    parametrs[1] = parametrs[0] + 0.4 * (parametrs[3] - parametrs[0]);
                    parametrs[2] = parametrs[0] + 0.6 * (parametrs[3] - parametrs[0]);
                    break;
            }
            Term temp_term = new Term(parametrs, typeFunc, i);
            if (countSliceForVar[i] > 1)
            {
                // Widen the outermost terms slightly so boundary samples still fire.
                // NOTE(review): the two epsilons differ by a factor of 10
                // (0.00000001 vs 0.0000001) — confirm whether that asymmetry is intended.
                if ((j == 0) && (typeFunc != TypeTermFuncEnum.Гауссоида))
                {
                    temp_term.Min -= 0.00000001 * (temp_term.Max - temp_term.Min);
                }
                if ((j == countSliceForVar[i] - 1) && (typeFunc != TypeTermFuncEnum.Гауссоида))
                {
                    temp_term.Max += 0.0000001 * (temp_term.Max - temp_term.Min);
                }
            }
            Approximate.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            position_of_terms[i][j] = Approximate.RulesDatabaseSet[0].TermsSet.Count - 1;
            current_value += coeef;
        }
    }

    // counter[i] walks down from the last term index to 0 for each accepted
    // feature; together the counters enumerate the full cartesian product.
    int first_notNull = -1;
    int[] counter = new int[Approximate.CountFeatures];
    for (int i = 0; i < Approximate.CountFeatures; i++)
    {
        if (Approximate.AcceptedFeatures[i] == false)
        {
            continue;
        }
        counter[i] = countSliceForVar[i] - 1;
        if ((counter[i] != -1) && first_notNull == -1)
        {
            first_notNull = i;
        }
    }

    while (counter[first_notNull] >= 0 && counter[0] >= -1)
    {
        // Collect the current combination of terms (one per accepted feature).
        List <Term> temp_term_set = new List <Term>();
        int[] order = new int[Approximate.CountFeatures];
        for (int i = 0; i < Approximate.CountFeatures; i++)
        {
            if ((counter[i] == -1) || (Approximate.AcceptedFeatures[i] == false))
            {
                order[i] = -1; // feature not used by this rule
                continue;
            }
            temp_term_set.Add(Approximate.RulesDatabaseSet[0].TermsSet[position_of_terms[i][counter[i]]]);
            order[i] = position_of_terms[i][counter[i]];
        }

        // Fit the TSK consequent for this antecedent combination.
        double[] depencyCoefficent = null;
        double approx_Values = LSMWeghtReqursiveSimple.EvaluteConsiquent(Approximate, temp_term_set, out depencyCoefficent);
        TSARule temp_rule = new TSARule(Approximate.RulesDatabaseSet[0].TermsSet, order, approx_Values, depencyCoefficent);
        Approximate.RulesDatabaseSet[0].RulesDatabase.Add(temp_rule);

        // Advance to the next combination (multi-radix decrement helper).
        counter = dec_count(counter, countSliceForVar, Approximate.CountFeatures);
    }

    Approximate.RulesDatabaseSet[0].TermsSet.Trim();
}