/// <summary>
/// Builds a TSK rule base by greedy clustering of the learn samples: repeatedly seeds a
/// cluster at the sample farthest from a reference point (per <c>CalcFarPoint</c>), grows it
/// with nearest neighbours while the recursive-LSM consequent error stays below
/// <paramref name="mayError"/>, then emits one Gaussian-style term per feature and one
/// <c>TSARule</c> per cluster into <c>Approximate.RulesDatabaseSet[0]</c>.
/// </summary>
/// <param name="Approximate">Fuzzy system whose learn samples are clustered and whose first knowledge base receives the generated terms and rules.</param>
/// <param name="typeFunc">Membership-function type used for every generated term.</param>
/// <param name="mayError">Error threshold that stops cluster growth.</param>
public void kliGenerate(TSAFuzzySystem Approximate, TypeTermFuncEnum typeFunc, double mayError)
{
    // NOTE(review): if RulesDatabaseSet is actually null, the Add below throws
    // NullReferenceException — the null branch is unreachable as written. Confirm whether
    // callers guarantee a non-null (possibly empty) collection.
    if ((Approximate.RulesDatabaseSet == null) || (Approximate.RulesDatabaseSet.Count == 0))
    {
        Approximate.RulesDatabaseSet.Add(new KnowlegeBaseTSARules());
    }
    // Working copies held in instance fields; originalSimpleSetDistanteToPoint is filled by
    // CalcFarPoint and is treated as index-parallel to originalSimpleSet below.
    originalSimpleSet = new List <SampleSet.RowSample>(Approximate.LearnSamplesSet.DataRows);
    originalSimpleSetDistanteToPoint = new List <double>();
    CalcFarPoint();
    int ruleIndex = 0;
    // Each outer iteration consumes one cluster of samples and produces one rule.
    while (originalSimpleSet.Count > 0)
    {
        // Seed the cluster with the sample farthest from the reference point.
        int IndexOfMax = originalSimpleSetDistanteToPoint.IndexOf(originalSimpleSetDistanteToPoint.Max());
        var maxPoint = originalSimpleSet[IndexOfMax];
        originalSimpleSet.Remove(maxPoint);
        originalSimpleSetDistanteToPoint.RemoveAt(IndexOfMax);
        var iterClasterPoints = new List <SampleSet.RowSample>() { maxPoint };
        // Recursive least-squares state: P (covariance) and B (coefficients) are updated
        // in place by mnkIter; the first call (flag = true) initializes them.
        var mnk = new MNK() { n = maxPoint.InputAttributeValue.Count() };
        var P = new Matrix(new[] { new double[] { 0 } });
        var B = new HyperVector(0, 0);
        mnk.mnkIter(iterClasterPoints.Last().InputAttributeValue.ToList(), iterClasterPoints.Last().DoubleOutput, ref P, ref B, true);
        // var errorIter = errorsMnk(B, iterClasterPoints);
        // Force the cluster up to CountFeatures + 1 members so the LSM system is determined.
        // NOTE(review): this loop removes entries from originalSimpleSet but NOT from
        // originalSimpleSetDistanteToPoint, so the two parallel lists go out of sync and the
        // later RemoveAt/IndexOf calls operate on stale indices — verify intended behaviour.
        // NOTE(review): it also indexes originalSimpleSet without checking it is non-empty;
        // fewer remaining samples than CountFeatures would throw.
        for (int z = 0; z < Approximate.CountFeatures; z++)
        {
            int NextIndex2 = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
            var nextPoint2 = originalSimpleSet[NextIndex2];
            mnk.mnkIter(nextPoint2.InputAttributeValue.ToList(), nextPoint2.DoubleOutput, ref P, ref B, false);
            iterClasterPoints.Add(nextPoint2);
            originalSimpleSet.Remove(nextPoint2);
        }
        var errorIter = errorsMnk(B, iterClasterPoints);
        // Grow the cluster one nearest neighbour at a time while the model error stays
        // below the threshold.
        while (errorIter < mayError)
        {
            // Too few samples left for another full cluster: absorb the remainder here.
            if (originalSimpleSet.Count < Approximate.CountFeatures)
            {
                while (originalSimpleSet.Count > 0)
                {
                    int NextIndex2 = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
                    var nextPoint2 = originalSimpleSet[NextIndex2];
                    mnk.mnkIter(nextPoint2.InputAttributeValue.ToList(), nextPoint2.DoubleOutput, ref P, ref B, false);
                    iterClasterPoints.Add(nextPoint2);
                    originalSimpleSet.Remove(nextPoint2);
                    originalSimpleSetDistanteToPoint.RemoveAt(NextIndex2);
                }
                break;
            }
            int NextIndex = maxPoint.NearestNormalizedInputIndex(originalSimpleSet, ResultSystem.LearnSamplesSet);
            var nextPoint = originalSimpleSet[NextIndex];
            mnk.mnkIter(nextPoint.InputAttributeValue.ToList(), nextPoint.DoubleOutput, ref P, ref B, false);
            // Evaluate the error on a trial copy including the candidate before committing it.
            errorIter = errorsMnk(B, new List <SampleSet.RowSample>(iterClasterPoints) { nextPoint });
            if (errorIter < mayError)
            {
                iterClasterPoints.Add(nextPoint);
                // The accepted point becomes the new anchor for the nearest-neighbour search.
                maxPoint = nextPoint;
                originalSimpleSet.Remove(nextPoint);
                originalSimpleSetDistanteToPoint.RemoveAt(NextIndex);
            }
        }
        // Build one term per feature from the cluster's per-feature mean (parametrs[0])
        // and standard deviation (parametrs[1]).
        var numbersRule = new List <int>();
        List <Term> forRWLSM = new List <Term>();
        for (int i = 0; i < Approximate.CountFeatures; i++)
        {
            var parametrs = new double[Term.CountParamsinSelectedTermType(typeFunc)];
            parametrs[0] = iterClasterPoints.Sum(x => x.InputAttributeValue[i]) / iterClasterPoints.Count;
            parametrs[1] = Math.Sqrt( iterClasterPoints.Sum(x => Math.Pow(x.InputAttributeValue[i] - parametrs[0], 2.0)) / iterClasterPoints.Count);
            // Guard against a degenerate (near-zero) spread: fall back to 1% of the
            // attribute's overall scatter.
            if (parametrs[1] < Math.Pow(10, -300))
            {
                parametrs[1] = Approximate.LearnSamplesSet.InputAttributes[i].Scatter / 100.0;
            }
            var temp_term = new Term(parametrs, typeFunc, i);
            forRWLSM.Add(temp_term);
            Approximate.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            numbersRule.Add(Approximate.RulesDatabaseSet[0].TermsSet.Count - 1);
        }
        ruleIndex++;
        // Consequent coefficients come from a recursive weighted LSM over the new terms.
        double[] coeffs = null;
        double coef = LSMWeghtReqursiveSimple.EvaluteConsiquent(ResultSystem, forRWLSM, out coeffs);
        var temp_rule = new TSARule(Approximate.RulesDatabaseSet[0].TermsSet, numbersRule.ToArray(), coef, coeffs);
        Approximate.RulesDatabaseSet[0].RulesDatabase.Add(temp_rule);
    }
}
/// <summary>
/// Static variant of the cluster-based TSK rule generator that honours feature selection:
/// only attributes flagged in <c>Approximate.AcceptedFeatures</c> are copied into the working
/// samples, clusters are seeded at the sample with the largest input norm and grown by
/// scatter-normalized nearest-neighbour distance while the recursive-LSM error stays below
/// <paramref name="mayError"/>; each cluster yields one term per accepted feature and one
/// <c>TSARule</c> whose consequent is taken directly from the LSM coefficient vector B.
/// </summary>
/// <param name="Approximate">Fuzzy system providing learn samples and receiving the generated terms and rules in its first knowledge base.</param>
/// <param name="typeFunc">Membership-function type used for every generated term.</param>
/// <param name="mayError">Error threshold that stops cluster growth.</param>
public static void kliGenerate(TSAFuzzySystem Approximate, TypeTermFuncEnum typeFunc, double mayError)
{
    // Project each learn sample onto the accepted features only.
    var ssr = new List <SampleSet.RowSample>();
    for (int index = 0; index < Approximate.LearnSamplesSet.DataRows.Count; index++)
    {
        var row = new SampleSet.RowSample();
        List <double> InputAttributeValueLIST = new List <double>();
        for (int i = 0; i < Approximate.AcceptedFeatures.Count(); i++)
        {
            double q = Convert.ToDouble(Approximate.LearnSamplesSet.DataRows[index].InputAttributeValue[i].ToString());
            if (Approximate.AcceptedFeatures[i])
            {
                InputAttributeValueLIST.Add(q);
            }
            // NOTE(review): these two assignments are re-executed on every feature
            // iteration; the final iteration's values stand. Harmless but wasteful —
            // they could sit after the loop.
            row.InputAttributeValue = InputAttributeValueLIST.ToArray();
            row.DoubleOutput = Approximate.LearnSamplesSet.DataRows[index].DoubleOutput;
        }
        ssr.Add(row);
    }
    // ss = attribute metadata for accepted features only, index-aligned with the
    // projected sample inputs (used for scatter normalization below).
    List <SampleSet.AttributeInfo> ss = new List <SampleSet.AttributeInfo>(Approximate.LearnSamplesSet.InputAttributes);
    for (int i = (Approximate.AcceptedFeatures.Count() - 1); i >= 0; i--)
    {
        if (!Approximate.AcceptedFeatures[i])
        {
            ss.RemoveAt(i);
        }
    }
    // NOTE(review, from original author's comment): if RulesDatabaseSet is actually null,
    // the Add below throws NullReferenceException — the null branch cannot work as written.
    if ((Approximate.RulesDatabaseSet == null) || (Approximate.RulesDatabaseSet.Count == 0))
    {
        Approximate.RulesDatabaseSet.Add(new KnowlegeBaseTSARules());
    }
    var originalSimpleSet = new List <SampleSet.RowSample>(ssr);
    int ruleIndex = 0;
    // Each outer iteration consumes one cluster of samples and produces one rule.
    while (originalSimpleSet.Count > 0)
    {
        // Seed with the sample of largest RMS input norm. (Original author's note:
        // normalization plays little or no role for this particular seed choice.)
        var maxPoint = originalSimpleSet.OrderByDescending(x => (Math.Sqrt(x.InputAttributeValue.Sum(y => Math.Pow(y, 2.0)) / x.InputAttributeValue.Count()))).First();
        var iterClasterPoints = new List <SampleSet.RowSample>() { maxPoint };
        // Recursive least-squares state: P (covariance) and B (coefficients) are updated
        // in place by mnkIter; the first call (flag = true) initializes them.
        var mnk = new MNK() { n = maxPoint.InputAttributeValue.Count() };
        var P = new Matrix(new[] { new double[] { 0 } });
        var B = new HyperVector(0, 0);
        mnk.mnkIter(iterClasterPoints.Last().InputAttributeValue.ToList(), iterClasterPoints.Last().DoubleOutput, ref P, ref B, true);
        var errorIter = errorsMnk(B, iterClasterPoints);
        // NOTE(review): if this initial errorIter is already >= mayError, the loop body
        // never runs, nothing is removed from originalSimpleSet, and the outer while
        // re-selects the same maxPoint forever — potential infinite loop. Confirm.
        while (errorIter < mayError)
        {
            // Commit previously accepted points by removing them from the pool.
            originalSimpleSet.RemoveAll(x => iterClasterPoints.Contains(x));
            if (originalSimpleSet.Count == 0)
            {
                break;
            }
            // Without normalization it would be:
            // var nextPoint =
            //     originalSimpleSet.OrderBy(x =>(Math.Sqrt(x.InputAttributeValue.Sum(y => Math.Pow(y -maxPoint.InputAttributeValue[x.InputAttributeValue.ToList().IndexOf(y)],2.0))/x.InputAttributeValue.Count()))).First();
            // With normalization: nearest neighbour by per-feature scatter-scaled distance.
            var nextPoint = originalSimpleSet.OrderBy(simple => Math.Sqrt(simple.InputAttributeValue.Select((inputValue, index) => Math.Pow((inputValue - maxPoint.InputAttributeValue[index]) / (ss[index].Scatter), 2.0)).Sum())).First();
            mnk.mnkIter(nextPoint.InputAttributeValue.ToList(), nextPoint.DoubleOutput, ref P, ref B, false);
            // Evaluate the error on a trial copy including the candidate before committing it.
            errorIter = errorsMnk(B, new List <SampleSet.RowSample>(iterClasterPoints) { nextPoint });
            if (errorIter < mayError)
            {
                iterClasterPoints.Add(nextPoint);
            }
        }
        // Build one term per accepted feature; `spear` counts skipped (rejected) features
        // so that i - spear indexes the compacted sample inputs.
        var numbersRule = new List <int>();
        int spear = 0;
        for (int i = 0; i < Approximate.AcceptedFeatures.Count(); i++)//.CountVars
        {
            if (!Approximate.AcceptedFeatures[i])
            {
                spear++;
                continue;
            }
            var parametrs = new double[Term.CountParamsinSelectedTermType(typeFunc)];
            parametrs[0] = iterClasterPoints.Sum(x => x.InputAttributeValue[i - spear]) / iterClasterPoints.Count;
            // NOTE(review): the "* 2" widens the variance relative to the instance overload
            // of kliGenerate (which has no factor and guards against near-zero spread) —
            // confirm whether the factor and the missing degenerate-spread guard are intended.
            parametrs[1] = Math.Sqrt( iterClasterPoints.Sum(x => Math.Pow(x.InputAttributeValue[i - spear] - parametrs[0], 2.0)) * 2 / iterClasterPoints.Count);
            var temp_term = new Term(parametrs, typeFunc, i);
            Approximate.RulesDatabaseSet[0].TermsSet.Add(temp_term);
            // NOTE(review): ruleIndex + i - spear assumes TermsSet was empty before this
            // call; the instance overload uses TermsSet.Count - 1 instead — verify.
            numbersRule.Add(ruleIndex + i - spear);
        }
        ruleIndex += Approximate.AcceptedFeatures.Where(x => x == true).Count();//.CountVars
        // Consequent: B[0] is the constant, B[1..] the per-feature coefficients from the LSM.
        var temp_rule = new TSARule(Approximate.RulesDatabaseSet[0].TermsSet, numbersRule.ToArray(), B.Elements[0].Elements[0], B.Elements.Skip(1).Select(x => x.Elements[0]).ToArray());
        Approximate.RulesDatabaseSet[0].RulesDatabase.Add(temp_rule);
    }
}