/// <summary>
/// Tunes a Pittsburgh classifier by iteratively regrouping the population and
/// mutating it through one- or two-cluster selection; the best rule base found
/// is written back into <c>RulesDatabaseSet[0]</c>.
/// </summary>
/// <param name="Classifier">Classifier whose rule base is tuned in place.</param>
/// <param name="conf">Algorithm configuration consumed by <c>Init</c>.</param>
/// <returns>The same classifier instance with its best rule base installed.</returns>
public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf)
{
    result = Classifier;
    groups = new List<int[]>();
    Init(conf);
    SetPopulation();
    Population = SortRules(Population);

    // Give every one of the m groups the same share of the (N - 1) candidates
    // (integer division, remainder dropped).
    NS = new int[m];
    for (int g = 0; g < m; g++)
    {
        NS[g] = (N - 1) / m;
    }

    // Main loop: regroup, apply a cluster move chosen with probability p_one,
    // then re-sort so Population[0] is the current best.
    for (cur_iter = 0; cur_iter < iter; cur_iter++)
    {
        groups = GroupStream();
        if (rand.NextDouble() < p_one)
        {
            ChooseOneCluster();
        }
        else
        {
            ChooseTwoClusters();
        }
        Population = ListPittsburgClassifierTool.SortRules(Population, result);
        Console.WriteLine(cur_iter + " - Итерация");
        Console.WriteLine("Обуч. выборка = " + result.ErrorLearnSamples(Population[0]));
        Console.WriteLine("Тест. выборка = " + result.ErrorTestSamples(Population[0]));
    }

    // Commit the best rule base found.
    Population = ListPittsburgClassifierTool.SortRules(Population, result);
    result.RulesDatabaseSet[0] = Population[0];
    return result;
}
/// <summary>
/// Tunes a Pittsburgh classifier with a group-stream search: each iteration moves
/// the population, applies replacement, optionally evaporates the last group, and
/// tracks the iteration with the lowest test-sample error.
/// </summary>
/// <param name="Class">Classifier whose rule base is tuned in place.</param>
/// <param name="Conf">Algorithm configuration consumed by <c>Init</c>.</param>
/// <returns>The same classifier instance with the final best rule base installed.</returns>
public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Class, ILearnAlgorithmConf Conf)
{
    result = Class;
    List<int[]> groups = new List<int[]>();
    Init(Conf);
    SetPopulation();
    Population = SortRules(Population);
    // Fix: removed the dead store "NS = new int[Nsr];" — the array was
    // allocated and immediately overwritten by SetNS.
    NS = SetNS(Population, Nsr);
    groups = GroupStream();
    // Baseline errors of the current best (Population[0] after sorting).
    double BestMSETest = result.ErrorTestSamples(Population[0]);
    double BestMSELearn = result.ErrorLearnSamples(Population[0]);
    int BestIter = 0;
    for (int i = 1; i <= MaxIter; i++)
    {
        Console.Clear();
        Console.WriteLine((double)i * 100 / MaxIter + "%");
        Population = SetNextPosition(groups, Population);
        Population = Replacement(groups, Population);
        if (flag)
        {
            Evaporation(groups.Last()); // evaporation step (испарение)
        }
        // Remember the iteration that produced the lowest test error so far.
        if (BestMSETest > result.ErrorTestSamples(Population[0]))
        {
            BestMSETest = result.ErrorTestSamples(Population[0]);
            BestMSELearn = result.ErrorLearnSamples(Population[0]);
            BestIter = i;
        }
    }
    Console.WriteLine(ToString(true));
    Console.WriteLine("Итер - " + BestIter + " MSET - " + BestMSETest + " MSEL - " + BestMSELearn);
    result.RulesDatabaseSet[0] = Population[0];
    return result;
}
/// <summary>
/// Builds a human-readable accuracy/error report for a Pittsburgh classifier and
/// appends the measured values to the class-level result lists.
/// </summary>
/// <param name="FS">System to evaluate; expected to be a <c>PCFuzzySystem</c>.</param>
/// <returns>The report, or the "not available" message when evaluation is impossible.</returns>
private string ErrorInfoPC(IFuzzySystem FS)
{
    PCFuzzySystem IFS = FS as PCFuzzySystem;
    // Fix: also bail out when the downcast fails — previously a non-PC system
    // caused a NullReferenceException on the member accesses below.
    if (IFS == null || IFS.RulesDatabaseSet.Count < 1)
    {
        return("Точность нечеткой системы недоступна");
    }
    classLearnResult.Add(IFS.ClassifyLearnSamples(IFS.RulesDatabaseSet[0]));
    classTestResult.Add(IFS.ClassifyTestSamples(IFS.RulesDatabaseSet[0]));
    classErLearn.Add(IFS.ErrorLearnSamples(IFS.RulesDatabaseSet[0]));
    classErTest.Add(IFS.ErrorTestSamples(IFS.RulesDatabaseSet[0]));
    // Report the values just appended (last element of each list).
    return("Точностью на обучающей выборке " + classLearnResult[classLearnResult.Count - 1].ToString() + " , Точность на тестовой выборке " + classTestResult[classTestResult.Count - 1].ToString() + " " + Environment.NewLine + "Ошибкой на обучающей выборке " + classErLearn[classErLearn.Count - 1].ToString() + " , Ошибкой на тестовой выборке " + classErTest[classErTest.Count - 1].ToString() + " " + Environment.NewLine);
}
/// <summary>
/// Batch entry point: for every "*tra.dat" training file under <c>file_learn</c>,
/// loads the matching "*tst.dat" test set, generates a Pittsburgh classifier from
/// samples, saves it as a UFS file and appends learn/test errors to a per-config log.
/// </summary>
/// <param name="args">Command-line arguments forwarded to <c>fill_params</c>.</param>
/// <returns>Always 1.</returns>
public override int Run(string[] args)
{
    Console.WriteLine("Start");
    fill_params(args);
    foreach (string filenametra in System.IO.Directory.GetFiles(file_learn, "*tra.dat", System.IO.SearchOption.AllDirectories))
    {
        // KEEL-style naming: the test file sits next to the training file.
        string filenameTST = filenametra.Replace("tra.dat", "tst.dat");
        Console.WriteLine("Params get \nfile tra {0} \nfile name tst {1} ", filenametra, filenameTST);
        Class_learn_set = new SampleSet(filenametra);
        Console.WriteLine("Tra create");
        Class_test_set = new SampleSet(filenameTST);
        Console.WriteLine("Tst create");
        conf = new InitBySamplesConfig();
        conf.Init(Class_learn_set.CountVars);
        // fill_conf();
        conf.loadParams(confParams);
        file_out = filenametra.Replace("tra.dat", ((InitBySamplesConfig)conf).IBSTypeFunc.ToString() + "_out.ufs");
        Console.WriteLine("Conf Filed");
        Class_Pittsburg = new PCFuzzySystem(Class_learn_set, Class_test_set);
        Console.WriteLine("Classifier created");
        generator = new GeneratorRulesBySamples();
        Class_Pittsburg = generator.Generate(Class_Pittsburg, conf);
        Console.WriteLine("Generation complite");
        PCFSUFSWriter.saveToUFS(Class_Pittsburg, file_out);
        // Fix: dispose the log writer even when a write throws — the previous
        // manual sw.Close() leaked the handle on exception.
        using (StreamWriter sw = new StreamWriter(Path.Combine(file_learn, ((InitBySamplesConfig)conf).IBSTypeFunc.ToString() + "_log.txt"), true))
        {
            sw.WriteLine(filenametra + "\t" + Class_Pittsburg.ErrorLearnSamples(Class_Pittsburg.RulesDatabaseSet[0]));
            sw.WriteLine(filenameTST + "\t" + Class_Pittsburg.ErrorTestSamples(Class_Pittsburg.RulesDatabaseSet[0]));
        }
        Console.WriteLine("Saved");
    }
    return(1);
}
// Main computation: binary krill-herd-style feature selection. Each individual in
// Population is a bool[] feature mask; "merge" combines two binary terms (its
// semantics are defined elsewhere in this class — presumably a stochastic OR/XOR;
// TODO confirm). The best mask found is written back to result.AcceptedFeatures.
public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classify, ILearnAlgorithmConf conf) {
    result = Classify;
    rand = new Random();
    Init(conf);
    SetPopulation();
    // Track the best mask seen so far and its learn-sample error.
    bool[] BEST = result.AcceptedFeatures;
    double bestError = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
    Dictionary <bool[], double> PopulationWithAccuracy = new Dictionary <bool[], double>();
    double accuracy = 0;
    // Iteration loop.
    for (int it = 0; it < iter; it++) {
        // Fitness: learn-sample error of each candidate mask.
        for (int i = 0; i < Population.Count; i++) {
            result.AcceptedFeatures = Population[i];
            accuracy = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            PopulationWithAccuracy.Add(Population[i], accuracy);
        }
        // Re-order the population by DESCENDING error.
        // NOTE(review): descending puts the largest error first, yet Population[0]
        // is later used as the "best" individual for atarget — confirm intended order.
        Population.Clear();
        foreach (var pair in PopulationWithAccuracy.OrderByDescending(pair => pair.Value)) {
            Population.Add(pair.Key);
        }
        PopulationWithAccuracy.Clear();
        // Per-individual fitness K and its population average avK (avK is local only).
        double[] K = new double[Population.Count];
        double sumK = 0;
        double avK = 0;
        for (int i = 0; i < Population.Count; i++) {
            result.AcceptedFeatures = Population[i];
            K[i] = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            sumK += K[i];
        }
        avK = sumK / K.Length;
        // KDis: random binary vector used as the discretised K term below.
        bool[] KDis = new bool[result.CountFeatures];
        for (int i = 0; i < KDis.Length; i++) {
            if (rand.Next(0, 2) == 0) { KDis[i] = false; } else { KDis[i] = true; }
        }
        // D term (its binary form is constant true here).
        double dit;
        dit = it;
        double diter;
        diter = iter;
        bool D = true;
        // Xfood: "food" position. Each pass of the inner loop overwrites Xfood[t],
        // so only the LAST population member effectively contributes.
        var Xfood = new bool[result.CountFeatures];
        for (int t = 0; t < Xfood.Length; t++) {
            Xfood[t] = false;
            for (int i = 0; i < Population.Count; i++) {
                Xfood[t] = merge(Population[i][t], KDis[t]);
            }
        }
        // Cfood: food attraction coefficient, decays linearly from 2 to 0 over the run;
        // CDisfood is its binarised form (true once Cfood > 1, i.e. in the first half).
        double Cfood = 2 * (1 - (dit / diter));
        bool CDisfood;
        if (Cfood <= 1) { CDisfood = false; } else { CDisfood = true; }
        // Bfood: food-induced movement per individual (fresh random Kroofifood per individual).
        List <bool[]> Bfood = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            Bfood.Add(new bool[Population[i].Length]);
            if (rand.Next(0, 2) == 0) { Kroofifood = false; } else { Kroofifood = true; }
            for (int t = 0; t < Bfood[i].Length; t++) {
                Bfood[i][t] = merge(merge(CDisfood, Kroofifood), Xfood[t]);
            }
        }
        // Bbest: best-individual-induced movement.
        List <bool[]> Bbest = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            Bbest.Add(new bool[Population[i].Length]);
            if (rand.Next(0, 2) == 0) { Kroofifood = false; } else { Kroofifood = true; }
            for (int t = 0; t < Bbest[i].Length; t++) {
                Bbest[i][t] = merge(Kroofifood, Xfood[t]);
            }
        }
        // B: combined foraging term.
        List <bool[]> B = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            B.Add(new bool[Population[i].Length]);
            for (int t = 0; t < B[i].Length; t++) {
                B[i][t] = merge(Bfood[i][t], Bbest[i][t]);
            }
        }
        // F: foraging motion with inertia from the previous individual's F.
        List <bool[]> F = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            if (i == 0) {
                F.Add(new bool[Population[i].Length]);
                for (int t = 0; t < F[i].Length; t++) {
                    F[i][t] = merge(true, B[i][t]);
                }
            } else {
                F.Add(new bool[Population[i].Length]);
                for (int t = 0; t < F[i].Length; t++) {
                    F[i][t] = merge(merge(true, B[i][t]), merge(false, F[i - 1][t]));
                }
            }
        }
        List <int>[] neihbors = new List <int> [Population.Count];
        // alocal: neighbour-induced local movement. Inner j-loop overwrites
        // alocal[i][t], so only the last neighbour effectively contributes.
        List <bool[]> alocal = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            alocal.Add(new bool[Population[i].Length]);
            neihbors[i] = countneihbors(Population[i]);
            for (int t = 0; t < alocal[i].Length; t++) {
                alocal[i][t] = false;
                for (int j = 0; j < neihbors[i].Count; j++) {
                    bool KRoofij = merge(KDis[t], Population[neihbors[i][j]][t]);
                    bool[] XRoofij = new bool[Population.Count];
                    XRoofij = CalcXroof(Population[i], Population[neihbors[i][j]]);
                    alocal[i][t] = merge(KRoofij, XRoofij[t]);
                }
            }
        }
        // Cbest: random binary coefficient for the target (best) attraction.
        bool Cbest;
        if (rand.Next(0, 2) == 0) { Cbest = false; } else { Cbest = true; }
        // atarget: attraction towards Population[0] (treated as the best individual).
        List <bool[]> atarget = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            atarget.Add(new bool[Population[i].Length]);
            bool[] XRoofibest = new bool[Population.Count];
            XRoofibest = CalcXroof(Population[i], Population[0]);
            for (int t = 0; t < alocal[i].Length; t++) {
                bool KRoofibest = KDis[t];
                atarget[i][t] = merge(merge(Cbest, KRoofibest), XRoofibest[t]);
            }
        }
        // a: total induced movement = target + local terms.
        List <bool[]> a = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            a.Add(new bool[Population[i].Length]);
            for (int t = 0; t < a[i].Length; t++) {
                a[i][t] = merge(atarget[i][t], alocal[i][t]);
            }
        }
        // N: movement with inertia chained from the previous individual's N.
        List <bool[]> N = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            if (i == 0) {
                N.Add(new bool[Population[i].Length]);
                for (int t = 0; t < N[i].Length; t++) {
                    N[i][t] = merge(true, a[i][t]);
                }
            } else {
                N.Add(new bool[Population[i].Length]);
                for (int t = 0; t < F[i].Length; t++) {
                    N[i][t] = merge(a[i][t], N[i - 1][t]);
                }
            }
        }
        // dX: per-feature position delta (binary), combining F, N and D.
        List <bool[]> dX = new List <bool[]>(Population.Count);
        for (int i = 0; i < Population.Count; i++) {
            dX.Add(new bool[Population[i].Length]);
            for (int t = 0; t < a[i].Length; t++) {
                dX[i][t] = merge(merge(F[i][t], N[i][t]), D);
            }
        }
        // Console.Write("Значение BEST_ = ");
        // Console.WriteLine(BEST);
        // Position update X(t+dt).
        // NOTE(review): Population[i] is re-allocated (all false) BEFORE it is read
        // in merge(Population[i][t], ...), so the old position never contributes —
        // confirm whether the re-allocation is intended.
        for (int i = 0; i < Population.Count; i++) {
            Population[i] = new bool[Population[i].Length];
            for (int t = 0; t < Population[i].Length; t++) {
                Population[i][t] = merge(Population[i][t], dX[i][t]);
            }
        }
        // Keep the best mask/error found in this iteration.
        for (int i = 0; i < Population.Count; i++) {
            result.AcceptedFeatures = Population[i];
            double temp = result.ErrorLearnSamples(result.RulesDatabaseSet[0]);
            if (temp < bestError) {
                BEST = Population[i];
                bestError = temp;
            }
        }
        // Progress report every 10 iterations (non-short-circuit & is intentional here).
        double y = it;
        if (y % 10 == 0 & y != 0) {
            Console.WriteLine(it);
            Console.WriteLine(bestError);
        }
    }
    // Install the best mask and print it as a 0/1 row with final errors.
    result.AcceptedFeatures = BEST;
    for (int i = 0; i < result.AcceptedFeatures.Length; i++) {
        if (result.AcceptedFeatures[i] == false) { Console.Write("0 "); } else { Console.Write("1 "); }
    }
    Console.WriteLine();
    Console.WriteLine(result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
    Console.WriteLine(result.ErrorTestSamples(result.RulesDatabaseSet[0]));
    return(result);
}
/// <summary>
/// Snapshot of a Pittsburgh rule base together with the learn/test errors that
/// <paramref name="Checker"/> reports for it at construction time.
/// </summary>
/// <param name="Checker">Fuzzy system used to measure the errors.</param>
/// <param name="SourceElem">Rule base to snapshot (stored as a deep copy).</param>
/// <param name="algName">Name of the algorithm that produced the rule base.</param>
public PittsburgElementofStorage(PCFuzzySystem Checker, KnowlegeBasePCRules SourceElem, string algName) : base(algName)
{
    LearnError = Checker.ErrorLearnSamples(SourceElem);
    TestError = Checker.ErrorTestSamples(SourceElem);
    Element = new KnowlegeBasePCRules(SourceElem);
}
// Multi-goal optimisation driver: repeatedly runs a pool of learning algorithms
// (PSO/ANT/BEE/ES/GA, built by initAlgoritms) on copies of the current best rule
// base, scores each outcome by learn error, complexity and interpretability, and
// keeps the best candidate as the seed for the next step. Progress is reported
// asynchronously through the BackgroundWorker BW.
public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf) {
    Random rand = new Random(DateTime.Now.Millisecond);
    PCFuzzySystem result = Classifier;
    // Wire up and start the background progress reporter.
    BW.DoWork += new DoWorkEventHandler(BW_DoWork);
    BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
    BW.RunWorkerAsync();
    // Pull all tuning knobs from the strongly-typed config.
    // NOTE(review): several config property names appear to have lost their first
    // letter (e.g. "азмер_шага_по_точности" — presumably "Размер_...") — likely an
    // encoding mishap in MultiGoalOptimaze_conf; verify against that class.
    MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
    string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";
    config.Init2(PathAlg, Classifier.LearnSamplesSet.FileName);
    countFuzzySystem = config.Итераций_алгоритма;
    allowSqare = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
    allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
    int seedPath = rand.Next();
    sizePercent = config.азмер_шага_по_точности;
    sizeComplexity = config.азмер_шага_по_сложности;
    sizeInteraply = config.азмер_шага_по_интерпретируемости;
    diviver = config.Уменьшать_шаги_в;
    trysBeforeDivide = config.Уменьшать_шаг_после;
    path = config.path;
    dataSetName = config.dataSetName;
    toMany = config.азрешено_похожих_систем;
    // Which sub-algorithms participate in each tact.
    isPSO = config.toBool(config.Использовать_АРЧ);
    isANT = config.toBool(config.Использовать_НАМК);
    isBEE = config.toBool(config.Использовать_САПК);
    isES = config.toBool(config.Использовать_ЕС);
    isGA = config.toBool(config.Использовать_ГА);
    // isBFO = config.toBool(config.Использовать_АПБ);
    isTermShrink = config.toBool(config.Удалять_термы);
    isRuleShrink = config.toBool(config.Удалять_правила);
    isUnionTerm = config.toBool(config.Объединять_термы);
    isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
    countANT = config.Использовать_НАМК_раз_за_такт;
    countPSO = config.Использовать_за_такт_АРЧ_раз;
    countBEE = config.Использовать_САПК_раз_за_такт;
    countES = config.Использовать_ЕС_раз_за_такт;
    countGA = config.Использовать_ГА_раз_за_такт;
    // countBFO = config.Использовать_за_такт_АПБ_раз;
    typeComplexity = (int)config.Критерий_сложности;
    typeInterpreting = (int)config.Критерий_интерпретируемости;
    List <AbstractNotSafeLearnAlgorithm> learnAlgorithms = initAlgoritms();
    List <ILearnAlgorithmConf> learnAlgorithmsconfig = initAlgoritmsConfigs(Classifier.CountFeatures);
    // Parallel histories: one entry per stored rule base, kept index-aligned with Storage.
    List <double> ValueLPercent = new List <double>();
    List <double> ValueTPercent = new List <double>();
    List <double> ValueComplexity = new List <double>();
    List <double> ValueInterability = new List <double>();
    List <double> SummaryGoods = new List <double>();
    List <KnowlegeBasePCRules> Storage = new List <KnowlegeBasePCRules>();
    List <int> candidate = new List <int>();
    KnowlegeBasePCRules Best = result.RulesDatabaseSet[0];
    // result.RulesDatabaseSet[0] = Best;
    // Baseline scores of the starting rule base.
    baseLearn = (result.ErrorLearnSamples(result.RulesDatabaseSet[0]));
    ValueLPercent.Add(baseLearn);
    ValueTPercent.Add(result.ErrorTestSamples(result.RulesDatabaseSet[0])); // ClassifyTestSamples());
    baseComplexity = getComplexity(result);
    ValueComplexity.Add(baseComplexity);
    baseIntebility = getInterpreting(result, allowBorder, allowSqare);
    ValueInterability.Add(baseIntebility);
    Storage.Add(Best);
    int NSCount = 0;
    int deleted = 0;
    for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++) {
        bool mustToDivide = true;
        int usedAlg = 0;
        // Up to trysBeforeDivide attempts to produce at least one viable candidate.
        for (int tr = 0; tr < trysBeforeDivide; tr++) {
            deleted = 0;
            // Parallel.For(0, learnAlgorithms.Count(), i =>
            usedAlg = 0;
            for (int i = 0; i < learnAlgorithms.Count(); i++) {
                // Each algorithm starts from a fresh copy of the current Best,
                // temporarily installed as the only rule base of result.
                Console.WriteLine("F****d in Storage.Add(new c_Rules(Best))");
                Storage.Add(new KnowlegeBasePCRules(Best));
                Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                result.RulesDatabaseSet.Clear();
                Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                usedAlg++;
                bool before_VAlue = true;
                try {
                    learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                    GC.Collect();
                    before_VAlue = false;
                    ValueLPercent.Add(result.ErrorLearnSamples(result.RulesDatabaseSet[0])); // ClassifyLearnSamples());
                    ValueTPercent.Add(result.ErrorTestSamples(result.RulesDatabaseSet[0])); //ClassifyTestSamples());
                    ValueComplexity.Add(getComplexity(result));
                    ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                    double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                    Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];
                    // Roll back this candidate when the combined score is NaN.
                    if (double.IsNaN(temp)) {
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");
                        ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                        ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                        ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                        ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                        Storage.RemoveAt(Storage.Count() - 1);
                        usedAlg--;
                    }
                } catch (Exception) {
                    // A failing algorithm is skipped; if it failed AFTER the score
                    // lists were extended, those additions are undone to keep the
                    // lists aligned with Storage.
                    if (before_VAlue) {
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                    } else {
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");
                        ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                        ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                        ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                        ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                        Storage.RemoveAt(Storage.Count() - 1);
                    }
                }
                NSCount++;
                Console.WriteLine("F****d in ResultShow");
                ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() + "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                // i++;
            }
            //);
            Console.WriteLine("F****d in deleted");
            // Drop near-duplicate systems, then look for viable successors.
            deleted = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
            usedAlg -= deleted;
            Console.WriteLine("F****d in candidate");
            candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);
            if (candidate.Count() > 0) {
                mustToDivide = false;
                break;
            }
        }
        // No viable candidate after all tries: shrink all step sizes and retry the step.
        if (mustToDivide) {
            MessageBox.Show("Divided happend ");
            sizePercent = sizePercent / diviver;
            sizeComplexity = sizeComplexity / diviver;
            sizeInteraply = sizeInteraply / diviver;
            continue;
        }
        Console.WriteLine("F****d in SummaryGoods");
        SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);
        Console.WriteLine("F****d in indexofBest");
        int indexofBest = getNewBest(candidate, SummaryGoods);
        // Count how often each stored system was chosen as the next seed.
        if (usedAsNext.ContainsKey(indexofBest)) { usedAsNext[indexofBest]++; } else { usedAsNext.Add(indexofBest, 1); }
        Console.WriteLine("Best");
        Best = Storage[indexofBest];
        Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
        // Persist every rule base produced during this step.
        int toSaveCounter = NSCount - usedAlg;
        for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++) {
            result.RulesDatabaseSet[0] = Storage[i];
            saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
            toSaveCounter++;
        }
        Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
        result.RulesDatabaseSet[0] = Best;
        Console.WriteLine("F****d in End");
        // Refresh baselines from the newly chosen Best.
        baseLearn = (result.ErrorLearnSamples(result.RulesDatabaseSet[0]));// ClassifyLearnSamples();
        baseComplexity = getComplexity(result);
        baseIntebility = getInterpreting(result, allowBorder, allowSqare);
        candidate.Clear();
        GC.Collect();
    }
    // Signal the background worker, give it time to finish, then compact terms.
    isEnd = true;
    Thread.Sleep(1000);
    result.RulesDatabaseSet[0].TermsSet.Trim();
    return(result);
}
/// <summary>
/// Serialises the "Estimates" section for a Pittsburgh classifier: sample-based
/// precision/error estimates (learn always; test when a test set is present)
/// followed by 18 interpretability/complexity metrics. The Count attribute is
/// 22 with a test set, 20 without.
/// </summary>
/// <param name="writer">Open XML writer positioned where "Estimates" belongs.</param>
/// <param name="Classifier">Classifier whose rule base 0 is evaluated.</param>
private static void writeAboutEstimates(XmlWriter writer, PCFuzzySystem Classifier) {
    writer.WriteStartElement("Estimates");
    if (Classifier.TestSamplesSet != null) {
        // 4 sample-based estimates + 18 metrics.
        writer.WriteAttributeString("Count", XmlConvert.ToString(22));
        writeEstimate(writer, "PrecisionPercent", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])), Classifier.LearnSamplesSet.FileName);
        writeEstimate(writer, "ErrorPercent", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])), Classifier.LearnSamplesSet.FileName);
        writeEstimate(writer, "PrecisionPercent", XmlConvert.ToString(Classifier.ClassifyTestSamples(Classifier.RulesDatabaseSet[0])), Classifier.TestSamplesSet.FileName);
        writeEstimate(writer, "ErrorPercent", XmlConvert.ToString(Classifier.ErrorTestSamples(Classifier.RulesDatabaseSet[0])), Classifier.TestSamplesSet.FileName);
    } else {
        // No test set: 2 learn-sample estimates + 18 metrics.
        writer.WriteAttributeString("Count", XmlConvert.ToString(20));
        writeEstimate(writer, "PrecisionPercent", XmlConvert.ToString(Classifier.ClassifyLearnSamples(Classifier.RulesDatabaseSet[0])), Classifier.LearnSamplesSet.FileName);
        writeEstimate(writer, "ErrorPercent", XmlConvert.ToString(Classifier.ErrorLearnSamples(Classifier.RulesDatabaseSet[0])), Classifier.LearnSamplesSet.FileName);
    }
    // Interpretability / complexity metrics (no Table attribute). Type names and
    // getter spellings are kept exactly as the original serialiser emitted them.
    writeEstimate(writer, "GIBNormal", XmlConvert.ToString(Classifier.getGIBNormal()));
    writeEstimate(writer, "GIBSumStraigh", XmlConvert.ToString(Classifier.getGIBSumStrait()));
    writeEstimate(writer, "GIBSumReverse", XmlConvert.ToString(Classifier.getGIBSumReverse()));
    writeEstimate(writer, "GICNormal", XmlConvert.ToString(Classifier.getGICNormal()));
    writeEstimate(writer, "GICSumStraigh", XmlConvert.ToString(Classifier.getGICSumStraigth()));
    writeEstimate(writer, "GICSumReverse", XmlConvert.ToString(Classifier.getGICSumReverce()));
    writeEstimate(writer, "GISNormal", XmlConvert.ToString(Classifier.getGISNormal()));
    writeEstimate(writer, "GISSumStraigh", XmlConvert.ToString(Classifier.getGISSumStraigt()));
    writeEstimate(writer, "GISSumReverce", XmlConvert.ToString(Classifier.getGISSumReverce()));
    writeEstimate(writer, "LindisNormal", XmlConvert.ToString(Classifier.getLindisNormal()));
    writeEstimate(writer, "LindisSumStraigh", XmlConvert.ToString(Classifier.getLindisSumStraight()));
    writeEstimate(writer, "LindisSumReverse", XmlConvert.ToString(Classifier.getLindisSumReverse()));
    writeEstimate(writer, "NormalIndex", XmlConvert.ToString(Classifier.getNormalIndex()));
    writeEstimate(writer, "RealIndex", XmlConvert.ToString(Classifier.getIndexReal()));
    writeEstimate(writer, "SumStraigthIndex", XmlConvert.ToString(Classifier.getIndexSumStraigt()));
    writeEstimate(writer, "SumReverseIndex", XmlConvert.ToString(Classifier.getIndexSumReverse()));
    writeEstimate(writer, "ComplexitIt", XmlConvert.ToString(Classifier.getComplexit()));
    writeEstimate(writer, "CountRules", XmlConvert.ToString(Classifier.getRulesCount()));
    writer.WriteEndElement(); // </Estimates>
}

/// <summary>
/// Writes one "Estimate" element. The optional Table attribute is emitted first
/// so the attribute order (Table, Type, Value) matches the original output exactly;
/// callers pre-convert the value so XmlConvert overload resolution is unchanged.
/// </summary>
private static void writeEstimate(XmlWriter writer, string type, string value, string table = null) {
    writer.WriteStartElement("Estimate");
    if (table != null) {
        writer.WriteAttributeString("Table", table);
    }
    writer.WriteAttributeString("Type", type);
    writer.WriteAttributeString("Value", value);
    writer.WriteEndElement();
}
// Monkey-algorithm tuner for a Pittsburgh classifier: a population of rule-base
// copies ("monkeys") is improved through climb (crawl), watch-jump and somersault
// moves; the best rule base found replaces RulesDatabaseSet[0] when it is at
// least as good as the current one.
public override PCFuzzySystem TuneUpFuzzySystem(PCFuzzySystem Classifier, ILearnAlgorithmConf conf) {
    Init(conf);
    KnowlegeBasePCRules temp_c_Rule = new KnowlegeBasePCRules(Classifier.RulesDatabaseSet[0]);
    PCFuzzySystem result = Classifier;
    // Log/result files are named after the TEST sample file name.
    string file_string = @"..\logs_" + result.TestSamplesSet.FileName + ".txt";
    string file_string_to_txt = @"..\result_" + result.TestSamplesSet.FileName + ".txt";
    // Initial population: copies of the current rule base; monkeys with index > 3
    // get every term parameter perturbed with ~5% Gaussian noise.
    for (int t = 0; t < population_count; t++) {
        monkey[t] = new KnowlegeBasePCRules(result.RulesDatabaseSet[0]);
        if (t > 3) {
            for (int k = 0; k < result.RulesDatabaseSet[0].TermsSet.Count; k++) {
                for (int q = 0; q < result.RulesDatabaseSet[0].TermsSet[k].CountParams; q++) {
                    //monkey[t].TermsSet[k].Parametrs[q] = StaticRandom.NextDouble() * (result.RulesDatabaseSet[0].TermsSet[k].Max - result.RulesDatabaseSet[0].TermsSet[k].Min);
                    monkey[t].TermsSet[k].Parametrs[q] = GaussRandom.Random_gaussian(rand, monkey[t].TermsSet[k].Parametrs[q], monkey[t].TermsSet[k].Parametrs[q] * 0.05);
                }
            }
        }
        result.UnlaidProtectionFix(monkey[t]); // delete
        testvals[t] = result.ErrorLearnSamples(monkey[t]);
        Console.WriteLine("Begin: " + t.ToString() + " " + iter.ToString() + " " + testvals[t].ToString());
    }
    // Track the best starting monkey and the total number of tunable parameters.
    bestsolution = new KnowlegeBasePCRules(monkey.SelectBest(result, 1)[0]);
    bestsolutionnumber = result.ErrorLearnSamples(bestsolution);
    deltaLength = result.RulesDatabaseSet[0].TermsSet.Sum(x => x.Parametrs.Length);
    // Optional debug log header (parameters + initial per-monkey errors).
    if (debug) {
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true)) {
            file.WriteLine(DateTime.Today.ToString() + "\t" + result.LearnSamplesSet.FileName);
            file.WriteLine("Parameters:");
            file.WriteLine("Population\t" + population_count.ToString());
            file.WriteLine("Iteration count\t" + iter_amount.ToString());
            file.WriteLine("Crawl count\t" + crawl_iter.ToString());
            file.WriteLine("Jump count\t" + jump_iter.ToString());
            file.WriteLine("Somersault count\t" + somersault_iter.ToString());
            file.WriteLine("Crawl step\t" + step.ToString()); // crawl step
            file.WriteLine("Jump step\t" + watch_jump_parameter.ToString());
            file.WriteLine("Somersault left border\t" + somersault_interval_left.ToString());
            file.WriteLine("Somersault right border\t" + somersault_interval_right.ToString());
            file.WriteLine("\t\tMonkeys");
            file.Write("Iterations\t");
            for (int t = 0; t < population_count; t++) { file.Write("\t" + t); }
            file.WriteLine();
            file.Write("0\tbegin");
            for (int t = 0; t < population_count; t++) { file.Write("\t" + testvals[t].ToString()); }
            // excel insertions
            // largest value in the table
            file.WriteLine();
        }
    }
    //iter_amount = somersault_iter * (1 + jump_iter * (1 + crawl_iter));
    // Total number of elementary moves; used only for "done/total" progress output.
    iter_amount = (((crawl_iter + jump_iter) * jump_iter) + somersault_iter) * somersault_iter;
    // Main phase: crawl and local-jump rounds nested inside each somersault round.
    for (int r = 0; r < somersault_iter; r++) {
        for (int t = 0; t < jump_iter; t++) {
            for (int e = 0; e < crawl_iter; e++) {
                iter++;
                oneClimb(result, deltaLength, step);
                CheckForBest(result);
                //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString
                // debug
                if (debug) {
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true)) {
                        file.Write(iter.ToString() + "\tcrawl");
                        for (int p = 0; p < population_count; p++) { file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString()); }
                        file.WriteLine();
                    }
                }
            }
            for (int e = 0; e < jump_iter; e++) {
                iter++;
                oneWatchJump(result);
                //Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
                CheckForBest(result);
                // debug
                if (debug) {
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true)) {
                        file.Write(iter.ToString() + "\tlocaljump");
                        for (int p = 0; p < population_count; p++) { file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString()); }
                        file.WriteLine();
                    }
                }
            }
        }
        for (int e = 0; e < somersault_iter; e++) {
            iter++;
            oneGlobalJump(result);
            CheckForBest(result);
            // debug
            if (debug) {
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true)) {
                    file.Write(iter.ToString() + "\tglobaljump");
                    for (int p = 0; p < population_count; p++) { file.Write("\t" + result.ErrorLearnSamples(monkey[p]).ToString()); }
                    file.WriteLine();
                }
            }
            Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
        }
    }
    //Console.WriteLine(final_iter.ToString() + "/" + final_counter.ToString());
    //FOR VICTORY!!!
    // Refinement phase: shrink all step sizes by 10% per pass and repeat the moves.
    // NOTE(review): final_iter is never advanced inside this loop in the visible
    // code — confirm it is updated elsewhere (e.g. inside CheckForBest), otherwise
    // this loop cannot terminate once entered.
    while (final_iter < final_counter) {
        step *= 0.9;
        watch_jump_parameter *= 0.9;
        somersault_interval_left *= 0.9;
        somersault_interval_right *= 0.9;
        for (int r = 0; r < somersault_iter; r++) { oneClimb(result, deltaLength, step); CheckForBest(result); iter++; }
        for (int t = 0; t < jump_iter; t++) { oneWatchJump(result); CheckForBest(result); iter++; }
        for (int e = 0; e < crawl_iter; e++) { oneGlobalJump(result); CheckForBest(result); iter++; }
        Console.WriteLine(iter_amount.ToString() + "/" + iter.ToString());
    }
    /* for (int t = 0; t < population_count; t++)
     * if (result.ErrorLearnSamples(monkey[best]) < result.ErrorLearnSamples(monkey[t]))
     * best = t;
     */
    CheckForBest(result);
    // Install the tracked best solution when it beats (or ties) the current rule base.
    if (bestsolutionnumber <= result.ErrorLearnSamples(result.RulesDatabaseSet[0])) {
        result.RulesDatabaseSet[0] = bestsolution;
    }
    iter = 0;
    if (debug) {
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(file_string, true)) {
            file.WriteLine("Results\t" + result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
        }
    }
    if (totxt) {
        using (System.IO.StreamWriter file_result = new System.IO.StreamWriter(file_string_to_txt, true)) {
            file_result.WriteLine(result.ErrorLearnSamples(bestsolution).ToString() + "\t" + result.ErrorTestSamples(bestsolution).ToString());
        }
    }
    return(result);
}