/// <summary>
/// Console entry point for one multi-goal optimisation run: parses the command-line
/// parameters, loads the learn and test sample sets from the UFS file, builds and
/// fills the algorithm configuration, loads the singleton fuzzy system from the same
/// file, runs the tuning, and logs each stage to the console.
/// </summary>
/// <param name="args">Raw command-line arguments; parsed by fill_params into fields
/// such as file_in and confParams.</param>
/// <returns>Always 1 — no failure return path is visible in this method.</returns>
public override int Run(string[] args)
{
    Console.WriteLine("Start");
    // Populate instance fields (file_in, confParams, ...) from the command line.
    fill_params(args);
    Console.WriteLine("Params get \nfile in {0} \n", file_in);
    // Both learn and test sets come from the same UFS input file.
    Approx_learn_set = BaseUFSLoader.LoadLearnFromUFS(file_in);
    Console.WriteLine("Tra load");
    Approx_test_set = BaseUFSLoader.LoadTestFromUFS(file_in);
    Console.WriteLine("Tst load");
    // Configuration is sized by the number of variables in the learn set,
    // then overridden with the values parsed from the command line.
    conf = new MultiGoalOptimaze_conf();
    conf.Init(Approx_learn_set.CountVars);
    conf.loadParams(confParams);
    Console.WriteLine("Conf Filed");
    // The freshly constructed system is immediately replaced by the one
    // deserialised from the UFS file; the constructor call supplies the sample sets.
    Approx_Singletone = new SAFuzzySystem(Approx_learn_set, Approx_test_set);
    Approx_Singletone = SAFSUFSLoader.loadUFS(Approx_Singletone, file_in);
    Console.WriteLine("Classifier created");
    optimaze = new MultiGoalOpimize();
    Approx_Singletone = optimaze.TuneUpFuzzySystem(Approx_Singletone, conf);
    Console.WriteLine("Optimization complite");
    // NOTE(review): the save below is commented out, yet "Saved" is still printed —
    // the log is misleading; nothing is written to file_out here. Confirm whether
    // saving was intentionally disabled before relying on this output.
    // a_FS_UFS.saveToUFS(Class_Pittsburg, file_out);
    Console.WriteLine("Saved");
    return (1);
}
/// <summary>
/// Creates the configuration object used by this learning algorithm,
/// initialised for the given number of input features.
/// </summary>
/// <param name="CountFeatures">Number of features (input variables) in the data set.</param>
/// <returns>A newly constructed, initialised <see cref="MultiGoalOptimaze_conf"/>.</returns>
public override ILearnAlgorithmConf getConf(int CountFeatures)
{
    var configuration = new MultiGoalOptimaze_conf();
    configuration.Init(CountFeatures);
    return configuration;
}
/// <summary>
/// Multi-goal tuning loop for a singleton fuzzy system. Each outer iteration runs a
/// pool of sub-algorithms (PSO/ANT/BEE/ES/GA plus structural simplifiers) from the
/// current best rule base, scores every produced candidate on learn accuracy,
/// complexity and interpretability, removes duplicates, and picks the next best
/// system. If no candidate dominates, the mutation step sizes are divided down and
/// the inner loop retries. Progress is appended to ResultShow and each candidate is
/// saved to disk via saveFS.
/// </summary>
/// <param name="Approx">Fuzzy system to tune; mutated in place and returned.</param>
/// <param name="conf">Must actually be a MultiGoalOptimaze_conf; cast with "as" below.</param>
/// <returns>The tuned fuzzy system (same instance as <paramref name="Approx"/>).</returns>
public override SAFuzzySystem TuneUpFuzzySystem(SAFuzzySystem Approx, ILearnAlgorithmConf conf)
{
    // NOTE(review): millisecond-seeded Random gives very few distinct seeds; consider
    // a stronger seed source if reproducibility/variety of runs matters.
    Random rand = new Random(DateTime.Now.Millisecond);
    SAFuzzySystem result = Approx;
    // Background worker appears to drive progress reporting; handlers are defined
    // elsewhere in this class (BW_DoWork / BW_RunWorkerCompleted).
    BW.DoWork += new DoWorkEventHandler(BW_DoWork);
    BW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(BW_RunWorkerCompleted);
    BW.RunWorkerAsync();
    // NOTE(review): "as" cast is not null-checked — a wrong conf type would throw
    // NullReferenceException at config.Init2 below rather than a clear error.
    MultiGoalOptimaze_conf config = conf as MultiGoalOptimaze_conf;
    // Sub-algorithm output goes under "<exe dir>\FS\".
    string PathAlg = (new FileInfo(Application.ExecutablePath)).DirectoryName + "\\FS\\";
    config.Init2(PathAlg, Approx.LearnSamplesSet.FileName);
    // Copy user-facing (Russian-named) configuration properties into fields.
    // Percentages are converted to fractions in [0, 1].
    countFuzzySystem = config.Итераций_алгоритма;                       // outer iteration count
    allowSqare = config.Допустимый_процент_перекрытия_по_площади_термов / 100;
    allowBorder = config.Допустимый_процент_перекрытия_по_границам / 100;
    int seedPath = rand.Next();                                          // random tag used in saveFS paths
    sizePercent = config.азмер_шага_по_точности;                         // accuracy step size
    sizeComplexity = config.азмер_шага_по_сложности;                     // complexity step size
    sizeInteraply = config.азмер_шага_по_интерпретируемости;             // interpretability step size
    diviver = config.Уменьшать_шаги_в;                                   // step-shrink divisor
    trysBeforeDivide = config.Уменьшать_шаг_после;                       // inner tries before shrinking
    path = config.path;
    dataSetName = config.dataSetName;
    toMany = config.азрешено_похожих_систем;                             // allowed similar systems
    // Which sub-algorithms are enabled this run (BFO support is commented out).
    isPSO = config.toBool(config.Использовать_АРЧ);
    // isBFO = config.toBool(config.Использовать_АПБ);
    isANT = config.toBool(config.Использовать_НАМК);
    isBEE = config.toBool(config.Использовать_САПК);
    isES = config.toBool(config.Использовать_ЕС);
    isGA = config.toBool(config.Использовать_ГА);
    // Structural simplification switches.
    isTermShrink = config.toBool(config.Удалять_термы);                  // remove terms
    isRuleShrink = config.toBool(config.Удалять_правила);                // remove rules
    isUnionTerm = config.toBool(config.Объединять_термы);                // merge terms
    isLindBreakCross = config.toBool(config.Исключать_пересечение_лигвистически_далеких_термов);
    // How many times each enabled algorithm runs per tick.
    countANT = config.Использовать_НАМК_раз_за_такт;
    // countBFO = config.Использовать_за_такт_АПБ_раз;
    countPSO = config.Использовать_за_такт_АРЧ_раз;
    countBEE = config.Использовать_САПК_раз_за_такт;
    countES = config.Использовать_ЕС_раз_за_такт;
    countGA = config.Использовать_ГА_раз_за_такт;
    typeComplexity = (int)config.Критерий_сложности;                     // complexity criterion id
    typeInterpreting = (int)config.Критерий_интерпретируемости;          // interpretability criterion id

    // Build the sub-algorithm pool and a matching per-algorithm config list
    // (index i of one corresponds to index i of the other).
    List<IAbstractLearnAlgorithm> learnAlgorithms = initAlgoritms();
    List<ILearnAlgorithmConf> learnAlgorithmsconfig = initAlgoritmsConfigs(Approx.CountFeatures);
    // Parallel score lists: entry k of each describes candidate k in Storage.
    // These five collections MUST stay in lock-step; the catch blocks below undo
    // partial appends to preserve that invariant.
    List<double> ValueLPercent = new List<double>();
    List<double> ValueTPercent = new List<double>();
    List<double> ValueComplexity = new List<double>();
    List<double> ValueInterability = new List<double>();
    List<double> SummaryGoods = new List<double>();
    List<KnowlegeBaseSARules> Storage = new List<KnowlegeBaseSARules>();
    List<int> candidate = new List<int>();
    // Seed the archive with the incoming system's rule base and its scores.
    KnowlegeBaseSARules Best = result.RulesDatabaseSet[0];
    baseLearn = result.approxLearnSamples(result.RulesDatabaseSet[0]);
    ValueLPercent.Add(baseLearn);
    ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
    baseComplexity = getComplexity(result);
    ValueComplexity.Add(baseComplexity);
    baseIntebility = getInterpreting(result, allowBorder, allowSqare);
    ValueInterability.Add(baseIntebility);
    Storage.Add(Best);
    int NSCount = 0;   // total candidates generated so far (never reset)
    int deleted = 0;   // duplicates removed in the current inner pass
    for (int numberStep = 0; numberStep < countFuzzySystem; numberStep++)
    {
        bool mustToDivide = true;   // stays true if no inner pass yields a candidate
        int usedAlg = 0;            // surviving candidates produced this pass
        for (int tr = 0; tr < trysBeforeDivide; tr++)
        {
            deleted = 0;
            // Parallel.For(0, learnAlgorithms.Count(), i =>
            usedAlg = 0;
            for (int i = 0; i < learnAlgorithms.Count(); i++)
            {
                // Clone Best so each algorithm starts from the same baseline,
                // and make the clone the system's only active rule base.
                Console.WriteLine("F****d in Storage.Add(new a_Rules(Best))");
                Storage.Add(new KnowlegeBaseSARules(Best));
                Console.WriteLine("F****d in result.RulesDatabaseSet.Clear()");
                result.RulesDatabaseSet.Clear();
                Console.WriteLine("F****d in result.RulesDatabaseSet.Add( Storage[Storage.Count - 1])");
                result.RulesDatabaseSet.Add(Storage[Storage.Count - 1]);
                usedAlg++;
                // Tracks whether a thrown exception happened before any score was
                // appended — decides how much rollback the catch block must do.
                bool before_VAlue = true;
                try
                {
                    learnAlgorithms[i].TuneUpFuzzySystem(result, learnAlgorithmsconfig[i]);
                    GC.Collect();
                    before_VAlue = false;
                    // Score the tuned candidate on all four measures.
                    ValueLPercent.Add(result.approxLearnSamples(result.RulesDatabaseSet[0]));
                    ValueTPercent.Add(result.approxTestSamples(result.RulesDatabaseSet[0]));
                    ValueComplexity.Add(getComplexity(result));
                    ValueInterability.Add(getInterpreting(result, allowBorder, allowSqare));
                    // temp only exists to detect NaN in any scored component.
                    double temp = ValueLPercent[ValueLPercent.Count - 1] + ValueComplexity[ValueComplexity.Count() - 1] + ValueInterability[ValueInterability.Count() - 1];
                    Storage[Storage.Count - 1] = result.RulesDatabaseSet[0];
                    if (double.IsNaN(temp))
                    {
                        // NaN candidate: roll back all five parallel appends.
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " is NAN");
                        ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                        ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                        ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                        ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                        Storage.RemoveAt(Storage.Count() - 1);
                        usedAlg--;
                    }
                }
                catch (Exception)
                {
                    if (before_VAlue)
                    {
                        // Failure before scoring: nothing appended beyond Storage.
                        // NOTE(review): the Storage clone added above is NOT removed
                        // here and usedAlg is not decremented — confirm whether the
                        // lists can drift out of step on this path.
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " before VAlue");
                    }
                    else
                    {
                        // Failure mid-scoring: undo the appends made so far.
                        // NOTE(review): this removes from all four score lists even
                        // though the throw may have occurred before all four Adds ran.
                        Console.WriteLine("FuckAlarm " + i.ToString() + learnAlgorithms[i].ToString() + " after VAlue");
                        ValueLPercent.RemoveAt(ValueLPercent.Count() - 1);
                        ValueTPercent.RemoveAt(ValueTPercent.Count() - 1);
                        ValueComplexity.RemoveAt(ValueComplexity.Count() - 1);
                        ValueInterability.RemoveAt(ValueInterability.Count() - 1);
                        Storage.RemoveAt(Storage.Count() - 1);
                    }
                }
                NSCount++;
                // Append a tab-separated progress row for the latest candidate.
                Console.WriteLine("F****d in ResultShow");
                ResultShow += "[" + NSCount.ToString() + "]\t" + ValueLPercent[ValueLPercent.Count() - 1].ToString() + "\t" + ValueTPercent[ValueTPercent.Count() - 1].ToString() + "\t" + ValueComplexity[ValueComplexity.Count() - 1].ToString() + "\t" + ValueInterability[ValueInterability.Count() - 1].ToString() + Environment.NewLine;
                // i++;
            }
            //);
            // Drop near-duplicate candidates, then look for any candidate that can
            // replace the current best; if one exists, stop shrinking steps.
            Console.WriteLine("F****d in deleted");
            deleted = removeDublicate(ValueLPercent, ValueComplexity, ValueInterability, ValueTPercent, Storage, rand);
            usedAlg -= deleted;
            Console.WriteLine("F****d in candidate");
            candidate = canBeNext(ValueLPercent, ValueComplexity, ValueInterability);
            if (candidate.Count() > 0)
            {
                mustToDivide = false;
                break;
            }
        }
        if (mustToDivide)
        {
            // No acceptable candidate after trysBeforeDivide passes: shrink all
            // three step sizes and retry this outer iteration.
            // NOTE(review): a modal MessageBox inside the optimisation loop blocks
            // the run until dismissed — confirm this is intended for batch use.
            MessageBox.Show("Divided happend ");
            sizePercent = sizePercent / diviver;
            sizeComplexity = sizeComplexity / diviver;
            sizeInteraply = sizeInteraply / diviver;
            continue;
        }
        // Rank the surviving candidates and promote the best one.
        Console.WriteLine("F****d in SummaryGoods");
        SummaryGoods = reCalcSummary(SummaryGoods, ValueLPercent, ValueComplexity, ValueInterability);
        Console.WriteLine("F****d in indexofBest");
        int indexofBest = getNewBest(candidate, SummaryGoods);
        // usedAsNext counts how often each archive index has been promoted.
        if (usedAsNext.ContainsKey(indexofBest))
        {
            usedAsNext[indexofBest]++;
        }
        else
        {
            usedAsNext.Add(indexofBest, 1);
        }
        Console.WriteLine("Best");
        Best = Storage[indexofBest];
        // Persist every candidate produced this outer step, flagging the new best.
        Console.WriteLine("F****d in for (int i = (Storage.Count - learnAlgorithms.Count); i < Storage.Count(); i++)");
        int toSaveCounter = NSCount - usedAlg;
        for (int i = (Storage.Count - usedAlg); i < Storage.Count(); i++)
        {
            result.RulesDatabaseSet[0] = Storage[i];
            saveFS(result, path, dataSetName, seedPath, numberStep, toSaveCounter, Best.Equals(result.RulesDatabaseSet[0]));
            toSaveCounter++;
        }
        // Restore the promoted best as the active rule base and refresh baselines.
        Console.WriteLine("F****d in result.RulesDatabaseSet[0] = Best;");
        result.RulesDatabaseSet[0] = Best;
        Console.WriteLine("F****d in End");
        baseLearn = result.approxLearnSamples(result.RulesDatabaseSet[0]);// ClassifyLearnSamples();
        baseComplexity = getComplexity(result);
        baseIntebility = getInterpreting(result, allowBorder, allowSqare);
        candidate.Clear();
        GC.Collect();
    }
    // Signal the background worker to finish.
    // NOTE(review): the fixed 10 s sleep presumably lets BW_RunWorkerCompleted fire
    // before returning — a proper synchronisation primitive would be safer; confirm.
    isEnd = true;
    Thread.Sleep(10000);
    result.RulesDatabaseSet[0].TermsSet.Trim();
    return (result);
}