/// <summary>
/// Builds the global GUID map: every hero and map that actually occurs in the
/// statistics dataset gets a sequential integer id ("Hero#i" / "Map#i" => id).
/// Heroes/maps with no recorded matches are skipped and reported to the console.
/// </summary>
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    OpenSource(input);
    Statistic[] data = (Statistic[])ReadData();
    List <Hero> hData = (List <Hero>)JSonParser.Load(File.ReadAllText(heroInput, Encoding.Default), typeof(List <Hero>));
    List <Map> mData = (List <Map>)JSonParser.Load(File.ReadAllText(mapInput, Encoding.Default), typeof(List <Map>));
    Dictionary <string, int> mapper = new Dictionary <string, int>();
    int id = 0;
    // Only heroes that took part in at least one recorded match are mapped.
    // Record format: "Hero#<heroIndex>" => global identifier.
    for (int i = 0; i < data[0].Statictic.Matches.Length; i++)
    {
        if (data.Sum((x) => x.Statictic.Matches[i]) > 0)
        {
            mapper.Add($"Hero#{i}", id);
            // BUG FIX: log the id actually stored in the map. The old code
            // incremented first and printed "id;id+1" — off by one, plus a
            // stale second id left over from the removed per-team mapping.
            Console.WriteLine(hData[i] + " => " + id);
            id++;
        }
        else
        {
            Console.WriteLine(hData[i] + " отсутствует в датасете");
        }
    }
    // Maps follow heroes in the same id sequence: "Map#<mapIndex>" => id.
    for (int i = 0; i < data.Length; i++)
    {
        if (data[i].Statictic.Ammount > 0)
        {
            mapper.Add($"Map#{i}", id);
            // BUG FIX: same off-by-one in the logged id as above.
            Console.WriteLine(mData[i] + " => " + id);
            id++;
        }
        else
        {
            Console.WriteLine(mData[i] + " отсутствует в датасете");
        }
    }
    Save(output, mapper, mapper.GetType());
}
/// <summary>
/// Builds a neural-network dataset from the replay source: each record is parsed
/// into a string[] training line; when 'addition' is set, a second ("reverse")
/// line with the inverted outcome label is added for every record.
/// </summary>
public override void Run(string[] args) {
    base.Run(args);
    Validate();
    // Global "Hero#/Map#" => id mapping produced by the GUID-mapper step.
    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));
    // NOTE(review): HashSet<string[]> uses reference equality for arrays, so this
    // set never deduplicates value-equal lines — confirm whether dedup was intended
    // (cf. ArrayEqualityComparer used by the aggregating Run elsewhere in this file).
    HashSet <string[]> nnDataset = new HashSet <string[]>();
    OpenSource(input);
    object data;
    if (addition) {
        Console.WriteLine("Датасет будет дополнен обратными записями");
    }
    while ((data = ReadData()) != null) {
        // Number of columns in the raw record; the last column holds the outcome label.
        inputCount = ((object[])data).Length;
        string[] line = ParseData(data);
        if (line == null) {
            // Record rejected by the parser (e.g. filtered out) — skip it.
            continue;
        }
        nnDataset.Add(line);
        if (addition == true) {
            // Reverse record: presumably the losing-side view of the same match
            // (TODO confirm ParseLoseData semantics); flip the 0/1 label in the
            // last column so the mirrored sample carries the opposite outcome.
            string[] reverseLine = ParseLoseData(data);
            reverseLine[inputCount - 1] = (1 - Int32.Parse(reverseLine[inputCount - 1])).ToString();
            nnDataset.Add(reverseLine);
        }
        // Progress report every 100k collected lines.
        if (nnDataset.Count % 100000 == 0) {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
        }
    }
    Save(output, nnDataset.ToList(), null);
}
/// <summary>
/// Command-line entry for exporting statistics to an Excel-compatible CSV.
/// Reads named "key=value" parameters from the static 'args' field:
/// "i" (required) — input Statistic[] JSON file; "o" (optional) — output path,
/// defaulting to "&lt;name&gt;_o&lt;ext&gt;" next to the input file.
/// Throws Exception when "i" is missing, FileNotFoundException when the input
/// file does not exist.
/// </summary>
public static void SaveTOExcelCSVFormat()
{
    Dictionary <string, string> NamedParam = new Dictionary <string, string>();
    // Collect only "key=value" pairs; positional arguments are ignored.
    // (The original code also collected positionals into a never-read list.)
    foreach (string arg in args)
    {
        string[] tryParse = arg.Split('=');
        if (tryParse.Length == 2)
        {
            NamedParam[tryParse[0]] = tryParse[1];
        }
    }
    if (!NamedParam.ContainsKey("i"))
    {
        throw new Exception("Не инициализирован обязательный параметр i");
    }
    if (!File.Exists(NamedParam["i"]))
    {
        throw new FileNotFoundException($"{NamedParam["i"]} not found");
    }
    if (!NamedParam.ContainsKey("o"))
    {
        // Default output path: same directory, "_o" suffix before the extension.
        string input = NamedParam["i"];
        string fDir = Path.GetDirectoryName(input);
        string fName = Path.GetFileNameWithoutExtension(input);
        string fExt = Path.GetExtension(input);
        NamedParam["o"] = Path.Combine(fDir, String.Concat(fName, "_o", fExt));
    }
    Statistic[] statistic = (Statistic[])JSonParser.Load(File.ReadAllText(NamedParam["i"]), typeof(Statistic[]));
    SaveTOExcelCSVFormat(statistic, NamedParam["o"]);
}
/// <summary>
/// Entry point of the schema-migration tool. Loads every JSON source (heroes,
/// maps, statistics, matchups, clusters, trained networks), then generates a
/// PostgreSQL schema dump (./Database/create.sql) and a data dump
/// (./Database/insert.sql).
/// </summary>
static void Main(string[] args) {
    log("debug", "Процесс миграции схемы запущен");
    log("debug", "Дамп будет создан для базы данных PostegreSQL");
    // Ensure output directories exist.
    if (!Directory.Exists("./Database")) {
        Directory.CreateDirectory("./Database");
    }
    if (!Directory.Exists("./Dataset")) {
        Directory.CreateDirectory("./Dataset");
    }
    // ---- Load all JSON sources ----
    HeroService heroes = new HeroService();
    log("debug", "Загрузка Hero.json");
    heroes.Load("./Source/Hero/Hero.json");
    log("succes", "Hero.json Загружен");
    HeroDetailsService details = new HeroDetailsService();
    log("debug", "Загрузка HeroDetails.json");
    details.Load("./Source/Hero/HeroDetails.json");
    log("succes", "HeroDetails.json Загружен");
    HeroClustersSevice clusters = new HeroClustersSevice();
    log("debug", "Загрузка HeroClusters.json");
    clusters.Load("./Source/Hero/HeroClusters.json");
    log("succes", "HeroClusters.json Загружен");
    log("debug", "Загрузка HeroWebExtension.json");
    HeroWebExtension[] webExtension = (HeroWebExtension[]) JSonParser.Load(File.ReadAllText("./Source/Hero/HeroWebExtension.json"), typeof(HeroWebExtension[]));
    log("succes", "HeroWebExtension.json Загружен");
    MapService maps = new MapService();
    log("debug", "Загрузка Map.json");
    maps.Load("./Source/Map/Map.json");
    log("succes", "Map.json Загружен");
    StatisticService stats = new StatisticService();
    log("debug", "Загрузка Statistic.json");
    stats.Load("./Source/Replay/Statistic.json");
    log("succes", "Statistic.json Загружен");
    HeroStatisticService hstats = new HeroStatisticService();
    log("debug", "Загрузка Statistic_sho.json");
    hstats.Load("./Source/Replay/Statistic_sho.json");
    log("succes", "Statistic_sho.json Загружен");
    MatchupService matchups = new MatchupService();
    log("debug", "Загрузка MatchupTable.json");
    matchups.Load("./Source/Replay/MatchupTable.json");
    log("succes", "MatchupTable.json Загружен");
    log("debug", "Формирование датасета начато");
    Dataset set = MakeDataset();
    log("succes", "Датасета сформирован");
    // ---- Configure the Postgres converter (column-name remapping) ----
    log("debug", "Инициализация PostegreSQL Converter ORM");
    // "group" is a reserved word in SQL; the id_* keys follow column-name convention.
    PostegresConverter 
    converter = new PostegresConverter();
    log("info", "========Конвертация ключей===============");
    log("info", "group => _group");
    log("info", "min_id => id_min");
    log("info", "max_id => id_max");
    log("info", "avg_id => id_avg");
    converter.CustomNameMapper["group"] = "_group";
    converter.CustomNameMapper["min_id"] = "id_min";
    converter.CustomNameMapper["max_id"] = "id_max";
    converter.CustomNameMapper["avg_id"] = "id_avg";
    log("info", "=========================================");
    log("succes", "PostegreSQL Converter ORM инициалирован");
    log("debug", "Генерация секвенсеров");
    // Sequence-reset SQL prepended to the insert dump.
    string sequensers = @" alter sequence gaussian_id_seq minvalue 0 start with 0; select setval('gaussian_id_seq', 0, false); alter sequence heroclusters_id_seq minvalue 0 start with 0; select setval('heroclusters_id_seq', 0, false); alter sequence statisticheroesmax_id_seq minvalue 0 start with 0; select setval('statisticheroesmax_id_seq', 0, false); alter sequence statisticheroesmin_id_seq minvalue 0 start with 0; select setval('statisticheroesmin_id_seq', 0, false); alter sequence statisticheroesavg_id_seq minvalue 0 start with 0; select setval('statisticheroesavg_id_seq', 0, false);";
    log("succes", "Cеквенсеры сгенерированы");
    log("debug", "Процесс генерации схемы начат");
    log("info", "========Генерация словарей===============");
    // One SQL dictionary table per enum found in HoTS_Service.Entity.Enum.
    string[] enumsTable = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => {
            log("info", _enum.FullName);
            return(converter.CreateDictionary(_enum.Name));
        })
        .ToArray();
    log("info", "=========================================");
    log("info", "========Генерация таблиц=================");
    // 'tables' collects CREATE TABLE statements; 'data' collects INSERT statements.
    Dictionary <string, string> tables = new Dictionary <string, string>();
    Dictionary <string, string> data = new Dictionary <string, string>();
    tables["heroesTable"] = converter.CreateTable("Hero", typeof(Hero), "Id", new List <Foreign> {
        new Foreign() { DataTable = "HeroGroup", Key = "_group", ForeignKey = "id" },
        new Foreign() { DataTable = "HeroSubGroup", Key = "subgroup", ForeignKey = "id" }
    });
    log("info", "Hero");
    tables["detailsTable"] = converter.CreateTable("HeroDetails", typeof(HeroDetails), "id", new List <Foreign> {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" },
        new Foreign() { DataTable = "Difficulty", Key = "difficulty", ForeignKey = "id" },
        new Foreign() { DataTable = "Franchise", Key = "franchise", ForeignKey = "id" },
        new Foreign() { DataTable = "ResourceType", Key = "resourcetype", ForeignKey = "id" }
    });
    log("info", "HeroDetails");
    tables["heroWebExtension"] = converter.CreateTable("HeroWebExtension", typeof(HeroWebExtension), "id", new List <Foreign> {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }
    });
    log("info", "HeroWebExtension");
    tables["mapTable"] = converter.CreateTable("Map", typeof(Map), "id", null);
    log("info", "Map");
    tables["statisticTable"] = StatisticSchema();
    log("info", "Statistic");
    tables["statisticShoMin"] = converter.CreateTable("StatisticHeroesMin", typeof(HeroStatisticItemMin), "id", null);
    log("info", "StatitsticHeroesMin");
    tables["statisticShoMax"] = converter.CreateTable("StatisticHeroesMax", typeof(HeroStatisticItemMax), "id", null);
    log("info", "StatitsticHeroesMax");
    tables["statisticShoAvg"] = converter.CreateTable("StatisticHeroesAvg", typeof(HeroStatisticItemAvg), "id", null);
    log("info", "StatitsticHeroesAvg");
    tables["statisticSho"] = converter.CreateTable("StatisticHeroes", typeof(HeroStatistic), "id", new List <Foreign> {
        new Foreign() { DataTable = "StatisticHeroesMin", Key = "id_min", ForeignKey = "id" },
        new Foreign() { DataTable = "StatisticHeroesAvg", Key = "id_avg", ForeignKey = "id" },
        new Foreign() { DataTable = "StatisticHeroesMax", Key = "id_max", ForeignKey = "id" },
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }
    });
    log("info", "StatitsticHeroes");
    tables["matchupTable"] = MatchupTableSchema();
    log("info", "MatchupTable");
    tables["gaussian"] = converter.CreateTable("Gaussian", typeof(Gaussian) , "id", null);
    log("info", "Gaussian");
    tables["probabilities"] = converter.CreateTable("GaussianProbabilities", typeof(Probabilities) , "id", new List <Foreign> {
        new Foreign() { DataTable = "Gaussian", Key = "gaussian_id", ForeignKey = "id" }
    });
    log("info", "GaussianProbabilities");
    tables["heroClusters"] = converter.CreateTable("HeroClusters", typeof(HeroClusters) , "id", new List <Foreign> {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" },
        new Foreign() { DataTable = "Gaussian", Key = "gaussian", ForeignKey = "id" },
        new Foreign() { DataTable = "HeroSubGroup", Key = "subgroupcluster", ForeignKey = "id" }
    });
    log("info", "HeroClusters");
    tables["dataset"] = converter.CreateTable("Dataset", typeof(Dataset), "id", null);
    log("info", "Dataset");
    tables["trainingState"] = converter.CreateTable("TrainingState", typeof(LogInfo), "id", null);
    log("info", "TrainingState");
    // NOTE(review): key is "trainMeta" here but "trainingMeta" in the 'data'
    // dictionary below; harmless (only the values are joined into the dumps)
    // but inconsistent.
    tables["trainMeta"] = converter.CreateTable("TrainingMeta", typeof(TrainMeta), "id", null);
    log("info", "TrainingMeta");
    tables["network"] = converter.CreateTable("Network", typeof(Network) , "id", new List <Foreign> {
        new Foreign() { DataTable = "Dataset", Key = "dataset_id", ForeignKey = "id" },
        new Foreign() { DataTable = "TrainingState", Key = "state_id", ForeignKey = "id" },
        new Foreign() { DataTable = "TrainingMeta", Key = "meta_id", ForeignKey = "id" },
    });
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Схема успешно сгенерирована");
    log("info", "========Генерация дампа данных===========");
    // INSERTs for the enum dictionary tables, mirroring the CREATEs above.
    string[] enumsData = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => converter.InsertDictionary(_enum))
        .ToArray();
    data["heroesTable"] = converter.Insert("Hero", heroes.All());
    log("info", "Hero");
    data["detailsTable"] = converter.Insert("HeroDetails", 
        details.All());
    log("info", "HeroDetails");
    data["heroWebExtension"] = converter.Insert("HeroWebExtension", webExtension);
    log("info", "HeroWebExtension");
    data["mapTable"] = converter.Insert("Map", maps.All());
    log("info", "Map");
    data["statisticTable"] = StatisticData(stats);
    log("info", "Statistic");
    // NOTE(review): assumes hstats.All() returns a tuple ordered (avg, min, max)
    // — Item2=min, Item3=max, Item1=avg. Verify against HeroStatisticService.
    data["statisticShoMin"] = converter.Insert("StatisticHeroesMin", hstats.All().Item2);
    log("info", "StatisticHeroesMin");
    data["statisticShoMax"] = converter.Insert("StatisticHeroesMax", hstats.All().Item3);
    log("info", "StatisticHeroesMax");
    data["statisticShoAvg"] = converter.Insert("StatisticHeroesAvg", hstats.All().Item1);
    log("info", "StatisticHeroesAvg");
    data["statisticSho"] = HeroesStatisticData(hstats);
    log("info", "HeroesStatistic");
    data["matchupTable"] = MatchupData(matchups, heroes.Count());
    // Flatten the per-cluster probability arrays; probId is a running surrogate key.
    int probId = 0;
    data["gaussian"] = converter.Insert("Gaussian", clusters.Select(x => x.Gaussian));
    log("info", "Gaussian");
    data["probabilities"] = converter.Insert("GaussianProbabilities", clusters.
        Select(x => x.Gaussian.Probability.
            Select(y => new Probabilities() {
                id = probId++,
                value = y,
                gaussian_id = x.Id,
            })).SelectMany(z => z));
    log("info", "GaussianProbabilities");
    data["heroClusters"] = converter.Insert("HeroClusters", clusters.All());
    log("info", "HeroClusters");
    data["dataset"] = converter.Insert("Dataset", set);
    log("info", "Dataset");
    // ---- Training reports and network dumps ----
    // Report files live under ...\Report\; "Best" in the path marks best-run files.
    string[] trainingStatesAll = Directory
        .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories);
    string[] trainingStates = trainingStatesAll
        .Where(x => !x.Contains("Best") && x.Contains("\\Report\\"))
        .ToArray();
    string[] trainingStatesBest = trainingStatesAll
        .Where(x => x.Contains("Best") && x.Contains("\\Report\\"))
        .ToArray();
    // File-creation ticks double as stable ids linking state, meta and network rows.
    long[] traingingStateIds = trainingStates
        .Select(file => File.GetCreationTime(file).Ticks)
        .ToArray();
    long[] traingingStateIdsBest = trainingStatesBest
        .Select(file => File.GetCreationTime(file).Ticks)
        .ToArray();
    var trainigsStateData = trainingStates
        .Select(x => File.ReadAllText(x))
        .Select(json => ((Dictionary <string, dynamic>)JSONWebParser.Load(json)))
        .Select((obj, index) => new {
            id = traingingStateIds[index],
            error = (double)obj["error"],
            iteration = (int)obj["iteration"],
            percent = (double)obj["percent"],
            validError = (double)obj["validError"],
            validPercent = (double)obj["validPercent"]
        })
        .Concat(
            trainingStatesBest
            .Select(x => File.ReadAllText(x))
            .Select(json => ((Dictionary <string, dynamic>)JSONWebParser.Load(json)))
            .Select((obj, index) => new {
                id = traingingStateIdsBest[index],
                error = (double)obj["error"],
                iteration = (int)obj["iteration"],
                percent = (double)obj["percent"],
                validError = (double)obj["validError"],
                validPercent = (double)obj["validPercent"]
            }));
    data["trainingState"] = converter.Insert("TrainingState", trainigsStateData);
    log("info", "TrainingState");
    // Network dumps are the JSON files outside the Report folders.
    string[] networksAll = Directory
        .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories)
        .ToArray();
    string[] networks = networksAll
        .Where(x => !x.Contains("Best") && !x.Contains("\\Report\\"))
        .ToArray();
    string[] networksBest = networksAll
        .Where(x => x.Contains("Best") && !x.Contains("\\Report\\"))
        .ToArray();
    JavaScriptSerializer serializer = new JavaScriptSerializer();
    // NOTE(review): meta/network rows reuse traingingStateIds (ids derived from
    // the *report* files) indexed by *network*-file position — this assumes the
    // report and network file lists pair up one-to-one in the same order; verify.
    var networksMeta = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize <NetworkTuple>(json).Meta)
        .Select((obj, index) => new {
            Alias = obj.Alias,
            ClusterPath = obj.ClusterPath,
            Name = obj.Name,
            Description = obj.Description,
            Id = traingingStateIds[index]
        }
        )
        .Concat(
            networksBest
            .Select(x => File.ReadAllText(x))
            .Select(json => serializer.Deserialize <NetworkTuple>(json).Meta)
            .Select((obj, index) => new {
                Alias = obj.Alias,
                ClusterPath = obj.ClusterPath,
                Name = obj.Name,
                Description = obj.Description,
                Id = traingingStateIdsBest[index]
            }));
    data["trainingMeta"] = converter.Insert("TrainingMeta", networksMeta);
    log("info", "TrainingMeta");
    var networksData = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize <NetworkTuple>(json).Network)
        .Select((x, index) => new Network() {
            dataset_id = set.id,
            data = new Json(File.ReadAllText(networks[index])),
            state_id = traingingStateIds[index],
            meta_id = traingingStateIds[index],
            isBest = false,
            id = traingingStateIds[index]
        })
        .Concat(
            networksBest
            .Select(x => File.ReadAllText(x))
            .Select(json => serializer.Deserialize <NetworkTuple>(json).Network)
            .Select((x, index) => new Network() {
                dataset_id = set.id,
                data = new Json(File.ReadAllText(networksBest[index])),
                state_id = traingingStateIdsBest[index],
                meta_id = traingingStateIdsBest[index],
                isBest = true,
                id = traingingStateIdsBest[index]
            }));
    data["network"] = converter.Insert("Network", networksData);
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Дамп данных успешно сгенерирован");
    log("succes", "Схема успешно сгенерирована");
    // Write the schema dump, then the data dump (sequences first, then enums, then rows).
    File.WriteAllText("./Database/create.sql", string.Join("\n", enumsTable) + string.Join("\n", tables.Select(x => x.Value).ToArray()));
    File.WriteAllText("./Database/insert.sql", sequensers + "\n\n" + string.Join("\n", enumsData) + string.Join("\n", data.Select(x => x.Value).ToArray()));
}
/// <summary>
/// Reads the file named by the "i" parameter and deserializes it as a
/// Statistic[] (returned as object per the base-class contract).
/// </summary>
protected override object ReadData()
{
    string rawJson = File.ReadAllText(NamedParam["i"], Encoding.Default);
    return JSonParser.Load(rawJson, typeof(Statistic[]));
}
/// <summary>
/// Builds an aggregated neural-network dataset: parsed input vectors (SByte[])
/// are deduplicated via ArrayEqualityComparer; duplicates accumulate the sum of
/// their labels and an occurrence count, so the average label can be recovered.
/// </summary>
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    // Idiom: direct boolean test instead of `filter == true`.
    if (filter)
    {
        Console.WriteLine("Фильтр для образов в диапазоне [0.4,0.6] включен");
        Console.WriteLine("Фильтр для случайных образов включен");
    }
    Console.WriteLine($"HERO_SUBGROUP_COUNT = {HERO_SUBGROUP_COUNT}");
    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));
    // Count "Hero#<id>" entries in the GUID map (map entries are excluded).
    foreach (var it in guidMapper)
    {
        if (it.Key.Split('#')[0] == "Hero")
        {
            heroCount++;
        }
    }
    Console.WriteLine($"HERO_COUNT = {heroCount}");
    heroService = new HeroService();
    heroService.Load(heroInput);
    // Key: input vector; value: (sum of labels, occurrence count).
    Dictionary <SByte[], Tuple <double, int> > nnDataset = new Dictionary <SByte[], Tuple <double, int> >(new ArrayEqualityComparer());
    OpenSource(input);
    object data;
    bool wasPrint = false;
    while ((data = ReadData()) != null)
    {
        var line = ParseData(data);
        if (line == null)
        {
            // Record rejected by the parser — skip it.
            continue;
        }
        // FIX: single TryGetValue lookup instead of ContainsKey + two indexer
        // reads (the classic double-lookup pitfall); behavior is unchanged.
        if (nnDataset.TryGetValue(line.Item1, out var prev))
        {
            nnDataset[line.Item1] = new Tuple <double, int>(prev.Item1 + line.Item2, prev.Item2 + 1);
        }
        else
        {
            nnDataset[line.Item1] = new Tuple <double, int>(line.Item2, 1);
        }
        // wasPrint suppresses repeated progress lines while Count sits on a
        // multiple of 100000 (aggregation may leave Count unchanged for many
        // iterations); it re-arms once Count moves past the boundary.
        if (nnDataset.Count % 100000 == 0 && !wasPrint)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
            wasPrint = true;
        }
        if (nnDataset.Count % 100000 == 1)
        {
            wasPrint = false;
        }
    }
    Save(output, nnDataset, null);
}