/// <summary>
/// Builds the neural-network training dataset: loads the GUID mapper and the
/// hero schema, streams records from the configured input source, encodes each
/// record via <c>ParseData</c> and saves the accumulated rows to the output.
/// Command-line arguments are handled by <c>base.Run</c> / <c>Validate</c>.
/// </summary>
public override void Run(string[] args)
{
    base.Run(args);
    Validate();

    Console.WriteLine($"HERO_SUBGROUP_COUNT = {HERO_SUBGROUP_COUNT}");
    Console.WriteLine($"MAP_COUNT = {MAP_COUNT}");

    // Load the GUID -> index mapping and count entries describing heroes.
    // Keys appear to be of the form "<Kind>#<rest>"; "Hero" before the '#'
    // marks a hero entry.
    guidMapper = (Dictionary<string, int>)JSonParser.Load(
        File.ReadAllText(guidInput), typeof(Dictionary<string, int>));
    foreach (var it in guidMapper)
    {
        if (it.Key.Split('#')[0] == "Hero")
        {
            heroCount++;
        }
    }
    Console.WriteLine($"HERO_COUNT = {heroCount}");

    heroService = new HeroService();
    heroService.Load(heroInput);

    List<SByte[]> nnDataset = new List<SByte[]>();
    OpenSource(input);
    object data;
    while ((data = ReadData()) != null)
    {
        var line = ParseData(data);
        if (line == null)
        {
            continue;
        }
        nnDataset.Add(line);
        // Progress report every 100 000 accepted rows.
        if (nnDataset.Count % 100000 == 0)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
        }
    }
    // FIX: the original called nnDataset.ToList() here, duplicating the whole
    // (potentially multi-hundred-thousand-row) list for no reason right before
    // saving it; nnDataset is already a List and is not used afterwards.
    Save(output, nnDataset, null);
}
// Migration entry point: loads every source JSON (heroes, maps, replay
// statistics, clusters, trained networks), feeds them through the
// PostegresConverter ORM, and writes two SQL dump files:
//   ./Database/create.sql - schema (enum dictionaries + tables)
//   ./Database/insert.sql - sequencer resets + data inserts
static void Main(string[] args)
{
    log("debug", "Процесс миграции схемы запущен");
    log("debug", "Дамп будет создан для базы данных PostegreSQL");

    // Ensure output directories exist.
    if (!Directory.Exists("./Database")) { Directory.CreateDirectory("./Database"); }
    if (!Directory.Exists("./Dataset")) { Directory.CreateDirectory("./Dataset"); }

    // ---- Load all source JSON files via the project services -------------
    HeroService heroes = new HeroService();
    log("debug", "Загрузка Hero.json");
    heroes.Load("./Source/Hero/Hero.json");
    log("succes", "Hero.json Загружен");
    HeroDetailsService details = new HeroDetailsService();
    log("debug", "Загрузка HeroDetails.json");
    details.Load("./Source/Hero/HeroDetails.json");
    log("succes", "HeroDetails.json Загружен");
    HeroClustersSevice clusters = new HeroClustersSevice();
    log("debug", "Загрузка HeroClusters.json");
    clusters.Load("./Source/Hero/HeroClusters.json");
    log("succes", "HeroClusters.json Загружен");
    log("debug", "Загрузка HeroWebExtension.json");
    HeroWebExtension[] webExtension = (HeroWebExtension[])JSonParser.Load(File.ReadAllText("./Source/Hero/HeroWebExtension.json"), typeof(HeroWebExtension[]));
    log("succes", "HeroWebExtension.json Загружен");
    MapService maps = new MapService();
    log("debug", "Загрузка Map.json");
    maps.Load("./Source/Map/Map.json");
    log("succes", "Map.json Загружен");
    StatisticService stats = new StatisticService();
    log("debug", "Загрузка Statistic.json");
    stats.Load("./Source/Replay/Statistic.json");
    log("succes", "Statistic.json Загружен");
    HeroStatisticService hstats = new HeroStatisticService();
    log("debug", "Загрузка Statistic_sho.json");
    hstats.Load("./Source/Replay/Statistic_sho.json");
    log("succes", "Statistic_sho.json Загружен");
    MatchupService matchups = new MatchupService();
    log("debug", "Загрузка MatchupTable.json");
    matchups.Load("./Source/Replay/MatchupTable.json");
    log("succes", "MatchupTable.json Загружен");

    log("debug", "Формирование датасета начато");
    Dataset set = MakeDataset();
    log("succes", "Датасета сформирован");

    // ---- Configure the SQL converter -------------------------------------
    log("debug", "Инициализация PostegreSQL Converter ORM");
    PostegresConverter converter = new PostegresConverter();
    log("info", "========Конвертация ключей===============");
    log("info", "group => _group");
    log("info", "min_id => id_min");
    log("info", "max_id => id_max");
    log("info", "avg_id => id_avg");
    // Rename columns that clash with SQL keywords or follow the id_* column
    // convention used by the generated schema.
    converter.CustomNameMapper["group"] = "_group";
    converter.CustomNameMapper["min_id"] = "id_min";
    converter.CustomNameMapper["max_id"] = "id_max";
    converter.CustomNameMapper["avg_id"] = "id_avg";
    log("info", "=========================================");
    log("succes", "PostegreSQL Converter ORM инициалирован");

    // ---- Sequence reset statements, prepended to insert.sql --------------
    log("debug", "Генерация секвенсеров");
    string sequensers = @" alter sequence gaussian_id_seq minvalue 0 start with 0; select setval('gaussian_id_seq', 0, false); alter sequence heroclusters_id_seq minvalue 0 start with 0; select setval('heroclusters_id_seq', 0, false); alter sequence statisticheroesmax_id_seq minvalue 0 start with 0; select setval('statisticheroesmax_id_seq', 0, false); alter sequence statisticheroesmin_id_seq minvalue 0 start with 0; select setval('statisticheroesmin_id_seq', 0, false); alter sequence statisticheroesavg_id_seq minvalue 0 start with 0; select setval('statisticheroesavg_id_seq', 0, false);";
    log("succes", "Cеквенсеры сгенерированы");

    // ---- Schema generation ------------------------------------------------
    log("debug", "Процесс генерации схемы начат");
    log("info", "========Генерация словарей===============");
    // Emit one lookup ("dictionary") table per enum found in the service
    // entity namespace.
    string[] enumsTable = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum =>
        {
            log("info", _enum.FullName);
            return (converter.CreateDictionary(_enum.Name));
        })
        .ToArray();
    log("info", "=========================================");
    log("info", "========Генерация таблиц=================");
    Dictionary<string, string> tables = new Dictionary<string, string>();
    Dictionary<string, string> data = new Dictionary<string, string>();
    tables["heroesTable"] = converter.CreateTable("Hero", typeof(Hero), "Id", new List<Foreign>
    {
        new Foreign() { DataTable = "HeroGroup", Key = "_group", ForeignKey = "id" },
        new Foreign() { DataTable = "HeroSubGroup", Key = "subgroup", ForeignKey = "id" }
    });
    log("info", "Hero");
    tables["detailsTable"] = converter.CreateTable("HeroDetails", typeof(HeroDetails), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" },
        new Foreign() { DataTable = "Difficulty", Key = "difficulty", ForeignKey = "id" },
        new Foreign() { DataTable = "Franchise", Key = "franchise", ForeignKey = "id" },
        new Foreign() { DataTable = "ResourceType", Key = "resourcetype", ForeignKey = "id" }
    });
    log("info", "HeroDetails");
    tables["heroWebExtension"] = converter.CreateTable("HeroWebExtension", typeof(HeroWebExtension), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }
    });
    log("info", "HeroWebExtension");
    tables["mapTable"] = converter.CreateTable("Map", typeof(Map), "id", null);
    log("info", "Map");
    // Statistic and MatchupTable schemas are hand-written elsewhere.
    tables["statisticTable"] = StatisticSchema();
    log("info", "Statistic");
    tables["statisticShoMin"] = converter.CreateTable("StatisticHeroesMin", typeof(HeroStatisticItemMin), "id", null);
    log("info", "StatitsticHeroesMin");
    tables["statisticShoMax"] = converter.CreateTable("StatisticHeroesMax", typeof(HeroStatisticItemMax), "id", null);
    log("info", "StatitsticHeroesMax");
    tables["statisticShoAvg"] = converter.CreateTable("StatisticHeroesAvg", typeof(HeroStatisticItemAvg), "id", null);
    log("info", "StatitsticHeroesAvg");
    tables["statisticSho"] = converter.CreateTable("StatisticHeroes", typeof(HeroStatistic), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "StatisticHeroesMin", Key = "id_min", ForeignKey = "id" },
        new Foreign() { DataTable = "StatisticHeroesAvg", Key = "id_avg", ForeignKey = "id" },
        new Foreign() { DataTable = "StatisticHeroesMax", Key = "id_max", ForeignKey = "id" },
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }
    });
    log("info", "StatitsticHeroes");
    tables["matchupTable"] = MatchupTableSchema();
    log("info", "MatchupTable");
    tables["gaussian"] = converter.CreateTable("Gaussian", typeof(Gaussian), "id", null);
    log("info", "Gaussian");
    tables["probabilities"] = converter.CreateTable("GaussianProbabilities", typeof(Probabilities), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "Gaussian", Key = "gaussian_id", ForeignKey = "id" }
    });
    log("info", "GaussianProbabilities");
    tables["heroClusters"] = converter.CreateTable("HeroClusters", typeof(HeroClusters), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" },
        new Foreign() { DataTable = "Gaussian", Key = "gaussian", ForeignKey = "id" },
        new Foreign() { DataTable = "HeroSubGroup", Key = "subgroupcluster", ForeignKey = "id" }
    });
    log("info", "HeroClusters");
    tables["dataset"] = converter.CreateTable("Dataset", typeof(Dataset), "id", null);
    log("info", "Dataset");
    tables["trainingState"] = converter.CreateTable("TrainingState", typeof(LogInfo), "id", null);
    log("info", "TrainingState");
    tables["trainMeta"] = converter.CreateTable("TrainingMeta", typeof(TrainMeta), "id", null);
    log("info", "TrainingMeta");
    tables["network"] = converter.CreateTable("Network", typeof(Network), "id", new List<Foreign>
    {
        new Foreign() { DataTable = "Dataset", Key = "dataset_id", ForeignKey = "id" },
        new Foreign() { DataTable = "TrainingState", Key = "state_id", ForeignKey = "id" },
        new Foreign() { DataTable = "TrainingMeta", Key = "meta_id", ForeignKey = "id" },
    });
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Схема успешно сгенерирована");

    // ---- Data dump generation ---------------------------------------------
    log("info", "========Генерация дампа данных===========");
    string[] enumsData = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => converter.InsertDictionary(_enum))
        .ToArray();
    data["heroesTable"] = converter.Insert("Hero", heroes.All());
    log("info", "Hero");
    data["detailsTable"] = converter.Insert("HeroDetails", details.All());
    log("info", "HeroDetails");
    data["heroWebExtension"] = converter.Insert("HeroWebExtension", webExtension);
    log("info", "HeroWebExtension");
    data["mapTable"] = converter.Insert("Map", maps.All());
    log("info", "Map");
    data["statisticTable"] = StatisticData(stats);
    log("info", "Statistic");
    // hstats.All() returns a tuple: Item1 = avg, Item2 = min, Item3 = max
    // (inferred from the target tables below — TODO confirm against the service).
    data["statisticShoMin"] = converter.Insert("StatisticHeroesMin", hstats.All().Item2);
    log("info", "StatisticHeroesMin");
    data["statisticShoMax"] = converter.Insert("StatisticHeroesMax", hstats.All().Item3);
    log("info", "StatisticHeroesMax");
    data["statisticShoAvg"] = converter.Insert("StatisticHeroesAvg", hstats.All().Item1);
    log("info", "StatisticHeroesAvg");
    data["statisticSho"] = HeroesStatisticData(hstats);
    log("info", "HeroesStatistic");
    data["matchupTable"] = MatchupData(matchups, heroes.Count());
    // Probabilities rows get sequential synthetic ids via probId.
    int probId = 0;
    data["gaussian"] = converter.Insert("Gaussian", clusters.Select(x => x.Gaussian));
    log("info", "Gaussian");
    data["probabilities"] = converter.Insert("GaussianProbabilities", clusters.
        Select(x => x.Gaussian.Probability.
            Select(y => new Probabilities()
            {
                id = probId++,
                value = y,
                gaussian_id = x.Id,
            })).SelectMany(z => z));
    log("info", "GaussianProbabilities");
    data["heroClusters"] = converter.Insert("HeroClusters", clusters.All());
    log("info", "HeroClusters");
    data["dataset"] = converter.Insert("Dataset", set);
    log("info", "Dataset");

    // ---- Training reports: split into regular and "Best" report files ----
    // File creation ticks serve as stable ids linking state/meta/network rows.
    string[] trainingStatesAll = Directory
        .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories);
    string[] trainingStates = trainingStatesAll
        .Where(x => !x.Contains("Best") && x.Contains("\\Report\\"))
        .ToArray();
    string[] trainingStatesBest = trainingStatesAll
        .Where(x => x.Contains("Best") && x.Contains("\\Report\\"))
        .ToArray();
    long[] traingingStateIds = trainingStates
        .Select(file => File.GetCreationTime(file).Ticks)
        .ToArray();
    long[] traingingStateIdsBest = trainingStatesBest
        .Select(file => File.GetCreationTime(file).Ticks)
        .ToArray();
    var trainigsStateData = trainingStates
        .Select(x => File.ReadAllText(x))
        .Select(json => ((Dictionary<string, dynamic>)JSONWebParser.Load(json)))
        .Select((obj, index) => new
        {
            id = traingingStateIds[index],
            error = (double)obj["error"],
            iteration = (int)obj["iteration"],
            percent = (double)obj["percent"],
            validError = (double)obj["validError"],
            validPercent = (double)obj["validPercent"]
        })
        .Concat(
            trainingStatesBest
                .Select(x => File.ReadAllText(x))
                .Select(json => ((Dictionary<string, dynamic>)JSONWebParser.Load(json)))
                .Select((obj, index) => new
                {
                    id = traingingStateIdsBest[index],
                    error = (double)obj["error"],
                    iteration = (int)obj["iteration"],
                    percent = (double)obj["percent"],
                    validError = (double)obj["validError"],
                    validPercent = (double)obj["validPercent"]
                }));
    data["trainingState"] = converter.Insert("TrainingState", trainigsStateData);
    log("info", "TrainingState");

    // ---- Network dumps (non-report files) --------------------------------
    // NOTE(review): this re-scans the same directory as trainingStatesAll
    // above; the extra GetFiles call is redundant.
    string[] networksAll = Directory
        .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories)
        .ToArray();
    string[] networks = networksAll
        .Where(x => !x.Contains("Best") && !x.Contains("\\Report\\"))
        .ToArray();
    string[] networksBest = networksAll
        .Where(x => x.Contains("Best") && !x.Contains("\\Report\\"))
        .ToArray();
    JavaScriptSerializer serializer = new JavaScriptSerializer();
    // NOTE(review): traingingStateIds was built from the *Report* file list,
    // but here index walks the *networks* list — the two arrays are only in
    // sync if every network has exactly one matching report and GetFiles
    // returns them in the same order; verify, otherwise ids are mismatched
    // (or this may throw IndexOutOfRange).
    var networksMeta = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize<NetworkTuple>(json).Meta)
        .Select((obj, index) => new
        {
            Alias = obj.Alias,
            ClusterPath = obj.ClusterPath,
            Name = obj.Name,
            Description = obj.Description,
            Id = traingingStateIds[index]
        })
        .Concat(
            networksBest
                .Select(x => File.ReadAllText(x))
                .Select(json => serializer.Deserialize<NetworkTuple>(json).Meta)
                .Select((obj, index) => new
                {
                    Alias = obj.Alias,
                    ClusterPath = obj.ClusterPath,
                    Name = obj.Name,
                    Description = obj.Description,
                    Id = traingingStateIdsBest[index]
                }));
    data["trainingMeta"] = converter.Insert("TrainingMeta", networksMeta);
    log("info", "TrainingMeta");
    // Same index-alignment caveat as networksMeta above.
    var networksData = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize<NetworkTuple>(json).Network)
        .Select((x, index) => new Network()
        {
            dataset_id = set.id,
            data = new Json(File.ReadAllText(networks[index])),
            state_id = traingingStateIds[index],
            meta_id = traingingStateIds[index],
            isBest = false,
            id = traingingStateIds[index]
        })
        .Concat(
            networksBest
                .Select(x => File.ReadAllText(x))
                .Select(json => serializer.Deserialize<NetworkTuple>(json).Network)
                .Select((x, index) => new Network()
                {
                    dataset_id = set.id,
                    data = new Json(File.ReadAllText(networksBest[index])),
                    state_id = traingingStateIdsBest[index],
                    meta_id = traingingStateIdsBest[index],
                    isBest = true,
                    id = traingingStateIdsBest[index]
                }));
    data["network"] = converter.Insert("Network", networksData);
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Дамп данных успешно сгенерирован");
    log("succes", "Схема успешно сгенерирована");

    // ---- Write the final SQL dumps ---------------------------------------
    File.WriteAllText("./Database/create.sql", string.Join("\n", enumsTable) + string.Join("\n", tables.Select(x => x.Value).ToArray()));
    File.WriteAllText("./Database/insert.sql", sequensers + "\n\n" + string.Join("\n", enumsData) + string.Join("\n", data.Select(x => x.Value).ToArray()));
}
/// <summary>
/// Web-parser entry point: downloads (with on-disk HTML caching) hero detail
/// pages, icon and image pages, parses them, and writes HeroDetails.json and
/// HeroWebExtension.json next to the hero schema.
/// </summary>
static void Main(string[] args)
{
    // NOTE(security): Ssl3 is obsolete and insecure; it is kept here only in
    // case the scraped site still requires it — consider removing it.
    ServicePointManager.SecurityProtocol = SecurityProtocolType.Ssl3 | SecurityProtocolType.Tls | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls12;
    string HTML = "";

    // Ensure cache and output directories exist.
    if (!Directory.Exists("Cache")) { Directory.CreateDirectory("Cache"); }
    if (!Directory.Exists("./Source/Images")) { Directory.CreateDirectory("./Source/Images"); }
    if (!Directory.Exists("./Source/Icons")) { Directory.CreateDirectory("./Source/Icons"); }
    if (!Directory.Exists("./Source/Hero")) { Directory.CreateDirectory("./Source/Hero"); }
    if (!Directory.Exists("./Source/Portrait")) { Directory.CreateDirectory("./Source/Portrait"); }

    log("debug", "WEB Парсер запущен");
    if (!File.Exists("./Source/Hero/Hero.json"))
    {
        log("error", "Не найден исходный файл схемы героев по пути ./Source/Hero/Hero.json");
        // BUG FIX: the original logged the error but fell through to
        // Heroes.Load(...), which would then throw on the missing file.
        return;
    }
    Heroes.Load("./Source/Hero/Hero.json");

    if (USE_HERO_DETAILS)
    {
        Details = new HeroDetails[Heroes.Count()];
        WebExtension = new HeroWebExtension[Heroes.Count()];
        log("succes", "Схема героев успешно загружена");
        log("debug", "Парсинг деталей героев");
        // Download (or reuse cached copy of) each hero's detail page and parse it.
        for (int i = 0; i < Heroes.Count(); i++)
        {
            temp = i;
            var Hero = Heroes.Find(i);
            var path = HeroTemplateURL + Hero.Name;
            Caching(HeroTemplateURL + Hero.Name, $"./Cache/{Hero.Name}.html");
            log("debug", "Чтение Data:" + Hero.Name);
            HTML = File.ReadAllText($"./Cache/{Hero.Name}.html", Encoding.Default);
            log("succes", "Считан Data:" + Hero.Name);
            log("debug", "Парсинг " + Hero.Name + " начат");
            Details[i] = ParseDetails(HTML);
            WebExtension[i] = new HeroWebExtension(i);
            WebExtension[i].DetailsUrl = path;
            log("info", Details[i].ToString());
            log("succes", "Парсинг " + Hero.Name + " завершен");
        }
    }

    if (USE_HERO_ICONS)
    {
        Caching(IconsURL, "./Cache/Icons.html");
        log("debug", "Чтение Icons");
        HTML = File.ReadAllText("./Cache/Icons.html");
        log("succes", "Считан Icons");
        log("debug", "Парсинг Icons начат");
        ParseIcons(HTML);
        log("succes", "Парсинг Icons завершен");
    }

    // Image/model pages are parsed unconditionally for every hero.
    log("debug", "Парсинг изображений");
    for (int i = 0; i < Heroes.Count(); i++)
    {
        var Hero = Heroes.Find(i);
        Caching(ImageTemplateURL + Hero.Name, $"./Cache/{Hero.Name}_Large.html");
        log("debug", $"Чтение {Hero.Name}_Large");
        HTML = File.ReadAllText($"./Cache/{Hero.Name}_Large.html", Encoding.Default);
        log("succes", $"Считан {Hero.Name}_Large");
        log("debug", $"Парсинг {Hero.Name}_Large");
        ParseImages(HTML, Hero.Name);
        ParseModel(HTML, Hero.Name);
        log("succes", $"Парсинг {Hero.Name}_Large завершен");
    }
    log("succes", "Парсинг изображений завершен");
    log("succes", "Парсинг деталей героев завершен");

    // NOTE(review): when USE_HERO_DETAILS is false, Details/WebExtension are
    // never assigned here; whether Save handles nulls depends on JSonParser —
    // verify before running with that flag off.
    var outputDetails = JSonParser.Save(Details, typeof(HeroDetails[]));
    var outputExtensions = JSonParser.Save(WebExtension, typeof(HeroWebExtension[]));
    File.WriteAllText("./Source/Hero/HeroDetails.json", outputDetails);
    File.WriteAllText("./Source/Hero/HeroWebExtension.json", outputExtensions);
}
/// <summary>
/// Builds a de-duplicated neural-network dataset: identical encoded rows are
/// merged into one entry keyed by the SByte[] vector, accumulating the sum of
/// target values (Item1) and the number of occurrences (Item2) so the caller
/// can average them. Command-line handling is done by <c>base.Run</c> /
/// <c>Validate</c>.
/// </summary>
public override void Run(string[] args)
{
    base.Run(args);
    Validate();

    if (filter)
    {
        Console.WriteLine("Фильтр для образов в диапазоне [0.4,0.6] включен");
        Console.WriteLine("Фильтр для случайных образов включен");
    }
    Console.WriteLine($"HERO_SUBGROUP_COUNT = {HERO_SUBGROUP_COUNT}");

    // Load the GUID -> index mapping and count hero entries
    // (keys look like "Hero#<rest>").
    guidMapper = (Dictionary<string, int>)JSonParser.Load(
        File.ReadAllText(guidInput), typeof(Dictionary<string, int>));
    foreach (var it in guidMapper)
    {
        if (it.Key.Split('#')[0] == "Hero")
        {
            heroCount++;
        }
    }
    Console.WriteLine($"HERO_COUNT = {heroCount}");

    heroService = new HeroService();
    heroService.Load(heroInput);

    // Key: encoded row; value: (sum of targets, occurrence count).
    // ArrayEqualityComparer compares SByte[] keys by content, not reference.
    Dictionary<SByte[], Tuple<double, int>> nnDataset =
        new Dictionary<SByte[], Tuple<double, int>>(new ArrayEqualityComparer());
    OpenSource(input);
    object data;
    bool wasPrint = false;
    while ((data = ReadData()) != null)
    {
        var line = ParseData(data);
        if (line == null)
        {
            continue;
        }
        // FIX: single TryGetValue lookup instead of the original
        // ContainsKey + indexer pair (two hash lookups per duplicate row).
        Tuple<double, int> prev;
        if (nnDataset.TryGetValue(line.Item1, out prev))
        {
            nnDataset[line.Item1] = new Tuple<double, int>(prev.Item1 + line.Item2, prev.Item2 + 1);
        }
        else
        {
            nnDataset[line.Item1] = new Tuple<double, int>(line.Item2, 1);
        }
        // Progress print is guarded by wasPrint because duplicate rows leave
        // Count unchanged, so "Count % 100000 == 0" can hold across many
        // consecutive iterations.
        if (nnDataset.Count % 100000 == 0 && !wasPrint)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
            wasPrint = true;
        }
        if (nnDataset.Count % 100000 == 1)
        {
            wasPrint = false;
        }
    }
    Save(output, nnDataset, null);
}