/// <summary>
/// Builds the hero alias map: optionally merges aliases previously saved to
/// <c>aOutput</c>, registers the id of every hero parsed by <c>hParser</c>,
/// resolves the alias list read from the input file (via <c>ReadData</c>)
/// against known hero names, and writes the resulting name-to-id map back to
/// <c>aOutput</c> as JSON.
/// </summary>
/// <param name="args">Command-line arguments, forwarded to the base runner and to <c>hParser</c>.</param>
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    hParser.Run(args);

    if (merge)
    {
        Console.WriteLine($"Читаю псевдонимы из {aOutput}");
        // FIX: JSONWebParser.Load expects JSON *text*, not a file path — every
        // other call site in this file passes File.ReadAllText(...). The original
        // passed the path string itself. Also fail loudly with a clear message
        // instead of a NullReferenceException when the file is not a JSON object
        // (the original 'as' cast produced null and crashed on the foreach).
        if (!(JSONWebParser.Load(System.IO.File.ReadAllText(aOutput)) is Dictionary<string, object> dict))
        {
            throw new InvalidOperationException($"Не удалось прочитать псевдонимы из {aOutput}");
        }
        foreach (var it in dict)
        {
            AddValue(mapper, it.Key, (int)it.Value);
        }
        Console.WriteLine($"Закончил чтение псевдонимов из файла");
    }

    // Register every hero's canonical name under its id.
    foreach (var it in hParser.HeroResult)
    {
        AddValue(mapper, it.Name, it.Id);
    }

    // Attach aliases: each parsed entry maps a known hero name to its alias list.
    // Unknown hero names are reported and skipped rather than aborting the run.
    object[] aliases = (object[])ReadData();
    foreach (var obj in aliases)
    {
        var pair = ParseData(obj);
        if (!mapper.TryGetValue(pair.Key, out int id))
        {
            Console.WriteLine("Неизвестный герой {0}", pair.Key);
            continue;
        }
        foreach (string alias in pair.Value)
        {
            AddValue(mapper, alias, id);
        }
    }

    System.IO.File.WriteAllText(aOutput, JSonParser.Save(mapper, mapper.GetType()));
}
/// <summary>
/// Entry point of the schema-migration tool. Loads every source JSON dataset,
/// generates a PostgreSQL schema dump (./Database/create.sql) and a data dump
/// (./Database/insert.sql) via the PostegresConverter ORM.
/// NOTE(review): all Russian log messages, the "succes" level tag and the
/// "PostegreSQL" spelling are runtime strings and are preserved verbatim.
/// </summary>
static void Main(string[] args)
{
    log("debug", "Процесс миграции схемы запущен");
    log("debug", "Дамп будет создан для базы данных PostegreSQL");

    // Ensure output directories exist before any file is written.
    if (!Directory.Exists("./Database")) { Directory.CreateDirectory("./Database"); }
    if (!Directory.Exists("./Dataset")) { Directory.CreateDirectory("./Dataset"); }

    // ---------- Load every source JSON file into its service. ----------
    HeroService heroes = new HeroService();
    log("debug", "Загрузка Hero.json");
    heroes.Load("./Source/Hero/Hero.json");
    log("succes", "Hero.json Загружен");

    HeroDetailsService details = new HeroDetailsService();
    log("debug", "Загрузка HeroDetails.json");
    details.Load("./Source/Hero/HeroDetails.json");
    log("succes", "HeroDetails.json Загружен");

    HeroClustersSevice clusters = new HeroClustersSevice();
    log("debug", "Загрузка HeroClusters.json");
    clusters.Load("./Source/Hero/HeroClusters.json");
    log("succes", "HeroClusters.json Загружен");

    // Web-extension records have no dedicated service; parsed directly to an array.
    log("debug", "Загрузка HeroWebExtension.json");
    HeroWebExtension[] webExtension = (HeroWebExtension[])JSonParser.Load(File.ReadAllText("./Source/Hero/HeroWebExtension.json"), typeof(HeroWebExtension[]));
    log("succes", "HeroWebExtension.json Загружен");

    MapService maps = new MapService();
    log("debug", "Загрузка Map.json");
    maps.Load("./Source/Map/Map.json");
    log("succes", "Map.json Загружен");

    StatisticService stats = new StatisticService();
    log("debug", "Загрузка Statistic.json");
    stats.Load("./Source/Replay/Statistic.json");
    log("succes", "Statistic.json Загружен");

    HeroStatisticService hstats = new HeroStatisticService();
    log("debug", "Загрузка Statistic_sho.json");
    hstats.Load("./Source/Replay/Statistic_sho.json");
    log("succes", "Statistic_sho.json Загружен");

    MatchupService matchups = new MatchupService();
    log("debug", "Загрузка MatchupTable.json");
    matchups.Load("./Source/Replay/MatchupTable.json");
    log("succes", "MatchupTable.json Загружен");

    log("debug", "Формирование датасета начато");
    Dataset set = MakeDataset();
    log("succes", "Датасета сформирован");

    // ---------- Initialize the ORM and its column-name remapping. ----------
    log("debug", "Инициализация PostegreSQL Converter ORM");
    PostegresConverter converter = new PostegresConverter();
    log("info", "========Конвертация ключей===============");
    log("info", "group => _group");
    log("info", "min_id => id_min");
    log("info", "max_id => id_max");
    log("info", "avg_id => id_avg");
    // "group" is a reserved SQL keyword; the id_* renames match the column
    // names used by the Foreign keys declared below.
    converter.CustomNameMapper["group"] = "_group";
    converter.CustomNameMapper["min_id"] = "id_min";
    converter.CustomNameMapper["max_id"] = "id_max";
    converter.CustomNameMapper["avg_id"] = "id_avg";
    log("info", "=========================================");
    log("succes", "PostegreSQL Converter ORM инициалирован");

    // Sequence-reset SQL prepended to insert.sql so explicit ids start at 0.
    log("debug", "Генерация секвенсеров");
    string sequensers = @" alter sequence gaussian_id_seq minvalue 0 start with 0; select setval('gaussian_id_seq', 0, false); alter sequence heroclusters_id_seq minvalue 0 start with 0; select setval('heroclusters_id_seq', 0, false); alter sequence statisticheroesmax_id_seq minvalue 0 start with 0; select setval('statisticheroesmax_id_seq', 0, false); alter sequence statisticheroesmin_id_seq minvalue 0 start with 0; select setval('statisticheroesmin_id_seq', 0, false); alter sequence statisticheroesavg_id_seq minvalue 0 start with 0; select setval('statisticheroesavg_id_seq', 0, false);";
    log("succes", "Cеквенсеры сгенерированы");

    // ---------- Schema generation. ----------
    log("debug", "Процесс генерации схемы начат");
    log("info", "========Генерация словарей===============");
    // Every enum in HoTS_Service.Entity.Enum becomes a lookup ("dictionary") table.
    string[] enumsTable = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => { log("info", _enum.FullName); return (converter.CreateDictionary(_enum.Name)); })
        .ToArray();
    log("info", "=========================================");

    log("info", "========Генерация таблиц=================");
    // tables: DDL fragments; data: INSERT fragments. Keys are only used to
    // join the values into one file at the end.
    Dictionary<string, string> tables = new Dictionary<string, string>();
    Dictionary<string, string> data = new Dictionary<string, string>();

    tables["heroesTable"] = converter.CreateTable("Hero", typeof(Hero), "Id", new List<Foreign> { new Foreign() { DataTable = "HeroGroup", Key = "_group", ForeignKey = "id" }, new Foreign() { DataTable = "HeroSubGroup", Key = "subgroup", ForeignKey = "id" } });
    log("info", "Hero");
    tables["detailsTable"] = converter.CreateTable("HeroDetails", typeof(HeroDetails), "id", new List<Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }, new Foreign() { DataTable = "Difficulty", Key = "difficulty", ForeignKey = "id" }, new Foreign() { DataTable = "Franchise", Key = "franchise", ForeignKey = "id" }, new Foreign() { DataTable = "ResourceType", Key = "resourcetype", ForeignKey = "id" } });
    log("info", "HeroDetails");
    tables["heroWebExtension"] = converter.CreateTable("HeroWebExtension", typeof(HeroWebExtension), "id", new List<Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" } });
    log("info", "HeroWebExtension");
    tables["mapTable"] = converter.CreateTable("Map", typeof(Map), "id", null);
    log("info", "Map");
    tables["statisticTable"] = StatisticSchema();
    log("info", "Statistic");
    tables["statisticShoMin"] = converter.CreateTable("StatisticHeroesMin", typeof(HeroStatisticItemMin), "id", null);
    log("info", "StatitsticHeroesMin");
    tables["statisticShoMax"] = converter.CreateTable("StatisticHeroesMax", typeof(HeroStatisticItemMax), "id", null);
    log("info", "StatitsticHeroesMax");
    tables["statisticShoAvg"] = converter.CreateTable("StatisticHeroesAvg", typeof(HeroStatisticItemAvg), "id", null);
    log("info", "StatitsticHeroesAvg");
    tables["statisticSho"] = converter.CreateTable("StatisticHeroes", typeof(HeroStatistic), "id", new List<Foreign> { new Foreign() { DataTable = "StatisticHeroesMin", Key = "id_min", ForeignKey = "id" }, new Foreign() { DataTable = "StatisticHeroesAvg", Key = "id_avg", ForeignKey = "id" }, new Foreign() { DataTable = "StatisticHeroesMax", Key = "id_max", ForeignKey = "id" }, new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" } });
    log("info", "StatitsticHeroes");
    tables["matchupTable"] = MatchupTableSchema();
    log("info", "MatchupTable");
    tables["gaussian"] = converter.CreateTable("Gaussian", typeof(Gaussian), "id", null);
    log("info", "Gaussian");
    tables["probabilities"] = converter.CreateTable("GaussianProbabilities", typeof(Probabilities), "id", new List<Foreign> { new Foreign() { DataTable = "Gaussian", Key = "gaussian_id", ForeignKey = "id" } });
    log("info", "GaussianProbabilities");
    tables["heroClusters"] = converter.CreateTable("HeroClusters", typeof(HeroClusters), "id", new List<Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }, new Foreign() { DataTable = "Gaussian", Key = "gaussian", ForeignKey = "id" }, new Foreign() { DataTable = "HeroSubGroup", Key = "subgroupcluster", ForeignKey = "id" } });
    log("info", "HeroClusters");
    tables["dataset"] = converter.CreateTable("Dataset", typeof(Dataset), "id", null);
    log("info", "Dataset");
    tables["trainingState"] = converter.CreateTable("TrainingState", typeof(LogInfo), "id", null);
    log("info", "TrainingState");
    // NOTE(review): key is "trainMeta" here but "trainingMeta" in the data
    // dictionary below; harmless (keys only organize fragments) but inconsistent.
    tables["trainMeta"] = converter.CreateTable("TrainingMeta", typeof(TrainMeta), "id", null);
    log("info", "TrainingMeta");
    tables["network"] = converter.CreateTable("Network", typeof(Network), "id", new List<Foreign> { new Foreign() { DataTable = "Dataset", Key = "dataset_id", ForeignKey = "id" }, new Foreign() { DataTable = "TrainingState", Key = "state_id", ForeignKey = "id" }, new Foreign() { DataTable = "TrainingMeta", Key = "meta_id", ForeignKey = "id" }, });
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Схема успешно сгенерирована");

    // ---------- Data dump generation. ----------
    log("info", "========Генерация дампа данных===========");
    string[] enumsData = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => converter.InsertDictionary(_enum))
        .ToArray();
    data["heroesTable"] = converter.Insert("Hero", heroes.All());
    log("info", "Hero");
    data["detailsTable"] = converter.Insert("HeroDetails", details.All());
    log("info", "HeroDetails");
    data["heroWebExtension"] = converter.Insert("HeroWebExtension", webExtension);
    log("info", "HeroWebExtension");
    data["mapTable"] = converter.Insert("Map", maps.All());
    log("info", "Map");
    data["statisticTable"] = StatisticData(stats);
    log("info", "Statistic");
    // hstats.All() returns a tuple; Item1/Item2/Item3 appear to be avg/min/max
    // respectively, judging by the target tables — TODO confirm against the service.
    data["statisticShoMin"] = converter.Insert("StatisticHeroesMin", hstats.All().Item2);
    log("info", "StatisticHeroesMin");
    data["statisticShoMax"] = converter.Insert("StatisticHeroesMax", hstats.All().Item3);
    log("info", "StatisticHeroesMax");
    data["statisticShoAvg"] = converter.Insert("StatisticHeroesAvg", hstats.All().Item1);
    log("info", "StatisticHeroesAvg");
    data["statisticSho"] = HeroesStatisticData(hstats);
    log("info", "HeroesStatistic");
    data["matchupTable"] = MatchupData(matchups, heroes.Count());
    // probId assigns sequential ids across all probability rows via the
    // stateful closure below (relies on the enumerable being enumerated once).
    int probId = 0;
    data["gaussian"] = converter.Insert("Gaussian", clusters.Select(x => x.Gaussian));
    log("info", "Gaussian");
    data["probabilities"] = converter.Insert("GaussianProbabilities", clusters.Select(x => x.Gaussian.Probability.Select(y => new Probabilities() { id = probId++, value = y, gaussian_id = x.Id, })).SelectMany(z => z));
    log("info", "GaussianProbabilities");
    data["heroClusters"] = converter.Insert("HeroClusters", clusters.All());
    log("info", "HeroClusters");
    data["dataset"] = converter.Insert("Dataset", set);
    log("info", "Dataset");

    // ---------- Training reports: ./Source/Network/**/Report/*.json ----------
    // "Best" in the path separates best-run reports from ordinary ones.
    // NOTE(review): "\\Report\\" assumes Windows path separators from GetFiles.
    string[] trainingStatesAll = Directory.GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories);
    string[] trainingStates = trainingStatesAll.Where(x => !x.Contains("Best") && x.Contains("\\Report\\")).ToArray();
    string[] trainingStatesBest = trainingStatesAll.Where(x => x.Contains("Best") && x.Contains("\\Report\\")).ToArray();
    // File-creation ticks double as synthetic primary keys for reports.
    long[] traingingStateIds = trainingStates.Select(file => File.GetCreationTime(file).Ticks).ToArray();
    long[] traingingStateIdsBest = trainingStatesBest.Select(file => File.GetCreationTime(file).Ticks).ToArray();
    var trainigsStateData = trainingStates
        .Select(x => File.ReadAllText(x))
        .Select(json => ((Dictionary<string, dynamic>)JSONWebParser.Load(json)))
        .Select((obj, index) => new { id = traingingStateIds[index], error = (double)obj["error"], iteration = (int)obj["iteration"], percent = (double)obj["percent"], validError = (double)obj["validError"], validPercent = (double)obj["validPercent"] })
        .Concat(trainingStatesBest
            .Select(x => File.ReadAllText(x))
            .Select(json => ((Dictionary<string, dynamic>)JSONWebParser.Load(json)))
            .Select((obj, index) => new { id = traingingStateIdsBest[index], error = (double)obj["error"], iteration = (int)obj["iteration"], percent = (double)obj["percent"], validError = (double)obj["validError"], validPercent = (double)obj["validPercent"] }));
    data["trainingState"] = converter.Insert("TrainingState", trainigsStateData);
    log("info", "TrainingState");

    // ---------- Network dumps: the non-Report json files. ----------
    string[] networksAll = Directory.GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories).ToArray();
    string[] networks = networksAll.Where(x => !x.Contains("Best") && !x.Contains("\\Report\\")).ToArray();
    string[] networksBest = networksAll.Where(x => x.Contains("Best") && !x.Contains("\\Report\\")).ToArray();
    JavaScriptSerializer serializer = new JavaScriptSerializer();
    // NOTE(review): the meta/network rows below reuse traingingStateIds[index]
    // (creation ticks of the *report* files) as their ids — this assumes the
    // network files and report files enumerate in the same order and count;
    // verify, since a mismatch would throw IndexOutOfRange or mispair rows.
    var networksMeta = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize<NetworkTuple>(json).Meta)
        .Select((obj, index) => new { Alias = obj.Alias, ClusterPath = obj.ClusterPath, Name = obj.Name, Description = obj.Description, Id = traingingStateIds[index] })
        .Concat(networksBest
            .Select(x => File.ReadAllText(x))
            .Select(json => serializer.Deserialize<NetworkTuple>(json).Meta)
            .Select((obj, index) => new { Alias = obj.Alias, ClusterPath = obj.ClusterPath, Name = obj.Name, Description = obj.Description, Id = traingingStateIdsBest[index] }));
    data["trainingMeta"] = converter.Insert("TrainingMeta", networksMeta);
    log("info", "TrainingMeta");
    var networksData = networks
        .Select(x => File.ReadAllText(x))
        .Select(json => serializer.Deserialize<NetworkTuple>(json).Network)
        .Select((x, index) => new Network() { dataset_id = set.id, data = new Json(File.ReadAllText(networks[index])), state_id = traingingStateIds[index], meta_id = traingingStateIds[index], isBest = false, id = traingingStateIds[index] })
        .Concat(networksBest
            .Select(x => File.ReadAllText(x))
            .Select(json => serializer.Deserialize<NetworkTuple>(json).Network)
            .Select((x, index) => new Network() { dataset_id = set.id, data = new Json(File.ReadAllText(networksBest[index])), state_id = traingingStateIdsBest[index], meta_id = traingingStateIdsBest[index], isBest = true, id = traingingStateIdsBest[index] }));
    data["network"] = converter.Insert("Network", networksData);
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Дамп данных успешно сгенерирован");
    log("succes", "Схема успешно сгенерирована");

    // ---------- Write the two SQL dumps. ----------
    File.WriteAllText("./Database/create.sql", string.Join("\n", enumsTable) + string.Join("\n", tables.Select(x => x.Value).ToArray()));
    File.WriteAllText("./Database/insert.sql", sequensers + "\n\n" + string.Join("\n", enumsData) + string.Join("\n", data.Select(x => x.Value).ToArray()));
}
/// <summary>
/// Reads the file named by <c>aInput</c> and parses its contents as JSON.
/// </summary>
/// <returns>The object graph produced by <c>JSONWebParser.Load</c>.</returns>
protected override object ReadData()
{
    // Read the raw JSON text first, then hand it to the web-JSON parser.
    string json = File.ReadAllText(aInput);
    return JSONWebParser.Load(json);
}