// Unity lifecycle hook: registers the singleton, (re)builds the normalized
// projection curves when requested, wires the projection-type -> curves
// lookup table, and kicks off parsing.
void Start()
{
    INSTANCE = this;

    if (RegenCurves)
    {
        // Each projection gets a fresh pair of empty normalized curves.
        animCurveMercatorNormalized = new AnimationCurve[] { new AnimationCurve(), new AnimationCurve() };
        animCurvePetersNormalized = new AnimationCurve[] { new AnimationCurve(), new AnimationCurve() };
        animCurveAEQDNormalized = new AnimationCurve[] { new AnimationCurve(), new AnimationCurve() };
    }

    // Lookup table from projection type to its curve pair.
    CurveDictionary = new Dictionary<ProjectionType, AnimationCurve[]>
    {
        { ProjectionType.Mercator, animCurveMercatorNormalized },
        { ProjectionType.Peters, animCurvePetersNormalized },
        { ProjectionType.AEQD, animCurveAEQDNormalized }
    };

    Parse();
}
// Builds the NN dataset: loads the GUID -> index mapping, streams records
// from the input source, parses each into an SByte[] feature line and saves
// the accumulated list to `output`.
public override void Run(string[] args)
{
    base.Run(args);
    Validate();

    // GUID mapping produced by the previous pipeline stage.
    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));

    List <SByte[]> nnDataset = new List <SByte[]>();
    OpenSource(input);
    object data;
    while ((data = ReadData()) != null)
    {
        SByte[] line = ParseData(data);
        if (line == null)
        {
            continue;
        }
        nnDataset.Add(line);
        // Progress report every 100k parsed records.
        if (nnDataset.Count % 100000 == 0)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
        }
    }
    // FIX: nnDataset is already a List<SByte[]>; the original called
    // nnDataset.ToList() here, allocating a full redundant copy of the
    // dataset just before saving it.
    Save(output, nnDataset, null);
}
// Update is called once per frame.
// Tracks the mouse while a button is active and converts the pointer's
// normalized screen position to GPS coordinates via the Mercator mapping.
void Update()
{
    // Lazily resolve the parser component on first use
    // (Unity's overloaded == null check is kept deliberately).
    if (parser == null)
    {
        parser = GetComponent <JSonParser>();
    }

    bool leftHeld = Input.GetMouseButton(0);
    if (!leftHeld && !Input.GetMouseButtonDown(1))
    {
        return;
    }

    // Normalize the pixel position into [0,1] UV space before converting.
    Vector3 mouse = Input.mousePosition;
    Vector2 uv = new Vector2(mouse.x / Screen.width, mouse.y / Screen.height);
    cursorGPS = MercatorUVToGPS(uv, leftHeld);
}
// Builds the NN dataset with hero-subgroup/map context: loads the GUID
// mapping, counts the hero entries, loads the hero service, then streams
// and parses records into SByte[] lines and saves them to `output`.
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    Console.WriteLine($"HERO_SUBGROUP_COUNT = {HERO_SUBGROUP_COUNT}");
    Console.WriteLine($"MAP_COUNT = {MAP_COUNT}");

    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));
    // Count the "Hero#..." entries in the GUID mapping.
    foreach (var it in guidMapper)
    {
        if (it.Key.Split('#')[0] == "Hero")
        {
            heroCount++;
        }
    }
    Console.WriteLine($"HERO_COUNT = {heroCount}");

    heroService = new HeroService();
    heroService.Load(heroInput);

    List <SByte[]> nnDataset = new List <SByte[]>();
    OpenSource(input);
    object data;
    while ((data = ReadData()) != null)
    {
        var line = ParseData(data);
        if (line == null)
        {
            continue;
        }
        nnDataset.Add(line);
        // Progress report every 100k parsed records.
        if (nnDataset.Count % 100000 == 0)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
        }
    }
    // FIX: dropped the redundant nnDataset.ToList() copy — nnDataset is
    // already a List<SByte[]> (consistent with the same fix in the other
    // Run overloads).
    Save(output, nnDataset, null);
}
/// <summary>
/// Call the dev.helsinki.fi API and parse the relevant year data for the client.
/// </summary>
/// <returns>Year data as parsed by <see cref="JSonParser.ParseYearData"/>.</returns>
private static async Task <Dictionary <int, int> > GetYears()
{
    var apiAddress = Startup.Configuration["ApiAddresses:Years"];
    using (var client = new HttpClient())
    {
        InitializeClient(client, apiAddress);
        // HTTP GET against the base address configured above.
        var response = await client.GetStringAsync("");
        // Parse the raw JSON payload, then extract the year data.
        var result = JObject.Parse(response);
        var years = JSonParser.ParseYearData(result);
        return years;
    }
}
// Builds a global string->int GUID mapping ("Hero#<i>" / "Map#<i>") for every
// hero and map that actually occurs in the loaded statistics, then saves the
// mapping to `output`. Heroes/maps with zero matches are logged and skipped.
public override void Run(string[] args) {
    base.Run(args);
    Validate();
    OpenSource(input);
    Statistic[] data = (Statistic[])ReadData();
    // Hero and map reference lists (JSON, system default encoding).
    List <Hero> hData = (List <Hero>)JSonParser.Load(File.ReadAllText(heroInput, Encoding.Default), typeof(List <Hero>));
    List <Map> mData = (List <Map>)JSonParser.Load(File.ReadAllText(mapInput, Encoding.Default), typeof(List <Map>));
    Dictionary <string, int> mapper = new Dictionary <string, int>();
    int id = 0;
    ///First, add only the heroes that took part in at least one match.
    ///Record format: Hero#<hero id>#<team number> => global identifier
    for (int i = 0; i < data[0].Statictic.Matches.Length; i++) {
        // A hero is "present" if it has matches summed over all maps.
        if (data.Sum((x) => x.Statictic.Matches[i]) > 0) {
            mapper.Add($"Hero#{i}", id++);
            // NOTE(review): prints `id` (already incremented) and `id + 1` —
            // looks like a leftover from when two per-team entries were added
            // (see the commented-out Add below); verify the log is intentional.
            Console.WriteLine(hData[i] + " => " + id + ";" + (id + 1));
            // mapper.Add($"Hero#{i}#1", id++);
        } else {
            Console.WriteLine(hData[i] + " отсутствует в датасете");
        }
    }
    // Then add every map that has at least one recorded match.
    for (int i = 0; i < data.Length; i++) {
        if (data[i].Statictic.Ammount > 0) {
            mapper.Add($"Map#{i}", id++);
            Console.WriteLine(mData[i] + " => " + id);
        } else {
            Console.WriteLine(mData[i] + " отсутствует в датасете");
        }
    }
    Save(output, mapper, mapper.GetType());
}
// Builds/extends the name -> id alias mapping: optionally merges a previously
// saved alias file, seeds the mapper with canonical hero names from hParser,
// then folds in alias records from the data source. The merged result is
// written back to aOutput. Note the order matters: earlier AddValue calls
// establish entries that later alias lookups resolve against.
public override void Run(string[] args) {
    base.Run(args);
    Validate();
    // Run the hero parser first so HeroResult is populated below.
    hParser.Run(args);
    if (merge == true) {
        // Merge aliases persisted by a previous run.
        Console.WriteLine($"Читаю псевдонимы из {aOutput}");
        Dictionary <string, object> dict = JSONWebParser.Load(aOutput) as Dictionary <string, object>;
        foreach (var it in dict) {
            AddValue(mapper, it.Key, (int)it.Value);
        }
        Console.WriteLine($"Закончил чтение псевдонимов из файла");
    }
    // Seed the mapper with each hero's canonical name.
    foreach (var it in hParser.HeroResult) {
        AddValue(mapper, it.Name, it.Id);
    }
    object[] aliases = (object[])ReadData();
    foreach (var obj in aliases) {
        // pair: canonical hero name -> collection of alias strings.
        var pair = ParseData(obj);
        bool parse = mapper.TryGetValue(pair.Key, out int id);
        if (parse == false) {
            // Unknown hero — log and skip its aliases.
            Console.WriteLine("Неизвестный герой {0}", pair.Key);
            continue;
        }
        // Register every alias under the resolved id.
        foreach (string alias in pair.Value) {
            AddValue(mapper, alias, id);
        }
    }
    System.IO.File.WriteAllText(aOutput, JSonParser.Save(mapper, mapper.GetType()));
}
/// <summary>
/// Call the dev.helsinki.fi API and parse the relevant population data for the client.
/// </summary>
/// <param name="year">Year substituted into the configured API address template.</param>
/// <returns>Population data as parsed by <see cref="JSonParser.ParsePopulationData"/>.</returns>
private static async Task <Dictionary <string, string> > GetData(int year)
{
    var apiAddress = Startup.Configuration["ApiAddresses:Population"];
    using (var client = new HttpClient())
    {
        // The configured address is a format template parameterized by year.
        InitializeClient(client, String.Format(apiAddress, year));
        // HTTP GET against the initialized base address.
        var response = await client.GetStringAsync("");
        // Parse the JSON payload into the population dictionary.
        return JSonParser.ParsePopulationData(JObject.Parse(response));
    }
}
// Scrapes film reviews page by page from `url`, extracts author / film /
// date / title / text from each review and saves the collected dataset
// as JSON. Pages that fail to load or parse are skipped (best effort).
private static void Parse()
{
    int part = count / perpage;
    Bar = new ConsoleProgressBar(part);
    var ci = CultureInfo.CreateSpecificCulture("ru-RU");
    Func <string, string> tr = WebUtility.HtmlDecode;
    List <FilmEntity> dataset = new List <FilmEntity>();
    for (int i = 0; i < part; i++)
    {
        // If the resource is missing we get an exception and move on
        // to the next iteration.
        try
        {
            CQ cq = CQ.CreateFromUrl(url + "/page/" + i);
            var reviews = cq.Find(".response");
            foreach (var it in reviews)
            {
                var response = it.Cq();
                string author = tr(response.Find(Selectors.Author)[0].InnerHTML);
                // NOTE: this local `name` shadows the outer `name` (presumably
                // a class field) used by File.WriteAllText below — only inside
                // this loop body.
                string name = tr(response.Find(Selectors.Film)[0].InnerHTML);
                string dateRepr = tr(response.Find(Selectors.Date)[0].InnerHTML);
                // Dates are rendered with Russian month names, hence ru-RU.
                var date = DateTime.ParseExact(dateRepr, "d MMMM yyyy | HH:mm", ci);
                var title = tr(response.Find(Selectors.Title)[0].InnerHTML);
                var text = tr(response.Find(Selectors.Text)[0].ChildNodes[1].ChildNodes[0].InnerText);
                var entity = new FilmEntity(name, author, date, title, text, status);
                dataset.Add(entity);
            }
        }
        catch (Exception ex)
        {
            // FIX: the original `catch { }` swallowed every failure silently.
            // Keep the best-effort skip, but record which page failed and why.
            Console.WriteLine($"Страница {i} пропущена: {ex.Message}");
        }
        Bar.Update(i);
    }
    File.WriteAllText(name, JSonParser.Save(dataset, dataset.GetType()));
}
// Builds the NN dataset as string[] records; when `addition` is enabled each
// record also yields a mirrored "lose" record with the outcome bit flipped,
// doubling the dataset with the opposite perspective.
public override void Run(string[] args) {
    base.Run(args);
    Validate();
    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));
    // NOTE(review): HashSet<string[]> uses the default reference-equality
    // comparer, so records with identical contents are NOT deduplicated —
    // compare the ArrayEqualityComparer used by the aggregating Run overload.
    // Verify whether content-based dedup was intended here.
    HashSet <string[]> nnDataset = new HashSet <string[]>();
    OpenSource(input);
    object data;
    if (addition) {
        Console.WriteLine("Датасет будет дополнен обратными записями");
    }
    while ((data = ReadData()) != null) {
        // Number of columns in the raw record; the last column is the outcome.
        inputCount = ((object[])data).Length;
        string[] line = ParseData(data);
        if (line == null) {
            continue;
        }
        nnDataset.Add(line);
        if (addition == true) {
            // Mirror the record and invert the win/lose flag (last column).
            string[] reverseLine = ParseLoseData(data);
            reverseLine[inputCount - 1] = (1 - Int32.Parse(reverseLine[inputCount - 1])).ToString();
            nnDataset.Add(reverseLine);
        }
        // Progress report every 100k records.
        if (nnDataset.Count % 100000 == 0) {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
        }
    }
    Save(output, nnDataset.ToList(), null);
}
// Use this for initialization.
// Instantiates every configured shop item under this transform, persists the
// item state to JSON, then keeps only the items that have not been bought.
void Start()
{
    // FIX: removed an unused `JSonParser s = new JSonParser();` local that
    // was never read.
    List<GameObject> instantiated = new List<GameObject>();
    foreach (GameObject o in items)
    {
        GameObject clone = Instantiate(o);
        clone.transform.SetParent(this.transform);
        instantiated.Add(clone);
    }
    items = instantiated.ToArray();
    writeItemsToJson();
    // Filter out items already bought so only purchasable ones remain.
    List<GameObject> unbought = new List<GameObject>();
    for (int i = 0; i < items.Length; i++)
    {
        if (items[i].GetComponent<ShopItemScript>().bought == false)
        {
            unbought.Add(items[i]);
        }
    }
    items = unbought.ToArray();
}
// Parses the command-line style `args` field into positional keys and
// "name=value" pairs, derives a default output path ("<name>_o<ext>" next to
// the input) when none is given, then exports the loaded statistics in
// Excel-compatible CSV format.
public static void SaveTOExcelCSVFormat()
{
    List <string> Keys = new List <string>();
    Dictionary <string, string> NamedParam = new Dictionary <string, string>();
    foreach (string arg in args)
    {
        // Exactly one '=' makes a named pair; anything else is a plain key.
        string[] parts = arg.Split('=');
        if (parts.Length != 2)
        {
            Keys.Add(arg);
        }
        else
        {
            NamedParam[parts[0]] = parts[1];
        }
    }
    if (!NamedParam.ContainsKey("i"))
    {
        throw new Exception("Не инициализирован обязательный параметр i");
    }
    if (!File.Exists(NamedParam["i"]))
    {
        throw new FileNotFoundException($"{NamedParam["i"]} not found");
    }
    if (!NamedParam.ContainsKey("o"))
    {
        // Default output path: input name with an "_o" suffix, same folder.
        string input = NamedParam["i"];
        string fDir = Path.GetDirectoryName(input);
        string fName = Path.GetFileNameWithoutExtension(input);
        string fExt = Path.GetExtension(input);
        NamedParam["o"] = Path.Combine(fDir, String.Concat(fName, "_o", fExt));
    }
    Statistic[] statistic = (Statistic[])JSonParser.Load(File.ReadAllText(NamedParam["i"]), typeof(Statistic[]));
    SaveTOExcelCSVFormat(statistic, NamedParam["o"]);
}
// Web-scraper entry point: caches hero pages from the web (detail pages,
// icon sheet, large images/models), parses them, and writes the results to
// ./Source/Hero/HeroDetails.json and HeroWebExtension.json.
static void Main(string[] args) {
    // Enable legacy and modern TLS handshakes for the scraped hosts.
    ServicePointManager.SecurityProtocol = SecurityProtocolType.Ssl3 | SecurityProtocolType.Tls | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls12;
    string HTML = "";
    // Ensure the cache and output directory layout exists.
    if (!Directory.Exists("Cache")) { Directory.CreateDirectory("Cache"); }
    if (!Directory.Exists("./Source/Images")) { Directory.CreateDirectory("./Source/Images"); }
    if (!Directory.Exists("./Source/Icons")) { Directory.CreateDirectory("./Source/Icons"); }
    if (!Directory.Exists("./Source/Hero")) { Directory.CreateDirectory("./Source/Hero"); }
    if (!Directory.Exists("./Source/Portrait")) { Directory.CreateDirectory("./Source/Portrait"); }
    log("debug", "WEB Парсер запущен");
    // NOTE(review): a missing Hero.json is only logged — execution continues
    // and Heroes.Load below will still be called on the missing path; confirm
    // that is intended rather than an early exit.
    if (!File.Exists("./Source/Hero/Hero.json")) { log("error", "Не найден исходный файл схемы героев по пути ./Source/Hero/Hero.json"); }
    Heroes.Load("./Source/Hero/Hero.json");
    if (USE_HERO_DETAILS) {
        Details = new HeroDetails[Heroes.Count()];
        WebExtension = new HeroWebExtension[Heroes.Count()];
        log("succes", "Схема героев успешно загружена");
        log("debug", "Парсинг деталей героев");
        // Download (or reuse the cached copy of) each hero's detail page
        // and parse it into Details / WebExtension.
        for (int i = 0; i < Heroes.Count(); i++) {
            temp = i;
            var Hero = Heroes.Find(i);
            var path = HeroTemplateURL + Hero.Name;
            Caching(HeroTemplateURL + Hero.Name, $"./Cache/{Hero.Name}.html");
            log("debug", "Чтение Data:" + Hero.Name);
            HTML = File.ReadAllText($"./Cache/{Hero.Name}.html", Encoding.Default);
            log("succes", "Считан Data:" + Hero.Name);
            log("debug", "Парсинг " + Hero.Name + " начат");
            Details[i] = ParseDetails(HTML);
            WebExtension[i] = new HeroWebExtension(i);
            WebExtension[i].DetailsUrl = path;
            log("info", Details[i].ToString());
            log("succes", "Парсинг " + Hero.Name + " завершен");
        }
    }
    if (USE_HERO_ICONS) {
        // The icon sheet is a single page, cached and parsed as a whole.
        Caching(IconsURL, "./Cache/Icons.html");
        log("debug", "Чтение Icons");
        HTML = File.ReadAllText($"./Cache/Icons.html");
        log("succes", "Считан Icons");
        log("debug", "Парсинг Icons начат");
        ParseIcons(HTML);
        log("succes", "Парсинг Icons завершен");
    }
    log("debug", "Парсинг изображений");
    // Large image + model pages, one per hero.
    for (int i = 0; i < Heroes.Count(); i++) {
        var Hero = Heroes.Find(i);
        Caching(ImageTemplateURL + Hero.Name, $"./Cache/{Hero.Name}_Large.html");
        log("debug", $"Чтение {Hero.Name}_Large");
        HTML = File.ReadAllText($"./Cache/{Hero.Name}_Large.html", Encoding.Default);
        log("succes", $"Считан {Hero.Name}_Large");
        log("debug", $"Парсинг {Hero.Name}_Large");
        ParseImages(HTML, Hero.Name);
        ParseModel(HTML, Hero.Name);
        log("succes", $"Парсинг {Hero.Name}_Large завершен");
    }
    log("succes", "Парсинг изображений завершен");
    log("succes", "Парсинг деталей героев завершен");
    // Persist the parsed details and web extensions as JSON.
    var outputDetails = JSonParser.Save(Details, typeof(HeroDetails[]));
    var outputExtensions = JSonParser.Save(WebExtension, typeof(HeroWebExtension[]));
    File.WriteAllText("./Source/Hero/HeroDetails.json", outputDetails);
    File.WriteAllText("./Source/Hero/HeroWebExtension.json", outputExtensions);
}
// Aggregates per-player replay records into complete 10-player matches,
// accumulates per-hero min/avg/max statistics and per-map match statistics,
// and saves: the matchup table (Match -> win probability), the hero
// statistics file, and the per-map statistics file.
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    rParser.Run(args);
    Statistic[] stat = new Statistic[rParser.MapCount + 1];
    HeroStatisticItemAvg[] avgStat = new HeroStatisticItemAvg[1000];
    HeroStatisticItemMin[] minStat = new HeroStatisticItemMin[1000];
    HeroStatisticItemMax[] maxStat = new HeroStatisticItemMax[1000];
    InitializeHeroStat(avgStat, minStat, maxStat);
    OpenSource(inputFolder);
    object data;
    int i = 0;
    List <Tuple <int, int, HeroStatisticItem> > temps = new List <Tuple <int, int, HeroStatisticItem> >();
    Dictionary <Match, double> result = new Dictionary <Match, double>();
    Console.WriteLine("Парсинг матчей");
    int heroCount = 0;
    int lastMatchId = -1;
    while ((data = ReadData()) != null)
    {
        // r = (matchId, heroId, per-hero statistic record)
        var r = ParseData(data);
        if (r.Item1 == -1) { continue; }
        // Skip records belonging to an already-completed match.
        if (lastMatchId == r.Item1) { continue; }
        temps.Add(r);
        i++;
        r.Item3.sec = rParser.ReplayResult[temps[0].Item1].Item2;
        UpdateHeroStat(avgStat, minStat, maxStat, r);
        // Assemble the pieces once 10 player records are collected.
        if (i == 10)
        {
            i = 0;
            // All 10 records must belong to the same match id.
            bool flag = false;
            for (int j = 0; j < 9; j++)
            {
                if (temps[j].Item1 != temps[j + 1].Item1) { flag = true; break; }
            }
            if (flag == true) { temps.Clear(); continue; }
            Match m = new Match();
            m.Map = rParser.ReplayResult[temps[0].Item1].Item1;
            m.ProbabilityToWin = 1;
            int first_i = 0, second_i = 0;
            for (int j = 0; j < temps.Count; j++)
            {
                // Winners (winrate == 1) go to YourTeam, losers to EnemyTeam.
                if (temps[j].Item3.winrate == 1)
                {
                    m.YourTeam[first_i++] = temps[j].Item2;
                }
                else
                {
                    m.EnemyTeam[second_i++] = temps[j].Item2;
                }
                if (temps[j].Item2 > heroCount) { heroCount = temps[j].Item2; }
            }
            // The reversed matchup (teams swapped).
            Match temp_m = new Match() { YourTeam = m.EnemyTeam, EnemyTeam = m.YourTeam, Map = m.Map };
            // Try to add; if the record already exists,
            if (result.ContainsKey(m))
            {
                // average the probability.
                double used = result[m];
                double newer = m.ProbabilityToWin;
                double rez = (used + newer) / 2;
                result[m] = rez;
                Console.WriteLine(m.ToString() + " Уже существует");
            }
            // If a contradicting (reversed) record exists,
            else if (result.ContainsKey(temp_m))
            {
                // average its probability with the reversed probability of
                // the new match.
                // FIX: the original read `result[m]` here, but in this branch
                // only temp_m is a key, so the lookup always threw
                // KeyNotFoundException; read and update the existing reversed
                // record instead.
                double used = result[temp_m];
                double newer = 1 - m.ProbabilityToWin;
                double rez = (used + newer) / 2;
                result[temp_m] = rez;
            }
            else
            {
                result.Add(m, m.ProbabilityToWin);
                if (result.Count % 100000 == 0)
                {
                    Console.WriteLine("Уже обработано " + result.Count);
                }
            }
            lastMatchId = temps.Last().Item1;
            temps.Clear();
        }
    }
    ClearEmptyStat(ref avgStat, ref minStat, ref maxStat);
    ComputeAverage(avgStat);
    Tuple <HeroStatisticItemAvg[], HeroStatisticItemMin[], HeroStatisticItemMax[]> heroStat = new Tuple <HeroStatisticItemAvg[], HeroStatisticItemMin[], HeroStatisticItemMax[]>(avgStat, minStat, maxStat);
    Save(output, result, typeof(Dictionary <Match, double>));
    File.WriteAllText(statisticHeroOutput, JSonParser.Save(heroStat, typeof(Tuple <HeroStatisticItemAvg[], HeroStatisticItemMin[], HeroStatisticItemMax[]>)), Encoding.Default);
    Array.Resize(ref avgStat, 0);
    Array.Resize(ref minStat, 0);
    Array.Resize(ref maxStat, 0);
    // Per-map statistic containers sized by the largest hero id seen.
    for (int j = 0; j < stat.Length; j++)
    {
        stat[j] = new Statistic() { Statictic = new StatisticItem() { Matches = new int[heroCount + 1], Wins = new int[heroCount + 1] } };
    }
    Console.WriteLine("Расчет статистики матчей");
    ///Count how many matches were played on each map.
    foreach (var it in rParser.ReplayResult)
    {
        stat[it.Value.Item1].Statictic.Ammount++;
    }
    // Data collected — compute per-hero statistics per map.
    Console.WriteLine("Расчет статистики героев");
    foreach (var it in result)
    {
        Match cur = it.Key;
        for (int j = 0; j < 5; j++)
        {
            // Winning team counts toward both Matches and Wins.
            stat[cur.Map].Statictic.Matches[cur.YourTeam[j]]++;
            stat[cur.Map].Statictic.Wins[cur.YourTeam[j]]++;
        }
        for (int j = 0; j < 5; j++)
        {
            stat[cur.Map].Statictic.Matches[cur.EnemyTeam[j]]++;
        }
    }
    File.WriteAllText(statisticOutput, JSonParser.Save(stat, typeof(Statistic[])), Encoding.Default);
    Console.WriteLine("Успешно спарсено " + result.Count + " записей");
}
// Reads the whole statistics file (path from the "i" named parameter,
// system default encoding) and deserializes it into a Statistic[].
protected override object ReadData()
{
    string json = File.ReadAllText(NamedParam["i"], Encoding.Default);
    return JSonParser.Load(json, typeof(Statistic[]));
}
// Schema-migration entry point: loads every JSON source (heroes, maps,
// statistics, clusters, trained networks), then generates a PostgreSQL
// schema dump (./Database/create.sql) and a matching data dump
// (./Database/insert.sql) via the PostegresConverter ORM.
static void Main(string[] args) {
    log("debug", "Процесс миграции схемы запущен");
    log("debug", "Дамп будет создан для базы данных PostegreSQL");
    if (!Directory.Exists("./Database")) { Directory.CreateDirectory("./Database"); }
    if (!Directory.Exists("./Dataset")) { Directory.CreateDirectory("./Dataset"); }
    // ---- Load all JSON sources through their services. ----
    HeroService heroes = new HeroService();
    log("debug", "Загрузка Hero.json");
    heroes.Load("./Source/Hero/Hero.json");
    log("succes", "Hero.json Загружен");
    HeroDetailsService details = new HeroDetailsService();
    log("debug", "Загрузка HeroDetails.json");
    details.Load("./Source/Hero/HeroDetails.json");
    log("succes", "HeroDetails.json Загружен");
    HeroClustersSevice clusters = new HeroClustersSevice();
    log("debug", "Загрузка HeroClusters.json");
    clusters.Load("./Source/Hero/HeroClusters.json");
    log("succes", "HeroClusters.json Загружен");
    log("debug", "Загрузка HeroWebExtension.json");
    HeroWebExtension[] webExtension = (HeroWebExtension[]) JSonParser.Load(File.ReadAllText("./Source/Hero/HeroWebExtension.json"), typeof(HeroWebExtension[]));
    log("succes", "HeroWebExtension.json Загружен");
    MapService maps = new MapService();
    log("debug", "Загрузка Map.json");
    maps.Load("./Source/Map/Map.json");
    log("succes", "Map.json Загружен");
    StatisticService stats = new StatisticService();
    log("debug", "Загрузка Statistic.json");
    stats.Load("./Source/Replay/Statistic.json");
    log("succes", "Statistic.json Загружен");
    HeroStatisticService hstats = new HeroStatisticService();
    log("debug", "Загрузка Statistic_sho.json");
    hstats.Load("./Source/Replay/Statistic_sho.json");
    log("succes", "Statistic_sho.json Загружен");
    MatchupService matchups = new MatchupService();
    log("debug", "Загрузка MatchupTable.json");
    matchups.Load("./Source/Replay/MatchupTable.json");
    log("succes", "MatchupTable.json Загружен");
    log("debug", "Формирование датасета начато");
    Dataset set = MakeDataset();
    log("succes", "Датасета сформирован");
    // ---- Configure the converter: remap column names that clash with SQL. ----
    log("debug", "Инициализация PostegreSQL Converter ORM");
    PostegresConverter converter = new PostegresConverter();
    log("info", "========Конвертация ключей===============");
    log("info", "group => _group");
    log("info", "min_id => id_min");
    log("info", "max_id => id_max");
    log("info", "avg_id => id_avg");
    converter.CustomNameMapper["group"] = "_group";
    converter.CustomNameMapper["min_id"] = "id_min";
    converter.CustomNameMapper["max_id"] = "id_max";
    converter.CustomNameMapper["avg_id"] = "id_avg";
    log("info", "=========================================");
    log("succes", "PostegreSQL Converter ORM инициалирован");
    // ---- SQL preamble: reset the id sequences used by the generated tables. ----
    log("debug", "Генерация секвенсеров");
    string sequensers = @" alter sequence gaussian_id_seq minvalue 0 start with 0; select setval('gaussian_id_seq', 0, false); alter sequence heroclusters_id_seq minvalue 0 start with 0; select setval('heroclusters_id_seq', 0, false); alter sequence statisticheroesmax_id_seq minvalue 0 start with 0; select setval('statisticheroesmax_id_seq', 0, false); alter sequence statisticheroesmin_id_seq minvalue 0 start with 0; select setval('statisticheroesmin_id_seq', 0, false); alter sequence statisticheroesavg_id_seq minvalue 0 start with 0; select setval('statisticheroesavg_id_seq', 0, false);";
    log("succes", "Cеквенсеры сгенерированы");
    // ---- Schema generation: one dictionary table per enum, then entity tables. ----
    log("debug", "Процесс генерации схемы начат");
    log("info", "========Генерация словарей===============");
    string[] enumsTable = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => { log("info", _enum.FullName); return(converter.CreateDictionary(_enum.Name)); })
        .ToArray();
    log("info", "=========================================");
    log("info", "========Генерация таблиц=================");
    Dictionary <string, string> tables = new Dictionary <string, string>();
    Dictionary <string, string> data = new Dictionary <string, string>();
    tables["heroesTable"] = converter.CreateTable("Hero", typeof(Hero), "Id", new List <Foreign> { new Foreign() { DataTable = "HeroGroup", Key = "_group", ForeignKey = "id" }, new Foreign() { DataTable = "HeroSubGroup", Key = "subgroup", ForeignKey = "id" } });
    log("info", "Hero");
    tables["detailsTable"] = converter.CreateTable("HeroDetails", typeof(HeroDetails), "id", new List <Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }, new Foreign() { DataTable = "Difficulty", Key = "difficulty", ForeignKey = "id" }, new Foreign() { DataTable = "Franchise", Key = "franchise", ForeignKey = "id" }, new Foreign() { DataTable = "ResourceType", Key = "resourcetype", ForeignKey = "id" } });
    log("info", "HeroDetails");
    tables["heroWebExtension"] = converter.CreateTable("HeroWebExtension", typeof(HeroWebExtension), "id", new List <Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" } });
    log("info", "HeroWebExtension");
    tables["mapTable"] = converter.CreateTable("Map", typeof(Map), "id", null);
    log("info", "Map");
    tables["statisticTable"] = StatisticSchema();
    log("info", "Statistic");
    tables["statisticShoMin"] = converter.CreateTable("StatisticHeroesMin", typeof(HeroStatisticItemMin), "id", null);
    log("info", "StatitsticHeroesMin");
    tables["statisticShoMax"] = converter.CreateTable("StatisticHeroesMax", typeof(HeroStatisticItemMax), "id", null);
    log("info", "StatitsticHeroesMax");
    tables["statisticShoAvg"] = converter.CreateTable("StatisticHeroesAvg", typeof(HeroStatisticItemAvg), "id", null);
    log("info", "StatitsticHeroesAvg");
    tables["statisticSho"] = converter.CreateTable("StatisticHeroes", typeof(HeroStatistic), "id", new List <Foreign> { new Foreign() { DataTable = "StatisticHeroesMin", Key = "id_min", ForeignKey = "id" }, new Foreign() { DataTable = "StatisticHeroesAvg", Key = "id_avg", ForeignKey = "id" }, new Foreign() { DataTable = "StatisticHeroesMax", Key = "id_max", ForeignKey = "id" }, new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" } });
    log("info", "StatitsticHeroes");
    tables["matchupTable"] = MatchupTableSchema();
    log("info", "MatchupTable");
    tables["gaussian"] = converter.CreateTable("Gaussian", typeof(Gaussian) , "id", null);
    log("info", "Gaussian");
    tables["probabilities"] = converter.CreateTable("GaussianProbabilities", typeof(Probabilities) , "id", new List <Foreign> { new Foreign() { DataTable = "Gaussian", Key = "gaussian_id", ForeignKey = "id" } });
    log("info", "GaussianProbabilities");
    tables["heroClusters"] = converter.CreateTable("HeroClusters", typeof(HeroClusters) , "id", new List <Foreign> { new Foreign() { DataTable = "Hero", Key = "id", ForeignKey = "id" }, new Foreign() { DataTable = "Gaussian", Key = "gaussian", ForeignKey = "id" }, new Foreign() { DataTable = "HeroSubGroup", Key = "subgroupcluster", ForeignKey = "id" } });
    log("info", "HeroClusters");
    tables["dataset"] = converter.CreateTable("Dataset", typeof(Dataset), "id", null);
    log("info", "Dataset");
    tables["trainingState"] = converter.CreateTable("TrainingState", typeof(LogInfo), "id", null);
    log("info", "TrainingState");
    // NOTE(review): this key is "trainMeta" while the data dictionary below
    // uses "trainingMeta" — harmless because only the Values are joined into
    // the dumps, but the asymmetry is worth confirming.
    tables["trainMeta"] = converter.CreateTable("TrainingMeta", typeof(TrainMeta), "id", null);
    log("info", "TrainingMeta");
    tables["network"] = converter.CreateTable("Network", typeof(Network) , "id", new List <Foreign> { new Foreign() { DataTable = "Dataset", Key = "dataset_id", ForeignKey = "id" }, new Foreign() { DataTable = "TrainingState", Key = "state_id", ForeignKey = "id" }, new Foreign() { DataTable = "TrainingMeta", Key = "meta_id", ForeignKey = "id" }, });
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Схема успешно сгенерирована");
    // ---- Data dump generation: enum dictionary rows, then entity rows. ----
    log("info", "========Генерация дампа данных===========");
    string[] enumsData = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(t => t.GetTypes())
        .Where(t => t.IsEnum && t.Namespace == "HoTS_Service.Entity.Enum")
        .Select(_enum => converter.InsertDictionary(_enum))
        .ToArray();
    data["heroesTable"] = converter.Insert("Hero", heroes.All());
    log("info", "Hero");
    data["detailsTable"] = converter.Insert("HeroDetails", details.All());
    log("info", "HeroDetails");
    data["heroWebExtension"] = converter.Insert("HeroWebExtension", webExtension);
    log("info", "HeroWebExtension");
    data["mapTable"] = converter.Insert("Map", maps.All());
    log("info", "Map");
    data["statisticTable"] = StatisticData(stats);
    log("info", "Statistic");
    data["statisticShoMin"] = converter.Insert("StatisticHeroesMin", hstats.All().Item2);
    log("info", "StatisticHeroesMin");
    data["statisticShoMax"] = converter.Insert("StatisticHeroesMax", hstats.All().Item3);
    log("info", "StatisticHeroesMax");
    data["statisticShoAvg"] = converter.Insert("StatisticHeroesAvg", hstats.All().Item1);
    log("info", "StatisticHeroesAvg");
    data["statisticSho"] = HeroesStatisticData(hstats);
    log("info", "HeroesStatistic");
    data["matchupTable"] = MatchupData(matchups, heroes.Count());
    // Flatten every cluster's Gaussian probabilities into one row stream,
    // assigning sequential ids as we go.
    int probId = 0;
    data["gaussian"] = converter.Insert("Gaussian", clusters.Select(x => x.Gaussian));
    log("info", "Gaussian");
    data["probabilities"] = converter.Insert("GaussianProbabilities", clusters. Select(x => x.Gaussian.Probability. Select(y => new Probabilities() { id = probId++, value = y, gaussian_id = x.Id, })).SelectMany(z => z));
    log("info", "GaussianProbabilities");
    data["heroClusters"] = converter.Insert("HeroClusters", clusters.All());
    log("info", "HeroClusters");
    data["dataset"] = converter.Insert("Dataset", set);
    log("info", "Dataset");
    // ---- Training reports: regular runs + "Best" runs, keyed by file creation ticks. ----
    string[] trainingStatesAll = Directory .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories);
    string[] trainingStates = trainingStatesAll .Where(x => !x.Contains("Best") && x.Contains("\\Report\\")) .ToArray();
    string[] trainingStatesBest = trainingStatesAll .Where(x => x.Contains("Best") && x.Contains("\\Report\\")) .ToArray();
    // File-creation ticks double as primary keys linking state/meta/network rows.
    long[] traingingStateIds = trainingStates .Select(file => File.GetCreationTime(file).Ticks) .ToArray();
    long[] traingingStateIdsBest = trainingStatesBest .Select(file => File.GetCreationTime(file).Ticks) .ToArray();
    var trainigsStateData = trainingStates .Select(x => File.ReadAllText(x)) .Select(json => ((Dictionary <string, dynamic>)JSONWebParser.Load(json))) .Select((obj, index) => new { id = traingingStateIds[index], error = (double)obj["error"], iteration = (int)obj["iteration"], percent = (double)obj["percent"], validError = (double)obj["validError"], validPercent = (double)obj["validPercent"] }) .Concat( trainingStatesBest .Select(x => File.ReadAllText(x)) .Select(json => ((Dictionary <string, dynamic>)JSONWebParser.Load(json))) .Select((obj, index) => new { id = traingingStateIdsBest[index], error = (double)obj["error"], iteration = (int)obj["iteration"], percent = (double)obj["percent"], validError = (double)obj["validError"], validPercent = (double)obj["validPercent"] }));
    data["trainingState"] = converter.Insert("TrainingState", trainigsStateData);
    log("info", "TrainingState");
    // ---- Network files (non-report JSON), split into regular and "Best". ----
    // NOTE(review): the meta/network rows reuse traingingStateIds by index,
    // assuming the network file list aligns 1:1 and in the same order as the
    // report file list — confirm that invariant holds for the source tree.
    string[] networksAll = Directory .GetFiles("./Source/Network", "*.json", SearchOption.AllDirectories) .ToArray();
    string[] networks = networksAll .Where(x => !x.Contains("Best") && !x.Contains("\\Report\\")) .ToArray();
    string[] networksBest = networksAll .Where(x => x.Contains("Best") && !x.Contains("\\Report\\")) .ToArray();
    JavaScriptSerializer serializer = new JavaScriptSerializer();
    var networksMeta = networks .Select(x => File.ReadAllText(x)) .Select(json => serializer.Deserialize <NetworkTuple>(json).Meta) .Select((obj, index) => new { Alias = obj.Alias, ClusterPath = obj.ClusterPath, Name = obj.Name, Description = obj.Description, Id = traingingStateIds[index] } ) .Concat( networksBest .Select(x => File.ReadAllText(x)) .Select(json => serializer.Deserialize <NetworkTuple>(json).Meta) .Select((obj, index) => new { Alias = obj.Alias, ClusterPath = obj.ClusterPath, Name = obj.Name, Description = obj.Description, Id = traingingStateIdsBest[index] }));
    data["trainingMeta"] = converter.Insert("TrainingMeta", networksMeta);
    log("info", "TrainingMeta");
    var networksData = networks .Select(x => File.ReadAllText(x)) .Select(json => serializer.Deserialize <NetworkTuple>(json).Network) .Select((x, index) => new Network() { dataset_id = set.id, data = new Json(File.ReadAllText(networks[index])), state_id = traingingStateIds[index], meta_id = traingingStateIds[index], isBest = false, id = traingingStateIds[index] }) .Concat( networksBest .Select(x => File.ReadAllText(x)) .Select(json => serializer.Deserialize <NetworkTuple>(json).Network) .Select((x, index) => new Network() { dataset_id = set.id, data = new Json(File.ReadAllText(networksBest[index])), state_id = traingingStateIdsBest[index], meta_id = traingingStateIdsBest[index], isBest = true, id = traingingStateIdsBest[index] }));
    data["network"] = converter.Insert("Network", networksData);
    log("info", "Network");
    log("info", "=========================================");
    log("succes", "Дамп данных успешно сгенерирован");
    log("succes", "Схема успешно сгенерирована");
    // ---- Write the final SQL dumps. ----
    File.WriteAllText("./Database/create.sql", string.Join("\n", enumsTable) + string.Join("\n", tables.Select(x => x.Value).ToArray()));
    File.WriteAllText("./Database/insert.sql", sequensers + "\n\n" + string.Join("\n", enumsData) + string.Join("\n", data.Select(x => x.Value).ToArray()));
}
// Serializes `obj` as JSON (using the runtime type hint `t`) and writes the
// result to the file at `name`, overwriting any existing content.
protected override void Save(string name, object obj, Type t)
{
    string json = JSonParser.Save(obj, t);
    File.WriteAllText(name, json);
}
// Aggregates parsed records into a dictionary keyed by the encoded input
// vector (content-compared via ArrayEqualityComparer). Each value holds the
// running sum of outcomes and the occurrence count, so duplicates are
// averaged downstream rather than duplicated.
public override void Run(string[] args)
{
    base.Run(args);
    Validate();
    if (filter == true)
    {
        Console.WriteLine("Фильтр для образов в диапазоне [0.4,0.6] включен");
        Console.WriteLine("Фильтр для случайных образов включен");
    }
    Console.WriteLine($"HERO_SUBGROUP_COUNT = {HERO_SUBGROUP_COUNT}");
    guidMapper = (Dictionary <string, int>)JSonParser.Load(File.ReadAllText(guidInput), typeof(Dictionary <string, int>));
    // Count the "Hero#..." entries in the GUID mapping.
    foreach (var it in guidMapper)
    {
        if (it.Key.Split('#')[0] == "Hero")
        {
            heroCount++;
        }
    }
    Console.WriteLine($"HERO_COUNT = {heroCount}");
    heroService = new HeroService();
    heroService.Load(heroInput);
    // Key: encoded input vector; value: (sum of outcomes, occurrence count).
    Dictionary <SByte[], Tuple <double, int> > nnDataset = new Dictionary <SByte[], Tuple <double, int> >(new ArrayEqualityComparer());
    OpenSource(input);
    object data;
    bool wasPrint = false;
    while ((data = ReadData()) != null)
    {
        var line = ParseData(data);
        if (line == null)
        {
            continue;
        }
        // FIX: single TryGetValue lookup instead of the original
        // ContainsKey + indexer pair (two hash lookups per record).
        if (nnDataset.TryGetValue(line.Item1, out var prev))
        {
            nnDataset[line.Item1] = new Tuple <double, int>(prev.Item1 + line.Item2, prev.Item2 + 1);
        }
        else
        {
            nnDataset[line.Item1] = new Tuple <double, int>(line.Item2, 1);
        }
        // Print the progress message once per 100k bucket (the count can
        // hover around the boundary as duplicates merge, hence wasPrint).
        if (nnDataset.Count % 100000 == 0 && wasPrint == false)
        {
            Console.WriteLine("Уже обработано " + nnDataset.Count);
            wasPrint = true;
        }
        if (nnDataset.Count % 100000 == 1)
        {
            wasPrint = false;
        }
    }
    Save(output, nnDataset, null);
}