/// <summary>
/// Parses all Elite Insights html files in a defined path, but ignores already parsed ones.
/// </summary>
/// <param name="path">Path which is searched for html files.</param>
/// <param name="outputFileName">File name of the summary html file.</param>
/// <param name="models">List of already parsed raid models.</param>
/// <returns>List of parsed raid models.</returns>
public IList<RaidModel> ParseFilesFromDiskWhileWatching(string path, string outputFileName, IList<RaidModel> models)
{
    var knownFiles = models.Select(i => i.LogPath).ToArray();

    Parallel.ForEach(Directory.GetFiles(path, "*.html"), filePath =>
    {
        // Skip files that were already parsed in an earlier run.
        if (knownFiles.Contains(filePath))
        {
            return;
        }

        // Skip the generated summary file itself.
        if (filePath.EndsWith(outputFileName))
        {
            return;
        }

        _logger?.LogTrace($"Parsing log: {Path.GetFileName(filePath)}");
        var model = EiHtmlParser.ParseLog(filePath, _logger);
        if (model != null)
        {
            _logger?.LogTrace($"Finished: {Path.GetFileName(filePath)}");

            // IList<T> is not thread-safe; guard the add because this body runs in parallel.
            lock (models)
            {
                models.Add(model);
            }
        }
    });

    return models;
}
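// Usage sketch (assumption: "parser" is an instance of the containing parser class, "models"
// holds the results of a previous parse, and "summary.html" is an illustrative output file
// name; none of these names come from this section). Re-scans a watched folder and parses
// only the reports that are not yet in the list:
//
//     models = parser.ParseFilesFromDiskWhileWatching(@"C:\raid-logs", "summary.html", models);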
/// <summary>
/// Parses Elite Insights html files from dps.report.
/// </summary>
/// <param name="path">The path which shall be used to temporarily store logs.</param>
/// <param name="token">The dps.report token for the user.</param>
/// <param name="day">The day that shall be parsed, in yyyyMMdd format.</param>
/// <returns>List of parsed raid models.</returns>
public IList<RaidModel> ParseFileFromWeb(string path, string token, string day)
{
    var models = new List<RaidModel>();
    var page = 1;
    var maxPage = 2;
    var filePath = Path.Combine(path, "test.html");
    var filteredUploads = new List<dynamic>();
    var httpClient = new HttpClient();
    var client = new RestClient("https://dps.report/");

    // Page through the user's uploads and collect those of the requested day.
    while (page <= maxPage)
    {
        var request = new RestRequest($"getUploads?userToken={token}&page={page++}", DataFormat.Json);
        var content = client.Get(request).Content;
        var json = (dynamic)JsonConvert.DeserializeObject(content);
        if (json == null)
        {
            break;
        }

        maxPage = (int)json.pages.Value;
        foreach (var upload in json.uploads)
        {
            DateTime date = DateTimeOffset.FromUnixTimeSeconds(upload.encounterTime.Value).LocalDateTime;
            var dateString = $"{date:yyyyMMdd}";
            if (dateString == day)
            {
                filteredUploads.Add(upload);
            }
        }
    }

    // Download each matching upload, make its cache links absolute and parse the local copy.
    foreach (var upload in filteredUploads)
    {
        _logger?.LogTrace($"Loading log {upload.permalink.Value}");
        Task<string> getFileTask = httpClient.GetStringAsync(upload.permalink.Value);
        var html = getFileTask.Result;
        html = html.Replace("/cache/", "https://dps.report/cache/");
        File.WriteAllText(filePath, html);

        _logger?.LogTrace($"Parsing log {upload.permalink.Value}");
        var model = EiHtmlParser.ParseLog(filePath);
        if (model != null)
        {
            model.LogUrl = upload.permalink.Value;
            models.Add(model);
        }
    }

    File.Delete(filePath);
    return models;
}
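// Usage sketch (assumption: "parser" is an instance of the containing parser class; the token,
// directory and date are placeholders). The day string must match the yyyyMMdd format used in
// the comparison above:
//
//     IList<RaidModel> webModels = parser.ParseFileFromWeb(Path.GetTempPath(), "<dps.report user token>", "20240131");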
/// <summary>
/// Parses all Elite Insights html files in a defined path.
/// </summary>
/// <param name="path">Path which is searched for html files.</param>
/// <returns>List of parsed raid models.</returns>
public IList<RaidModel> ParseFilesFromDisk(string path)
{
    var models = new List<RaidModel>();

    Parallel.ForEach(Directory.GetFiles(path, "*.html"), filePath =>
    {
        _logger?.LogTrace($"Parsing log: {Path.GetFileName(filePath)}");
        var model = EiHtmlParser.ParseLog(filePath);
        if (model != null)
        {
            // List<T> is not thread-safe; guard the add because this body runs in parallel.
            lock (models)
            {
                models.Add(model);
            }
        }
    });

    return models;
}
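// Usage sketch (assumption: "RaidLogParser" and "logger" stand in for the containing class and
// its logger dependency; neither name comes from this section). One-shot parse of a directory:
//
//     var parser = new RaidLogParser(logger);
//     IList<RaidModel> models = parser.ParseFilesFromDisk(@"C:\raid-logs");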