/// <summary>
/// Loads time-series entities from InfluxDB by issuing up to <paramref name="parallelism"/>
/// batched queries concurrently, then deserializing the result sets into entities.
/// Advances <paramref name="progress"/> as batches are consumed so the caller can resume.
/// </summary>
/// <param name="retentionPolicy">InfluxDB retention policy to query.</param>
/// <param name="measurement">InfluxDB measurement name.</param>
/// <param name="tagType">Type of the tags being queried.</param>
/// <param name="defaultTime">Fallback time bound passed to the query builder.</param>
/// <param name="doAdHocResampling">Whether the generated query resamples on the fly.</param>
/// <param name="skipLastPoint">Whether the last point of each series is dropped during entity creation.</param>
/// <param name="period">Resampling period forwarded to the entity creator.</param>
/// <param name="metas">Tag metadata driving query generation; consumed from <paramref name="progress"/>.Skip onward.</param>
/// <param name="batchBuilder">Reusable builder for composing one batched query; cleared after each batch.</param>
/// <param name="batchSize">Number of tags combined into a single batched query.</param>
/// <param name="parallelism">Maximum number of batched queries in flight at once.</param>
/// <param name="progress">Mutable progress tracker; Skip and ProcessedTags are updated here.</param>
/// <param name="influxQueryExecutor">Executor that runs the raw query text against InfluxDB.</param>
/// <param name="entityCreator">Deserializes raw result sets into TS entities.</param>
/// <param name="log">Trace sink for timing and error output.</param>
/// <returns>The entities created from all batched query results.</returns>
private static async Task<List<TS>> LoadEntities(
    string retentionPolicy,
    string measurement,
    TagTypes tagType,
    string defaultTime,
    bool doAdHocResampling,
    bool skipLastPoint,
    int period,
    List<TsMetadata> metas,
    StringBuilder batchBuilder,
    int batchSize,
    int parallelism,
    PartProgress progress,
    InfluxQueryExecutor influxQueryExecutor,
    EntityCreator entityCreator,
    TraceWriter log)
{
    var tasks = new List<Task<string>>(parallelism);
    var tags = new List<string>();

    // Launch up to `parallelism` concurrent batched queries, each covering `batchSize` tags.
    for (int a = 0; a < parallelism; a++)
    {
        if (progress.Skip >= metas.Count)
        {
            break;
        }

        var batch = metas.Skip(progress.Skip).Take(batchSize);
        progress.Skip += batchSize;

        foreach (var tagMeta in batch)
        {
            string tag = TagCleaner.Clean(tagMeta.Tag);

            // Remember both raw and cleaned tag names so a query failure below
            // can report exactly which tags were in flight.
            tags.Add(tagMeta.Tag + " cleaned: " + tag);

            string query = QueryBuilder.CreateMainQuery(
                retentionPolicy,
                measurement,
                tagType,
                tagMeta.Watermark,
                tagMeta.LastTimestampBeforeWatermark,
                tagMeta.LastTimestampAfterWatermark,
                defaultTime,
                tag,
                doAdHocResampling);
            batchBuilder.Append(query);
            progress.ProcessedTags++;
        }

        tasks.Add(influxQueryExecutor.Query(batchBuilder.ToString(), log));
        batchBuilder.Clear();
    }

    try
    {
        await Timer.Time(() => Task.WhenAll(tasks), "Getting aggregates", log);
    }
    catch (Exception ex)
    {
        // FIX: this log call was commented out, which left `ex` unused and made the
        // `tags` list (built solely for this message) dead weight. Restored so
        // failures identify the tags that were in flight.
        log.Error("One of these tags failed:\r\n" + string.Join("\r\n", tags), ex);
        throw; // bare `throw;` preserves the original stack trace
    }

    // NOTE(review): assumes tag names in `metas` are unique — ToDictionary throws
    // ArgumentException on a duplicate key. Confirm upstream guarantees uniqueness.
    var metasDict = metas.ToDictionary(k => k.Tag, v => v);

    // All tasks completed successfully above, so reading t.Result here cannot block.
    List<TS> entities = Timer.TimeSync(
        () => entityCreator.CreateEntities<TS>(tasks.Select(t => t.Result), metasDict, period, skipLastPoint),
        "Deserializing resultsets and creating entities",
        log);

    return entities;
}
/// <summary>
/// Queries InfluxDB for the single boundary point of each tag — the last point before
/// the tag's watermark when <paramref name="beforeWatermark"/> is true, otherwise the
/// last point overall — and returns a map from tag name to that point.
/// Work proceeds in waves of up to <paramref name="parallelism"/> concurrent batched queries.
/// </summary>
/// <param name="influxQueryExecutor">Executor that runs the raw query text against InfluxDB.</param>
/// <param name="metas">Tag metadata; tags without a timestamp are skipped in before-watermark mode.</param>
/// <param name="retentionPolicy">InfluxDB retention policy to query.</param>
/// <param name="measurement">InfluxDB measurement name.</param>
/// <param name="tagType">Type of the tags being queried.</param>
/// <param name="defaultTime">Fallback time bound passed to the query builder.</param>
/// <param name="doAdHocResampling">Unused here; kept for signature compatibility with sibling loaders.</param>
/// <param name="log">Trace sink passed through to the query executor.</param>
/// <param name="beforeWatermark">Selects the before-watermark query variant.</param>
/// <param name="batchSize">Number of tags combined into a single batched query.</param>
/// <param name="parallelism">Maximum number of batched queries in flight at once.</param>
/// <returns>Tag name mapped to its (timestamp, value) boundary point.</returns>
private static async Task<IDictionary<string, (DateTime timestamp, double value)>> GetOutliers(
    InfluxQueryExecutor influxQueryExecutor,
    List<TsMetadataDto> metas,
    string retentionPolicy,
    string measurement,
    TagTypes tagType,
    string defaultTime,
    bool doAdHocResampling,
    TraceWriter log,
    bool beforeWatermark,
    int batchSize = 256,
    int parallelism = 32)
{
    var dict = new Dictionary<string, (DateTime timestamp, double value)>();
    int skip = 0;

    // Process metas in waves: each wave issues up to `parallelism` batched queries,
    // awaits them all, and folds the results into `dict` before the next wave starts.
    while (skip < metas.Count)
    {
        var tasks = new List<Task<string>>(parallelism);

        for (int a = 0; a < parallelism; a++)
        {
            if (skip >= metas.Count)
            {
                break;
            }

            var batchBuilder = new StringBuilder();
            var batch = metas.Skip(skip).Take(batchSize);
            skip += batchSize;

            foreach (var metaInBatch in batch)
            {
                // The before-watermark query needs a concrete watermark timestamp;
                // tags without one are skipped in that mode.
                if (beforeWatermark && !metaInBatch.Timestamp.HasValue)
                {
                    continue;
                }

                string tag = TagCleaner.Clean(metaInBatch.Tag);
                string query = beforeWatermark
                    ? QueryBuilder.CreateFindLastPointBeforeWatermarkQuery(retentionPolicy, measurement, tagType, metaInBatch.Timestamp.Value, defaultTime, tag)
                    : QueryBuilder.CreateFindLastPointQuery(retentionPolicy, measurement, tagType, metaInBatch.Timestamp, defaultTime, tag);
                batchBuilder.Append(query);
            }

            // FIX: removed a dead `batchBuilder.Clear()` here — the builder is freshly
            // constructed each iteration and never touched again after ToString().
            string batchedQuery = batchBuilder.ToString();

            // An entire batch may have been skipped (before-watermark mode with no
            // timestamps), in which case there is nothing to query.
            if (!string.IsNullOrWhiteSpace(batchedQuery))
            {
                tasks.Add(influxQueryExecutor.Query(batchedQuery, log));
            }
        }

        await Task.WhenAll(tasks);

        var deserializedResultsets = tasks
            .Select(t => JsonConvert.DeserializeObject<InfluxDbResultset>(t.Result))
            .ToList();

        foreach (var resultset in deserializedResultsets)
        {
            foreach (var result in resultset.Results.Where(r => r.Series != null))
            {
                // NOTE(review): assumes each non-null result carries exactly one series;
                // Single() throws if InfluxDB ever returns more. Confirm query shape.
                var serie = result.Series.Single();
                foreach (var value in serie.Values)
                {
                    // Value row layout established by the query builder: [time, value, tag].
                    var timestamp = (DateTime)value[0];
                    var val = DoubleConverter.Convert(value[1]).Value;
                    var tag = (string)value[2];

                    // NOTE(review): Add throws on a duplicate tag across batches —
                    // presumably tags are unique per run; verify against caller.
                    dict.Add(tag, (timestamp, val));
                }
            }
        }
    }

    return dict;
}