#pragma warning disable CA1822 // Does not access instance data, can be marked static.
public virtual async Task<Segment> BuildSegment(
#pragma warning restore CA1822 // Can't mock static methods in MOQ.
    bool async,
    string manifestPath,
    SegmentCursor cursor = default)
{
    // Models we need for later
    List<Shard> shards = new List<Shard>();
    DateTimeOffset dateTime = BlobChangeFeedExtensions.ToDateTimeOffset(manifestPath).Value;
    int shardIndex = cursor?.ShardIndex ?? 0;

    // Download segment manifest
    BlobClient blobClient = _containerClient.GetBlobClient(manifestPath);
    BlobDownloadInfo blobDownloadInfo;
    if (async)
    {
        blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
    }
    else
    {
        blobDownloadInfo = blobClient.Download();
    }

    // Both the download payload and the parsed JsonDocument are IDisposable;
    // dispose them once the shard list has been materialized.
    using (blobDownloadInfo)
    {
        // Parse segment manifest
        JsonDocument jsonManifest;
        if (async)
        {
            jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
        }
        else
        {
            jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content);
        }

        using (jsonManifest)
        {
            int i = 0;
            foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray())
            {
                // Shard paths in the manifest are prefixed with the container name;
                // strip it to get the container-relative path.
                string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length);

                // Guard against a cursor that carries fewer shard cursors than the
                // manifest has shards (e.g. shards added since the cursor was taken);
                // missing entries fall back to null (start of shard).
                ShardCursor shardCursor = cursor?.ShardCursors != null && i < cursor.ShardCursors.Count
                    ? cursor.ShardCursors[i]
                    : null;

                Shard shard = await _shardFactory.BuildShard(
                    async,
                    shardPath,
                    shardCursor)
                    .ConfigureAwait(false);

                shards.Add(shard);
                i++;
            }
        }
    }

    return new Segment(
        shards,
        shardIndex,
        dateTime);
}
#pragma warning disable CA1822 // Does not access instance data, can be marked static.
public virtual async Task<Segment> BuildSegment(
#pragma warning restore CA1822 // Can't mock static methods in MOQ.
    bool async,
    string manifestPath,
    SegmentCursor cursor = default)
{
    // Models we need for later
    List<Shard> shards = new List<Shard>();
    DateTimeOffset dateTime = BlobChangeFeedExtensions.ToDateTimeOffset(manifestPath).Value;

    // Download segment manifest
    BlobClient blobClient = _containerClient.GetBlobClient(manifestPath);
    BlobDownloadInfo blobDownloadInfo;
    if (async)
    {
        blobDownloadInfo = await blobClient.DownloadAsync().ConfigureAwait(false);
    }
    else
    {
        blobDownloadInfo = blobClient.Download();
    }

    // Both the download payload and the parsed JsonDocument are IDisposable;
    // dispose them once the shard list has been materialized.
    using (blobDownloadInfo)
    {
        // Parse segment manifest
        JsonDocument jsonManifest;
        if (async)
        {
            jsonManifest = await JsonDocument.ParseAsync(blobDownloadInfo.Content).ConfigureAwait(false);
        }
        else
        {
            jsonManifest = JsonDocument.Parse(blobDownloadInfo.Content);
        }

        using (jsonManifest)
        {
            foreach (JsonElement shardJsonElement in jsonManifest.RootElement.GetProperty("chunkFilePaths").EnumerateArray())
            {
                // Shard paths in the manifest are prefixed with the container name;
                // strip it to get the container-relative path.
                string shardPath = shardJsonElement.ToString().Substring("$blobchangefeed/".Length);

                // Match the shard to its cursor by path rather than by position, so a
                // cursor taken against a different shard set still resolves correctly.
                var shardCursor = cursor?.ShardCursors?.Find(
                    x => x.CurrentChunkPath.StartsWith(shardPath, StringComparison.InvariantCulture));

                Shard shard = await _shardFactory.BuildShard(
                    async,
                    shardPath,
                    shardCursor)
                    .ConfigureAwait(false);

                // Skip shards that have no more events to read.
                if (shard.HasNext())
                {
                    shards.Add(shard);
                }
            }
        }
    }

    // Resolve which shard to resume from. If the cursor's shard is missing
    // (or the cursor pointed at the end of a shard), start from the beginning.
    int shardIndex = 0;
    string currentShardPath = cursor?.CurrentShardPath;
    if (!string.IsNullOrWhiteSpace(currentShardPath))
    {
        shardIndex = shards.FindIndex(s => s.ShardPath == currentShardPath);
        if (shardIndex < 0)
        {
            // Either shard doesn't exist or cursor is pointing to end of shard. So start from beginning.
            shardIndex = 0;
        }
    }

    return new Segment(
        shards,
        shardIndex,
        dateTime,
        manifestPath);
}
/// <summary>
/// Builds a <see cref="ChangeFeed"/> over the account's change feed container,
/// resuming from a serialized continuation cursor when one is supplied,
/// otherwise from the given start/end times rounded to hour boundaries.
/// </summary>
/// <param name="startTime">Optional window start; overridden by <paramref name="continuation"/> when present.</param>
/// <param name="endTime">Optional window end; overridden by <paramref name="continuation"/> when present.</param>
/// <param name="continuation">Serialized <see cref="ChangeFeedCursor"/> to resume from, or null.</param>
/// <param name="async">Whether to perform storage calls asynchronously.</param>
/// <param name="cancellationToken">Token to cancel the storage operations.</param>
/// <returns>The constructed <see cref="ChangeFeed"/>, or <see cref="ChangeFeed.Empty"/> when no segments fall in range.</returns>
/// <exception cref="ArgumentException">Change Feed is not enabled on the account (container missing).</exception>
public async Task<ChangeFeed> BuildChangeFeed(
    DateTimeOffset? startTime,
    DateTimeOffset? endTime,
    string continuation,
    bool async,
    CancellationToken cancellationToken)
{
    DateTimeOffset lastConsumable;
    Queue<string> years = new Queue<string>();
    Queue<string> segments = new Queue<string>();
    ChangeFeedCursor cursor = null;

    // Create cursor
    if (continuation != null)
    {
        cursor = JsonSerializer.Deserialize<ChangeFeedCursor>(continuation);
        ValidateCursor(_containerClient, cursor);
        startTime = cursor.CurrentSegmentCursor.SegmentTime;
        endTime = cursor.EndTime;
    }
    // Round start and end time if we are not using the cursor.
    else
    {
        startTime = startTime.RoundDownToNearestHour();
        endTime = endTime.RoundUpToNearestHour();
    }

    // Check if Change Feed has been enabled for this account.
    bool changeFeedContainerExists;
    if (async)
    {
        changeFeedContainerExists = await _containerClient.ExistsAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    else
    {
        changeFeedContainerExists = _containerClient.Exists(cancellationToken: cancellationToken);
    }

    if (!changeFeedContainerExists)
    {
        throw new ArgumentException("Change Feed hasn't been enabled on this account, or is currently being enabled.");
    }

    // Get last consumable
    BlobClient blobClient = _containerClient.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath);
    BlobDownloadInfo blobDownloadInfo;
    if (async)
    {
        blobDownloadInfo = await blobClient.DownloadAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    else
    {
        blobDownloadInfo = blobClient.Download(cancellationToken: cancellationToken);
    }

    // Both the download payload and the parsed JsonDocument are IDisposable;
    // dispose them once "lastConsumable" has been extracted.
    using (blobDownloadInfo)
    {
        JsonDocument jsonMetaSegment;
        if (async)
        {
            jsonMetaSegment = await JsonDocument.ParseAsync(
                blobDownloadInfo.Content,
                cancellationToken: cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            jsonMetaSegment = JsonDocument.Parse(blobDownloadInfo.Content);
        }

        using (jsonMetaSegment)
        {
            lastConsumable = jsonMetaSegment.RootElement.GetProperty("lastConsumable").GetDateTimeOffset();
        }
    }

    // Get year paths
    years = await GetYearPathsInternal(
        async,
        cancellationToken).ConfigureAwait(false);

    // Dequeue any years that occur before start time
    if (startTime.HasValue)
    {
        while (years.Count > 0
            && BlobChangeFeedExtensions.ToDateTimeOffset(years.Peek()) < startTime.RoundDownToNearestYear())
        {
            years.Dequeue();
        }
    }

    // There are no years.
    if (years.Count == 0)
    {
        return ChangeFeed.Empty();
    }

    // Advance year by year until a year with segments in range is found.
    while (segments.Count == 0 && years.Count > 0)
    {
        // Get Segments for year
        segments = await BlobChangeFeedExtensions.GetSegmentsInYearInternal(
            containerClient: _containerClient,
            yearPath: years.Dequeue(),
            startTime: startTime,
            endTime: BlobChangeFeedExtensions.MinDateTime(lastConsumable, endTime),
            async: async,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    // We were on the last year, and there were no more segments.
    if (segments.Count == 0)
    {
        return ChangeFeed.Empty();
    }

    Segment currentSegment = await _segmentFactory.BuildSegment(
        async,
        segments.Dequeue(),
        cursor?.CurrentSegmentCursor)
        .ConfigureAwait(false);

    return new ChangeFeed(
        _containerClient,
        _segmentFactory,
        years,
        segments,
        currentSegment,
        lastConsumable,
        startTime,
        endTime);
}