protected override async Task<bool> Fetch(CollectorHttpClient client, ReadWriteCursor front, ReadCursor back, CancellationToken cancellationToken)
{
    await front.Load(cancellationToken);

    DateTime frontDateTime = front.Value;

    JObject root = await client.GetJObjectAsync(Index, cancellationToken);

    List<Task<JObject>> tasks = new List<Task<JObject>>();

    foreach (JObject rootItem in root["items"])
    {
        DateTime pageTimeStamp = rootItem["commitTimeStamp"].ToObject<DateTime>();

        if (pageTimeStamp > frontDateTime)
        {
            int count = int.Parse(rootItem["count"].ToString());
            Total += count;

            front.Value = pageTimeStamp;
            await front.Save(cancellationToken);
        }
    }

    return true;
}
private void ReadChunkedData(ReadableBuffer buffer, WritableBuffer writableBuffer, out ReadCursor consumed, out ReadCursor examined)
{
    var actual = Math.Min(buffer.Length, _inputLength);
    consumed = buffer.Move(buffer.Start, actual);
    examined = consumed;

    Copy(buffer.Slice(0, actual), writableBuffer);

    _inputLength -= actual;
    AddAndCheckConsumedBytes(actual);

    if (_inputLength == 0)
    {
        _mode = Mode.Suffix;
    }
}
protected override void Init(IDictionary<string, string> arguments, CancellationToken cancellationToken)
{
    var source = arguments.GetOrThrow<string>(Arguments.Source);
    var verbose = arguments.GetOrDefault(Arguments.Verbose, false);
    var contentBaseAddress = arguments.GetOrDefault<string>(Arguments.ContentBaseAddress);
    var storageFactory = CommandHelpers.CreateStorageFactory(arguments, verbose);
    var httpClientTimeoutInSeconds = arguments.GetOrDefault<int?>(Arguments.HttpClientTimeoutInSeconds);
    var httpClientTimeout = httpClientTimeoutInSeconds.HasValue
        ? (TimeSpan?)TimeSpan.FromSeconds(httpClientTimeoutInSeconds.Value)
        : null;

    StorageFactory preferredPackageSourceStorageFactory = null;
    IAzureStorage preferredPackageSourceStorage = null;

    var preferAlternatePackageSourceStorage = arguments.GetOrDefault(Arguments.PreferAlternatePackageSourceStorage, defaultValue: false);

    if (preferAlternatePackageSourceStorage)
    {
        preferredPackageSourceStorageFactory = CommandHelpers.CreateSuffixedStorageFactory("PreferredPackageSourceStorage", arguments, verbose);
        preferredPackageSourceStorage = preferredPackageSourceStorageFactory.Create() as IAzureStorage;
    }

    Logger.LogInformation(
        "CONFIG source: \"{ConfigSource}\" storage: \"{Storage}\" preferred package source storage: \"{PreferredPackageSourceStorage}\"",
        source,
        storageFactory,
        preferredPackageSourceStorageFactory);
    Logger.LogInformation("HTTP client timeout: {Timeout}", httpClientTimeout);

    _collector = new DnxCatalogCollector(
        new Uri(source),
        storageFactory,
        preferredPackageSourceStorage,
        contentBaseAddress == null ? null : new Uri(contentBaseAddress),
        TelemetryService,
        Logger,
        MaxDegreeOfParallelism,
        CommandHelpers.GetHttpMessageHandlerFactory(TelemetryService, verbose),
        httpClientTimeout);

    var storage = storageFactory.Create();
    _front = new DurableCursor(storage.ResolveUri("cursor.json"), storage, MemoryCursor.MinValue);
    _back = MemoryCursor.CreateMax();

    _destination = storageFactory.BaseAddress;
    TelemetryService.GlobalDimensions[TelemetryConstants.Destination] = _destination.AbsoluteUri;
}
protected override bool Read(ReadableBuffer readableBuffer, WritableBuffer writableBuffer, out ReadCursor consumed, out ReadCursor examined)
{
    if (_inputLength == 0)
    {
        throw new InvalidOperationException("Attempted to read from completed Content-Length request body.");
    }

    var actual = (int)Math.Min(readableBuffer.Length, _inputLength);
    _inputLength -= actual;

    consumed = readableBuffer.Move(readableBuffer.Start, actual);
    examined = consumed;

    Copy(readableBuffer.Slice(0, actual), writableBuffer);

    return _inputLength == 0;
}
public static int Seek(ReadCursor begin, ReadCursor end, out ReadCursor result, byte byte0)
{
    var enumerator = new BufferEnumerator(begin, end);
    while (enumerator.MoveNext())
    {
        var span = enumerator.Current.Span;
        int index = span.IndexOf(byte0);
        if (index != -1)
        {
            result = enumerator.CreateCursor(index);
            return span[index];
        }
    }

    result = end;
    return -1;
}
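Seek returns the byte that was found (or -1) and reports its position through the out cursor. A minimal usage sketch, assuming a ReadableBuffer named buffer from the same preview System.IO.Pipelines API used throughout these examples; the caller shown here is hypothetical:

// Hypothetical caller: find the first LF between two cursors.
ReadCursor lineEnd;
if (Seek(buffer.Start, buffer.End, out lineEnd, (byte)'\n') != -1)
{
    // Everything from the start up to the LF is one line (delimiter excluded).
    ReadableBuffer line = buffer.Slice(buffer.Start, lineEnd);
}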
public bool TakeStartLine(ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    var overLength = false;
    if (buffer.Length >= ServerOptions.Limits.MaxRequestLineSize)
    {
        buffer = buffer.Slice(buffer.Start, ServerOptions.Limits.MaxRequestLineSize);
        overLength = true;
    }

    var result = _parser.ParseRequestLine(new Http1ParsingHandler(this), buffer, out consumed, out examined);

    if (!result && overLength)
    {
        ThrowRequestRejected(RequestRejectionReason.RequestLineTooLong);
    }

    return result;
}
internal static TlsFrameType CheckForFrameType(this ReadableBuffer buffer, out ReadCursor endOfMessage)
{
    endOfMessage = buffer.Start;

    // Need at least 5 bytes to be useful
    if (buffer.Length < 5)
    {
        return TlsFrameType.Incomplete;
    }

    var messageType = (TlsFrameType)buffer.ReadBigEndian<byte>();
    buffer = buffer.Slice(1);

    // Check it's a valid frame type for what we are expecting
    if (messageType != TlsFrameType.AppData && messageType != TlsFrameType.Alert &&
        messageType != TlsFrameType.ChangeCipherSpec && messageType != TlsFrameType.Handshake)
    {
        return TlsFrameType.Invalid;
    }

    // Now read the version
    var version = buffer.ReadBigEndian<ushort>();
    buffer = buffer.Slice(2);

    if (version < 0x300 || version >= 0x500)
    {
        return TlsFrameType.Invalid;
    }

    var length = buffer.ReadBigEndian<ushort>();
    buffer = buffer.Slice(2);

    if (buffer.Length >= length)
    {
        endOfMessage = buffer.Slice(0, length).End;
        return messageType;
    }

    return TlsFrameType.Incomplete;
}
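A hedged sketch of how CheckForFrameType might drive a framing loop, assuming the preview IPipeReader API that appears in the other snippets here; the reader variable and the frame dispatch are placeholders, not part of the original code:

// Hypothetical framing loop: wait until a whole TLS record is buffered.
while (true)
{
    var result = await reader.ReadAsync();
    var buffer = result.Buffer;

    var frameType = buffer.CheckForFrameType(out ReadCursor endOfMessage);
    if (frameType == TlsFrameType.Incomplete)
    {
        // Consume nothing, but mark everything examined so the next
        // ReadAsync only completes when more data arrives.
        reader.Advance(buffer.Start, buffer.End);
        if (result.IsCompleted) { break; }
        continue;
    }

    // Header plus payload of one complete record.
    var frame = buffer.Slice(buffer.Start, endOfMessage);
    // ... dispatch on frameType (Handshake, AppData, ...) ...
    reader.Advance(endOfMessage, endOfMessage);
}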
public static int Seek(ReadCursor begin, ReadCursor end, out ReadCursor result, byte byte0)
{
    var enumerator = new SegmentEnumerator(begin, end);
    while (enumerator.MoveNext())
    {
        var segmentPart = enumerator.Current;
        var segment = segmentPart.Segment;
        var span = segment.Buffer.Span.Slice(segmentPart.Start, segmentPart.Length);

        int index = span.IndexOf(byte0);
        if (index != -1)
        {
            result = new ReadCursor(segment, segmentPart.Start + index);
            return span[index];
        }
    }

    result = end;
    return -1;
}
static async Task Loop(string source, string registration, Lucene.Net.Store.Directory directory, string catalogBaseAddress, string storageBaseAddress, bool verbose, int interval)
{
    Func<HttpMessageHandler> handlerFunc = CommandHelpers.GetHttpMessageHandlerFactory(verbose, catalogBaseAddress, storageBaseAddress);

    CommitCollector collector = new SearchIndexFromCatalogCollector(new Uri(source), directory, catalogBaseAddress, handlerFunc);

    ReadWriteCursor front = new LuceneCursor(directory, MemoryCursor.Min.Value);
    ReadCursor back = (registration == null)
        ? (ReadCursor)MemoryCursor.Max
        : new HttpReadCursor(new Uri(registration), handlerFunc);

    while (true)
    {
        bool run = false;
        do
        {
            run = await collector.Run(front, back);
        }
        while (run);

        Thread.Sleep(interval * 1000);
    }
}
public async Task WhenPackageHasMultipleCommitsRespectsOrder(string pageContent)
{
    // Arrange
    SharedInit(useLegacy: true, useSemVer2: false);

    var catalogStorage = Catalogs.CreateTestCatalogWithThreeItemsForSamePackage(pageContent);
    await _mockServer.AddStorageAsync(catalogStorage);

    ReadWriteCursor front = new DurableCursor(_legacyStorage.ResolveUri("cursor.json"), _legacyStorage, MemoryCursor.MinValue);
    ReadCursor back = MemoryCursor.CreateMax();

    // Act
    await _target.RunAsync(front, back, CancellationToken.None);

    // Assert
    Assert.Equal(3, _legacyStorage.Content.Count);

    // Ensure storage has cursor.json
    var cursorJson = _legacyStorage.Content.FirstOrDefault(pair => pair.Key.PathAndQuery.EndsWith("cursor.json"));
    Assert.NotNull(cursorJson.Key);

    // Check package entries - ListedPackage
    var myPackageIndexFile = _legacyStorage.Content.FirstOrDefault(pair => pair.Key.PathAndQuery.EndsWith("/mypackage/index.json"));
    Assert.NotNull(myPackageIndexFile.Key);
    Assert.Contains("\"catalog:CatalogRoot\"", myPackageIndexFile.Value.GetContentString());
    Assert.Contains("\"PackageRegistration\"", myPackageIndexFile.Value.GetContentString());
    Assert.Contains("\"http://tempuri.org/data/2017.02.08.17.16.18/mypackage.3.0.0.json\"", myPackageIndexFile.Value.GetContentString());
    Assert.Contains("\"packageContent\":\"http://tempuri.org/packages/mypackage.3.0.0.nupkg\"", myPackageIndexFile.Value.GetContentString());
    Assert.Contains("\"lower\":\"3.0.0\",", myPackageIndexFile.Value.GetContentString());
    Assert.Contains("\"upper\":\"3.0.0\"", myPackageIndexFile.Value.GetContentString());

    var myPackageVersionFile = _legacyStorage.Content.FirstOrDefault(pair => pair.Key.PathAndQuery.EndsWith("/mypackage/3.0.0.json"));
    Assert.NotNull(myPackageVersionFile.Key);
    Assert.Contains("\"catalogEntry\":\"http://tempuri.org/data/2017.02.08.17.16.18/mypackage.3.0.0.json\"", myPackageVersionFile.Value.GetContentString());
    Assert.Contains("\"listed\":true", myPackageVersionFile.Value.GetContentString());
    Assert.Contains("\"packageContent\":\"http://tempuri.org/packages/mypackage.3.0.0.nupkg\"", myPackageIndexFile.Value.GetContentString());
}
public async Task RunAsync_WithValidPackage_CreatesFlatContainer()
{
    var indexJsonUri = _catalogToDnxStorage.ResolveUri("/listedpackage/index.json");
    var nupkgUri = _catalogToDnxStorage.ResolveUri("/listedpackage/1.0.0/listedpackage.1.0.0.nupkg");
    var nuspecUri = _catalogToDnxStorage.ResolveUri("/listedpackage/1.0.0/listedpackage.nuspec");

    var catalogStorage = Catalogs.CreateTestCatalogWithThreePackagesAndDelete();
    var nupkgStream = File.OpenRead("Packages\\ListedPackage.1.0.0.zip");
    var expectedNupkg = GetStreamBytes(nupkgStream);

    await _mockServer.AddStorageAsync(catalogStorage);

    _mockServer.SetAction(
        "/packages/listedpackage.1.0.0.nupkg",
        request => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StreamContent(nupkgStream)
        }));

    var front = new DurableCursor(_cursorJsonUri, _catalogToDnxStorage, MemoryCursor.MinValue);
    ReadCursor back = MemoryCursor.CreateMax();

    await _target.RunAsync(front, back, CancellationToken.None);

    Assert.Equal(4, _catalogToDnxStorage.Content.Count);
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(_cursorJsonUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(indexJsonUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(nupkgUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(nuspecUri));
    Assert.True(_catalogToDnxStorage.ContentBytes.ContainsKey(_cursorJsonUri));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(indexJsonUri, out var indexJson));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(nupkgUri, out var nupkg));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(nuspecUri, out var nuspec));

    Assert.Equal(GetExpectedIndexJsonContent("1.0.0"), Encoding.UTF8.GetString(indexJson));
    Assert.Equal(expectedNupkg, nupkg);
    Assert.Equal(
        "<?xml version=\"1.0\" encoding=\"utf-8\"?>\r\n<package xmlns=\"http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd\">\r\n <metadata>\r\n <id>ListedPackage</id>\r\n <version>1.0.0</version>\r\n <authors>NuGet</authors>\r\n <requireLicenseAcceptance>false</requireLicenseAcceptance>\r\n <description>Package description.</description>\r\n </metadata>\r\n</package>",
        Encoding.UTF8.GetString(nuspec));
}
private void ParseHttpRequest(ReadableBuffer inputBuffer, out ReadCursor consumed, out ReadCursor examined)
{
    consumed = inputBuffer.Start;
    examined = inputBuffer.End;

    if (_state == State.StartLine)
    {
        if (_parser.ParseRequestLine(this, inputBuffer, out consumed, out examined))
        {
            _state = State.Headers;
            inputBuffer = inputBuffer.Slice(consumed);
        }
    }

    if (_state == State.Headers)
    {
        if (_parser.ParseHeaders(this, inputBuffer, out consumed, out examined, out int consumedBytes))
        {
            _state = State.Body;
        }
    }
}
public void ParseRequest(ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    consumed = buffer.Start;
    examined = buffer.End;

    switch (_requestProcessingStatus)
    {
        case RequestProcessingStatus.RequestPending:
            if (buffer.IsEmpty)
            {
                break;
            }

            TimeoutControl.ResetTimeout(_requestHeadersTimeoutTicks, TimeoutAction.SendTimeoutResponse);

            _requestProcessingStatus = RequestProcessingStatus.ParsingRequestLine;
            goto case RequestProcessingStatus.ParsingRequestLine;

        case RequestProcessingStatus.ParsingRequestLine:
            if (TakeStartLine(buffer, out consumed, out examined))
            {
                buffer = buffer.Slice(consumed, buffer.End);

                _requestProcessingStatus = RequestProcessingStatus.ParsingHeaders;
                goto case RequestProcessingStatus.ParsingHeaders;
            }
            else
            {
                break;
            }

        case RequestProcessingStatus.ParsingHeaders:
            if (TakeMessageHeaders(buffer, out consumed, out examined))
            {
                _requestProcessingStatus = RequestProcessingStatus.AppStarted;
            }
            break;
    }
}
static async Task Loop(string source, StorageFactory storageFactory, string contentBaseAddress, bool verbose, int interval)
{
    CommitCollector collector = new DnxCatalogCollector(new Uri(source), storageFactory, CommandHelpers.GetHttpMessageHandlerFactory(verbose))
    {
        ContentBaseAddress = contentBaseAddress == null ? null : new Uri(contentBaseAddress)
    };

    Storage storage = storageFactory.Create();
    ReadWriteCursor front = new DurableCursor(storage.ResolveUri("cursor.json"), storage, MemoryCursor.Min.Value);
    ReadCursor back = MemoryCursor.Max;

    while (true)
    {
        bool run = false;
        do
        {
            run = await collector.Run(front, back);
        }
        while (run);

        Thread.Sleep(interval * 1000);
    }
}
public void CursorIsCorrectWithEmptyLastBlock()
{
    var last = new BufferSegment();
    last.SetMemory(new OwnedArray<byte>(new byte[4]), 0, 4);

    var first = new BufferSegment();
    first.SetMemory(new OwnedArray<byte>(new byte[] { 1, 2 }), 0, 2);
    first.SetNext(last);

    var start = new ReadCursor(first, first.Start);
    var end = new ReadCursor(last, last.Start);

    var reader = new ReadableBufferReader(start, end);

    reader.Take();
    reader.Take();
    reader.Take();

    Assert.Same(last, reader.Cursor.Segment);
    Assert.Equal(0, reader.Cursor.Index);
    Assert.True(reader.End);
}
public unsafe bool ParseRequestLine<T>(T handler, ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined) where T : IHttpRequestLineHandler
{
    consumed = buffer.Start;
    examined = buffer.End;

    // Prepare the first span
    var span = buffer.First.Span;
    var lineIndex = span.IndexOf(ByteLF);
    if (lineIndex >= 0)
    {
        consumed = buffer.Move(consumed, lineIndex + 1);
        span = span.Slice(0, lineIndex + 1);
    }
    else if (buffer.IsSingleSpan)
    {
        return false;
    }
    else
    {
        span = TryGetNewLineSpan(ref buffer, out consumed);
        if (span.Length == 0)
        {
            // No request line end
            return false;
        }
    }

    // Fix and parse the span
    fixed (byte* data = &span.DangerousGetPinnableReference())
    {
        ParseRequestLine(handler, data, span.Length);
    }

    examined = consumed;
    return true;
}
public async Task RunAsync_WhenPackageHasNuspecWithWrongName_ProcessesPackage()
{
    var zipWithWrongNameNuspec = CreateZipStreamWithEntry("Newtonsoft.Json.nuspec", _nuspecData);
    var indexJsonUri = _catalogToDnxStorage.ResolveUri("/unlistedpackage/index.json");
    var nupkgUri = _catalogToDnxStorage.ResolveUri("/unlistedpackage/1.0.0/unlistedpackage.1.0.0.nupkg");
    var nuspecUri = _catalogToDnxStorage.ResolveUri("/unlistedpackage/1.0.0/unlistedpackage.nuspec");

    var catalogStorage = Catalogs.CreateTestCatalogWithThreePackages();
    await _mockServer.AddStorageAsync(catalogStorage);

    _mockServer.SetAction(
        "/packages/unlistedpackage.1.0.0.nupkg",
        request => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StreamContent(zipWithWrongNameNuspec)
        }));

    var front = new DurableCursor(_cursorJsonUri, _catalogToDnxStorage, MemoryCursor.MinValue);
    ReadCursor back = MemoryCursor.CreateMax();

    await _target.RunAsync(front, back, CancellationToken.None);

    Assert.Equal(4, _catalogToDnxStorage.Content.Count);
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(_cursorJsonUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(indexJsonUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(nupkgUri));
    Assert.True(_catalogToDnxStorage.Content.ContainsKey(nuspecUri));
    Assert.True(_catalogToDnxStorage.ContentBytes.ContainsKey(_cursorJsonUri));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(indexJsonUri, out var indexJson));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(nupkgUri, out var nupkg));
    Assert.True(_catalogToDnxStorage.ContentBytes.TryGetValue(nuspecUri, out var nuspec));

    Assert.Equal(GetExpectedIndexJsonContent("1.0.0"), Encoding.UTF8.GetString(indexJson));
    Assert.Equal(zipWithWrongNameNuspec.ToArray(), nupkg);
    Assert.Equal(_nuspecData, Encoding.UTF8.GetString(nuspec));
}
private async Task<CatalogItemBatch[]> CreateBatchesAsync(ReadWriteCursor front, ReadCursor back, JObject page)
{
    IEnumerable<CatalogItem> pageItems = page["items"]
        .Select(item => new CatalogItem((JObject)item))
        .Where(item => item.CommitTimeStamp > front.Value && item.CommitTimeStamp <= back.Value);

    IEnumerable<CatalogItemBatch> batches = await CreateBatchesAsync(pageItems);

    return batches
        .OrderBy(batch => batch.CommitTimeStamp)
        .ToArray();
}
private ReadableBuffer Read()
{
    // CompareExchange not required as it's setting to the current value if the test fails
    if (Interlocked.Exchange(ref _consumingState, State.Active) != State.NotActive)
    {
#if DEBUG
        var message = "Already consuming.";
        message += " From: " + _consumingLocation;
        throw new InvalidOperationException(message);
#else
        ThrowHelper.ThrowInvalidOperationException(ExceptionResource.AlreadyConsuming);
#endif
    }

#if DEBUG
    _consumingLocation = Environment.StackTrace;
#endif

    ReadCursor readEnd;
    // Reading commit head shared with writer
    lock (_sync)
    {
        readEnd = new ReadCursor(_commitHead, _commitHeadIndex);
    }

    return new ReadableBuffer(new ReadCursor(_readHead), readEnd);
}
void IPipelineReader.Advance(ReadCursor consumed, ReadCursor examined) => AdvanceReader(consumed, examined);
// Summary:
//
// 1. Process one catalog page at a time.
// 2. Within a given catalog page, batch catalog commit entries by lower-cased package ID.
// 3. Process up to `n` batches in parallel. Note that the batches may span multiple catalog commits.
// 4. Cease processing new batches if a failure has been observed. This job will eventually retry
//    batches on its next outermost job loop.
// 5. If a failure has been observed, wait for all existing tasks to complete. Avoid task cancellation
//    as that could lead to the entirety of a package registration being in an inconsistent state.
//    To be fair, a well-timed exception could have the same result, but registration updates have never
//    been transactional. Actively cancelling tasks would make an inconsistent registration more likely.
// 6. Update the cursor if and only if all preceding commits and the current (oldest) commit have been
//    fully and successfully processed.
protected override async Task<bool> FetchAsync(
    CollectorHttpClient client,
    ReadWriteCursor front,
    ReadCursor back,
    CancellationToken cancellationToken)
{
    IEnumerable<CatalogItem> catalogItems = await FetchCatalogItemsAsync(client, front, cancellationToken);

    var hasAnyBatchFailed = false;
    var hasAnyBatchBeenProcessed = false;

    foreach (CatalogItem catalogItem in catalogItems)
    {
        JObject page = await client.GetJObjectAsync(catalogItem.Uri, cancellationToken);
        JToken context = page["@context"];
        CatalogItemBatch[] batches = await CreateBatchesAsync(front, back, page);
        SortedDictionary<DateTime, CommitBatchTasks> commitBatchTasksMap = CreateCommitBatchTasksMap(batches);

        var unprocessedBatches = new Queue<CatalogItemBatch>(batches);
        var processingBatches = new Queue<BatchTask>();

        CatalogItemBatch lastBatch = unprocessedBatches.LastOrDefault();
        var exceptions = new List<Exception>();

        EnqueueBatchesIfNoFailures(
            client,
            context,
            commitBatchTasksMap,
            unprocessedBatches,
            processingBatches,
            lastBatch,
            cancellationToken);

        while (processingBatches.Any())
        {
            var activeTasks = processingBatches.Where(batch => !batch.Task.IsCompleted)
                .Select(batch => batch.Task)
                .DefaultIfEmpty(CompletedTask);

            await Task.WhenAny(activeTasks);

            while (!hasAnyBatchFailed && commitBatchTasksMap.Any())
            {
                var commitBatchTasks = commitBatchTasksMap.First().Value;
                var isCommitFullyProcessed = commitBatchTasks.BatchTasks.All(batch => batch.Task != null && batch.Task.IsCompleted);

                if (!isCommitFullyProcessed)
                {
                    break;
                }

                var isCommitSuccessfullyProcessed = commitBatchTasks.BatchTasks.All(batch => batch.Task.Status == TaskStatus.RanToCompletion);

                if (isCommitSuccessfullyProcessed)
                {
                    var commitTimeStamp = commitBatchTasks.CommitTimeStamp;

                    front.Value = commitTimeStamp;
                    await front.SaveAsync(cancellationToken);

                    Trace.TraceInformation($"{nameof(RegistrationCollector)}.{nameof(FetchAsync)} {nameof(front)}.{nameof(front.Value)} saved since timestamp changed from previous: {{0}}", front);

                    DequeueBatchesWhileMatches(processingBatches, batch => batch.CommitTimeStamp == commitTimeStamp);

                    commitBatchTasksMap.Remove(commitTimeStamp);
                }
                else // Canceled or Failed
                {
                    hasAnyBatchFailed = true;

                    exceptions.AddRange(
                        commitBatchTasks.BatchTasks
                            .Select(batch => batch.Task)
                            .Where(task => (task.IsFaulted || task.IsCanceled) && task.Exception != null)
                            .Select(task => task.Exception));
                }
            }

            if (hasAnyBatchFailed)
            {
                DequeueBatchesWhileMatches(processingBatches, batch => batch.Task.IsCompleted);
            }

            hasAnyBatchBeenProcessed = true;

            EnqueueBatchesIfNoFailures(
                client,
                context,
                commitBatchTasksMap,
                unprocessedBatches,
                processingBatches,
                lastBatch,
                cancellationToken);
        }

        if (hasAnyBatchFailed)
        {
            var innerException = exceptions.Count == 1 ? exceptions.Single() : new AggregateException(exceptions);

            throw new BatchProcessingException(innerException);
        }
    }

    return hasAnyBatchBeenProcessed;
}
public void Advance(ReadCursor consumed, ReadCursor examined)
{
    _readerWriter.AdvanceReader(consumed, examined);
}
public ParseResult ParseRequest(ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    consumed = buffer.Start;
    examined = buffer.Start;

    if (_state == ParsingState.StartLine)
    {
        if (!buffer.TrySliceTo((byte)'\r', (byte)'\n', out ReadableBuffer startLine, out ReadCursor delim))
        {
            return ParseResult.Incomplete;
        }

        // Move the buffer to the rest
        buffer = buffer.Slice(delim).Slice(2);

        if (!startLine.TrySliceTo((byte)' ', out ReadableBuffer method, out delim))
        {
            return ParseResult.BadRequest;
        }

        _method = method.Preserve();

        // Skip ' '
        startLine = startLine.Slice(delim).Slice(1);

        if (!startLine.TrySliceTo((byte)' ', out ReadableBuffer path, out delim))
        {
            return ParseResult.BadRequest;
        }

        _path = path.Preserve();

        // Skip ' '
        startLine = startLine.Slice(delim).Slice(1);

        var httpVersion = startLine;
        if (httpVersion.IsEmpty)
        {
            return ParseResult.BadRequest;
        }

        _httpVersion = httpVersion.Preserve();

        _state = ParsingState.Headers;
        consumed = buffer.Start;
        examined = buffer.Start;
    }

    // Parse headers
    // key: value\r\n
    while (!buffer.IsEmpty)
    {
        var headerValue = default(ReadableBuffer);

        if (!buffer.TrySliceTo((byte)'\r', (byte)'\n', out ReadableBuffer headerPair, out ReadCursor delim))
        {
            return ParseResult.Incomplete;
        }

        buffer = buffer.Slice(delim).Slice(2);
        consumed = buffer.Start;
        examined = buffer.Start;

        // End of headers
        if (headerPair.IsEmpty)
        {
            return ParseResult.Complete;
        }

        // :
        if (!headerPair.TrySliceTo((byte)':', out ReadableBuffer headerName, out delim))
        {
            return ParseResult.BadRequest;
        }

        headerName = headerName.TrimStart();
        headerPair = headerPair.Slice(delim).Slice(1);
        headerValue = headerPair.TrimStart();

        RequestHeaders.SetHeader(ref headerName, ref headerValue);
    }

    return ParseResult.Incomplete;
}
public bool ParseHeaders(TRequestHandler handler, ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined, out int consumedBytes)
{
    handler.OnHeader(new Span<byte>(_hostHeaderName), new Span<byte>(_hostHeaderValue));
    handler.OnHeader(new Span<byte>(_acceptHeaderName), new Span<byte>(_acceptHeaderValue));
    handler.OnHeader(new Span<byte>(_connectionHeaderName), new Span<byte>(_connectionHeaderValue));

    consumedBytes = 0;
    consumed = buffer.Start;
    examined = buffer.End;

    return true;
}
/// <summary>
/// Moves the pipeline's read cursor forward to just past the consumed data.
/// </summary>
/// <param name="consumed">Marks the extent of the data that has been successfully processed.</param>
/// <param name="examined">Marks the extent of the data that has been read and examined.</param>
/// <remarks>
/// The memory for the consumed data will be released and no longer available.
/// The examined data communicates to the pipeline when it should signal that more data is available.
/// </remarks>
public void Advance(ReadCursor consumed, ReadCursor examined) => _input.AdvanceReader(consumed, examined);
/// <summary>
/// Initializes an enumerator over the buffers between the <paramref name="start"/> and <paramref name="end"/> cursors.
/// </summary>
public BufferEnumerator(ReadCursor start, ReadCursor end)
{
    _segmentEnumerator = new SegmentEnumerator(start, end);
    _current = default;
}
public static BufferSegment Clone(ReadCursor beginBuffer, ReadCursor endBuffer, out BufferSegment lastSegment)
{
    var beginOrig = beginBuffer.Segment;
    var endOrig = endBuffer.Segment;

    if (beginOrig == endOrig)
    {
        lastSegment = new BufferSegment(beginOrig._buffer, beginBuffer.Index, endBuffer.Index);
        return lastSegment;
    }

    var beginClone = new BufferSegment(beginOrig._buffer, beginBuffer.Index, beginOrig.End);
    var endClone = beginClone;

    beginOrig = beginOrig.Next;

    while (beginOrig != endOrig)
    {
        endClone.Next = new BufferSegment(beginOrig._buffer, beginOrig.Start, beginOrig.End);
        endClone = endClone.Next;
        beginOrig = beginOrig.Next;
    }

    lastSegment = new BufferSegment(endOrig._buffer, endOrig.Start, endBuffer.Index);
    endClone.Next = lastSegment;

    return beginClone;
}
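Clone walks the linked segment list from the begin cursor's segment to the end cursor's segment, copying each node so the resulting chain is independent of the pipe's segment recycling. A hedged sketch of the Preserve-style snapshot this enables; the constructors used here are internal in the real library, so this only illustrates the shape:

// Hypothetical: snapshot the bytes between two cursors so they stay
// valid after the reader advances past them.
BufferSegment lastSegment;
BufferSegment firstSegment = BufferSegment.Clone(buffer.Start, buffer.End, out lastSegment);
var preserved = new ReadableBuffer(
    new ReadCursor(firstSegment, firstSegment.Start),
    new ReadCursor(lastSegment, lastSegment.End));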
private bool ParsePreface(ReadableBuffer readableBuffer, out ReadCursor consumed, out ReadCursor examined)
{
    consumed = readableBuffer.Start;
    examined = readableBuffer.End;

    if (readableBuffer.Length < ClientPreface.Length)
    {
        return false;
    }

    var span = readableBuffer.IsSingleSpan
        ? readableBuffer.First.Span
        : readableBuffer.ToSpan();

    for (var i = 0; i < ClientPreface.Length; i++)
    {
        if (ClientPreface[i] != span[i])
        {
            throw new Exception("Invalid HTTP/2 connection preface.");
        }
    }

    consumed = examined = readableBuffer.Move(readableBuffer.Start, ClientPreface.Length);
    return true;
}
public void Advance(ReadCursor consumed, ReadCursor examined)
{
    _channel.AdvanceReader(consumed, examined);
}
// Called by the READER
void IPipelineReader.Advance(ReadCursor consumed, ReadCursor examined)
{
    BufferSegment returnStart = null;
    BufferSegment returnEnd = null;

    if (!consumed.IsDefault)
    {
        returnStart = _head;
        returnEnd = consumed.Segment;
        _head = consumed.Segment;
        _head.Start = consumed.Index;
    }

    // Again, we don't need an interlock here because Read and Write proceed serially.
    // REVIEW: examined.IsEnd (PipelineReaderWriter has changed this logic)
    var consumedEverything = examined.IsEnd &&
                             Reading.Status == TaskStatus.WaitingForActivation &&
                             _awaitableState == _awaitableIsCompleted;

    CompareExchange(ref _cancelledState, CancelledState.NotCancelled, CancelledState.CancellationObserved);

    if (consumedEverything && _cancelledState != CancelledState.CancellationRequested)
    {
        _awaitableState = _awaitableIsNotCompleted;
    }

    while (returnStart != returnEnd)
    {
        var returnSegment = returnStart;
        returnStart = returnStart.Next;
        returnSegment.Dispose();
    }

    if (!_consuming)
    {
        throw new InvalidOperationException("No ongoing consuming operation to complete.");
    }
    _consuming = false;
}
public void Advance(ReadCursor consumed, ReadCursor examined)
{
    _pipe.Reader.Advance(consumed, examined);
}
public bool ParseRequestLine(TRequestHandler handler, ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    handler.OnStartLine(HttpMethod.Get, HttpVersion.Http11, new Span<byte>(_target), new Span<byte>(_target), Span<byte>.Empty, Span<byte>.Empty, false);

    consumed = buffer.Start;
    examined = buffer.End;

    return true;
}
/// <summary>
/// Initializes an enumerator over the memory blocks between the <paramref name="start"/> and <paramref name="end"/> cursors.
/// </summary>
public MemoryEnumerator(ReadCursor start, ReadCursor end)
{
    _segmentEnumerator = new SegmentEnumerator(start, end);
    _current = Memory<byte>.Empty;
}
private static async void WriteToSocket(TSocket tsocket, IPipeReader reader)
{
    Exception error = null;
    try
    {
        while (true)
        {
            var readResult = await reader.ReadAsync();
            ReadableBuffer buffer = readResult.Buffer;
            ReadCursor end = buffer.Start;
            try
            {
                if ((buffer.IsEmpty && readResult.IsCompleted) || readResult.IsCancelled)
                {
                    // EOF or TransportThread stopped
                    break;
                }
                if (!buffer.IsEmpty)
                {
                    var result = TrySend(tsocket.Fd, ref buffer);
                    if (result.Value == buffer.Length)
                    {
                        end = buffer.End;
                    }
                    else if (result.IsSuccess)
                    {
                        end = buffer.Move(buffer.Start, result.Value);
                    }
                    else if (result == PosixResult.EAGAIN || result == PosixResult.EWOULDBLOCK)
                    {
                        if (!await Writable(tsocket))
                        {
                            // TransportThread stopped
                            break;
                        }
                    }
                    else
                    {
                        error = result.AsException();
                        break;
                    }
                }
            }
            finally
            {
                // We need to call Advance to end the read
                reader.Advance(end);
            }
        }
    }
    catch (Exception ex)
    {
        error = ex;
    }
    finally
    {
        tsocket.ConnectionContext.OnConnectionClosed(error);
        reader.Complete(error);

        tsocket.StopReadFromSocket();

        CleanupSocketEnd(tsocket);
    }
}
public bool TakeMessageHeaders(ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    // Make sure the buffer is limited
    bool overLength = false;
    if (buffer.Length >= _remainingRequestHeadersBytesAllowed)
    {
        buffer = buffer.Slice(buffer.Start, _remainingRequestHeadersBytesAllowed);

        // If we sliced, it means the current buffer is bigger than what we're
        // allowed to look at
        overLength = true;
    }

    var result = _parser.ParseHeaders(new Http1ParsingHandler(this), buffer, out consumed, out examined, out var consumedBytes);
    _remainingRequestHeadersBytesAllowed -= consumedBytes;

    if (!result && overLength)
    {
        ThrowRequestRejected(RequestRejectionReason.HeadersExceedMaxTotalSize);
    }
    if (result)
    {
        TimeoutControl.CancelTimeout();
    }

    return result;
}
public void AdvanceReader(ReadCursor consumed, ReadCursor examined)
{
    BufferSegment returnStart = null;
    BufferSegment returnEnd = null;

    if (!consumed.IsDefault)
    {
        returnStart = _readHead;
        returnEnd = consumed.Segment;
        _readHead = consumed.Segment;
        _readHead.Start = consumed.Index;
    }

    // Reading commit head shared with writer
    lock (_sync)
    {
        // Change the state from observed -> not cancelled. We only want to reset the cancelled state if it was observed
        Interlocked.CompareExchange(ref _cancelledState, CancelledState.NotCancelled, CancelledState.CancellationObserved);

        var consumedEverything = examined.Segment == _commitHead &&
                                 examined.Index == _commitHeadIndex &&
                                 Reading.Status == TaskStatus.WaitingForActivation;

        // We reset the awaitable to not completed if
        // 1. We've consumed everything the producer produced so far
        // 2. Cancellation wasn't requested
        if (consumedEverything && _cancelledState != CancelledState.CancellationRequested)
        {
            Interlocked.CompareExchange(
                ref _awaitableState,
                _awaitableIsNotCompleted,
                _awaitableIsCompleted);
        }
    }

    while (returnStart != null && returnStart != returnEnd)
    {
        var returnSegment = returnStart;
        returnStart = returnStart.Next;
        returnSegment.Dispose();
    }

#if DEBUG
    _consumingLocation = null;
#endif
    // CompareExchange not required as it's setting to the current value if the test fails
    if (Interlocked.Exchange(ref _consumingState, State.NotActive) != State.Active)
    {
        ThrowHelper.ThrowInvalidOperationException(ExceptionResource.NotConsumingToComplete);
    }
}
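Together with the Read method shown earlier, AdvanceReader completes one consume cycle: Read hands out a ReadableBuffer, the caller parses some prefix of it, and AdvanceReader releases the consumed segments and re-arms the read awaitable. A minimal sketch of that pairing; the pipe variable, the parsing step, and the accessibility of Read are assumptions for illustration:

// Hypothetical consume cycle against the reader/writer above.
ReadableBuffer buffer = pipe.Read();
ReadCursor consumed = buffer.Start; // nothing processed yet
ReadCursor examined = buffer.End;   // but we looked at everything
try
{
    // ... parse, moving `consumed` forward past fully handled data ...
}
finally
{
    // Releases segments up to `consumed`; because `examined` is the end,
    // the next read awaitable won't complete until more data is written.
    pipe.AdvanceReader(consumed, examined);
}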
public FilterResult<FakePackageInfo> Filter(ReadableBuffer buffer, out ReadCursor consumed, out ReadCursor examined)
{
    throw new NotImplementedException();
}
protected override async Task<bool> Fetch(CollectorHttpClient client, ReadWriteCursor front, ReadCursor back)
{
    await front.Load();

    DateTime frontDateTime = front.Value;

    JObject root = await client.GetJObjectAsync(Index);

    List<Task<JObject>> tasks = new List<Task<JObject>>();

    foreach (JObject rootItem in root["items"])
    {
        DateTime pageTimeStamp = rootItem["commitTimeStamp"].ToObject<DateTime>();

        if (pageTimeStamp > frontDateTime)
        {
            int count = int.Parse(rootItem["count"].ToString());
            Total += count;

            front.Value = pageTimeStamp;
            await front.Save();
        }
    }

    return true;
}