/// <summary>
/// Reads the packet from the underlying stream.
/// </summary>
/// <param name="buffer">The buffer segment to read the packet from.</param>
/// <param name="offset">The offset within the buffer to start at.</param>
/// <param name="length">The remaining length, in bytes.</param>
/// <returns>Whether the packet was parsed successfully or not.</returns>
public override unsafe MqttStatus TryRead(BufferSegment buffer, int offset, int length)
{
    throw new NotImplementedException();
}
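// A minimal sketch (not part of the original source) of how an MQTT 'Remaining Length'
// field could be decoded from a raw byte buffer. The helper name, the plain byte[]
// parameters and the out-parameter shape are assumptions made purely for illustration;
// they do not reflect the actual TryRead implementation above.
private static bool TryDecodeRemainingLength(byte[] data, int offset, int count, out int value, out int consumed)
{
    value = 0;
    consumed = 0;
    int multiplier = 1;

    // The MQTT fixed header encodes the remaining length in 1 to 4 bytes,
    // 7 bits per byte, with the high bit acting as a continuation flag.
    while (consumed < count && consumed < 4)
    {
        byte encoded = data[offset + consumed++];
        value += (encoded & 0x7F) * multiplier;
        multiplier *= 128;

        // High bit cleared: the value is complete.
        if ((encoded & 0x80) == 0)
            return true;
    }

    // Either the buffer ended mid-field (more data needed) or the field is malformed.
    value = 0;
    return false;
}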
/// <summary>
/// Processes the incoming data in the context.
/// </summary>
/// <param name="incoming">The socket input to read from.</param>
internal Task OnReceive(BufferSegment incoming)
{
    // Process the receive
    Monitor.Enter(this.ReceiveLock);
    try
    {
        // If we weren't able to decrypt, continue at the next receive()
        if (incoming == null)
            return Task.CompletedTask;

        // Increment incoming bytes
        NetStat.BytesIncoming.Add(incoming.Length);

        // If we have previously throttled segments, glue the last two of them
        // together. Only two will suffice, as the effect is cumulative.
        bool throttledElement = !this.Segments.IsEmpty;

        // We now have a decrypted buffer, enqueue it to the processor queue
        this.Segments.Enqueue(incoming);

        // While we can dequeue, process each segment.
        while (this.Segments.Count > 0)
        {
            // If some data was throttled previously and only two elements are left,
            // glue them together.
            if (throttledElement && this.Segments.Count == 2)
            {
                BufferSegment leftSegment;
                BufferSegment rightSegment;
                this.Segments.TryDequeue(out leftSegment);
                this.Segments.TryDequeue(out rightSegment);

                this.Buffer = leftSegment.Join(rightSegment);
                leftSegment.TryRelease();
                rightSegment.TryRelease();
            }
            else
            {
                // Dequeue just one segment and process it
                BufferSegment segment;
                this.Segments.TryDequeue(out segment);

                // Set the buffer to the dequeued segment
                this.Buffer = segment;
            }

            // Check the contents
            //Console.WriteLine("[{0}] {1} ({2})", this.Channel.Handle, this.Buffer.ViewAsSsl, this.Channel.IsSecure);
            NetStat.PacketsIncoming.Increment();
            this.Current = 0;

            // While we have a worker and the connection buffer is still alive
            while (this.Current < this.Count && !this.BufferProvider.IsDisposed)
            {
                // Get the processor and schedule the next one. The 'current' might be
                // adjusted during the process if Redirect() is called.
                var process = this.Processors[this.Current++];

                // Execute the process
                var result = process(this.Channel, this);
                if (result == ProcessingState.InsufficientData)
                {
                    // Insufficient data was returned; keep the current buffer for later.
                    // When new data arrives, the two are glued together into one buffer
                    // segment. Only two will suffice, as the effect is cumulative.
                    Throttle(this.Buffer);
                    return Task.CompletedTask;
                }
                else if (result == ProcessingState.Stop)
                {
                    // The processor has told us to stop processing; we must free the buffer.
                    if (this.Buffer != null)
                        this.Buffer.TryRelease();
                    break;
                }
                else if (result == ProcessingState.HandleLater)
                {
                    // This particular buffer needs to be handled later; requeue it back
                    // onto the Segments queue.
                    if (this.Buffer != null)
                        this.LaterQueue.Enqueue(this.Buffer);
                    break;
                }
            }
        }
    }
#if DEBUG
    catch (Exception ex)
    {
        Service.Logger.Log(ex);
    }
#endif
    finally
    {
        // If we have some segments scheduled for later handling, enqueue them now
        BufferSegment laterSegment;
        while (this.LaterQueue.TryDequeue(out laterSegment))
            this.Segments.Enqueue(laterSegment);

        // Unlock
        Monitor.Exit(this.ReceiveLock);
    }

    return Task.CompletedTask;
}
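// A minimal sketch (not part of the original source) of the throttle-and-join idea
// used in OnReceive above: when a frame is split across receives, the incomplete tail
// is kept and concatenated with the next incoming segment, so only one pending piece
// ever needs to be stored. The names below (ReassemblyExample, pending, OnData,
// frameLength) are hypothetical and only illustrate the pattern, not the actual
// BufferSegment/Throttle implementation.
internal sealed class ReassemblyExample
{
    // The single throttled piece carried over from the previous receive, if any.
    private byte[] pending;

    public void OnData(byte[] incoming, int frameLength)
    {
        // Glue the previously throttled piece to the new data; because this happens
        // on every receive, one pending buffer is always enough.
        byte[] buffer;
        if (this.pending != null)
        {
            buffer = new byte[this.pending.Length + incoming.Length];
            System.Buffer.BlockCopy(this.pending, 0, buffer, 0, this.pending.Length);
            System.Buffer.BlockCopy(incoming, 0, buffer, this.pending.Length, incoming.Length);
            this.pending = null;
        }
        else
        {
            buffer = incoming;
        }

        // Consume as many complete frames as possible...
        int offset = 0;
        while (buffer.Length - offset >= frameLength)
        {
            // ...a real processor would handle buffer[offset .. offset + frameLength) here...
            offset += frameLength;
        }

        // ...and throttle whatever is left for the next receive.
        if (offset < buffer.Length)
        {
            this.pending = new byte[buffer.Length - offset];
            System.Buffer.BlockCopy(buffer, offset, this.pending, 0, this.pending.Length);
        }
    }
}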