public void StreamErrorShouldFireExceptionForInbound()
{
    // Arrange: open stream 3 via an inbound HEADERS frame.
    _frameInboundWriter.WriteInboundHeaders(3, _request, 31, false);
    IHttp2Stream stream = _frameCodec.Connection.Stream(3);
    Assert.NotNull(stream);

    // Act: surface a stream-level error through the channel pipeline (inbound path).
    StreamException streamEx = new StreamException(3, Http2Error.InternalError, "foo");
    _channel.Pipeline.FireExceptionCaught(streamEx);

    // The codec publishes the stream-state event first, then the headers frame.
    Http2FrameStreamEvent evt = _inboundHandler.ReadInboundMessageOrUserEvent<Http2FrameStreamEvent>();
    Assert.Equal(Http2FrameStreamEvent.EventType.State, evt.Type);
    Assert.Equal(Http2StreamState.Open, evt.Stream.State);
    IHttp2HeadersFrame headersFrame = _inboundHandler.ReadInboundMessageOrUserEvent<IHttp2HeadersFrame>();
    Assert.NotNull(headersFrame);

    // Assert: the inbound error must be re-thrown wrapped in Http2FrameStreamException.
    // Assert.ThrowsAny replaces the try { Assert.False(true, ...) } catch fail-marker
    // anti-pattern and keeps the original catch semantics (derived types accepted).
    Http2FrameStreamException e =
        Assert.ThrowsAny<Http2FrameStreamException>(() => _inboundHandler.CheckException());
    Assert.Equal(streamEx, e.InnerException);

    // No further inbound messages or events are expected.
    Assert.Null(_inboundHandler.ReadInboundMessageOrUserEvent<object>());
}
public void StreamErrorShouldNotFireExceptionForOutbound()
{
    // Open stream 3 with an inbound HEADERS frame.
    _frameInboundWriter.WriteInboundHeaders(3, _request, 31, false);
    var stream = _frameCodec.Connection.Stream(3);
    Assert.NotNull(stream);

    // Report the stream error on the outbound path (outbound: true).
    var streamEx = new StreamException(3, Http2Error.InternalError, "foo");
    _frameCodec.OnError(_frameCodec._ctx, true, streamEx);

    // The state-change event and the headers frame are still delivered as usual.
    var evt = _inboundHandler.ReadInboundMessageOrUserEvent<Http2FrameStreamEvent>();
    Assert.Equal(Http2FrameStreamEvent.EventType.State, evt.Type);
    Assert.Equal(Http2StreamState.Open, evt.Stream.State);

    var headersFrame = _inboundHandler.ReadInboundMessageOrUserEvent<IHttp2HeadersFrame>();
    Assert.NotNull(headersFrame);

    // An outbound error must not be propagated as an exception to the handler.
    _inboundHandler.CheckException();

    // And nothing else should be queued.
    Assert.Null(_inboundHandler.ReadInboundMessageOrUserEvent<object>());
}
protected override Result<BlockCollection> ZipConvert(BlockCollection readerCollection)
{
    int bufferSize = FileInformation.BufferSize;
    int blocksCount = readerCollection.Blocks.Count;
    int blocksPerChunk = blocksCount / threadsLimit;
    var threads = new Thread[threadsLimit];
    var decompressedCollection = new BlockCollection();
    var errors = new List<string>();
    try
    {
        for (var i = 0; i < threadsLimit; i++)
        {
            // Bounds are declared inside the loop so each lambda captures its
            // own copy rather than sharing mutable state across threads.
            long chunkStart = i * blocksPerChunk;
            long chunkEnd = chunkStart + blocksPerChunk;
            if (i == threadsLimit - 1)
            {
                // The last thread also takes the remainder blocks.
                chunkEnd += blocksCount % threadsLimit;
            }

            threads[i] = new Thread(() =>
            {
                Result<IReadOnlyList<Block>> blocksResult =
                    GetDecompressedBlocksByChunkResult(chunkStart, chunkEnd, bufferSize, readerCollection);
                // Fix: the original used Monitor.Enter/Exit without try/finally,
                // which leaks the lock if errors.Add throws. A lock statement is
                // exception-safe. SetBlocks is guarded too: it mutates shared
                // state from multiple threads and nothing shows it is thread-safe.
                lock (mLock)
                {
                    if (blocksResult.Success)
                    {
                        decompressedCollection.SetBlocks(blocksResult.Value);
                    }
                    else
                    {
                        errors.Add(blocksResult.Error);
                    }
                }
            })
            {
                IsBackground = true,
                Priority = ThreadPriority.AboveNormal
            };
            threads[i].Start();
        }

        // Wait for every worker before inspecting results.
        foreach (Thread thread in threads)
        {
            thread.Join();
        }

        return errors.Any()
            ? Result.Fail<BlockCollection>(string.Join("\n", errors))
            : Result.Ok(decompressedCollection.GetSortedCollection());
    }
    catch (System.Exception ex)
    {
        return Result.Fail<BlockCollection>(StreamException.GetErrorText(ex));
    }
}
private void FireStreamExceptionsEventIfNecessary(List<KeyValuePair<ICommStream, Exception>> streamsAndExceptions)
{
    // Nothing to report, or nobody listening — bail out early.
    if (streamsAndExceptions is null || StreamException is null)
    {
        return;
    }

    foreach (var pair in streamsAndExceptions)
    {
        // Null-conditional re-checked per iteration: a handler may
        // unsubscribe while events are being raised.
        StreamException?.Invoke(this, new StreamExceptionArgs(pair.Key, pair.Value));
    }
}
protected override Result Write(BlockCollection zipCollection)
{
    // Writes every block's raw buffer to the output writer, in collection order.
    try
    {
        foreach (var block in zipCollection.Blocks)
        {
            bw.Write(block.Buffer);
        }

        return Result.Ok();
    }
    catch (System.Exception ex)
    {
        // Translate any write failure into a failed Result instead of throwing.
        return Result.Fail(StreamException.GetErrorText(ex));
    }
}
protected override Result Write(BlockCollection zipCollection)
{
    // Frame format per block: [int32 block number][int32 payload length][payload bytes].
    // This is the format the paired Read method decodes.
    try
    {
        foreach (var block in zipCollection.Blocks)
        {
            bw.Write(BitConverter.GetBytes(block.Number));
            bw.Write(block.Buffer.Length);
            bw.Write(block.Buffer.ToArray());
        }

        return Result.Ok();
    }
    catch (System.Exception ex)
    {
        // Translate any write failure into a failed Result instead of throwing.
        return Result.Fail(StreamException.GetErrorText(ex));
    }
}
private static Result<IReadOnlyList<Block>> GetCompressedBlocksByChunkResult(long chunkStart, long chunkEnd, BlockCollection readerCollection)
{
    // Gzip-compresses the blocks in [chunkStart, chunkEnd), preserving block numbers.
    try
    {
        var compressed = new List<Block>();
        for (long index = chunkStart; index < chunkEnd; index++)
        {
            Block source = readerCollection.Blocks.ElementAt((int)index);
            using var output = new MemoryStream();
            // The GZipStream must be disposed before ToArray so the trailing
            // gzip footer is flushed into the backing memory stream.
            using (var gzip = new GZipStream(output, CompressionMode.Compress))
            {
                gzip.Write(source.Buffer, 0, source.Buffer.Length);
            }

            compressed.Add(new Block(source.Number, output.ToArray()));
        }

        return Result.Ok<IReadOnlyList<Block>>(compressed);
    }
    catch (System.Exception ex)
    {
        return Result.Fail<IReadOnlyList<Block>>(StreamException.GetErrorText(ex));
    }
}
protected override Result<BlockCollection> Read(long leftBlocksCount)
{
    // Reads a batch of fixed-size raw blocks from the binary reader,
    // numbering them sequentially from zero.
    try
    {
        var blocks = new List<Block>();
        var readerCollection = new BlockCollection();

        // Batch quota: at most threadsLimit * MegabytesForReading blocks,
        // capped by how many blocks remain (loop-invariant, hoisted).
        long batchSize = Math.Min(threadsLimit * Constants.MegabytesForReading, leftBlocksCount);

        var blockNumber = 0;
        do
        {
            blocks.Add(new Block(blockNumber, br.ReadBytes(FileInformation.BufferSize)));
            blockNumber++;
        }
        while (blocks.Count != batchSize);

        readerCollection.SetBlocks(blocks);
        // NOTE(review): assumes SetBlocks copies its argument — Clear() would
        // otherwise empty the collection just stored. Confirm against SetBlocks.
        blocks.Clear();
        return Result.Ok(readerCollection);
    }
    catch (System.Exception ex)
    {
        return Result.Fail<BlockCollection>(StreamException.GetErrorText(ex));
    }
}
protected override Result<BlockCollection> Read(long leftBlocksCount)
{
    // Reads a batch of framed, compressed blocks from the binary reader.
    // Record layout: [int32 block number][int32 payload length][payload bytes]
    // (the format produced by the framed Write counterpart).
    try
    {
        var blocks = new List<Block>();
        var readerCollection = new BlockCollection();
        do
        {
            int number = br.ReadInt32();
            int length = br.ReadInt32();
            byte[] value = br.ReadBytes(length);
            // Fix: ReadBytes already returns a fresh array and `value` is never
            // reused, so the original's value.ToArray() copied every payload twice.
            blocks.Add(new Block(number, value));
        }
        // Stop once the batch quota is met: at most
        // threadsLimit * MegabytesForReading blocks, capped by what is left.
        while (blocks.Count != Math.Min(threadsLimit * Constants.MegabytesForReading, leftBlocksCount));

        readerCollection.SetBlocks(blocks);
        // NOTE(review): assumes SetBlocks copies its argument — Clear() would
        // otherwise empty the collection just stored. Confirm against SetBlocks.
        blocks.Clear();
        return Result.Ok(readerCollection);
    }
    catch (System.Exception ex)
    {
        return Result.Fail<BlockCollection>(StreamException.GetErrorText(ex));
    }
}
private static Result<IReadOnlyList<Block>> GetDecompressedBlocksByChunkResult(long chunkStart, long chunkEnd, int bufferSize, BlockCollection readerCollection)
{
    // Gzip-decompresses the blocks in [chunkStart, chunkEnd), preserving block
    // numbers. Each decompressed payload is at most bufferSize bytes.
    try
    {
        var blocks = new List<Block>();
        for (long number = chunkStart; number < chunkEnd; ++number)
        {
            var decompressedBlock = new byte[bufferSize];
            Block block = readerCollection.Blocks.ElementAt((int)number);
            int size = 0;
            using (var compressedBlock = new MemoryStream(block.Buffer.ToArray()))
            using (var zip = new GZipStream(compressedBlock, CompressionMode.Decompress))
            {
                // Fix: Stream.Read may return fewer bytes than requested —
                // DeflateStream/GZipStream routinely do (especially on .NET 6+),
                // so a single Read could silently truncate the block. Loop until
                // the buffer is full or the stream is exhausted (Read returns 0).
                int read;
                while (size < bufferSize &&
                       (read = zip.Read(decompressedBlock, size, bufferSize - size)) > 0)
                {
                    size += read;
                }
            }

            blocks.Add(new Block(block.Number, decompressedBlock.Take(size).ToArray()));
        }

        return Result.Ok<IReadOnlyList<Block>>(blocks);
    }
    catch (System.Exception ex)
    {
        return Result.Fail<IReadOnlyList<Block>>(StreamException.GetErrorText(ex));
    }
}
private void OnStreamException(Exception e)
{
    // Raise the StreamException event for subscribers; no-op when none are attached.
    var handler = StreamException;
    handler?.Invoke(this, e);
}