public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback cback, object state)
        {
            if (disposed)
            {
                throw new ObjectDisposedException(GetType().ToString());
            }

            if (buffer == null)
            {
                throw new ArgumentNullException("buffer");
            }

            int len = buffer.Length;

            if (offset < 0 || offset > len)
            {
                throw new ArgumentOutOfRangeException("offset exceeds the size of buffer");
            }

            if (count < 0 || offset > len - count)
            {
                throw new ArgumentOutOfRangeException("offset+size exceeds the size of buffer");
            }

            HttpStreamAsyncResult ares = new HttpStreamAsyncResult();

            ares.Callback = cback;
            ares.State    = state;
            if (no_more_data)
            {
                ares.Complete();
                return(ares);
            }
            int nread = decoder.Read(buffer, offset, count);

            offset += nread;
            count  -= nread;
            if (count == 0)
            {
                // got all we wanted, no need to bother the decoder yet
                ares.Count = nread;
                ares.Complete();
                return(ares);
            }
            if (!decoder.WantMore)
            {
                no_more_data = nread == 0;
                ares.Count   = nread;
                ares.Complete();
                return(ares);
            }
            // Not enough data yet: read ahead into a scratch buffer and let OnRead
            // keep feeding the decoder until the caller's request is satisfied.
            ares.Buffer = new byte[8192];
            ares.Offset = 0;
            ares.Count  = 8192;
            ReadBufferState rb = new ReadBufferState(buffer, offset, count, ares);

            rb.InitialCount += nread;
            base.BeginRead(ares.Buffer, ares.Offset, ares.Count, OnRead, rb);
            return(ares);
        }
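A caller consumes this override through the usual APM pattern. A minimal sketch of hypothetical caller code, assuming the matching EndRead override returns the number of bytes decoded into the buffer:

        // Hypothetical caller: start an async read and finish it in the callback.
        byte[] buf = new byte[4096];
        stream.BeginRead(buf, 0, buf.Length, ar =>
        {
            int n = stream.EndRead(ar); // bytes actually placed into buf
            // consume buf[0..n); a return of 0 indicates no more body data
        }, null);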
        public void Read_FromOffset()
        {
            const int sourceContentSize = 100;
            const int readBufferSize    = 128;
            const int chunkSize         = 50;
            const int offset            = 20;

            var           sourceString  = new string('x', sourceContentSize);
            var           sourceBytes   = Encoding.UTF8.GetBytes(sourceString);
            var           position      = 0;
            Func <byte[]> readNextChunk = () =>
            {
                var bytes = sourceBytes.Skip(position).Take(chunkSize).ToArray();
                position = position + chunkSize;
                return(bytes);
            };

            var chunkStream = new ChunkStream(readNextChunk);

            var readBufferString = new string('_', readBufferSize);
            var readBuffer       = Encoding.UTF8.GetBytes(readBufferString);

            chunkStream.Read(readBuffer, offset, readBufferSize - offset);

            var resultString = Encoding.UTF8.GetString(readBuffer);

            Assert.AreEqual(offset, resultString.IndexOf('x'));
            Assert.AreEqual(sourceContentSize + offset - 1, resultString.LastIndexOf('x'));
        }
        public void Reading_Chunk_Should_Work_If_Chunk_Ends_Exactly_At_Source_Stream()
        {
            //start 5000 bytes before the end, so the 5000-byte chunk ends exactly at the end of the source stream
            long offset = SourceStream.Length - 5000;

            TestStream = new ChunkStream(SourceStream, 5000, offset, true);

            byte[] buffer = new byte[1234];
            while (true)
            {
                int    read = TestStream.Read(buffer, 0, buffer.Length);
                byte[] copy = new byte[read];
                Array.Copy(buffer, 0, copy, 0, read);
                TargetBuffer.AddRange(copy);

                if (read == 0)
                {
                    break;
                }
            }

            Assert.AreEqual(5000, TargetBuffer.Count);
            for (int i = 0; i < TargetBuffer.Count; i++)
            {
                Assert.AreEqual(InputBuffer[i + offset], TargetBuffer[i], "Failed at index " + i);
            }
        }
        public void Reading_From_Block_Size_Of_Zero_Should_Work_But_Return_No_Data()
        {
            TestStream = new ChunkStream(SourceStream, 0, 0, false);

            byte[] buffer = new byte[1234];

            int read = TestStream.Read(buffer, 0, buffer.Length);

            Assert.AreEqual(0, read);
        }
Example #5
        public override void Decode(Chunk chunk, Stream destination)
        {
            BinReader reader = chunk.GetReader();

            reader.Position = 0;
            FourCC fourCC = reader.ReadFourCC();
            uint   width  = reader.ReadU32LE();
            uint   height = reader.ReadU32LE();

            BinWriter writer = new BinWriter(destination);

            // DDS magic and header size (the header is always 124 bytes)
            writer.WriteFourCC("DDS ");
            writer.WriteU32LE(124);
            writer.WriteU32LE((uint)(DDSFlags.Caps | DDSFlags.PixelFormat | DDSFlags.Width | DDSFlags.Height | DDSFlags.LinearSize));
            writer.WriteU32LE(height);
            writer.WriteU32LE(width);
            writer.WriteU32LE(width * height * 4); // pitch / linear size
            writer.WriteU32LE(0);                  // depth
            writer.WriteU32LE(0);                  // mip-map count
            for (int i = 0; i < 11; i++)
            {
                writer.WriteU32LE(0);              // reserved
            }

            // DDS_PIXELFORMAT: size (32), flags, FourCC, then bit count and RGBA masks (unused here)
            writer.WriteU32LE(32);
            writer.WriteU32LE((uint)DDSPixelFormat.FourCC);
            writer.WriteFourCC(fourCC);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);

            // caps1-4
            writer.WriteU32LE((uint)DDSCaps1.Texture);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);
            writer.WriteU32LE(0);

            // reserved2
            writer.WriteU32LE(0);

            // Copy the raw texture data that follows the 12-byte chunk header
            // (FourCC + width + height) into the output stream.
            byte[]      buffer      = new byte[chunk.Size - 12];
            ChunkStream chunkStream = chunk.GetStream();

            chunkStream.Position = 12;
            chunkStream.Read(buffer, 0, (int)chunk.Size - 12);
            writer.Write((uint)buffer.Length, buffer);
        }
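The decoder above relies on DDSFlags, DDSPixelFormat and DDSCaps1 enums defined elsewhere in the project. A minimal sketch, assuming they mirror the standard DDS header constants:

        // Assumed values, taken from the standard DDS header definitions (not from this project).
        [Flags]
        enum DDSFlags : uint
        {
            Caps        = 0x1,
            Height      = 0x2,
            Width       = 0x4,
            PixelFormat = 0x1000,
            LinearSize  = 0x80000
        }

        enum DDSPixelFormat : uint
        {
            FourCC = 0x4          // DDPF_FOURCC: the FourCC field is valid
        }

        enum DDSCaps1 : uint
        {
            Texture = 0x1000      // DDSCAPS_TEXTURE
        }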
        public void Reading_Whole_Stream_Should_Return_Full_Contents()
        {
            TestStream = new ChunkStream(SourceStream, InputBuffer.Length, 0, false);

            byte[] buffer = new byte[1234];
            while (true)
            {
                int    read = TestStream.Read(buffer, 0, buffer.Length);
                byte[] copy = new byte[read];
                Array.Copy(buffer, 0, copy, 0, read);
                TargetBuffer.AddRange(copy);

                if (read == 0)
                {
                    break;
                }
            }

            CollectionAssert.AreEqual(InputBuffer, TargetBuffer);
        }
Example #8
        /// <summary>
        /// Creates a new Chunk from given Stream
        /// </summary>
        /// <param name="stream">The Stream to read from</param>
        /// <returns>The new Chunk</returns>
        /// <exception cref="EndOfDataException">Unexpected end of Data</exception>
        /// <exception cref="InvalidChunkException">If Chunk name is not valid or the read chunk length is negative</exception>
        public static BaseChunk FromData(ChunkStream stream, MSH owner)
        {
            BaseChunk chunk = new BaseChunk(owner);

            //every chunk starts with a chunk name (4 bytes)
            chunk.ChunkName = stream.ReadString(4);

            //and an Int32 defining the length of the chunk (in bytes)
            int length = stream.ReadInt32();

            if (!Regex.Match(chunk.ChunkName, ValidChunkRegEx).Success)
            {
                Log.Add(chunk.ChunkName + " is not a valid Chunk Name!", LogType.Error);
                throw new InvalidChunkException(chunk.ChunkName + " is not a valid Chunk Name!");
            }

            if (length < 0)
            {
                Log.Add(length + " is not a valid Chunk Length!", LogType.Error);
                throw new InvalidChunkException(length + " is not a valid Chunk Length!");
            }

            //read all the data
            byte[] buffer = new byte[length];

            try
            {
                stream.Read(buffer, 0, length);
                chunk.data.AddRange(buffer);
            }
            catch (ArgumentOutOfRangeException ex)
            {
                Log.Add("Unexpected end of Data!", LogType.Error);
                throw new EndOfDataException("Unexpected end of Data!", ex);
            }

            chunk.ResetPosition();

            return(chunk);
        }
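A typical caller parses chunks until the stream is exhausted. A minimal sketch of hypothetical driver code, assuming ChunkStream exposes the usual Stream Position and Length properties:

        // Hypothetical caller: read every top-level chunk from the stream.
        var chunks = new List<BaseChunk>();
        while (stream.Position < stream.Length)
        {
            chunks.Add(BaseChunk.FromData(stream, owner));
        }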
        public void Reading_Chunk_Should_Work_If_Chunk_Is_In_Middle_Of_Stream()
        {
            TestStream = new ChunkStream(SourceStream, 5000, 1500, true);

            byte[] buffer = new byte[1234];
            while (true)
            {
                int    read = TestStream.Read(buffer, 0, buffer.Length);
                byte[] copy = new byte[read];
                Array.Copy(buffer, 0, copy, 0, read);
                TargetBuffer.AddRange(copy);

                if (read == 0)
                {
                    break;
                }
            }

            Assert.AreEqual(5000, TargetBuffer.Count);
            for (int i = 0; i < TargetBuffer.Count; i++)
            {
                Assert.AreEqual(InputBuffer[i + 1500], TargetBuffer[i], "Failed at index " + i);
            }
        }
        public void Read_CountMinusOffsetLargerThanBuffer_ThrowException()
        {
            var stream = new ChunkStream(() => new byte[0]);

            stream.Read(new byte[100], 20, 81);
        }
        public void Read_OffsetIsNegative_ThrowException()
        {
            var stream = new ChunkStream(() => new byte[0]);

            stream.Read(new byte[0], -1, 0);
        }
        public void Read_CountIsNegative_ThrowException()
        {
            var stream = new ChunkStream(() => new byte[0]);

            stream.Read(new byte[0], 0, -1);
        }
        public void Read_BufferIsNull_ThrowException()
        {
            var stream = new ChunkStream(() => new byte[0]);

            stream.Read(null, 0, 0);
        }
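These four tests exercise the argument validation in ChunkStream.Read. A hypothetical helper showing guards consistent with what the tests expect (mirroring the checks in the BeginRead override above):

        // Hypothetical validation guards matching the exceptions the tests expect.
        static void ValidateReadArguments(byte[] buffer, int offset, int count)
        {
            if (buffer == null)
                throw new ArgumentNullException("buffer");
            if (offset < 0 || offset > buffer.Length)
                throw new ArgumentOutOfRangeException("offset");
            if (count < 0 || count > buffer.Length - offset)
                throw new ArgumentOutOfRangeException("count");
        }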