private void ReaderOperationWithMime(byte[] mime, string part1ExpectedStr, string part2ExpectedStr, int recommendedChunkSize)
{
    Encoding utf8 = Sharpen.Extensions.GetEncoding("UTF-8");
    // If the caller passes in a special chunk size that is not equal to mime.Length, then
    // test the algorithm _only_ at that chunk size. Otherwise, test it at every chunk size
    // between 1 and mime.Length. (This is needed because with a very large mime value,
    // testing at every single chunk size takes too long.)
    int chunkSize = 1;
    if (recommendedChunkSize != mime.Length)
    {
        chunkSize = recommendedChunkSize;
    }
    for (; chunkSize <= recommendedChunkSize; ++chunkSize)
    {
        ByteArrayInputStream mimeInputStream = new ByteArrayInputStream(mime);
        MultipartReaderTest.TestMultipartReaderDelegate delegate_ = new MultipartReaderTest.TestMultipartReaderDelegate(this);
        string contentType = "multipart/related; boundary=\"BOUNDARY\"";
        MultipartReader reader = new MultipartReader(contentType, delegate_);
        NUnit.Framework.Assert.IsFalse(reader.Finished());
        int location = 0;
        int length = 0;
        do
        {
            NUnit.Framework.Assert.IsTrue(location < mime.Length, "Parser didn't stop at end");
            length = Math.Min(chunkSize, mime.Length - location);
            byte[] bytesRead = new byte[length];
            mimeInputStream.Read(bytesRead, 0, length);
            reader.AppendData(bytesRead);
            location += chunkSize;
        }
        while (!reader.Finished());
        NUnit.Framework.Assert.AreEqual(2, delegate_.partList.Count);
        NUnit.Framework.Assert.AreEqual(2, delegate_.headersList.Count);
        byte[] part1Expected = Sharpen.Runtime.GetBytesForString(part1ExpectedStr, utf8);
        byte[] part2Expected = Sharpen.Runtime.GetBytesForString(part2ExpectedStr, utf8);
        ByteArrayBuffer part1 = delegate_.partList[0];
        ByteArrayBuffer part2 = delegate_.partList[1];
        NUnit.Framework.Assert.IsTrue(Arrays.Equals(part1.ToByteArray(), part1Expected));
        NUnit.Framework.Assert.IsTrue(Arrays.Equals(part2.ToByteArray(), part2Expected));
        IDictionary<string, string> headers1 = delegate_.headersList[0];
        NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Foo"));
        NUnit.Framework.Assert.AreEqual("Bar", headers1.Get("Foo"));
        NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Header"));
        NUnit.Framework.Assert.AreEqual("Val ue", headers1.Get("Header"));
    }
}
/// <exception cref="System.IO.IOException"></exception>
public override void WriteTo(OutputStream @out)
{
    if (@out == null)
    {
        throw new ArgumentException("Output stream may not be null");
    }
    InputStream @in = new ByteArrayInputStream(this.content);
    byte[] tmp = new byte[4096];
    int l;
    while ((l = @in.Read(tmp)) != -1)
    {
        @out.Write(tmp, 0, l);
    }
    @out.Flush();
}
public virtual void TestDataAfterPackFooterSplitHeaderRead()
{
    TestRepository d = new TestRepository<FileRepository>(db);
    byte[] data = Constants.Encode("a");
    RevBlob b = d.Blob(data);
    int objects = 248;
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, objects + 1);
    int offset = 13;
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < offset; i++)
    {
        sb.Append(i);
    }
    offset = sb.ToString().Length;
    // Hand-roll the blob's object header: the object type and the low 4 bits of the
    // inflated size share the first byte; remaining size bits follow in continuation
    // bytes whose high bit is set while more bytes follow.
    int lenByte = (Constants.OBJ_BLOB) << 4 | (offset & unchecked((int)(0x0F)));
    offset >>= 4;
    if (offset > 0)
    {
        lenByte |= 1 << 7;
    }
    pack.Write(lenByte);
    while (offset > 0)
    {
        lenByte = offset & unchecked((int)(0x7F));
        offset >>= 6;
        if (offset > 0)
        {
            lenByte |= 1 << 7;
        }
        pack.Write(lenByte);
    }
    Deflate(pack, Constants.Encode(sb.ToString()));
    for (int i_1 = 0; i_1 < objects; i_1++)
    {
        // The last pack header written falls across the 8192 byte boundary
        // between [8189:8210]
        pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
        b.CopyRawTo(pack);
        Deflate(pack, new byte[] { unchecked((int)(0x1)), unchecked((int)(0x1)), unchecked((int)(0x1)), (byte)('b') });
    }
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    // Append one trailing byte (0x7e) after the pack footer and verify the parser
    // leaves it unread in the stream.
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
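// Illustrative sketch only (not part of the original test class; "WriteObjectHeader" is a
// hypothetical helper name): the bit twiddling above encodes git's pack object header, where
// the object type plus the low 4 bits of the inflated size share the first byte and each
// continuation byte carries further size bits with its high bit set while more bytes follow.
// The canonical encoding shifts by 7 bits per continuation byte; the test's shift of 6 is
// harmless there because the remaining size (1) fits in a single continuation byte.
private static void WriteObjectHeader(TemporaryBuffer.Heap pack, int type, long size)
{
    int b = (type << 4) | (int)(size & 0x0F);
    size >>= 4;
    if (size > 0)
    {
        b |= 1 << 7;
    }
    pack.Write(b);
    while (size > 0)
    {
        b = (int)(size & 0x7F);
        size >>= 7;
        if (size > 0)
        {
            b |= 1 << 7;
        }
        pack.Write(b);
    }
}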
public virtual void TestDataAfterPackFooterSplitObjectRead()
{
    byte[] data = Constants.Encode("0123456789");
    // Build a pack ~17k
    int objects = 900;
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, objects);
    for (int i = 0; i < objects; i++)
    {
        pack.Write((Constants.OBJ_BLOB) << 4 | 10);
        Deflate(pack, data);
    }
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
public virtual void TestDataAfterPackFooterSingleRead()
{
    TestRepository d = new TestRepository<FileRepository>(db);
    RevBlob a = d.Blob("a");
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, 1);
    pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
    a.CopyRawTo(pack);
    Deflate(pack, new byte[] { unchecked((int)(0x1)), unchecked((int)(0x1)), unchecked((int)(0x1)), (byte)('b') });
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
/// <exception cref="System.IO.IOException"></exception>
private void AssertNoCrLfHelper(string expect, string input)
{
    byte[] inbytes = Sharpen.Runtime.GetBytesForString(input);
    byte[] expectBytes = Sharpen.Runtime.GetBytesForString(expect);
    // Exercise the stream with buffer sizes 0 through 4; a buffer size of 0 falls back
    // to the single-byte write path.
    for (int i = 0; i < 5; ++i)
    {
        byte[] buf = new byte[i];
        InputStream @in = new ByteArrayInputStream(inbytes);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        OutputStream @out = new AutoCRLFOutputStream(bos);
        if (i > 0)
        {
            int n;
            while ((n = @in.Read(buf)) >= 0)
            {
                @out.Write(buf, 0, n);
            }
        }
        else
        {
            int c;
            while ((c = @in.Read()) != -1)
            {
                @out.Write(c);
            }
        }
        @out.Flush();
        @in.Close();
        @out.Close();
        byte[] actualBytes = bos.ToByteArray();
        NUnit.Framework.Assert.AreEqual(Encode(expectBytes), Encode(actualBytes), "bufsize=" + i);
    }
}
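// Illustrative sketch only (assumed usage, not copied from the original test class; the test
// name is hypothetical): callers pass the expected post-conversion bytes first and the raw
// input second, so a lone LF is expected to come out of AutoCRLFOutputStream as CRLF while an
// existing CRLF is left untouched.
public virtual void TestLfConvertedToCrLf()
{
    AssertNoCrLfHelper("\r\n", "\n");
    AssertNoCrLfHelper("\r\n", "\r\n");
}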
public virtual void TestOneBlockAndHalf_Copy()
{
    TemporaryBuffer b = new TemporaryBuffer.LocalFile();
    byte[] test = new TestRng(Sharpen.Extensions.GetTestName()).NextBytes(TemporaryBuffer.Block.SZ * 3 / 2);
    try
    {
        ByteArrayInputStream @in = new ByteArrayInputStream(test);
        b.Write(@in.Read());
        b.Copy(@in);
        b.Close();
        NUnit.Framework.Assert.AreEqual(test.Length, b.Length());
        {
            byte[] r = b.ToByteArray();
            NUnit.Framework.Assert.IsNotNull(r);
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
        {
            ByteArrayOutputStream o = new ByteArrayOutputStream();
            b.WriteTo(o, null);
            o.Close();
            byte[] r = o.ToByteArray();
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
    }
    finally
    {
        b.Destroy();
    }
}