/// <summary>
/// Deserializes the accumulated JSON bytes into the document dictionary,
/// then releases the buffer so it can be collected.
/// </summary>
/// <exception cref="InvalidOperationException">if the buffered bytes are not valid JSON.</exception>
public virtual void ParseJsonBuffer()
{
    try
    {
        byte[] json = jsonBuffer.ToByteArray();
        document = Manager.GetObjectMapper().ReadValue<IDictionary>(json);
    }
    catch (IOException e)
    {
        // Surface parse failures as a state error; callers do not handle IOException here.
        throw new InvalidOperationException("Failed to parse json buffer", e);
    }
    jsonBuffer = null;
}
/// <summary>Reads the given stream to exhaustion and returns its contents as a byte array.</summary>
/// <param name="is">The input stream to drain; it is not closed by this method.</param>
/// <returns>All bytes read from the stream (empty array for an empty stream).</returns>
/// <exception cref="System.IO.IOException">If reading from the stream fails.</exception>
public static byte[] Read(InputStream @is)
{
    int initialCapacity = 1024;
    ByteArrayBuffer byteArrayBuffer = new ByteArrayBuffer(initialCapacity);
    byte[] bytes = new byte[512];
    int numRead;
    // BUG FIX: the original advanced an 'offset' into the scratch buffer and
    // requested 'bytes.Length - offset' bytes each iteration; after 512 bytes
    // the requested length reached 0, Read() returned 0 forever, and the
    // '>= 0' loop never terminated for streams longer than the scratch buffer.
    // Always read into the start of the scratch buffer instead, and treat both
    // 0 and -1 (Java-style EOF) as end of stream.
    while ((numRead = @is.Read(bytes, 0, bytes.Length)) > 0)
    {
        byteArrayBuffer.Append(bytes, 0, numRead);
    }
    return byteArrayBuffer.ToByteArray();
}
/// <summary>
/// Feeds the mime body to a MultipartReader in fixed-size chunks and asserts
/// that exactly two parts with the expected bytes and headers are produced.
/// </summary>
private void ReaderOperationWithMime(byte[] mime, string part1ExpectedStr, string part2ExpectedStr, int recommendedChunkSize)
{
    Encoding utf8Encoding = Sharpen.Extensions.GetEncoding("UTF-8");
    // Exercise the parser at every chunk size from 1..mime.Length — unless the
    // caller passed a chunk size different from mime.Length, in which case only
    // that single size is tried (trying every size against a very large mime
    // body would make this test run far too long).
    int size = (recommendedChunkSize != mime.Length) ? recommendedChunkSize : 1;
    for (; size <= recommendedChunkSize; ++size)
    {
        ByteArrayInputStream input = new ByteArrayInputStream(mime);
        MultipartReaderTest.TestMultipartReaderDelegate readerDelegate =
            new MultipartReaderTest.TestMultipartReaderDelegate(this);
        string contentType = "multipart/related; boundary=\"BOUNDARY\"";
        MultipartReader parser = new MultipartReader(contentType, readerDelegate);
        NUnit.Framework.Assert.IsFalse(parser.Finished());
        int position = 0;
        while (true)
        {
            // NOTE(review): NUnit's Assert.IsTrue takes (condition, message); this
            // (message, condition) ordering relies on a Sharpen/JUnit-style shim — confirm.
            NUnit.Framework.Assert.IsTrue("Parser didn't stop at end", position < mime.Length);
            int readLen = Math.Min(size, (mime.Length - position));
            byte[] chunk = new byte[readLen];
            input.Read(chunk, 0, readLen);
            parser.AppendData(chunk);
            position += size;
            if (parser.Finished())
            {
                break;
            }
        }
        NUnit.Framework.Assert.AreEqual(readerDelegate.partList.Count, 2);
        NUnit.Framework.Assert.AreEqual(readerDelegate.headersList.Count, 2);
        byte[] part1Expected = Sharpen.Runtime.GetBytesForString(part1ExpectedStr, utf8Encoding);
        byte[] part2Expected = Sharpen.Runtime.GetBytesForString(part2ExpectedStr, utf8Encoding);
        ByteArrayBuffer part1 = readerDelegate.partList[0];
        ByteArrayBuffer part2 = readerDelegate.partList[1];
        NUnit.Framework.Assert.IsTrue(Arrays.Equals(part1.ToByteArray(), part1Expected));
        NUnit.Framework.Assert.IsTrue(Arrays.Equals(part2.ToByteArray(), part2Expected));
        IDictionary<string, string> headers1 = readerDelegate.headersList[0];
        NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Foo"));
        NUnit.Framework.Assert.AreEqual(headers1.Get("Foo"), "Bar");
        NUnit.Framework.Assert.IsTrue(headers1.ContainsKey("Header"));
        NUnit.Framework.Assert.AreEqual(headers1.Get("Header"), "Val ue");
    }
}
/// <summary>Reads the given stream to exhaustion and returns its contents as a byte array.</summary>
/// <param name="is">The input stream to drain; it is not closed by this method.</param>
/// <returns>All bytes read from the stream (empty array for an empty stream).</returns>
/// <exception cref="System.IO.IOException">If reading from the stream fails.</exception>
public static byte[] Read(InputStream @is)
{
    int initialCapacity = 1024;
    ByteArrayBuffer byteArrayBuffer = new ByteArrayBuffer(initialCapacity);
    byte[] bytes = new byte[512];
    int numRead;
    // BUG FIX: the original advanced an 'offset' into the scratch buffer and
    // requested 'bytes.Length - offset' bytes each iteration; after 512 bytes
    // the requested length reached 0, Read() returned 0 forever, and the
    // '>= 0' loop never terminated for streams longer than the scratch buffer.
    // Always read into the start of the scratch buffer instead, and treat both
    // 0 and -1 (Java-style EOF) as end of stream.
    while ((numRead = @is.Read(bytes, 0, bytes.Length)) > 0)
    {
        byteArrayBuffer.Append(bytes, 0, numRead);
    }
    return byteArrayBuffer.ToByteArray();
}
/// <summary>
/// Finds the first occurrence of <paramref name="pattern"/> in the buffered
/// bytes at or after <paramref name="start"/> using Knuth-Morris-Pratt.
/// </summary>
/// <param name="pattern">The byte sequence to search for.</param>
/// <param name="start">The index at which to begin searching.</param>
/// <returns>
/// A Range covering the match, or Range(-1, 0) when the pattern is not found.
/// </returns>
public virtual Range SearchFor(byte[] pattern, int start)
{
    KMPMatch matcher = new KMPMatch();
    int foundAt = matcher.IndexOf(buffer.ToByteArray(), pattern, start);
    return (foundAt == -1)
        ? new Range(foundAt, 0)
        : new Range(foundAt, pattern.Length);
}