/**
 * Helper method to verify that the files were archived correctly by reading {@code
 * tarArchiveInputStream}.
 */
private void VerifyTarArchive(TarInputStream tarArchiveInputStream)
{
    // Verifies fileA was archived correctly.
    TarEntry headerFileA = tarArchiveInputStream.GetNextEntry();
    Assert.AreEqual("some/path/to/resourceFileA", headerFileA.Name);
    byte[] fileAString = ByteStreams.ToByteArray(tarArchiveInputStream);
    CollectionAssert.AreEqual(fileAContents, fileAString);

    // Verifies fileB was archived correctly.
    TarEntry headerFileB = tarArchiveInputStream.GetNextEntry();
    Assert.AreEqual("crepecake", headerFileB.Name);
    byte[] fileBString = ByteStreams.ToByteArray(tarArchiveInputStream);
    CollectionAssert.AreEqual(fileBContents, fileBString);

    // Verifies directoryA was archived correctly.
    TarEntry headerDirectoryA = tarArchiveInputStream.GetNextEntry();
    Assert.AreEqual("some/path/to/", headerDirectoryA.Name);

    // Verifies the long file was archived correctly.
    TarEntry headerFileALong = tarArchiveInputStream.GetNextEntry();
    Assert.AreEqual(
        "some/really/long/path/that/exceeds/100/characters/abcdefghijklmnopqrstuvwxyz0123456789012345678901234567890",
        headerFileALong.Name);
    byte[] fileALongString = ByteStreams.ToByteArray(tarArchiveInputStream);
    CollectionAssert.AreEqual(fileAContents, fileALongString);

    Assert.IsNull(tarArchiveInputStream.GetNextEntry());
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: BomReader(java.io.InputStream inputStream) throws java.io.IOException
internal BomReader(Stream inputStream) : base(inputStream)
{
    Charset encoding;
    sbyte[] bom = new sbyte[MAX_BOM_SIZE];

    // read the first 3 bytes such that they can be pushed back later
    PushbackInputStream pushbackStream = new PushbackInputStream(inputStream, MAX_BOM_SIZE);
    int bytesRead = ByteStreams.read(pushbackStream, bom, 0, 3);

    // look for a BOM and adapt, defaulting to UTF-8
    if (bytesRead >= 3 && bom[0] == X_EF && bom[1] == X_BB && bom[2] == X_BF)
    {
        encoding = StandardCharsets.UTF_8;
        pushbackStream.unread(bom, 3, (bytesRead - 3));
    }
    else if (bytesRead >= 2 && bom[0] == X_FE && bom[1] == X_FF)
    {
        encoding = StandardCharsets.UTF_16BE;
        pushbackStream.unread(bom, 2, (bytesRead - 2));
    }
    else if (bytesRead >= 2 && bom[0] == X_FF && bom[1] == X_FE)
    {
        encoding = StandardCharsets.UTF_16LE;
        pushbackStream.unread(bom, 2, (bytesRead - 2));
    }
    else
    {
        encoding = StandardCharsets.UTF_8;
        pushbackStream.unread(bom, 0, bytesRead);
    }

    // use the standard reader now that we know the encoding
    this.underlying = new StreamReader(pushbackStream, encoding);
}
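// Illustrative sketch, not part of the original class: the same BOM-sniffing idea expressed with
// only System.IO and System.Text, assuming a seekable stream instead of the pushback wrapper.
// The method name OpenWithBomDetection is hypothetical.
private static StreamReader OpenWithBomDetection(Stream input)
{
    byte[] bom = new byte[3];
    int bytesRead = input.Read(bom, 0, 3);

    Encoding encoding;
    int bomLength;
    if (bytesRead >= 3 && bom[0] == 0xEF && bom[1] == 0xBB && bom[2] == 0xBF)
    {
        encoding = Encoding.UTF8;          // UTF-8 BOM: EF BB BF
        bomLength = 3;
    }
    else if (bytesRead >= 2 && bom[0] == 0xFE && bom[1] == 0xFF)
    {
        encoding = Encoding.BigEndianUnicode;  // UTF-16 BE BOM: FE FF
        bomLength = 2;
    }
    else if (bytesRead >= 2 && bom[0] == 0xFF && bom[1] == 0xFE)
    {
        encoding = Encoding.Unicode;           // UTF-16 LE BOM: FF FE
        bomLength = 2;
    }
    else
    {
        encoding = Encoding.UTF8;              // no BOM found, default to UTF-8
        bomLength = 0;
    }

    // Seek back so only the BOM (if any) is consumed; this is where the original needs
    // PushbackInputStream, because a plain network stream cannot seek.
    input.Position = bomLength;
    return new StreamReader(input, encoding, detectEncodingFromByteOrderMarks: false);
}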
/// <summary>
/// A very hacky method of returning the implementation of this model in its string form so that if Clyde can't read it, we can still see its name. This returns the full class name.<para/>
/// This is only functional for extensions of <see cref="ModelConfig"/>.
/// </summary>
/// <param name="datFile"></param>
/// <param name="isCompressed"></param>
/// <returns></returns>
private static string HackyGetImplementation(FileInfo datFile, bool isCompressed)
{
    // This is mainly intended for Spiral Knights' ProjectXModelConfig.
    // SK Animator Tools relied on being in the game directory (like Spiral Spy did) to detect that type.
    // This allows reading arbitrary class names, even for hypothetical cases where it's a completely different game that uses Clyde.
    FileInputStream fileIn = new FileInputStream(datFile.FullName);
    fileIn.skip(8);
    byte[] buffer;
    if (isCompressed)
    {
        DataInputStream dat = new DataInputStream(new InflaterInputStream(fileIn));
        buffer = ByteStreams.toByteArray(dat);
    }
    else
    {
        buffer = ByteStreams.toByteArray(fileIn);
    }

    string modelAsString = Encoding.ASCII.GetString(buffer);
    int index = modelAsString.IndexOf(IMPLEMENTATION_TAG) + IMPLEMENTATION_TAG.Length;
    index += 4; // Accommodate for the int of space taken after that string. I don't know what purpose it serves (maybe class size?)
    byte typeLength = buffer[index]; // Length of the string storing the name of the type.
    string clip = modelAsString.Substring(index + 1, typeLength);
    return clip;
}
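// A possible BCL-only alternative for the read itself (an assumption, not the original code):
// on .NET 6+, System.IO.Compression.ZLibStream reads the same zlib format that Java's
// InflaterInputStream produces, so the Java-ported stream classes are not strictly required.
// The method name ReadModelBytes and the path parameter are hypothetical.
private static byte[] ReadModelBytes(string path, bool isCompressed)
{
    using (FileStream fileIn = File.OpenRead(path))
    using (MemoryStream buffer = new MemoryStream())
    {
        fileIn.Seek(8, SeekOrigin.Begin); // skip the 8-byte header, as HackyGetImplementation does
        if (isCompressed)
        {
            using (var inflater = new System.IO.Compression.ZLibStream(
                fileIn, System.IO.Compression.CompressionMode.Decompress))
            {
                inflater.CopyTo(buffer);
            }
        }
        else
        {
            fileIn.CopyTo(buffer);
        }
        return buffer.ToArray();
    }
}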
private byte[] GetContents(IBinaryResource resource)
{
    using (Stream file = resource.OpenStream())
    {
        byte[] contents = ByteStreams.ToByteArray(file);
        return contents;
    }
}
/// <summary>
/// Reads up to MAX_HEADER_LENGTH bytes from inputStream. If marking is
/// supported by the stream, it is used to restore the stream's contents after
/// the appropriate amount of data has been read. Read bytes are stored in
/// imageHeaderBytes, which should be capable of storing
/// MAX_HEADER_LENGTH bytes.
/// </summary>
/// <returns>Number of bytes read from inputStream.</returns>
private static int ReadHeaderFromStream(
    Stream inputStream,
    byte[] imageHeaderBytes)
{
    Preconditions.CheckNotNull(inputStream);
    Preconditions.CheckNotNull(imageHeaderBytes);
    Preconditions.CheckArgument(imageHeaderBytes.Length >= MAX_HEADER_LENGTH);
    return ByteStreams.Read(inputStream, imageHeaderBytes, 0, MAX_HEADER_LENGTH);
}
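// Illustrative use of such a header buffer (not from the original file): sniffing two well-known
// magic numbers. The method name GuessImageFormat is an assumption.
private static string GuessImageFormat(byte[] header)
{
    // PNG files begin with the signature 89 50 4E 47 0D 0A 1A 0A.
    if (header.Length >= 4 &&
        header[0] == 0x89 && header[1] == 0x50 && header[2] == 0x4E && header[3] == 0x47)
    {
        return "PNG";
    }

    // JPEG files begin with FF D8 FF.
    if (header.Length >= 3 &&
        header[0] == 0xFF && header[1] == 0xD8 && header[2] == 0xFF)
    {
        return "JPEG";
    }

    return "UNKNOWN";
}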
/// <summary>
/// Creates an instance from an input stream.
/// <para>
/// This method uses the supplier to open the input stream, extract the bytes and close the stream.
/// It is intended that invoking the supplier opens the stream.
/// It is not intended that an already open stream is supplied.
/// </para>
/// </summary>
/// <param name="inputStreamSupplier"> the supplier of the input stream </param>
/// <returns> the byte source </returns>
/// <exception cref="UncheckedIOException"> if an IO error occurs </exception>
public static ArrayByteSource from(CheckedSupplier<Stream> inputStreamSupplier)
{
    return Unchecked.wrap(() =>
    {
        using (Stream @in = inputStreamSupplier())
        {
            sbyte[] bytes = Unchecked.wrap(() => ByteStreams.toByteArray(@in));
            return new ArrayByteSource(bytes);
        }
    });
}
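// Hedged usage sketch: the lambda opens the stream only when 'from' invokes it, so the method
// both opens and closes the stream. "data.bin" is a placeholder path, not from the original code,
// and this assumes a lambda converts to CheckedSupplier<Stream> in this port.
// ArrayByteSource source = ArrayByteSource.from(() => File.OpenRead("data.bin"));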
/**
 * Decompresses the file to obtain the diff ID.
 *
 * @param compressedFile the file containing the compressed contents
 * @return the digest of the decompressed file
 * @throws IOException if an I/O exception occurs
 */
private static async Task<DescriptorDigest> GetDiffIdByDecompressingFileAsync(SystemPath compressedFile)
{
    using (CountingDigestOutputStream diffIdCaptureOutputStream =
        new CountingDigestOutputStream(Stream.Null))
    {
        using (GZipStream decompressorStream =
            new GZipStream(Files.NewInputStream(compressedFile), CompressionMode.Decompress))
        {
            await ByteStreams.CopyAsync(decompressorStream, diffIdCaptureOutputStream).ConfigureAwait(false);
        }

        return diffIdCaptureOutputStream.ComputeDigest().GetDigest();
    }
}
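// Illustrative BCL-only equivalent (an assumption, not the project's helper types): a layer's
// diff ID is the SHA-256 of the decompressed bytes, so it can be computed by streaming the
// gunzipped data through a hashing sink. Requires .NET 5+ for Convert.ToHexString; the method
// name and path parameter are hypothetical.
private static async Task<string> ComputeDiffIdAsync(string compressedFilePath)
{
    using (var sha256 = System.Security.Cryptography.SHA256.Create())
    using (FileStream fileStream = File.OpenRead(compressedFilePath))
    using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress))
    using (var hashingStream = new System.Security.Cryptography.CryptoStream(
        Stream.Null, sha256, System.Security.Cryptography.CryptoStreamMode.Write))
    {
        // Decompressed bytes are discarded (Stream.Null) but hashed on the way through.
        await gzipStream.CopyToAsync(hashingStream).ConfigureAwait(false);
        hashingStream.FlushFinalBlock();
        return "sha256:" + Convert.ToHexString(sha256.Hash).ToLowerInvariant();
    }
}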
private void CheckOutput(string[] args, string pattern, TextWriter @out, Type clazz)
{
    ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
    try
    {
        PipedOutputStream pipeOut = new PipedOutputStream();
        PipedInputStream pipeIn = new PipedInputStream(pipeOut, PipeBufferSize);
        if (@out == System.Console.Out)
        {
            Runtime.SetOut(new TextWriter(pipeOut));
        }
        else if (@out == System.Console.Error)
        {
            Runtime.SetErr(new TextWriter(pipeOut));
        }

        if (clazz == typeof(DelegationTokenFetcher))
        {
            ExpectDelegationTokenFetcherExit(args);
        }
        else if (clazz == typeof(JMXGet))
        {
            ExpectJMXGetExit(args);
        }
        else if (clazz == typeof(DFSAdmin))
        {
            ExpectDfsAdminPrint(args);
        }

        pipeOut.Close();
        ByteStreams.Copy(pipeIn, outBytes);
        pipeIn.Close();
        NUnit.Framework.Assert.IsTrue(
            Sharpen.Runtime.GetStringForBytes(outBytes.ToByteArray()).Contains(pattern));
    }
    catch (Exception ex)
    {
        NUnit.Framework.Assert.Fail("checkOutput error " + ex);
    }
}
public void Initialize()
{
    // Initializes the IFileCache
    _fileCacheFactory = new DiskStorageCacheFactory(new DynamicDefaultDiskStorageFactory());
    _fileCache = _fileCacheFactory.Get(DiskCacheConfig.NewBuilder().Build());

    // Initializes the IPooledByteBufferFactory and PooledByteStreams
    _poolFactory = new PoolFactory(PoolConfig.NewBuilder().Build());
    _byteBufferFactory = _poolFactory.PooledByteBufferFactory;
    _pooledByteStreams = _poolFactory.PooledByteStreams;

    // Initializes the IPooledByteBuffer from an image
    var file = StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///Assets/SplashScreen.scale-200.png")).GetAwaiter().GetResult();

    using (var stream = file.OpenReadAsync().GetAwaiter().GetResult())
    {
        _pooledByteBuffer = _byteBufferFactory.NewByteBuffer(
            ByteStreams.ToByteArray(stream.AsStream()));
    }

    _closeableReference = CloseableReference<IPooledByteBuffer>.of(_pooledByteBuffer);
    _encodedImage = new EncodedImage(_closeableReference);
    _stagingArea = StagingArea.Instance;
    _imageCacheStatsTracker = NoOpImageCacheStatsTracker.Instance;

    // Initializes the cache keys
    IList<ICacheKey> keys = new List<ICacheKey>();
    keys.Add(new SimpleCacheKey("http://test.uri"));
    keys.Add(new SimpleCacheKey("http://tyrone.uri"));
    keys.Add(new SimpleCacheKey("http://ian.uri"));
    _cacheKey = new MultiCacheKey(keys);

    // Initializes the executors
    _isCancelled = new AtomicBoolean(false);
    _readPriorityExecutor = Executors.NewFixedThreadPool(1);
    _writePriorityExecutor = Executors.NewFixedThreadPool(1);

    // Initializes the disk cache
    _bufferedDiskCache = new BufferedDiskCache(
        _fileCache,
        _byteBufferFactory,
        _pooledByteStreams,
        _readPriorityExecutor,
        _writePriorityExecutor,
        _imageCacheStatsTracker);
}
public async Task TestGetAsync()
{
    using (TestWebServer server = new TestWebServer(false))
    using (Connection connection =
        Connection.GetConnectionFactory()(new Uri("http://" + server.GetAddressAndPort())))
    using (HttpRequestMessage request = new HttpRequestMessage())
    using (HttpResponseMessage response = await connection.SendAsync(request).ConfigureAwait(false))
    {
        Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
        using (Stream stream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
        {
            CollectionAssert.AreEqual(
                Encoding.UTF8.GetBytes("Hello World!"),
                ByteStreams.ToByteArray(stream));
        }
    }
}
public async Task TestParseMetaData_PNG()
{
    var file = await StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///Assets/ImagePipeline/Images/image.png"));

    using (var stream = await file.OpenReadAsync())
    {
        IPooledByteBuffer buf = new TrivialPooledByteBuffer(
            ByteStreams.ToByteArray(stream.AsStream()));
        EncodedImage encodedImage = new EncodedImage(
            CloseableReference<IPooledByteBuffer>.of(buf, _releaser));
        await encodedImage.ParseMetaDataAsync();
        Assert.AreEqual(ImageFormat.PNG, encodedImage.Format);
        Assert.AreEqual(800, encodedImage.Width);
        Assert.AreEqual(600, encodedImage.Height);
    }
}
public async Task TestToBlob_multiByteAsync()
{
    testTarStreamBuilder.AddByteEntry(Encoding.UTF8.GetBytes("日本語"), "test");
    testTarStreamBuilder.AddByteEntry(Encoding.UTF8.GetBytes("asdf"), "crepecake");
    testTarStreamBuilder.AddBlobEntry(
        Blobs.From("fib"), Encoding.UTF8.GetBytes("fib").Length, "fib");

    // Writes the BLOB and captures the output.
    MemoryStream tarByteOutputStream = new MemoryStream();
    using (Stream compressorStream = new GZipStream(tarByteOutputStream, CompressionMode.Compress))
    {
        await testTarStreamBuilder.WriteAsTarArchiveToAsync(compressorStream).ConfigureAwait(false);
    }

    // Rearrange the output into input for verification.
    MemoryStream byteArrayInputStream = new MemoryStream(tarByteOutputStream.ToArray());
    Stream tarByteInputStream = new GZipStream(byteArrayInputStream, CompressionMode.Decompress);
    using (TarInputStream tarArchiveInputStream = new TarInputStream(tarByteInputStream))
    {
        // Verify multi-byte characters are written/read correctly
        TarEntry headerFile = tarArchiveInputStream.GetNextEntry();
        Assert.AreEqual("test", headerFile.Name);
        Assert.AreEqual(
            "日本語",
            Encoding.UTF8.GetString(ByteStreams.ToByteArray(tarArchiveInputStream)));

        headerFile = tarArchiveInputStream.GetNextEntry();
        Assert.AreEqual("crepecake", headerFile.Name);
        Assert.AreEqual(
            "asdf",
            Encoding.UTF8.GetString(ByteStreams.ToByteArray(tarArchiveInputStream)));

        headerFile = tarArchiveInputStream.GetNextEntry();
        Assert.AreEqual("fib", headerFile.Name);
        Assert.AreEqual(
            "fib",
            Encoding.UTF8.GetString(ByteStreams.ToByteArray(tarArchiveInputStream)));

        Assert.IsNull(tarArchiveInputStream.GetNextEntry());
    }
}
public void Write(Stream outputStream)
{
    ByteStreams.Copy(_inputStream, outputStream);
}
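// For reference (an observation, not part of the original type): in plain .NET the same copy can
// be written with the built-in Stream.CopyTo, so a ByteStreams-free sketch of this method would be:
//
// public void Write(Stream outputStream)
// {
//     _inputStream.CopyTo(outputStream);
// }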