public void CopyNullIBuffer()
{
    Assert.Throws<ArgumentNullException>(
        () => StreamUtil.Copy(new MemoryStream(), new MemoryStream(), null as IBuffer));
}
/// <summary>
/// Extracts a Wwise OGG and converts it to a "regular" OGG file.
/// </summary>
/// <param name="reader">The stream to read from.</param>
/// <param name="offset">The offset of the data to extract.</param>
/// <param name="size">The size of the data to extract.</param>
/// <param name="outPath">The path of the file to save to.</param>
public static void ExtractWwiseToOGG(EndianReader reader, int offset, int size, string outPath)
{
    // Just extract the RIFX to a temporary file
    string tempFile = Path.GetTempFileName();
    try
    {
        using (EndianWriter output = new EndianWriter(File.OpenWrite(tempFile), EndianFormat.Big))
        {
            reader.SeekTo(offset);
            StreamUtil.Copy(reader, output, size);
        }

        // Run ww2ogg to convert the resulting RIFX to an OGG
        RunProgramSilently("Helpers/ww2ogg.exe",
            string.Format("\"{0}\" -o \"{1}\" --pcb Helpers/packed_codebooks_aoTuV_603.bin", tempFile, outPath),
            Directory.GetCurrentDirectory());

        // Run revorb to fix up the OGG
        RunProgramSilently("Helpers/revorb.exe", "\"" + outPath + "\"", Directory.GetCurrentDirectory());
    }
    finally
    {
        // Delete the old RIFX file
        if (File.Exists(tempFile))
        {
            File.Delete(tempFile);
        }
    }
}
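// A minimal usage sketch for ExtractWwiseToOGG. The file name, offset, and size
// are hypothetical (they would normally come from the cache's sound metadata),
// and the EndianReader constructor is assumed to mirror the EndianWriter used above.
public static void ExtractWwiseToOGGExample()
{
    using (EndianReader reader = new EndianReader(File.OpenRead("sounds.map"), EndianFormat.Big))
    {
        ExtractWwiseToOGG(reader, 0x1000, 0x8000, "extracted.ogg");
    }
}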
public void CopyZeroBufferSize()
{
    Assert.Throws<ArgumentOutOfRangeException>(
        () => StreamUtil.Copy(new MemoryStream(), new MemoryStream(), 0));
}
public void CopyZeroLengthBuffer()
{
    Assert.Throws<ArgumentException>(
        () => StreamUtil.Copy(new MemoryStream(), new MemoryStream(), new byte[0]));
}
public void NullResource(Stream resourceStream, int resourceIndex)
{
    if (resourceIndex < 0 || resourceIndex >= _resources.Count)
    {
        throw new ArgumentOutOfRangeException(nameof(resourceIndex));
    }

    var resource = _resources[resourceIndex];
    if (IsResourceShared(resourceIndex))
    {
        return;
    }

    if (resource.Offset != uint.MaxValue && resource.Size > 0)
    {
        // Shift everything past the end of the resource's data down over it,
        // removing the data from the stream
        var sourceOffset = resource.Offset + resource.Size;
        StreamUtil.Copy(resourceStream, sourceOffset, resource.Offset, resourceStream.Length - sourceOffset);

        // Fix up the offsets of any resources stored after the removed data
        for (var i = 0; i < _resources.Count; i++)
        {
            if (_resources[i].Offset > resource.Offset)
            {
                _resources[i].Offset = (uint)(_resources[i].Offset - resource.Size);
            }
        }
    }

    resource.Offset = uint.MaxValue;
    resource.Size = 0;
    UpdateResourceTable(resourceStream);
}
public void CopyNullOutput()
{
    Assert.Throws<ArgumentNullException>(
        () => StreamUtil.Copy(new MemoryStream(), null));
}
private void WriteMapsSection(EndianWriter writer)
{
    uint sectionOffset = (uint)writer.BaseStream.Position;
    GenericSectionEntry mapEntry = new GenericSectionEntry(MapFileStreams.Count, 0x8);
    mapEntry.Write(writer);

    // Make room for the table
    writer.Write(new byte[0x10 * mapEntry.Count]);

    for (int i = 0; i < MapFileStreams.Count; i++)
    {
        var mapFileStream = MapFileStreams[i];
        uint offset = (uint)writer.BaseStream.Position;
        int size = (int)mapFileStream.Length;

        mapFileStream.Position = 0;
        StreamUtil.Copy(mapFileStream, writer.BaseStream, (int)mapFileStream.Length);
        StreamUtil.Align(writer.BaseStream, 4);

        // Seek to the table and update the size and offset
        writer.BaseStream.Seek(mapEntry.TableOffset + 0x10 * i + sectionOffset, SeekOrigin.Begin);
        var tableEntry = new CacheMapTableEntry(size, offset - sectionOffset, MapToCacheMapping[i], MapIds[i]);
        tableEntry.Write(writer);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }

    writer.BaseStream.Seek(0, SeekOrigin.End);
}
public override Stream OpenCacheRead()
{
    if (Version == CacheVersion.HaloXbox)
    {
        var resultStream = new MemoryStream();

        using (var cacheStream = CacheFile.OpenRead())
        using (var compressedStream = new MemoryStream())
        using (var uncompressedStream = new MemoryStream())
        {
            // Skip the 0x800-byte cache header and the 2-byte zlib header so
            // DeflateStream can consume the raw deflate data directly
            var compressedSize = cacheStream.Length - 0x800 - 0x2;

            // Copy the uncompressed cache header into the result as-is
            StreamUtil.Copy(cacheStream, resultStream, 0x800);

            cacheStream.Position = 0x800 + 0x2;
            StreamUtil.Copy(cacheStream, compressedStream, compressedSize);
            compressedStream.Position = 0;

            using (var decompressionStream = new DeflateStream(compressedStream, CompressionMode.Decompress))
            {
                decompressionStream.CopyTo(uncompressedStream);
                uncompressedStream.Position = 0;
                StreamUtil.Copy(uncompressedStream, resultStream, uncompressedStream.Length);
            }
        }

        return resultStream;
    }
    else
    {
        return CacheFile.OpenRead();
    }
}
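// Why skipping two bytes works: a zlib stream is a 2-byte header (CMF/FLG,
// commonly 0x78 0x9C) followed by raw deflate data, which DeflateStream can
// decompress on its own. A defensive sketch (a hypothetical helper, not part
// of the cache code above) would validate the header before skipping it:
private static void ValidateZlibHeader(Stream stream, long headerOffset)
{
    stream.Position = headerOffset;
    int cmf = stream.ReadByte();
    int flg = stream.ReadByte();

    // CMF's low nibble must be 8 (deflate) and (CMF << 8 | FLG) must be a multiple of 31
    if (cmf == -1 || flg == -1 || (cmf & 0x0F) != 8 || ((cmf << 8 | flg) % 31) != 0)
    {
        throw new InvalidDataException("Expected a zlib stream at 0x" + headerOffset.ToString("X"));
    }
}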
private void WriteTagsSection(EndianWriter writer, DataSerializationContext context, TagSerializer serializer)
{
    uint sectionOffset = (uint)writer.BaseStream.Position;
    GenericSectionEntry tagCachesEntry = new GenericSectionEntry(TagCaches.Count, 0x8);
    tagCachesEntry.Write(writer);

    // Make room for the table
    writer.Write(new byte[0x28 * TagCaches.Count]);

    for (int i = 0; i < TagCaches.Count; i++)
    {
        uint offset = (uint)writer.BaseStream.Position;

        TagCachesStreams[i].Position = 0;
        StreamUtil.Copy(TagCachesStreams[i], writer.BaseStream, (int)TagCachesStreams[i].Length);
        StreamUtil.Align(writer.BaseStream, 4);

        uint size = (uint)(writer.BaseStream.Position - offset);

        // Seek to the table and update the size and offset
        writer.BaseStream.Seek(tagCachesEntry.TableOffset + 0x28 * i + sectionOffset, SeekOrigin.Begin);
        var tableEntry = new CacheTableEntry(size, offset - sectionOffset, CacheNames[i]);
        serializer.Serialize(context, tableEntry);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }
}
/// <summary>
/// Creates a file from a stream.
/// </summary>
/// <param name="filePath">The path of the file to create.</param>
/// <param name="inputStream">The stream to copy the file contents from.</param>
public void CreateFromStream(string filePath, Stream inputStream)
{
    if (File.Exists(filePath))
    {
        throw new ArgumentException(string.Format("The file {0} already exists", filePath), nameof(filePath));
    }

    string directoryName = Path.GetDirectoryName(filePath);
    if (!Directory.Exists(directoryName) && !this._createdDirectories.Contains(directoryName))
    {
        throw new DirectoryNotFoundException(string.Format("The physical upload directory {0} for the file does not exist", directoryName));
    }

    // Check if the target directory is writable (only for directories that already existed)
    if (!this._createdDirectories.Contains(directoryName) && !IOUtil.CheckIfDirectoryIsWritable(directoryName))
    {
        throw new UnauthorizedAccessException(string.Format("Unable to copy files and directories to {0}. Access denied.", directoryName));
    }

    using (FileStream fs = new FileStream(filePath, FileMode.Create, FileAccess.Write))
    {
        StreamUtil.Copy(inputStream, fs);
        fs.Flush();
    }

    this._createdFiles.Add(filePath);
}
public override void SetAuth(HttpWebRequest request, Stream body)
{
    byte[] secretKey = Encoding.ASCII.GetBytes(Config.SECRET_KEY);
    using (HMACSHA1 hmac = new HMACSHA1(secretKey))
    {
        // The signed data is the path and query string, a newline, and (for
        // form posts) the request body
        string pathAndQuery = request.Address.PathAndQuery;
        byte[] pathAndQueryBytes = Encoding.ASCII.GetBytes(pathAndQuery);

        using (MemoryStream buffer = new MemoryStream())
        {
            buffer.Write(pathAndQueryBytes, 0, pathAndQueryBytes.Length);
            buffer.WriteByte((byte)'\n');

            if (request.ContentType == "application/x-www-form-urlencoded" && body != null)
            {
                if (!body.CanSeek)
                {
                    throw new InvalidOperationException("The request body stream must be seekable");
                }

                StreamUtil.Copy(body, buffer);
                body.Seek(0, SeekOrigin.Begin);
            }

            byte[] digest = hmac.ComputeHash(buffer.ToArray());
            string digestBase64 = Base64UrlSafe.Encode(digest);
            string authHead = "QBox " + Config.ACCESS_KEY + ":" + digestBase64;
            request.Headers.Add("Authorization", authHead);
        }
    }
}
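// A usage sketch in the same class (the endpoint URL and form payload are hypothetical):
public void SetAuthExample()
{
    var request = (HttpWebRequest)WebRequest.Create("https://example.com/stat");
    request.Method = "POST";
    request.ContentType = "application/x-www-form-urlencoded";

    using (var body = new MemoryStream(Encoding.ASCII.GetBytes("bucket=test")))
    {
        SetAuth(request, body);
        // The "Authorization: QBox ..." header is now set; the body can then be
        // written to the request stream as usual
    }
}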
public void ExtractDds(TagDeserializer deserializer, Bitmap bitmap, int imageIndex, Stream outStream)
{
    // TODO: Make sure 3D textures and cube maps work

    // Deserialize the resource definition and verify it
    var resource = bitmap.Resources[imageIndex];
    var resourceContext = new ResourceSerializationContext(resource.Resource);
    var definition = deserializer.Deserialize<BitmapTextureResourceDefinition>(resourceContext);

    if (definition.Texture == null || definition.Texture.Definition == null)
    {
        throw new ArgumentException("Invalid bitmap definition");
    }

    var dataReference = definition.Texture.Definition.Data;
    if (dataReference.Address.Type != ResourceAddressType.Resource)
    {
        throw new InvalidOperationException("Invalid resource data address");
    }

    var header = CreateDdsHeader(definition);
    var resourceDataStream = new MemoryStream();
    _resourceManager.Extract(resource.Resource, resourceDataStream);

    header.WriteTo(outStream);
    resourceDataStream.Position = dataReference.Address.Offset;
    StreamUtil.Copy(resourceDataStream, outStream, dataReference.Size);
}
private static void Inflate(Stream inStream, long compressedSize, long uncompressedSize, Stream outStream)
{
    // Stream.Read may return fewer bytes than requested, so loop until the
    // whole compressed block has been read
    var compressedData = new byte[compressedSize];
    int bytesRead = 0;
    while (bytesRead < compressedSize)
    {
        int count = inStream.Read(compressedData, bytesRead, (int)(compressedSize - bytesRead));
        if (count == 0)
        {
            throw new EndOfStreamException("Unexpected end of compressed data");
        }
        bytesRead += count;
    }

    using (var inflate = new ZlibStream(new MemoryStream(compressedData), CompressionMode.Decompress))
        StreamUtil.Copy(inflate, outStream, uncompressedSize);
}
public void AddToArchive(Stream inputStream, string entryName)
{
    var entry = zipArchive.CreateEntry(entryName, CompressionLevel.Fastest);
    using (var entryStream = entry.Open())
    {
        StreamUtil.Copy(inputStream, entryStream);
    }
}
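// A usage sketch in the same class (the file name and entry name are hypothetical;
// ZipArchive and CompressionLevel come from System.IO.Compression):
public void AddToArchiveExample()
{
    using (var fileStream = File.OpenRead("report.bin"))
    {
        AddToArchive(fileStream, "data/report.bin");
    }
}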
private void WriteStringIdsSection(EndianWriter writer)
{
    var stringIdStream = new MemoryStream();
    StringTable.Save(stringIdStream);
    stringIdStream.Position = 0;
    StreamUtil.Copy(stringIdStream, writer.BaseStream, (int)stringIdStream.Length);
    StreamUtil.Align(writer.BaseStream, 4);
}
/// <summary>
/// Extracts the raw contents of a sound to a file.
/// </summary>
/// <param name="reader">The stream to read from.</param>
/// <param name="offset">The offset of the data to extract.</param>
/// <param name="size">The size of the data to extract.</param>
/// <param name="outPath">The path of the file to save to.</param>
public static void ExtractRaw(EndianReader reader, int offset, int size, string outPath)
{
    using (EndianWriter output = new EndianWriter(File.OpenWrite(outPath), EndianFormat.Big))
    {
        // Just copy the data over to the output stream
        reader.SeekTo(offset);
        StreamUtil.Copy(reader, output, size);
    }
}
/// <summary>
/// Reallocates a block of memory in the cache file's meta area.
/// The contents of the old block will be copied to the new block and then the old block will be zeroed.
/// </summary>
/// <param name="address">The starting address of the data to reallocate. If this is 0, a new block will be allocated.</param>
/// <param name="oldSize">The old size of the data to reallocate. If this is 0, a new block will be allocated.</param>
/// <param name="newSize">The requested size of the newly-allocated data block. If this is 0, the block will be freed and 0 will be returned.</param>
/// <param name="align">The power of two to align the block to.</param>
/// <param name="stream">The stream to write cache file changes to.</param>
/// <returns>The memory address of the new block, or 0 if the block was freed.</returns>
public long Reallocate(long address, int oldSize, int newSize, uint align, IStream stream)
{
    if (newSize == oldSize)
    {
        return address;
    }

    // If the new size is 0, free the block
    if (newSize == 0)
    {
        Free(address, oldSize);
        return 0;
    }

    // If the old size or address is 0, allocate a new block
    if (address == 0 || oldSize == 0)
    {
        return Allocate(newSize, align, stream);
    }

    // If the block is being made smaller, just free and zero the data at the end
    if (newSize < oldSize)
    {
        Free(address + (uint)newSize, oldSize - newSize);
        long offset = _cacheFile.MetaArea.PointerToOffset(address);
        stream.SeekTo(offset + newSize);
        StreamUtil.Fill(stream, 0, oldSize - newSize);
        return address;
    }

    // If the block is being made larger, check if there's free space immediately
    // after the block that can be used to avoid a copy
    FreeArea area;
    if (newSize > oldSize && _freeAreasByAddr.TryGetValue(address + (uint)oldSize, out area) && area.Size >= newSize - oldSize)
    {
        ChangeStartAddress(area, area.Address + (uint)(newSize - oldSize));
        return address;
    }

    // Free the block and allocate a new one
    Free(address, oldSize);
    long newAddress = Allocate(newSize, align, stream);

    // If the addresses differ, then copy the data across and zero the old data
    if (newAddress != address)
    {
        long oldOffset = _cacheFile.MetaArea.PointerToOffset(address);
        long newOffset = _cacheFile.MetaArea.PointerToOffset(newAddress);
        StreamUtil.Copy(stream, oldOffset, newOffset, oldSize);
        stream.SeekTo(oldOffset);
        StreamUtil.Fill(stream, 0, oldSize);
    }

    return newAddress;
}
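// A usage sketch in the same class (the sizes and alignment are hypothetical):
public long GrowBlockExample(long address, IStream stream)
{
    long newAddress = Reallocate(address, 0x40, 0x80, 0x10, stream);
    // If newAddress != address, anything that pointed into the old block must
    // be updated to point at the new address
    return newAddress;
}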
private void AddFontPackage()
{
    var location = Path.Combine(CacheContext.Directory.FullName, "fonts", "font_package.bin");
    var file = new FileInfo(location);

    using (var stream = file.OpenRead())
    {
        ModPackage.FontPackage = new MemoryStream();
        StreamUtil.Copy(stream, ModPackage.FontPackage, stream.Length);
    }
}
public void CopyWithBufferSize()
{
    StreamStub input = new StreamStub();
    input.AddReadData(new byte[] { 1, 2, 3, 4, 5 });
    input.AddReadData(new byte[] { 6, 7, 8, 9, 10 });
    MemoryStream output = new MemoryStream();

    StreamUtil.Copy(input, output, 6);

    Assert.AreEqual(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, output.ToArray());
    Assert.AreEqual(6, input.LastReadSize);
}
public List<SyncItem> LoadItems()
{
    var items = new List<SyncItem>();

    using (new SecurityDisabler())
    using (new ProxyDisabler())
    {
        var reader = new ZipReader(PackagePath, Encoding.UTF8);
        ZipEntry entry = reader.GetEntry("package.zip");

        using (var stream = new MemoryStream())
        {
            // Extract the inner package.zip into memory (0x4000 is the copy buffer size)
            StreamUtil.Copy(entry.GetStream(), stream, 0x4000);
            reader = new ZipReader(stream);

            foreach (ZipEntry zipEntry in reader.Entries)
            {
                var entryData = new ZipEntryData(zipEntry);
                try
                {
                    if (!entryData.Key.EndsWith("/xml"))
                    {
                        continue;
                    }

                    string xml = new StreamReader(entryData.GetStream().Stream, Encoding.UTF8).ReadToEnd();
                    if (string.IsNullOrWhiteSpace(xml))
                    {
                        continue;
                    }

                    XmlDocument document = XmlUtil.LoadXml(xml);
                    if (document == null)
                    {
                        continue;
                    }

                    SyncItem loadedItem = LoadItem(document);
                    if (loadedItem != null)
                    {
                        items.Add(loadedItem);
                    }
                }
                catch (Exception)
                {
                    Console.WriteLine("Unable to load xml from file {0}", entryData.Key);
                }
            }
        }
    }

    Console.WriteLine("Read {0} items from package {1}", items.Count, PackagePath);
    return items;
}
public void ReadBlock(long entryOffset, ModuleDataBlock block, Stream outStream)
{
    _stream.Position = _dataAreaOffset + entryOffset + block.CompressedOffset;
    if (block.IsCompressed)
    {
        Inflate(_stream, block.CompressedSize, block.UncompressedSize, outStream);
    }
    else
    {
        StreamUtil.Copy(_stream, outStream, block.UncompressedSize);
    }
}
public PackageManifest GetManifest(string filename)
{
    var manifest = new PackageManifest();

    ZipReader reader;
    try
    {
        reader = new ZipReader(filename, Encoding.UTF8);
    }
    catch (Exception exception)
    {
        throw new InvalidOperationException("Failed to open package", exception);
    }

    // If the package contains an inner package.zip, extract it to a temporary
    // file and read the entries from there instead
    string tempFileName = Path.GetTempFileName();
    ZipEntry entry = reader.GetEntry("package.zip");
    if (entry != null)
    {
        using (FileStream stream = File.Create(tempFileName))
        {
            StreamUtil.Copy(entry.GetStream(), stream, 0x4000);
        }

        reader.Dispose();
        reader = new ZipReader(tempFileName, Encoding.UTF8);
    }

    try
    {
        foreach (ZipEntry entry2 in reader.Entries)
        {
            var data = new ZipEntryData(entry2);
            var packageManifestEntry = ZipEntryDataParser.GetManifestEntry(data.Key);
            if (!(packageManifestEntry is PackageManifestEntryNotFound))
            {
                manifest.Entries.Add(packageManifestEntry);
            }
        }
    }
    finally
    {
        reader.Dispose();
        File.Delete(tempFileName);
    }

    return manifest;
}
private void ExtractResourceData(BitmapTextureInteropResource definition, Bitmap.BitmapResource resource, Stream outStream)
{
    var dataReference = definition.Texture.Definition.Data;
    if (dataReference.Address.Type != CacheAddressType.Resource)
    {
        throw new InvalidOperationException("Invalid resource data address");
    }

    var resourceDataStream = new MemoryStream();
    CacheContext.ExtractResource(resource.Resource, resourceDataStream);

    resourceDataStream.Position = dataReference.Address.Offset;
    StreamUtil.Copy(resourceDataStream, outStream, dataReference.Size);
}
private void ExtractBitmap(CacheFile.IndexItem blamTag, string directory)
{
    Console.WriteLine($"{blamTag.Name}");

    //
    // Load the Blam tag definition
    //

    var blamContext = new CacheSerializationContext(ref BlamCache, blamTag);
    var bitmap = BlamCache.Deserializer.Deserialize<Bitmap>(blamContext);

    var ddsOutDir = directory;
    string bitmap_name = blamTag.Name.Replace('\\', '_');

    if (bitmap.Images.Count > 1)
    {
        ddsOutDir = Path.Combine(directory, bitmap_name);
        Directory.CreateDirectory(ddsOutDir);
    }

    for (var i = 0; i < bitmap.Images.Count; i++)
    {
        var outPath = Path.Combine(ddsOutDir, ((bitmap.Images.Count > 1) ? i.ToString() : bitmap_name) + ".dds");
        var image = bitmap.Images[i];

        //
        // Get bitmap data and write file
        //

        BaseBitmap baseBitmap = ExtractBitmapData(bitmap, i);

        // The bitmap is not valid (not a failure to convert; the tag data is
        // invalid or there is no data to convert)
        if (baseBitmap == null)
        {
            return;
        }

        var header = new DDSHeader(baseBitmap);

        using (var outStream = File.Open(outPath, FileMode.Create, FileAccess.Write))
        {
            header.Write(new EndianWriter(outStream));
            var dataStream = new MemoryStream(baseBitmap.Data);
            StreamUtil.Copy(dataStream, outStream, baseBitmap.Data.Length);
        }
    }
}
/// <summary>
/// Resizes a block of data in the file.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="tag">The tag that the block belongs to, if any.</param>
/// <param name="startOffset">The offset where the block to resize begins.</param>
/// <param name="oldSize">The current size of the block to resize.</param>
/// <param name="newSize">The new size of the block.</param>
/// <exception cref="System.ArgumentException">Cannot resize a block to a negative size</exception>
private void ResizeBlock(Stream stream, TagInstance tag, long startOffset, long oldSize, long newSize)
{
    if (newSize < 0)
    {
        throw new ArgumentException("Cannot resize a block to a negative size");
    }
    if (oldSize == newSize)
    {
        return;
    }

    // Shift everything after the old block by the size delta, then fix up any
    // tag offsets that pointed past it
    var oldEndOffset = startOffset + oldSize;
    var sizeDelta = newSize - oldSize;
    StreamUtil.Copy(stream, oldEndOffset, oldEndOffset + sizeDelta, stream.Length - oldEndOffset);
    FixTagOffsets(oldEndOffset, sizeDelta, tag);
}
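// The four-argument StreamUtil.Copy overload used above is assumed to move
// `length` bytes within a single stream from `originalPos` to `targetPos`.
// A naive sketch of that contract (hypothetical; a real implementation must
// buffer and pick a copy direction so overlapping ranges are handled safely):
private static void CopyWithin(Stream stream, long originalPos, long targetPos, long length)
{
    var data = new byte[length];
    stream.Position = originalPos;

    // Read the whole region (a robust version would loop until `length` bytes are read)
    int bytesRead = stream.Read(data, 0, (int)length);

    stream.Position = targetPos;
    stream.Write(data, 0, bytesRead);
}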
private uint ResizeResource(Stream resourceStream, int resourceIndex, uint minSize)
{
    var resource = Resources[resourceIndex];
    var roundedSize = ((minSize + 0xF) & ~0xFU); // Round up to a multiple of 0x10
    var sizeDelta = (int)(roundedSize - resource.ChunkSize);
    var endOffset = resource.Offset + resource.ChunkSize;
    StreamUtil.Copy(resourceStream, endOffset, endOffset + sizeDelta, resourceStream.Length - endOffset);
    resource.ChunkSize = roundedSize;

    // Update resource offsets
    for (var i = resourceIndex + 1; i < Resources.Count; i++)
    {
        Resources[i].Offset = (uint)(Resources[i].Offset + sizeDelta);
    }

    UpdateResourceTable(resourceStream);
    return roundedSize;
}
private uint ResizeResource(BinaryWriter writer, int resourceIndex, uint minSize)
{
    var resource = _resources[resourceIndex];
    var roundedSize = ((minSize + 0xF) & ~0xFU); // Round up to a multiple of 0x10
    var sizeDelta = (long)(roundedSize - resource.Size);
    var endOffset = resource.Offset + resource.Size;
    StreamUtil.Copy(writer.BaseStream, endOffset, endOffset + sizeDelta, writer.BaseStream.Length - endOffset);
    resource.Size = roundedSize;

    // Update resource offsets
    for (var i = resourceIndex + 1; i < _resources.Count; i++)
    {
        _resources[i].Offset = (uint)(_resources[i].Offset + sizeDelta);
    }

    UpdateResourceTable(writer);
    return roundedSize;
}
public AntidotePackageDefinition GetSources()
{
    var itemsIds = new List<AntidoteItemSourceDefinition>();
    var filesInfo = new List<AntidoteFileSourceDefinition>();

    ZipReader zipReader = new ZipReader(filename, Encoding.UTF8);
    string tempFileName = Path.GetTempFileName();

    ZipEntry entry1 = zipReader.GetEntry("package.zip");
    if (entry1 != null)
    {
        using (FileStream fileStream = File.Create(tempFileName))
        {
            StreamUtil.Copy(entry1.GetStream(), fileStream, 16384);
        }

        zipReader.Dispose();
        zipReader = new ZipReader(tempFileName, Encoding.UTF8);
    }

    try
    {
        foreach (ZipEntry entry2 in zipReader.Entries)
        {
            if (entry2.IsItem())
            {
                itemsIds.Add((AntidoteItemSourceDefinition)entry2);
            }

            if (entry2.IsFile())
            {
                string filePath = Path.Combine(HostingEnvironment.ApplicationHost.GetPhysicalPath(), entry2.GetFilePath());
                filesInfo.Add(new AntidoteFileSourceDefinition { FileInfo = new FileInfo(filePath) });
            }
        }

        return new AntidotePackageDefinition
        {
            FilesInfo = filesInfo,
            ItemsId = itemsIds.Distinct()
        };
    }
    finally
    {
        zipReader.Dispose();
        File.Delete(tempFileName);
    }
}
public void CopyWithIBuffer()
{
    CachingBufferManager.Options options = new CachingBufferManager.Options();
    options.MinBufferSize = 7;
    CachingBufferManager manager = new CachingBufferManager(options);

    StreamStub input = new StreamStub();
    input.AddReadData(new byte[] { 1, 2, 3, 4, 5 });
    input.AddReadData(new byte[] { 6, 7, 8, 9, 10 });
    MemoryStream output = new MemoryStream();

    using (IBuffer buffer = manager.GetBuffer(10))
    {
        StreamUtil.Copy(input, output, buffer);
    }

    Assert.AreEqual(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, output.ToArray());
    Assert.AreEqual(14, input.LastReadSize);
}
/// <summary>
/// Reallocates a block of memory in the cache file's meta area.
/// The contents of the old block will be copied to the new block and then zeroed.
/// </summary>
/// <param name="address">The starting address of the data to reallocate.</param>
/// <param name="oldSize">The old size of the data to reallocate.</param>
/// <param name="newSize">The requested size of the newly-allocated data block.</param>
/// <param name="stream">The stream to write cache file changes to.</param>
/// <returns>The memory address of the new block.</returns>
public uint Reallocate(uint address, int oldSize, int newSize, IStream stream)
{
    // Pretty basic for now
    // In the future, we could make an allocator that's biased toward the old
    // address in order to prevent copying
    Free(address, oldSize);
    uint newAddress = Allocate(newSize, stream);

    // If the addresses differ, then copy the data across and zero the old data
    if (newAddress != address)
    {
        long oldOffset = _cacheFile.MetaArea.PointerToOffset(address);
        long newOffset = _cacheFile.MetaArea.PointerToOffset(newAddress);
        StreamUtil.Copy(stream, oldOffset, newOffset, oldSize);
        stream.SeekTo(oldOffset);
        StreamUtil.Fill(stream, 0, oldSize);
    }

    return newAddress;
}