// Reading past the logical end of a fork that has no extents anywhere
// (neither inline nor in the overflow B-tree) must throw rather than
// return bogus data.
public void ReadFrom_BeyondEndOfFile_Throws()
{
    var cnid = new CatalogNodeId(1);

    // The extents-overflow B-tree has no record for this file.
    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns((byte[])null);

    var context = new Context()
    {
        VolumeStream = Stream.Null,
        VolumeHeader = new VolumeHeader()
        {
            BlockSize = 8,
        },
        ExtentsOverflow = extentsOverflow.Object,
    };

    // LogicalSize claims 0x123 bytes, but no blocks actually back the fork.
    var fork = new ForkData()
    {
        LogicalSize = 0x123,
        Extents = Array.Empty<ExtentDescriptor>(),
        TotalBlocks = 0,
    };

    var fileBuffer = new FileBuffer(context, fork, cnid);

    byte[] data = new byte[0x10];
    Assert.Throws<InvalidOperationException>(() => fileBuffer.Read(0, data, 0, data.Length));
}
// A decmpfs attribute holding zlib-compressed inline data (compression
// type 3 with a zlib header) should be inflated transparently on read.
public void FileContent_ZlibAttribute_Compressed()
{
    var nodeId = new CatalogNodeId(1);

    // Serialized "com.apple.decmpfs" attribute: header + zlib stream.
    byte[] compressionAttribute = Convert.FromBase64String(
        "AAAAEAAAAAAAAAAAAAAALWZwbWMDAAAAKgAAAAAAAAB4XksqyMksLjEwuMDBAAaMjAxQBgMq4AQAjygD0wA=");

    var attributes = new Mock<BTree<AttributeKey>>(MockBehavior.Strict);
    attributes
        .Setup(a => a.Find(new AttributeKey(nodeId, "com.apple.decmpfs")))
        .Returns(compressionAttribute);

    var file = new HfsPlusFile(
        new Context()
        {
            Attributes = attributes.Object,
        },
        nodeId,
        new CatalogFileInfo()
        {
            FileId = nodeId,
        });

    byte[] buffer = new byte[0x40];

    // 42 decompressed bytes, starting with a binary plist signature.
    Assert.Equal(42, file.FileContent.Read(0, buffer, 0, buffer.Length));
    Assert.Equal("bplist00", Encoding.UTF8.GetString(buffer, 0, 8));
}
// A decmpfs attribute whose payload is stored uncompressed (leading 0xFF
// marker instead of a zlib header) should be returned verbatim.
public void FileContent_ZlibAttribute_Raw()
{
    var nodeId = new CatalogNodeId(1);

    // Serialized "com.apple.decmpfs" attribute: header + raw (0xFF-prefixed) data.
    byte[] compressionAttribute = Convert.FromBase64String(
        "AAAAEAAAAAAAAAAAAAAAGWZwbWMDAAAACAAAAAAAAAD/QVBQTD8/Pz++");

    var attributes = new Mock<BTree<AttributeKey>>(MockBehavior.Strict);
    attributes
        .Setup(a => a.Find(new AttributeKey(nodeId, "com.apple.decmpfs")))
        .Returns(compressionAttribute);

    var file = new HfsPlusFile(
        new Context()
        {
            Attributes = attributes.Object,
        },
        nodeId,
        new CatalogFileInfo()
        {
            FileId = nodeId,
        });

    byte[] buffer = new byte[0x20];

    // 8 bytes of raw content: the "APPL????" type/creator string.
    Assert.Equal(8, file.FileContent.Read(0, buffer, 0, buffer.Length));
    Assert.Equal("APPL????", Encoding.UTF8.GetString(buffer, 0, 8));
}
// When the fork itself lists no extents, FileBuffer should fetch them
// from the extents-overflow B-tree and read across extent boundaries.
public void ReadFromOverflow_Works()
{
    var cnid = new CatalogNodeId(1);

    // Serialize two one-block extents (start blocks 0 and 1) as the
    // overflow record for this file.
    var extent = new ExtentDescriptor()
    {
        BlockCount = 1,
        StartBlock = 0,
    };
    byte[] overflowRecord = new byte[2 * extent.Size];
    extent.WriteTo(overflowRecord, 0);
    extent.StartBlock = 1;
    extent.WriteTo(overflowRecord, extent.Size);

    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns(overflowRecord);

    var context = new Context()
    {
        // 13 bytes of volume data; block size 8, so block 0 is "Hello, W"
        // and block 1 is the remaining "orld!".
        VolumeStream = SparseStream.FromStream(
            new MemoryStream(Encoding.UTF8.GetBytes("Hello, World!")),
            Ownership.Dispose),
        VolumeHeader = new VolumeHeader()
        {
            BlockSize = 8,
        },
        ExtentsOverflow = extentsOverflow.Object,
    };

    var fork = new ForkData()
    {
        LogicalSize = 0x123,
        Extents = Array.Empty<ExtentDescriptor>(),
        TotalBlocks = 2,
    };

    var fileBuffer = new FileBuffer(context, fork, cnid);

    byte[] data = new byte[0x10];

    // First read covers the whole first block (8 bytes); the second read
    // starts in the next block and gets the remaining 5 bytes.
    Assert.Equal(8, fileBuffer.Read(0, data, 0, data.Length));
    Assert.Equal(5, fileBuffer.Read(8, data, 8, data.Length));
}
// Verifies that HfsPlusFile surfaces catalog metadata (timestamps,
// attributes, length) and that all metadata setters are read-only
// (throw NotSupportedException).
public void Properties_Work()
{
    var accessTime = new DateTime(2000, 1, 1);
    var writeTime = new DateTime(2000, 1, 2);
    var creationTime = new DateTime(2000, 1, 3);
    var nodeId = new CatalogNodeId(1);

    var file = new HfsPlusFile(
        new Context()
        {
            Attributes = Mock.Of<BTree<AttributeKey>>(),
        },
        nodeId,
        new CatalogFileInfo()
        {
            AccessTime = accessTime,
            ContentModifyTime = writeTime,
            CreateTime = creationTime,
            DataFork = new ForkData()
            {
                LogicalSize = 42,
            },
            FileSystemInfo = new UnixFileSystemInfo()
            {
                FileType = UnixFileType.Regular,
            },
        });

    // Use UtcNow rather than Now: the properties under test are the *Utc
    // variants, and tests should not depend on the local clock/timezone.
    // (The value is irrelevant here — every setter must throw.)
    Assert.Equal(accessTime, file.LastAccessTimeUtc);
    Assert.Throws<NotSupportedException>(() => file.LastAccessTimeUtc = DateTime.UtcNow);

    Assert.Equal(writeTime, file.LastWriteTimeUtc);
    Assert.Throws<NotSupportedException>(() => file.LastWriteTimeUtc = DateTime.UtcNow);

    Assert.Equal(creationTime, file.CreationTimeUtc);
    Assert.Throws<NotSupportedException>(() => file.CreationTimeUtc = DateTime.UtcNow);

    Assert.Equal(FileAttributes.Normal, file.FileAttributes);
    Assert.Throws<NotSupportedException>(() => file.FileAttributes = FileAttributes.Hidden);

    Assert.Equal(42, file.FileLength);
}
// GetExtentsInRange should clamp the requested byte range to the fork's
// logical size (0x10 bytes here), yielding a zero-length extent when the
// range starts at or beyond the end.
public void GetExtentsInRange_Works()
{
    var cnid = new CatalogNodeId(1);

    // No overflow extents exist for this file.
    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns((byte[])null);

    var context = new Context()
    {
        VolumeStream = Stream.Null,
        VolumeHeader = new VolumeHeader()
        {
            BlockSize = 8,
        },
        ExtentsOverflow = extentsOverflow.Object,
    };

    var fork = new ForkData()
    {
        LogicalSize = 0x10,
        Extents = Array.Empty<ExtentDescriptor>(),
        TotalBlocks = 0,
    };

    var fileBuffer = new FileBuffer(context, fork, cnid);

    // Range fully inside the fork: returned unchanged.
    var extent = Assert.Single(fileBuffer.GetExtentsInRange(8, 8));
    Assert.Equal(8, extent.Start);
    Assert.Equal(8, extent.Length);

    // Range running past the end: length clipped to the logical size.
    extent = Assert.Single(fileBuffer.GetExtentsInRange(8, 16));
    Assert.Equal(8, extent.Start);
    Assert.Equal(8, extent.Length);

    // Range starting exactly at the end: zero-length extent.
    extent = Assert.Single(fileBuffer.GetExtentsInRange(16, 16));
    Assert.Equal(16, extent.Start);
    Assert.Equal(0, extent.Length);
}
// A decmpfs attribute of type 4 stores only a header; the compressed
// payload lives in the file's resource fork. Reading the file content
// should locate and inflate the resource-fork data.
public void FileContent_ZlibResource()
{
    var nodeId = new CatalogNodeId(1);

    // decmpfs header: compression type 4 (zlib in resource fork),
    // uncompressed size 0x9A3.
    byte[] compressionAttribute = Convert.FromBase64String(
        "AAAAEAAAAAAAAAAAAAAAEGZwbWMEAAAAowkAAAAAAAA=");

    // The raw resource fork: block table plus the zlib-compressed chunk.
    byte[] resourceData = Convert.FromBase64String(
        "AAABAAAAAwEAAAIBAAAAMgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
        "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
        "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
        "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
        "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf0BAAAADAAAAPEBAAB4Xu2UUU" +
        "/bMBDHn+mnyCIeYFLtwoQ0TSGItUVUCiMiRdokNMnER+rh2JbtECrlw+MksLZTy+jUiZc++eL/" +
        "3+dfTucLTh5z7j2ANkyKY/8A9XwPRCopE9mxfz0+6372T8JO8GFw2R//iIee4sxYL77+Go36nt" +
        "/F+FQpDhgPxgMvjkbJ2HM5MB5+8z1/Yq36gnFZlojULpTKvDYaHGupQNtp5JJ13QFELfXdNW32" +
        "BRy3S1lqw85OcA/T8I5xMAGuQ7dTK3hBOlyu6eLPY25tpZ9XYGShUzD4xbATWF0AXmZBH28QV1" +
        "r+mjO/pGvNMmf2tzaXqVVLYNlkXtdAeHjQC3AT1OnwKrzZ3Vymz0VCTcl2Nwnz6c0wgt3ilBNj" +
        "wDQfqzGUdayEr4tytA4JJZbcIAoKBHU9PH1fGibu5DsWZSgy1xiTvzbrPzIcvtIj/68zlz6TWV" +
        "Cbm4e+OARmZHtnmuRQSn1vqmRCNNC5jZgX2Ui0a5e54HvcT0A/MFfO6hy4G1imuiDpZVJF7FYT" +
        "PcV7p4WVObFSV4mSltfMVSQzJkYWcrO/v7LuAowFuu7/99CbnuZ2jG3H2KZotmNs85356hibrc" +
        "3FYecJtZvhhT8rv3hKNgyeoDfeIYxXLdgi8vOb6K85mMnv728xXs0VOVtEfrxsegAAAAAAAAAA" +
        "AAAAAAAAAAAAAAAAAAAAAAAcADIAAGNtcGYAAAAKAAH//wAAAAAAAAAA");

    var attributes = new Mock<BTree<AttributeKey>>(MockBehavior.Strict);
    attributes
        .Setup(a => a.Find(new AttributeKey(nodeId, "com.apple.decmpfs")))
        .Returns(compressionAttribute);

    var context = new Context()
    {
        Attributes = attributes.Object,
        VolumeStream = new MemoryStream(resourceData),
        VolumeHeader = new VolumeHeader()
        {
            BlockSize = 0x00001000,
        },
    };

    var catalogInfo = new CatalogFileInfo()
    {
        FileId = nodeId,
        // Resource fork occupies the first volume block.
        ResourceFork = new ForkData()
        {
            LogicalSize = 0x0000000000000333,
            Extents = new ExtentDescriptor[]
            {
                new ExtentDescriptor()
                {
                    StartBlock = 0,
                    BlockCount = 1,
                },
            },
        },
    };

    var file = new HfsPlusFile(context, nodeId, catalogInfo);

    byte[] buffer = new byte[0x1000];

    // 0x9A3 decompressed bytes, beginning with an XML plist declaration.
    Assert.Equal(0x9A3, file.FileContent.Read(0, buffer, 0, buffer.Length));
    Assert.Equal(
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
        Encoding.UTF8.GetString(buffer, 0, 38));
}