/// <summary>
/// Lays out a dynamic VHD file: footer copy, dynamic header, BAT, the present data
/// blocks, and a trailing footer, returning the builder extents in file order.
/// </summary>
/// <param name="totalLength">Receives the total length of the generated file.</param>
/// <returns>The builder extents composing the file.</returns>
internal override List <BuilderExtent> FixExtents(out long totalLength)
{
    const int FooterSize = 512;
    const int DynHeaderSize = 1024;

    // Data starts immediately after the leading footer copy.
    _footer.DataOffset = FooterSize;

    DynamicHeader dynHeader = new DynamicHeader(-1, FooterSize + DynHeaderSize, _blockSize, _footer.CurrentSize);
    BlockAllocationTableExtent batExtent = new BlockAllocationTableExtent(FooterSize + DynHeaderSize, dynHeader.MaxTableEntries);

    // Data blocks are packed back-to-back after the BAT.
    long nextFileOffset = batExtent.Start + batExtent.Length;

    List <BuilderExtent> dataExtents = new List <BuilderExtent>();
    foreach (var blockRange in StreamExtent.Blocks(_content.Extents, _blockSize))
    {
        for (int n = 0; n < blockRange.Count; ++n)
        {
            long blockIndex = blockRange.Offset + n;
            long contentStart = blockIndex * _blockSize;
            long contentLength = Math.Min(_blockSize, _content.Length - contentStart);

            DataBlockExtent dataBlock = new DataBlockExtent(nextFileOffset, new SubStream(_content, contentStart, contentLength));
            dataExtents.Add(dataBlock);

            // BAT entries are sector addresses of each allocated block.
            batExtent.SetEntry((int)blockIndex, (uint)(nextFileOffset / Sizes.Sector));
            nextFileOffset += dataBlock.Length;
        }
    }

    // Checksums must be computed after all header fields are final.
    _footer.UpdateChecksum();
    dynHeader.UpdateChecksum();

    byte[] footerBuffer = new byte[FooterSize];
    _footer.ToBytes(footerBuffer, 0);
    byte[] dynHeaderBuffer = new byte[DynHeaderSize];
    dynHeader.ToBytes(dynHeaderBuffer, 0);

    // Assemble in file order; the same footer bytes appear at both the start and the end.
    List <BuilderExtent> extents = new List <BuilderExtent>();
    extents.Add(new BuilderBufferExtent(0, footerBuffer));
    extents.Add(new BuilderBufferExtent(FooterSize, dynHeaderBuffer));
    extents.Add(batExtent);
    extents.AddRange(dataExtents);
    extents.Add(new BuilderBufferExtent(nextFileOffset, footerBuffer));

    totalLength = nextFileOffset + FooterSize;
    return extents;
}
/// <summary>
/// Builds the block's sector bitmap: one bit per sector of content, set when the
/// corresponding sector is present in the content stream's extents.
/// </summary>
internal override void PrepareForRead()
{
    // Bitmap sized for every content sector, rounded up to a whole number of sectors.
    long sectorCount = Utilities.Ceil(_content.Length, Sizes.Sector);
    byte[] sectorBitmap = new byte[Utilities.RoundUp(sectorCount / 8, Sizes.Sector)];

    foreach (var presentRange in StreamExtent.Blocks(_content.Extents, Sizes.Sector))
    {
        for (int n = 0; n < presentRange.Count; ++n)
        {
            long sector = presentRange.Offset + n;

            // Bits are MSB-first within each byte.
            sectorBitmap[sector / 8] |= (byte)(0x80 >> (int)(sector % 8));
        }
    }

    _bitmapStream = new MemoryStream(sectorBitmap, false);
}
/// <summary>
/// Builds the grain lookup arrays, recording for each present grain its index within
/// its containing run and the run itself. Entries for absent grains keep defaults.
/// </summary>
internal override void PrepareForRead()
{
    _grainMapOffsets = new int[Length / (_grainSize * Sizes.Sector)];
    _grainMapRanges = new Range <long, long> [_grainMapOffsets.Length];

    long nextSlot = 0;
    foreach (Range <long, long> presentRun in StreamExtent.Blocks(_content.Extents, _grainSize * Sizes.Sector))
    {
        for (int idx = 0; idx < presentRun.Count; ++idx)
        {
            // Position of the grain within its contiguous run, plus the run it belongs to.
            _grainMapOffsets[nextSlot] = idx;
            _grainMapRanges[nextSlot] = presentRun;
            ++nextSlot;
        }
    }
}
/// <summary>
/// Populates the grain table bytes, assigning each present grain the next run of
/// <c>_grainSize</c> sectors following the data start sector.
/// </summary>
internal override void PrepareForRead()
{
    // One 4-byte little-endian sector address per grain table entry;
    // entries for absent grains remain zero (sparse).
    _data = new byte[_gtesPerGt * 4];

    long sectorsAllocated = 0;
    foreach (var block in StreamExtent.Blocks(_content.Extents, _grainSize * Sizes.Sector))
    {
        for (int i = 0; i < block.Count; ++i)
        {
            // Present grains are packed sequentially after _dataStart.
            Utilities.WriteBytesLittleEndian((uint)(_dataStart + sectorsAllocated), _data, (int)((block.Offset + i) * 4));
            sectorsAllocated += _grainSize;
        }
    }
}
/// <summary>
/// Builds the grain table for this extent and records, for each present grain, its
/// grain number and the count of contiguous grains remaining in its run.
/// </summary>
internal override void PrepareForRead()
{
    // One 32-bit entry per GTE, padded out to a whole sector.
    byte[] tableBytes = new byte[Utilities.RoundUp(_header.NumGTEsPerGT * 4, Sizes.Sector)];

    // Grain data is packed immediately after the table itself.
    long nextDataSector = (Start + tableBytes.Length) / Sizes.Sector;

    _grainMapping = new List <long>();
    _grainContiguousRangeMapping = new List <long>();

    foreach (var presentRun in StreamExtent.Blocks(_content.Extents, _header.GrainSize * Sizes.Sector))
    {
        for (int n = 0; n < presentRun.Count; ++n)
        {
            long grain = presentRun.Offset + n;

            Utilities.WriteBytesLittleEndian((uint)nextDataSector, tableBytes, (int)(4 * grain));
            nextDataSector += _header.GrainSize;

            _grainMapping.Add(grain);
            // Contiguous grains left in this run, counting the current one.
            _grainContiguousRangeMapping.Add(presentRun.Count - n);
        }
    }

    _grainTableStream = new MemoryStream(tableBytes, false);
}
/// <summary>
/// Verifies StreamExtent.Blocks coalesces extents into block-aligned ranges.
/// </summary>
public void TestBlocks()
{
    // Two extents landing in the first two 10-unit blocks coalesce into one range.
    StreamExtent[] s = new StreamExtent[] { new StreamExtent(0, 8), new StreamExtent(11, 4) };
    List <Range <long, long> > ranges = new List <Range <long, long> >(StreamExtent.Blocks(s, 10));
    Range <long, long> range = Assert.Single(ranges);
    Assert.Equal(0, range.Offset);
    Assert.Equal(2, range.Count);

    // An extent straddling a block boundary covers both blocks.
    s = new StreamExtent[] { new StreamExtent(0, 8), new StreamExtent(9, 8) };
    ranges = new List <Range <long, long> >(StreamExtent.Blocks(s, 10));
    range = Assert.Single(ranges);
    Assert.Equal(0, range.Offset);
    Assert.Equal(2, range.Count);

    // Disjoint extents yield separate ranges; a boundary-spanning extent widens its range.
    s = new StreamExtent[] { new StreamExtent(3, 4), new StreamExtent(19, 4), new StreamExtent(44, 4) };
    ranges = new List <Range <long, long> >(StreamExtent.Blocks(s, 10));
    Assert.Equal(2, ranges.Count);
    Assert.Equal(0, ranges[0].Offset);
    Assert.Equal(3, ranges[0].Count);
    Assert.Equal(4, ranges[1].Offset);
    Assert.Equal(1, ranges[1].Count);
}
/// <summary>
/// Lays out a server sparse extent: header, global directory, then one grain table
/// (with its data) per present span of content, returned in file order.
/// </summary>
/// <param name="totalLength">Receives the total length of the generated extent.</param>
/// <returns>The builder extents composing the extent file.</returns>
protected override List <BuilderExtent> FixExtents(out long totalLength)
{
    ServerSparseExtentHeader header = DiskImageFile.CreateServerSparseExtentHeader(_content.Length);
    GlobalDirectoryExtent globalDir = new GlobalDirectoryExtent(header);

    // Content covered by one grain table, in bytes.
    long bytesPerGrainTable = header.NumGTEsPerGT * header.GrainSize * Sizes.Sector;

    // Grain tables are packed back-to-back after the global directory.
    long nextFileOffset = header.GdOffset * Sizes.Sector + globalDir.Length;

    List <BuilderExtent> tableExtents = new List <BuilderExtent>();
    foreach (Range <long, long> tableRange in StreamExtent.Blocks(_content.Extents, bytesPerGrainTable))
    {
        for (int n = 0; n < tableRange.Count; ++n)
        {
            long tableIndex = tableRange.Offset + n;
            long contentStart = tableIndex * bytesPerGrainTable;
            long contentLength = Math.Min(bytesPerGrainTable, _content.Length - contentStart);

            GrainTableExtent tableExtent = new GrainTableExtent(nextFileOffset, new SubStream(_content, contentStart, contentLength), header);
            tableExtents.Add(tableExtent);

            // Directory entries are sector addresses of each grain table.
            globalDir.SetEntry((int)tableIndex, (uint)(nextFileOffset / Sizes.Sector));
            nextFileOffset += tableExtent.Length;
        }
    }

    // FreeSector must be final before the header is serialized.
    header.FreeSector = (uint)(nextFileOffset / Sizes.Sector);

    List <BuilderExtent> extents = new List <BuilderExtent>();
    extents.Add(new BuilderBufferExtent(0, header.GetBytes()));
    extents.Add(globalDir);
    extents.AddRange(tableExtents);

    totalLength = nextFileOffset;
    return extents;
}
/// <summary>
/// Creates a hex dump from a stream.
/// </summary>
/// <param name="stream">The stream to generate the hex dump from.</param>
/// <param name="output">The destination for the hex dump.</param>
public static void Generate(SparseStream stream, TextWriter output)
{
    stream.Position = 0;
    byte[] buffer = new byte[1024 * 1024];

    // Only walk the allocated parts of the stream - holes are skipped entirely.
    foreach (var block in StreamExtent.Blocks(stream.Extents, buffer.Length))
    {
        long startPos = block.Offset * (long)buffer.Length;
        long endPos = Math.Min((block.Offset + block.Count) * (long)buffer.Length, stream.Length);

        stream.Position = startPos;
        while (stream.Position < endPos)
        {
            // Fill the buffer as far as possible (Read may return short counts).
            int numLoaded = 0;
            long readStart = stream.Position;
            while (numLoaded < buffer.Length)
            {
                int bytesRead = stream.Read(buffer, numLoaded, buffer.Length - numLoaded);
                if (bytesRead == 0)
                {
                    break;
                }

                numLoaded += bytesRead;
            }

            for (int i = 0; i < numLoaded; i += 16)
            {
                // Bytes actually valid in this row - the final row of a read may be short.
                // (Fix: previously 16 bytes were always dumped, exposing stale data from the
                // prior buffer fill whenever numLoaded wasn't a multiple of 16.)
                int rowBytes = Math.Min(16, numLoaded - i);

                // Suppress rows identical to the previous 16 bytes; short rows always print.
                bool foundVal = false;
                if (i > 0 && rowBytes == 16)
                {
                    for (int j = 0; j < 16; j++)
                    {
                        if (buffer[i + j] != buffer[i + j - 16])
                        {
                            foundVal = true;
                            break;
                        }
                    }
                }
                else
                {
                    foundVal = true;
                }

                if (foundVal)
                {
                    output.Write("{0:x8}", i + readStart);

                    // Hex column, padded with spaces past the end of a short row.
                    for (int j = 0; j < 16; j++)
                    {
                        if (j % 8 == 0)
                        {
                            output.Write(" ");
                        }

                        if (j < rowBytes)
                        {
                            output.Write(" {0:x2}", buffer[i + j]);
                        }
                        else
                        {
                            output.Write("   ");
                        }
                    }

                    // ASCII column: printable chars as-is, everything else as '.'.
                    output.Write(" |");
                    for (int j = 0; j < rowBytes; j++)
                    {
                        if (j % 8 == 0 && j != 0)
                        {
                            output.Write(" ");
                        }

                        output.Write("{0}", (buffer[i + j] >= 32 && buffer[i + j] < 127) ? (char)buffer[i + j] : '.');
                    }

                    output.Write("|");
                    output.WriteLine();
                }
            }
        }
    }
}
/// <summary>
/// Lays out a hosted sparse extent: header, embedded descriptor, redundant grain
/// directory/tables, primary grain directory/tables, then grain data, returning the
/// builder extents that compose the file.
/// </summary>
/// <param name="totalLength">Receives the total length of the generated extent file.</param>
/// <returns>The builder extents, with the header and metadata first.</returns>
internal override List <BuilderExtent> FixExtents(out long totalLength)
{
    List <BuilderExtent> extents = new List <BuilderExtent>();

    MemoryStream descriptorStream = new MemoryStream();
    _descriptor.Write(descriptorStream);

    // Figure out grain size and number of grain tables, and adjust actual extent size to be a multiple
    // of grain size
    const int GtesPerGt = 512;
    long grainSize = 128;
    int numGrainTables = (int)Utilities.Ceil(_content.Length, grainSize * GtesPerGt * Sizes.Sector);

    // NOTE(review): descriptor space is fixed at 10 KiB rather than sized from the stream -
    // presumably to leave room for in-place descriptor edits; confirm before changing.
    long descriptorLength = 10 * Sizes.OneKiB; // Utilities.RoundUp(descriptorStream.Length, Sizes.Sector);
    long descriptorStart = 0;
    if (descriptorLength != 0)
    {
        // Descriptor lives in sector 1, just after the header sector.
        descriptorStart = 1;
    }

    // On-disk layout, in sectors: descriptor, redundant GD, redundant GTs, primary GD,
    // primary GTs, then grain data aligned to a grain boundary.
    long redundantGrainDirStart = Math.Max(descriptorStart, 1) + Utilities.Ceil(descriptorLength, Sizes.Sector);
    long redundantGrainDirLength = numGrainTables * 4;
    long redundantGrainTablesStart = redundantGrainDirStart + Utilities.Ceil(redundantGrainDirLength, Sizes.Sector);
    long redundantGrainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);
    long grainDirStart = redundantGrainTablesStart + Utilities.Ceil(redundantGrainTablesLength, Sizes.Sector);
    long grainDirLength = numGrainTables * 4;
    long grainTablesStart = grainDirStart + Utilities.Ceil(grainDirLength, Sizes.Sector);
    long grainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);
    long dataStart = Utilities.RoundUp(grainTablesStart + Utilities.Ceil(grainTablesLength, Sizes.Sector), grainSize);

    // Generate the header, and write it
    HostedSparseExtentHeader header = new HostedSparseExtentHeader();
    header.Flags = HostedSparseExtentFlags.ValidLineDetectionTest | HostedSparseExtentFlags.RedundantGrainTable;
    header.Capacity = Utilities.RoundUp(_content.Length, grainSize * Sizes.Sector) / Sizes.Sector;
    header.GrainSize = grainSize;
    header.DescriptorOffset = descriptorStart;
    header.DescriptorSize = descriptorLength / Sizes.Sector;
    header.NumGTEsPerGT = GtesPerGt;
    header.RgdOffset = redundantGrainDirStart;
    header.GdOffset = grainDirStart;
    header.Overhead = dataStart;
    extents.Add(new BuilderBytesExtent(0, header.GetBytes()));

    // The descriptor extent
    if (descriptorLength > 0)
    {
        extents.Add(new BuilderStreamExtent(descriptorStart * Sizes.Sector, descriptorStream));
    }

    // The grain directory extents
    extents.Add(new GrainDirectoryExtent(redundantGrainDirStart * Sizes.Sector, redundantGrainTablesStart, numGrainTables, GtesPerGt));
    extents.Add(new GrainDirectoryExtent(grainDirStart * Sizes.Sector, grainTablesStart, numGrainTables, GtesPerGt));

    // For each graintable span that's present...
    long dataSectorsUsed = 0;
    long gtSpan = GtesPerGt * grainSize * Sizes.Sector;
    foreach (Range <long, long> gtRange in StreamExtent.Blocks(_content.Extents, grainSize * GtesPerGt * Sizes.Sector))
    {
        for (long i = 0; i < gtRange.Count; ++i)
        {
            int gt = (int)(gtRange.Offset + i);

            // Content covered by this grain table, clipped to the end of the content stream.
            SubStream gtStream = new SubStream(_content, gt * gtSpan, Math.Min(gtSpan, _content.Length - gt * gtSpan));

            GrainTableDataExtent dataExtent = new GrainTableDataExtent((dataStart + dataSectorsUsed) * Sizes.Sector, gtStream, grainSize);
            extents.Add(dataExtent);

            // Identical grain table is written at both the redundant and primary locations.
            extents.Add(new GrainTableExtent(GrainTablePosition(redundantGrainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));
            extents.Add(new GrainTableExtent(GrainTablePosition(grainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));

            dataSectorsUsed += dataExtent.Length / Sizes.Sector;
        }
    }

    totalLength = (dataStart + dataSectorsUsed) * Sizes.Sector;
    return(extents);
}
/// <summary>
/// Lays out a dynamic VHDX file: file header, two headers, log space, two region
/// tables, metadata region, BAT region, then the present payload blocks.
/// </summary>
/// <param name="totalLength">Receives the total length of the generated file.</param>
/// <returns>The builder extents composing the file, in file order.</returns>
/// <exception cref="NotSupportedException">Thrown for disk types other than Dynamic.</exception>
internal override List <BuilderExtent> FixExtents(out long totalLength)
{
    if (_diskType != DiskType.Dynamic)
    {
        throw new NotSupportedException("Creation of only dynamic disks currently implemented");
    }

    List <BuilderExtent> extents = new List <BuilderExtent>();

    int logicalSectorSize = 512;
    int physicalSectorSize = 4096;

    // Number of payload blocks covered by one sector-bitmap block (the VHDX 'chunk ratio').
    long chunkRatio = (0x800000L * logicalSectorSize) / _blockSize;

    long dataBlocksCount = Utilities.Ceil(_content.Length, _blockSize);

    // A dynamic-disk BAT interleaves one sector-bitmap entry after each run of
    // chunkRatio payload entries.
    long totalBatEntriesDynamic = dataBlocksCount + ((dataBlocksCount - 1) / chunkRatio);

    FileHeader fileHeader = new FileHeader() { Creator = ".NET DiscUtils" };

    // The first MiB holds the fixed header structures; the log follows.
    long fileEnd = Sizes.OneMiB;

    VhdxHeader header1 = new VhdxHeader();
    header1.SequenceNumber = 0;
    header1.FileWriteGuid = Guid.NewGuid();
    header1.DataWriteGuid = Guid.NewGuid();
    header1.LogGuid = Guid.Empty;
    header1.LogVersion = 0;
    header1.Version = 1;
    header1.LogLength = (uint)Sizes.OneMiB;
    header1.LogOffset = (ulong)fileEnd;
    header1.CalcChecksum();

    fileEnd += header1.LogLength;

    VhdxHeader header2 = new VhdxHeader(header1);
    header2.SequenceNumber = 1;
    header2.CalcChecksum();

    RegionTable regionTable = new RegionTable();

    RegionEntry metadataRegion = new RegionEntry();
    metadataRegion.Guid = RegionEntry.MetadataRegionGuid;
    metadataRegion.FileOffset = fileEnd;
    metadataRegion.Length = (uint)Sizes.OneMiB;
    metadataRegion.Flags = RegionFlags.Required;
    regionTable.Regions.Add(metadataRegion.Guid, metadataRegion);

    fileEnd += metadataRegion.Length;

    RegionEntry batRegion = new RegionEntry();
    batRegion.Guid = RegionEntry.BatGuid;
    batRegion.FileOffset = fileEnd;
    batRegion.Length = (uint)Utilities.RoundUp(totalBatEntriesDynamic * 8, Sizes.OneMiB);
    batRegion.Flags = RegionFlags.Required;
    regionTable.Regions.Add(batRegion.Guid, batRegion);

    fileEnd += batRegion.Length;

    // Fixed layout: file header at 0, headers at 64/128 KiB, region tables at 192/256 KiB.
    extents.Add(ExtentForStruct(fileHeader, 0));
    extents.Add(ExtentForStruct(header1, 64 * Sizes.OneKiB));
    extents.Add(ExtentForStruct(header2, 128 * Sizes.OneKiB));
    extents.Add(ExtentForStruct(regionTable, 192 * Sizes.OneKiB));
    extents.Add(ExtentForStruct(regionTable, 256 * Sizes.OneKiB));

    // Metadata (no parent locator - only dynamic, non-differencing disks are created here).
    FileParameters fileParams = new FileParameters() { BlockSize = (uint)_blockSize, Flags = FileParametersFlags.None };

    byte[] metadataBuffer = new byte[metadataRegion.Length];
    MemoryStream metadataStream = new MemoryStream(metadataBuffer);
    Metadata.Initialize(metadataStream, fileParams, (ulong)_content.Length, (uint)logicalSectorSize, (uint)physicalSectorSize, null);
    extents.Add(new BuilderBufferExtent(metadataRegion.FileOffset, metadataBuffer));

    List <Range <long, long> > presentBlocks = new List <Range <long, long> >(StreamExtent.Blocks(_content.Extents, _blockSize));

    // BAT
    BlockAllocationTableBuilderExtent batExtent = new BlockAllocationTableBuilderExtent(batRegion.FileOffset, batRegion.Length, presentBlocks, fileEnd, _blockSize, chunkRatio);
    extents.Add(batExtent);

    // Stream contents: payload blocks are appended in order after the BAT region.
    foreach (var range in presentBlocks)
    {
        long substreamStart = range.Offset * _blockSize;
        long substreamCount = Math.Min(_content.Length - substreamStart, range.Count * _blockSize);

        SubStream dataSubStream = new SubStream(_content, substreamStart, substreamCount);
        BuilderSparseStreamExtent dataExtent = new BuilderSparseStreamExtent(fileEnd, dataSubStream);
        extents.Add(dataExtent);

        fileEnd += range.Count * _blockSize;
    }

    totalLength = fileEnd;
    return extents;
}