/// <summary>Checks whether the partition starts with a valid ReFS volume header.</summary>
/// <param name="imagePlugin">Media image to read sectors from.</param>
/// <param name="partition">Partition to probe.</param>
/// <returns>true when the ReFS identifier, zero pad and signature all match.</returns>
public bool Identify(IMediaImage imagePlugin, Partition partition)
{
    int  headerLength = Marshal.SizeOf<VolumeHeader>();
    uint sectorSize   = imagePlugin.Info.SectorSize;

    // Whole sectors needed to cover one volume header (round up).
    uint sectorCount = (uint)(headerLength / sectorSize);

    if (headerLength % sectorSize != 0)
    {
        sectorCount++;
    }

    // Header must fit inside the partition.
    if (partition.Start + sectorCount >= partition.End)
    {
        return false;
    }

    byte[] buffer = imagePlugin.ReadSectors(partition.Start, sectorCount);

    if (buffer.Length < headerLength)
    {
        return false;
    }

    VolumeHeader header = Marshal.ByteArrayToStructureLittleEndian<VolumeHeader>(buffer);

    return header.identifier == FSRS                          &&
           ArrayHelpers.ArrayIsNullOrEmpty(header.mustBeZero) &&
           header.signature.SequenceEqual(_signature);
}
/// <summary>Packs the linear indices of two voxels into a single edge id.</summary>
public static int PackVoxelEdgeId(VolumeHeader hd, Int3 a3, Int3 b3)
{
    int first  = hd.CubicToLinear(a3);
    int second = hd.CubicToLinear(b3);

    // Encode the pair as a two-digit number in base hd.TotalCount.
    return first + second * hd.TotalCount;
}
/// <summary>Creates a buffer holding one T per voxel of the given volume size.</summary>
public VolumeBuffer(Cubic<int> sz)
{
    this.Header = new VolumeHeader(new Int3(sz));

    // One array slot per voxel.
    this.Array = new T[sz.X * sz.Y * sz.Z];
}
/// <summary>
/// Reads and parses node <paramref name="nodeNumber"/> of the given special file
/// ("Catalog", "Attributes" or "ExtentsOverflow") on the named volume.
/// </summary>
/// <param name="volumeName">Volume to read from.</param>
/// <param name="fileName">Special file name.</param>
/// <param name="nodeNumber">B-tree node number within the file.</param>
/// <returns>The parsed node.</returns>
internal static Node Get(string volumeName, string fileName, uint nodeNumber)
{
    // The previous implementation also fetched the volume header and the file's
    // header node here (extra disk reads) and discarded both results; that dead
    // work has been removed. GetBytes() performs the lookups it needs itself.
    return Get(GetBytes(volumeName, fileName, nodeNumber), volumeName, fileName, nodeNumber);
}
/// <summary>
/// Reads the full header node of the given special file from the volume.
/// </summary>
/// <param name="volumeName">Volume to read from.</param>
/// <param name="fileName">Special file name: "Catalog", "Attributes" or "ExtentsOverflow".</param>
/// <returns>The header node's bytes (one full node of NodeSize bytes).</returns>
/// <exception cref="ArgumentException">Thrown for an unrecognized file name.</exception>
private static byte[] GetHeaderBytes(string volumeName, string fileName)
{
    VolumeHeader volHeader = VolumeHeader.Get(volumeName);
    ExtentDescriptor extent;
    switch (fileName)
    {
        case "Catalog":
            extent = volHeader.CatalogFile.Extents[0];
            break;
        case "Attributes":
            extent = volHeader.AttributesFile.Extents[0];
            break;
        case "ExtentsOverflow":
            extent = volHeader.ExtentsOverflowFile.Extents[0];
            break;
        default:
            // BUGFIX: an unknown name previously fell through with extent == null
            // and crashed with a NullReferenceException below.
            throw new ArgumentException("Unknown special file name: " + fileName, "fileName");
    }

    // Read the smallest possible amount of bytes
    byte[] firstSectorBytes = Helper.readDrive(volumeName, extent.StartBlock * volHeader.BlockSize, 0x200);

    // Parse HeaderRecord to determine NodeSize
    HeaderRecord headerRecord = HeaderRecord.Get(firstSectorBytes, 0x0E, volumeName, fileName);

    // Read the full Header Node
    return Helper.readDrive(volumeName, extent.StartBlock * volHeader.BlockSize, headerRecord.NodeSize);
}
/// <summary>
/// Parses a known-good HFS+ volume header fixture and verifies every decoded field.
/// </summary>
public void ReadFrom_Works()
{
    var header = new VolumeHeader();
    header.ReadFrom(File.ReadAllBytes("HfsPlus/header.bin"), 0);

    Assert.Equal(0x1000u, header.AllocationFile.LogicalSize);
    Assert.Equal((VolumeAttributes)0x80000100, header.Attributes);
    Assert.Equal(0xae000u, header.AttributesFile.LogicalSize);
    // 1904-01-01 — presumably the HFS date epoch, meaning "never backed up"; verify against VolumeHeader's date decoding.
    Assert.Equal(new DateTime(1904, 1, 1), header.BackupDate);
    Assert.Equal(0x1000u, header.BlockSize);
    Assert.Equal(0xaf000u, header.CatalogFile.LogicalSize);
    Assert.Equal(new DateTime(2021, 1, 8, 9, 49, 58, DateTimeKind.Utc), header.CheckedDate);
    Assert.Equal(new DateTime(2021, 1, 8, 1, 49, 58, DateTimeKind.Utc), header.CreateDate);
    Assert.Equal(0x10000u, header.DataClumpSize);
    Assert.Equal(0x1u, header.EncodingsBitmap);
    Assert.Equal(0xaf000u, header.ExtentsFile.LogicalSize);
    Assert.Equal(0x134u, header.FileCount);
    Assert.Equal(new uint[] { 0u, 0u, 0u, 0u, 0u, 0u, 0x4394662cu, 0x91caac0bu }, header.FinderInfo);
    Assert.Equal(0x9cu, header.FolderCount);
    Assert.Equal(0xda2u, header.FreeBlocks);
    Assert.True(header.IsValid);
    Assert.Equal(0u, header.JournalInfoBlock);
    Assert.Equal(0x31302e30u, header.LastMountedVersion);
    Assert.Equal(new DateTime(2021, 1, 8, 9, 50, 1, DateTimeKind.Utc), header.ModifyDate);
    Assert.Equal(0xfb0u, header.NextAllocation);
    Assert.Equal(new CatalogNodeId(480), header.NextCatalogId);
    Assert.Equal(0x10000u, header.ResourceClumpSize);
    // 0x482b is the "H+" signature bytes.
    Assert.Equal(0x482bu, header.Signature);
    Assert.Equal(0x200, header.Size);
    Assert.Equal(0u, header.StartupFile.LogicalSize);
    Assert.Equal(0x231fu, header.TotalBlocks);
    Assert.Equal(0x4u, header.Version);
    Assert.Equal(0x19du, header.WriteCount);
}
/// <summary>
/// Prepares a new crack run: validates the configuration, loads the wordlist,
/// selects the volume-header extraction mode (normal vs. hidden volume), fills
/// the key-data pool when keyfiles are enabled, and reads the encrypted volume
/// header from the container.
/// </summary>
/// <returns>true when preparation succeeded; false on validation or I/O failure.</returns>
public bool PrepareCrackOperation()
{
    List<string> descr;
    mainForm.LogClear();
    mainForm.LogAppend("Prepare new crack Operation...");
    config.Configuration = mainForm.CrackConfig;
    if (!config.ValidateConfiguration(out descr))
    {
        // Surface each validation error, re-enable the start button, abort.
        mainForm.LogAppend(descr);
        mainForm.LogAppend("Resolve the above errors and try again. Operation aborted.");
        mainForm.SetButtonStart(true);
        return (false);
    }
    mainForm.LogAppend("Configuration seems valid.");
    mainForm.LogAppend("Analyzing Wordlist...");
    WordListPasswordProvider.Instance.LoadWordList(config.Configuration.WordListPath);
    if (config.Configuration.WordListOffset > 0)
    {
        // Resume support: skip lines already tested in a previous run.
        WordListPasswordProvider.Instance.StartingLine = config.Configuration.WordListOffset;
    }
    wordListLineCnt = WordListPasswordProvider.Instance.PasswordCount;
    mainForm.LogAppend("Wordlist anaysis: " + WordListPasswordProvider.Instance.PasswordCount + " Passwords!");

    // chose the volume extraction mode
    VolumeHeaderFactory volumeHeaderExtractor;
    if (!config.Configuration.AttackHiddenVolume)
    {
        volumeHeaderExtractor = new VolumeHeaderFactoryFile(config.Configuration.ContainerPath);
    }
    else
    {
        volumeHeaderExtractor = new VolumeHeaderFactoryFileHidden(config.Configuration.ContainerPath);
    }

    // Setup Keydata Pool
    if (config.Configuration.UseKeyFiles)
    {
        mainForm.LogAppend("Setting up Keypool...");
        foreach (var path in config.Configuration.KeyFiles)
        {
            keyDataPool.AddKeyfile(new KeyDataFile(path));
            mainForm.LogAppend(path);
        }
        keyDataPool.FinalizeKeyDataPool();
    }

    try
    {
        header = volumeHeaderExtractor.ReadEncryptedVolumeHeader();
    }
    catch (IOException e)
    {
        MessageBox.Show("Can't read Volume Header: " + e.Message);
        mainForm.LogAppend("Flow interrupted!");
        return (false);
    }
    return (true);
}
/// <summary>Recovers the two voxel coordinates from a packed edge id.</summary>
public static void UnpackVoxelEdgeId(VolumeHeader hd, int packed, out Int3 a3, out Int3 b3)
{
    int total = hd.TotalCount;

    // Inverse of PackVoxelEdgeId: low "digit" is the first voxel, high digit the second.
    a3 = hd.LinearToCubic(packed % total);
    b3 = hd.LinearToCubic(packed / total);
}
/// <summary>An all-zero buffer still parses (consuming 0x200 bytes) but yields an invalid header.</summary>
public void ReadFrom_Invalid_Works()
{
    var buffer = new byte[0x200];
    var header = new VolumeHeader();

    int consumed = header.ReadFrom(buffer, 0);

    Assert.Equal(0x200, consumed);
    Assert.False(header.IsValid);
}
/// <summary>
/// Decrypt the files in the specified directory
/// </summary>
///
/// <param name="FilePaths">A list of the files to be processed</param>
///
/// <exception cref="CryptoProcessingException">Thrown if the VolumeKey does not contain enough keys to encrypt all the files in the directory</exception>
public void Decrypt(string[] FilePaths)
{
    if (FilePaths.Length < 1)
    {
        throw new CryptoProcessingException("VolumeCipher:Transform", "The file paths list is empty!", new ArgumentException());
    }

    InitializeProgress(FilePaths);

    if (m_progressTotal < 1)
    {
        throw new CryptoProcessingException("VolumeCipher:Initialize", "The files are all zero bytes!", new ArgumentException());
    }

    long prgCtr = 0;

    for (int i = 0; i < FilePaths.Length; ++i)
    {
        FileStream inpStream = GetStream(FilePaths[i], true);
        VolumeHeader vh = GetHeader(inpStream);
        KeyParams key = VolumeKey.FromId(m_keyStream, vh.FileId);

        // user dropped a file in, notify or log
        if (key == null)
        {
            if (ErrorNotification != null)
            {
                ErrorNotification(this, string.Format("The file {0}; has no key assigned", FilePaths[i]));
            }

            // BUGFIX: the input stream was leaked on this path
            if (inpStream != null)
            {
                inpStream.Dispose();
            }
        }
        else
        {
            FileStream outStream = GetStream(FilePaths[i], false);

            if (inpStream == null || outStream == null)
            {
                if (ErrorNotification != null)
                {
                    ErrorNotification(this, string.Format("The file {0}; could not be written to", FilePaths[i]));
                }

                // BUGFIX: dispose whichever stream did open
                if (inpStream != null)
                {
                    inpStream.Dispose();
                }
                if (outStream != null)
                {
                    outStream.Dispose();
                }
            }
            else
            {
                m_volumeKey.State[m_volumeKey.GetIndex(vh.FileId)] = (byte)VolumeKeyStates.Decrypted;
                m_cipherStream.Initialize(false, key);
                m_cipherStream.Write(inpStream, outStream);
                // Strip the trailing volume header that was appended at encryption time.
                outStream.SetLength(outStream.Length - VolumeHeader.GetHeaderSize);

                prgCtr += inpStream.Position;
                CalculateProgress(prgCtr);

                inpStream.Dispose();
                outStream.Dispose();
                UpdateKey();
            }
        }
    }
}
/// <summary>Reads the volume header stored in the final GetHeaderSize bytes of the stream, then rewinds.</summary>
private VolumeHeader GetHeader(Stream InputStream)
{
    long headerStart = InputStream.Length - VolumeHeader.GetHeaderSize;
    InputStream.Seek(headerStart, SeekOrigin.Begin);
    var header = new VolumeHeader(InputStream);

    // Rewind so callers can read the payload from the top.
    InputStream.Seek(0, SeekOrigin.Begin);
    return header;
}
/// <summary>Packs an edge id with the endpoints ordered by linear index, so (a,b) and (b,a) map to the same id.</summary>
public static int PackVoxelEdgeIdSorted(VolumeHeader hd, Int3 a3, Int3 b3)
{
    bool ordered = hd.CubicToLinear(a3) < hd.CubicToLinear(b3);
    return ordered ? PackVoxelEdgeId(hd, a3, b3) : PackVoxelEdgeId(hd, b3, a3);
}
/// <summary>Returns true when the key file's tag matches the key id stored in the target file's trailing volume header.</summary>
private static bool IsMatchingVolumeKey(string TargetFile, string KeyPath)
{
    using (FileStream targetStream = new FileStream(TargetFile, FileMode.Open, FileAccess.Read))
    using (FileStream keyStream = new FileStream(KeyPath, FileMode.Open, FileAccess.Read))
    {
        // The volume header sits at the tail of the target file.
        targetStream.Seek(targetStream.Length - VolumeHeader.GetHeaderSize, SeekOrigin.Begin);
        VolumeHeader header = new VolumeHeader(targetStream);
        VolumeKey volumeKey = new VolumeKey(keyStream);
        return Evaluate.AreEqual(volumeKey.Tag, header.KeyId);
    }
}
/// <summary>
/// Reads the raw bytes of B-tree node <paramref name="nodeNumber"/> from the
/// given special file ("Catalog", "Attributes" or "ExtentsOverflow").
/// </summary>
/// <param name="volumeName">Volume to read from.</param>
/// <param name="fileName">Special file name; any other value leaves extents null and faults at the loop.</param>
/// <param name="nodeNumber">Node number within the file's B-tree.</param>
/// <returns>The node's bytes (NodeSize long), or null when the node lies past all extents.</returns>
internal static byte[] GetBytes(string volumeName, string fileName, uint nodeNumber)
{
    VolumeHeader volHeader = VolumeHeader.Get(volumeName);
    ExtentDescriptor[] extents = null;
    switch (fileName)
    {
        case "Catalog":
            extents = volHeader.CatalogFile.Extents;
            break;
        case "Attributes":
            extents = volHeader.AttributesFile.Extents;
            break;
        case "ExtentsOverflow":
            extents = volHeader.ExtentsOverflowFile.Extents;
            break;
    }

    Node headerNode = GetHeaderNode(volumeName, fileName);
    HeaderRecord headerRecord = headerNode.Records[0] as HeaderRecord;

    // Determine which blocks contain desired node's bytes
    // NOTE(review): integer division — if NodeSize < BlockSize this is 0 for every
    // node; this assumes NodeSize is a multiple of BlockSize. Verify on-disk layout.
    uint blockNumber = nodeNumber * (headerRecord.NodeSize / volHeader.BlockSize);

    // Starting Position within the extents
    uint extentPosition = 0;

    // Iterate through extents to determine which extent contains the Node
    foreach (ExtentDescriptor extent in extents)
    {
        uint relBlock = blockNumber - extentPosition;
        if (relBlock < extent.BlockCount)
        {
            return (Helper.readDrive(volumeName, (long)(extent.StartBlock + relBlock) * volHeader.BlockSize, headerRecord.NodeSize));
        }
        else
        {
            extentPosition += extent.BlockCount;
        }
    }

    // Need to throw an "Invalid Node Number" error
    return (null);
}
/// <summary>
/// Encrypt a file with a specific key
/// </summary>
///
/// <param name="FilePath">The full path to the file</param>
/// <param name="FileId">The files key id</param>
public void Encrypt(string FilePath, int FileId)
{
    if (m_progressTotal < 1)
    {
        throw new CryptoProcessingException("VolumeCipher:Initialize", "The files are all zero bytes!", new ArgumentException());
    }

    KeyParams key = VolumeKey.FromId(m_keyStream, FileId);

    if (key == null)
    {
        if (ErrorNotification != null)
        {
            ErrorNotification(this, string.Format("The file {0}; has no key assigned", FilePath));
        }
    }
    else
    {
        FileStream inpStream = GetStream(FilePath, true);
        FileStream outStream = GetStream(FilePath, false);

        if (inpStream == null || outStream == null)
        {
            if (ErrorNotification != null)
            {
                ErrorNotification(this, string.Format("The file {0}; could not be written to", FilePath));
            }

            // BUGFIX: whichever stream did open was leaked on this path
            if (inpStream != null)
            {
                inpStream.Dispose();
            }
            if (outStream != null)
            {
                outStream.Dispose();
            }
        }
        else
        {
            int index = m_volumeKey.GetIndex(FileId);
            m_volumeKey.State[index] = (byte)VolumeKeyStates.Encrypted;
            m_cipherStream.Initialize(true, key);
            m_cipherStream.ProgressPercent += OnCipherProgress;
            m_cipherStream.Write(inpStream, outStream);
            m_cipherStream.ProgressPercent -= OnCipherProgress;

            // write the header
            VolumeHeader vh = new VolumeHeader(m_volumeKey.Tag, m_volumeKey.FileId[index]);
            outStream.Write(vh.ToBytes(), 0, VolumeHeader.GetHeaderSize);

            inpStream.Dispose();
            outStream.Dispose();
            UpdateKey();
        }
    }
}
/// <summary>
/// Reads out the TrueCrypt Volume Header (512 Bytes)
/// </summary>
/// <param name="headerOffset">Offset from file begin</param>
/// <returns>VolumeHeader if successfull, null on I/O failure or truncated file</returns>
protected VolumeHeader ReadVolumeHeaderEncrypted(int headerOffset)
{
    try
    {
        // The using block disposes the stream; the old explicit Close() in a
        // finally was redundant.
        using(var device = File.OpenRead(volumePath))
        {
            var tmpbuffer = new byte[TrueCryptAPI.TC_VOLUME_HEADER_EFFECTIVE_SIZE];
            device.Seek(headerOffset, SeekOrigin.Begin);

            // BUGFIX: Stream.Read may return fewer bytes than requested; loop
            // until the whole header is read or EOF is reached.
            int total = 0;
            while(total < tmpbuffer.Length)
            {
                int read = device.Read(tmpbuffer, total, tmpbuffer.Length - total);
                if(read == 0)
                    break;
                total += read;
            }

            if(total < tmpbuffer.Length)
                return null; // file too short to contain a full header

            return new VolumeHeader(tmpbuffer);
        }
    }
    catch(IOException)
    {
        return null;
    }
}
/// <summary>
/// Reads out the TrueCrypt Volume Header (512 Bytes)
/// </summary>
/// <param name="headerOffset">Offset from file begin</param>
/// <returns>VolumeHeader if successfull, null on I/O failure or truncated file</returns>
protected VolumeHeader ReadVolumeHeaderEncrypted(int headerOffset)
{
    try
    {
        // The using block disposes the stream; the old explicit Close() in a
        // finally was redundant.
        using (var device = File.OpenRead(volumePath))
        {
            var tmpbuffer = new byte[TrueCryptAPI.TC_VOLUME_HEADER_EFFECTIVE_SIZE];
            device.Seek(headerOffset, SeekOrigin.Begin);

            // BUGFIX: Stream.Read may return fewer bytes than requested; loop
            // until the whole header is read or EOF is reached.
            int total = 0;
            while (total < tmpbuffer.Length)
            {
                int read = device.Read(tmpbuffer, total, tmpbuffer.Length - total);
                if (read == 0)
                {
                    break;
                }
                total += read;
            }

            if (total < tmpbuffer.Length)
            {
                return null; // file too short to contain a full header
            }

            return new VolumeHeader(tmpbuffer);
        }
    }
    catch (IOException)
    {
        return null;
    }
}
/// <summary>
/// For every index of the coarser mip level (mip+1), reads the 2x2x2 window of
/// values starting at that index in level <paramref name="mip"/>, folds them with
/// <paramref name="comb"/>, and writes the result into the coarser level.
/// No-op when there is no coarser level.
/// NOTE(review): the window origin uses the coarse index directly (not doubled) —
/// confirm the mip volumes share coordinate scaling.
/// </summary>
public void SpreadValuesUp(int mip, Func<T, T, T> comb)
{
    if (mip + 1 >= this.Mips.Length)
    {
        return;
    }
    var big = this.Mips[mip];
    var sml = this.Mips[mip + 1];
    // 2x2x2 header used only to enumerate the 8 neighbor offsets.
    var h2 = new VolumeHeader(new Int3(2, 2, 2));
    foreach (var si in sml.AllIndices3())
    {
        // Seed with offset (0,0,0); the loop folds in the remaining 7 offsets.
        var res = big.Read(si.AsCubic());
        for (int n = 1; n < h2.TotalCount; n++)
        {
            var ni = si.Add(h2.LinearToCubic(n));
            var sec = big.Read(ni.AsCubic());
            res = comb(res, sec);
        }
        sml.Write(si.AsCubic(), res);
    }
}
/// <summary>
/// Decrypt a single file in the volume
/// </summary>
///
/// <param name="InputPath">The path to the encrypted file</param>
/// <param name="OututPath">The path to the new decrypted file</param>
public void Decrypt(string InputPath, string OututPath)
{
    FileStream inpStream = GetStream(InputPath, true);
    VolumeHeader vh = GetHeader(inpStream);
    KeyParams key = VolumeKey.FromId(m_keyStream, vh.FileId);

    if (key == null)
    {
        if (ErrorNotification != null)
        {
            ErrorNotification(this, string.Format("The file {0}; has no key assigned", InputPath));
        }

        // BUGFIX: the input stream was leaked on this path
        if (inpStream != null)
        {
            inpStream.Dispose();
        }
    }
    else
    {
        FileStream outStream = GetStream(OututPath, false);

        if (inpStream == null || outStream == null)
        {
            if (ErrorNotification != null)
            {
                ErrorNotification(this, string.Format("The file {0}; could not be written to", OututPath));
            }

            // BUGFIX: dispose whichever stream did open
            if (inpStream != null)
            {
                inpStream.Dispose();
            }
            if (outStream != null)
            {
                outStream.Dispose();
            }
        }
        else
        {
            m_volumeKey.State[m_volumeKey.GetIndex(vh.FileId)] = (byte)VolumeKeyStates.Decrypted;
            m_cipherStream.ProgressPercent += OnCipherProgress;
            m_cipherStream.Initialize(false, key);
            m_cipherStream.Write(inpStream, outStream);
            m_cipherStream.ProgressPercent -= OnCipherProgress;
            // Strip the trailing volume header appended at encryption time.
            outStream.SetLength(outStream.Length - VolumeHeader.GetHeaderSize);

            inpStream.Dispose();
            outStream.Dispose();
            UpdateKey();
        }
    }
}
/// <summary>
/// Determines whether the given allocation block is marked used in the
/// volume's allocation bitmap file.
/// </summary>
/// <param name="volumeName">Volume to read from.</param>
/// <param name="blockNumber">Allocation block to test.</param>
/// <returns>true when the block's bitmap bit is set.</returns>
public static bool IsAllocationBlockUsed(string volumeName, uint blockNumber)
{
    // Get VolumeHeader
    VolumeHeader volHeader = VolumeHeader.Get(volumeName);

    // One bitmap bit per allocation block: eight blocks per bitmap byte.
    uint bytePosition = blockNumber / 0x8;

    // Determine which of AllocationFile's blocks the byte belongs to
    uint fileBlock = bytePosition / volHeader.BlockSize;

    uint relativeBlock = 0;
    byte[] blockBytes = null;
    foreach (ExtentDescriptor extent in volHeader.AllocationFile.Extents)
    {
        if (fileBlock < relativeBlock + extent.BlockCount)
        {
            uint blockToRead = fileBlock - relativeBlock + extent.StartBlock;
            blockBytes = Helper.readDrive(volumeName, (blockToRead * volHeader.BlockSize), volHeader.BlockSize);
            break;
        }
        else
        {
            relativeBlock += extent.BlockCount;
        }
    }

    // BUGFIX: previously fell through to a NullReferenceException when the
    // block lies beyond the allocation file's extents.
    if (blockBytes == null)
    {
        throw new ArgumentOutOfRangeException("blockNumber", "Block lies beyond the volume's allocation file extents.");
    }

    // Pick the right byte from the Sector
    byte byteToCheck = blockBytes[bytePosition % volHeader.BlockSize];
    byte position = (byte)(blockNumber % 8);

    // BUGFIX: the original tested (1 >> position), which is zero for every
    // position except 0, so only one block in eight could ever report "used".
    // HFS+ stores the bitmap MSB-first: the first block of a byte is bit 7
    // (see Apple TN1150, Allocation File), which matches the original author's
    // "opposite of what I'd expect" observation.
    return ((byteToCheck & (0x80 >> position)) != 0);
}
/// <summary>
/// Encrypt the files in the specified directory
/// </summary>
///
/// <param name="FilePaths">A list of the files to be processed</param>
///
/// <exception cref="CryptoProcessingException">Thrown if the VolumeKey does not contain enough keys to encrypt all the files in the directory</exception>
public void Encrypt(string[] FilePaths)
{
    if (FilePaths.Length < 1)
    {
        throw new CryptoProcessingException("VolumeCipher:Transform", "The file paths list is empty!", new ArgumentException());
    }
    if (m_volumeKey.KeyCount() < FilePaths.Length)
    {
        throw new CryptoProcessingException("VolumeCipher:Transform", "Not enough keys in the volume key to encrypt this directory!", new ArgumentException());
    }

    InitializeProgress(FilePaths);

    if (m_progressTotal < 1)
    {
        throw new CryptoProcessingException("VolumeCipher:Initialize", "The files are all zero bytes!", new ArgumentException());
    }

    long prgCtr = 0;

    for (int i = 0; i < FilePaths.Length; ++i)
    {
        int index = m_volumeKey.NextSubKey();
        KeyParams key = VolumeKey.AtIndex(m_keyStream, index);

        if (key == null)
        {
            if (ErrorNotification != null)
            {
                ErrorNotification(this, string.Format("The file {0}; has no key assigned", FilePaths[i]));
            }
        }
        else
        {
            FileStream inpStream = GetStream(FilePaths[i], true);
            FileStream outStream = GetStream(FilePaths[i], false);

            if (inpStream == null || outStream == null)
            {
                if (ErrorNotification != null)
                {
                    ErrorNotification(this, string.Format("The file {0}; could not be written to", FilePaths[i]));
                }

                // BUGFIX: whichever stream did open was leaked on this path
                if (inpStream != null)
                {
                    inpStream.Dispose();
                }
                if (outStream != null)
                {
                    outStream.Dispose();
                }
            }
            else
            {
                m_volumeKey.State[index] = (byte)VolumeKeyStates.Encrypted;
                m_cipherStream.Initialize(true, key);
                m_cipherStream.Write(inpStream, outStream);

                // write the header
                VolumeHeader vh = new VolumeHeader(m_volumeKey.Tag, m_volumeKey.FileId[index]);
                outStream.Write(vh.ToBytes(), 0, VolumeHeader.GetHeaderSize);

                prgCtr += inpStream.Position;
                CalculateProgress(prgCtr);

                inpStream.Dispose();
                outStream.Dispose();
                UpdateKey();
            }
        }
    }
}
// Sequentially parses the two fixed structures from the backing stream:
// the label header followed by the volume header.
private void _read()
{
    _labelHeader = new LabelHeader(m_io, this, m_root);
    _volumeHeader = new VolumeHeader(m_io, this, m_root);
}
/// <summary>Serialization is not implemented for VolumeHeader, so WriteTo must throw.</summary>
public void WriteTo_Throws()
{
    var header = new VolumeHeader();
    byte[] empty = Array.Empty<byte>();

    Assert.Throws<NotImplementedException>(() => header.WriteTo(empty, 0));
}
/// <summary>
/// Switches the reader to the given CAB volume and positions the stream at the
/// start of the current file's data, resolving split-file sizes from the volume
/// header when needed.
/// </summary>
void SetVolume(int newVolume)
{
    currentVolumeID = newVolume;
    if (!volumes.TryGetValue(currentVolumeID, out currentVolume))
        throw new FileNotFoundException("Volume {0} is not available".F(currentVolumeID));
    currentVolume.Position = 0;
    if (currentVolume.ReadUInt32() != 0x28635349)
        throw new InvalidDataException("Not an Installshield CAB package");

    uint fileOffset;
    if (file.Flags.HasFlag(CABFlags.FileSplit))
    {
        // Split files: size and offset come from the volume header.
        currentVolume.Position += CommonHeader.Size;
        var head = new VolumeHeader(currentVolume);
        if (file.Index == head.LastFileIndex)
        {
            if (file.Flags.HasFlag(CABFlags.FileCompressed))
                remainingInArchive = head.LastFileSizeCompressed;
            else
                remainingInArchive = head.LastFileSizeExpanded;
            fileOffset = head.LastFileOffset;
        }
        else if (file.Index == head.FirstFileIndex)
        {
            if (file.Flags.HasFlag(CABFlags.FileCompressed))
                remainingInArchive = head.FirstFileSizeCompressed;
            else
                remainingInArchive = head.FirstFileSizeExpanded;
            fileOffset = head.FirstFileOffset;
        }
        else
        {
            // BUGFIX: was a bare System.Exception; InvalidDataException matches
            // the other parse-failure paths of this method (and the sibling
            // braced implementation of SetVolume).
            throw new InvalidDataException("Cannot Resolve Remaining Stream");
        }
    }
    else
    {
        if (file.Flags.HasFlag(CABFlags.FileCompressed))
            remainingInArchive = file.CompressedSize;
        else
            remainingInArchive = file.ExpandedSize;
        fileOffset = file.DataOffset;
    }

    currentVolume.Position = fileOffset;
}
/// <summary>
/// Switches the reader to the given CAB volume and positions the stream at the
/// start of the current file's data, resolving split-file sizes from the volume
/// header when needed.
/// </summary>
void SetVolume(int newVolume)
{
    currentVolumeID = newVolume;
    if (!volumes.TryGetValue(currentVolumeID, out currentVolume))
    {
        throw new FileNotFoundException("Volume {0} is not available".F(currentVolumeID));
    }

    // Every volume starts with the InstallShield magic "ISc(".
    currentVolume.Position = 0;
    if (currentVolume.ReadUInt32() != 0x28635349)
    {
        throw new InvalidDataException("Not an Installshield CAB package");
    }

    bool compressed = file.Flags.HasFlag(CABFlags.FileCompressed);
    uint fileOffset;

    if (file.Flags.HasFlag(CABFlags.FileSplit))
    {
        // Split files: size and offset come from the volume header.
        currentVolume.Position += CommonHeader.Size;
        var head = new VolumeHeader(currentVolume);

        if (file.Index == head.LastFileIndex)
        {
            remainingInArchive = compressed ? head.LastFileSizeCompressed : head.LastFileSizeExpanded;
            fileOffset = head.LastFileOffset;
        }
        else if (file.Index == head.FirstFileIndex)
        {
            remainingInArchive = compressed ? head.FirstFileSizeCompressed : head.FirstFileSizeExpanded;
            fileOffset = head.FirstFileOffset;
        }
        else
        {
            throw new InvalidDataException("Cannot Resolve Remaining Stream");
        }
    }
    else
    {
        remainingInArchive = compressed ? file.CompressedSize : file.ExpandedSize;
        fileOffset = file.DataOffset;
    }

    currentVolume.Position = fileOffset;
}
/// <summary>
/// Describes an HFS+/HFSX volume: locates the volume header (including volumes
/// wrapped inside an HFS wrapper), decodes its fields into human-readable text,
/// and fills XmlFsType.
/// </summary>
public void GetInformation(IMediaImage imagePlugin, Partition partition, out string information, Encoding encoding)
{
    Encoding = Encoding.BigEndianUnicode;
    information = "";
    var vh = new VolumeHeader();
    ulong hfspOffset;
    bool wrapped;
    // Sectors needed to cover the first 0x800 bytes (header lives at offset 0x400).
    uint sectorsToRead = 0x800 / imagePlugin.Info.SectorSize;
    if (0x800 % imagePlugin.Info.SectorSize > 0)
    {
        sectorsToRead++;
    }
    byte[] vhSector = imagePlugin.ReadSectors(partition.Start, sectorsToRead);
    ushort drSigWord = BigEndianBitConverter.ToUInt16(vhSector, 0x400);
    if (drSigWord == AppleCommon.HFS_MAGIC) // "BD"
    {
        drSigWord = BigEndianBitConverter.ToUInt16(vhSector, 0x47C); // Read embedded HFS+ signature
        if (drSigWord == AppleCommon.HFSP_MAGIC) // "H+"
        {
            // HFS wrapper: compute where the embedded HFS+ volume starts.
            ushort xdrStABNt = BigEndianBitConverter.ToUInt16(vhSector, 0x47E);
            uint drAlBlkSiz = BigEndianBitConverter.ToUInt32(vhSector, 0x414);
            ushort drAlBlSt = BigEndianBitConverter.ToUInt16(vhSector, 0x41C);
            hfspOffset = (ulong)(((drAlBlSt * 512) + (xdrStABNt * drAlBlkSiz)) / imagePlugin.Info.SectorSize);
            wrapped = true;
        }
        else
        {
            hfspOffset = 0;
            wrapped = false;
        }
    }
    else
    {
        hfspOffset = 0;
        wrapped = false;
    }
    vhSector = imagePlugin.ReadSectors(partition.Start + hfspOffset, sectorsToRead); // Read volume header
    vh.signature = BigEndianBitConverter.ToUInt16(vhSector, 0x400);
    if (vh.signature != AppleCommon.HFSP_MAGIC && vh.signature != AppleCommon.HFSX_MAGIC)
    {
        return;
    }
    var sb = new StringBuilder();
    if (vh.signature == 0x482B)
    {
        sb.AppendLine("HFS+ filesystem.");
    }
    if (vh.signature == 0x4858)
    {
        sb.AppendLine("HFSX filesystem.");
    }
    if (wrapped)
    {
        sb.AppendLine("Volume is wrapped inside an HFS volume.");
    }
    // Reparse the 0x400-byte header from its offset within the sector buffer.
    byte[] tmp = new byte[0x400];
    Array.Copy(vhSector, 0x400, tmp, 0, 0x400);
    vhSector = tmp;
    vh = Marshal.ByteArrayToStructureBigEndian<VolumeHeader>(vhSector);
    if (vh.version == 4 || vh.version == 5)
    {
        sb.AppendFormat("Filesystem version is {0}.", vh.version).AppendLine();
        if ((vh.attributes & 0x80) == 0x80)
        {
            sb.AppendLine("Volume is locked on hardware.");
        }
        if ((vh.attributes & 0x100) == 0x100)
        {
            sb.AppendLine("Volume is unmounted.");
        }
        if ((vh.attributes & 0x200) == 0x200)
        {
            sb.AppendLine("There are bad blocks in the extents file.");
        }
        if ((vh.attributes & 0x400) == 0x400)
        {
            sb.AppendLine("Volume does not require cache.");
        }
        if ((vh.attributes & 0x800) == 0x800)
        {
            sb.AppendLine("Volume state is inconsistent.");
        }
        if ((vh.attributes & 0x1000) == 0x1000)
        {
            sb.AppendLine("CNIDs are reused.");
        }
        if ((vh.attributes & 0x2000) == 0x2000)
        {
            sb.AppendLine("Volume is journaled.");
        }
        if ((vh.attributes & 0x8000) == 0x8000)
        {
            sb.AppendLine("Volume is locked on software.");
        }
        sb.AppendFormat("Implementation that last mounted the volume: \"{0}\".", Encoding.ASCII.GetString(vh.lastMountedVersion)).AppendLine();
        if ((vh.attributes & 0x2000) == 0x2000)
        {
            sb.AppendFormat("Journal starts at allocation block {0}.", vh.journalInfoBlock).AppendLine();
        }
        sb.AppendFormat("Creation date: {0}", DateHandlers.MacToDateTime(vh.createDate)).AppendLine();
        sb.AppendFormat("Last modification date: {0}", DateHandlers.MacToDateTime(vh.modifyDate)).AppendLine();
        if (vh.backupDate > 0)
        {
            sb.AppendFormat("Last backup date: {0}", DateHandlers.MacToDateTime(vh.backupDate)).AppendLine();
        }
        else
        {
            sb.AppendLine("Volume has never been backed up");
        }
        // NOTE(review): this condition tests backupDate but reports checkedDate —
        // likely a copy-paste slip; should presumably be vh.checkedDate > 0. Verify.
        if (vh.backupDate > 0)
        {
            sb.AppendFormat("Last check date: {0}", DateHandlers.MacToDateTime(vh.checkedDate)).AppendLine();
        }
        else
        {
            sb.AppendLine("Volume has never been checked up");
        }
        sb.AppendFormat("{0} files on volume.", vh.fileCount).AppendLine();
        sb.AppendFormat("{0} folders on volume.", vh.folderCount).AppendLine();
        sb.AppendFormat("{0} bytes per allocation block.", vh.blockSize).AppendLine();
        sb.AppendFormat("{0} allocation blocks.", vh.totalBlocks).AppendLine();
        sb.AppendFormat("{0} free blocks.", vh.freeBlocks).AppendLine();
        sb.AppendFormat("Next allocation block: {0}.", vh.nextAllocation).AppendLine();
        sb.AppendFormat("Resource fork clump size: {0} bytes.", vh.rsrcClumpSize).AppendLine();
        sb.AppendFormat("Data fork clump size: {0} bytes.", vh.dataClumpSize).AppendLine();
        sb.AppendFormat("Next unused CNID: {0}.", vh.nextCatalogID).AppendLine();
        sb.AppendFormat("Volume has been mounted writable {0} times.", vh.writeCount).AppendLine();
        sb.AppendFormat("Allocation File is {0} bytes.", vh.allocationFile_logicalSize).AppendLine();
        sb.AppendFormat("Extents File is {0} bytes.", vh.extentsFile_logicalSize).AppendLine();
        sb.AppendFormat("Catalog File is {0} bytes.", vh.catalogFile_logicalSize).AppendLine();
        sb.AppendFormat("Attributes File is {0} bytes.", vh.attributesFile_logicalSize).AppendLine();
        sb.AppendFormat("Startup File is {0} bytes.", vh.startupFile_logicalSize).AppendLine();
        sb.AppendLine("Finder info:");
        sb.AppendFormat("CNID of bootable system's directory: {0}", vh.drFndrInfo0).AppendLine();
        sb.AppendFormat("CNID of first-run application's directory: {0}", vh.drFndrInfo1).AppendLine();
        sb.AppendFormat("CNID of previously opened directory: {0}", vh.drFndrInfo2).AppendLine();
        sb.AppendFormat("CNID of bootable Mac OS 8 or 9 directory: {0}", vh.drFndrInfo3).AppendLine();
        sb.AppendFormat("CNID of bootable Mac OS X directory: {0}", vh.drFndrInfo5).AppendLine();
        if (vh.drFndrInfo6 != 0 && vh.drFndrInfo7 != 0)
        {
            sb.AppendFormat("Mac OS X Volume ID: {0:X8}{1:X8}", vh.drFndrInfo6, vh.drFndrInfo7).AppendLine();
        }
        XmlFsType = new FileSystemType();
        if (vh.backupDate > 0)
        {
            XmlFsType.BackupDate = DateHandlers.MacToDateTime(vh.backupDate);
            XmlFsType.BackupDateSpecified = true;
        }
        XmlFsType.Bootable |= vh.drFndrInfo0 != 0 || vh.drFndrInfo3 != 0 || vh.drFndrInfo5 != 0;
        XmlFsType.Clusters = vh.totalBlocks;
        XmlFsType.ClusterSize = vh.blockSize;
        if (vh.createDate > 0)
        {
            XmlFsType.CreationDate = DateHandlers.MacToDateTime(vh.createDate);
            XmlFsType.CreationDateSpecified = true;
        }
        XmlFsType.Dirty = (vh.attributes & 0x100) != 0x100;
        XmlFsType.Files = vh.fileCount;
        XmlFsType.FilesSpecified = true;
        XmlFsType.FreeClusters = vh.freeBlocks;
        XmlFsType.FreeClustersSpecified = true;
        if (vh.modifyDate > 0)
        {
            XmlFsType.ModificationDate = DateHandlers.MacToDateTime(vh.modifyDate);
            XmlFsType.ModificationDateSpecified = true;
        }
        if (vh.signature == 0x482B)
        {
            XmlFsType.Type = "HFS+";
        }
        if (vh.signature == 0x4858)
        {
            XmlFsType.Type = "HFSX";
        }
        if (vh.drFndrInfo6 != 0 && vh.drFndrInfo7 != 0)
        {
            XmlFsType.VolumeSerial = $"{vh.drFndrInfo6:X8}{vh.drFndrInfo7:X8}";
        }
        XmlFsType.SystemIdentifier = Encoding.ASCII.GetString(vh.lastMountedVersion);
    }
    else
    {
        sb.AppendFormat("Filesystem version is {0}.", vh.version).AppendLine();
        sb.AppendLine("This version is not supported yet.");
    }
    information = sb.ToString();
}
/// <summary>
/// Describes a ReFS volume: reads and validates the volume header, dumps its
/// fields to the debug log, and fills the information string and XmlFsType.
/// </summary>
public void GetInformation(IMediaImage imagePlugin, Partition partition, out string information, Encoding encoding)
{
    Encoding = Encoding.UTF8;
    information = "";
    // Sector count that covers one VolumeHeader, rounded up to whole sectors.
    uint sbSize = (uint)(Marshal.SizeOf<VolumeHeader>() / imagePlugin.Info.SectorSize);
    if (Marshal.SizeOf<VolumeHeader>() % imagePlugin.Info.SectorSize != 0)
    {
        sbSize++;
    }
    if (partition.Start + sbSize >= partition.End)
    {
        return;
    }
    byte[] sector = imagePlugin.ReadSectors(partition.Start, sbSize);
    if (sector.Length < Marshal.SizeOf<VolumeHeader>())
    {
        return;
    }
    VolumeHeader vhdr = Marshal.ByteArrayToStructureLittleEndian<VolumeHeader>(sector);
    // Dump every header field for debugging before validating.
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.jump empty? = {0}", ArrayHelpers.ArrayIsNullOrEmpty(vhdr.jump));
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.signature = {0}", StringHandlers.CToString(vhdr.signature));
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.mustBeZero empty? = {0}", ArrayHelpers.ArrayIsNullOrEmpty(vhdr.mustBeZero));
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.identifier = {0}", StringHandlers.CToString(BitConverter.GetBytes(vhdr.identifier)));
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.length = {0}", vhdr.length);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.checksum = 0x{0:X4}", vhdr.checksum);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.sectors = {0}", vhdr.sectors);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.bytesPerSector = {0}", vhdr.bytesPerSector);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.sectorsPerCluster = {0}", vhdr.sectorsPerCluster);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.unknown1 zero? = {0}", vhdr.unknown1 == 0);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.unknown2 zero? = {0}", vhdr.unknown2 == 0);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.unknown3 zero? = {0}", vhdr.unknown3 == 0);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.unknown4 zero? = {0}", vhdr.unknown4 == 0);
    AaruConsole.DebugWriteLine("ReFS plugin", "VolumeHeader.unknown5 empty? = {0}", ArrayHelpers.ArrayIsNullOrEmpty(vhdr.unknown5));
    // Same validity criteria as Identify().
    if (vhdr.identifier != FSRS || !ArrayHelpers.ArrayIsNullOrEmpty(vhdr.mustBeZero) || !vhdr.signature.SequenceEqual(_signature))
    {
        return;
    }
    var sb = new StringBuilder();
    sb.AppendLine("Microsoft Resilient File System");
    sb.AppendFormat("Volume uses {0} bytes per sector", vhdr.bytesPerSector).AppendLine();
    sb.AppendFormat("Volume uses {0} sectors per cluster ({1} bytes)", vhdr.sectorsPerCluster, vhdr.sectorsPerCluster * vhdr.bytesPerSector).AppendLine();
    sb.AppendFormat("Volume has {0} sectors ({1} bytes)", vhdr.sectors, vhdr.sectors * vhdr.bytesPerSector).AppendLine();
    information = sb.ToString();
    XmlFsType = new FileSystemType
    {
        Type = "Resilient File System",
        ClusterSize = vhdr.bytesPerSector * vhdr.sectorsPerCluster,
        Clusters = vhdr.sectors / vhdr.sectorsPerCluster
    };
}
/// <summary>
/// Advances to the next CAB volume file, validates its magic, and positions the
/// stream at the current file's data, resolving split-file sizes from the
/// volume header when needed.
/// </summary>
public void NextFile()
{
    if (cabFile != null)
        cabFile.Dispose();
    ++volumeNumber;
    cabFile = GlobalFileSystem.Open("{0}{1}.cab".F(commonName, volumeNumber));
    if (cabFile.ReadUInt32() != 0x28635349)
        throw new InvalidDataException("Not an Installshield CAB package");

    uint fileOffset;
    if ((fileDes.Flags & FileSplit) != 0)
    {
        // Split files: size and offset come from the volume header.
        cabFile.Seek(CommonHeader.Size, SeekOrigin.Current);
        var head = new VolumeHeader(cabFile);
        if (index == head.LastFileIndex)
        {
            if ((fileDes.Flags & FileCompressed) != 0)
                RemainingArchiveStream = head.LastFileSizeCompressed;
            else
                RemainingArchiveStream = head.LastFileSizeExpanded;
            fileOffset = head.LastFileOffset;
        }
        else if (index == head.FirstFileIndex)
        {
            if ((fileDes.Flags & FileCompressed) != 0)
                RemainingArchiveStream = head.FirstFileSizeCompressed;
            else
                RemainingArchiveStream = head.FirstFileSizeExpanded;
            fileOffset = head.FirstFileOffset;
        }
        else
        {
            // BUGFIX: was a bare System.Exception; InvalidDataException matches
            // the other parse-failure path of this method.
            throw new InvalidDataException("Cannot Resolve Remaining Stream");
        }
    }
    else
    {
        if ((fileDes.Flags & FileCompressed) != 0)
            RemainingArchiveStream = fileDes.CompressedSize;
        else
            RemainingArchiveStream = fileDes.ExpandedSize;
        fileOffset = fileDes.DataOffset;
    }

    cabFile.Seek(fileOffset, SeekOrigin.Begin);
}
/// <summary>
/// Packs all queued files (and strips <paramref name="filesToRemove"/>) into a patch
/// file system under <paramref name="outrepackDir"/>, then rewrites the Table of
/// Contents and the encrypted volume header on disk.
/// </summary>
/// <param name="outrepackDir">Target directory; content is built in "{outrepackDir}_temp"
/// first and then moved over any existing directory of that name.</param>
/// <param name="filesToRemove">Paths of files to remove from the volume.</param>
/// <param name="packAllAsNew">When true, files are packed under a new TOC entry index.</param>
/// <param name="customTitleID">Custom title ID stamped into the header when non-empty and
/// at most 128 characters; longer values are silently ignored.</param>
public void PackFiles(string outrepackDir, string[] filesToRemove, bool packAllAsNew, string customTitleID)
{
    // Nothing to do: give the user a chance to abort interactively.
    if (FilesToPack.Count == 0 && filesToRemove.Length == 0)
    {
        Program.Log("[X] Found no files to pack or remove from volume.", forceConsolePrint: true);
        Console.WriteLine("[?] Continue? (Y/N)");
        if (Console.ReadKey().Key != ConsoleKey.Y)
        {
            return;
        }
    }

    // Leftover temp directory from a previous (aborted) run?
    if (Directory.Exists($"{outrepackDir}_temp"))
    {
        Directory.Delete($"{outrepackDir}_temp", true);
    }

    // Create temp to make sure we aren't transfering user leftovers
    Directory.CreateDirectory($"{outrepackDir}_temp");

    Program.Log($"[-] Preparing to pack {FilesToPack.Count} files, and remove {filesToRemove.Length} files");
    PackCache newCache = TableOfContents.PackFilesForPatchFileSystem(FilesToPack, _packCache, filesToRemove, outrepackDir, packAllAsNew);
    if (UsePackingCache)
    {
        // Persist the pack cache so a later run can reuse it.
        newCache.Save(".pack_cache");
    }

    // Delete main one if needed, then promote the temp directory to the real output.
    if (Directory.Exists(outrepackDir))
    {
        Directory.Delete(outrepackDir, true);
    }
    Directory.Move($"{outrepackDir}_temp", outrepackDir);

    Program.Log($"[-] Verifying and fixing Table of Contents segment sizes if needed");
    if (!TableOfContents.TryCheckAndFixInvalidSegmentIndexes())
    {
        Program.Log($"[-] Re-ordered segment indexes.");
    }
    else
    {
        Program.Log($"[/] Segment sizes are correct.");
    }

    if (packAllAsNew)
    {
        Program.Log($"[-] Packing as new: New TOC Entry Index is {VolumeHeader.TOCEntryIndex}.");
    }

    Program.Log($"[-] Saving Table of Contents ({PDIPFSPathResolver.GetPathFromSeed(VolumeHeader.TOCEntryIndex)})");
    TableOfContents.SaveToPatchFileSystem(outrepackDir, out uint compressedSize, out uint uncompressedSize);

    // Optional custom title ID (128-character limit enforced here).
    if (!string.IsNullOrEmpty(customTitleID) && customTitleID.Length <= 128)
    {
        VolumeHeader.HasCustomGameID = true;
        VolumeHeader.TitleID = customTitleID;
    }

    // Record the final TOC sizes and the total volume size in the header.
    VolumeHeader.CompressedTOCSize = compressedSize;
    VolumeHeader.TOCSize = uncompressedSize;
    VolumeHeader.TotalVolumeSize = TableOfContents.GetTotalPatchFileSystemSize(compressedSize);

    Program.Log($"[-] Saving main volume header ({PDIPFSPathResolver.Default})");

    // Serialize the header, encrypt it in place, and write it at its fixed path.
    byte[] header = VolumeHeader.Serialize();
    Span<uint> headerBlocks = MemoryMarshal.Cast<byte, uint>(header);
    Keyset.EncryptBlocks(headerBlocks, headerBlocks);
    Keyset.CryptData(header, BASE_VOLUME_ENTRY_INDEX);

    string headerPath = Path.Combine(outrepackDir, PDIPFSPathResolver.Default);
    Directory.CreateDirectory(Path.GetDirectoryName(headerPath));
    File.WriteAllBytes(headerPath, header);

    Program.Log($"[/] Done packing.", forceConsolePrint: true);
}
/// <summary>
/// Opens the next cabinet volume of a multi-part InstallShield archive via
/// <paramref name="context"/> and positions the stream at the current file's data,
/// setting <c>RemainingArchiveStream</c> to the bytes available in this volume.
/// </summary>
/// <param name="context">File system used to open the next ".cab" volume.</param>
/// <exception cref="InvalidDataException">
/// The next volume is not an InstallShield cabinet, or the current file index matches
/// neither the first nor the last split-file record of the volume header.
/// </exception>
void NextFile(FileSystem context)
{
	// Release the previous volume's handle before opening the next one.
	if (cabFile != null)
	{
		cabFile.Dispose();
	}

	++volumeNumber;
	cabFile = context.Open("{0}{1}.cab".F(commonName, volumeNumber));

	// 0x28635349 == little-endian "ISc(" — the InstallShield cabinet magic number.
	if (cabFile.ReadUInt32() != 0x28635349)
	{
		throw new InvalidDataException("Not an Installshield CAB package");
	}

	uint fileOffset;
	if ((fileDes.Flags & FileSplit) != 0)
	{
		// Split file: this volume's header says whether it carries the first or
		// the last portion of the file, and where that portion starts.
		cabFile.Seek(CommonHeader.Size, SeekOrigin.Current);
		var head = new VolumeHeader(cabFile);
		if (index == head.LastFileIndex)
		{
			if ((fileDes.Flags & FileCompressed) != 0)
			{
				RemainingArchiveStream = head.LastFileSizeCompressed;
			}
			else
			{
				RemainingArchiveStream = head.LastFileSizeExpanded;
			}

			fileOffset = head.LastFileOffset;
		}
		else if (index == head.FirstFileIndex)
		{
			if ((fileDes.Flags & FileCompressed) != 0)
			{
				RemainingArchiveStream = head.FirstFileSizeCompressed;
			}
			else
			{
				RemainingArchiveStream = head.FirstFileSizeExpanded;
			}

			fileOffset = head.FirstFileOffset;
		}
		else
		{
			// Was `new Exception(...)`: InvalidDataException matches the
			// malformed-archive failure above and is still caught by any
			// existing `catch (Exception)` handler.
			throw new InvalidDataException("Cannot Resolve Remaining Stream");
		}
	}
	else
	{
		// Unsplit file: sizes and data offset come straight from the descriptor.
		if ((fileDes.Flags & FileCompressed) != 0)
		{
			RemainingArchiveStream = fileDes.CompressedSize;
		}
		else
		{
			RemainingArchiveStream = fileDes.ExpandedSize;
		}

		fileOffset = fileDes.DataOffset;
	}

	cabFile.Seek(fileOffset, SeekOrigin.Begin);
}
/// <summary>
/// Builds a surface mesh from a scalar volume by splitting each grid cell into
/// tetrahedra and emitting one shared vertex on every tetrahedron edge whose
/// endpoint signs (per <paramref name="signTest"/>) differ. Cut tetrahedra yield
/// triangles or quads; quads are split into triangles, windings are fixed using the
/// per-vertex edge direction, and triangles are sorted by descending distance from
/// <paramref name="relativeCameraPos"/>.
/// </summary>
/// <param name="vol">Scalar field sampled on a regular grid.</param>
/// <param name="signTest">Maps a cell value to a signed scalar; the surface lies at its zero crossings.</param>
/// <param name="relativeCameraPos">Camera position in mesh space, used for depth sorting.</param>
/// <param name="optionalModel">When non-null, per-vertex tangents are derived from its field cells.</param>
/// <returns>A Unity <see cref="Mesh"/> with vertices, sorted triangles and recalculated normals.</returns>
public static Mesh GenerateSurfaceVolume<T>(VolumeBuffer<T> vol, Func<T, float> signTest, Vector3 relativeCameraPos, DynamicFieldModel optionalModel = null)
{
	int tetCorners = 4;

	// Packed voxel-edge id -> vertex index, so edges shared between
	// tetrahedra reuse the same vertex.
	Dictionary<int, int> ndxToVertex = new Dictionary<int, int> ();
	List<int> triangles = new List<int> ();
	List<int> quads = new List<int> ();
	float[] signes = new float[tetCorners];
	List<int> edgesInTetra = new List<int> ();

	// 2x2x2 helper header: converts a tetra corner's linear index into a cube-corner offset.
	VolumeHeader h2 = new VolumeHeader(new Int3(2, 2, 2));

	foreach (var ndx in vol.AllIndices3()) {
		// Skip the zero faces so (ndx - 1 .. ndx) always spans a complete cell.
		if ((ndx.X != 0) && (ndx.Y != 0) && (ndx.Z != 0)) {
			var start = ndx.Add(new Int3(-1, -1, -1));
			foreach (var ts in TetrasInCube) {
				// Sample the sign of the field at each corner of this tetrahedron.
				// NOTE: the original also built an unused `signKey` bitmask here; removed.
				int countPos = 0, countNeg = 0;
				for (int cornerIndex = 0; cornerIndex < ts.Length; cornerIndex++) {
					var lndx = ts[cornerIndex];
					var cur = AddInvertTileOffset3(start, (h2.LinearToCubic(lndx)));
					var sv = signTest(vol.Read(cur));
					if (sv >= 0.0f) {
						countPos++;
					} else {
						countNeg++;
					}
					signes [cornerIndex] = sv;
				}

				// Mixed signs: the surface passes through this tetrahedron.
				if ((countPos > 0) && (countNeg > 0)) {
					edgesInTetra.Clear();

					// Collect every tetra edge whose endpoints have opposite signs.
					for (int ei = 0; ei < VolumeTetrahedraSurfacer.EdgesInTetra.Length; ei += 2) {
						var ef = VolumeTetrahedraSurfacer.EdgesInTetra [ei + 0];
						var et = VolumeTetrahedraSurfacer.EdgesInTetra [ei + 1];
						if (((signes [ef]) * (signes [et])) < 0.0f) {
							// we have something on this edge!
							var tf = ts[ef];
							var tt = ts[et];
							var vf = AddInvertTileOffset3(start, h2.LinearToCubic(tf));
							var vt = AddInvertTileOffset3(start, h2.LinearToCubic(tt));
							int encoded = PackVoxelEdgeIdSorted(vol.Header, vf, vt);
							edgesInTetra.Add(encoded);
						}
					}

					// A cut tetrahedron yields a triangle (3 crossed edges) or a quad (4).
					foreach (var ee in edgesInTetra) {
						int vid;
						if (ndxToVertex.ContainsKey(ee)) {
							vid = ndxToVertex [ee];
						} else {
							vid = ndxToVertex.Count;
							ndxToVertex.Add(ee, vid);
						}
						if (edgesInTetra.Count == 3) {
							triangles.Add(vid);
						} else if (edgesInTetra.Count == 4) {
							quads.Add(vid);
						} else {
							Debug.Assert(false, "Really?? c=" + edgesInTetra.Count);
						}
					}
				}
			}
		}
	}

	List<Vector3> vertices = new List<Vector3> ();
	List<Vector3> vertexSigns = new List<Vector3> ();
	List<Vector4> vertexTangents = null;
	if (optionalModel != null) {
		vertexTangents = new List<Vector4> ();
	}

	// Scale grid coordinates into the unit cube centred on the origin.
	Vector3 invScale = new Vector3(1.0f / (vol.Size.X - 1), 1.0f / (vol.Size.Y - 1), 1.0f / (vol.Size.Z - 1));
	foreach (var kv in ndxToVertex) {
		// setup vertices from edge data:
		var packed = kv.Key;
		var vid = kv.Value;
		while (vertices.Count <= vid) {
			vertices.Add(Vector3.zero);
			vertexSigns.Add(Vector3.zero);
			if (optionalModel != null) {
				vertexTangents.Add(Vector4.zero);
			}
		}

		Int3 a3, b3;
		UnpackVoxelEdgeId(vol.Header, packed, out a3, out b3);
		var wa = signTest(vol.Read(a3));
		var wb = signTest(vol.Read(b3));

		// Place the vertex at the estimated zero crossing along the edge.
		var wab = Mathf.Abs(wa) / (Mathf.Abs(wa) + Mathf.Abs(wb));
		var pos = Vector3.Lerp(a3.AsVector3(), b3.AsVector3(), wab); // TODO: weight value based on signed root
		var upos = new Vector3(pos.x * invScale.x, pos.y * invScale.y, pos.z * invScale.z) - (Vector3.one * 0.5f);
		vertices [vid] = upos;

		// Edge direction signed toward the larger endpoint; used below to fix windings.
		vertexSigns [vid] = (a3.AsVector3() - b3.AsVector3()) * Mathf.Sign(wa - wb);
		if (optionalModel != null) {
			var ta = CalcFlowTangent(optionalModel, optionalModel.FieldsCells.Read(a3));
			var tb = CalcFlowTangent(optionalModel, optionalModel.FieldsCells.Read(b3));
			vertexTangents [vid] = Vector4.Lerp(ta, tb, wab);
		}
	}

	// Split each quad (indices qi..qi+3) into triangles a-b-c and b-d-c.
	// NOTE(review): no check is made that the first two corners are the
	// furthest-apart pair — confirm against the edge emission order above.
	for (int qi = 0; qi < quads.Count; qi += 4) {
		triangles.Add(quads[qi + 0]);
		triangles.Add(quads[qi + 1]);
		triangles.Add(quads[qi + 2]);
		triangles.Add(quads[qi + 1]);
		triangles.Add(quads[qi + 3]);
		triangles.Add(quads[qi + 2]);
	}

	var trisToSort = new List<SortTri> ();
	for (int i = 0; i < triangles.Count; i += 3) {
		// Flip the winding when the face normal points along the stored sign direction.
		var a = vertices[triangles[i + 0]];
		var b = vertices[triangles[i + 1]];
		var c = vertices[triangles[i + 2]];
		var n = Vector3.Cross(b - a, c - b);
		if (Vector3.Dot(vertexSigns [triangles [i + 0]], n) >= 0.0f) {
			SwapListValues(triangles, i + 1, i + 2);
		}

		// Record the triangle with its centroid's distance from the camera.
		SortTri tri;
		tri.I0 = triangles [i + 0];
		tri.I1 = triangles [i + 1];
		tri.I2 = triangles [i + 2];
		tri.DistFromCam = (relativeCameraPos - ((a + b + c) * (1.0f / 3.0f))).magnitude;
		trisToSort.Add(tri);
	}

	// Rebuild the index list sorted by descending camera distance (far triangles first).
	trisToSort.Sort((a, b) => - (a.DistFromCam.CompareTo(b.DistFromCam)));
	triangles.Clear();
	foreach (var t in trisToSort) {
		triangles.Add(t.I0);
		triangles.Add(t.I1);
		triangles.Add(t.I2);
	}

	Mesh result = new Mesh();
	result.SetVertices(vertices);
	if (optionalModel != null) {
		result.SetTangents(vertexTangents);
	}
	result.triangles = (triangles.ToArray());
	result.RecalculateNormals();
	return (result);
}