public void MustReadCorrectly()
{
    var underlyingPosition = 750L;
    var sut = new SubStream(stream.Object, 100, 200);

    // Mirror Position get/set on the mocked base stream and make every
    // Read report that the full requested count was delivered.
    stream.SetupGet(s => s.Position).Returns(underlyingPosition);
    stream.SetupSet(s => s.Position = It.IsAny<long>()).Callback<long>(p => underlyingPosition = p);
    stream.Setup(s => s.Read(It.IsAny<byte[]>(), It.IsAny<int>(), It.IsAny<int>()))
        .Returns<byte[], int, int>((buf, off, cnt) => cnt);

    // A read entirely inside the window passes the count through unchanged.
    sut.Position = 50;
    var bytesRead = sut.Read(new byte[25], 0, 25);

    stream.Verify(s => s.Read(It.IsAny<byte[]>(), 0, 25), Times.Once);
    Assert.AreEqual(25, bytesRead);
    Assert.AreEqual(75, sut.Position);
    Assert.AreEqual(750, underlyingPosition);

    // A read crossing the logical end must clip to the remaining 50 bytes.
    sut.Position = 150;
    bytesRead = sut.Read(new byte[75], 0, 75);

    stream.Verify(s => s.Read(It.IsAny<byte[]>(), 0, 50), Times.Once);
    Assert.AreEqual(50, bytesRead);
    Assert.AreEqual(200, sut.Position);
    Assert.AreEqual(750, underlyingPosition);
}
// Loads a PCK texture image: reads the PCK/TEX headers, detects the payload
// format by its 4-byte magic (plain PNG or LZ77-compressed indexed data with
// palette), and preserves the trailing unknown region for later re-saving.
public IKanvasImage Load(Stream input)
{
    using var br = new BinaryReaderX(input);

    // Read header
    var header = br.ReadType<PckHeader>();
    _texHeader = br.ReadType<TexHeader>();

    // Read data
    var imgData = new SubStream(input, input.Position, _texHeader.dataSize);

    // Peek the 4-byte magic, then rewind the sub-stream.
    var buffer = new byte[4];
    imgData.Read(buffer); // NOTE(review): return value ignored — assumes 4 bytes are available
    imgData.Position -= 4;

    IKanvasImage image;
    switch (_magic = BinaryPrimitives.ReadUInt32BigEndian(buffer))
    {
        case 0x89504E47: // PNG
            var img = (Bitmap)Image.FromStream(imgData);
            image = new BitmapKanvasImage(img);
            break;

        case 0x4C5A3737: // LZ77
            // Decompress image data buffer
            var decompressedData = new MemoryStream();
            Compressions.StingLz.Build().Decompress(imgData, decompressedData);
            decompressedData.Position = 0;

            var dataBr = new BinaryReaderX(decompressedData);

            // Prepare image info
            // Pixel data is width*height bytes, followed by a 256-entry,
            // 4-bytes-per-color palette.
            var dataSize = _texHeader.width * _texHeader.height;
            var imageInfo = new ImageInfo(dataBr.ReadBytes(dataSize), 0, new Size(_texHeader.width, _texHeader.height))
            {
                PaletteData = dataBr.ReadBytes(256 * 4),
                PaletteFormat = 0
            };

            image = new KanvasImage(TexSupport.GetEncodingDefinition(), imageInfo);
            break;

        default:
            throw new InvalidOperationException("Unknown data type.");
    }

    // Read unknown region
    // Skip to the 4-byte-aligned end of the header-declared size and keep the tail.
    input.Position = (header.size + 3) & ~3;
    _unkRegion = br.ReadBytes((int)(input.Length - input.Position));

    return(image);
}
// Extracts every PKG entry into a user-chosen folder, decrypting encrypted
// entries when a passcode is available.
private void darkButton1_Click(object sender, EventArgs e)
{
    FolderBrowserDialog savepath = new FolderBrowserDialog();
    if (savepath.ShowDialog() != DialogResult.OK)
        return;

    try
    {
        var numbersAndWords = idEntryList.Zip(nameEntryList, (n, w) => new { id = n, name = w });
        foreach (var nw in numbersAndWords)
        {
            var pkgPath = filenames;
            var idx = int.Parse(nw.id);

            // Entry names use '_EXT' suffixes; map them back to real file extensions.
            var name = nw.name
                .Replace("_SHA", ".SHA").Replace("_DAT", ".DAT").Replace("_SFO", ".SFO")
                .Replace("_XML", ".XML").Replace("_SIG", ".SIG").Replace("_PNG", ".PNG")
                .Replace("_JSON", ".JSON").Replace("_DDS", ".DDS").Replace("_TRP", ".TRP")
                .Replace("_AT9", ".AT9");
            var outPath = savepath.SelectedPath + "\\" + name;

            using (var pkgFile = File.OpenRead(pkgPath))
            {
                var pkg = new PkgReader(pkgFile).ReadPkg();
                if (idx < 0 || idx >= pkg.Metas.Metas.Count)
                {
                    DarkMessageBox.ShowError("Error: entry number out of range", "PS4 PKG Tool");
                    return;
                }

                using (var outFile = File.Create(outPath))
                {
                    var meta = pkg.Metas.Metas[idx];
                    outFile.SetLength(meta.DataSize);

                    if (meta.Encrypted && passcode != null)
                    {
                        // Encrypted entries are stored padded to a 16-byte boundary.
                        var entry = new SubStream(pkgFile, meta.DataOffset, (meta.DataSize + 15) & ~15);
                        var tmp = new byte[entry.Length];
                        entry.Read(tmp, 0, tmp.Length);
                        tmp = LibOrbisPkg.PKG.Entry.Decrypt(tmp, pkg.Header.content_id, passcode, meta);
                        outFile.Write(tmp, 0, (int)meta.DataSize);

                        // BUG FIX: the original code returned here, aborting extraction of
                        // every entry after the first decrypted one and skipping the
                        // completion dialog. Continue with the next entry instead.
                        continue;
                    }

                    if (meta.Encrypted)
                    {
                        DarkMessageBox.ShowWarning("Warning: Entry is encrypted but no passcode was provided! Saving encrypted bytes.", "PS4 PKG Tool");
                    }

                    // Plain copy of the (possibly still encrypted) entry bytes.
                    new SubStream(pkgFile, meta.DataOffset, meta.DataSize).CopyTo(outFile);
                }
            }
        }

        DarkMessageBox.ShowInformation("All entry item extracted", "PS4 PKG Tool");
    }
    catch (Exception a)
    {
        DarkMessageBox.ShowError(a.Message, "PS4 PKG Tool");
    }
}
public void MustNotReadOutsideOfBounds()
{
    var sut = new SubStream(stream.Object, 100, 200);

    // Reading at an out-of-range position must return nothing and must
    // never touch the underlying stream.
    void AssertNoRead(long position)
    {
        sut.Position = position;
        var readCount = sut.Read(new byte[0], 0, 0);

        Assert.AreEqual(0, readCount);
        stream.Verify(s => s.ReadByte(), Times.Never);
        stream.Verify(s => s.Read(It.IsAny<byte[]>(), It.IsAny<int>(), It.IsAny<int>()), Times.Never);
    }

    AssertNoRead(-1);  // before the sub-stream window
    AssertNoRead(500); // past the sub-stream window
}
// Reads at most up to the end of the current block: first the encrypted hash
// portion at the block start (CBC, zero IV), then the encrypted user data
// (CBC, IV taken from inside the hash area). Advances Position/offset/count
// by the amount consumed and returns the number of bytes produced.
private int ReadNextBlock(byte[] buffer, ref int offset, ref int count)
{
    int length;
    var readBytes = 0;

    // Offset of Position inside the current block, and the block's absolute start.
    var blockPosition = Position % BlockSize_;
    var blockStart = Position - blockPosition;

    if (count <= 0 || Position >= Length)
    {
        return(readBytes);
    }

    // Read and decrypt SHA1 portion of the block
    var hashPartStream = new SubStream(_baseStream, blockStart, BlockHashSize_);
    if (blockPosition < BlockHashSize_)
    {
        // Hash area is CBC-encrypted with a zero IV.
        var cbcHashPartStream = new CbcStream(hashPartStream, _partitionKey, new byte[0x10]);

        length = (int)Math.Min(BlockHashSize_ - blockPosition, count);
        cbcHashPartStream.Position = blockPosition;
        // NOTE(review): 'length' is added to Position/offset even if Read returns
        // fewer bytes than requested — confirm the CBC stream always fills the request.
        readBytes += cbcHashPartStream.Read(buffer, offset, length);

        Position += length;
        offset += length;
        count -= length;
        blockPosition = Position % BlockSize_;
    }

    if (count <= 0 || Position >= Length)
    {
        return(readBytes);
    }

    // Read and decrypt user data
    // The data IV is stored at a fixed offset inside the hash area.
    hashPartStream.Position = BlockDataIvStart_;
    var dataIv = new byte[0x10];
    hashPartStream.Read(dataIv, 0, 0x10);

    var dataPartStream = new SubStream(_baseStream, blockStart + BlockHashSize_, BlockDataSize_);
    var cbcDataPartStream = new CbcStream(dataPartStream, _partitionKey, dataIv);

    // Clip to the remainder of the current block.
    length = (int)Math.Min(BlockSize_ - blockPosition, count);
    cbcDataPartStream.Position = blockPosition - BlockHashSize_;
    readBytes += cbcDataPartStream.Read(buffer, offset, length);

    Position += length;
    offset += length;
    count -= length;

    return(readBytes);
}
// Lists the contents of an .nsp, .nca or .xci/.xcie archive to the console.
// Code appears to be decompiler output; the explicit casts are preserved.
internal static void List(Option option)
{
    byte[][] areaEncryptionKeys = option.Config.GetKeyConfiguration().GetKeyAreaEncryptionKeys();
    using (FileStream fileStream = Program.OpenReadOnlyFileStream(option.List.InputFile, FileOptions.SequentialScan))
    {
        string fileName1 = Path.GetFileName(option.List.InputFile);
        if (Path.GetExtension(fileName1) == ".nsp")
        {
            // Submission package: print every contained file, then detail each .nca.
            NintendoSubmissionPackageReader submissionPackageReader = new NintendoSubmissionPackageReader((Stream)fileStream);
            foreach (Tuple<string, long> tuple in submissionPackageReader.ListFileInfo())
            {
                Console.WriteLine("{0, -45}\t({1} byte)", (object)tuple.Item1, (object)tuple.Item2);
            }
            Console.WriteLine("---------------------------------------------");
            foreach (Tuple<string, long> tuple in submissionPackageReader.ListFileInfo().FindAll((Predicate<Tuple<string, long>>)(x => Path.GetExtension(x.Item1) == ".nca")))
            {
                string str = tuple.Item1;
                long num = tuple.Item2; // NOTE(review): unused local
                Program.ListNca(submissionPackageReader.OpenNintendoContentArchiveReader(str, areaEncryptionKeys), str);
            }
        }
        else if (Path.GetExtension(fileName1) == ".nca")
        {
            Program.ListNca(new NintendoContentArchiveReader((Stream)fileStream, areaEncryptionKeys), (string)null);
        }
        else
        {
            if (!(Path.GetExtension(fileName1) == ".xci") && !(Path.GetExtension(fileName1) == ".xcie"))
            {
                throw new ArgumentException("input archive file must be .nca or .nsp file.");
            }

            // XCI: dump the card header pages (located after the card key area)...
            Console.WriteLine("[headerInfo]");
            long offset1 = (long)XciInfo.CardKeyAreaPageCount * (long)XciInfo.PageSize;
            SubStream subStream = new SubStream((Stream)fileStream, offset1, (long)XciInfo.CardHeaderPageCount * (long)XciInfo.PageSize);
            byte[] numArray = new byte[subStream.Length];
            subStream.Read(numArray, 0, (int)subStream.Length);
            XciUtils.DumpHeader(numArray);

            // ...then enumerate the partitions of the normal area and their files.
            long offset2 = (long)XciInfo.NormalAreaStartPageAddress * (long)XciInfo.PageSize;
            XciReader xciReader = new XciReader((Stream) new SubStream((Stream)fileStream, offset2, fileStream.Length - offset2));
            foreach (Tuple<string, long> tuple1 in xciReader.ListFileInfo())
            {
                string fileName2 = tuple1.Item1;
                XciPartitionReader xciPartitionReader = xciReader.OpenXciPartitionReader(fileName2);
                Console.WriteLine("[{0}]", (object)fileName2);
                foreach (Tuple<string, long> tuple2 in xciPartitionReader.ListFileInfo())
                {
                    Console.WriteLine("{0, -45}\t({1} byte)", (object)tuple2.Item1, (object)tuple2.Item2);
                }
            }
        }
    }
}
// Writes all partition files into the NCSD image, then rebuilds and writes
// the NCSD header (including a copy of the first NCCH header).
public void Save(Stream output, IList<IArchiveFileInfo> files)
{
    // Update partition entries
    // Write each partition sequentially; entry offset/length are stored in media units.
    long partitionOffset = FirstPartitionOffset_;
    foreach (var file in files.Cast<ArchiveFileInfo>())
    {
        var partitionIndex = GetPartitionIndex(file.FilePath.GetName());
        var partitionEntry = _header.partitionEntries[partitionIndex];

        partitionEntry.offset = (int)(partitionOffset / MediaSize_);
        partitionEntry.length = (int)(file.FileSize / MediaSize_);

        output.Position = partitionOffset;
        file.SaveFileData(output);

        partitionOffset = output.Position;
    }

    // Store first NCCH header
    // Copy the first 0x100 bytes of the first non-empty partition into the card info header.
    var firstNcchHeader = new byte[0x100];
    foreach (var partitionEntry in _header.partitionEntries)
    {
        if (partitionEntry.length != 0)
        {
            var ncchStream = new SubStream(output, partitionEntry.offset * MediaSize_, partitionEntry.length * MediaSize_);
            ncchStream.Read(firstNcchHeader, 0, 0x100); // NOTE(review): return value ignored — assumes 0x100 bytes read
            break;
        }
    }
    _header.cardHeader.cardInfoHeader.firstNcchHeader = firstNcchHeader;

    output.Position = 0;
    using var bw = new BinaryWriterX(output);

    // Update NCSD size
    _header.ncsdSize = (int)(output.Length / MediaSize_);

    // Write NCSD header
    bw.WriteType(_header);

    // Pad until first partition
    bw.WritePadding(FirstPartitionOffset_ - Tools.MeasureType(typeof(NcsdHeader)), 0xFF);
}
public void One_Byte_Read_Result()
{
    // Base stream that always delivers exactly one byte per Read call.
    var inner = new ActionStream(new StreamActions
    {
        Read = (buffer, offset, count) =>
        {
            buffer[offset] = 32;
            return 1;
        },
        Seek = (l, origin) => 0,
        GetLength = () => 10
    });

    var subStream = new SubStream(inner, 5);
    var destination = new byte[10];

    var readCount = subStream.Read(destination, 0, 10);

    // SubStream must report exactly what the inner stream returned.
    Assert.Equal(1, readCount);
}
// shows that we can read a subset of an existing stream...
static void Main()
{
    // Fill a source buffer with the byte values 0..254.
    var source = new byte[255];
    for (var i = 0; i < source.Length; i++)
    {
        source[i] = (byte)i;
    }

    using (var ms = new MemoryStream(source))
    using (var ss = new SubStream(ms, 10, 200))
    {
        const int BUFFER_SIZE = 17; // why not...
        var chunk = new byte[BUFFER_SIZE];

        // Drain the sub-stream and echo every byte read.
        for (var read = ss.Read(chunk, 0, BUFFER_SIZE); read > 0; read = ss.Read(chunk, 0, BUFFER_SIZE))
        {
            for (var i = 0; i < read; i++)
            {
                Console.WriteLine(chunk[i]);
            }
        }
    }
}
/// <summary>
/// Write IVFC hash levels.
/// </summary>
/// <param name="output">The stream to write to.</param>
/// <param name="metaDataPosition">The position of the initial data to hash.</param>
/// <param name="metaDataSize">The size of the initial data to hash.</param>
/// <param name="masterHashPosition">The separate position at which the master hash level is written.</param>
/// <param name="levels">Number of levels to write.</param>
/// <returns>Position, written size, and padded size of each written level.</returns>
private static IList<(long, long, long)> WriteIvfcLevels(Stream output, long metaDataPosition, long metaDataSize, long masterHashPosition, int levels)
{
    // Pre-calculate hash level sizes
    // Each level stores one 0x20-byte hash per block of the level below it,
    // padded up to a whole block.
    var hashLevelSizes = new long[levels];
    var alignedMetaDataSize = (metaDataSize + BlockSize_ - 1) & ~(BlockSize_ - 1);
    for (var level = 0; level < levels - 1; level++)
    {
        var previousSize = level == 0 ? alignedMetaDataSize : hashLevelSizes[level - 1];
        var levelSize = previousSize / BlockSize_ * 0x20;
        var alignedLevelSize = (levelSize + BlockSize_ - 1) & ~(BlockSize_ - 1);

        hashLevelSizes[level] = alignedLevelSize;
    }

    // Pre-calculate hash level position
    // Levels are laid out after the (aligned) meta data; each level sits behind
    // the levels that hash it.
    var hashLevelPositions = new long[levels];
    var alignedMetaDataPosition = (metaDataPosition + BlockSize_ - 1) & ~(BlockSize_ - 1);
    for (var level = 0; level < levels - 1; level++)
    {
        var levelPosition = alignedMetaDataPosition + alignedMetaDataSize + hashLevelSizes.Skip(level + 1).Take(levels - level - 2).Sum(x => x);
        hashLevelPositions[level] = levelPosition;
    }

    // Add master hash position
    // The master hash is a single block written at its own, separate position.
    hashLevelSizes[levels - 1] = BlockSize_;
    hashLevelPositions[levels - 1] = masterHashPosition;

    // Write hash levels
    // Hash the previous level block by block and write the digests into the current level.
    var result = new List<(long, long, long)>();
    var sha256 = new Kryptography.Hash.Sha256();

    var previousLevelPosition = alignedMetaDataPosition;
    var previousLevelSize = alignedMetaDataSize;
    for (var level = 0; level < levels; level++)
    {
        var previousLevelStream = new SubStream(output, previousLevelPosition, previousLevelSize);
        var levelStream = new SubStream(output, hashLevelPositions[level], hashLevelSizes[level]);

        var block = new byte[BlockSize_];
        while (previousLevelStream.Position < previousLevelStream.Length)
        {
            previousLevelStream.Read(block, 0, BlockSize_);
            var hash = sha256.Compute(block);
            levelStream.Write(hash);
        }

        // (level position, bytes actually written, padded level size)
        result.Add((hashLevelPositions[level], levelStream.Position, hashLevelSizes[level]));

        previousLevelPosition = hashLevelPositions[level];
        previousLevelSize = hashLevelSizes[level];
    }

    return(result);
}
// Rebuilds the RPM header from scratch and verifies it matches the original
// package byte for byte, then recalculates the signature block and compares
// each signature tag against the original.
public void CalculateSignatureTest()
{
    using (Stream stream = File.OpenRead(@"Rpm/libplist-2.0.1.151-1.1.x86_64.rpm"))
    {
        var originalPackage = RpmPackageReader.Read(stream);

        RpmPackageCreator creator = new RpmPackageCreator(new PlistFileAnalyzer());
        Collection<RpmFile> files;

        // Rebuild the file list from the decompressed CPIO payload.
        using (var payloadStream = RpmPayloadReader.GetDecompressedPayloadStream(originalPackage))
        using (var cpio = new CpioFile(payloadStream, false))
        {
            ArchiveBuilder builder = new ArchiveBuilder(new PlistFileAnalyzer());
            var entries = builder.FromCpio(cpio);
            files = creator.CreateFiles(entries);
        }

        // Core routine to populate files and dependencies
        RpmPackage package = new RpmPackage();
        var metadata = new PublicRpmMetadata(package);
        metadata.Name = "libplist";
        metadata.Version = "2.0.1.151";
        metadata.Arch = "x86_64";
        metadata.Release = "1.1";

        creator.AddPackageProvides(metadata);
        creator.AddLdDependencies(metadata);
        metadata.Files = files;
        creator.AddRpmDependencies(metadata, null);
        PlistMetadata.ApplyDefaultMetadata(metadata);

        creator.CalculateHeaderOffsets(package);

        // Make sure the header is really correct
        using (Stream originalHeaderStream = new SubStream(
            originalPackage.Stream,
            originalPackage.HeaderOffset,
            originalPackage.PayloadOffset - originalPackage.HeaderOffset,
            leaveParentOpen: true,
            readOnly: true))
        using (Stream headerStream = creator.GetHeaderStream(package))
        {
            byte[] originalData = new byte[originalHeaderStream.Length];
            originalHeaderStream.Read(originalData, 0, originalData.Length);

            byte[] data = new byte[headerStream.Length];
            headerStream.Read(data, 0, data.Length);

            // Debugging aid: when the streams differ, locate the first mismatch and
            // the header record it falls into (values are meant to be inspected in
            // the debugger before the assert below fails).
            int delta = 0;
            int dataDelta = 0;
            IndexTag tag;

            for (int i = 0; i < data.Length; i++)
            {
                if (originalData[i] != data[i])
                {
                    delta = i;
                    dataDelta = delta - package.Header.Records.Count * Marshal.SizeOf<IndexHeader>();
                    tag = package.Header.Records.OrderBy(r => r.Value.Header.Offset).Last(r => r.Value.Header.Offset <= dataDelta).Key;
                    break;
                }
            }

            Assert.Equal(originalData, data);
        }

        var krgen = PgpSigner.GenerateKeyRingGenerator("dotnet", "dotnet");
        var secretKeyRing = krgen.GenerateSecretKeyRing();
        var privateKey = secretKeyRing.GetSecretKey().ExtractPrivateKey("dotnet".ToCharArray());

        using (var payload = RpmPayloadReader.GetCompressedPayloadStream(originalPackage))
        {
            // Header should be OK now (see previous test), so now get the signature block and the
            // trailer
            creator.CalculateSignature(package, privateKey, payload);
            creator.CalculateSignatureOffsets(package);

            foreach (var record in originalPackage.Signature.Records)
            {
                // HEADERSIGNATURES is compared separately after the loop.
                if (record.Key == SignatureTag.RPMTAG_HEADERSIGNATURES)
                {
                    continue;
                }

                this.AssertTagEqual(record.Key, originalPackage, package);
            }

            this.AssertTagEqual(SignatureTag.RPMTAG_HEADERSIGNATURES, originalPackage, package);
        }
    }
}
// Carves four consecutive sub-streams out of one encoded buffer and verifies
// their contents, then re-reads the first one after seeking from Begin and End.
public void ReadTest()
{
    var encoded = Helpers.GetExampleBytes("31 0D 05 00 06 09 2A 86 48 86 F7 0D 01 01 0B");
    using (var ms = new MemoryStream(encoded))
    {
        // Each SubStream presumably captures the parent's current position as its
        // window start — TODO confirm against the SubStream constructor; the parent
        // is advanced manually after each construction.
        var sub1 = new SubStream(ms, 3);
        ms.Seek(3, SeekOrigin.Begin);
        var sub2 = new SubStream(ms, 5);
        ms.Seek(5, SeekOrigin.Current);
        var sub3 = new SubStream(ms, 4);
        ms.Seek(4, SeekOrigin.Current);
        var sub4 = new SubStream(ms, 3);
        ms.Seek(3, SeekOrigin.End);

        var val1 = new byte[3];
        sub1.Read(val1, 0, 3);
        val1.ShouldBeEquivalentTo(new byte[] { 0x31, 0x0d, 0x05 }, options => options.AllowingInfiniteRecursion());

        var val2 = new byte[5];
        sub2.Read(val2, 0, 5);
        val2.ShouldBeEquivalentTo(new byte[] { 0x00, 0x06, 0x09, 0x2A, 0x86 }, options => options.AllowingInfiniteRecursion());

        var val3 = new byte[4];
        sub3.Read(val3, 0, 4);
        val3.ShouldBeEquivalentTo(new byte[] { 0x48, 0x86, 0xF7, 0x0D }, options => options.AllowingInfiniteRecursion());

        var val4 = new byte[3];
        sub4.Read(val4, 0, 3);
        val4.ShouldBeEquivalentTo(new byte[] { 0x01, 0x01, 0x0B }, options => options.AllowingInfiniteRecursion());

        // Seeking a sub-stream is relative to its own window, not the parent.
        sub1.Seek(0, SeekOrigin.Begin);
        val1 = new byte[3];
        sub1.Read(val1, 0, 3);
        val1.ShouldBeEquivalentTo(new byte[] { 0x31, 0x0d, 0x05 }, options => options.AllowingInfiniteRecursion());

        sub1.Seek(-2, SeekOrigin.End);
        val1 = new byte[2];
        sub1.Read(val1, 0, 2);
        val1.ShouldBeEquivalentTo(new byte[] { 0x0d, 0x05 }, options => options.AllowingInfiniteRecursion());
    }
}
public void Read()
{
    var data = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };

    // An empty parent stream must yield an empty sub-stream.
    using (var parent = new MemoryStream())
    using (var substream = new SubStream(parent, 0, 0))
    {
        var buffer = new byte[10];

        Assert.Equal(0, substream.Position);
        Assert.Equal(0, substream.Read(buffer, 0, 10));
        Assert.Equal(0, substream.Position);
    }

    // A window covering the whole parent returns all of its bytes.
    using (var parent = new MemoryStream())
    {
        parent.Write(data);

        using (var substream = new SubStream(parent, 0, 10))
        {
            var buffer = new byte[10];

            Assert.Equal(0, substream.Position);
            Assert.Equal(10, substream.Read(buffer, 0, 10));
            Assert.Equal(10, substream.Position);
            Assert.Equal(data, buffer);
        }
    }

    // A window over just part of the parent returns only that slice.
    using (var parent = new MemoryStream())
    {
        parent.Write(data);

        using (var substream = new SubStream(parent, 1, 9))
        {
            var buffer = new byte[9];

            Assert.Equal(0, substream.Position);
            Assert.Equal(9, substream.Read(buffer, 0, 9));
            Assert.Equal(9, substream.Position);
            Assert.Equal(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, buffer);
        }
    }

    // Reads must clip at the sub-stream's logical end of file.
    using (var parent = new MemoryStream())
    {
        parent.Write(data);

        using (var substream = new SubStream(parent, 5, 5))
        {
            var buffer = new byte[5];

            Assert.Equal(0, substream.Position);
            Assert.Equal(5, substream.Read(buffer, 0, 5));
            Assert.Equal(5, substream.Position);
            Assert.Equal(new byte[] { 5, 6, 7, 8, 9 }, buffer);
        }
    }
}
// Extracts all PKG entries into a temp trophy folder, then loads and displays
// the TRP trophy archive (icons, names, sizes) in the grid.
private void Trophy_Load(object sender, EventArgs e)
{
    // Build parallel id/name lists for every entry in the PKG.
    using (var file = File.OpenRead(filenames))
    {
        var pkg = new PkgReader(file).ReadPkg();
        var i = 0;
        foreach (var meta in pkg.Metas.Metas)
        {
            idEntryList.Add($"{i++,-6}");
            nameEntryList.Add($"{meta.id}");
        }
        // NOTE(review): ToArray() results are discarded — these calls have no effect.
        idEntryList.ToArray();
        nameEntryList.ToArray();
    }

    // Extract each entry from the lists built above.
    PS4_Tools.PKG.SceneRelated.Unprotected_PKG PS4_PKG = PS4_Tools.PKG.SceneRelated.Read_PKG(filenames);
    string game_title = PS4_PKG.Param.Title;
    // Sanitize the title for use as a directory name.
    string filter = game_title.Replace(":", " -");
    // NOTE(review): this Replace looks like a no-op (same search and replacement) —
    // possibly a mangled double-space fix; confirm intent.
    string title_filter_final = filter.Replace(" -", " -");
    string path = tempPath + @"Trophy\";
    try
    {
        Directory.CreateDirectory(path + title_filter_final);
        var numbersAndWords = idEntryList.Zip(nameEntryList, (n, w) => new { id = n, name = w });
        foreach (var nw in numbersAndWords)
        {
            var pkgPath = filenames;
            var idx = int.Parse(nw.id);
            var name = nw.name;
            // Map '_EXT' entry-name suffixes back to real file extensions.
            var outPath = path + title_filter_final + "\\" + name.Replace("_SHA", ".SHA").Replace("_DAT", ".DAT").Replace("_SFO", ".SFO").Replace("_XML", ".XML").Replace("_SIG", ".SIG").Replace("_PNG", ".PNG").Replace("_JSON", ".JSON").Replace("_DDS", ".DDS").Replace("_TRP", ".TRP").Replace("_AT9", ".AT9");;
            using (var pkgFile = File.OpenRead(pkgPath))
            {
                var pkg = new PkgReader(pkgFile).ReadPkg();
                if (idx < 0 || idx >= pkg.Metas.Metas.Count)
                {
                    DarkMessageBox.ShowError("Error: entry number out of range", "PS4 PKG Tool");
                    return;
                }
                using (var outFile = File.Create(outPath))
                {
                    var meta = pkg.Metas.Metas[idx];
                    outFile.SetLength(meta.DataSize);
                    if (meta.Encrypted)
                    {
                        if (passcode == null)
                        {
                            // No passcode: fall through and save the encrypted bytes as-is.
                        }
                        else
                        {
                            // Encrypted entries are stored padded to a 16-byte boundary.
                            var entry = new SubStream(pkgFile, meta.DataOffset, (meta.DataSize + 15) & ~15);
                            var tmp = new byte[entry.Length];
                            entry.Read(tmp, 0, tmp.Length);
                            tmp = LibOrbisPkg.PKG.Entry.Decrypt(tmp, pkg.Header.content_id, passcode, meta);
                            outFile.Write(tmp, 0, (int)meta.DataSize);
                            // NOTE(review): this return aborts extraction of all remaining
                            // entries after the first decrypted one — confirm intent.
                            return;
                        }
                    }
                    new SubStream(pkgFile, meta.DataOffset, meta.DataSize).CopyTo(outFile);
                }
            }
        }
    }
    catch (Exception a)
    {
        DarkMessageBox.ShowError(a.Message, "PS4 PKG Tool");
    }

    // Done extracting; now open the TRP trophy archive if it exists.
    if (File.Exists(path + title_filter_final + "\\TROPHY__TROPHY00.TRP"))
    {
        trophy = new TRPReader();
        trophy.Load(path + title_filter_final + "\\TROPHY__TROPHY00.TRP");
        if (!trophy.IsError)
        {
            List<Archiver>.Enumerator enumerator = new List<Archiver>.Enumerator();
            enumerator = trophy.TrophyList.GetEnumerator();
            try
            {
                while (enumerator.MoveNext())
                {
                    Archiver current = enumerator.Current;
                    if (current.Name.ToUpper().EndsWith(".PNG"))
                    {
                        // NOTE(review): the same PNG is extracted and decoded three
                        // times here — could be extracted once and reused.
                        imgList.Add(Utilities.BytesToImage(trophy.ExtractFileToMemory(current.Name)));
                        ImageListIcons.Images.Add(imgList[checked (imgList.Count - 1)]);
                        imageToExtract.Add(Utilities.BytesToImage(trophy.ExtractFileToMemory(current.Name)));
                        NameToExtract.Add(current.Name);
                        var image = Utilities.BytesToImage(trophy.ExtractFileToMemory(current.Name));
                        var resize = ResizeImage(image, image.Width / 2, image.Height / 2);
                        darkDataGridView1.Rows.Add(resize, current.Name, Utilities.RoundBytes(current.Size), "0x" + current.Offset);
                    }
                    // Keep the UI responsive while the list fills.
                    Application.DoEvents();
                }
                // NOTE(review): ToArray() result is discarded — no effect.
                NameToExtract.ToArray();
            }
            finally
            {
                enumerator.Dispose();
            }
            if (!issaved)
            {
                // Verify TRP integrity by comparing the stored and recomputed SHA1.
                if (Operators.CompareString(trophy.SHA1, trophy.CalculateSHA1Hash(), false) != 0)
                {
                    DarkMessageBox.ShowError("This file is corrupted, mismatched SHA1 hash!", "PS4 PKG Tool");
                }
            }
            else
            {
                label1.Text = "Saved successfully.";
            }
            label1.Enabled = trophy.Version > 1;
            try
            {
                // Pull the title name and NP communication id out of TROP.SFM.
                string input = Encoding.UTF8.GetString(trophy.ExtractFileToMemory("TROP.SFM"));
                Match match1 = new Regex("(?<start>[<]title[-]name[>])(?<titlename>.+)(?<end>[<][/]title[-]name[>])").Match(input);
                if (match1.Success)
                {
                    trophy.TitleName = match1.Groups["titlename"].Value;
                    Text = "TRPViewer - " + trophy.TitleName;
                }
                Match match2 = new Regex("(?<start>[<]npcommid[>])(?<npcommid>.+)(?<end>[<][/]npcommid[>])").Match(input);
                if (match2.Success)
                {
                    trophy.NPCommId = match2.Groups["npcommid"].Value;
                }
            }
            catch (Exception ex)
            {
                // Best-effort parsing; swallow failures (VB-interop error reset).
                ProjectData.SetProjectError(ex);
                ProjectData.ClearProjectError();
            }
        }
        ctrophy.SetVersion = trophy.Version;
    }
    this.Text += PS4_PKG.PS4_Title;
}