internal ExitCallback _exitCallbacks; // callback dissociated from the system so executed even if the system is paused

// Initializes the family with the matchers that decide which game objects belong to it.
// Caches start empty and no entry/exit callbacks are registered yet.
// NOTE(review): "_cashedGameObjects" appears to be a typo for "cached"; the field is
// declared elsewhere, so the name is left untouched here.
internal Family(Matcher[] matchers)
{
    _gameObjectIdToCacheId = new Dictionary <int, int>();
    _cashedGameObjects = new List <GameObject>();
    _count = 0;
    _matchers = matchers;
    _entryCallbacks = null;
    _exitCallbacks = null;
}
/// <summary>
/// Extract all entries from the archive into current directory.
/// <paramref name="callback"/> could be used to observe/control extraction process.
/// </summary>
public void ExtractFiles(EntryCallback callback)
{
    // Process entries in ascending offset order; the callback decides per entry
    // whether to extract it, skip it, or abort the whole run.
    int position = 0;
    foreach (var entry in Dir.OrderBy(entry => entry.Offset))
    {
        switch (callback(position, entry, null))
        {
            case ArchiveOperation.Abort:
                return;
            case ArchiveOperation.Skip:
                break;
            default:
                Extract(entry);
                break;
        }
        ++position;
    }
}
// TODO: GUI widget for options
/// <summary>
/// Writes every entry of <paramref name="list"/> into <paramref name="output"/> as a
/// standard zip archive, using the file-name encoding and compression level taken
/// from <paramref name="options"/>. <paramref name="callback"/>, when supplied, is
/// notified once per file added.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var zip_options = GetOptions<ZipOptions>(options);
    int files_added = 0;
    // leaveOpen: true — the caller owns the output stream.
    using (var zip = new ZipArchive(output, ZipArchiveMode.Create, true, zip_options.FileNameEncoding))
    {
        foreach (var entry in list)
        {
            var zip_entry = zip.CreateEntry(entry.Name, zip_options.CompressionLevel);
            using (var input = File.OpenRead(entry.Name))
            using (var zip_file = zip_entry.Open())
            {
                if (callback != null)
                    callback(++files_added, entry, arcStrings.MsgAddingFile);
                input.CopyTo(zip_file);
            }
        }
    }
}
// NOTE(review): empty stub — neither successCallback nor errorCallback is ever invoked.
// Signature appears to mirror the W3C FileSystem API DirectoryEntry.copyTo; confirm
// whether a real implementation is pending or this is intentionally a binding-only surface.
public void copyTo(DirectoryEntry parent, string name, EntryCallback successCallback, ErrorCallback errorCallback) {}
// NOTE(review): empty stub — no callback is ever invoked and the path/options are ignored.
// Appears to mirror the W3C FileSystem API DirectoryEntry.getDirectory; verify intent.
public void getDirectory(string path, object options, EntryCallback successCallback, ErrorCallback errorCallback) {}
/// <summary>
/// Create resource within stream <paramref name="file"/> containing entries from the
/// supplied <paramref name="list"/> and applying necessary <paramref name="options"/>.
/// </summary>
/// <exception cref="NotImplementedException">
/// Always thrown by this base implementation; formats that support archive creation
/// must override this method.
/// </exception>
public virtual void Create(Stream file, IEnumerable <Entry> list, ResourceOptions options = null, EntryCallback callback = null)
{
    throw new NotImplementedException("ArchiveFormat.Create is not implemented");
}
// NOTE(review): empty stub — no callback is ever invoked and the arguments are ignored.
// Appears to mirror the W3C FileSystem API DirectoryEntry.getFile; verify intent.
public void getFile(string path, object flags, EntryCallback successCallback, ErrorCallback errorCallback) {}
/// <summary>
/// Writes the entries of <paramref name="list"/> into <paramref name="output"/> in the
/// SteinsGate archive layout: a 4-byte index size, an encrypted index, then the file
/// bodies (each written through a <c>SteinsGateEncryptedStream</c>). Duplicate names
/// (after backslash normalization) are silently dropped.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var sg_options = GetOptions <SteinsGateOptions> (options);
    Encoding encoding = sg_options.FileNameEncoding.WithFatalFallback();
    long start_pos = output.Position;
    int callback_count = 0;
    // index_size starts at 4 for the entry-count field; each entry adds its
    // encoded name plus 16 bytes (name length int + size uint + offset long).
    uint index_size = 4;
    var real_entry_list = new List <RawEntry> (list.Count());
    var used_names = new HashSet <string>();
    foreach (var entry in list)
    {
        string name = entry.Name.Replace(@"\", "/");
        if (!used_names.Add(name)) // duplicate name
        {
            continue;
        }
        var header_entry = new RawEntry { Name = entry.Name };
        try
        {
            header_entry.IndexName = encoding.GetBytes(name);
        }
        catch (EncoderFallbackException X)
        {
            // name contains characters not representable in the chosen encoding
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        index_size += (uint)header_entry.IndexName.Length + 16;
        real_entry_list.Add(header_entry);
    }
    // skip over the 4-byte index_size field plus the index itself; bodies come after
    output.Seek(4 + index_size, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.Open(entry.Name, FileMode.Open, FileAccess.Read))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.Offset = output.Position;
            entry.Size = (uint)file_size;
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            // file body is encrypted on the way out
            using (var stream = new SteinsGateEncryptedStream(output))
                input.CopyTo(stream);
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // go back and emit the index size as 4 little-endian bytes, then the encrypted index
    output.Position = start_pos;
    output.WriteByte((byte)(index_size & 0xff));
    output.WriteByte((byte)((index_size >> 8) & 0xff));
    output.WriteByte((byte)((index_size >> 16) & 0xff));
    output.WriteByte((byte)((index_size >> 24) & 0xff));
    var encrypted_stream = new SteinsGateEncryptedStream(output);
    using (var header = new BinaryWriter(encrypted_stream))
    {
        header.Write(real_entry_list.Count);
        foreach (var entry in real_entry_list)
        {
            header.Write(entry.IndexName.Length);
            header.Write(entry.IndexName);
            header.Write((uint)entry.Size);
            header.Write((long)entry.Offset);
        }
    }
}
// files inside archive are aligned to 0x10 boundary.
// to convert DateTime structure into entry time:
// entry.FileTime = file_info.CreationTimeUtc.Ticks;
//
// last two bytes of archive is CRC16 of the whole file
/// <summary>
/// Writes entries as: 0x10-byte header area, 0x10-aligned file bodies (each
/// CRC16-checksummed as it is copied), then the index, then a trailing CRC16 of
/// the whole archive. Names are truncated to a fixed 0x15-byte field and a
/// 3-byte extension field, encoded as cp932.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    const long data_offset = 0x10;
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var output_list = new List<OutputEntry> (list.Count());
    // Pass 1: validate/encode names into fixed-size index fields.
    foreach (var entry in list)
    {
        try
        {
            string name = Path.GetFileNameWithoutExtension (entry.Name);
            string ext = Path.GetExtension (entry.Name);
            byte[] name_buf = new byte[0x15];
            byte[] ext_buf = new byte[3];
            encoding.GetBytes (name, 0, name.Length, name_buf, 0);
            if (!string.IsNullOrEmpty (ext))
            {
                ext = ext.TrimStart ('.').ToLowerInvariant();
                encoding.GetBytes (ext, 0, ext.Length, ext_buf, 0);
            }
            var out_entry = new OutputEntry {
                Name = entry.Name,
                IndexName = name_buf,
                IndexExt = ext_buf,
            };
            output_list.Add (out_entry);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        catch (ArgumentException X)
        {
            // GetBytes overflows the fixed-size buffer when the name is too long
            throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong, X);
        }
    }
    // announce total step count: one per file + index + checksum
    if (null != callback)
        callback (output_list.Count+2, null, null);
    output.Position = data_offset;
    uint current_offset = 0;
    // Pass 2: copy file bodies, checksumming each, padding to 0x10 alignment.
    foreach (var entry in output_list)
    {
        if (null != callback)
            callback (callback_count++, entry, arcStrings.MsgAddingFile);
        entry.FileTime = File.GetCreationTimeUtc (entry.Name).Ticks;
        entry.Offset = current_offset;
        entry.CompressionType = 0;
        using (var input = File.OpenRead (entry.Name))
        {
            var size = input.Length;
            if (size > uint.MaxValue || current_offset + size + 0x0f > uint.MaxValue)
                throw new FileSizeException();
            entry.Size = (uint)size;
            entry.UnpackedSize = entry.Size;
            using (var checked_stream = new CheckedStream (output, new Crc16()))
            {
                input.CopyTo (checked_stream);
                entry.HasCheckSum = true;
                entry.CheckSum = (ushort)checked_stream.CheckSumValue;
            }
            // round the running offset up to the next 0x10 boundary
            current_offset += (uint)size + 0x0f;
            current_offset &= ~0x0fu;
            output.Position = data_offset + current_offset;
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgUpdatingIndex);
    // at last, go back to directory and write offset/sizes
    uint index_offset = current_offset;
    using (var index = new BinaryWriter (output, encoding, true))
    {
        foreach (var entry in output_list)
        {
            index.Write (entry.IndexName);
            index.Write (entry.IndexExt);
            index.Write ((uint)entry.Offset);
            index.Write (entry.UnpackedSize);
            index.Write (entry.Size);
            index.Write (entry.CompressionType);
            index.Write (entry.HasCheckSum);
            index.Write (entry.CheckSum);
            index.Write (entry.FileTime);
        }
        // header at position 0: signature, version tag, index offset, file count
        index.BaseStream.Position = 0;
        index.Write (Signature);
        index.Write (0x03006b63);
        index.Write (index_offset);
        index.Write (output_list.Count);
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgCalculatingChecksum);
        // final CRC16 of everything written so far, appended at the end
        output.Position = 0;
        using (var checked_stream = new CheckedStream (output, new Crc16()))
        {
            checked_stream.CopyTo (Stream.Null);
            index.Write ((ushort)checked_stream.CheckSumValue);
        }
    }
}
// NOTE(review): empty stub — successCallback is never invoked and the url is ignored.
// Appears to mirror the prefixed webkitResolveLocalFileSystemURL browser API; verify intent.
public void WebkitResolveLocalFileSystemUrl(string url, EntryCallback successCallback, ErrorCallback errorCallback = null) { }
// NOTE(review): empty stub — neither callback is ever invoked.
// Appears to mirror the W3C FileSystem API Entry.copyTo; verify intent.
public void CopyTo(DirectoryEntry parent, JsString newName, EntryCallback successCallback, FileErrorCallback errorCallback) { }
/// <summary>
/// Writes the archive: a 12-byte header (signature, directory size, file count),
/// a directory of fixed 32-byte cp932 names with an 8-byte offset/size slot each,
/// the file bodies, and finally the offset/size slots filled in.
/// <paramref name="callback"/>, when supplied, is notified per step.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    using (var writer = new BinaryWriter (output, Encoding.ASCII, true))
    {
        // Materialize once: the sequence is traversed three times below
        // (count, name pass, data pass) and may be a one-shot enumerable.
        var entries = list.ToList();
        writer.Write (Signature);
        int list_size = entries.Count;
        uint dir_size = (uint)(list_size * 40);   // 32-byte name + 4-byte offset + 4-byte size
        writer.Write (dir_size);
        writer.Write (list_size);
        var encoding = Encodings.cp932.WithFatalFallback();
        byte[] name_buf = new byte[32];
        int callback_count = 0;
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgWritingIndex);
        // first, write names only
        foreach (var entry in entries)
        {
            string name = Path.GetFileName (entry.Name);
            try
            {
                int size = encoding.GetBytes (name, 0, name.Length, name_buf, 0);
                // Zero the whole tail of the reused buffer. Previously only a
                // single terminator byte was cleared, so trailing bytes of a
                // longer earlier name leaked into this record.
                for (int i = size; i < name_buf.Length; ++i)
                    name_buf[i] = 0;
            }
            catch (EncoderFallbackException X)
            {
                throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
            catch (ArgumentException X)
            {
                // GetBytes overflows the 32-byte buffer when the name is too long
                throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong, X);
            }
            writer.Write (name_buf);
            writer.BaseStream.Seek (8, SeekOrigin.Current);   // reserve offset/size slot
        }
        // now, write files and remember offset/sizes
        uint current_offset = 0;
        foreach (var entry in entries)
        {
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = current_offset;
            using (var input = File.Open (entry.Name, FileMode.Open, FileAccess.Read))
            {
                var size = input.Length;
                if (size > uint.MaxValue || current_offset + size > uint.MaxValue)
                    throw new FileSizeException();
                current_offset += (uint)size;
                entry.Size = (uint)size;
                input.CopyTo (output);
            }
        }
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgUpdatingIndex);
        // at last, go back to directory and write offset/sizes
        long dir_offset = 12+32;   // 12-byte header + first 32-byte name
        foreach (var entry in entries)
        {
            writer.BaseStream.Position = dir_offset;
            writer.Write ((uint)entry.Offset);
            writer.Write (entry.Size);
            dir_offset += 40;
        }
    }
}
/// <summary>
/// Sets the mouse enter/leave callback for the current window.
/// </summary>
/// <param name="func">
/// The new entry callback function. See <see cref="EntryCallback" />.
/// </param>
/// <remarks>
/// <para>
/// <b>glutEntryFunc</b> sets the mouse enter/leave callback for the current
/// window. The state callback parameter is either <see cref="GLUT_LEFT" /> or
/// <see cref="GLUT_ENTERED" /> depending on if the mouse pointer has last left or
/// entered the window.
/// </para>
/// <para>
/// Passing <c>null</c> to <b>glutEntryFunc</b> disables the generation of the
/// mouse enter/leave callback.
/// </para>
/// <para>
/// Some window systems may not generate accurate enter/leave callbacks.
/// </para>
/// </remarks>
/// <seealso cref="EntryCallback" />
// GLUTAPI void APIENTRY glutEntryFunc(void (GLUTCALLBACK *func)(int state));
public static void glutEntryFunc([In] EntryCallback func)
{
    // keep a managed reference so the delegate is not garbage-collected
    // while native GLUT still holds a pointer to it
    entryCallback = func;
    __glutEntryFunc(entryCallback);
}
// NOTE(review): empty stub — successCallback is never invoked.
// Appears to mirror the W3C FileSystem API Entry.moveTo; verify intent.
public void MoveTo(DirectoryEntry parent, JsString newName, EntryCallback successCallback) { }
// Native GLUT binding used by glutEntryFunc(); the DllImport attribute is declared
// with this extern elsewhere in the file (outside this view).
private static extern void __glutEntryFunc(EntryCallback func);
/// <summary>
/// Writes entries in NSA layout: a big-endian header (entry count + data base
/// offset), an index of NUL-terminated cp932 names with compression/offset/size
/// fields, then the file bodies. Files ending in ".bmp" may be LZSS-compressed
/// when the options request compression; duplicates are silently dropped.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ons_options = GetOptions<NsaOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var real_entry_list = new List<NsaEntry>();
    var used_names = new HashSet<string>();
    int index_size = 0;
    foreach (var entry in list)
    {
        if (!used_names.Add (entry.Name)) // duplicate name
            continue;
        try
        {
            // +1 for the NUL terminator after the encoded name
            index_size += encoding.GetByteCount (entry.Name) + 1;
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        var header_entry = new NsaEntry { Name = entry.Name };
        if (Compression.None != ons_options.CompressionType)
        {
            // only .bmp files are candidates for compression
            if (entry.Name.EndsWith (".bmp", StringComparison.InvariantCultureIgnoreCase))
                header_entry.CompressionType = ons_options.CompressionType;
        }
        // 13 = compression byte + three big-endian uints (offset, size, unpacked size)
        index_size += 13;
        real_entry_list.Add (header_entry);
    }
    long start_offset = output.Position;
    long base_offset = 6+index_size;   // 6-byte header (count + base offset) precedes the index
    output.Seek (base_offset, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.OpenRead (entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            // offsets are stored relative to the end of the index
            long file_offset = output.Position - base_offset;
            if (file_offset+file_size > uint.MaxValue)
                throw new FileSizeException();
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = file_offset;
            entry.UnpackedSize = (uint)file_size;
            if (Compression.LZSS == entry.CompressionType)
            {
                var packer = new Packer (input, output);
                entry.Size = packer.EncodeLZSS();
            }
            else
            {
                // store uncompressed and record that in the index
                entry.Size = entry.UnpackedSize;
                entry.CompressionType = Compression.None;
                input.CopyTo (output);
            }
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // go back and write header + index (all multi-byte fields big-endian)
    output.Position = start_offset;
    using (var writer = new BinaryWriter (output, encoding, true))
    {
        writer.Write (Binary.BigEndian ((short)real_entry_list.Count));
        writer.Write (Binary.BigEndian ((uint)base_offset));
        foreach (var entry in real_entry_list)
        {
            writer.Write (encoding.GetBytes (entry.Name));
            writer.Write ((byte)0);
            writer.Write ((byte)entry.CompressionType);
            writer.Write (Binary.BigEndian ((uint)entry.Offset));
            writer.Write (Binary.BigEndian ((uint)entry.Size));
            writer.Write (Binary.BigEndian ((uint)entry.UnpackedSize));
        }
    }
}
/// <summary>
/// Extract all entries from the archive into current directory.
/// <paramref name="callback"/> could be used to observe/control extraction process.
/// </summary>
public void ExtractFiles(EntryCallback callback)
{
    int i = 0;
    // entries are visited in ascending offset order for sequential archive reads
    foreach (var entry in Dir.OrderBy (e => e.Offset))
    {
        var action = callback (i, entry, null);
        if (ArchiveOperation.Abort == action)
            break;          // caller requested to stop the whole operation
        if (ArchiveOperation.Skip != action)
            Extract (entry);
        ++i;
    }
}
// NOTE(review): empty stub — neither callback is ever invoked.
// Appears to mirror the W3C FileSystem API Entry.getParent; verify intent.
public void getParent(EntryCallback successCallback, ErrorCallback errorCallback) {}
// NOTE(review): empty stub — successCallback is never invoked and the url is ignored.
// Appears to mirror the resolveLocalFileSystemURL browser API; verify intent.
public void ResolveLocalFileSystemUrl(JsString url, EntryCallback successCallback, FileErrorCallback errorCallback = null) { }
/// <summary>
/// Writes an AMI archive, optionally merging unchanged entries from a base archive
/// named in the options. Layout: 16-byte header, id/offset/size index, file data.
/// Entries are keyed by numeric id in a sorted table; ids present in
/// <paramref name="list"/> replace those inherited from the base archive.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    ArcFile base_archive = null;
    var ami_options = GetOptions<AmiOptions> (options);
    if (null != ami_options && ami_options.UseBaseArchive && !string.IsNullOrEmpty (ami_options.BaseArchive))
    {
        var base_file = new ArcView (ami_options.BaseArchive);
        try
        {
            if (base_file.View.ReadUInt32(0) == Signature)
                base_archive = TryOpen (base_file);
            if (null == base_archive)
                throw new InvalidFormatException (string.Format ("{0}: base archive could not be read", Path.GetFileName (ami_options.BaseArchive)));
            // ownership transferred to base_archive — prevent the finally from disposing it
            base_file = null;
        }
        finally
        {
            if (null != base_file)
                base_file.Dispose();
        }
    }
    try
    {
        // entries keyed by id; base archive entries first, then overridden by the new list
        var file_table = new SortedDictionary<uint, PackedEntry>();
        if (null != base_archive)
        {
            foreach (AmiEntry entry in base_archive.Dir)
                file_table[entry.Id] = entry;
        }
        int update_count = UpdateFileTable (file_table, list);
        if (0 == update_count)
            throw new InvalidFormatException (arcStrings.AMINoFiles);
        uint file_count = (uint)file_table.Count;
        if (null != callback)
            callback ((int)file_count+1, null, null);   // announce total step count
        int callback_count = 0;
        long start_offset = output.Position;
        uint data_offset = file_count * 16 + 16;   // 16-byte header + 16 bytes per index record
        output.Seek (data_offset, SeekOrigin.Current);
        foreach (var entry in file_table)
        {
            if (null != callback)
                callback (callback_count++, entry.Value, arcStrings.MsgAddingFile);
            long current_offset = output.Position;
            if (current_offset > uint.MaxValue)
                throw new FileSizeException();
            // AmiEntry values come from the base archive; others are new files on disk
            if (entry.Value is AmiEntry)
                CopyAmiEntry (base_archive, entry.Value, output);
            else
                entry.Value.Size = WriteAmiEntry (entry.Value, output);
            entry.Value.Offset = (uint)current_offset;
        }
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgWritingIndex);
        // go back and write header + index
        output.Position = start_offset;
        using (var header = new BinaryWriter (output, Encoding.ASCII, true))
        {
            header.Write (Signature);
            header.Write (file_count);
            header.Write (data_offset);
            header.Write ((uint)0);
            foreach (var entry in file_table)
            {
                header.Write (entry.Key);
                header.Write ((uint)entry.Value.Offset);
                header.Write ((uint)entry.Value.UnpackedSize);
                header.Write ((uint)entry.Value.Size);
            }
        }
    }
    finally
    {
        if (null != base_archive)
            base_archive.Dispose();
    }
}
// files inside archive are aligned to 0x10 boundary.
// to convert DateTime structure into entry time:
// entry.FileTime = file_info.CreationTimeUtc.Ticks;
//
// last two bytes of archive is CRC16 of the whole file
/// <summary>
/// Writes entries as: 0x10-byte header area, 0x10-aligned file bodies (each
/// CRC16-checksummed as it is copied), then the index, then a trailing CRC16 of
/// the whole archive. Names are truncated to a fixed 0x15-byte field and a
/// 3-byte extension field, encoded as cp932.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    const long data_offset = 0x10;
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var output_list = new List <OutputEntry> (list.Count());
    // Pass 1: validate/encode names into fixed-size index fields.
    foreach (var entry in list)
    {
        try
        {
            string name = Path.GetFileNameWithoutExtension(entry.Name);
            string ext = Path.GetExtension(entry.Name);
            byte[] name_buf = new byte[0x15];
            byte[] ext_buf = new byte[3];
            encoding.GetBytes(name, 0, name.Length, name_buf, 0);
            if (!string.IsNullOrEmpty(ext))
            {
                ext = ext.TrimStart('.').ToLowerInvariant();
                encoding.GetBytes(ext, 0, ext.Length, ext_buf, 0);
            }
            var out_entry = new OutputEntry
            {
                Name = entry.Name,
                IndexName = name_buf,
                IndexExt = ext_buf,
            };
            output_list.Add(out_entry);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        catch (ArgumentException X)
        {
            // GetBytes overflows the fixed-size buffer when the name is too long
            throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong, X);
        }
    }
    // announce total step count: one per file + index + checksum
    if (null != callback)
    {
        callback(output_list.Count + 2, null, null);
    }
    output.Position = data_offset;
    uint current_offset = 0;
    // Pass 2: copy file bodies, checksumming each, padding to 0x10 alignment.
    foreach (var entry in output_list)
    {
        if (null != callback)
        {
            callback(callback_count++, entry, arcStrings.MsgAddingFile);
        }
        entry.FileTime = File.GetCreationTimeUtc(entry.Name).Ticks;
        entry.Offset = current_offset;
        entry.CompressionType = 0;
        using (var input = File.OpenRead(entry.Name))
        {
            var size = input.Length;
            if (size > uint.MaxValue || current_offset + size + 0x0f > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.Size = (uint)size;
            entry.UnpackedSize = entry.Size;
            using (var checked_stream = new CheckedStream(output, new Crc16()))
            {
                input.CopyTo(checked_stream);
                entry.HasCheckSum = true;
                entry.CheckSum = (ushort)checked_stream.CheckSumValue;
            }
            // round the running offset up to the next 0x10 boundary
            current_offset += (uint)size + 0x0f;
            current_offset &= ~0x0fu;
            output.Position = data_offset + current_offset;
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgUpdatingIndex);
    }
    // at last, go back to directory and write offset/sizes
    uint index_offset = current_offset;
    using (var index = new BinaryWriter(output, encoding, true))
    {
        foreach (var entry in output_list)
        {
            index.Write(entry.IndexName);
            index.Write(entry.IndexExt);
            index.Write((uint)entry.Offset);
            index.Write(entry.UnpackedSize);
            index.Write(entry.Size);
            index.Write(entry.CompressionType);
            index.Write(entry.HasCheckSum);
            index.Write(entry.CheckSum);
            index.Write(entry.FileTime);
        }
        // header at position 0: signature, version tag, index offset, file count
        index.BaseStream.Position = 0;
        index.Write(Signature);
        index.Write(0x03006b63);
        index.Write(index_offset);
        index.Write(output_list.Count);
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgCalculatingChecksum);
        }
        // final CRC16 of everything written so far, appended at the end
        output.Position = 0;
        using (var checked_stream = new CheckedStream(output, new Crc16()))
        {
            checked_stream.CopyTo(Stream.Null);
            index.Write((ushort)checked_stream.CheckSumValue);
        }
    }
}
/// <summary>
/// Writes an archive grouped by file extension: a table of per-extension
/// directories (4-byte padded extension, file count, directory offset) followed by
/// per-file records of fixed NameLength+1-byte names with size/offset, then the
/// file bodies. Names and extensions are upper-cased cp932; extensions are
/// limited to 3 bytes.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var arc_options = GetOptions <ArcOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int file_count = 0;
    // files bucketed by upper-case extension, in sorted extension order
    var file_table = new SortedDictionary <string, ArcDirectory>();
    foreach (var entry in list)
    {
        string ext = Path.GetExtension(entry.Name).TrimStart('.').ToUpperInvariant();
        if (string.IsNullOrEmpty(ext))
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgNoExtension);
        }
        if (ext.Length > 3)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgExtensionTooLong);
        }
        string name = Path.GetFileNameWithoutExtension(entry.Name).ToUpperInvariant();
        byte[] raw_name = encoding.GetBytes(name);
        if (raw_name.Length > arc_options.NameLength)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong);
        }
        ArcDirectory dir;
        if (!file_table.TryGetValue(ext, out dir))
        {
            byte[] raw_ext = encoding.GetBytes(ext);
            // re-check after encoding: multibyte characters can exceed 3 bytes
            if (raw_ext.Length > 3)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgExtensionTooLong);
            }
            dir = new ArcDirectory { Extension = raw_ext, Files = new List <ArcEntry>() };
            file_table[ext] = dir;
        }
        dir.Files.Add(new ArcEntry { Name = entry.Name, RawName = raw_name });
        ++file_count;
    }
    // announce total step count: one per file + index
    if (null != callback)
    {
        callback(file_count + 1, null, null);
    }
    int callback_count = 0;
    // 4-byte count + 12 bytes per extension record precede the per-file directory
    long dir_offset = 4 + file_table.Count * 12;
    long data_offset = dir_offset + (arc_options.NameLength + 9) * file_count;
    output.Position = data_offset;
    foreach (var ext in file_table.Keys)
    {
        var dir = file_table[ext];
        dir.DirOffset = (uint)dir_offset;
        dir_offset += (arc_options.NameLength + 9) * dir.Files.Count;
        foreach (var entry in dir.Files)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = data_offset;
            entry.Size = WriteEntry(entry.Name, output);
            data_offset += entry.Size;
            if (data_offset > uint.MaxValue)
            {
                throw new FileSizeException();
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // go back to the start and write the extension table + per-file directory
    output.Position = 0;
    using (var header = new BinaryWriter(output, encoding, true))
    {
        byte[] buffer = new byte[arc_options.NameLength + 1];
        header.Write(file_table.Count);
        foreach (var ext in file_table)
        {
            // extension padded with zeros to exactly 4 bytes
            Buffer.BlockCopy(ext.Value.Extension, 0, buffer, 0, ext.Value.Extension.Length);
            for (int i = ext.Value.Extension.Length; i < 4; ++i)
            {
                buffer[i] = 0;
            }
            header.Write(buffer, 0, 4);
            header.Write(ext.Value.Files.Count);
            header.Write(ext.Value.DirOffset);
        }
        foreach (var ext in file_table)
        {
            foreach (var entry in ext.Value.Files)
            {
                // name zero-padded to the full fixed-size field
                Buffer.BlockCopy(entry.RawName, 0, buffer, 0, entry.RawName.Length);
                for (int i = entry.RawName.Length; i < buffer.Length; ++i)
                {
                    buffer[i] = 0;
                }
                header.Write(buffer);
                header.Write(entry.Size);
                header.Write((uint)entry.Offset);
            }
        }
    }
}
/// <summary>
/// Writes an NPA archive: a 41-byte header plus index built by <c>Indexer</c>,
/// followed by file bodies which may be zlib-compressed and/or encrypted with the
/// scheme from the options. Index entry names are obfuscated byte-by-byte with
/// <c>DecryptName</c>.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var npa_options = GetOptions <NpaOptions> (options);
    int callback_count = 0;
    // build file index
    var index = new Indexer(list, npa_options);
    output.Position = 41 + index.Size;   // header is 41 bytes, then the index
    long data_offset = 0;
    // write files
    foreach (var entry in index.Entries.Where(e => e.Type != "directory"))
    {
        if (data_offset > uint.MaxValue)
        {
            throw new FileSizeException();
        }
        if (null != callback)
        {
            callback(callback_count++, entry, arcStrings.MsgAddingFile);
        }
        using (var file = File.OpenRead(entry.Name))
        {
            var size = file.Length;
            if (size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.Offset = data_offset;
            entry.UnpackedSize = (uint)size;
            // when an encryption scheme is set, bodies pass through EncryptedStream
            Stream destination = output;
            if (null != npa_options.Scheme)
            {
                destination = new EncryptedStream(output, entry, npa_options.Scheme, index.Key);
            }
            try
            {
                if (entry.IsPacked)
                {
                    var start = destination.Position;
                    using (var zstream = new ZLibStream(destination, CompressionMode.Compress, CompressionLevel.Level9, true))
                    {
                        file.CopyTo(zstream);
                    }
                    // packed size is whatever the compressor actually emitted
                    entry.Size = (uint)(destination.Position - start);
                }
                else
                {
                    file.CopyTo(destination);
                    entry.Size = entry.UnpackedSize;
                }
            }
            finally
            {
                // only dispose the wrapper, never the caller-owned output stream
                if (destination is EncryptedStream)
                {
                    destination.Dispose();
                }
            }
            data_offset += entry.Size;
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // go back to the start and write header + obfuscated index
    output.Position = 0;
    using (var header = new BinaryWriter(output, Encoding.ASCII, true))
    {
        header.Write(Signature);
        header.Write((short)0);
        header.Write((byte)0);
        header.Write(npa_options.Key1);
        header.Write(npa_options.Key2);
        header.Write(npa_options.CompressContents);
        header.Write(npa_options.Scheme != null);
        header.Write(index.TotalCount);
        header.Write(index.FolderCount);
        header.Write(index.FileCount);
        header.Write((long)0);
        header.Write(index.Size);
        int entry_number = 0;
        foreach (var entry in index.Entries)
        {
            header.Write(entry.RawName.Length);
            // each name byte is shifted by a per-position, per-entry obfuscation value
            for (int i = 0; i < entry.RawName.Length; ++i)
            {
                header.Write((byte)(entry.RawName[i] - DecryptName(i, entry_number, index.Key)));
            }
            header.Write((byte)("directory" == entry.Type ? 1 : 2));
            header.Write(entry.FolderId);
            header.Write((uint)entry.Offset);
            header.Write(entry.Size);
            header.Write(entry.UnpackedSize);
            ++entry_number;
        }
    }
}
/// <summary>
/// Writes a Pd archive (at most 0x4000 files): signature plus a "Plus"/"Only"
/// marker selecting scrambled or plain contents, a directory of fixed 0x80-byte
/// cp932 names with 16-byte offset/size slots, then the file bodies starting at a
/// fixed 0x240000-byte distance past the directory start.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    int file_count = list.Count();
    if (file_count > 0x4000)
    {
        throw new InvalidFormatException(arcStrings.MsgTooManyFiles);
    }
    // announce total step count: one per file + index write + index update
    if (null != callback)
    {
        callback(file_count + 2, null, null);
    }
    int callback_count = 0;
    var pd_options = GetOptions <PdOptions> (options);
    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        writer.Write(Signature);
        if (pd_options.ScrambleContents)
        {
            writer.Write((uint)0x73756c50);   // "Plus"
        }
        else
        {
            writer.Write((uint)0x796c6e4f);   // "Only"
        }
        output.Seek(0x38, SeekOrigin.Current);
        writer.Write(file_count);
        writer.Write((int)0);
        long dir_offset = output.Position;
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        var encoding = Encodings.cp932.WithFatalFallback();
        byte[] name_buf = new byte[0x80];
        int previous_size = 0;
        // first, write names only
        foreach (var entry in list)
        {
            string name = Path.GetFileName(entry.Name);
            try
            {
                int size = encoding.GetBytes(name, 0, name.Length, name_buf, 0);
                // clear leftovers of the previous (possibly longer) name
                for (int i = size; i < previous_size; ++i)
                {
                    name_buf[i] = 0;
                }
                previous_size = size;
            }
            catch (EncoderFallbackException X)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
            catch (ArgumentException X)
            {
                // GetBytes overflows the 0x80-byte buffer when the name is too long
                throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong, X);
            }
            writer.Write(name_buf);
            writer.BaseStream.Seek(16, SeekOrigin.Current);   // reserve offset/size slot
        }
        // now, write files and remember offset/sizes
        long current_offset = 0x240000 + dir_offset;
        output.Seek(current_offset, SeekOrigin.Begin);
        foreach (var entry in list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = current_offset;
            using (var input = File.OpenRead(entry.Name))
            {
                var size = input.Length;
                if (size > uint.MaxValue)
                {
                    throw new FileSizeException();
                }
                current_offset += size;
                entry.Size = (uint)size;
                if (pd_options.ScrambleContents)
                {
                    CopyScrambled(input, output);
                }
                else
                {
                    input.CopyTo(output);
                }
            }
        }
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgUpdatingIndex);
        }
        // at last, go back to directory and write offset/sizes
        dir_offset += 0x80;   // slot follows each 0x80-byte name field
        foreach (var entry in list)
        {
            writer.BaseStream.Position = dir_offset;
            writer.Write(entry.Offset);
            writer.Write((long)entry.Size);
            dir_offset += 0x90;   // 0x80 name + 0x10 offset/size slot
        }
    }
}
// NOTE(review): empty stub overload without an error callback — successCallback is
// never invoked. Appears to mirror the W3C FileSystem API DirectoryEntry.getDirectory.
public void getDirectory(string path, object options, EntryCallback successCallback) { }
/// <summary>
/// Writes a YPF archive: a 0x20-byte header (signature, version, count, index
/// size), an index of CRC32-hashed, XOR-obfuscated cp932 names sorted by hash,
/// then file bodies which are zlib-compressed when the entry's file type is 0 and
/// Adler32-checksummed either way. Requires valid <c>YpfOptions</c> with a
/// one-byte key and a non-zero version.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ypf_options = GetOptions<YpfOptions> (options);
    if (null == ypf_options)
        throw new ArgumentException ("Invalid archive creation options", "options");
    if (ypf_options.Key > 0xff)
        throw new InvalidEncryptionScheme (arcStrings.MsgCreationKeyRequired);
    if (0 == ypf_options.Version)
        throw new InvalidFormatException (arcStrings.MsgInvalidVersion);
    var scheme = new YpfScheme {
        SwapTable = GuessSwapTable (ypf_options.Version),
        Key = (byte)ypf_options.Key
    };
    int callback_count = 0;
    var encoding = Encodings.cp932.WithFatalFallback();
    ChecksumFunc Checksum = data => Crc32.Compute (data, 0, data.Length);
    // index begins right after the 0x20-byte header; grows per entry below
    uint data_offset = 0x20;
    var file_table = new List<YpfEntry>();
    foreach (var entry in list)
    {
        try
        {
            string file_name = entry.Name;
            byte[] name_buf = encoding.GetBytes (file_name);
            if (name_buf.Length > 0xff)
                throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong);
            // hash is computed over the clear name, before XOR obfuscation
            uint hash = Checksum (name_buf);
            byte file_type = GetFileType (ypf_options.Version, file_name);
            for (int i = 0; i < name_buf.Length; ++i)
                name_buf[i] = (byte)(name_buf[i] ^ ypf_options.Key);
            file_table.Add (new YpfEntry {
                Name = file_name,
                IndexName = name_buf,
                NameHash = hash,
                FileType = file_type,
                IsPacked = 0 == file_type,   // only type-0 files get compressed
            });
            // 0x17 fixed bytes per index record plus the encoded name
            data_offset += (uint)(0x17 + name_buf.Length);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
    }
    // index records are stored in ascending name-hash order
    file_table.Sort ((a, b) => a.NameHash.CompareTo (b.NameHash));
    output.Position = data_offset;
    uint current_offset = data_offset;
    foreach (var entry in file_table)
    {
        if (null != callback)
            callback (callback_count++, entry, arcStrings.MsgAddingFile);
        entry.Offset = current_offset;
        using (var input = File.OpenRead (entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.UnpackedSize = (uint)file_size;
            // Adler32 is computed over the bytes actually written (packed or raw)
            using (var checked_stream = new CheckedStream (output, new Adler32()))
            {
                if (entry.IsPacked)
                {
                    var start = output.Position;
                    using (var zstream = new ZLibStream (checked_stream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    {
                        input.CopyTo (zstream);
                    }
                    entry.Size = (uint)(output.Position - start);
                }
                else
                {
                    input.CopyTo (checked_stream);
                    entry.Size = entry.UnpackedSize;
                }
                checked_stream.Flush();
                entry.CheckSum = checked_stream.CheckSumValue;
                current_offset += entry.Size;
            }
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // go back and write header + index
    output.Position = 0;
    using (var writer = new BinaryWriter (output, encoding, true))
    {
        writer.Write (Signature);
        writer.Write (ypf_options.Version);
        writer.Write (file_table.Count);
        writer.Write (data_offset);
        writer.BaseStream.Seek (0x20, SeekOrigin.Begin);
        foreach (var entry in file_table)
        {
            writer.Write (entry.NameHash);
            // stored name length is obfuscated via the version-specific swap table
            byte name_len = (byte)~Parser.DecryptLength (scheme.SwapTable, (byte)entry.IndexName.Length);
            writer.Write (name_len);
            writer.Write (entry.IndexName);
            writer.Write (entry.FileType);
            writer.Write (entry.IsPacked);
            writer.Write (entry.UnpackedSize);
            writer.Write (entry.Size);
            writer.Write ((uint)entry.Offset);
            writer.Write (entry.CheckSum);
        }
    }
}
// NOTE(review): empty stub — no callback is ever invoked and the arguments are ignored.
// Appears to mirror the W3C FileSystem API DirectoryEntry.getFile; verify intent.
public void getFile(string path, object options, EntryCallback successCallback, ErrorCallback errorCallback) { }
// NOTE(review): empty stub overload without an error callback — successCallback is
// never invoked. Appears to mirror the W3C FileSystem API DirectoryEntry.getFile.
public void getFile(string path, object options, EntryCallback successCallback) {}
/// <summary>
/// Writes a MajiroArc archive: the "MajiroArcV1.000" signature, file count and
/// index offset, a CRC32 hash/offset table sorted by name hash (with a trailing
/// 0/end-offset sentinel), NUL-terminated cp932 names, then the file bodies.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    int version = 1;
    int file_count = list.Count();
    int callback_count = 0;
    var encoding = Encodings.cp932.WithFatalFallback();
    // announce total step count: one per file + index write + index update
    if (null != callback)
    {
        callback(file_count + 2, null, null);
    }
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        string signature = string.Format("MajiroArcV{0}.000\0", version);
        writer.Write(encoding.GetBytes(signature));
        writer.Write(file_count);
        // hash table: 8 bytes per file plus one sentinel pair, after the 0x1c header
        int index_offset = 0x1c + (file_count + 1) * 8;
        writer.Write(index_offset);
        // calculate name hashes
        var real_entry_list = new List <MajiroEntry> (file_count);
        foreach (var entry in list)
        {
            try
            {
                string name = Path.GetFileName(entry.Name);
                byte[] name_buf = encoding.GetBytes(name);
                uint crc32 = Crc32.Compute(name_buf, 0, name_buf.Length);
                var maj_entry = new MajiroEntry
                {
                    Name = entry.Name,
                    IndexName = name_buf,
                    NameHash = crc32,
                };
                real_entry_list.Add(maj_entry);
            }
            catch (EncoderFallbackException X)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
        }
        // index records are stored in ascending name-hash order
        real_entry_list.Sort((a, b) => a.NameHash.CompareTo(b.NameHash));
        // write names
        writer.BaseStream.Seek(index_offset, SeekOrigin.Begin);
        foreach (var entry in real_entry_list)
        {
            writer.Write(entry.IndexName);
            writer.Write((byte)0);   // NUL terminator
        }
        // write files
        uint start_offset = (uint)output.Position;
        uint current_offset = start_offset;
        foreach (var entry in real_entry_list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = current_offset;
            using (var input = File.OpenRead(entry.Name))
            {
                var size = input.Length;
                if (size > uint.MaxValue || current_offset + size > uint.MaxValue)
                {
                    throw new FileSizeException();
                }
                current_offset += (uint)size;
                entry.Size = (uint)size;
                input.CopyTo(output);
            }
        }
        uint end_offset = (uint)output.Position;
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgUpdatingIndex);
        }
        // at last, go back to index and write hashes/offsets
        writer.BaseStream.Position = 0x18;
        writer.Write(start_offset);
        foreach (var entry in real_entry_list)
        {
            writer.Write(entry.NameHash);
            writer.Write((uint)entry.Offset);
        }
        // sentinel record: zero hash + end-of-data offset
        writer.Write((int)0);
        writer.Write(end_offset);
    }
}
/// <summary>
/// Create an XP3 (KiriKiri) archive: optional v2 preamble, file bodies written
/// through the selected cipher, then a "File"/"info"/"segm"/"adlr" chunked
/// index appended (optionally zlib-compressed) and its position patched into
/// the header.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var xp3_options = GetOptions <Xp3Options> (options);
    ICrypt scheme = xp3_options.Scheme;
    bool compress_index = xp3_options.CompressIndex;
    bool compress_contents = xp3_options.CompressContents;
    bool retain_dirs = xp3_options.RetainDirs;
    bool use_encryption = !(scheme is NoCrypt);

    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        writer.Write(s_xp3_header);
        if (2 == xp3_options.Version)
        {
            // version-2 preamble preceding the real index pointer
            writer.Write((long)0x17);
            writer.Write((int)1);
            writer.Write((byte)0x80);
            writer.Write((long)0);
        }
        // reserve 8 bytes for the index position, patched after files are written
        long index_pos_offset = writer.BaseStream.Position;
        writer.BaseStream.Seek(8, SeekOrigin.Current);
        int callback_count = 0;
        var used_names = new HashSet <string>();
        var dir = new List <Xp3Entry>();
        long current_offset = writer.BaseStream.Position;
        foreach (var entry in list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            string name = entry.Name;
            if (!retain_dirs)
            {
                name = Path.GetFileName(name);
            }
            else
            {
                name = name.Replace(@"\", "/");
            }
            if (!used_names.Add(name))
            {
                // entry with the same stored name was already written; skip it
                Trace.WriteLine("duplicate name", entry.Name);
                continue;
            }
            var xp3entry = new Xp3Entry
            {
                Name = name,
                Cipher = scheme,
                // startup.tjs may be exempted from encryption by the scheme
                IsEncrypted = use_encryption && !(scheme.StartupTjsNotEncrypted && VFS.IsPathEqualsToFileName(name, "startup.tjs"))
            };
            bool compress = compress_contents && ShouldCompressFile(entry);
            // NOTE(review): opens by the transformed name, not entry.Name —
            // presumably relies on the current directory matching; confirm.
            using (var file = File.Open(name, FileMode.Open, FileAccess.Read))
            {
                if (!xp3entry.IsEncrypted || 0 == file.Length)
                {
                    RawFileCopy(file, xp3entry, output, compress);
                }
                else
                {
                    EncryptedFileCopy(file, xp3entry, output, compress);
                }
            }
            dir.Add(xp3entry);
        }
        // patch index position into the reserved header slot
        long index_pos = writer.BaseStream.Position;
        writer.BaseStream.Position = index_pos_offset;
        writer.Write(index_pos);
        writer.BaseStream.Position = index_pos;

        // build the index in memory first (0x58 bytes per entry is a capacity hint)
        using (var header = new BinaryWriter(new MemoryStream(dir.Count * 0x58), Encoding.Unicode))
        {
            if (null != callback)
            {
                callback(callback_count++, null, arcStrings.MsgWritingIndex);
            }
            long dir_pos = 0;
            foreach (var entry in dir)
            {
                header.BaseStream.Position = dir_pos;
                header.Write((uint)0x656c6946); // "File"
                long header_size_pos = header.BaseStream.Position;
                header.Write((long)0); // placeholder for chunk size, patched below
                header.Write((uint)0x6f666e69); // "info"
                header.Write((long)(4 + 8 + 8 + 2 + entry.Name.Length * 2));
                header.Write((uint)(use_encryption ? 0x80000000 : 0));
                header.Write((long)entry.UnpackedSize);
                header.Write((long)entry.Size);
                header.Write((short)entry.Name.Length);
                foreach (char c in entry.Name)
                {
                    header.Write(c); // UTF-16 code units
                }
                header.Write((uint)0x6d676573); // "segm"
                header.Write((long)0x1c);
                var segment = entry.Segments.First();
                header.Write((int)(segment.IsCompressed ? 1 : 0));
                header.Write((long)segment.Offset);
                header.Write((long)segment.Size);
                header.Write((long)segment.PackedSize);
                header.Write((uint)0x726c6461); // "adlr"
                header.Write((long)4);
                header.Write((uint)entry.Hash);
                dir_pos = header.BaseStream.Position;
                // patch "File" chunk size (excludes the tag and size field itself)
                long header_size = dir_pos - header_size_pos - 8;
                header.BaseStream.Position = header_size_pos;
                header.Write(header_size);
            }
            header.BaseStream.Position = 0;
            writer.Write(compress_index); // single flag byte
            long unpacked_dir_size = header.BaseStream.Length;
            if (compress_index)
            {
                if (null != callback)
                {
                    callback(callback_count++, null, arcStrings.MsgCompressingIndex);
                }
                // packed size is unknown until compression finishes; reserve and patch
                long packed_dir_size_pos = writer.BaseStream.Position;
                writer.Write((long)0);
                writer.Write(unpacked_dir_size);
                long dir_start = writer.BaseStream.Position;
                using (var zstream = new ZLibStream(writer.BaseStream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    header.BaseStream.CopyTo(zstream);
                long packed_dir_size = writer.BaseStream.Position - dir_start;
                writer.BaseStream.Position = packed_dir_size_pos;
                writer.Write(packed_dir_size);
            }
            else
            {
                writer.Write(unpacked_dir_size);
                header.BaseStream.CopyTo(writer.BaseStream);
            }
        }
    }
    // leave the stream positioned at the end of the archive
    output.Seek(0, SeekOrigin.End);
}
/// <summary>
/// Create a Ren'Py RPA-3.0 archive: file bodies first (offsets/sizes XOR
/// obfuscated with the archive key), then a pickled, zlib-compressed index,
/// and finally the "RPA-3.0 ..." signature line written into the reserved
/// 0x22-byte header area.
/// </summary>
/// <remarks>
/// Fixes a misplaced semicolon in the original
/// (<c>else { ... = new ArrayList { rpa_entry } };</c>) that left the
/// assignment statement unterminated, and replaces the
/// ContainsKey-then-indexer double lookup with TryGetValue.
/// </remarks>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var rpa_options = GetOptions<RpaOptions> (options);
    int callback_count = 0;
    var file_table = new Dictionary<PyString, ArrayList>();
    long data_offset = 0x22; // room reserved for the signature line, written last
    output.Position = data_offset;
    foreach (var entry in list)
    {
        if (null != callback)
            callback(callback_count++, entry, arcStrings.MsgAddingFile);
        string name = entry.Name.Replace(@"\", "/");
        var rpa_entry = new RpaEntry { Name = name };
        using (var file = File.OpenRead(entry.Name))
        {
            var size = file.Length;
            if (size > uint.MaxValue)
                throw new FileSizeException();
            // first up to 0x10 bytes are stored inside the index entry itself
            int header_size = (int)Math.Min(size, 0x10);
            // offset and unpacked size are XOR-obfuscated with the key, per RPA-3.0
            rpa_entry.Offset = output.Position ^ rpa_options.Key;
            rpa_entry.Header = new byte[header_size];
            rpa_entry.UnpackedSize = (uint)size ^ rpa_options.Key;
            rpa_entry.Size = (uint)(size - header_size);
            // read the header prefix fully; Stream.Read may return fewer bytes
            int read_total = 0;
            while (read_total < header_size)
            {
                int read = file.Read(rpa_entry.Header, read_total, header_size - read_total);
                if (0 == read)
                    throw new EndOfStreamException(entry.Name);
                read_total += read;
            }
            // remainder of the file goes into the archive body
            file.CopyTo(output);
        }
        // several entries may share a name; each name maps to a list of versions
        var py_name = new PyString(name);
        ArrayList versions;
        if (file_table.TryGetValue(py_name, out versions))
            versions.Add(rpa_entry);
        else
            file_table[py_name] = new ArrayList { rpa_entry };
    }
    long index_pos = output.Position;
    string signature = string.Format(CultureInfo.InvariantCulture, "RPA-3.0 {0:x16} {1:x8}\n",
                                     index_pos, rpa_options.Key);
    var header = Encoding.ASCII.GetBytes(signature);
    if (header.Length > data_offset)
        throw new ApplicationException("Signature serialization failed.");
    if (null != callback)
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    using (var index = new ZLibStream(output, CompressionMode.Compress, CompressionLevel.Level9, true))
    {
        var pickle = new Pickle(index);
        if (!pickle.Dump(file_table))
            throw new ApplicationException("Archive index serialization failed.");
    }
    // go back and emit the signature line at the start of the archive
    output.Position = 0;
    output.Write(header, 0, header.Length);
}
/// <summary>
/// Move this entry into <paramref name="parent"/> under <paramref name="name"/>.
/// Stub: no move is performed and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void moveTo(DirectoryEntry parent, string name, EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Create an archive whose index groups files by (at most 3-character, upper
/// case) extension: a directory table of per-extension records followed by
/// fixed-width file records, with file data written first and the index
/// written back at offset 0.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var arc_options = GetOptions<ArcOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int file_count = 0;
    // SortedDictionary keeps extensions in deterministic order for the index
    var file_table = new SortedDictionary<string, ArcDirectory>();
    foreach (var entry in list)
    {
        string ext = Path.GetExtension (entry.Name).TrimStart ('.').ToUpperInvariant();
        if (string.IsNullOrEmpty (ext))
            throw new InvalidFileName (entry.Name, arcStrings.MsgNoExtension);
        if (ext.Length > 3)
            throw new InvalidFileName (entry.Name, arcStrings.MsgExtensionTooLong);
        string name = Path.GetFileNameWithoutExtension (entry.Name).ToUpperInvariant();
        byte[] raw_name = encoding.GetBytes (name);
        if (raw_name.Length > arc_options.NameLength)
            throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong);
        ArcDirectory dir;
        if (!file_table.TryGetValue (ext, out dir))
        {
            byte[] raw_ext = encoding.GetBytes (ext);
            // re-check after encoding: multi-byte characters may exceed 3 bytes
            if (raw_ext.Length > 3)
                throw new InvalidFileName (entry.Name, arcStrings.MsgExtensionTooLong);
            dir = new ArcDirectory { Extension = raw_ext, Files = new List<ArcEntry>() };
            file_table[ext] = dir;
        }
        dir.Files.Add (new ArcEntry { Name = entry.Name, RawName = raw_name });
        ++file_count;
    }
    if (null != callback)
        callback (file_count+1, null, null); // total: one step per file + index
    int callback_count = 0;
    // layout: [count + 12 bytes per extension][(NameLength+9) bytes per file][data]
    long dir_offset = 4 + file_table.Count * 12;
    long data_offset = dir_offset + (arc_options.NameLength + 9) * file_count;
    output.Position = data_offset;
    foreach (var ext in file_table.Keys)
    {
        var dir = file_table[ext];
        dir.DirOffset = (uint)dir_offset;
        dir_offset += (arc_options.NameLength + 9) * dir.Files.Count;
        foreach (var entry in dir.Files)
        {
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = data_offset;
            entry.Size = WriteEntry (entry.Name, output);
            data_offset += entry.Size;
            // offsets stored as 32-bit values
            if (data_offset > uint.MaxValue)
                throw new FileSizeException();
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    output.Position = 0;
    using (var header = new BinaryWriter (output, encoding, true))
    {
        byte[] buffer = new byte[arc_options.NameLength+1];
        header.Write (file_table.Count);
        // extension table: 4-byte zero-padded extension, file count, dir offset
        foreach (var ext in file_table)
        {
            Buffer.BlockCopy (ext.Value.Extension, 0, buffer, 0, ext.Value.Extension.Length);
            for (int i = ext.Value.Extension.Length; i < 4; ++i)
                buffer[i] = 0;
            header.Write (buffer, 0, 4);
            header.Write (ext.Value.Files.Count);
            header.Write (ext.Value.DirOffset);
        }
        // file records: zero-padded name, size, offset
        foreach (var ext in file_table)
        {
            foreach (var entry in ext.Value.Files)
            {
                Buffer.BlockCopy (entry.RawName, 0, buffer, 0, entry.RawName.Length);
                for (int i = entry.RawName.Length; i < buffer.Length; ++i)
                    buffer[i] = 0;
                header.Write (buffer);
                header.Write (entry.Size);
                header.Write ((uint)entry.Offset);
            }
        }
    }
}
/// <summary>
/// Retrieve the parent directory of this entry.
/// Stub: nothing happens and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void getParent(EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Create an archive with a size-prefixed index: each index record appears to
/// be [entry_size][4 reserved bytes][offset][size][name NUL] — the name and
/// record size are written first, files are appended, then offsets/sizes are
/// patched into the records in a second pass. (Record layout inferred from
/// the write positions below — confirm against the format reader.)
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        var encoding = Encodings.cp932.WithFatalFallback();
        int callback_count = 0;
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        writer.Write(0); // placeholder for total index size, patched later
        byte[] name_buf = new byte[256];
        uint index_size = 0;
        var entry_sizes = new List <int>();
        // first, write names only
        foreach (var entry in list)
        {
            try
            {
                int size = encoding.GetBytes(entry.Name, 0, entry.Name.Length, name_buf, 0);
                // a name filling the whole buffer leaves no room for the NUL
                if (name_buf.Length == size)
                {
                    throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong);
                }
                name_buf[size] = 0;
                int entry_size = size + 17; // 4 (entry_size) + 12 (reserved/offset/size) + name + NUL
                writer.Write(entry_size);
                writer.BaseStream.Seek(12, SeekOrigin.Current); // leave room for offset/size fields
                writer.Write(name_buf, 0, size + 1);
                entry_sizes.Add(entry_size);
                index_size += (uint)entry_size;
            }
            catch (EncoderFallbackException X)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
            catch (ArgumentException X)
            {
                // GetBytes throws when the encoded name overflows name_buf
                throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong, X);
            }
        }
        // now, write files and remember offset/sizes
        long current_offset = writer.BaseStream.Position;
        foreach (var entry in list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = current_offset;
            using (var input = File.OpenRead(entry.Name))
            {
                var file_size = input.Length;
                if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue)
                {
                    throw new FileSizeException();
                }
                current_offset += file_size;
                entry.Size = (uint)file_size;
                input.CopyTo(output);
            }
        }
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgUpdatingIndex);
        }
        // at last, go back to directory and write offset/sizes
        writer.BaseStream.Position = 0;
        writer.Write(index_size);
        long index_offset = 4 + 8; // first record's offset field
        int i = 0;
        foreach (var entry in list)
        {
            writer.BaseStream.Position = index_offset;
            int entry_size = entry_sizes[i++];
            index_offset += entry_size; // advance to next record's offset field
            writer.Write((uint)entry.Offset);
            writer.Write(entry.Size);
        }
    }
}
/// <summary>
/// Create a YPF archive: validates options, builds a hash-sorted file table
/// with XOR-obfuscated names, writes file bodies (optionally zlib-packed,
/// Adler32-checksummed), then writes the header and index at offset 0.
/// </summary>
/// <exception cref="ArgumentException">options is not a YpfOptions instance.</exception>
/// <exception cref="InvalidEncryptionScheme">key does not fit in a byte.</exception>
/// <exception cref="InvalidFormatException">version is zero.</exception>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ypf_options = GetOptions <YpfOptions> (options);
    if (null == ypf_options)
    {
        throw new ArgumentException("Invalid archive creation options", "options");
    }
    if (ypf_options.Key > 0xff)
    {
        throw new InvalidEncryptionScheme(arcStrings.MsgCreationKeyRequired);
    }
    if (0 == ypf_options.Version)
    {
        throw new InvalidFormatException(arcStrings.MsgInvalidVersion);
    }
    var scheme = new YpfScheme
    {
        SwapTable = GuessSwapTable(ypf_options.Version),
        Key = (byte)ypf_options.Key
    };
    int callback_count = 0;
    var encoding = Encodings.cp932.WithFatalFallback();
    ChecksumFunc Checksum = data => Crc32.Compute(data, 0, data.Length);
    uint data_offset = 0x20; // header occupies the first 0x20 bytes
    var file_table = new List <YpfEntry>();
    foreach (var entry in list)
    {
        try
        {
            string file_name = entry.Name;
            byte[] name_buf = encoding.GetBytes(file_name);
            // name length is stored in a single byte
            if (name_buf.Length > 0xff)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong);
            }
            uint hash = Checksum(name_buf);
            byte file_type = GetFileType(ypf_options.Version, file_name);
            // stored names are XOR-obfuscated with the key
            for (int i = 0; i < name_buf.Length; ++i)
            {
                name_buf[i] = (byte)(name_buf[i] ^ ypf_options.Key);
            }
            file_table.Add(new YpfEntry
            {
                Name = file_name,
                IndexName = name_buf,
                NameHash = hash,
                FileType = file_type,
                IsPacked = 0 == file_type, // only type-0 entries get compressed
            });
            // each index record is 0x17 fixed bytes plus the name
            data_offset += (uint)(0x17 + name_buf.Length);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
    }
    // index is ordered by name hash
    file_table.Sort((a, b) => a.NameHash.CompareTo(b.NameHash));
    output.Position = data_offset;
    uint current_offset = data_offset;
    foreach (var entry in file_table)
    {
        if (null != callback)
        {
            callback(callback_count++, entry, arcStrings.MsgAddingFile);
        }
        entry.Offset = current_offset;
        using (var input = File.OpenRead(entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.UnpackedSize = (uint)file_size;
            // Adler32 checksum is computed over the bytes actually stored
            using (var checked_stream = new CheckedStream(output, new Adler32()))
            {
                if (entry.IsPacked)
                {
                    var start = output.Position;
                    using (var zstream = new ZLibStream(checked_stream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    {
                        input.CopyTo(zstream);
                    }
                    entry.Size = (uint)(output.Position - start);
                }
                else
                {
                    input.CopyTo(checked_stream);
                    entry.Size = entry.UnpackedSize;
                }
                checked_stream.Flush();
                entry.CheckSum = checked_stream.CheckSumValue;
                current_offset += entry.Size;
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    output.Position = 0;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        writer.Write(Signature);
        writer.Write(ypf_options.Version);
        writer.Write(file_table.Count);
        writer.Write(data_offset);
        writer.BaseStream.Seek(0x20, SeekOrigin.Begin);
        foreach (var entry in file_table)
        {
            writer.Write(entry.NameHash);
            // name length is stored obfuscated through the swap table
            byte name_len = (byte)~Parser.DecryptLength(scheme.SwapTable, (byte)entry.IndexName.Length);
            writer.Write(name_len);
            writer.Write(entry.IndexName);
            writer.Write(entry.FileType);
            writer.Write(entry.IsPacked);
            writer.Write(entry.UnpackedSize);
            writer.Write(entry.Size);
            writer.Write((uint)entry.Offset);
            writer.Write(entry.CheckSum);
        }
    }
}
/// <summary>
/// Look up a directory at <paramref name="path"/>.
/// Stub: no lookup happens and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void getDirectory(string path, object flags, EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Move this entry into <paramref name="parent"/> under <paramref name="name"/>.
/// Stub: no move happens and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void moveTo(DirectoryEntry parent, string name, EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Copy this entry into <paramref name="parent"/> under <paramref name="name"/>.
/// Stub: no copy happens; neither <paramref name="successCallback"/> nor
/// <paramref name="errorCallback"/> is ever invoked.
/// </summary>
public void copyTo(DirectoryEntry parent, string name, EntryCallback successCallback, ErrorCallback errorCallback)
{
    // intentionally empty
}
/// <summary>
/// Create a Steins;Gate archive: a 4-byte little-endian index size, an
/// encrypted index of (name length, name, size, offset) records, and file
/// bodies written through <see cref="SteinsGateEncryptedStream"/>.
/// Duplicate names (after backslash normalization) are silently skipped.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var sg_options = GetOptions<SteinsGateOptions> (options);
    Encoding encoding = sg_options.FileNameEncoding.WithFatalFallback();
    long start_pos = output.Position;
    int callback_count = 0;
    uint index_size = 4; // starts with the 4-byte entry count
    var real_entry_list = new List<RawEntry> (list.Count());
    var used_names = new HashSet<string>();
    foreach (var entry in list)
    {
        string name = entry.Name.Replace (@"\", "/");
        if (!used_names.Add (name)) // duplicate name
            continue;
        var header_entry = new RawEntry { Name = entry.Name };
        try
        {
            header_entry.IndexName = encoding.GetBytes (name);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        // each record: name length (4) + name + size (4) + offset (8)
        index_size += (uint)header_entry.IndexName.Length + 16;
        real_entry_list.Add (header_entry);
    }
    // skip past the index-size field (4 bytes) plus the index itself
    output.Seek (4+index_size, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.Open (entry.Name, FileMode.Open, FileAccess.Read))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.Offset = output.Position;
            entry.Size = (uint)file_size;
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            // file contents are written through the encrypting wrapper
            using (var stream = new SteinsGateEncryptedStream (output))
                input.CopyTo (stream);
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // go back and emit the little-endian index size, then the encrypted index
    output.Position = start_pos;
    output.WriteByte ((byte)(index_size & 0xff));
    output.WriteByte ((byte)((index_size >> 8) & 0xff));
    output.WriteByte ((byte)((index_size >> 16) & 0xff));
    output.WriteByte ((byte)((index_size >> 24) & 0xff));
    var encrypted_stream = new SteinsGateEncryptedStream (output);
    using (var header = new BinaryWriter (encrypted_stream))
    {
        header.Write (real_entry_list.Count);
        foreach (var entry in real_entry_list)
        {
            header.Write (entry.IndexName.Length);
            header.Write (entry.IndexName);
            header.Write ((uint)entry.Size);
            header.Write ((long)entry.Offset);
        }
    }
}
/// <summary>
/// Retrieve the parent directory of this entry.
/// Stub: nothing happens and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void getParent(EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Create a YKC archive: file data starting at 0x18, then a NUL-terminated
/// name pool, then an index of (name offset, name size, data offset, data
/// size, 0) records; finally the "YKC001" header with index offset/length is
/// written at the start of the stream.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    // header area 0x00..0x18 is filled in at the end
    output.Position = 0x18;
    int callback_count = 0;
    foreach (var entry in list)
    {
        using (var file = File.OpenRead(entry.Name))
        {
            var file_size = file.Length;
            if (file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            long file_offset = output.Position;
            // offsets are stored as 32-bit values
            if (file_offset + file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.Offset = file_offset;
            entry.Size = (uint)file_size;
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            file.CopyTo(output);
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // write the name pool after the data, remembering each name's position
    byte[] name_buf = new byte[0x40];
    var encoding = Encodings.cp932.WithFatalFallback();
    int count = list.Count();
    var name_offsets = new uint[count];
    var name_sizes = new int[count];
    int i = 0;
    foreach (var entry in list)
    {
        int length = encoding.GetByteCount(entry.Name);
        if (length + 1 > name_buf.Length)
        {
            name_buf = new byte[length + 2]; // grow buffer to fit name + NUL
        }
        length = encoding.GetBytes(entry.Name, 0, entry.Name.Length, name_buf, 0);
        name_buf[length++] = 0; // NUL terminator included in stored size
        name_offsets[i] = (uint)output.Position;
        output.Write(name_buf, 0, length);
        name_sizes[i] = length;
        ++i;
        if (output.Position > uint.MaxValue)
        {
            throw new FileSizeException();
        }
    }
    uint index_offset = (uint)output.Position;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        i = 0;
        foreach (var entry in list)
        {
            writer.Write(name_offsets[i]);
            writer.Write(name_sizes[i]);
            writer.Write((uint)entry.Offset);
            writer.Write(entry.Size);
            writer.Write(0); // reserved/unused field
            ++i;
        }
        uint index_length = (uint)(output.Position - index_offset);
        // finally, fill the header: signature, 0x18, then index offset/length at 0x10
        output.Position = 0;
        encoding.GetBytes("YKC001\0\0", 0, 8, name_buf, 0);
        writer.Write(name_buf, 0, 8);
        writer.Write(0x18);
        output.Position = 0x10;
        writer.Write(index_offset);
        writer.Write(index_length);
    }
}
/// <summary>
/// Retrieve the parent directory of this entry.
/// Stub: nothing happens; neither <paramref name="successCallback"/> nor
/// <paramref name="errorCallback"/> is ever invoked.
/// </summary>
public void getParent(EntryCallback successCallback, ErrorCallback errorCallback)
{
    // intentionally empty
}
/// <summary>
/// Create an AMI archive, optionally layered on top of an existing base
/// archive: entries from the base are merged with <paramref name="list"/>
/// into an id-keyed table, data is written after a reserved header/index
/// area, and the (id, offset, unpacked size, size) index is written back.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    ArcFile base_archive = null;
    var ami_options = GetOptions <AmiOptions> (options);
    if (null != ami_options && ami_options.UseBaseArchive && !string.IsNullOrEmpty(ami_options.BaseArchive))
    {
        var base_file = new ArcView(ami_options.BaseArchive);
        try
        {
            if (base_file.View.ReadUInt32(0) == Signature)
            {
                base_archive = TryOpen(base_file);
            }
            if (null == base_archive)
            {
                throw new InvalidFormatException(string.Format("{0}: base archive could not be read", Path.GetFileName(ami_options.BaseArchive)));
            }
            // ownership transferred to base_archive; prevent the finally from disposing it
            base_file = null;
        }
        finally
        {
            if (null != base_file)
            {
                base_file.Dispose();
            }
        }
    }
    try
    {
        // id-keyed table; new entries override base entries with the same id
        var file_table = new SortedDictionary <uint, PackedEntry>();
        if (null != base_archive)
        {
            foreach (AmiEntry entry in base_archive.Dir)
            {
                file_table[entry.Id] = entry;
            }
        }
        int update_count = UpdateFileTable(file_table, list);
        if (0 == update_count)
        {
            throw new InvalidFormatException(arcStrings.AMINoFiles);
        }
        uint file_count = (uint)file_table.Count;
        if (null != callback)
        {
            // announce total step count: one per entry plus the index write
            callback((int)file_count + 1, null, null);
        }
        int callback_count = 0;
        long start_offset = output.Position;
        // header (16 bytes) + 16 bytes per index record
        uint data_offset = file_count * 16 + 16;
        output.Seek(data_offset, SeekOrigin.Current);
        foreach (var entry in file_table)
        {
            if (null != callback)
            {
                callback(callback_count++, entry.Value, arcStrings.MsgAddingFile);
            }
            long current_offset = output.Position;
            if (current_offset > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            if (entry.Value is AmiEntry)
            {
                // entry originates from the base archive; copy it verbatim
                CopyAmiEntry(base_archive, entry.Value, output);
            }
            else
            {
                entry.Value.Size = WriteAmiEntry(entry.Value, output);
            }
            entry.Value.Offset = (uint)current_offset;
        }
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        output.Position = start_offset;
        using (var header = new BinaryWriter(output, Encoding.ASCII, true))
        {
            header.Write(Signature);
            header.Write(file_count);
            header.Write(data_offset);
            header.Write((uint)0);
            foreach (var entry in file_table)
            {
                header.Write(entry.Key);
                header.Write((uint)entry.Value.Offset);
                header.Write((uint)entry.Value.UnpackedSize);
                header.Write((uint)entry.Value.Size);
            }
        }
    }
    finally
    {
        if (null != base_archive)
        {
            base_archive.Dispose();
        }
    }
}
/// <summary>
/// Look up a file at <paramref name="path"/>.
/// Stub: no lookup happens and <paramref name="successCallback"/> is never invoked.
/// </summary>
public void getFile(string path, object flags, EntryCallback successCallback)
{
    // intentionally empty
}
/// <summary>
/// Look up a directory at <paramref name="path"/>.
/// Stub: no lookup happens; neither <paramref name="successCallback"/> nor
/// <paramref name="errorCallback"/> is ever invoked.
/// </summary>
public void getDirectory(string path, object flags, EntryCallback successCallback, ErrorCallback errorCallback)
{
    // intentionally empty
}
/// <summary>
/// Create an NSA (ONScripter) archive: big-endian entry count and base
/// offset, a per-entry index (name, compression byte, offset, packed size,
/// unpacked size), then file data. Only ".bmp" entries receive the
/// configured compression; LZSS entries are packed via <see cref="Packer"/>.
/// Duplicate names are silently skipped.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ons_options = GetOptions <NsaOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var real_entry_list = new List <NsaEntry>();
    var used_names = new HashSet <string>();
    int index_size = 0;
    foreach (var entry in list)
    {
        if (!used_names.Add(entry.Name)) // duplicate name
        {
            continue;
        }
        try
        {
            index_size += encoding.GetByteCount(entry.Name) + 1; // name + NUL
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        var header_entry = new NsaEntry { Name = entry.Name };
        if (Compression.None != ons_options.CompressionType)
        {
            // only bitmaps are compressed
            if (entry.Name.HasExtension(".bmp"))
            {
                header_entry.CompressionType = ons_options.CompressionType;
            }
        }
        index_size += 13; // compression byte + offset + size + unpacked size
        real_entry_list.Add(header_entry);
    }
    long start_offset = output.Position;
    long base_offset = 6 + index_size; // 2-byte count + 4-byte base offset + index
    output.Seek(base_offset, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.OpenRead(entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            // stored offsets are relative to the end of the index
            long file_offset = output.Position - base_offset;
            if (file_offset + file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = file_offset;
            entry.UnpackedSize = (uint)file_size;
            if (Compression.LZSS == entry.CompressionType)
            {
                var packer = new Packer(input, output);
                entry.Size = packer.EncodeLZSS();
            }
            else
            {
                // any non-LZSS setting degrades to storing uncompressed
                entry.Size = entry.UnpackedSize;
                entry.CompressionType = Compression.None;
                input.CopyTo(output);
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    output.Position = start_offset;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        writer.Write(Binary.BigEndian((short)real_entry_list.Count));
        writer.Write(Binary.BigEndian((uint)base_offset));
        foreach (var entry in real_entry_list)
        {
            writer.Write(encoding.GetBytes(entry.Name));
            writer.Write((byte)0); // NUL terminator
            writer.Write((byte)entry.CompressionType);
            writer.Write(Binary.BigEndian((uint)entry.Offset));
            writer.Write(Binary.BigEndian((uint)entry.Size));
            writer.Write(Binary.BigEndian((uint)entry.UnpackedSize));
        }
    }
}
/// <summary>
/// Adds callback function which will be executed when an entry occurs in this
/// <see cref="FYFY.Family"/>.
/// </summary>
/// <param name="callback">Callback function.</param>
public void addEntryCallback(EntryCallback callback)
{
    // delegate combination: equivalent to "_entryCallbacks += callback"
    _entryCallbacks = _entryCallbacks + callback;
}
/// <summary>
/// Create an NPK2 archive: segmented (or solid), optionally compressed file
/// bodies written after a reserved header+index area, followed by the "NPK2"
/// header with the AES IV, and an AES-CBC-encrypted index describing every
/// entry and its segments.
/// </summary>
/// <exception cref="InvalidEncryptionScheme">no AES key supplied in options.</exception>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var npk_options = GetOptions <Npk2Options> (options);
    if (null == npk_options.Key)
    {
        throw new InvalidEncryptionScheme();
    }
    var enc = DefaultEncoding.WithFatalFallback();
    int index_length = 0;
    var dir = new List <NpkStoredEntry>();
    foreach (var entry in list)
    {
        var ext = Path.GetExtension(entry.Name).ToLowerInvariant();
        var npk_entry = new NpkStoredEntry
        {
            Name = entry.Name,
            RawName = enc.GetBytes(entry.Name.Replace('\\', '/')),
            IsSolid = SolidFiles.Contains(ext),       // stored as a single segment
            IsPacked = !DisableCompression.Contains(ext),
        };
        int segment_count = 1;
        if (!npk_entry.IsSolid)
        {
            // ceiling division: number of DefaultSegmentSize chunks
            segment_count = (int)(((long)entry.Size + DefaultSegmentSize - 1) / DefaultSegmentSize);
        }
        // record: flag+name length (3) + name + fixed fields (0x28) + 0x14 per segment
        index_length += 3 + npk_entry.RawName.Length + 0x28 + segment_count * 0x14;
        dir.Add(npk_entry);
    }
    // index area is padded to a 16-byte boundary (AES block size)
    index_length = (index_length + 0xF) & ~0xF;
    int callback_count = 0;
    using (var aes = Aes.Create())
    {
        aes.Mode = CipherMode.CBC;
        aes.Padding = PaddingMode.PKCS7;
        aes.Key = npk_options.Key;
        aes.IV = GenerateAesIV();
        // file bodies start after the 0x20-byte header and the index area
        output.Position = 0x20 + index_length;
        foreach (var entry in dir)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            using (var writer = new NpkWriter(entry, output, aes))
                writer.Write(DefaultSegmentSize);
        }
        // header: signature, version, IV, entry count, index length
        output.Position = 0;
        var buffer = new byte[] { (byte)'N', (byte)'P', (byte)'K', (byte)'2', 1, 0, 0, 0 };
        output.Write(buffer, 0, 8);
        output.Write(aes.IV, 0, 0x10);
        LittleEndian.Pack(dir.Count, buffer, 0);
        LittleEndian.Pack(index_length, buffer, 4);
        output.Write(buffer, 0, 8);
        // index is written through an AES-CBC encrypting stream;
        // ProxyStream(output, true) keeps the underlying stream open
        using (var encryptor = aes.CreateEncryptor())
        using (var proxy = new ProxyStream(output, true))
        using (var index_stream = new CryptoStream(proxy, encryptor, CryptoStreamMode.Write))
        using (var index = new BinaryWriter(index_stream))
        {
            if (null != callback)
            {
                callback(callback_count++, null, arcStrings.MsgWritingIndex);
            }
            foreach (var entry in dir)
            {
                index.Write(entry.IsSolid); // 0 -> segmentation enabled, 1 -> no segmentation
                index.Write((short)entry.RawName.Length);
                index.Write(entry.RawName);
                index.Write(entry.UnpackedSize);
                index.Write(entry.CheckSum);
                index.Write(entry.Segments.Count);
                foreach (var segment in entry.Segments)
                {
                    index.Write(segment.Offset);
                    index.Write(segment.AlignedSize);
                    index.Write(segment.Size);
                    index.Write(segment.UnpackedSize);
                }
            }
        }
    }
}
/// <summary>
/// Create an archive with a big-endian header: 2-byte entry count, 4-byte
/// base offset, an index of NUL-terminated cp932 names with offset/size
/// pairs, then the file data. Duplicate names are silently skipped.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var encoding = Encodings.cp932.WithFatalFallback();
    int progress = 0;
    var accepted = new List<Entry>();
    var seen = new HashSet<string>();
    int names_size = 0;

    // First pass: filter out duplicates and measure the index.
    foreach (var entry in list)
    {
        if (!seen.Add(entry.Name)) // duplicate name
            continue;
        try
        {
            names_size += encoding.GetByteCount(entry.Name) + 1; // name + NUL
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        names_size += 8; // offset + size fields
        accepted.Add(entry);
    }

    long start_offset = output.Position;
    long base_offset = 6 + names_size; // count (2) + base offset (4) + index
    output.Seek(base_offset, SeekOrigin.Current);

    // Second pass: write file bodies, recording index-relative offsets.
    foreach (var entry in accepted)
    {
        using (var input = File.OpenRead(entry.Name))
        {
            long file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            long data_pos = output.Position - base_offset;
            if (data_pos + file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.Offset = data_pos;
            entry.Size = (uint)file_size;
            callback?.Invoke(progress++, entry, arcStrings.MsgAddingFile);
            input.CopyTo(output);
        }
    }

    callback?.Invoke(progress++, null, arcStrings.MsgWritingIndex);

    // Final pass: return to the reserved area and emit the index.
    output.Position = start_offset;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        writer.Write(Binary.BigEndian((short)accepted.Count));
        writer.Write(Binary.BigEndian((uint)base_offset));
        foreach (var entry in accepted)
        {
            writer.Write(encoding.GetBytes(entry.Name));
            writer.Write((byte)0); // NUL terminator
            writer.Write(Binary.BigEndian((uint)entry.Offset));
            writer.Write(Binary.BigEndian((uint)entry.Size));
        }
    }
}