public bool AddFileHeader(FileHeader newHeader)
{
    if (FileHeaders.Count == ComplexFile.Settings.HeadersNumber)
    {
        return false;
    }

    FileHeaders.Add(newHeader);
    return true;
}
/// <summary>
/// Verifies the start bytes of the decrypted content stream.
/// </summary>
/// <param name="input">The decrypted content stream.</param>
/// <param name="headers">The database file headers.</param>
/// <returns><c>true</c> if the bytes match; otherwise, <c>false</c>.</returns>
/// <exception cref="System.ArgumentNullException">
/// <paramref name="input"/> and <paramref name="headers"/> cannot be <c>null</c>.
/// </exception>
public static Task<bool> VerifyStartBytes(IInputStream input, FileHeaders headers)
{
    if (headers == null)
    {
        throw new ArgumentNullException("headers");
    }

    return VerifyStartBytes(input, headers.StartBytes);
}
// NOTE: the declaration of this snippet was truncated; the modifiers and the
// Task<IBuffer> return type are assumed here to match the masterKey parameter
// of Decrypt below.
public static Task<IBuffer> GetMasterKey(FileHeaders headers)
{
    if (headers == null)
    {
        throw new ArgumentNullException("headers");
    }

    return GetMasterKey(headers.TransformSeed, headers.TransformRounds);
}
/// <summary>
/// Decrypts the specified input stream.
/// </summary>
/// <param name="input">The input stream.</param>
/// <param name="masterKey">The master key.</param>
/// <param name="headers">The database file headers.</param>
/// <returns>The decrypted buffer.</returns>
/// <exception cref="ArgumentNullException">
/// The <paramref name="input"/>, <paramref name="masterKey"/> and
/// <paramref name="headers"/> cannot be <c>null</c>.
/// </exception>
public static Task<IInputStream> Decrypt(IRandomAccessStream input,
    IBuffer masterKey, FileHeaders headers)
{
    if (headers == null)
    {
        throw new ArgumentNullException("headers");
    }

    return Decrypt(input, masterKey, headers.MasterSeed, headers.EncryptionIV);
}
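// A minimal sketch of how the three overloads above compose into the
// open-database pipeline, assuming the headers have already been parsed
// (see GetHeaders below) and "database" is positioned after the header
// block. Only the method name OpenContent and the local variable names are
// illustrative; the call sequence itself comes from the APIs shown above.
public static async Task<IInputStream> OpenContent(
    IRandomAccessStream database, FileHeaders headers)
{
    // Derive the master key from the transform seed and rounds.
    var masterKey = await GetMasterKey(headers);

    // Decrypt the remainder of the stream using the master seed and IV.
    var decrypted = await Decrypt(database, masterKey, headers);

    // The first plaintext bytes must match the expected start bytes;
    // a mismatch indicates a wrong key or a corrupted file.
    if (!await VerifyStartBytes(decrypted, headers))
    {
        throw new InvalidOperationException(
            "Invalid master key or corrupted file.");
    }

    return decrypted;
}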
private static string _baseScriptPath = @".\Scripts\";

static void Main(string[] args)
{
    var setup = new FileHeaders();
    // Menu();

    // Run each setup script against the local server; _tempConnection is
    // defined elsewhere in this class.
    foreach (var a in setup.queryDetails)
    {
        ExecuteSqlCommand(_tempConnection, "localhost", a);
    }

    Console.Read();
}
private static void Decrypt(FileHeaders headers, XDocument doc)
{
    // Select the <Value> of every <String> inside an <Entry> that is
    // flagged with Protected="True".
    var protectedStrings = doc.Descendants("Entry")
        .SelectMany(x => x.Elements("String"))
        .Select(x => x.Element("Value"))
        .Where(x =>
        {
            var protect = x.Attribute("Protected");
            return protect != null && (bool)protect;
        });

    IRandomGenerator generator;
    switch (headers.RandomAlgorithm)
    {
        case CrsAlgorithm.ArcFourVariant:
            generator = new Rc4RandomGenerator(
                headers.ProtectedStreamKey);
            break;

        default:
            generator = new Salsa20RandomGenerator(
                headers.ProtectedStreamKey);
            break;
    }

    // The generator is stateful: values must be processed in document
    // order so each one consumes the correct slice of the keystream.
    foreach (var protectedString in protectedStrings)
    {
        var encrypted = Convert.FromBase64String(protectedString.Value);
        var length = encrypted.Length;
        var padding = generator.GetRandomBytes(length);

        // XOR the ciphertext with the keystream to recover the plaintext.
        for (var i = 0U; i < length; i++)
        {
            encrypted[i] ^= padding.GetByte(i);
        }

        protectedString.Value = Encoding.UTF8.GetString(encrypted, 0, length);
    }
}
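// For reference, the XML shape the query above expects, reconstructed from
// the element and attribute names used in Decrypt (real database files carry
// many more fields than shown here):
//
//   <Entry>
//     <String>
//       <Key>Password</Key>
//       <Value Protected="True">BASE64-ENCODED-CIPHERTEXT</Value>
//     </String>
//   </Entry>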
public async Task ParseContent_should_decompress_content()
{
    using (var decrypted = TestFiles.Read("IO.Demo7Pass.Decrypted.bin"))
    {
        var headers = new FileHeaders
        {
            RandomAlgorithm = CrsAlgorithm.Salsa20,
            ProtectedStreamKey = CryptographicBuffer.DecodeFromBase64String(
                "FDNbUwE9jt6Y9+syU+btBIOGRxYt2tiUqnb6FXWIF1E="),
        };

        var doc = await FileFormat.ParseContent(decrypted, true, headers);
        Assert.NotNull(doc);

        var root = doc.Root;
        Assert.NotNull(root);
        Assert.Equal("KeePassFile", root.Name.LocalName);
    }
}
/// <summary>
/// Parses the header fields.
/// </summary>
/// <param name="input">The input stream.</param>
/// <param name="buffer">The header bytes reader.</param>
/// <returns>The file headers.</returns>
private static async Task<FileHeaders> GetHeaders(
    IInputStream input, IBuffer buffer)
{
    var result = new FileHeaders();

    while (true)
    {
        // Each field starts with a one-byte ID followed by a
        // little-endian 16-bit payload size.
        buffer = await input.ReadAsync(buffer, 3);
        var field = (HeaderFields)buffer.GetByte(0);
        var size = BitConverter.ToUInt16(buffer.ToArray(1, 2), 0);

        if (size > 0)
        {
            buffer = await input.ReadAsync(buffer, size);
        }

        switch (field)
        {
            case HeaderFields.EndOfHeader:
                return result;

            case HeaderFields.CompressionFlags:
                result.UseGZip = buffer.GetByte(0) == 1;
                break;

            case HeaderFields.EncryptionIV:
                result.EncryptionIV = buffer.ToArray().AsBuffer();
                break;

            case HeaderFields.MasterSeed:
                result.MasterSeed = buffer.ToArray().AsBuffer();
                break;

            case HeaderFields.StreamStartBytes:
                result.StartBytes = buffer.ToArray().AsBuffer();
                break;

            case HeaderFields.TransformSeed:
                result.TransformSeed = buffer.ToArray().AsBuffer();
                break;

            case HeaderFields.TransformRounds:
                result.TransformRounds = BitConverter.ToUInt64(
                    buffer.ToArray(), 0);
                break;

            case HeaderFields.ProtectedStreamKey:
                result.ProtectedStreamKey = buffer.ToArray().AsBuffer();
                break;

            case HeaderFields.InnerRandomStreamID:
                result.RandomAlgorithm = (CrsAlgorithm)
                    BitConverter.ToUInt32(buffer.ToArray(), 0);
                break;
        }
    }
}
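// GetHeaders assumes a HeaderFields enum mapping the one-byte field IDs of
// the KDBX 2.x header. A sketch consistent with the published format (the
// project's own definition may differ in naming):
public enum HeaderFields : byte
{
    EndOfHeader = 0,
    Comment = 1,
    CipherID = 2,
    CompressionFlags = 3,
    MasterSeed = 4,
    TransformSeed = 5,
    TransformRounds = 6,
    EncryptionIV = 7,
    ProtectedStreamKey = 8,
    StreamStartBytes = 9,
    InnerRandomStreamID = 10,
}

// Likewise for the inner random stream algorithm IDs used above:
public enum CrsAlgorithm : uint
{
    Null = 0,
    ArcFourVariant = 1,
    Salsa20 = 2,
}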
/// <summary>
/// Verifies the database file headers integrity.
/// </summary>
/// <param name="headers">The database file headers.</param>
/// <param name="doc">The database content.</param>
/// <returns><c>true</c> if the header is valid; otherwise, <c>false</c>.</returns>
/// <exception cref="ArgumentNullException">
/// The <paramref name="headers"/> and <paramref name="doc"/> cannot be <c>null</c>.
/// </exception>
public static bool VerifyHeaders(FileHeaders headers, XDocument doc)
{
    if (headers == null)
    {
        throw new ArgumentNullException("headers");
    }

    return VerifyHeaders(headers.Hash, doc);
}
public ZipFile(IEnumerable<ZipEntry> entries)
{
    this.entries = entries;

    // Record the byte offset of each entry within the archive body.
    var offset = 0L;
    foreach (var entry in entries)
    {
        offsets.Add(offset, entry);
        offset += entry.Length;
    }

    var main_entries = new ConcatenatedStream(
        from entry in entries select (Stream)entry);

    // Build one central directory file header per entry (APPNOTE 4.3.12).
    var central_directory = new List<Stream>();
    foreach (var entry in offsets)
    {
        FileHeaders
            _offset = (little_endian)(int)entry.Key,
            total_records = (little_endian)entries.Count(),
            made_by = (little_endian)(ushort)19,
            version_needed = ZipEntry.VERSION_NEEDED,
            general = ZipEntry.GENERAL,
            method = ZipEntry.COMPRESSION_METHOD,
            last_mod_time = entry.Value.last_mod_time,
            last_mod_date = entry.Value.last_mod_date,
            crc32 = entry.Value.crc32,
            compressed_size = entry.Value.compressed_size,
            uncompressed_size = entry.Value.uncompressed_size,
            file_name_length = entry.Value.file_name_length,
            file_name_bytes = new MemoryStream(entry.Value.file_name_bytes),
            file_comment_length = (little_endian)(ushort)0,
            disk_where_file_starts = (little_endian)(short)0,
            internal_file_attributes = (little_endian)(short)0,
            external_file_attributes = (little_endian)(int)0,
            extra_field_length = ZipEntry.EXTRA_FIELD_LENGTH,
            zip64_id = entry.Value.zip64_id,
            zip64_len = entry.Value.zip64_len,
            zip64_compressed = entry.Value.zip64_compressed,
            zip64_uncompressed = entry.Value.zip64_uncompressed;

        ConcatenatedStream directory_entry = new[]
        {
            CENTRAL_DIRECTORY_FILE_HEADER_SIGNATURE,
            made_by, version_needed, general, method,
            last_mod_time, last_mod_date, crc32,
            compressed_size, uncompressed_size,
            file_name_length, extra_field_length, file_comment_length,
            disk_where_file_starts, internal_file_attributes,
            external_file_attributes, _offset, file_name_bytes,
            zip64_id, zip64_len, zip64_compressed, zip64_uncompressed,
        };

        central_directory.Add(directory_entry);
    }

    // ZIP64 end of central directory record (APPNOTE 4.3.14).
    FileHeaders
        zip_64_end_of_central_directory_signature = ZIP64_END_OF_CENTRAL_DIRECTORY_SIGNATURE,
        size_of_zip64_end_of_central_directory_record = (little_endian)(ulong)44,
        version_made_by = VERSION_MADE_BY,
        version_needed_to_extract = VERSION_NEEDED_TO_EXTRACT,
        number_of_this_disk_64 = (little_endian)(uint)0,
        number_of_the_disk_with_the_start_of_the_central_directory = (little_endian)(uint)0,
        total_number_of_entries_in_the_central_directory_on_this_disk = (little_endian)(ulong)offsets.Count,
        total_number_of_entries_in_the_central_directory = (little_endian)(ulong)offsets.Count,
        size_of_the_central_directory = (little_endian)(ulong)central_directory.Sum(x => x.Length),
        offset_of_start_of_central_directory_with_respect_to_the_starting_disk_number = (little_endian)(ulong)main_entries.Length;

    ConcatenatedStream zip64_end_of_central_directory = new[]
    {
        zip_64_end_of_central_directory_signature,
        size_of_zip64_end_of_central_directory_record,
        version_made_by,
        version_needed_to_extract,
        number_of_this_disk_64,
        number_of_the_disk_with_the_start_of_the_central_directory,
        total_number_of_entries_in_the_central_directory_on_this_disk,
        total_number_of_entries_in_the_central_directory,
        size_of_the_central_directory, // size of the central directory, per APPNOTE 4.3.14
        offset_of_start_of_central_directory_with_respect_to_the_starting_disk_number,
    };

    // ZIP64 end of central directory locator (APPNOTE 4.3.15).
    FileHeaders
        zip64_end_of_central_dir_locator_signature = ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIGNATURE,
        number_of_the_disk_with_the_start_of_the_zip64_end_of_central_directory = (little_endian)(uint)0,
        relative_offset_of_the_zip64_end_of_central_directory_record = (little_endian)(ulong)(
            (ulong)main_entries.Length + (ulong)central_directory.Sum(x => x.Length)),
        total_number_of_disks = (little_endian)(uint)1;

    ConcatenatedStream zip64_end_of_central_directory_locator = new[]
    {
        zip64_end_of_central_dir_locator_signature,
        number_of_the_disk_with_the_start_of_the_zip64_end_of_central_directory,
        relative_offset_of_the_zip64_end_of_central_directory_record,
        total_number_of_disks,
    };

    // Classic end of central directory record (APPNOTE 4.3.16).
    FileHeaders
        central_directory_length = (little_endian)(int)central_directory.Sum(x => x.Length),
        number_of_this_disk = (little_endian)(ushort)0,
        disk_where_central_directory_starts = (little_endian)(ushort)0,
        number_of_central_directory_records_on_this_disk = (little_endian)(ushort)central_directory.Count,
        total_number_of_records = (little_endian)(ushort)central_directory.Count,
        offset_start_of_central_directory = (little_endian)(int)main_entries.Length,
        comment_length = (little_endian)(ushort)0;

    ConcatenatedStream end_of_directory = new[]
    {
        END_OF_CENTRAL_DIRECTORY_SIGNATURE,
        number_of_this_disk,
        disk_where_central_directory_starts,
        number_of_central_directory_records_on_this_disk,
        total_number_of_records,
        central_directory_length,
        offset_start_of_central_directory,
        comment_length,
    };

    // Final layout: entry data, central directory, ZIP64 records, then the
    // classic end of central directory record.
    stream = main_entries + central_directory + zip64_end_of_central_directory
        + zip64_end_of_central_directory_locator + end_of_directory;
}
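// A minimal usage sketch, assuming ZipEntry exposes a constructor taking an
// entry name plus a content stream and that the composed `stream` field can
// be copied out (both hypothetical; the real signatures live in ZipEntry
// and the rest of ZipFile):
//
//   var zip = new ZipFile(new[]
//   {
//       new ZipEntry("readme.txt", File.OpenRead("readme.txt")),
//   });
//   using (var output = File.Create("archive.zip"))
//       zip.stream.CopyTo(output);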
public void Analyze_ShouldReturnCorrectType(Stream fileStream, FileType expectedType)
{
    // xUnit's Assert.Equal takes the expected value first, so failure
    // messages report expected and actual the right way around.
    Assert.Equal(expectedType, FileHeaders.Analyze(fileStream));
}
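// A sketch of how such a parameterized test is typically wired up with
// xUnit's [Theory]/[MemberData]; the member name, sample paths, and FileType
// values are illustrative, not taken from the project:
public static IEnumerable<object[]> FileSamples()
{
    // Hypothetical fixtures: a stream plus the FileType it should map to.
    yield return new object[] { File.OpenRead("samples/image.png"), FileType.Png };
    yield return new object[] { File.OpenRead("samples/document.pdf"), FileType.Pdf };
}

// Applied to the test above as:
//
//   [Theory]
//   [MemberData(nameof(FileSamples))]
//   public void Analyze_ShouldReturnCorrectType(Stream fileStream, FileType expectedType)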