// Pull a single byte from the stream, treating end-of-stream as a fatal
// file error (mirrors the C original's fread error handling).
public static byte get_byte(Stream infile)
{
    int raw = infile.ReadByte(); // ReadByte() reports EOF as -1
    ErrorStuff.CHECK_FILE(raw < 0, infile, "fread");
    return (byte)raw;
}
// Read an 8-byte big-endian value from the stream's current position.
// Fix: Stream.Read may return fewer bytes than requested even when more
// data is available (e.g. on network or compressed streams); the old
// single-call version would falsely report a file error on such partial
// reads. Loop until the buffer is full; a read of <= 0 bytes means EOF.
public static ulong get_64_be(Stream infile)
{
    byte[] buf = new byte[8];
    int total = 0;
    while (total < buf.Length)
    {
        int n = infile.Read(buf, total, buf.Length - total);
        if (n <= 0) break; // genuine end of stream
        total += n;
    }
    ErrorStuff.CHECK_FILE(total != 8, infile, "fread");
    return read_64_be(buf);
}
// Read a 4-byte little-endian value from the stream's current position.
// Fix: Stream.Read may return fewer bytes than requested even when more
// data is available; loop until the buffer is full (<= 0 bytes = EOF)
// instead of failing on a partial read.
public static uint get_32_le(Stream infile)
{
    byte[] buf = new byte[4];
    int total = 0;
    while (total < buf.Length)
    {
        int n = infile.Read(buf, total, buf.Length - total);
        if (n <= 0) break; // genuine end of stream
        total += n;
    }
    ErrorStuff.CHECK_FILE(total != 4, infile, "fread");
    return read_32_le(buf);
}
// Read a 2-byte little-endian value from the stream's current position.
// Fix: Stream.Read may return fewer bytes than requested even when more
// data is available; loop until the buffer is full (<= 0 bytes = EOF)
// instead of failing on a partial read.
public static ushort get_16_le(Stream infile)
{
    byte[] buf = new byte[2];
    int total = 0;
    while (total < buf.Length)
    {
        int n = infile.Read(buf, total, buf.Length - total);
        if (n <= 0) break; // genuine end of stream
        total += n;
    }
    ErrorStuff.CHECK_FILE(total != 2, infile, "fread");
    return read_16_le(buf);
}
// Run an @UTF query and promote its soft failure flags to hard errors:
// a missing/invalid table is always fatal, and a missing key is fatal
// whenever a specific key was actually requested (query != null).
public static utf_query_result query_utf_nofail(Stream infile, long offset, utf_query query)
{
    utf_query_result result = query_utf(infile, offset, query);

    bool tableInvalid = result.valid == 0;
    ErrorStuff.CHECK_ERROR(tableInvalid, "didn't find valid @UTF table where one was expected");

    if (query != null)
    {
        ErrorStuff.CHECK_ERROR(result.found == 0, "key not found");
    }

    return result;
}
// Walk a CRI CPK archive: verify the "CPK " signature, read the CpkHeader
// @UTF table for the TOC/content offsets and file count, then iterate the
// TOC table and extract every entry under base_name (decompressing entries
// whose ExtractSize exceeds their stored FileSize).
// NOTE(review): the file_length parameter is never used here — confirm
// whether callers rely on it or it is a leftover from the C original.
public static void analyze_CPK(Stream infile, string base_name, long file_length)
{
    const long CpkHeader_offset = 0x0;
    byte[] toc_string_table = null;

    // check header
    {
        byte[] buf = new byte[4];
        byte[] CPK_signature = Encoding.ASCII.GetBytes("CPK ");
        Util.get_bytes_seek(CpkHeader_offset, infile, buf, 4);
        ErrorStuff.CHECK_ERROR(!Util.memcmp(buf, CPK_signature), "CPK signature not found");
    }

    // check CpkHeader: the @UTF payload starts 0x10 past the signature and
    // must describe exactly one row
    {
        utf_query_result result = UtfTab.query_utf_nofail(infile, CpkHeader_offset + 0x10, null);
        ErrorStuff.CHECK_ERROR(result.rows != 1, "wrong number of rows in CpkHeader");
    }

    // get TOC offset
    long toc_offset = (long)UtfTab.query_utf_8byte(infile, CpkHeader_offset + 0x10, 0, "TocOffset");

    // get content offset
    long content_offset = (long)UtfTab.query_utf_8byte(infile, CpkHeader_offset + 0x10, 0, "ContentOffset");

    // get file count from CpkHeader
    long CpkHeader_count = UtfTab.query_utf_4byte(infile, CpkHeader_offset + 0x10, 0, "Files");

    // check TOC header
    {
        byte[] buf = new byte[4];
        byte[] TOC_signature = Encoding.ASCII.GetBytes("TOC ");
        Util.get_bytes_seek(toc_offset, infile, buf, 4);
        ErrorStuff.CHECK_ERROR(!Util.memcmp(buf, TOC_signature), "TOC signature not found");
    }

    // get TOC entry count, string table
    long toc_entries;
    {
        utf_query_result result = UtfTab.query_utf_nofail(infile, toc_offset + 0x10, null);
        toc_entries = result.rows;
        toc_string_table = UtfTab.load_utf_string_table(infile, toc_offset + 0x10);
    }

    // check that counts match
    ErrorStuff.CHECK_ERROR(toc_entries != CpkHeader_count, "CpkHeader file count and TOC entry count do not match");

    // extract files
    for (int i = 0; i < toc_entries; i++)
    {
        // get file name
        string file_name = UtfTab.query_utf_string(infile, toc_offset + 0x10, i, "FileName", toc_string_table);

        // get directory name
        string dir_name = UtfTab.query_utf_string(infile, toc_offset + 0x10, i, "DirName", toc_string_table);

        // get file size (size as stored in the archive)
        long file_size = UtfTab.query_utf_4byte(infile, toc_offset + 0x10, i, "FileSize");

        // get extract size (size after decompression)
        long extract_size = UtfTab.query_utf_4byte(infile, toc_offset + 0x10, i, "ExtractSize");

        // get file offset; FileOffset is relative to whichever of the
        // content area or the TOC appears first in the file
        ulong file_offset_raw = UtfTab.query_utf_8byte(infile, toc_offset + 0x10, i, "FileOffset");
        if (content_offset < toc_offset)
        {
            file_offset_raw += (ulong)content_offset;
        }
        else
        {
            file_offset_raw += (ulong)toc_offset;
        }
        // Stream.Position is a signed long, so refuse offsets beyond long.MaxValue
        ErrorStuff.CHECK_ERROR(file_offset_raw > (ulong)long.MaxValue, "File offset too large, will be unable to seek");
        long file_offset = (long)file_offset_raw;

        Console.WriteLine("{0}/{1} 0x{2:x} {3}", dir_name, file_name, (ulong)file_offset, file_size);

        using (Stream outfile = Util.open_file_in_directory(base_name, dir_name, '/', file_name, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
        {
            ErrorStuff.CHECK_ERRNO(outfile == null, "fopen");

            if (extract_size > file_size)
            {
                // ExtractSize larger than the stored size implies the entry
                // is compressed (CRILAYLA); decompress and verify the size
                long uncompressed_size = CpkUncompress.uncompress(infile, file_offset, file_size, outfile);
                Console.WriteLine("   uncompressed to {0}", uncompressed_size);
                ErrorStuff.CHECK_ERROR(uncompressed_size != extract_size, "uncompressed size != ExtractSize");
            }
            else
            {
                // stored uncompressed: straight byte copy
                Util.dump(infile, outfile, file_offset, file_size);
            }
        }
    }

    toc_string_table = null; // leftover of the C original's free(); harmless in C#
}
// Decompress one CRILAYLA-packed CPK entry into outfile.
// Input layout at offset: 8-byte magic (0 or CRILAYLA signature),
// 4-byte LE uncompressed size, 4-byte LE header offset, compressed
// bitstream, then a 0x100-byte uncompressed prefix at the end of the
// input which is copied verbatim to the start of the output.
// The bitstream is consumed from the END of the buffer backwards, and
// the output is filled from its END backwards (LZ-style backreferences
// plus verbatim bytes). Returns 0x100 + number of decompressed bytes.
public static long uncompress(Stream infile, long offset, long input_size, Stream outfile)
{
    byte[] output_buffer = null;

    ulong magic = Util.get_64_be_seek(offset + 0x00, infile);
    ErrorStuff.CHECK_ERROR(!((magic == 0) || (magic == CRILAYLA_sig)), "didn't find 0 or CRILAYLA signature for compressed data");

    long uncompressed_size = Util.get_32_le_seek(offset + 0x08, infile);
    long uncompressed_header_offset = offset + Util.get_32_le_seek(offset + 0x0C, infile) + 0x10;
    // the 0x100-byte uncompressed prefix must sit exactly at the end of the input
    ErrorStuff.CHECK_ERROR(uncompressed_header_offset + 0x100 != offset + input_size, "size mismatch");

    output_buffer = new byte[uncompressed_size + 0x100];
    ErrorStuff.CHECK_ERROR(output_buffer == null, "malloc"); // never true in C#; carried over from the C original

    // copy the uncompressed 0x100-byte prefix straight into the output
    Util.get_bytes_seek(uncompressed_header_offset, infile, output_buffer, 0x100);

    long buffer_input_size = input_size - 0x100;
    long input_offset = buffer_input_size - 1;       // bitstream is read backwards from the end
    long output_end = 0x100 + uncompressed_size - 1; // output is filled back-to-front
    byte bit_pool = 0;  // current partially-consumed input byte
    int bits_left = 0;  // bits remaining in bit_pool
    long bytes_output = 0;

    // variable-length-coded backreference length: 4 fixed levels, then
    // open-ended 8-bit continuation
    const int vle_levels = 4;
    const int vle_lens_0 = 2;
    const int vle_lens_1 = 3;
    const int vle_lens_2 = 5;
    const int vle_lens_3 = 8;

    if (buffer_input_size > int.MaxValue)
    {
        throw new Exception("compressed data too big to load into buffer");
    }
    byte[] input_buffer = new byte[buffer_input_size];
    infile.Position = offset; // redundant: get_bytes_seek below seeks again
    Util.get_bytes_seek(offset, infile, input_buffer, buffer_input_size);

    while (bytes_output < uncompressed_size)
    {
        if (get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, 1) != 0)
        {
            // flag bit 1: backreference — 13-bit distance, minimum length 3
            long backreference_offset = output_end - bytes_output + get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, 13) + 3;
            long backreference_length = 3;

            // decode variable length coding for length: each level adds its
            // field value; a non-saturated field terminates the coding
            int vle_level = 0;
            {
                int this_level = get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, vle_lens_0);
                backreference_length += this_level;
                if (this_level != ((1 << vle_lens_0) - 1)) { goto vle_levels_done; }
                ++vle_level;

                this_level = get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, vle_lens_1);
                backreference_length += this_level;
                if (this_level != ((1 << vle_lens_1) - 1)) { goto vle_levels_done; }
                ++vle_level;

                this_level = get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, vle_lens_2);
                backreference_length += this_level;
                if (this_level != ((1 << vle_lens_2) - 1)) { goto vle_levels_done; }
                ++vle_level;

                this_level = get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, vle_lens_3);
                backreference_length += this_level;
                if (this_level != ((1 << vle_lens_3) - 1)) { goto vle_levels_done; }
                ++vle_level;
            }
        vle_levels_done:
            if (vle_level == vle_levels)
            {
                // all fixed levels saturated: keep adding 8-bit chunks
                // until one reads below 255
                int this_level;
                do
                {
                    this_level = get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, 8);
                    backreference_length += this_level;
                } while (this_level == 255);
            }

            //printf("0x%08lx backreference to 0x%lx, length 0x%lx\n", output_end-bytes_output, backreference_offset, backreference_length);

            // copy backreference; source and destination both move backwards,
            // and the regions may overlap (intentional, as in LZ77)
            for (int i = 0; i < backreference_length; i++)
            {
                output_buffer[output_end - bytes_output] = output_buffer[backreference_offset--];
                bytes_output++;
            }
        }
        else
        {
            // verbatim byte
            output_buffer[output_end - bytes_output] = (byte)get_next_bits(input_buffer, ref input_offset, ref bit_pool, ref bits_left, 8);
            //printf("0x%08lx verbatim byte\n", output_end-bytes_output);
            bytes_output++;
        }
    }

    // write prefix + decompressed data in one shot at the start of outfile
    Util.put_bytes_seek(0, outfile, output_buffer, 0x100 + uncompressed_size);
    output_buffer = null; // leftover of the C original's free(); harmless in C#
    return (0x100 + bytes_output);
}
// Fill buf with byte_count bytes from the stream's current position,
// failing via CHECK_FILE if the stream ends early.
// Fixes two defects in the old single-call version:
//  1. Stream.Read may legally return fewer bytes than requested before
//     EOF, which falsely tripped the error check — now we loop.
//  2. (int)byte_count silently truncated counts above int.MaxValue —
//     now large counts are read in int-sized chunks.
public static void get_bytes(Stream infile, byte[] buf, long byte_count)
{
    long total_read = 0;
    while (total_read < byte_count)
    {
        int chunk = (int)Math.Min(byte_count - total_read, int.MaxValue);
        // buf indices fit in int because a byte[] can't exceed int range
        int bytes_read = infile.Read(buf, (int)total_read, chunk);
        if (bytes_read <= 0) break; // genuine end of stream
        total_read += bytes_read;
    }
    ErrorStuff.CHECK_FILE(total_read != byte_count, infile, "fread");
}
// Parse one @UTF table at `offset` in infile.
//  - If print != 0, dump the whole table (recursing into DATA cells that
//    look like nested tables).
//  - If query != null, locate query.name in row query.index and record its
//    type/value in the returned utf_query_result (result.found = 1).
// Returns a result with valid = 0 when no @UTF signature is present.
// Fix applied: error-message typo "do now add up" -> "do not add up";
// all other tokens are unchanged from the original.
public static utf_query_result analyze_utf(Stream infile, long offset, int indent, int print, utf_query query)
{
    byte[] buf = new byte[4];
    long table_info_table_offset;
    uint table_info_table_size;
    uint table_info_schema_offset;
    uint table_info_rows_offset;
    uint table_info_string_table_offset;
    uint table_info_data_offset;
    uint table_info_table_name; // references index in string_table
    ushort table_info_columns;
    ushort table_info_row_width;
    uint table_info_rows;
    byte[] string_table = null;
    utf_column_info[] schema = null;

    utf_query_result result = new utf_query_result();
    result.valid = 0;

    if (print != 0) { Util.printf_indent(indent); Console.WriteLine("{"); }
    indent += INDENT_LEVEL;

    table_info_table_offset = offset;

    // check header
    byte[] UTF_signature = Encoding.ASCII.GetBytes("@UTF");
    Util.get_bytes_seek(offset, infile, buf, 4);
    if (!Util.memcmp(buf, UTF_signature))
    {
        // not an @UTF table: report (if printing) and return an invalid result
        if (print != 0) { Util.printf_indent(indent); Console.WriteLine("not a @UTF table at {0:X8}", offset); }
        indent -= INDENT_LEVEL;
        if (print != 0) { Util.printf_indent(indent); Console.WriteLine("}"); }
        string_table = null;
        schema = null;
        return result;
    }

    // read the fixed table header (big-endian fields following the signature)
    table_info_table_size = Util.get_32_be(infile);
    table_info_schema_offset = 0x20;
    table_info_rows_offset = Util.get_32_be(infile);
    table_info_string_table_offset = Util.get_32_be(infile);
    table_info_data_offset = Util.get_32_be(infile);
    uint table_name_string = Util.get_32_be(infile);
    table_info_columns = Util.get_16_be(infile);
    table_info_row_width = Util.get_16_be(infile);
    table_info_rows = Util.get_32_be(infile);

    // allocate for string table (+1 so the table is always NUL-terminated)
    long string_table_size = table_info_data_offset - table_info_string_table_offset;
    string_table = new byte[string_table_size + 1];

    // load schema: one type byte + name offset per column; constant-storage
    // columns additionally embed their value inline, which we skip over here
    // while remembering where it lives (constant_offset)
    schema = new utf_column_info[table_info_columns];
    for (int i = 0; i < schema.Length; ++i) { schema[i] = new utf_column_info(); }
    {
        int i;
        for (i = 0; i < table_info_columns; i++)
        {
            schema[i].type = Util.get_byte(infile);
            schema[i].column_name = Util.get_32_be(infile);

            if ((schema[i].type & COLUMN_STORAGE_MASK) == COLUMN_STORAGE_CONSTANT)
            {
                schema[i].constant_offset = infile.Position;
                // skip over the inline constant value (size depends on type)
                switch (schema[i].type & COLUMN_TYPE_MASK)
                {
                    case COLUMN_TYPE_STRING:
                        Util.get_32_be(infile);
                        break;
                    case COLUMN_TYPE_8BYTE:
                    case COLUMN_TYPE_DATA:
                        Util.get_32_be(infile);
                        Util.get_32_be(infile);
                        break;
                    case COLUMN_TYPE_FLOAT:
                    case COLUMN_TYPE_4BYTE2:
                    case COLUMN_TYPE_4BYTE:
                        Util.get_32_be(infile);
                        break;
                    case COLUMN_TYPE_2BYTE2:
                    case COLUMN_TYPE_2BYTE:
                        Util.get_16_be(infile);
                        break;
                    case COLUMN_TYPE_1BYTE2:
                    case COLUMN_TYPE_1BYTE:
                        Util.get_byte(infile);
                        break;
                    default:
                        ErrorStuff.CHECK_ERROR(true, "unknown type for constant");
                        break;
                }
            }
        }
    }

    // read string table (offsets in the file are relative to offset + 8)
    Util.get_bytes_seek(table_info_string_table_offset + 8 + offset, infile, string_table, string_table_size);
    table_info_table_name = table_name_string;

    // fill in the default stuff
    result.valid = 1;
    result.found = 0;
    result.rows = table_info_rows;
    result.name_offset = table_name_string;
    result.string_table_offset = table_info_string_table_offset;
    result.data_offset = table_info_data_offset;

    // explore the values
    if (query != null || print != 0)
    {
        int i, j;
        for (i = 0; i < table_info_rows; i++)
        {
            // when only querying, skip rows other than the requested index
            if (print == 0 && query != null && i != query.index) continue;

            long row_offset = table_info_table_offset + 8 + table_info_rows_offset + i * table_info_row_width;
            long row_start_offset = row_offset;

            if (print != 0) { Util.printf_indent(indent); Console.WriteLine("{0}[{1}] = {{", ReadString(string_table, table_info_table_name), i); }
            indent += INDENT_LEVEL;

            for (j = 0; j < table_info_columns; j++)
            {
                byte type = schema[j].type;
                long constant_offset = schema[j].constant_offset;
                int constant = 0;

                // qthis = 1 when this cell is the one the query asked for
                int qthis = (query != null && i == query.index && ReadString(string_table, schema[j].column_name) == query.name) ? 1 : 0;

                if (print != 0)
                {
                    Util.printf_indent(indent);
                    Console.Write("{0:x8} {1:x2} {2} = ", row_offset - row_start_offset, type, ReadString(string_table, schema[j].column_name));
                }

                if (qthis != 0)
                {
                    result.found = 1;
                    result.type = schema[j].type & COLUMN_TYPE_MASK;
                }

                switch (schema[j].type & COLUMN_STORAGE_MASK)
                {
                    case COLUMN_STORAGE_PERROW:
                        break;
                    case COLUMN_STORAGE_CONSTANT:
                        constant = 1;
                        break;
                    case COLUMN_STORAGE_ZERO:
                        // no stored value at all; implicit zero
                        if (print != 0) { Console.WriteLine("UNDEFINED"); }
                        if (qthis != 0) { result.value_u64 = 0; }
                        continue;
                    default:
                        ErrorStuff.CHECK_ERROR(true, "unknown storage class");
                        break;
                }

                if (true)
                {
                    long data_offset;
                    int bytes_read = 0;

                    // constants read from their schema slot; per-row values
                    // read from the current row cursor
                    if (constant != 0)
                    {
                        data_offset = constant_offset;
                        if (print != 0) { Console.Write("constant "); }
                    }
                    else
                    {
                        data_offset = row_offset;
                    }

                    if (qthis != 0) { result.data_position = data_offset; }

                    switch (type & COLUMN_TYPE_MASK)
                    {
                        case COLUMN_TYPE_STRING:
                            {
                                uint string_offset;
                                string_offset = Util.get_32_be_seek(data_offset, infile);
                                bytes_read = 4;
                                if (print != 0) { Console.WriteLine("\"{0}\"", ReadString(string_table, string_offset)); }
                                if (qthis != 0) { result.value_string = string_offset; }
                            }
                            break;
                        case COLUMN_TYPE_DATA:
                            {
                                uint vardata_offset, vardata_size;
                                vardata_offset = Util.get_32_be_seek(data_offset, infile);
                                vardata_size = Util.get_32_be(infile);
                                bytes_read = 8;
                                if (print != 0)
                                {
                                    Console.Write("[0x{0:x8}]", vardata_offset);
                                    Console.WriteLine(" (size 0x{0:x8})", vardata_size);
                                }
                                if (qthis != 0) { result.value_data = new offset_size_pair() { offset = vardata_offset, size = vardata_size }; }
                                if (vardata_size != 0 && print != 0)
                                {
                                    // assume that the data is another table
                                    analyze_utf(infile, table_info_table_offset + 8 + table_info_data_offset + vardata_offset, indent, print, null);
                                }
                            }
                            break;
                        case COLUMN_TYPE_8BYTE:
                            {
                                ulong value = Util.get_64_be_seek(data_offset, infile);
                                if (print != 0) { Console.WriteLine("0x{0:x}", value); }
                                if (qthis != 0) { result.value_u64 = value; }
                                bytes_read = 8;
                                break;
                            }
                        case COLUMN_TYPE_4BYTE2:
                        case COLUMN_TYPE_4BYTE:
                            if ((type & COLUMN_TYPE_MASK) == COLUMN_TYPE_4BYTE2 && print != 0) { Console.Write("type 2 "); }
                            {
                                uint value = Util.get_32_be_seek(data_offset, infile);
                                if (print != 0) { Console.WriteLine("{0}", value); }
                                if (qthis != 0) { result.value_u32 = value; }
                                bytes_read = 4;
                            }
                            break;
                        case COLUMN_TYPE_2BYTE2:
                        case COLUMN_TYPE_2BYTE:
                            if ((type & COLUMN_TYPE_MASK) == COLUMN_TYPE_2BYTE2 && print != 0) { Console.Write("type 2 "); }
                            {
                                ushort value = Util.get_16_be_seek(data_offset, infile);
                                if (print != 0) { Console.WriteLine("{0}", value); }
                                if (qthis != 0) { result.value_u16 = value; }
                                bytes_read = 2;
                            }
                            break;
                        case COLUMN_TYPE_FLOAT:
                            if (true)
                            {
                                // stored as raw big-endian bits; reinterpret for display
                                uint int_float;
                                int_float = Util.get_32_be_seek(data_offset, infile);
                                if (print != 0) { Console.WriteLine("{0}", Util.reinterpret_to_float(int_float)); }
                                if (qthis != 0) { result.value_u32 = int_float; }
                            }
                            bytes_read = 4;
                            break;
                        case COLUMN_TYPE_1BYTE2:
                        case COLUMN_TYPE_1BYTE:
                            if ((type & COLUMN_TYPE_MASK) == COLUMN_TYPE_1BYTE2 && print != 0) { Console.Write("type 2 "); }
                            {
                                byte value = Util.get_byte_seek(data_offset, infile);
                                if (print != 0) { Console.WriteLine("{0}", value); }
                                if (qthis != 0) { result.value_u8 = value; }
                                bytes_read = 1;
                            }
                            break;
                        default:
                            ErrorStuff.CHECK_ERROR(true, "unknown normal type");
                            break;
                    }

                    // only per-row values advance the row cursor
                    if (constant == 0) { row_offset += bytes_read; }
                } // useless if end
            } // column for loop end

            indent -= INDENT_LEVEL;
            if (print != 0) { Util.printf_indent(indent); Console.WriteLine("}"); }

            // sanity check: the per-row reads must exactly span the declared row width
            ErrorStuff.CHECK_ERROR(row_offset - row_start_offset != table_info_row_width, "column widths do not add up to row width");

            if (query != null && print == 0 && i >= query.index) break;
        } // row for loop end
    } // explore values block end

    indent -= INDENT_LEVEL;
    if (print != 0) { Util.printf_indent(indent); Console.WriteLine("}"); }

    string_table = null;
    schema = null;
    return result;
}
// Fetch the named column from the given row and insist it is a DATA
// (offset/size) cell; any other column type is a fatal error.
public static offset_size_pair query_utf_data(Stream infile, long offset, int index, string name)
{
    utf_query_result result = query_utf_key(infile, offset, index, name);
    bool wrongType = result.type != COLUMN_TYPE_DATA;
    ErrorStuff.CHECK_ERROR(wrongType, "value is not data");
    return result.value_data;
}
// Fetch the named column from the given row and insist it is a STRING
// cell; returns the string-table offset of the value.
public static uint query_utf_string(Stream infile, long offset, int index, string name)
{
    utf_query_result result = query_utf_key(infile, offset, index, name);
    bool wrongType = result.type != COLUMN_TYPE_STRING;
    ErrorStuff.CHECK_ERROR(wrongType, "value is not a string");
    return result.value_string;
}
// Fetch the named column from the given row and insist it is a 2-byte
// unsigned integer cell.
public static ushort query_utf_2byte(Stream infile, long offset, int index, string name)
{
    utf_query_result result = query_utf_key(infile, offset, index, name);
    bool wrongType = result.type != COLUMN_TYPE_2BYTE;
    ErrorStuff.CHECK_ERROR(wrongType, "value is not a 2 byte uint");
    return result.value_u16;
}