public override ImageData Read (Stream stream, ImageMetaData info)
{
    // An APS file wraps an inner image: decompress (or copy) the payload,
    // then let the base format decode the embedded picture.
    var meta = (ApsMetaData)info;
    stream.Position = meta.DataOffset;

    byte[] payload;
    if (meta.IsPacked)
    {
        // LZ-compressed payload
        using (var lz = new LzReader (stream, meta.PackedSize, meta.UnpackedSize))
        {
            lz.Unpack();
            payload = lz.Data;
        }
    }
    else
    {
        // stored payload, read verbatim
        using (var input = new ArcView.Reader (stream))
            payload = input.ReadBytes ((int)meta.UnpackedSize);
    }

    using (var unpacked = new MemoryStream (payload))
    {
        var inner_info = base.ReadMetaData (unpacked);
        if (null == inner_info)
            throw new InvalidFormatException();
        return base.Read (unpacked, inner_info);
    }
}
// Decode a version-2 CBG image in parallel, one task per 8-scanline band.
// Expected stream layout at the current position: a 0x10-entry weight table,
// a 0xB0-entry weight table, (Height/8 + 1) int32 block offsets, then the
// packed pixel data.  Decoded pixels land in decoder.Output (4 bytes/pixel).
void UnpackV2(ParallelCbgDecoder decoder) {
    var base_offset = Input.Position;
    // Build the two Huffman trees from the weight tables in the stream.
    decoder.Tree1 = new HuffmanTree(ReadWeightTable(Input, 0x10), true);
    decoder.Tree2 = new HuffmanTree(ReadWeightTable(Input, 0xB0), true);
    int y_blocks = decoder.Height / 8;
    var offsets = new int[y_blocks + 1];
    // Stored offsets are relative to base_offset; rebase them so they index
    // directly into the decoder.Input array read below.
    int input_base = (int)(Input.Position + offsets.Length * 4 - base_offset);
    using (var reader = new ArcView.Reader(Input)) {
        for (int i = 0; i < offsets.Length; ++i) {
            offsets[i] = reader.ReadInt32() - input_base;
        }
        // Remainder of the stream is the compressed block data.
        decoder.Input = reader.ReadBytes((int)(Input.Length - Input.Position));
    }
    // Bytes skipped at the start of each block; presumably row-alignment
    // padding derived from the width -- TODO confirm against UnpackBlock.
    int pad_skip = ((decoder.Width >> 3) + 7) >> 3;
    var tasks = new List <Task> (y_blocks + 1);
    decoder.Output = new byte[decoder.Width * decoder.Height * 4]; // 4 bytes per pixel
    int dst = 0;
    for (int i = 0; i < y_blocks; ++i) {
        int block_offset = offsets[i] + pad_skip;
        // Last block extends to the end of the input buffer.
        int next_offset = i + 1 == y_blocks ? decoder.Input.Length : offsets[i + 1];
        // Copy dst into a per-iteration local so the lambda captures this
        // iteration's value rather than the shared, mutated variable.
        int closure_dst = dst;
        var task = Task.Run(() => decoder.UnpackBlock(block_offset, next_offset - block_offset, closure_dst));
        tasks.Add(task);
        dst += decoder.Width * 32; // advance 8 rows * 4 bytes/pixel
    }
    if (32 == m_info.BPP) {
        // Alpha plane is stored after the last color block.
        var task = Task.Run(() => decoder.UnpackAlpha(offsets[y_blocks]));
        tasks.Add(task);
    }
    var complete = Task.WhenAll(tasks);
    complete.Wait(); // block until every band (and the alpha plane) is decoded
    Format = decoder.HasAlpha ? PixelFormats.Bgra32 : PixelFormats.Bgr32;
    Stride = decoder.Width * 4;
    m_output = decoder.Output;
}
public override ImageMetaData ReadMetaData (Stream stream)
{
    // AKB header: 4-byte signature, 16-bit dimensions, flags word,
    // 4-byte background color, then the bounding box of the image data.
    using (var header = new ArcView.Reader (stream))
    {
        header.ReadInt32(); // skip signature
        var meta = new AkbMetaData();
        meta.Width  = header.ReadUInt16();
        meta.Height = header.ReadUInt16();
        // only the low 16 bits of the flags are significant;
        // zero selects 32bpp, anything else 24bpp
        int flags = header.ReadInt32() & 0xFFFF;
        meta.BPP = (flags == 0) ? 32 : 24;
        meta.Background = header.ReadBytes (4);
        meta.OffsetX = header.ReadInt32();
        meta.OffsetY = header.ReadInt32();
        // header stores right/bottom edges; convert to width/height
        meta.InnerWidth  = header.ReadInt32() - meta.OffsetX;
        meta.InnerHeight = header.ReadInt32() - meta.OffsetY;
        // reject inconsistent headers where the inner region exceeds the canvas
        if (meta.InnerWidth > meta.Width || meta.InnerHeight > meta.Height)
            return null;
        return meta;
    }
}
} // 'AKB '

// Parse an AKB image header.  Returns null when the header is inconsistent
// (inner region larger than the full canvas), signalling "not this format".
public override ImageMetaData ReadMetaData(Stream stream) {
    using (var reader = new ArcView.Reader(stream)) {
        reader.ReadInt32(); // skip 4-byte signature
        var info = new AkbMetaData();
        info.Width = reader.ReadUInt16();
        info.Height = reader.ReadUInt16();
        // Only the low 16 bits of the flags word matter: zero selects
        // 32bpp, any other value 24bpp.
        int flags = reader.ReadInt32() & 0xFFFF;
        info.BPP = 0 == flags ? 32 : 24;
        info.Background = reader.ReadBytes(4); // 4-byte background color -- byte order TODO confirm
        info.OffsetX = reader.ReadInt32();
        info.OffsetY = reader.ReadInt32();
        // Header stores the right/bottom edges; convert to width/height.
        info.InnerWidth = reader.ReadInt32() - info.OffsetX;
        info.InnerHeight = reader.ReadInt32() - info.OffsetY;
        if (info.InnerWidth > info.Width || info.InnerHeight > info.Height) {
            return(null);
        }
        return(info);
    }
}
// Open a *.paz archive.  The index is optionally XOR-obfuscated and always
// Blowfish-encrypted with a per-archive key; multi-volume archives continue
// in sibling files named <name>.pazA, <name>.pazB, ...
// Returns null when the file doesn't match the PAZ layout.
public override ArcFile TryOpen(ArcView file) {
    if (!file.Name.HasExtension(".paz")) {
        return(null);
    }
    uint signature = file.View.ReadUInt32(0);
    // XXX encryption is queried for every .paz file
    var scheme = QueryEncryption(file.Name, signature);
    // v1+ archives carry a 0x20-byte header before the index size field.
    uint start_offset = scheme.Version > 0 ? 0x20u : 0u;
    uint index_size = file.View.ReadUInt32(start_offset);
    start_offset += 4;
    // The high byte of the size doubles as an XOR obfuscation key; when
    // non-zero, the size field itself is XORed with that byte replicated.
    byte xor_key = (byte)(index_size >> 24);
    if (xor_key != 0) {
        index_size ^= (uint)(xor_key << 24 | xor_key << 16 | xor_key << 8 | xor_key);
    }
    // Sanity: index size must be 8-byte aligned and fit inside the file.
    if (0 != (index_size & 7) || index_size + start_offset >= file.MaxOffset) {
        return(null);
    }
    // Collect continuation volumes; entries may be placed past the end of
    // this file, so max_offset accumulates the sizes of all parts.
    var arc_list = new List <Entry>();
    var arc_dir = VFS.GetDirectoryName(file.Name);
    long max_offset = file.MaxOffset;
    for (char suffix = 'A'; suffix <= 'Z'; ++suffix) {
        var part_name = VFS.CombinePath(arc_dir, file.Name + suffix);
        if (!VFS.FileExists(part_name)) {
            break;
        }
        var part = VFS.FindFile(part_name);
        arc_list.Add(part);
        max_offset += part.Size;
    }
    // The lowercased base name selects decryption keys and entry defaults.
    var arc_name = Path.GetFileNameWithoutExtension(file.Name).ToLowerInvariant();
    bool is_audio = AudioPazNames.Contains(arc_name);
    bool is_video = VideoPazNames.Contains(arc_name);
    Stream input = file.CreateStream(start_offset, index_size);
    byte[] video_key = null;
    List <Entry> dir;
    try {
        // Index decryption pipeline: optional XOR layer, then Blowfish.
        if (xor_key != 0) {
            input = new XoredStream(input, xor_key);
        }
        var enc = new Blowfish(scheme.ArcKeys[arc_name].IndexKey);
        input = new InputCryptoStream(input, enc.CreateDecryptor());
        using (var index = new ArcView.Reader(input)) {
            int count = index.ReadInt32();
            if (!IsSaneCount(count)) {
                return(null);
            }
            // Video archives store a 0x100-byte key table before the entries.
            if (is_video) {
                video_key = index.ReadBytes(0x100);
            }
            dir = new List <Entry> (count);
            for (int i = 0; i < count; ++i) {
                var name = index.BaseStream.ReadCString();
                var entry = FormatCatalog.Instance.Create <PazEntry> (name);
                entry.Offset = index.ReadInt64();
                entry.UnpackedSize = index.ReadUInt32();
                entry.Size = index.ReadUInt32();
                entry.AlignedSize = index.ReadUInt32();
                if (!entry.CheckPlacement(max_offset)) {
                    return(null);
                }
                entry.IsPacked = index.ReadInt32() != 0;
                if (string.IsNullOrEmpty(entry.Type) && is_audio) {
                    entry.Type = "audio";
                }
                if (scheme.Version > 0) {
                    // v1+: derive the per-entry key from "<name> <size:X08> <password>"
                    // encoded in cp932.
                    string password = "";
                    if (!entry.IsPacked && scheme.TypeKeys != null) {
                        password = scheme.GetTypePassword(name, is_audio);
                    }
                    if (!string.IsNullOrEmpty(password) || is_video) {
                        password = string.Format("{0} {1:X08} {2}", name.ToLowerInvariant(), entry.UnpackedSize, password);
                        entry.Key = Encodings.cp932.GetBytes(password);
                    }
                }
                dir.Add(entry);
            }
        }
    } finally {
        input.Dispose();
    }
    // Open views over the continuation volumes; dispose them all on failure.
    List <ArcView> parts = null;
    if (arc_list.Count > 0) {
        parts = new List <ArcView> (arc_list.Count);
        try {
            foreach (var arc_entry in arc_list) {
                var arc_file = VFS.OpenView(arc_entry);
                parts.Add(arc_file);
            }
        } catch {
            foreach (var part in parts) {
                part.Dispose();
            }
            throw;
        }
    }
    if (is_video) {
        if (scheme.Version < 1) {
            // v0 stores the forward byte permutation; invert it here so the
            // archive object always receives the decoding table.
            var table = new byte[0x100];
            for (int i = 0; i < 0x100; ++i) {
                table[video_key[i]] = (byte)i;
            }
            video_key = table;
        }
        return(new MovPazArchive(file, this, dir, scheme.Version, xor_key, video_key, parts));
    }
    return(new PazArchive(file, this, dir, scheme.Version, xor_key, scheme.ArcKeys[arc_name].DataKey, parts));
}
// Decode a TLG6 image.  The picture is partitioned into blocks of
// TVP_TLG6_W_BLOCK_SIZE x TVP_TLG6_H_BLOCK_SIZE pixels; per-block chroma
// filter types are LZSS-compressed, and pixel values are entropy coded
// (only the Golomb method is implemented here).  Returns null on a
// truncated filter-type section.
ImageData ReadV6(Stream stream, TlgMetaData info) {
    using (var src = new ArcView.Reader(stream)) {
        int width = (int)info.Width;
        int height = (int)info.Height;
        int colors = info.BPP / 8; // color components per pixel
        int max_bit_length = src.ReadInt32();
        int x_block_count = ((width - 1) / TVP_TLG6_W_BLOCK_SIZE) + 1;
        int y_block_count = ((height - 1) / TVP_TLG6_H_BLOCK_SIZE) + 1;
        int main_count = width / TVP_TLG6_W_BLOCK_SIZE; // full-width block columns
        int fraction = width - main_count * TVP_TLG6_W_BLOCK_SIZE; // leftover columns
        var image_bits = new uint[height * width]; // decoded output, one uint per pixel
        var bit_pool = new byte[max_bit_length / 8 + 5];
        var pixelbuf = new uint[width * TVP_TLG6_H_BLOCK_SIZE + 1];
        var filter_types = new byte[x_block_count * y_block_count];
        var zeroline = new uint[width];
        var LZSS_text = new byte[4096];
        // initialize zero line (virtual y=-1 line)
        uint zerocolor = 3 == colors ? 0xff000000 : 0x00000000;
        for (var i = 0; i < width; ++i) {
            zeroline[i] = zerocolor;
        }
        uint[] prevline = zeroline;
        int prevline_index = 0;
        // initialize LZSS text (used by chroma filter type codes) with the
        // fixed pattern the encoder assumes as the dictionary preload
        int p = 0;
        for (uint i = 0; i < 32 * 0x01010101; i += 0x01010101) {
            for (uint j = 0; j < 16 * 0x01010101; j += 0x01010101) {
                LZSS_text[p++] = (byte)(i & 0xff);
                LZSS_text[p++] = (byte)(i >> 8 & 0xff);
                LZSS_text[p++] = (byte)(i >> 16 & 0xff);
                LZSS_text[p++] = (byte)(i >> 24 & 0xff);
                LZSS_text[p++] = (byte)(j & 0xff);
                LZSS_text[p++] = (byte)(j >> 8 & 0xff);
                LZSS_text[p++] = (byte)(j >> 16 & 0xff);
                LZSS_text[p++] = (byte)(j >> 24 & 0xff);
            }
        }
        // read chroma filter types.
        // chroma filter types are compressed via LZSS as used by TLG5.
        {
            int inbuf_size = src.ReadInt32();
            byte[] inbuf = src.ReadBytes(inbuf_size);
            if (inbuf_size != inbuf.Length) {
                return(null); // truncated stream
            }
            TVPTLG5DecompressSlide(filter_types, inbuf, inbuf_size, LZSS_text, 0);
        }
        // for each horizontal block group ...
        for (int y = 0; y < height; y += TVP_TLG6_H_BLOCK_SIZE) {
            int ylim = y + TVP_TLG6_H_BLOCK_SIZE;
            if (ylim >= height) {
                ylim = height; // clamp the final, possibly partial band
            }
            int pixel_count = (ylim - y) * width;
            // decode values for each color component of this band
            for (int c = 0; c < colors; c++) {
                // read bit length
                int bit_length = src.ReadInt32();
                // get compress method from the two most significant bits
                int method = (bit_length >> 30) & 3;
                bit_length &= 0x3fffffff;
                // compute byte length, rounding up to whole bytes
                int byte_length = bit_length / 8;
                if (0 != (bit_length % 8)) {
                    byte_length++;
                }
                // read source from input
                // NOTE(review): the return value is ignored -- a short read
                // would leave stale bytes in bit_pool; confirm callers always
                // supply streams that satisfy full reads.
                src.Read(bit_pool, 0, byte_length);
                // decode values
                // two most significant bits of bitlength are
                // entropy coding method;
                // 00 means Golomb method,
                // 01 means Gamma method (not yet supported),
                // 10 means modified LZSS method (not yet supported),
                // 11 means raw (uncompressed) data (not yet supported).
                switch (method) {
                case 0:
                    if (c == 0 && colors != 1) {
                        TVPTLG6DecodeGolombValuesForFirst(pixelbuf, pixel_count, bit_pool);
                    } else {
                        TVPTLG6DecodeGolombValues(pixelbuf, c * 8, pixel_count, bit_pool);
                    }
                    break;
                default:
                    throw new InvalidFormatException("Unsupported entropy coding method");
                }
            }
            // for each line of this band, apply the per-block filters
            int ft = (y / TVP_TLG6_H_BLOCK_SIZE) * x_block_count; // within filter_types
            int skipbytes = (ylim - y) * TVP_TLG6_W_BLOCK_SIZE;
            for (int yy = y; yy < ylim; yy++) {
                int curline = yy * width;
                int dir = (yy & 1) ^ 1; // direction alternates per scanline
                int oddskip = ((ylim - yy - 1) - (yy - y));
                if (0 != main_count) {
                    // full-width block columns
                    int start = ((width < TVP_TLG6_W_BLOCK_SIZE) ? width : TVP_TLG6_W_BLOCK_SIZE) * (yy - y);
                    TVPTLG6DecodeLineGeneric(
                        prevline, prevline_index, image_bits, curline, width, 0, main_count,
                        filter_types, ft, skipbytes, pixelbuf, start, zerocolor, oddskip, dir);
                }
                if (main_count != x_block_count) {
                    // trailing partial-width block column
                    int ww = fraction;
                    if (ww > TVP_TLG6_W_BLOCK_SIZE) {
                        ww = TVP_TLG6_W_BLOCK_SIZE;
                    }
                    int start = ww * (yy - y);
                    TVPTLG6DecodeLineGeneric(
                        prevline, prevline_index, image_bits, curline, width, main_count, x_block_count,
                        filter_types, ft, skipbytes, pixelbuf, start, zerocolor, oddskip, dir);
                }
                // current line becomes the predictor for the next one
                prevline = image_bits;
                prevline_index = curline;
                // Array.Copy (image_bits, curline, prevline, 0, width);
            }
        }
        // Wrap the decoded pixel buffer in a frozen BitmapSource without
        // an intermediate copy.
        unsafe {
            fixed(void *data = image_bits) {
                int stride = width * 4;
                PixelFormat format = 32 == info.BPP ? PixelFormats.Bgra32 : PixelFormats.Bgr32;
                var bitmap = BitmapSource.Create(width, height, ImageData.DefaultDpiX, ImageData.DefaultDpiY,
                                                 format, null, (IntPtr)data, height * stride, stride);
                bitmap.Freeze();
                return(new ImageData(bitmap, info));
            }
        }
    }
}
// Decode a version-2 CBG image: read the two Huffman weight tables, the
// per-band offset table and the packed payload, then decode each 8-scanline
// band on its own thread-pool task (plus one task for the alpha plane of
// 32bpp images).
void UnpackV2 (ParallelCbgDecoder decoder)
{
    var tree_origin = Input.Position;
    decoder.Tree1 = new HuffmanTree (ReadWeightTable (Input, 0x10), true);
    decoder.Tree2 = new HuffmanTree (ReadWeightTable (Input, 0xB0), true);

    int band_count = decoder.Height / 8;
    var offsets = new int[band_count+1];
    // stored offsets are relative to tree_origin; rebase them so they
    // index directly into decoder.Input
    int rebase = (int)(Input.Position + 4 * offsets.Length - tree_origin);
    using (var input = new ArcView.Reader (Input))
    {
        for (int i = 0; i < offsets.Length; ++i)
            offsets[i] = input.ReadInt32() - rebase;
        decoder.Input = input.ReadBytes ((int)(Input.Length - Input.Position));
    }

    int pad_skip = ((decoder.Width >> 3) + 7) >> 3;
    decoder.Output = new byte[4 * decoder.Width * decoder.Height];
    var tasks = new List<Task> (band_count+1);
    int band_stride = decoder.Width * 32; // 8 rows of 4 bytes per pixel
    for (int i = 0; i < band_count; ++i)
    {
        int begin = offsets[i] + pad_skip;
        // last band runs to the end of the input buffer
        int end = (i+1 == band_count) ? decoder.Input.Length : offsets[i+1];
        int out_pos = i * band_stride; // per-iteration local for the closure
        tasks.Add (Task.Run (() => decoder.UnpackBlock (begin, end-begin, out_pos)));
    }
    if (m_info.BPP == 32)
        tasks.Add (Task.Run (() => decoder.UnpackAlpha (offsets[band_count])));

    Task.WhenAll (tasks).Wait();

    Format = decoder.HasAlpha ? PixelFormats.Bgra32 : PixelFormats.Bgr32;
    Stride = decoder.Width * 4;
    m_output = decoder.Output;
}