/*
         * Function: Compress
         * Purpose: compress part of a byte array into a zlib block
         * Input: - buffer: source byte array
         *        - offset: starting offset inside the array
         *        - count: number of bytes to compress, starting from the offset
         * Output: compressed byte array block with the following structure:
         *         - magic word
         *         - max segment size
         *         - total compressed size
         *         - total uncompressed size
         *         - segment list
         *         - compressed data list
         */
        public static byte[] Compress(byte[] buffer, int offset, int count)
        {
            if (buffer == null)
                throw new ArgumentNullException(nameof(buffer));
            if (offset < 0 || count < 0)
                throw new ArgumentOutOfRangeException(count < 0 ? nameof(count) : nameof(offset));
            if (offset + count > buffer.Length)
                throw new ArgumentException("offset and count describe a range outside the buffer");

            MemoryStream headBlock = new MemoryStream();
            MemoryStream dataBlock = new MemoryStream();
            DeflaterOutputStream zipStream;

            int numSeg = (int)Math.Ceiling((double)count / (double)maxSegmentSize);

            headBlock.WriteUInt32(magic);
            headBlock.WriteUInt32(maxSegmentSize);
            headBlock.WriteUInt32(0x0);            //total compressed size, still to calculate
            headBlock.WriteInt32(count);          //total uncompressed size

            for (int i = count; i > 0; i -= (int)maxSegmentSize)
            {
                int copyBytes = Math.Min(i, (int)maxSegmentSize);
                uint precCompSize = (uint)dataBlock.Length;
                zipStream = new DeflaterOutputStream(dataBlock);
                zipStream.Write(buffer, offset + (count - i), copyBytes);
                zipStream.Flush();
                zipStream.Finish();
                headBlock.WriteUInt32((uint)dataBlock.Length - precCompSize); //compressed segment size
                headBlock.WriteInt32(copyBytes); //uncompressed segment size
                //Console.WriteLine("  Segment size: {0}, total read: {1}, compr size: {2}", maxSegmentSize, copyBytes, (uint)dataBlock.Length - precCompSize);
            }

            headBlock.Seek(8, SeekOrigin.Begin);
            headBlock.WriteInt32((int)dataBlock.Length); // total compressed size

            byte[] finalBlock = new byte[headBlock.Length + dataBlock.Length];
            Buffer.BlockCopy(headBlock.ToArray(), 0, finalBlock, 0, (int)headBlock.Length);
            Buffer.BlockCopy(dataBlock.ToArray(), 0, finalBlock, (int)headBlock.Length, (int)dataBlock.Length);
            headBlock.Close();
            dataBlock.Close();

            return finalBlock;
        }
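        /*
         * Usage sketch (illustrative, not part of the original source): walks the block
         * layout documented above, assuming the WriteUInt32/WriteInt32 extensions emit
         * little-endian values, which is what BinaryReader reads back.
         */
        public static void InspectCompressedBlock(byte[] block)
        {
            using (var reader = new BinaryReader(new MemoryStream(block)))
            {
                uint magicWord = reader.ReadUInt32();          // magic word
                uint segmentSize = reader.ReadUInt32();        // max segment size
                uint totalCompressed = reader.ReadUInt32();    // total compressed size
                int totalUncompressed = reader.ReadInt32();    // total uncompressed size

                int numSegments = (int)Math.Ceiling((double)totalUncompressed / segmentSize);
                Console.WriteLine("Magic: 0x{0:X8}, {1} segment(s), {2} -> {3} bytes",
                    magicWord, numSegments, totalUncompressed, totalCompressed);

                for (int i = 0; i < numSegments; i++)
                {
                    uint segCompressed = reader.ReadUInt32();  // compressed segment size
                    int segUncompressed = reader.ReadInt32();  // uncompressed segment size
                    Console.WriteLine("  Segment {0}: {1} -> {2} bytes", i, segUncompressed, segCompressed);
                }
                // The concatenated compressed segments follow immediately after the segment list.
            }
        }

        /// <summary>
        /// Write a single field value to the stream as a typed property (IntProperty,
        /// BoolProperty, ByteProperty, FloatProperty, NameProperty, StrProperty,
        /// ArrayProperty or StructProperty).
        /// </summary>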
        public static void WriteProperty(this Stream stream, string fieldName, Type fieldType, 
            object fieldValue, List<string> nameTable, int? arrayIndex = null)
        {
            if (fieldType.IsArray)
            {
                var array = (Array)fieldValue;
                for (var i = 0; i < array.Length; i++)
                {
                    var value = array.GetValue(i);
                    if (value != null)
                        stream.WriteProperty(fieldName, fieldType.GetElementType(), value, nameTable, i);
                }
                return;
            }

            var fieldBodyStream = new MemoryStream();

            var baseProperty = new BaseProperty
            {
                NameIdx = AddToNameTable(nameTable, fieldName),
                ArrayIdx = arrayIndex ?? 0
            };

            if (fieldType == typeof (int))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "IntProperty");
                var value = (int) fieldValue;
                fieldBodyStream.WriteInt32(value);
            }
            else if (fieldType == typeof (bool))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "BoolProperty");
                var value = (bool) fieldValue;
                fieldBodyStream.WriteBool(value);
            }
            else if (fieldType.IsEnum)
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "ByteProperty");
                var value1 = AddToNameTable(nameTable, fieldType.Name); // Enum type name
                var value2 = AddToNameTable(nameTable, fieldValue.ToString()); // Enum value
                fieldBodyStream.WriteInt32(value1);
                fieldBodyStream.WriteInt32(value2);
            }
            else if (fieldType == typeof (float))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "FloatProperty");
                var value = (float) fieldValue;
                fieldBodyStream.WriteFloat(value);
            }
            else if (fieldType == typeof (NameProperty))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "NameProperty");
                var nameProp = (NameProperty) fieldValue;
                var value = AddToNameTable(nameTable, nameProp.Name);
                fieldBodyStream.WriteInt32(value);
            }
            else if (fieldType == typeof (string))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "StrProperty");
                var value = (string) fieldValue;
                switch (fieldName)
                {
                    case "SaveDetails":
                    case "FriendlyName":
                        fieldBodyStream.WriteUnicodeString(value);
                        break;
                    default:
                        fieldBodyStream.WriteString(value);
                        break;
                }
                //fieldBodyStream.WriteUnicodeString(value); // Save every string as unicode
            }
            else if (fieldType.IsGenericType && (fieldType.GetGenericTypeDefinition() == typeof(List<>)))
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "ArrayProperty");
                var list = fieldValue as IList;
                var elementType = fieldType.GetGenericArguments()[0];
                fieldBodyStream.WriteInt32(list.Count); // numElements
                for (var i = 0; i < list.Count; i++)
                {
                    if (elementType == typeof (int))
                    {
                        fieldBodyStream.WriteInt32((int) list[i]);
                    }
                    else if (elementType == typeof (string))
                    {
                        fieldBodyStream.WriteString((string) list[i]);
                    }
                    else if (elementType == typeof (NameProperty))
                    {
                        var value = AddToNameTable(nameTable, ((NameProperty) list[i]).Name);
                        fieldBodyStream.WriteInt32(value);
                    }
                    else
                    {
                        fieldBodyStream.SerializeObject(list[i], nameTable);
                    }
                }
            }
            else if (!fieldType.IsGenericType && !fieldType.IsValueType && !fieldType.IsPrimitive && fieldType.IsClass)
            {
                baseProperty.TypeIdx = AddToNameTable(nameTable, "StructProperty");

                fieldBodyStream.WriteInt32(AddToNameTable(nameTable, fieldType.Name)); // StructNameIdx

                if (fieldType == typeof (Vector))
                {
                    var value = (Vector) fieldValue;
                    fieldBodyStream.WriteFloat(value.X);
                    fieldBodyStream.WriteFloat(value.Y);
                    fieldBodyStream.WriteFloat(value.Z);
                }
                else if (fieldType == typeof (Rotator))
                {
                    var value = (Rotator) fieldValue;
                    fieldBodyStream.WriteInt32(value.Pitch);
                    fieldBodyStream.WriteInt32(value.Yaw);
                    fieldBodyStream.WriteInt32(value.Roll);
                }
                else
                {
                    fieldBodyStream.SerializeObject(fieldValue, nameTable);
                }
            }

            baseProperty.PropertySize = (int) fieldBodyStream.Length;
            stream.WriteBaseProperty(baseProperty);
            fieldBodyStream.WriteTo(stream);
        }
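        /*
         * Illustrative sketch only (hypothetical helper): one way a caller could drive
         * WriteProperty over an object's public fields via reflection. The SerializeObject
         * extension used above is not shown in this snippet and may work differently,
         * e.g. it probably also writes a terminating property.
         */
        public static void WriteProperties(this Stream stream, object obj, List<string> nameTable)
        {
            foreach (var field in obj.GetType().GetFields())
            {
                var value = field.GetValue(obj);
                if (value != null)
                    stream.WriteProperty(field.Name, field.FieldType, value, nameTable);
            }
        }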
        /// <summary>
        /// Append all array data to the binary stream.
        /// </summary>
        private void WriteBinaryArrayData(NpgsqlNativeTypeInfo TypeInfo, Array nativeData, NativeToBackendTypeConverterOptions options, MemoryStream dst, int dimensionOffset, int[] dimensionOffsets)
        {
            int dimensionLength = nativeData.GetLength(dimensionOffset);
            int dimensionLBound = nativeData.GetLowerBound(dimensionOffset);

            if (dimensionOffset < nativeData.Rank - 1)
            {
                // Drill down recursively until we hit a single dimension array.
                for (int i = dimensionLBound ; i < dimensionLBound + dimensionLength ; i++)
                {
                    dimensionOffsets[dimensionOffset] = i;

                    WriteBinaryArrayData(TypeInfo, nativeData, options, dst, dimensionOffset + 1, dimensionOffsets);
                }
            }
            else
            {
                // Write the individual array elements to the output stream.
                for (int i = dimensionLBound ; i < dimensionLBound + dimensionLength ; i++)
                {
                    object elementNative;

                    dimensionOffsets[dimensionOffset] = i;
                    elementNative = nativeData.GetValue(dimensionOffsets);

                    if (elementNative == null || elementNative == DBNull.Value)
                    {
                        // Write length identifier -1 indicating NULL value.
                        dst.WriteInt32(-1);
                    }
                    else
                    {
                        byte[] elementBinary;

                        elementBinary = (byte[])_elementConverter.ConvertToBackend(elementNative, true, options);

                        // Write length identifier.
                        dst.WriteInt32(elementBinary.Length);
                        // Write element data.
                        dst.Write(elementBinary, 0, elementBinary.Length);
                    }
                }
            }
        }
        /// <summary>
        /// Convert a System.Array to PG binary format.
        /// Write the array header and prepare to write array data to the stream.
        /// </summary>
        public byte[] ArrayToArrayBinary(NpgsqlNativeTypeInfo TypeInfo, object oNativeData, NativeToBackendTypeConverterOptions options)
        {
            Array NativeData = (Array)oNativeData;
            MemoryStream dst = new MemoryStream();

            // Write the number of dimensions in the array.
            dst.WriteInt32(NativeData.Rank);
            // Placeholder for null bitmap flag, which isn't used?
            dst.WriteInt32(0);
            // Write the OID of the elements of the array.
            dst.WriteInt32(options.OidToNameMapping[_elementConverter.Name].OID);

            // Write dimension descriptors.
            for (int i = 0 ; i < NativeData.Rank ; i++)
            {
                // Number of elements in the dimension.
                dst.WriteInt32(NativeData.GetLength(i));
                // Lower bounds of the dimension, 1-based for SQL.
                dst.WriteInt32(NativeData.GetLowerBound(i) + 1);
            }

            int[] dimensionOffsets = new int[NativeData.Rank];

            // Write all array data.
            WriteBinaryArrayData(TypeInfo, NativeData, options, dst, 0, dimensionOffsets);

            return dst.ToArray();
        }
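        /*
         * Sanity-check sketch (illustrative, not part of the original source): the size
         * ArrayToArrayBinary should produce for an array of fixed-width, non-null
         * elements; elementSize is an assumption (e.g. 4 for int4).
         */
        private static int ExpectedBinaryArraySize(Array array, int elementSize)
        {
            // Three header ints, then (length, lower bound) per dimension.
            int size = 12 + 8 * array.Rank;
            // Each element is a 4-byte length prefix followed by its data; a NULL element is 4 bytes only.
            size += array.Length * (4 + elementSize);
            return size;
        }

        /// <summary>
        /// Decompress an ME1/ME2 package into listsStream and patch its header so the
        /// file is marked as uncompressed.
        /// </summary>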
        void ReadCompressedME1And2(MemoryStream tempStream)
        {
            DebugOutput.PrintLn("File is compressed");
            listsStream = SaltLZOHelper.DecompressPCC(tempStream, this);

            //Correct the header
            compressed = false;
            listsStream.Seek(0, SeekOrigin.Begin);
            listsStream.WriteBytes(header);

            // Set numblocks to zero
            listsStream.WriteInt32(0);
            
            //Write the magic number
            if (GameVersion == 1)
                listsStream.WriteBytes(new byte[] { 0xF2, 0x56, 0x1B, 0x4E });
            else
                listsStream.WriteInt32(1026281201); // 0x3D2BCEF1
            
            // Write 4 bytes of 0
            listsStream.WriteInt32(0);

            // Write 4 more for ME2
            if (GameVersion == 2)
                listsStream.WriteInt32(0);
        }
        private static void Main(string[] args)
        {
            if (args.Length == 0)
            {
                Console.WriteLine("Usage: ScriptEncoder.exe <input_file.txt>");
                Console.ReadKey();
                return;
            }

            string fileInput = args[0];

            // Load script file.
            var br = new BinaryReader(new FileStream(fileInput.Replace(".txt", ""), FileMode.Open));

            var scriptBuffer = br.ReadBytes((int) br.BaseStream.Length);
            

            // Load translation file.
            var sr = new StreamReader(fileInput, Encoding.UTF8, true);
            var lines = sr.ReadToEnd().Replace("\r\n", "\n").Split('\n');
            sr.Close();
            if (lines.Length == 0)
                return;

            // headerLength includes the MAGIC and the DWORD at [0x1C].
            int headerLength = 0;

            // Check whether the file is in the new format.
            // The new format starts with the magic "BurikoCompiledScriptVer1.00\x00" and carries a
            // HEADER whose length is described at [0x1C] as a DWORD; the old format has neither.
            if (
                scriptBuffer.Slice(0, 0x1C)
                            .EqualWith(new byte[]
                                {
                                    0x42, 0x75, 0x72, 0x69, 0x6B,
                                    0x6F, 0x43, 0x6F, 0x6D, 0x70,
                                    0x69, 0x6C, 0x65, 0x64, 0x53,
                                    0x63, 0x72, 0x69, 0x70, 0x74,
                                    0x56, 0x65, 0x72, 0x31, 0x2E,
                                    0x30, 0x30, 0x00
                                }))
            {
                headerLength = 0x1C + BitConverter.ToInt32(scriptBuffer, 0x1C);
            }
            // else headerLength = 0;


            // Get control bytes from original buffer.
            //var controlStream = new MemoryStream(scriptBuffer.Slice(headerLength, GetSmallestOffset(lines) + headerLength));
            var controlStream = new MemoryStream(scriptBuffer.Slice(headerLength, scriptBuffer.Length));

            // Let's begin.
            var textStream = new MemoryStream();
            foreach (var line in lines)
            {
                if (String.IsNullOrEmpty(line)) continue;

                var info = GetLineInfo(line);
                controlStream.WriteInt32(info[0], (int) (controlStream.Length + textStream.Length));

                string curline = GetText(line);
                if (curline.IndexOf("_") == -1)
                {
                    textStream.WriteBytes(Encoding.GetEncoding(936).GetBytes(curline));
                }
                else
                {
                    textStream.WriteBytes(Encoding.GetEncoding(932).GetBytes(curline));
                }
                

                textStream.WriteByte(0x00);
            }

            // Build new script file.
            var bw = new BinaryWriter(new FileStream(fileInput + ".new", FileMode.Create));
            // Write HEADER.
            if (headerLength != 0) bw.Write(scriptBuffer.Slice(0, headerLength));
            // Control bytes.
            bw.Write(controlStream.ToArray());
            // Text bytes.
            bw.Write(textStream.ToArray());
            bw.Close();

            br.Close();
        }
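        /*
         * Minimal sketches (assumptions) of the byte[] helpers the program above relies
         * on but does not show. Slice is assumed to take a start index and an exclusive
         * end index, which is consistent with the calls above; the real extensions may differ.
         */
        public static byte[] Slice(this byte[] source, int start, int end)
        {
            var result = new byte[end - start];
            Buffer.BlockCopy(source, start, result, 0, result.Length);
            return result;
        }

        public static bool EqualWith(this byte[] source, byte[] other)
        {
            if (source.Length != other.Length)
                return false;
            for (var i = 0; i < source.Length; i++)
                if (source[i] != other[i])
                    return false;
            return true;
        }

        /// <summary>
        /// Compress a chunk into up to eight LZO blocks, preceded by a 16-byte chunk
        /// header and an 8-byte (compressed, uncompressed) size entry per block.
        /// </summary>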
        public static byte[] CompressChunk(Chunk chunk)
        {
            int numBlocks = (chunk.Uncompressed.Length + maxBlockSize - 1) / maxBlockSize;
            if (numBlocks > 8)
                throw new FormatException("Maximum block number exceeded");
            ChunkHeader head = new ChunkHeader();
            head.magic = -1641380927; // 0x9E2A83C1
            head.blocksize = maxBlockSize;
            head.uncompressedsize = chunk.Uncompressed.Length;
            int pos = 0;
            MemoryStream mem = new MemoryStream();
            List<Block> blockList = new List<Block>();
            int startData = 16 + 8 * numBlocks;
            mem.Seek(startData, SeekOrigin.Begin);
            for (int i = 0; i < numBlocks; i++)
            {
                Block block = new Block();
                byte[] result, temp;
                if (i != numBlocks - 1)
                {
                    block.uncompressedsize = maxBlockSize;
                    temp = new byte[maxBlockSize];
                }
                else
                {
                    block.uncompressedsize = head.uncompressedsize - pos;
                    temp = new byte[block.uncompressedsize];
                }

                Buffer.BlockCopy(chunk.Uncompressed, pos, temp, 0, temp.Length);
                result = LZOCompress(temp);
                if (result.Length == 0)
                    throw new Exception("LZO compression error!");
                block.compressedsize = result.Length;
                mem.WriteBytes(result);
                blockList.Add(block);
                pos += maxBlockSize;
            }
            head.compressedsize = (int)mem.Length;

            mem.Seek(0, SeekOrigin.Begin);
            mem.WriteInt32(head.magic);
            mem.WriteInt32(head.blocksize);
            mem.WriteInt32(head.compressedsize);
            mem.WriteInt32(head.uncompressedsize);
            foreach (Block block in blockList)
            {
                mem.WriteInt32(block.compressedsize);
                mem.WriteInt32(block.uncompressedsize);
            }

            return mem.ToArray();

        }
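        /*
         * Layout sketch (illustrative): ChunkHeader and Block as used by CompressChunk
         * above. The real definitions are not part of this snippet; the fields simply
         * mirror the 16-byte header and the 8-byte-per-block table, with the LZO block
         * data starting at offset 16 + 8 * numBlocks.
         */
        public struct ChunkHeader
        {
            public int magic;            // 0x9E2A83C1 stored as a signed int
            public int blocksize;        // maximum uncompressed size of a block
            public int compressedsize;   // total size of the chunk as written (header + table + data)
            public int uncompressedsize; // total uncompressed size of the chunk
        }

        public struct Block
        {
            public int compressedsize;
            public int uncompressedsize;
        }

		/// <summary>
		/// Serialize the coalesced files to the output stream: string table, huffman
		/// pair table, index, then the encoded bit stream, with the 32-byte header
		/// patched in last.
		/// </summary>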
		public void Serialize(Stream output)
		{
			var endian = ByteOrder;

			const uint headerSize = 32;
			output.WriteUInt32(0x42424947, endian);
			output.WriteUInt32(Version, endian);

			var keys = new List<string>
			{
				""
			};

			var maxValueLength = 0;
			var blob = new StringBuilder();

			foreach (var file in Files)
			{
				keys.Add(file.Name);

				foreach (var section in file.Sections)
				{
					keys.Add(section.Key);

					foreach (var value in section.Value)
					{
						keys.Add(value.Key);

						foreach (var item in value.Value)
						{
							if (item.Value != null)
							{
								blob.Append(item.Value + '\0');
								maxValueLength = Math.Max(maxValueLength, item.Value.Length);
							}
						}
					}
				}
			}

			var huffmanEncoder = new Encoder();
			huffmanEncoder.Build(blob.ToString());

			keys = keys.Distinct().OrderBy(k => k.HashCrc32()).ToList();
			var maxKeyLength = keys.Max(k => k.Length);

			uint stringTableSize;

			using (var data = new MemoryStream())
			{
				data.Position = 4;
				data.WriteInt32(keys.Count, endian);

				data.Position = 4 + 4 + (8 * keys.Count);
				var offsets = new List<KeyValuePair<uint, uint>>();

				foreach (var key in keys)
				{
					var offset = (uint) data.Position;
					data.WriteUInt16((ushort) key.Length, endian);
					data.WriteString(key, Encoding.UTF8);
					offsets.Add(new KeyValuePair<uint, uint>(key.HashCrc32(), offset));
				}

				data.Position = 8;

				foreach (var kv in offsets)
				{
					data.WriteUInt32(kv.Key, endian);
					data.WriteUInt32(kv.Value - 8, endian);
				}

				data.Position = 0;
				data.WriteUInt32((uint) data.Length, endian);

				data.Position = 0;
				stringTableSize = (uint) data.Length;

				output.Seek(headerSize, SeekOrigin.Begin);
				output.WriteFromStream(data, data.Length);
			}

			uint huffmanSize;
			using (var data = new MemoryStream())
			{
				var pairs = huffmanEncoder.GetPairs();
				data.WriteUInt16((ushort) pairs.Length, endian);
				foreach (var pair in pairs)
				{
					data.WriteInt32(pair.Left, endian);
					data.WriteInt32(pair.Right, endian);
				}

				data.Position = 0;
				huffmanSize = (uint) data.Length;

				output.Seek(headerSize + stringTableSize, SeekOrigin.Begin);
				output.WriteFromStream(data, data.Length);
			}

			var bits = new BitArray(huffmanEncoder.TotalBits);
			var bitOffset = 0;

			uint indexSize;

			using (var index = new MemoryStream())
			{
				var fileDataOffset = 2 + (Files.Count * 6);

				var files = new List<KeyValuePair<ushort, int>>();

				foreach (var file in Files.OrderBy(f => keys.IndexOf(f.Name)))
				{
					files.Add(new KeyValuePair<ushort, int>((ushort) keys.IndexOf(file.Name), fileDataOffset));

					var sectionDataOffset = 2 + (file.Sections.Count * 6);

					var sections = new List<KeyValuePair<ushort, int>>();

					foreach (var section in file.Sections.OrderBy(s => keys.IndexOf(s.Key)))
					{
						sections.Add(new KeyValuePair<ushort, int>((ushort) keys.IndexOf(section.Key), sectionDataOffset));

						var valueDataOffset = 2 + (section.Value.Count * 6);

						var values = new List<KeyValuePair<ushort, int>>();

						foreach (var value in section.Value.OrderBy(v => keys.IndexOf(v.Key)))
						{
							index.Position = fileDataOffset + sectionDataOffset + valueDataOffset;

							values.Add(new KeyValuePair<ushort, int>((ushort) keys.IndexOf(value.Key), valueDataOffset));

							index.WriteUInt16((ushort) value.Value.Count, endian);
							valueDataOffset += 2;

							foreach (var item in value.Value)
							{
								switch (item.Type)
								{
									case -1:
									{
										continue;
									}
									case 1:
									{
										index.WriteInt32((1 << 29) | bitOffset, endian);

										break;
									}
									case 0:
									case 2:
									case 3:
									case 4:
									{
										var type = item.Type;

										if (OverrideCompileValueTypes >= 0)
										{
											type = OverrideCompileValueTypes;
										}

										index.WriteInt32((type << 29) | bitOffset, endian);
										bitOffset += huffmanEncoder.Encode((item.Value ?? "") + '\0', bits, bitOffset);

										break;
									}
								}

								valueDataOffset += 4;
							}
						}

						index.Position = fileDataOffset + sectionDataOffset;

						index.WriteUInt16((ushort) values.Count, endian);
						sectionDataOffset += 2;

						foreach (var value in values)
						{
							index.WriteUInt16(value.Key, endian);
							index.WriteInt32(value.Value, endian);

							sectionDataOffset += 6;
						}

						sectionDataOffset += valueDataOffset;
					}

					index.Position = fileDataOffset;

					index.WriteUInt16((ushort) sections.Count, endian);
					fileDataOffset += 2;

					foreach (var section in sections)
					{
						index.WriteUInt16(section.Key, endian);
						index.WriteInt32(section.Value, endian);

						fileDataOffset += 6;
					}

					fileDataOffset += sectionDataOffset;
				}

				index.Position = 0;

				index.WriteUInt16((ushort) files.Count, endian);

				foreach (var file in files)
				{
					index.WriteUInt16(file.Key, endian);
					index.WriteInt32(file.Value, endian);
				}

				index.Position = 0;
				indexSize = (uint) index.Length;

				output.Seek(headerSize + stringTableSize + huffmanSize, SeekOrigin.Begin);
				output.WriteFromStream(index, index.Length);
			}

			output.Seek(headerSize + stringTableSize + huffmanSize + indexSize, SeekOrigin.Begin);
			output.WriteInt32(bits.Length, endian);

			var bytes = new byte[(bits.Length - 1) / 8 + 1];
			bits.CopyTo(bytes, 0);
			output.WriteBytes(bytes);

			output.Seek(8, SeekOrigin.Begin);
			output.WriteInt32(maxKeyLength, endian);
			output.WriteInt32(maxValueLength, endian);
			output.WriteUInt32(stringTableSize, endian);
			output.WriteUInt32(huffmanSize, endian);
			output.WriteUInt32(indexSize, endian);
			output.WriteInt32(bytes.Length, endian);

			output.Seek(0, SeekOrigin.Begin);
			output.WriteUInt32(0x666D726D, endian);
		}
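		// Offsets within the 32-byte header written by Serialize, inferred from the
		// writes above (illustrative constants, not part of the original source).
		// The magic at offset 0 is first written as 0x42424947 and overwritten with
		// 0x666D726D once serialization has completed.
		private const int HeaderMagicOffset = 0x00;
		private const int HeaderVersionOffset = 0x04;
		private const int HeaderMaxKeyLengthOffset = 0x08;
		private const int HeaderMaxValueLengthOffset = 0x0C;
		private const int HeaderStringTableSizeOffset = 0x10;
		private const int HeaderHuffmanSizeOffset = 0x14;
		private const int HeaderIndexSizeOffset = 0x18;
		private const int HeaderDataByteCountOffset = 0x1C;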
        private void SerializeSaveFile(Stream stream, SaveFile save)
        {
            var saveDataStream = new MemoryStream();
            var nameTableStream = new MemoryStream();

            var newSave = new SaveFile
            {
                Header =
                {
                    UnkInt1 = save.Header.UnkInt1
                },
                SaveDataHeader =
                {
                    HeaderSize = save.SaveDataHeader.HeaderSize,
                    UnkInt2 = save.SaveDataHeader.UnkInt2
                }
            };

            newSave.SaveData = FillSaveData();

            saveDataStream.SerializeObject(newSave.SaveData, newSave.NameTable.Names);

            nameTableStream.WriteInt32(newSave.NameTable.Names.Count);
            for (var i = 0; i < newSave.NameTable.Names.Count; i++)
            {
                nameTableStream.WriteString(newSave.NameTable.Names[i]);
            }

            newSave.SaveDataHeader.SaveDataSize = (int) saveDataStream.Length + 12; // 12 = SaveDataHeader size
            newSave.Header.FileSize = (int) saveDataStream.Length + 12 + (int) nameTableStream.Length;

            stream.Seek(0, SeekOrigin.Begin);

            // File header
            stream.WriteInt32(newSave.Header.UnkInt1);
            stream.WriteInt32(newSave.Header.FileSize);

            // Save data header
            stream.WriteInt32(newSave.SaveDataHeader.HeaderSize);
            stream.WriteInt32(newSave.SaveDataHeader.SaveDataSize);
            stream.WriteInt32(newSave.SaveDataHeader.UnkInt2);

            // Save data
            saveDataStream.WriteTo(stream);
            //stream.WriteInt32(255);

            // Name table
            nameTableStream.WriteTo(stream);
        }
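        /*
         * Usage sketch (hypothetical helper, not part of the original source). The file
         * produced above is laid out as an 8-byte file header, the 12-byte save data
         * header, the serialized save data, and finally the name table (a count
         * followed by the names).
         */
        private void WriteSaveFile(string path, SaveFile save)
        {
            using (var stream = new FileStream(path, FileMode.Create, FileAccess.Write))
            {
                SerializeSaveFile(stream, save);
            }
        }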