/// <summary>
/// Writes the current replay to a binary file.
/// </summary>
public void Write(string path)
{
    var frames = FramesToString();
    ReplayVersion = CurrentVersion;

    using (var replayDataStream = new MemoryStream(Encoding.ASCII.GetBytes(frames)))
    using (var bw = new BinaryWriter(File.Open(path, FileMode.Create)))
    {
        bw.Write(ReplayVersion);
        bw.Write(MapMd5);
        bw.Write(GetMd5(frames));
        bw.Write(PlayerName);
        bw.Write(DateTime.Now.ToString(CultureInfo.InvariantCulture));
        bw.Write(TimePlayed);
        bw.Write((int)Mode);
        bw.Write((int)Mods);
        bw.Write(Score);
        bw.Write(Accuracy);
        bw.Write(MaxCombo);
        bw.Write(CountMarv);
        bw.Write(CountPerf);
        bw.Write(CountGreat);
        bw.Write(CountGood);
        bw.Write(CountOkay);
        bw.Write(CountMiss);
        bw.Write(PauseCount);
        bw.Write(RandomizeModifierSeed);
        bw.Write(StreamHelper.ConvertStreamToByteArray(LZMACoder.Compress(replayDataStream)));
    }
}
/// <summary>
/// Writes the current replay to a binary file.
/// </summary>
public void Write(string path)
{
    var frames = FramesToString();

    // TODO: This should be removed when everyone is running the new redesign client.
    // This will manually downgrade the replay version if the user isn't using new modifiers to keep
    // compatibility between old and new clients.
    // 0.0.1 used to write the modifiers as a 32-bit integer, but because of the amount of new mods, they need
    // to be written as 64-bit.
    ReplayVersion = Mods < ModIdentifier.Speed105X ? "0.0.1" : CurrentVersion;

    using (var replayDataStream = new MemoryStream(Encoding.ASCII.GetBytes(frames)))
    using (var bw = new BinaryWriter(File.Open(path, FileMode.Create)))
    {
        bw.Write(ReplayVersion);
        bw.Write(MapMd5);
        bw.Write(GetMd5(frames));
        bw.Write(PlayerName);
        bw.Write(DateTime.Now.ToString(CultureInfo.InvariantCulture));
        bw.Write(TimePlayed);
        bw.Write((int)Mode);

        // This check is to keep compatibility with older clients.
        // We only want to write a 64-bit integer for replays with newer mods activated.
        if (ReplayVersion == "0.0.1" || Mods < ModIdentifier.Speed105X)
        {
            bw.Write((int)Mods);
        }
        else
        {
            bw.Write((long)Mods);
        }

        bw.Write(Score);
        bw.Write(Accuracy);
        bw.Write(MaxCombo);
        bw.Write(CountMarv);
        bw.Write(CountPerf);
        bw.Write(CountGreat);
        bw.Write(CountGood);
        bw.Write(CountOkay);
        bw.Write(CountMiss);
        bw.Write(PauseCount);
        bw.Write(RandomizeModifierSeed);
        bw.Write(StreamHelper.ConvertStreamToByteArray(LZMACoder.Compress(replayDataStream)));
    }
}
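For context, here is a minimal sketch of the matching read side. It is not taken from the project: the method name, the local names, and the assumption that TimePlayed is stored as a 64-bit value are illustrative. The field order, however, mirrors the writer above, and the version check decides whether the modifiers were stored as 32 or 64 bits.

// Hypothetical header reader; mirrors the write order of the method above.
// Assumes System and System.IO are imported; names are illustrative only.
public static void DumpReplayHeader(string path)
{
    using (var br = new BinaryReader(File.OpenRead(path)))
    {
        var replayVersion = br.ReadString();
        var mapMd5 = br.ReadString();
        var replayMd5 = br.ReadString();
        var playerName = br.ReadString();
        var date = br.ReadString();
        var timePlayed = br.ReadInt64();   // assumes TimePlayed is written as a 64-bit value
        var mode = br.ReadInt32();

        // Old clients ("0.0.1") stored the modifiers as a 32-bit integer; newer ones use 64-bit.
        long mods = replayVersion == "0.0.1" ? br.ReadInt32() : br.ReadInt64();

        Console.WriteLine($"{playerName} on {mapMd5}, {date}, v{replayVersion}, mode {mode}, mods {mods}");
        // The score/judgement counts, pause count, seed and the LZMA-compressed frame data follow.
    }
}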
private static byte[] EncodeV1(ObjectData data)
{
    using (FlagsBinaryWriter writer = new FlagsBinaryWriter(new MemoryStream()))
    {
        // write client version
        writer.Write((ushort)DatFormat.Format_1010);

        // write category
        string category = string.Empty;
        switch (data.Category)
        {
            case ThingCategory.Item:
                category = "item";
                break;
            case ThingCategory.Outfit:
                category = "outfit";
                break;
            case ThingCategory.Effect:
                category = "effect";
                break;
            case ThingCategory.Missile:
                category = "missile";
                break;
        }

        writer.Write((ushort)category.Length);
        writer.Write(Encoding.UTF8.GetBytes(category));

        if (!ThingTypeSerializer.WriteProperties(data.ThingType, DatFormat.Format_1010, writer))
        {
            return null;
        }

        FrameGroup group = data.GetFrameGroup(FrameGroupType.Default);

        writer.Write(group.Width);
        writer.Write(group.Height);

        if (group.Width > 1 || group.Height > 1)
        {
            writer.Write(group.ExactSize);
        }

        writer.Write(group.Layers);
        writer.Write(group.PatternX);
        writer.Write(group.PatternY);
        writer.Write(group.PatternZ);
        writer.Write(group.Frames);

        Sprite[] sprites = data.Sprites[FrameGroupType.Default];
        for (int i = 0; i < sprites.Length; i++)
        {
            Sprite sprite = sprites[i];
            byte[] pixels = sprite.GetARGBPixels();
            writer.Write((uint)sprite.ID);
            writer.Write((uint)pixels.Length);
            writer.Write(pixels);
        }

        return LZMACoder.Compress(((MemoryStream)writer.BaseStream).ToArray());
    }
}
private static byte[] EncodeV2(ObjectData data)
{
    using (FlagsBinaryWriter writer = new FlagsBinaryWriter(new MemoryStream()))
    {
        // write obd version
        writer.Write((ushort)ObdVersion.Version2);

        // write client version
        writer.Write((ushort)DatFormat.Format_1050);

        // write category
        writer.Write((byte)data.Category);

        // skipping the texture patterns position.
        int patternsPosition = (int)writer.BaseStream.Position;
        writer.Seek(4, SeekOrigin.Current);

        if (!WriteProperties(data.ThingType, writer))
        {
            return null;
        }

        // write the texture patterns position.
        int position = (int)writer.BaseStream.Position;
        writer.Seek(patternsPosition, SeekOrigin.Begin);
        writer.Write((uint)position); // backfill the reserved slot with the offset where the texture patterns begin
        writer.Seek(position, SeekOrigin.Begin);

        FrameGroup group = data.GetFrameGroup(FrameGroupType.Default);

        writer.Write(group.Width);
        writer.Write(group.Height);

        if (group.Width > 1 || group.Height > 1)
        {
            writer.Write(group.ExactSize);
        }

        writer.Write(group.Layers);
        writer.Write(group.PatternX);
        writer.Write(group.PatternY);
        writer.Write(group.PatternZ);
        writer.Write(group.Frames);

        if (group.IsAnimation)
        {
            writer.Write((byte)group.AnimationMode);
            writer.Write(group.LoopCount);
            writer.Write(group.StartFrame);

            for (int i = 0; i < group.Frames; i++)
            {
                writer.Write((uint)group.FrameDurations[i].Minimum);
                writer.Write((uint)group.FrameDurations[i].Maximum);
            }
        }

        Sprite[] sprites = data.Sprites[FrameGroupType.Default];
        for (int i = 0; i < sprites.Length; i++)
        {
            Sprite sprite = sprites[i];
            byte[] pixels = sprite.GetARGBPixels();
            writer.Write(sprite.ID);
            writer.Write(pixels);
        }

        return LZMACoder.Compress(((MemoryStream)writer.BaseStream).ToArray());
    }
}
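The trickiest part of EncodeV2 is the 4-byte placeholder that is later backfilled with the texture patterns offset. Below is a small, self-contained sketch of that reserve-then-backfill pattern using only a plain BinaryWriter over a MemoryStream; the method and variable names are illustrative and are not part of the encoder above.

// Illustrative only: reserve 4 bytes, write the variable-length part, then backfill
// the reserved slot with the offset where the payload actually starts.
static byte[] WriteWithBackfilledOffset(byte[] payload)
{
    using (var ms = new MemoryStream())
    using (var writer = new BinaryWriter(ms))
    {
        long placeholder = ms.Position;
        writer.Write((uint)0);            // reserve 4 bytes for the offset

        // ... any variable-length data (properties, headers, ...) would be written here ...

        long payloadStart = ms.Position;  // remember where the payload begins
        writer.Write(payload);

        long end = ms.Position;
        ms.Position = placeholder;
        writer.Write((uint)payloadStart); // backfill the real offset
        ms.Position = end;                // restore the position before returning

        return ms.ToArray();
    }
}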
static void Main(string[] args)
{
    // some logical steps to help my mind code :>
    // 1. read Caches folder, if a caches.xml file is found, go to step 3, if not, 2
    // 2. get all the directories inside the Caches folder, add those to a list with version number being 0
    // 3. get all the caches inside the cache file, add those to a list with its version number
    // 4. read all cache folders recursively, if an info.xml file exists, use that file to verify if any files have changed.
    // 5. any files that have changed (or all if an info.xml file doesn't exist) or are new should be compressed and added to the info.xml file
    // 6. build the caches.xml file with an incremented version (if the info.xml file has changed)
    // 7. copy all files to be uploaded (caches.xml + directories with info.xml and only files to be uploaded (lzma's)) to the CacheUpload folder

    Environment.CurrentDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    var cf = CombinePaths(Environment.CurrentDirectory, "Caches", "caches.xml");

    try
    {
        Directory.Delete(CombinePaths(Environment.CurrentDirectory, "CachesUpload"), true);
    }
    catch (DirectoryNotFoundException)
    {
    }

    var cacheDict = new SortedDictionary<string, int>();
    var cacheChanged = new SortedDictionary<string, bool>();

    if (File.Exists(cf))
    {
        foreach (var el in XDocument.Parse(File.ReadAllText(cf, Encoding.UTF8)).Element("Caches").Elements())
        {
            cacheDict.Add(el.Attribute("ID").Value, int.Parse(el.Attribute("Version").Value));
        }
    }

    foreach (var dir in Directory.GetDirectories(Path.Combine(Environment.CurrentDirectory, "Caches")))
    {
        if (!cacheDict.ContainsKey(Path.GetFileName(dir)))
        {
            cacheDict.Add(Path.GetFileName(dir), 0);
        }
    }

    foreach (var keyval in cacheDict)
    {
        var changed = false;
        var CCFList = new SortedDictionary<string, CacheContentFile>();
        var infoXML = new XDocument();
        var currentFolder = CombinePaths(Environment.CurrentDirectory, "Caches", keyval.Key);

        if (File.Exists(CombinePaths(currentFolder, "info.xml")))
        {
            foreach (var el in XDocument.Parse(File.ReadAllText(CombinePaths(currentFolder, "info.xml"), Encoding.UTF8)).Element("CacheInfo").Elements())
            {
                var name = el.Attribute("Name").Value;
                if (File.Exists(CombinePaths(currentFolder, name)))
                {
                    var size = long.Parse(el.Attribute("Size").Value);
                    var sizeCompressed = long.Parse(el.Attribute("CompressedSize").Value);
                    var hash = el.Attribute("SHA1Hash").Value;
                    CCFList.Add(name, new CacheContentFile { SHA1Hash = hash, Size = size, CompressedSize = sizeCompressed });
                }
            }
        }

        foreach (var file in GetFilesRecursive(currentFolder))
        {
            // remove current folder and the slash, replace \ with / for linux (still works on windows)
            var afile = file.Replace(currentFolder, "").Substring(1).Replace("\\", "/");

            if (Path.GetFileName(afile) == "info.xml" || Path.GetExtension(afile) == ".lzma")
            {
                continue; // both files shouldn't be here!!!1
            }

            var length = new FileInfo(Path.Combine(currentFolder, afile)).Length;
            var lengthCompressed = length;
            var hash = GetFileSHA1(Path.Combine(currentFolder, afile));
            bool compressed = true;

            if (!CCFList.ContainsKey(afile) || CCFList[afile].SHA1Hash != hash || CCFList[afile].Size != length)
            {
                changed = true; // doesn't exist in info.xml, hash changed or size changed
                var pathToGo = CombinePaths(Environment.CurrentDirectory, "CacheUpload", keyval.Key, afile);

                if (length < 75 * 1024 * 1024) // bigger than 75mb = no compress (should work for most situations)
                {
                    var lz = new LZMACoder();
                    var inStream = File.Open(Path.Combine(currentFolder, afile), FileMode.Open, FileAccess.Read);
                    Directory.CreateDirectory(Path.GetDirectoryName(pathToGo));
                    var outStream = File.Open(pathToGo + ".lzma", FileMode.Create, FileAccess.Write);

                    try
                    {
                        Console.WriteLine("Compressing {0} to {1}....", Path.Combine(currentFolder, afile), pathToGo + ".lzma");
                        lz.Compress(inStream, outStream);
                        inStream.Close();
                        outStream.Close();
                        lengthCompressed = new FileInfo(pathToGo + ".lzma").Length;
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Failed to compress {0}, {1}", file, e.ToString());
                        inStream.Close();
                        outStream.Close();
                        compressed = false;
                        File.Delete(pathToGo + ".lzma");
                    }
                }

                if (!compressed)
                {
                    File.Copy(Path.Combine(currentFolder, afile), pathToGo, true);
                }

                if (CCFList.ContainsKey(afile))
                {
                    CCFList[afile].Size = length;
                    CCFList[afile].CompressedSize = lengthCompressed;
                    CCFList[afile].SHA1Hash = hash;
                }
                else
                {
                    CCFList.Add(afile, new CacheContentFile { Size = length, CompressedSize = lengthCompressed, SHA1Hash = hash });
                }
            }
        }

        infoXML = new XDocument(new XElement("CacheInfo"));

        foreach (var keyval2 in CCFList)
        {
            infoXML.Element("CacheInfo").Add(
                new XElement("CacheFile",
                    new XAttribute("Name", keyval2.Key),
                    new XAttribute("Size", keyval2.Value.Size.ToString()),
                    new XAttribute("CompressedSize", keyval2.Value.CompressedSize.ToString()),
                    new XAttribute("SHA1Hash", keyval2.Value.SHA1Hash)
                )
            );
        }

        cacheChanged.Add(keyval.Key, changed);

        // using (XmlTextWriter writer = new XmlTextWriter(Path.Combine(currentFolder, "info.xml"), null))
        // {
        //     infoXML.Save(writer);
        // }
        Store(infoXML, Path.Combine(currentFolder, "info.xml"));

        Directory.CreateDirectory(Path.GetDirectoryName(CombinePaths(Environment.CurrentDirectory, "CacheUpload", keyval.Key, "info.xml")));
        File.Copy(Path.Combine(currentFolder, "info.xml"), CombinePaths(Environment.CurrentDirectory, "CacheUpload", keyval.Key, "info.xml"), true);
    }

    var caches = new XDocument(new XElement("Caches"));

    foreach (var keyval3 in cacheDict)
    {
        caches.Element("Caches").Add(
            new XElement("Cache",
                new XAttribute("ID", keyval3.Key),
                new XAttribute("Version", (keyval3.Value + (cacheChanged[keyval3.Key] ? 1 : 0)).ToString())
            )
        );
    }

    // using (XmlTextWriter writer = new XmlTextWriter(CombinePaths(Environment.CurrentDirectory, "Caches", "caches.xml"), null))
    // {
    //     caches.Save(writer);
    // }
    Store(caches, CombinePaths(Environment.CurrentDirectory, "Caches", "caches.xml"));

    Directory.CreateDirectory(Path.GetDirectoryName(CombinePaths(Environment.CurrentDirectory, "Caches", "caches.xml")));
    File.Copy(CombinePaths(Environment.CurrentDirectory, "Caches", "caches.xml"), CombinePaths(Environment.CurrentDirectory, "CacheUpload", "caches.xml"), true);
}
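Main relies on a few helpers that are not shown in this snippet (CombinePaths, GetFilesRecursive, GetFileSHA1, Store). Below is a rough sketch of what they could look like using only the BCL (System.IO, System.Security.Cryptography, System.Xml.Linq); these are assumptions about their behavior, and the real implementations, in particular the SHA1 string format, may differ.

// Hypothetical versions of the helpers referenced by Main; the actual project may differ.
static string CombinePaths(params string[] parts) => Path.Combine(parts);

static IEnumerable<string> GetFilesRecursive(string root) =>
    Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories);

static string GetFileSHA1(string path)
{
    using (var sha1 = SHA1.Create())
    using (var stream = File.OpenRead(path))
    {
        // Hex string without separators; the SHA1Hash attribute in info.xml must use the same format.
        return BitConverter.ToString(sha1.ComputeHash(stream)).Replace("-", "").ToLowerInvariant();
    }
}

static void Store(XDocument doc, string path)
{
    // Persist the XML document, overwriting any existing file.
    doc.Save(path);
}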