/// <summary>
/// Emits a pax_global_header entry carrying the commit sha before any file
/// entries are written, so the archive records which commit it was built from.
/// </summary>
public override void BeforeArchiving(Tree tree, ObjectId oid, DateTimeOffset modificationTime)
{
    if (oid == null)
    {
        return;
    }

    // Store the sha in the pax_global_header. The pax record is
    // "<len> comment=<sha>\n" where len (52) counts the whole record.
    string paxRecord = string.Format(CultureInfo.InvariantCulture, "52 comment={0}\n", oid.Sha);

    using (var headerStream = new MemoryStream(Encoding.ASCII.GetBytes(paxRecord)))
    {
        writer.Write("pax_global_header", headerStream, modificationTime,
                     "666".OctalToInt32(), "0", "0", 'g', "root", "root", "0", "0", oid.Sha, false);
    }
}
/// <summary>
/// Verifies that a tar containing entries in a nested directory hierarchy
/// matches the stored fixture byte-for-byte.
/// </summary>
public void CanCreateATarFileWithMultipleFileEntriesWithADeepHierarchy()
{
    // FIX: the identical timestamp expression was built three times; hoist it
    // so all entries demonstrably share one deterministic modification time.
    DateTime modificationTime = new DateTime(2013, 4, 1, 13, 12, 58, 548).ToUniversalTime();

    using (Stream outStream = BuildOutStream())
    {
        using (var writer = new TarWriter(outStream))
        using (var entry1 = GetFileEntryFrom(@"deep\1.txt"))
        using (var entry2 = GetFileEntryFrom(@"deep\2\fox.txt"))
        using (var entry3 = GetFileEntryFrom(@"deep\2\3\dog.txt"))
        {
            writer.Write(entry1.Path, entry1.Stream, 511, modificationTime);
            writer.Write(entry2.Path, entry2.Stream, 511, modificationTime);
            writer.Write(entry3.Path, entry3.Stream, 511, modificationTime);
        }

        outStream.Seek(0, SeekOrigin.Begin);

        using (var reader = new StreamReader(outStream))
        using (var expectedContentStream = GetExpectedStream("CanCreateATarFileWithMultipleFileEntriesWithADeepHierarchy"))
        {
            string content = reader.ReadToEnd();
            string expectedContent = expectedContentStream.ReadToEnd();
            Assert.Equal(expectedContent, content);
        }
    }
}
/// <summary>
/// Adds a file to the currently opened archive.
/// </summary>
/// <param name="fileName">Path of the file to add; only its name is stored in the archive.</param>
/// <param name="userId">Numeric owner id recorded in the tar header.</param>
/// <param name="groupId">Numeric group id recorded in the tar header.</param>
/// <param name="mode">Unix permission bits; default is 0100644 (regular file, rw-r--r--).</param>
private void AddFile(string fileName, int userId = 0, int groupId = 0, int mode = 33188 /* 0100644 */)
{
    var fileInfo = new FileInfo(fileName);

    using (FileStream source = File.OpenRead(fileName))
    {
        tarWriter.Write(source, fileInfo.Length, fileInfo.Name, userId, groupId, mode, fileInfo.LastWriteTimeUtc);
    }
}
/// <summary>
/// Writes every texture referenced by the scene to the tar archive as a PNG entry.
/// Embedded textures (keyed "*0", "*1", ...) are written first and removed from
/// <paramref name="textures"/>; the remaining entries are loaded from disk by path.
/// </summary>
/// <exception cref="ContentException">A file-based texture could not be loaded.</exception>
unsafe void WriteTextures(Scene scene, TarWriter tw, Dictionary<string, string> textures)
{
    for (var i = 0; i < scene.TextureCount; i++)
    {
        var tex = scene.Textures[i];
        var name = "*" + i;

        if (!textures.ContainsKey(name))
        {
            continue;
        }

        textures.Remove(name);

        Bitmap bmp = null;

        try
        {
            if (tex.HasCompressedData)
            {
                using (var ms = new MemoryStream(tex.CompressedData))
                {
                    bmp = new Bitmap(ms);
                }
            }
            else if (tex.HasNonCompressedData)
            {
                // Raw texels are 32bpp ARGB, tightly packed (stride = width * 4).
                fixed (Texel* p = tex.NonCompressedData)
                {
                    bmp = new Bitmap(tex.Width, tex.Height, tex.Width * 4, PixelFormat.Format32bppArgb, (IntPtr)p);
                }
            }

            if (bmp != null)
            {
                using (var ms = new MemoryStream())
                {
                    bmp.Save(ms, ImageFormat.Png);
                    ms.Position = 0;
                    tw.Write(ms, ms.Length, name + ".png");
                }
            }
        }
        finally
        {
            // FIX: the Bitmap was previously never disposed, leaking a GDI+
            // handle per embedded texture.
            if (bmp != null)
            {
                bmp.Dispose();
            }
        }
    }

    foreach (var kvp in textures)
    {
        try
        {
            // FIX: the loaded image was previously never disposed.
            using (var img = ImageLoader.Load(kvp.Key))
            using (var ms = new MemoryStream())
            {
                img.Save(ms, ImageFormat.Png);
                ms.Position = 0;
                tw.Write(ms, ms.Length, kvp.Value + ".png");
            }
        }
        catch (Exception ex)
        {
            throw new ContentException("Could not load referenced texture: " + kvp.Key, ex);
        }
    }
}
/// <summary>
/// TAR Archive: recursively adds a directory tree to the archive, skipping the
/// "log" directory entirely and warning on "backup"/"restore" subdirectories.
/// </summary>
/// <param name="archive">Destination archive writer.</param>
/// <param name="dir">Directory to back up.</param>
/// <param name="rootDirectory">Root prefix stripped from each entry name.</param>
private void BackupDirectory(TarWriter archive, String dir, String rootDirectory)
{
    if (Path.GetFileName(dir) == "log")
    {
        return;
    }

    // Backup
    foreach (var itm in Directory.GetDirectories(dir))
    {
        if (Path.GetFileName(itm).Equals("backup", StringComparison.OrdinalIgnoreCase) ||
            Path.GetFileName(itm).Equals("restore", StringComparison.OrdinalIgnoreCase))
        {
            this.m_tracer.TraceWarning("Skipping {0} ", itm);
        }
        else
        {
            this.BackupDirectory(archive, itm, rootDirectory);
        }
    }

    // Add files; failures are logged and skipped so one bad file does not
    // abort the whole backup.
    foreach (var itm in Directory.GetFiles(dir))
    {
        try
        {
            this.m_tracer.TraceVerbose("Backing up {0}", itm);

            // FIX: the FileStream returned by File.OpenRead was never disposed,
            // leaking one file handle per archived file.
            using (var source = File.OpenRead(itm))
            {
                archive.Write(itm.Replace(rootDirectory, ""), source, DateTime.Now);
            }
        }
        catch (Exception e)
        {
            this.m_tracer.TraceWarning("Could not add file {0} to backup : {1}", itm, e);
        }
    }
}
/// <summary>
/// Serializes <paramref name="value"/> with the configured serializer and writes
/// the resulting UTF-8 text to the archive at <paramref name="path"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
private void WriteObject(TarWriter writer, string path, object value)
{
    // FIX: a null argument should raise ArgumentNullException, not
    // ArgumentOutOfRangeException (CA2208).
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }

    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    if (value == null)
    {
        throw new ArgumentNullException(nameof(value));
    }

    using (var stream = new MemoryStream())
    {
        // NOTE(review): Encoding.UTF8 emits a BOM, unlike the sibling
        // WriteString helper which uses new UTF8Encoding(false) — confirm the
        // difference is intended.
        using (var textWriter = new StreamWriter(stream, Encoding.UTF8, bufferSize: 4096, leaveOpen: true))
        {
            this.serializer.Serialize(textWriter, value);
        }

        stream.Position = 0;
        writer.Write(path, stream);
    }
}
/// <summary>
/// Builds a tar archive from the files named on the command line, lists its
/// entries, then extracts everything to out_dir\data.
/// </summary>
private static void Main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("USAGE: ArchiveMaker fileName.tar <fileToAdd.ext> [. more files..]");
        return;
    }

    // Create the archive: one directory entry plus every file argument.
    using (var archUsTar = File.Create(args[0]))
    using (var tar = new TarWriter(archUsTar))
    {
        tar.WriteDirectoryEntry("test_dir");
        for (int i = 1; i < args.Length; ++i)
        {
            tar.Write(args[i]);
        }
    }

    Console.WriteLine("Examine tar file: {0}", args[0]);

    // Walk the archive and print each entry's name and owner.
    using (var examiner = File.OpenRead(args[0]))
    {
        var listing = new TarReader(examiner);
        while (listing.MoveNext(true))
        {
            Console.WriteLine("File: {0}, Owner: {1}", listing.FileInfo.FileName, listing.FileInfo.UserName);
        }
    }

    // Extract the full archive contents.
    using (var unarchFile = File.OpenRead(args[0]))
    {
        var extractor = new TarReader(unarchFile);
        extractor.ReadToEnd("out_dir\\data");
    }
}
/// <summary>
/// Writes the file to the archive writer provided in the constructor, then disposes the memory stream.
/// </summary>
/// <param name="disposing">
/// true to release both managed and unmanaged resources; false to release only unmanaged resources.
/// </param>
protected override void Dispose(bool disposing)
{
    // Rewind so the archive writer copies this stream from the beginning.
    Seek(0, SeekOrigin.Begin);

    // NOTE(review): this touches managed state (m_archiveWriter) regardless of
    // `disposing`; if this type can ever be finalized, the write should likely
    // be guarded by `if (disposing)` — confirm against the owning class.
    m_archiveWriter.Write(m_fileName, this, m_lastWriteTime, Length);

    base.Dispose(disposing);
}
/// <summary>
/// Round-trips a tar entry whose name is `length` repetitions of U+3042
/// (hiragana "a") and verifies the UTF-8 encoded name survives write/read.
/// </summary>
public void Tar_Japanese_Name(int length)
{
    using (var archiveStream = new MemoryStream())
    {
        var encoding = new ArchiveEncoding() { Default = Encoding.UTF8 };
        var writerOptions = new TarWriterOptions(CompressionType.None, true);
        writerOptions.ArchiveEncoding = encoding;

        // Entry name made entirely of multi-byte characters.
        var entryName = new string((char)0x3042, length);

        using (var tarWriter = new TarWriter(archiveStream, writerOptions))
        using (var payload = new MemoryStream(new byte[32]))
        {
            tarWriter.Write(entryName, payload, null);
        }

        using (var roundTrip = new MemoryStream(archiveStream.ToArray()))
        {
            var readerOptions = new ReaderOptions() { ArchiveEncoding = encoding };
            using (var tarReader = TarReader.Open(roundTrip, readerOptions))
            {
                while (tarReader.MoveToNextEntry())
                {
                    Assert.Equal(entryName, tarReader.Entry.Key);
                }
            }
        }
    }
}
/// <summary>
/// Writes <paramref name="value"/> to the archive at <paramref name="path"/> as
/// BOM-less UTF-8 with Unix-style line endings.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
private void WriteString(TarWriter writer, string path, string value)
{
    // FIX: a null argument should raise ArgumentNullException, not
    // ArgumentOutOfRangeException (CA2208).
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }

    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    if (value == null)
    {
        throw new ArgumentNullException(nameof(value));
    }

    using (var stream = new MemoryStream())
    {
        // Don't emit a BOM and use Unix-style line endings
        using (StreamWriter textWriter = new StreamWriter(stream, new UTF8Encoding(false), bufferSize: 4096, leaveOpen: true))
        {
            textWriter.NewLine = "\n";
            textWriter.Write(value);
        }

        stream.Position = 0;
        writer.Write(path, stream);
    }
}
/// <summary>
/// Adds every file in the directory to the tar, and recurses into subdirectories.
/// Directories whose upper-cased names appear in skipDirectories are skipped
/// together with everything beneath them.
/// </summary>
private void AddAllToTar(string root, TarWriter tar)
{
    Log("Opening in " + root + "...");

    // Recurse into subdirectories that are not on the skip list.
    foreach (var subDirectory in Directory.GetDirectories(root))
    {
        var name = new DirectoryInfo(subDirectory).Name;
        if (skipDirectories.Contains(name.ToUpperInvariant()))
        {
            Log("Skipping directory " + subDirectory + " and its subdirectories");
            continue;
        }

        AddAllToTar(subDirectory, tar);
    }

    // Then archive each file directly in this directory.
    foreach (var filePath in Directory.GetFiles(root))
    {
        var fileInfo = new FileInfo(filePath);
        Log("Writing " + fileInfo.Name + "... (" + Util.FormatFileSize(fileInfo.Length) + ")");

        using (var source = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        {
            tar.Write(source);
        }
    }
}
/// <summary>
/// Builds a sprite atlas from the images in ./sprites (or the current directory)
/// and writes a tar containing "atlas.bin" (layout data) and "sheet.png" (the
/// packed sheet image).
/// </summary>
public override void Import(string input, Stream output)
{
    var ser = new JsonSerializer();

    // Optional JSON settings file next to the sprites; defaults apply when absent.
    AtlasMetadata metadata = null;
    string metaPath = "atlas.meta";
    if (File.Exists(metaPath))
    {
        using (var sr = new StreamReader(metaPath))
        {
            metadata = ser.Deserialize<AtlasMetadata>(new JsonTextReader(sr));
        }
    }
    else
    {
        metadata = new AtlasMetadata();
    }

    // Pack every supported image found in ./sprites (preferred) or ".".
    var lp = new LayoutProperties
    {
        inputFilePaths = (Directory.Exists("./sprites") ? Directory.GetFiles("./sprites") : Directory.GetFiles("."))
            .Where(p => SupportedFormats.Contains(Path.GetExtension(p).ToLower())).ToArray(),
        distanceBetweenImages = metadata.Padding,
        marginWidth = metadata.Margin,
        powerOfTwo = metadata.PowerOfTwo,
        // An unset (<= 0) dimension falls back to a 16384px ceiling.
        maxSpriteWidth = metadata.MaxSpriteWidth > 0 ? metadata.MaxSpriteWidth : 16384,
        maxSpriteHeight = metadata.MaxSpriteHeight > 0 ? metadata.MaxSpriteHeight : 16384,
        filterMode = metadata.FilterMode,
    };

    var sheetMaker = new AtlasBuilder(lp);
    using (var tw = new TarWriter(output))
    {
        using (MemoryStream atlasStream = new MemoryStream(), sheetStream = new MemoryStream())
        {
            using (var bw = new BinaryWriter(atlasStream))
            {
                // AtlasBuilder writes layout data to bw and the sheet image to sheetStream.
                sheetMaker.Create(bw, sheetStream, ImageFormat.Png, metadata.NoPreMultiply);
                bw.Flush();
                atlasStream.Position = 0;
                sheetStream.Position = 0;
                tw.Write(atlasStream, atlasStream.Length, "atlas.bin");
                tw.Write(sheetStream, sheetStream.Length, "sheet.png");
            }
        }
    }
}
/// <summary>
/// Writes the same payload as two tar entries, then reads the archive back one
/// entry at a time and checks each entry's length and the total entry count.
/// </summary>
public void Tar_Read_One_At_A_Time()
{
    var tarWriterOptions = new TarWriterOptions(CompressionType.None, true)
    {
        ArchiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 },
    };

    var payload = Encoding.UTF8.GetBytes("This is a test.");

    using (var archiveStream = new MemoryStream())
    {
        using (var tarWriter = new TarWriter(archiveStream, tarWriterOptions))
        using (var payloadStream = new MemoryStream(payload))
        {
            tarWriter.Write("test1.txt", payloadStream);

            // Rewind so the same bytes can be written as a second entry.
            payloadStream.Position = 0;
            tarWriter.Write("test2.txt", payloadStream);
        }

        archiveStream.Position = 0;

        var entryCount = 0;
        using (var archive = TarArchive.Open(archiveStream))
        {
            foreach (var entry in archive.Entries)
            {
                ++entryCount;
                using (var entryStream = entry.OpenEntryStream())
                using (var copy = new MemoryStream())
                {
                    entryStream.CopyTo(copy);
                    Assert.Equal(payload.Length, copy.Length);
                }
            }
        }

        Assert.Equal(2, entryCount);
    }
}
/// <summary>
/// Verifies that finalizing a tar archive appends the two 512-byte terminator
/// blocks, and that an unfinalized archive omits them.
/// </summary>
public void Tar_Finalize_Archive(bool finalizeArchive)
{
    using (MemoryStream archiveStream = new MemoryStream())
    using (Stream sourceContent = File.OpenRead(Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg")))
    {
        var options = new TarWriterOptions(CompressionType.None, finalizeArchive);
        using (TarWriter writer = new TarWriter(archiveStream, options))
        {
            writer.Write("doesn't matter", sourceContent, null);
        }

        // One 512-byte header plus the content rounded up to a 512-byte boundary.
        var paddedContentWithHeader = sourceContent.Length / 512 * 512 + 512 + 512;

        // A finalized archive additionally ends with two zero-filled 512-byte blocks.
        var expectedStreamLength = finalizeArchive
            ? paddedContentWithHeader + 512 * 2
            : paddedContentWithHeader;

        Assert.Equal(expectedStreamLength, archiveStream.Length);
    }
}
/// <summary>
/// Writes every queued file into a new tar archive at the given path.
/// </summary>
/// <param name="archiveName">Path of the archive to create; must not already exist.</param>
public void Archive(String archiveName)
{
    using (FileStream fsTar = new FileStream(archiveName, FileMode.CreateNew))
    using (TarWriter tar = new TarWriter(fsTar))
    {
        for (int index = 0; index < files.Count; index++)
        {
            tar.Write(files[index]);
        }
    }
}
/// <summary>
/// Writes every queued file into an in-memory tar archive and returns its bytes.
/// </summary>
/// <param name="archiveName">Currently unused; kept for signature compatibility with Archive(String).</param>
/// <returns>The complete tar archive contents.</returns>
public Byte[] ArchiveToMemory(String archiveName)
{
    // FIX: the MemoryStream was never disposed (CA2000). ToArray() remains
    // valid after disposal, so the stream can safely live in a using block.
    using (MemoryStream msTar = new MemoryStream())
    {
        using (TarWriter tar = new TarWriter(msTar))
        {
            for (int i = 0; i < files.Count; i++)
            {
                tar.Write(files[i]);
            }
        }

        return msTar.ToArray();
    }
}
/// <summary>
/// Rebuilds the archive by streaming every non-directory entry (old entries
/// first, then new) into the output through a TarWriter.
/// </summary>
protected override void SaveTo(Stream stream, CompressionInfo compressionInfo, IEnumerable<TarArchiveEntry> oldEntries, IEnumerable<TarArchiveEntry> newEntries)
{
    using (TarWriter writer = new TarWriter(stream, compressionInfo))
    {
        foreach (TarArchiveEntry entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
        {
            using (Stream entryStream = entry.OpenEntryStream())
            {
                writer.Write(entry.Key, entryStream, entry.LastModifiedTime, new long?(entry.Size));
            }
        }
    }
}
/// <summary>
/// Rebuilds the archive by streaming every non-directory entry (old entries
/// first, then new) into the output through a TarWriter.
/// </summary>
protected override void SaveTo(Stream stream, CompressionInfo compressionInfo, IEnumerable<TarArchiveEntry> oldEntries, IEnumerable<TarArchiveEntry> newEntries)
{
    using (var writer = new TarWriter(stream, compressionInfo))
    {
        var fileEntries = oldEntries.Concat(newEntries).Where(entry => !entry.IsDirectory);

        foreach (var entry in fileEntries)
        {
            using (var source = entry.OpenEntryStream())
            {
                writer.Write(entry.FilePath, source, entry.LastModifiedTime, entry.Size);
            }
        }
    }
}
/// <summary>
/// Rebuilds the archive by streaming every non-directory entry (old entries
/// first, then new) into the output through a TarWriter.
/// </summary>
protected override void SaveTo(Stream stream, WriterOptions options, IEnumerable<TarArchiveEntry> oldEntries, IEnumerable<TarArchiveEntry> newEntries)
{
    using (var writer = new TarWriter(stream, options))
    {
        var fileEntries = oldEntries.Concat(newEntries).Where(entry => !entry.IsDirectory);

        foreach (var entry in fileEntries)
        {
            using (var source = entry.OpenEntryStream())
            {
                writer.Write(entry.Key, source, entry.LastModifiedTime, entry.Size);
            }
        }
    }
}
/// <summary>
/// TAR Archive: recursively adds a directory tree to the archive, skipping the
/// "log" directory entirely.
/// </summary>
/// <param name="archive">Destination archive writer.</param>
/// <param name="dir">Directory to back up.</param>
/// <param name="rootDirectory">Root prefix stripped from each entry name.</param>
private void BackupDirectory(TarWriter archive, String dir, String rootDirectory)
{
    if (Path.GetFileName(dir) == "log")
    {
        return;
    }

    // Backup
    foreach (var itm in Directory.GetDirectories(dir))
    {
        this.BackupDirectory(archive, itm, rootDirectory);
    }

    // Add files
    foreach (var itm in Directory.GetFiles(dir))
    {
        this.m_tracer.TraceVerbose("Backing up {0}", itm);

        // FIX: the FileStream returned by File.OpenRead was never disposed,
        // leaking one file handle per archived file.
        using (var source = File.OpenRead(itm))
        {
            archive.Write(itm.Replace(rootDirectory, ""), source, DateTime.Now);
        }
    }
}
/// <summary>
/// Imports a 3D model via Assimp and writes it to the output as a tar containing
/// "model.bin" plus one PNG entry per referenced texture.
/// </summary>
public override void Import(Stream input, Stream output, string filename)
{
    var steps = PostProcessSteps.Triangulate |
                PostProcessSteps.SortByPrimitiveType |
                PostProcessSteps.GenerateNormals;

    using (var importer = new AssimpContext())
    {
        var scene = importer.ImportFileFromStream(input, steps, Path.GetExtension(filename));

        using (var tw = new TarWriter(output))
        {
            var textures = new Dictionary<string, string>();

            // Serialize the scene geometry into model.bin; WriteModel also
            // records which textures the model references.
            using (var modelStream = new MemoryStream())
            {
                using (var modelWriter = new BinaryWriter(modelStream))
                {
                    this.WriteModel(scene, modelWriter, textures);
                    modelWriter.Flush();
                    //this.PrintNode(scene, scene.RootNode, 0);
                    modelStream.Position = 0;
                    tw.Write(modelStream, modelStream.Length, "model.bin");
                }
            }

            this.WriteTextures(scene, tw, textures);
        }
    }
}
/// <summary>
/// Adds every file in the directory to the tar, and recurses into subdirectories.
/// </summary>
private void AddAllToTar(string root, TarWriter tar)
{
    Log("Opening in " + root + "...");

    // Add subdirectories first, then this directory's own files.
    foreach (var subDirectory in Directory.GetDirectories(root))
    {
        AddAllToTar(subDirectory, tar);
    }

    foreach (var filePath in Directory.GetFiles(root))
    {
        var fileInfo = new FileInfo(filePath);
        Log("Writing " + fileInfo.Name + "... (" + Util.FormatFileSize(fileInfo.Length) + ")");

        using (var source = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        {
            tar.Write(source);
        }
    }
}
/// <summary>
/// Writes the raw bytes of <paramref name="value"/> to the archive at <paramref name="path"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
private void WriteByteString(TarWriter writer, string path, ByteString value)
{
    // FIX: a null argument should raise ArgumentNullException, not
    // ArgumentOutOfRangeException (CA2208).
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }

    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    if (value == null)
    {
        throw new ArgumentNullException(nameof(value));
    }

    using (var stream = new MemoryStream())
    {
        value.WriteTo(stream);
        stream.Position = 0;
        writer.Write(path, stream);
    }
}
/// <summary>
/// Adds a file to the currently opened archive, optionally running its content
/// through GetConvertedStream first when it is a text file.
/// </summary>
/// <param name="fileName">Path of the file to add; only its name is stored in the archive.</param>
/// <param name="userId">Numeric owner id recorded in the tar header.</param>
/// <param name="groupId">Numeric group id recorded in the tar header.</param>
/// <param name="mode">Unix permission bits; default is 0100644 (regular file, rw-r--r--).</param>
/// <param name="isText">When true, archive the converted stream instead of the raw file bytes.</param>
private void AddFile(string fileName, int userId = 0, int groupId = 0, int mode = 33188 /* 0100644 */, bool isText = false)
{
    var fileInfo = new FileInfo(fileName);

    using (var source = File.OpenRead(fileName))
    {
        if (!isText)
        {
            tarWriter.Write(source, fileInfo.Length, fileInfo.Name, userId, groupId, mode, fileInfo.LastWriteTimeUtc);
            return;
        }

        using (var converted = GetConvertedStream(source))
        {
            tarWriter.Write(converted, converted.Length, fileInfo.Name, userId, groupId, mode, fileInfo.LastWriteTimeUtc);
        }
    }
}
/// <summary>
/// Verifies that a tar containing a single file entry matches the stored
/// fixture byte-for-byte.
/// </summary>
public void CanCreateATarFileWithOneFileEntry()
{
    using (Stream outStream = BuildOutStream())
    {
        // Fixed timestamp keeps the archive bytes deterministic.
        var timestamp = new DateTime(2013, 4, 1, 13, 12, 58, 548).ToUniversalTime();

        using (var writer = new TarWriter(outStream))
        using (var entry = GetFileEntryFrom(@"single\1.txt"))
        {
            writer.Write(entry.Path, entry.Stream, 511, timestamp);
        }

        outStream.Seek(0, SeekOrigin.Begin);

        using (var reader = new StreamReader(outStream))
        using (var expectedContentStream = GetExpectedStream("CanCreateATarFileWithOneFileEntry"))
        {
            string actual = reader.ReadToEnd();
            string expected = expectedContentStream.ReadToEnd();
            Assert.Equal(expected, actual);
        }
    }
}
/// <summary>
/// Imports a BMFont-style ".fnt" text descriptor plus its texture page and
/// writes a tar containing "font.atlas" (binary glyph/kerning data) and
/// "font.png" (the texture run through ImageHelper.PremultiplyAlpha).
/// </summary>
/// <exception cref="ContentException">The font declares more than one texture page.</exception>
public override void Import(string input, Stream output)
{
    // NOTE(review): the reader is disposed manually below and leaks if parsing
    // throws — consider a using block.
    var sr = new StreamReader(Path.GetFileNameWithoutExtension(input) + ".fnt");

    string fontFace = "", textureFile = null;
    int fontSize = 0;
    float lineHeight = 0f, lineBase = 0f, scaleW = 0f, scaleH = 0f, pixelScale = 0f;
    string line;
    var attrs = new Dictionary<string, string>();
    var kernings = new Dictionary<ulong, float>();
    var chars = new List<Char>();

    while ((line = sr.ReadLine()) != null)
    {
        // Normalize separators: unquoted spaces and XML-ish angle brackets become
        // tabs so the line can be split into "key=value" parts, while quoted
        // values keep their embedded spaces.
        var inQuotes = false;
        var ch = line.ToCharArray();
        for (var i = 0; i < line.Length; i++)
        {
            if (ch[i] == '\"' /*&& (i == 0 || ch [i - 1] != '\\')*/)
            {
                inQuotes = !inQuotes;
            }
            if (!inQuotes && ch[i] == ' ')
            {
                ch[i] = '\t';
            }
            if (!inQuotes && (ch[i] == '<' || ch[i] == '>' || ch[i] == '/' && i + 1 < ch.Length && ch[i + 1] == '>'))
            {
                ch[i] = '\t';
            }
        }
        line = new string(ch);

        // parts[0] is the record tag ("info", "common", ...); the rest are
        // key=value pairs collected into attrs for this line only.
        var parts = line.Split(new[] { '\t' }, StringSplitOptions.RemoveEmptyEntries);
        for (var i = 1; i < parts.Length; i++)
        {
            var idx = parts[i].IndexOf("=");
            if (idx > 0)
            {
                var value = parts[i].Substring(idx + 1, parts[i].Length - idx - 1);
                // Strip surrounding quotes, if present.
                if (value[0] == '\"' && value[value.Length - 1] == '\"')
                {
                    value = value.Substring(1, value.Length - 2);
                }
                attrs.Add(parts[i].Substring(0, idx), value);
            }
        }

        switch (parts[0])
        {
            case "info":
                fontFace = attrs["face"];
                fontSize = int.Parse(attrs["size"]);
                break;
            case "common":
                // Each metric is optional; zero means "not specified" and is
                // defaulted after parsing completes.
                if (attrs.ContainsKey("lineHeight"))
                {
                    lineHeight = float.Parse(attrs["lineHeight"]);
                }
                if (attrs.ContainsKey("base"))
                {
                    lineBase = float.Parse(attrs["base"]);
                }
                if (attrs.ContainsKey("scaleW"))
                {
                    scaleW = float.Parse(attrs["scaleW"]);
                }
                if (attrs.ContainsKey("scaleH"))
                {
                    scaleH = float.Parse(attrs["scaleH"]);
                }
                if (attrs.ContainsKey("pixelScale"))
                {
                    pixelScale = float.Parse(attrs["pixelScale"]);
                }
                break;
            case "page":
                if (textureFile != null)
                {
                    throw new ContentException("Only one page per font is supported.");
                }
                textureFile = attrs["file"];
                break;
            case "char":
                chars.Add(new Char
                {
                    id = int.Parse(attrs["id"]),
                    x = float.Parse(attrs["x"]),
                    y = float.Parse(attrs["y"]),
                    width = float.Parse(attrs["width"]),
                    height = float.Parse(attrs["height"]),
                    xoffset = float.Parse(attrs["xoffset"]),
                    yoffset = float.Parse(attrs["yoffset"]),
                    xadvance = float.Parse(attrs["xadvance"])
                });
                break;
            case "kerning":
                // Pack the (first, second) glyph-id pair into one 64-bit key.
                var first = ulong.Parse(attrs["first"]);
                var second = ulong.Parse(attrs["second"]);
                var combined = (first << 32) | second;
                kernings.Add(combined, float.Parse(attrs["amount"]));
                break;
        }
        attrs.Clear();
    }
    sr.Dispose();

    using (var img = Image.FromFile(textureFile))
    {
        // Fall back to sensible defaults for fields the .fnt file omitted.
        if (lineHeight == 0f)
        {
            lineHeight = fontSize;
        }
        if (lineBase == 0f)
        {
            lineBase = lineHeight - 1;
        }
        if (scaleW == 0f || scaleH == 0f)
        {
            scaleW = img.Width;
            scaleH = img.Height;
        }
        if (pixelScale == 0f)
        {
            pixelScale = 1f;
        }

        var ms = new MemoryStream();
        using (var bw = new BinaryWriter(ms))
        {
            // Header: face, size, scale, then line metrics divided by pixelScale.
            bw.Write(fontFace);
            bw.Write(fontSize);
            bw.Write(pixelScale);
            bw.Write(lineHeight / pixelScale);
            bw.Write(lineBase / pixelScale);

            // Glyph records: id, UV rectangle normalized by scaleW/scaleH
            // (v axis flipped), pixel size, offsets and advance.
            bw.Write(chars.Count);
            foreach (var ch in chars)
            {
                bw.Write(ch.id);
                bw.Write(ch.x / scaleW);
                bw.Write(((ch.y + ch.height) / scaleH));
                bw.Write((ch.x + ch.width) / scaleW);
                bw.Write(ch.y / scaleH);
                bw.Write(ch.width / pixelScale);
                bw.Write(ch.height / pixelScale);
                bw.Write(ch.xoffset / pixelScale);
                bw.Write(((lineHeight - ch.height) - ch.yoffset) / pixelScale);
                bw.Write(ch.xadvance / pixelScale);
            }

            // Kerning pairs: packed 64-bit key plus scaled adjustment.
            bw.Write(kernings.Count);
            foreach (var kvp in kernings)
            {
                bw.Write(kvp.Key);
                bw.Write(kvp.Value / pixelScale);
            }
            bw.Flush();
            ms.Position = 0;

            using (var tw = new TarWriter(output))
            {
                tw.Write(ms, ms.Length, "font.atlas");
                using (var ts = new MemoryStream())
                {
                    using (var texture = ImageHelper.PremultiplyAlpha(img))
                    {
                        texture.Save(ts, ImageFormat.Png);
                    }
                    ts.Position = 0;
                    tw.Write(ts, ts.Length, "font.png");
                }
            }
        }
    }
}
/// <summary>
/// Imports a RIFF/WAVE file and writes a tar containing "sound.bin"
/// (SfxMetadata) followed by "sound.pcm" (the raw bytes of the data chunk).
/// </summary>
/// <exception cref="InvalidDataException">The stream is not a RIFF/WAVE file.</exception>
/// <exception cref="NotSupportedException">The format is neither PCM nor IEEE float.</exception>
public override void Import(Stream input, Stream output, string filename)
{
    var reader = new BinaryReader(input);

    // RIFF container header: "RIFF" <fileSize> "WAVE".
    int chunkID = reader.ReadInt32();
    if (chunkID != RIFF)
    {
        throw new InvalidDataException();
    }
    reader.ReadInt32(); // fileSize
    int riffType = reader.ReadInt32();
    if (riffType != WAVE)
    {
        throw new InvalidDataException();
    }

    // Skip chunks until the "fmt " chunk.
    // NOTE(review): RIFF chunks are word-aligned and odd-sized chunks carry a
    // pad byte this skip loop does not account for — confirm inputs are
    // even-sized, or this scan can desynchronize.
    while (reader.ReadInt32() != FMT_)
    {
        var dummy = reader.ReadInt32();
        reader.ReadBytes(dummy);
    }

    // "fmt " chunk: format code, channel count, sample rate, bit depth.
    int fmtSize = reader.ReadInt32();
    int fmtCode = reader.ReadInt16();
    int channels = reader.ReadInt16();
    int sampleRate = reader.ReadInt32();
    reader.ReadInt32(); // fmtAvgBPS
    reader.ReadInt16(); // fmtBlockAlign
    int bitDepth = reader.ReadInt16();
    if (fmtSize == 18)
    {
        // Extended fmt chunk: skip the trailing extension bytes.
        int fmtExtraSize = reader.ReadInt16();
        reader.ReadBytes(fmtExtraSize);
    }

    // Skip chunks until the "data" chunk, then read all sample bytes.
    while (reader.ReadInt32() != DATA)
    {
        var dummy = reader.ReadInt32();
        reader.ReadBytes(dummy);
    }
    int dataSize = reader.ReadInt32();
    byte[] byteArray = reader.ReadBytes(dataSize);

    if (fmtCode != WAVE_FORMAT_PCM && fmtCode != WAVE_FORMAT_IEEE_FLOAT)
    {
        throw new NotSupportedException("Wave files must be PCM or IEEE_FLOAT format.");
    }

    // Length is expressed in sample frames: bytes / bytes-per-sample / channels.
    var md = new SfxMetadata()
    {
        Bits = (fmtCode == WAVE_FORMAT_IEEE_FLOAT) ? 32 : bitDepth,
        Rate = sampleRate,
        Channels = channels,
        Length = dataSize / (bitDepth / 8) / channels,
    };

    using (var tw = new TarWriter(output))
    {
        using (var ms = new MemoryStream())
        {
            using (var bw = new BinaryWriter(ms))
            {
                md.Write(bw);
                ms.Position = 0;
                tw.Write(ms, ms.Length, "sound.bin");
            }
        }
        tw.Write(new MemoryStream(byteArray), byteArray.Length, "sound.pcm");
    }
}