/// <summary>
/// Deserialize a StreamChunk from <paramref name="stream"/> into a freshly
/// created instance and return it.
/// </summary>
public static StreamChunk Deserialize(Stream stream)
{
    StreamChunk result = new StreamChunk();
    Deserialize(stream, result);
    return result;
}
/// <summary>
/// Read the file at <paramref name="path"/>, split its contents into 4 KiB
/// chunks, store each chunk in <paramref name="repo"/>, and record the chunk
/// hashes (plus total size) in a StreamChunk which is itself serialized and
/// stored in the repo.
/// </summary>
/// <param name="path">Path of the file to chunk.</param>
/// <param name="repo">Chunk store that receives the data and index chunks.</param>
/// <returns>Hash of the stored StreamChunk describing the file.</returns>
public static ChunkHash GenerateChunk(string path, Repo repo)
{
    StreamChunk message = new StreamChunk();
    // Open read-only with shared read access: FileMode.Open alone defaults to
    // FileAccess.ReadWrite and throws on read-only files or files already
    // opened by another reader.
    using (Stream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (BinaryReader br = new BinaryReader(stream))
    {
        message.Size = (ulong)stream.Length;
        while (true)
        {
            // ReadBytes returns fewer than 4096 bytes only at end of stream,
            // and an empty array once the stream is exhausted.
            byte[] data = br.ReadBytes(4096);
            if (data.Length == 0)
            {
                break;
            }
            Chunk c = new Chunk(data);
            ChunkHash ch = repo.WriteChunk(c);
            message.Chunks.Add(ch.bytes);
        }
    }
    byte[] messageBytes = StreamChunk.SerializeToBytes(message);
    Chunk messageChunk = new Chunk(messageBytes);
    ChunkHash messageHash = repo.WriteChunk(messageChunk);
    return messageHash;
}
/// <summary>
/// Serialize <paramref name="instance"/> and return the encoded bytes.
/// </summary>
public static byte[] SerializeToBytes(StreamChunk instance)
{
    using (MemoryStream buffer = new MemoryStream())
    {
        Serialize(buffer, instance);
        return buffer.ToArray();
    }
}
/// <summary>
/// Read the file at <paramref name="path"/>, split the contents into 4 KiB
/// chunks and store them in <paramref name="repo"/> together with a
/// StreamChunk that lists their hashes and the total file size.
/// </summary>
/// <param name="path">Path of the file to chunk.</param>
/// <param name="repo">Chunk store that receives the data and index chunks.</param>
/// <returns>Hash of the stored StreamChunk describing the file.</returns>
public static ChunkHash GenerateChunk(string path, Repo repo)
{
    StreamChunk message = new StreamChunk();
    // Read-only + shared read: plain FileMode.Open implies ReadWrite access,
    // which fails on read-only files even though we never write.
    using (Stream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (BinaryReader br = new BinaryReader(stream))
    {
        message.Size = (ulong)stream.Length;
        while (true)
        {
            // Empty result marks end of stream.
            byte[] data = br.ReadBytes(4096);
            if (data.Length == 0)
            {
                break;
            }
            Chunk c = new Chunk(data);
            ChunkHash ch = repo.WriteChunk(c);
            message.Chunks.Add(ch.bytes);
        }
    }
    byte[] messageBytes = StreamChunk.SerializeToBytes(message);
    Chunk messageChunk = new Chunk(messageBytes);
    ChunkHash messageHash = repo.WriteChunk(messageChunk);
    return messageHash;
}
/// <summary>
/// Recursively store the directory at <paramref name="path"/> in
/// <paramref name="repo"/>: subdirectories become nested TreeChunks, regular
/// files become StreamChunks, and the resulting tree listing is serialized
/// and stored itself.
/// </summary>
/// <returns>Hash of the stored TreeChunk for this directory.</returns>
public static ChunkHash GenerateChunk(string path, Repo repo)
{
    string root = Path.GetFullPath(path);
    TreeChunk tree = new TreeChunk();

    // Recurse into each subdirectory and record its tree hash.
    foreach (string dir in Directory.GetDirectories(root))
    {
        TreeFile entry = new TreeFile();
        entry.Name = Path.GetFileName(dir);
        entry.TreeChunkHash = TreeChunk.GenerateChunk(dir, repo).bytes;
        tree.Directories.Add(entry);
    }

    // Chunk each regular file in this directory and record its stream hash.
    foreach (string file in Directory.GetFiles(root))
    {
        TreeFile entry = new TreeFile();
        entry.Name = Path.GetFileName(file);
        entry.TreeChunkHash = StreamChunk.GenerateChunk(file, repo).bytes;
        tree.Files.Add(entry);
    }

    Chunk treeChunk = new Chunk(TreeChunk.SerializeToBytes(tree));
    return repo.WriteChunk(treeChunk);
}
/// <summary>
/// Write <paramref name="instance"/> to <paramref name="stream"/> as
/// protocol-buffer fields: field 1 (varint) is Size, field 2
/// (length-delimited, repeated) is each entry of Chunks.
/// </summary>
public static void Serialize(Stream stream, StreamChunk instance)
{
    // Field 1: total stream size.
    ProtocolParser.WriteKey(stream, new ProtocolBuffers.Key(1, Wire.Varint));
    ProtocolParser.WriteUInt64(stream, instance.Size);

    // Field 2: one length-delimited record per chunk hash (skipped entirely
    // when the list is absent).
    if (instance.Chunks == null)
    {
        return;
    }
    foreach (byte[] hash in instance.Chunks)
    {
        ProtocolParser.WriteKey(stream, new ProtocolBuffers.Key(2, Wire.LengthDelimited));
        ProtocolParser.WriteBytes(stream, hash);
    }
}
/// <summary>
/// Recreate the directory tree identified by <paramref name="cid"/> under
/// <paramref name="targetPath"/>, extracting files via StreamChunk and
/// subdirectories via recursive TreeChunk extraction.
/// </summary>
public static void Extract(Repo store, ChunkHash cid, string targetPath)
{
    Directory.CreateDirectory(targetPath);
    TreeChunk tree = TreeChunk.Deserialize(store.ReadChunk(cid).Data);

    foreach (TreeFile entry in tree.Files)
    {
        string filePath = Path.Combine(targetPath, entry.Name);
        StreamChunk.Extract(store, ChunkHash.FromHashBytes(entry.TreeChunkHash), filePath);
    }

    foreach (TreeFile entry in tree.Directories)
    {
        string dirPath = Path.Combine(targetPath, entry.Name);
        TreeChunk.Extract(store, ChunkHash.FromHashBytes(entry.TreeChunkHash), dirPath);
    }
}
/// <summary>
/// Reassemble the file identified by <paramref name="fileHash"/> into
/// <paramref name="targetPath"/> by concatenating its stored chunks, then
/// verify the result against the recorded size.
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown when the written length differs from the StreamChunk's Size.
/// </exception>
public static void Extract(Repo store, ChunkHash fileHash, string targetPath)
{
    Chunk indexChunk = store.ReadChunk(fileHash);
    StreamChunk streamChunk = StreamChunk.Deserialize<StreamChunk>(indexChunk.Data);

    using (FileStream output = File.Open(targetPath, FileMode.Create))
    {
        foreach (byte[] hashBytes in streamChunk.Chunks)
        {
            Chunk part = store.ReadChunk(ChunkHash.FromHashBytes(hashBytes));
            output.Write(part.Data, 0, part.Data.Length);
        }

        // Sanity check: reassembled length must match the recorded size.
        if (output.Length != (long)streamChunk.Size)
        {
            throw new InvalidDataException("Invalid file length");
        }
    }
}