/// <summary>
///   Builds the intermediate (tree) node that links together the given
///   file nodes, persists it, and returns its descriptor.
/// </summary>
/// <param name="nodes">The child nodes (file blocks) to link.</param>
/// <param name="options">Controls hashing, encoding, pinning and hash-only mode.</param>
/// <param name="cancel">Stops the task; raises <see cref="TaskCanceledException"/> when cancelled.</param>
/// <returns>The saved tree node, with aggregated file and DAG sizes.</returns>
private async Task<UnixFsNode> BuildTreeNodeAsync(IEnumerable<UnixFsNode> nodes, AddFileOptions options, CancellationToken cancel)
{
    var blockService = GetBlockService(options);

    // Build the DAG that contains all the file nodes.
    var unixFsNodes = nodes as UnixFsNode[] ?? nodes.ToArray();
    var links = unixFsNodes.Select(n => n.ToLink()).ToArray();
    var fileSize = (ulong)unixFsNodes.Sum(n => n.Size);
    var dagSize = unixFsNodes.Sum(n => n.DagSize);
    var dm = new DataMessage
    {
        Type = DataType.File,
        FileSize = fileSize,
        BlockSizes = unixFsNodes.Select(n => (ulong)n.Size).ToArray()
    };
    var pb = new MemoryStream();
    Serializer.Serialize(pb, dm);

    // FIX: honour the requested hashing algorithm. The previous code used
    // the default hash here, unlike the sibling CreateDirectoryAsync/ChunkAsync
    // paths, so a non-default options.Hash produced an inconsistently hashed root.
    var dag = new DagNode(pb.ToArray(), links, options.Hash);

    // Save it.
    dag.Id = await blockService.PutAsync(
        data: dag.ToArray(),
        multiHash: options.Hash,
        encoding: options.Encoding,
        pin: options.Pin,
        cancel: cancel).ConfigureAwait(false);

    return new UnixFsNode
    {
        Id = dag.Id,
        Size = (long)dm.FileSize,
        // DagSize aggregates the children's DAG sizes plus this node's own size.
        DagSize = dagSize + dag.Size,
        Links = links
    };
}
public async Task AddFile_WithProgress()
{
    // Write an 11-byte file ("hello world") to a temp path.
    var path = Path.GetTempFileName();
    File.WriteAllText(path, "hello world");
    try
    {
        var ipfs = TestFixture.Ipfs;
        var bytesTransferred = 0UL;
        var options = new AddFileOptions
        {
            Progress = new Progress<TransferProgress>(t => bytesTransferred += t.Bytes)
        };

        var result = await ipfs.FileSystem.AddFileAsync(path, options);
        Assert.AreEqual("Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD", (string)result.Id);

        // Progress reports get posted on another synchronisation context,
        // so poll for up to 3 seconds before asserting the byte count.
        var deadline = DateTime.Now.AddSeconds(3);
        while (DateTime.Now < deadline && bytesTransferred != 11UL)
        {
            await Task.Delay(10);
        }

        Assert.AreEqual(11UL, bytesTransferred);
    }
    finally
    {
        File.Delete(path);
    }
}
protected override async Task<int> OnExecute(CommandLineApplication app)
{
    // Map the command line switches onto the add-file options.
    var options = new AddFileOptions
    {
        ChunkSize = ChunkSize,
        Encoding = Encoding,
        Hash = Hash,
        OnlyHash = OnlyHash,
        Pin = Pin,
        RawLeaves = RawLeaves,
        Trickle = Trickle,
        Wrap = Wrap,
        ProtectionKey = ProtectionKey
    };
    if (Progress)
    {
        options.Progress = new Progress<TransferProgress>(
            t => Console.WriteLine($"{t.Name} {t.Bytes}"));
    }

    // A directory is added (optionally recursively); anything else is a single file.
    IFileSystemNode node;
    if (Directory.Exists(FilePath))
    {
        node = await Parent.CoreApi.FileSystem.AddDirectoryAsync(FilePath, Recursive, options);
    }
    else
    {
        node = await Parent.CoreApi.FileSystem.AddFileAsync(FilePath, options);
    }

    // Report the CID of the added node.
    return Parent.Output(app, node, (data, writer) =>
    {
        writer.WriteLine($"{data.Id.Encode()} added");
    });
}
public async Task GetTar_RawLeaves()
{
    var ipfs = TestFixture.Ipfs;
    var temp = MakeTemp();
    try
    {
        var options = new AddFileOptions { RawLeaves = true };
        // FIX: await instead of blocking on .Result inside an async method;
        // blocking risks deadlock on a synchronisation context and wraps
        // failures in AggregateException instead of the real exception.
        var dir = await ipfs.FileSystem.AddDirectoryAsync(temp, true, options);
        var dirid = dir.Id.Encode();

        // Download the directory as a tar archive and list its entries.
        var tar = await ipfs.FileSystem.GetAsync(dir.Id);
        var archive = TarArchive.CreateInputTarArchive(tar);
        var files = new List<string>();
        archive.ProgressMessageEvent += (a, e, m) => { files.Add(e.Name); };
        archive.ListContents();

        // Entries are rooted at the directory CID and preserve the tree layout.
        Assert.AreEqual($"{dirid}", files[0]);
        Assert.AreEqual($"{dirid}/alpha.txt", files[1]);
        Assert.AreEqual($"{dirid}/beta.txt", files[2]);
        Assert.AreEqual($"{dirid}/x", files[3]);
        Assert.AreEqual($"{dirid}/x/x.txt", files[4]);
        Assert.AreEqual($"{dirid}/x/y", files[5]);
        Assert.AreEqual($"{dirid}/x/y/y.txt", files[6]);
    }
    finally
    {
        Directory.Delete(temp, true);
    }
}
public async Task Add_RawAndChunked()
{
    var ipfs = TestFixture.Ipfs;
    // Raw leaves plus a 3-byte chunk size split "hello world" (11 bytes)
    // into four raw blocks under a single root node.
    var options = new AddFileOptions { RawLeaves = true, ChunkSize = 3 };
    var node = await ipfs.FileSystem.AddTextAsync("hello world", options);

    Assert.AreEqual("QmUuooB6zEhMmMaBvMhsMaUzar5gs5KwtVSFqG4C1Qhyhs", (string)node.Id);
    Assert.AreEqual(false, node.IsDirectory);

    var links = node.Links.ToArray();
    Assert.AreEqual(4, links.Length);
    var expected = new[]
    {
        "bafkreigwvapses57f56cfow5xvoua4yowigpwcz5otqqzk3bpcbbjswowe",
        "bafkreiew3cvfrp2ijn4qokcp5fqtoknnmr6azhzxovn6b3ruguhoubkm54",
        "bafkreibsybcn72tquh2l5zpim2bba4d2kfwcbpzuspdyv2breaq5efo7tq",
        "bafkreihfuch72plvbhdg46lef3n5zwhnrcjgtjywjryyv7ffieyedccchu"
    };
    for (var i = 0; i < expected.Length; ++i)
    {
        Assert.AreEqual(expected[i], (string)links[i].Id);
    }

    // The chunked content must round-trip.
    var text = await ipfs.FileSystem.ReadAllTextAsync(node.Id);
    Assert.AreEqual("hello world", text);
}
public void AddDirectory_WithCidEncoding()
{
    const string encoding = "base32z";
    var options = new AddFileOptions { Encoding = encoding };
    var temp = MakeTemp();
    try
    {
        var dir = ipfs.UnixFsApi.AddDirectoryAsync(temp, false, options).Result;

        // The directory CID and every child link CID must use the requested base.
        Assert.True(dir.IsDirectory);
        Assert.AreEqual(encoding, dir.Id.Encoding);
        foreach (var link in dir.Links)
        {
            Assert.AreEqual(encoding, link.Id.Encoding);
        }
    }
    finally
    {
        Directory.Delete(temp, true);
    }
}
public void AddDirectory_WithHashAlgorithm()
{
    const string alg = "keccak-512";
    var options = new AddFileOptions { Hash = alg };
    var temp = MakeTemp();
    try
    {
        var dir = ipfs.UnixFsApi.AddDirectoryAsync(temp, false, options).Result;

        // The directory node and every child link must be hashed with the
        // requested algorithm.
        Assert.True(dir.IsDirectory);
        Assert.AreEqual(alg, dir.Id.Hash.Algorithm.Name);
        foreach (var link in dir.Links)
        {
            Assert.AreEqual(alg, link.Id.Hash.Algorithm.Name);
        }
    }
    finally
    {
        Directory.Delete(temp, true);
    }
}
public async Task Read_ProtectedWithLength()
{
    const string text = "hello world";
    var options = new AddFileOptions { ProtectionKey = "self" };
    var node = await ipfs.UnixFsApi.AddTextAsync(text, options);

    // Exercise every (offset, length) combination, including a length that
    // runs one past the end of the content.
    for (var offset = 0; offset < text.Length; ++offset)
    {
        for (var length = text.Length + 1; length > 0; --length)
        {
            await using (var data = await ipfs.UnixFsApi.ReadFileAsync(node.Id, offset, length))
            using (var reader = new StreamReader(data))
            {
                var readData = reader.ReadToEnd();
                var expected = text.Substring(offset, Math.Min(11 - offset, length));
                Assert.AreEqual(expected, readData);
            }
        }
    }
}
public async Task Add_RawAndChunked()
{
    var ipfs = TestFixture.Ipfs;
    // Raw leaves plus a 3-byte chunk size split "hello world" (11 bytes)
    // into four raw blocks under one root node.
    var options = new AddFileOptions { RawLeaves = true, ChunkSize = 3 };
    var node = await ipfs.FileSystem.AddTextAsync("hello world", options);

    Assert.AreEqual("QmUuooB6zEhMmMaBvMhsMaUzar5gs5KwtVSFqG4C1Qhyhs", (string)node.Id);
    Assert.AreEqual(false, node.IsDirectory);

    var links = node.Links.ToArray();
    Assert.AreEqual(4, links.Length);
    var expected = new[]
    {
        "zb2rhm6D8PTYoMh7PSFKbCxxcD1yjWPD5KPr6nVRuw9ymDyUL",
        "zb2rhgo7y6J7p76kCrXs4pmmMQx56fZeWJkC3sfbjeay4UruU",
        "zb2rha4Pd2AruByr2RwzhRCVxRCqBC67h7ukTJd99jCjUtmyM",
        "zb2rhn6eZLLj7vdVizbNxpASGoVw4vcSmc8avHXmDMVu5ZA6Q"
    };
    for (var i = 0; i < expected.Length; ++i)
    {
        Assert.AreEqual(expected[i], (string)links[i].Id);
    }

    // The content must round-trip through the chunked representation.
    var text = await ipfs.FileSystem.ReadAllTextAsync(node.Id);
    Assert.AreEqual("hello world", text);
}
public void AddFile_CidEncoding()
{
    var path = Path.GetTempFileName();
    File.WriteAllText(path, "hello world");
    try
    {
        var options = new AddFileOptions { Encoding = "base32" };
        var node = ipfs.UnixFsApi.AddFileAsync(path, options).Result;

        // A non-default multibase forces a version-1 CID in that encoding.
        Assert.AreEqual("base32", node.Id.Encoding);
        Assert.AreEqual(1, node.Id.Version);
        Assert.AreEqual(0, node.Links.Count());

        // Content must still round-trip.
        var text = ipfs.UnixFsApi.ReadAllTextAsync(node.Id).Result;
        Assert.AreEqual("hello world", text);
    }
    finally
    {
        File.Delete(path);
    }
}
public async Task Read_ProtectedChunkedWithLength()
{
    const string text = "hello world";
    var ipfs = TestFixture.Ipfs;
    var options = new AddFileOptions { ChunkSize = 3, ProtectionKey = "self" };
    var node = await ipfs.FileSystem.AddTextAsync(text, options);

    // Exercise every (offset, length) combination, including a length that
    // runs one past the end of the content.
    for (var offset = 0; offset < text.Length; ++offset)
    {
        for (var length = text.Length + 1; length > 0; --length)
        {
            using (var data = await ipfs.FileSystem.ReadFileAsync(node.Id, offset, length))
            using (var reader = new StreamReader(data))
            {
                var s = reader.ReadToEnd();
                var expected = text.Substring(offset, Math.Min(11 - offset, length));
                Assert.AreEqual(expected, s, $"o={offset} l={length}");
            }
        }
    }
}
/// <summary>
///   Chooses the destination for blocks: a hash-only service when the
///   caller just wants the CID computed, otherwise the node's block API.
/// </summary>
IBlockApi GetBlockService(AddFileOptions options)
    => options.OnlyHash ? new HashOnlyBlockService() : ipfs.Block;
/// <summary>
///   Creates and persists a unixfs directory node that links the given children.
/// </summary>
/// <param name="links">The links to the directory's children.</param>
/// <param name="options">Controls hashing, encoding and pinning.</param>
/// <param name="cancel">Stops the task when cancelled.</param>
/// <returns>The saved directory node.</returns>
async Task<FileSystemNode> CreateDirectoryAsync(IEnumerable<IFileSystemLink> links, AddFileOptions options, CancellationToken cancel)
{
    // A bare Directory message; the children live in the DAG links.
    var message = new DataMessage { Type = DataType.Directory };
    var buffer = new MemoryStream();
    ProtoBuf.Serializer.Serialize<DataMessage>(buffer, message);
    var dag = new DagNode(buffer.ToArray(), links, options.Hash);

    // Persist the directory node.
    var cid = await GetBlockService(options).PutAsync(
        data: dag.ToArray(),
        multiHash: options.Hash,
        encoding: options.Encoding,
        pin: options.Pin,
        cancel: cancel).ConfigureAwait(false);

    return new FileSystemNode
    {
        Id = cid,
        Links = links,
        IsDirectory = true
    };
}
/// <summary>
///   Selects the block store for an add: a throw-away hash-only store when
///   only the CID is wanted, otherwise the real block API.
/// </summary>
private IBlockApi GetBlockService(AddFileOptions options)
    => options.OnlyHash ? new HashOnlyBlockService() : _blockApi;
/// <summary>
///   Not supported by this implementation.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public Task<IFileSystemNode> AddAsync(Stream stream, string name = "", AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
    => throw new NotImplementedException();
/// <summary>
///   Test double: records the requested file name and target path so the
///   test can assert on them; no file is actually copied.
/// </summary>
public override Task<string> AddFileAsync(string fileName, string targetPath, AddFileOptions addFileOptions = null)
{
    // Capture the call's arguments for later inspection.
    AddedFileInputFileName = fileName;
    AddedFileTargetFilePath = targetPath;

    // Complete synchronously with an empty result.
    return Task.FromResult(string.Empty);
}
/// <summary>
/// Copy a file to a project relative path.
/// </summary>
/// <param name="projectHierarchy">
/// The project where to add the file.
/// </param>
/// <param name="fileName">
/// The path to the file to copy.
/// </param>
/// <param name="targetPath">
/// The target path, including the filename.
/// </param>
/// <param name="addFileOptions">
/// The options to use while coping the file.
/// </param>
private async Task AddFileToProjectInFolder(IVsHierarchy projectHierarchy, string fileName, string targetPath, AddFileOptions addFileOptions)
{
    // Normalise the target to a project-relative path and locate the DTE project.
    targetPath = AzureIoTHubConnectedServiceHandlerHelper.GetProjectRelativePath(projectHierarchy, targetPath);
    Project project = ConnectedServicesUtilities.GetDteProject(projectHierarchy);
    ProjectItems items = project.ProjectItems;

    // Copy the source; CopyFileAsync returns the path of the copied source file.
    fileName = await this.CopyFileAsync(fileName, targetPath);

    string fileToAdd = ConnectedServicesUtilities.GetProjectFullPath(projectHierarchy, targetPath);
    string targetFileName = Path.GetFileName(fileToAdd);

    // Build the directory structure if it doesn't already exist.
    Directory.CreateDirectory(Path.GetDirectoryName(fileToAdd));

    // clone the AdditionalReplacementValues dictionary so we aren't modifying the original
    Dictionary<string, string> replacementValues = new Dictionary<string, string>(addFileOptions.AdditionalReplacementValues);

    ProjectItem item = AzureIoTHubConnectedServiceHandlerHelper.GetNestedProjectItem(items, targetPath);
    bool existOnDisk = File.Exists(fileToAdd);
    if (item == null && existOnDisk)
    {
        // The file is not in the project. We should add the file.
        // This is some arbitrary file, which we'll update in the same
        // path as existing project files.
        // This is 'fileToAdd' because we're not adding the final file here.
        item = items.AddFromFile(fileToAdd);
    }

    if (item != null)
    {
        // Add the folder-specific RootNamespace replacement value so $RootNamespace$ has the folder structure in it for C# projects
        this.AddRootNamespaceReplacementValue(replacementValues, item.Collection);

        // Compare the existing item to the incoming file (after token replacement).
        bool filesEqual = this.AreFilesEqualWithReplacement(item, fileName, replacementValues);
        if (!filesEqual)
        {
            if (!addFileOptions.SuppressOverwritePrompt && !this.PromptOverwrite(targetFileName))
            {
                // The user chose not to overwrite the file, so abort adding this file.
                return;
            }

            // Get the document and overwrite with file content.
            BufferUtilities.UpdateProjectItemFromFile(item, fileName);
        }
    }
    else
    {
        // Neither in the project nor on disk: copy the file in, then add it.
        File.Copy(fileName, fileToAdd);
        item = items.AddFromFile(fileToAdd);

        // Add the folder-specific RootNamespace replacement value so $RootNamespace$ has the folder structure in it for C# projects
        this.AddRootNamespaceReplacementValue(replacementValues, item.Collection);
    }

    // Expand $token$ placeholders in the added project item.
    this.PerformTokenReplacement(item, replacementValues);

    if (addFileOptions.OpenOnComplete && !item.IsOpen)
    {
        try
        {
            var window = item.Open();

            // Ensure that the window is always shown regardless of "Preview"
            // user settings.
            if (window != null && !window.Visible)
            {
                window.Visible = true;
            }
        }
        catch (Exception)
        {
            // Best-effort: failure to open the editor window is ignored.
        }
    }
}
/// <summary>
/// Add the specified file to the project after doing token replacement.
/// </summary>
/// <param name="fileName">
/// The file to copy and add to the project.
/// </param>
/// <param name="targetPath">
/// The full or relative path where the file should be added. A full path
/// must be in a sub-directory of the project's directory.
/// </param>
/// <param name="addFileOptions">
/// The options to use while adding the file; defaults are used when null.
/// </param>
/// <returns>
/// Returns the path to the file that was added.
/// </returns>
public override async Task<string> AddFileAsync(string fileName, string targetPath, AddFileOptions addFileOptions = null)
{
    // Fall back to default options when the caller supplies none.
    addFileOptions = addFileOptions ?? new AddFileOptions();

    await this.AddFileToProjectInFolder(this.context.ProjectHierarchy, fileName, targetPath, addFileOptions);
    return targetPath;
}
/// <summary>
/// Performs the chunking.
/// </summary>
/// <param name="stream">
/// The data source.
/// </param>
/// <param name="name">
/// A name for the data.
/// </param>
/// <param name="options">
/// The options when adding data to the IPFS file system.
/// </param>
/// <param name="blockService">
/// The destination for the chunked data block(s).
/// </param>
/// <param name="keyChain">
/// Used to protect the chunked data blocks(s).
/// </param>
/// <param name="cancel">
/// Is used to stop the task. When cancelled, the <see cref="TaskCanceledException" /> is raised.
/// </param>
/// <returns>
/// A task that represents the asynchronous operation. The task's value is
/// the sequence of file system nodes of the added data blocks.
/// </returns>
public async Task<List<UnixFsNode>> ChunkAsync(Stream stream, string name, AddFileOptions options, IBlockApi blockService, IKeyApi keyChain, CancellationToken cancel)
{
    var protecting = !string.IsNullOrWhiteSpace(options.ProtectionKey);
    var nodes = new List<UnixFsNode>();
    var chunkSize = options.ChunkSize;
    var chunk = new byte[chunkSize];
    var chunking = true;
    var totalBytes = 0UL;

    while (chunking)
    {
        // Get an entire chunk. The stream may return fewer bytes per read
        // than requested, so keep reading until the chunk is full or EOF.
        var length = 0;
        while (length < chunkSize)
        {
            var n = await stream.ReadAsync(chunk, length, chunkSize - length, cancel).ConfigureAwait(false);
            if (n < 1)
            {
                // End of stream; this is the final (possibly partial) chunk.
                chunking = false;
                break;
            }
            length += n;
            totalBytes += (uint)n;
        }

        // Only generate empty block, when the stream is empty.
        if (length != 0 || nodes.Count <= 0)
        {
            // Report cumulative progress for this chunk.
            options.Progress?.Report(new TransferProgress
            {
                Name = name,
                Bytes = totalBytes
            });

            // if protected data, then get CMS structure.
            if (protecting)
            {
                // Encrypt the chunk with the named key and store the CMS
                // envelope as a "cms" block.
                // TODO: Inefficent to copy chunk, use ArraySegment in DataMessage.Data
                var plain = new byte[length];
                Array.Copy(chunk, plain, length);
                var cipher = await keyChain.CreateProtectedDataAsync(options.ProtectionKey, plain, cancel)
                             .ConfigureAwait(false);
                var cid = await blockService.PutAsync(
                    cipher,
                    "cms",
                    options.Hash,
                    options.Encoding,
                    options.Pin,
                    cancel).ConfigureAwait(false);
                nodes.Add(new UnixFsNode
                {
                    Id = cid,
                    Size = length,
                    DagSize = cipher.Length,
                    Links = UnixFsLink.None
                });
            }
            else if (options.RawLeaves)
            {
                // Store the chunk directly as a "raw" block (no unixfs wrapper).
                // TODO: Inefficent to copy chunk, use ArraySegment in DataMessage.Data
                var data = new byte[length];
                Array.Copy(chunk, data, length);
                var cid = await blockService.PutAsync(
                    data,
                    "raw",
                    options.Hash,
                    options.Encoding,
                    options.Pin,
                    cancel).ConfigureAwait(false);
                nodes.Add(new UnixFsNode
                {
                    Id = cid,
                    Size = length,
                    DagSize = length,
                    Links = UnixFsLink.None
                });
            }
            else
            {
                // Build the DAG.
                // Wrap the chunk in a unixfs File message and store it as a
                // leaf DAG node.
                var dm = new DataMessage
                {
                    Type = DataType.File,
                    FileSize = (ulong)length
                };
                if (length > 0)
                {
                    // TODO: Inefficent to copy chunk, use ArraySegment in DataMessage.Data
                    var data = new byte[length];
                    Array.Copy(chunk, data, length);
                    dm.Data = data;
                }
                var pb = new MemoryStream();
                Serializer.Serialize(pb, dm);
                var dag = new DagNode(pb.ToArray(), null, options.Hash);

                // Save it.
                dag.Id = await blockService.PutAsync(
                    dag.ToArray(),
                    multiHash: options.Hash,
                    encoding: options.Encoding,
                    pin: options.Pin,
                    cancel: cancel).ConfigureAwait(false);
                var node = new UnixFsNode
                {
                    Id = dag.Id,
                    Size = length,
                    DagSize = dag.Size,
                    Links = UnixFsLink.None
                };
                nodes.Add(node);
            }
        }
    }
    return(nodes);
}
/// <summary>
///   Adds the stream's content to IPFS via the go-ipfs "add" HTTP endpoint.
/// </summary>
/// <param name="stream">The data to add.</param>
/// <param name="name">A name for the data; sent to the server and used as the node name.</param>
/// <param name="options">Add options; defaults are used when null.</param>
/// <param name="cancel">Stops the task when cancelled.</param>
/// <returns>The top-level node of the added data.</returns>
public async Task<IFileSystemNode> AddAsync(Stream stream, string name = "", AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
{
    if (options == null)
    {
        options = new AddFileOptions();
    }

    // Translate the options into go-ipfs query arguments.
    var opts = new List<string>();
    if (!options.Pin)
    {
        opts.Add("pin=false");
    }
    if (options.Wrap)
    {
        opts.Add("wrap-with-directory=true");
    }
    if (options.RawLeaves)
    {
        opts.Add("raw-leaves=true");
    }
    if (options.OnlyHash)
    {
        opts.Add("only-hash=true");
    }
    if (options.Trickle)
    {
        opts.Add("trickle=true");
    }
    if (options.Progress != null)
    {
        opts.Add("progress=true");
    }
    if (options.Hash != MultiHash.DefaultAlgorithmName)
    {
        // FIX: was $"hash=${options.Hash}", which emitted a literal '$'
        // ("hash=$keccak-512") so the non-default hash was never applied.
        opts.Add($"hash={options.Hash}");
    }
    if (options.Encoding != MultiBase.DefaultAlgorithmName)
    {
        // FIX: same stray-'$' bug as above for the CID base option.
        opts.Add($"cid-base={options.Encoding}");
    }
    if (!string.IsNullOrWhiteSpace(options.ProtectionKey))
    {
        opts.Add($"protect={options.ProtectionKey}");
    }
    opts.Add($"chunker=size-{options.ChunkSize}");

    var response = await ipfs.Upload2Async("add", cancel, stream, name, opts.ToArray());

    // The result is a stream of LDJSON objects.
    // See https://github.com/ipfs/go-ipfs/issues/4852
    FileSystemNode fsn = null;
    using (var sr = new StreamReader(response))
    using (var jr = new JsonTextReader(sr) { SupportMultipleContent = true })
    {
        while (jr.Read())
        {
            var r = await JObject.LoadAsync(jr, cancel);

            // If a progress report.
            if (r.ContainsKey("Bytes"))
            {
                options.Progress?.Report(new TransferProgress
                {
                    Name = (string)r["Name"],
                    Bytes = (ulong)r["Bytes"]
                });
            }
            // Else must be an added file.
            else
            {
                fsn = new FileSystemNode
                {
                    Id = (string)r["Hash"],
                    Size = long.Parse((string)r["Size"]),
                    IsDirectory = false,
                    Name = name,
                    IpfsClient = ipfs
                };
                if (log.IsDebugEnabled)
                {
                    log.Debug("added " + fsn.Id + " " + fsn.Name);
                }
            }
        }
    }

    // The last object in the stream describes the top-level node.
    fsn.IsDirectory = options.Wrap;
    return fsn;
}
/// <summary>
///   Adds the stream's content to IPFS as a (possibly chunked) unixfs file.
/// </summary>
/// <param name="stream">The data to add.</param>
/// <param name="name">A name for the data; also used as the link name when wrapping.</param>
/// <param name="options">Add options; defaults are used when null.</param>
/// <param name="cancel">Stops the task when cancelled.</param>
/// <returns>The root file system node of the added data.</returns>
public async Task<IFileSystemNode> AddAsync(
    Stream stream,
    string name,
    AddFileOptions options,
    CancellationToken cancel)
{
    options = options ?? new AddFileOptions();

    // TODO: various options
    if (options.Trickle)
    {
        throw new NotImplementedException("Trickle");
    }

    var blockService = GetBlockService(options);
    var keyChain = await ipfs.KeyChain(cancel).ConfigureAwait(false);

    // Split the stream into fixed-size blocks.
    var chunker = new SizeChunker();
    var nodes = await chunker.ChunkAsync(stream, name, options, blockService, keyChain, cancel).ConfigureAwait(false);

    // Multiple nodes for the file?
    FileSystemNode node = null;
    if (nodes.Count() == 1)
    {
        // Single chunk: the chunk node IS the file node.
        node = nodes.First();
    }
    else
    {
        // Build the DAG that contains all the file nodes.
        var links = nodes.Select(n => n.ToLink()).ToArray();
        var fileSize = (ulong)nodes.Sum(n => n.Size);
        var dm = new DataMessage
        {
            Type = DataType.File,
            FileSize = fileSize,
            BlockSizes = nodes.Select(n => (ulong)n.Size).ToArray()
        };
        var pb = new MemoryStream();
        ProtoBuf.Serializer.Serialize<DataMessage>(pb, dm);
        var dag = new DagNode(pb.ToArray(), links, options.Hash);

        // Save it.
        dag.Id = await blockService.PutAsync(
            data: dag.ToArray(),
            multiHash: options.Hash,
            encoding: options.Encoding,
            pin: options.Pin,
            cancel: cancel).ConfigureAwait(false);

        node = new FileSystemNode
        {
            Id = dag.Id,
            Size = (long)dm.FileSize,
            DagSize = dag.Size,
            Links = links
        };
    }

    // Wrap in directory?
    if (options.Wrap)
    {
        var link = node.ToLink(name);
        var wlinks = new IFileSystemLink[] { link };
        node = await CreateDirectoryAsync(wlinks, options, cancel).ConfigureAwait(false);
    }
    else
    {
        node.Name = name;
    }

    // Advertise the root node.
    if (options.Pin && ipfs.IsStarted)
    {
        await ipfs.Dht.ProvideAsync(node.Id, advertise: true, cancel: cancel).ConfigureAwait(false);
    }

    // Return the file system node.
    return(node);
}
/// <summary>
///   Not supported by this implementation.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public Task<IFileSystemNode> AddFileAsync(string path, AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
    => throw new NotImplementedException();
/// <summary>
///   Not supported by this implementation.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public Task<IFileSystemNode> AddDirectoryAsync(string path, bool recursive = true, AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
    => throw new NotImplementedException();
/// <summary>
///   Adds a directory (and optionally its sub-directories) to IPFS and
///   returns the directory node linking all the added children.
/// </summary>
public async Task<IFileSystemNode> AddDirectoryAsync(string path, bool recursive = true, AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
{
    if (options == null)
    {
        options = new AddFileOptions();
    }
    // NOTE(review): this mutates the caller's options instance; wrapping
    // is forced off because the directory node itself is the wrapper.
    options.Wrap = false;

    // Add the files and sub-directories.
    path = Path.GetFullPath(path);
    var files = Directory
        .EnumerateFiles(path)
        .Select(p => AddFileAsync(p, options, cancel));
    if (recursive)
    {
        var folders = Directory
            .EnumerateDirectories(path)
            .Select(dir => AddDirectoryAsync(dir, recursive, options, cancel));
        files = files.Union(folders);
    }

    // go-ipfs v0.4.14 sometimes fails when sending lots of 'add file'
    // requests. It's happy with adding one file at a time.
#if true
    var links = new List<IFileSystemLink>();
    foreach (var file in files)
    {
        // Awaiting sequentially serialises the requests (see note above).
        var node = await file;
        links.Add(node.ToLink());
    }
#else
    var nodes = await Task.WhenAll(files);
    var links = nodes.Select(node => node.ToLink());
#endif

    // Create the directory with links to the created files and sub-directories
    var folder = emptyFolder.Value.AddLinks(links);
    var directory = await ipfs.Object.PutAsync(folder, cancel);

    if (log.IsDebugEnabled)
    {
        log.Debug("added " + directory.Id + " " + Path.GetFileName(path));
    }

    return(new FileSystemNode
    {
        Id = directory.Id,
        Name = Path.GetFileName(path),
        Links = links,
        IsDirectory = true,
        Size = directory.Size,
        IpfsClient = ipfs
    });
}
/// <summary>
///   Adds UTF-8 encoded text to IPFS by delegating to <see cref="AddAsync"/>.
/// </summary>
public Task<IFileSystemNode> AddTextAsync(string text, AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
{
    // Wrap the text in a read-only stream; AddAsync does the rest.
    var bytes = Encoding.UTF8.GetBytes(text);
    var source = new MemoryStream(bytes, false);
    return AddAsync(source, "", options, cancel);
}
/// <summary>
///   Adds an uploaded file to IPFS, mirroring the go-ipfs "add" endpoint's
///   query options, and streams the result as JSON.
/// </summary>
public async Task Add(
    IFormFile file,
    string hash = MultiHash.DefaultAlgorithmName,
    [ModelBinder(Name = "cid-base")] string cidBase = MultiBase.DefaultAlgorithmName,
    [ModelBinder(Name = "only-hash")] bool onlyHash = false,
    string chunker = null,
    bool pin = false,
    [ModelBinder(Name = "raw-leaves")] bool rawLeaves = false,
    bool trickle = false,
    [ModelBinder(Name = "wrap-with-directory")] bool wrap = false,
    string protect = null,
    bool progress = true
)
{
    // FIX: use nameof for refactor-safe parameter names in exceptions.
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file));
    }

    var options = new AddFileOptions
    {
        Encoding = cidBase,
        Hash = hash,
        OnlyHash = onlyHash,
        Pin = pin,
        RawLeaves = rawLeaves,
        Trickle = trickle,
        Wrap = wrap,
        ProtectionKey = protect,
    };

    // Only the "size-N" chunker is supported.
    if (chunker != null)
    {
        if (chunker.StartsWith("size-", StringComparison.Ordinal))
        {
            options.ChunkSize = int.Parse(chunker.Substring(5), CultureInfo.InvariantCulture);
        }
        else
        {
            throw new ArgumentOutOfRangeException(nameof(chunker));
        }
    }
    if (progress)
    {
        options.Progress = new Progress<TransferProgress>(StreamJson);
    }

    // TODO: Accept multiple files.
    using (var stream = file.OpenReadStream())
    {
        // TODO: AddAsync returns a list of nodes containing every node added not just the top level.
        var node = await IpfsCore.FileSystem.AddAsync(stream, file.FileName, options, Cancel);
        StreamJson(new FileSystemNodeDto
        {
            Name = node.Id,
            Hash = node.Id,
            Size = node.Size.ToString(CultureInfo.InvariantCulture)
        });
    }
}
/// <summary>
///   Adds the stream's content to IPFS via the go-ipfs "add" HTTP endpoint.
/// </summary>
/// <param name="stream">The data to add.</param>
/// <param name="name">A name for the data; sent to the server and used as the node name.</param>
/// <param name="options">Add options; defaults are used when null.</param>
/// <param name="cancel">Stops the task when cancelled.</param>
/// <returns>The top-level node of the added data.</returns>
public async Task<IFileSystemNode> AddAsync(Stream stream, string name = "", AddFileOptions options = null, CancellationToken cancel = default(CancellationToken))
{
    if (options == null)
    {
        options = new AddFileOptions();
    }

    // Translate the options into go-ipfs query arguments.
    var opts = new List<string>();
    if (!options.Pin)
    {
        opts.Add("pin=false");
    }
    if (options.Wrap)
    {
        opts.Add("wrap-with-directory=true");
    }
    if (options.RawLeaves)
    {
        opts.Add("raw-leaves=true");
    }
    if (options.OnlyHash)
    {
        opts.Add("only-hash=true");
    }
    if (options.Trickle)
    {
        opts.Add("trickle=true");
    }
    if (options.Hash != "sha2-256")
    {
        // FIX: was $"hash=${options.Hash}", which emitted a literal '$'
        // ("hash=$keccak-512") so the non-default hash was never applied.
        opts.Add($"hash={options.Hash}");
    }
    opts.Add($"chunker=size-{options.ChunkSize}");

    var json = await ipfs.UploadAsync("add", cancel, stream, name, opts.ToArray());

    // The result is a stream of LDJSON objects.
    // See https://github.com/ipfs/go-ipfs/issues/4852
    FileSystemNode fsn = null;
    using (var sr = new StringReader(json))
    using (var jr = new JsonTextReader(sr) { SupportMultipleContent = true })
    {
        while (jr.Read())
        {
            var r = await JObject.LoadAsync(jr, cancel);
            fsn = new FileSystemNode
            {
                Id = (string)r["Hash"],
                Size = long.Parse((string)r["Size"]),
                IsDirectory = false,
                Name = name,
                IpfsClient = ipfs
            };
            if (log.IsDebugEnabled)
            {
                log.Debug("added " + fsn.Id + " " + fsn.Name);
            }
        }
    }

    // The last object in the stream describes the top-level node.
    fsn.IsDirectory = options.Wrap;
    return fsn;
}