public UploadPipeline(ISession session, ILogger log, IFileSystem fs)
{
    var tree = new FolderTree(session);

    pipeline = new ActionBlock<AssetModel>(async asset =>
    {
        var process = $"Uploading {asset.Id}";

        try
        {
            var assetFile = fs.GetBlobFile(asset.Id);

            await using (var stream = assetFile.OpenRead())
            {
                var file = new FileParameter(stream, asset.FileName, asset.MimeType);

                await session.Assets.PostUpsertAssetAsync(session.App, asset.Id, null, true, file);
            }

            log.ProcessCompleted(process);
        }
        catch (Exception ex)
        {
            log.ProcessFailed(process, ex);
        }
    },
    new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = 8,
        MaxMessagesPerTask = 1,
        BoundedCapacity = 16
    });
}
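// Minimal usage sketch for the pipeline above, mirroring the UploadAsync/CompleteAsync
// calls that appear in ImportAsync further below; session, log and fs are assumed to be
// supplied by the caller, and assets is a List<AssetModel>.
var uploader = new UploadPipeline(session, log, fs);

await uploader.UploadAsync(assets);   // Feed every asset into the bounded ActionBlock.
await uploader.CompleteAsync();       // Signal completion and await in-flight uploads.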
public static async Task<AssetModel> ToModelAsync(this AssetDto asset, FolderTree folders)
{
    return new AssetModel
    {
        Id = asset.Id,
        Metadata = asset.Metadata,
        MimeType = asset.MimeType,
        Slug = asset.Slug,
        FileName = asset.FileName,
        FileHash = asset.FileHash,
        FolderPath = await folders.GetPathAsync(asset.ParentId),
        Tags = asset.Tags,
        IsProtected = asset.IsProtected
    };
}
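// Sketch of the AssetModel shape implied by the mapping above; the property names are
// taken directly from the code, but the types of Metadata and Tags are assumptions.
public sealed class AssetModel
{
    public string Id { get; set; }

    public string MimeType { get; set; }

    public string Slug { get; set; }

    public string FileName { get; set; }

    public string FileHash { get; set; }

    public string FolderPath { get; set; }

    public bool IsProtected { get; set; }

    public Dictionary<string, object> Metadata { get; set; } // Assumed type.

    public List<string> Tags { get; set; } // Assumed type.
}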
public async Task ExportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var downloadPipeline = new DownloadPipeline(session, log, directoryInfo);

    var assets = new List<AssetModel>();
    var assetBatch = 0;

    async Task SaveAsync()
    {
        var model = new AssetsModel
        {
            Assets = assets
        };

        await log.DoSafeAsync($"Exporting Assets ({assetBatch})", async () =>
        {
            await jsonHelper.WriteWithSchema(directoryInfo, $"assets/{assetBatch}.json", model, Ref);
        });
    }

    var tree = new FolderTree(session);

    await session.Assets.GetAllAsync(session.App, async asset =>
    {
        assets.Add(await asset.ToModelAsync(tree));

        if (assets.Count > 50)
        {
            await SaveAsync();

            assets.Clear();
            assetBatch++;
        }

        await downloadPipeline.DownloadAsync(asset);
    });

    if (assets.Count > 0)
    {
        await SaveAsync();
    }

    await downloadPipeline.CompleteAsync();
}
public async Task ExportAsync(ISyncService sync, SyncOptions options, ISession session)
{
    var downloadPipeline = new DownloadPipeline(session, log, sync.FileSystem);

    var assets = new List<AssetModel>();
    var assetBatch = 0;

    async Task SaveAsync()
    {
        var model = new AssetsModel
        {
            Assets = assets
        };

        await log.DoSafeAsync($"Exporting Assets ({assetBatch})", async () =>
        {
            await sync.WriteWithSchema(new FilePath("assets", $"{assetBatch}.json"), model, Ref);
        });
    }

    var tree = new FolderTree(session);

    await session.Assets.GetAllAsync(session.App, async asset =>
    {
        assets.Add(await asset.ToModelAsync(tree));

        if (assets.Count > 50)
        {
            await SaveAsync();

            assets.Clear();
            assetBatch++;
        }

        await downloadPipeline.DownloadAsync(asset);
    });

    if (assets.Count > 0)
    {
        await SaveAsync();
    }

    await downloadPipeline.CompleteAsync();
}
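// Assumed shape of the batch container serialized by both ExportAsync overloads above;
// each flush writes one file (assets/0.json, assets/1.json, ...) holding up to 51
// assets, since the list is only flushed once Count exceeds 50. A minimal sketch, not
// the repository's actual declaration.
public sealed class AssetsModel
{
    public List<AssetModel> Assets { get; set; }
}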
public UploadPipeline(ISession session, ILogger log, IFileSystem fs)
{
    var tree = new FolderTree(session);

    var fileNameStep = new TransformBlock<AssetModel, (AssetModel, FilePath)>(async asset =>
    {
        FilePath path;

        if (FilePathProvider != null)
        {
            path = FilePathProvider(asset);
        }
        else if (FilePathProviderAsync != null)
        {
            path = await FilePathProviderAsync(asset);
        }
        else
        {
            path = new FilePath(asset.Id);
        }

        return (asset, path);
    },
    new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = 1,
        MaxMessagesPerTask = 1,
        BoundedCapacity = 1
    });

    var maxDegreeOfParallelism = fs.CanAccessInParallel ? Environment.ProcessorCount * 2 : 1;

    var uploadStep = new ActionBlock<(AssetModel, FilePath)>(async item =>
    {
        var (asset, path) = item;

        var process = $"Uploading {path}";

        try
        {
            var assetFile = fs.GetFile(path);

            await using (var stream = assetFile.OpenRead())
            {
                var file = new FileParameter(stream, asset.FileName, asset.MimeType);

                var result = await session.Assets.PostUpsertAssetAsync(session.App, asset.Id, null, true, file);

                log.ProcessCompleted(process);
            }
        }
        catch (Exception ex)
        {
            log.ProcessFailed(process, ex);
        }
    },
    new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = maxDegreeOfParallelism,
        MaxMessagesPerTask = 1,
        BoundedCapacity = maxDegreeOfParallelism * 2
    });

    fileNameStep.LinkTo(uploadStep, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });

    pipelineStart = fileNameStep;
    pipelineEnd = uploadStep;
}
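// Sketch of how completion presumably drains the linked blocks: because the link above
// sets PropagateCompletion, completing the first block is enough to complete the whole
// pipeline once all buffered items have been processed. The pipelineStart/pipelineEnd
// members come from the constructor above; the method body itself is an assumption.
public async Task CompleteAsync()
{
    pipelineStart.Complete();

    await pipelineEnd.Completion;
}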
public async Task ImportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var models =
        GetFiles(directoryInfo)
            .Select(x => (x, jsonHelper.Read<AssetsModel>(x, log)));

    var tree = new FolderTree(session);

    foreach (var (_, model) in models)
    {
        if (model?.Assets?.Count > 0)
        {
            var uploader = new UploadPipeline(session, log, directoryInfo);

            await uploader.UploadAsync(model.Assets);
            await uploader.CompleteAsync();

            var request = new BulkUpdateAssetsDto();

            foreach (var asset in model.Assets)
            {
                var parentId = await tree.GetIdAsync(asset.FolderPath);

                request.Jobs.Add(asset.ToMoveJob(parentId));
                request.Jobs.Add(asset.ToAnnotateJob());
            }

            var assetIndex = 0;

            var results = await session.Assets.BulkUpdateAssetsAsync(session.App, request);

            foreach (var asset in model.Assets)
            {
                // We create two commands per asset, so the i-th asset owns the results
                // at job index 2 * i (move) and 2 * i + 1 (annotate).
                var result1 = results.FirstOrDefault(x => x.JobIndex == (assetIndex * 2));
                var result2 = results.FirstOrDefault(x => x.JobIndex == (assetIndex * 2) + 1);

                log.StepStart($"Upserting #{assetIndex}");

                if (result1?.Error != null)
                {
                    log.StepFailed(result1.Error.ToString());
                }
                else if (result2?.Error != null)
                {
                    log.StepFailed(result2.Error.ToString());
                }
                else if (result1?.Id != null && result2?.Id != null)
                {
                    log.StepSuccess();
                }
                else
                {
                    log.StepSkipped("Unknown Reason");
                }

                assetIndex++;
            }
        }
    }
}
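// Hypothetical sketches of the two job factories used above, assuming the
// BulkUpdateAssetsJobDto/BulkUpdateAssetType shapes of the Squidex client library;
// the exact property sets are assumptions, not the repository's implementation.
public static BulkUpdateAssetsJobDto ToMoveJob(this AssetModel asset, string parentId)
{
    return new BulkUpdateAssetsJobDto
    {
        Id = asset.Id,
        ParentId = parentId,
        Type = BulkUpdateAssetType.Move
    };
}

public static BulkUpdateAssetsJobDto ToAnnotateJob(this AssetModel asset)
{
    return new BulkUpdateAssetsJobDto
    {
        Id = asset.Id,
        FileName = asset.FileName,
        Slug = asset.Slug,
        IsProtected = asset.IsProtected,
        Metadata = asset.Metadata,
        Tags = asset.Tags,
        Type = BulkUpdateAssetType.Annotate
    };
}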