/// <summary>
/// Recursively uploads every file below <paramref name="arguments"/>.Folder as an asset,
/// mirroring the local directory layout underneath the configured target folder.
/// </summary>
/// <param name="arguments">Import options; Folder is the local root, TargetFolder the remote root.</param>
public async Task List(ImportArguments arguments)
{
    var session = configuration.StartSession();

    var assets = session.Assets;

    var folder = new DirectoryInfo(arguments.Folder);
    var folderTree = new FolderTree(session);

    foreach (var file in folder.GetFiles("*.*", SearchOption.AllDirectories))
    {
        var relativeFolder = Path.GetRelativePath(folder.FullName, file.Directory.FullName);
        var relativePath = Path.GetRelativePath(folder.FullName, file.FullName);

        var targetFolder = arguments.TargetFolder;

        // FIX: test the folder path, not the file path, before appending it. The old check used
        // relativePath, which is never "." for a file, so files sitting directly in the root
        // folder had "." combined onto the target and were resolved into a bogus subfolder.
        if (!string.IsNullOrWhiteSpace(relativeFolder) && relativeFolder != ".")
        {
            targetFolder = Path.Combine(targetFolder, relativeFolder);
        }

        var parentId = await folderTree.GetIdAsync(targetFolder);

        await log.DoSafeLineAsync($"Uploading {relativePath}", async () =>
        {
            await assets.PostAssetAsync(session.App, parentId, duplicate: arguments.Duplicate, file: file);
        });
    }

    log.WriteLine("> Import completed");
}
/// <summary>
/// Downloads all assets under the given target folder into the file system at
/// <paramref name="arguments"/>.Path, recreating the remote folder structure and
/// disambiguating duplicate file names with an "{id}_{name}" prefix.
/// </summary>
/// <param name="arguments">Import options; App selects the session, TargetFolder the remote root.</param>
public async Task Export(ImportArguments arguments)
{
    var session = configuration.StartSession(arguments.App);

    var assets = session.Assets;

    using (var fs = await FileSystems.CreateAsync(arguments.Path, session.WorkingDirectory))
    {
        var folderTree = new FolderTree(session);
        var folderNames = new HashSet<string>();

        var parentId = await folderTree.GetIdAsync(arguments.TargetFolder);

        var downloadPipeline = new DownloadPipeline(session, log, fs)
        {
            FilePathProviderAsync = async asset =>
            {
                var assetFolder = await folderTree.GetPathAsync(asset.ParentId);
                var assetPath = asset.FileName;

                if (!string.IsNullOrWhiteSpace(assetFolder))
                {
                    assetPath = Path.Combine(assetFolder, assetPath);
                }

                // FIX: disambiguate name clashes without the null-forgiving operator. When the
                // asset lives at the root, assetFolder can be null and the previous
                // Path.Combine(assetFolder!, ...) threw ArgumentNullException on the first
                // duplicate root-level file name.
                if (!folderNames.Add(assetPath))
                {
                    assetPath = Path.Combine(assetFolder ?? string.Empty, $"{asset.Id}_{asset.FileName}");
                }

                return FilePath.Create(assetPath);
            }
        };

        try
        {
            await assets.GetAllByQueryAsync(session.App, async asset =>
            {
                await downloadPipeline.DownloadAsync(asset);
            }, new AssetQuery { ParentId = parentId });
        }
        finally
        {
            // Always drain the pipeline, even if the query failed part way.
            await downloadPipeline.CompleteAsync();
        }

        log.WriteLine("> Export completed");
    }
}
/// <summary>
/// Imports authors from the file named in <paramref name="arguments"/>.File.
/// </summary>
/// <param name="arguments">Import options; App selects the session, File the source file.</param>
public async Task Authors(ImportArguments arguments)
{
    var session = configuration.StartSession(arguments.App);

    // FIX: open read-only (the importer only consumes the stream), matching the other
    // import commands in this file.
    await using (var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read))
    {
        var importer = new AuthorImporter(session);

        await importer.ImportAsync(stream);
    }

    // FIX: message grammar ("imports." -> "imported.").
    log.WriteLine("> Authors imported.");
}
/// <summary>
/// Reads contents from the file named in the arguments (JSON or CSV, selected by
/// arguments.Format) and forwards them to ImportAsync via the configured service.
/// </summary>
public async Task Import(ImportArguments arguments)
{
    var (_, service) = Configuration.Setup();

    if (arguments.Format == Format.JSON)
    {
        var converter = new Json2SquidexConverter(arguments.Fields);

        using var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read);
        using var streamReader = new StreamReader(stream);
        using var reader = new JsonTextReader(streamReader);

        var datas = converter.ReadAll(reader);

        await ImportAsync(arguments, service, datas);
    }
    else
    {
        var converter = new Csv2SquidexConverter(arguments.Fields);

        using var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read);
        using var streamReader = new StreamReader(stream);

        // CSV parsing is culture-invariant; only the delimiter is configurable.
        var csvOptions = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            Delimiter = arguments.Delimiter
        };

        using var reader = new CsvReader(streamReader, csvOptions);

        var datas = converter.ReadAll(reader);

        await ImportAsync(arguments, service, datas);
    }
}
/// <summary>
/// Parses the CSV file named in the arguments and imports every row it yields.
/// </summary>
public async Task Import(ImportArguments arguments)
{
    var converter = new Csv2SquidexConverter(arguments.Fields);

    using var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read);
    using var streamReader = new StreamReader(stream);

    // Only the delimiter is configurable; everything else uses CsvOptions defaults.
    var csvOptions = new CsvOptions
    {
        Delimiter = arguments.Delimiter
    };

    using var reader = new CsvReader(streamReader, csvOptions);

    var datas = converter.ReadAll(reader);

    await ImportAsync(arguments, datas);
}
/// <summary>
/// Creates one content entry per item in <paramref name="datas"/>, rendering a live
/// progress counter that rewrites itself in place on the console.
/// </summary>
/// <param name="arguments">Schema selection and publish flag for the created entries.</param>
/// <param name="datas">The parsed items to import, consumed lazily.</param>
private async Task ImportAsync(ImportArguments arguments, IEnumerable<DummyData> datas)
{
    var client = Configuration.GetClient().Client.GetClient<DummyEntity, DummyData>(arguments.Schema);

    var totalWritten = 0;

    // Remember where the progress line starts so it can be overwritten on each iteration.
    // FIX: removed the unused 'handled' HashSet the original allocated and never read.
    var consoleTop = Console.CursorTop;

    foreach (var data in datas)
    {
        await client.CreateAsync(data, !arguments.Unpublished);

        totalWritten++;

        Console.WriteLine("> Imported: {0}.", totalWritten);
        Console.SetCursorPosition(0, consoleTop);
    }

    Console.WriteLine("> Imported: {0}. Completed.", totalWritten);
}
/// <summary>
/// Imports contents from the file named in the arguments. The format switch selects
/// the parser: a JSON array, separator-delimited JSON objects, or CSV.
/// </summary>
public async Task Import(ImportArguments arguments)
{
    var session = configuration.StartSession(arguments.App);

    switch (arguments.Format)
    {
        case Format.JSON:
        {
            var converter = new Json2SquidexConverter(arguments.Fields);

            await using (var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read))
            {
                var datas = converter.ReadAsArray(stream);

                await session.ImportAsync(arguments, log, datas);
            }

            break;
        }

        case Format.JSON_Separated:
        {
            var converter = new Json2SquidexConverter(arguments.Fields);

            await using (var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read))
            {
                var datas = converter.ReadAsSeparatedObjects(stream, JsonSeparator);

                await session.ImportAsync(arguments, log, datas);
            }

            break;
        }

        default:
        {
            // Any other format is treated as CSV, as in the original if/else chain.
            var converter = new Csv2SquidexConverter(arguments.Fields);

            await using (var stream = new FileStream(arguments.File, FileMode.Open, FileAccess.Read))
            {
                var datas = converter.Read(stream, arguments.Delimiter);

                await session.ImportAsync(arguments, log, datas);
            }

            break;
        }
    }
}
/// <summary>
/// Imports the first uploaded form file (if any) through the CRUD service and
/// reports the insert/update counts and elapsed time.
/// </summary>
[HttpPost("import"), RequestSizeLimit(20 * 1024 * 1024)] // 20 MB
public async Task<ActionResult<ImportResponse>> Import([FromQuery] ImportArguments args)
{
    var formFile = Request.Form.Files.Count > 0 ? Request.Form.Files[0] : null;

    var contentType = formFile?.ContentType;
    var fileName = formFile?.FileName;

    // 'await using' disposes the stream on every path, including exceptions,
    // and tolerates a null stream when no file was posted.
    await using var fileStream = formFile?.OpenReadStream();

    var service = GetCrudService();

    var result = await service.Import(fileStream, fileName, contentType, args);

    var response = new ImportResponse
    {
        Inserted = result.Inserted,
        Updated = result.Updated,
        Milliseconds = result.Milliseconds
    };

    return Ok(response);
}
/// <summary>
/// Imports a posted file: parses it into entities, validates them, saves them, and
/// returns insert/update counts plus per-phase timings (in seconds, 1 decimal place).
/// Validation and save errors are mapped back to source row numbers before being
/// rethrown as 422 responses.
/// </summary>
[HttpPost("import"), RequestSizeLimit(5 * 1024 * 1024)] // 5MB
public virtual async Task<ActionResult<ImportResult>> Import([FromQuery] ImportArguments args)
{
    // watch measures the whole request; watch2 is restarted per phase.
    Stopwatch watch = new Stopwatch();
    watch.Start();

    Stopwatch watch2 = new Stopwatch();
    watch2.Start();

    // Phase timings are captured inside the lambda but reported on the result below.
    decimal parsingToEntitiesForSave = 0;
    decimal attributeValidationInCSharp = 0;
    decimal validatingAndSaving = 0;

    return (await ControllerUtilities.InvokeActionImpl(async () =>
    {
        // Parse the file into Entities + map back to row numbers (The way source code is compiled into machine code + symbols file)
        var (entities, rowNumberFromErrorKeyMap) = await ParseImplAsync(args); // This should check for primary code consistency!

        parsingToEntitiesForSave = Math.Round(((decimal)watch2.ElapsedMilliseconds) / 1000, 1);
        watch2.Restart();

        // Validation
        ObjectValidator.Validate(ControllerContext, null, null, entities);

        attributeValidationInCSharp = Math.Round(((decimal)watch2.ElapsedMilliseconds) / 1000, 1);
        watch2.Restart();

        if (!ModelState.IsValid)
        {
            // Translate entity-keyed validation errors into row-number keys for the client.
            var mappedModelState = MapModelState(ModelState, rowNumberFromErrorKeyMap);

            throw new UnprocessableEntityException(mappedModelState);
        }

        // Saving
        try
        {
            await SaveImplAsync(entities, new SaveArguments { ReturnEntities = false });

            validatingAndSaving = Math.Round(((decimal)watch2.ElapsedMilliseconds) / 1000, 1);
            watch2.Stop();
        }
        catch (UnprocessableEntityException ex)
        {
            // Same row-number mapping for errors raised during the save itself.
            var mappedModelState = MapModelState(ex.ModelState, rowNumberFromErrorKeyMap);

            throw new UnprocessableEntityException(mappedModelState);
        }

        // NOTE(review): an entity whose Id still equals default(TKey) is counted as inserted,
        // everything else as updated. This assumes SaveImplAsync (ReturnEntities = false) does
        // not back-fill generated ids into 'entities' — verify, otherwise Inserted reads 0.
        // Entities with a null Id fall into the Updated bucket via the '?? false'.
        var result = new ImportResult
        {
            Inserted = entities.Count(e => e.Id?.Equals(default(TKey)) ?? false),
            Updated = entities.Count(e => !(e.Id?.Equals(default(TKey)) ?? false)),
        };

        // Record the time
        watch.Stop();
        var elapsed = Math.Round(((decimal)watch.ElapsedMilliseconds) / 1000, 1);
        result.Seconds = elapsed;
        result.ParsingToDtosForSave = parsingToEntitiesForSave;
        result.AttributeValidationInCSharp = attributeValidationInCSharp;
        result.ValidatingAndSaving = validatingAndSaving;

        return Ok(result);
    }, _logger));
}
/// <summary>
/// Synchronizes local files from the file system at <paramref name="arguments"/>.Path into
/// the app's assets: uploads new files, replaces a single existing match whose hash differs,
/// and skips unchanged files or ambiguous (multi-candidate) matches.
/// </summary>
/// <param name="arguments">Import options; App, Path, TargetFolder and Duplicate are used.</param>
public async Task Import(ImportArguments arguments)
{
    var session = configuration.StartSession(arguments.App);

    var assets = session.Assets;

    using (var fs = await FileSystems.CreateAsync(arguments.Path, session.WorkingDirectory))
    {
        var folders = new FolderTree(session);

        var assetQuery = new AssetQuery();

        foreach (var file in fs.GetFiles(FilePath.Root, ".*"))
        {
            var targetFolder = file.LocalFolderPath();

            if (!string.IsNullOrWhiteSpace(arguments.TargetFolder))
            {
                targetFolder = Path.Combine(arguments.TargetFolder, targetFolder);
            }

            // Query existing assets with the same file name in the target folder.
            assetQuery.ParentId = await folders.GetIdAsync(targetFolder);
            assetQuery.Filter = $"fileName eq '{file.Name}'";

            var existings = await assets.GetAssetsAsync(session.App, assetQuery);
            var existing = existings.Items.FirstOrDefault();

            var fileHash = file.GetFileHash();

            try
            {
                log.WriteLine($"Uploading: {file.FullName}");

                if (existings.Items.Any(x => string.Equals(x.FileHash, fileHash, StringComparison.Ordinal)))
                {
                    log.StepSkipped("Same hash.");
                }
                else if (existings.Items.Count > 1)
                {
                    log.StepSkipped("Multiple candidates found.");
                }
                else if (existing != null)
                {
                    // FIX: open the file stream only on the paths that actually upload. The old
                    // code opened it before the skip checks and leaked one stream per skipped file.
                    var fileParameter = new FileParameter(file.OpenRead(), file.Name, MimeTypesMap.GetMimeType(file.Name));

                    await assets.PutAssetContentAsync(session.App, existing.Id, fileParameter);

                    log.StepSuccess("Existing Asset");
                }
                else
                {
                    var fileParameter = new FileParameter(file.OpenRead(), file.Name, MimeTypesMap.GetMimeType(file.Name));

                    var result = await assets.PostAssetAsync(session.App, assetQuery.ParentId, null, arguments.Duplicate, fileParameter);

                    if (result._meta?.IsDuplicate == "true")
                    {
                        log.StepSkipped("duplicate.");
                    }
                    else
                    {
                        log.StepSuccess("New Asset");
                    }
                }
            }
            catch (Exception ex)
            {
                // Best-effort per file: log and continue with the next one.
                LogExtensions.HandleException(ex, error => log.WriteLine("Error: {0}", error));
            }
            finally
            {
                log.WriteLine();
            }
        }

        log.WriteLine("> Import completed");
    }
}