/// <summary>
/// Uploads the binary payload of every image found in the flattened object graph
/// to blob storage. Images with external (absolute) URLs are skipped.
/// </summary>
/// <param name="haveImagesObjects">Root objects whose nested <see cref="IHasImages"/> children are scanned.</param>
/// <param name="progressInfo">Progress accumulator; upload failures are appended to its Errors list.</param>
private void ImportImages(IHasImages[] haveImagesObjects, ExportImportProgressInfo progressInfo)
{
    // Flatten the object graph and collect every image carrying binary data.
    var allImages = haveImagesObjects
        .SelectMany(x => x.GetFlatObjectsListWithInterface<IHasImages>())
        .SelectMany(x => x.Images)
        .ToArray();

    foreach (var image in allImages.Where(x => x.BinaryData != null))
    {
        try
        {
            // Prefer the non-absolute Url, else fall back to RelativeUrl.
            var url = image.Url != null && !image.Url.IsAbsoluteUrl() ? image.Url : image.RelativeUrl;
            //do not save images with external url
            if (!string.IsNullOrEmpty(url))
            {
                using (var sourceStream = new MemoryStream(image.BinaryData))
                // BUGFIX: open the blob at the resolved 'url', not 'image.Url' —
                // the original wrote to image.Url even when the check above had
                // selected image.RelativeUrl as the target.
                using (var targetStream = _blobStorageProvider.OpenWrite(url))
                {
                    sourceStream.CopyTo(targetStream);
                }
            }
        }
        catch (Exception ex)
        {
            // Per-image failures are recorded, not fatal, so remaining images still upload.
            progressInfo.Errors.Add(ex.Message);
        }
    }
}
// Exports platform-level entries (security roles/users, module settings, dynamic
// properties with dictionary items, notification templates) into a dedicated
// package part as serialized JSON, honoring the manifest's Handle* flags.
// NOTE(review): blocks on async security calls via Task.Run(...).Result —
// deadlock/thread-pool pressure risk; confirm callers cannot be made async.
private void ExportPlatformEntriesInternal(Package package, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo();
    var platformExportObj = new PlatformExportEntries();

    if (manifest.HandleSecurity)
    {
        //Roles
        platformExportObj.Roles = _roleManagementService.SearchRoles(new RoleSearchRequest { SkipCount = 0, TakeCount = int.MaxValue }).Roles;
        //users
        var usersResult = Task.Run(() => _securityService.SearchUsersAsync(new UserSearchRequest { TakeCount = int.MaxValue })).Result;
        progressInfo.Description = String.Format("Security: {0} users exporting...", usersResult.Users.Count());
        progressCallback(progressInfo);
        // Re-load each user with full export details; users that cannot be resolved are skipped.
        foreach (var user in usersResult.Users)
        {
            var userExt = Task.Run(() => _securityService.FindByIdAsync(user.Id, UserDetails.Export)).Result;
            if (userExt != null)
            {
                platformExportObj.Users.Add(userExt);
            }
        }
    }

    //Export setting for selected modules
    if (manifest.HandleSettings)
    {
        progressInfo.Description = String.Format("Settings: selected modules settings exporting...");
        progressCallback(progressInfo);
        platformExportObj.Settings = manifest.Modules.SelectMany(x => _settingsManager.GetModuleSettings(x.Id)).ToList();
    }

    //Dynamic properties
    var allTypes = _dynamicPropertyService.GetAvailableObjectTypeNames();
    progressInfo.Description = String.Format("Dynamic properties: load properties...");
    progressCallback(progressInfo);
    platformExportObj.DynamicProperties = allTypes.SelectMany(x => _dynamicPropertyService.GetProperties(x)).ToList();
    // Dictionary items are exported only for dictionary-typed dynamic properties.
    platformExportObj.DynamicPropertyDictionaryItems = platformExportObj.DynamicProperties.Where(x => x.IsDictionary).SelectMany(x => _dynamicPropertyService.GetDictionaryItems(x.Id)).ToList();

    //Notification templates
    progressInfo.Description = String.Format("Notifications: load templates...");
    progressCallback(progressInfo);
    platformExportObj.NotificationTemplates = _notificationTemplateService.GetAllTemplates().ToList();

    //Create part for platform entries
    var platformEntiriesPart = package.CreatePart(_platformEntriesPartUri, System.Net.Mime.MediaTypeNames.Application.Octet, CompressionOption.Normal);
    using (var partStream = platformEntiriesPart.GetStream())
    {
        platformExportObj.SerializeJson<PlatformExportEntries>(partStream);
    }
}
/// <summary>
/// Streams all currencies and package types to <paramref name="outStream"/> as a
/// single JSON object with *TotalCount properties followed by the record arrays.
/// </summary>
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    using (var sw = new StreamWriter(outStream, System.Text.Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        progressInfo.Description = "Currencies exporting...";
        progressCallback(progressInfo);

        // PERF: materialize once — the original enumerated the (potentially lazy)
        // result three times: Count(), foreach, and Count() again.
        var currencies = (await _currencyService.GetAllCurrenciesAsync()).ToArray();
        writer.WritePropertyName("CurrencyTotalCount");
        writer.WriteValue(currencies.Length);
        writer.WritePropertyName("Currencies");
        writer.WriteStartArray();
        foreach (var currency in currencies)
        {
            _serializer.Serialize(writer, currency);
        }
        writer.Flush();
        progressInfo.Description = $"{currencies.Length} currencies exported";
        progressCallback(progressInfo);
        writer.WriteEndArray();

        // Same materialize-once treatment for package types.
        var packageTypes = (await _packageTypesService.GetAllPackageTypesAsync()).ToArray();
        writer.WritePropertyName("PackageTypeTotalCount");
        writer.WriteValue(packageTypes.Length);
        writer.WritePropertyName("PackageTypes");
        writer.WriteStartArray();
        foreach (var packageType in packageTypes)
        {
            _serializer.Serialize(writer, packageType);
        }
        writer.Flush();
        progressInfo.Description = $"{packageTypes.Length} package types exported";
        progressCallback(progressInfo);
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}
/// <summary>
/// Restores the marketing backup: promotions (with legacy single-Store normalization),
/// content folders/places/items/publications, then coupons and usages in pages of 500.
/// </summary>
public virtual void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var backupObject = backupStream.DeserializeJson<BackupObject>();
    var progressInfo = new ExportImportProgressInfo();

    progressInfo.Description = $"{backupObject.Promotions.Count} promotions importing...";
    progressCallback(progressInfo);
    //legacy promotion compatability
    // Older exports carried a single Store id; fold it into the StoreIds list.
    foreach (var promotion in backupObject.Promotions)
    {
        if (promotion.StoreIds.IsNullOrEmpty())
        {
            promotion.StoreIds = new List<string>();
        }
        if (!promotion.Store.IsNullOrEmpty() && !promotion.StoreIds.Contains(promotion.Store))
        {
            promotion.StoreIds.Add(promotion.Store);
        }
    }
    SavePromotions(backupObject.Promotions.ToArray());

    progressInfo.Description = $"{backupObject.ContentFolders.Count} folders importing...";
    progressCallback(progressInfo);
    SaveFolders(backupObject.ContentFolders.ToArray());

    progressInfo.Description = $"{backupObject.ContentPlaces.Count} places importing...";
    progressCallback(progressInfo);
    SavePlaces(backupObject.ContentPlaces.ToArray());

    progressInfo.Description = $"{backupObject.ContentItems.Count} contents importing...";
    progressCallback(progressInfo);
    SaveContentItems(backupObject.ContentItems.ToArray());

    // PERF: compute the distinct publication set once — the original ran
    // Distinct() twice (once for the count, once for the save).
    var distinctPublications = backupObject.ContentPublications.Distinct().ToArray();
    progressInfo.Description = $"{distinctPublications.Length} publications importing...";
    progressCallback(progressInfo);
    SavePublications(distinctPublications);

    // Coupons and usages are imported page-by-page to bound memory and report progress.
    var pageSize = 500;

    var couponsTotal = backupObject.Coupons.Count;
    Paginate(couponsTotal, pageSize, (x) =>
    {
        progressInfo.Description = $"{Math.Min(x * pageSize, couponsTotal)} of {couponsTotal} coupons imported";
        progressCallback(progressInfo);
        SaveCoupons(backupObject.Coupons.Skip((x - 1) * pageSize).Take(pageSize).ToArray());
    });

    var usagesTotal = backupObject.Usages.Count;
    Paginate(usagesTotal, pageSize, (x) =>
    {
        progressInfo.Description = $"{Math.Min(x * pageSize, usagesTotal)} of {usagesTotal} usages imported";
        progressCallback(progressInfo);
        SaveUsages(backupObject.Usages.Skip((x - 1) * pageSize).Take(pageSize).ToArray());
    });
}
/// <summary>
/// Reads property dictionary items from the JSON array in pages of <c>_batchSize</c>,
/// persisting each page and reporting cumulative progress.
/// </summary>
private async Task ImportPropertyDictionaryItemsAsync(JsonTextReader reader, ExportImportProgressInfo progressInfo, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    await reader.DeserializeJsonArrayWithPagingAsync<PropertyDictionaryItem>(
        _jsonSerializer,
        _batchSize,
        // Persist each deserialized page as one batch.
        dictItems => _propertyDictionaryService.SaveChangesAsync(dictItems.ToArray()),
        // Report how many items have been handled so far.
        count =>
        {
            progressInfo.Description = $"{ count } property dictionary items have been imported";
            progressCallback(progressInfo);
        },
        cancellationToken);
}
/// <summary>
/// Streams customer orders out of the JSON backup and saves them in batches of
/// <c>_batchSize</c>, reporting progress against the declared OrderTotalCount.
/// </summary>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    //TODO: Use AbstractTypeFactory for deserialization of the derived types
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var orderTotalCount = 0;

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "OrderTotalCount")
                {
                    orderTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "CustomerOrders")
                {
                    // Step onto the array and into its first element.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var orders = new List<CustomerOrder>();
                        var orderCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var order = _serializer.Deserialize<CustomerOrder>(reader);
                            orders.Add(order);
                            orderCount++;
                            reader.Read();
                        }
                        for (var i = 0; i < orderCount; i += _batchSize)
                        {
                            await _customerOrderService.SaveChangesAsync(orders.Skip(i).Take(_batchSize).ToArray());
                            // BUGFIX: the original tested 'orderCount > 0', which is always
                            // true inside this loop (dead else branch), and reported 'i',
                            // which lags one batch behind. Use the declared total (previously
                            // read but never used) and the actual processed count.
                            var processedCount = Math.Min(i + _batchSize, orderCount);
                            if (orderTotalCount > 0)
                            {
                                progressInfo.Description = $"{ processedCount } of { orderTotalCount } orders have been imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{ processedCount } orders have been imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Streams thumbnail Options and Tasks to <paramref name="outStream"/> as one JSON
/// object, paging each collection through its search service.
/// </summary>
public async Task DoExportAsync(Stream outStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progress = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progress);

    using (var textWriter = new StreamWriter(outStream, Encoding.UTF8))
    using (var jsonWriter = new JsonTextWriter(textWriter))
    {
        await jsonWriter.WriteStartObjectAsync();

        // --- Options section ---
        progress.Description = "Options are started to export";
        progressCallback(progress);
        await jsonWriter.WritePropertyNameAsync("Options");
        await jsonWriter.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) =>
            {
                var criteria = AbstractTypeFactory<ThumbnailOptionSearchCriteria>.TryCreateInstance();
                criteria.Skip = skip;
                criteria.Take = take;
                return (GenericSearchResult<ThumbnailOption>)await _optionSearchService.SearchAsync(criteria);
            },
            (processedCount, totalCount) =>
            {
                progress.Description = $"{processedCount} of {totalCount} Options have been exported";
                progressCallback(progress);
            },
            cancellationToken);

        // --- Tasks section ---
        progress.Description = "Tasks are started to export";
        progressCallback(progress);
        await jsonWriter.WritePropertyNameAsync("Tasks");
        await jsonWriter.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) =>
            {
                var criteria = AbstractTypeFactory<ThumbnailTaskSearchCriteria>.TryCreateInstance();
                criteria.Skip = skip;
                criteria.Take = take;
                return (GenericSearchResult<ThumbnailTask>)await _taskSearchService.SearchAsync(criteria);
            },
            (processedCount, totalCount) =>
            {
                progress.Description = $"{processedCount} of {totalCount} Tasks have been exported";
                progressCallback(progress);
            },
            cancellationToken);

        await jsonWriter.WriteEndObjectAsync();
        await jsonWriter.FlushAsync();
    }
}
/// <summary>
/// Streams Member records out of the JSON backup and saves them in batches of
/// <c>_batchSize</c>, reporting progress against the declared MembersTotalCount.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo();
    var membersTotalCount = 0;

    using (var streamReader = new StreamReader(backupStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "MembersTotalCount")
                {
                    membersTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Members")
                {
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var members = new List<Member>();
                        var membersCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var member = _serializer.Deserialize<Member>(reader);
                            members.Add(member);
                            membersCount++;
                            reader.Read();
                            // BUGFIX: flush batches *inside* the read loop. The original
                            // placed this check after the loop, where TokenType == EndArray
                            // is always true — every member was buffered in memory and saved
                            // in one call, so _batchSize had no effect.
                            if (membersCount % _batchSize == 0 || reader.TokenType == JsonToken.EndArray)
                            {
                                _memberService.SaveChanges(members.ToArray());
                                members.Clear();
                                if (membersTotalCount > 0)
                                {
                                    progressInfo.Description = $"{ membersCount } of { membersTotalCount } members imported";
                                }
                                else
                                {
                                    progressInfo.Description = $"{ membersCount } members imported";
                                }
                                progressCallback(progressInfo);
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Loads image binaries for the given objects via the shared extension and surfaces
/// any per-image loading errors on the progress object.
/// </summary>
private void LoadImages(IHasImages[] haveImagesObjects, ExportImportProgressInfo progressInfo)
{
    var errors = haveImagesObjects.LoadImages(_blobStorageProvider);
    if (!errors.IsNullOrEmpty())
    {
        progressInfo.Errors.AddRange(errors);
    }
}
// Restores platform entries (roles, users, dynamic properties, module settings) from
// the dedicated package part, honoring the manifest's HandleSecurity/HandleSettings flags.
// NOTE(review): blocks on async security calls via .Result — deadlock risk on a sync context.
private void ImportPlatformEntriesInternal(Package package, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo();
    var platformEntriesPart = package.GetPart(_platformEntriesPartUri);
    if (platformEntriesPart != null)
    {
        PlatformExportEntries platformEntries;
        using (var stream = platformEntriesPart.GetStream())
        {
            platformEntries = stream.DeserializeJson<PlatformExportEntries>(GetJsonSerializer());
        }

        //Import security objects
        if (manifest.HandleSecurity)
        {
            progressInfo.Description = String.Format("Import {0} users with roles...", platformEntries.Users.Count());
            progressCallback(progressInfo);

            //First need import roles
            // Roles must exist before users that reference them are created/updated.
            foreach (var role in platformEntries.Roles)
            {
                _roleManagementService.AddOrUpdateRole(role);
            }

            //Next create or update users
            // Existing ids are updated; unknown ids are created.
            foreach (var user in platformEntries.Users)
            {
                if (_securityService.FindByIdAsync(user.Id, UserDetails.Reduced).Result != null)
                {
                    var dummy = _securityService.UpdateAsync(user).Result;
                }
                else
                {
                    var dummy = _securityService.CreateAsync(user).Result;
                }
            }
        }

        //Import modules settings
        if (manifest.HandleSettings)
        {
            //Import dynamic properties
            // NOTE(review): dynamic properties are imported under the HandleSettings flag,
            // not a dedicated flag — confirm this grouping is intentional.
            _dynamicPropertyService.SaveProperties(platformEntries.DynamicProperties.ToArray());
            foreach (var propDicGroup in platformEntries.DynamicPropertyDictionaryItems.GroupBy(x => x.PropertyId))
            {
                _dynamicPropertyService.SaveDictionaryItems(propDicGroup.Key, propDicGroup.ToArray());
            }
            // Persist only settings belonging to modules listed in the manifest.
            foreach (var module in manifest.Modules)
            {
                _settingsManager.SaveSettings(platformEntries.Settings.Where(x => x.ModuleId == module.Id).ToArray());
            }
        }
    }
}
/// <summary>
/// Restores the catalog backup: catalogs, categories (hierarchy-ordered), properties,
/// optionally image binaries, and finally products.
/// </summary>
public void DoImport(Stream backupStream, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo();
    var backupObject = backupStream.DeserializeJson<BackupObject>();
    var originalObject = GetBackupObject(progressCallback, false);

    progressInfo.Description = String.Format("{0} catalogs importing...", backupObject.Catalogs.Count());
    progressCallback(progressInfo);
    UpdateCatalogs(originalObject.Catalogs, backupObject.Catalogs);

    progressInfo.Description = String.Format("{0} categories importing...", backupObject.Categories.Count());
    progressCallback(progressInfo);
    //Categories should be sorted right way (because it have a hierarchy structure and links to virtual categories)
    backupObject.Categories = backupObject.Categories.Where(x => x.Links == null || !x.Links.Any())
        .OrderBy(x => x.Parents != null ? x.Parents.Count() : 0)
        .Concat(backupObject.Categories.Where(x => x.Links != null && x.Links.Any()))
        .ToList();
    backupObject.Products = backupObject.Products.OrderBy(x => x.MainProductId).ToList();
    UpdateCategories(originalObject.Categories, backupObject.Categories);
    UpdateProperties(originalObject.Properties, backupObject.Properties);

    //Binary data
    if (manifest.HandleBinaryData)
    {
        var allBackupImages = backupObject.Products.SelectMany(x => x.Images);
        allBackupImages = allBackupImages.Concat(backupObject.Categories.SelectMany(x => x.Images));
        allBackupImages = allBackupImages.Concat(backupObject.Products.SelectMany(x => x.Variations).SelectMany(x => x.Images));

        // PERF: materialize the original images once — the lazy chain was previously
        // re-enumerated for every Contains() probe below.
        var allOrigImages = originalObject.Products.SelectMany(x => x.Images)
            .Concat(originalObject.Categories.SelectMany(x => x.Images))
            .Concat(originalObject.Products.SelectMany(x => x.Variations).SelectMany(x => x.Images))
            .ToArray();

        // PERF: materialize the new-image set once — the original left it lazy and
        // enumerated it twice (Count() for the message, then the upload loop).
        var allNewImages = allBackupImages
            .Where(x => !allOrigImages.Contains(x))
            .Where(x => x.BinaryData != null)
            .ToArray();

        var index = 0;
        var progressTemplate = "{0} of " + allNewImages.Length + " images uploading";
        foreach (var image in allNewImages)
        {
            progressInfo.Description = String.Format(progressTemplate, index);
            progressCallback(progressInfo);
            using (var stream = new MemoryStream(image.BinaryData))
            {
                // Upload rewrites the image Url to its new blob location.
                image.Url = _blobStorageProvider.Upload(new UploadStreamInfo { FileByteStream = stream, FileName = image.Name, FolderName = "catalog" });
            }
            index++;
        }
    }

    progressInfo.Description = String.Format("{0} products importing...", backupObject.Products.Count());
    progressCallback(progressInfo);
    UpdateCatalogProducts(originalObject.Products, backupObject.Products);
}
/// <summary>
/// Restores tagged items from the serialized backup in a single save call.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var backup = backupStream.DeserializeJson<BackupObject>();

    var progress = new ExportImportProgressInfo
    {
        Description = String.Format("{0} tagged items importing...", backup.TaggedItems.Count)
    };
    progressCallback(progress);

    _taggedItemService.SaveTaggedItems(backup.TaggedItems.ToArray());
}
/// <summary>
/// Restores quote requests (RFQs) from the serialized backup in a single save call.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var backup = backupStream.DeserializeJson<BackupObject>();

    var progress = new ExportImportProgressInfo
    {
        Description = String.Format("{0} RFQs importing", backup.QuoteRequests.Count())
    };
    progressCallback(progress);

    _quoteRequestService.SaveChanges(backup.QuoteRequests.ToArray());
}
/// <summary>
/// Streams Store records out of the JSON backup and saves them in batches of
/// <c>BatchSize</c>, reporting progress against the declared StoresTotalCount.
/// </summary>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    int storeTotalCount = 0;

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "StoresTotalCount")
                {
                    storeTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Store")
                {
                    // BUGFIX: advance onto the array token and step into it before
                    // deserializing elements — the original started the element loop
                    // while still positioned on the property name, unlike the sibling
                    // order/member importers which read past StartArray first.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var stores = new List<Store>();
                        var storeCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var store = _serializer.Deserialize<Store>(reader);
                            stores.Add(store);
                            storeCount++;
                            reader.Read();
                        }
                        for (int i = 0; i < storeCount; i += BatchSize)
                        {
                            var batchStores = stores.Skip(i).Take(BatchSize);
                            foreach (var store in batchStores)
                            {
                                await _storeService.SaveChangesAsync(new[] { store });
                            }
                            // BUGFIX: 'storeCount > 0' was always true inside this loop
                            // (dead else), and 'i' lagged one batch behind; report the
                            // real processed count against the declared total.
                            var processedCount = Math.Min(i + BatchSize, storeCount);
                            if (storeTotalCount > 0)
                            {
                                progressInfo.Description = $"{processedCount} of {storeTotalCount} stores imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{processedCount} stores imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Restores members from the backup. The current state is loaded first (side effect:
/// progress reporting); members are saved ordered by descending MemberType —
/// presumably so parent member types precede dependents; verify against MemberType values.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var backup = backupStream.DeserializeJson<BackupObject>();
    var original = GetBackupObject(progressCallback);

    var progress = new ExportImportProgressInfo
    {
        Description = String.Format("{0} members importing...", backup.Members.Count())
    };
    progressCallback(progress);

    var orderedMembers = backup.Members.OrderByDescending(x => x.MemberType).ToArray();
    _memberService.CreateOrUpdate(orderedMembers);
}
/// <summary>
/// Pages through the JSON array of catalogs, saving each page and reporting
/// cumulative progress.
/// </summary>
private async Task ImportCatalogsAsync(JsonTextReader reader, ExportImportProgressInfo progressInfo, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    await reader.DeserializeJsonArrayWithPagingAsync<Catalog>(
        _jsonSerializer,
        _batchSize,
        async batch => await _catalogService.SaveChangesAsync(batch.ToArray()),
        count =>
        {
            progressInfo.Description = $"{ count } catalogs have been imported";
            progressCallback(progressInfo);
        },
        cancellationToken);
}
/// <summary>
/// Builds the store backup by re-loading each store individually by id —
/// presumably because the list call returns lightweight projections; verify.
/// </summary>
private BackupObject GetBackupObject(Action<ExportImportProgressInfo> progressCallback)
{
    progressCallback(new ExportImportProgressInfo("stores loading..."));

    var stores = _storeService.GetStoreList()
        .Select(x => x.Id)
        .Select(id => _storeService.GetById(id))
        .ToArray();

    return new BackupObject { Stores = stores };
}
/// <summary>
/// Imports CMS data from the JSON backup: menu link lists always; themes/pages
/// (binary content folders) only when options.HandleBinaryData is set.
/// </summary>
public async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "MenuLinkLists")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<MenuLinkList>(_jsonSerializer, _batchSize, async items =>
                    {
                        foreach (var item in items)
                        {
                            await _menuService.AddOrUpdateAsync(item);
                        }
                    }, processedCount =>
                    {
                        progressInfo.Description = $"{ processedCount } menu links have been imported";
                        progressCallback(progressInfo);
                    }, cancellationToken);
                }
                else if (reader.Value.ToString() == "CmsContent")
                {
                    // Binary CMS content is imported only on explicit request.
                    if (options != null && options.HandleBinaryData)
                    {
                        progressInfo.Description = "importing binary data: themes and pages importing...";
                        progressCallback(progressInfo);
                        await reader.DeserializeJsonArrayWithPagingAsync<ContentFolder>(_jsonSerializer, _batchSize, items =>
                        {
                            foreach (var item in items)
                            {
                                SaveContentFolderRecursive(item, progressCallback);
                            }
                            return Task.CompletedTask;
                        }, processedCount =>
                        {
                            // BUGFIX: this message previously said "menu links have been
                            // imported" — a copy-paste from the branch above — while this
                            // branch imports CMS content folders.
                            progressInfo.Description = $"{ processedCount } content folders have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Captures the notification pattern and callback, and initializes the progress
/// state with zero processed items and the formatted initial description.
/// </summary>
public ProgressNotifier(string notifyPattern, int totalCount, Action<ExportImportProgressInfo> progressCallback)
{
    _notifyPattern = notifyPattern;
    _progressCallback = progressCallback;

    var initialProgress = new ExportImportProgressInfo
    {
        TotalCount = totalCount,
        ProcessedCount = 0,
        // The pattern receives (total, processed) — here (totalCount, 0).
        Description = String.Format(notifyPattern, totalCount, 0),
    };
    _progressInfo = initialProgress;
}
/// <summary>
/// Streams fulfillment centers and inventory records to <paramref name="outStream"/>
/// as one JSON object, paging each collection through its search service.
/// </summary>
public async Task DoExportAsync(Stream outStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progress = new ExportImportProgressInfo { Description = "The fulfilmentCenters are loading" };
    progressCallback(progress);

    using (var textWriter = new StreamWriter(outStream, Encoding.UTF8))
    using (var jsonWriter = new JsonTextWriter(textWriter))
    {
        await jsonWriter.WriteStartObjectAsync();

        // --- Fulfillment centers section ---
        await jsonWriter.WritePropertyNameAsync("FulfillmentCenters");
        await jsonWriter.SerializeJsonArrayWithPagingAsync(_jsonSerializer, BatchSize,
            async (skip, take) =>
            {
                var criteria = AbstractTypeFactory<FulfillmentCenterSearchCriteria>.TryCreateInstance();
                criteria.Skip = skip;
                criteria.Take = take;
                return (GenericSearchResult<FulfillmentCenter>)await _fulfillmentCenterSearchService.SearchCentersAsync(criteria);
            },
            (processedCount, totalCount) =>
            {
                progress.Description = $"{processedCount} of {totalCount} FulfillmentCenters have been exported";
                progressCallback(progress);
            },
            cancellationToken);

        // --- Inventories section ---
        progress.Description = "The Inventories are loading";
        progressCallback(progress);
        await jsonWriter.WritePropertyNameAsync("Inventories");
        await jsonWriter.SerializeJsonArrayWithPagingAsync(_jsonSerializer, BatchSize,
            async (skip, take) =>
            {
                var criteria = AbstractTypeFactory<InventorySearchCriteria>.TryCreateInstance();
                criteria.Skip = skip;
                criteria.Take = take;
                return (GenericSearchResult<InventoryInfo>)await _inventorySearchService.SearchInventoriesAsync(criteria);
            },
            (processedCount, totalCount) =>
            {
                progress.Description = $"{processedCount} of {totalCount} inventories have been exported";
                progressCallback(progress);
            },
            cancellationToken);

        await jsonWriter.WriteEndObjectAsync();
        await jsonWriter.FlushAsync();
    }
}
/// <summary>
/// Announces the load phase, then serializes the full backup object to the stream.
/// </summary>
public void DoExport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    GetBackupObject().SerializeJson(backupStream);
}
/// <summary>
/// Restores stores from the backup by reconciling them against the current state.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    var backup = backupStream.DeserializeJson<BackupObject>();
    var original = GetBackupObject(progressCallback);

    var progress = new ExportImportProgressInfo
    {
        Description = String.Format("{0} stores importing...", backup.Stores.Count())
    };
    progressCallback(progress);

    UpdateStores(original.Stores, backup.Stores);
}
/// <summary>
/// Persists CSV products in small batches (products, then inventories and prices),
/// collecting validation and general errors into progressInfo without aborting.
/// Progress mutations are serialized via _lockObject — presumably because callers
/// may run this concurrently; verify.
/// </summary>
private void SaveProducts(List<CsvProduct> csvProducts, ExportImportProgressInfo progressInfo, Action<ExportImportProgressInfo> progressCallback)
{
    // Named constant instead of the magic number 10 that appeared twice inline.
    const int batchSize = 10;

    var defaultFulfilmentCenter = _fulfillmentCenterSearchService.SearchCenters(new Domain.Inventory.Model.Search.FulfillmentCenterSearchCriteria { Take = 1 }).Results.FirstOrDefault();

    // List<T> exposes Count directly; the Count() extension was an unnecessary call.
    var totalProductsCount = csvProducts.Count;
    for (int i = 0; i < totalProductsCount; i += batchSize)
    {
        var products = csvProducts.Skip(i).Take(batchSize).ToList();
        try
        {
            _productService.Update(products.ToArray());
            SaveProductInventories(products, defaultFulfilmentCenter);
            SaveProductPrices(products);
        }
        catch (FluentValidation.ValidationException validationEx)
        {
            lock (_lockObject)
            {
                // Group validation failures per property so each produces one error line.
                foreach (var validationErrorGroup in validationEx.Errors.GroupBy(x => x.PropertyName))
                {
                    string errorMessage = string.Join("; ", validationErrorGroup.Select(x => x.ErrorMessage));
                    progressInfo.Errors.Add(errorMessage);
                    progressCallback(progressInfo);
                }
            }
        }
        catch (Exception ex)
        {
            lock (_lockObject)
            {
                progressInfo.Errors.Add(ex.ToString());
                progressCallback(progressInfo);
            }
        }
        finally
        {
            lock (_lockObject)
            {
                //Raise notification
                progressInfo.ProcessedCount += products.Count;
                progressInfo.Description = $"Saving products: {progressInfo.ProcessedCount} of {progressInfo.TotalCount} created";
                progressCallback(progressInfo);
            }
        }
    }
}
// Reads CSV rows into CsvProduct records using the mapping configured in importInfo,
// collecting per-row parse errors into progressInfo instead of aborting, then hands
// the parsed products to DoImport for persistence.
public async Task DoImportAsync(Stream inputStream, CsvImportInfo importInfo, Action<ExportImportProgressInfo> progressCallback)
{
    var csvProducts = new List<CsvProduct>();
    var progressInfo = new ExportImportProgressInfo { Description = "Reading products from csv..." };
    progressCallback(progressInfo);

    // Detect the stream's character encoding before handing it to CsvReader.
    var encoding = DetectEncoding(inputStream);

    using (var reader = new CsvReader(new StreamReader(inputStream, encoding)))
    {
        reader.Configuration.Delimiter = importInfo.Configuration.Delimiter;
        reader.Configuration.RegisterClassMap(new CsvProductMap(importInfo.Configuration));
        // Missing columns are tolerated silently rather than treated as errors.
        reader.Configuration.MissingFieldFound = (strings, i, arg3) =>
        {
            //do nothing
        };
        reader.Configuration.TrimOptions = TrimOptions.Trim;

        while (reader.Read())
        {
            try
            {
                var csvProduct = reader.GetRecord<CsvProduct>();
                ReplaceEmptyStringsWithNull(csvProduct);
                csvProducts.Add(csvProduct);
            }
            catch (TypeConverterException ex)
            {
                // Conversion failures are attributed to the offending column.
                progressInfo.Errors.Add($"Column: {ex.MemberMapData.Member.Name}, {ex.Message}");
                progressCallback(progressInfo);
            }
            catch (Exception ex)
            {
                // CsvHelper attaches row/field context under ex.Data["CsvHelper"].
                var error = ex.Message;
                if (ex.Data.Contains("CsvHelper"))
                {
                    error += ex.Data["CsvHelper"];
                }
                progressInfo.Errors.Add(error);
                progressCallback(progressInfo);
            }
        }
    }

    await DoImport(csvProducts, importInfo, progressInfo, progressCallback);
}
/// <summary>
/// Pages through the JSON array of categories, saving each page and then uploading
/// any embedded image binaries for that page.
/// </summary>
private async Task ImportCategoriesAsync(JsonTextReader reader, ExportImportProgressInfo progressInfo, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    await reader.DeserializeJsonArrayWithPagingAsync<Category>(
        _jsonSerializer,
        _batchSize,
        async batch =>
        {
            var categories = batch.ToArray();
            await _categoryService.SaveChangesAsync(categories);
            ImportImages(categories.OfType<IHasImages>().ToArray(), progressInfo);
        },
        count =>
        {
            progressInfo.Description = $"{ count } categories have been imported";
            progressCallback(progressInfo);
        },
        cancellationToken);
}
/// <summary>
/// Restores customer orders from the backup by reconciling them against the current state.
/// </summary>
public void DoImport(Stream backupStream, Action<ExportImportProgressInfo> progressCallback)
{
    progressCallback(new ExportImportProgressInfo { Description = "loading data..." });

    var backup = backupStream.DeserializeJson<BackupObject>();
    var original = GetBackupObject();

    UpdateOrders(original.CustomerOrders, backup.CustomerOrders);
}
/// <summary>
/// Builds the store backup by loading every store in a single unbounded search.
/// </summary>
private BackupObject GetBackupObject(Action<ExportImportProgressInfo> progressCallback)
{
    progressCallback(new ExportImportProgressInfo("stores loading..."));

    var criteria = new SearchCriteria { Take = int.MaxValue };
    return new BackupObject
    {
        Stores = _storeService.SearchStores(criteria).Stores
    };
}
/// <summary>
/// Builds the pricing backup by loading every price list individually by id.
/// NOTE(review): this is N+1 service calls — confirm whether a bulk
/// GetPricelistById overload exists before changing the call pattern.
/// </summary>
private BackupObject GetBackupObject(Action<ExportImportProgressInfo> progressCallback)
{
    // PERF: materialize the id list once — the original lazy sequence was
    // enumerated twice (Count() for the message, then again for the loads).
    var allPricelistIds = _pricingService.GetPriceLists().Select(x => x.Id).ToArray();

    var progressInfo = new ExportImportProgressInfo
    {
        Description = String.Format("{0} price lists loading...", allPricelistIds.Length)
    };
    progressCallback(progressInfo);

    return new BackupObject
    {
        Pricelists = allPricelistIds.Select(x => _pricingService.GetPricelistById(x)).ToList()
    };
}
// Builds the marketing backup: promotions plus the dynamic-content tree
// (folders, places, content items, publications), reporting progress per phase.
private BackupObject GetBackupObject(Action<ExportImportProgressInfo> progressCallback)
{
    var result = new BackupObject();
    var progressInfo = new ExportImportProgressInfo { Description = "Search promotions..." };
    progressCallback(progressInfo);

    var allPromotions = _marketingSearchService.SearchResources(new MarketingSearchCriteria { Count = int.MaxValue, ResponseGroup = SearchResponseGroup.WithPromotions }).Promotions;
    progressInfo.Description = String.Format("{0} promotions loading...", allPromotions.Count());
    progressCallback(progressInfo);
    // Re-load each promotion by id — presumably the search returns lightweight
    // projections; verify against the service implementation.
    result.Promotions = allPromotions.Select(x => _promotionService.GetPromotionById(x.Id)).ToList();

    progressInfo.Description = "Search dynamic content objects...";
    progressCallback(progressInfo);
    // Walk the folder tree from the root (null folder id); Traverse flattens the hierarchy.
    var searchResult = SearchInFolder(null);
    var allFolderSearchResults = searchResult != null ? searchResult.Traverse(ChildrenForFolder).ToArray() : null;
    if (allFolderSearchResults != null)
    {
        progressInfo.Description = String.Format("Loading folders...");
        progressCallback(progressInfo);
        result.ContentFolders = allFolderSearchResults.SelectMany(x => x.ContentFolders).ToList();

        // Places, items and publications are re-loaded individually by id.
        progressInfo.Description = String.Format("Loading places...");
        progressCallback(progressInfo);
        result.ContentPlaces = allFolderSearchResults.SelectMany(x => x.ContentPlaces)
            .Select(x => _dynamicContentService.GetPlaceById(x.Id))
            .ToList();

        progressInfo.Description = String.Format("Loading contents...");
        progressCallback(progressInfo);
        result.ContentItems = allFolderSearchResults.SelectMany(x => x.ContentItems)
            .Select(x => _dynamicContentService.GetContentItemById(x.Id))
            .ToList();

        progressInfo.Description = String.Format("Loading publications...");
        progressCallback(progressInfo);
        result.ContentPublications = allFolderSearchResults.SelectMany(x => x.ContentPublications)
            .Select(x => _dynamicContentService.GetPublicationById(x.Id))
            .ToList();
    }
    return result;
}
/// <summary>
/// Streams all stores to <paramref name="outStream"/> as a JSON object with a
/// StoreTotalCount property followed by the Stores array, paging by BatchSize.
/// </summary>
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo { Description = "The store are loading" };
    progressCallback(progressInfo);

    using (var sw = new StreamWriter(outStream, Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        progressInfo.Description = "Evaluation the number of store records";
        progressCallback(progressInfo);

        // The first search both provides the total count and fetches the first page.
        var searchResult = await _storeSearchService.SearchStoresAsync(new StoreSearchCriteria { Take = BatchSize });
        var totalCount = searchResult.TotalCount;
        writer.WritePropertyName("StoreTotalCount");
        writer.WriteValue(totalCount);

        writer.WritePropertyName("Stores");
        writer.WriteStartArray();
        // BUGFIX: the original loop started at i = BatchSize and only ever fetched
        // Skip >= BatchSize, so the first page of stores was never serialized —
        // up to BatchSize records were silently dropped from every export.
        for (int i = 0; i < totalCount; i += BatchSize)
        {
            progressInfo.Description = $"{i} of {totalCount} stores have been loaded";
            progressCallback(progressInfo);

            if (i > 0)
            {
                // Reuse the page already fetched by the count query on the first pass.
                searchResult = await _storeSearchService.SearchStoresAsync(new StoreSearchCriteria { Skip = i, Take = BatchSize });
            }
            foreach (var store in searchResult.Results)
            {
                _serializer.Serialize(writer, store);
            }
            writer.Flush();
            progressInfo.Description = $"{ Math.Min(totalCount, i + BatchSize) } of { totalCount } stores exported";
            progressCallback(progressInfo);
        }
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}