public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Streams every customer order into the output stream as one JSON object:
    // { "OrderTotalCount": <n>, "CustomerOrders": [ ... ] }.
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "Orders are loading" };
    progressCallback(progressInfo);

    using (var streamWriter = new StreamWriter(outStream, Encoding.UTF8))
    {
        using (var jsonWriter = new JsonTextWriter(streamWriter))
        {
            jsonWriter.WriteStartObject();

            // NOTE(review): all orders are fetched in a single request (Take = int.MaxValue).
            var searchResult = await _customerOrderSearchService.SearchCustomerOrdersAsync(new CustomerOrderSearchCriteria { Take = int.MaxValue });

            jsonWriter.WritePropertyName("OrderTotalCount");
            jsonWriter.WriteValue(searchResult.TotalCount);

            jsonWriter.WritePropertyName("CustomerOrders");
            jsonWriter.WriteStartArray();
            foreach (var customerOrder in searchResult.Results)
            {
                _serializer.Serialize(jsonWriter, customerOrder);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WriteEndObject();
            jsonWriter.Flush();
        }
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions exportOptions, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports thumbnail module data as one JSON object with "Options" and "Tasks" sections,
    // each written by its dedicated helper.
    // FIX: removed the unused local `moduleExportOptions` — the downcast result was never read.
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    using (var sw = new StreamWriter(outStream, Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        await ExportOptions(writer, _serializer, progressInfo, progressCallback, cancellationToken);
        await ExportTasksAsync(writer, _serializer, progressInfo, progressCallback, cancellationToken);

        writer.WriteEndObject();
        writer.Flush();
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports all currencies and all package types into a single JSON object:
    // { "CurrencyTotalCount", "Currencies", "PackageTypeTotalCount", "PackageTypes" }.
    // FIX: each service result is materialized into an array exactly once; the original
    // enumerated the returned IEnumerable several times (Count() + foreach + Count() again).
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    using (var sw = new StreamWriter(outStream, System.Text.Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        progressInfo.Description = "Currencies exporting...";
        progressCallback(progressInfo);

        var currencies = (await _currencyService.GetAllCurrenciesAsync()).ToArray();
        writer.WritePropertyName("CurrencyTotalCount");
        writer.WriteValue(currencies.Length);
        writer.WritePropertyName("Currencies");
        writer.WriteStartArray();
        foreach (var currency in currencies)
        {
            _serializer.Serialize(writer, currency);
        }
        writer.Flush();
        progressInfo.Description = $"{currencies.Length} currencies exported";
        progressCallback(progressInfo);
        writer.WriteEndArray();

        var packageTypes = (await _packageTypesService.GetAllPackageTypesAsync()).ToArray();
        writer.WritePropertyName("PackageTypeTotalCount");
        writer.WriteValue(packageTypes.Length);
        writer.WritePropertyName("PackageTypes");
        writer.WriteStartArray();
        foreach (var packageType in packageTypes)
        {
            _serializer.Serialize(writer, packageType);
        }
        writer.Flush();
        progressInfo.Description = $"{packageTypes.Length} package types exported";
        progressCallback(progressInfo);
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports customer orders from the JSON produced by the matching ExportAsync
    // ("OrderTotalCount" / "CustomerOrders" properties), saving in batches of _batchSize.
    // TODO: Use AbstractTypeFactory for deserialization of the derived types
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var orderTotalCount = 0;
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "OrderTotalCount")
                {
                    orderTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "CustomerOrders")
                {
                    // Step past the property name onto the array, then onto its first element.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var orders = new List<CustomerOrder>();
                        var orderCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var order = _serializer.Deserialize<CustomerOrder>(reader);
                            orders.Add(order);
                            orderCount++;
                            reader.Read();
                        }
                        for (var i = 0; i < orderCount; i += _batchSize)
                        {
                            await _customerOrderService.SaveChangesAsync(orders.Skip(i).Take(_batchSize).ToArray());
                            // FIX: the original tested orderCount (always > 0 inside this loop), so the
                            // else-branch was unreachable and orderTotalCount was never used; it also
                            // reported the batch start index `i` rather than how many were processed.
                            var processedCount = Math.Min(i + _batchSize, orderCount);
                            if (orderTotalCount > 0)
                            {
                                progressInfo.Description = $"{ processedCount } of { orderTotalCount } orders have been imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{ processedCount } orders have been imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
public async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports menu link lists and, when binary data handling is enabled, the CMS content
    // folders (themes and pages) from the JSON stream.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType != JsonToken.PropertyName)
            {
                continue;
            }
            var propertyName = reader.Value.ToString();
            if (propertyName == "MenuLinkLists")
            {
                await reader.DeserializeJsonArrayWithPagingAsync<MenuLinkList>(_jsonSerializer, _batchSize,
                    async items =>
                    {
                        foreach (var item in items)
                        {
                            await _menuService.AddOrUpdateAsync(item);
                        }
                    },
                    processedCount =>
                    {
                        progressInfo.Description = $"{ processedCount } menu links have been imported";
                        progressCallback(progressInfo);
                    }, cancellationToken);
            }
            else if (propertyName == "CmsContent")
            {
                // Binary data (themes/pages) is only imported when explicitly requested.
                if (options != null && options.HandleBinaryData)
                {
                    progressInfo.Description = "importing binary data: themes and pages importing...";
                    progressCallback(progressInfo);
                    await reader.DeserializeJsonArrayWithPagingAsync<ContentFolder>(_jsonSerializer, _batchSize,
                        items =>
                        {
                            foreach (var item in items)
                            {
                                SaveContentFolderRecursive(item, progressCallback);
                            }
                            return Task.CompletedTask;
                        },
                        processedCount =>
                        {
                            // FIX: the original message said "menu links" here — a copy/paste slip
                            // from the branch above; this branch imports cms content folders.
                            progressInfo.Description = $"{ processedCount } cms content folders have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                }
            }
        }
    }
}
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports stores from the JSON produced by the matching store ExportAsync, saving
    // in batches of BatchSize.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var storeTotalCount = 0;
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                // FIX: the original matched "StoresTotalCount"/"Store" while the exporter writes
                // "StoreTotalCount"/"Stores", so neither property was ever found on import.
                if (reader.Value.ToString() == "StoreTotalCount")
                {
                    storeTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Stores")
                {
                    // FIX: step past the property name onto the array and into its first element;
                    // the original began deserializing while still positioned on the property token.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var stores = new List<Store>();
                        var storeCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var store = _serializer.Deserialize<Store>(reader);
                            stores.Add(store);
                            storeCount++;
                            reader.Read();
                        }
                        for (var i = 0; i < storeCount; i += BatchSize)
                        {
                            // FIX: save the whole batch in one call; the original issued one
                            // SaveChangesAsync per store, defeating the batching.
                            var batchStores = stores.Skip(i).Take(BatchSize).ToArray();
                            await _storeService.SaveChangesAsync(batchStores);
                            // FIX: the original tested storeCount (always > 0 here) and never
                            // used storeTotalCount; also report processed count, not batch start.
                            var processedCount = Math.Min(i + BatchSize, storeCount);
                            if (storeTotalCount > 0)
                            {
                                progressInfo.Description = $"{processedCount} of {storeTotalCount} stores imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{processedCount} stores imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
private async Task ImportProductsAsync(JsonTextReader reader, ExportImportOptions options, ExportImportProgressInfo progressInfo, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports products page by page, stripping their associations first, then re-attaches
    // the associations in a second pass to avoid dependency conflicts in the database.
    var associationBackupMap = new Dictionary<string, IList<ProductAssociation>>();

    await reader.DeserializeJsonArrayWithPagingAsync<CatalogProduct>(_jsonSerializer, _batchSize, async items =>
    {
        var products = items.Select(product =>
        {
            // Do not save associations within the product to prevent dependency conflicts in db;
            // they are backed up here and saved separately after the product import.
            if (!product.Associations.IsNullOrEmpty())
            {
                associationBackupMap[product.Id] = product.Associations;
            }
            product.Associations = null;
            return product;
        }).ToArray();

        // FIX: `products` is already an array — the original called .ToArray() on it again.
        await _itemService.SaveChangesAsync(products);

        if (options != null && options.HandleBinaryData)
        {
            ImportImages(products.OfType<IHasImages>().ToArray(), progressInfo);
        }
    }, processedCount =>
    {
        progressInfo.Description = $"{ processedCount } products have been imported";
        progressCallback(progressInfo);
    }, cancellationToken);

    // Second pass: import product associations separately to avoid DB constraint violations.
    var totalProductsWithAssociationsCount = associationBackupMap.Count;
    for (var i = 0; i < totalProductsWithAssociationsCount; i += _batchSize)
    {
        var fakeProducts = new List<CatalogProduct>();
        foreach (var pair in associationBackupMap.Skip(i).Take(_batchSize))
        {
            // Stub products carrying only Id + Associations — enough for the association service.
            var fakeProduct = AbstractTypeFactory<CatalogProduct>.TryCreateInstance();
            fakeProduct.Id = pair.Key;
            fakeProduct.Associations = pair.Value;
            fakeProducts.Add(fakeProduct);
        }
        await _associationService.SaveChangesAsync(fakeProducts.OfType<IHasAssociations>().ToArray());
        progressInfo.Description = $"{ Math.Min(totalProductsWithAssociationsCount, i + _batchSize) } of { totalProductsWithAssociationsCount } products associations imported";
        progressCallback(progressInfo);
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports all stores, paged by BatchSize, as { "StoreTotalCount": <n>, "Stores": [ ... ] }.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo { Description = "The store are loading" };
    progressCallback(progressInfo);
    using (var sw = new StreamWriter(outStream, Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        progressInfo.Description = "Evaluation the number of store records";
        progressCallback(progressInfo);

        // The first search both yields the total count and fetches page 0.
        var searchResult = await _storeSearchService.SearchStoresAsync(new StoreSearchCriteria { Take = BatchSize });
        var totalCount = searchResult.TotalCount;
        writer.WritePropertyName("StoreTotalCount");
        writer.WriteValue(totalCount);

        writer.WritePropertyName("Stores");
        writer.WriteStartArray();
        // FIX: the original loop started at i = BatchSize and never serialized the page
        // fetched above, silently dropping the first BatchSize stores from the export.
        for (var i = 0; i < totalCount; i += BatchSize)
        {
            progressInfo.Description = $"{i} of {totalCount} stores have been loaded";
            progressCallback(progressInfo);
            if (i > 0)
            {
                searchResult = await _storeSearchService.SearchStoresAsync(new StoreSearchCriteria { Skip = i, Take = BatchSize });
            }
            foreach (var store in searchResult.Results)
            {
                _serializer.Serialize(writer, store);
            }
            writer.Flush();
            progressInfo.Description = $"{ Math.Min(totalCount, i + BatchSize) } of { totalCount } stores exported";
            progressCallback(progressInfo);
        }
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports every member (deep search), paged, as
    // { "MembersTotalCount": <n>, "Members": [ ... ] }.
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    var batchSize = await GetBatchSize();

    using (var streamWriter = new StreamWriter(outStream, Encoding.UTF8))
    {
        using (var jsonWriter = new JsonTextWriter(streamWriter))
        {
            jsonWriter.WriteStartObject();

            progressInfo.Description = "Members exporting...";
            progressCallback(progressInfo);

            // A zero-take search is used purely to obtain the total member count.
            var countResult = await _memberSearchService.SearchMembersAsync(new MembersSearchCriteria { Take = 0, DeepSearch = true });
            var memberCount = countResult.TotalCount;
            jsonWriter.WritePropertyName("MembersTotalCount");
            jsonWriter.WriteValue(memberCount);

            cancellationToken.ThrowIfCancellationRequested();

            jsonWriter.WritePropertyName("Members");
            jsonWriter.WriteStartArray();
            for (var skip = 0; skip < memberCount; skip += batchSize)
            {
                var pageCriteria = new MembersSearchCriteria { Skip = skip, Take = batchSize, DeepSearch = true };
                var page = await _memberSearchService.SearchMembersAsync(pageCriteria);
                foreach (var member in page.Results)
                {
                    _serializer.Serialize(jsonWriter, member);
                }
                jsonWriter.Flush();

                progressInfo.Description = $"{ Math.Min(memberCount, skip + batchSize) } of { memberCount } members exported";
                progressCallback(progressInfo);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WriteEndObject();
            jsonWriter.Flush();
        }
    }
}
public async Task ImportAsync(Stream inputStream, ExportImportOptions importOptions, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports the thumbnail module data: an "Options" array saved via the option service
    // and a "Tasks" array saved via the task service.
    // FIX: removed the unused local `moduleImportOptions` — the downcast result was never read.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    using (var streamReader = new StreamReader(inputStream))
    using (var jsonReader = new JsonTextReader(streamReader))
    {
        while (jsonReader.Read())
        {
            if (jsonReader.TokenType != JsonToken.PropertyName)
            {
                continue;
            }
            switch (jsonReader.Value.ToString())
            {
                case "Options":
                    // Advance onto the array before handing the reader to the serializer.
                    jsonReader.Read();
                    var options = _serializer.Deserialize<ThumbnailOption[]>(jsonReader);
                    progressInfo.Description = $"Importing {options.Length} options...";
                    progressCallback(progressInfo);
                    await _optionService.SaveOrUpdateAsync(options);
                    break;
                case "Tasks":
                    jsonReader.Read();
                    var tasks = _serializer.Deserialize<ThumbnailTask[]>(jsonReader);
                    progressInfo.Description = $"Importing {tasks.Length} tasks...";
                    progressCallback(progressInfo);
                    await _taskService.SaveChangesAsync(tasks);
                    break;
            }
        }
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports all notifications, paged by _batchSize, as
    // { "NotificationsTotalCount": <n>, "Notifications": [ ... ] }.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);
    using (var sw = new StreamWriter(outStream, System.Text.Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        progressInfo.Description = "Notifications exporting...";
        progressCallback(progressInfo);

        // FIX: the count query used Take = Int32.MaxValue, loading every notification
        // just to read TotalCount before re-loading them page by page below.
        // Take = 0 yields the count only (same pattern as the members exporter).
        var notificationsResult = await _notificationSearchService.SearchNotificationsAsync(new NotificationSearchCriteria { Take = 0, ResponseGroup = NotificationResponseGroup.Default.ToString() });
        writer.WritePropertyName("NotificationsTotalCount");
        writer.WriteValue(notificationsResult.TotalCount);

        writer.WritePropertyName("Notifications");
        writer.WriteStartArray();
        for (var i = 0; i < notificationsResult.TotalCount; i += _batchSize)
        {
            var searchResponse = await _notificationSearchService.SearchNotificationsAsync(new NotificationSearchCriteria { Skip = i, Take = _batchSize, ResponseGroup = NotificationResponseGroup.Full.ToString() });
            foreach (var notification in searchResponse.Results)
            {
                _serializer.Serialize(writer, notification);
            }
            writer.Flush();
            progressInfo.Description = $"{ Math.Min(notificationsResult.TotalCount, i + _batchSize) } of { notificationsResult.TotalCount } notifications exported";
            progressCallback(progressInfo);
        }
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Thin wrapper: resolve PricingExportImport from the DI container and delegate.
    // NOTE(review): `options` is not forwarded — DoImportAsync takes no options argument.
    await _applicationBuilder.ApplicationServices
        .GetRequiredService<PricingExportImport>()
        .DoImportAsync(inputStream, progressCallback, cancellationToken);
}
public Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Thin wrapper: resolve CustomerExportImport from DI and forward every argument.
    var customerExportImport = _appBuilder.ApplicationServices.GetRequiredService<CustomerExportImport>();
    return customerExportImport.ImportAsync(inputStream, options, progressCallback, cancellationToken);
}
// Streams the full catalog export as one JSON object with the properties "Catalogs",
// "Categories", "Properties", "PropertyDictionaryItems" and "Products", each written
// as a paged JSON array via SerializeJsonArrayWithPagingAsync (page size = _batchSize).
// Category and product pages additionally inline image binary data (LoadImages) when
// options.HandleBinaryData is set; progress is reported through progressCallback.
// NOTE(review): the "Catalogs" page callback ignores its skip/take arguments and returns
// the entire catalog list on every call — this is only correct if the paging helper stops
// once processedCount reaches TotalCount; verify against SerializeJsonArrayWithPagingAsync.
// NOTE(review): options.HandleBinaryData is dereferenced without a null check, unlike
// sibling import methods that test `options != null` first — confirm callers never pass null.
public async Task DoExportAsync(Stream outStream, ExportImportOptions options, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var progressInfo = new ExportImportProgressInfo { Description = "loading data..." }; progressCallback(progressInfo); using (var sw = new StreamWriter(outStream)) using (var writer = new JsonTextWriter(sw)) { await writer.WriteStartObjectAsync(); #region Export catalogs progressInfo.Description = "Catalogs exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Catalogs"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = (await _catalogService.GetCatalogsListAsync()).ToArray(); return(new GenericSearchResult <Catalog> { Results = searchResult, TotalCount = searchResult.Length }); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } catalogs have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion #region Export categories progressInfo.Description = "Categories exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Categories"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _categorySearchService.SearchCategoriesAsync(new CategorySearchCriteria { Skip = skip, Take = take }); var categories = searchResult.Results; if (options.HandleBinaryData) { LoadImages(categories.OfType <IHasImages>().ToArray(), progressInfo); } return(new GenericSearchResult <Category> { Results = categories, TotalCount = searchResult.TotalCount }); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } Categories have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion #region Export properties progressInfo.Description = 
"Properties exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Properties"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _propertySearchService.SearchPropertiesAsync(new PropertySearchCriteria { Skip = skip, Take = take }); return(new GenericSearchResult <Property> { Results = searchResult.Results, TotalCount = searchResult.TotalCount }); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } properties have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion #region Export propertyDictionaryItems progressInfo.Description = "PropertyDictionaryItems exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("PropertyDictionaryItems"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _propertyDictionarySearchService.SearchAsync(new PropertyDictionaryItemSearchCriteria { Skip = skip, Take = take }); return(new GenericSearchResult <PropertyDictionaryItem> { Results = searchResult.Results, TotalCount = searchResult.TotalCount }); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } property dictionary items have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion #region Export products progressInfo.Description = "Products exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Products"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _productSearchService.SearchProductsAsync(new ProductSearchCriteria { Skip = skip, Take = take }); if (options.HandleBinaryData) { LoadImages(searchResult.Results.OfType <IHasImages>().ToArray(), progressInfo); } return(new GenericSearchResult <CatalogProduct> { Results 
= searchResult.Results, TotalCount = searchResult.TotalCount }); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } Products have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion await writer.WriteEndObjectAsync(); await writer.FlushAsync(); } }
// Streams the full catalog export ("Properties", "PropertyDictionaryItems", "Catalogs",
// "Categories", "Products") as paged JSON arrays via SerializeJsonArrayWithPagingAsync.
// Each property/category/product is passed through ResetRedundantReferences before
// serialization, and categories/products get image binary data inlined via LoadImages
// (gated inside LoadImages by the options.HandleBinaryData flag it receives).
// Products are loaded with ItemResponseGroup.Full so the complete object graph is exported.
// NOTE(review): `options.HandleBinaryData` is read without a null check — confirm callers
// never pass a null options argument.
public async Task DoExportAsync(Stream outStream, ExportImportOptions options, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var progressInfo = new ExportImportProgressInfo { Description = "loading data..." }; progressCallback(progressInfo); using (var sw = new StreamWriter(outStream)) using (var writer = new JsonTextWriter(sw)) { await writer.WriteStartObjectAsync(); #region Export properties progressInfo.Description = "Properties exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Properties"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _propertySearchService.SearchPropertiesAsync(new PropertySearchCriteria { Skip = skip, Take = take }); foreach (var item in searchResult.Results) { ResetRedundantReferences(item); } return((GenericSearchResult <Property>)searchResult); } , (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } properties have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion Export properties #region Export propertyDictionaryItems progressInfo.Description = "PropertyDictionaryItems exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("PropertyDictionaryItems"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => (GenericSearchResult <PropertyDictionaryItem>) await _propertyDictionarySearchService.SearchAsync(new PropertyDictionaryItemSearchCriteria { Skip = skip, Take = take }) , (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } property dictionary items have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion Export propertyDictionaryItems #region Export catalogs progressInfo.Description = "Catalogs 
exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Catalogs"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => (GenericSearchResult <Catalog>) await _catalogSearchService.SearchCatalogsAsync(new CatalogSearchCriteria { Skip = skip, Take = take }) , (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } catalogs have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion Export catalogs #region Export categories progressInfo.Description = "Categories exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Categories"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _categorySearchService.SearchCategoriesAsync(new CategorySearchCriteria { Skip = skip, Take = take }); LoadImages(searchResult.Results.OfType <IHasImages>().ToArray(), progressInfo, options.HandleBinaryData); foreach (var item in searchResult.Results) { ResetRedundantReferences(item); } return((GenericSearchResult <Category>)searchResult); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } Categories have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion Export categories #region Export products progressInfo.Description = "Products exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Products"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, async (skip, take) => { var searchResult = await _productSearchService.SearchProductsAsync(new ProductSearchCriteria { Skip = skip, Take = take, ResponseGroup = ItemResponseGroup.Full.ToString() }); LoadImages(searchResult.Results.OfType <IHasImages>().ToArray(), progressInfo, options.HandleBinaryData); foreach (var item in searchResult.Results) { 
ResetRedundantReferences(item); } return((GenericSearchResult <CatalogProduct>)searchResult); }, (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } Products have been exported"; progressCallback(progressInfo); }, cancellationToken); #endregion Export products await writer.WriteEndObjectAsync(); await writer.FlushAsync(); } }
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Imports notifications from the JSON produced by the matching ExportAsync
    // ("NotificationsTotalCount" / "Notifications" properties), saving in batches.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var notificationsTotalCount = 0;
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "NotificationsTotalCount")
                {
                    notificationsTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Notifications")
                {
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var notifications = new List<Notification>();
                        var notificationsCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var notification = _serializer.Deserialize<Notification>(reader);
                            notifications.Add(notification);
                            notificationsCount++;
                            reader.Read();
                            cancellationToken.ThrowIfCancellationRequested();
                            // FIX: the batch-save check ran only after the whole array had been
                            // read, so every notification was accumulated in memory and the
                            // `% _batchSize` batching was dead code. Flush each full batch (and
                            // the final partial batch when EndArray is reached) as we go.
                            if (notificationsCount % _batchSize == 0 || reader.TokenType == JsonToken.EndArray)
                            {
                                await _notificationService.SaveChangesAsync(notifications.ToArray());
                                notifications.Clear();
                                if (notificationsTotalCount > 0)
                                {
                                    progressInfo.Description = $"{ notificationsCount } of { notificationsTotalCount } notifications imported";
                                }
                                else
                                {
                                    progressInfo.Description = $"{ notificationsCount } notifications imported";
                                }
                                progressCallback(progressInfo);
                            }
                        }
                    }
                }
            }
        }
    }
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports all fulfillment centers (single request) plus all inventory records
    // (paged by BatchSize) as one JSON object.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo { Description = "The fulfilmentCenters are loading" };
    progressCallback(progressInfo);
    using (var sw = new StreamWriter(outStream, Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        writer.WriteStartObject();

        var centers = await _fulfillmentCenterSearchService.SearchCentersAsync(new FulfillmentCenterSearchCriteria { Take = int.MaxValue });
        writer.WritePropertyName("FulfillmentCenterTotalCount");
        writer.WriteValue(centers.TotalCount);
        writer.WritePropertyName("FulfillmentCenters");
        writer.WriteStartArray();
        foreach (var fulfillmentCenter in centers.Results)
        {
            _serializer.Serialize(writer, fulfillmentCenter);
        }
        writer.WriteEndArray();

        progressInfo.Description = "Evaluation the number of inventory records";
        progressCallback(progressInfo);

        // The first search both yields the total count and fetches page 0.
        var searchResult = await _inventorySearchService.SearchInventoriesAsync(new InventorySearchCriteria { Take = BatchSize });
        var totalCount = searchResult.TotalCount;
        writer.WritePropertyName("InventoriesTotalCount");
        writer.WriteValue(totalCount);

        writer.WritePropertyName("Inventories");
        writer.WriteStartArray();
        // FIX: the original loop started at i = BatchSize, so the page fetched above was
        // never serialized and the first BatchSize inventory records were lost on export.
        for (var i = 0; i < totalCount; i += BatchSize)
        {
            progressInfo.Description = $"{i} of {totalCount} inventories have been loaded";
            progressCallback(progressInfo);
            if (i > 0)
            {
                searchResult = await _inventorySearchService.SearchInventoriesAsync(new InventorySearchCriteria { Skip = i, Take = BatchSize });
            }
            foreach (var inventory in searchResult.Results)
            {
                _serializer.Serialize(writer, inventory);
            }
            writer.Flush();
            progressInfo.Description = $"{ Math.Min(totalCount, i + BatchSize) } of { totalCount } inventories exported";
            progressCallback(progressInfo);
        }
        writer.WriteEndArray();

        writer.WriteEndObject();
        writer.Flush();
    }
}
public Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Thin wrapper: resolve MarketingExportImport from DI and forward every argument.
    var marketingExportImport = _appBuilder.ApplicationServices.GetRequiredService<MarketingExportImport>();
    return marketingExportImport.DoExportAsync(outStream, options, progressCallback, cancellationToken);
}
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Thin wrapper: resolve StoreExportImport from DI and delegate the export to it.
    var storeExportImport = _appBuilder.ApplicationServices.GetRequiredService<StoreExportImport>();
    await storeExportImport.ExportAsync(outStream, options, progressCallback, cancellationToken);
}
// Persists the given promotions via the promotion service. The `options` and
// `progressCallback` parameters are unused here but kept so derived classes can
// override with option-aware, progress-reporting behavior.
protected virtual Task SavePromotionsAsync(Promotion[] promotions, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback)
    => _promotionService.SavePromotionsAsync(promotions);
public async Task DoExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    // Exports all menu link lists (paged by _batchSize) and, when binary data handling
    // is enabled, the CMS content folders for the configured _exportedFolders.
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    using (var sw = new StreamWriter(outStream, Encoding.UTF8))
    using (var writer = new JsonTextWriter(sw))
    {
        await writer.WriteStartObjectAsync();

        // Export menu link lists, paged in memory.
        var menuLinkLists = await _menuService.GetAllLinkListsAsync();
        var linkLists = menuLinkLists as IList<MenuLinkList> ?? menuLinkLists.ToList();
        await writer.WritePropertyNameAsync("MenuLinkLists");
        await writer.WriteStartArrayAsync();
        for (var skip = 0; skip < linkLists.Count; skip += _batchSize)
        {
            progressInfo.Description = $"{skip} of {linkLists.Count} menu link lists have been loaded";
            progressCallback(progressInfo);
            foreach (var list in linkLists.Skip(skip).Take(_batchSize).ToList())
            {
                _jsonSerializer.Serialize(writer, list);
            }
            await writer.FlushAsync();
            progressInfo.Description = $"{ Math.Min(linkLists.Count, skip + _batchSize) } of { linkLists.Count } menu link lists exported";
            progressCallback(progressInfo);
        }
        await writer.WriteEndArrayAsync();

        // FIX: guard against a null options argument — the paired import method checks
        // `options != null && options.HandleBinaryData`, but this method dereferenced
        // options unconditionally and would throw NullReferenceException.
        if (options?.HandleBinaryData == true)
        {
            await writer.WritePropertyNameAsync("CmsContent");
            await writer.WriteStartArrayAsync();
            var result = await _blobContentStorageProvider.SearchAsync(string.Empty, null);
            foreach (var blobFolder in result.Results.Where(x => _exportedFolders.Contains(x.Name)))
            {
                var contentFolder = new ContentFolder { Url = blobFolder.RelativeUrl };
                await ReadContentFoldersRecursiveAsync(contentFolder, progressCallback);
                _jsonSerializer.Serialize(writer, contentFolder);
            }
            await writer.FlushAsync();
            progressInfo.Description = $"{ result.TotalCount } cms content exported";
            progressCallback(progressInfo);
            await writer.WriteEndArrayAsync();
        }

        await writer.WriteEndObjectAsync();
        await writer.FlushAsync();
    }
}
/// <summary>
/// Streams all marketing data into <paramref name="outStream"/> as one JSON object with
/// paged arrays: Promotions, DynamicContentFolders, DynamicContentItems,
/// DynamicContentPlaces, DynamicContentPublications, Coupons, Usages.
/// </summary>
/// <param name="outStream">Destination stream.
/// NOTE(review): StreamWriter is created without an explicit encoding here, unlike
/// sibling exporters that pass Encoding.UTF8 — confirm this is intended.</param>
/// <param name="options">Export options, forwarded to the promotions page loader.</param>
/// <param name="progressCallback">Invoked with a description at the start of each section and per page.</param>
/// <param name="cancellationToken">Checked up front and forwarded to each paged serialization call.</param>
public virtual async Task DoExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);
    using (var sw = new StreamWriter(outStream))
    using (var writer = new JsonTextWriter(sw))
    {
        await writer.WriteStartObjectAsync();

        // Promotions: paged through the overridable LoadPromotionsPageAsync hook.
        progressInfo.Description = "Promotions exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Promotions");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) => (GenericSearchResult<Promotion>)await LoadPromotionsPageAsync(skip, take, options, progressCallback),
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } promotions have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Dynamic content folders: the whole tree is loaded recursively in one page
        // (the skip/take arguments are ignored), then wrapped in a search result for the pager.
        progressInfo.Description = "Dynamic content folders exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentFolders");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) =>
            {
                var searchResult = AbstractTypeFactory<DynamicContentFolderSearchResult>.TryCreateInstance();
                var result = await LoadFoldersRecursiveAsync(null);
                searchResult.Results = result;
                searchResult.TotalCount = result.Count;
                return (GenericSearchResult<DynamicContentFolder>)searchResult;
            },
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content folders have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Dynamic content items: paged directly through the search service.
        progressInfo.Description = "Dynamic content items exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentItems");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) => (GenericSearchResult<DynamicContentItem>)await _contentItemsSearchService.SearchContentItemsAsync(new DynamicContentItemSearchCriteria { Skip = skip, Take = take }),
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content items have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Dynamic content places: paged directly through the search service.
        progressInfo.Description = "Dynamic content places exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentPlaces");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) => (GenericSearchResult<DynamicContentPlace>)await _contentPlacesSearchService.SearchContentPlacesAsync(new DynamicContentPlaceSearchCriteria { Skip = skip, Take = take }),
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content places have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Dynamic content publications: paged; result is cast to the generic search result shape.
        progressInfo.Description = "Dynamic content publications exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentPublications");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) =>
            {
                var searchResult = await _contentPublicationsSearchService.SearchContentPublicationsAsync(new DynamicContentPublicationSearchCriteria { Skip = skip, Take = take });
                return (GenericSearchResult<DynamicContentPublication>)searchResult;
            },
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content publications have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Coupons: paged directly through the search service.
        progressInfo.Description = "Coupons exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Coupons");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) => (GenericSearchResult<Coupon>)await _couponSearchService.SearchCouponsAsync(new CouponSearchCriteria { Skip = skip, Take = take }),
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } coupons have been exported"; progressCallback(progressInfo); }, cancellationToken);

        // Usages: paged directly through the search service.
        progressInfo.Description = "Usages exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Usages");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize,
            async (skip, take) => (GenericSearchResult<PromotionUsage>)await _promotionUsageSearchService.SearchUsagesAsync(new PromotionUsageSearchCriteria { Skip = skip, Take = take }),
            (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } usages have been exported"; progressCallback(progressInfo); }, cancellationToken);

        await writer.WriteEndObjectAsync();
        await writer.FlushAsync();
    }
}
/// <summary>
/// Imports catalog data (Catalogs, Categories, Properties, PropertyDictionaryItems,
/// Products) from a JSON stream produced by the matching export. Properties with
/// catalog/category foreign keys and product associations are deferred and saved in a
/// second pass to avoid FK/constraint violations during the initial inserts.
/// </summary>
/// <param name="inputStream">Source JSON stream.</param>
/// <param name="options">Import options; when non-null and HandleBinaryData is set, product images are imported.</param>
/// <param name="progressCallback">Invoked with progress descriptions as batches complete.</param>
/// <param name="cancellationToken">Checked once up front; the paged deserializer receives it for further checks.</param>
public async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    // Clones of properties whose CategoryId/CatalogId were reset below; re-saved at the end.
    var propertiesWithForeignKeys = new List<Property>();
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "Catalogs")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Catalog>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        await _catalogService.SaveChangesAsync(items.ToArray());
                    }, processedCount => { progressInfo.Description = $"{ processedCount } catalogs have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Categories")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Category>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        var itemsArray = items.ToArray();
                        await _categoryService.SaveChangesAsync(itemsArray);
                        // NOTE(review): category images are imported unconditionally here,
                        // regardless of options.HandleBinaryData — confirm this is intended.
                        ImportImages(itemsArray.OfType<IHasImages>().ToArray(), progressInfo);
                    }, processedCount => { progressInfo.Description = $"{ processedCount } categories have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Properties")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Property>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        var itemsArray = items.ToArray();
                        foreach (var property in itemsArray)
                        {
                            if (property.CategoryId != null || property.CatalogId != null)
                            {
                                // Keep a clone with the original keys for the second pass.
                                propertiesWithForeignKeys.Add(property.Clone() as Property);
                                //Need to reset property foreign keys to prevent FK violation during inserting into database
                                property.CategoryId = null;
                                property.CatalogId = null;
                            }
                        }
                        await _propertyService.SaveChangesAsync(itemsArray);
                    }, processedCount => { progressInfo.Description = $"{ processedCount } properties have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "PropertyDictionaryItems")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<PropertyDictionaryItem>(_jsonSerializer, _batchSize, items => _propertyDictionaryService.SaveChangesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } property dictionary items have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Products")
                {
                    // Associations are stripped from products before saving and replayed afterwards.
                    var associationBackupMap = new Dictionary<string, IList<ProductAssociation>>();
                    var products = new List<CatalogProduct>();
                    await reader.DeserializeJsonArrayWithPagingAsync<CatalogProduct>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        var itemsArray = items.ToArray();
                        foreach (var product in itemsArray)
                        {
                            //Do not save associations within product to prevent dependency conflicts in db
                            //we will save them separately after product import
                            if (!product.Associations.IsNullOrEmpty())
                            {
                                associationBackupMap[product.Id] = product.Associations;
                            }
                            product.Associations = null;
                            products.Add(product);
                        }
                        await _itemService.SaveChangesAsync(products.ToArray());
                        if (options != null && options.HandleBinaryData)
                        {
                            ImportImages(itemsArray.OfType<IHasImages>().ToArray(), progressInfo);
                        }
                        products.Clear();
                    }, processedCount => { progressInfo.Description = $"{ processedCount } products have been imported"; progressCallback(progressInfo); }, cancellationToken);

                    //Import products associations separately to avoid DB constraint violation
                    var totalProductsWithAssociationsCount = associationBackupMap.Count;
                    for (var i = 0; i < totalProductsWithAssociationsCount; i += _batchSize)
                    {
                        // Build minimal "fake" products carrying only Id + Associations so the
                        // association service can attach them without re-saving full products.
                        var fakeProducts = new List<CatalogProduct>();
                        foreach (var pair in associationBackupMap.Skip(i).Take(_batchSize))
                        {
                            var fakeProduct = AbstractTypeFactory<CatalogProduct>.TryCreateInstance();
                            fakeProduct.Id = pair.Key;
                            fakeProduct.Associations = pair.Value;
                            fakeProducts.Add(fakeProduct);
                        }
                        await _associationService.SaveChangesAsync(fakeProducts.OfType<IHasAssociations>().ToArray());
                        progressInfo.Description = $"{ Math.Min(totalProductsWithAssociationsCount, i + _batchSize) } of { totalProductsWithAssociationsCount } products associations imported";
                        progressCallback(progressInfo);
                    }
                }
            }
        }
    }
    //Update property associations after all required data are saved (Catalogs and Categories)
    if (propertiesWithForeignKeys.Count > 0)
    {
        progressInfo.Description = $"Updating {propertiesWithForeignKeys.Count} property associations…";
        progressCallback(progressInfo);
        var totalCount = propertiesWithForeignKeys.Count;
        for (var i = 0; i < totalCount; i += _batchSize)
        {
            await _propertyService.SaveChangesAsync(propertiesWithForeignKeys.Skip(i).Take(_batchSize).ToArray());
            progressInfo.Description = $"{ Math.Min(totalCount, i + _batchSize) } of { totalCount } property associations updated.";
            progressCallback(progressInfo);
        }
    }
}
/// <summary>
/// Imports members from a JSON stream with top-level properties
/// "MembersTotalCount" and "Members". All members are loaded into memory,
/// organizations are topologically sorted by ParentId so parents are saved
/// before children, then everything is saved in batches.
/// </summary>
/// <param name="inputStream">Source JSON stream.</param>
/// <param name="options">Unused by this implementation; kept for interface compatibility.</param>
/// <param name="progressCallback">Invoked after each saved batch.</param>
/// <param name="cancellationToken">Checked up front, before deserialization, and before saving.</param>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var membersTotalCount = 0;
    var batchSize = await GetBatchSize();
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString().EqualsInvariant("MembersTotalCount"))
                {
                    membersTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString().EqualsInvariant("Members"))
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    // Advance past the property name onto the array start.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var members = new List<Member>();
                        var membersCount = 0;
                        //TODO: implement to iterative import without whole members loading
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var member = _serializer.Deserialize<Member>(reader);
                            members.Add(member);
                            membersCount++;
                            reader.Read();
                        }
                        cancellationToken.ThrowIfCancellationRequested();
                        //Need to import by topological sort order, because Organizations have a graph structure and here references integrity must be preserved
                        var organizations = members.OfType<Organization>();
                        var nodes = new HashSet<string>(organizations.Select(x => x.Id));
                        // Self-references and empty parents are excluded from the edge set.
                        var edges = new HashSet<Tuple<string, string>>(organizations.Where(x => !string.IsNullOrEmpty(x.ParentId) && x.Id != x.ParentId).Select(x => new Tuple<string, string>(x.Id, x.ParentId)));
                        var orgsTopologicalSortedList = TopologicalSort.Sort(nodes, edges);
                        // Non-organization members have IndexOf == -1 and therefore sort first;
                        // organizations follow in reverse topological order (parents before children).
                        members = members.OrderByDescending(x => orgsTopologicalSortedList.IndexOf(x.Id)).ToList();
                        for (var i = 0; i < membersCount; i += batchSize)
                        {
                            await _memberService.SaveChangesAsync(members.Skip(i).Take(batchSize).ToArray());
                            if (membersTotalCount > 0)
                            {
                                progressInfo.Description = $"{ i } of { membersTotalCount } members imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{ i } members imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Imports fulfillment centers and inventory records from a JSON stream produced by
/// the matching export. Expected top-level properties: "FulfillmentCenterTotalCount",
/// "FulfillmentCenters", "InventoriesTotalCount", "Inventories".
/// </summary>
/// <param name="inputStream">Source JSON stream.</param>
/// <param name="options">Unused by this implementation; kept for interface compatibility.</param>
/// <param name="progressCallback">Invoked after each saved batch.</param>
/// <param name="cancellationToken">Checked once up front.</param>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    var fulfillmentCentersTotalCount = 0;
    var inventoriesTotalCount = 0;
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "FulfillmentCenterTotalCount")
                {
                    fulfillmentCentersTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "FulfillmentCenters")
                {
                    // Fix: advance past the property name onto the array before deserializing.
                    // The previous version entered the loop while still positioned on the
                    // "FulfillmentCenters" PropertyName token (cf. the members importer, which reads ahead).
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var fulfillmentCenters = new List<FulfillmentCenter>();
                        var fulfillmentCenterCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var fulfillmentCenter = _serializer.Deserialize<FulfillmentCenter>(reader);
                            fulfillmentCenters.Add(fulfillmentCenter);
                            fulfillmentCenterCount++;
                            reader.Read();
                        }
                        for (int i = 0; i < fulfillmentCenterCount; i += BatchSize)
                        {
                            await _fulfillmentCenterService.SaveChangesAsync(fulfillmentCenters.Skip(i).Take(BatchSize).ToArray());
                            // Fix: report against the declared total count. The old code tested the
                            // always-positive local counter, leaving the else branch dead and the
                            // fulfillmentCentersTotalCount field unused.
                            if (fulfillmentCentersTotalCount > 0)
                            {
                                progressInfo.Description = $"{ i } of { fulfillmentCentersTotalCount } fulfillment centers imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{ i } fulfillment centers imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
                else if (reader.Value.ToString() == "InventoriesTotalCount")
                {
                    inventoriesTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Inventories")
                {
                    // Fix: same missing read-ahead as the fulfillment centers branch above.
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read();
                        var inventories = new List<InventoryInfo>();
                        var inventoryCount = 0;
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var inventory = _serializer.Deserialize<InventoryInfo>(reader);
                            inventories.Add(inventory);
                            inventoryCount++;
                            reader.Read();
                        }
                        for (int i = 0; i < inventoryCount; i += BatchSize)
                        {
                            await _inventoryService.SaveChangesAsync(inventories.Skip(i).Take(BatchSize));
                            // Fix: report against the declared total count (see above).
                            if (inventoriesTotalCount > 0)
                            {
                                progressInfo.Description = $"{i} of {inventoriesTotalCount} inventories imported";
                            }
                            else
                            {
                                progressInfo.Description = $"{i} inventories imported";
                            }
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Loads one page of promotions for export. The options and progress callback are
/// provided so overrides can customize paging or report progress; this base
/// implementation ignores them and delegates straight to the search service.
/// </summary>
protected virtual Task<PromotionSearchResult> LoadPromotionsPageAsync(int skip, int take, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback)
{
    var criteria = new PromotionSearchCriteria
    {
        Skip = skip,
        Take = take
    };
    return _promotionSearchService.SearchPromotionsAsync(criteria);
}
/// <summary>
/// Delegates order import to the OrderExportImport service resolved from the
/// application's DI container. The <paramref name="options"/> argument is accepted for
/// interface compatibility but is not forwarded.
/// </summary>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var importer = _appBuilder.ApplicationServices.GetRequiredService<OrderExportImport>();
    await importer.DoImportAsync(inputStream, progressCallback, cancellationToken);
}
/// <summary>
/// Imports marketing data from a JSON stream produced by the matching export.
/// Recognized top-level properties: Promotions, DynamicContentFolders,
/// DynamicContentItems, DynamicContentPlaces, DynamicContentPublications,
/// Coupons, Usages. Each array is deserialized and saved in pages of _batchSize.
/// </summary>
/// <param name="inputStream">Source JSON stream.</param>
/// <param name="options">Forwarded to the overridable SavePromotionsAsync hook.</param>
/// <param name="progressCallback">Invoked with a progress description after each saved page.</param>
/// <param name="cancellationToken">Checked once up front and forwarded to each paged deserialization.</param>
public virtual async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "Promotions")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Promotion>(_jsonSerializer, _batchSize, items => SavePromotionsAsync(items.ToArray(), options, progressCallback), processedCount => { progressInfo.Description = $"{ processedCount } promotions have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "DynamicContentFolders")
                {
                    // Fix: progress message previously said "dynamic content items" here
                    // (copy-paste from the items branch below).
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentFolder>(_jsonSerializer, _batchSize, items => _dynamicContentService.SaveFoldersAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } dynamic content folders have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "DynamicContentItems")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentItem>(_jsonSerializer, _batchSize, items => _dynamicContentService.SaveContentItemsAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } dynamic content items have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "DynamicContentPlaces")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentPlace>(_jsonSerializer, _batchSize, items => _dynamicContentService.SavePlacesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } dynamic content places have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "DynamicContentPublications")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentPublication>(_jsonSerializer, _batchSize, items => _dynamicContentService.SavePublicationsAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } dynamic content publications have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Coupons")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Coupon>(_jsonSerializer, _batchSize, items => _couponService.SaveCouponsAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } coupons have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Usages")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<PromotionUsage>(_jsonSerializer, _batchSize, items => _promotionUsageService.SaveUsagesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } usages have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
            }
        }
    }
}
/// <summary>
/// Imports catalog data (Catalogs, Categories, Properties, PropertyDictionaryItems,
/// Products) from a JSON stream produced by the matching export. Property
/// catalog/category foreign keys are deferred and re-applied in a second pass once
/// catalogs and categories exist.
/// </summary>
/// <param name="inputStream">Source JSON stream.</param>
/// <param name="options">NOTE(review): HandleBinaryData checks are commented out below, so images are imported unconditionally — confirm this is intended.</param>
/// <param name="progressCallback">Invoked with progress descriptions as batches complete.</param>
/// <param name="cancellationToken">Checked once up front; the paged deserializer receives it for further checks.</param>
public async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    // Clones of properties whose CategoryId/CatalogId were reset below; re-saved at the end.
    var propertiesWithForeignKeys = new List<Property>();
    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "Catalogs")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Catalog>(_jsonSerializer, _batchSize, items => _catalogService.SaveChangesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } catalogs have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Categories")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Category>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        var itemsArray = items.ToArray();
                        await _categoryService.SaveChangesAsync(itemsArray);
                        //if (options.HandleBinaryData)
                        {
                            ImportImages(itemsArray.OfType<IHasImages>().ToArray(), progressInfo);
                        }
                    }, processedCount => { progressInfo.Description = $"{ processedCount } categories have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Properties")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<Property>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        foreach (var property in items)
                        {
                            if (property.CategoryId != null || property.CatalogId != null)
                            {
                                // Keep a clone with the original keys for the second pass.
                                propertiesWithForeignKeys.Add(property.Clone() as Property);
                                //Need to reset property foreign keys to prevent FK violation during inserting into database
                                property.CategoryId = null;
                                property.CatalogId = null;
                            }
                        }
                        await _propertyService.SaveChangesAsync(items.ToArray());
                    }, processedCount => { progressInfo.Description = $"{ processedCount } properties have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "PropertyDictionaryItems")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<PropertyDictionaryItem>(_jsonSerializer, _batchSize, items => _propertyDictionaryService.SaveChangesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } property dictionary items have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
                else if (reader.Value.ToString() == "Products")
                {
                    await reader.DeserializeJsonArrayWithPagingAsync<CatalogProduct>(_jsonSerializer, _batchSize, async (items) =>
                    {
                        var itemsArray = items.ToArray();
                        await _itemService.SaveChangesAsync(itemsArray);
                        //if (options.HandleBinaryData)
                        {
                            ImportImages(itemsArray.OfType<IHasImages>().ToArray(), progressInfo);
                        }
                    }, processedCount => { progressInfo.Description = $"{ processedCount } products have been imported"; progressCallback(progressInfo); }, cancellationToken);
                }
            }
        }
    }
    //Update property associations after all required data are saved (Catalogs and Categories)
    if (propertiesWithForeignKeys.Count > 0)
    {
        progressInfo.Description = $"Updating {propertiesWithForeignKeys.Count} property associations…";
        progressCallback(progressInfo);
        var totalCount = propertiesWithForeignKeys.Count;
        for (var i = 0; i < totalCount; i += _batchSize)
        {
            await _propertyService.SaveChangesAsync(propertiesWithForeignKeys.Skip(i).Take(_batchSize).ToArray());
            progressInfo.Description = $"{ Math.Min(totalCount, i + _batchSize) } of { totalCount } property associations updated.";
            progressCallback(progressInfo);
        }
    }
}
/// <summary>
/// Imports catalog data by dispatching each recognized top-level JSON property to its
/// dedicated import helper. Properties whose catalog/category foreign keys were
/// deferred by ImportPropertiesAsync are re-saved in a second pass at the end.
/// </summary>
public async Task DoImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var progressInfo = new ExportImportProgressInfo();
    // Filled by ImportPropertiesAsync with properties whose FK columns were deferred.
    var propertiesWithForeignKeys = new List<Property>();

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType != JsonToken.PropertyName)
            {
                continue;
            }

            var sectionName = reader.Value.ToString();
            if (sectionName == "Properties")
            {
                await ImportPropertiesAsync(reader, propertiesWithForeignKeys, progressInfo, progressCallback, cancellationToken);
            }
            else if (sectionName == "PropertyDictionaryItems")
            {
                await ImportPropertyDictionaryItemsAsync(reader, progressInfo, progressCallback, cancellationToken);
            }
            else if (sectionName == "Catalogs")
            {
                await ImportCatalogsAsync(reader, progressInfo, progressCallback, cancellationToken);
            }
            else if (sectionName == "Categories")
            {
                await ImportCategoriesAsync(reader, progressInfo, progressCallback, cancellationToken);
            }
            else if (sectionName == "Products")
            {
                await ImportProductsAsync(reader, options, progressInfo, progressCallback, cancellationToken);
            }
        }
    }

    //Update property associations after all required data are saved (Catalogs and Categories)
    if (propertiesWithForeignKeys.Count > 0)
    {
        progressInfo.Description = $"Updating {propertiesWithForeignKeys.Count} property associations…";
        progressCallback(progressInfo);
        var totalCount = propertiesWithForeignKeys.Count;
        for (var skip = 0; skip < totalCount; skip += _batchSize)
        {
            var page = propertiesWithForeignKeys.Skip(skip).Take(_batchSize).ToArray();
            await _propertyService.SaveChangesAsync(page);
            progressInfo.Description = $"{ Math.Min(totalCount, skip + _batchSize) } of { totalCount } property associations updated.";
            progressCallback(progressInfo);
        }
    }
}