private void AddDeletedProducts(XmlWriter xmlWriter, string identifier, ref ProcessingCounters counter)
{
    var parts = identifier.Split(new[] { '_' }, StringSplitOptions.RemoveEmptyEntries);
    if (parts.Length != 3)
    {
        throw new ArgumentException(
            string.Format("Invalid identifier '{0}' - expected three underscore-separated parts.", identifier),
            nameof(identifier));
    }

    var catalogName = parts[0];
    var rangeStart = int.Parse(parts[1]);
    var rangeEnd = int.Parse(parts[2]);

    string deletedStoredProcedureName;
    switch (catalogName)
    {
        case "Books":
            deletedStoredProcedureName = DeletedBooksStoredProcedureName;
            break;
        case "GeneralMerchandise":
            deletedStoredProcedureName = DeletedGeneralMerchandiseStoredProcedureName;
            break;
        default:
            throw new ArgumentException(
                string.Format("Invalid identifier '{0}' - unknown catalog name.", identifier),
                nameof(identifier));
    }

    WriteDeletedProductNodes(xmlWriter, deletedStoredProcedureName, rangeStart, rangeEnd, ref counter);
}
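// Usage sketch for the identifier convention assumed above: "<CatalogName>_<RangeStart>_<RangeEnd>",
// where CatalogName is "Books" or "GeneralMerchandise". The range values below are hypothetical:
//
//     AddDeletedProducts(xmlWriter, "Books_1_500000", ref counter);
//     AddDeletedProducts(xmlWriter, "GeneralMerchandise_500001_1000000", ref counter);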
private void UpdateCounters(ref ProcessingCounters counter, OutputFormat recordOutput)
{
    if (recordOutput == OutputFormat.Insert)
    {
        counter.NumberOfNew++;
    }
    else if (recordOutput == OutputFormat.Delete)
    {
        counter.NumberOfDeleted++;
    }
    else
    {
        counter.NumberOfModified++;
    }
}
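// ProcessingCounters and OutputFormat are not defined in this listing. The sketch below is
// reconstructed from the call sites in these snippets; the member names come from usage, while
// the member types and method bodies are assumptions.
public enum OutputFormat { Insert, Update, Delete }

public class ProcessingCounters
{
    public string Identifier { get; set; }
    public bool AllowErrors { get; set; }
    public int NumberOfNew { get; set; }
    public int NumberOfModified { get; set; }
    public int NumberOfDeleted { get; set; }
    public int NumberOfErrored { get; set; }
    public int NumberOfUnchanged { get; set; }
    public int NumberOfExcluded { get; set; }
    public int NumberOfProcessed { get; set; }
    public int FilesCount { get; set; }
    public List<string> CustomMessages { get; } = new List<string>();

    public void AddCustomMessage(string message) => CustomMessages.Add(message);

    // Assumed to be the total of all per-record outcomes seen so far.
    public int GetTotalProcessed() =>
        NumberOfNew + NumberOfModified + NumberOfDeleted + NumberOfErrored +
        NumberOfUnchanged + NumberOfExcluded + NumberOfProcessed;
}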
public void AddFileGenerationUpdate(ProcessingCounters processingCounters)
{
    if (!processingCounters.AllowErrors && processingCounters.NumberOfErrored > 0)
    {
        _failedFileNames.Add(processingCounters.Identifier);
    }
    else
    {
        _successfulFileNames.Add(processingCounters.Identifier);
    }

    AddNewRecords(processingCounters.NumberOfNew);
    AddModifiedRecords(processingCounters.NumberOfModified);
    AddDeletedRecords(processingCounters.NumberOfDeleted);
    AddErroredRecords(processingCounters.NumberOfErrored);
    AddFileCount(processingCounters.FilesCount);

    if (processingCounters.CustomMessages != null)
    {
        foreach (var customMessage in processingCounters.CustomMessages)
        {
            AddCustomMessage(customMessage);
        }
    }
}
public override ProcessingCounters CreateOutput(IDataReader reader, StringDictionary dict, string catalog, string identifier, RunType runType)
{
    var counter = new ProcessingCounters { AllowErrors = AllowItemErrorsInFiles, Identifier = identifier };
    var batchCount = 1;
    XmlWriter xmlWriter = null;
    GZipStream gZipStream = null;
    XmlSerializer serializer = null;
    string rootName = _dataService.GetXmlRoot(runType);
    if (UseSerialization)
    {
        serializer = _xmlFileProcessor.GetSerializer(_dataService.GetDataType(), string.Empty);
    }

    int numberInBatch = 0;
    int recordsRetrieved = 0;
    FileInfo currentFile = null;
    ExportData previousRecord = null;
    OutputFormat previousRecordOutput = OutputFormat.Update;

    while (reader.Read())
    {
        recordsRetrieved++;
        object sourceId = null;
        if (dict["sourceId"] != null)
        {
            sourceId = reader[dict["sourceId"]];
        }

        _logger.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, recordsRetrieved, sourceId?.ToString());
        try
        {
            if (xmlWriter == null)
            {
                numberInBatch = 0;
                currentFile = StartBatchFile(identifier, runType, batchCount, rootName, ref gZipStream, ref xmlWriter);
            }

            var record = _dataService.GetData(dict, reader, catalog, runType);
            if (record == null || record.ExportData == null)
            {
                var message = string.Format("[{1}]Record {0} wasn't found, so it is being treated as an erroneous item.", sourceId, identifier);
                _logger.Debug(message);
                counter.NumberOfErrored++;
                counter.AddCustomMessage(message);
                continue;
            }

            if (previousRecord != null)
            {
                if (!string.IsNullOrEmpty(record.ExportData.SourceId) && !string.IsNullOrEmpty(previousRecord.SourceId)
                    && string.Equals(record.ExportData.SourceId, previousRecord.SourceId, StringComparison.InvariantCultureIgnoreCase))
                {
                    _logger.DebugFormat("Record {0} matches the previous record; merging them.", record.ExportData.SourceId);
                    previousRecord = _dataService.MergeData(previousRecord, record.ExportData);
                }
                else
                {
                    WriteToBatch(xmlWriter, serializer, previousRecord);
                    UpdateCounters(ref counter, previousRecordOutput);
                    numberInBatch++;
                    previousRecord = record.ExportData;
                    previousRecordOutput = record.IsNew ? OutputFormat.Insert : record.IsDeleted ? OutputFormat.Delete : OutputFormat.Update;
                }
            }
            else
            {
                previousRecordOutput = record.IsNew ? OutputFormat.Insert : record.IsDeleted ? OutputFormat.Delete : OutputFormat.Update;
                previousRecord = record.ExportData;
            }

            if (numberInBatch >= NumberOfRecordsPerBatch)
            {
                xmlWriter = FinishBatchFile(xmlWriter, gZipStream, rootName);
                batchCount++;
                if (currentFile != null)
                {
                    _files.Add(currentFile);
                    counter.FilesCount++;
                    currentFile = null;
                }
            }
        }
        catch (Exception exception)
        {
            counter.NumberOfErrored++;
            // The conditional suffix must be parenthesized so it applies to the message, not the comparison.
            var errorMessage = $"An error was encountered while retrieving data for item {sourceId} " +
                (previousRecord != null
                    ? $"and adding the xml element for item {previousRecord.SourceId}"
                    : $";catalog:{catalog},Message:{exception.Message}");
            counter.AddCustomMessage(errorMessage);
            _logger.Error(errorMessage);
            _logger.DebugFormat("Error stack trace: {0}", exception);
        }
    }

    if (previousRecord != null)
    {
        try
        {
            if (xmlWriter == null)
            {
                numberInBatch = 0;
                currentFile = StartBatchFile(identifier, runType, batchCount, rootName, ref gZipStream, ref xmlWriter);
            }

            WriteToBatch(xmlWriter, serializer, previousRecord);
            UpdateCounters(ref counter, previousRecordOutput);
            numberInBatch++;
        }
        catch (Exception exception)
        {
            counter.NumberOfErrored++;
            var errorMessage = string.Format("Cannot process the item. Id:{0};catalog:{1},Message:{2}", previousRecord.SourceId, catalog, exception.Message);
            counter.AddCustomMessage(errorMessage);
            _logger.Error(errorMessage);
            _logger.DebugFormat("Error stack trace: {0}", exception);
        }
    }

    if (xmlWriter != null)
    {
        xmlWriter = FinishBatchFile(xmlWriter, gZipStream, rootName);
        if (currentFile != null)
        {
            _files.Add(currentFile);
            currentFile = null;
            counter.FilesCount++;
        }
    }

    _logger.InfoFormat(
        "[ExecuteFeedUpdate] {0} completed. New record count: {1}, error record count: {2}, changed record count: {3}, deleted record count: {4}.",
        identifier, counter.NumberOfNew, counter.NumberOfErrored, counter.NumberOfModified, counter.NumberOfDeleted);

    if (!AllowItemErrorsInFiles && counter.NumberOfErrored > 0)
    {
        _hasError = true;
    }

    return counter;
}
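// StartBatchFile and FinishBatchFile are not shown. A minimal sketch of the batch-file
// lifecycle they appear to implement, inferred from the call sites above; the file-naming
// scheme, OutputFolderPath, and writer settings are assumptions, not the project's actual code.
private FileInfo StartBatchFile(string identifier, RunType runType, int batchCount, string rootName, ref GZipStream gZipStream, ref XmlWriter xmlWriter)
{
    // Hypothetical file-naming scheme.
    var file = new FileInfo(Path.Combine(OutputFolderPath, string.Format("{0}_{1}.xml.gz", identifier, batchCount)));
    gZipStream = new GZipStream(file.Create(), CompressionMode.Compress);
    xmlWriter = XmlWriter.Create(gZipStream);
    xmlWriter.WriteStartDocument();
    xmlWriter.WriteStartElement(rootName);
    return file;
}

private XmlWriter FinishBatchFile(XmlWriter xmlWriter, GZipStream gZipStream, string rootName)
{
    xmlWriter.WriteEndElement(); // closes rootName
    xmlWriter.WriteEndDocument();
    xmlWriter.Close();
    gZipStream.Close();
    return null; // callers rely on a null writer to lazily start the next batch file
}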
private void ExecuteFeedUpdate(IDataReader reader, StringDictionary dict, string catalog, string identifier)
{
    var counter = new ProcessingCounters();

    //<entry>
    var batch = new List<GooglePlaProductData>();
    var deletedBatch = new List<string>();
    var fileCount = 1;
    while (reader.Read())
    {
        Log.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, counter.GetTotalProcessed(), reader["PID"]);
        var id = reader[dict["gId"]].ToString();
        var title = reader[dict["title"]].ToString();
        try
        {
            // First get the Bronte data processing result.
            var processingResult = _productDataService.GetGooglePlaProductData(GetProductDataProcessingRequest(dict, reader, catalog));

            // Process the result and add it to a batch if the product update is to be sent to Google,
            // while ensuring that the proper counter gets incremented.
            switch (processingResult.Status)
            {
                case GooglePlaProductDataStatus.ExcludedDueToProductData:
                case GooglePlaProductDataStatus.ExcludedDueToExclusionRule:
                    Log.DebugFormat("Product {0} was excluded.", id);
                    counter.NumberOfExcluded++;
                    continue;
                case GooglePlaProductDataStatus.Unmodified:
                    if (_isPseudoFullRun)
                    {
                        Log.DebugFormat("Product {0} was unmodified but is treated as an update because a pseudo full run is in progress.", id);
                        batch.Add(processingResult.ProductData);
                        counter.NumberOfProcessed++;
                        break;
                    }

                    Log.DebugFormat("Product {0} was skipped as its data and/or rules haven't resulted in a change for the product.", id);
                    counter.NumberOfUnchanged++;
                    continue;
                case GooglePlaProductDataStatus.Removed:
                    Log.DebugFormat("Product {0} was removed, so adding its identifier to the removals batch.", id);
                    counter.NumberOfDeleted++;
                    deletedBatch.Add(id);
                    break;
                case GooglePlaProductDataStatus.FoundOrModified:
                    Log.DebugFormat("Product {0} was found/modified, so adding its data to the insert/update batch.", id);
                    batch.Add(processingResult.ProductData);
                    counter.NumberOfProcessed++;
                    break;
                case GooglePlaProductDataStatus.NotFound:
                default:
                    var message = string.Format("[{2}]Product {0} - {1} wasn't found, so it is being treated as an erroneous product.", id, title, identifier);
                    Log.Debug(message);
                    counter.NumberOfErrored++;
                    _executionLogLogger.AddCustomMessage(message);
                    if (!AllowItemErrorsInFiles)
                    {
                        _hasError = true;
                    }

                    continue;
            }
        }
        catch (Exception exception)
        {
            counter.NumberOfErrored++;
            var errorMessage = string.Format("Can't process the item. Id:{0};title:{1},catalog:{2},Message:{3}", id, title, catalog, exception.Message);
            Log.Error(errorMessage);
            Log.DebugFormat("Error stack trace: {0}", exception);
            _executionLogLogger.AddCustomMessage(string.Format("Can't process the item. Id: {0};title: {1}, file identifier: {2}", id, title, identifier));
            if (!AllowItemErrorsInFiles)
            {
                _hasError = true;
            }
        }

        if (batch.Count >= NumberOfProductsPerApiCall)
        {
            SendUpdateBatch(batch, fileCount, identifier);
            fileCount++;
            batch = new List<GooglePlaProductData>();
        }

        if (deletedBatch.Count >= NumberOfProductsPerApiCall)
        {
            SendDeletionBatch(deletedBatch, fileCount, identifier);
            fileCount++;
            deletedBatch = new List<string>();
        }
    }

    if (batch.Any())
    {
        SendUpdateBatch(batch, fileCount, identifier);
        fileCount++;
        batch = new List<GooglePlaProductData>();
    }

    if (deletedBatch.Any())
    {
        SendDeletionBatch(deletedBatch, fileCount, identifier);
        fileCount++;
        deletedBatch = new List<string>();
    }

    var infoLogMessage = string.Format(
        "[ExecuteFeedUpdate] {0} completed. Processed record count: {1}, error record count: {2}, unchanged record count: {3}, " +
        "excluded (ignored) record count: {4}, removed record count: {5}.",
        identifier, counter.NumberOfProcessed, counter.NumberOfErrored, counter.NumberOfUnchanged, counter.NumberOfExcluded, counter.NumberOfDeleted);
    //if (SendExcludedProductData)
    //    infoLogMessage += "Excluded products were included in processed count.";
    Log.Info(infoLogMessage);
}
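// GooglePlaProductDataStatus is not defined in this listing; the members below are
// reconstructed from the switch cases in ExecuteFeedUpdate. The ordering is an assumption.
public enum GooglePlaProductDataStatus
{
    NotFound,
    FoundOrModified,
    Unmodified,
    Removed,
    ExcludedDueToProductData,
    ExcludedDueToExclusionRule
}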
private void WriteDeletedProductNodes(XmlWriter xmlWriter, string storedProcedureName, int rangeStart, int rangeEnd, ref ProcessingCounters counter)
{
    Log.InfoFormat("Starting to write deleted product nodes for SP of {0}, rangeStart of {1}, rangeEnd of {2}.", storedProcedureName, rangeStart, rangeEnd);
    using (var sqlConnection = new SqlConnection(OdysseyCommerceConnectionString))
    {
        sqlConnection.Open();
        using (var sqlCommand = new SqlCommand(storedProcedureName, sqlConnection)
        {
            CommandType = CommandType.StoredProcedure,
            CommandTimeout = SearchDataCommandTimeout
        })
        {
            sqlCommand.Parameters.AddWithValue("@PIDRangeStart", rangeStart);
            sqlCommand.Parameters.AddWithValue("@PIDRangeEnd", rangeEnd);
            sqlCommand.Parameters.AddWithValue("@DateChanged", _effectiveFromTime);
            using (var sqlDataReader = sqlCommand.ExecuteReader())
            {
                while (sqlDataReader.Read())
                {
                    WriteDeletedProductNode(xmlWriter, sqlDataReader["Pid"].ToString());
                    counter.NumberOfDeleted++;
                }
            }
        }
    }

    Log.InfoFormat("Completed writing deleted product nodes for SP of {0}, rangeStart of {1}, rangeEnd of {2}.", storedProcedureName, rangeStart, rangeEnd);
}
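// WriteDeletedProductNode is not shown. Per the comment in FeedXmlElement below, deleted
// products are sent as a pid plus an "out of stock" availability; a sketch under that
// assumption (the element names are guesses, not the actual feed schema):
private static void WriteDeletedProductNode(XmlWriter xmlWriter, string pid)
{
    xmlWriter.WriteStartElement("entry");
    xmlWriter.WriteElementString("id", pid);
    xmlWriter.WriteElementString("availability", "out of stock");
    xmlWriter.WriteEndElement();
}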
private void FeedXmlElement(XmlWriter xmlWriter, IDataReader reader, StringDictionary dict, string catalog, string identifier, string feedFilePath)
{
    var counter = new ProcessingCounters();
    var time = _effectiveFromTime ?? DateTime.Now;
    PlaRelatedFeedUtils.StartXmlDocument(xmlWriter, GoogleRunFeedType.Google, time);

    //<entry>
    while (reader.Read())
    {
        Log.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, counter.GetTotalProcessed(), reader["PID"]);
        var id = reader[dict["gId"]].ToString();
        var title = reader[dict["title"]].ToString();
        try
        {
            var haveExclusionRulesChanged = _runnerFeedRulesHelper.HaveExclusionRulesChanged();
            var sku = reader[dict["sku"]].ToString();
            var brandName = !dict.ContainsKey("gBrand") ? string.Empty : reader[dict["gBrand"]].ToString();
            var contributors = PlaRelatedFeedUtils.ContributorAttributes(dict, reader, id);
            var defaultCategory = FeedUtils.GetFeedGeneratorIndigoCategory(_feedGeneratorCategoryService, reader, dict, catalog, Log);
            var productData = FeedUtils.GetProductData(dict, reader, sku, catalog, brandName, contributors, defaultCategory);
            var sanitizedTitle = string.IsNullOrWhiteSpace(title) ? string.Empty : FeedUtils.SanitizeString(title);
            var gAvailability = !dict.ContainsKey("gAvailability")
                ? FeedUtils.GetGoogleAvailability(1)
                : FeedUtils.GetGoogleAvailability(int.Parse(reader[dict["gAvailability"]].ToString()));
            var availability = !dict.ContainsKey("gAvailability") ? 1 : int.Parse(reader[dict["gAvailability"]].ToString());
            var recordType = !dict.ContainsKey("recordType") ? string.Empty : reader[dict["recordType"]].ToString();
            var hasImage = true;
            if (!SkipHasImageCheck)
            {
                hasImage = !dict.ContainsKey("hasImage") || int.Parse(reader[dict["hasImage"]].ToString()) > 0;
            }

            string message;
            var isExcluded = false;
            if (_isIncrementalRun)
            {
                var statusId = int.Parse(reader["StatusId"].ToString());
                switch (statusId)
                {
                    // New product
                    case 1:
                        counter.NumberOfNew++;
                        continue;
                    // Unchanged product
                    case 3:
                        if (!haveExclusionRulesChanged)
                        {
                            Log.DebugFormat("Product with id {0} is skipped in incremental mode as it wasn't modified and the rules haven't changed.", id);
                            counter.NumberOfUnchanged++;
                            continue;
                        }

                        var oldExclusionResult = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, true);
                        var currentExclusionResult = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, false);
                        if (oldExclusionResult == currentExclusionResult)
                        {
                            Log.DebugFormat("Product with id {0} is skipped in incremental mode as it wasn't modified; the rules had changed, but the exclusion rule evaluation's result remained the same.", id);
                            counter.NumberOfUnchanged++;
                            continue;
                        }

                        // If the product is excluded at the moment, then perform the "exclusion logic" per business
                        // requirements; otherwise (i.e. the product is included at the moment) treat it as "new".
                        if (!currentExclusionResult)
                        {
                            Log.DebugFormat("Product with id {0} is marked as new and skipped in incremental mode: it wasn't modified, the rules had changed, and the exclusion result changed such that the product is no longer excluded.", id);
                            counter.NumberOfNew++;
                            continue;
                        }

                        Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode: it wasn't modified, the rules had changed, and the exclusion result changed such that the product is now excluded.", id);
                        isExcluded = true;
                        break;
                    // Modified product
                    case 2:
                        var isEntryExcluded = IndigoBreadcrumbRepositoryUtils.IsExcludedDueToData(GooglePlaFeedId, sanitizedTitle, hasImage, availability, recordType, false, out message);
                        if (isEntryExcluded)
                        {
                            Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode as it was modified and it failed the data requirements for inclusion.", id);
                            isExcluded = true;
                            break;
                        }

                        // If the product was excluded from the feed due to rules, then mark it as excluded.
                        if (_runnerFeedRulesHelper.IsExcludedFromFeed(productData, false))
                        {
                            Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode as it was modified and it matches one of the exclusion rules.", id);
                            isExcluded = true;
                        }

                        break;
                    default:
                        throw new ApplicationException("Invalid StatusId during an incremental run.");
                }
            }
            else
            {
                isExcluded = IndigoBreadcrumbRepositoryUtils.IsExcludedDueToData(GooglePlaFeedId, sanitizedTitle, hasImage, availability, recordType, false, out message);
                if (isExcluded)
                {
                    Log.DebugFormat("Product with id {0} is marked as excluded in full mode as it failed the data requirements for inclusion.", id);
                }
                else
                {
                    isExcluded = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, false);
                    if (isExcluded)
                    {
                        Log.DebugFormat("Product with id {0} is marked as excluded in full mode as it matches one of the exclusion rules.", id);
                    }
                }
            }

            // At this point we know whether the product is excluded, regardless of which type of run is executing.
            // If we aren't supposed to be sending excluded product data, then update the skipped counter and move on.
            if (isExcluded)
            {
                counter.NumberOfExcluded++;
                if (!SendExcludedProductData)
                {
                    Log.Debug("Skipped the product because it was excluded.");
                    continue;
                }

                gAvailability = ExcludedProductGoogleAvailabilityText;
            }

            var regularPrice = (decimal)reader[dict["price"]];
            var adjustedPrice = string.IsNullOrEmpty(dict["adjustedPrice"]) ? "" : reader[dict["adjustedPrice"]].ToString();
            decimal? salePrice = null;
            if (!string.IsNullOrWhiteSpace(adjustedPrice))
            {
                var salePriceFromDatabase = decimal.Parse(adjustedPrice);
                if (salePriceFromDatabase != regularPrice)
                {
                    if (salePriceFromDatabase > regularPrice)
                    {
                        // An "adjusted" price above the regular price is treated as the new regular price.
                        regularPrice = salePriceFromDatabase;
                        salePrice = null;
                    }
                    else
                    {
                        salePrice = salePriceFromDatabase;
                    }
                }
            }

            var entry = EntryAttribute(id, regularPrice, salePrice, gAvailability);
            entry.WriteTo(xmlWriter);
            counter.NumberOfProcessed++;
        }
        catch (Exception e)
        {
            counter.NumberOfErrored++;
            var errorMessage = string.Format("Can't process the item. Id:{0};title:{1},catalog:{2},Message:{3}", id, title, catalog, e.Message);
            Log.Error(errorMessage);
            Log.DebugFormat("Error stack trace: {0}", e);
            _executionLogLogger.AddCustomMessage(string.Format("Can't process the item. Id: {0};title: {1}, file identifier: {2}", id, title, identifier));
            if (_isIncrementalRun && !AllowItemErrorsInFiles)
            {
                _hasError = true;
            }
        }
    }

    // If the setting for sending deleted products is enabled during an incremental run, then get the products
    // deleted since the last run and send them as the "special" deleted products, i.e. pid + availability of "out of stock".
    if (_isIncrementalRun && SendExcludedProductData)
    {
        AddDeletedProducts(xmlWriter, identifier, ref counter);
    }

    PlaRelatedFeedUtils.EndXmlDocument(xmlWriter);

    var infoLogMessage = string.Format(
        "[WriteFeedFile] {0} completed. Processed record count: {1}, error record count: {2}, unchanged record count: {3}, " +
        "new record count: {4}, excluded record count: {5}, deleted record count: {6}. ",
        feedFilePath, counter.NumberOfProcessed, counter.NumberOfErrored, counter.NumberOfUnchanged,
        counter.NumberOfNew, counter.NumberOfExcluded, counter.NumberOfDeleted);
    if (SendExcludedProductData)
    {
        infoLogMessage += "Excluded products were included in the processed count.";
    }

    Log.Info(infoLogMessage);
}
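// Worked example of the price normalization above, with illustrative values: for a regular
// price of 20.00 and an adjusted price of 15.00, the entry carries regularPrice = 20.00 and
// salePrice = 15.00; for an adjusted price of 25.00, the entry carries regularPrice = 25.00
// and no sale price, because an adjusted price above the regular price replaces it rather
// than being advertised as a sale.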
public override ProcessingCounters CreateOutput(IDataReader reader, StringDictionary dict, string catalog, string identifier, RunType runType)
{
    var counter = new ProcessingCounters { AllowErrors = AllowItemErrorsInFiles, Identifier = identifier };
    var newBatch = new List<ExportData>();
    var updatedBatch = new List<ExportData>();
    var batchCount = 1;
    while (reader.Read())
    {
        var sourceId = reader[dict["sourceId"]].ToString();
        _logger.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, counter.GetTotalProcessed(), sourceId);
        var title = reader[dict["title"]].ToString();
        try
        {
            // First get the data.
            var data = _dataService.GetData(dict, reader, catalog, runType);
            if (data == null)
            {
                var message = string.Format("[{2}]Record {0} - {1} wasn't found, so it is being treated as an erroneous item.", sourceId, title, identifier);
                _logger.Debug(message);
                counter.NumberOfErrored++;
                counter.AddCustomMessage(message);
                continue;
            }

            // These branches must fall through to the batch-size checks below; a `continue`
            // here would defer all flushing until the end of the read loop.
            if (data.IsNew)
            {
                _logger.DebugFormat("Record {0} was found and was new, so adding its data to the insert batch.", sourceId);
                newBatch.Add(data.ExportData);
                counter.NumberOfNew++;
            }
            else
            {
                _logger.DebugFormat("Record {0} was found and was modified, so adding its data to the update batch.", sourceId);
                updatedBatch.Add(data.ExportData);
                counter.NumberOfModified++;
            }
        }
        catch (Exception exception)
        {
            counter.NumberOfErrored++;
            var errorMessage = string.Format("Cannot process the item. Id:{0};title:{1},catalog:{2},Message:{3}", sourceId, title, catalog, exception.Message);
            counter.AddCustomMessage(errorMessage);
            _logger.Error(errorMessage);
            _logger.DebugFormat("Error stack trace: {0}", exception);
        }

        if (newBatch.Count >= NumberOfRecordsPerBatch)
        {
            OutputBatch(OutputFormat.Insert, newBatch, batchCount, identifier, catalog);
            batchCount++;
            newBatch = new List<ExportData>();
        }

        if (updatedBatch.Count >= NumberOfRecordsPerBatch)
        {
            OutputBatch(OutputFormat.Update, updatedBatch, batchCount, identifier, catalog);
            batchCount++;
            updatedBatch = new List<ExportData>();
        }
    }

    if (newBatch.Any())
    {
        OutputBatch(OutputFormat.Insert, newBatch, batchCount, identifier, catalog);
        batchCount++;
        newBatch = new List<ExportData>();
    }

    if (updatedBatch.Any())
    {
        OutputBatch(OutputFormat.Update, updatedBatch, batchCount, identifier, catalog);
        batchCount++;
        updatedBatch = new List<ExportData>();
    }

    _logger.InfoFormat(
        "[ExecuteFeedUpdate] {0} completed. New record count: {1}, error record count: {2}, changed record count: {3}.",
        identifier, counter.NumberOfNew, counter.NumberOfErrored, counter.NumberOfModified);

    // TODO: Do we want to halt execution if we ran into an error here, which would stop other unprocessed ranges from being sent?
    if (!AllowItemErrorsInFiles && counter.NumberOfErrored > 0)
    {
        _hasError = true;
    }

    return counter;
}
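// The data-service types used by both CreateOutput overrides are not shown. A minimal sketch
// of the shapes implied by the call sites; the member names come from usage, while the
// wrapper's name and everything else are assumptions.
public class ExportData
{
    public string SourceId { get; set; }
    // ...the serialized payload fields would live here
}

public class DataRecord // hypothetical name for the wrapper returned by _dataService.GetData
{
    public bool IsNew { get; set; }
    public bool IsDeleted { get; set; }
    public ExportData ExportData { get; set; }
}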