/// <summary>
/// Streams product rows from <paramref name="reader"/> into an XML feed document, applying
/// exclusion rules and (in incremental mode) per-status change handling, then finalizes the
/// document and logs aggregate counters.
/// </summary>
/// <param name="xmlWriter">Open writer the feed entries are written to; document start/end is handled here.</param>
/// <param name="reader">Forward-only reader over the product result set; column names are resolved via <paramref name="dict"/>.</param>
/// <param name="dict">Maps logical field names (e.g. "gId", "title", "price") to reader column names.</param>
/// <param name="catalog">Catalog identifier passed through to category/product-data helpers.</param>
/// <param name="identifier">File/run identifier used in log and error messages.</param>
/// <param name="feedFilePath">Feed file path, used only in the completion log message.</param>
private void FeedXmlElement(XmlWriter xmlWriter, IDataReader reader, StringDictionary dict, string catalog, string identifier, string feedFilePath)
        {
            var counter = new ProcessingCounters();
            // Effective timestamp for the document header; falls back to "now" when not configured.
            var time    = _effectiveFromTime.HasValue ? _effectiveFromTime.Value : DateTime.Now;

            PlaRelatedFeedUtils.StartXmlDocument(xmlWriter, GoogleRunFeedType.Google, time);

            //<entry>
            while (reader.Read())
            {
                Log.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, (counter.GetTotalProcessed()), reader["PID"]);

                // id/title are read outside the try so the catch block can include them in error messages.
                var id    = reader[dict["gId"]].ToString();
                var title = reader[dict["title"]].ToString();
                try
                {
                    var haveExclusionRulesChanged = _runnerFeedRulesHelper.HaveExclusionRulesChanged();
                    var sku             = reader[dict["sku"]].ToString();
                    var brandName       = !dict.ContainsKey("gBrand") ? string.Empty : reader[dict["gBrand"]].ToString();
                    var cContributors   = PlaRelatedFeedUtils.ContributorAttributes(dict, reader, id);
                    var contributor     = cContributors ?? null;
                    var defaultCategory = FeedUtils.GetFeedGeneratorIndigoCategory(_feedGeneratorCategoryService, reader, dict, catalog, Log);
                    var productData     = FeedUtils.GetProductData(dict, reader, sku, catalog, brandName, contributor, defaultCategory);
                    var sanitizedTitle  = (string.IsNullOrWhiteSpace(title)) ? string.Empty : FeedUtils.SanitizeString(title);
                    // Optional columns default to "available" (1) when the mapping is absent.
                    var gAvailability   = !dict.ContainsKey("gAvailability") ? FeedUtils.GetGoogleAvailability(1)
                            : FeedUtils.GetGoogleAvailability(int.Parse(reader[dict["gAvailability"]].ToString()));
                    var availability = !dict.ContainsKey("gAvailability") ? 1 : int.Parse(reader[dict["gAvailability"]].ToString());
                    var recordType   = !dict.ContainsKey("recordType") ? string.Empty : reader[dict["recordType"]].ToString();
                    var hasImage     = true;
                    if (!SkipHasImageCheck)
                    {
                        hasImage = (!dict.ContainsKey("hasImage")) || int.Parse(reader[dict["hasImage"]].ToString()) > 0;
                    }

                    string message;
                    var    isExcluded = false;
                    if (_isIncrementalRun)
                    {
                        var statusId = int.Parse(reader["StatusId"].ToString());
                        switch (statusId)
                        {
                        // New product
                        case 1:
                            counter.NumberOfNew++;
                            continue;

                        // Unchanged product
                        case 3:
                            if (!haveExclusionRulesChanged)
                            {
                                Log.DebugFormat("Product with id {0} is skipped in incremental mode as it wasn't modified and the rules haven't changed.", id);
                                counter.NumberOfUnchanged++;
                                continue;
                            }

                            // Rules changed: re-evaluate under both the old and new rule sets to detect a flip.
                            var oldExclusionResult     = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, true);
                            var currentExclusionResult = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, false);
                            if (oldExclusionResult == currentExclusionResult)
                            {
                                Log.DebugFormat("Product with id {0} is skipped in incremental mode as it wasn't modified, rules had changed but exclusion rule evaluation's result remained the same.", id);
                                counter.NumberOfUnchanged++;
                                continue;
                            }

                            // If the product is excluded at the moment, then perform the "exclusion logic" per business requirements,
                            // otherwise (i.e. product is included at the moment, treat it as "new")
                            if (!currentExclusionResult)
                            {
                                Log.DebugFormat("Product with id {0} is marked as new and skipped in incremental mode as it wasn't modified, rules had changed but exclusion rule evaluation's result changed and currently product isn't excluded.", id);
                                counter.NumberOfNew++;
                                continue;
                            }

                            Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode as it wasn't modified, rules had changed but exclusion rule evaluation's result changed and currently product is being excluded.", id);
                            isExcluded = true;
                            break;

                        // Modified product
                        case 2:
                            var isEntryExcluded = IndigoBreadcrumbRepositoryUtils.IsExcludedDueToData(GooglePlaFeedId, sanitizedTitle, hasImage, availability, recordType, false, out message);
                            if (isEntryExcluded)
                            {
                                Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode as it was modified, and it failed the data requirements for inclusion.", id);
                                isExcluded = true;
                                break;
                            }

                            // If product was excluded from the feed due to rules, then mark it as excluded
                            if (_runnerFeedRulesHelper.IsExcludedFromFeed(productData, false))
                            {
                                Log.DebugFormat("Product with id {0} is marked as excluded in incremental mode as it was modified, and it's matching one of the exclusion rules.", id);
                                isExcluded = true;
                            }
                            break;

                        default:
                            throw new ApplicationException("Invalid StatusId during an incremental run.");
                        }
                    }
                    else
                    {
                        // Full run: data-requirement check first, then (only if still included) rule check.
                        isExcluded = IndigoBreadcrumbRepositoryUtils.IsExcludedDueToData(GooglePlaFeedId, sanitizedTitle, hasImage, availability, recordType, false, out message);
                        if (isExcluded)
                        {
                            Log.DebugFormat("Product with id {0} is marked as excluded in full mode as it failed the data requirements for inclusion.", id);
                        }

                        if (!isExcluded)
                        {
                            isExcluded = _runnerFeedRulesHelper.IsExcludedFromFeed(productData, false);
                        }

                        if (isExcluded)
                        {
                            Log.DebugFormat("Product with id {0} is marked as excluded in full mode as it's matching one of the exclusion rules.", id);
                        }
                    }

                    // At this point, we know if the product is excluded or not, regardless of which type of run is being executed.
                    // If we aren't supposed to be sending excluded product data, then update the skipped counter and exit
                    if (isExcluded)
                    {
                        counter.NumberOfExcluded++;
                        if (!SendExcludedProductData)
                        {
                            Log.Debug("Skipped the product because it was excluded.");
                            continue;
                        }

                        // Excluded products that ARE sent go out with the configured "excluded" availability text.
                        gAvailability = ExcludedProductGoogleAvailabilityText;
                    }

                    var     regularPrice  = (decimal)reader[dict["price"]];
                    var     adjustedPrice = string.IsNullOrEmpty(dict["adjustedPrice"]) ? "" : reader[dict["adjustedPrice"]].ToString();
                    decimal?salePrice     = null;
                    if (!string.IsNullOrWhiteSpace(adjustedPrice))
                    {
                        var salePriceFromDatabase = Decimal.Parse(adjustedPrice);
                        if (salePriceFromDatabase != regularPrice)
                        {
                            if (salePriceFromDatabase > regularPrice)
                            {
                                // Adjusted price above regular: treat it as the regular price, no sale price.
                                regularPrice = salePriceFromDatabase;
                                salePrice    = null;
                            }
                            else
                            {
                                salePrice = salePriceFromDatabase;
                            }
                        }
                    }

                    var entry = EntryAttribute(id, regularPrice, salePrice, gAvailability);
                    entry.WriteTo(xmlWriter);
                    counter.NumberOfProcessed++;
                }
                catch (Exception e)
                {
                    counter.NumberOfErrored++;
                    var errorMessage = string.Format("Can't process the item. Id:{0};title:{1},catalog:{2},Message:{3}", id, title, catalog, e.Message);

                    Log.Error(errorMessage);

                    Log.DebugFormat("Error stack trace: {0}", e);
                    _executionLogLogger.AddCustomMessage(string.Format("Can't process the item. Id: {0};title: {1}, file identifier: {2}", id, title, identifier));
                    if (_isIncrementalRun && !AllowItemErrorsInFiles)
                    {
                        _hasError = true;
                    }
                }
            }

            // If the setting for sending deleted products is set to true in an incremental run, then get the deleted products since the last run
            // and send them as the "special" deleted products, i.e. pid + availability of "out of stock"
            if (_isIncrementalRun && SendExcludedProductData)
            {
                AddDeletedProducts(xmlWriter, identifier, ref counter);
            }

            PlaRelatedFeedUtils.EndXmlDocument(xmlWriter);
            var infoLogMessage = string.Format("[WriteFeedFile] {0} completed processed record count: {1}, error record count: {2}, unchanged record count: {3}, " +
                                               "new record count: {4}, excluded record count: {5}, deleted record count: {6}. ",
                                               feedFilePath, counter.NumberOfProcessed, counter.NumberOfErrored, counter.NumberOfUnchanged, counter.NumberOfNew, counter.NumberOfExcluded, counter.NumberOfDeleted);

            if (SendExcludedProductData)
            {
                // Fixed typo in the log message ("produces" -> "products").
                infoLogMessage += "Excluded products were included in processed count.";
            }

            Log.Info(infoLogMessage);
        }
        /// <summary>
        /// Reads product rows from <paramref name="reader"/>, classifies each via the product-data
        /// service, and sends insert/update and deletion batches to the API in chunks of
        /// <c>NumberOfProductsPerApiCall</c>, flushing any remainder at the end.
        /// </summary>
        /// <param name="reader">Forward-only reader over the product result set; column names are resolved via <paramref name="dict"/>.</param>
        /// <param name="dict">Maps logical field names (e.g. "gId", "title") to reader column names.</param>
        /// <param name="catalog">Catalog identifier passed through to the processing request.</param>
        /// <param name="identifier">Run identifier used in batch file names and log messages.</param>
        private void ExecuteFeedUpdate(IDataReader reader, StringDictionary dict, string catalog, string identifier)
        {
            var counter = new ProcessingCounters();

            //<entry>
            var batch        = new List <GooglePlaProductData>();
            var deletedBatch = new List <string>();
            var fileCount    = 1;

            while (reader.Read())
            {
                Log.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, (counter.GetTotalProcessed()), reader["PID"]);

                // id/title are read outside the try so the catch block can include them in error messages.
                var id    = reader[dict["gId"]].ToString();
                var title = reader[dict["title"]].ToString();
                try
                {
                    // First get the Bronte data processing result
                    var processingResult = _productDataService.GetGooglePlaProductData(GetProductDataProcessingRequest(dict, reader, catalog));

                    // Process the result and add to batch if the product update is to be sent to Google while
                    // ensuring that the proper counter gets incremented.
                    // Note: "continue" skips the batch-flush checks below, "break" falls through to them.
                    switch (processingResult.Status)
                    {
                    case GooglePlaProductDataStatus.ExcludedDueToProductData:
                    case GooglePlaProductDataStatus.ExcludedDueToExclusionRule:
                        Log.DebugFormat("Product {0} was excluded.", id);
                        counter.NumberOfExcluded++;
                        continue;

                    case GooglePlaProductDataStatus.Unmodified:
                        if (_isPseudoFullRun)
                        {
                            // Pseudo full run: resend unchanged products as if they were updates.
                            Log.DebugFormat("Product {0} was unmodified but was treated as if it was an update due to pseudo full run that's in progress.", id);
                            batch.Add(processingResult.ProductData);
                            counter.NumberOfProcessed++;
                            break;
                        }
                        else
                        {
                            Log.DebugFormat("Product {0} was skipped as its data and/or rules haven't resulted in a change for the product.", id);
                            counter.NumberOfUnchanged++;
                            continue;
                        }

                    case GooglePlaProductDataStatus.Removed:
                        Log.DebugFormat("Product {0} was removed, so adding its identifier to the removals batch.", id);
                        counter.NumberOfDeleted++;
                        deletedBatch.Add(id);
                        break;

                    case GooglePlaProductDataStatus.FoundOrModified:
                        Log.DebugFormat("Product {0} was found/modified, so adding its data to the insert/update batch.", id);
                        batch.Add(processingResult.ProductData);
                        counter.NumberOfProcessed++;
                        break;

                    case GooglePlaProductDataStatus.NotFound:
                    default:
                        var message = string.Format("[{2}]Product {0} - {1} wasn't found, so is being treated as an erroneous product.", id, title, identifier);
                        Log.Debug(message);
                        counter.NumberOfErrored++;
                        _executionLogLogger.AddCustomMessage(message);
                        if (!AllowItemErrorsInFiles)
                        {
                            _hasError = true;
                        }
                        continue;
                    }
                }
                catch (Exception exception)
                {
                    counter.NumberOfErrored++;
                    var errorMessage = string.Format("Can't process the item. Id:{0};title:{1},catalog:{2},Message:{3}", id, title, catalog, exception.Message);
                    Log.Error(errorMessage);
                    Log.DebugFormat("Error stack trace: {0}", exception);
                    _executionLogLogger.AddCustomMessage(string.Format("Can't process the item. Id: {0};title: {1}, file identifier: {2}", id, title, identifier));
                    if (!AllowItemErrorsInFiles)
                    {
                        _hasError = true;
                    }
                }

                // Flush either batch once it reaches the per-call limit.
                if (batch.Count >= NumberOfProductsPerApiCall)
                {
                    SendUpdateBatch(batch, fileCount, identifier);
                    fileCount++;
                    batch = new List <GooglePlaProductData>();
                }

                if (deletedBatch.Count >= NumberOfProductsPerApiCall)
                {
                    SendDeletionBatch(deletedBatch, fileCount, identifier);
                    fileCount++;
                    deletedBatch = new List <string>();
                }
            }

            // Flush whatever is left after the reader is exhausted.
            if (batch.Any())
            {
                SendUpdateBatch(batch, fileCount, identifier);
                fileCount++;
            }

            if (deletedBatch.Any())
            {
                SendDeletionBatch(deletedBatch, fileCount, identifier);
            }

            var infoLogMessage = string.Format("[ExecuteFeedUpdate] {0} completed processed record count: {1}, error record count: {2}, unchanged record count: {3}, " +
                                               "excluded (ignored) record count: {4}, removed record count {5}.",
                                               identifier, counter.NumberOfProcessed, counter.NumberOfErrored, counter.NumberOfUnchanged, counter.NumberOfExcluded, counter.NumberOfDeleted);

            Log.Info(infoLogMessage);
        }
        /// <summary>
        /// Reads records from <paramref name="reader"/>, routes each into an insert or update batch,
        /// and emits batches of <c>NumberOfRecordsPerBatch</c> via <c>OutputBatch</c>, flushing any
        /// remainder at the end. Returns the per-run counters.
        /// </summary>
        /// <param name="reader">Forward-only reader over the source result set; column names are resolved via <paramref name="dict"/>.</param>
        /// <param name="dict">Maps logical field names (e.g. "sourceId", "title") to reader column names.</param>
        /// <param name="catalog">Catalog identifier passed through to the data service and output.</param>
        /// <param name="identifier">Run identifier used in batch file names and log messages.</param>
        /// <param name="runType">Run type forwarded to the data service.</param>
        /// <returns>The populated <see cref="ProcessingCounters"/> for this run.</returns>
        public override ProcessingCounters CreateOutput(IDataReader reader, StringDictionary dict, string catalog, string identifier, RunType runType)
        {
            var counter = new ProcessingCounters()
            {
                AllowErrors = AllowItemErrorsInFiles, Identifier = identifier
            };
            var newBatch     = new List <ExportData>();
            var updatedBatch = new List <ExportData>();
            var batchCount   = 1;

            while (reader.Read())
            {
                var sourceId = reader[dict["sourceId"]].ToString();
                _logger.DebugFormat("{0}::Processing record [{1}]: {2}", identifier, (counter.GetTotalProcessed()), sourceId);
                var title = reader[dict["title"]].ToString();
                try
                {
                    // First get the data
                    var data = _dataService.GetData(dict, reader, catalog, runType);
                    if (data == null)
                    {
                        var message = string.Format("[{2}]Record {0} - {1} wasn't found, so is being treated as an erroneous item.", sourceId, title, identifier);
                        _logger.Debug(message);
                        counter.NumberOfErrored++;
                        counter.AddCustomMessage(message);
                    }
                    else if (data.IsNew)
                    {
                        _logger.DebugFormat("Record {0} was found and was new, so adding its data to the insert batch.", sourceId);
                        newBatch.Add(data.ExportData);
                        counter.NumberOfNew++;
                    }
                    else
                    {
                        _logger.DebugFormat("Record {0} was found and was modified, so adding its data to the update batch.", sourceId);
                        updatedBatch.Add(data.ExportData);
                        counter.NumberOfModified++;
                    }
                }
                catch (Exception exception)
                {
                    counter.NumberOfErrored++;
                    var errorMessage = string.Format("Cannot process the item. Id:{0};title:{1},catalog:{2},Message:{3}", sourceId, title, catalog, exception.Message);
                    counter.AddCustomMessage(errorMessage);
                    _logger.Error(errorMessage);
                    _logger.DebugFormat("Error stack trace: {0}", exception);
                }

                // Flush either batch once it reaches the per-batch limit.
                // NOTE: the original ended every successful path above with "continue", which skipped
                // these checks so batches only flushed mid-run after an exception; the if/else-if
                // restructure ensures they run on every iteration, matching ExecuteFeedUpdate.
                if (newBatch.Count >= NumberOfRecordsPerBatch)
                {
                    OutputBatch(OutputFormat.Insert, newBatch, batchCount, identifier, catalog);
                    batchCount++;
                    newBatch = new List <ExportData>();
                }

                if (updatedBatch.Count >= NumberOfRecordsPerBatch)
                {
                    OutputBatch(OutputFormat.Update, updatedBatch, batchCount, identifier, catalog);
                    batchCount++;
                    updatedBatch = new List <ExportData>();
                }
            }

            // Flush whatever is left after the reader is exhausted.
            if (newBatch.Any())
            {
                OutputBatch(OutputFormat.Insert, newBatch, batchCount, identifier, catalog);
                batchCount++;
            }

            if (updatedBatch.Any())
            {
                OutputBatch(OutputFormat.Update, updatedBatch, batchCount, identifier, catalog);
            }

            // Fixed log tag: this method is CreateOutput, not ExecuteFeedUpdate (copy-paste error).
            _logger.InfoFormat("[CreateOutput] {0} completed new record count: {1}, error record count: {2}, changed record count: {3}.",
                               identifier, counter.NumberOfNew, counter.NumberOfErrored, counter.NumberOfModified);

            // TODO: Do we want to halt execution if we ran into an error here which will stop other unprocessed ranges from being sent?
            if (!AllowItemErrorsInFiles && counter.NumberOfErrored > 0)
            {
                _hasError = true;
            }

            return(counter);
        }