protected override void PerformIndexAll(string type)
        {
            if (SupportedTypes.Contains(type) == false)
            {
                return;
            }

            var pageIndex = 0;

            DataService.LogService.AddInfoLog(-1, string.Format("PerformIndexAll - Start data queries - {0}", type));
            var stopwatch = new Stopwatch();

            stopwatch.Start();

            try
            {
                switch (type)
                {
                case IndexTypes.Content:
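                    //content is reindexed either from the pre-generated xml entries or by paging the content service,
                    // depending on whether unpublished content must be indexed and whether xml document lookup is disabled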
                    var contentParentId = -1;
                    if (IndexerData.ParentNodeId.HasValue && IndexerData.ParentNodeId.Value > 0)
                    {
                        contentParentId = IndexerData.ParentNodeId.Value;
                    }

                    if (SupportUnpublishedContent == false && DisableXmlDocumentLookup == false)
                    {
                        //get all node Ids that have a published version - this is a fail-safe check, in theory
                        // only document nodes that have a published version would exist in the cmsContentXml table
                        var allNodesWithPublishedVersions = ApplicationContext.Current.DatabaseContext.Database.Fetch<int>(
                            "select DISTINCT cmsDocument.nodeId from cmsDocument where cmsDocument.published = 1");

                        XElement last = null;
                        var trackedIds = new HashSet<string>();

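                        //rebuild from the paged xml entries; the filter below drops nodes that have no published version
                        // and nodes whose parent was never tracked (i.e. implicitly unpublished descendants)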
                        ReindexWithXmlEntries(type, contentParentId,
                                              () => _contentTypeService.GetAllContentTypes().ToArray(),
                                              (path, pIndex, pSize) =>
                        {
                            long totalContent;

                            //sorted by: umbracoNode.level, umbracoNode.parentID, umbracoNode.sortOrder
                            var result = _contentService.GetPagedXmlEntries(path, pIndex, pSize, out totalContent).ToArray();
                            var more = result.Length == pSize;

                            //then, like we do in ContentRepository.BuildXmlCache, we need to track which parents have been processed
                            // already so that we can exclude implicitly unpublished content items
                            var filtered = new List<XElement>();

                            foreach (var xml in result)
                            {
                                var id = xml.AttributeValue<int>("id");

                                //don't include this if it doesn't have a published version
                                if (allNodesWithPublishedVersions.Contains(id) == false)
                                {
                                    continue;
                                }

                                var parentId = xml.AttributeValue<string>("parentID");

                                if (parentId == null)
                                {
                                    continue; //this shouldn't happen
                                }
                                //if the parentID is changing
                                if (last != null && last.AttributeValue<string>("parentID") != parentId)
                                {
                                    var found = trackedIds.Contains(parentId);
                                    if (found == false)
                                    {
                                        //Need to short-circuit here - if the parent is not there it means that the parent is unpublished
                                        // and therefore the child is not published either so cannot be included in the xml cache
                                        continue;
                                    }
                                }

                                last = xml;
                                trackedIds.Add(xml.AttributeValue<string>("id"));

                                filtered.Add(xml);
                            }

                            return Tuple.Create(filtered.ToArray(), more);
                        },
                                              i => _contentService.GetById(i));
                    }
                    else
                    {
                        //used to track non-published entities so we can determine which items are implicitly not published
                        //currently this is not in use apart from in tests
                        var notPublished = new HashSet<string>();

                        int currentPageSize;
                        do
                        {
                            long total;

                            IContent[] descendants;
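                            //when unpublished content is indexed, a plain stable sort (umbracoNode.id) is enough for paging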
                            if (SupportUnpublishedContent)
                            {
                                descendants = _contentService.GetPagedDescendants(contentParentId, pageIndex, PageSize, out total, "umbracoNode.id").ToArray();
                            }
                            else
                            {
                                //get all paged records but order by level ascending; we need to do this because we need to track which nodes are not published so that we can determine
                                // which descendant nodes are implicitly not published
                                descendants = _contentService.GetPagedDescendants(contentParentId, pageIndex, PageSize, out total, "level", Direction.Ascending, true, (string)null).ToArray();
                            }

                            // need to store descendants count before filtering, in order for the loop to work correctly
                            currentPageSize = descendants.Length;

                            //if specific types are declared we need to post filter them
                            //TODO: Update the service layer to join the cmsContentType table so we can query by content type too
                            IEnumerable<IContent> content;
                            if (IndexerData.IncludeNodeTypes.Any())
                            {
                                content = descendants.Where(x => IndexerData.IncludeNodeTypes.Contains(x.ContentType.Alias));
                            }
                            else
                            {
                                content = descendants;
                            }

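                            //serialize the (optionally type-filtered) page of content and add it to the index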
                            AddNodesToIndex(GetSerializedContent(
                                                SupportUnpublishedContent,
                                                c => _serializer.Serialize(_contentService, _dataTypeService, _userService, c),
                                                content, notPublished).WhereNotNull(), type);

                            pageIndex++;
                        } while (currentPageSize == PageSize);
                    }

                    break;

                case IndexTypes.Media:
                    var mediaParentId = -1;

                    if (IndexerData.ParentNodeId.HasValue && IndexerData.ParentNodeId.Value > 0)
                    {
                        mediaParentId = IndexerData.ParentNodeId.Value;
                    }

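                    //media has no published state, so the paged xml entries are indexed without the filtering used for content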
                    ReindexWithXmlEntries(type, mediaParentId,
                                          () => _contentTypeService.GetAllMediaTypes().ToArray(),
                                          (path, pIndex, pSize) =>
                    {
                        long totalMedia;
                        var result = _mediaService.GetPagedXmlEntries(path, pIndex, pSize, out totalMedia).ToArray();
                        var more = result.Length == pSize;
                        return Tuple.Create(result, more);
                    },
                                          i => _mediaService.GetById(i));

                    break;
                }
            }
            finally
            {
                stopwatch.Stop();
            }

            DataService.LogService.AddInfoLog(-1, string.Format("PerformIndexAll - End data queries - {0}, took {1}ms", type, stopwatch.ElapsedMilliseconds));
        }
        protected override void PerformIndexAll(string type)
        {
            const int pageSize = 1000;
            var pageIndex = 0;

            switch (type)
            {
            case IndexTypes.Content:
                if (SupportUnpublishedContent == false)
                {
                    //use the base implementation which will use the published XML cache to perform the lookups
                    base.PerformIndexAll(type);
                }
                else
                {
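                    //unpublished content is not in the published xml cache, so page through the content service instead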
                    var contentParentId = -1;
                    if (IndexerData.ParentNodeId.HasValue && IndexerData.ParentNodeId.Value > 0)
                    {
                        contentParentId = IndexerData.ParentNodeId.Value;
                    }
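                    //page through all descendants of the configured parent (-1 targets the content root)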
                    int fetchedCount;

                    do
                    {
                        long total;
                        var descendants = _contentService.GetPagedDescendants(contentParentId, pageIndex, pageSize, out total).ToArray();

                        //store the page size before filtering so the loop still terminates correctly when type filtering removes items
                        fetchedCount = descendants.Length;

                        //if specific types are declared we need to post filter them
                        //TODO: Update the service layer to join the cmsContentType table so we can query by content type too
                        IContent[] content;
                        if (IndexerData.IncludeNodeTypes.Any())
                        {
                            content = descendants.Where(x => IndexerData.IncludeNodeTypes.Contains(x.ContentType.Alias)).ToArray();
                        }
                        else
                        {
                            content = descendants;
                        }

                        AddNodesToIndex(GetSerializedContent(content), type);
                        pageIndex++;
                    } while (fetchedCount == pageSize);
                }
                break;

            case IndexTypes.Media:

                var mediaParentId = -1;
                if (IndexerData.ParentNodeId.HasValue && IndexerData.ParentNodeId.Value > 0)
                {
                    mediaParentId = IndexerData.ParentNodeId.Value;
                }
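                //page through all descendant media items under the configured parent (-1 targets the media root)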
                int fetchedCount;

                do
                {
                    long total;
                    var descendants = _mediaService.GetPagedDescendants(mediaParentId, pageIndex, pageSize, out total).ToArray();

                    //store the page size before filtering so the loop still terminates correctly when type filtering removes items
                    fetchedCount = descendants.Length;

                    //if specific types are declared we need to post filter them
                    //TODO: Update the service layer to join the cmsContentType table so we can query by content type too
                    IMedia[] media;
                    if (IndexerData.IncludeNodeTypes.Any())
                    {
                        media = descendants.Where(x => IndexerData.IncludeNodeTypes.Contains(x.ContentType.Alias)).ToArray();
                    }
                    else
                    {
                        media = descendants;
                    }

                    AddNodesToIndex(GetSerializedMedia(media), type);
                    pageIndex++;
                } while (fetchedCount == pageSize);

                break;
            }
        }