Example #1
        private void ApplyMetadataModifications(LazyStringValue id, DocumentInfo documentInfo)
        {
            if (documentInfo.Metadata.Modifications == null)
            {
                return;
            }

            documentInfo.MetadataInstance = null;

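            // the using statements dispose the old blittable instances once ReadObject
            // has materialized fresh copies that include the pending modifications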
            using (documentInfo.Document)
                using (documentInfo.Metadata)
                {
                    documentInfo.Metadata = _session.Context.ReadObject(documentInfo.Metadata, id);
                    documentInfo.Metadata.Modifications = null;

                    documentInfo.Document.Modifications = new DynamicJsonValue(documentInfo.Document)
                    {
                        [Constants.Documents.Metadata.Key] = documentInfo.Metadata
                    };

                    documentInfo.Document = _session.Context.ReadObject(documentInfo.Document, id);
                    documentInfo.Document.Modifications = null;
                }
        }
Example #2
        private (bool AllExpired, LazyStringValue Id) GetConflictedExpiration(DocumentsOperationContext context, DateTime currentTime, Slice clonedId)
        {
            LazyStringValue id         = null;
            var             allExpired = true;
            var             conflicts  = _database.DocumentsStorage.ConflictsStorage.GetConflictsFor(context, clonedId);

            if (conflicts.Count > 0)
            {
                foreach (var conflict in conflicts)
                {
                    id = conflict.Id;

                    if (HasPassed(conflict.Doc, currentTime))
                    {
                        continue;
                    }

                    allExpired = false;
                    break;
                }
            }

            return (allExpired, id);
        }
Example #3
        public IEnumerable<string> GetCountersForDocument(DocumentsOperationContext context, string docId)
        {
            var table = new Table(CountersSchema, context.Transaction.InnerTransaction);

            using (GetCounterPartialKey(context, docId, out var key))
            {
                LazyStringValue prev = null;
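                // SeekByPrimaryKeyPrefix yields entries in key order, so repeated
                // counter names (stored once per source node) arrive consecutively,
                // which is what makes the prev/current dedup below correct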
                foreach (var result in table.SeekByPrimaryKeyPrefix(key, Slices.Empty, 0))
                {
                    var current = ExtractCounterName(context, result.Value.Reader);

                    if (prev?.Equals(current) == true)
                    {
                        // already seen this one, skip it
                        continue;
                    }

                    yield return current;

                    prev?.Dispose();
                    prev = current;
                }
            }
        }
Example #4
        private void HandleTreeReduction(TransactionOperationContext indexContext, IndexingStatsScope stats,
                                         MapReduceResultsStore modifiedStore, LowLevelTransaction lowLevelTransaction,
                                         IndexWriteOperation writer, LazyStringValue reduceKeyHash, Table table, CancellationToken token)
        {
            EnsureValidTreeReductionStats(stats);

            var tree = modifiedStore.Tree;

            var branchesToAggregate = new HashSet<long>();

            var parentPagesToAggregate = new HashSet<long>();

            var page = new TreePage(null, Constants.Storage.PageSize);

            HashSet<long> compressedEmptyLeafs = null;

            Dictionary<long, Exception> failedAggregatedLeafs = null;

            foreach (var modifiedPage in modifiedStore.ModifiedPages)
            {
                token.ThrowIfCancellationRequested();

                page.Base = lowLevelTransaction.GetPage(modifiedPage).Pointer;

                stats.RecordReduceTreePageModified(page.IsLeaf);

                if (page.IsLeaf == false)
                {
                    Debug.Assert(page.IsBranch);
                    branchesToAggregate.Add(modifiedPage);

                    continue;
                }

                var leafPage = page;

                var compressed = leafPage.IsCompressed;

                if (compressed)
                {
                    stats.RecordCompressedLeafPage();
                }

                using (compressed ? (DecompressedLeafPage)(leafPage = tree.DecompressPage(leafPage, skipCache: true)) : null)
                {
                    if (leafPage.NumberOfEntries == 0)
                    {
                        if (leafPage.PageNumber == tree.State.RootPageNumber)
                        {
                            writer.DeleteReduceResult(reduceKeyHash, stats);

                            var emptyPageNumber = Bits.SwapBytes(leafPage.PageNumber);
                            using (Slice.External(indexContext.Allocator, (byte*)&emptyPageNumber, sizeof(long), out Slice pageNumSlice))
                                table.DeleteByKey(pageNumSlice);

                            continue;
                        }

                        if (compressed)
                        {
                            // it has no entries after decompression because every
                            // compressed entry carries a delete tombstone

                            if (compressedEmptyLeafs == null)
                            {
                                compressedEmptyLeafs = new HashSet<long>();
                            }

                            compressedEmptyLeafs.Add(leafPage.PageNumber);
                            continue;
                        }

                        throw new UnexpectedReduceTreePageException(
                                  $"Encountered empty page which isn't a root. Page {leafPage} in '{tree.Name}' tree.");
                    }

                    var parentPage = tree.GetParentPageOf(leafPage);

                    stats.RecordReduceAttempts(leafPage.NumberOfEntries);

                    try
                    {
                        using (var result = AggregateLeafPage(leafPage, lowLevelTransaction, indexContext, token))
                        {
                            if (parentPage == -1)
                            {
                                writer.DeleteReduceResult(reduceKeyHash, stats);

                                foreach (var output in result.GetOutputs())
                                {
                                    writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                                }
                            }
                            else
                            {
                                StoreAggregationResult(leafPage, table, result);
                                parentPagesToAggregate.Add(parentPage);
                            }

                            _metrics.MapReduceIndexes.ReducedPerSec.Mark(leafPage.NumberOfEntries);

                            stats.RecordReduceSuccesses(leafPage.NumberOfEntries);
                        }
                    }
                    catch (Exception e) when (e is OperationCanceledException == false)
                    {
                        if (failedAggregatedLeafs == null)
                        {
                            failedAggregatedLeafs = new Dictionary<long, Exception>();
                        }

                        failedAggregatedLeafs.Add(leafPage.PageNumber, e);

                        _index.ErrorIndexIfCriticalException(e);

                        HandleReductionError(e, reduceKeyHash, writer, stats, updateStats: parentPage == -1, page: leafPage);
                    }
                }
            }

            long tmp = 0;

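            // pageNumberSlice wraps the address of 'tmp', so assigning tmp in the loop
            // changes the key the slice points at - one slice allocation for all deletes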
            using (Slice.External(indexContext.Allocator, (byte*)&tmp, sizeof(long), out Slice pageNumberSlice))
            {
                foreach (var freedPage in modifiedStore.FreedPages)
                {
                    tmp = Bits.SwapBytes(freedPage);
                    table.DeleteByKey(pageNumberSlice);
                }
            }

            while (parentPagesToAggregate.Count > 0 || branchesToAggregate.Count > 0)
            {
                token.ThrowIfCancellationRequested();

                var branchPages = parentPagesToAggregate;
                parentPagesToAggregate = new HashSet<long>();

                foreach (var pageNumber in branchPages)
                {
                    page.Base = lowLevelTransaction.GetPage(pageNumber).Pointer;

                    try
                    {
                        if (page.IsBranch == false)
                        {
                            throw new UnexpectedReduceTreePageException("Parent page was found that wasn't a branch, error at " + page);
                        }

                        stats.RecordReduceAttempts(page.NumberOfEntries);

                        var parentPage = tree.GetParentPageOf(page);

                        using (var result = AggregateBranchPage(page, table, indexContext, branchesToAggregate, compressedEmptyLeafs, failedAggregatedLeafs, token))
                        {
                            if (parentPage == -1)
                            {
                                writer.DeleteReduceResult(reduceKeyHash, stats);

                                foreach (var output in result.GetOutputs())
                                {
                                    writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                                }
                            }
                            else
                            {
                                parentPagesToAggregate.Add(parentPage);

                                StoreAggregationResult(page, table, result);
                            }

                            _metrics.MapReduceIndexes.ReducedPerSec.Mark(page.NumberOfEntries);

                            stats.RecordReduceSuccesses(page.NumberOfEntries);
                        }
                    }
                    catch (Exception e) when (e is OperationCanceledException == false)
                    {
                        _index.ErrorIndexIfCriticalException(e);

                        HandleReductionError(e, reduceKeyHash, writer, stats, updateStats: true, page: page);
                    }
                    finally
                    {
                        branchesToAggregate.Remove(pageNumber);
                    }
                }

                if (parentPagesToAggregate.Count == 0 && branchesToAggregate.Count > 0)
                {
                    // we still have unaggregated branches that were modified, but their children were not (branch page splits), so we missed them
                    parentPagesToAggregate.Add(branchesToAggregate.First());
                }
            }

            if (compressedEmptyLeafs != null && compressedEmptyLeafs.Count > 0)
            {
                // we had some compressed pages that are empty after decompression
                // let's remove them and reduce the tree once again

                modifiedStore.ModifiedPages.Clear();
                modifiedStore.FreedPages.Clear();

                foreach (var pageNumber in compressedEmptyLeafs)
                {
                    page.Base = lowLevelTransaction.GetPage(pageNumber).Pointer;

                    using (var emptyPage = tree.DecompressPage(page, skipCache: true))
                    {
                        if (emptyPage.NumberOfEntries > 0) // could have changed in the meantime
                        {
                            continue;
                        }

                        modifiedStore.Tree.RemoveEmptyDecompressedPage(emptyPage);
                    }
                }

                HandleTreeReduction(indexContext, stats, modifiedStore, lowLevelTransaction, writer, reduceKeyHash, table, token);
            }
        }
Example #5
 public static ByteStringContext.InternalScope From(ByteStringContext context, LazyStringValue value, out Slice str)
 {
     return From(context, value.Buffer, value.Size, ByteStringType.Immutable, out str);
 }
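A minimal usage sketch (the surrounding caller is hypothetical; only Slice.From and its disposal scope come from the snippet above):

     using (Slice.From(context, id, out Slice idSlice))
     {
         // idSlice holds an immutable copy of the LazyStringValue's bytes,
         // valid until the scope is disposed
     }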
Example #6
 public PatchDocumentCommand(
     JsonOperationContext context,
     string id,
     LazyStringValue expectedChangeVector,
     bool skipPatchIfChangeVectorMismatch,
     (PatchRequest run, BlittableJsonReaderObject args) patch,
Example #7
 public static ByteStringContext.InternalScope GetLower(ByteStringContext byteStringContext, LazyStringValue str, out Slice loweredKey)
 {
     return GetLower(byteStringContext, str.Buffer, str.Size, out loweredKey);
 }
Example #8
 protected abstract int GetFields<T>(T instance, LazyStringValue key, object document, JsonOperationContext indexContext, IWriteOperationBuffer writeBuffer) where T : ILuceneDocumentWrapper;
Example #9
        public static ConflictStatus GetConflictStatusForDocument(DocumentsOperationContext context, string id, LazyStringValue remote, out string conflictingVector)
        {
            // tombstones can also be conflict entries
            conflictingVector = null;
            var conflicts = context.DocumentDatabase.DocumentsStorage.ConflictsStorage.GetConflictsFor(context, id);

            if (conflicts.Count > 0)
            {
                foreach (var existingConflict in conflicts)
                {
                    if (ChangeVectorUtils.GetConflictStatus(remote, existingConflict.ChangeVector) == ConflictStatus.Conflict)
                    {
                        conflictingVector = existingConflict.ChangeVector;
                        return ConflictStatus.Conflict;
                    }
                }
                // this document will resolve the conflicts when it is put
                return ConflictStatus.Update;
            }

            var    result = context.DocumentDatabase.DocumentsStorage.GetDocumentOrTombstone(context, id);
            string local;

            if (result.Document != null)
            {
                local = result.Document.ChangeVector;
            }
            else if (result.Tombstone != null)
            {
                local = result.Tombstone.ChangeVector;
            }
            else
            {
                return ConflictStatus.Update; // a document with 'id' doesn't exist locally, so just do a PUT
            }
            var status = ChangeVectorUtils.GetConflictStatus(remote, local);

            if (status == ConflictStatus.Conflict)
            {
                conflictingVector = local;
            }

            return status;
        }
Example #10
        public IEnumerable<Slice> GetDocumentKeysFromCollectionThatReference(string collection, LazyStringValue referenceKey, RavenTransaction tx)
        {
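            // each collection has a '#'-prefixed multi-value tree mapping a referenced key
            // to the ids of the documents in that collection which reference it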
            var collectionTree = tx.InnerTransaction.ReadTree("#" + collection);

            if (collectionTree == null)
            {
                yield break;
            }

            using (DocumentIdWorker.GetLower(tx.InnerTransaction.Allocator, referenceKey, out var k))
                using (var it = collectionTree.MultiRead(k))
                {
                    if (it.Seek(Slices.BeforeAllKeys) == false)
                    {
                        yield break;
                    }

                    do
                    {
                        yield return it.CurrentKey;
                    } while (it.MoveNext());
                }
        }
Example #11
        public static (LazyStringValue DocId, string CounterName) ExtractDocIdAndCounterNameFromTombstone(JsonOperationContext context,
                                                                                                          LazyStringValue counterTombstoneId)
        {
            var p    = counterTombstoneId.Buffer;
            var size = counterTombstoneId.Size;

            int sizeOfDocId = 0;

            for (; sizeOfDocId < size; sizeOfDocId++)
            {
                if (p[sizeOfDocId] == SpecialChars.RecordSeparator)
                {
                    break;
                }
            }

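            // the counter name starts right after the record separator and excludes the
            // key's single trailing byte, hence the +1 and (sizeOfDocId + 2) offsets below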
            var doc  = context.AllocateStringValue(null, p, sizeOfDocId);
            var name = Encoding.UTF8.GetString(p + sizeOfDocId + 1, size - (sizeOfDocId + 2));

            return (doc, name);
        }
Example #12
        private static bool CompareBlittableArray(string fieldPath, string id, BlittableJsonReaderArray oldArray, BlittableJsonReaderArray newArray,
                                                  IDictionary<string, DocumentsChanges[]> changes, List<DocumentsChanges> docChanges, LazyStringValue propName)
        {
            // lengths differ and the caller doesn't track individual changes - report the arrays as changed right away
            if (oldArray.Length != newArray.Length && changes == null)
            {
                return true;
            }

            var position = 0;
            var changed  = false;

            while (position < oldArray.Length && position < newArray.Length && (changed == false || changes != null))
            {
                switch (oldArray[position])
                {
                case BlittableJsonReaderObject bjro1:
                    if (newArray[position] is BlittableJsonReaderObject bjro2)
                    {
                        changed |= CompareBlittable(AddIndexFieldPath(fieldPath, position), id, bjro1, bjro2, changes, docChanges);
                    }
                    else
                    {
                        changed = true;
                        if (changes != null)
                        {
                            NewChange(AddIndexFieldPath(fieldPath, position), propName, newArray[position], oldArray[position], docChanges,
                                      DocumentsChanges.ChangeType.ArrayValueChanged);
                        }
                    }
                    break;

                case BlittableJsonReaderArray bjra1:
                    if (newArray[position] is BlittableJsonReaderArray bjra2)
                    {
                        changed |= CompareBlittableArray(AddIndexFieldPath(fieldPath, position), id, bjra1, bjra2, changes, docChanges, propName);
                    }
                    else
                    {
                        changed = true;
                        if (changes != null)
                        {
                            NewChange(AddIndexFieldPath(fieldPath, position), propName, newArray[position], oldArray[position], docChanges,
                                      DocumentsChanges.ChangeType.ArrayValueChanged);
                        }
                    }
                    break;

                case null:
                    if (newArray[position] != null)
                    {
                        changed = true;
                        if (changes != null)
                        {
                            NewChange(AddIndexFieldPath(fieldPath, position), propName, newArray[position], oldArray[position], docChanges,
                                      DocumentsChanges.ChangeType.ArrayValueChanged);
                        }
                    }
                    break;

                default:
                    if (oldArray[position].Equals(newArray[position]) == false)
                    {
                        if (changes != null)
                        {
                            NewChange(AddIndexFieldPath(fieldPath, position), propName, newArray[position], oldArray[position], docChanges,
                                      DocumentsChanges.ChangeType.ArrayValueChanged);
                        }
                        changed = true;
                    }
                    break;
                }

                position++;
            }

            if (changes == null)
            {
                return changed;
            }

            // if one of the arrays is larger than the other
            while (position < oldArray.Length)
            {
                NewChange(fieldPath, propName, null, oldArray[position], docChanges,
                          DocumentsChanges.ChangeType.ArrayValueRemoved);
                position++;
            }

            while (position < newArray.Length)
            {
                NewChange(fieldPath, propName, newArray[position], null, docChanges,
                          DocumentsChanges.ChangeType.ArrayValueAdded);
                position++;
            }

            return changed;
        }
Example #13
        public async Task<ImportResult> Import(DocumentsOperationContext context, Stream stream, Action<IOperationProgress> onProgress = null)
        {
            var result   = new ImportResult();
            var progress = new IndeterminateProgress();
            var state    = new JsonParserState();

            JsonOperationContext.ManagedPinnedBuffer buffer;
            using (context.GetManagedBuffer(out buffer))
                using (var parser = new UnmanagedJsonParser(context, state, "fileName"))
                {
                    var operateOnType = "__top_start_object";
                    var buildVersion  = 0L;
                    var identities    = new Dictionary<string, long>();
                    VersioningStorage versioningStorage = null;

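                    // pump the stream through the parser: when Read() returns false the
                    // buffer is exhausted, so refill it from the stream and parse again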
                    while (true)
                    {
                        if (parser.Read() == false)
                        {
                            var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);

                            if (read == 0)
                            {
                                if (state.CurrentTokenType != JsonParserToken.EndObject)
                                {
                                    throw new EndOfStreamException("Stream ended without reaching end of json content");
                                }
                                break;
                            }
                            parser.SetBuffer(buffer, read);
                            continue;
                        }

                        switch (state.CurrentTokenType)
                        {
                        case JsonParserToken.String:
                            unsafe
                            {
                                operateOnType =
                                    new LazyStringValue(null, state.StringBuffer, state.StringSize, context).ToString();
                            }
                            break;

                        case JsonParserToken.Integer:
                            switch (operateOnType)
                            {
                            case "BuildVersion":
                                buildVersion = state.Long;
                                break;
                            }
                            break;

                        case JsonParserToken.StartObject:
                            if (operateOnType == "__top_start_object")
                            {
                                operateOnType = null;
                                break;
                            }
                            context.CachedProperties.NewDocument();
                            var builder = new BlittableJsonDocumentBuilder(_batchPutCommand.Context, BlittableJsonDocumentBuilder.UsageMode.ToDisk, "ImportObject", parser, state);
                            builder.ReadNestedObject();
                            while (builder.Read() == false)
                            {
                                var read = await stream.ReadAsync(buffer.Buffer.Array, buffer.Buffer.Offset, buffer.Length);

                                if (read == 0)
                                {
                                    throw new EndOfStreamException("Stream ended without reaching end of json content");
                                }
                                parser.SetBuffer(buffer, read);
                            }
                            builder.FinalizeDocument();

                            if (operateOnType == "Docs" && Options.OperateOnTypes.HasFlag(DatabaseItemType.Documents))
                            {
                                progress.Progress = "Importing Documents";
                                onProgress?.Invoke(progress);
                                PatchDocument patch        = null;
                                PatchRequest  patchRequest = null;
                                if (string.IsNullOrWhiteSpace(Options.TransformScript) == false)
                                {
                                    patch        = new PatchDocument(context.DocumentDatabase);
                                    patchRequest = new PatchRequest
                                    {
                                        Script = Options.TransformScript
                                    };
                                }

                                result.DocumentsCount++;
                                var reader   = builder.CreateReader();
                                var document = new Document
                                {
                                    Data = reader,
                                };

                                if (Options.IncludeExpired == false && document.Expired(_database.Time.GetUtcNow()))
                                {
                                    continue;
                                }

                                TransformScriptOrDisableVersioningIfNeeded(context, patch, reader, document,
                                                                           patchRequest);

                                _batchPutCommand.Add(document.Data);

                                if (result.DocumentsCount % 1000 == 0)
                                {
                                    progress.Progress = $"Imported {result.DocumentsCount} documents";
                                    onProgress?.Invoke(progress);
                                }

                                await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                            }
                            else if (operateOnType == "RevisionDocuments" &&
                                     Options.OperateOnTypes.HasFlag(DatabaseItemType.RevisionDocuments))
                            {
                                if (versioningStorage == null)
                                {
                                    break;
                                }

                                result.RevisionDocumentsCount++;
                                var reader = builder.CreateReader();
                                _batchPutCommand.Add(reader);
                                await HandleBatchOfDocuments(context, parser, buildVersion).ConfigureAwait(false);
                            }
                            else
                            {
                                using (builder)
                                {
                                    switch (operateOnType)
                                    {
                                    case "Attachments":
                                        result.Warnings.Add("Attachments are not supported anymore. Use RavenFS isntead. Skipping.");
                                        break;

                                    case "Indexes":
                                        if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Indexes) == false)
                                        {
                                            continue;
                                        }

                                        result.IndexesCount++;
                                        progress.Progress = "importing Indexes";
                                        onProgress?.Invoke(progress);
                                        try
                                        {
                                            IndexProcessor.Import(builder, _database, buildVersion, Options.RemoveAnalyzers);
                                        }
                                        catch (Exception e)
                                        {
                                            result.Warnings.Add($"Could not import index. Message: {e.Message}");
                                        }

                                        break;

                                    case "Transformers":
                                        if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Transformers) == false)
                                        {
                                            continue;
                                        }

                                        result.TransformersCount++;
                                        progress.Progress = "Importing Transformers";
                                        onProgress?.Invoke(progress);

                                        try
                                        {
                                            TransformerProcessor.Import(builder, _database, buildVersion);
                                        }
                                        catch (Exception e)
                                        {
                                            result.Warnings.Add($"Could not import transformer. Message: {e.Message}");
                                        }
                                        break;

                                    case "Identities":
                                        if (Options.OperateOnTypes.HasFlag(DatabaseItemType.Identities))
                                        {
                                            result.IdentitiesCount++;
                                            progress.Progress = "Importing Identities";
                                            onProgress?.Invoke(progress);

                                            using (var reader = builder.CreateReader())
                                            {
                                                try
                                                {
                                                    string identityKey, identityValueString;
                                                    long   identityValue;
                                                    if (reader.TryGet("Key", out identityKey) == false || reader.TryGet("Value", out identityValueString) == false || long.TryParse(identityValueString, out identityValue) == false)
                                                    {
                                                        result.Warnings.Add($"Cannot import the following identity: '{reader}'. Skipping.");
                                                    }
                                                    else
                                                    {
                                                        identities[identityKey] = identityValue;
                                                    }
                                                }
                                                catch (Exception e)
                                                {
                                                    result.Warnings.Add($"Cannot import the following identity: '{reader}'. Error: {e}. Skipping.");
                                                }
                                            }
                                        }
                                        break;

                                    default:
                                        result.Warnings.Add(
                                            $"The following type is not recognized: '{operateOnType}'. Skipping.");
                                        break;
                                    }
                                }
                            }
                            break;

                        case JsonParserToken.StartArray:
                            switch (operateOnType)
                            {
                            case "RevisionDocuments":
                                // We take a reference here since the document import can enable or disable versioning.
                                // We hold a local copy because the user can disable the bundle externally during the import;
                                // in that case we still want to import the revision documents.
                                versioningStorage           = _database.BundleLoader.VersioningStorage;
                                _batchPutCommand.IsRevision = true;
                                break;
                            }
                            break;

                        case JsonParserToken.EndArray:
                            switch (operateOnType)
                            {
                            case "Docs":
                                await FinishBatchOfDocuments();

                                _batchPutCommand = new MergedBatchPutCommand(_database, buildVersion);
                                break;

                            case "RevisionDocuments":
                                await FinishBatchOfDocuments();

                                break;

                            case "Identities":
                                if (identities.Count > 0)
                                {
                                    using (var tx = context.OpenWriteTransaction())
                                    {
                                        _database.DocumentsStorage.UpdateIdentities(context, identities);
                                        tx.Commit();
                                    }
                                }
                                identities = null;
                                break;
                            }
                            break;
                        }
                    }
                }

            return result;
        }
Example #14
        public override void DeleteReduceResult(LazyStringValue reduceKeyHash, IndexingStatsScope stats)
        {
            base.DeleteReduceResult(reduceKeyHash, stats);

            _outputReduceToCollectionCommandBatcher.DeleteReduce(reduceKeyHash);
        }
Example #15
 public override void Delete(LazyStringValue key, IndexingStatsScope stats)
 {
     throw new NotSupportedException("Deleting index entries by id() field isn't supported by map-reduce indexes");
 }
Example #16
        protected unsafe void SetLazyStringValueFromString(DocumentsOperationContext context, out LazyStringValue prop)
        {
            prop = null;
            var size = *(int*)Reader.ReadExactly(sizeof(int));

            if (size < 0)
            {
                return;
            }

            // This is a special (broken) case.
            // On the source side it is stored as a Slice in LSV format, which is wrong(?): unlike a normal LSV,
            // the escape positions are kept before the value itself, so the escaping is not included in the LSV size.
            // Additionally, we over-allocate so that writing this value doesn't cause a failure (we look for the escaping after the value).
            // This also works because we don't pass those values between contexts; if we need to, we convert them to strings first.

            // TODO: this is inefficient, can skip string allocation
            prop = context.GetLazyString(Encoding.UTF8.GetString(Reader.ReadExactly(size), size));
        }
Example #17
 public override int HandleMap(LazyStringValue lowerId, IEnumerable mapResults, IndexWriteOperation writer, TransactionOperationContext indexContext, IndexingStatsScope stats)
 {
     throw new NotSupportedException($"Index {Name} is in-memory implementation of a faulty index", _e);
 }
Example #18
 private string GetPrefixedId(LazyStringValue documentId, string loadCollectionName)
 {
     return($"{documentId}/{_script.IdPrefixForCollection[loadCollectionName]}/");
 }
Example #19
        protected unsafe int PutMapResults(LazyStringValue lowerId, IEnumerable<MapResult> mappedResults, TransactionOperationContext indexContext, IndexingStatsScope stats)
        {
            EnsureValidStats(stats);

            using (Slice.External(indexContext.Allocator, lowerId.Buffer, lowerId.Length, out Slice docIdAsSlice))
            {
                Queue<MapEntry> existingEntries = null;

                using (_stats.GetMapEntriesTree.Start())
                    MapReduceWorkContext.DocumentMapEntries.RepurposeInstance(docIdAsSlice, clone: false);

                if (MapReduceWorkContext.DocumentMapEntries.NumberOfEntries > 0)
                {
                    using (_stats.GetMapEntries.Start())
                        existingEntries = GetMapEntries(MapReduceWorkContext.DocumentMapEntries);
                }

                int resultsCount = 0;

                foreach (var mapResult in mappedResults)
                {
                    using (mapResult.Data)
                    {
                        resultsCount++;

                        var reduceKeyHash = mapResult.ReduceKeyHash;

                        long id = -1;

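                        // try to reuse the storage id of this document's previous map entry;
                        // if the reduce key hash changed, the stale entry is deleted instead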
                        if (existingEntries?.Count > 0)
                        {
                            var existing        = existingEntries.Dequeue();
                            var storeOfExisting = GetResultsStore(existing.ReduceKeyHash, indexContext, false);

                            if (reduceKeyHash == existing.ReduceKeyHash)
                            {
                                using (var existingResult = storeOfExisting.Get(existing.Id))
                                {
                                    if (ResultsBinaryEqual(mapResult.Data, existingResult.Data))
                                    {
                                        continue;
                                    }
                                }

                                id = existing.Id;
                            }
                            else
                            {
                                using (_stats.RemoveResult.Start())
                                {
                                    MapReduceWorkContext.DocumentMapEntries.Delete(existing.Id);
                                    storeOfExisting.Delete(existing.Id);
                                }
                            }
                        }

                        using (_stats.PutResult.Start())
                        {
                            if (id == -1)
                            {
                                id = MapReduceWorkContext.NextMapResultId++;

                                using (Slice.External(indexContext.Allocator, (byte*)&reduceKeyHash, sizeof(ulong), out Slice val))
                                    MapReduceWorkContext.DocumentMapEntries.Add(id, val);
                            }

                            GetResultsStore(reduceKeyHash, indexContext, create: true).Add(id, mapResult.Data);
                        }
                    }
                }

                HandleIndexOutputsPerDocument(lowerId, resultsCount, stats);

                DocumentDatabase.Metrics.MapReduceIndexes.MappedPerSec.Mark(resultsCount);

                while (existingEntries?.Count > 0)
                {
                    // need to remove remaining old entries

                    var oldResult = existingEntries.Dequeue();

                    var oldState = GetResultsStore(oldResult.ReduceKeyHash, indexContext, create: false);

                    using (_stats.RemoveResult.Start())
                    {
                        oldState.Delete(oldResult.Id);
                        MapReduceWorkContext.DocumentMapEntries.Delete(oldResult.Id);
                    }
                }

                return resultsCount;
            }
        }
Example #20
        internal async Task CleanupExpiredDocs()
        {
            var currentTime  = _database.Time.GetUtcNow();
            var currentTicks = currentTime.Ticks;

            try
            {
                if (Logger.IsInfoEnabled)
                {
                    Logger.Info("Trying to find expired documents to delete");
                }

                using (_database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
                {
                    using (var tx = context.OpenReadTransaction())
                    {
                        var expirationTree = tx.InnerTransaction.CreateTree(DocumentsByExpiration);

                        Dictionary<Slice, List<(Slice LowerId, LazyStringValue Id)>> expired;
                        Stopwatch duration;

                        using (var it = expirationTree.Iterate(false))
                        {
                            if (it.Seek(Slices.BeforeAllKeys) == false)
                            {
                                return;
                            }

                            expired  = new Dictionary<Slice, List<(Slice LowerId, LazyStringValue Id)>>();
                            duration = Stopwatch.StartNew();

                            do
                            {
                                var entryTicks = it.CurrentKey.CreateReader().ReadBigEndianInt64();
                                if (entryTicks >= currentTicks)
                                {
                                    break;
                                }

                                var ticksAsSlice = it.CurrentKey.Clone(tx.InnerTransaction.Allocator);

                                var expiredDocs = new List<(Slice LowerId, LazyStringValue Id)>();

                                expired.Add(ticksAsSlice, expiredDocs);

                                using (var multiIt = expirationTree.MultiRead(it.CurrentKey))
                                {
                                    if (multiIt.Seek(Slices.BeforeAllKeys))
                                    {
                                        do
                                        {
                                            if (CancellationToken.IsCancellationRequested)
                                            {
                                                return;
                                            }

                                            var clonedId = multiIt.CurrentKey.Clone(tx.InnerTransaction.Allocator);

                                            try
                                            {
                                                var document = _database.DocumentsStorage.Get(context, clonedId);
                                                if (document == null)
                                                {
                                                    expiredDocs.Add((clonedId, null));
                                                    continue;
                                                }

                                                if (HasExpired(document.Data, currentTime) == false)
                                                {
                                                    continue;
                                                }

                                                expiredDocs.Add((clonedId, document.Id));
                                            }
                                            catch (DocumentConflictException)
                                            {
                                                LazyStringValue id         = null;
                                                var             allExpired = true;
                                                var             conflicts  = _database.DocumentsStorage.ConflictsStorage.GetConflictsFor(context, clonedId);
                                                if (conflicts.Count == 0)
                                                {
                                                    continue;
                                                }

                                                foreach (var conflict in conflicts)
                                                {
                                                    id = conflict.Id;

                                                    if (HasExpired(conflict.Doc, currentTime))
                                                    {
                                                        continue;
                                                    }

                                                    allExpired = false;
                                                    break;
                                                }

                                                if (allExpired)
                                                {
                                                    expiredDocs.Add((clonedId, id));
                                                }
                                            }
                                        } while (multiIt.MoveNext());
                                    }
                                }
                            } while (it.MoveNext());
                        }

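                        // deletions are deferred: the command below runs as part of a single
                        // merged write transaction via the transaction merger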
                        var command = new DeleteExpiredDocumentsCommand(expired, _database, Logger);

                        await _database.TxMerger.Enqueue(command);

                        if (Logger.IsInfoEnabled)
                        {
                            Logger.Info($"Successfully deleted {command.DeletionCount:#,#;;0} documents in {duration.ElapsedMilliseconds:#,#;;0} ms.");
                        }
                    }
                }
            }
            catch (Exception e)
            {
                if (Logger.IsOperationsEnabled)
                {
                    Logger.Operations($"Failed to delete expired documents on {_database.Name} which are older than {currentTime}", e);
                }
            }
        }
Example #21
        protected Field GetOrCreateField(string name, string value, LazyStringValue lazyValue, BlittableJsonReaderObject blittableValue, Field.Store store, Field.Index index, Field.TermVector termVector)
        {
            int cacheKey = FieldCacheKey.GetHashCode(name, index, store, termVector, _multipleItemsSameFieldCount);

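            // cacheKey is only a hash, so IsSame below re-checks the full key
            // to guard against hash collisions before reusing a cached field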
            Field field;
            if (_fieldsCache.TryGetValue(cacheKey, out CachedFieldItem<Field> cached) == false ||
                !cached.Key.IsSame(name, index, store, termVector, _multipleItemsSameFieldCount))
            {
                LazyStringReader stringReader = null;
                BlittableObjectReader blittableReader = null;

                if ((lazyValue != null || blittableValue != null) && store.IsStored() == false && index.IsIndexed() && index.IsAnalyzed())
                {
                    TextReader reader;
                    if (lazyValue != null)
                    {
                        stringReader = new LazyStringReader();
                        reader = stringReader.GetTextReaderFor(lazyValue);
                    }
                    else
                    {
                        blittableReader = Scope.GetBlittableReader();
                        reader = blittableReader.GetTextReaderFor(blittableValue);
                    }

                    field = new Field(name, reader, termVector);
                }
                else
                {
                    if (value == null && lazyValue == null)
                        blittableReader = Scope.GetBlittableReader();

                    field = new Field(name,
                        value ?? LazyStringReader.GetStringFor(lazyValue) ?? blittableReader.GetStringFor(blittableValue),
                        store, index, termVector);
                }

                field.Boost = 1;
                field.OmitNorms = true;

                _fieldsCache[cacheKey] = new CachedFieldItem<Field>
                {
                    Key = new FieldCacheKey(name, index, store, termVector, _multipleItemsSameFieldCount.ToArray()),
                    Field = field,
                    LazyStringReader = stringReader
                };
            }
            else
            {
                BlittableObjectReader blittableReader = null;

                field = cached.Field;
                if (lazyValue != null && cached.LazyStringReader == null)
                    cached.LazyStringReader = new LazyStringReader();
                if (blittableValue != null)
                    blittableReader = Scope.GetBlittableReader();

                if ((lazyValue != null || blittableValue != null) && store.IsStored() == false && index.IsIndexed() && index.IsAnalyzed())
                {
                    field.SetValue(lazyValue != null
                        ? cached.LazyStringReader.GetTextReaderFor(lazyValue)
                        : blittableReader.GetTextReaderFor(blittableValue));
                }
                else
                {
                    field.SetValue(value ?? LazyStringReader.GetStringFor(lazyValue) ?? blittableReader.GetStringFor(blittableValue));
                }
            }

            return field;
        }
Example #22
 private static ulong CalculateSecondaryHash(LazyStringValue key)
 {
     return Hashing.XXHash64.CalculateInline(key.Buffer, (ulong)key.Size, seed: 1337);
 }
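The fixed seed (1337) makes this hash independent of a hash of the same key computed with another seed. A minimal sketch of deriving two independent hashes from one key (the 0 seed is illustrative, not taken from the source):

     ulong primary   = Hashing.XXHash64.CalculateInline(key.Buffer, (ulong)key.Size, seed: 0);
     ulong secondary = CalculateSecondaryHash(key); // seed: 1337 - independent of 'primary'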
Example #23
        protected override int GetFields<T>(T instance, LazyStringValue key, object document, JsonOperationContext indexContext, IWriteOperationBuffer writeBuffer)
        {
            if (!(document is ObjectInstance documentToProcess))
            {
                return 0;
            }

            int newFields = 0;

            if (key != null)
            {
                instance.Add(GetOrCreateKeyField(key));
                newFields++;
            }

            if (_reduceOutput)
            {
                var reduceResult = JsBlittableBridge.Translate(indexContext,
                                                               documentToProcess.Engine,
                                                               documentToProcess);

                instance.Add(GetReduceResultValueField(reduceResult, writeBuffer));
                newFields++;
            }

            foreach (var (property, propertyDescriptor) in documentToProcess.GetOwnProperties())
            {
                if (_fields.TryGetValue(property, out var field) == false)
                {
                    field = _fields[property] = IndexField.Create(property, new IndexFieldOptions(), _allFields);
                }

                object value;
                var    actualValue = propertyDescriptor.Value;
                if (actualValue.IsObject() && actualValue.IsArray() == false)
                {
                    // In case TryDetectDynamicFieldCreation finds a dynamic field it will populate 'field.Name' with the actual property name,
                    // so we must use field.Name and not property from this point on.
                    var val = TryDetectDynamicFieldCreation(property, actualValue.AsObject(), field);
                    if (val != null)
                    {
                        if (val.IsObject() && val.AsObject().TryGetValue("$spatial", out _))
                        {
                            actualValue = val; //Here we populate the dynamic spatial field that will be handled below.
                        }
                        else
                        {
                            value      = TypeConverter.ToBlittableSupportedType(val, flattenArrays: false, forIndexing: true, engine: documentToProcess.Engine, context: indexContext);
                            newFields += GetRegularFields(instance, field, value, indexContext, out _);
                            continue;
                        }
                    }

                    var objectValue = actualValue.AsObject();
                    if (objectValue.HasOwnProperty("$spatial") && objectValue.TryGetValue("$spatial", out var inner))
                    {
                        SpatialField spatialField;
                        IEnumerable<AbstractField> spatial;
                        if (inner.IsString())
                        {
                            spatialField = StaticIndexBase.GetOrCreateSpatialField(field.Name);
                            spatial      = StaticIndexBase.CreateSpatialField(spatialField, inner.AsString());
                        }
                        else if (inner.IsObject())
                        {
                            var innerObject = inner.AsObject();
                            if (innerObject.HasOwnProperty("Lat") && innerObject.HasOwnProperty("Lng") && innerObject.TryGetValue("Lat", out var lat) &&
                                lat.IsNumber() && innerObject.TryGetValue("Lng", out var lng) && lng.IsNumber())
                            {
                                spatialField = StaticIndexBase.GetOrCreateSpatialField(field.Name);
                                spatial      = StaticIndexBase.CreateSpatialField(spatialField, lat.AsNumber(), lng.AsNumber());
                            }
                            else
                            {
                                continue; //Ignoring bad spatial field
                            }
                        }
                        else
                        {
                            continue; //Ignoring bad spatial field
                        }
                        newFields += GetRegularFields(instance, field, spatial, indexContext, out _);

                        continue;
                    }
                }

                value      = TypeConverter.ToBlittableSupportedType(propertyDescriptor.Value, flattenArrays: false, forIndexing: true, engine: documentToProcess.Engine, context: indexContext);
                newFields += GetRegularFields(instance, field, value, indexContext, out _);

                if (value is IDisposable toDispose)
                {
                    // the value was converted to a lucene field and isn't needed anymore
                    toDispose.Dispose();
                }
            }

            return newFields;
        }
Example #24
 public MetadataFieldCache(LazyStringValue id, LazyStringValue key, LazyStringValue collection)
 {
     Id         = id;
     Key        = key;
     Collection = collection;
 }
Example #25
        protected override int GetFields<T>(T instance, LazyStringValue key, object document, JsonOperationContext indexContext)
        {
            int newFields = 0;

            if (key != null)
            {
                instance.Add(GetOrCreateKeyField(key));
                newFields++;
            }

            var boostedValue      = document as BoostedValue;
            var documentToProcess = boostedValue == null ? document : boostedValue.Value;

            IPropertyAccessor accessor;

            if (_isMultiMap == false)
            {
                accessor = _propertyAccessor ?? (_propertyAccessor = PropertyAccessor.Create(documentToProcess.GetType(), documentToProcess));
            }
            else
            {
                accessor = TypeConverter.GetPropertyAccessor(documentToProcess);
            }

            var reduceResult = _reduceOutput ? new DynamicJsonValue() : null;

            foreach (var property in accessor.GetPropertiesInOrder(documentToProcess))
            {
                var value = property.Value;

                IndexField field;

                try
                {
                    field = _fields[property.Key];
                }
                catch (KeyNotFoundException e)
                {
                    throw new InvalidOperationException($"Field '{property.Key}' is not defined. Available fields: {string.Join(", ", _fields.Keys)}.", e);
                }

                var numberOfCreatedFields = GetRegularFields(instance, field, value, indexContext);

                newFields += numberOfCreatedFields;

                if (boostedValue != null)
                {
                    var fields = instance.GetFields();
                    for (int idx = fields.Count - 1; numberOfCreatedFields > 0; numberOfCreatedFields--, idx--)
                    {
                        var luceneField = fields[idx];
                        luceneField.Boost     = boostedValue.Boost;
                        luceneField.OmitNorms = false;
                    }
                }

                if (reduceResult != null && numberOfCreatedFields > 0)
                {
                    reduceResult[property.Key] = TypeConverter.ToBlittableSupportedType(value, flattenArrays: true);
                }
            }

            if (_reduceOutput)
            {
                instance.Add(GetReduceResultValueField(Scope.CreateJson(reduceResult, indexContext)));
                newFields++;
            }

            return newFields;
        }
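The boost handling above relies on GetRegularFields appending the newly created fields at the tail of the document's field list, so walking backwards over exactly numberOfCreatedFields entries stamps the boost onto the right fields. A minimal sketch of the same back-walk against Lucene.Net 3.x types; treating the field list as IList<IFieldable> is an assumption about what instance.GetFields() returns:

using System.Collections.Generic;
using Lucene.Net.Documents;

static class BoostSketch
{
    public static void ApplyBoost(IList<IFieldable> fields, int created, float boost)
    {
        // the `created` most recently added fields sit at the end of the list
        for (int idx = fields.Count - 1; created > 0; created--, idx--)
        {
            var field = fields[idx];
            field.Boost = boost;
            // norms must be kept: index-time boosts are encoded in them,
            // so omitting norms would silently drop the boost
            field.OmitNorms = false;
        }
    }
}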
Example #26
        public override int HandleMap(LazyStringValue lowerId, LazyStringValue id, IEnumerable mapResults, IndexWriteOperation writer, TransactionOperationContext indexContext, IndexingStatsScope stats)
        {
            throw new NotImplementedException();
        }
Example #27
        public static ByteStringContext.ExternalScope External(ByteStringContext context, LazyStringValue value, out Slice slice)
        {
            return External(context, value.Buffer, value.Size, ByteStringType.Mutable | ByteStringType.External, out slice);
        }
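A hedged usage note for this overload: the returned scope must stay alive for as long as the slice is read, because the slice points straight into the LazyStringValue's buffer without copying it. The fragment below is illustrative only; documentId and the DeleteByKey consumer are assumptions, not part of this API:

        using (Slice.External(context, documentId, out Slice idSlice))
        {
            // idSlice is valid only inside this scope; any Slice-consuming
            // API can be used here, e.g. a table lookup or delete
            table.DeleteByKey(idSlice);
        }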
Example #28
        private void HandleNestedValuesReduction(TransactionOperationContext indexContext, IndexingStatsScope stats,
                                                 CancellationToken token, MapReduceResultsStore modifiedStore,
                                                 IndexWriteOperation writer, LazyStringValue reduceKeyHash)
        {
            EnsureValidNestedValuesReductionStats(stats);

            var numberOfEntriesToReduce = 0;

            try
            {
                var section = modifiedStore.GetNestedResultsSection();

                if (section.IsModified == false)
                {
                    return;
                }

                using (_nestedValuesReductionStats.NestedValuesRead.Start())
                {
                    numberOfEntriesToReduce += section.GetResults(indexContext, _aggregationBatch);
                }

                stats.RecordReduceAttempts(numberOfEntriesToReduce);

                AggregationResult result;
                using (_nestedValuesReductionStats.NestedValuesAggregation.Start())
                {
                    result = AggregateOn(_aggregationBatch, indexContext, token);
                }

                if (section.IsNew == false)
                {
                    writer.DeleteReduceResult(reduceKeyHash, stats);
                }

                foreach (var output in result.GetOutputs())
                {
                    writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                }

                _index.ReducesPerSec.Mark(numberOfEntriesToReduce);
                _metrics.MapReduceReducedPerSecond.Mark(numberOfEntriesToReduce);

                stats.RecordReduceSuccesses(numberOfEntriesToReduce);
            }
            catch (Exception e)
            {
                _index.HandleError(e);

                foreach (var item in _aggregationBatch)
                {
                    item.Dispose();
                }

                var message = $"Failed to execute reduce function for reduce key '{reduceKeyHash}' on nested values of '{_indexDefinition.Name}' index.";

                if (_logger.IsInfoEnabled)
                {
                    _logger.Info(message, e);
                }

                stats.RecordReduceErrors(numberOfEntriesToReduce);
                stats.AddReduceError(message + $" Exception: {e}");
            }
            finally
            {
                _aggregationBatch.Clear();
            }
        }
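Note the order of operations above: for an already-existing reduce key the previous reduce result is deleted before the fresh outputs are indexed, so the index never holds a mix of old and new entries for the same key. A minimal, self-contained sketch of that delete-then-reindex pattern; the dictionary-backed writer is a toy stand-in, not RavenDB's actual IndexWriteOperation:

using System.Collections.Generic;

class ReduceWriterSketch
{
    readonly Dictionary<string, List<string>> _indexed = new Dictionary<string, List<string>>();

    public void ReIndex(string reduceKeyHash, IEnumerable<string> newOutputs, bool isNewKey)
    {
        // drop the previous outputs first (DeleteReduceResult equivalent) so
        // stale entries can never coexist with the recomputed ones
        if (isNewKey == false)
            _indexed.Remove(reduceKeyHash);

        // IndexDocument equivalent: store the freshly aggregated outputs
        _indexed[reduceKeyHash] = new List<string>(newOutputs);
    }
}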
Example #29
        private void HandleReductionError(Exception error, LazyStringValue reduceKeyHash, IndexWriteOperation writer, IndexingStatsScope stats, bool updateStats, TreePage page,
                                          int numberOfNestedValues = -1)
        {
            var builder = new StringBuilder("Failed to execute reduce function on ");

            if (page != null)
            {
                builder.Append($"page {page} ");
            }
            else
            {
                builder.Append("nested values ");
            }

            builder.Append($"of '{_indexDefinition.Name}' index. The relevant reduce result is going to be removed from the index ");
            builder.Append($"as it would be incorrect due to encountered errors (reduce key hash: {reduceKeyHash}");

            var sampleItem = _aggregationBatch?.Items?.FirstOrDefault();

            if (sampleItem != null)
            {
                builder.Append($", sample item to reduce: {sampleItem}");
            }

            builder.Append(")");

            var message = builder.ToString();

            if (_logger.IsInfoEnabled)
            {
                _logger.Info(message, error);
            }

            try
            {
                writer.DeleteReduceResult(reduceKeyHash, stats);
            }
            catch (Exception e)
            {
                if (_logger.IsInfoEnabled)
                {
                    _logger.Info($"Failed to delete an index result from '${_indexDefinition.Name}' index on reduce error (reduce key hash: ${reduceKeyHash})", e);
                }
            }

            if (updateStats)
            {
                var numberOfEntries = page?.NumberOfEntries ?? numberOfNestedValues;

                Debug.Assert(numberOfEntries != -1);

                // we only want to record a limited number of these exceptions, enough to tell
                // the user what is going on; keeping every error while processing a big batch
                // would be a perf killer. See: RavenDB-11038

                stats.RecordReduceErrors(numberOfEntries);

                if (stats.NumberOfKeptReduceErrors < IndexStorage.MaxNumberOfKeptErrors)
                {
                    stats.AddReduceError(message + $" Exception: {error}");
                }

                var failureInfo = new IndexFailureInformation
                {
                    Name           = _index.Name,
                    MapErrors      = stats.MapErrors,
                    MapAttempts    = stats.MapAttempts,
                    ReduceErrors   = stats.ReduceErrors,
                    ReduceAttempts = stats.ReduceAttempts
                };

                if (failureInfo.IsInvalidIndex(_isStaleBecauseOfRunningReduction))
                {
                    throw new ExcessiveNumberOfReduceErrorsException("Excessive number of errors during the reduce phase for the current batch. Failure info: " +
                                                                     failureInfo.GetErrorMessage());
                }
            }
        }
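The RavenDB-11038 comment above describes a capping policy: every failed entry is counted in the stats, but only a bounded number of error messages is retained. A self-contained sketch of that policy; the MaxKeptErrors value is an assumption standing in for IndexStorage.MaxNumberOfKeptErrors:

using System.Collections.Generic;

class ReduceErrorStatsSketch
{
    const int MaxKeptErrors = 128; // assumed limit; the real constant is IndexStorage.MaxNumberOfKeptErrors

    public long ReduceErrors { get; private set; }
    public List<string> KeptErrors { get; } = new List<string>();

    // every failed entry is counted, no matter how large the batch is
    public void RecordReduceErrors(long count) => ReduceErrors += count;

    // but only the first MaxKeptErrors messages are kept, so a huge failing
    // batch cannot flood the stats with identical exceptions
    public void AddReduceError(string message)
    {
        if (KeptErrors.Count < MaxKeptErrors)
            KeptErrors.Add(message);
    }
}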
Example #30
        private void HandleTreeReduction(TransactionOperationContext indexContext, IndexingStatsScope stats,
                                         CancellationToken token, MapReduceResultsStore modifiedStore, LowLevelTransaction lowLevelTransaction,
                                         IndexWriteOperation writer, LazyStringValue reduceKeyHash, Table table)
        {
            EnsureValidTreeReductionStats(stats);

            var tree = modifiedStore.Tree;

            var branchesToAggregate = new HashSet<long>();

            var parentPagesToAggregate = new HashSet<long>();

            var page = new TreePage(null, Constants.Storage.PageSize);

            foreach (var modifiedPage in modifiedStore.ModifiedPages)
            {
                token.ThrowIfCancellationRequested();

                page.Base = lowLevelTransaction.GetPage(modifiedPage).Pointer;

                stats.RecordReduceTreePageModified(page.IsLeaf);

                if (page.IsLeaf == false)
                {
                    Debug.Assert(page.IsBranch);
                    branchesToAggregate.Add(modifiedPage);

                    continue;
                }

                var leafPage = page;

                var compressed = leafPage.IsCompressed;

                if (compressed)
                {
                    stats.RecordCompressedLeafPage();
                }

                using (compressed ? (DecompressedLeafPage)(leafPage = tree.DecompressPage(leafPage, skipCache: true)) : null)
                {
                    if (leafPage.NumberOfEntries == 0)
                    {
                        if (leafPage.PageNumber != tree.State.RootPageNumber)
                        {
                            throw new InvalidOperationException(
                                      $"Encountered empty page which isn't a root. Page #{leafPage.PageNumber} in '{tree.Name}' tree.");
                        }

                        writer.DeleteReduceResult(reduceKeyHash, stats);

                        var emptyPageNumber = Bits.SwapBytes(leafPage.PageNumber);
                        using (Slice.External(indexContext.Allocator, (byte*)&emptyPageNumber, sizeof(long), out Slice pageNumSlice))
                            table.DeleteByKey(pageNumSlice);

                        continue;
                    }

                    var parentPage = tree.GetParentPageOf(leafPage);

                    stats.RecordReduceAttempts(leafPage.NumberOfEntries);

                    try
                    {
                        using (var result = AggregateLeafPage(leafPage, lowLevelTransaction, indexContext, token))
                        {
                            if (parentPage == -1)
                            {
                                writer.DeleteReduceResult(reduceKeyHash, stats);

                                foreach (var output in result.GetOutputs())
                                {
                                    writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                                }
                            }
                            else
                            {
                                StoreAggregationResult(leafPage.PageNumber, leafPage.NumberOfEntries, table, result);
                                parentPagesToAggregate.Add(parentPage);
                            }

                            _metrics.MapReduceReducedPerSecond.Mark(leafPage.NumberOfEntries);

                            stats.RecordReduceSuccesses(leafPage.NumberOfEntries);
                        }
                    }
                    catch (Exception e)
                    {
                        _index.HandleError(e);

                        var message =
                            $"Failed to execute reduce function for reduce key '{tree.Name}' on a leaf page #{leafPage} of '{_indexDefinition.Name}' index.";

                        if (_logger.IsInfoEnabled)
                        {
                            _logger.Info(message, e);
                        }

                        if (parentPage == -1)
                        {
                            stats.RecordReduceErrors(leafPage.NumberOfEntries);
                            stats.AddReduceError(message + $" Exception: {e}");
                        }
                    }
                }
            }

            long tmp = 0;

            using (Slice.External(indexContext.Allocator, (byte*)&tmp, sizeof(long), out Slice pageNumberSlice))
            {
                foreach (var freedPage in modifiedStore.FreedPages)
                {
                    tmp = Bits.SwapBytes(freedPage);
                    table.DeleteByKey(pageNumberSlice);
                }
            }

            while (parentPagesToAggregate.Count > 0 || branchesToAggregate.Count > 0)
            {
                token.ThrowIfCancellationRequested();

                var branchPages = parentPagesToAggregate;
                parentPagesToAggregate = new HashSet<long>();

                foreach (var pageNumber in branchPages)
                {
                    page.Base = lowLevelTransaction.GetPage(pageNumber).Pointer;

                    try
                    {
                        if (page.IsBranch == false)
                        {
                            throw new InvalidOperationException("Parent page was found that wasn't a branch, error at " +
                                                                page.PageNumber);
                        }

                        stats.RecordReduceAttempts(page.NumberOfEntries);

                        var parentPage = tree.GetParentPageOf(page);

                        using (var result = AggregateBranchPage(page, table, indexContext, branchesToAggregate, token))
                        {
                            if (parentPage == -1)
                            {
                                writer.DeleteReduceResult(reduceKeyHash, stats);

                                foreach (var output in result.GetOutputs())
                                {
                                    writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                                }
                            }
                            else
                            {
                                parentPagesToAggregate.Add(parentPage);

                                StoreAggregationResult(page.PageNumber, page.NumberOfEntries, table, result);
                            }

                            _metrics.MapReduceReducedPerSecond.Mark(page.NumberOfEntries);

                            stats.RecordReduceSuccesses(page.NumberOfEntries);
                        }
                    }
                    catch (Exception e)
                    {
                        _index.HandleError(e);

                        var message =
                            $"Failed to execute reduce function for reduce key '{tree.Name}' on a branch page #{page} of '{_indexDefinition.Name}' index.";

                        if (_logger.IsInfoEnabled)
                        {
                            _logger.Info(message, e);
                        }

                        stats.RecordReduceErrors(page.NumberOfEntries);
                        stats.AddReduceError(message + $" Exception: {e}");
                    }
                    finally
                    {
                        branchesToAggregate.Remove(pageNumber);
                    }
                }

                if (parentPagesToAggregate.Count == 0 && branchesToAggregate.Count > 0)
                {
                    // we still have unaggregated branches that were modified while their children
                    // were not (e.g. due to a branch page split), so the loop above missed them
                    parentPagesToAggregate.Add(branchesToAggregate.First());
                }
            }
        }
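Taken together, the loops above perform a bottom-up re-aggregation of the reduce tree: modified leaves are aggregated first, their parents are queued, and the climb repeats level by level until the root's result is re-indexed. A toy, self-contained sketch of that climb; the parent map stands in for Voron's B+ tree pages, and a single callback replaces both AggregateLeafPage and AggregateBranchPage:

using System;
using System.Collections.Generic;

class TreeReductionSketch
{
    // child page -> parent page; the root has no entry, so its parent is -1
    readonly Dictionary<long, long> _parents;

    public TreeReductionSketch(Dictionary<long, long> parents) => _parents = parents;

    long GetParentPageOf(long page) => _parents.TryGetValue(page, out var p) ? p : -1;

    public void Reduce(IEnumerable<long> modifiedLeaves, Action<long> aggregatePage, Action indexRoot)
    {
        var parentPagesToAggregate = new HashSet<long>();

        // first pass: aggregate every modified leaf and queue its parent
        foreach (var leaf in modifiedLeaves)
        {
            aggregatePage(leaf);
            var parent = GetParentPageOf(leaf);
            if (parent == -1)
                indexRoot(); // the leaf is the root: write the final result
            else
                parentPagesToAggregate.Add(parent);
        }

        // climb: aggregate queued branches and queue their parents in turn
        while (parentPagesToAggregate.Count > 0)
        {
            var branchPages = parentPagesToAggregate;
            parentPagesToAggregate = new HashSet<long>();

            foreach (var branch in branchPages)
            {
                aggregatePage(branch);
                var parent = GetParentPageOf(branch);
                if (parent == -1)
                    indexRoot(); // reached the root: write the final result
                else
                    parentPagesToAggregate.Add(parent);
            }
        }
    }
}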