Beispiel #1
0
            public IndexingPerformanceStats ExecuteReduction()
            {
                var count                           = 0;
                var sourceCount                     = 0;
                var addDocumentDuration             = new Stopwatch();
                var convertToLuceneDocumentDuration = new Stopwatch();
                var linqExecutionDuration           = new Stopwatch();
                var deleteExistingDocumentsDuration = new Stopwatch();
                var writeToIndexStats               = new List <PerformanceStats>();

                IndexingPerformanceStats performance = null;

                parent.Write((indexWriter, analyzer, stats) =>
                {
                    stats.Operation = IndexingWorkStats.Status.Reduce;

                    try
                    {
                        if (Level == 2)
                        {
                            RemoveExistingReduceKeysFromIndex(indexWriter, deleteExistingDocumentsDuration);
                        }

                        foreach (var mappedResults in MappedResultsByBucket)
                        {
                            var input = mappedResults.Select(x =>
                            {
                                sourceCount++;
                                return(x);
                            });

                            IndexingFunc reduceDefinition = ViewGenerator.ReduceDefinition;
                            foreach (var doc in parent.RobustEnumerationReduce(input.GetEnumerator(), reduceDefinition, stats, linqExecutionDuration))
                            {
                                count++;

                                switch (Level)
                                {
                                case 0:
                                case 1:
                                    string reduceKeyAsString = ExtractReduceKey(ViewGenerator, doc);
                                    Actions.MapReduce.PutReducedResult(indexId, reduceKeyAsString, Level + 1, mappedResults.Key, mappedResults.Key / 1024, ToJsonDocument(doc));
                                    Actions.General.MaybePulseTransaction();
                                    break;

                                case 2:
                                    WriteDocumentToIndex(doc, indexWriter, analyzer, convertToLuceneDocumentDuration, addDocumentDuration);
                                    break;

                                default:
                                    throw new InvalidOperationException("Unknown level: " + Level);
                                }

                                stats.ReduceSuccesses++;
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        if (Level == 2)
                        {
                            batchers.ApplyAndIgnoreAllErrors(
                                ex =>
                            {
                                logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
                                Context.AddError(indexId, parent.indexDefinition.Name, null, ex, "AnErrorOccured Trigger");
                            },
                                x => x.AnErrorOccured(e));
                        }
                        throw;
                    }
                    finally
                    {
                        if (Level == 2)
                        {
                            batchers.ApplyAndIgnoreAllErrors(
                                e =>
                            {
                                logIndexing.WarnException("Failed to dispose on index update trigger", e);
                                Context.AddError(indexId, parent.indexDefinition.Name, null, e, "Dispose Trigger");
                            },
                                x => x.Dispose());
                        }

                        // TODO: Check if we need to report "Bucket Counts" or "Total Input Elements"?
                        performance = parent.RecordCurrentBatch("Current Reduce #" + Level, "Reduce Level " + Level, sourceCount);
                    }

                    return(new IndexedItemsInfo(null)
                    {
                        ChangedDocs = count + ReduceKeys.Count
                    });
                }, writeToIndexStats);

                var performanceStats = new List <BasePerformanceStats>();

                performanceStats.Add(PerformanceStats.From(IndexingOperation.Linq_ReduceLinqExecution, linqExecutionDuration.ElapsedMilliseconds));
                performanceStats.Add(PerformanceStats.From(IndexingOperation.Lucene_DeleteExistingDocument, deleteExistingDocumentsDuration.ElapsedMilliseconds));
                performanceStats.Add(PerformanceStats.From(IndexingOperation.Lucene_ConvertToLuceneDocument, convertToLuceneDocumentDuration.ElapsedMilliseconds));
                performanceStats.Add(PerformanceStats.From(IndexingOperation.Lucene_AddDocument, addDocumentDuration.ElapsedMilliseconds));
                performanceStats.AddRange(writeToIndexStats);

                parent.BatchCompleted("Current Reduce #" + Level, "Reduce Level " + Level, sourceCount, count, performanceStats);

                logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, indexId, string.Join(", ", ReduceKeys)));

                return(performance);
            }
Beispiel #2
0
            public void ExecuteReduction()
            {
                var count       = 0;
                var sourceCount = 0;
                var sw          = Stopwatch.StartNew();
                var start       = SystemTime.UtcNow;

                parent.Write((indexWriter, analyzer, stats) =>
                {
                    stats.Operation = IndexingWorkStats.Status.Reduce;
                    try
                    {
                        parent.RecordCurrentBatch("Current Reduce #" + Level, MappedResultsByBucket.Sum(x => x.Count()));
                        if (Level == 2)
                        {
                            RemoveExistingReduceKeysFromIndex(indexWriter);
                        }
                        foreach (var mappedResults in MappedResultsByBucket)
                        {
                            var input = mappedResults.Select(x =>
                            {
                                sourceCount++;
                                return(x);
                            });
                            foreach (var doc in parent.RobustEnumerationReduce(input.GetEnumerator(), ViewGenerator.ReduceDefinition, Actions, stats))
                            {
                                count++;
                                string reduceKeyAsString = ExtractReduceKey(ViewGenerator, doc);

                                switch (Level)
                                {
                                case 0:
                                case 1:
                                    Actions.MapReduce.PutReducedResult(indexId, reduceKeyAsString, Level + 1, mappedResults.Key, mappedResults.Key / 1024, ToJsonDocument(doc));
                                    Actions.General.MaybePulseTransaction();
                                    break;

                                case 2:
                                    WriteDocumentToIndex(doc, indexWriter, analyzer);
                                    break;

                                default:
                                    throw new InvalidOperationException("Unknown level: " + Level);
                                }
                                stats.ReduceSuccesses++;
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        if (Level == 2)
                        {
                            batchers.ApplyAndIgnoreAllErrors(
                                ex =>
                            {
                                logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
                                Context.AddError(indexId, parent.indexDefinition.Name, null, ex.Message, "AnErrorOccured Trigger");
                            },
                                x => x.AnErrorOccured(e));
                        }
                        throw;
                    }
                    finally
                    {
                        if (Level == 2)
                        {
                            batchers.ApplyAndIgnoreAllErrors(
                                e =>
                            {
                                logIndexing.WarnException("Failed to dispose on index update trigger", e);
                                Context.AddError(indexId, parent.indexDefinition.Name, null, e.Message, "Dispose Trigger");
                            },
                                x => x.Dispose());
                        }
                        parent.BatchCompleted("Current Reduce #" + Level);
                    }

                    return(new IndexedItemsInfo(null)
                    {
                        ChangedDocs = count + ReduceKeys.Count
                    });
                });
                parent.AddindexingPerformanceStat(new IndexingPerformanceStats
                {
                    OutputCount = count,
                    ItemsCount  = sourceCount,
                    InputCount  = inputCount,
                    Duration    = sw.Elapsed,
                    Operation   = "Reduce Level " + Level,
                    Started     = start
                });
                logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, indexId, string.Join(", ", ReduceKeys)));
            }