AddError() public method

Signature: public void AddError(string index, string key, string error)
Parameters:
  index (string) — the name or id of the index reporting the error
  key (string) — the document key the error relates to (may be null)
  error (string) — the error message to record
Returns: void
Esempio n. 1
0
        /// <summary>
        /// Deletes the documents identified by <paramref name="keys"/> from this index,
        /// notifying index-update triggers before the Lucene delete and disposing the
        /// trigger batchers afterwards.
        /// </summary>
        /// <param name="keys">Document keys to remove from the index.</param>
        /// <param name="context">Work context used for trigger access and error reporting.</param>
        public override void Remove(string[] keys, WorkContext context)
        {
            Write((writer, analyzer, stats) =>
            {
                // deletions are not counted as indexing work in the stats
                stats.Operation = IndexingWorkStats.Status.Ignore;
                logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), name));
                // one batcher per registered trigger; triggers may opt out by returning null
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                               .Where(x => x != null)
                               .ToList();

                // notify triggers of each deletion; trigger failures are logged and
                // recorded as index errors but do not abort the delete
                keys.Apply(
                    key => batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                {
                    logIndexing.WarnException(
                        string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                      name, key),
                        exception);
                    context.AddError(name, key, exception.Message);
                },
                        trigger => trigger.OnIndexEntryDeleted(key)));
                // keys are lowercased to match how document ids are stored in the index
                writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    batcher => batcher.Dispose());
                return(keys.Length);
            });
        }
Esempio n. 2
0
        /// <summary>
        /// Deletes the documents identified by <paramref name="keys"/> from this index,
        /// notifying index-update triggers before the Lucene delete and disposing the
        /// trigger batchers afterwards.
        /// </summary>
        /// <param name="keys">Document keys to remove from the index.</param>
        /// <param name="context">Work context used for trigger access and error reporting.</param>
        public override void Remove(string[] keys, WorkContext context)
        {
            Write(context, (writer, analyzer) =>
            {
                if (logIndexing.IsDebugEnabled)
                {
                    // BUGFIX: was string.Format(", ", keys), which treats ", " as the
                    // format string, ignores the keys entirely and logs a literal ", ".
                    // string.Join is what is intended (and what the sibling variants use).
                    logIndexing.DebugFormat("Deleting ({0}) from {1}", string.Join(", ", keys), name);
                }
                // one batcher per registered trigger; triggers may opt out by returning null
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                               .Where(x => x != null)
                               .ToList();

                // notify triggers of each deletion; trigger failures are logged and
                // recorded as index errors but do not abort the delete
                keys.Apply(
                    key => batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                {
                    logIndexing.WarnFormat(exception,
                                           "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                           name, key);
                    context.AddError(name, key, exception.Message);
                },
                        trigger => trigger.OnIndexEntryDeleted(name, key)));
                writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k)).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.Warn("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    batcher => batcher.Dispose());
                return(true);
            });
        }
Esempio n. 3
0
 /// <summary>
 /// Runs <paramref name="action"/> against the index writer under the write lock,
 /// creating the writer and a per-batch analyzer on demand, and recreates the
 /// searcher afterwards when the action reports changes.
 /// </summary>
 /// <param name="context">Work context used to record indexing errors.</param>
 /// <param name="action">Receives the writer and analyzer; returns true when the searcher must be recreated.</param>
 /// <exception cref="ObjectDisposedException">Thrown when the index has already been disposed.</exception>
 protected void Write(WorkContext context, Func <IndexWriter, Analyzer, bool> action)
 {
     if (disposed)
     {
         throw new ObjectDisposedException("Index " + name + " has been disposed");
     }
     // only one writer may mutate the index at a time
     lock (writeLock)
     {
         bool     shouldRecreateSearcher;
         var      toDispose = new List <Action>();
         Analyzer analyzer  = null;
         try
         {
             try
             {
                 analyzer = CreateAnalyzer(new LowerCaseAnalyzer(), toDispose);
             }
             catch (Exception e)
             {
                 // analyzer-creation failures are recorded under a distinct key so they
                 // are distinguishable from indexing failures; the caller still sees them
                 context.AddError(name, "Creating Analyzer", e.ToString());
                 throw;
             }
             if (indexWriter == null)
             {
                 indexWriter = new IndexWriter(directory, new StopAnalyzer(Version.LUCENE_29), IndexWriter.MaxFieldLength.UNLIMITED);
             }
             try
             {
                 shouldRecreateSearcher = action(indexWriter, analyzer);
                 // let index extensions observe the batch that was just indexed
                 foreach (var indexExtension in indexExtensions.Values)
                 {
                     indexExtension.OnDocumentsIndexed(currentlyIndexDocumented);
                 }
             }
             catch (Exception e)
             {
                 context.AddError(name, null, e.ToString());
                 throw;
             }
         }
         finally
         {
             // always clear the batch buffer and release analyzer resources,
             // even when the action threw
             currentlyIndexDocumented.Clear();
             if (analyzer != null)
             {
                 analyzer.Close();
             }
             foreach (var dispose in toDispose)
             {
                 dispose();
             }
         }
         if (shouldRecreateSearcher)
         {
             RecreateSearcher();
         }
     }
 }
Esempio n. 4
0
        /// <summary>
        /// Deletes the documents identified by <paramref name="keys"/> from this index,
        /// notifying index-update triggers and returning the current index stats so the
        /// caller can persist deletion progress.
        /// </summary>
        public override void Remove(string[] keys, WorkContext context)
        {
            Write((writer, analyzer, stats) =>
            {
                // deletions are not counted as indexing work in the stats
                stats.Operation = IndexingWorkStats.Status.Ignore;
                logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), indexId));
                // one batcher per registered trigger; triggers may opt out by returning null
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
                               .Where(x => x != null)
                               .ToList();

                // NOTE(review): the delete below lowercases the key but the trigger
                // notification term here does not — confirm the asymmetry is intentional
                keys.Apply(
                    key =>
                    InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.DocumentIdFieldName, key)));

                writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger", e);
                    context.AddError(indexId, null, e.Message, "Dispose Trigger");
                },
                    batcher => batcher.Dispose());

                // fetch the index's current stats so the highest indexed etag can be reported
                IndexStats currentIndexStats = null;
                context.TransactionalStorage.Batch(accessor => currentIndexStats = accessor.Indexing.GetIndexStats(indexId));

                return(new IndexedItemsInfo
                {
                    ChangedDocs = keys.Length,
                    HighestETag = currentIndexStats.LastIndexedEtag,
                    DeletedKeys = keys
                });
            });
        }
Esempio n. 5
0
        /// <summary>
        /// Deletes the documents identified by <paramref name="keys"/> from this index,
        /// notifying index-update triggers before the Lucene delete and disposing the
        /// trigger batchers afterwards.
        /// </summary>
        public override void Remove(string[] keys, WorkContext context)
        {
            Write((writer, analyzer, stats) =>
            {
                // deletions are not counted as indexing work in the stats
                stats.Operation = IndexingWorkStats.Status.Ignore;
                if (logIndexing.IsDebugEnabled)
                {
                    logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), PublicName));
                }

                // one batcher per registered trigger; triggers may opt out by returning null
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
                               .Where(x => x != null)
                               .ToList();

                // keys are lowercased to match how document ids are stored in the index
                keys.Apply(
                    key =>
                    InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.DocumentIdFieldName, key.ToLowerInvariant())));

                writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger in " + PublicName, e);
                    context.AddError(indexId, PublicName, null, e, "Dispose Trigger");
                },
                    batcher => batcher.Dispose());

                return(new IndexedItemsInfo(GetLastEtagFromStats())
                {
                    ChangedDocs = keys.Length,
                    DeletedKeys = keys
                });
            });
        }
Esempio n. 6
0
 /// <summary>
 /// Enumerates <paramref name="input"/> through a <see cref="RobustEnumerator"/> for the
 /// reduce phase: reduce attempts are tracked through the storage accessor, and a failing
 /// item is logged and recorded as an index error rather than aborting the whole batch.
 /// </summary>
 /// <param name="input">Items to feed to the reduce function.</param>
 /// <param name="func">The reduce function to apply.</param>
 /// <param name="actions">Storage accessor used to track attempt/failure counters.</param>
 /// <param name="context">Work context used to record index errors.</param>
 protected IEnumerable <object> RobustEnumerationReduce(IEnumerable <object> input, IndexingFunc func,
                                                        IStorageActionsAccessor actions, WorkContext context)
 {
     // The error budget below is not strictly accurate, but hitting that many
     // failures almost certainly indicates a broken batch anyway.
     var robustEnumerator = new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
     {
         BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
         CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
         OnError = (error, item) =>
         {
             context.AddError(name,
                              TryGetDocKey(item),
                              error.Message
                              );
             logIndexing.WarnException(
                 String.Format("Failed to execute indexing function on {0} on {1}", name,
                               TryGetDocKey(item)),
                 error);
             try
             {
                 actions.Indexing.IncrementReduceIndexingFailure();
             }
             catch (Exception e)
             {
                 // an error while recording an error; log it and move on
                 logIndexing.WarnException(
                     String.Format("Could not increment indexing failure rate for {0}", name),
                     e);
             }
         }
     };

     return robustEnumerator.RobustEnumeration(input, func);
 }
Esempio n. 7
0
        // we don't use the usual GroupBy, because that isn't streaming
        // we rely on the fact that all values from the same docs are always outputted at
        // the same time, so we can take advantage of this fact
        /// <summary>
        /// Streams map results grouped by their source document id without buffering the
        /// whole sequence. Each yielded <c>Grouping</c> shares the single underlying
        /// enumerator, so a group must be fully consumed before the next one is requested.
        /// </summary>
        private IEnumerable <IGrouping <object, dynamic> > GroupByDocumentId(WorkContext context, IEnumerable <object> docs)
        {
            var enumerator = docs.GetEnumerator();

            if (enumerator.MoveNext() == false)
            {
                yield break;
            }

            while (true)
            {
                object documentId;
                try
                {
                    documentId = GetDocumentId(enumerator.Current);
                }
                catch (Exception e)
                {
                    // a doc whose id cannot be extracted is recorded as an index error
                    // and skipped; advance past it, or stop if it was the last item
                    context.AddError(name, null, e.Message);
                    if (enumerator.MoveNext() == false)
                    {
                        yield break;
                    }
                    continue;
                }
                var groupByDocumentId = new Grouping(documentId, enumerator);
                yield return(groupByDocumentId);

                // Done is set by Grouping once the shared enumerator is exhausted
                if (groupByDocumentId.Done)
                {
                    break;
                }
            }
        }
Esempio n. 8
0
 /// <summary>
 /// Advances the document enumerator while maintaining indexing attempt/failure
 /// counters. Returns MoveNext's result on success, or null when advancing threw
 /// (the error having been logged and recorded on the context).
 /// </summary>
 private bool?MoveNext(IEnumerator en, StatefulEnumerableWrapper <object> innerEnumerator, WorkContext context,
                       IStorageActionsAccessor actions)
 {
     try
     {
         actions.Indexing.IncrementIndexingAttempt();
         bool hasMore = en.MoveNext();
         if (!hasMore)
         {
             // the attempt produced no item, so undo the attempt count
             actions.Indexing.DecrementIndexingAttempt();
         }
         return hasMore;
     }
     catch (Exception e)
     {
         actions.Indexing.IncrementIndexingFailure();
         context.AddError(name, TryGetDocKey(innerEnumerator.Current), e.Message);
         log.WarnFormat(e, "Failed to execute indexing function on {0} on {1}", name,
                        GetDocId(innerEnumerator));
         return null;
     }
 }
Esempio n. 9
0
 /// <summary>
 /// Wraps <paramref name="input"/> in a <see cref="RobustEnumerator"/> for the reduce
 /// phase: reduce attempts are tracked via the storage accessor and per-item failures
 /// are logged and recorded as index errors instead of aborting the batch.
 /// </summary>
 protected IEnumerable <object> RobustEnumerationReduce(IEnumerable <object> input, IndexingFunc func, IStorageActionsAccessor actions, WorkContext context)
 {
     return(new RobustEnumerator
     {
         BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
         CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
         OnError = (exception, o) =>
         {
             // record the failure against the doc key when one can be extracted
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                              );
             logIndexing.WarnFormat(exception, "Failed to execute indexing function on {0} on {1}", name,
                                    TryGetDocKey(o));
             try
             {
                 actions.Indexing.IncrementReduceIndexingFailure();
             }
             catch (Exception e)
             {
                 // we don't care about error here, because it is an error on error problem
                 logIndexing.WarnFormat(e, "Could not increment indexing failure rate for {0}", name);
             }
         }
     }.RobustEnumeration(input, func));
 }
Esempio n. 10
0
        /// <summary>
        /// Removes documents from a map-reduce index: first deletes the stored map
        /// results and schedules the affected reduce keys for re-reduction, then
        /// deletes the corresponding reduce-key entries from the Lucene index.
        /// </summary>
        public override void Remove(string[] keys, WorkContext context)
        {
            context.TransactionalStorage.Batch(actions =>
            {
                // collect every (reduce key, bucket) touched by the removed documents
                var reduceKeyAndBuckets = new Dictionary <ReduceKeyAndBucket, int>();
                foreach (var key in keys)
                {
                    context.CancellationToken.ThrowIfCancellationRequested();
                    actions.MapReduce.DeleteMappedResultsForDocumentId(key, indexId, reduceKeyAndBuckets);
                }

                actions.MapReduce.UpdateRemovedMapReduceStats(indexId, reduceKeyAndBuckets, context.CancellationToken);

                // schedule re-reduction of each affected reduce key
                // (0 is presumably the base reduce level — confirm against ScheduleReductions)
                foreach (var reduceKeyAndBucket in reduceKeyAndBuckets)
                {
                    context.CancellationToken.ThrowIfCancellationRequested();
                    actions.MapReduce.ScheduleReductions(indexId, 0, reduceKeyAndBucket.Key);
                }
            });

            Write((writer, analyzer, stats) =>
            {
                // deletions are not counted as indexing work in the stats
                stats.Operation = IndexingWorkStats.Status.Ignore;
                if (logIndexing.IsDebugEnabled)
                {
                    logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), PublicName));
                }

                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
                               .Where(x => x != null)
                               .ToList();

                // for a map-reduce index the Lucene entries are keyed by reduce key,
                // not by document id
                keys.Apply(
                    key =>
                    InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.ReduceKeyFieldName, key.ToLowerInvariant())));

                writer.DeleteDocuments(keys.Select(k => new Term(Constants.ReduceKeyFieldName, k.ToLowerInvariant())).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger in " + PublicName, e);
                    context.AddError(indexId, PublicName, null, e, "Dispose Trigger");
                },
                    batcher => batcher.Dispose());

                return(new IndexedItemsInfo(null)
                {
                    ChangedDocs = keys.Length
                });
            });
        }
Esempio n. 11
0
 // we don't care about tracking map/reduce stats here, since it is merely
 // an optimization step
 /// <summary>
 /// Robust enumeration for running the reduce function during the map phase:
 /// per-item failures are logged and recorded as index errors, but no
 /// attempt/failure counters are updated.
 /// </summary>
 protected IEnumerable <object> RobustEnumerationReduceDuringMapPhase(IEnumerable <object> input, IndexingFunc func,
                                                                      IStorageActionsAccessor actions, WorkContext context)
 {
     // not strictly accurate, but if we get that many errors, probably an error anyway.
     return(new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
     {
         BeforeMoveNext = () => { },                 // don't care
         CancelMoveNext = () => { },                 // don't care
         OnError = (exception, o) =>
         {
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                              );
             logIndexing.WarnException(
                 String.Format("Failed to execute indexing function on {0} on {1}", name,
                               TryGetDocKey(o)),
                 exception);
         }
     }.RobustEnumeration(input, func));
 }
Esempio n. 12
0
        /// <summary>
        /// Enumerates <paramref name="input"/> through a <see cref="RobustEnumerator"/> for
        /// the map/index phase: attempts and errors are tallied on <paramref name="stats"/>,
        /// and a failing item is logged and recorded as an index error rather than
        /// aborting the whole batch.
        /// </summary>
        /// <param name="input">Items to feed to the indexing functions.</param>
        /// <param name="funcs">The indexing (map) functions to apply.</param>
        /// <param name="actions">Storage accessor for the current batch.</param>
        /// <param name="context">Work context used to record index errors.</param>
        /// <param name="stats">Receives attempt and error counts for this batch.</param>
        protected IEnumerable <object> RobustEnumerationIndex(IEnumerable <object> input, IEnumerable <IndexingFunc> funcs,
                                                              IStorageActionsAccessor actions, WorkContext context, IndexingWorkStats stats)
        {
            var robustEnumerator = new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
            {
                BeforeMoveNext = () => stats.IndexingAttempts++,
                CancelMoveNext = () => stats.IndexingAttempts--,
                OnError = (error, doc) =>
                {
                    context.AddError(name,
                                     TryGetDocKey(doc),
                                     error.Message
                                     );
                    logIndexing.WarnException(
                        String.Format("Failed to execute indexing function on {0} on {1}", name,
                                      TryGetDocKey(doc)),
                        error);

                    stats.IndexingErrors++;
                }
            };

            return robustEnumerator.RobustEnumeration(input, funcs);
        }
Esempio n. 13
0
		/// <summary>
		/// Runs <paramref name="action"/> against the index writer under the write lock,
		/// creating the writer and a per-batch analyzer on demand, and recreates the
		/// searcher afterwards when the action reports changes.
		/// </summary>
		/// <param name="context">Work context used to record indexing errors.</param>
		/// <param name="action">Receives the writer and analyzer; returns true when the searcher must be recreated.</param>
		/// <exception cref="ObjectDisposedException">Thrown when the index has already been disposed.</exception>
		protected void Write(WorkContext context, Func<IndexWriter, Analyzer, bool> action)
		{
			if (disposed)
				throw new ObjectDisposedException("Index " + name + " has been disposed");
			// only one writer may mutate the index at a time
			lock (writeLock)
			{
				bool shouldRecreateSearcher;
				var toDispose = new List<Action>();
				Analyzer analyzer = null;
				try
				{
					try
					{
						analyzer = CreateAnalyzer(new LowerCaseAnalyzer(), toDispose);
					}
					catch (Exception e)
					{
						// analyzer-creation failures are recorded under a distinct key
						context.AddError(name, "Creating Analyzer", e.ToString());
						throw;
					}
					if (indexWriter == null)
						indexWriter = new IndexWriter(directory, new StopAnalyzer(Version.LUCENE_29), IndexWriter.MaxFieldLength.UNLIMITED);
					try
					{
						shouldRecreateSearcher = action(indexWriter, analyzer);
						// let index extensions observe the batch that was just indexed
						foreach (IIndexExtension indexExtension in indexExtensions.Values)
						{
							indexExtension.OnDocumentsIndexed(currentlyIndexDocumented);
						}
					}
					catch (Exception e)
					{
						context.AddError(name, null, e.ToString());
						throw;
					}
				}
				finally
				{
					// always clear the batch buffer and release analyzer resources,
					// even when the action threw
					currentlyIndexDocumented.Clear();
					if (analyzer != null)
						analyzer.Close();
					foreach (Action dispose in toDispose)
					{
						dispose();
					}
				}
				if (shouldRecreateSearcher)
					RecreateSearcher();
			}
		}
Esempio n. 14
0
    	/// <summary>
    	/// Indexes the given documents: as each document is enumerated its previous index
    	/// entry is deleted (with trigger notification), then the map results are turned
    	/// into Lucene documents and added to the index.
    	/// </summary>
    	public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;
            Write(context, (indexWriter, analyzer) =>
            {
                bool madeChanges = false;
                PropertyDescriptorCollection properties = null;
                // tracks document ids already seen in this batch so duplicates are handled once
                var processedKeys = new HashSet<string>();
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                    .Where(x => x != null)
                    .ToList();
                // lazily wraps the input so that, as each doc is enumerated, its previous
                // index entry is deleted (with trigger notification) before re-indexing
                var documentsWrapped = documents.Select((dynamic doc) =>
                {
					if(doc.__document_id == null)
						throw new ArgumentException("Cannot index something which doesn't have a document id, but got: " + doc);

                    string documentId = doc.__document_id.ToString();
                    if (processedKeys.Add(documentId) == false)
                        return doc;
                    madeChanges = true;
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnException(
								string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                                   name, documentId),
								exception);
                            context.AddError(name,
                                             documentId,
                                             exception.Message
                                );
                        },
                        trigger => trigger.OnIndexEntryDeleted(name, documentId));
					indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
                    return doc;
                });
                foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinition, actions, context))
                {
                    count++;

                    IndexingResult indexingResult;
                    if (doc is DynamicJsonObject)
                        indexingResult = ExtractIndexDataFromDocument((DynamicJsonObject)doc);
                    else
                        indexingResult = ExtractIndexDataFromDocument(properties, doc);

                    if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
                    {
                        var luceneDoc = new Document();
						luceneDoc.Add(new Field(Constants.DocumentIdFieldName, indexingResult.NewDocId.ToLowerInvariant(), Field.Store.YES,
												Field.Index.NOT_ANALYZED_NO_NORMS));

                        madeChanges = true;
                        CopyFieldsToDocument(luceneDoc, indexingResult.Fields);
                        // OnIndexEntryCreated trigger failures are recorded but do not abort the batch
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                            {
                                logIndexing.Warn(
									string.Format( "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                       name, indexingResult.NewDocId),
									exception);
                                context.AddError(name,
                                                 indexingResult.NewDocId,
                                                 exception.Message
                                    );
                            },
                            trigger => trigger.OnIndexEntryCreated(name, indexingResult.NewDocId, luceneDoc));
                        logIndexing.Debug("Index '{0}' resulted in: {1}", name, luceneDoc);
                        AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    }

                    actions.Indexing.IncrementSuccessIndexing();
                }
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                    {
                        logIndexing.Warn("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message);
                    },
                    x => x.Dispose());
                return madeChanges;
            });
            logIndexing.Debug("Indexed {0} documents for {1}", count, name);
        }
Esempio n. 15
0
        /// <summary>
        /// Deletes the documents identified by <paramref name="keys"/> from this index,
        /// notifying index-update triggers and returning the current index stats so the
        /// caller can persist deletion progress.
        /// </summary>
        public override void Remove(string[] keys, WorkContext context)
        {
            Write((writer, analyzer, stats) =>
            {
                // deletions are not counted as indexing work in the stats
                stats.Operation = IndexingWorkStats.Status.Ignore;
                logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), name));
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                    .Where(x => x != null)
                    .ToList();

                // notify triggers of each deletion; failures are logged and recorded
                // as index errors but do not abort the delete
                keys.Apply(
                    key => batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnException(
                                string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                              name, key),
                                exception);
                            context.AddError(name, key, exception.Message, "OnIndexEntryDeleted Trigger");
                        },
                        trigger => trigger.OnIndexEntryDeleted(key)));
                // keys are lowercased to match how document ids are stored in the index
                writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
                // dispose the batchers without letting a trigger failure escape
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                    {
                        logIndexing.WarnException("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message, "Dispose Trigger");
                    },
                    batcher => batcher.Dispose());

                // fetch the index's current stats so the highest indexed etag can be reported
                IndexStats currentIndexStats = null;
                context.TransactionalStorage.Batch(accessor => currentIndexStats = accessor.Indexing.GetIndexStats(name));

                return new IndexedItemsInfo
                {
                    ChangedDocs = keys.Length,
                    HighestETag = currentIndexStats.LastIndexedEtag,
                    DeletedKeys = keys
                };
            });
        }
Esempio n. 16
0
		/// <summary>
		/// Wraps <paramref name="input"/> in a <see cref="RobustEnumerator"/> for the
		/// reduce phase: attempts are tracked via the storage accessor and per-item
		/// failures are logged and recorded as index errors instead of aborting the batch.
		/// </summary>
		protected IEnumerable<object> RobustEnumerationReduce(IEnumerable<object> input, IndexingFunc func,
		                                                      IStorageActionsAccessor actions, WorkContext context)
		{
			// not strictly accurate, but if we get that many errors, probably an error anyway.
			return new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
			{
				BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
				CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
				OnError = (exception, o) =>
				{
					// record the failure against the doc key when one can be extracted
					context.AddError(name,
					                 TryGetDocKey(o),
					                 exception.Message
						);
					logIndexing.WarnException(
						String.Format("Failed to execute indexing function on {0} on {1}", name,
					                       TryGetDocKey(o)),
						exception);
					try
					{
						actions.Indexing.IncrementReduceIndexingFailure();
					}
					catch (Exception e)
					{
						// we don't care about error here, because it is an error on error problem
						logIndexing.WarnException(
							String.Format("Could not increment indexing failure rate for {0}", name),
							e);
					}
				}
			}.RobustEnumeration(input, func);
		}
Esempio n. 17
0
		/// <summary>
		/// Indexes the given documents: as each document is enumerated its previous index
		/// entry is deleted (with trigger notification), then each map result is written
		/// into a reused Lucene document and added to the index.
		/// </summary>
		public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
		{
			var count = 0;
			Write(context, (indexWriter, analyzer, stats) =>
			{
				// tracks document ids already seen in this batch so duplicates are handled once
				var processedKeys = new HashSet<string>();
				var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
					.Where(x => x != null)
					.ToList();
				// lazily wraps the input so that, as each doc is enumerated, its previous
				// index entry is deleted (with trigger notification) before re-indexing
				var documentsWrapped = documents.Select((dynamic doc) =>
				{
					if(doc.__document_id == null)
						throw new ArgumentException(string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));

					// NOTE(review): count is incremented here, again per enumerated result
					// below, and once more per indexed result — the "Indexed {0} documents"
					// figure overcounts; confirm whether that is intentional
					count++;
					string documentId = doc.__document_id.ToString();
					if (processedKeys.Add(documentId) == false)
						return doc;
					batchers.ApplyAndIgnoreAllErrors(
						exception =>
						{
							logIndexing.WarnException(
								string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
												   name, documentId),
								exception);
							context.AddError(name,
											 documentId,
											 exception.Message
								);
						},
						trigger => trigger.OnIndexEntryDeleted(documentId));
					indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
					return doc;
				});
				// a single Document and id Field are reused across iterations
				var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
				var luceneDoc = new Document();
				var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS);
				foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinitions, actions, context, stats))
				{
					count++;

					float boost;
					var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);

					if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
					{
						count += 1;
						// reset the shared document before filling it for this result
						luceneDoc.GetFields().Clear();
						luceneDoc.SetBoost(boost);
						documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
						luceneDoc.Add(documentIdField);
						foreach (var field in indexingResult.Fields)
						{
							luceneDoc.Add(field);
						}
						// OnIndexEntryCreated trigger failures are recorded but do not abort the batch
						batchers.ApplyAndIgnoreAllErrors(
							exception =>
							{
								logIndexing.WarnException(
									string.Format( "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
													   name, indexingResult.NewDocId),
									exception);
								context.AddError(name,
												 indexingResult.NewDocId,
												 exception.Message
									);
							},
							trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
						LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
						AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
					}

					stats.IndexingSuccesses++;
				}
				// dispose the batchers without letting a trigger failure escape
				batchers.ApplyAndIgnoreAllErrors(
					e =>
					{
						logIndexing.WarnException("Failed to dispose on index update trigger", e);
						context.AddError(name, null, e.Message);
					},
					x => x.Dispose());
				return count;
			});
			logIndexing.Debug("Indexed {0} documents for {1}", count, name);
		}
Esempio n. 18
0
        /// <summary>
        /// Runs <paramref name="action"/> against the index writer under the write lock,
        /// creating the writer and a per-batch analyzer on demand, calling
        /// WriteTempIndexToDiskIfNeeded after a successful action, and recreating the
        /// searcher afterwards when the action reports changes.
        /// </summary>
        /// <param name="context">Work context used to record indexing errors.</param>
        /// <param name="action">Receives the writer and analyzer; returns true when the searcher must be recreated.</param>
        /// <exception cref="ObjectDisposedException">Thrown when the index has already been disposed.</exception>
        protected void Write(WorkContext context, Func <IndexWriter, Analyzer, bool> action)
        {
            if (disposed)
            {
                throw new ObjectDisposedException("Index " + name + " has been disposed");
            }
            // only one writer may mutate the index at a time
            lock (writeLock)
            {
                bool     shouldRecreateSearcher;
                var      toDispose      = new List <Action>();
                Analyzer searchAnalyzer = null;
                try
                {
                    try
                    {
                        searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
                    }
                    catch (Exception e)
                    {
                        // analyzer-creation failures are recorded under a distinct key
                        context.AddError(name, "Creating Analyzer", e.ToString());
                        throw;
                    }

                    if (indexWriter == null)
                    {
                        indexWriter = CreateIndexWriter(directory);
                    }

                    try
                    {
                        shouldRecreateSearcher = action(indexWriter, searchAnalyzer);
                        // let index extensions observe the batch that was just indexed
                        foreach (IIndexExtension indexExtension in indexExtensions.Values)
                        {
                            indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
                        }
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, null, e.ToString());
                        throw;
                    }

                    // presumably flushes an in-memory (temp) index to durable storage
                    // when a threshold is crossed — confirm against its implementation
                    WriteTempIndexToDiskIfNeeded(context);
                }
                finally
                {
                    // always clear the batch buffer and release analyzer resources,
                    // even when the action threw
                    currentlyIndexDocuments.Clear();
                    if (searchAnalyzer != null)
                    {
                        searchAnalyzer.Close();
                    }
                    foreach (Action dispose in toDispose)
                    {
                        dispose();
                    }
                }
                if (shouldRecreateSearcher)
                {
                    RecreateSearcher();
                }
            }
        }
Esempio n. 19
0
        /// <summary>
        /// Advances <paramref name="en"/> one step while keeping the indexing-attempt counters in
        /// sync. Returns the enumerator result, or null when the indexing function threw (the
        /// failure is counted, recorded on the context, and logged).
        /// </summary>
        private bool? MoveNext(IEnumerator en, StatefulEnumerableWrapper<object> innerEnumerator, WorkContext context,
							   DocumentStorageActions actions)
        {
            try
            {
                actions.IncrementIndexingAttempt();
                bool advanced = en.MoveNext();
                if (advanced == false)
                {
                    // Nothing was consumed, so roll back the attempt we just counted.
                    actions.DecrementIndexingAttempt();
                }
                return advanced;
            }
            catch (Exception e)
            {
                actions.IncrementIndexingFailure();
                context.AddError(name,
                                 TryGetDocKey(innerEnumerator.Current),
                                 e.Message
                    );
                log.WarnFormat(e, "Failed to execute indexing function on {0} on {1}", name,
                               GetDocId(innerEnumerator));
                return null;
            }
        }
Esempio n. 20
0
        /// <summary>
        /// Runs <paramref name="action"/> against the shared Lucene IndexWriter under the index-wide
        /// write lock, collecting per-batch <c>IndexingWorkStats</c> and flushing to disk when the
        /// configured transaction mode is Safe.
        /// </summary>
        /// <param name="context">Work context; receives any error raised while analyzing or writing.</param>
        /// <param name="action">Given the writer, analyzer and stats; returns the number of changed documents.</param>
        /// <exception cref="ObjectDisposedException">Thrown when this index has already been disposed.</exception>
        protected void Write(WorkContext context, Func <IndexWriter, Analyzer, IndexingWorkStats, int> action)
        {
            if (disposed)
            {
                throw new ObjectDisposedException("Index " + name + " has been disposed");
            }
            lock (writeLock)
            {
                bool     shouldRecreateSearcher;
                var      toDispose      = new List <Action>();
                Analyzer searchAnalyzer = null;
                try
                {
                    try
                    {
                        searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
                    }
                    catch (Exception e)
                    {
                        // Analyzer creation failures are recorded as index errors before propagating.
                        context.AddError(name, "Creating Analyzer", e.ToString());
                        throw;
                    }

                    if (indexWriter == null)
                    {
                        indexWriter = CreateIndexWriter(directory);
                    }

                    var stats = new IndexingWorkStats();
                    try
                    {
                        var changedDocs = action(indexWriter, searchAnalyzer, stats);
                        // The changed-document count feeds the optimization heuristic; only a batch
                        // that actually changed something needs the searcher rebuilt.
                        docCountSinceLastOptimization += changedDocs;
                        shouldRecreateSearcher         = changedDocs > 0;
                        foreach (IIndexExtension indexExtension in indexExtensions.Values)
                        {
                            indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
                        }
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, null, e.ToString());
                        throw;
                    }

                    UpdateIndexingStats(context, stats);

                    WriteTempIndexToDiskIfNeeded(context);

                    if (configuration.TransactionMode == TransactionMode.Safe)
                    {
                        Flush();                         // just make sure changes are flushed to disk
                    }
                }
                finally
                {
                    // Cleanup runs even on failure: reset the batch document list and close analyzers.
                    currentlyIndexDocuments.Clear();
                    if (searchAnalyzer != null)
                    {
                        searchAnalyzer.Close();
                    }
                    foreach (Action dispose in toDispose)
                    {
                        dispose();
                    }
                }
                if (shouldRecreateSearcher)
                {
                    RecreateSearcher();
                }
            }
        }
Esempio n. 21
0
        /// <summary>
        /// Indexes the given documents: deletes any existing entry for each document id, runs the
        /// map definition over the batch, and writes the results into the Lucene index, notifying
        /// index update trigger batchers along the way.
        /// </summary>
        public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable <object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;

            Write(context, (indexWriter, analyzer) =>
            {
                bool madeChanges = false;
                PropertyDescriptorCollection properties = null;
                var processedKeys = new HashSet <string>();
                var batchers      = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                                    .Where(x => x != null)
                                    .ToList();
                // NOTE: this Select is lazy — the delete-before-reindex side effects run as the
                // RobustEnumerationIndex loop below pulls each document.
                var documentsWrapped = documents.Select((dynamic doc) =>
                {
                    if (doc.__document_id == null)
                    {
                        throw new ArgumentException("Cannot index something which doesn't have a document id, but got: " + doc);
                    }

                    string documentId = doc.__document_id.ToString();
                    // Each document id is processed only once per batch.
                    if (processedKeys.Add(documentId) == false)
                    {
                        return(doc);
                    }
                    madeChanges = true;
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                               name, documentId);
                        context.AddError(name,
                                         documentId,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(name, documentId));
                    indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
                    return(doc);
                });
                foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinition, actions, context))
                {
                    count++;

                    IndexingResult indexingResult;
                    if (doc is DynamicJsonObject)
                    {
                        indexingResult = ExtractIndexDataFromDocument((DynamicJsonObject)doc);
                    }
                    else
                    {
                        indexingResult = ExtractIndexDataFromDocument(properties, doc);
                    }

                    if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
                    {
                        var luceneDoc = new Document();
                        luceneDoc.Add(new Field(Constants.DocumentIdFieldName, indexingResult.NewDocId.ToLowerInvariant(), Field.Store.YES,
                                                Field.Index.NOT_ANALYZED));

                        madeChanges = true;
                        CopyFieldsToDocument(luceneDoc, indexingResult.Fields);
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                   name, indexingResult.NewDocId);
                            context.AddError(name,
                                             indexingResult.NewDocId,
                                             exception.Message
                                             );
                        },
                            trigger => trigger.OnIndexEntryCreated(name, indexingResult.NewDocId, luceneDoc));
                        logIndexing.DebugFormat("Index '{0}' resulted in: {1}", name, luceneDoc);
                        AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    }

                    actions.Indexing.IncrementSuccessIndexing();
                }
                // Dispose all trigger batchers; individual failures are logged but do not abort.
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.Warn("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(madeChanges);
            });
            logIndexing.DebugFormat("Indexed {0} documents for {1}", count, name);
        }
Esempio n. 22
0
        /// <summary>
        /// Re-reduces the given mapped results: existing entries for each reduce key are deleted
        /// first, then the reduce function output is written back as new Lucene documents.
        /// </summary>
        public void ReduceDocuments(AbstractViewGenerator viewGenerator,
                                    IEnumerable <object> mappedResults,
                                    WorkContext context,
                                    IStorageActionsAccessor actions,
                                    string[] reduceKeys)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;

            Write(context, (indexWriter, analyzer) =>
            {
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                               .Where(x => x != null)
                               .ToList();
                // Purge every document previously stored under the reduce keys being re-reduced.
                foreach (var reduceKey in reduceKeys)
                {
                    var entryKey = reduceKey;
                    indexWriter.DeleteDocuments(new Term(Raven.Abstractions.Data.Constants.ReduceKeyFieldName, entryKey.ToLowerInvariant()));
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                               name, entryKey);
                        context.AddError(name,
                                         entryKey,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(name, entryKey));
                }
                PropertyDescriptorCollection properties = null;
                foreach (var doc in RobustEnumerationReduce(mappedResults, viewGenerator.ReduceDefinition, actions, context))
                {
                    count++;
                    var fields = GetFields(doc, ref properties).ToList();

                    string reduceKeyAsString = ExtractReduceKey(viewGenerator, doc);

                    var luceneDoc = new Document();
                    luceneDoc.Add(new Field(Raven.Abstractions.Data.Constants.ReduceKeyFieldName, reduceKeyAsString.ToLowerInvariant(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                    foreach (var field in fields)
                    {
                        luceneDoc.Add(field);
                    }
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                               name, reduceKeyAsString);
                        context.AddError(name,
                                         reduceKeyAsString,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
                    logIndexing.DebugFormat("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
                    AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    actions.Indexing.IncrementReduceSuccessIndexing();
                }
                // Dispose all trigger batchers; individual failures are logged but do not abort.
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.Warn("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(true);
            });
            if (logIndexing.IsDebugEnabled)
            {
                logIndexing.DebugFormat("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name, string.Join(", ", reduceKeys));
            }
        }
Esempio n. 23
0
 /// <summary>
 /// Wraps <paramref name="input"/> in a RobustEnumerator so that a failure of the indexing
 /// function skips the offending item (recording and logging the error) instead of aborting
 /// the whole batch.
 /// </summary>
 protected IEnumerable<object> RobustEnumeration(IEnumerable<object> input, IndexingFunc func, IStorageActionsAccessor actions, WorkContext context)
 {
     var robustEnumerator = new RobustEnumerator
     {
         BeforeMoveNext = actions.Indexing.IncrementIndexingAttempt,
         CancelMoveNext = actions.Indexing.DecrementIndexingAttempt,
         OnError = (exception, o) =>
         {
             // Count the failure, record it against the document (when its key can be
             // recovered), and keep enumerating.
             actions.Indexing.IncrementIndexingFailure();
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                 );
             logIndexing.WarnFormat(exception, "Failed to execute indexing function on {0} on {1}", name,
                                    TryGetDocKey(o));
         }
     };
     return robustEnumerator.RobustEnumeration(input, func);
 }
Esempio n. 24
0
        /// <summary>
        /// Re-reduces the given mapped results: existing index entries for each reduce key are
        /// deleted first, then the reduce function output is written back as new Lucene documents.
        /// </summary>
        public void ReduceDocuments(AbstractViewGenerator viewGenerator,
                                    IEnumerable<object> mappedResults,
                                    WorkContext context,
                                    IStorageActionsAccessor actions,
                                    string[] reduceKeys)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var reducedEntries = 0;
            Write(context, indexWriter =>
            {
                var updateBatchers = context.IndexUpdateTriggers
                    .Select(x => x.CreateBatcher(name))
                    .Where(x => x != null)
                    .ToList();

                // Purge every document previously stored under the reduce keys being re-reduced.
                foreach (var reduceKey in reduceKeys)
                {
                    var entryKey = reduceKey;
                    indexWriter.DeleteDocuments(new Term("__reduce_key", entryKey.ToLowerInvariant()));
                    updateBatchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                                   name, entryKey);
                            context.AddError(name,
                                             entryKey,
                                             exception.Message
                                );
                        },
                        trigger => trigger.OnIndexEntryDeleted(name, entryKey));
                }

                PropertyDescriptorCollection properties = null;
                foreach (var doc in RobustEnumeration(mappedResults, viewGenerator.ReduceDefinition, actions, context))
                {
                    reducedEntries++;
                    var fields = GetFields(doc, ref properties);

                    dynamic reduceKey = viewGenerator.GroupByExtraction(doc);
                    if (reduceKey == null)
                    {
                        throw new InvalidOperationException("Could not find reduce key for " + name + " in the result: " + doc);
                    }
                    string reduceKeyAsString = ReduceKeyToString(reduceKey);

                    var luceneDoc = new Document();
                    // The reduce key is stored lower-cased for lookup only, never returned.
                    luceneDoc.Add(new Field("__reduce_key", reduceKeyAsString.ToLowerInvariant(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                    foreach (var field in fields)
                    {
                        luceneDoc.Add(field);
                    }
                    updateBatchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                   name, reduceKeyAsString);
                            context.AddError(name,
                                             reduceKeyAsString,
                                             exception.Message
                                );
                        },
                        trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
                    logIndexing.DebugFormat("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
                    indexWriter.AddDocument(luceneDoc);
                    actions.Indexing.IncrementSuccessIndexing();
                }

                // Dispose all trigger batchers; individual failures are logged but do not abort.
                updateBatchers.ApplyAndIgnoreAllErrors(
                    e =>
                    {
                        logIndexing.Warn("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message);
                    },
                    x => x.Dispose());
                return true;
            });

            if (logIndexing.IsDebugEnabled)
            {
                logIndexing.DebugFormat("Reduce resulted in {0} entries for {1} for reduce keys: {2}", reducedEntries, name, string.Join(", ", reduceKeys));
            }
        }
Esempio n. 25
0
        /// <summary>
        /// Removes the given document keys from the index, timing trigger execution and the
        /// Lucene delete separately so both costs show up in the deletion batch performance stats.
        /// </summary>
        public override void Remove(string[] keys, WorkContext context)
        {
            DeletionBatchInfo deletionBatchInfo = null;

            try
            {
                deletionBatchInfo = context.ReportDeletionBatchStarted(PublicName, keys.Length);

                Write((writer, analyzer, stats) =>
                {
                    var indexUpdateTriggersDuration = new Stopwatch();

                    // Deletions should not be counted as regular indexing work.
                    stats.Operation = IndexingWorkStats.Status.Ignore;
                    if (logIndexing.IsDebugEnabled)
                    {
                        logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), PublicName));
                    }

                    List <AbstractIndexUpdateTriggerBatcher> batchers;
                    using (StopwatchScope.For(indexUpdateTriggersDuration))
                    {
                        batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
                                   .Where(x => x != null)
                                   .ToList();

                        keys.Apply(key =>
                                   InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.DocumentIdFieldName, key.ToLowerInvariant())));
                    }

                    var deleteDocumentsDuration = new Stopwatch();

                    using (StopwatchScope.For(deleteDocumentsDuration))
                    {
                        writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
                    }

                    deletionBatchInfo.PerformanceStats.Add(PerformanceStats.From(IndexingOperation.Delete_Documents, deleteDocumentsDuration.ElapsedMilliseconds));

                    // Batcher disposal is timed together with the earlier trigger invocations.
                    using (StopwatchScope.For(indexUpdateTriggersDuration))
                    {
                        batchers.ApplyAndIgnoreAllErrors(
                            e =>
                        {
                            logIndexing.WarnException("Failed to dispose on index update trigger in " + PublicName, e);
                            context.AddError(indexId, PublicName, null, e, "Dispose Trigger");
                        },
                            batcher => batcher.Dispose());
                    }

                    deletionBatchInfo.PerformanceStats.Add(PerformanceStats.From(IndexingOperation.Delete_IndexUpdateTriggers, indexUpdateTriggersDuration.ElapsedMilliseconds));

                    return(new IndexedItemsInfo(GetLastEtagFromStats())
                    {
                        ChangedDocs = keys.Length,
                        DeletedKeys = keys
                    });
                }, deletionBatchInfo.PerformanceStats);
            }
            finally
            {
                // Always close out the deletion batch report, even when the write failed.
                if (deletionBatchInfo != null)
                {
                    context.ReportDeletionBatchCompleted(deletionBatchInfo);
                }
            }
        }
Esempio n. 26
0
		/// <summary>
		/// Enumerates <paramref name="input"/> through the index map functions, tracking attempts
		/// in <paramref name="stats"/> and converting per-item failures into recorded index errors
		/// so a single bad document cannot abort the batch.
		/// </summary>
		protected IEnumerable<object> RobustEnumerationIndex(IEnumerable<object> input, IEnumerable<IndexingFunc> funcs,
															IStorageActionsAccessor actions, WorkContext context, IndexingWorkStats stats)
		{
			var robustEnumerator = new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
			{
				BeforeMoveNext = () => { stats.IndexingAttempts++; },
				CancelMoveNext = () => { stats.IndexingAttempts--; },
				OnError = (exception, o) =>
				{
					// Record the failure against the offending document and keep enumerating.
					context.AddError(name,
									TryGetDocKey(o),
									exception.Message
						);
					logIndexing.WarnException(
						string.Format("Failed to execute indexing function on {0} on {1}", name,
										TryGetDocKey(o)),
						exception);

					stats.IndexingErrors++;
				}
			};
			return robustEnumerator.RobustEnumeration(input, funcs);
		}
Esempio n. 27
0
 /// <summary>
 /// Opens a short-lived Lucene IndexWriter under the index write lock, invokes
 /// <paramref name="action"/> with it, and recreates the searcher when the action asks for it.
 /// </summary>
 /// <exception cref="ObjectDisposedException">Thrown when this index has already been disposed.</exception>
 protected void Write(WorkContext context, Func<IndexWriter, bool> action)
 {
     if (disposed)
     {
         throw new ObjectDisposedException("Index " + name + " has been disposed");
     }
     lock (writeLock)
     {
         bool recreateSearcher;
         var pendingDisposals = new List<Action>();
         Analyzer currentAnalyzer = null;
         try
         {
             currentAnalyzer = CreateAnalyzer(pendingDisposals);
             var indexWriter = new IndexWriter(directory, currentAnalyzer, IndexWriter.MaxFieldLength.UNLIMITED);
             try
             {
                 recreateSearcher = action(indexWriter);
             }
             catch (Exception e)
             {
                 // Surface the failure as an index error, then let it propagate to the caller.
                 context.AddError(name, null, e.ToString());
                 throw;
             }
             finally
             {
                 indexWriter.Close();
             }
         }
         finally
         {
             // Tear down the analyzer plus anything registered for disposal during its creation.
             if (currentAnalyzer != null)
             {
                 currentAnalyzer.Close();
             }
             foreach (var dispose in pendingDisposals)
             {
                 dispose();
             }
         }
         if (recreateSearcher)
         {
             RecreateSearcher();
         }
     }
 }
Esempio n. 28
0
        // This method may be called concurrently, by both the ReduceTask (for removal)
        // and by the ReducingExecuter (for add/modify). This is okay with us, since the 
        // Write() call is already handling locking properly
		/// <summary>
		/// Re-reduces the given mapped results: existing entries for each reduce key are deleted
		/// first, then the reduce output is written back as new Lucene documents.
		/// </summary>
		public void ReduceDocuments(AbstractViewGenerator viewGenerator,
									IEnumerable<object> mappedResults,
									WorkContext context,
									IStorageActionsAccessor actions,
									string[] reduceKeys)
		{
			var count = 0;
			Write(context, (indexWriter, analyzer) =>
			{
				actions.Indexing.SetCurrentIndexStatsTo(name);
				var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
					.Where(x => x != null)
					.ToList();
				// Purge every document previously stored under the reduce keys being re-reduced.
				foreach (var reduceKey in reduceKeys)
				{
					var entryKey = reduceKey;
					indexWriter.DeleteDocuments(new Term(Abstractions.Data.Constants.ReduceKeyFieldName, entryKey.ToLowerInvariant()));
					batchers.ApplyAndIgnoreAllErrors(
						exception =>
						{
							logIndexing.WarnException(
								string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
								              name, entryKey),
								exception);
							context.AddError(name,
							                 entryKey,
							                 exception.Message
								);
						},
						trigger => trigger.OnIndexEntryDeleted(name, entryKey));
				}
				PropertyDescriptorCollection properties = null;
				foreach (var doc in RobustEnumerationReduce(mappedResults, viewGenerator.ReduceDefinition, actions, context))
				{
					count++;
					var fields = GetFields(doc, ref properties).ToList();

					string reduceKeyAsString = ExtractReduceKey(viewGenerator, doc);

					var luceneDoc = new Document();
					// The reduce key is stored lower-cased for lookup only, never returned.
					luceneDoc.Add(new Field(Abstractions.Data.Constants.ReduceKeyFieldName, reduceKeyAsString.ToLowerInvariant(),
					                        Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
					foreach (var field in fields)
					{
						luceneDoc.Add(field);
					}
					batchers.ApplyAndIgnoreAllErrors(
						exception =>
						{
							logIndexing.WarnException(
								string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
								              name, reduceKeyAsString),
								exception);
							context.AddError(name,
							                 reduceKeyAsString,
							                 exception.Message
								);
						},
						trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
					logIndexing.Debug("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
					AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
					actions.Indexing.IncrementReduceSuccessIndexing();
				}
				// Dispose all trigger batchers; individual failures are logged but do not abort.
				batchers.ApplyAndIgnoreAllErrors(
					e =>
					{
						logIndexing.Warn("Failed to dispose on index update trigger", e);
						context.AddError(name, null, e.Message);
					},
					x => x.Dispose());
				return true;
			});
			logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name,
							  string.Join(", ", reduceKeys)));
		}
Esempio n. 29
0
		/// <summary>
		/// Indexes an IndexingBatch: removes stale entries per document id, runs the map
		/// definitions over buffered partitions in the background task executer, writes the
		/// results to Lucene, and records an IndexingPerformanceStats entry for the batch.
		/// </summary>
		public override void IndexDocuments(AbstractViewGenerator viewGenerator, IndexingBatch batch, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
		{
			var count = 0;
			var sourceCount = 0;
			var sw = Stopwatch.StartNew();
			var start = SystemTime.UtcNow;
			Write((indexWriter, analyzer, stats) =>
			{
				var processedKeys = new HashSet<string>();
				var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
					.Where(x => x != null)
					.ToList();
				try
				{
					var docIdTerm = new Term(Constants.DocumentIdFieldName);
					// The Select performs the delete-before-reindex bookkeeping; counters are
					// updated with Interlocked because partitions run on multiple threads below.
					var documentsWrapped = batch.Docs.Select((doc,i) =>
					{
						Interlocked.Increment(ref sourceCount);
						if (doc.__document_id == null)
							throw new ArgumentException(
								string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));

						string documentId = doc.__document_id.ToString();
						// Each document id is processed only once per batch.
						if (processedKeys.Add(documentId) == false)
							return doc;
						batchers.ApplyAndIgnoreAllErrors(
							exception =>
							{
								logIndexing.WarnException(
									string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
									              name, documentId),
									exception);
								context.AddError(name,
								                 documentId,
								                 exception.Message
									);
							},
							trigger => trigger.OnIndexEntryDeleted(documentId));
						if (batch.SkipDeleteFromIndex[i] == false || 
							context.ShouldRemoveFromIndex(documentId)) // maybe it is recently deleted?
							indexWriter.DeleteDocuments(docIdTerm.CreateTerm(documentId.ToLowerInvariant()));
				
						return doc;
					})
						.Where(x => x is FilteredDocument == false)
						.ToList();


					BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, documentsWrapped, (partition) =>
					{
						// Per-partition state: the Document and id Field instances are reused
						// across iterations within the partition.
						var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
						var luceneDoc = new Document();
						var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES,
						                                Field.Index.NOT_ANALYZED_NO_NORMS);

						foreach (var doc in RobustEnumerationIndex(partition, viewGenerator.MapDefinitions, actions, stats))
						{
							float boost;
							var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);

							if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
							{
								Interlocked.Increment(ref count);
								luceneDoc.GetFields().Clear();
								luceneDoc.Boost = boost;
								documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
								luceneDoc.Add(documentIdField);
								foreach (var field in indexingResult.Fields)
								{
									luceneDoc.Add(field);
								}
								batchers.ApplyAndIgnoreAllErrors(
									exception =>
									{
										logIndexing.WarnException(
											string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
											              name, indexingResult.NewDocId),
											exception);
										context.AddError(name,
										                 indexingResult.NewDocId,
										                 exception.Message
											);
									},
									trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
								LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
								AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
							}

							Interlocked.Increment(ref stats.IndexingSuccesses);
						}
					});
				}
				catch(Exception e)
				{
					// Tell every batcher about the failure before rethrowing it.
					batchers.ApplyAndIgnoreAllErrors(
						ex =>
						{
							logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
							context.AddError(name, null, ex.Message);
						},
						x => x.AnErrorOccured(e));
					throw;
				}
				finally
				{
					// Dispose all trigger batchers; individual failures are logged but do not abort.
					batchers.ApplyAndIgnoreAllErrors(
						e =>
						{
							logIndexing.WarnException("Failed to dispose on index update trigger", e);
							context.AddError(name, null, e.Message);
						},
						x => x.Dispose());
				}
				return sourceCount;
			});
			AddindexingPerformanceStat(new IndexingPerformanceStats
			{
				OutputCount = count,
				InputCount = sourceCount,
				Duration = sw.Elapsed,
				Operation = "Index",
				Started = start
			});
			logIndexing.Debug("Indexed {0} documents for {1}", count, name);
		}
Esempio n. 30
0
        // Indexes a batch of documents. For each unique document id the
        // OnIndexEntryDeleted triggers fire and any stale index entry is deleted; the
        // documents are then mapped (in buffered partitions) and the resulting Lucene
        // documents written, firing OnIndexEntryCreated triggers along the way.
        // NOTE(review): the Select() wrapper below has side effects (trigger calls,
        // deletes, counter increments) that only run when the wrapped sequence is
        // enumerated via .ToList() — do not reorder.
        public override void IndexDocuments(AbstractViewGenerator viewGenerator, IndexingBatch batch, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            var count       = 0; // documents actually written to the index (OutputCount)
            var sourceCount = 0; // source documents seen in the batch (InputCount)
            var sw          = Stopwatch.StartNew();
            var start       = SystemTime.UtcNow;

            Write(context, (indexWriter, analyzer, stats) =>
            {
                var processedKeys = new HashSet <string>();
                // One batcher per registered index-update trigger; null batchers are skipped.
                var batchers      = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                                    .Where(x => x != null)
                                    .ToList();
                try
                {
                    // Template term; CreateTerm() below substitutes the per-document id.
                    var docIdTerm        = new Term(Constants.DocumentIdFieldName);
                    var documentsWrapped = batch.Docs.Select((doc, i) =>
                    {
                        // Interlocked because mapping later happens in parallel partitions
                        // (ExecuteAllBuffered below).
                        Interlocked.Increment(ref sourceCount);
                        if (doc.__document_id == null)
                        {
                            throw new ArgumentException(
                                string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));
                        }

                        string documentId = doc.__document_id.ToString();
                        // Only the first occurrence of a document id performs the
                        // delete-trigger + stale-entry cleanup below.
                        if (processedKeys.Add(documentId) == false)
                        {
                            return(doc);
                        }
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                        {
                            logIndexing.WarnException(
                                string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                              name, documentId),
                                exception);
                            context.AddError(name,
                                             documentId,
                                             exception.Message
                                             );
                        },
                            trigger => trigger.OnIndexEntryDeleted(documentId));
                        if (batch.SkipDeleteFromIndex[i] == false)
                        {
                            // Ids are stored lowercased (see SetValue below), so delete
                            // with the lowercased term.
                            indexWriter.DeleteDocuments(docIdTerm.CreateTerm(documentId.ToLowerInvariant()));
                        }
                        return(doc);
                    })
                                           .Where(x => x is FilteredDocument == false)
                                           .ToList();


                    BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, documentsWrapped, (partition) =>
                    {
                        var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
                        // luceneDoc and documentIdField are reused across iterations
                        // (per partition) to avoid per-document allocations.
                        var luceneDoc       = new Document();
                        var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES,
                                                        Field.Index.NOT_ANALYZED_NO_NORMS);

                        foreach (var doc in RobustEnumerationIndex(partition, viewGenerator.MapDefinitions, actions, stats))
                        {
                            float boost;
                            var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);

                            if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
                            {
                                Interlocked.Increment(ref count);
                                luceneDoc.GetFields().Clear();
                                luceneDoc.Boost = boost;
                                documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
                                luceneDoc.Add(documentIdField);
                                foreach (var field in indexingResult.Fields)
                                {
                                    luceneDoc.Add(field);
                                }
                                batchers.ApplyAndIgnoreAllErrors(
                                    exception =>
                                {
                                    logIndexing.WarnException(
                                        string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                      name, indexingResult.NewDocId),
                                        exception);
                                    context.AddError(name,
                                                     indexingResult.NewDocId,
                                                     exception.Message
                                                     );
                                },
                                    trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
                                LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
                                AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                            }

                            Interlocked.Increment(ref stats.IndexingSuccesses);
                        }
                    });
                }
                catch (Exception e)
                {
                    // Give every batcher a chance to observe the failure before rethrowing.
                    batchers.ApplyAndIgnoreAllErrors(
                        ex =>
                    {
                        logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
                        context.AddError(name, null, ex.Message);
                    },
                        x => x.AnErrorOccured(e));
                    throw;
                }
                finally
                {
                    // Batchers are always disposed, even on failure.
                    batchers.ApplyAndIgnoreAllErrors(
                        e =>
                    {
                        logIndexing.WarnException("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message);
                    },
                        x => x.Dispose());
                }
                return(sourceCount);
            });
            AddindexingPerformanceStat(new IndexingPerformanceStats
            {
                OutputCount = count,
                InputCount  = sourceCount,
                Duration    = sw.Elapsed,
                Operation   = "Index",
                Started     = start
            });
            logIndexing.Debug("Indexed {0} documents for {1}", count, name);
        }
Esempio n. 31
0
        // This method may be called concurrently, by both the ReduceTask (for removal)
        // and by the ReducingExecuter (for add/modify). This is okay with us, since the
        // Write() call is already handling locking properly
        //
        // Reduce phase: for each reduce key, delete the existing reduce results from
        // the index (notifying OnIndexEntryDeleted triggers), run the reduce function
        // over the mapped results, and write one Lucene document per reduce output
        // (notifying OnIndexEntryCreated triggers).
        public void ReduceDocuments(AbstractViewGenerator viewGenerator,
                                    IEnumerable <object> mappedResults,
                                    WorkContext context,
                                    IStorageActionsAccessor actions,
                                    string[] reduceKeys)
        {
            var count = 0; // number of reduce outputs written

            Write(context, (indexWriter, analyzer, stats) =>
            {
                stats.Operation = IndexingWorkStats.Status.Reduce;
                var batchers    = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                                  .Where(x => x != null)
                                  .ToList();
                // First pass: remove all existing entries for the affected reduce keys.
                foreach (var reduceKey in reduceKeys)
                {
                    var entryKey = reduceKey;
                    indexWriter.DeleteDocuments(new Term(Abstractions.Data.Constants.ReduceKeyFieldName, entryKey.ToLowerInvariant()));
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnException(
                            string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                          name, entryKey),
                            exception);
                        context.AddError(name,
                                         entryKey,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(entryKey));
                }
                PropertyDescriptorCollection properties      = null;
                var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
                // luceneDoc and reduceKeyField are reused across iterations to avoid
                // per-result allocations.
                var luceneDoc      = new Document();
                var reduceKeyField = new Field(Constants.ReduceKeyFieldName, "dummy",
                                               Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
                foreach (var doc in RobustEnumerationReduce(mappedResults, viewGenerator.ReduceDefinition, actions, context, stats))
                {
                    count++;
                    float boost;
                    var fields = GetFields(anonymousObjectToLuceneDocumentConverter, doc, ref properties, out boost).ToList();

                    string reduceKeyAsString = ExtractReduceKey(viewGenerator, doc);
                    // Reduce keys are stored lowercased, matching the lowercased delete above.
                    reduceKeyField.SetValue(reduceKeyAsString.ToLowerInvariant());

                    luceneDoc.GetFields().Clear();
                    luceneDoc.SetBoost(boost);
                    luceneDoc.Add(reduceKeyField);
                    foreach (var field in fields)
                    {
                        luceneDoc.Add(field);
                    }

                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnException(
                            string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                          name, reduceKeyAsString),
                            exception);
                        context.AddError(name,
                                         reduceKeyAsString,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryCreated(reduceKeyAsString, luceneDoc));

                    LogIndexedDocument(reduceKeyAsString, luceneDoc);

                    AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    stats.ReduceSuccesses++;
                }
                // NOTE(review): unlike IndexDocuments, batcher disposal here is not in a
                // finally block, so batchers are not disposed if the reduce loop throws —
                // confirm whether that is intended.
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(count + reduceKeys.Length);
            });
            logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name,
                                                  string.Join(", ", reduceKeys)));
        }
Esempio n. 32
0
		// Removes the given document keys from the index: fires the
		// OnIndexEntryDeleted triggers for every key, deletes the matching Lucene
		// documents in one call, then disposes the trigger batchers. Trigger errors
		// are logged and recorded on the context but never abort the removal.
		public override void Remove(string[] keys, WorkContext context)
		{
			Write(context, (indexWriter, analyzer, stats) =>
			{
				stats.Operation = IndexingWorkStats.Status.Ignore;
				logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), name));

				// Materialize the per-index trigger batchers once for the whole batch.
				var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
					.Where(x => x != null)
					.ToList();

				foreach (var key in keys)
				{
					var currentKey = key;
					batchers.ApplyAndIgnoreAllErrors(
						exception =>
						{
							logIndexing.WarnException(
								string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
								              name, currentKey),
								exception);
							context.AddError(name, currentKey, exception.Message);
						},
						trigger => trigger.OnIndexEntryDeleted(currentKey));
				}

				// Ids are indexed lowercased, so delete with lowercased terms.
				var deletionTerms = keys
					.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant()))
					.ToArray();
				indexWriter.DeleteDocuments(deletionTerms);

				batchers.ApplyAndIgnoreAllErrors(
					e =>
					{
						logIndexing.WarnException("Failed to dispose on index update trigger", e);
						context.AddError(name, null, e.Message);
					},
					batcher => batcher.Dispose());

				return keys.Length;
			});
		}
Esempio n. 33
0
        // Indexes the given documents: each unique document id first gets its
        // OnIndexEntryDeleted triggers fired and any stale entry deleted, then the map
        // functions run and the resulting Lucene documents are written, firing
        // OnIndexEntryCreated triggers. Returns (through Write) the number of mapped
        // results processed.
        //
        // Fixes over the previous revision:
        //  * `count` was incremented three times per document (once in the wrapping
        //    Select, once per mapped result, and once more when a document was actually
        //    written), inflating the "Indexed {0} documents" log message and the value
        //    returned to Write(). It is now incremented exactly once per mapped result.
        //  * the document-id field was created with Field.Index.ANALYZED_NO_NORMS; an
        //    analyzed field is tokenized, so the exact-term delete above
        //    (new Term(Constants.DocumentIdFieldName, id.ToLowerInvariant())) could not
        //    match it. NOT_ANALYZED_NO_NORMS stores the id as a single term, matching
        //    the sibling IndexDocuments implementation.
        public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable <object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            var count = 0;

            Write(context, (indexWriter, analyzer, stats) =>
            {
                var processedKeys = new HashSet <string>();
                var batchers      = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                                    .Where(x => x != null)
                                    .ToList();
                // Side-effecting wrapper: runs lazily while RobustEnumerationIndex pulls items.
                var documentsWrapped = documents.Select((dynamic doc) =>
                {
                    if (doc.__document_id == null)
                    {
                        throw new ArgumentException(string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));
                    }

                    string documentId = doc.__document_id.ToString();
                    if (processedKeys.Add(documentId) == false)
                    {
                        return(doc);
                    }
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnException(
                            string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                          name, documentId),
                            exception);
                        context.AddError(name,
                                         documentId,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(documentId));
                    indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
                    return(doc);
                });
                var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
                // Reused across iterations to avoid per-document allocations.
                var luceneDoc       = new Document();
                // NOT_ANALYZED_NO_NORMS: the id must be stored as a single exact term
                // so term-based deletes can match it (see fix note above).
                var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
                foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinitions, actions, context, stats))
                {
                    count++; // single increment per mapped result (see fix note above)

                    float boost;
                    var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);

                    if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
                    {
                        luceneDoc.GetFields().Clear();
                        luceneDoc.SetBoost(boost);
                        documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
                        luceneDoc.Add(documentIdField);
                        foreach (var field in indexingResult.Fields)
                        {
                            luceneDoc.Add(field);
                        }
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                        {
                            logIndexing.WarnException(
                                string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                              name, indexingResult.NewDocId),
                                exception);
                            context.AddError(name,
                                             indexingResult.NewDocId,
                                             exception.Message
                                             );
                        },
                            trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
                        LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
                        AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    }

                    stats.IndexingSuccesses++;
                }
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.WarnException("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(count);
            });
            logIndexing.Debug("Indexed {0} documents for {1}", count, name);
        }
Esempio n. 34
0
		// Serializes all index writes behind writeLock: builds the per-write analyzer,
		// lazily creates the IndexWriter, runs the supplied action, and recreates the
		// searcher when the action reports changes (returns true). Errors from any
		// stage are recorded on the context before being rethrown.
		protected void Write(WorkContext context, Func<IndexWriter, Analyzer, bool> action)
		{
			if (disposed)
				throw new ObjectDisposedException("Index " + name + " has been disposed");
			lock (writeLock)
			{
				bool shouldRecreateSearcher;
				// CreateAnalyzer registers cleanup callbacks here; all run in the finally below.
				var toDispose = new List<Action>();
				Analyzer searchAnalyzer = null;
				try
				{
					try
					{
						searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
					}
					catch (Exception e)
					{
						// Analyzer construction failures are recorded against the index, then rethrown.
						context.AddError(name, "Creating Analyzer", e.ToString());
						throw;
					}

					if (indexWriter == null)
					{
						indexWriter = CreateIndexWriter(directory);
					}

					try
					{
						shouldRecreateSearcher = action(indexWriter, searchAnalyzer);
						// Let index extensions observe the documents indexed in this write.
						foreach (IIndexExtension indexExtension in indexExtensions.Values)
						{
							indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
						}
					}
					catch (Exception e)
					{
						context.AddError(name, null, e.ToString());
						throw;
					}

					WriteTempIndexToDiskIfNeeded(context);
				}
				finally
				{
					// Always release per-write resources, even when the action threw.
					currentlyIndexDocuments.Clear();
					if (searchAnalyzer != null)
						searchAnalyzer.Close();
					foreach (Action dispose in toDispose)
					{
						dispose();
					}
				}
				if (shouldRecreateSearcher)
					RecreateSearcher();
			}
		}
Esempio n. 35
0
        // Removes the given document keys from the index: fires OnIndexEntryDeleted
        // triggers per key, deletes the matching Lucene documents, then disposes the
        // trigger batchers (recording, but ignoring, any trigger errors).
        //
        // Fixes over the previous revision:
        //  * the debug message used string.Format(", ", keys), which treats ", " as the
        //    format string (it contains no placeholders) and therefore always printed
        //    ", " — string.Join is what was intended, as in every sibling version.
        //  * deletion used the raw key, but documents are indexed with a lowercased
        //    "__document_id" field (see the companion IndexDocuments), so mixed-case
        //    keys were never actually deleted — the term is now lowercased to match.
        public override void Remove(string[] keys, WorkContext context)
        {
            Write(context, writer =>
            {
                if (logIndexing.IsDebugEnabled)
                {
                    logIndexing.DebugFormat("Deleting ({0}) from {1}", string.Join(", ", keys), name);
                }
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                    .Where(x => x != null)
                    .ToList();

                keys.Apply(
                    key => batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                                   name, key);
                            context.AddError(name,  key, exception.Message );
                        },
                        trigger => trigger.OnIndexEntryDeleted(name, key)));
                writer.DeleteDocuments(keys.Select(k => new Term("__document_id", k.ToLowerInvariant())).ToArray());
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                    {
                        logIndexing.Warn("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message );
                    },
                    batcher => batcher.Dispose());
                return true;
            });
        }
Esempio n. 36
0
		// we don't care about tracking map/reduce stats here, since it is merely
		// an optimization step
		//
		// Wraps the input in a RobustEnumerator so that per-item indexing-function
		// failures are recorded on the context and logged instead of aborting the
		// whole enumeration.
		protected IEnumerable<object> RobustEnumerationReduceDuringMapPhase(IEnumerable<object> input, IndexingFunc func,
															  IStorageActionsAccessor actions, WorkContext context)
		{
			// not strictly accurate, but if we get that many errors, probably an error anyway.
			var robustEnumerator = new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
			{
				BeforeMoveNext = () => { }, // don't care
				CancelMoveNext = () => { }, // don't care
				OnError = (ex, item) =>
				{
					var docKey = TryGetDocKey(item);
					context.AddError(name, docKey, ex.Message);
					logIndexing.WarnException(
						String.Format("Failed to execute indexing function on {0} on {1}", name, docKey),
						ex);
				}
			};
			return robustEnumerator.RobustEnumeration(input, func);
		}
Esempio n. 37
0
        // Older IndexDocuments variant: wraps the incoming documents so that the first
        // occurrence of each document id fires delete triggers and removes the stale
        // index entry, then maps the documents and writes the results.
        // NOTE(review): unlike later variants there is no null check on __document_id
        // before calling ToString() — confirm whether callers guarantee it is set.
        public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0; // mapped results processed (incremented whether or not a doc is written)
            Write(context, indexWriter =>
            {
                // True when anything was deleted or added; returned to Write() so it
                // knows whether the searcher needs refreshing.
                bool madeChanges = false;
                PropertyDescriptorCollection properties = null;
                var processedKeys = new HashSet<string>();
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                    .Where(x => x != null)
                    .ToList();
                // Side-effecting wrapper: runs lazily as RobustEnumeration pulls items.
                var documentsWrapped = documents.Select((dynamic doc) =>
                {
                    string documentId = doc.__document_id.ToString();
                    if (processedKeys.Add(documentId) == false)
                        return doc;
                    madeChanges = true;
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                                   name, documentId);
                            context.AddError(name,
                                             documentId,
                                             exception.Message
                                );
                        },
                        trigger => trigger.OnIndexEntryDeleted(name, documentId));
                    indexWriter.DeleteDocuments(new Term("__document_id", documentId.ToLowerInvariant()));
                    return doc;
                });
                foreach (var doc in RobustEnumeration(documentsWrapped, viewGenerator.MapDefinition, actions, context))
                {
                    count++;

                    string newDocId;
                    IEnumerable<AbstractField> fields;
                    // Dynamic JSON documents and plain objects extract fields differently.
                    if (doc is DynamicJsonObject)
                        fields = ExtractIndexDataFromDocument((DynamicJsonObject)doc, out newDocId);
                    else
                        fields = ExtractIndexDataFromDocument(properties, doc, out newDocId);

                    if (newDocId != null)
                    {
                        var luceneDoc = new Document();
                        // Id stored lowercased so the lowercased delete term above matches.
                        luceneDoc.Add(new Field("__document_id", newDocId.ToLowerInvariant(), Field.Store.YES, Field.Index.NOT_ANALYZED));

                        madeChanges = true;
                        CopyFieldsToDocument(luceneDoc, fields);
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                            {
                                logIndexing.WarnFormat(exception,
                                                       "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                       name, newDocId);
                                context.AddError(name,
                                            newDocId,
                                            exception.Message
                               );
                            },
                            trigger => trigger.OnIndexEntryCreated(name, newDocId, luceneDoc));
                        logIndexing.DebugFormat("Index '{0}' resulted in: {1}", name, luceneDoc);
                        indexWriter.AddDocument(luceneDoc);
                    }

                    actions.Indexing.IncrementSuccessIndexing();
                }
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                    {
                        logIndexing.Warn("Failed to dispose on index update trigger", e);
                        context.AddError(name, null, e.Message);
                    },
                    x => x.Dispose());
                return madeChanges;
            });
            logIndexing.DebugFormat("Indexed {0} documents for {1}", count, name);
        }
Esempio n. 38
0
		// Removes the given document keys from the index: notifies delete batchers for
		// every key, removes the matching Lucene documents in a single call, disposes
		// the batchers, and reports the change set back through IndexedItemsInfo.
		public override void Remove(string[] keys, WorkContext context)
		{
			Write((writer, analyzer, stats) =>
			{
				stats.Operation = IndexingWorkStats.Status.Ignore;
				logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), indexId));

				var batchers = context.IndexUpdateTriggers.Select(t => t.CreateBatcher(indexId))
					.Where(b => b != null)
					.ToList();

				// Delete notifications use the raw key; the index deletion below uses
				// the lowercased term the documents were stored under.
				foreach (var key in keys)
				{
					InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.DocumentIdFieldName, key));
				}

				var deletionTerms = keys
					.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant()))
					.ToArray();
				writer.DeleteDocuments(deletionTerms);

				batchers.ApplyAndIgnoreAllErrors(
					ex =>
					{
						logIndexing.WarnException("Failed to dispose on index update trigger", ex);
						context.AddError(indexId, null, ex.Message, "Dispose Trigger");
					},
					b => b.Dispose());

				return new IndexedItemsInfo(GetLastEtagFromStats())
				{
					ChangedDocs = keys.Length,
					DeletedKeys = keys
				};
			});
		}
Esempio n. 39
0
		// Serializes all index writes behind writeLock plus an on-disk lock file:
		// builds the per-write analyzer, lazily creates the IndexWriter, runs the
		// supplied action (collecting IndexingWorkStats), flushes, and recreates the
		// searcher when the action reports changed documents. Errors from any stage
		// are recorded on the context before being rethrown.
		protected void Write(WorkContext context, Func<IndexWriter, Analyzer, IndexingWorkStats, int> action)
		{
			if (disposed)
				throw new ObjectDisposedException("Index " + name + " has been disposed");
			LastIndexTime = SystemTime.UtcNow;
			lock (writeLock)
			{
				bool shouldRecreateSearcher;
				// CreateAnalyzer registers cleanup callbacks here; all run in the finally below.
				var toDispose = new List<Action>();
				Analyzer searchAnalyzer = null;
				try
				{
					waitReason = "Write"; // marks this index as busy writing; cleared in the finally below
					try
					{
						searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
					}
					catch (Exception e)
					{
						// Analyzer construction failures are recorded against the index, then rethrown.
						context.AddError(name, "Creating Analyzer", e.ToString());
						throw;
					}

					if (indexWriter == null)
					{
						indexWriter = CreateIndexWriter(directory);
					}

					// Directory-level lock guards the physical index files during the write.
					var locker = directory.MakeLock("writing-to-index.lock");
					try
					{
						var stats = new IndexingWorkStats();
						try
						{
							var changedDocs = action(indexWriter, searchAnalyzer, stats);
							docCountSinceLastOptimization += changedDocs;
							// Only refresh the searcher when something actually changed.
							shouldRecreateSearcher = changedDocs > 0;
							foreach (IIndexExtension indexExtension in indexExtensions.Values)
							{
								indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
							}
						}
						catch (Exception e)
						{
							context.AddError(name, null, e.ToString());
							throw;
						}

						UpdateIndexingStats(context, stats);

						WriteTempIndexToDiskIfNeeded(context);

						Flush(); // just make sure changes are flushed to disk
					}
					finally
					{
						locker.Release();
					}
				}
				finally
				{
					// Always release per-write resources, even when the action threw.
					currentlyIndexDocuments.Clear();
					if (searchAnalyzer != null)
						searchAnalyzer.Close();
					foreach (Action dispose in toDispose)
					{
						dispose();
					}
					waitReason = null;
					LastIndexTime = SystemTime.UtcNow;
				}
				if (shouldRecreateSearcher)
					RecreateSearcher();
			}
		}
Esempio n. 40
0
        // Map-reduce removal: first purges the stored mapped results for the removed
        // documents and schedules re-reductions for every affected reduce key/bucket,
        // then deletes the corresponding reduce entries from the Lucene index.
        public override void Remove(string[] keys, WorkContext context)
        {
            // Phase 1: clean up map-reduce storage and queue re-reduction work.
            context.TransactionalStorage.Batch(storageActions =>
            {
                var affectedBuckets = new Dictionary<ReduceKeyAndBucket, int>();
                foreach (var documentKey in keys)
                {
                    storageActions.MapReduce.DeleteMappedResultsForDocumentId(documentKey, indexId, affectedBuckets);
                }

                storageActions.MapReduce.UpdateRemovedMapReduceStats(indexId, affectedBuckets);
                foreach (var affected in affectedBuckets)
                {
                    storageActions.MapReduce.ScheduleReductions(indexId, 0, affected.Key);
                }
            });

            // Phase 2: remove the reduce results themselves from the index.
            Write((writer, analyzer, stats) =>
            {
                stats.Operation = IndexingWorkStats.Status.Ignore;
                if (logIndexing.IsDebugEnabled)
                    logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), PublicName));

                var batchers = context.IndexUpdateTriggers.Select(t => t.CreateBatcher(indexId))
                    .Where(b => b != null)
                    .ToList();

                foreach (var key in keys)
                {
                    InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.ReduceKeyFieldName, key.ToLowerInvariant()));
                }

                var deletionTerms = keys
                    .Select(k => new Term(Constants.ReduceKeyFieldName, k.ToLowerInvariant()))
                    .ToArray();
                writer.DeleteDocuments(deletionTerms);

                batchers.ApplyAndIgnoreAllErrors(
                    ex =>
                    {
                        logIndexing.WarnException("Failed to dispose on index update trigger in " + PublicName, ex);
                        context.AddError(indexId, PublicName, null, ex, "Dispose Trigger");
                    },
                    b => b.Dispose());

                return new IndexedItemsInfo(null)
                {
                    ChangedDocs = keys.Length
                };
            });
        }
Esempio n. 41
0
		// Wraps the reduce input in a RobustEnumerator that tracks reduce attempts
		// through the storage accessor and records/logs per-item failures (including
		// failures of the failure counter itself) without aborting the enumeration.
		protected IEnumerable<object> RobustEnumerationReduce(IEnumerable<object> input, IndexingFunc func,
		                                                      IStorageActionsAccessor actions, WorkContext context)
		{
			var robustEnumerator = new RobustEnumerator
			{
				BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
				CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
				OnError = (ex, item) =>
				{
					context.AddError(name, TryGetDocKey(item), ex.Message);
					logIndexing.WarnFormat(ex, "Failed to execute indexing function on {0} on {1}", name,
					                       TryGetDocKey(item));
					try
					{
						actions.Indexing.IncrementReduceIndexingFailure();
					}
					catch (Exception e)
					{
						// we don't care about error here, because it is an error on error problem
						logIndexing.WarnFormat(e, "Could not increment indexing failure rate for {0}", name);
					}
				}
			};
			return robustEnumerator.RobustEnumeration(input, func);
		}