Inheritance: IDisposable
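Example #1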
		public SuggestionQueryIndexExtension(
			WorkContext workContext,
			string key,
			StringDistance distanceType,
			bool isRunInMemory,
			string field,
			float accuracy)
		{
			this.workContext = workContext;
			this.key = key;
			this.field = field;

			if (isRunInMemory)
			{
				directory = new RAMDirectory();
			}
			else
			{
				directory = FSDirectory.Open(new DirectoryInfo(key));
			}

			this.spellChecker = new SpellChecker.Net.Search.Spell.SpellChecker(directory, null);
			this.spellChecker.SetAccuracy(accuracy);
			this.spellChecker.setStringDistance(distanceType);
		}
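The constructor above only acquires resources; since the extension is disposable, a matching Dispose would release the spell checker and the Lucene directory opened here. A minimal sketch, assuming the SpellChecker.Net spell checker exposes Close() and the directory is IDisposable (the actual implementation may differ):

		public void Dispose()
		{
			// release the spell-checker index and the directory opened in the constructor (assumed API surface)
			spellChecker.Close();
			directory.Dispose();
		}
Example #2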
		public PrefetchingBehavior(PrefetchingUser prefetchingUser, WorkContext context, BaseBatchSizeAutoTuner autoTuner)
		{
			this.context = context;
			this.autoTuner = autoTuner;
			PrefetchingUser = prefetchingUser;
			MemoryStatistics.RegisterLowMemoryHandler(this);
		}
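Example #3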
 public override void Execute(WorkContext context)
 {
     if (logger.IsDebugEnabled)
     {
         logger.Debug("Going to touch the following documents (missing references, need to check for concurrent transactions): {0}",
             string.Join(", ", Keys));
     }
   
     context.TransactionalStorage.Batch(accessor =>
     {
         foreach (var key in Keys)
         {
             foreach (var index in context.IndexStorage.Indexes)
             {
                 var set = context.DoNotTouchAgainIfMissingReferences.GetOrAdd(index, _ => new ConcurrentSet<string>(StringComparer.OrdinalIgnoreCase));
                 set.Add(key);
             }
             try
             {
                 Etag preTouchEtag;
                 Etag afterTouchEtag;
                 accessor.Documents.TouchDocument(key, out preTouchEtag, out afterTouchEtag);
             }
             catch (ConcurrencyException)
             {
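                 // a concurrent transaction touched this document at the same time; the touch can safely be skipped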
             }
         }
     });
 }
Example #4
 public override void Execute(WorkContext context)
 {
     foreach (var indexName in context.IndexDefinitionStorage.IndexNames)
     {
         context.IndexStorage.RemoveFromIndex(indexName, Keys, context);
     }
 }
Example #5
        public override void IndexDocuments(
			AbstractViewGenerator viewGenerator, 
			IEnumerable<dynamic> documents, 
			WorkContext context, 
			IStorageActionsAccessor actions, 
			DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;
            Func<object, object> documentIdFetcher = null;
            var reduceKeys = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
            var documentsWrapped = documents.Select(doc =>
            {
                var documentId = doc.__document_id;
                foreach (var reduceKey in actions.MappedResults.DeleteMappedResultsForDocumentId((string)documentId, name))
                {
                    reduceKeys.Add(reduceKey);
                }
                return doc;
            });
            foreach (var doc in RobustEnumeration(documentsWrapped, viewGenerator.MapDefinition, actions, context))
            {
                count++;

                documentIdFetcher = CreateDocumentIdFetcherIfNeeded(documentIdFetcher, doc);

                var docIdValue = documentIdFetcher(doc);
                if (docIdValue == null)
                    throw new InvalidOperationException("Could not find document id for this document");

                var reduceValue = viewGenerator.GroupByExtraction(doc);
                if (reduceValue == null)
                {
                    logIndexing.DebugFormat("Field {0} is used as the reduce key and cannot be null, skipping document {1}", viewGenerator.GroupByExtraction, docIdValue);
                    continue;
                }
                var reduceKey = ReduceKeyToString(reduceValue);
                var docId = docIdValue.ToString();

                reduceKeys.Add(reduceKey);

                var data = GetMapedData(doc);

                logIndexing.DebugFormat("Mapped result for '{0}': '{1}'", name, data);

                var hash = ComputeHash(name, reduceKey);

                actions.MappedResults.PutMappedResult(name, docId, reduceKey, data, hash);

                actions.Indexing.IncrementSuccessIndexing();
            }

            actions.Tasks.AddTask(new ReduceTask
            {
                Index = name,
                ReduceKeys = reduceKeys.ToArray()
            }, minimumTimestamp);

            logIndexing.DebugFormat("Mapped {0} documents for {1}", count, name);
        }
Example #6
		public override void Execute(WorkContext context)
		{
			if (ReduceKeys.Length == 0)
				return;

			var viewGenerator = context.IndexDefinitionStorage.GetViewGenerator(Index);
			if (viewGenerator == null)
				return; // deleted view?
			
			context.TransactionaStorage.Batch(actions =>
			{
				log.Debug("Starting to read {0} reduce keys for index {1}", ReduceKeys.Length, Index);

				var itemsToFind = ReduceKeys
					.Select(reduceKey => new GetMappedResultsParams(Index, reduceKey, MapReduceIndex.ComputeHash(Index, reduceKey)))
					.ToArray();
				var mappedResults = actions.MappedResults.GetMappedResults(itemsToFind)
					.Select(JsonToExpando.Convert);
				
				var sp = Stopwatch.StartNew();

				var results = mappedResults.ToArray();

				log.Debug("Read {0} reduce keys in {1} with {2} results for index {3}", ReduceKeys.Length, sp.Elapsed, results.Length, Index);
				sp = Stopwatch.StartNew();
				context.IndexStorage.Reduce(Index, viewGenerator, results, context, actions, ReduceKeys);
				log.Debug("Indexed {0} reduce keys in {1} with {2} results for index {3}", ReduceKeys.Length, sp.Elapsed,
				                results.Length, Index);

			});
		}
Example #7
        public PrefetchingBehavior(PrefetchingUser prefetchingUser, 
            WorkContext context, 
            BaseBatchSizeAutoTuner autoTuner, 
            string prefetchingUserDescription, 
            bool isDefault = false,
            Func<int> getPrefetchintBehavioursCount = null,
            Func<PrefetchingSummary> getPrefetcherSummary = null)
        {
            this.context = context;
            this.autoTuner = autoTuner;
            PrefetchingUser = prefetchingUser;
            this.userDescription = prefetchingUserDescription;
            this.IsDefault = isDefault;
            this.getPrefetchintBehavioursCount = getPrefetchintBehavioursCount ?? (() => 1);
            this.getPrefetcherSummary = getPrefetcherSummary ?? GetSummary;
            MemoryStatistics.RegisterLowMemoryHandler(this);
            LastTimeUsed = DateTime.MinValue;

            ingestMeter = context.MetricsCounters.DbMetrics.Meter("metrics",
                "ingest/sec", "In memory documents held by this prefetcher", TimeUnit.Seconds);
            returnedDocsMeter = context.MetricsCounters.DbMetrics.Meter("metrics",
                  "returned docs/sec", "Documents being served by this prefetcher", TimeUnit.Seconds);

            if (isDefault)
            {
                context.Database.TransactionalStorage.Batch(accessor =>
                {
                    recentEtag = accessor.Staleness.GetMostRecentDocumentEtag();
                });
            }
        }
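Example #8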
		protected AbstractIndexingExecuter(
			ITransactionalStorage transactionalStorage, WorkContext context, TaskScheduler scheduler)
		{
			this.transactionalStorage = transactionalStorage;
			this.context = context;
			this.scheduler = scheduler;
		}
Example #9
 public MapReduceIndex(Directory directory, int id, IndexDefinition indexDefinition,
                       AbstractViewGenerator viewGenerator, WorkContext context)
     : base(directory, id, indexDefinition, viewGenerator, context)
 {
     jsonSerializer = JsonExtensions.CreateDefaultJsonSerializer();
     jsonSerializer.Converters = MapReduceConverters;
 }
Example #10
        public override void Execute(WorkContext context)
        {
            if (ReduceKeys.Length == 0)
                return;

            var viewGenerator = context.IndexDefinitionStorage.GetViewGenerator(Index);
            if (viewGenerator == null)
                return; // deleted view?

            context.TransactionaStorage.Batch(actions =>
            {
                IEnumerable<object> mappedResults = null;
                foreach (var reduceKey in ReduceKeys)
                {
                    IEnumerable<object> enumerable = actions.MappedResults.GetMappedResults(Index, reduceKey, MapReduceIndex.ComputeHash(Index, reduceKey))
                        .Select(JsonToExpando.Convert);

                    if (mappedResults == null)
                        mappedResults = enumerable;
                    else
                        mappedResults = mappedResults.Concat(enumerable);
                }

                context.IndexStorage.Reduce(Index, viewGenerator, mappedResults, context, actions, ReduceKeys);
            });
        }
Example #11
        protected AbstractIndexingExecuter(WorkContext context, IndexReplacer indexReplacer)
        {
            this.transactionalStorage = context.TransactionalStorage;
            this.context = context;
	        this.indexReplacer = indexReplacer;
	        this.scheduler = context.TaskScheduler;
        }
Example #12
		public IndexingExecuter(WorkContext context, DatabaseEtagSynchronizer synchronizer, Prefetcher prefetcher)
			: base(context)
		{
			autoTuner = new IndexBatchSizeAutoTuner(context);
			etagSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);
			prefetchingBehavior = prefetcher.GetPrefetchingBehavior(PrefetchingUser.Indexer, autoTuner);
		}
Example #13
 protected BaseBatchSizeAutoTuner(WorkContext context)
 {
     this.context = context;
     NumberOfItemsToIndexInSingleBatch = InitialNumberOfItems;
     currentlyUsedBatchSizes = new ConcurrentDictionary<Guid, long>();
     memoryLimitForIndexingInBytes = context.Configuration.MemoryLimitForIndexingInMB * 1024 * 1024;
 }
Example #14
		public IndexingExecuter(WorkContext context, Prefetcher prefetcher)
			: base(context)
		{
			autoTuner = new IndexBatchSizeAutoTuner(context);
			this.prefetcher = prefetcher;
			defaultPrefetchingBehavior = prefetcher.CreatePrefetchingBehavior(PrefetchingUser.Indexer, autoTuner);
			prefetchingBehaviors.TryAdd(defaultPrefetchingBehavior);
		}
Example #15
 protected AbstractIndexingExecuter(WorkContext context, IndexReplacer indexReplacer)
 {
     Log = LogManager.GetLogger(GetType());
     this.transactionalStorage = context.TransactionalStorage;
     this.context = context;
     this.indexReplacer = indexReplacer;
     this.scheduler = context.TaskScheduler;
 }
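Example #16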
		public override void Execute(WorkContext context)
		{
			if (logger.IsDebugEnabled)
			{
				logger.Debug("Going to touch the following documents (LoadDocument references, need to check for concurrent transactions): {0}",
					string.Join(", ", ReferencesToCheck));
			}

			using (context.Database.TransactionalStorage.DisableBatchNesting())
			{
				var docsToTouch = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
				context.TransactionalStorage.Batch(accessor =>
				{
					foreach (var kvp in ReferencesToCheck)
					{
						var doc = accessor.Documents.DocumentMetadataByKey(kvp.Key, null);

					    if (doc == null)
					    {
                            logger.Debug("Cannot touch {0}, non-existent document", kvp.Key);
					        continue;
					    }
					    if (doc.Etag == kvp.Value)
					    {
					        logger.Debug("Don't need to touch {0}, etag {1} is the same as when we last saw it", kvp.Key, doc.Etag);
                            continue;
					    }


						docsToTouch.Add(kvp.Key);
					}
				});

				using (context.Database.DocumentLock.Lock())
				{
					context.TransactionalStorage.Batch(accessor =>
					{
						foreach (var doc in docsToTouch)
						{
							try
							{
								Etag preTouchEtag;
								Etag afterTouchEtag;
								accessor.Documents.TouchDocument(doc, out preTouchEtag, out afterTouchEtag);
								logger.Debug("Touching document: {0}, etag before touch: {1}, after touch {2}", doc, preTouchEtag, afterTouchEtag);
							}
							catch (ConcurrencyException)
							{
                                logger.Info("Concurrency exception when touching {0}", doc);
							}
							context.Database.CheckReferenceBecauseOfDocumentUpdate(doc, accessor);
						}
					});
				}

			}
		}
Example #17
 public IndexingExecuter(WorkContext context, Prefetcher prefetcher, IndexReplacer indexReplacer)
     : base(context, indexReplacer)
 {
     autoTuner = new IndexBatchSizeAutoTuner(context);
     this.prefetcher = prefetcher;
     defaultPrefetchingBehavior = prefetcher.CreatePrefetchingBehavior(PrefetchingUser.Indexer, autoTuner, "Default Prefetching behavior", true);
     defaultPrefetchingBehavior.ShouldHandleUnusedDocumentsAddedAfterCommit = true;
     prefetchingBehaviors.TryAdd(defaultPrefetchingBehavior);
 }
Example #18
        public DocumentDatabase(InMemoryRavenConfiguration configuration)
        {
            Configuration = configuration;

            configuration.Container.SatisfyImportsOnce(this);

            workContext = new WorkContext
            {
                IndexUpdateTriggers = IndexUpdateTriggers,
                ReadTriggers = ReadTriggers
            };
            dynamicQueryRunner = new DynamicQueryRunner(this);
            suggestionQueryRunner = new SuggestionQueryRunner(this);

            TransactionalStorage = configuration.CreateTransactionalStorage(workContext.HandleWorkNotifications);
            configuration.Container.SatisfyImportsOnce(TransactionalStorage);

            bool newDb;
            try
            {
                newDb = TransactionalStorage.Initialize(this);
            }
            catch (Exception)
            {
                TransactionalStorage.Dispose();
                throw;
            }

            TransactionalStorage.Batch(actions => currentEtagBase = actions.General.GetNextIdentityValue("Raven/Etag"));

            IndexDefinitionStorage = new IndexDefinitionStorage(
                configuration,
                TransactionalStorage,
                configuration.DataDirectory,
                configuration.Container.GetExportedValues<AbstractViewGenerator>(),
                Extensions);
            IndexStorage = new IndexStorage(IndexDefinitionStorage, configuration);

            workContext.IndexStorage = IndexStorage;
            workContext.TransactionaStorage = TransactionalStorage;
            workContext.IndexDefinitionStorage = IndexDefinitionStorage;

            try
            {
                InitializeTriggers();
                ExecuteStartupTasks();
            }
            catch (Exception)
            {
                Dispose();
                throw;
            }
            if (!newDb)
                return;

            OnNewlyCreatedDatabase();
        }
Example #19
	    protected BaseBatchSizeAutoTuner(WorkContext context)
        {
            this.context = context;
            NumberOfItemsToIndexInSingleBatch = InitialNumberOfItems;
            currentlyUsedBatchSizes = new ConcurrentDictionary<Guid, long>();
            memoryLimitForIndexingInBytes = context.Configuration.MemoryLimitForIndexingInMB * 1024 * 1024;
	        FetchingDocumentsFromDiskTimeout = TimeSpan.FromSeconds(context.Configuration.FetchingDocumentsFromDiskTimeoutInSeconds);
			maximumSizeAllowedToFetchFromStorageInMb = context.Configuration.MaximumSizeAllowedToFetchFromStorageInMb;
        }
Example #20
        public DocumentDatabase(InMemoryRavenConfiguration configuration)
        {
            Configuration = configuration;

            configuration.Container.SatisfyImportsOnce(this);

            workContext = new WorkContext
            {
            	IndexUpdateTriggers = IndexUpdateTriggers,
				ReadTriggers = ReadTriggers
            };
            dynamicQueryRunner = new DynamicQueryRunner(this);

            TransactionalStorage = configuration.CreateTransactionalStorage(workContext.NotifyAboutWork);
            configuration.Container.SatisfyImportsOnce(TransactionalStorage);

            bool newDb;
            try
            {
                newDb = TransactionalStorage.Initialize();
            }
            catch (Exception)
            {
                TransactionalStorage.Dispose();
                throw;
            }

            IndexDefinitionStorage = new IndexDefinitionStorage(
                configuration,
                TransactionalStorage,
                configuration.DataDirectory,
                configuration.Container.GetExportedValues<AbstractViewGenerator>(),
                Extensions);
            IndexStorage = new IndexStorage(IndexDefinitionStorage, configuration);

            workContext.PerformanceCounters = new PerformanceCounters("Instance @ " + configuration.Port);
            workContext.IndexStorage = IndexStorage;
            workContext.TransactionaStorage = TransactionalStorage;
            workContext.IndexDefinitionStorage = IndexDefinitionStorage;


            try
            {
                InitializeTriggers();
                ExecuteStartupTasks();
            }
            catch (Exception)
            {
                Dispose();
                throw;
            }
            if (!newDb)
                return;

            OnNewlyCreatedDatabase();
        }
Example #21
		public override void Execute(WorkContext context)
		{
			var keysToRemove = new HashSet<string>();
			context.TransactionaStorage.Batch(accessor =>
			{
				keysToRemove = new HashSet<string>(Keys.Where(key=>accessor.Documents.DocumentMetadataByKey(key, null) == null));
				accessor.Indexing.TouchIndexEtag(Index);
			});
			context.IndexStorage.RemoveFromIndex(Index, keysToRemove.ToArray(), context);
		}
Example #22
		public MapReduceIndex(Directory directory, string name, IndexDefinition indexDefinition,
							  AbstractViewGenerator viewGenerator, WorkContext context)
			: base(directory, name, indexDefinition, viewGenerator, context)
		{
			jsonSerializer = new JsonSerializer();
			foreach (var jsonConverter in Default.Converters)
			{
				jsonSerializer.Converters.Add(jsonConverter);
			}
		}
Example #23
		public override void Execute(WorkContext context)
		{
			var keysToRemove = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
			context.TransactionaStorage.Batch(accessor =>
			{
				keysToRemove = new HashSet<string>(Keys.Where(key=>FilterDocuments(context, accessor, key)));
				accessor.Indexing.TouchIndexEtag(Index);
			});
			context.IndexStorage.RemoveFromIndex(Index, keysToRemove.ToArray(), context);
		}
Example #24
		protected BaseBatchSizeAutoTuner(WorkContext context)
		{
			this.context = context;
	        FetchingDocumentsFromDiskTimeout = TimeSpan.FromSeconds(context.Configuration.Prefetcher.FetchingDocumentsFromDiskTimeoutInSeconds);
			maximumSizeAllowedToFetchFromStorageInMb = context.Configuration.Prefetcher.MaximumSizeAllowedToFetchFromStorageInMb;
// ReSharper disable once DoNotCallOverridableMethodsInConstructor
			NumberOfItemsToProcessInSingleBatch = InitialNumberOfItems;
			MemoryStatistics.RegisterLowMemoryHandler(this);
			_currentlyUsedBatchSizesInBytes = new ConcurrentDictionary<Guid, long>();
		}
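Example #25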
        public override void Execute(WorkContext context)
        {
            if (logger.IsDebugEnabled)
            {
                logger.Debug("Going to touch the following documents (missing references, need to check for concurrent transactions): {0}",
                    string.Join(", ", MissingReferences));
            }
          
            context.TransactionalStorage.Batch(accessor =>
            {
                foreach (var docWithMissingRef in MissingReferences)
                {
                    foreach (var index in context.IndexStorage.Indexes)
                    {
                        var set = context.DoNotTouchAgainIfMissingReferences.GetOrAdd(index, _ => new ConcurrentSet<string>(StringComparer.OrdinalIgnoreCase));
                        set.Add(docWithMissingRef.Key);
                    }

	                bool foundReference = false;

					using (context.TransactionalStorage.DisableBatchNesting())
					{
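						// read through a fresh, non-nested batch so the check sees the latest committed state of the referenced documents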
						context.TransactionalStorage.Batch(freshAccessor =>
						{
							foreach (var missingRef in docWithMissingRef.Value)
							{
								var doc = freshAccessor.Documents.DocumentMetadataByKey(missingRef, null);

								if (doc == null) 
									continue;
								
								foundReference = true;
								break;
							}
						});
					}

					if(foundReference == false)
						continue;

                    try
                    {
                        using (context.Database.DocumentLock.Lock())
                        {
                            Etag preTouchEtag;
                            Etag afterTouchEtag;
                            accessor.Documents.TouchDocument(docWithMissingRef.Key, out preTouchEtag, out afterTouchEtag);
                        }
                    }
                    catch (ConcurrencyException)
                    {
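                        // a concurrent transaction already touched this document; nothing more to do here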
                    }
                }
            });
        }
Example #26
		public MapReduceIndex(Directory directory, int id, IndexDefinition indexDefinition,
							  AbstractViewGenerator viewGenerator, WorkContext context)
			: base(directory, id, indexDefinition, viewGenerator, context)
		{
			jsonSerializer = new JsonSerializer();
			foreach (var jsonConverter in Default.Converters)
			{
				jsonSerializer.Converters.Add(jsonConverter);
			}
			jsonSerializer.Converters.Add(new IgnoreFieldable());
		}
Example #27
        public override void IndexDocuments(
            AbstractViewGenerator viewGenerator,
            IEnumerable<object> documents,
            WorkContext context,
            DocumentStorageActions actions)
        {
            actions.SetCurrentIndexStatsTo(name);
            var count = 0;
            PropertyDescriptor groupByPropertyDescriptor = null;
            PropertyDescriptor documentIdPropertyDescriptor = null;
            var reduceKeys = new HashSet<string>();
            foreach (var doc in RobustEnumeration(documents, viewGenerator.MapDefinition, actions, context))
            {
                count++;

                if (groupByPropertyDescriptor == null)
                {
                    var props = TypeDescriptor.GetProperties(doc);
                    groupByPropertyDescriptor = props.Find(viewGenerator.GroupByField, false);
                    documentIdPropertyDescriptor = props.Find("__document_id", false);
                }

                var docIdValue = documentIdPropertyDescriptor.GetValue(doc);
                if (docIdValue == null)
                    throw new InvalidOperationException("Could not find document id for this document");

                var reduceValue = groupByPropertyDescriptor.GetValue(doc);
                if (reduceValue == null)
                {
                    log.DebugFormat("Field {0} is used as the reduce key and cannot be null, skipping document {1}", viewGenerator.GroupByField, docIdValue);
                    continue;
                }
                var reduceKey = reduceValue.ToString();
                var docId = docIdValue.ToString();

                reduceKeys.Add(reduceKey);

                actions.PutMappedResult(name, docId, reduceKey, JObject.FromObject(doc).ToString(Formatting.None));

                actions.IncrementSuccessIndexing();
            }

            foreach (var reduceKey in reduceKeys)
            {
                actions.AddTask(new ReduceTask
                {
                    Index = name,
                    ReduceKey = reduceKey
                });
            }

            log.DebugFormat("Mapped {0} documents for {1}", count, name);
        }
Example #28
 public override void Remove(string[] keys, WorkContext context)
 {
     Write(writer =>
     {
         if (log.IsDebugEnabled)
         {
             log.DebugFormat("Deleting ({0}) from {1}", string.Join(", ", keys), name);
         }
         writer.DeleteDocuments(keys.Select(k => new Term("__document_id", k)).ToArray());
         return true;
     });
 }
Example #29
		public override void IndexDocuments(
			AbstractViewGenerator viewGenerator, 
			IEnumerable<object> documents,
			WorkContext context,
			IStorageActionsAccessor actions)
		{
			actions.Indexing.SetCurrentIndexStatsTo(name);
			var count = 0;
			Write(indexWriter =>
			{
				bool madeChanges = false;
				PropertyDescriptorCollection properties = null;
				var processedKeys = new HashSet<string>();
				var documentsWrapped = documents.Select((dynamic doc) =>
				{
					var documentId = doc.__document_id.ToString();
					if (processedKeys.Add(documentId) == false)
						return doc;
					madeChanges = true;
					context.IndexUpdateTriggers.Apply(trigger => trigger.OnIndexEntryDeleted(name, documentId));
					indexWriter.DeleteDocuments(new Term("__document_id", documentId));
					return doc;
				});
				foreach (var doc in RobustEnumeration(documentsWrapped, viewGenerator.MapDefinition, actions, context))
				{
					count++;

				    string newDocId;
				    IEnumerable<AbstractField> fields;
                    if (doc is DynamicJsonObject)
                        fields = ExtractIndexDataFromDocument((DynamicJsonObject) doc, out newDocId);
                    else
                        fields = ExtractIndexDataFromDocument(properties, doc, out newDocId);
				   
                    if (newDocId != null)
                    {
                        var luceneDoc = new Document();
                        luceneDoc.Add(new Field("__document_id", newDocId, Field.Store.YES, Field.Index.NOT_ANALYZED));

                    	madeChanges = true;
                        CopyFieldsToDocument(luceneDoc, fields);
                        context.IndexUpdateTriggers.Apply(trigger => trigger.OnIndexEntryCreated(name, newDocId, luceneDoc));
                        log.DebugFormat("Index '{0}' resulted in: {1}", name, luceneDoc);
                        indexWriter.AddDocument(luceneDoc);
                    }

					actions.Indexing.IncrementSuccessIndexing();
				}

				return madeChanges;
			});
			log.DebugFormat("Indexed {0} documents for {1}", count, name);
		}
Example #30
        public override void IndexDocuments(
            AbstractViewGenerator viewGenerator,
            IEnumerable<object> documents,
            WorkContext context,
            DocumentStorageActions actions)
        {
            actions.SetCurrentIndexStatsTo(name);
            var count = 0;
            Func<object, object> documentIdFetcher = null;
            var reduceKeys = new HashSet<string>();
            foreach (var doc in RobustEnumeration(documents, viewGenerator.MapDefinition, actions, context))
            {
                count++;

                documentIdFetcher = CreateDocumentIdFetcherIfNeeded(documentIdFetcher, doc);

                var docIdValue = documentIdFetcher(doc);
                if (docIdValue == null)
                    throw new InvalidOperationException("Could not find document id for this document");

                var reduceValue = viewGenerator.GroupByExtraction(doc);
                if (reduceValue == null)
                {
                    log.DebugFormat("Field {0} is used as the reduce key and cannot be null, skipping document {1}", viewGenerator.GroupByExtraction, docIdValue);
                    continue;
                }
                var reduceKey = ReduceKeyToString(reduceValue);
                var docId = docIdValue.ToString();

                reduceKeys.Add(reduceKey);

                string data = GetMapedData(doc);

                log.DebugFormat("Mapped result for '{0}': '{1}'", name, data);

                var hash = ComputeHash(name, reduceKey);

                actions.PutMappedResult(name, docId, reduceKey, data, hash);

                actions.IncrementSuccessIndexing();
            }

            foreach (var reduceKey in reduceKeys)
            {
                actions.AddTask(new ReduceTask
                {
                    Index = name,
                    ReduceKey = reduceKey
                });
            }

            log.DebugFormat("Mapped {0} documents for {1}", count, name);
        }
Example #31
 public SimpleIndex(Directory directory, string name, IndexDefinition indexDefinition, AbstractViewGenerator viewGenerator, WorkContext context)
     : base(directory, name, indexDefinition, viewGenerator, context)
 {
 }
Example #32
        public void Index(string index, AbstractViewGenerator viewGenerator, IEnumerable <dynamic> docs, WorkContext context,
                          IStorageActionsAccessor actions)
        {
            Index value;

            if (indexes.TryGetValue(index, out value) == false)
            {
                log.DebugFormat("Tried to index on a non-existent index {0}, ignoring", index);
                return;
            }
            value.IndexDocuments(viewGenerator, docs, context, actions);
        }
Example #33
        public override void IndexDocuments(
            AbstractViewGenerator viewGenerator,
            IEnumerable <dynamic> documents,
            WorkContext context,
            IStorageActionsAccessor actions,
            DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;
            Func <object, object> documentIdFetcher = null;
            var reduceKeys       = new HashSet <string>(StringComparer.InvariantCultureIgnoreCase);
            var documentsWrapped = documents.Select(doc =>
            {
                var documentId = doc.__document_id;
                foreach (var reduceKey in actions.MappedResults.DeleteMappedResultsForDocumentId((string)documentId, name))
                {
                    reduceKeys.Add(reduceKey);
                }
                return(doc);
            });

            foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinition, actions, context))
            {
                count++;

                documentIdFetcher = CreateDocumentIdFetcherIfNeeded(documentIdFetcher, doc);

                var docIdValue = documentIdFetcher(doc);
                if (docIdValue == null)
                {
                    throw new InvalidOperationException("Could not find document id for this document");
                }

                var reduceValue = viewGenerator.GroupByExtraction(doc);
                if (reduceValue == null)
                {
                    logIndexing.DebugFormat("Field {0} is used as the reduce key and cannot be null, skipping document {1}", viewGenerator.GroupByExtraction, docIdValue);
                    continue;
                }
                var reduceKey = ReduceKeyToString(reduceValue);
                var docId     = docIdValue.ToString();

                reduceKeys.Add(reduceKey);

                var data = GetMapedData(doc);

                logIndexing.DebugFormat("Mapped result for '{0}': '{1}'", name, data);

                var hash = ComputeHash(name, reduceKey);

                actions.MappedResults.PutMappedResult(name, docId, reduceKey, data, hash);

                actions.Indexing.IncrementSuccessIndexing();
            }

            if (reduceKeys.Count > 0)
            {
                actions.Tasks.AddTask(new ReduceTask
                {
                    Index      = name,
                    ReduceKeys = reduceKeys.ToArray()
                }, minimumTimestamp);
            }

            logIndexing.DebugFormat("Mapped {0} documents for {1}", count, name);
        }
Example #34
 protected AbstractIndexingExecuter(WorkContext context)
 {
     this.transactionalStorage = context.TransactionalStorage;
     this.context   = context;
     this.scheduler = context.TaskScheduler;
 }
Example #35
 public ReducingExecuter(ITransactionalStorage transactionalStorage, WorkContext context, TaskScheduler scheduler)
     : base(transactionalStorage, context, scheduler)
 {
 }
Example #36
 public abstract void Remove(string[] keys, WorkContext context);
Example #37
 public IndexingExecuter(WorkContext context)
     : base(context)
 {
     autoTuner = new IndexBatchSizeAutoTuner(context);
 }
Example #38
        public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable <object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;

            Write(context, (indexWriter, analyzer) =>
            {
                bool madeChanges = false;
                PropertyDescriptorCollection properties = null;
                var processedKeys = new HashSet <string>();
                var batchers      = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                                    .Where(x => x != null)
                                    .ToList();
                var documentsWrapped = documents.Select((dynamic doc) =>
                {
                    string documentId = doc.__document_id.ToString();
                    if (processedKeys.Add(documentId) == false)
                    {
                        return(doc);
                    }
                    madeChanges = true;
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                               name, documentId);
                        context.AddError(name,
                                         documentId,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(name, documentId));
                    indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
                    return(doc);
                });
                foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinition, actions, context))
                {
                    count++;

                    IndexingResult indexingResult;
                    if (doc is DynamicJsonObject)
                    {
                        indexingResult = ExtractIndexDataFromDocument((DynamicJsonObject)doc);
                    }
                    else
                    {
                        indexingResult = ExtractIndexDataFromDocument(properties, doc);
                    }

                    if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
                    {
                        var luceneDoc = new Document();
                        luceneDoc.Add(new Field(Constants.DocumentIdFieldName, indexingResult.NewDocId.ToLowerInvariant(), Field.Store.YES,
                                                Field.Index.NOT_ANALYZED));

                        madeChanges = true;
                        CopyFieldsToDocument(luceneDoc, indexingResult.Fields);
                        batchers.ApplyAndIgnoreAllErrors(
                            exception =>
                        {
                            logIndexing.WarnFormat(exception,
                                                   "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                                   name, indexingResult.NewDocId);
                            context.AddError(name,
                                             indexingResult.NewDocId,
                                             exception.Message
                                             );
                        },
                            trigger => trigger.OnIndexEntryCreated(name, indexingResult.NewDocId, luceneDoc));
                        logIndexing.DebugFormat("Index '{0}' resulted in: {1}", name, luceneDoc);
                        AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    }

                    actions.Indexing.IncrementSuccessIndexing();
                }
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.Warn("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(madeChanges);
            });
            logIndexing.DebugFormat("Indexed {0} documents for {1}", count, name);
        }
Example #39
 public TasksExecuter(ITransactionalStorage transactionalStorage, WorkContext context)
 {
     this.transactionalStorage = transactionalStorage;
     this.context = context;
 }
Example #40
            public ReduceDocuments(MapReduceIndex parent, AbstractViewGenerator viewGenerator, IEnumerable <IGrouping <int, object> > mappedResultsByBucket, int level, WorkContext context, IStorageActionsAccessor actions, HashSet <string> reduceKeys, int inputCount)
            {
                this.parent           = parent;
                this.inputCount       = inputCount;
                indexId               = this.parent.indexId;
                ViewGenerator         = viewGenerator;
                MappedResultsByBucket = mappedResultsByBucket;
                Level      = level;
                Context    = context;
                Actions    = actions;
                ReduceKeys = reduceKeys;

                anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(this.parent.context.Database, this.parent.indexDefinition, ViewGenerator, logIndexing);

                if (Level == 2)
                {
                    batchers = Context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
                               .Where(x => x != null)
                               .ToList();
                }
            }
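Example #41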
 protected AbstractIndexingExecuter(ITransactionalStorage transactionalStorage, WorkContext context, TaskScheduler scheduler)
 {
     this.transactionalStorage = transactionalStorage;
     this.context   = context;
     this.scheduler = scheduler;
 }
Example #42
 protected IEnumerable <object> RobustEnumerationReduce(IEnumerable <object> input, IndexingFunc func,
                                                        IStorageActionsAccessor actions, WorkContext context)
 {
     // not strictly accurate, but if we get that many errors, probably an error anyway.
     return(new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
     {
         BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
         CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
         OnError = (exception, o) =>
         {
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                              );
             logIndexing.WarnFormat(exception, "Failed to execute indexing function on {0} on {1}", name,
                                    TryGetDocKey(o));
             try
             {
                 actions.Indexing.IncrementReduceIndexingFailure();
             }
             catch (Exception e)
             {
                 // we don't care about error here, because it is an error on error problem
                 logIndexing.WarnFormat(e, "Could not increment indexing failure rate for {0}", name);
             }
         }
     }.RobustEnumeration(input, func));
 }
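Example #43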
 public IndependentBatchSizeAutoTuner(WorkContext context, PrefetchingUser user)
     : base(context)
 {
     this.User = user;
     InstallGauges();
 }
Example #44
        public override void Remove(string[] keys, WorkContext context)
        {
            DeletionBatchInfo deletionBatchInfo = null;

            try
            {
                deletionBatchInfo = context.ReportDeletionBatchStarted(PublicName, keys.Length);

                context.TransactionalStorage.Batch(actions =>
                {
                    var storageCommitDuration = new Stopwatch();

                    actions.BeforeStorageCommit += storageCommitDuration.Start;

                    actions.AfterStorageCommit += () =>
                    {
                        storageCommitDuration.Stop();

                        deletionBatchInfo.PerformanceStats.Add(PerformanceStats.From(IndexingOperation.StorageCommit, storageCommitDuration.ElapsedMilliseconds));
                    };

                    var reduceKeyAndBuckets = new Dictionary <ReduceKeyAndBucket, int>();

                    var deleteMappedResultsDuration = new Stopwatch();

                    using (StopwatchScope.For(deleteMappedResultsDuration))
                    {
                        if (actions.MapReduce.HasMappedResultsForIndex(indexId))
                        {
                            foreach (var key in keys)
                            {
                                actions.MapReduce.DeleteMappedResultsForDocumentId(key, indexId, reduceKeyAndBuckets);
                                context.CancellationToken.ThrowIfCancellationRequested();
                            }
                        }
                    }

                    deletionBatchInfo.PerformanceStats.Add(PerformanceStats.From(IndexingOperation.Delete_DeleteMappedResultsForDocumentId, deleteMappedResultsDuration.ElapsedMilliseconds));

                    actions.MapReduce.UpdateRemovedMapReduceStats(indexId, reduceKeyAndBuckets, context.CancellationToken);

                    var scheduleReductionsDuration = new Stopwatch();

                    using (StopwatchScope.For(scheduleReductionsDuration))
                    {
                        foreach (var reduceKeyAndBucket in reduceKeyAndBuckets)
                        {
                            actions.MapReduce.ScheduleReductions(indexId, 0, reduceKeyAndBucket.Key);
                            context.CancellationToken.ThrowIfCancellationRequested();
                        }
                    }

                    deletionBatchInfo.PerformanceStats.Add(PerformanceStats.From(IndexingOperation.Reduce_ScheduleReductions, scheduleReductionsDuration.ElapsedMilliseconds));
                });
            }
            finally
            {
                if (deletionBatchInfo != null)
                {
                    context.ReportDeletionBatchCompleted(deletionBatchInfo);
                }
            }
        }
Example #45
 public RobustEnumerator(WorkContext context, int numberOfConsecutiveErrors)
 {
     this.context = context;
     this.numberOfConsecutiveErrors = numberOfConsecutiveErrors;
 }
Example #46
 public ReduceBatchSizeAutoTuner(WorkContext context)
     : base(context)
 {
     LastAmountOfItemsToRemember = 1;
     InstallGauges();
 }
Example #47
 public IndexingExecuter(WorkContext context, Prefetcher prefetcher)
     : base(context)
 {
     autoTuner           = new IndexBatchSizeAutoTuner(context);
     prefetchingBehavior = prefetcher.GetPrefetchingBehavior(PrefetchingUser.Indexer, autoTuner);
 }
Example #48
 public ReducingExecuter(WorkContext context, IndexReplacer indexReplacer)
     : base(context, indexReplacer)
 {
     autoTuner = new ReduceBatchSizeAutoTuner(context);
 }
Example #49
        protected void Write(WorkContext context, Func <IndexWriter, Analyzer, bool> action)
        {
            if (disposed)
            {
                throw new ObjectDisposedException("Index " + name + " has been disposed");
            }
            lock (writeLock)
            {
                bool     shouldRecreateSearcher;
                var      toDispose = new List <Action>();
                Analyzer analyzer  = null;
                try
                {
                    try
                    {
                        analyzer = CreateAnalyzer(new LowerCaseAnalyzer(), toDispose);
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, "Creating Analyzer", e.ToString());
                        throw;
                    }

                    if (indexWriter == null)
                    {
                        indexWriter = new IndexWriter(directory, new StopAnalyzer(Version.LUCENE_29), IndexWriter.MaxFieldLength.UNLIMITED);
                    }

                    try
                    {
                        shouldRecreateSearcher = action(indexWriter, analyzer);
                        foreach (IIndexExtension indexExtension in indexExtensions.Values)
                        {
                            indexExtension.OnDocumentsIndexed(currentlyIndexDocumented);
                        }
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, null, e.ToString());
                        throw;
                    }

                    WriteTempIndexToDiskIfNeeded(context);
                }
                finally
                {
                    currentlyIndexDocumented.Clear();
                    if (analyzer != null)
                    {
                        analyzer.Close();
                    }
                    foreach (Action dispose in toDispose)
                    {
                        dispose();
                    }
                }
                if (shouldRecreateSearcher)
                {
                    RecreateSearcher();
                }
            }
        }
Example #50
 public BaseBatchSizeAutoTuner(WorkContext context)
 {
     this.context = context;
     this.NumberOfItemsToIndexInSingleBatch = InitialNumberOfItems;
 }
Example #51
 protected IEnumerable <object> RobustEnumerationIndex(IEnumerable <object> input, IEnumerable <IndexingFunc> funcs,
                                                       IStorageActionsAccessor actions, WorkContext context)
 {
     return(new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
     {
         BeforeMoveNext = actions.Indexing.IncrementIndexingAttempt,
         CancelMoveNext = actions.Indexing.DecrementIndexingAttempt,
         OnError = (exception, o) =>
         {
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                              );
             logIndexing.WarnException(
                 String.Format("Failed to execute indexing function on {0} on {1}", name,
                               TryGetDocKey(o)),
                 exception);
             try
             {
                 actions.Indexing.IncrementIndexingFailure();
             }
             catch (Exception e)
             {
                 // we don't care about error here, because it is an error on error problem
                 logIndexing.WarnException(
                     String.Format("Could not increment indexing failure rate for {0}", name),
                     e);
             }
         }
     }.RobustEnumeration(input, funcs));
 }
Example #52
        public void ReduceDocuments(AbstractViewGenerator viewGenerator,
                                    IEnumerable <object> mappedResults,
                                    WorkContext context,
                                    IStorageActionsAccessor actions,
                                    string[] reduceKeys)
        {
            actions.Indexing.SetCurrentIndexStatsTo(name);
            var count = 0;

            Write(context, (indexWriter, analyzer) =>
            {
                var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
                               .Where(x => x != null)
                               .ToList();
                foreach (var reduceKey in reduceKeys)
                {
                    var entryKey = reduceKey;
                    indexWriter.DeleteDocuments(new Term("__reduce_key", entryKey.ToLowerInvariant()));
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
                                               name, entryKey);
                        context.AddError(name,
                                         entryKey,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryDeleted(name, entryKey));
                }
                PropertyDescriptorCollection properties = null;
                foreach (var doc in RobustEnumerationReduce(mappedResults, viewGenerator.ReduceDefinition, actions, context))
                {
                    count++;
                    var fields = GetFields(doc, ref properties).ToList();

                    string reduceKeyAsString = ExtractReduceKey(viewGenerator, doc);

                    var luceneDoc = new Document();
                    luceneDoc.Add(new Field("__reduce_key", reduceKeyAsString.ToLowerInvariant(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                    foreach (var field in fields)
                    {
                        luceneDoc.Add(field);
                    }
                    batchers.ApplyAndIgnoreAllErrors(
                        exception =>
                    {
                        logIndexing.WarnFormat(exception,
                                               "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
                                               name, reduceKeyAsString);
                        context.AddError(name,
                                         reduceKeyAsString,
                                         exception.Message
                                         );
                    },
                        trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
                    logIndexing.DebugFormat("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
                    AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
                    actions.Indexing.IncrementReduceSuccessIndexing();
                }
                batchers.ApplyAndIgnoreAllErrors(
                    e =>
                {
                    logIndexing.Warn("Failed to dispose on index update trigger", e);
                    context.AddError(name, null, e.Message);
                },
                    x => x.Dispose());
                return(true);
            });
            if (logIndexing.IsDebugEnabled)
            {
                logIndexing.DebugFormat("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name, string.Join(", ", reduceKeys));
            }
        }
Example #53
 public abstract void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable <object> documents,
                                     WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp);
Example #54
 public ReducingExecuter(WorkContext context)
     : base(context)
 {
     autoTuner = new ReduceBatchSizeAutoTuner(context);
 }
Example #55
        public override void IndexDocuments(
            AbstractViewGenerator viewGenerator,
            IndexingBatch batch,
            WorkContext context,
            IStorageActionsAccessor actions,
            DateTime minimumTimestamp)
        {
            var count            = 0;
            var sourceCount      = 0;
            var sw               = Stopwatch.StartNew();
            var start            = SystemTime.UtcNow;
            var changed          = new HashSet <ReduceKeyAndBucket>();
            var documentsWrapped = batch.Docs.Select(doc =>
            {
                sourceCount++;
                var documentId = doc.__document_id;
                actions.MapReduce.DeleteMappedResultsForDocumentId((string)documentId, name, changed);
                return(doc);
            })
                                   .Where(x => x is FilteredDocument == false);
            var stats = new IndexingWorkStats();

            foreach (
                var mappedResultFromDocument in
                GroupByDocumentId(context,
                                  RobustEnumerationIndex(documentsWrapped.GetEnumerator(), viewGenerator.MapDefinitions, actions, stats)))
            {
                var dynamicResults = mappedResultFromDocument.Select(x => (object)new DynamicJsonObject(RavenJObject.FromObject(x, jsonSerializer))).ToList();
                foreach (
                    var doc in
                    RobustEnumerationReduceDuringMapPhase(dynamicResults.GetEnumerator(), viewGenerator.ReduceDefinition, actions, context))
                {
                    count++;

                    var reduceValue = viewGenerator.GroupByExtraction(doc);
                    if (reduceValue == null)
                    {
                        logIndexing.Debug("Field {0} is used as the reduce key and cannot be null, skipping document {1}",
                                          viewGenerator.GroupByExtraction, mappedResultFromDocument.Key);
                        continue;
                    }
                    var reduceKey = ReduceKeyToString(reduceValue);
                    var docId     = mappedResultFromDocument.Key.ToString();

                    var data = GetMappedData(doc);

                    logIndexing.Debug("Mapped result for index '{0}' doc '{1}': '{2}'", name, docId, data);

                    actions.MapReduce.PutMappedResult(name, docId, reduceKey, data);

                    changed.Add(new ReduceKeyAndBucket(IndexingUtil.MapBucket(docId), reduceKey));
                }
            }
            UpdateIndexingStats(context, stats);
            actions.MapReduce.ScheduleReductions(name, 0, changed);
            AddindexingPerformanceStat(new IndexingPerformanceStats
            {
                OutputCount = count,
                InputCount  = sourceCount,
                Operation   = "Map",
                Duration    = sw.Elapsed,
                Started     = start
            });
            logIndexing.Debug("Mapped {0} documents for {1}", count, name);
        }
Example #56
        protected IEnumerable <object> RobustEnumerationIndex(IEnumerable <object> input, IEnumerable <IndexingFunc> funcs,
                                                              IStorageActionsAccessor actions, WorkContext context, IndexingWorkStats stats)
        {
            return(new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
            {
                BeforeMoveNext = () => stats.IndexingAttempts++,
                CancelMoveNext = () => stats.IndexingAttempts--,
                OnError = (exception, o) =>
                {
                    context.AddError(name,
                                     TryGetDocKey(o),
                                     exception.Message
                                     );
                    logIndexing.WarnException(
                        String.Format("Failed to execute indexing function on {0} on {1}", name,
                                      TryGetDocKey(o)),
                        exception);

                    stats.IndexingErrors++;
                }
            }.RobustEnumeration(input, funcs));
        }
Example #57
        protected void Write(WorkContext context, Func <IndexWriter, Analyzer, IndexingWorkStats, int> action)
        {
            if (disposed)
            {
                throw new ObjectDisposedException("Index " + name + " has been disposed");
            }
            lock (writeLock)
            {
                bool     shouldRecreateSearcher;
                var      toDispose      = new List <Action>();
                Analyzer searchAnalyzer = null;
                try
                {
                    try
                    {
                        searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, "Creating Analyzer", e.ToString());
                        throw;
                    }

                    if (indexWriter == null)
                    {
                        indexWriter = CreateIndexWriter(directory);
                    }

                    var stats = new IndexingWorkStats();
                    try
                    {
                        var changedDocs = action(indexWriter, searchAnalyzer, stats);
                        docCountSinceLastOptimization += changedDocs;
                        shouldRecreateSearcher         = changedDocs > 0;
                        foreach (IIndexExtension indexExtension in indexExtensions.Values)
                        {
                            indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
                        }
                    }
                    catch (Exception e)
                    {
                        context.AddError(name, null, e.ToString());
                        throw;
                    }

                    UpdateIndexingStats(context, stats);

                    WriteTempIndexToDiskIfNeeded(context);

                    if (configuration.TransactionMode == TransactionMode.Safe)
                    {
                        Flush();                         // just make sure changes are flushed to disk
                    }
                }
                finally
                {
                    currentlyIndexDocuments.Clear();
                    if (searchAnalyzer != null)
                    {
                        searchAnalyzer.Close();
                    }
                    foreach (Action dispose in toDispose)
                    {
                        dispose();
                    }
                }
                if (shouldRecreateSearcher)
                {
                    RecreateSearcher();
                }
            }
        }
Example #58
 public IndexSearcherHolder(int indexId, WorkContext context)
 {
     this.indexId = indexId;
     this.context = context;
 }
Example #59
 // we don't care about tracking map/reduce stats here, since it is merely
 // an optimization step
 protected IEnumerable <object> RobustEnumerationReduceDuringMapPhase(IEnumerable <object> input, IndexingFunc func,
                                                                      IStorageActionsAccessor actions, WorkContext context)
 {
     // not strictly accurate, but if we get that many errors, probably an error anyway.
     return(new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
     {
         BeforeMoveNext = () => { },                 // don't care
         CancelMoveNext = () => { },                 // don't care
         OnError = (exception, o) =>
         {
             context.AddError(name,
                              TryGetDocKey(o),
                              exception.Message
                              );
             logIndexing.WarnException(
                 String.Format("Failed to execute indexing function on {0} on {1}", name,
                               TryGetDocKey(o)),
                 exception);
         }
     }.RobustEnumeration(input, func));
 }