/// <summary>
/// (re)index an entity.
/// Non indexable entities are ignored
/// The entity must be associated with the session
/// </summary>
/// <param name="entity">The entity to index - must not be null</param>
/// <returns>This session, to allow call chaining.</returns>
public IFullTextSession Index(object entity)
{
    using (new SessionIdLoggingContext(sessionImplementor.SessionId))
    {
        if (entity == null)
        {
            return this;
        }

        System.Type clazz = NHibernateUtil.GetClass(entity);
        ISearchFactoryImplementor searchFactoryImplementor = SearchFactoryImplementor;
        // TODO: Cache that at the FTSession level
        // not strictly necessary but a small optimization.
        // Use TryGetValue: the generic-dictionary indexer throws KeyNotFoundException
        // for non-indexed entity types, but the contract is to silently ignore them.
        DocumentBuilder builder;
        if (searchFactoryImplementor.DocumentBuilders.TryGetValue(clazz, out builder) && builder != null)
        {
            object id = session.GetIdentifier(entity);
            Work work = new Work(entity, id, WorkType.Index);
            searchFactoryImplementor.Worker.PerformWork(work, eventSource);
        }

        return this;
    }
}
// Enqueues an update for an entity reached through a contained-in association:
// resolves its identifier via the mapped id getter, then delegates to the builder.
private static void ProcessContainedInValue(object value, List<LuceneWork> queue, System.Type valueClass,
                                            DocumentBuilder builder, ISearchFactoryImplementor searchFactory)
{
    object containedId = builder.idMapping.Getter.Get(value);
    builder.AddToWorkQueue(valueClass, value, containedId, WorkType.Update, queue, searchFactory);
}
/// <summary>
/// Translates the Lucene full-text query into SQL: runs it against the index,
/// collects the matching entity identifiers, and delegates to the base criterion
/// with those ids as its values (e.g. an IN restriction).
/// </summary>
/// <exception cref="SearchException">When no searcher can be built for the criteria class.</exception>
public override SqlString ToSqlString(ICriteria criteria, ICriteriaQuery criteriaQuery,
                                      IDictionary<string, IFilter> enabledFilters)
{
    System.Type type = GetCriteriaClass(criteria);
    ISearchFactoryImplementor searchFactory = ContextHelper.GetSearchFactory(GetSession(criteria));
    Iesi.Collections.Generic.ISet<System.Type> types;
    IndexSearcher searcher = FullTextSearchHelper.BuildSearcher(searchFactory, out types, type);
    if (searcher == null)
    {
        throw new SearchException("Could not find a searcher for class: " + type.FullName);
    }

    // Restrict the user query to the involved class hierarchy before searching.
    Lucene.Net.Search.Query query = FullTextSearchHelper.FilterQueryByClasses(types, luceneQuery);
    Hits hits = searcher.Search(query);
    List<object> ids = new List<object>();
    for (int i = 0; i < hits.Length(); i++)
    {
        // Each hit's document carries the entity identifier; collect them all.
        object id = DocumentBuilder.GetDocumentId(searchFactory, type, hits.Doc(i));
        ids.Add(id);
    }

    // Feed the ids into the underlying criterion and let it render the SQL.
    base.Values = ids.ToArray();
    return (base.ToSqlString(criteria, criteriaQuery, enabledFilters));
}
/// <summary>
/// Initializes a file-system based Lucene directory for the named provider,
/// creating an empty index on disk when none exists yet.
/// </summary>
/// <exception cref="HibernateException">Wraps any IOException raised while touching the index.</exception>
public void Initialize(String directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary) properties);
    try
    {
        // Only create when no index is already present at the target path.
        bool create = !IndexReader.IndexExists(indexDir.FullName);
        indexName = indexDir.FullName;
        directory = FSDirectory.GetDirectory(indexName, create);
        if (create)
        {
            // Opening then immediately closing an IndexWriter materializes an empty index.
            IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create,
                                             new KeepOnlyLastCommitDeletionPolicy(),
                                             IndexWriter.MaxFieldLength.UNLIMITED);
            iw.Close();
        }
        //searchFactory.RegisterDirectoryProviderForLocks(this);
    }
    catch (IOException e)
    {
        throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
    }
}
/// <summary>
/// Initializes the slave directory provider: resolves the replication source
/// directory and the local index directory, creating the latter when missing.
/// </summary>
/// <exception cref="ArgumentException">When no viable source directory can be resolved.</exception>
/// <exception cref="HibernateException">Wraps any IOException raised while creating the index directory.</exception>
public void Initialize(String directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    this.properties = properties;
    this.directoryProviderName = directoryProviderName;

    // source guessing
    source = DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source, directoryProviderName, (IDictionary) properties);
    if (source == null)
    {
        throw new ArgumentException("FSSlaveDirectoryProvider requires a viable source directory");
    }

    // "current1"/"current2" marker files indicate which source copy is live —
    // presumably written by the master side; warn when neither exists yet.
    if (!File.Exists(Path.Combine(source, "current1")) && !File.Exists(Path.Combine(source, "current2")))
    {
        log.Warn("No current marker in source directory: " + source);
    }

    log.Debug("Source directory: " + source);
    indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary) properties);
    log.Debug("Index directory: " + indexDir.FullName);
    try
    {
        bool create = !indexDir.Exists;
        if (create)
        {
            log.DebugFormat("Index directory not found, creating '{0}'", indexDir.FullName);
            indexDir.Create();
        }
        indexName = indexDir.FullName;
    }
    catch (IOException e)
    {
        throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
    }
}
// Initializes the loader with projection support: performs the base
// initialization, then records the projection aliases and result transformer.
public void Init(ISession session, ISearchFactoryImplementor searchFactoryImplementor,
                 IResultTransformer transformer, string[] aliases)
{
    Init(session, searchFactoryImplementor);

    this.aliases = aliases;
    this.transformer = transformer;
}
/// <summary>
/// Builds the reader provider from configuration: "shared" (default),
/// "not-shared", or a user-supplied class name implementing IReaderProvider.
/// </summary>
/// <exception cref="SearchException">When a custom class cannot be instantiated or does not implement IReaderProvider.</exception>
public static IReaderProvider CreateReaderProvider(Configuration cfg, ISearchFactoryImplementor searchFactoryImplementor)
{
    IDictionary<string, string> props = GetProperties(cfg);

    // Single dictionary lookup instead of ContainsKey + indexer.
    string impl;
    if (!props.TryGetValue(Environment.ReaderStrategy, out impl))
    {
        impl = string.Empty;
    }

    IReaderProvider readerProvider;
    if (string.IsNullOrEmpty(impl))
    {
        // Put in another one
        readerProvider = new SharedReaderProvider();
    }
    else if (string.Equals(impl, "not-shared", StringComparison.OrdinalIgnoreCase))
    {
        readerProvider = new NotSharedReaderProvider();
    }
    else if (string.Equals(impl, "shared", StringComparison.OrdinalIgnoreCase))
    {
        readerProvider = new SharedReaderProvider();
    }
    else
    {
        try
        {
            readerProvider = (IReaderProvider) Activator.CreateInstance(ReflectHelper.ClassForName(impl));
        }
        catch (InvalidCastException)
        {
            throw new SearchException(string.Format("Class does not implement IReaderProvider: {0}", impl));
        }
        catch (Exception)
        {
            throw new SearchException("Failed to instantiate IReaderProvider with type " + impl);
        }
    }

    readerProvider.Initialize(props, searchFactoryImplementor);
    return readerProvider;
}
/// <summary>
/// Builds the queueing processor: reads the execution mode (sync unless "async")
/// and the backend from the properties, instantiates the backend queue processor
/// factory, and registers it with the search factory.
/// </summary>
/// <exception cref="SearchException">When a custom backend class cannot be found or created.</exception>
public BatchedQueueingProcessor(ISearchFactoryImplementor searchFactoryImplementor, IDictionary properties)
{
    this.searchFactoryImplementor = searchFactoryImplementor;

    //default to sync if none defined
    this.sync = !"async".Equals((string)properties[Environment.WorkerExecution], StringComparison.InvariantCultureIgnoreCase);

    string backend = (string)properties[Environment.WorkerBackend];
    // Batch sizing is not wired up yet; kept at 0 (no batching).
    batchSize = 0;//(int) properties[Environment.WorkerBatchSize];
    if (StringHelper.IsEmpty(backend) || "lucene".Equals(backend, StringComparison.InvariantCultureIgnoreCase))
    {
        // Default backend: apply work directly through Lucene.
        backendQueueProcessorFactory = new LuceneBackendQueueProcessorFactory();
    }
    else
    {
        try
        {
            // Custom backend: instantiate the user-supplied factory by class name.
            System.Type processorFactoryClass = ReflectHelper.ClassForName(backend);
            backendQueueProcessorFactory = (IBackendQueueProcessorFactory)Activator.CreateInstance(processorFactoryClass);
        }
        catch (Exception e)
        {
            throw new SearchException("Unable to find/create processor class: " + backend, e);
        }
    }

    backendQueueProcessorFactory.Initialize(properties, searchFactoryImplementor);
    searchFactoryImplementor.BackendQueueProcessorFactory = backendQueueProcessorFactory;
}
/// <summary>
/// Builds the queueing processor: reads the execution mode (sync unless "async")
/// and the backend from the properties, instantiates the backend queue processor
/// factory, and registers it with the search factory.
/// </summary>
/// <exception cref="SearchException">When a custom backend class cannot be found or created.</exception>
public BatchedQueueingProcessor(ISearchFactoryImplementor searchFactoryImplementor, IDictionary properties)
{
    this.searchFactoryImplementor = searchFactoryImplementor;

    //default to sync if none defined
    this.sync = !"async".Equals((string) properties[Environment.WorkerExecution], StringComparison.InvariantCultureIgnoreCase);

    string backend = (string) properties[Environment.WorkerBackend];
    // Batch sizing is not wired up yet; kept at 0 (no batching).
    batchSize = 0;//(int) properties[Environment.WorkerBatchSize];
    if (StringHelper.IsEmpty(backend) || "lucene".Equals(backend, StringComparison.InvariantCultureIgnoreCase))
    {
        // Default backend: apply work directly through Lucene.
        backendQueueProcessorFactory = new LuceneBackendQueueProcessorFactory();
    }
    else
    {
        try
        {
            // Custom backend: instantiate the user-supplied factory by class name.
            System.Type processorFactoryClass = ReflectHelper.ClassForName(backend);
            backendQueueProcessorFactory = (IBackendQueueProcessorFactory) Activator.CreateInstance(processorFactoryClass);
        }
        catch (Exception e)
        {
            throw new SearchException("Unable to find/create processor class: " + backend, e);
        }
    }

    backendQueueProcessorFactory.Initialize(properties, searchFactoryImplementor);
    searchFactoryImplementor.BackendQueueProcessorFactory = backendQueueProcessorFactory;
}
/// <summary>
/// Creates a workspace tracking per-directory-provider readers, writers,
/// held locks and statistics for the given search factory.
/// </summary>
public Workspace(ISearchFactoryImplementor searchFactoryImplementor)
{
    this.searchFactoryImplementor = searchFactoryImplementor;
    readers = new Dictionary<IDirectoryProvider, IndexReader>();
    writers = new Dictionary<IDirectoryProvider, IndexWriter>();
    dpStatistics = new Dictionary<IDirectoryProvider, DPStatistics>();
    lockedProviders = new List<IDirectoryProvider>();
}
/// <summary>
/// Creates a workspace for the given search factory, with empty
/// per-directory-provider reader/writer maps, lock list and statistics.
/// </summary>
public Workspace(ISearchFactoryImplementor searchFactoryImplementor)
{
    searchFactoryImplementor = searchFactoryImplementor == null ? searchFactoryImplementor : searchFactoryImplementor;
    this.searchFactoryImplementor = searchFactoryImplementor;
    this.lockedProviders = new List<IDirectoryProvider>();
    this.dpStatistics = new Dictionary<IDirectoryProvider, DPStatistics>();
    this.writers = new Dictionary<IDirectoryProvider, IndexWriter>();
    this.readers = new Dictionary<IDirectoryProvider, IndexReader>();
}
/// <summary>
/// Builds an IndexSearcher over the directory providers (all shards) of the
/// given classes, or of every indexed class when none are specified.
/// classesAndSubclasses receives the full set of involved classes including
/// mapped subclasses, or null when all classes are searched.
/// </summary>
/// <exception cref="HibernateException">When a requested class is not a mapped entity.</exception>
public static IndexSearcher BuildSearcher(ISearchFactoryImplementor searchFactory,
                                          out ISet<System.Type> classesAndSubclasses,
                                          params System.Type[] classes)
{
    IDictionary<System.Type, DocumentBuilder> builders = searchFactory.DocumentBuilders;
    ISet<IDirectoryProvider> directories = new HashedSet<IDirectoryProvider>();
    if (classes == null || classes.Length == 0)
    {
        // no class means all classes
        foreach (DocumentBuilder builder in builders.Values)
        {
            foreach (IDirectoryProvider provider in builder.DirectoryProvidersSelectionStrategy.GetDirectoryProvidersForAllShards())
            {
                directories.Add(provider);
            }
        }

        // Give them back an empty set
        classesAndSubclasses = null;
    }
    else
    {
        // First pass: expand the requested classes with their mapped subclasses.
        ISet<System.Type> involvedClasses = new HashedSet<System.Type>();
        involvedClasses.AddAll(classes);
        foreach (System.Type clazz in classes)
        {
            DocumentBuilder builder;
            builders.TryGetValue(clazz, out builder);
            if (builder != null)
            {
                involvedClasses.AddAll(builder.MappedSubclasses);
            }
        }

        // Second pass: collect the providers (all shards) of every involved class.
        foreach (System.Type clazz in involvedClasses)
        {
            DocumentBuilder builder;
            builders.TryGetValue(clazz, out builder);
            // TODO should we rather choose a polymorphic path and allow non mapped entities
            if (builder == null)
            {
                throw new HibernateException("Not a mapped entity: " + clazz);
            }

            foreach (IDirectoryProvider provider in builder.DirectoryProvidersSelectionStrategy.GetDirectoryProvidersForAllShards())
            {
                directories.Add(provider);
            }
        }

        classesAndSubclasses = involvedClasses;
    }

    // Open one reader covering all collected directories.
    IDirectoryProvider[] directoryProviders = new List<IDirectoryProvider>(directories).ToArray();
    return (new IndexSearcher(searchFactory.ReaderProvider.OpenReader(directoryProviders)));
}
/// <summary>
/// Builds an IndexSearcher over the directory providers (all shards) of the
/// given classes, or of every indexed class when none are specified.
/// classesAndSubclasses receives the full set of involved classes including
/// mapped subclasses, or null when all classes are searched.
/// </summary>
/// <exception cref="HibernateException">When a requested class is not a mapped entity.</exception>
public static IndexSearcher BuildSearcher(ISearchFactoryImplementor searchFactory,
                                          out ISet<System.Type> classesAndSubclasses,
                                          params System.Type[] classes)
{
    IDictionary<System.Type, DocumentBuilder> builders = searchFactory.DocumentBuilders;
    ISet<IDirectoryProvider> directories = new HashedSet<IDirectoryProvider>();
    if (classes == null || classes.Length == 0)
    {
        // no class means all classes
        foreach (DocumentBuilder builder in builders.Values)
        {
            foreach (IDirectoryProvider provider in builder.DirectoryProvidersSelectionStrategy.GetDirectoryProvidersForAllShards())
            {
                directories.Add(provider);
            }
        }

        // Give them back an empty set
        classesAndSubclasses = null;
    }
    else
    {
        // First pass: expand the requested classes with their mapped subclasses.
        ISet<System.Type> involvedClasses = new HashedSet<System.Type>();
        involvedClasses.AddAll(classes);
        foreach (System.Type clazz in classes)
        {
            DocumentBuilder builder;
            builders.TryGetValue(clazz, out builder);
            if (builder != null)
            {
                involvedClasses.AddAll(builder.MappedSubclasses);
            }
        }

        // Second pass: collect the providers (all shards) of every involved class.
        foreach (System.Type clazz in involvedClasses)
        {
            DocumentBuilder builder;
            builders.TryGetValue(clazz, out builder);
            // TODO should we rather choose a polymorphic path and allow non mapped entities
            if (builder == null)
            {
                throw new HibernateException("Not a mapped entity: " + clazz);
            }

            foreach (IDirectoryProvider provider in builder.DirectoryProvidersSelectionStrategy.GetDirectoryProvidersForAllShards())
            {
                directories.Add(provider);
            }
        }

        classesAndSubclasses = involvedClasses;
    }

    // Open one reader covering all collected directories.
    IDirectoryProvider[] directoryProviders = new List<IDirectoryProvider>(directories).ToArray();
    return new IndexSearcher(searchFactory.ReaderProvider.OpenReader(directoryProviders));
}
/// <summary>
/// Extracts the entity identifier stored in a Lucene document for the given mapped class.
/// </summary>
/// <exception cref="SearchException">When the class has no Lucene configuration.</exception>
public static object GetDocumentId(ISearchFactoryImplementor searchFactory, System.Type clazz, Document document)
{
    // TryGetValue: the generic-dictionary indexer would throw KeyNotFoundException
    // for an unmapped class instead of the intended SearchException below.
    DocumentBuilder builder;
    if (!searchFactory.DocumentBuilders.TryGetValue(clazz, out builder) || builder == null)
    {
        throw new SearchException("No Lucene configuration set up for: " + clazz.Name);
    }

    return builder.IdBridge.Get(builder.GetIdKeywordName(), document);
}
/// <summary>
/// Reads the incremental-optimizer limits from the index properties.
/// Unset or malformed values leave the existing limits untouched.
/// </summary>
public void Initialize(IDirectoryProvider directoryProvider, IDictionary<string, string> indexProperties,
                       ISearchFactoryImplementor searchFactoryImplementor)
{
    this.directoryProvider = directoryProvider;

    // Parse into a temporary: int.TryParse(..., out field) would silently
    // reset the field to 0 when the configured value is malformed.
    string maxString;
    int parsed;

    indexProperties.TryGetValue("optimizer.operation_limit.max", out maxString);
    if (!string.IsNullOrEmpty(maxString) && int.TryParse(maxString, out parsed))
    {
        operationMax = parsed;
    }

    indexProperties.TryGetValue("optimizer.transaction_limit.max", out maxString);
    if (!string.IsNullOrEmpty(maxString) && int.TryParse(maxString, out parsed))
    {
        transactionMax = parsed;
    }
}
/// <summary>
/// Initializes the event listener from configuration. Indexing is event-driven
/// by default; the "manual" strategy disables automatic indexing entirely.
/// </summary>
/// <exception cref="SearchException">When the configured indexing strategy is unknown.</exception>
public void Initialize(Configuration cfg)
{
    searchFactory = SearchFactoryImpl.GetSearchFactory(cfg);

    string indexingStrategy = cfg.GetProperty(Environment.IndexingStrategy) ?? "event";
    if ("event".Equals(indexingStrategy))
    {
        // Only listen when at least one entity is actually indexed.
        used = searchFactory.DocumentBuilders.Count != 0;
    }
    else if ("manual".Equals(indexingStrategy))
    {
        used = false;
    }
    else
    {
        // Bug fix: the message previously named Environment.IndexBase, but the
        // property actually read above is Environment.IndexingStrategy.
        throw new SearchException(Environment.IndexingStrategy + " unknown: " + indexingStrategy);
    }
}
/// <summary>
/// Reads the incremental-optimizer limits from the index properties.
/// Unset or malformed values leave the existing limits untouched.
/// </summary>
public void Initialize(IDirectoryProvider directoryProvider, IDictionary<string, string> indexProperties,
                       ISearchFactoryImplementor searchFactoryImplementor)
{
    this.directoryProvider = directoryProvider;

    // Parse into a temporary: int.TryParse(..., out field) would silently
    // reset the field to 0 when the configured value is malformed.
    string maxString;
    int parsed;

    indexProperties.TryGetValue("optimizer.operation_limit.max", out maxString);
    if (!string.IsNullOrEmpty(maxString) && int.TryParse(maxString, out parsed))
    {
        operationMax = parsed;
    }

    indexProperties.TryGetValue("optimizer.transaction_limit.max", out maxString);
    if (!string.IsNullOrEmpty(maxString) && int.TryParse(maxString, out parsed))
    {
        transactionMax = parsed;
    }
}
/// <summary>
/// Sets up an in-memory (RAM) Lucene directory for the named provider and
/// writes an empty index into it.
/// </summary>
/// <exception cref="ArgumentNullException">When directoryProviderName is null.</exception>
/// <exception cref="HibernateException">Wraps any IOException raised while creating the index.</exception>
public void Initialize(String directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    if (directoryProviderName == null)
    {
        throw new ArgumentNullException("directoryProviderName");
    }

    indexName = directoryProviderName;
    directory = new RAMDirectory();
    try
    {
        // Opening with create=true and closing immediately materializes an empty index.
        IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(), true);
        writer.Close();
        //searchFactory.RegisterDirectoryProviderForLocks(this);
    }
    catch (IOException e)
    {
        throw new HibernateException("Unable to initialize index: " + indexName, e);
    }
}
// Selects and installs the optimizer strategy for a directory provider:
// incremental when any optimizer limit is configured, otherwise a no-op.
private static void ConfigureOptimizerStrategy(ISearchFactoryImplementor searchFactoryImplementor,
                                               IDictionary<string, string> indexProps, IDirectoryProvider provider)
{
    bool hasLimit = indexProps.ContainsKey("optimizer.operation_limit.max")
                    || indexProps.ContainsKey("optimizer.transaction_limit.max");

    IOptimizerStrategy strategy;
    if (!hasLimit)
    {
        strategy = new NoOpOptimizerStrategy();
    }
    else
    {
        strategy = new IncrementalOptimizerStrategy();
        strategy.Initialize(provider, indexProps, searchFactoryImplementor);
    }

    searchFactoryImplementor.AddOptimizerStrategy(provider, strategy);
}
/// <summary>
/// Initializes the master directory provider: resolves the replication source
/// directory and the local index directory, creating an empty index on disk
/// when none exists yet.
/// </summary>
/// <exception cref="ArgumentException">When no viable source directory can be resolved.</exception>
/// <exception cref="HibernateException">Wraps any IOException raised while touching the index.</exception>
public void Initialize(string directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    this.properties = properties;
    this.directoryProviderName = directoryProviderName;

    // source guessing
    source = DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source, directoryProviderName, (IDictionary)properties);
    if (source == null)
    {
        throw new ArgumentException("FSMasterDirectoryProvider requires a viable source directory");
    }

    log.Debug("Source directory: " + source);
    indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary)properties);
    log.Debug("Index directory: " + indexDir);
    try
    {
        // NB Do we need to do this since we are passing the create flag to Lucene?
        bool create = !IndexReader.IndexExists(indexDir.FullName);
        if (create)
        {
            log.DebugFormat("Index directory not found, creating '{0}'", indexDir.FullName);
            indexDir.Create();
        }
        indexName = indexDir.FullName;
        directory = FSDirectory.GetDirectory(indexName, create);
        if (create)
        {
            // NOTE(review): indexName was already assigned above; this repeat looks redundant.
            indexName = indexDir.FullName;
            // Open/close an IndexWriter to materialize an empty index.
            IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create);
            iw.Close();
        }
    }
    catch (IOException e)
    {
        throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
    }
    this.searchFactory = searchFactory;
}
/// <summary>
/// Initializes the master directory provider: resolves the replication source
/// directory and the local index directory, creating an empty index on disk
/// when none exists yet.
/// </summary>
/// <exception cref="ArgumentException">When no viable source directory can be resolved.</exception>
/// <exception cref="HibernateException">Wraps any IOException raised while touching the index.</exception>
public void Initialize(string directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    this.properties = properties;
    this.directoryProviderName = directoryProviderName;

    // source guessing
    source = DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source, directoryProviderName, (IDictionary) properties);
    if (source == null)
    {
        throw new ArgumentException("FSMasterDirectoryProvider requires a viable source directory");
    }

    log.Debug("Source directory: " + source);
    indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary) properties);
    log.Debug("Index directory: " + indexDir);
    try
    {
        // NB Do we need to do this since we are passing the create flag to Lucene?
        bool create = !IndexReader.IndexExists(indexDir.FullName);
        if (create)
        {
            log.DebugFormat("Index directory not found, creating '{0}'", indexDir.FullName);
            indexDir.Create();
        }
        indexName = indexDir.FullName;
        directory = FSDirectory.GetDirectory(indexName, create);
        if (create)
        {
            // NOTE(review): indexName was already assigned above; this repeat looks redundant.
            indexName = indexDir.FullName;
            // Open/close an IndexWriter to materialize an empty index.
            IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create);
            iw.Close();
        }
    }
    catch (IOException e)
    {
        throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
    }
    this.searchFactory = searchFactory;
}
/// <summary>
/// Initializes the shared reader provider: caches a reflection handle to the
/// composite reader's private "subReaders" field (used later to take apart
/// multi-readers) and creates one manipulation lock per lockable directory provider.
/// </summary>
public void Initialize(IDictionary<string, string> properties, ISearchFactoryImplementor searchFactoryImplementor)
{
    if (subReadersField == null)
    {
        // TODO: If we check for CacheableMultiReader we could avoid reflection here!
        // TODO: Need to account for Medium Trust - can't reflect on private members
        subReadersField = typeof(BaseCompositeReader<IndexReader>).GetField("subReaders",
            BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.IgnoreCase);
    }

    HashSet<IDirectoryProvider> providers =
        new HashSet<IDirectoryProvider>(searchFactoryImplementor.GetLockableDirectoryProviders().Keys);

    // One dedicated lock object per provider, so manipulations of different
    // directories do not contend with each other.
    perDirectoryProviderManipulationLocks = new Dictionary<IDirectoryProvider, object>();
    foreach (IDirectoryProvider dp in providers)
    {
        perDirectoryProviderManipulationLocks[dp] = new object();
    }
}
/// <summary>
/// Extracts the projected field values (plus the identifier, when mapped)
/// from a Lucene document for the given class.
/// </summary>
/// <exception cref="SearchException">When the class has no Lucene configuration.</exception>
public static object[] GetDocumentFields(ISearchFactoryImplementor searchFactoryImplementor, System.Type clazz,
                                         Document document, string[] fields)
{
    DocumentBuilder builder;
    if (!searchFactoryImplementor.DocumentBuilders.TryGetValue(clazz, out builder))
    {
        throw new SearchException("No Lucene configuration set up for: " + clazz.Name);
    }

    object[] projected = new object[fields.Length];
    if (builder.idMapping != null)
    {
        // The identifier is treated as a stored field and projected via its bridge.
        PopulateResult(builder.IdentifierName, builder.IdBridge, Attributes.Store.Yes, fields, projected, document);
    }

    ProcessFieldsForProjection(builder.rootClassMapping, fields, projected, document);
    return projected;
}
/// <summary>
/// Initializes the master directory provider: resolves the replication source
/// and the local index directory, then delegates index creation to InitializeIndex.
/// </summary>
/// <exception cref="ArgumentException">When no viable source directory can be resolved.</exception>
public override void Initialize(string directoryProviderName, IDictionary<string, string> properties, ISearchFactoryImplementor searchFactory)
{
    this.properties = properties;
    this.directoryProviderName = directoryProviderName;

    // source guessing
    source = DirectoryProviderHelper.GetSourceDirectory(Environment.SourceBase, Environment.Source, directoryProviderName, (IDictionary)properties);
    if (source == null)
    {
        throw new ArgumentException("FSMasterDirectoryProvider requires a viable source directory");
    }

    log.Debug("Source directory: " + source);
    indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary)properties);
    log.Debug("Index directory: " + indexDir);
    // The provider name doubles as the index name here (unlike the variant
    // above that uses the directory path).
    this.indexName = directoryProviderName;
    this.directory = InitializeIndex(indexDir, indexDir.FullName);
    this.searchFactory = searchFactory;
}
/// <summary>
/// Builds the reader provider from configuration: "shared" (default),
/// "not-shared", or a user-supplied class name implementing IReaderProvider.
/// </summary>
/// <exception cref="SearchException">When a custom class cannot be instantiated or does not implement IReaderProvider.</exception>
public static IReaderProvider CreateReaderProvider(Configuration cfg, ISearchFactoryImplementor searchFactoryImplementor)
{
    IDictionary<string, string> props = GetProperties(cfg);

    // Single dictionary lookup instead of ContainsKey + indexer.
    string impl;
    if (!props.TryGetValue(Environment.ReaderStrategy, out impl))
    {
        impl = string.Empty;
    }

    IReaderProvider readerProvider;
    if (string.IsNullOrEmpty(impl))
    {
        // Put in another one
        readerProvider = new SharedReaderProvider();
    }
    else if (string.Equals(impl, "not-shared", StringComparison.OrdinalIgnoreCase))
    {
        readerProvider = new NotSharedReaderProvider();
    }
    else if (string.Equals(impl, "shared", StringComparison.OrdinalIgnoreCase))
    {
        readerProvider = new SharedReaderProvider();
    }
    else
    {
        try
        {
            readerProvider = (IReaderProvider)Activator.CreateInstance(ReflectHelper.ClassForName(impl));
        }
        catch (InvalidCastException)
        {
            throw new SearchException(string.Format("Class does not implement IReaderProvider: {0}", impl));
        }
        catch (Exception)
        {
            throw new SearchException("Failed to instantiate IReaderProvider with type " + impl);
        }
    }

    readerProvider.Initialize(props, searchFactoryImplementor);
    return readerProvider;
}
/// <summary>
/// Removes the given entity (or, when id is null, every entity of the class)
/// from the index. Non-indexed classes are ignored.
/// </summary>
/// <param name="clazz">Mapped class to purge; null is a no-op.</param>
/// <param name="id">Identifier of the instance to purge, or null to purge all instances.</param>
public void Purge(System.Type clazz, object id)
{
    using (new SessionIdLoggingContext(sessionImplementor.SessionId))
    {
        if (clazz == null)
        {
            return;
        }

        ISearchFactoryImplementor searchFactoryImplementor = SearchFactoryImplementor;
        // TODO: Cache that at the FTSession level
        // not strictly necessary but a small optimization.
        // Use TryGetValue: the generic-dictionary indexer throws KeyNotFoundException
        // for non-indexed classes, which should simply be ignored.
        DocumentBuilder builder;
        if (searchFactoryImplementor.DocumentBuilders.TryGetValue(clazz, out builder) && builder != null)
        {
            // TODO: Check to see this entity type is indexed
            // A null id means "purge all instances of this class".
            WorkType workType = id == null ? WorkType.PurgeAll : WorkType.Purge;
            Work work = new Work(clazz, id, workType);
            searchFactoryImplementor.Worker.PerformWork(work, eventSource);
        }
    }
}
/// <summary>
/// Builds an extractor that materializes the given projection fields
/// using the supplied search factory.
/// </summary>
public DocumentExtractor(ISearchFactoryImplementor searchFactoryImplementor, string[] projection)
{
    this.projection = projection;
    this.searchFactoryImplementor = searchFactoryImplementor;
}
/// <summary>
/// This add the new work to the queue, so it can be processed in a batch fashion later
/// </summary>
/// <param name="entityClass">Mapped class of the entity.</param>
/// <param name="entity">The entity instance (for PurgeAll, the System.Type itself is passed here).</param>
/// <param name="id">Entity identifier.</param>
/// <param name="workType">Kind of index operation to enqueue.</param>
/// <param name="queue">Batch queue the Lucene work items are appended to.</param>
/// <param name="searchFactoryImplementor">Search factory, used for contained-in processing.</param>
public void AddToWorkQueue(System.Type entityClass, object entity, object id, WorkType workType,
                           List<LuceneWork> queue, ISearchFactoryImplementor searchFactoryImplementor)
{
    // TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
    // De-duplicate: skip if work for this exact entity is already queued.
    foreach (LuceneWork luceneWork in queue)
    {
        if (luceneWork.EntityClass == entityClass && luceneWork.Id.Equals(id))
        {
            return;
        }
    }

    bool searchForContainers = false;
    // The id is stored in the index as a string via the configured bridge.
    string idString = idMapping.Bridge.ObjectToString(id);
    switch (workType)
    {
        case WorkType.Add:
            queue.Add(new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass)));
            searchForContainers = true;
            break;

        case WorkType.Delete:
        case WorkType.Purge:
            queue.Add(new DeleteLuceneWork(id, idString, entityClass));
            break;

        case WorkType.PurgeAll:
            // For PurgeAll the "entity" argument actually carries the class to purge.
            queue.Add(new PurgeAllLuceneWork((System.Type)entity));
            break;

        case WorkType.Update:
        case WorkType.Collection:
            /**
             * even with Lucene 2.1, use of indexWriter to update is not an option
             * We can only delete by term, and the index doesn't have a term that
             * uniquely identify the entry.
             * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
             * double file opening.
             */
            queue.Add(new DeleteLuceneWork(id, idString, entityClass));
            queue.Add(new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass)));
            searchForContainers = true;
            break;

        case WorkType.Index:
            // Index = delete + add, with IsBatch set so the backend can optimize.
            queue.Add(new DeleteLuceneWork(id, idString, entityClass));
            LuceneWork work = new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass));
            work.IsBatch = true;
            queue.Add(work);
            searchForContainers = true;
            break;

        default:
            throw new AssertionFailure("Unknown WorkType: " + workType);
    }

    /**
     * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
     * have to be updated)
     * When the internal object is changed, we apply the {Add|Update}Work on containedIns
     */
    if (searchForContainers)
    {
        ProcessContainedIn(entity, queue, rootClassMapping, searchFactoryImplementor);
    }
}
/// <summary>
/// Creates the directory provider(s) for a mapped class — one per shard — and
/// the sharding strategy that dispatches work among them.
/// </summary>
/// <exception cref="SearchException">When a custom sharding strategy cannot be instantiated.</exception>
public DirectoryProviders CreateDirectoryProviders(DocumentMapping classMapping, Configuration cfg,
                                                   ISearchFactoryImplementor searchFactoryImplementor)
{
    // Get properties
    string directoryProviderName = GetDirectoryProviderName(classMapping, cfg);
    IDictionary<string, string>[] indexProps = GetDirectoryProperties(cfg, directoryProviderName);

    // Set up the directories
    int nbrOfProviders = indexProps.Length;
    IDirectoryProvider[] providers = new IDirectoryProvider[nbrOfProviders];
    for (int index = 0; index < nbrOfProviders; index++)
    {
        // Shards get a ".<index>" suffix; a single provider keeps the bare name.
        string providerName = nbrOfProviders > 1
                                  ? directoryProviderName + "." + index
                                  : directoryProviderName;
        // NB Are the properties nested??
        providers[index] = CreateDirectoryProvider(providerName, indexProps[index], searchFactoryImplementor);
    }

    // Define sharding strategy
    IIndexShardingStrategy shardingStrategy;
    IDictionary<string, string> shardingProperties = new Dictionary<string, string>();

    // Any indexProperty will do, the indexProps[0] surely exists.
    foreach (KeyValuePair<string, string> entry in indexProps[0])
    {
        if (entry.Key.StartsWith(SHARDING_STRATEGY))
        {
            shardingProperties.Add(entry);
        }
    }

    string shardingStrategyName;
    shardingProperties.TryGetValue(SHARDING_STRATEGY, out shardingStrategyName);
    if (string.IsNullOrEmpty(shardingStrategyName))
    {
        // Default: no sharding for a single provider, id-hash dispatch otherwise.
        if (indexProps.Length == 1)
        {
            shardingStrategy = new NotShardedStrategy();
        }
        else
        {
            shardingStrategy = new IdHashShardingStrategy();
        }
    }
    else
    {
        try
        {
            System.Type shardingStrategyClass = ReflectHelper.ClassForName(shardingStrategyName);
            shardingStrategy = (IIndexShardingStrategy)Activator.CreateInstance(shardingStrategyClass);
        }
        catch
        {
            // TODO: See if we can get a tigher exception trap here
            // Bug fix: the message previously said "lucene analyzer" although a
            // sharding strategy is being instantiated here.
            throw new SearchException("Failed to instantiate sharding strategy with type " + shardingStrategyName);
        }
    }

    shardingStrategy.Initialize(shardingProperties, providers);
    return new DirectoryProviders(shardingStrategy, providers);
}
/// <summary>
/// Reads per-index Lucene tuning properties and registers the resulting
/// LuceneIndexingParameters with the search factory. Each "transaction.*"
/// value seeds BOTH the transaction and the batch parameter set; each
/// "batch.*" value then overrides the batch set only.
/// </summary>
private static void ConfigureIndexingParameters(ISearchFactoryImplementor searchFactoryImplementor,
                                                IDictionary<string, string> indexProps, IDirectoryProvider provider)
{
    LuceneIndexingParameters indexingParams = new LuceneIndexingParameters();

    // Transaction-scoped settings (also used as batch defaults).
    ConfigureProp(
        TRANSACTION + MERGE_FACTOR, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MergeFactor = value;
            indexingParams.TransactionIndexParameters.MergeFactor = value;
        });
    ConfigureProp(
        TRANSACTION + MAX_MERGE_DOCS, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MaxMergeDocs = value;
            indexingParams.TransactionIndexParameters.MaxMergeDocs = value;
        });
    ConfigureProp(
        TRANSACTION + MAX_BUFFERED_DOCS, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MaxBufferedDocs = value;
            indexingParams.TransactionIndexParameters.MaxBufferedDocs = value;
        });
    ConfigureProp(
        TRANSACTION + RAM_BUFFER_SIZE, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.RamBufferSizeMb = value;
            indexingParams.TransactionIndexParameters.RamBufferSizeMb = value;
        });
    ConfigureProp(
        TRANSACTION + TERM_INDEX_INTERVAL, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.TermIndexInterval = value;
            indexingParams.TransactionIndexParameters.TermIndexInterval = value;
        });

    // Batch-only overrides.
    ConfigureProp(
        BATCH + MERGE_FACTOR, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MergeFactor = value; });
    ConfigureProp(
        BATCH + MAX_MERGE_DOCS, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MaxMergeDocs = value; });
    ConfigureProp(
        BATCH + MAX_BUFFERED_DOCS, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MaxBufferedDocs = value; });
    ConfigureProp(
        BATCH + RAM_BUFFER_SIZE, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.RamBufferSizeMb = value; });
    ConfigureProp(
        BATCH + TERM_INDEX_INTERVAL, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.TermIndexInterval = value; });

    searchFactoryImplementor.AddIndexingParameters(provider, indexingParams);
}
/// <summary>
/// Creates a processor bound to the batch of Lucene work items it must apply
/// through the given search factory.
/// </summary>
public LuceneBackendQueueProcessor(IList<LuceneWork> queue, ISearchFactoryImplementor searchFactoryImplementor)
{
    this.searchFactoryImplementor = searchFactoryImplementor;
    this.queue = queue;
}
// Installs the optimizer strategy for a directory provider. Configured
// optimizer limits select the incremental strategy; otherwise a no-op is used.
private static void ConfigureOptimizerStrategy(ISearchFactoryImplementor searchFactoryImplementor, IDictionary<string, string> indexProps, IDirectoryProvider provider)
{
    bool hasIncrementalLimits =
        indexProps.ContainsKey("optimizer.operation_limit.max") ||
        indexProps.ContainsKey("optimizer.transaction_limit.max");

    IOptimizerStrategy chosen;
    if (hasIncrementalLimits)
    {
        IncrementalOptimizerStrategy incrementalStrategy = new IncrementalOptimizerStrategy();
        incrementalStrategy.Initialize(provider, indexProps, searchFactoryImplementor);
        chosen = incrementalStrategy;
    }
    else
    {
        chosen = new NoOpOptimizerStrategy();
    }

    searchFactoryImplementor.AddOptimizerStrategy(provider, chosen);
}
/// <summary>
/// Reads per-index Lucene tuning properties and registers the resulting
/// LuceneIndexingParameters with the search factory. Each "transaction.*"
/// value seeds BOTH the transaction and the batch parameter set; each
/// "batch.*" value then overrides the batch set only.
/// </summary>
private static void ConfigureIndexingParameters(ISearchFactoryImplementor searchFactoryImplementor,
                                                IDictionary<string, string> indexProps, IDirectoryProvider provider)
{
    LuceneIndexingParameters indexingParams = new LuceneIndexingParameters();

    // Transaction-scoped settings (also used as batch defaults).
    ConfigureProp(
        TRANSACTION + MERGE_FACTOR, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MergeFactor = value;
            indexingParams.TransactionIndexParameters.MergeFactor = value;
        });
    ConfigureProp(
        TRANSACTION + MAX_MERGE_DOCS, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MaxMergeDocs = value;
            indexingParams.TransactionIndexParameters.MaxMergeDocs = value;
        });
    ConfigureProp(
        TRANSACTION + MAX_BUFFERED_DOCS, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.MaxBufferedDocs = value;
            indexingParams.TransactionIndexParameters.MaxBufferedDocs = value;
        });
    ConfigureProp(
        TRANSACTION + RAM_BUFFER_SIZE, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.RamBufferSizeMb = value;
            indexingParams.TransactionIndexParameters.RamBufferSizeMb = value;
        });
    ConfigureProp(
        TRANSACTION + TERM_INDEX_INTERVAL, indexProps,
        delegate(int value)
        {
            indexingParams.BatchIndexParameters.TermIndexInterval = value;
            indexingParams.TransactionIndexParameters.TermIndexInterval = value;
        });

    // Batch-only overrides.
    ConfigureProp(
        BATCH + MERGE_FACTOR, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MergeFactor = value; });
    ConfigureProp(
        BATCH + MAX_MERGE_DOCS, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MaxMergeDocs = value; });
    ConfigureProp(
        BATCH + MAX_BUFFERED_DOCS, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.MaxBufferedDocs = value; });
    ConfigureProp(
        BATCH + RAM_BUFFER_SIZE, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.RamBufferSizeMb = value; });
    ConfigureProp(
        BATCH + TERM_INDEX_INTERVAL, indexProps,
        delegate(int value) { indexingParams.BatchIndexParameters.TermIndexInterval = value; });

    searchFactoryImplementor.AddIndexingParameters(provider, indexingParams);
}
/// <summary>
/// Instantiates, initializes and registers a single directory provider.
/// A provider equal to an already-registered one is shared rather than
/// duplicated, so several indexes can point at the same underlying store.
/// </summary>
/// <exception cref="HibernateException">When the provider class cannot be created or initialized.</exception>
private IDirectoryProvider CreateDirectoryProvider(string directoryProviderName, IDictionary<string, string> indexProps, ISearchFactoryImplementor searchFactoryImplementor)
{
    string className;
    indexProps.TryGetValue("directory_provider", out className);
    if (StringHelper.IsEmpty(className))
    {
        className = DEFAULT_DIRECTORY_PROVIDER;
    }

    IDirectoryProvider provider;
    try
    {
        System.Type directoryClass = ReflectHelper.ClassForName(className);
        provider = (IDirectoryProvider)Activator.CreateInstance(directoryClass);
    }
    catch (Exception e)
    {
        throw new HibernateException("Unable to instantiate directory provider: " + className, e);
    }

    try
    {
        provider.Initialize(directoryProviderName, indexProps, searchFactoryImplementor);
    }
    catch (Exception e)
    {
        throw new HibernateException("Unable to initialize: " + directoryProviderName, e);
    }

    int index = providers.IndexOf(provider);
    if (index != -1)
    {
        // Share the same Directory provider for the same underlying store
        return providers[index];
    }

    ConfigureOptimizerStrategy(searchFactoryImplementor, indexProps, provider);
    ConfigureIndexingParameters(searchFactoryImplementor, indexProps, provider);
    providers.Add(provider);

    // Register a lock object for this provider if not already present.
    if (!searchFactoryImplementor.GetLockableDirectoryProviders().ContainsKey(provider))
    {
        searchFactoryImplementor.GetLockableDirectoryProviders()[provider] = new object();
    }

    return provider;
}
/// <summary>
/// Creates the directory provider(s) for a mapped class — one per shard — and
/// the sharding strategy that dispatches work among them.
/// </summary>
/// <exception cref="SearchException">When a custom sharding strategy cannot be instantiated.</exception>
public DirectoryProviders CreateDirectoryProviders(DocumentMapping classMapping, Configuration cfg,
                                                   ISearchFactoryImplementor searchFactoryImplementor)
{
    // Get properties
    string directoryProviderName = GetDirectoryProviderName(classMapping, cfg);
    IDictionary<string, string>[] indexProps = GetDirectoryProperties(cfg, directoryProviderName);

    // Set up the directories
    int nbrOfProviders = indexProps.Length;
    IDirectoryProvider[] providers = new IDirectoryProvider[nbrOfProviders];
    for (int index = 0; index < nbrOfProviders; index++)
    {
        // Shards get a ".<index>" suffix; a single provider keeps the bare name.
        string providerName = nbrOfProviders > 1
                                  ? directoryProviderName + "." + index
                                  : directoryProviderName;
        // NB Are the properties nested??
        providers[index] = CreateDirectoryProvider(providerName, indexProps[index], searchFactoryImplementor);
    }

    // Define sharding strategy
    IIndexShardingStrategy shardingStrategy;
    IDictionary<string, string> shardingProperties = new Dictionary<string, string>();

    // Any indexProperty will do, the indexProps[0] surely exists.
    foreach (KeyValuePair<string, string> entry in indexProps[0])
    {
        if (entry.Key.StartsWith(SHARDING_STRATEGY))
        {
            shardingProperties.Add(entry);
        }
    }

    string shardingStrategyName;
    shardingProperties.TryGetValue(SHARDING_STRATEGY, out shardingStrategyName);
    if (string.IsNullOrEmpty(shardingStrategyName))
    {
        // Default: no sharding for a single provider, id-hash dispatch otherwise.
        if (indexProps.Length == 1)
        {
            shardingStrategy = new NotShardedStrategy();
        }
        else
        {
            shardingStrategy = new IdHashShardingStrategy();
        }
    }
    else
    {
        try
        {
            System.Type shardingStrategyClass = ReflectHelper.ClassForName(shardingStrategyName);
            shardingStrategy = (IIndexShardingStrategy) Activator.CreateInstance(shardingStrategyClass);
        }
        catch
        {
            // TODO: See if we can get a tigher exception trap here
            // Bug fix: the message previously said "lucene analyzer" although a
            // sharding strategy is being instantiated here.
            throw new SearchException("Failed to instantiate sharding strategy with type " + shardingStrategyName);
        }
    }

    shardingStrategy.Initialize(shardingProperties, providers);
    return new DirectoryProviders(shardingStrategy, providers);
}
/// <summary>
/// Captures the owning search factory for later use by this component.
/// </summary>
/// <param name="props">Configuration properties; not used by this implementation.</param>
/// <param name="searchFactoryImplementor">The search factory to delegate to.</param>
public void Initialize(IDictionary props, ISearchFactoryImplementor searchFactoryImplementor)
{
	this.searchFactoryImplementor = searchFactoryImplementor;
}
/// <summary>
/// one must lock the directory providers in the exact same order to avoid
/// dead lock between concurrent threads or processes
/// To achieve that, the work will be done per directory provider
/// We rely on the both the DocumentBuilder.GetHashCode() and the GetWorkHashCode() to
/// sort them by predictive order at all times, and to put deletes before adds
/// </summary>
private static void DeadLockFreeQueue(List<LuceneWorker.WorkWithPayload> queue, ISearchFactoryImplementor searchFactoryImplementor)
{
	// Sort by the extended work hash so every thread acquires providers in the same order.
	queue.Sort((left, right) =>
		GetWorkHashCode(left, searchFactoryImplementor).CompareTo(GetWorkHashCode(right, searchFactoryImplementor)));
}
/// <summary>
/// No-op: this implementation requires no configuration at initialization time.
/// </summary>
/// <param name="properties">Configuration properties; ignored.</param>
/// <param name="searchFactoryImplementor">The owning search factory; ignored.</param>
public void Initialize(IDictionary<string, string> properties, ISearchFactoryImplementor searchFactoryImplementor)
{
}
/// <summary>
/// Prepares reader-manipulation state: lazily resolves the private
/// <c>MultiReader.subReaders</c> field via reflection (done once), then
/// allocates one manipulation-lock object per known directory provider.
/// </summary>
/// <param name="properties">Configuration properties; not used here.</param>
/// <param name="searchFactoryImplementor">Source of the lockable directory providers.</param>
public void Initialize(IDictionary<string, string> properties, ISearchFactoryImplementor searchFactoryImplementor)
{
	if (subReadersField == null)
	{
		// TODO: If we check for CacheableMultiReader we could avoid reflection here!
		// TODO: Need to account for Medium Trust - can't reflect on private members
		subReadersField = typeof(MultiReader).GetField(
			"subReaders",
			BindingFlags.IgnoreCase | BindingFlags.Instance | BindingFlags.NonPublic);
	}

	HashedSet<IDirectoryProvider> knownProviders =
		new HashedSet<IDirectoryProvider>(searchFactoryImplementor.GetLockableDirectoryProviders().Keys);
	perDirectoryProviderManipulationLocks = new Dictionary<IDirectoryProvider, object>();
	foreach (IDirectoryProvider directoryProvider in knownProviders)
	{
		perDirectoryProviderManipulationLocks[directoryProvider] = new object();
	}
}
/// <summary>
/// This add the new work to the queue, so it can be processed in a batch fashion later
/// </summary>
/// <param name="entityClass">The mapped class the work applies to.</param>
/// <param name="entity">The entity instance (for <c>PurgeAll</c> this carries a <c>System.Type</c> instead — see note below).</param>
/// <param name="id">The entity identifier.</param>
/// <param name="workType">The kind of index operation to enqueue.</param>
/// <param name="queue">The batch queue the Lucene work is appended to.</param>
/// <param name="searchFactoryImplementor">Used to cascade work to containing (ContainedIn) entities.</param>
public void AddToWorkQueue(System.Type entityClass, object entity, object id, WorkType workType, List<LuceneWork> queue, ISearchFactoryImplementor searchFactoryImplementor)
{
	// TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
	// Skip when an equivalent unit of work for this entity is already queued.
	foreach (LuceneWork luceneWork in queue)
	{
		if (luceneWork.EntityClass == entityClass && luceneWork.Id.Equals(id))
		{
			return;
		}
	}

	bool searchForContainers = false;
	// Render the identifier in its index (string) form through the id bridge.
	string idString = idMapping.Bridge.ObjectToString(id);
	switch (workType)
	{
		case WorkType.Add:
			queue.Add(new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass)));
			searchForContainers = true;
			break;

		case WorkType.Delete:
		case WorkType.Purge:
			queue.Add(new DeleteLuceneWork(id, idString, entityClass));
			break;

		case WorkType.PurgeAll:
			// NOTE(review): here 'entity' is expected to be the System.Type to purge,
			// not an entity instance — confirm against the callers of PurgeAll.
			queue.Add(new PurgeAllLuceneWork((System.Type) entity));
			break;

		case WorkType.Update:
		case WorkType.Collection:
			/**
			 * even with Lucene 2.1, use of indexWriter to update is not an option
			 * We can only delete by term, and the index doesn't have a term that
			 * uniquely identify the entry.
			 * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
			 * double file opening.
			 */
			// Updates are modelled as delete-then-add; the ordering matters.
			queue.Add(new DeleteLuceneWork(id, idString, entityClass));
			queue.Add(new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass)));
			searchForContainers = true;
			break;

		case WorkType.Index:
			// Same delete-then-add pairing as Update, but the add is flagged as
			// batch work so the backend can apply batch-oriented tuning.
			queue.Add(new DeleteLuceneWork(id, idString, entityClass));
			LuceneWork work = new AddLuceneWork(id, idString, entityClass, GetDocument(entity, id, entityClass));
			work.IsBatch = true;
			queue.Add(work);
			searchForContainers = true;
			break;

		default:
			throw new AssertionFailure("Unknown WorkType: " + workType);
	}

	/**
	 * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
	 * have to be updated)
	 * When the internal object is changed, we apply the {Add|Update}Work on containedIns
	 */
	if (searchForContainers)
	{
		ProcessContainedIn(entity, queue, rootClassMapping, searchFactoryImplementor);
	}
}
/// <summary>
/// No-op: this implementation requires no per-directory-provider setup.
/// </summary>
/// <param name="directoryProvider">The directory provider being configured; ignored.</param>
/// <param name="indexProperties">Index configuration properties; ignored.</param>
/// <param name="searchFactoryImplementor">The owning search factory; ignored.</param>
public void Initialize(IDirectoryProvider directoryProvider, IDictionary<string, string> indexProperties, ISearchFactoryImplementor searchFactoryImplementor)
{
}
/// <summary>
/// Resolves, instantiates and initializes the directory provider for an index.
/// The implementation type comes from the "directory_provider" property (default:
/// <c>DEFAULT_DIRECTORY_PROVIDER</c>); providers equal to an already-registered
/// one are shared instead of added twice.
/// </summary>
/// <param name="directoryProviderName">Logical name of the index.</param>
/// <param name="indexProps">Index-scoped configuration properties.</param>
/// <param name="searchFactoryImplementor">The owning search factory.</param>
/// <returns>The initialized (possibly shared) directory provider.</returns>
/// <exception cref="HibernateException">When instantiation or initialization fails.</exception>
private IDirectoryProvider CreateDirectoryProvider(string directoryProviderName, IDictionary <string, string> indexProps, ISearchFactoryImplementor searchFactoryImplementor)
{
	string implClassName;
	indexProps.TryGetValue("directory_provider", out implClassName);
	if (StringHelper.IsEmpty(implClassName))
	{
		implClassName = DEFAULT_DIRECTORY_PROVIDER;
	}

	IDirectoryProvider newProvider;
	try
	{
		newProvider = (IDirectoryProvider) Activator.CreateInstance(ReflectHelper.ClassForName(implClassName));
	}
	catch (Exception e)
	{
		throw new HibernateException("Unable to instantiate directory provider: " + implClassName, e);
	}

	try
	{
		newProvider.Initialize(directoryProviderName, indexProps, searchFactoryImplementor);
	}
	catch (Exception e)
	{
		throw new HibernateException("Unable to initialize: " + directoryProviderName, e);
	}

	int alreadyRegisteredAt = providers.IndexOf(newProvider);
	if (alreadyRegisteredAt != -1)
	{
		// Share the same Directory provider for the same underlying store
		return providers[alreadyRegisteredAt];
	}

	ConfigureOptimizerStrategy(searchFactoryImplementor, indexProps, newProvider);
	ConfigureIndexingParameters(searchFactoryImplementor, indexProps, newProvider);
	providers.Add(newProvider);

	// Ensure a lock object exists for this provider.
	if (!searchFactoryImplementor.GetLockableDirectoryProviders().ContainsKey(newProvider))
	{
		searchFactoryImplementor.GetLockableDirectoryProviders()[newProvider] = new object();
	}

	return newProvider;
}
/// <summary>
/// No-op: this implementation needs no configuration.
/// </summary>
/// <param name="properties">Configuration properties; ignored.</param>
/// <param name="searchFactoryImplementor">The owning search factory; ignored.</param>
public void Initialize(IDictionary <string, string> properties, ISearchFactoryImplementor searchFactoryImplementor)
{
}
/// <summary>
/// Extracts the entity identifier from a Lucene document using the id bridge
/// configured for the given class.
/// </summary>
/// <param name="searchFactory">Holds the per-class document builders.</param>
/// <param name="clazz">The indexed entity class.</param>
/// <param name="document">The Lucene document to read the id from.</param>
/// <returns>The entity identifier reconstructed by the id bridge.</returns>
/// <exception cref="SearchException">When no Lucene configuration exists for <paramref name="clazz"/>.</exception>
public static object GetDocumentId(ISearchFactoryImplementor searchFactory, System.Type clazz, Document document)
{
	// BUGFIX: the indexer throws KeyNotFoundException for unmapped classes, so the
	// old null check was dead code; TryGetValue (as in GetDocumentFields) restores
	// the intended SearchException.
	DocumentBuilder builder;
	if (!searchFactory.DocumentBuilders.TryGetValue(clazz, out builder))
	{
		throw new SearchException("No Lucene configuration set up for: " + clazz.Name);
	}
	return builder.IdBridge.Get(builder.GetIdKeywordName(), document);
}
/// <summary>
/// Opens (or creates) the file-system index for this provider. If no index
/// exists at the resolved directory, an empty one is created by opening and
/// immediately disposing an <see cref="IndexWriter"/>.
/// </summary>
/// <param name="directoryProviderName">Logical name of the index; used to resolve the directory.</param>
/// <param name="properties">Index configuration properties.</param>
/// <param name="searchFactory">The owning search factory; not used here.</param>
/// <exception cref="HibernateException">When the index cannot be opened or created.</exception>
public void Initialize(String directoryProviderName, IDictionary <string, string> properties, ISearchFactoryImplementor searchFactory)
{
	DirectoryInfo indexDir = DirectoryProviderHelper.DetermineIndexDir(directoryProviderName, (IDictionary)properties);
	try
	{
		indexName = indexDir.FullName;
		directory = FSDirectory.Open(indexDir.FullName);
		if (DirectoryReader.IndexExists(directory))
		{
			return;
		}
		var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48));
		// BUGFIX: 'using' guarantees the writer is released even if an exception
		// occurs before the explicit Dispose the old code relied on.
		using (var writer = new IndexWriter(directory, config))
		{
			// Opening the writer is enough to create an empty index.
		}
	}
	catch (IOException e)
	{
		throw new HibernateException("Unable to initialize index: " + directoryProviderName, e);
	}
}
/// <summary>
/// Projects the requested field values out of a Lucene document for the given
/// entity class: the identifier slot is filled first (when an id mapping exists),
/// then the remaining mapped fields are processed.
/// </summary>
/// <param name="searchFactoryImplementor">Holds the per-class document builders.</param>
/// <param name="clazz">The indexed entity class.</param>
/// <param name="document">The Lucene document to project from.</param>
/// <param name="fields">The requested projection field names.</param>
/// <returns>One value per requested field, positionally aligned with <paramref name="fields"/>.</returns>
/// <exception cref="SearchException">When no Lucene configuration exists for <paramref name="clazz"/>.</exception>
public static object[] GetDocumentFields(ISearchFactoryImplementor searchFactoryImplementor, System.Type clazz, Document document, string[] fields)
{
	DocumentBuilder builder;
	if (!searchFactoryImplementor.DocumentBuilders.TryGetValue(clazz, out builder))
	{
		throw new SearchException("No Lucene configuration set up for: " + clazz.Name);
	}

	object[] projection = new object[fields.Length];
	if (builder.idMapping != null)
	{
		// The identifier is projected through the id bridge, always stored.
		PopulateResult(builder.IdentifierName, builder.IdBridge, Attributes.Store.Yes, fields, projection, document);
	}
	ProcessFieldsForProjection(builder.rootClassMapping, fields, projection, document);
	return projection;
}
/// <summary>
/// Enqueues an Update work item for a containing entity: its identifier is read
/// through the builder's id getter and handed to the work queue.
/// </summary>
/// <param name="value">The containing entity instance.</param>
/// <param name="queue">The batch queue to append to.</param>
/// <param name="valueClass">The concrete class of <paramref name="value"/>.</param>
/// <param name="builder">The document builder mapped to <paramref name="valueClass"/>.</param>
/// <param name="searchFactory">The owning search factory.</param>
private static void ProcessContainedInValue(object value, List<LuceneWork> queue, System.Type valueClass, DocumentBuilder builder, ISearchFactoryImplementor searchFactory)
{
	builder.AddToWorkQueue(valueClass, value, builder.idMapping.Getter.Get(value), WorkType.Update, queue, searchFactory);
}
/// <summary>
/// Walks the ContainedIn associations of an entity and enqueues Update work for
/// every indexed container it references — handling single references, arrays,
/// and collections (dictionary values included).
/// </summary>
/// <param name="instance">The entity whose containers must be re-indexed.</param>
/// <param name="queue">The batch queue to append to.</param>
/// <param name="documentMapping">The mapping describing the ContainedIn associations.</param>
/// <param name="searchFactoryImplementor">Holds the per-class document builders.</param>
private static void ProcessContainedIn(Object instance, List <LuceneWork> queue, DocumentMapping documentMapping, ISearchFactoryImplementor searchFactoryImplementor)
{
	foreach (var containedIn in documentMapping.ContainedIn)
	{
		object containerValue = containedIn.Getter.Get(instance);
		if (containerValue == null)
		{
			continue;
		}

		Array elements = containerValue as Array;
		if (elements != null)
		{
			foreach (object element in elements)
			{
				// Highly inneficient but safe wrt the actual targeted class, e.g. polymorphic items in the array
				System.Type elementType = NHibernateUtil.GetClass(element);
				if (elementType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(elementType))
				{
					continue;
				}
				ProcessContainedInValue(element, queue, elementType, searchFactoryImplementor.DocumentBuilders[elementType], searchFactoryImplementor);
			}
			continue;
		}

		IEnumerable sequence = containerValue as IEnumerable;
		if (sequence != null)
		{
			// NB We only see ISet and IDictionary`2 as IEnumerable
			IDictionary dictionary = containerValue as IDictionary;
			if (dictionary != null)
			{
				sequence = dictionary.Values;
			}
			foreach (object element in sequence)
			{
				// Highly inneficient but safe wrt the actual targeted class, e.g. polymorphic items in the array
				System.Type elementType = NHibernateUtil.GetClass(element);
				if (elementType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(elementType))
				{
					continue;
				}
				ProcessContainedInValue(element, queue, elementType, searchFactoryImplementor.DocumentBuilders[elementType], searchFactoryImplementor);
			}
			continue;
		}

		// Plain single-valued reference.
		System.Type containerType = NHibernateUtil.GetClass(containerValue);
		if (containerType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(containerType))
		{
			continue;
		}
		ProcessContainedInValue(containerValue, queue, containerType, searchFactoryImplementor.DocumentBuilders[containerType], searchFactoryImplementor);
	}
	// an embedded cannot have a useful @ContainedIn (no shared reference)
	// do not walk through them
}
/// <summary>
/// Captures the session this loader will use.
/// NOTE(review): the <paramref name="searchFactoryImplementor"/> argument is
/// discarded here — presumably this loader never needs it (compare the sibling
/// Init implementation that stores both); confirm this is intentional.
/// </summary>
/// <param name="session">The NHibernate session to load entities from.</param>
/// <param name="searchFactoryImplementor">Ignored by this implementation.</param>
public void Init(ISession session, ISearchFactoryImplementor searchFactoryImplementor)
{
	this.session = session;
}
/// <summary>
/// Creates an in-memory (RAM) index for this provider. A fresh, empty index is
/// materialized by opening and immediately disposing an <see cref="IndexWriter"/>.
/// </summary>
/// <param name="directoryProviderName">Logical name of the index; must not be null.</param>
/// <param name="properties">Index configuration properties; not used here.</param>
/// <param name="searchFactory">The owning search factory; not used here.</param>
/// <exception cref="ArgumentNullException">When <paramref name="directoryProviderName"/> is null.</exception>
/// <exception cref="HibernateException">When the index cannot be created.</exception>
public void Initialize(String directoryProviderName, IDictionary <string, string> properties, ISearchFactoryImplementor searchFactory)
{
	if (directoryProviderName == null)
	{
		throw new ArgumentNullException("directoryProviderName");
	}
	indexName = directoryProviderName;
	directory = new RAMDirectory();
	try
	{
		// CONSISTENCY FIX: use the same Lucene.NET 4.8 writer API as the
		// FSDirectoryProvider (IndexWriterConfig + Dispose) instead of the
		// obsolete IndexWriter(dir, analyzer, create)/Close() overload; 'using'
		// also guarantees the writer is released on failure.
		var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48));
		using (var writer = new IndexWriter(directory, config))
		{
			// Opening the writer on the empty RAMDirectory creates the index.
		}
		//searchFactory.RegisterDirectoryProviderForLocks(this);
	}
	catch (IOException e)
	{
		throw new HibernateException("Unable to initialize index: " + indexName, e);
	}
}
/// <summary>
/// For each ContainedIn association of the given entity, schedules an Update of
/// the containing entity so its index entry reflects the change. Supports a plain
/// reference, an array, or a collection (dictionaries are traversed by value).
/// </summary>
/// <param name="instance">The entity whose containers must be re-indexed.</param>
/// <param name="queue">The batch queue to append to.</param>
/// <param name="documentMapping">The mapping describing the ContainedIn associations.</param>
/// <param name="searchFactoryImplementor">Holds the per-class document builders.</param>
private static void ProcessContainedIn(Object instance, List<LuceneWork> queue, DocumentMapping documentMapping, ISearchFactoryImplementor searchFactoryImplementor)
{
	foreach (var association in documentMapping.ContainedIn)
	{
		object referenced = association.Getter.Get(instance);
		if (referenced == null)
		{
			continue;
		}

		Array referencedArray = referenced as Array;
		IEnumerable referencedSequence = referenced as IEnumerable;
		if (referencedArray != null)
		{
			foreach (object item in referencedArray)
			{
				// Highly inneficient but safe wrt the actual targeted class, e.g. polymorphic items in the array
				System.Type itemType = NHibernateUtil.GetClass(item);
				if (itemType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(itemType))
				{
					continue;
				}
				ProcessContainedInValue(item, queue, itemType, searchFactoryImplementor.DocumentBuilders[itemType], searchFactoryImplementor);
			}
		}
		else if (referencedSequence != null)
		{
			// NB We only see ISet and IDictionary`2 as IEnumerable
			IDictionary asDictionary = referenced as IDictionary;
			if (asDictionary != null)
			{
				referencedSequence = asDictionary.Values;
			}
			foreach (object item in referencedSequence)
			{
				// Highly inneficient but safe wrt the actual targeted class, e.g. polymorphic items in the array
				System.Type itemType = NHibernateUtil.GetClass(item);
				if (itemType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(itemType))
				{
					continue;
				}
				ProcessContainedInValue(item, queue, itemType, searchFactoryImplementor.DocumentBuilders[itemType], searchFactoryImplementor);
			}
		}
		else
		{
			// Single-valued reference.
			System.Type referencedType = NHibernateUtil.GetClass(referenced);
			if (referencedType == null || !searchFactoryImplementor.DocumentBuilders.ContainsKey(referencedType))
			{
				continue;
			}
			ProcessContainedInValue(referenced, queue, referencedType, searchFactoryImplementor.DocumentBuilders[referencedType], searchFactoryImplementor);
		}
	}
	// an embedded cannot have a useful @ContainedIn (no shared reference)
	// do not walk through them
}
/// <summary>
/// Captures both the session and the search factory this loader will use.
/// </summary>
/// <param name="session">The NHibernate session to load entities from.</param>
/// <param name="searchFactoryImplementor">The owning search factory.</param>
public void Init(ISession session, ISearchFactoryImplementor searchFactoryImplementor)
{
	this.session = session;
	this.searchFactoryImplementor = searchFactoryImplementor;
}
/// <summary>
/// No-op: this implementation requires no per-directory-provider setup.
/// </summary>
/// <param name="directoryProvider">The directory provider being configured; ignored.</param>
/// <param name="indexProperties">Index configuration properties; ignored.</param>
/// <param name="searchFactoryImplementor">The owning search factory; ignored.</param>
public void Initialize(IDirectoryProvider directoryProvider, IDictionary <string, string> indexProperties, ISearchFactoryImplementor searchFactoryImplementor)
{
}
/// <summary>
/// Initializes this directory provider for the named index. Implementations
/// receive the index-scoped properties and the owning search factory.
/// </summary>
/// <param name="directoryProviderName">Logical name of the index this provider serves.</param>
/// <param name="indexProps">Index-scoped configuration properties.</param>
/// <param name="searchFactory">The owning search factory.</param>
public abstract void Initialize(string directoryProviderName, IDictionary <string, string> indexProps, ISearchFactoryImplementor searchFactory);
/// <summary>
/// Creates a workspace bound to the given search factory.
/// </summary>
/// <param name="searchFactoryImplementor">The search factory this workspace operates against.</param>
public Workspace(ISearchFactoryImplementor searchFactoryImplementor)
{
	this.searchFactoryImplementor = searchFactoryImplementor;
}
/// <summary>
/// Computes the ordering key used by DeadLockFreeQueue: work items are sorted
/// per directory provider, with deletes before adds and optimize operations last,
/// so all threads acquire provider locks in the same predictable order.
/// </summary>
/// <param name="luceneWork">The work item (with its target provider) to rank.</param>
/// <param name="searchFactoryImplementor">The owning search factory; not used here.</param>
/// <returns>A deterministic ordering key for the work item.</returns>
private static long GetWorkHashCode(LuceneWorker.WorkWithPayload luceneWork, ISearchFactoryImplementor searchFactoryImplementor)
{
	IDirectoryProvider provider = luceneWork.Provider;
	// BUGFIX: mix the provider's *type* hash with its instance hash. The old code
	// added provider.GetHashCode() twice (collapsing to 32 * h), losing the
	// class-level component the Hibernate Search original relies on.
	int h = provider.GetType().GetHashCode();
	h = 31 * h + provider.GetHashCode();

	long extendedHash = h; // to be sure extendedHash + 1 < extendedHash + 2 is always true
	if (luceneWork.Work is AddLuceneWork)
	{
		extendedHash += 1; // add work after delete work
	}
	if (luceneWork.Work is OptimizeLuceneWork)
	{
		extendedHash += 2; // optimize after everything
	}
	return extendedHash;
}