A ParallelTaxonomyArrays instance that is initialized from the taxonomy index. @lucene.experimental
Inheritance: ParallelTaxonomyArrays
Example #1
0
 private TaxonomyIndexArrays GetTaxoArrays()
 {
     // Lazily builds the parent/children arrays using double-checked locking.
     if (taxoArrays != null)
     {
         return taxoArrays;
     }
     lock (this)
     {
         if (taxoArrays == null)
         {
             InitReaderManager();
             DirectoryReader reader = readerManager.Acquire();
             try
             {
                 // according to Java Concurrency, this might perform better on some
                 // JVMs, since the object initialization doesn't happen on the
                 // volatile member.
                 TaxonomyIndexArrays freshArrays = new TaxonomyIndexArrays(reader);
                 taxoArrays = freshArrays;
             }
             finally
             {
                 readerManager.Release(reader);
             }
         }
     }
     return taxoArrays;
 }
 protected internal override void DoClose()
 {
     indexReader.Dispose();
     // Drop our reference to the arrays; the caches are deliberately NOT
     // cleared, since other DTR instances may still be sharing them.
     taxoArrays = null;
     categoryCache = null;
     ordinalCache = null;
 }
        /// <summary>
        /// Called only from <c>DoOpenIfChanged()</c>. If the taxonomy has been
        /// recreated, you should pass <c>null</c> as the caches and parent/children
        /// arrays.
        /// </summary>
        internal DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter, LRUHashMap<FacetLabel, IntClass> ordinalCache, LRUHashMap<int, FacetLabel> categoryCache, TaxonomyIndexArrays taxoArrays)
        {
            this.indexReader = indexReader;
            this.taxoWriter  = taxoWriter;
            // -1 marks "no writer attached"; otherwise record the writer's epoch
            // so a later recreate of the taxonomy can be detected.
            this.taxoEpoch   = taxoWriter == null ? -1 : taxoWriter.TaxonomyEpoch;

            // use the same instance of the cache, note the protective code in getOrdinal and getPath
            this.ordinalCache  = ordinalCache == null ? new LRUHashMap<FacetLabel, IntClass>(DEFAULT_CACHE_VALUE) : ordinalCache;
            this.categoryCache = categoryCache == null ? new LRUHashMap<int, FacetLabel>(DEFAULT_CACHE_VALUE) : categoryCache;

            this.taxoArrays = taxoArrays != null ? new TaxonomyIndexArrays(indexReader, taxoArrays) : null;
        }
        /// <summary>
        /// Called only from <c>DoOpenIfChanged()</c>. If the taxonomy has been
        /// recreated, you should pass <c>null</c> as the caches and parent/children
        /// arrays.
        /// </summary>
        internal DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter, LRUHashMap<FacetLabel, IntClass> ordinalCache, LRUHashMap<int, FacetLabel> categoryCache, TaxonomyIndexArrays taxoArrays)
        {
            this.indexReader = indexReader;
            this.taxoWriter = taxoWriter;
            // -1 marks "no writer attached"; otherwise record the writer's epoch
            // so a later recreate of the taxonomy can be detected.
            this.taxoEpoch = taxoWriter == null ? -1 : taxoWriter.TaxonomyEpoch;

            // use the same instance of the cache, note the protective code in getOrdinal and getPath
            this.ordinalCache = ordinalCache == null ? new LRUHashMap<FacetLabel, IntClass>(DEFAULT_CACHE_VALUE) : ordinalCache;
            this.categoryCache = categoryCache == null ? new LRUHashMap<int, FacetLabel>(DEFAULT_CACHE_VALUE) : categoryCache;

            this.taxoArrays = taxoArrays != null ? new TaxonomyIndexArrays(indexReader, taxoArrays) : null;
        }
 private void InitTaxoArrays()
 {
     lock (this)
     {
         if (taxoArrays != null)
         {
             return; // another thread already built the arrays
         }
         // according to Java Concurrency in Practice, this might perform better on
         // some JVMs, because the array initialization doesn't happen on the
         // volatile member.
         TaxonomyIndexArrays freshArrays = new TaxonomyIndexArrays(indexReader);
         taxoArrays = freshArrays;
     }
 }
Example #6
0
        // LUCENENET specific - eliminated the InitTaxoArrays() method in favor of LazyInitializer

        protected override void Dispose(bool disposing) // LUCENENET specific - changed from DoClose()
        {
            if (!disposing || isDisposed)
            {
                return;
            }
            indexReader.Dispose();
            taxoArrays = null;
            // do not clear() the caches, as they may be used by other DTR instances.
            ordinalCache = null;
            categoryCache = null;
            // LUCENENET specific - clean up the ReaderWriterLockSlim instances we own.
            ordinalCacheLock.Dispose();
            categoryCacheLock.Dispose();
            isDisposed = true;
        }
Example #7
0
        public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom)
        {
            Debug.Assert(copyFrom != null);

            // copyFrom's parents length may equal reader.MaxDoc - this is not a
            // bug: taxonomy segments may have been merged, producing an updated
            // NRT reader even though nothing changed. Unlikely, but legal.
            int[] sourceParents = copyFrom.Parents;
            this.parents = new int[reader.MaxDoc];
            Array.Copy(sourceParents, 0, parents, 0, sourceParents.Length);
            InitParents(reader, sourceParents.Length);

            // Mirror children/siblings only when the source already computed them.
            if (copyFrom.initializedChildren)
            {
                InitChildrenSiblings(copyFrom);
            }
        }
        // Copy constructor: clones an existing TaxonomyIndexArrays into arrays
        // sized for the (possibly larger) given reader, then fills in the tail.
        public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom)
        {
            Debug.Assert(copyFrom != null);

            // note that copyParents.length may be equal to reader.maxDoc(). this is not a bug
            // it may be caused if e.g. the taxonomy segments were merged, and so an updated
            // NRT reader was obtained, even though nothing was changed. this is not very likely
            // to happen.
            int[] copyParents = copyFrom.Parents;
            this.parents = new int[reader.MaxDoc];
            Array.Copy(copyParents, 0, parents, 0, copyParents.Length);
            // Fill parents for any ordinals beyond what copyFrom already covered.
            InitParents(reader, copyParents.Length);

            // Only clone children/siblings if the source had already built them.
            if (copyFrom.initializedChildren)
            {
                InitChildrenSiblings(copyFrom);
            }
        }
Example #9
0
 // Lazily builds the children/siblings arrays exactly once; safe to call
 // concurrently. LUCENENET variant using LazyInitializer instead of lock.
 private void InitChildrenSiblings(TaxonomyIndexArrays copyFrom)
 {
     if (!initializedChildren) // must do this check !
     {
         // EnsureInitialized guards the factory with syncLock and sets
         // initializedChildren to true once the delegate completes.
         LazyInitializer.EnsureInitialized(ref children, ref initializedChildren, ref syncLock, () =>
         {
             children = new int[parents.Length];
             siblings = new int[parents.Length];
             if (copyFrom != null)
             {
                 // called from the ctor, after we know copyFrom has initialized children/siblings
                 Array.Copy(copyFrom.Children, 0, children, 0, copyFrom.Children.Length);
                 Array.Copy(copyFrom.Siblings, 0, siblings, 0, copyFrom.Siblings.Length);
                 ComputeChildrenSiblings(copyFrom.parents.Length);
             }
             else
             {
                 ComputeChildrenSiblings(0);
             }
             // The returned array is what EnsureInitialized stores back into
             // 'children' (same instance we just assigned above).
             return(children);
         });
     }
 }
Example #10
0
 private void InitChildrenSiblings(TaxonomyIndexArrays copyFrom)
 {
     // Builds the children/siblings arrays exactly once, under the instance lock.
     lock (this)
     {
         if (initializedChildren) // must do this check !
         {
             return;
         }
         int size = parents.Length;
         children = new int[size];
         siblings = new int[size];
         if (copyFrom == null)
         {
             ComputeChildrenSiblings(0);
         }
         else
         {
             // Reached from the ctor, once copyFrom is known to have already
             // initialized its children/siblings arrays.
             Array.Copy(copyFrom.Children, 0, children, 0, copyFrom.Children.Length);
             Array.Copy(copyFrom.Siblings, 0, siblings, 0, copyFrom.Siblings.Length);
             ComputeChildrenSiblings(copyFrom.parents.Length);
         }
         initializedChildren = true;
     }
 }
Example #11
0
        /// <summary>
        /// Note that the methods calling <see cref="AddCategoryDocument"/> are synchronized, so
        /// this method is effectively synchronized as well.
        /// </summary>
        private int AddCategoryDocument(FacetLabel categoryPath, int parent)
        {
            // Before Lucene 2.9, position increments >=0 were supported, so we
            // added 1 to parent to allow the parent -1 (the parent of the root).
            // Unfortunately, starting with Lucene 2.9, after LUCENE-1542, this is
            // no longer enough, since 0 is not encoded consistently either (see
            // comment in SinglePositionTokenStream). But because we must be
            // backward-compatible with existing indexes, we can't just fix what
            // we write here (e.g., to write parent+2), and need to do a workaround
            // in the reader (which knows that anyway only category 0 has a parent
            // -1).
            parentStream.Set(Math.Max(parent + 1, 1));
            Document d = new Document();

            d.Add(parentStreamField);

            fullPathField.SetStringValue(FacetsConfig.PathToString(categoryPath.Components, categoryPath.Length));
            d.Add(fullPathField);

            // Note that we do not pass an Analyzer here because the fields that are
            // added to the Document are untokenized or contain their own TokenStream.
            // Therefore the IndexWriter's Analyzer has no effect.
            indexWriter.AddDocument(d);
            int id = nextID++;

            // added a category document, mark that ReaderManager is not up-to-date
            shouldRefreshReaderManager = true;

            // also add to the parent array
            taxoArrays = GetTaxoArrays().Add(id, parent);

            // NOTE: this line must be executed last, or else the cache gets updated
            // before the parents array (LUCENE-4596)
            AddToCache(categoryPath, id);

            return(id);
        }
Example #12
0
        /// <summary>
        /// Replaces the current taxonomy with the given one. This method should
        /// generally be called in conjunction with
        /// <see cref="IndexWriter.AddIndexes(Directory[])"/> to replace both the taxonomy
        /// as well as the search index content.
        /// </summary>
        public virtual void ReplaceTaxonomy(Directory taxoDir)
        {
            lock (this)
            {
                // replace the taxonomy by doing IW optimized operations
                indexWriter.DeleteAll();
                indexWriter.AddIndexes(taxoDir);
                shouldRefreshReaderManager = true;
                InitReaderManager(); // ensure that it's initialized
                RefreshReaderManager();
                // next ordinal continues after everything imported from taxoDir
                nextID     = indexWriter.MaxDoc;
                taxoArrays = null; // must nullify so that it's re-computed next time it's needed

                // need to clear the cache, so that addCategory won't accidentally return
                // old categories that are in the cache.
                cache.Clear();
                cacheIsComplete   = false;
                shouldFillCache   = true;
                cacheMisses.Value = 0;

                // update indexEpoch as a taxonomy replace is just like it has been recreated
                ++indexEpoch;
            }
        }
 // Releases the underlying index reader and drops the taxonomy arrays.
 protected internal override void DoClose()
 {
     indexReader.Dispose();
     taxoArrays = null;
     // do not clear() the caches, as they may be used by other DTR instances.
     ordinalCache = null;
     categoryCache = null;
 }
 // Lazily builds the taxonomy arrays exactly once, under the instance lock.
 private void InitTaxoArrays()
 {
     lock (this)
     {
         if (taxoArrays == null)
         {
             // according to Java Concurrency in Practice, this might perform better on
             // some JVMs, because the array initialization doesn't happen on the
             // volatile member.
             TaxonomyIndexArrays tmpArrays = new TaxonomyIndexArrays(indexReader);
             taxoArrays = tmpArrays;
         }
     }
 }
        /// <summary>
        /// Note that the methods calling addCategoryDocument() are synchronized, so
        /// this method is effectively synchronized as well.
        /// </summary>
        private int AddCategoryDocument(FacetLabel categoryPath, int parent)
        {
            // Before Lucene 2.9, position increments >=0 were supported, so we
            // added 1 to parent to allow the parent -1 (the parent of the root).
            // Unfortunately, starting with Lucene 2.9, after LUCENE-1542, this is
            // no longer enough, since 0 is not encoded consistently either (see
            // comment in SinglePositionTokenStream). But because we must be
            // backward-compatible with existing indexes, we can't just fix what
            // we write here (e.g., to write parent+2), and need to do a workaround
            // in the reader (which knows that anyway only category 0 has a parent
            // -1).
            parentStream.Set(Math.Max(parent + 1, 1));
            Document d = new Document();
            d.Add(parentStreamField);

            fullPathField.StringValue = FacetsConfig.PathToString(categoryPath.Components, categoryPath.Length);
            d.Add(fullPathField);

            // Note that we do not pass an Analyzer here because the fields that are
            // added to the Document are untokenized or contain their own TokenStream.
            // Therefore the IndexWriter's Analyzer has no effect.
            indexWriter.AddDocument(d);
            int id = nextID++;

            // added a category document, mark that ReaderManager is not up-to-date
            shouldRefreshReaderManager = true;

            // also add to the parent array
            taxoArrays = TaxoArrays.Add(id, parent);

            // NOTE: this line must be executed last, or else the cache gets updated
            // before the parents array (LUCENE-4596)
            AddToCache(categoryPath, id);

            return id;
        }
        /// <summary>
        /// Replaces the current taxonomy with the given one. This method should
        /// generally be called in conjunction with
        /// <see cref="IndexWriter.AddIndexes(Directory[])"/> to replace both the taxonomy
        /// as well as the search index content.
        /// </summary>
        public virtual void ReplaceTaxonomy(Directory taxoDir)
        {
            lock (this)
            {
                // replace the taxonomy by doing IW optimized operations
                indexWriter.DeleteAll();
                indexWriter.AddIndexes(taxoDir);
                shouldRefreshReaderManager = true;
                InitReaderManager(); // ensure that it's initialized
                RefreshReaderManager();
                // next ordinal continues after everything imported from taxoDir
                nextID = indexWriter.MaxDoc;
                taxoArrays = null; // must nullify so that it's re-computed next time it's needed

                // need to clear the cache, so that addCategory won't accidentally return
                // old categories that are in the cache.
                cache.Clear();
                cacheIsComplete = false;
                shouldFillCache = true;
                cacheMisses.Set(0);

                // update indexEpoch as a taxonomy replace is just like it has been recreated
                ++indexEpoch;
            }
        }
 // Lazily builds the children/siblings arrays exactly once, under the
 // instance lock; copies from copyFrom when provided (ctor path).
 private void InitChildrenSiblings(TaxonomyIndexArrays copyFrom)
 {
     lock (this)
     {
         if (!initializedChildren) // must do this check !
         {
             children = new int[parents.Length];
             siblings = new int[parents.Length];
             if (copyFrom != null)
             {
                 // called from the ctor, after we know copyFrom has initialized children/siblings
                 Array.Copy(copyFrom.Children, 0, children, 0, copyFrom.Children.Length);
                 Array.Copy(copyFrom.Siblings, 0, siblings, 0, copyFrom.Siblings.Length);
                 ComputeChildrenSiblings(copyFrom.parents.Length);
             }
             else
             {
                 ComputeChildrenSiblings(0);
             }
             // Flag set last so readers only observe fully built arrays.
             initializedChildren = true;
         }
     }
 }