public virtual void TestCachingWorks()
{
    // An empty index is enough: we only observe whether the inner filter is invoked.
    Directory ramDir = new RAMDirectory();
    IndexWriter idxWriter = new IndexWriter(ramDir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    idxWriter.Close();

    IndexReader idxReader = IndexReader.Open(ramDir);

    MockFilter mockFilter = new MockFilter();
    CachingWrapperFilter cachingFilter = new CachingWrapperFilter(mockFilter);

    // Cold cache: the wrapped filter must be consulted on the first request.
    cachingFilter.GetDocIdSet(idxReader);
    Assert.IsTrue(mockFilter.WasCalled(), "first time");

    // Mixing the legacy Bits API with GetDocIdSet must not throw,
    // even if the cache is holding the wrong kind of bitset.
    cachingFilter.Bits(idxReader);
    cachingFilter.GetDocIdSet(idxReader);

    // Warm cache: after clearing the call flag, the inner filter must stay untouched.
    mockFilter.Clear();
    cachingFilter.GetDocIdSet(idxReader);
    Assert.IsFalse(mockFilter.WasCalled(), "second time");

    idxReader.Close();
}
Exemplo n.º 2
0
        public virtual void TestCachingWorks()
        {
            // Build an empty index so there is a reader to run the filters against.
            Directory ramDir = new RAMDirectory();
            IndexWriter idxWriter = new IndexWriter(ramDir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            idxWriter.Close();

            IndexReader idxReader = IndexReader.Open(ramDir, true);

            MockFilter mockFilter = new MockFilter();
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(mockFilter);

            // Cold cache: the first request must reach the wrapped filter.
            cachingFilter.GetDocIdSet(idxReader);
            Assert.IsTrue(mockFilter.WasCalled(), "first time");

            // A repeated request must not throw even if the cache holds the wrong DocIdSet.
            cachingFilter.GetDocIdSet(idxReader);

            // Warm cache: after clearing the flag, the inner filter must not run again.
            mockFilter.Clear();
            cachingFilter.GetDocIdSet(idxReader);
            Assert.IsFalse(mockFilter.WasCalled(), "second time");

            idxReader.Close();
        }
Exemplo n.º 3
0
        public virtual void TestCachingWorks()
        {
            Directory indexDir = NewDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, indexDir);
            indexWriter.Dispose();

            // Wrap as a single atomic reader so one context covers the whole index.
            IndexReader atomicView = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(indexDir));
            AtomicReaderContext readerContext = (AtomicReaderContext)atomicView.Context;
            MockFilter mockFilter = new MockFilter();
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(mockFilter);

            // Cold cache: the nested filter must be consulted. Keep a strong reference
            // so the cached set cannot be collected between the calls below.
            DocIdSet pinned = cachingFilter.GetDocIdSet(readerContext, readerContext.AtomicReader.LiveDocs);

            Assert.IsTrue(mockFilter.WasCalled(), "first time");

            // A second lookup must not throw even if the cache holds the wrong docIdSet.
            cachingFilter.GetDocIdSet(readerContext, readerContext.AtomicReader.LiveDocs);

            // Warm cache: the nested filter must not be invoked again.
            mockFilter.Clear();
            cachingFilter.GetDocIdSet(readerContext, readerContext.AtomicReader.LiveDocs);
            Assert.IsFalse(mockFilter.WasCalled(), "second time");

            atomicView.Dispose();
            indexDir.Dispose();
        }
Exemplo n.º 4
0
        /// <summary>
        /// A filter whose DocIdSet returns a null iterator carries no matches; the caching
        /// wrapper must normalize that to the shared <c>DocIdSet.EMPTY_DOCIDSET</c> constant.
        /// Fix: removed 11 lines of dead, commented-out Java source that documented the
        /// anonymous filter's shape — the behavior belongs in AnonymousFilter2 itself.
        /// </summary>
        public void TestNullDocIdSetIterator()
        {
            // Empty index: gives us a valid reader with no documents.
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true);

            // AnonymousFilter2 is expected to produce a DocIdSet whose Iterator() is null
            // — TODO confirm against its declaration elsewhere in this file.
            Filter filter = new AnonymousFilter2();
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cacher.GetDocIdSet(reader));

            reader.Close();
        }
Exemplo n.º 5
0
        /// <summary>
        /// Compares the DocIdSet produced by the raw <paramref name="filter"/> with the one
        /// produced by a CachingWrapperFilter around it, and checks the cacheability
        /// contract: the cached set must always be cacheable, and it keeps the original
        /// set's concrete type only when the original was itself cacheable.
        /// Fixes: removed the dead "|| cachedSet == null" disjunct (cachedSet is provably
        /// non-null inside the else branch) and the commented-out println leftover.
        /// </summary>
        /// <param name="reader">Reader whose single atomic context the filters run over.</param>
        /// <param name="filter">The raw filter under test.</param>
        /// <param name="shouldCacheable">Expected cacheability of the raw filter's set.</param>
        private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
        {
            Assert.IsTrue(reader.Context is AtomicReaderContext);
            AtomicReaderContext  context     = (AtomicReaderContext)reader.Context;
            CachingWrapperFilter cacher      = new CachingWrapperFilter(filter);
            DocIdSet             originalSet = filter.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
            DocIdSet             cachedSet   = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);

            if (originalSet == null)
            {
                Assert.IsNull(cachedSet);
            }
            if (cachedSet == null)
            {
                // A null cached set is only legal when the original was null or empty.
                Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
            }
            else
            {
                Assert.IsTrue(cachedSet.IsCacheable);
                Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
                if (originalSet.IsCacheable)
                {
                    Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
                }
                else
                {
                    Assert.IsTrue(cachedSet is FixedBitSet, "Cached DocIdSet must be an FixedBitSet if the original one was not cacheable");
                }
            }
        }
        private IEnumerable<int> GetDocIDs(IndexReader reader, string groupByField, string group)
        {
            // Match every document whose groupByField equals the requested group value,
            // caching the resulting bitset so repeated group lookups reuse it.
            var groupTerm = new Term(groupByField, group);
            Filter cachedFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(groupTerm)));

            return cachedFilter.GetDocIdSet(reader).AsEnumerable().ToList();
        }
        private IEnumerable <int> GetDocIDs(IndexReader reader, string groupByField, string group)
        {
            // Resolve the group's matching document ids via a cached term-query filter.
            var termQuery = new TermQuery(new Term(groupByField, group));
            Filter wrapped = new CachingWrapperFilter(new QueryWrapperFilter(termQuery));
            var docIdSet = wrapped.GetDocIdSet(reader);

            return docIdSet.AsEnumerable().ToList();
        }
Exemplo n.º 8
0
        OpenBitSetDISI GetBitSet(string groupByField, string group)
        {
            // Build a cached filter for the (field, value) pair and materialize its
            // matches into an OpenBitSetDISI sized to the reader.
            var groupQuery = new TermQuery(new Term(groupByField, group));
            var cachedFilter = new CachingWrapperFilter(new QueryWrapperFilter(groupQuery));
            var matchIterator = cachedFilter.GetDocIdSet(_Reader).Iterator();

            return new OpenBitSetDISI(matchIterator, _Reader.MaxDoc);
        }
Exemplo n.º 9
0
        public OpenBitSetDISI TermToBitSet(string term, IndexReader indexReader)
        {
            // Translate a facet term into the set of matching document ids,
            // caching the underlying query filter for reuse across calls.
            var termQuery = new TermQuery(new Term(this.Field, term));
            var cachedFilter = new CachingWrapperFilter(new QueryWrapperFilter(termQuery));
            var matchIterator = cachedFilter.GetDocIdSet(indexReader).Iterator();

            return new OpenBitSetDISI(matchIterator, indexReader.MaxDoc());
        }
        public void TestNullDocIdSet()
        {
            // Empty index plus a filter returning a null DocIdSet: the cache must
            // substitute the shared empty-set constant instead of caching null.
            Directory ramDir = new RAMDirectory();
            IndexWriter idxWriter = new IndexWriter(ramDir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            idxWriter.Close();

            IndexReader idxReader = IndexReader.Open(ramDir, true);

            Filter nullSetFilter = new AnonymousFilter();
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(nullSetFilter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cachingFilter.GetDocIdSet(idxReader));

            idxReader.Close();
        }
Exemplo n.º 11
0
        public void TestNullDocIdSet()
        {
            // Same null-DocIdSet scenario against the state-threading API overloads
            // (the trailing null is the per-call state argument).
            Directory ramDir = new RAMDirectory();
            IndexWriter idxWriter = new IndexWriter(ramDir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
            idxWriter.Close();

            IndexReader idxReader = IndexReader.Open(ramDir, true, null);

            Filter nullSetFilter = new AnonymousFilter();
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(nullSetFilter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cachingFilter.GetDocIdSet(idxReader, null));

            idxReader.Close();
        }
Exemplo n.º 12
0
        private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
        {
            // Verify the caching wrapper's cacheability contract against the raw filter.
            var cachingFilter = new CachingWrapperFilter(filter);
            DocIdSet rawSet = filter.GetDocIdSet(reader);
            DocIdSet wrappedSet = cachingFilter.GetDocIdSet(reader);

            // Whatever goes into the cache must itself report cacheable.
            Assert.IsTrue(wrappedSet.IsCacheable);
            Assert.AreEqual(shouldCacheable, rawSet.IsCacheable);

            if (rawSet.IsCacheable)
            {
                // Cacheable sets are stored as-is, so the concrete type survives.
                Assert.AreEqual(rawSet.GetType(), wrappedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
            }
            else
            {
                // Non-cacheable sets get copied into an OpenBitSetDISI before caching.
                Assert.IsTrue(wrappedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
            }
        }
Exemplo n.º 13
0
        public virtual void TestNullDocIdSetIterator()
        {
            // A filter whose DocIdSet has a null iterator is equivalent to "no matches";
            // in the 4.x API the caching wrapper normalizes that to a null DocIdSet.
            Directory indexDir = NewDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(Random(), indexDir, Similarity, TimeZone);
            indexWriter.Dispose();

            IndexReader atomicView = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(indexDir));
            AtomicReaderContext readerContext = (AtomicReaderContext)atomicView.Context;

            Filter nullIteratorFilter = new FilterAnonymousInnerClassHelper2(this, readerContext);
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(nullIteratorFilter);

            // the caching filter should normalize the empty result to null
            Assert.IsNull(cachingFilter.GetDocIdSet(readerContext, readerContext.AtomicReader.LiveDocs));

            atomicView.Dispose();
            indexDir.Dispose();
        }
Exemplo n.º 14
0
        public virtual void TestNullDocIdSetIterator()
        {
            // A null-iterator DocIdSet means "no matches"; the caching wrapper
            // normalizes that to a null DocIdSet in this API version.
            Directory indexDir = NewDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, indexDir);
            indexWriter.Dispose();

            IndexReader atomicView = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(indexDir));
            AtomicReaderContext readerContext = (AtomicReaderContext)atomicView.Context;

            Filter nullIteratorFilter = new FilterAnonymousInnerClassHelper2(this, readerContext);
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(nullIteratorFilter);

            // the caching filter should normalize the empty result to null
            Assert.IsNull(cachingFilter.GetDocIdSet(readerContext, readerContext.AtomicReader.LiveDocs));

            atomicView.Dispose();
            indexDir.Dispose();
        }
Exemplo n.º 15
0
        public virtual void TestCachingWorks()
        {
            Directory indexDir = NewDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(Random(), indexDir);
            indexWriter.Dispose();

            // One atomic context over the whole (empty) index.
            IndexReader atomicView = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(indexDir));
            AtomicReaderContext readerContext = (AtomicReaderContext)atomicView.Context;
            MockFilter mockFilter = new MockFilter();
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(mockFilter);

            // Cold cache: the nested filter must run. Keep a strong reference so the
            // cached entry cannot be garbage-collected between the calls below.
            DocIdSet pinned = cachingFilter.GetDocIdSet(readerContext, ((AtomicReader)readerContext.Reader()).LiveDocs);
            Assert.IsTrue(mockFilter.WasCalled(), "first time");

            // Repeated lookups must not throw even if the cache holds the wrong docIdSet.
            cachingFilter.GetDocIdSet(readerContext, ((AtomicReader)readerContext.Reader()).LiveDocs);

            // Warm cache: the nested filter must stay untouched.
            mockFilter.Clear();
            cachingFilter.GetDocIdSet(readerContext, ((AtomicReader)readerContext.Reader()).LiveDocs);
            Assert.IsFalse(mockFilter.WasCalled(), "second time");

            atomicView.Dispose();
            indexDir.Dispose();
        }
Exemplo n.º 16
0
 OpenBitSetDISI GetBitSet(string groupByField, string group)
 {
     // Materialize the cached (field, value) filter's matches into a bitset
     // sized to the reader's document space.
     var groupTerm = new Term(groupByField, group);
     Filter cachedFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(groupTerm)));
     var matches = cachedFilter.GetDocIdSet(_Reader);
     return new OpenBitSetDISI(matches.Iterator(), _Reader.MaxDoc);
 }
 private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
 {
     // Check the caching wrapper's cacheability contract against the raw filter
     // (this API version exposes IsCacheable as a method, not a property).
     var cachingFilter = new CachingWrapperFilter(filter);
     DocIdSet rawSet = filter.GetDocIdSet(reader);
     DocIdSet wrappedSet = cachingFilter.GetDocIdSet(reader);

     // The cached set itself must always report cacheable.
     Assert.IsTrue(wrappedSet.IsCacheable());
     Assert.AreEqual(shouldCacheable, rawSet.IsCacheable());

     if (rawSet.IsCacheable())
     {
         // Cacheable sets are stored unchanged, so the concrete type is preserved.
         Assert.AreEqual(rawSet.GetType(), wrappedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
     }
     else
     {
         // Non-cacheable sets are copied into an OpenBitSetDISI before caching.
         Assert.IsTrue(wrappedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
     }
 }
        /// <summary>
        /// Computes facet value counts for one facet field by intersecting each value's
        /// bitset with the documents matching the base query (drilled down on every facet
        /// field except the one being calculated).
        /// Fix: removed the local "calculations" counter, which was incremented but never read.
        /// </summary>
        /// <param name="baseQueryWithoutFacetDrilldown">Base query without this field's drilldown.</param>
        /// <param name="allFacetFieldInfos">All configured facet fields.</param>
        /// <param name="facetFieldInfoToCalculateFor">The facet field whose counts are computed.</param>
        /// <returns>The collected facet matches, ordered per ResultCollection's rules.</returns>
        private IEnumerable<FacetMatch> FindMatchesInQuery(Query baseQueryWithoutFacetDrilldown, IList<FacetFieldInfo> allFacetFieldInfos, FacetFieldInfo facetFieldInfoToCalculateFor)
        {
            var queryFilter = new CachingWrapperFilter(new QueryWrapperFilter(CreateFacetedQuery(baseQueryWithoutFacetDrilldown, allFacetFieldInfos, facetFieldInfoToCalculateFor.FieldName)));
            var bitsQueryWithoutFacetDrilldown = new OpenBitSetDISI(queryFilter.GetDocIdSet(IndexReader).Iterator(), IndexReader.MaxDoc);
            // Scratch copy reused on every iteration so the base bitset is never rebuilt.
            var baseQueryWithoutFacetDrilldownCopy = new OpenBitSetDISI(bitsQueryWithoutFacetDrilldown.Bits.Length)
            {
                Bits = new long[bitsQueryWithoutFacetDrilldown.Bits.Length]
            };

            var calculatedFacetCounts = new ResultCollection(facetFieldInfoToCalculateFor);
            foreach (var facetValueBitSet in GetOrCreateFacetBitSet(facetFieldInfoToCalculateFor.FieldName).FacetValueBitSetList)
            {
                var isSelected = calculatedFacetCounts.IsSelected(facetValueBitSet.Value);

                if (!isSelected && facetValueBitSet.Count < calculatedFacetCounts.MinCountForNonSelected) //Impossible to get a better result
                {
                    // NOTE: assumes FacetValueBitSetList is ordered by descending count,
                    // so once counts fall below the threshold nothing better follows.
                    if (calculatedFacetCounts.HaveEnoughResults)
                        break;
                }

                // Refresh the scratch bitset with the base query's bits before AND-ing.
                bitsQueryWithoutFacetDrilldown.Bits.CopyTo(baseQueryWithoutFacetDrilldownCopy.Bits, 0);
                baseQueryWithoutFacetDrilldownCopy.NumWords = bitsQueryWithoutFacetDrilldown.NumWords;

                // Lazily compute the facet value's bitset when it was not precalculated.
                var bitset = facetValueBitSet.Bitset ?? CalculateOpenBitSetDisi(facetFieldInfoToCalculateFor.FieldName, facetValueBitSet.Value);
                baseQueryWithoutFacetDrilldownCopy.And(bitset);
                var count = baseQueryWithoutFacetDrilldownCopy.Cardinality();
                if (count == 0)
                    continue;
                var match = new FacetMatch
                {
                    Count = count,
                    Value = facetValueBitSet.Value,
                    FacetFieldName = facetFieldInfoToCalculateFor.FieldName
                };

                if (isSelected)
                    calculatedFacetCounts.AddToSelected(match);
                else
                    calculatedFacetCounts.AddToNonSelected(match);
            }

            return calculatedFacetCounts.GetList();
        }
Exemplo n.º 19
0
 /// <summary>
 /// Verifies the CachingWrapperFilter cacheability contract: the cached DocIdSet must
 /// always report cacheable, and it keeps the original set's concrete type only when
 /// the original set was itself cacheable; otherwise a FixedBitSet copy is cached.
 /// </summary>
 /// <param name="reader">Reader whose single atomic context the filters run over.</param>
 /// <param name="filter">The raw filter under test.</param>
 /// <param name="shouldCacheable">Expected cacheability of the raw filter's DocIdSet.</param>
 private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
 {
     // The wrapped reader must expose exactly one atomic context.
     Assert.IsTrue(reader.Context is AtomicReaderContext);
     AtomicReaderContext context = (AtomicReaderContext)reader.Context;
     CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
     DocIdSet originalSet = filter.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
     DocIdSet cachedSet = cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
     if (originalSet == null)
     {
         Assert.IsNull(cachedSet);
     }
     if (cachedSet == null)
     {
         // A null cached set is only legal when the original was null or had no iterator.
         Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
     }
     else
     {
         Assert.IsTrue(cachedSet.Cacheable);
         Assert.AreEqual(shouldCacheable, originalSet.Cacheable);
         //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
         if (originalSet.Cacheable)
         {
             Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
         }
         else
         {
             // NOTE(review): the "cachedSet == null" disjunct is unreachable in this else branch.
             Assert.IsTrue(cachedSet is FixedBitSet || cachedSet == null, "Cached DocIdSet must be an FixedBitSet if the original one was not cacheable");
         }
     }
 }
Exemplo n.º 20
0
        public virtual void TestNullDocIdSetIterator()
        {
            // A null-iterator DocIdSet means "no matches"; the caching wrapper
            // must normalize that to a null DocIdSet.
            Directory indexDir = NewDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(Random(), indexDir);
            indexWriter.Dispose();

            IndexReader atomicView = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(indexDir));
            AtomicReaderContext readerContext = (AtomicReaderContext)atomicView.Context;

            Filter nullIteratorFilter = new FilterAnonymousInnerClassHelper2(this, readerContext);
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(nullIteratorFilter);

            // the caching filter should normalize the empty result to null
            Assert.IsNull(cachingFilter.GetDocIdSet(readerContext, ((AtomicReader)readerContext.Reader()).LiveDocs));

            atomicView.Dispose();
            indexDir.Dispose();
        }
Exemplo n.º 21
0
        /// <summary>
        /// Runs the current query across all configured index paths, optionally computing
        /// facet counts, and returns the requested page of hits.
        /// Fixes: the Parallel.ForEach lambdas mutated a List&lt;T&gt; and an int counter
        /// without synchronization (List&lt;T&gt; is not thread-safe; "+=" is not atomic) —
        /// now guarded by a lock; and the result loop was bounded by
        /// "TotalHits - startIndex", which can overrun docs.ScoreDocs — now bounded by
        /// the number of hits actually returned.
        /// </summary>
        /// <param name="pageIndex">Zero-based page number; negative disables paging.</param>
        /// <param name="pageSize">Page size; non-positive disables paging.</param>
        /// <param name="totalCount">Total number of matching documents.</param>
        /// <param name="facetedResults">Facet groups, or null when no facet fields are configured.</param>
        public IEnumerable<IHit> Query(int pageIndex, int pageSize, out int totalCount, out IEnumerable<FacetGroup> facetedResults)
        {
            totalCount = 0;
            facetedResults = null;

            // Default to every known index path when no explicit search paths were set.
            // NOTE(review): assumes searchPaths is never null here — the null disjunct
            // would otherwise dereference it; confirm against the field's initialization.
            if (searchPaths == null || searchPaths.Count <= 0)
                searchPaths.AddRange(indexPaths.Values.Select(o => o.Path));

            List<LuceneHit> results = new List<LuceneHit>();

            List<IndexSearcher> subSearchs = new List<IndexSearcher>();

            searchPaths.ForEach(o => subSearchs.Add(new IndexSearcher(FSDirectory.Open(o))));

            if (facetFields != null && facetFields.Count > 0)
            {
                var facetGroups = new List<FacetGroup>();
                var mainQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
                MultiReader readers = new MultiReader(subSearchs.Select(o => o.IndexReader).ToArray());

                foreach (var facetField in facetFields)
                {
                    FacetGroup fg = new FacetGroup();
                    fg.FieldName = facetFieldNameProvider.GetMapName(TypeName, facetField);
                    var items = new List<FacetItem>();
                    // Guards items and totalHits, which are mutated from parallel iterations.
                    var sync = new object();

                    var allDistinctField = FieldCache_Fields.DEFAULT.GetStrings(readers, facetField).Distinct().ToArray();
                    int totalHits = 0;

                    Parallel.ForEach(allDistinctField, fieldValue =>
                        {
                            var facetQuery = new TermQuery(new Term(facetField, fieldValue));
                            var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));

                            // Intersect this facet value's matches with the main query's matches.
                            var bs = new OpenBitSetDISI(facetQueryFilter.GetDocIdSet(readers).Iterator(), readers.MaxDoc);
                            bs.InPlaceAnd(mainQueryFilter.GetDocIdSet(readers).Iterator());
                            int count = (Int32)bs.Cardinality();

                            FacetItem item = new FacetItem();
                            item.GroupValue = fieldValue;
                            item.Count = count;

                            lock (sync)
                            {
                                items.Add(item);
                                totalHits += count;
                            }
                        }
                    );

                    fg.FacetItems = items.OrderByDescending(o => o.Count);
                    fg.TotalHits = totalHits;

                    facetGroups.Add(fg);
                }

                facetedResults = facetGroups.OrderBy(o => o.FieldName);
            }
            ParallelMultiSearcher searcher = new ParallelMultiSearcher(subSearchs.ToArray());
            Sort sort = null;
            if (sortFields != null && sortFields.Count > 0)
            {
                sort = new Sort(sortFields.ToArray());
            }

            int maxDoc = searcher.MaxDoc;
            int startIndex = 0;
            if (pageIndex >= 0 && pageSize > 0)
            {
                startIndex = pageIndex * pageSize;
                maxDoc = pageSize * (pageIndex + 1);
            }
            var docs = sort == null ? searcher.Search(query, null, maxDoc) : searcher.Search(query, null, maxDoc, sort);
            totalCount = docs.TotalHits;
            // ScoreDocs holds at most maxDoc entries; iterate only what was returned.
            int endIndex = docs.ScoreDocs.Length;
            for (int i = startIndex; i < endIndex; i++)
            {
                LuceneHit h = new LuceneHit(TypeName, DocumentBuilder, searcher.Doc(docs.ScoreDocs[i].Doc));
                results.Add(h);
            }
            return results;
        }
        /// <summary>
        /// Builds facet groups for the given query by intersecting each filter's document
        /// set with the base query's document set, and stores them on Results.FacetGroups.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <param name="query">The query.</param>
        private void CreateFacets(IndexReader reader, Query query)
        {
            var groups = new List<FacetGroup>();
            // Cached so the base query's DocIdSet is computed once and reused per filter.
            var baseQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
            var baseDocIdSet = baseQueryFilter.GetDocIdSet(reader);

            #region Subcategory filters


            /* 
            var catalogCriteria = Results.SearchCriteria as CatalogItemSearchCriteria;
            if (catalogCriteria != null && catalogCriteria.ChildCategoryFilters.Any())
            {
                var group = new FacetGroup("Subcategory");
                var groupCount = 0;

                foreach (var value in catalogCriteria.ChildCategoryFilters)
                {
                    var q = LuceneQueryHelper.CreateQuery(catalogCriteria.OutlineField, value);

                    if (q == null) continue;

                    var queryFilter = new CachingWrapperFilter(new QueryWrapperFilter(q));
                    var filterArray = queryFilter.GetDocIdSet(reader);
                    var newCount = (int)CalculateFacetCount(baseDocIdSet, filterArray);
                    if (newCount == 0) continue;

                    var newFacet = new Facet(group, value.Code, value.Name, newCount);
                    group.Facets.Add(newFacet);
                    groupCount += newCount;
                }

                // Add only if items exist under
                if (groupCount > 0)
                {
                    groups.Add(group);
                }
            }
             * */

            #endregion

            if (Results.SearchCriteria.Filters != null && Results.SearchCriteria.Filters.Length > 0)
            {
                foreach (var filter in Results.SearchCriteria.Filters)
                {
                    // Price filters only apply when their currency matches the search currency.
                    if (!string.IsNullOrEmpty(Results.SearchCriteria.Currency) && filter is PriceRangeFilter)
                    {
                        var valCurrency = ((PriceRangeFilter)filter).Currency;
                        if (!valCurrency.Equals(Results.SearchCriteria.Currency, StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                    }

                    // Null means the filter produced no facet group worth reporting.
                    var facetGroup = CalculateResultCount(reader, baseDocIdSet, filter, Results.SearchCriteria);
                    if (facetGroup != null)
                    {
                        groups.Add(facetGroup);
                    }
                }
            }

            Results.FacetGroups = groups.ToArray();
        }