Example No. 1
        public void testMissingTerms()
        {
            String fieldName = "field1";
            Directory rd = new RAMDirectory();
            var w = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 100; i++)
            {
                var doc = new Document();
                int term = i*10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();
            w.Close();

            TermsFilter tf = new TermsFilter();
            tf.AddTerm(new Term(fieldName, "19"));
            FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            reader.Close();
            rd.Close();
        }
        public static Filter CreateTermsFilter(string fieldName, string value)
        {
            var query = new TermsFilter();

            query.AddTerm(new Term(fieldName, value));
            return(query);
        }
        public void TestFieldNotPresent()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            int num = AtLeast(3);
            int skip = Random().Next(num);
            var terms = new List<Term>();
            for (int i = 0; i < num; i++)
            {
                terms.Add(new Term("field" + i, "content1"));
                Document doc = new Document();
                if (skip == i)
                {
                    continue;
                }
                doc.Add(NewStringField("field" + i, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }

            w.ForceMerge(1);
            IndexReader reader = w.Reader;
            w.Dispose();
            assertEquals(1, reader.Leaves.size());

            AtomicReaderContext context = reader.Leaves.First();
            TermsFilter tf = new TermsFilter(terms);

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
            assertEquals("Must be num fields - 1 since we skip only one field", num - 1, bits.Cardinality());
            reader.Dispose();
            dir.Dispose();
        }
Example No. 4
        Filter IContentHandler.GetFilter(ResourceSearchQuery searchQuery)
        {
            if (!searchQuery.SubcategoryId.HasValue && !searchQuery.CategoryId.HasValue)
            {
                return(null);
            }

            var filter = new TermsFilter();

            //use subcategory in preference to category
            if (searchQuery.SubcategoryId.HasValue)
            {
                filter.addTerm(new Term(FieldName.SubcategoryId, searchQuery.SubcategoryId.Value.ToFieldValue()));
            }
            else
            {
                var category = _categories.FirstOrDefault(c => c.Id == searchQuery.CategoryId);
                if (category != null)
                {
                    foreach (var subcategory in category.Subcategories)
                    {
                        filter.addTerm(new Term(FieldName.SubcategoryId, subcategory.Id.ToFieldValue()));
                    }
                }
            }

            return(filter);
        }
Example No. 5
        public void TestHashCodeAndEquals()
        {
            int          num         = AtLeast(100);
            bool         singleField = Random.NextBoolean();
            IList <Term> terms       = new List <Term>();
            var          uniqueTerms = new HashSet <Term>();

            for (int i = 0; i < num; i++)
            {
                string field   = "field" + (singleField ? "1" : Random.Next(100).ToString());
                string @string = TestUtil.RandomRealisticUnicodeString(Random);
                terms.Add(new Term(field, @string));
                uniqueTerms.Add(new Term(field, @string));
                TermsFilter left = TermsFilter(singleField && Random.NextBoolean(), uniqueTerms);
                Collections.Shuffle(terms);
                TermsFilter right = TermsFilter(singleField && Random.NextBoolean(), terms);
                assertEquals(right, left);
                assertEquals(right.GetHashCode(), left.GetHashCode());
                if (uniqueTerms.Count > 1)
                {
                    IList <Term> asList = new List <Term>(uniqueTerms);
                    asList.RemoveAt(0);
                    TermsFilter notEqual = TermsFilter(singleField && Random.NextBoolean(), asList);
                    assertFalse(left.Equals(notEqual));
                    assertFalse(right.Equals(notEqual));
                }
            }
        }
Example No. 6
        LuceneFilter IContentHandler.GetFilter(JobAdSearchQuery searchQuery)
        {
            // If the community is not set then only include non-community jobs.

            if (!searchQuery.CommunityId.HasValue)
            {
                var filter = new TermsFilter();
                filter.addTerm(new Term(FieldName.HasCommunity, HasNoCommunityId));
                return(filter);
            }

            // The community is set so it needs to be included.

            var specificCommunityFilter = new TermsFilter();

            specificCommunityFilter.addTerm(new Term(FieldName.Community, searchQuery.CommunityId.Value.ToFieldValue()));

            // If it is only the community that must be returned then return it.

            if (searchQuery.CommunityOnly.HasValue && searchQuery.CommunityOnly.Value)
            {
                return(specificCommunityFilter);
            }

            // Need to include those that match the community, or have no community set, i.e. all other community jobs should not be returned.

            var noCommunityFilter = new TermsFilter();

            noCommunityFilter.addTerm(new Term(FieldName.HasCommunity, HasNoCommunityId));

            return(new ChainedFilter(new Filter[] { specificCommunityFilter, noCommunityFilter }, ChainedFilter.OR));
        }
        /// <summary>
        ///     Creates the query.
        /// </summary>
        /// <param name="field">The field.</param>
        /// <param name="value">The value.</param>
        /// <returns></returns>
        public static Filter CreateQuery(string field, AttributeFilterValue value)
        {
            object val   = value.Value;
            var    query = new TermsFilter();

            query.AddTerm(new Term(field, ConvertToSearchable(val, false)));
            return(query);
        }
Example No. 8
        public void TestToString()
        {
            TermsFilter termsFilter = new TermsFilter(
                new Term("field1", "a"),
                new Term("field1", "b"),
                new Term("field1", "c"));

            assertEquals("field1:a field1:b field1:c", termsFilter.ToString());
        }
Example No. 9
        public void TestSingleFieldEquals()
        {
            // Two terms with the same hash code
            //assertEquals("AaAaBB".GetHashCode(), "BBBBBB".GetHashCode());
            TermsFilter left  = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", "AaAaBB"));
            TermsFilter right = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", "BBBBBB"));

            assertFalse(left.Equals(right));
        }
Example No. 10
        public void TestMissingField()
        {
            string            fieldName = "field1";
            Directory         rd1       = NewDirectory();
            RandomIndexWriter w1        = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, rd1);
            Document doc = new Document();

            doc.Add(NewStringField(fieldName, "content1", Field.Store.YES));
            w1.AddDocument(doc);
            IndexReader reader1 = w1.GetReader();

            w1.Dispose();

            fieldName = "field2";
            Directory         rd2 = NewDirectory();
            RandomIndexWriter w2  = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, rd2);

            doc = new Document();
            doc.Add(NewStringField(fieldName, "content2", Field.Store.YES));
            w2.AddDocument(doc);
            IndexReader reader2 = w2.GetReader();

            w2.Dispose();

            TermsFilter tf    = new TermsFilter(new Term(fieldName, "content1"));
            MultiReader multi = new MultiReader(reader1, reader2);

            foreach (AtomicReaderContext context in multi.Leaves)
            {
                DocIdSet docIdSet = tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
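                // A segment that never indexed the field yields a null DocIdSet rather than an empty bit set.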
                if (context.Reader.DocFreq(new Term(fieldName, "content1")) == 0)
                {
                    assertNull(docIdSet);
                }
                else
                {
                    FixedBitSet bits = (FixedBitSet)docIdSet;
                    assertTrue("Must be >= 0", bits.Cardinality >= 0);
                }
            }
            multi.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            rd1.Dispose();
            rd2.Dispose();
        }
        private static TermsFilter CreateTermsFilter(string fieldName, IEnumerable <string> values)
        {
            var query = new TermsFilter();

            foreach (var value in values)
            {
                query.AddTerm(new Term(fieldName, value));
            }

            return(query);
        }
Example No. 12
        public void TestRandom()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);
            int  num         = AtLeast(100);
            bool singleField = Random.NextBoolean();

            JCG.List <Term> terms = new JCG.List <Term>();
            for (int i = 0; i < num; i++)
            {
                string field   = "field" + (singleField ? "1" : Random.Next(100).ToString(CultureInfo.InvariantCulture));
                string @string = TestUtil.RandomRealisticUnicodeString(Random);
                terms.Add(new Term(field, @string));
                Document doc = new Document();
                doc.Add(NewStringField(field, @string, Field.Store.YES));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();

            w.Dispose();

            IndexSearcher searcher = NewSearcher(reader);

            int numQueries = AtLeast(10);

            for (int i = 0; i < numQueries; i++)
            {
                terms.Shuffle(Random);
                int          numTerms = 1 + Random.Next(Math.Min(BooleanQuery.MaxClauseCount, terms.Count));
                BooleanQuery bq       = new BooleanQuery();
                for (int j = 0; j < numTerms; j++)
                {
                    bq.Add(new BooleanClause(new TermQuery(terms[j]), Occur.SHOULD));
                }
                TopDocs queryResult = searcher.Search(new ConstantScoreQuery(bq), reader.MaxDoc);
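                // The same terms expressed as a TermsFilter over MatchAllDocsQuery must select exactly the same documents.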

                MatchAllDocsQuery matchAll     = new MatchAllDocsQuery();
                TermsFilter       filter       = TermsFilter(singleField, terms.GetView(0, numTerms)); // LUCENENET: Checked length for correctness
                TopDocs           filterResult = searcher.Search(matchAll, filter, reader.MaxDoc);
                assertEquals(filterResult.TotalHits, queryResult.TotalHits);
                ScoreDoc[] scoreDocs = filterResult.ScoreDocs;
                for (int j = 0; j < scoreDocs.Length; j++)
                {
                    assertEquals(scoreDocs[j].Doc, queryResult.ScoreDocs[j].Doc);
                }
            }

            reader.Dispose();
            dir.Dispose();
        }
Example No. 13
        LuceneFilter IContentHandler.GetFilter(MemberSearchQuery searchQuery)
        {
            if (searchQuery.HasResume == null)
            {
                return(null);
            }

            var filter = new TermsFilter();

            filter.addTerm(new Term(FieldName.HasResume, NumericUtils.intToPrefixCoded(searchQuery.HasResume.Value ? 1 : 0)));
            return(filter);
        }
Example No. 14
        LuceneFilter IContentHandler.GetFilter(MemberSearchQuery searchQuery)
        {
            if (searchQuery.CommunityId == null)
            {
                return(null);
            }

            var filter = new TermsFilter();

            filter.addTerm(new Term(FieldName.Community, searchQuery.CommunityId.Value.ToFieldValue()));
            return(filter);
        }
Example No. 15
        public void TestCachability()
        {
            TermsFilter      a             = TermsFilter(Random.NextBoolean(), new Term("field1", "a"), new Term("field1", "b"));
            HashSet <Filter> cachedFilters = new HashSet <Filter>();

            cachedFilters.Add(a);
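            // b is built from the same terms in a different order; TermsFilter equality and hash code ignore order, so it should hit the cached entry.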
            TermsFilter b = TermsFilter(Random.NextBoolean(), new Term("field1", "b"), new Term("field1", "a"));

            assertTrue("Must be cached", cachedFilters.Contains(b));
            //duplicate term
            assertTrue("Must be cached", cachedFilters.Contains(TermsFilter(true, new Term("field1", "a"), new Term("field1", "a"), new Term("field1", "b"))));
            assertFalse("Must not be cached", cachedFilters.Contains(TermsFilter(Random.NextBoolean(), new Term("field1", "a"), new Term("field1", "a"), new Term("field1", "b"), new Term("field1", "v"))));
        }
Example No. 16
        public void TestRandom()
        {
            Directory         dir    = NewDirectory();
            RandomIndexWriter w      = new RandomIndexWriter(Random, dir, Similarity, TimeZone);
            int          num         = AtLeast(100);
            bool         singleField = Random.NextBoolean();
            IList <Term> terms       = new List <Term>();

            for (int i = 0; i < num; i++)
            {
                string field   = "field" + (singleField ? "1" : Random.Next(100).ToString());
                string @string = TestUtil.RandomRealisticUnicodeString(Random);
                terms.Add(new Term(field, @string));
                Document doc = new Document();
                doc.Add(NewStringField(field, @string, Field.Store.YES));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();

            w.Dispose();

            IndexSearcher searcher = NewSearcher(reader);

            int numQueries = AtLeast(10);

            for (int i = 0; i < numQueries; i++)
            {
                Collections.Shuffle(terms);
                int          numTerms = 1 + Random.Next(Math.Min(BooleanQuery.MaxClauseCount, terms.Count));
                BooleanQuery bq       = new BooleanQuery();
                for (int j = 0; j < numTerms; j++)
                {
                    bq.Add(new BooleanClause(new TermQuery(terms[j]), Occur.SHOULD));
                }
                TopDocs queryResult = searcher.Search(new ConstantScoreQuery(bq), reader.MaxDoc);

                MatchAllDocsQuery matchAll     = new MatchAllDocsQuery();
                TermsFilter       filter       = TermsFilter(singleField, terms.SubList(0, numTerms));
                TopDocs           filterResult = searcher.Search(matchAll, filter, reader.MaxDoc);
                assertEquals(filterResult.TotalHits, queryResult.TotalHits);
                ScoreDoc[] scoreDocs = filterResult.ScoreDocs;
                for (int j = 0; j < scoreDocs.Length; j++)
                {
                    assertEquals(scoreDocs[j].Doc, queryResult.ScoreDocs[j].Doc);
                }
            }

            reader.Dispose();
            dir.Dispose();
        }
Example No. 17
        LuceneFilter IContentHandler.GetFilter(MemberSearchQuery searchQuery)
        {
            if (!searchQuery.EthnicStatus.HasValue || searchQuery.EthnicStatus.Value == default(EthnicStatus))
            {
                return(null);
            }

            var filter = new TermsFilter();

            foreach (var status in Split(searchQuery.EthnicStatus.Value))
            {
                filter.addTerm(new Term(FieldName.EthnicStatus, Encode(status)));
            }
            return(filter);
        }
Example No. 18
        public void TestSkipField()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);
            int num   = AtLeast(10);
            var terms = new JCG.HashSet <Term>();

            for (int i = 0; i < num; i++)
            {
                string field = "field" + Random.Next(100);
                terms.Add(new Term(field, "content1"));
                Document doc = new Document();
                doc.Add(NewStringField(field, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }
            int randomFields = Random.Next(10);

            for (int i = 0; i < randomFields; i++)
            {
                while (true)
                {
                    string field = "field" + Random.Next(100);
                    Term   t     = new Term(field, "content1");
                    if (!terms.Contains(t))
                    {
                        terms.Add(t);
                        break;
                    }
                }
            }
            w.ForceMerge(1);
            IndexReader reader = w.GetReader();

            w.Dispose();
            assertEquals(1, reader.Leaves.size());
            AtomicReaderContext context = reader.Leaves[0];
            TermsFilter         tf      = new TermsFilter(terms.ToList());

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);

            assertEquals(context.Reader.NumDocs, bits.Cardinality);
            reader.Dispose();
            dir.Dispose();
        }
        public TermsFilterInstance CreateTermsFilter(object fieldName, object text)
        {
            var fieldNameValue = JurassicHelper.GetTypedArgumentValue(Engine, fieldName, String.Empty);
            var textValue      = JurassicHelper.GetTypedArgumentValue(Engine, text, String.Empty);

            var termsFilter = new TermsFilter();

            if (fieldNameValue.IsNullOrWhiteSpace() == false && textValue.IsNullOrWhiteSpace() == false)
            {
                termsFilter.Terms.Add(new Term {
                    FieldName = fieldNameValue, Value = textValue
                });
            }

            return(new TermsFilterInstance(Engine.Object.InstancePrototype, termsFilter));
        }
        public override Filter MakeFilter(SpatialArgs args)
        {
            SpatialOperation op = args.Operation;
            if (op != SpatialOperation.Intersects)
                throw new UnsupportedSpatialOperation(op);

            Shape shape = args.Shape;
            int detailLevel = grid.GetLevelForDistance(args.ResolveDistErr(ctx, distErrPct));
            var cells = grid.GetNodes(shape, detailLevel, false);
            var filter = new TermsFilter();
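            // One term per covering grid cell: documents indexed with any of these cell tokens are treated as intersecting the query shape.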
            foreach (Node cell in cells)
            {
                filter.AddTerm(new Term(GetFieldName(), cell.GetTokenString()));
            }
            return filter;
        }
Example No. 21
        public void testCachability()
        {
            TermsFilter a = new TermsFilter();
            a.AddTerm(new Term("field1", "a"));
            a.AddTerm(new Term("field1", "b"));
            HashSet<Filter> cachedFilters = new HashSet<Filter>();
            cachedFilters.Add(a);
            TermsFilter b = new TermsFilter();
            b.AddTerm(new Term("field1", "a"));
            b.AddTerm(new Term("field1", "b"));

            Assert.True(cachedFilters.Contains(b), "Must be cached");
            b.AddTerm(new Term("field1", "a")); //duplicate term
            Assert.True(cachedFilters.Contains(b), "Must be cached");
            b.AddTerm(new Term("field1", "c"));
            Assert.False(cachedFilters.Contains(b), "Must not be cached");
        }
Example No. 22
        public void TestSingleFieldEquals()
        {
            //// Two terms with the same hash code
            //assertEquals("AaAaBB".GetHashCode(), "BBBBBB".GetHashCode());
            //TermsFilter left = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", "AaAaBB"));
            //TermsFilter right = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", "BBBBBB"));
            //assertFalse(left.Equals(right));

            // LUCENENET specific - since in .NET the hash code is dependent on the underlying
            // target framework, we need to generate a collision at runtime.
            GenerateHashCollision(out string theString, out string stringWithCollision);
            assertEquals(theString.GetHashCode(), stringWithCollision.GetHashCode());
            TermsFilter left  = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", theString));
            TermsFilter right = TermsFilter(true, new Term("id", "AaAaAa"), new Term("id", stringWithCollision));

            assertFalse(left.Equals(right));
        }
		public override Filter MakeFilter(SpatialArgs args)
		{
			SpatialOperation op = args.Operation;
			if (
				!SpatialOperation.Is(op, SpatialOperation.IsWithin, SpatialOperation.Intersects, SpatialOperation.BBoxWithin,
				                     SpatialOperation.BBoxIntersects))
				throw new UnsupportedSpatialOperation(op);

			Shape shape = args.GetShape();
			int detailLevel = grid.GetMaxLevelForPrecision(shape, args.GetDistPrecision());
			var cells = grid.GetNodes(shape, detailLevel, false);
			var filter = new TermsFilter();
			foreach (Node cell in cells)
			{
				filter.AddTerm(new Term(GetFieldName(), cell.GetTokenString()));
			}
			return filter;
		}
Example No. 24
        public static void Main(string[] args)
        {
            var directory = new RAMDirectory();
            var analyzer  = new KeywordAnalyzer();

            using (var writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED)) {
                var users       = new [] { "Alfa", "Beta", "Gamma", "Delta" };
                var friendships = new Dictionary <String, String[]> {
                    { "Alfa", new [] { "Beta", "Gamma", "Delta" } },
                    { "Beta", new [] { "Gamma", "Delta" } },
                    { "Gamma", new [] { "Delta" } },
                    { "Delta", new String[0] }                             // Noone likes Delta.
                };
                foreach (var userName in users)
                {
                    var doc = new Document();
                    doc.Add(new Field("Name", userName, Field.Store.YES, Field.Index.NO));
                    foreach (var friendName in friendships[userName])
                    {
                        doc.Add(new Field("FriendsWith", friendName, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
                    }
                    writer.AddDocument(doc);
                }
                writer.Commit();
            }
            // This should be the real query provided by the user (city, age, description, ...)
            var query = new MatchAllDocsQuery();
            // Create a filter limiting the result to those being friends with the current user,
            // in this example the user "Gamma".
            var filter = new TermsFilter();

            filter.AddTerm(new Term("FriendsWith", "Gamma"));
            var reader   = IndexReader.Open(directory, readOnly: true);
            var searcher = new IndexSearcher(reader);
            var result   = searcher.Search(query, filter, 10);

            foreach (var topDoc in result.ScoreDocs)
            {
                var doc       = searcher.Doc(topDoc.Doc);
                var foundName = doc.Get("Name");
                Console.WriteLine("Matched user '{0}'", foundName);
            }
            Console.ReadLine();
        }
        public override Filter MakeFilter(SpatialArgs args)
        {
            SpatialOperation op = args.Operation;

            if (op != SpatialOperation.Intersects)
            {
                throw new UnsupportedSpatialOperation(op);
            }

            Shape shape       = args.Shape;
            int   detailLevel = grid.GetLevelForDistance(args.ResolveDistErr(ctx, distErrPct));
            var   cells       = grid.GetNodes(shape, detailLevel, false);
            var   filter      = new TermsFilter();

            foreach (Node cell in cells)
            {
                filter.AddTerm(new Term(GetFieldName(), cell.GetTokenString()));
            }
            return(filter);
        }
Example No. 26
        public LuceneFilter GetFilter(JobAdSearchQuery searchQuery)
        {
            if (searchQuery.ExcludeIntegratorIds == null || searchQuery.ExcludeIntegratorIds.Count == 0)
            {
                return(null);
            }

            var termsFilter = new TermsFilter();

            foreach (var integratorId in searchQuery.ExcludeIntegratorIds)
            {
                termsFilter.addTerm(new Term(FieldName.Integrator, integratorId.ToFieldValue()));
            }

            var filter = new BooleanFilter();

            filter.add(new FilterClause(termsFilter, BooleanClause.Occur.MUST_NOT));

            return(filter);
        }
        public void testCachability()
        {
            TermsFilter a = new TermsFilter();

            a.AddTerm(new Term("field1", "a"));
            a.AddTerm(new Term("field1", "b"));
            HashSet <Filter> cachedFilters = new HashSet <Filter>();

            cachedFilters.Add(a);
            TermsFilter b = new TermsFilter();

            b.AddTerm(new Term("field1", "a"));
            b.AddTerm(new Term("field1", "b"));

            Assert.True(cachedFilters.Contains(b), "Must be cached");
            b.AddTerm(new Term("field1", "a")); //duplicate term
            Assert.True(cachedFilters.Contains(b), "Must be cached");
            b.AddTerm(new Term("field1", "c"));
            Assert.False(cachedFilters.Contains(b), "Must not be cached");
        }
Example No. 28
        public void TestFieldNotPresent()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);
            int num   = AtLeast(3);
            int skip  = Random.Next(num);
            var terms = new JCG.List <Term>();

            for (int i = 0; i < num; i++)
            {
                terms.Add(new Term("field" + i, "content1"));
                Document doc = new Document();
                if (skip == i)
                {
                    continue;
                }
                doc.Add(NewStringField("field" + i, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }

            w.ForceMerge(1);
            IndexReader reader = w.GetReader();

            w.Dispose();
            assertEquals(1, reader.Leaves.size());

            AtomicReaderContext context = reader.Leaves[0];
            TermsFilter         tf      = new TermsFilter(terms);

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);

            assertEquals("Must be num fields - 1 since we skip only one field", num - 1, bits.Cardinality);
            reader.Dispose();
            dir.Dispose();
        }
        public void testMissingTerms()
        {
            String    fieldName = "field1";
            Directory rd        = new RAMDirectory();
            var       w         = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);

            for (int i = 0; i < 100; i++)
            {
                var doc  = new Document();
                int term = i * 10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();

            w.Close();

            TermsFilter tf = new TermsFilter();

            tf.AddTerm(new Term(fieldName, "19"));
            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(reader);

            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (FixedBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (FixedBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (FixedBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            reader.Close();
            rd.Close();
        }
Example No. 30
        LuceneFilter IContentHandler.GetFilter(MemberSearchQuery searchQuery)
        {
            if (searchQuery.VisaStatusList.IsNullOrEmpty())
            {
                return(null);
            }

            var filter = new TermsFilter();

            foreach (var status in searchQuery.VisaStatusList)
            {
                filter.addTerm(new Term(FieldName.VisaStatus, status.Encode()));

                // Everyone who does not have a visa status is considered a citizen.

                if (status == VisaStatus.Citizen)
                {
                    filter.addTerm(new Term(FieldName.VisaStatus, ((VisaStatus?)null).Encode()));
                    filter.addTerm(new Term(FieldName.VisaStatus, VisaStatus.NotApplicable.Encode()));
                }
            }
            return(filter);
        }
Example No. 31
        private TermsFilter TermsFilter(bool singleField, IEnumerable <Term> termList)
        {
            if (!singleField)
            {
                return(new TermsFilter(termList.ToList()));
            }
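            // Single-field case: collect the raw BytesRef values and use the (field, bytes) constructor overload instead.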
            TermsFilter filter;
            var         bytes = new List <BytesRef>();
            string      field = null;

            foreach (Term term in termList)
            {
                bytes.Add(term.Bytes);
                if (field != null)
                {
                    assertEquals(term.Field, field);
                }
                field = term.Field;
            }
            assertNotNull(field);
            filter = new TermsFilter(field, bytes);
            return(filter);
        }
Example No. 32
        public void TestFieldNotPresent()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            int num   = AtLeast(3);
            int skip  = Random().Next(num);
            var terms = new List <Term>();

            for (int i = 0; i < num; i++)
            {
                terms.Add(new Term("field" + i, "content1"));
                Document doc = new Document();
                if (skip == i)
                {
                    continue;
                }
                doc.Add(NewStringField("field" + i, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }

            w.ForceMerge(1);
            IndexReader reader = w.Reader;

            w.Dispose();
            assertEquals(1, reader.Leaves.size());

            AtomicReaderContext context = reader.Leaves.First();
            TermsFilter         tf      = new TermsFilter(terms);

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);

            assertEquals("Must be num fields - 1 since we skip only one field", num - 1, bits.Cardinality());
            reader.Dispose();
            dir.Dispose();
        }
Example No. 33
 public void TestToString()
 {
     TermsFilter termsFilter = new TermsFilter(new Term("field1", "a"), new Term("field1", "b"), new Term("field1", "c"));
     assertEquals("field1:a field1:b field1:c", termsFilter.ToString());
 }
Example No. 34
 private TermsFilter TermsFilter(bool singleField, IEnumerable<Term> termList)
 {
     if (!singleField)
     {
         return new TermsFilter(termList.ToList());
     }
     TermsFilter filter;
     var bytes = new List<BytesRef>();
     string field = null;
     foreach (Term term in termList)
     {
         bytes.Add(term.Bytes);
         if (field != null)
         {
             assertEquals(term.Field, field);
         }
         field = term.Field;
     }
     assertNotNull(field);
     filter = new TermsFilter(field, bytes);
     return filter;
 }
Example No. 35
        public void TestMissingField()
        {
            string fieldName = "field1";
            Directory rd1 = NewDirectory();
            RandomIndexWriter w1 = new RandomIndexWriter(Random(), rd1, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField(fieldName, "content1", Field.Store.YES));
            w1.AddDocument(doc);
            IndexReader reader1 = w1.Reader;
            w1.Dispose();

            fieldName = "field2";
            Directory rd2 = NewDirectory();
            RandomIndexWriter w2 = new RandomIndexWriter(Random(), rd2, Similarity, TimeZone);
            doc = new Document();
            doc.Add(NewStringField(fieldName, "content2", Field.Store.YES));
            w2.AddDocument(doc);
            IndexReader reader2 = w2.Reader;
            w2.Dispose();

            TermsFilter tf = new TermsFilter(new Term(fieldName, "content1"));
            MultiReader multi = new MultiReader(reader1, reader2);
            foreach (AtomicReaderContext context in multi.Leaves)
            {
                DocIdSet docIdSet = tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
                if (context.Reader.DocFreq(new Term(fieldName, "content1")) == 0)
                {
                    assertNull(docIdSet);
                }
                else
                {
                    FixedBitSet bits = (FixedBitSet)docIdSet;
                    assertTrue("Must be >= 0", bits.Cardinality() >= 0);
                }
            }
            multi.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            rd1.Dispose();
            rd2.Dispose();
        }
Example No. 36
        private static TermsFilter CreateTermsFilter(string fieldName, IEnumerable <string> values)
        {
            var query = new TermsFilter(values.Select(v => new Term(fieldName, v)).ToList());

            return(query);
        }
Example No. 37
        public List<ResourceNode> Search(string term, bool isResourceDocs, bool isProcurement, bool isCatalog)
        {
            try
            {
                if(String.IsNullOrWhiteSpace(term))
                    return new List<ResourceNode>();
                Query query = multiParser.Parse(term);
           
                TopDocs docs = null;

                TermsFilter filter = new TermsFilter();
                // Only apply the resource-type filter when at least one type was requested.
                bool isFilter = isProcurement || isCatalog || isResourceDocs;
                if (isProcurement)
                {
                    filter.AddTerm(new Term(FieldNames.RESOURCE_TYPE, ResourceTypeEnum.Procurement_Language.ToString()));
                }
                if (isCatalog)
                {
                    filter.AddTerm(new Term(FieldNames.RESOURCE_TYPE, ResourceTypeEnum.Catalog_Recommendation.ToString()));
                }
                if (isResourceDocs)
                {
                    filter.AddTerm(new Term(FieldNames.RESOURCE_TYPE, ResourceTypeEnum.Resource_Doc.ToString()));
                }
             
                if(isFilter)
                   docs =  searcher.Search(query,filter,20);
                else
                   docs = searcher.Search(query, 20);
               
               
                List<ResourceNode> listResourceDocuments = new List<ResourceNode>();
              
                foreach (ScoreDoc doc in docs.ScoreDocs)
                {
                    int lucDocId = doc.Doc;                   

                    Document lucDoc = searcher.Doc(lucDocId);
                    string sDocId = lucDoc.Get(FieldNames.DOC_ID);

                    int docId = 0;
                    if (sDocId != null)
                        docId = Int32.Parse(sDocId);
                    else
                    {
                        docId = -1;
                    }
                   
                    string resourceType = lucDoc.Get(FieldNames.RESOURCE_TYPE);
                    ResourceTypeEnum resourceTypeEnum = (ResourceTypeEnum)Enum.Parse(typeof(ResourceTypeEnum), resourceType, true);
                    ResourceNode resDoc = GetDoc(docId, resourceTypeEnum);
                    if(resDoc != null)
                        listResourceDocuments.Add(resDoc);
                }
                return listResourceDocuments;
            }
            catch(Exception ex)
            {
                // May get exceptions if symbols and other weird things are put in the search, so just return an empty list in that case.
                //CSETLogger.Fatal("An exception occurred in finding items in search.", ex);
                return new List<ResourceNode>();           
            }

 
        }
Example No. 38
        public void TestSkipField()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            int num = AtLeast(10);
            var terms = new HashSet<Term>();
            for (int i = 0; i < num; i++)
            {
                string field = "field" + Random().Next(100);
                terms.Add(new Term(field, "content1"));
                Document doc = new Document();
                doc.Add(NewStringField(field, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }
            int randomFields = Random().Next(10);
            for (int i = 0; i < randomFields; i++)
            {
                while (true)
                {
                    string field = "field" + Random().Next(100);
                    Term t = new Term(field, "content1");
                    if (!terms.Contains(t))
                    {
                        terms.Add(t);
                        break;
                    }
                }
            }
            w.ForceMerge(1);
            IndexReader reader = w.Reader;
            w.Dispose();
            assertEquals(1, reader.Leaves.size());
            AtomicReaderContext context = reader.Leaves.First();
            TermsFilter tf = new TermsFilter(terms.ToList());

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
            assertEquals(context.Reader.NumDocs, bits.Cardinality());
            reader.Dispose();
            dir.Dispose();
        }
Example No. 39
 /// <summary>
 ///     Creates the query.
 /// </summary>
 /// <param name="field">The field.</param>
 /// <param name="value">The value.</param>
 /// <returns></returns>
 public static Filter CreateQuery(string field, AttributeFilterValue value)
 {
     object val = value.Value;
     var query = new TermsFilter();
     query.AddTerm(new Term(field, ConvertToSearchable(val)));
     return query;
 }