Example No. 1
        public void TestRollbackIntegrityWithBufferFlush()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 5; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("pk", i.ToString(), Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                w.AddDocument(doc);
            }
            w.Close();

            // If buffer size is small enough to cause a flush, errors ensue...
            w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            w.SetMaxBufferedDocs(2);

            Term pkTerm = new Term("pk", "");
            for (int i = 0; i < 3; i++)
            {
                Document doc = new Document();
                String value = i.ToString();
                doc.Add(new Field("pk", value, Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                doc.Add(new Field("text", "foo", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                w.UpdateDocument(pkTerm.CreateTerm(value), doc);
            }
            w.Rollback();

            IndexReader r = IndexReader.Open(dir, true);
            Assert.AreEqual(5, r.NumDocs(), "index should contain same number of docs post rollback");
            r.Close();
            dir.Close();
        }
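The UpdateDocument calls above are Lucene's delete-then-add primitive. A minimal sketch of the equivalence, assuming an open IndexWriter `writer` and a new Document `doc` (names hypothetical):

            // Hedged sketch: UpdateDocument(term, doc) behaves like a
            // delete-by-term followed by an add of the replacement.
            writer.DeleteDocuments(new Term("pk", "2")); // drop the old version(s)
            writer.AddDocument(doc);                     // index the new version
            // equivalent, but atomic as seen by readers of the same index:
            writer.UpdateDocument(new Term("pk", "2"), doc);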
Example No. 2
		internal SegmentMergeInfo(int b, TermEnum te, IndexReader r)
		{
			base_Renamed = b;
			reader = r;
			termEnum = te;
			term = te.Term;
		}
Example No. 3
        public static BooleanQuery ParseRange(string fieldName, long lowerValue, long upperValue, bool inclusive)
        {
            if (lowerValue > upperValue)
            {
                return null;
            }

            //var rangeQuery = new BooleanQuery();
            var dateQuery = new BooleanQuery();
            BooleanQuery.SetMaxClauseCount(int.MaxValue);

            for (long i = lowerValue; i < upperValue; i++)
            {
                var term = new Term(fieldName, i.ToString());
                var q = new TermQuery(term);
                dateQuery.Add(q, BooleanClause.Occur.SHOULD);
            }

            if (inclusive)
            {
                var term = new Term(fieldName, upperValue.ToString());
                var q = new TermQuery(term);
                dateQuery.Add(q, BooleanClause.Occur.SHOULD);
            }

            //if (dateQuery.GetClauses() != null || dateQuery.GetClauses().Length != 0)
            //{
            //    rangeQuery.Add(dateQuery, BooleanClause.Occur.MUST);
            //}

            return dateQuery;
        }
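Enumerating one TermQuery per value makes the clause count grow linearly with the range width, which is why the code above has to raise SetMaxClauseCount. If the field were indexed as a numeric trie field, NumericRangeQuery would cover the same range with a single query object. A hedged alternative sketch (hypothetical ParseRangeNumeric; assumes the field was indexed via NumericField):

        public static Query ParseRangeNumeric(string fieldName, long lowerValue, long upperValue, bool inclusive)
        {
            // One trie-encoded range query instead of one clause per value.
            return NumericRangeQuery.NewLongRange(fieldName, lowerValue, upperValue, true, inclusive);
        }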
Example No. 4
 public virtual void TestAnyChanges()
 {
     DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
     int size = 200 + Random().Next(500) * RANDOM_MULTIPLIER;
     int termsSinceFreeze = 0;
     int queriesSinceFreeze = 0;
     for (int i = 0; i < size; i++)
     {
         Term term = new Term("id", "" + i);
         if (Random().Next(10) == 0)
         {
             queue.AddDelete(new TermQuery(term));
             queriesSinceFreeze++;
         }
         else
         {
             queue.AddDelete(term);
             termsSinceFreeze++;
         }
         Assert.IsTrue(queue.AnyChanges());
         if (Random().Next(5) == 0)
         {
             FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);
             Assert.AreEqual(termsSinceFreeze, freezeGlobalBuffer.TermCount);
             Assert.AreEqual(queriesSinceFreeze, ((Query[])freezeGlobalBuffer.Queries_Nunit()).Length);
             queriesSinceFreeze = 0;
             termsSinceFreeze = 0;
             Assert.IsFalse(queue.AnyChanges());
         }
     }
 }
Example No. 5
        internal int DocIDUpto = -1; // unassigned until applied, and confusing that it's here, when it's just used in BufferedDeletes...

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="term"> the <seealso cref="Term"/> which determines the documents that will be updated </param>
        /// <param name="field"> the <seealso cref="NumericDocValuesField"/> to update </param>
        /// <param name="value"> the updated value </param>
        protected internal DocValuesUpdate(DocValuesFieldUpdates.Type_e type, Term term, string field, object value)
        {
            this.Type = type;
            this.Term = term;
            this.Field = field;
            this.Value = value;
        }
Example No. 6
        static void ProcessTermClause(BooleanQuery bq, TermClause term)
        {
            Term t = new Term(term.Field, term.Value.ToLower());

            Lucene.Net.Search.Query q = null;

            if (term.ValueType == TermClauseType.Wildcard && term.Value.IndexOf('*') == term.Value.Length - 1)
            {
                q = new PrefixQuery(new Term(term.Field, term.Value.Substring(0, term.Value.Length - 1)));
            }
            else if (term.ValueType == TermClauseType.Wildcard)
            {
                q = new WildcardQuery(t);
            }
            else if (term.ValueType == TermClauseType.Fuzzy)
            {
                q = new FuzzyQuery(t);
            }
            else
            {
                q = new TermQuery(t);
            }

            if (term.Boost > 0)
            {
                q.SetBoost(term.Boost);
            }

            bq.Add(
                q,
                Translate(term.Type)
                );
        }
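The first branch is a small optimization: a wildcard whose only asterisk is the final character is semantically a prefix search, and PrefixQuery is cheaper to rewrite than a general WildcardQuery. A hedged sketch of the two forms (field and values hypothetical):

            // Hedged sketch: general pattern vs. the trailing-asterisk fast path.
            Query general = new WildcardQuery(new Term("title", "luc*ne")); // must walk the pattern
            Query fast = new PrefixQuery(new Term("title", "luc"));         // handles "luc*"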
Example No. 7
		public virtual void  TestSimpleSkip()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			Term term = new Term("test", "a");
			for (int i = 0; i < 5000; i++)
			{
				Document d1 = new Document();
				d1.Add(new Field(term.Field(), term.Text(), Field.Store.NO, Field.Index.ANALYZED));
				writer.AddDocument(d1);
			}
			writer.Flush();
			writer.Optimize();
			writer.Close();
			
			IndexReader reader = SegmentReader.GetOnlySegmentReader(dir);
			SegmentTermPositions tp = (SegmentTermPositions) reader.TermPositions();
			tp.freqStream_ForNUnit = new CountingStream(this, tp.freqStream_ForNUnit);
			
			for (int i = 0; i < 2; i++)
			{
				counter = 0;
				tp.Seek(term);
				
				CheckSkipTo(tp, 14, 185); // no skips
				CheckSkipTo(tp, 17, 190); // one skip on level 0
				CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0
				
				// this test would fail if we had only one skip level,
				// because then more bytes would be read from the freqStream
				CheckSkipTo(tp, 4800, 250); // one skip on level 2
			}
		}
Example No. 8
        public void TestTermSearch()
        {
            using (var dir = FSDirectory.Open(TestEnvironment.TestIndexDirectory))
            using (var indexSearcher = new IndexSearcher(dir))
            {
                var termSubjectAnt = new Term("subject", "ant");
                var termQuerySubjectAnt = new TermQuery(termSubjectAnt);
                var topDocsSubjectAnt = indexSearcher.Search(termQuerySubjectAnt, 10);

                // title=Ant in Action
                // subject=apache ant build tool junit java development
                Assert.Equal(1, topDocsSubjectAnt.TotalHits);


                var termSubjectJUnit = new Term("subject", "junit");
                var termQuerySubjectJUnit = new TermQuery(termSubjectJUnit);
                var topDocsSubjectJUnit = indexSearcher.Search(termQuerySubjectJUnit, 10);

//                ExplainResults(indexSearcher, termQuerySubjectJUnit, topDocsSubjectJUnit);

                // title=JUnit in Action, Second Edition
                // subject=junit unit testing mock objects

                // title=Ant in Action
                // subject=apache ant build tool junit java development
                Assert.Equal(2, topDocsSubjectJUnit.TotalHits); // "Ant in Action", "JUnit in Action, Second Edition"
            }

        }
Example No. 9
 public override int DocFreq(Term t)
 {
     int total = 0; // sum freqs in segments
     for (int i = 0; i < subReaders.Length; i++)
         total += subReaders[i].DocFreq(t);
     return total;
 }
Example No. 10
        /// <summary>macro for readability </summary>
        public static Query Cswcq(Term wild)
        {
            WildcardQuery query = new WildcardQuery(wild);

            query.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
            return(query);
        }
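CONSTANT_SCORE_FILTER_REWRITE runs the multi-term query as a filter rather than expanding it into one BooleanClause per matching term, so broad patterns cannot trip BooleanQuery's clause limit; the trade-off is that every hit gets the same score. A hedged usage sketch (field and pattern hypothetical):

        // Hedged sketch: a broad wildcard that could overflow the clause limit
        // under the default scoring rewrite.
        WildcardQuery broad = new WildcardQuery(new Term("body", "a*"));
        broad.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;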
Example No. 11
		public override Query GetFieldQuery(string field, string queryText)
		{
			HashSet<string> set;
			if(untokenized.TryGetValue(field, out set))
			{
				if (set.Contains(queryText))
					return new TermQuery(new Term(field, queryText));
			}

			var fieldQuery = base.GetFieldQuery(field, queryText);
			if (fieldQuery is TermQuery
				&& queryText.EndsWith("*")
				&& !queryText.EndsWith(@"\*")
				&& queryText.Contains(" "))
			{ 
				var analyzer = GetAnalyzer();
				var tokenStream = analyzer.ReusableTokenStream(field, new StringReader(queryText.Substring(0, queryText.Length-1)));
				var sb = new StringBuilder();
				while (tokenStream.IncrementToken())
				{
					var attribute = (TermAttribute)tokenStream.GetAttribute(typeof(TermAttribute));
					if (sb.Length != 0)
						sb.Append(' ');
					sb.Append(attribute.Term());
				}
				var prefix = new Term(field, sb.ToString());
				return new PrefixQuery(prefix);
			}
			return fieldQuery;
		}
Example No. 12
        public virtual void  TestHashCodeAndEquals()
        {
            MultiPhraseQuery query1 = new MultiPhraseQuery();
            MultiPhraseQuery query2 = new MultiPhraseQuery();

            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
            Assert.True(query1.Equals(query2));

            Term term1 = new Term("someField", "someText");

            query1.Add(term1);
            query2.Add(term1);

            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
            Assert.AreEqual(query1, query2);

            Term term2 = new Term("someField", "someMoreText");

            query1.Add(term2);

            Assert.IsFalse(query1.GetHashCode() == query2.GetHashCode());
            Assert.IsFalse(query1.Equals(query2));

            query2.Add(term2);

            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
            Assert.AreEqual(query1, query2);
        }
Example No. 13
        internal readonly Term[] Terms; // for repetitions initialization

        internal PhrasePositions(DocsAndPositionsEnum postings, int o, int ord, Term[] terms)
        {
            this.Postings = postings;
            Offset = o;
            this.Ord = ord;
            this.Terms = terms;
        }
Example No. 14
        private void CheckSplitting(Directory dir, Term splitTerm, int leftCount, int rightCount)
        {
            using (Directory dir1 = NewDirectory())
            {
                using (Directory dir2 = NewDirectory())
                {
                    PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm,
                        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())),
                        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
                    splitter.Split();

                    using (IndexReader ir1 = DirectoryReader.Open(dir1))
                    {
                        using (IndexReader ir2 = DirectoryReader.Open(dir2))
                        {
                            assertEquals(leftCount, ir1.NumDocs);
                            assertEquals(rightCount, ir2.NumDocs);


                            CheckContents(ir1, "1");
                            CheckContents(ir2, "2");

                        }
                    }
                }
            }
        }
Example No. 15
		public Result Search (string term, int count, int start) {
			try {
				term = term.ToLower ();
				Term htTerm = new Term ("hottext", term);
				Query qq1 = new FuzzyQuery (htTerm);
				Query qq2 = new TermQuery (htTerm);
				qq2.Boost = 10f;
				Query qq3 = new PrefixQuery (htTerm);
				qq3.Boost = 10f;
				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery (0f);
				q1.Add (qq1);
				q1.Add (qq2);
				q1.Add (qq3);
				Query q2 = new TermQuery (new Term ("text", term));
				q2.Boost = 3f;
				Query q3 = new TermQuery (new Term ("examples", term));
				q3.Boost = 3f;
				DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);

				q.Add (q1);
				q.Add (q2);
				q.Add (q3);
			
				TopDocs top = SearchInternal (q, count, start);
				Result r = new Result (term, searcher, top.ScoreDocs);
				Results.Add (r);
				return r;
			} catch (IOException) {
				Console.WriteLine ("No index in {0}", dir);
				return null;
			}
		}
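The DisjunctionMaxQuery instances above are built with a tie-breaker of 0f, so each document is scored by its single best-matching clause. A non-zero tie-breaker adds that fraction of the other clauses' scores; a hedged sketch (fields and value hypothetical):

				// Hedged sketch: score = max(clause scores) + 0.1 * sum(other clauses).
				DisjunctionMaxQuery dmq = new DisjunctionMaxQuery (0.1f);
				dmq.Add (new TermQuery (new Term ("hottext", "list")));
				dmq.Add (new TermQuery (new Term ("text", "list")));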
Example No. 16
        public virtual Query GetQuery(XmlElement e)
        {
            string fieldName = DOMUtils.GetAttributeWithInheritanceOrFail(e, "fieldName");
            string text = DOMUtils.GetNonBlankTextOrFail(e);

            BooleanQuery bq = new BooleanQuery(DOMUtils.GetAttribute(e, "disableCoord", false));
            bq.MinimumNumberShouldMatch = DOMUtils.GetAttribute(e, "minimumNumberShouldMatch", 0);
            TokenStream ts = null;
            try
            {
                ts = analyzer.TokenStream(fieldName, text);
                ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>();
                Term term = null;
                BytesRef bytes = termAtt.BytesRef;
                ts.Reset();
                while (ts.IncrementToken())
                {
                    termAtt.FillBytesRef();
                    term = new Term(fieldName, BytesRef.DeepCopyOf(bytes));
                    bq.Add(new BooleanClause(new TermQuery(term), BooleanClause.Occur.SHOULD));
                }
                ts.End();
            }
            catch (IOException ioe)
            {
                throw new Exception("Error constructing terms from index:" + ioe);
            }
            finally
            {
                IOUtils.CloseWhileHandlingException(ts);
            }

            bq.Boost = DOMUtils.GetAttribute(e, "boost", 1.0f);
            return bq;
        }
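The try/finally above follows the TokenStream consumer contract: Reset before the first IncrementToken, End after the last token, and close the stream even on failure. A hedged sketch of that contract in isolation (field name and text assumed):

            TokenStream stream = analyzer.TokenStream("fieldName", "some text");
            try
            {
                stream.Reset();                      // required before the first token
                while (stream.IncrementToken()) { }  // read per-token attributes here
                stream.End();                        // consume end-of-stream state
            }
            finally
            {
                IOUtils.CloseWhileHandlingException(stream); // always release the stream
            }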
Example No. 17
        /// <summary> Create a new FuzzyQuery that will match terms with a similarity
        /// of at least <c>minimumSimilarity</c> to <c>term</c>.
        /// If a <c>prefixLength</c> &gt; 0 is specified, a common prefix
        /// of that length is also required.
        ///
        /// </summary>
        /// <param name="term">the term to search for
        /// </param>
        /// <param name="minimumSimilarity">a value between 0 and 1 to set the required similarity
        /// between the query term and the matching terms. For example, for a
        /// <c>minimumSimilarity</c> of <c>0.5</c> a term of the same length
        /// as the query term is considered similar to the query term if the edit distance
        /// between both terms is less than <c>length(term)*0.5</c>
        /// </param>
        /// <param name="prefixLength">length of common (non-fuzzy) prefix
        /// </param>
        /// <throws>  IllegalArgumentException if minimumSimilarity is &gt;= 1 or &lt; 0,
        /// or if prefixLength &lt; 0
        /// </throws>
        public FuzzyQuery(Term term, float minimumSimilarity, int prefixLength)
        {
            this.Term = term;

            if (minimumSimilarity >= 1.0f)
            {
                throw new System.ArgumentException("minimumSimilarity >= 1");
            }
            else if (minimumSimilarity < 0.0f)
            {
                throw new System.ArgumentException("minimumSimilarity < 0");
            }
            if (prefixLength < 0)
            {
                throw new System.ArgumentException("prefixLength < 0");
            }

            if (term.Text.Length > 1.0f / (1.0f - minimumSimilarity))
            {
                this.termLongEnough = true;
            }

            this.minimumSimilarity = minimumSimilarity;
            this.prefixLength      = prefixLength;
            internalRewriteMethod  = SCORING_BOOLEAN_QUERY_REWRITE;
        }
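The termLongEnough test is a worked inequality: with similarity s, a non-identical term can only stay within the threshold if the query term's length exceeds 1/(1-s). A hedged numeric example:

            // Hedged example: for minimumSimilarity = 0.5f,
            // 1.0f / (1.0f - 0.5f) == 2.0f, so a 2-character term cannot
            // tolerate even one edit; termLongEnough stays false and the
            // query can only match the term exactly.
            FuzzyQuery tooShort = new FuzzyQuery(new Term("name", "ab"), 0.5f, 0);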
Example No. 18
 private void AddMetaDataField(Document doc, Term term, int[] meta)
 {
     IntMetaDataTokenStream tokenStream = new IntMetaDataTokenStream(term.Text);
     tokenStream.SetMetaData(meta);
     Field field = new Field(term.Field, tokenStream);
     doc.Add(field);
 }
Example No. 19
 protected void AddNumericRangeQuery(BooleanQuery query, NumericRangeField range, BooleanClause.Occur occurance)
 {
     var startTerm = new Term(range.FieldName, SearchHelper.FormatNumber(range.Start));
     var endTerm = new Term(range.FieldName, SearchHelper.FormatNumber(range.End));
     var rangeQuery = new RangeQuery(startTerm, endTerm, true);
     query.Add(rangeQuery, occurance);
 }
Example No. 20
        public virtual void Test()
        {
            Term allTerm = new Term(FIELD, "all");
            TermQuery termQuery = new TermQuery(allTerm);

            Weight weight = IndexSearcher.CreateNormalizedWeight(termQuery);
            Assert.IsTrue(IndexSearcher.TopReaderContext is AtomicReaderContext);
            AtomicReaderContext context = (AtomicReaderContext)IndexSearcher.TopReaderContext;
            BulkScorer ts = weight.BulkScorer(context, true, ((AtomicReader)context.Reader()).LiveDocs);
            // we have 2 documents with the term all in them, one document for all the
            // other values
            IList<TestHit> docs = new List<TestHit>();
            // must call next first

            ts.Score(new CollectorAnonymousInnerClassHelper(this, context, docs));
            Assert.IsTrue(docs.Count == 2, "docs Size: " + docs.Count + " is not: " + 2);
            TestHit doc0 = docs[0];
            TestHit doc5 = docs[1];
            // The scores should be the same
            Assert.IsTrue(doc0.Score == doc5.Score, doc0.Score + " does not equal: " + doc5.Score);
            /*
             * Score should be (based on Default Sim.); all floats are approximate:
             *   tf = 1
             *   numDocs = 6
             *   docFreq(all) = 2
             *   idf = ln(6/3) + 1 = 1.693147
             *   idf ^ 2 = 2.8667
             *   boost = 1
             *   lengthNorm = 1  (there is 1 term in every document)
             *   coord = 1
             *   sumOfSquaredWeights = (idf * boost) ^ 2 = 1.693147 ^ 2 = 2.8667
             *   queryNorm = 1 / (sumOfSquaredWeights)^0.5 = 1 / 1.693147 = 0.590
             *
             *   score = 1 * 2.8667 * 1 * 1 * 0.590 = 1.69
             */
            Assert.IsTrue(doc0.Score == 1.6931472f, doc0.Score + " does not equal: " + 1.6931472f);
        }
Example No. 21
            protected internal override string[] CreateValue(IndexReader reader, Entry entryKey, IState state)
            {
                System.String   field    = StringHelper.Intern(entryKey.field);
                System.String[] retArray = new System.String[reader.MaxDoc];
                TermDocs        termDocs = reader.TermDocs(state);
                TermEnum        termEnum = reader.Terms(new Term(field), state);

                try
                {
                    do
                    {
                        Term term = termEnum.Term;
                        if (term == null || (System.Object)term.Field != (System.Object)field)
                        {
                            break;
                        }
                        System.String termval = term.Text;
                        termDocs.Seek(termEnum, state);
                        while (termDocs.Next(state))
                        {
                            retArray[termDocs.Doc] = termval;
                        }
                    }while (termEnum.Next(state));
                }
                finally
                {
                    termDocs.Close();
                    termEnum.Close();
                }
                return(retArray);
            }
Example No. 22
        public virtual void TestSimpleSkip()
        {
            Directory dir = new CountingRAMDirectory(this, new RAMDirectory());
            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy()));
            Term term = new Term("test", "a");
            for (int i = 0; i < 5000; i++)
            {
                Document d1 = new Document();
                d1.Add(NewTextField(term.Field(), term.Text(), Field.Store.NO));
                writer.AddDocument(d1);
            }
            writer.Commit();
            writer.ForceMerge(1);
            writer.Dispose();

            AtomicReader reader = GetOnlySegmentReader(DirectoryReader.Open(dir));

            for (int i = 0; i < 2; i++)
            {
                Counter = 0;
                DocsAndPositionsEnum tp = reader.TermPositionsEnum(term);
                CheckSkipTo(tp, 14, 185); // no skips
                CheckSkipTo(tp, 17, 190); // one skip on level 0
                CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0

                // this test would fail if we had only one skip level,
                // because then more bytes would be read from the freqStream
                CheckSkipTo(tp, 4800, 250); // one skip on level 2
            }
        }
Example No. 23
        /// <summary>
        /// Create a new <see cref="SlowFuzzyQuery"/> that will match terms with a similarity 
        /// of at least <paramref name="minimumSimilarity"/> to <paramref name="term"/>.
        /// If a <paramref name="prefixLength"/> &gt; 0 is specified, a common prefix
        /// of that length is also required.
        /// </summary>
        /// <param name="term">the term to search for</param>
        /// <param name="minimumSimilarity">
        /// a value between 0 and 1 to set the required similarity
        /// between the query term and the matching terms. For example, for a
        /// <paramref name="minimumSimilarity"/> of <c>0.5</c> a term of the same length
        /// as the query term is considered similar to the query term if the edit distance
        /// between both terms is less than <c>length(term)*0.5</c>
        /// <para/>
        /// Alternatively, if <paramref name="minimumSimilarity"/> is >= 1f, it is interpreted
        /// as a pure Levenshtein edit distance. For example, a value of <c>2f</c>
        /// will match all terms within an edit distance of <c>2</c> from the
        /// query term. Edit distances specified in this way may not be fractional.
        /// </param>
        /// <param name="prefixLength">length of common (non-fuzzy) prefix</param>
        /// <param name="maxExpansions">
        /// the maximum number of terms to match. If this number is
        /// greater than <see cref="BooleanQuery.MaxClauseCount"/> when the query is rewritten,
        /// then the maxClauseCount will be used instead.
        /// </param>
        /// <exception cref="ArgumentException">
        /// if <paramref name="minimumSimilarity"/> is &gt;= 1 or &lt; 0
        /// or if <paramref name="prefixLength"/> &lt; 0
        /// </exception>
        public SlowFuzzyQuery(Term term, float minimumSimilarity, int prefixLength,
            int maxExpansions)
            : base(term.Field)
        {
            this.term = term;

            if (minimumSimilarity >= 1.0f && minimumSimilarity != (int)minimumSimilarity)
                throw new ArgumentException("fractional edit distances are not allowed");
            if (minimumSimilarity < 0.0f)
                throw new ArgumentException("minimumSimilarity < 0");
            if (prefixLength < 0)
                throw new ArgumentException("prefixLength < 0");
            if (maxExpansions < 0)
                throw new ArgumentException("maxExpansions < 0");

            SetRewriteMethod(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(maxExpansions));

            string text = term.Text();
            int len = text.CodePointCount(0, text.Length);
            if (len > 0 && (minimumSimilarity >= 1f || len > 1.0f / (1.0f - minimumSimilarity)))
            {
                this.termLongEnough = true;
            }

            this.minimumSimilarity = minimumSimilarity;
            this.prefixLength = prefixLength;
        }
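Unlike the classic FuzzyQuery constructor above, this one also accepts minimumSimilarity >= 1 as a whole-number Levenshtein distance. A hedged sketch (field, value, and limits hypothetical):

            // Hedged sketch: 2f is read as "within two edits", not as a ratio;
            // at most 50 matching terms are kept when the query is rewritten.
            SlowFuzzyQuery withinTwoEdits = new SlowFuzzyQuery(new Term("name", "lucene"), 2f, 0, 50);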
Example No. 24
        public void AndExtension()
        {
            BooleanQuery originalQuery = new BooleanQuery();
            BooleanQuery innerQuery = new BooleanQuery();

            Term term = new Term("_name", "value1");
            TermQuery termQuery1 = new TermQuery(term);
            innerQuery.Add(termQuery1, Occur.MUST);

            Term term2 = new Term("_name", "value2");
            TermQuery termQuery2 = new TermQuery(term2);
            innerQuery.Add(termQuery2, Occur.MUST);

            originalQuery.Add(innerQuery, Occur.MUST);
            string queryString = originalQuery.ToString();

            QueryBuilder builder = new QueryBuilder();
            builder.And
                (
                    x => x.Term("_name", "value1"),
                    x => x.Term("_name", "value2")
                );
            Query replacementQuery = builder.Build();
            string newQueryString = replacementQuery.ToString();

            Assert.AreEqual(queryString, newQueryString);
            Console.Write(queryString);
        }
Example No. 25
        public static Lucene.Net.Search.Query FilterQueryByClasses(IESI.ISet<System.Type> classesAndSubclasses, Lucene.Net.Search.Query luceneQuery)
        {
            // A query filter is more practical than a manual class filtering post query (esp on scrollable resultsets)
            // it also probably minimise the memory footprint
            if (classesAndSubclasses == null)
            {
                return luceneQuery;
            }

            BooleanQuery classFilter = new BooleanQuery();

            // annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
            classFilter.SetBoost(0);
            foreach (System.Type clazz in classesAndSubclasses)
            {
                Term t = new Term(DocumentBuilder.CLASS_FIELDNAME, TypeHelper.LuceneTypeName(clazz));
                TermQuery termQuery = new TermQuery(t);
                classFilter.Add(termQuery, BooleanClause.Occur.SHOULD);
            }

            BooleanQuery filteredQuery = new BooleanQuery();
            filteredQuery.Add(luceneQuery, BooleanClause.Occur.MUST);
            filteredQuery.Add(classFilter, BooleanClause.Occur.MUST);
            return filteredQuery;
        }
Example No. 26
        public void Read(IndexInput input, FieldInfos fieldInfos)
        {
            this.term = null; // invalidate cache
            int start = input.ReadVInt();
            int length = input.ReadVInt();
            int totalLength = start + length;
            if (preUTF8Strings)
            {
                text.SetLength(totalLength);
                input.ReadChars(text.result, start, length);
            }
            else
            {

                if (dirty)
                {
                    // Fully convert all bytes since bytes is dirty
                    UnicodeUtil.UTF16toUTF8(text.result, 0, text.length, bytes);
                    bytes.SetLength(totalLength);
                    input.ReadBytes(bytes.result, start, length);
                    UnicodeUtil.UTF8toUTF16(bytes.result, 0, totalLength, text);
                    dirty = false;
                }
                else
                {
                    // Incrementally convert only the UTF8 bytes that are new:
                    bytes.SetLength(totalLength);
                    input.ReadBytes(bytes.result, start, length);
                    UnicodeUtil.UTF8toUTF16(bytes.result, start, length, text);
                }
            }
            this.field = fieldInfos.FieldName(input.ReadVInt());
        }
Example No. 27
        /// <summary>macro for readability </summary>
        public static Query Cspq(Term prefix)
        {
            PrefixQuery query = new PrefixQuery(prefix);

            query.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
            return(query);
        }
Example No. 28
        public void UpdatePackage(Package package)
        {
            var packageRegistrationKey = package.PackageRegistrationKey;
            var updateTerm = new Term("PackageRegistrationKey", packageRegistrationKey.ToString(CultureInfo.InvariantCulture));

            if (!package.IsLatest || !package.IsLatestStable)
            {
                // Someone passed us a version which was e.g. just unlisted, or just not the
                // latest version, which is what we want to index. Doesn't really matter:
                // we'll find one to index.
                package = _packageRepository.GetAll()
                .Where(p => (p.IsLatest || p.IsLatestStable) && p.PackageRegistrationKey == packageRegistrationKey)
                .Include(p => p.PackageRegistration)
                .Include(p => p.PackageRegistration.Owners)
                .Include(p => p.SupportedFrameworks)
                .FirstOrDefault();
            }

            // Just update the provided package
            using (Trace.Activity(String.Format(CultureInfo.CurrentCulture, "Updating Lucene Index for: {0} {1} [PackageKey:{2}]", package.PackageRegistration.Id, package.Version, package.Key)))
            {
                EnsureIndexWriter(creatingIndex: false);
                if (package != null)
                {
                    var indexEntity = new PackageIndexEntity(package);
                    Trace.Information(String.Format(CultureInfo.CurrentCulture, "Updating Document: {0}", updateTerm.ToString()));
                    _indexWriter.UpdateDocument(updateTerm, indexEntity.ToDocument());
                }
                else
                {
                    Trace.Information(String.Format(CultureInfo.CurrentCulture, "Deleting Document: {0}", updateTerm.ToString()));
                    _indexWriter.DeleteDocuments(updateTerm);
                }
                _indexWriter.Commit();
            }
        }
Example No. 29
 protected void AddNumericRangeQuery(BooleanQuery query, NumericRangeField range, BooleanClause.Occur occurance)
 {
     var startTerm = new Term(range.FieldName, NumberTools.LongToString(range.Start));
     var endTerm = new Term(range.FieldName, NumberTools.LongToString(range.End));
     var rangeQuery = new RangeQuery(startTerm, endTerm, true);
     query.Add(rangeQuery, occurance);
 }
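RangeQuery compares terms lexicographically, which is why both bounds are run through NumberTools.LongToString: the encoding pads values so that string order agrees with numeric order. A hedged illustration:

     // Hedged illustration: as plain strings "10" sorts before "2";
     // after NumberTools.LongToString the term order matches numeric order.
     string two = NumberTools.LongToString(2);
     string ten = NumberTools.LongToString(10);
     bool ordered = string.CompareOrdinal(two, ten) < 0; // true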
Example No. 30
		/// <summary>
		/// Builds the search index from the messages received as a result of synchronization
		/// </summary>
		public static int ProcessResponseMessages(IEnumerable<MessageSearchInfo> messages)
		{
			if (messages == null)
				throw new ArgumentNullException("messages");

			int addedCount;

			var indexPath = GetIndexDir();

			// Clean out the old entries
			if (IndexReader.IndexExists(indexPath))
			{
				var reader = IndexReader.Open(indexPath, false);
				var baseTerm = new Term("mid");

				try
				{
					foreach (var msg in messages)
					{
						var term = baseTerm.CreateTerm(msg.MessageID.ToString()); // delete by message id ("mid")
						reader.DeleteDocuments(term);
					}
				}
				finally
				{
					reader.Close();
				}
			}

			// Add the new messages
			var writer = CreateIndexWriter();
			try
			{
				var count = 0;
				foreach (var msg in messages)
				{
					// Forums with ids 0 and 58 are junk
					if (msg.ForumID == 0 || msg.ForumID == 58)
						continue;
					writer.AddDocument(
						CreateDocument(
							msg.MessageID.ToString(),
							msg.ForumID.ToString(),
							FormatDate(msg.MessageDate),
							msg.Subject,
							msg.UserID.ToString(),
							msg.UserNick,
							msg.MessageBody));
					count++;
				}
				addedCount = count;
			}
			finally
			{
				writer.Close();
			}

			return addedCount;
		}
Example No. 31
            protected internal override float[] CreateValue(IndexReader reader, Entry entryKey, IState state)
            {
                Entry entry = entryKey;

                System.String field  = entry.field;
                FloatParser   parser = (FloatParser)entry.custom;

                if (parser == null)
                {
                    try
                    {
                        return(wrapper.GetFloats(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_FLOAT_PARSER, state));
                    }
                    catch (System.FormatException)
                    {
                        return(wrapper.GetFloats(reader, field, Lucene.Net.Search.FieldCache_Fields.NUMERIC_UTILS_FLOAT_PARSER, state));
                    }
                }
                float[]  retArray = null;
                TermDocs termDocs = reader.TermDocs(state);
                TermEnum termEnum = reader.Terms(new Term(field), state);

                try
                {
                    do
                    {
                        Term term = termEnum.Term;
                        if (term == null || (System.Object)term.Field != (System.Object)field)
                        {
                            break;
                        }
                        float termval = parser.ParseFloat(term.Text);
                        if (retArray == null)
                        {
                            // late init
                            retArray = new float[reader.MaxDoc];
                        }
                        termDocs.Seek(termEnum, state);
                        while (termDocs.Next(state))
                        {
                            retArray[termDocs.Doc] = termval;
                        }
                    }while (termEnum.Next(state));
                }
                catch (StopFillCacheException)
                {
                }
                finally
                {
                    termDocs.Close();
                    termEnum.Close();
                }
                if (retArray == null)
                {
                    // no values
                    retArray = new float[reader.MaxDoc];
                }
                return(retArray);
            }
Example No. 32
        protected override IEnumerable<Term> VisitTerm(Term term)
        {
            yield return term;

            var chars = term.Text.ToCharArray();
            Array.Reverse(chars);
            yield return new Term(term.Field, new string(chars));
        }
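Emitting the reversed form of each term alongside the original is the standard trick for leading-wildcard support: a search for *suffix becomes a prefix search on the reversed text. A hedged sketch (field and values hypothetical):

            // Hedged sketch: to find terms ending in "ing", prefix-search
            // the reversal "gni" against the reversed variants emitted above.
            PrefixQuery leading = new PrefixQuery(new Term("content", "gni"));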
Example No. 33
 public MultiValueFacetHandler(string name, string indexFieldName, TermListFactory termListFactory, Term sizePayloadTerm, IEnumerable<string> depends)
     : base(name, depends)
 {
     _depends = depends;
     _indexFieldName = (indexFieldName != null ? indexFieldName : name);
     _termListFactory = termListFactory;
     _sizePayloadTerm = sizePayloadTerm;
     _dataCache = null;
 }
Example No. 34
 public static TermQuery Term(this BooleanQuery inputQuery, string fieldName, string fieldValue, BooleanClause.Occur occur = null)
 {
     Term term = new Term(fieldName, fieldValue);
     BooleanQuery parentQuery = GetParentQuery(inputQuery);
     TermQuery query = new TermQuery(term);
     SetOccurValue(inputQuery, ref occur);
     parentQuery.Add(query, occur);
     return query;
 }
Example No. 35
 protected int getHitCount(String fieldName, String searchString)
 {
     IndexSearcher searcher = new IndexSearcher(directory, true); //4
     Term t = new Term(fieldName, searchString);
     Query query = new TermQuery(t); //5
     int hitCount = TestUtil.hitCount(searcher, query); //6
     searcher.Close();
     return hitCount;
 }
Example No. 36
        /// <summary> Creates a new <c>MultipleTermPositions</c> instance.
        /// 
        /// </summary>
        /// <exception cref="System.IO.IOException">
        /// </exception>
        public MultipleTermPositions(IndexReader indexReader, Term[] terms)
        {
            System.Collections.IList termPositions = new System.Collections.ArrayList();

            for (int i = 0; i < terms.Length; i++)
                termPositions.Add(indexReader.TermPositions(terms[i]));

            _termPositionsQueue = new TermPositionsQueue(termPositions);
            _posList = new IntQueue();
        }
Example No. 37
        public override int GetHashCode()
        {
            int prime  = 31;
            int result = base.GetHashCode();

            result = prime * result + BitConverter.ToInt32(BitConverter.GetBytes(minimumSimilarity), 0);
            result = prime * result + prefixLength;
            result = prime * result + ((Term == null)?0:Term.GetHashCode());
            return(result);
        }
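The BitConverter round-trip is the C# analogue of Java's Float.floatToIntBits: it hashes the float's exact bit pattern rather than a rounded value. A hedged check:

            // Hedged check: 0.5f has the IEEE 754 bit pattern 0x3F000000.
            int bits = BitConverter.ToInt32(BitConverter.GetBytes(0.5f), 0);
            // bits == 0x3F000000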
Example No. 38
        public virtual void  TestSetBufferSize()
        {
            System.IO.DirectoryInfo indexDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "testSetBufferSize"));
            MockFSDirectory         dir      = new MockFSDirectory(indexDir, NewRandom());

            try
            {
                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
                writer.UseCompoundFile = false;
                for (int i = 0; i < 37; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("content", "aaa bbb ccc ddd" + i, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("id", "" + i, Field.Store.YES, Field.Index.ANALYZED));
                    writer.AddDocument(doc, null);
                }
                writer.Close();

                dir.allIndexInputs.Clear();

                IndexReader reader = IndexReader.Open((Directory)dir, false, null);
                Term        aaa    = new Term("content", "aaa");
                Term        bbb    = new Term("content", "bbb");
                Term        ccc    = new Term("content", "ccc");
                Assert.AreEqual(37, reader.DocFreq(ccc, null));
                reader.DeleteDocument(0, null);
                Assert.AreEqual(37, reader.DocFreq(aaa, null));
                dir.tweakBufferSizes();
                reader.DeleteDocument(4, null);
                Assert.AreEqual(reader.DocFreq(bbb, null), 37);
                dir.tweakBufferSizes();

                IndexSearcher searcher = new IndexSearcher(reader);
                ScoreDoc[]    hits     = searcher.Search(new TermQuery(bbb), null, 1000, null).ScoreDocs;
                dir.tweakBufferSizes();
                Assert.AreEqual(35, hits.Length);
                dir.tweakBufferSizes();
                hits = searcher.Search(new TermQuery(new Term("id", "33")), null, 1000, null).ScoreDocs;
                dir.tweakBufferSizes();
                Assert.AreEqual(1, hits.Length);
                hits = searcher.Search(new TermQuery(aaa), null, 1000, null).ScoreDocs;
                dir.tweakBufferSizes();
                Assert.AreEqual(35, hits.Length);
                searcher.Close();
                reader.Close();
            }
            finally
            {
                _TestUtil.RmDir(indexDir);
            }
        }
Example No. 39
        private static bool LowPrecisionNumber(Lucene.Net.Index.Term term)
        {
            if (term.Field.EndsWith("_Range") == false)
            {
                return(false);
            }

            if (string.IsNullOrEmpty(term.Text))
            {
                return(false);
            }

            return(term.Text[0] - NumericUtils.SHIFT_START_INT != 0 &&
                   term.Text[0] - NumericUtils.SHIFT_START_LONG != 0);
        }
Example No. 40
            protected internal override short[] CreateValue(IndexReader reader, Entry entryKey, IState state)
            {
                Entry entry = entryKey;

                System.String field  = entry.field;
                ShortParser   parser = (ShortParser)entry.custom;

                if (parser == null)
                {
                    return(wrapper.GetShorts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER, state));
                }
                short[]  retArray = new short[reader.MaxDoc];
                TermDocs termDocs = reader.TermDocs(state);
                TermEnum termEnum = reader.Terms(new Term(field), state);

                try
                {
                    do
                    {
                        Term term = termEnum.Term;
                        if (term == null || (System.Object)term.Field != (System.Object)field)
                        {
                            break;
                        }
                        short termval = parser.ParseShort(term.Text);
                        termDocs.Seek(termEnum, state);
                        while (termDocs.Next(state))
                        {
                            retArray[termDocs.Doc] = termval;
                        }
                    }while (termEnum.Next(state));
                }
                catch (StopFillCacheException)
                {
                }
                finally
                {
                    termDocs.Close();
                    termEnum.Close();
                }
                return(retArray);
            }
Example No. 41
        public override bool Equals(System.Object obj)
        {
            if (this == obj)
            {
                return(true);
            }
            if (!base.Equals(obj))
            {
                return(false);
            }
            if (GetType() != obj.GetType())
            {
                return(false);
            }
            FuzzyQuery other = (FuzzyQuery)obj;

            if (BitConverter.ToInt32(BitConverter.GetBytes(minimumSimilarity), 0) != BitConverter.ToInt32(BitConverter.GetBytes(other.minimumSimilarity), 0))
            {
                return(false);
            }
            if (prefixLength != other.prefixLength)
            {
                return(false);
            }
            if (Term == null)
            {
                if (other.Term != null)
                {
                    return(false);
                }
            }
            else if (!Term.Equals(other.Term))
            {
                return(false);
            }
            return(true);
        }
Example No. 42
        public JsonDocumentDto Retrieve(BaristaIndexDefinition indexDefinition, string documentId)
        {
            if (documentId.IsNullOrWhiteSpace())
            {
                throw new ArgumentNullException("documentId", @"A document Id must be specified.");
            }

            try
            {
                var           index = GetOrAddIndex(indexDefinition, true);
                IndexSearcher indexSearcher;
                using (index.GetSearcher(out indexSearcher))
                {
                    var term = new Lucene.Net.Index.Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant());

                    var termQuery = new Lucene.Net.Search.TermQuery(term);

                    var hits = indexSearcher.Search(termQuery, 1);

                    if (hits.TotalHits == 0)
                    {
                        return(null);
                    }

                    var result = RetrieveSearchResults(indexSearcher, hits).FirstOrDefault();

                    return(result == null
                      ? null
                      : result.Document);
                }
            }
            catch (Exception ex)
            {
                throw new FaultException(ex.Message);
            }
        }
Example No. 43
 public PKIndexSplitter(Directory input, Directory dir1, Directory dir2, Term midTerm, IndexWriterConfig config1, IndexWriterConfig config2)
     : this(input, dir1, dir2, new TermRangeFilter(midTerm.Field, null, midTerm.Bytes, true, false), config1, config2)
 {
 }
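The delegated constructor turns midTerm into a half-open key range: TermRangeFilter(field, null, mid, true, false) selects every primary key strictly below midTerm, and the splitter routes matching documents to one output directory and the remainder to the other. A hedged sketch of the filter alone (field and boundary hypothetical):

     // Hedged sketch: matches primary keys in [minimum, "5000").
     TermRangeFilter below = new TermRangeFilter("pk", null, new BytesRef("5000"), true, false);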
Example No. 44
        public override Query Rewrite(IndexReader reader, IState state)
        {
            if (!termLongEnough)
            {
                // can only match if it's exact
                return(new TermQuery(Term));
            }

            int maxSize = BooleanQuery.MaxClauseCount;

            // TODO: Java uses a PriorityQueue.  Using LINQ, we can emulate it;
            //       however, it's considerably slower than the Java counterpart.
            //       This should be a temporary thing, fixed before release.
            SortedList <ScoreTerm, ScoreTerm> stQueue = new SortedList <ScoreTerm, ScoreTerm>();
            FilteredTermEnum enumerator = GetEnum(reader, state);

            try
            {
                ScoreTerm st = new ScoreTerm();
                do
                {
                    Term t = enumerator.Term;
                    if (t == null)
                    {
                        break;
                    }
                    float score = enumerator.Difference();
                    // ignore uncompetitive hits
                    if (stQueue.Count >= maxSize && score <= stQueue.Keys.First().score)
                    {
                        continue;
                    }
                    // add new entry in PQ
                    st.term  = t;
                    st.score = score;
                    stQueue.Add(st, st);
                    // possibly drop entries from queue
                    if (stQueue.Count > maxSize)
                    {
                        st = stQueue.Keys.First();
                        stQueue.Remove(st);
                    }
                    else
                    {
                        st = new ScoreTerm();
                    }
                }while (enumerator.Next(state));
            }
            finally
            {
                enumerator.Close();
            }

            BooleanQuery query = new BooleanQuery(true);

            foreach (ScoreTerm st in stQueue.Keys)
            {
                TermQuery tq = new TermQuery(st.term);       // found a match
                tq.Boost = Boost * st.score;                 // set the boost
                query.Add(tq, Occur.SHOULD);                 // add to query
            }

            return(query);
        }
Example No. 45
 /// <summary> Calls <see cref="FuzzyQuery(Index.Term, float)">FuzzyQuery(term, 0.5f, 0)</see>.</summary>
 public FuzzyQuery(Term term) : this(term, defaultMinSimilarity, defaultPrefixLength)
 {
 }
Example No. 46
 /// <summary> Calls <see cref="FuzzyQuery(Index.Term, float)">FuzzyQuery(term, minimumSimilarity, 0)</see>.</summary>
 public FuzzyQuery(Term term, float minimumSimilarity) : this(term, minimumSimilarity, defaultPrefixLength)
 {
 }
Example No. 47
 // inherit javadoc
 public override int DocFreq(Term term, IState state)
 {
     return(reader.DocFreq(term, state));
 }
Example No. 48
        public virtual void TestRollingUpdates_Mem()
        {
            Random random             = new Random(Random().Next());
            BaseDirectoryWrapper dir  = NewDirectory();
            LineFileDocs         docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());

            //provider.register(new MemoryCodec());
            // LUCENE TODO: uncomment this once MemoryPostingsFormat is brought over
            //if ((!"Lucene3x".Equals(Codec.Default.Name)) && Random().NextBoolean())
            //{
            //    Codec.Default =
            //        TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(random().nextBoolean(), random.NextFloat()));
            //}

            MockAnalyzer analyzer = new MockAnalyzer(Random());

            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

            IndexWriter   w          = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            int           SIZE       = AtLeast(20);
            int           id         = 0;
            IndexReader   r          = null;
            IndexSearcher s          = null;
            int           numUpdates = (int)(SIZE * (2 + (TEST_NIGHTLY ? 200 * Random().NextDouble() : 5 * Random().NextDouble())));

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numUpdates=" + numUpdates);
            }
            int updateCount = 0;

            // TODO: sometimes update ids not in order...
            for (int docIter = 0; docIter < numUpdates; docIter++)
            {
                Documents.Document doc  = docs.NextDoc();
                string             myID = "" + id;
                if (id == SIZE - 1)
                {
                    id = 0;
                }
                else
                {
                    id++;
                }
                if (VERBOSE)
                {
                    Console.WriteLine("  docIter=" + docIter + " id=" + id);
                }
                ((Field)doc.GetField("docid")).StringValue = myID;

                Term idTerm = new Term("docid", myID);

                bool doUpdate;
                if (s != null && updateCount < SIZE)
                {
                    TopDocs hits = s.Search(new TermQuery(idTerm), 1);
                    Assert.AreEqual(1, hits.TotalHits);
                    doUpdate = !w.TryDeleteDocument(r, hits.ScoreDocs[0].Doc);
                    if (VERBOSE)
                    {
                        if (doUpdate)
                        {
                            Console.WriteLine("  tryDeleteDocument failed");
                        }
                        else
                        {
                            Console.WriteLine("  tryDeleteDocument succeeded");
                        }
                    }
                }
                else
                {
                    doUpdate = true;
                    if (VERBOSE)
                    {
                        Console.WriteLine("  no searcher: doUpdate=true");
                    }
                }

                updateCount++;

                if (doUpdate)
                {
                    w.UpdateDocument(idTerm, doc);
                }
                else
                {
                    w.AddDocument(doc);
                }

                if (docIter >= SIZE && Random().Next(50) == 17)
                {
                    if (r != null)
                    {
                        r.Dispose();
                    }

                    bool applyDeletions = Random().NextBoolean();

                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: reopen applyDeletions=" + applyDeletions);
                    }

                    r = w.GetReader(applyDeletions);
                    if (applyDeletions)
                    {
                        s = NewSearcher(r);
                    }
                    else
                    {
                        s = null;
                    }
                    Assert.IsTrue(!applyDeletions || r.NumDocs == SIZE, "applyDeletions=" + applyDeletions + " r.NumDocs=" + r.NumDocs + " vs SIZE=" + SIZE);
                    updateCount = 0;
                }
            }

            if (r != null)
            {
                r.Dispose();
            }

            w.Commit();
            Assert.AreEqual(SIZE, w.NumDocs());

            w.Dispose();

            TestIndexWriter.AssertNoUnreferencedFiles(dir, "leftover files after rolling updates");

            docs.Dispose();

            // LUCENE-4455:
            SegmentInfos infos = new SegmentInfos();

            infos.Read(dir);
            long totalBytes = 0;

            foreach (SegmentCommitInfo sipc in infos.Segments)
            {
                totalBytes += sipc.SizeInBytes();
            }
            long totalBytes2 = 0;

            foreach (string fileName in dir.ListAll())
            {
                if (!fileName.StartsWith(IndexFileNames.SEGMENTS))
                {
                    totalBytes2 += dir.FileLength(fileName);
                }
            }
            Assert.AreEqual(totalBytes2, totalBytes);
            dir.Dispose();
        }
Example No. 49
 public virtual void DeleteDocuments(Term term)
 => IndexWriter.DeleteDocuments(term);
Example No. 50
        public virtual void  TestDeleteLeftoverFiles()
        {
            Directory dir = new RAMDirectory();

            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetMaxBufferedDocs(10);
            int i;

            for (i = 0; i < 35; i++)
            {
                AddDoc(writer, i);
            }
            writer.UseCompoundFile = false;
            for (; i < 45; i++)
            {
                AddDoc(writer, i);
            }
            writer.Close();

            // Delete one doc so we get a .del file:
            IndexReader reader     = IndexReader.Open(dir, false, null);
            Term        searchTerm = new Term("id", "7");
            int         delCount   = reader.DeleteDocuments(searchTerm, null);

            Assert.AreEqual(1, delCount, "didn't delete the right number of documents");

            // Set one norm so we get a .s0 file:
            reader.SetNorm(21, "content", (float)1.5, null);
            reader.Close();

            // Now, artificially create an extra .del file & extra
            // .s0 file:
            System.String[] files = dir.ListAll(null);

            /*
             * for(int j=0;j<files.length;j++) {
             * System.out.println(j + ": " + files[j]);
             * }
             */

            // The numbering of fields can vary depending on which
            // JRE is in use.  On some JREs we see content bound to
            // field 0; on others, field 1.  So, here we have to
            // figure out which field number corresponds to
            // "content", and then set our expected file names below
            // accordingly:
            CompoundFileReader cfsReader  = new CompoundFileReader(dir, "_2.cfs", null);
            FieldInfos         fieldInfos = new FieldInfos(cfsReader, "_2.fnm", null);
            int contentFieldIndex         = -1;

            for (i = 0; i < fieldInfos.Size(); i++)
            {
                FieldInfo fi = fieldInfos.FieldInfo(i);
                if (fi.name_ForNUnit.Equals("content"))
                {
                    contentFieldIndex = i;
                    break;
                }
            }
            cfsReader.Close();
            Assert.IsTrue(contentFieldIndex != -1, "could not locate the 'content' field number in the _2.cfs segment");

            System.String normSuffix = "s" + contentFieldIndex;

            // Create a bogus separate norms file for a
            // segment/field that actually has a separate norms file
            // already:
            CopyFile(dir, "_2_1." + normSuffix, "_2_2." + normSuffix);

            // Create a bogus separate norms file for a
            // segment/field that actually has a separate norms file
            // already, using the "not compound file" extension:
            CopyFile(dir, "_2_1." + normSuffix, "_2_2.f" + contentFieldIndex);

            // Create a bogus separate norms file for a
            // segment/field that does not have a separate norms
            // file already:
            CopyFile(dir, "_2_1." + normSuffix, "_1_1." + normSuffix);

            // Create a bogus separate norms file for a
            // segment/field that does not have a separate norms
            // file already using the "not compound file" extension:
            CopyFile(dir, "_2_1." + normSuffix, "_1_1.f" + contentFieldIndex);

            // Create a bogus separate del file for a
            // segment that already has a separate del file:
            CopyFile(dir, "_0_1.del", "_0_2.del");

            // Create a bogus separate del file for a
            // segment that does not yet have a separate del file:
            CopyFile(dir, "_0_1.del", "_1_1.del");

            // Create a bogus separate del file for a
            // non-existent segment:
            CopyFile(dir, "_0_1.del", "_188_1.del");

            // Create a bogus segment file:
            CopyFile(dir, "_0.cfs", "_188.cfs");

            // Create a bogus fnm file when the CFS already exists:
            CopyFile(dir, "_0.cfs", "_0.fnm");

            // Create a deletable file:
            CopyFile(dir, "_0.cfs", "deletable");

            // Create some old segments file:
            CopyFile(dir, "segments_3", "segments");
            CopyFile(dir, "segments_3", "segments_2");

            // Create a bogus cfs file shadowing a non-cfs segment:
            CopyFile(dir, "_2.cfs", "_3.cfs");

            System.String[] filesPre = dir.ListAll(null);

            // Open & close a writer: it should delete the bogus
            // files created above and nothing more:
            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
            writer.Close();

            System.String[] files2 = dir.ListAll(null);
            dir.Close();

            System.Array.Sort(files);
            System.Array.Sort(files2);

            System.Collections.Hashtable dif = DifFiles(files, files2);

            if (!CollectionsHelper.Equals(files, files2))
            {
                Assert.Fail("IndexFileDeleter failed to delete unreferenced extra files: should have deleted " + (filesPre.Length - files.Length) + " files but only deleted " + (filesPre.Length - files2.Length) + "; expected files:\n    " + AsString(files) + "\n  actual files:\n    " + AsString(files2) + "\ndif: " + CollectionsHelper.CollectionToString(dif));
            }
        }
Example #51
 /// <summary> Checks whether the current upper bound has been reached;
 /// this also updates the term count for statistics.
 /// In contrast to <see cref="FilteredTermEnum" />, a return value
 /// of <c>false</c> ends iteration of the current enum
 /// and forwards to the next sub-range.
 /// </summary>
 protected internal override bool TermCompare(Term term)
 {
     return term.Field == Enclosing_Instance.field && string.CompareOrdinal(term.Text, currentUpperBound) <= 0;
 }
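For reference, here is a minimal standalone sketch of the ordinal bound check used above. The literal values are hypothetical stand-ins, not real NumericUtils-encoded prefix terms:

 // string.CompareOrdinal gives the byte-order comparison the enum relies
 // on: a term is still inside the sub-range while it sorts at or before
 // currentUpperBound.
 string currentUpperBound = "b";
 bool inRange = string.CompareOrdinal("a", currentUpperBound) <= 0;  // true:  "a" sorts before "b"
 bool pastEnd = string.CompareOrdinal("c", currentUpperBound) <= 0;  // false: "c" sorts after "b", so iteration ends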
Example #52
 private void InitBlock(NumericRangeQuery<T> enclosingInstance)
 {
     this.enclosingInstance = enclosingInstance;
     termTemplate           = new Term(Enclosing_Instance.field);
 }
Example #53
 public override TermEnum Terms(Term term)
 {
     EnsureOpen();
     return new MultiTermEnum(this, subReaders, starts, term);
 }
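A hedged usage sketch of this seek-style API, assuming the classic 3.x TermEnum surface (a Term property plus Next(), DocFreq(), and Close()); the field name and starting text are illustrative:

 // Walk all terms in the "content" field starting at "lucene";
 // Terms(Term) positions the enum at the first term >= the given one.
 TermEnum te = reader.Terms(new Term("content", "lucene"));
 try
 {
     do
     {
         Term t = te.Term;
         if (t == null || t.Field != "content")
             break;  // ran past the end of the field
         Console.WriteLine(t.Text + " docFreq=" + te.DocFreq());
     } while (te.Next());
 }
 finally
 {
     te.Close();
 }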
Example #54
        public virtual void TestEmptyIndex()
        {
            // creating two directories for indices
            Directory indexStoreA = new MockRAMDirectory();
            Directory indexStoreB = new MockRAMDirectory();

            // creating a document to store
            Document lDoc = new Document();

            lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.ANALYZED));
            lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

            // creating a document to store
            Document lDoc2 = new Document();

            lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.ANALYZED));
            lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.NOT_ANALYZED));
            lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

            // creating a document to store
            Document lDoc3 = new Document();

            lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.ANALYZED));
            lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.NOT_ANALYZED));
            lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

            // creating an index writer for the first index
            IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
            // creating an index writer for the second index, but writing nothing
            IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);

            //--------------------------------------------------------------------
            // scenario 1
            //--------------------------------------------------------------------

            // writing the documents to the first index
            writerA.AddDocument(lDoc, null);
            writerA.AddDocument(lDoc2, null);
            writerA.AddDocument(lDoc3, null);
            writerA.Optimize(null);
            writerA.Close();

            // closing the second index
            writerB.Close();

            // creating the query
            QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
            Query       query  = parser.Parse("handle:1");

            // building the searchables
            Searcher[] searchers = new Searcher[2];
            // VITAL STEP: adding the searcher for the empty index first, before the searcher for the populated index
            searchers[0] = new IndexSearcher(indexStoreB, true, null);
            searchers[1] = new IndexSearcher(indexStoreA, true, null);
            // creating the multiSearcher
            Searcher mSearcher = GetMultiSearcherInstance(searchers);

            // performing the search
            ScoreDoc[] hits = mSearcher.Search(query, null, 1000, null).ScoreDocs;

            Assert.AreEqual(3, hits.Length);

            // iterating over the hit documents
            for (int i = 0; i < hits.Length; i++)
            {
                mSearcher.Doc(hits[i].Doc, null);
            }
            mSearcher.Close();


            //--------------------------------------------------------------------
            // scenario 2
            //--------------------------------------------------------------------

            // adding one document to the empty index
            writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
            writerB.AddDocument(lDoc, null);
            writerB.Optimize(null);
            writerB.Close();

            // building the searchables
            Searcher[] searchers2 = new Searcher[2];
            // VITAL STEP: adding the searcher for the empty index first, before the searcher for the populated index
            searchers2[0] = new IndexSearcher(indexStoreB, true, null);
            searchers2[1] = new IndexSearcher(indexStoreA, true, null);
            // creating the multiSearcher
            MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);

            // performing the same search
            ScoreDoc[] hits2 = mSearcher2.Search(query, null, 1000, null).ScoreDocs;

            Assert.AreEqual(4, hits2.Length);

            // iterating over the hit documents
            for (int i = 0; i < hits2.Length; i++)
            {
                // no exception should happen at this point
                mSearcher2.Doc(hits2[i].Doc, null);
            }

            // test the subSearcher() method:
            Query subSearcherQuery = parser.Parse("id:doc1");

            hits2 = mSearcher2.Search(subSearcherQuery, null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, hits2.Length);
            Assert.AreEqual(0, mSearcher2.SubSearcher(hits2[0].Doc));             // hit from searchers2[0]
            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[1].Doc));             // hit from searchers2[1]
            subSearcherQuery = parser.Parse("id:doc2");
            hits2            = mSearcher2.Search(subSearcherQuery, null, 1000, null).ScoreDocs;
            Assert.AreEqual(1, hits2.Length);
            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[0].Doc));             // hit from searchers2[1]
            mSearcher2.Close();

            //--------------------------------------------------------------------
            // scenario 3
            //--------------------------------------------------------------------

            // deleting the document just added; this will cause a different exception to take place
            Term        term    = new Term("id", "doc1");
            IndexReader readerB = IndexReader.Open(indexStoreB, false, null);

            readerB.DeleteDocuments(term, null);
            readerB.Close();

            // optimizing the index with the writer
            writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
            writerB.Optimize(null);
            writerB.Close();

            // building the searchables
            Searcher[] searchers3 = new Searcher[2];

            searchers3[0] = new IndexSearcher(indexStoreB, true, null);
            searchers3[1] = new IndexSearcher(indexStoreA, true, null);
            // creating the multiSearcher
            Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);

            // performing the same search
            ScoreDoc[] hits3 = mSearcher3.Search(query, null, 1000, null).ScoreDocs;

            Assert.AreEqual(3, hits3.Length);

            // iterating over the hit documents
            for (int i = 0; i < hits3.Length; i++)
            {
                mSearcher3.Doc(hits3[i].Doc, null);
            }
            mSearcher3.Close();
            indexStoreA.Close();
            indexStoreB.Close();
        }
Example #55
 /// <summary>
 /// Split an index based on a given primary key term
 /// and a 'middle' term.  If the middle term is present, it's
 /// sent to dir2.
 /// </summary>
 public PKIndexSplitter(LuceneVersion version, Directory input, Directory dir1, Directory dir2, Term midTerm)
     : this(version, input, dir1, dir2, new TermRangeFilter(midTerm.Field, null, midTerm.Bytes, true, false))
 {
 }
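A hedged usage sketch, assuming the Lucene.Net.Misc build of PKIndexSplitter with a Split() method; the paths, version constant, and "id" field are illustrative:

 // Split "index" around primary key "m": ids sorting before "m" land in
 // index-low (dir1); "m" itself and everything after land in index-high (dir2).
 Directory input = FSDirectory.Open("index");
 Directory dir1  = FSDirectory.Open("index-low");
 Directory dir2  = FSDirectory.Open("index-high");

 var splitter = new PKIndexSplitter(LuceneVersion.LUCENE_48, input, dir1, dir2, new Term("id", "m"));
 splitter.Split();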
Example #56
 public virtual void UpdateBinaryDocValue(Term term, string field, BytesRef value)
 => IndexWriter.UpdateBinaryDocValue(term, field, value);
Example #57
 public virtual void UpdateNumericDocValue(Term term, string field, long? value)
 => IndexWriter.UpdateNumericDocValue(term, field, value);
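Both wrappers forward to IndexWriter's in-place docvalues updates. A minimal sketch of the direct calls, assuming writer is an open IndexWriter and that "views" and "thumb" already exist as numeric and binary docvalues fields (all names here are hypothetical):

 // Rewrite the numeric docvalues field "views" on every document whose
 // "id" term is "42", without re-indexing the documents themselves:
 writer.UpdateNumericDocValue(new Term("id", "42"), "views", 100L);

 // Binary docvalues can be replaced the same way:
 writer.UpdateBinaryDocValue(new Term("id", "42"), "thumb", new BytesRef(new byte[] { 1, 2, 3 }));
 writer.Commit();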
Example #58
 public virtual void UpdateDocuments(Term delTerm, IEnumerable<IEnumerable<IIndexableField>> docs)
 {
     IndexWriter.UpdateDocuments(delTerm, docs);
     MaybeCommit();
 }
Example #59
 public PayloadTermQuery(Term term, PayloadFunction function)
     : this(term, function, true)
 {
 }
Example #60
 public PayloadTermQuery(Term term, PayloadFunction function, bool includeSpanScore)
     : base(term)
 {
     this.Function         = function;
     this.IncludeSpanScore = includeSpanScore;
 }
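A hedged usage sketch; MaxPayloadFunction is one of the stock PayloadFunction implementations, and the "body"/"fox" values are illustrative. Payloads must have been written at index time (for example by a payload-aware TokenFilter) for the score to differ from a plain span query:

 // Score matches of body:fox by the maximum payload observed at the
 // matching positions; with includeSpanScore = true (what the two-argument
 // constructor above passes along) the usual span score is multiplied in.
 var query = new PayloadTermQuery(new Term("body", "fox"), new MaxPayloadFunction());
 TopDocs top = searcher.Search(query, 10);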