        // Indexes three documents through the same Field instance (only the middle one
        // carries a payload) and checks that the payload survives in doc 1's term vector.
        public virtual void TestMixupDocs()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
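            // First document: a term-vector field with positions and payloads enabled,
            // analyzed by a plain tokenizer that exposes no payload attribute.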
            Document doc = new Document();
            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
            customType.StoreTermVectors = true;
            customType.StoreTermVectorPositions = true;
            customType.StoreTermVectorPayloads = true;
            customType.StoreTermVectorOffsets = Random().NextBoolean();
            Field field = new Field("field", "", customType);
            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
            field.TokenStream = ts;
            doc.Add(field);
            writer.AddDocument(doc);

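            // Second document: reuse the same Field, now backed by a canned token
            // stream whose single token carries a payload.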
            Token withPayload = new Token("withPayload", 0, 11);
            withPayload.Payload = new BytesRef("test");
            ts = new CannedTokenStream(withPayload);
            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
            field.TokenStream = ts;
            writer.AddDocument(doc);

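            // Third document: back to a token stream without payloads.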
            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
            field.TokenStream = ts;
            writer.AddDocument(doc);

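            // The term vector for doc 1 (the payload-bearing document) must still expose the payload.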
            DirectoryReader reader = writer.Reader;
            Terms terms = reader.GetTermVector(1, "field");
            Debug.Assert(terms != null);
            TermsEnum termsEnum = terms.Iterator(null);
            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
            DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
            Assert.AreEqual(0, de.NextDoc());
            Assert.AreEqual(0, de.NextPosition());
            Assert.AreEqual(new BytesRef("test"), de.Payload);
            writer.Dispose();
            reader.Dispose();
            dir.Dispose();
        }
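 // Analyzer components: attach fixed- or variable-length mock payloads depending on the field name.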
 public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
 {
     Tokenizer tokenizer = new MockTokenizer(reader);
     if (fieldName.Contains("payloadsFixed"))
     {
         TokenFilter filter = new MockFixedLengthPayloadFilter(new Random(0), tokenizer, 1);
         return new TokenStreamComponents(tokenizer, filter);
     }
     else if (fieldName.Contains("payloadsVariable"))
     {
         TokenFilter filter = new MockVariableLengthPayloadFilter(new Random(0), tokenizer);
         return new TokenStreamComponents(tokenizer, filter);
     }
     else
     {
         return new TokenStreamComponents(tokenizer);
     }
 }
Example 3
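 // Analyzer components: wrap the tokenizer in the enclosing test's PayloadFilter.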
 public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
 {
     Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
     return new TokenStreamComponents(result, new PayloadFilter(OuterInstance, result));
 }
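 // Pass-through analyzer: the tokenizer serves as both source and sink, with no extra filters.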
 protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
 {
     Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
     return new TokenStreamComponents(tokenizer, tokenizer);
 }
Example 5
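 // Analyzer components: add a MockSynonymFilter on top of the tokenizer.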
 public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
 {
     MockTokenizer tokenizer = new MockTokenizer(reader);
     return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
 }
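 // Analyzer components: only "distinctiveFieldName" is wrapped in the anonymous TokenFilter ("tosser"); all other fields use the bare tokenizer.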
 public override TokenStreamComponents CreateComponents(string fieldName, TextReader input)
 {
     Tokenizer tokenizer = new MockTokenizer(input);
     if (fieldName.Equals("distinctiveFieldName"))
     {
         TokenFilter tosser = new TokenFilterAnonymousInnerClassHelper(this, tokenizer);
         return new TokenStreamComponents(tokenizer, tosser);
     }
     else
     {
         return new TokenStreamComponents(tokenizer);
     }
 }