public static SetupDoc ( Lucene.Net.Documents.Document doc ) : void |
doc | Lucene.Net.Documents.Document | The document to populate with the standard test fields |
Returns | void |
/// <summary>
/// Writes one document to the "seg-to-delete" segment, deletes it through a
/// SegmentReader, and verifies both the deletion state and that fetching the
/// deleted slot throws.
/// </summary>
public virtual void TestDelete()
{
    Document docToDelete = new Document();
    DocHelper.SetupDoc(docToDelete);
    DocHelper.WriteDoc(dir, "seg-to-delete", docToDelete);
    try
    {
        SegmentReader deleteReader = new SegmentReader(new SegmentInfo("seg-to-delete", 1, dir));
        Assert.IsNotNull(deleteReader);
        Assert.AreEqual(1, deleteReader.NumDocs());
        deleteReader.Delete(0);
        Assert.IsTrue(deleteReader.IsDeleted(0));
        Assert.IsTrue(deleteReader.HasDeletions());
        Assert.AreEqual(0, deleteReader.NumDocs());
        try
        {
            deleteReader.Document(0);
            Assert.Fail("Expected ArgumentException when reading a deleted document");
        }
        catch (System.ArgumentException)
        {
            // Expected: slot 0 was deleted above.
        }
    }
    catch (System.IO.IOException e)
    {
        System.Console.Error.WriteLine(e.StackTrace);
        Assert.Fail("Unexpected IOException: " + e.Message);
    }
}
/// <summary>
/// Creates a fresh directory, fills the shared test document with
/// DocHelper's standard fields, and writes it as a single-doc segment.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    DocHelper.SetupDoc(TestDoc);
    Info = DocHelper.WriteDoc(Random(), Dir, TestDoc);
}
/// <summary>
/// Writes the standard test document and opens a SegmentReader over the
/// resulting segment.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(testDoc);
    SegmentInfo writtenSegment = DocHelper.WriteDoc(dir, testDoc);
    reader = SegmentReader.Get(writtenSegment);
}
/// <summary>
/// Writes the standard test document and opens a read-only SegmentReader
/// over it using the default terms-index divisor.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(testDoc);
    SegmentInfo writtenSegment = DocHelper.WriteDoc(dir, testDoc);
    reader = SegmentReader.Get(true, writtenSegment, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
}
/// <summary>
/// Prepares a fresh directory and test document, then writes the document
/// and keeps the resulting segment info for the tests.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    testDoc = new Document();
    dir = NewDirectory();
    DocHelper.SetupDoc(testDoc);
    info = DocHelper.WriteDoc(Random, dir, testDoc);
}
/// <summary>
/// Re-runs the term-docs, bad-seek, and skip-to tests with a terms-index
/// divisor of 2 against a freshly written single-document segment.
/// </summary>
public virtual void TestIndexDivisor()
{
    TestDoc = new Document();
    DocHelper.SetupDoc(TestDoc);
    DocHelper.WriteDoc(Random(), Dir, TestDoc);

    const int divisor = 2;
    TestTermDocs(divisor);
    TestBadSeek(divisor);
    TestSkipTo(divisor);
}
/// <summary>
/// Writes the standard test document into a new directory and opens a
/// SegmentReader over it with the default terms-index divisor.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    DocHelper.SetupDoc(TestDoc);
    SegmentCommitInfo commitInfo = DocHelper.WriteDoc(Random(), Dir, TestDoc);
    Reader = new SegmentReader(commitInfo, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, IOContext.READ);
}
/// <summary>
/// Re-runs the term-docs, bad-seek, and skip-to tests with a terms-index
/// divisor of 2 against a fresh MockRAMDirectory-backed segment.
/// </summary>
public virtual void TestIndexDivisor()
{
    dir = new MockRAMDirectory();
    testDoc = new Document();
    DocHelper.SetupDoc(testDoc);
    DocHelper.WriteDoc(dir, testDoc);

    const int divisor = 2;
    TestTermDocs(divisor);
    testBadSeek(divisor);
    testSkipTo(divisor);
}
// Verifies that a document added through IndexWriter can be read back from
// the newest segment with stored field values and term-vector flags intact,
// and that norms are absent exactly for fields indexed with omitNorms.
public virtual void TestAddDocument()
{
    Document testDoc = new Document();
    DocHelper.SetupDoc(testDoc);
    Analyzer analyzer = new WhitespaceAnalyzer();
    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(testDoc);
    writer.Flush();
    // NewestSegment is captured before Close so the reader targets exactly
    // the segment that was just flushed.
    SegmentInfo info = writer.NewestSegment();
    writer.Close();
    //After adding the document, we should be able to read it back in
    SegmentReader reader = SegmentReader.Get(info);
    Assert.IsTrue(reader != null);
    Document doc = reader.Document(0);
    Assert.IsTrue(doc != null);
    //System.out.println("Document: " + doc);
    // textField2 is expected to carry term vectors; textField1 is not.
    Fieldable[] fields = doc.GetFields("textField2");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_2_TEXT));
    Assert.IsTrue(fields[0].IsTermVectorStored());
    fields = doc.GetFields("textField1");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_1_TEXT));
    Assert.IsFalse(fields[0].IsTermVectorStored());
    fields = doc.GetFields("keyField");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.KEYWORD_TEXT));
    fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.NO_NORMS_TEXT));
    fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_3_TEXT));
    // test that the norms are not present in the segment if
    // omitNorms is true
    for (int i = 0; i < reader.core_ForNUnit.fieldInfos_ForNUnit.Size(); i++)
    {
        FieldInfo fi = reader.core_ForNUnit.fieldInfos_ForNUnit.FieldInfo(i);
        if (fi.isIndexed_ForNUnit)
        {
            // HasNorms must be the exact inverse of the omitNorms flag.
            Assert.IsTrue(fi.omitNorms_ForNUnit == !reader.HasNorms(fi.name_ForNUnit));
        }
    }
}
// Verifies that a document added through IndexWriter can be read back from
// the newest segment commit with stored field values and term-vector flags
// intact, and that norms are absent exactly for fields that omit norms.
public virtual void TestAddDocument()
{
    Document testDoc = new Document();
    DocHelper.SetupDoc(testDoc);
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    writer.AddDocument(testDoc);
    writer.Commit();
    // NewestSegment is captured before Dispose so the reader targets exactly
    // the segment that was just committed.
    SegmentCommitInfo info = writer.NewestSegment();
    writer.Dispose();
    //After adding the document, we should be able to read it back in
    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    Assert.IsTrue(reader != null);
    Document doc = reader.Document(0);
    Assert.IsTrue(doc != null);
    //System.out.println("Document: " + doc);
    // textField2 is expected to carry term vectors; textField1 is not.
    IIndexableField[] fields = doc.GetFields("textField2");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_2_TEXT, StringComparison.Ordinal));
    Assert.IsTrue(fields[0].IndexableFieldType.StoreTermVectors);
    fields = doc.GetFields("textField1");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_1_TEXT, StringComparison.Ordinal));
    Assert.IsFalse(fields[0].IndexableFieldType.StoreTermVectors);
    fields = doc.GetFields("keyField");
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.KEYWORD_TEXT, StringComparison.Ordinal));
    fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.NO_NORMS_TEXT, StringComparison.Ordinal));
    fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
    Assert.IsTrue(fields != null && fields.Length == 1);
    Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_3_TEXT, StringComparison.Ordinal));
    // test that the norms are not present in the segment if
    // omitNorms is true
    foreach (FieldInfo fi in reader.FieldInfos)
    {
        if (fi.IsIndexed)
        {
            // A null norms producer must coincide exactly with OmitsNorms.
            Assert.IsTrue(fi.OmitsNorms == (reader.GetNormValues(fi.Name) == null));
        }
    }
    reader.Dispose();
}
/// <summary>
/// Writes one standard test document into each merge directory and opens a
/// SegmentReader over each resulting single-document segment.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(doc1);
    SegmentInfo firstSegment = DocHelper.WriteDoc(merge1Dir, doc1);
    DocHelper.SetupDoc(doc2);
    SegmentInfo secondSegment = DocHelper.WriteDoc(merge2Dir, doc2);
    reader1 = SegmentReader.Get(firstSegment);
    reader2 = SegmentReader.Get(secondSegment);
}
/// <summary>
/// Records the test document's fields in the FieldInfos and writes the
/// document to a non-compound-file segment for the tests to read.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    fieldInfos = new FieldInfos();
    DocHelper.SetupDoc(testDoc);
    fieldInfos.Add(testDoc);

    IndexWriter indexWriter = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    indexWriter.SetUseCompoundFile(false);
    indexWriter.AddDocument(testDoc);
    indexWriter.Close();
}
/// <summary>
/// Writes the standard test document as the "test" segment and opens a
/// SegmentReader over it for the tests to use.
/// </summary>
protected virtual void SetUp()
{
    try
    {
        DocHelper.SetupDoc(testDoc);
        DocHelper.WriteDoc(dir, testDoc);
        reader = new SegmentReader(new SegmentInfo("test", 1, dir));
    }
    catch (System.IO.IOException e)
    {
        // Report the failure instead of swallowing it silently; otherwise a
        // failed setup leaves 'reader' null and produces confusing test errors.
        System.Console.Error.WriteLine(e.StackTrace);
    }
}
/// <summary>
/// Writes one standard test document into each merge directory and opens a
/// read-only SegmentReader over each with the default terms-index divisor.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(doc1);
    SegmentInfo firstSegment = DocHelper.WriteDoc(merge1Dir, doc1);
    DocHelper.SetupDoc(doc2);
    SegmentInfo secondSegment = DocHelper.WriteDoc(merge2Dir, doc2);
    reader1 = SegmentReader.Get(true, firstSegment, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
    reader2 = SegmentReader.Get(true, secondSegment, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
}
/// <summary>
/// Writes two standard test documents into a fresh RAMDirectory and reads
/// back the resulting SegmentInfos for the tests to inspect.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    dir = new RAMDirectory();

    doc1 = new Document();
    DocHelper.SetupDoc(doc1);
    doc2 = new Document();
    DocHelper.SetupDoc(doc2);

    DocHelper.WriteDoc(dir, doc1);
    DocHelper.WriteDoc(dir, doc2);

    sis = new SegmentInfos();
    sis.Read(dir);
}
/// <summary>
/// Writes a single document, deletes it through a writable SegmentReader,
/// and verifies the deletion is reflected in the reader's state.
/// </summary>
public virtual void TestDelete()
{
    Document docToDelete = new Document();
    DocHelper.SetupDoc(docToDelete);
    SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);

    // readOnly: false so the delete below is permitted.
    SegmentReader deleteReader = SegmentReader.Get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
    Assert.IsNotNull(deleteReader);
    Assert.AreEqual(1, deleteReader.NumDocs());

    deleteReader.DeleteDocument(0, null);

    Assert.IsTrue(deleteReader.IsDeleted(0));
    Assert.IsTrue(deleteReader.HasDeletions);
    Assert.AreEqual(0, deleteReader.NumDocs());
}
/// <summary>
/// Writes a single document, deletes it through a SegmentReader, and
/// verifies the deletion is reflected in the reader's state.
/// </summary>
public virtual void TestDelete()
{
    Document docToDelete = new Document();
    DocHelper.SetupDoc(docToDelete);
    SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);

    SegmentReader deleteReader = SegmentReader.Get(info);
    Assert.IsNotNull(deleteReader);
    Assert.AreEqual(1, deleteReader.NumDocs());

    deleteReader.DeleteDocument(0);

    Assert.IsTrue(deleteReader.IsDeleted(0));
    Assert.IsTrue(deleteReader.HasDeletions());
    Assert.AreEqual(0, deleteReader.NumDocs());
}
/// <summary>
/// Creates the merged/merge1/merge2 directories, writes one standard test
/// document into each merge directory, and opens a SegmentReader over each.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    MergedDir = NewDirectory();
    Merge1Dir = NewDirectory();
    Merge2Dir = NewDirectory();

    DocHelper.SetupDoc(Doc1);
    SegmentCommitInfo firstSegment = DocHelper.WriteDoc(Random(), Merge1Dir, Doc1);
    DocHelper.SetupDoc(Doc2);
    SegmentCommitInfo secondSegment = DocHelper.WriteDoc(Random(), Merge2Dir, Doc2);

    Reader1 = new SegmentReader(firstSegment, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    Reader2 = new SegmentReader(secondSegment, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
}
/// <summary>
/// Writes one standard test document into each merge directory's named
/// segment and opens a SegmentReader over each.
/// </summary>
protected virtual void SetUp()
{
    DocHelper.SetupDoc(doc1);
    DocHelper.WriteDoc(merge1Dir, merge1Segment, doc1);
    DocHelper.SetupDoc(doc2);
    DocHelper.WriteDoc(merge2Dir, merge2Segment, doc2);
    try
    {
        reader1 = new SegmentReader(new SegmentInfo(merge1Segment, 1, merge1Dir));
        reader2 = new SegmentReader(new SegmentInfo(merge2Segment, 1, merge2Dir));
    }
    catch (System.IO.IOException ioe)
    {
        System.Console.Error.WriteLine(ioe.StackTrace);
    }
}
/// <summary>
/// Records the test document's fields in the FieldInfos and writes the
/// document into the "test" segment with a DocumentWriter.
/// </summary>
protected virtual void SetUp()
{
    fieldInfos = new FieldInfos();
    DocHelper.SetupDoc(testDoc);
    fieldInfos.Add(testDoc);
    DocumentWriter writer = new DocumentWriter(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50);
    Assert.IsTrue(writer != null);
    try
    {
        writer.AddDocument("test", testDoc);
    }
    catch (System.IO.IOException e)
    {
        // Report the failure instead of swallowing it silently; a failed write
        // would otherwise surface only as confusing errors in the tests.
        System.Console.Error.WriteLine(e.StackTrace);
    }
}
/// <summary>
/// One-time fixture setup: records the field infos of the standard test
/// document and writes it to a fresh directory with compound files disabled,
/// then arms the fault-injection flag off.
/// </summary>
public void BeforeClass()
{
    TestDoc = new Document();
    FieldInfos = new FieldInfos.Builder();
    DocHelper.SetupDoc(TestDoc);
    foreach (IndexableField field in TestDoc)
    {
        FieldInfos.AddOrUpdate(field.Name, field.FieldType);
    }

    Dir = NewDirectory();
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy());
    config.MergePolicy.NoCFSRatio = 0.0; // force non-compound segment files
    IndexWriter writer = new IndexWriter(Dir, config);
    writer.AddDocument(TestDoc);
    writer.Dispose();

    FaultyIndexInput.DoFail = false;
}
/// <summary>
/// Writes two standard test documents into the "seg-1" and "seg-2" segments,
/// persists the SegmentInfos, and opens a reader over each segment.
/// </summary>
protected virtual void SetUp()
{
    DocHelper.SetupDoc(doc1);
    DocHelper.SetupDoc(doc2);
    DocHelper.WriteDoc(dir, "seg-1", doc1);
    DocHelper.WriteDoc(dir, "seg-2", doc2);
    try
    {
        sis.Write(dir);
        reader1 = new SegmentReader(new SegmentInfo("seg-1", 1, dir));
        reader2 = new SegmentReader(new SegmentInfo("seg-2", 1, dir));
        readers[0] = reader1;
        readers[1] = reader2;
    }
    catch (System.IO.IOException ioe)
    {
        System.Console.Error.WriteLine(ioe.StackTrace);
    }
}
/// <summary>
/// Populates the shared test document with DocHelper's standard fields.
/// </summary>
protected virtual void SetUp()
{
    DocHelper.SetupDoc(testDoc);
}
/// <summary>
/// Populates the shared test document with DocHelper's standard fields.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(testDoc);
}
/// <summary>
/// Populates the shared test document and writes it, keeping the resulting
/// segment info for the tests.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    DocHelper.SetupDoc(testDoc);
    info = DocHelper.WriteDoc(dir, testDoc);
}
/// <summary>
/// Populates the shared test document and writes it to the test directory.
/// </summary>
protected virtual void SetUp()
{
    DocHelper.SetupDoc(testDoc);
    DocHelper.WriteDoc(dir, testDoc);
}