// Signature reference (extraction artifact, reformatted):
//   public void SetNorm(int doc, String field, byte value)
//     doc   : int
//     field : String
//     value : byte
//     return: void
/// <summary>
/// ParallelReader.IsCurrent must reflect the state of every sub-reader:
/// touching either underlying index makes the parallel reader stale.
/// </summary>
public virtual void TestIsCurrent()
{
    Directory dir1 = GetDir1();
    Directory dir2 = GetDir2();
    ParallelReader parallelReader = new ParallelReader();
    parallelReader.Add(IndexReader.Open(dir1));
    parallelReader.Add(IndexReader.Open(dir2));

    // freshly opened: both sub-readers are up to date
    Assert.IsTrue(parallelReader.IsCurrent());

    IndexReader updater = IndexReader.Open(dir1);
    updater.SetNorm(0, "f1", 100);
    updater.Close();

    // one of the two IndexReaders which ParallelReader is using
    // is not current anymore
    Assert.IsFalse(parallelReader.IsCurrent());

    updater = IndexReader.Open(dir2);
    updater.SetNorm(0, "f3", 100);
    updater.Close();

    // now both are not current anymore
    Assert.IsFalse(parallelReader.IsCurrent());
}
/// <summary> 1. Get a norm from the original reader 2. Clone the original reader 3.
/// Delete a document and set the norm of the cloned reader 4. Verify the norms
/// are not the same on each reader 5. Verify the doc deleted is only in the
/// cloned reader 6. Try to delete a document in the original reader, an
/// exception should be thrown
/// </summary>
/// <param name="r1">IndexReader to perform tests on</param>
/// <throws> Exception </throws>
private void PerformDefaultTests(IndexReader r1)
{
    float norm1 = Similarity.DecodeNorm(r1.Norms("field1")[4]);

    IndexReader pr1Clone = (IndexReader) r1.Clone();
    pr1Clone.DeleteDocument(10);
    pr1Clone.SetNorm(4, "field1", 0.5f);

    // changes on the clone must not leak back into the original reader
    Assert.IsTrue(Similarity.DecodeNorm(r1.Norms("field1")[4]) == norm1);
    Assert.IsTrue(Similarity.DecodeNorm(pr1Clone.Norms("field1")[4]) != norm1);
    Assert.IsTrue(!r1.IsDeleted(10));
    Assert.IsTrue(pr1Clone.IsDeleted(10));

    // try to update the original reader, which should throw an exception.
    // BUGFIX: the old try/catch caught System.Exception, which also swallowed
    // the assertion failure raised by Assert.Fail — the test could never fail
    // here even when no exception was thrown. Assert.Throws pins the expected
    // exception type instead (consistent with the other PerformDefaultTests).
    Assert.Throws<LockObtainFailedException>(
        () => r1.DeleteDocument(11),
        "Tried to delete doc 11 and an exception should have been thrown");

    pr1Clone.Close();
}
/// <summary>
/// A norm set on the original reader before cloning must be persisted to the
/// directory once both the original and the clone have been closed.
/// </summary>
public virtual void TestCloneWithSetNorm()
{
    Directory dir1 = new MockRAMDirectory();
    TestIndexReaderReopen.CreateIndex(dir1, false);

    IndexReader orig = IndexReader.Open(dir1, false);
    orig.SetNorm(1, "field1", 17.0f);
    byte encoded = Similarity.EncodeNorm(17.0f);
    Assert.AreEqual(encoded, orig.Norms("field1")[1]);

    // the cloned segmentreader should have 2 references, 1 to itself, and 1 to
    // the original segmentreader
    IndexReader clonedReader = (IndexReader) orig.Clone();
    orig.Close();
    clonedReader.Close();

    // a fresh reader must observe the norm written via the original
    IndexReader reopened = IndexReader.Open(dir1, false);
    Assert.AreEqual(encoded, reopened.Norms("field1")[1]);
    reopened.Close();
    dir1.Close();
}
/// <summary>
/// Tracks the ref-count of the shared norms byte array across a chain of
/// cloned readers: each clone adds a reference, and writing a norm on a clone
/// gives it a private copy (ref-count drops back to 1 for the writer).
/// </summary>
public virtual void TestNormsRefCounting()
{
    Directory dir1 = new MockRAMDirectory();
    TestIndexReaderReopen.CreateIndex(dir1, false);
    IndexReader reader1 = IndexReader.Open(dir1, false);

    IndexReader reader2C = (IndexReader) reader1.Clone();
    SegmentReader segmentReader2C = SegmentReader.GetOnlySegmentReader(reader2C);
    segmentReader2C.Norms("field1"); // load the norms for the field
    Norm reader2CNorm = segmentReader2C.norms_ForNUnit["field1"];
    Assert.IsTrue(reader2CNorm.BytesRef().RefCount() == 2,
        "reader2CNorm.bytesRef()=" + reader2CNorm.BytesRef());

    IndexReader reader3C = (IndexReader) reader2C.Clone();
    SegmentReader segmentReader3C = SegmentReader.GetOnlySegmentReader(reader3C);
    Norm reader3CCNorm = segmentReader3C.norms_ForNUnit["field1"];
    Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());

    // edit a norm and the refcount should be 1
    IndexReader reader4C = (IndexReader) reader3C.Clone();
    SegmentReader segmentReader4C = SegmentReader.GetOnlySegmentReader(reader4C);
    Assert.AreEqual(4, reader3CCNorm.BytesRef().RefCount());
    reader4C.SetNorm(5, "field1", 0.33f);

    // generate a cannot update exception in reader1
    Assert.Throws<LockObtainFailedException>(
        () => reader3C.SetNorm(1, "field1", 0.99f),
        "did not hit expected exception");

    // norm values should be different
    Assert.IsTrue(
        Similarity.DecodeNorm(segmentReader3C.Norms("field1")[5])
        != Similarity.DecodeNorm(segmentReader4C.Norms("field1")[5]));
    Norm reader4CCNorm = segmentReader4C.norms_ForNUnit["field1"];
    Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());
    Assert.AreEqual(1, reader4CCNorm.BytesRef().RefCount());

    IndexReader reader5C = (IndexReader) reader4C.Clone();
    SegmentReader segmentReader5C = SegmentReader.GetOnlySegmentReader(reader5C);
    Norm reader5CCNorm = segmentReader5C.norms_ForNUnit["field1"];
    reader5C.SetNorm(5, "field1", 0.7f);
    Assert.AreEqual(1, reader5CCNorm.BytesRef().RefCount());

    reader5C.Close();
    reader4C.Close();
    reader3C.Close();
    reader2C.Close();
    reader1.Close();
    dir1.Close();
}
/// <summary>
/// Opens a writable reader on <paramref name="dir"/> and, for every third
/// document i, swaps the tracked norm values between slots i and (i*3)%count,
/// writing the swapped values back through SetNorm on field "f1".
/// </summary>
private void ModifyNormsForF1(Directory dir)
{
    IndexReader reader = IndexReader.Open(dir, false);
    int docCount = reader.MaxDoc;
    for (int i = 0; i < docCount; i += 3)
    {
        // modify for every third doc
        int k = (i * 3) % modifiedNorms.Count;
        float normAtI = (float) modifiedNorms[i];
        float normAtK = (float) modifiedNorms[k];

        // swap the two tracked values and mirror them into the index
        modifiedNorms[i] = normAtK;
        modifiedNorms[k] = normAtI;
        reader.SetNorm(i, "f" + 1, normAtK);
        reader.SetNorm(k, "f" + 1, normAtI);
    }
    reader.Close();
}
/// <summary>
/// Opens a pre-lockless index (no new docs added), performs a delete and a
/// SetNorm against it, verifies searches see the changes, then optimizes and
/// re-verifies.
/// </summary>
public virtual void ChangeIndexNoAdds(System.String dirName)
{
    dirName = FullDir(dirName);
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));

    // make sure searching sees right # hits
    IndexSearcher searcher = new IndexSearcher(dir, true);
    ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(34, hits.Length, "wrong number of hits");
    Document d = searcher.Doc(hits[0].Doc);
    Assert.AreEqual("21", d.Get("id"), "wrong first document");
    searcher.Close();

    // make sure we can do a delete & setNorm against this
    // pre-lockless segment:
    IndexReader reader = IndexReader.Open(dir, false);
    Term searchTerm = new Term("id", "6");
    int delCount = reader.DeleteDocuments(searchTerm);
    Assert.AreEqual(1, delCount, "wrong delete count");
    reader.SetNorm(22, "content", (float) 2.0);
    reader.Close();

    // make sure they "took":
    searcher = new IndexSearcher(dir, true);
    hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(33, hits.Length, "wrong number of hits");
    d = searcher.Doc(hits[0].Doc);
    Assert.AreEqual("22", d.Get("id"), "wrong first document");
    TestHits(hits, 33, searcher.IndexReader);
    searcher.Close();

    // optimize
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.Optimize();
    writer.Close();

    searcher = new IndexSearcher(dir, true);
    hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(33, hits.Length, "wrong number of hits");
    d = searcher.Doc(hits[0].Doc);
    Assert.AreEqual("22", d.Get("id"), "wrong first document");
    TestHits(hits, 33, searcher.IndexReader);
    searcher.Close();

    dir.Close();
}
/// <summary>
/// Indexes the same document four times, assigns strictly increasing norms to
/// the four copies via SetNorm, and asserts search scores come back in
/// strictly increasing order.
/// </summary>
public virtual void TestSetNorm_Renamed()
{
    RAMDirectory store = new RAMDirectory();
    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

    // add the same document four times
    IFieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
    Document d1 = new Document();
    d1.Add(f1);
    writer.AddDocument(d1, null);
    writer.AddDocument(d1, null);
    writer.AddDocument(d1, null);
    writer.AddDocument(d1, null);
    writer.Close();

    // reset the boost of each instance of this document
    IndexReader reader = IndexReader.Open((Directory) store, false, null);
    reader.SetNorm(0, "field", 1.0f, null);
    reader.SetNorm(1, "field", 2.0f, null);
    reader.SetNorm(2, "field", 4.0f, null);
    reader.SetNorm(3, "field", 16.0f, null);
    reader.Close();

    // check that searches are ordered by this boost
    float[] scores = new float[4];
    new IndexSearcher(store, true, null).Search(
        new TermQuery(new Term("field", "word")),
        new AnonymousClassCollector(scores, this),
        null);

    float lastScore = 0.0f;
    for (int i = 0; i < 4; i++)
    {
        Assert.IsTrue(scores[i] > lastScore);
        lastScore = scores[i];
    }
}
/// <summary>
/// For every third document i, swaps the tracked norm values between slots i
/// and (i*3)%count, and mirrors the swap into <paramref name="ir"/> via
/// SetNorm on field "f1". The reader is left open for the caller.
/// </summary>
private void ModifyNormsForF1(IndexReader ir)
{
    int docCount = ir.MaxDoc();
    for (int i = 0; i < docCount; i += 3)
    {
        // modify for every third doc
        int k = (i * 3) % modifiedNorms.Count;
        float normAtI = (float) ((System.Single) modifiedNorms[i]);
        float normAtK = (float) ((System.Single) modifiedNorms[k]);

        // swap the two tracked values and write them through the reader
        modifiedNorms[i] = (float) normAtK;
        modifiedNorms[k] = (float) normAtI;
        ir.SetNorm(i, "f" + 1, normAtK);
        ir.SetNorm(k, "f" + 1, normAtI);
    }
}
/// <summary>
/// Clones <paramref name="r1"/>, deletes a document and sets a norm on the
/// clone, then verifies: the original reader is unaffected, the deletion is
/// visible only on the clone, and attempting to modify the original throws
/// (the clone holds the write lock).
/// </summary>
/// <param name="r1">IndexReader to perform tests on</param>
/// <throws> Exception </throws>
private void PerformDefaultTests(IndexReader r1)
{
    float norm1 = Similarity.DecodeNorm(r1.Norms("field1")[4]);

    IndexReader clone = (IndexReader) r1.Clone();
    clone.DeleteDocument(10);
    clone.SetNorm(4, "field1", 0.5f);

    Assert.IsTrue(Similarity.DecodeNorm(r1.Norms("field1")[4]) == norm1);
    Assert.IsTrue(Similarity.DecodeNorm(clone.Norms("field1")[4]) != norm1);
    Assert.IsTrue(!r1.IsDeleted(10));
    Assert.IsTrue(clone.IsDeleted(10));

    // try to update the original reader, which should throw an exception
    Assert.Throws<LockObtainFailedException>(
        () => r1.DeleteDocument(11),
        "Tried to delete doc 11 and an exception should have been thrown");

    clone.Close();
}
/// <summary>
/// Builds a 35-doc index in <paramref name="dirName"/> (optionally compound),
/// adds one no-prox doc from a fresh writer, then deletes one doc (to produce
/// a .del file) and sets one norm (to produce a .s0 file).
/// </summary>
public virtual void CreateIndex(System.String dirName, bool doCFS)
{
    RmDir(dirName);
    dirName = FullDir(dirName);
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));

    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.UseCompoundFile = doCFS;
    writer.SetMaxBufferedDocs(10);
    for (int i = 0; i < 35; i++)
    {
        AddDoc(writer, i);
    }
    Assert.AreEqual(35, writer.MaxDoc(), "wrong doc count");
    writer.Close();

    // open fresh writer so we get no prx file in the added segment
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.UseCompoundFile = doCFS;
    writer.SetMaxBufferedDocs(10);
    AddNoProxDoc(writer);
    writer.Close();

    // Delete one doc so we get a .del file:
    IndexReader reader = IndexReader.Open(dir, false);
    Term searchTerm = new Term("id", "7");
    int delCount = reader.DeleteDocuments(searchTerm);
    Assert.AreEqual(1, delCount, "didn't delete the right number of documents");

    // Set one norm so we get a .s0 file:
    reader.SetNorm(21, "content", (float) 1.5);
    reader.Close();
}
/// <summary>
/// Commits N+1 times via reader-side delete/setNorm under a keep-last-N
/// deletion policy, then verifies exactly the last N commits survive and a
/// reader can be opened on each (working backwards through generations).
/// NOTE: the original text was wrapped mid-comment, leaving a physical line
/// that started with "//" and commented out live code; reconstructed here.
/// </summary>
public virtual void TestKeepLastNDeletionPolicyWithReader()
{
    int N = 10;

    for (int pass = 0; pass < 2; pass++)
    {
        bool useCompoundFile = (pass % 2) != 0;

        KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
        Directory dir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
        writer.UseCompoundFile = useCompoundFile;
        writer.Close();
        Term searchTerm = new Term("content", "aaa");
        Query query = new TermQuery(searchTerm);

        for (int i = 0; i < N + 1; i++)
        {
            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
            writer.UseCompoundFile = useCompoundFile;
            for (int j = 0; j < 17; j++)
            {
                AddDoc(writer);
            }
            // this is a commit
            writer.Close();
            IndexReader reader = IndexReader.Open(dir, policy, false);
            reader.DeleteDocument(3 * i + 1);
            reader.SetNorm(4 * i + 1, "content", 2.0F);
            IndexSearcher searcher = new IndexSearcher(reader);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(16 * (1 + i), hits.Length);
            // this is a commit
            reader.Close();
            searcher.Close();
        }
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
        writer.UseCompoundFile = useCompoundFile;
        writer.Optimize();
        // this is a commit
        writer.Close();

        Assert.AreEqual(2 * (N + 2), policy.numOnInit);
        Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);

        IndexSearcher searcher2 = new IndexSearcher(dir, false);
        ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
        Assert.AreEqual(176, hits2.Length);

        // Simplistic check: just verify only the past N segments_N's still
        // exist, and, I can open a reader on each:
        long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);

        dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
        int expectedCount = 176;

        for (int i = 0; i < N + 1; i++)
        {
            try
            {
                IndexReader reader = IndexReader.Open(dir, true);

                // Work backwards in commits on what the expected
                // count should be.
                searcher2 = new IndexSearcher(reader);
                hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
                if (i > 1)
                {
                    if (i % 2 == 0)
                    {
                        expectedCount += 1;
                    }
                    else
                    {
                        expectedCount -= 17;
                    }
                }
                Assert.AreEqual(expectedCount, hits2.Length);
                searcher2.Close();
                reader.Close();
                if (i == N)
                {
                    Assert.Fail("should have failed on commits before last 5");
                }
            }
            catch (System.IO.IOException)
            {
                // expected once we walk past the kept commits; rethrow with
                // "throw;" (not "throw e;") to preserve the stack trace
                if (i != N)
                {
                    throw;
                }
            }
            if (i < N)
            {
                dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
            }
            gen--;
        }
        dir.Close();
    }
}
/// <summary>
/// Same keep-last-N policy scenario but each iteration ends with an open-for-
/// create writer (which commits no changes); verifies commit counts and that
/// only the last N segments_N generations survive.
/// NOTE: the original text was wrapped mid-comment ("// Work " / bare
/// "backwards in commits..."), which does not compile; reconstructed here.
/// </summary>
public virtual void TestKeepLastNDeletionPolicyWithCreates()
{
    int N = 10;

    for (int pass = 0; pass < 4; pass++)
    {
        bool autoCommit = pass < 2;
        bool useCompoundFile = (pass % 2) > 0;

        KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
        Directory dir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
        writer.SetMaxBufferedDocs(10);
        writer.SetUseCompoundFile(useCompoundFile);
        writer.Close();
        Term searchTerm = new Term("content", "aaa");
        Query query = new TermQuery(searchTerm);

        for (int i = 0; i < N + 1; i++)
        {
            writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
            writer.SetMaxBufferedDocs(10);
            writer.SetUseCompoundFile(useCompoundFile);
            for (int j = 0; j < 17; j++)
            {
                AddDoc(writer);
            }
            // this is a commit when autoCommit=false:
            writer.Close();
            IndexReader reader = IndexReader.Open(dir, policy);
            reader.DeleteDocument(3);
            reader.SetNorm(5, "content", 2.0F);
            IndexSearcher searcher = new IndexSearcher(reader);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
            Assert.AreEqual(16, hits.Length);
            // this is a commit when autoCommit=false:
            reader.Close();
            searcher.Close();

            writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
            // This will not commit: there are no changes
            // pending because we opened for "create":
            writer.Close();
        }

        Assert.AreEqual(1 + 3 * (N + 1), policy.numOnInit);
        if (!autoCommit)
        {
            Assert.AreEqual(3 * (N + 1), policy.numOnCommit);
        }

        IndexSearcher searcher2 = new IndexSearcher(dir);
        ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).scoreDocs;
        Assert.AreEqual(0, hits2.Length);

        // Simplistic check: just verify only the past N segments_N's still
        // exist, and, I can open a reader on each:
        long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);

        dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
        int expectedCount = 0;

        for (int i = 0; i < N + 1; i++)
        {
            try
            {
                IndexReader reader = IndexReader.Open(dir);

                // Work backwards in commits on what the expected
                // count should be. Only check this in the
                // autoCommit false case:
                if (!autoCommit)
                {
                    searcher2 = new IndexSearcher(reader);
                    hits2 = searcher2.Search(query, null, 1000).scoreDocs;
                    Assert.AreEqual(expectedCount, hits2.Length);
                    searcher2.Close();
                    if (expectedCount == 0)
                    {
                        expectedCount = 16;
                    }
                    else if (expectedCount == 16)
                    {
                        expectedCount = 17;
                    }
                    else if (expectedCount == 17)
                    {
                        expectedCount = 0;
                    }
                }
                reader.Close();
                if (i == N)
                {
                    Assert.Fail("should have failed on commits before last " + N);
                }
            }
            catch (System.IO.IOException)
            {
                // rethrow with "throw;" (not "throw e;") to keep the stack trace
                if (i != N)
                {
                    throw;
                }
            }
            if (i < N)
            {
                dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
            }
            gen--;
        }
        dir.Close();
    }
}
/// <summary>
/// Builds an index, produces a .del and a separate-norms (.sN) file, resolves
/// the field number bound to "content", and verifies the directory contains
/// exactly the expected file names.
/// </summary>
public virtual void TestExactFileNames()
{
    System.String outputDir = "lucene.backwardscompat0.index";
    RmDir(outputDir);

    try
    {
        Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(FullDir(outputDir)));
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
        writer.SetRAMBufferSizeMB(16.0);
        for (int i = 0; i < 35; i++)
        {
            AddDoc(writer, i);
        }
        Assert.AreEqual(35, writer.MaxDoc(), "wrong doc count");
        writer.Close();

        // Delete one doc so we get a .del file:
        IndexReader reader = IndexReader.Open(dir, false);
        Term searchTerm = new Term("id", "7");
        int delCount = reader.DeleteDocuments(searchTerm);
        Assert.AreEqual(1, delCount, "didn't delete the right number of documents");

        // Set one norm so we get a .s0 file:
        reader.SetNorm(21, "content", (float) 1.5);
        reader.Close();

        // The numbering of fields can vary depending on which
        // JRE is in use. On some JREs we see content bound to
        // field 0; on others, field 1. So, here we have to
        // figure out which field number corresponds to
        // "content", and then set our expected file names below
        // accordingly:
        CompoundFileReader cfsReader = new CompoundFileReader(dir, "_0.cfs");
        FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
        int contentFieldIndex = -1;
        for (int i = 0; i < fieldInfos.Size(); i++)
        {
            FieldInfo fi = fieldInfos.FieldInfo(i);
            if (fi.name_ForNUnit.Equals("content"))
            {
                contentFieldIndex = i;
                break;
            }
        }
        cfsReader.Close();
        // BUGFIX: message previously said "_2.cfs", but this test reads the
        // field infos from the _0.cfs segment (see above).
        Assert.IsTrue(contentFieldIndex != -1, "could not locate the 'content' field number in the _0.cfs segment");

        // Now verify file names:
        System.String[] expected;
        expected = new System.String[] { "_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_3", "segments.gen" };

        System.String[] actual = dir.ListAll();
        System.Array.Sort(expected);
        System.Array.Sort(actual);
        if (!CollectionsHelper.Equals(expected, actual))
        {
            Assert.Fail("incorrect filenames in index: expected:\n " + AsString(expected) + "\n actual:\n " + AsString(actual));
        }
        dir.Close();
    }
    finally
    {
        RmDir(outputDir);
    }
}
/// <summary>
/// Forwards the norm update, unchanged, to the wrapped reader
/// <c>in_Renamed</c>.
/// </summary>
protected internal override void DoSetNorm(int d, System.String f, byte b, IState state)
{
    in_Renamed.SetNorm(d, f, b, state);
}
/// <summary>
/// Opens a pre-lockless index, adds ten docs, performs a delete and a SetNorm
/// against a pre-lockless segment, verifies searches see the changes, then
/// optimizes and re-verifies.
/// </summary>
public virtual void ChangeIndexWithAdds(System.String dirName)
{
    System.String origDirName = dirName;
    dirName = FullDir(dirName);
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));

    // open writer
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);

    // add 10 docs
    for (int i = 0; i < 10; i++)
    {
        AddDoc(writer, 35 + i);
    }

    // make sure writer sees right total -- writer seems not to know about deletes in .del?
    int expected;
    if (Compare(origDirName, "24") < 0)
    {
        expected = 45;
    }
    else
    {
        expected = 46;
    }
    Assert.AreEqual(expected, writer.MaxDoc(), "wrong doc count");
    writer.Close();

    // make sure searching sees right # hits
    IndexSearcher searcher = new IndexSearcher(dir, true);
    ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Document d = searcher.Doc(hits[0].Doc);
    Assert.AreEqual("21", d.Get("id"), "wrong first document");
    TestHits(hits, 44, searcher.IndexReader);
    searcher.Close();

    // make sure we can do delete & setNorm against this
    // pre-lockless segment:
    IndexReader reader = IndexReader.Open(dir, false);
    Term searchTerm = new Term("id", "6");
    int delCount = reader.DeleteDocuments(searchTerm);
    Assert.AreEqual(1, delCount, "wrong delete count");
    reader.SetNorm(22, "content", (float) 2.0);
    reader.Close();

    // make sure they "took":
    searcher = new IndexSearcher(dir, true);
    hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(43, hits.Length, "wrong number of hits");
    d = searcher.Doc(hits[0].Doc);
    Assert.AreEqual("22", d.Get("id"), "wrong first document");
    TestHits(hits, 43, searcher.IndexReader);
    searcher.Close();

    // optimize
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.Optimize();
    writer.Close();

    searcher = new IndexSearcher(dir, true);
    hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    Assert.AreEqual(43, hits.Length, "wrong number of hits");
    d = searcher.Doc(hits[0].Doc);
    TestHits(hits, 43, searcher.IndexReader);
    Assert.AreEqual("22", d.Get("id"), "wrong first document");
    searcher.Close();

    dir.Close();
}
/// <summary>
/// Plants a collection of bogus index files (extra .del, separate-norms,
/// segment, fnm, deletable, old segments files) and verifies that opening and
/// closing an IndexWriter deletes exactly the unreferenced ones.
/// NOTE: the Assert.Fail message literal was previously broken across a
/// physical line (a regular string cannot span lines in C#); rejoined here.
/// </summary>
public virtual void TestDeleteLeftoverFiles()
{
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetMaxBufferedDocs(10);
    int i;
    for (i = 0; i < 35; i++)
    {
        AddDoc(writer, i);
    }
    writer.SetUseCompoundFile(false);
    for (; i < 45; i++)
    {
        AddDoc(writer, i);
    }
    writer.Close();

    // Delete one doc so we get a .del file:
    IndexReader reader = IndexReader.Open(dir);
    Term searchTerm = new Term("id", "7");
    int delCount = reader.DeleteDocuments(searchTerm);
    Assert.AreEqual(1, delCount, "didn't delete the right number of documents");

    // Set one norm so we get a .s0 file:
    reader.SetNorm(21, "content", (float) 1.5);
    reader.Close();

    // Now, artificially create an extra .del file & extra
    // .s0 file:
    System.String[] files = dir.ListAll();

    /*
    * for(int j=0;j<files.length;j++) {
    * System.out.println(j + ": " + files[j]);
    * }
    */

    // The numbering of fields can vary depending on which
    // JRE is in use. On some JREs we see content bound to
    // field 0; on others, field 1. So, here we have to
    // figure out which field number corresponds to
    // "content", and then set our expected file names below
    // accordingly:
    CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
    FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
    int contentFieldIndex = -1;
    for (i = 0; i < fieldInfos.Size(); i++)
    {
        FieldInfo fi = fieldInfos.FieldInfo(i);
        if (fi.name_ForNUnit.Equals("content"))
        {
            contentFieldIndex = i;
            break;
        }
    }
    cfsReader.Close();
    Assert.IsTrue(contentFieldIndex != -1, "could not locate the 'content' field number in the _2.cfs segment");

    System.String normSuffix = "s" + contentFieldIndex;

    // Create a bogus separate norms file for a
    // segment/field that actually has a separate norms file
    // already:
    CopyFile(dir, "_2_1." + normSuffix, "_2_2." + normSuffix);

    // Create a bogus separate norms file for a
    // segment/field that actually has a separate norms file
    // already, using the "not compound file" extension:
    CopyFile(dir, "_2_1." + normSuffix, "_2_2.f" + contentFieldIndex);

    // Create a bogus separate norms file for a
    // segment/field that does not have a separate norms
    // file already:
    CopyFile(dir, "_2_1." + normSuffix, "_1_1." + normSuffix);

    // Create a bogus separate norms file for a
    // segment/field that does not have a separate norms
    // file already using the "not compound file" extension:
    CopyFile(dir, "_2_1." + normSuffix, "_1_1.f" + contentFieldIndex);

    // Create a bogus separate del file for a
    // segment that already has a separate del file:
    CopyFile(dir, "_0_1.del", "_0_2.del");

    // Create a bogus separate del file for a
    // segment that does not yet have a separate del file:
    CopyFile(dir, "_0_1.del", "_1_1.del");

    // Create a bogus separate del file for a
    // non-existent segment:
    CopyFile(dir, "_0_1.del", "_188_1.del");

    // Create a bogus segment file:
    CopyFile(dir, "_0.cfs", "_188.cfs");

    // Create a bogus fnm file when the CFS already exists:
    CopyFile(dir, "_0.cfs", "_0.fnm");

    // Create a deletable file:
    CopyFile(dir, "_0.cfs", "deletable");

    // Create some old segments file:
    CopyFile(dir, "segments_3", "segments");
    CopyFile(dir, "segments_3", "segments_2");

    // Create a bogus cfs file shadowing a non-cfs segment:
    CopyFile(dir, "_2.cfs", "_3.cfs");

    System.String[] filesPre = dir.ListAll();

    // Open & close a writer: it should delete the above 4
    // files and nothing more:
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
    writer.Close();

    System.String[] files2 = dir.ListAll();
    dir.Close();

    System.Array.Sort(files);
    System.Array.Sort(files2);

    System.Collections.Hashtable dif = DifFiles(files, files2);

    if (!SupportClass.CollectionsHelper.Equals(files, files2))
    {
        Assert.Fail("IndexFileDeleter failed to delete unreferenced extra files: should have deleted " + (filesPre.Length - files.Length) + " files but only deleted " + (filesPre.Length - files2.Length) + "; expected files:\n " + AsString(files) + "\n actual files:\n " + AsString(files2) + "\ndif: " + SupportClass.CollectionsHelper.CollectionToString(dif));
    }
}
/// <summary>
/// Forwards the norm update, unchanged, to the wrapped reader
/// <c>in_Renamed</c>.
/// </summary>
protected internal override void DoSetNorm(int d, string f, byte b)
{
    in_Renamed.SetNorm(d, f, b);
}
/// <summary>
/// For every third document i, swaps the tracked norm values between slots i
/// and (i*3)%count, and mirrors the swap into <paramref name="ir"/> via
/// SetNorm on field "f1". The reader is left open for the caller.
/// </summary>
private void ModifyNormsForF1(IndexReader ir)
{
    int docCount = ir.MaxDoc;
    for (int i = 0; i < docCount; i += 3)
    {
        // modify for every third doc
        int k = (i * 3) % modifiedNorms.Count;
        float normAtI = (float) (modifiedNorms[i]);
        float normAtK = (float) (modifiedNorms[k]);

        // swap the two tracked values and write them through the reader
        modifiedNorms[i] = normAtK;
        modifiedNorms[k] = normAtI;
        ir.SetNorm(i, "f" + 1, normAtK);
        ir.SetNorm(k, "f" + 1, normAtI);
    }
}
/// <summary>
/// Exercises MatchAllDocsQuery with and without norms-based scoring, after a
/// SetNorm change, under boolean combinations that trigger skipTo(), after a
/// deletion, and via round-tripping through the query parser.
/// </summary>
public virtual void TestQuery()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED, null);
    iw.SetMaxBufferedDocs(2); // force multi-segment
    AddDoc("one", iw, 1f);
    AddDoc("two", iw, 20f);
    AddDoc("three four", iw, 300f);
    iw.Close();

    IndexReader ir = IndexReader.Open((Directory) dir, false, null);
    IndexSearcher is_Renamed = new IndexSearcher(ir);
    ScoreDoc[] hits;

    // assert with norms scoring turned off
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000, null).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "three four");

    // assert with norms scoring turned on
    MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000, null).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "three four");
    Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "one");

    // change norm & retest
    ir.SetNorm(0, "key", 400f, null);
    normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000, null).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "three four");
    Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "two");

    // some artificial queries to trigger the use of skipTo():
    BooleanQuery bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000, null).ScoreDocs;
    Assert.AreEqual(3, hits.Length);

    bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    bq.Add(new TermQuery(new Term("key", "three")), Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000, null).ScoreDocs;
    Assert.AreEqual(1, hits.Length);

    // delete a document:
    is_Renamed.IndexReader.DeleteDocument(0, null);
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString()
    QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "key", analyzer);
    hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString() with non default boost
    Query maq = new MatchAllDocsQuery();
    maq.Boost = 2.3f;
    Query pq = qp.Parse(maq.ToString());
    hits = is_Renamed.Search(pq, null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    is_Renamed.Close();
    ir.Close();
    dir.Close();
}