/// <summary>
/// Releases the fixture's reader, searcher and directory, then delegates
/// to the base class teardown.
/// </summary>
public new void TearDown()
{
    // Close in the original acquisition-reverse order: reader and searcher
    // before the directory that backs them, then the base fixture.
    reader.Close();
    searcher.Close();
    directory.Close();
    base.TearDown();
}
/// <summary>
/// Clears all results from the current index. During the next search the
/// index will be rebuilt.
/// </summary>
public void Clear()
{
    // Take the writer lock so no concurrent search observes a half-closed reader.
    lck.AcquireWriterLock(WriterTimeOut);
    try
    {
        // Dropping the reader is what forces the rebuild on the next search.
        if (rd != null)
        {
            rd.Close();
            rd = null;
        }
    }
    finally
    {
        // Always release, even if Close throws.
        lck.ReleaseWriterLock();
    }
}
/// <summary>
/// Verifies that a TermsFilter matches only terms that actually exist in the
/// index: multiples of ten were indexed, so "19" and "00" never match.
/// </summary>
public void testMissingTerms()
{
    const string fieldName = "field1";
    Directory indexDir = new RAMDirectory();
    var writer = new IndexWriter(indexDir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    for (int docNum = 0; docNum < 100; docNum++)
    {
        var doc = new Document();
        int term = docNum * 10; // terms are units of 10
        doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(doc);
    }
    IndexReader reader = writer.GetReader();
    writer.Close();

    var filter = new TermsFilter();

    // "19" was never indexed, so the filter matches nothing.
    filter.AddTerm(new Term(fieldName, "19"));
    var bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

    // "20" exists exactly once.
    filter.AddTerm(new Term(fieldName, "20"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

    // "10" also exists; the filter now matches two documents.
    filter.AddTerm(new Term(fieldName, "10"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

    // Zero was indexed as "0", not "00", so the count stays at two.
    filter.AddTerm(new Term(fieldName, "00"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

    reader.Close();
    indexDir.Close();
}
/// <summary>
/// Indexes a handful of cities as geospatial points, then searches for all
/// points within a radius of an origin near Oslo Spektrum and prints the names.
/// </summary>
static void Main(string[] args)
{
    // Use the finest geohash resolution the prefix tree supports.
    int maxLevels = GeohashPrefixTree.GetMaxLevelsPossible();
    strategy = new RecursivePrefixTreeStrategy(new GeohashPrefixTree(context, maxLevels));

    var directory = new RAMDirectory();
    var indexWriter = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);

    // AddPoint takes (writer, name, longitude, latitude).
    AddPoint(indexWriter, "London", -81.233040, 42.983390);
    AddPoint(indexWriter, "East New York", -73.882360, 40.666770);
    AddPoint(indexWriter, "Manhattan", -73.966250, 40.783430);
    AddPoint(indexWriter, "New York City", -74.005970, 40.714270);
    AddPoint(indexWriter, "Oslo", 10.746090, 59.912730);
    AddPoint(indexWriter, "Bergen", 5.324150, 60.392990);
    AddPoint(indexWriter, "Washington, D. C.", -77.036370, 38.895110);
    indexWriter.Close();

    // Origin point - Oslo Spektrum
    const double lat = 59.9138688;
    const double lng = 10.752245399999993;
    const double radius = 600;

    // Build a "within circle" query around the origin.
    var query = strategy.MakeQuery(
        new SpatialArgs(SpatialOperation.IsWithin, context.MakeCircle(lng, lat, radius)),
        fieldInfo);

    var indexSearcher = new IndexSearcher(directory);
    var topDocs = indexSearcher.Search(query, null, 100);
    foreach (var scoreDoc in topDocs.ScoreDocs)
    {
        var cityName = indexSearcher.Doc(scoreDoc.doc).Get("Name");
        Console.WriteLine(cityName);
    }

    indexSearcher.Close();
    directory.Close();
}
/// <summary>
/// Indexes <paramref name="whatToIndex"/> into a fresh in-memory index and
/// asserts that parsing and running <paramref name="whatToSearch"/> as a
/// query yields exactly one hit.
/// </summary>
/// <param name="whatToIndex">Text stored in the "content" field.</param>
/// <param name="whatToSearch">Query string expected to match that text.</param>
protected void AssertFoundInText(string whatToIndex, string whatToSearch)
{
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, analyzer, true, new IndexWriter.MaxFieldLength(10000));
    var doc = new Document();
    doc.Add(new Field("content", whatToIndex, Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();
    writer = null;

    IndexSearcher searcher = new IndexSearcher(dir, true); // read-only=true
    var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "content", analyzer);
    Query query = parser.Parse(whatToSearch);
    var hits = searcher.Search(query, null, 1000).ScoreDocs;
    Assert(hits.Length == 1);

    searcher.Close();
    dir.Close();
}
/// <summary>
/// Closes the reader, searcher and directory opened for this fixture.
/// </summary>
public void TearDown()
{
    // Reader and searcher go first; the directory backing them goes last.
    reader.Close();
    searcher.Close();
    directory.Close();
}
/// <summary>
/// Annotates the given sequence of <see cref="Document"/> objects by adding a <b>_highlight</b> field;
/// the <b>_highlight</b> field will contain the best matching text fragment from the <see cref="Document"/>
/// object's full-text field.
/// </summary>
/// <param name="hits">The sequence of <see cref="Document"/> objects.</param>
/// <param name="criteria">The search criteria that produced the hits.</param>
/// <returns>
/// The original sequence of Document objects, with a <b>_highlight</b> field added to each Document.
/// </returns>
/// <exception cref="ArgumentNullException">When <paramref name="hits"/> or <paramref name="criteria"/> is null.</exception>
/// <exception cref="ArgumentException">When <paramref name="criteria"/> has an empty query.</exception>
public static IEnumerable<Document> GenerateHighlights(this IEnumerable<Document> hits, SearchCriteria criteria)
{
    if (hits == null)
        throw new ArgumentNullException(nameof(hits));
    if (criteria == null)
        throw new ArgumentNullException(nameof(criteria));
    if (String.IsNullOrWhiteSpace(criteria.Query))
        throw new ArgumentException("SearchCriteria.Query cannot be empty");

    // Materialize once so we index and return the same document instances.
    var documents = hits.ToList();
    try
    {
        var indexDirectory = new RAMDirectory();
        var analyzer = new FullTextAnalyzer();
        var config = new IndexWriterConfig(analyzer).SetRAMBufferSizeMB(_ramBufferSizeMB);
        var writer = new IndexWriter(indexDirectory, config);
        try
        {
            // Build a throwaway in-memory index and use it to compute highlights.
            BuidIndex(documents, writer);
            GenerateHighlights(documents, writer, criteria);

            writer.DeleteAll();
            writer.Commit();
        }
        finally
        {
            // BUGFIX: previously the writer and directory leaked whenever
            // indexing or highlighting threw; release them unconditionally.
            writer.Close();
            indexDirectory.Close();
        }
    }
    catch (Exception ex)
    {
        // Best-effort: highlighting failures are logged, not propagated;
        // callers still get the (possibly un-highlighted) documents back.
        _log.Error(ex);
    }
    return documents;
}
/// <summary>
/// Releases the directory opened for this fixture.
/// </summary>
public void TearDown()
{
    directory.Close();
}
/// <summary>
/// Builds the index, runs a single-term search on the "title" field for
/// "lucene", then releases the in-memory directory.
/// </summary>
public void Run()
{
    CreateIndex();
    SearchSingleTerm("title", "lucene");
    ramDirectory.Close();
}