// Requires: System, System.Collections, Lucene.Net.Analysis, Lucene.Net.Documents,
// Lucene.Net.Index, Lucene.Net.Search, Lucene.Net.Store
static void Main(string[] args)
{
    // Build a small in-memory index with two sample documents.
    Directory index = new RAMDirectory();
    Analyzer analyzer = new KeywordAnalyzer();
    IndexWriter writer = new IndexWriter(index, analyzer, true);

    Document doc = new Document();
    doc.Add(new Field("title", "t1", Field.Store.YES, Field.Index.TOKENIZED));
    writer.AddDocument(doc);

    doc = new Document();
    doc.Add(new Field("title", "t2", Field.Store.YES, Field.Index.TOKENIZED));
    writer.AddDocument(doc);

    writer.Close();

    // Match every document, run it through the custom filter,
    // and sort by title in reverse order.
    Searcher searcher = new IndexSearcher(index);
    Query query = new MatchAllDocsQuery();
    Filter filter = new LuceneCustomFilter();
    Sort sort = new Sort("title", true);

    Hits hits = searcher.Search(query, filter, sort);
    IEnumerator hitsEnumerator = hits.Iterator();
    while (hitsEnumerator.MoveNext())
    {
        Hit hit = (Hit)hitsEnumerator.Current;
        Console.WriteLine(hit.GetDocument().GetField("title").StringValue());
    }
}
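The LuceneCustomFilter passed to Searcher.Search above is defined elsewhere in the article; the snippet only shows how a filter is combined with a query and a sort. As a rough sketch of the shape such a filter takes under the Lucene.Net 2.x API (a filter overrides Bits(IndexReader) and returns a BitArray with one bit per document), a hypothetical pass-through filter could look like the following. The class name and its behavior are illustrative only, not the article's implementation:

// Illustrative only: a hypothetical filter that lets every non-deleted
// document through. A real custom filter would leave bits cleared for
// documents that should be excluded from the results.
public class SampleAllDocsFilter : Lucene.Net.Search.Filter
{
    public override System.Collections.BitArray Bits(Lucene.Net.Index.IndexReader reader)
    {
        System.Collections.BitArray bits =
            new System.Collections.BitArray(reader.MaxDoc(), false);
        for (int docId = 0; docId < reader.MaxDoc(); docId++)
        {
            if (!reader.IsDeleted(docId))
                bits.Set(docId, true);
        }
        return bits;
    }
}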
/// <summary>
/// This constructor is used while retrieving the hit from the dump
/// </summary>
/// <param name="ixr">The dump indexer this Wiki topic belongs to</param>
/// <param name="hit">The Lucene Hit object</param>
public PageInfo(Indexer ixr, Hit hit)
{
    TreatRedirectException = false;
    Indexer = ixr;
    // The indexer also acts as the decoder; Beginnings and Ends are sorted below.
    _decoder = ixr;
    Score = hit.GetScore();

    Document doc = hit.GetDocument();
    TopicId = Convert.ToInt64(doc.GetField("topicid").StringValue());
    Name = doc.GetField("title").StringValue();

    Beginnings = new long[doc.GetFields("beginning").Length];
    Ends = new long[doc.GetFields("end").Length];

    // Each "beginning"/"end" value is stored as an 8-byte binary field.
    int i = 0;
    foreach (byte[] binVal in doc.GetBinaryValues("beginning"))
    {
        Beginnings[i] = BitConverter.ToInt64(binVal, 0);
        i++;
    }

    i = 0;
    foreach (byte[] binVal in doc.GetBinaryValues("end"))
    {
        Ends[i] = BitConverter.ToInt64(binVal, 0);
        i++;
    }

    Array.Sort(Beginnings);
    Array.Sort(Ends);
}
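For this constructor to work, the indexing side must have stored each topic's offsets as binary stored fields named "beginning" and "end" (one 8-byte long per field instance), alongside the stored "topicid" and "title" fields. The article's real Indexer code is not shown here; as a minimal, hypothetical sketch, assuming the Lucene.Net 2.x binary Field constructor Field(string, byte[], Field.Store) and illustrative local variables (topicId, title, beginnings, ends, writer), the write side could look like this:

// Hypothetical indexing-side counterpart (not the article's Indexer code):
// store each offset as an 8-byte binary stored field so that
// Document.GetBinaryValues() can read it back as shown above.
Document doc = new Document();
doc.Add(new Field("topicid", topicId.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
doc.Add(new Field("title", title, Field.Store.YES, Field.Index.TOKENIZED));
foreach (long beginning in beginnings)
    doc.Add(new Field("beginning", BitConverter.GetBytes(beginning), Field.Store.YES));
foreach (long end in ends)
    doc.Add(new Field("end", BitConverter.GetBytes(end), Field.Store.YES));
writer.AddDocument(doc);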