Example #1
 public void Can_modify_doc_in_index_and_retrieve_by_secondary_key() {
     var index = new TestIndex();
     var doc = new XDoc("doc").Elem("foo", "bar");
     var d = new Document();
     d.Add(new Field("id", "123", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field("foo", "bar", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search(index.Parse("foo:bar"));
     Assert.AreEqual(1, hits.Count());
     var d2 = hits.First().Document;
     Assert.AreEqual("123", d2.Get("id"));
     Assert.AreEqual(doc.ToString(), d2.Get("doc"));
     d2.RemoveField("doc");
     d2.RemoveField("foo");
     doc = new XDoc("doc").Elem("foo", "baz");
     d2.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
     d2.Add(new Field("foo", "baz", Field.Store.NO, Field.Index.ANALYZED));
     index.Update(new Term("id", "123"), d2);
     hits = index.Search(index.Parse("foo:baz"));
     Assert.AreEqual(1, hits.Count());
     var d3 = hits.First().Document;
     Assert.AreEqual("123", d3.Get("id"));
     Assert.AreEqual(doc.ToString(), d3.Get("doc"));
 }
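All of these tests are written against a small TestIndex helper (and, further down, against UntokenizedAnalyzer and FilenameAnalyzer) that wraps Lucene.Net but is not included in this listing. The sketch below is only meant to make the examples readable on their own: it assumes Lucene.Net 3.0-era APIs (RAMDirectory, IndexWriter, QueryParser, IndexSearcher), and the SearchResult type and every member signature are inferred from how the tests call them, not taken from the real implementation. A few examples (the ones calling hits.Length(), hits.Doc(i) and hits.Score(i)) clearly come from an older variant where Search returned Lucene's legacy Hits object; those calls are left exactly as they appear in their source.

// Hypothetical sketch only -- the real TestIndex used by these tests is not shown here.
using System.Collections.Generic;
using System.Linq;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Version = Lucene.Net.Util.Version;

public class SearchResult {
    public Document Document { get; set; }
    public float Score { get; set; }
}

public class TestIndex {
    private readonly Lucene.Net.Store.Directory _directory = new RAMDirectory();
    private readonly Analyzer _analyzer;

    public TestIndex() : this(new StandardAnalyzer(Version.LUCENE_30)) { }

    public TestIndex(Analyzer analyzer) {
        _analyzer = analyzer;
    }

    // Default field name; "content" is inferred from the assertion in Can_find_file_with_dash_in_it.
    public string Default { get { return "content"; } }

    public void Add(Document document) {
        using(var writer = NewWriter()) {
            writer.AddDocument(document);
        }
    }

    // Deletes any document matching 'term', then adds 'document'; also works when nothing matches yet.
    public void Update(Term term, Document document) {
        using(var writer = NewWriter()) {
            writer.UpdateDocument(term, document);
        }
    }

    public Query Parse(string queryText) {
        return new QueryParser(Version.LUCENE_30, Default, _analyzer).Parse(queryText);
    }

    public IEnumerable<SearchResult> Search(string queryText) { return Search(Parse(queryText)); }
    public IEnumerable<SearchResult> Search(string queryText, Sort sort) { return Search(Parse(queryText), sort); }
    public IEnumerable<SearchResult> Search(Query query) { return Search(query, Sort.RELEVANCE); }

    public IEnumerable<SearchResult> Search(Query query, Sort sort) {
        var searcher = new IndexSearcher(_directory, true);
        return searcher.Search(query, null, 100, sort).ScoreDocs
            .Select(sd => new SearchResult { Document = searcher.Doc(sd.Doc), Score = sd.Score })
            .ToList();
    }

    private IndexWriter NewWriter() {
        return new IndexWriter(_directory, _analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
    }
}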
Example #3
 public void UntokenizedAnalyzer_field_treats_whitespace_as_part_of_token() {
     var index = new TestIndex(new UntokenizedAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "foo bar", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("\"foo bar\"");
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
 }
Example #4
        public void UntokenizedAnalyzer_field_treats_whitespace_as_part_of_token()
        {
            var index = new TestIndex(new UntokenizedAnalyzer());
            var d     = new Document();

            d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field(index.Default, "foo bar", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search("\"foo bar\"");

            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
        }
Example #5
 public void UntokenizedAnalyzer_field_is_case_insensitive() {
     var index = new TestIndex(new UntokenizedAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "FOO", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("foo");
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
     hits = index.Search("FOO");
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
 }
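UntokenizedAnalyzer is likewise project-specific and not shown here. The preceding tests (whitespace kept inside the token, case-insensitive matching) are consistent with an analyzer that emits the whole field value as a single lower-cased token; a minimal sketch along those lines, using Lucene's stock KeywordTokenizer and LowerCaseFilter, might look like this (an assumption, not the project's actual class):

using System.IO;
using Lucene.Net.Analysis;

// Hypothetical sketch of UntokenizedAnalyzer: the entire field value becomes one lower-cased token.
public class UntokenizedAnalyzer : Analyzer {
    public override TokenStream TokenStream(string fieldName, TextReader reader) {
        // KeywordTokenizer emits the whole input as a single token, so embedded
        // whitespace stays part of the term; LowerCaseFilter makes indexing and
        // query analysis case-insensitive.
        return new LowerCaseFilter(new KeywordTokenizer(reader));
    }
}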
Example #6
        public void UntokenizedAnalyzer_field_accepts_whitespace_term_followed_by_wildcards()
        {
            var index = new TestIndex(new UntokenizedAnalyzer());
            var d     = new Document();

            d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field(index.Default, "FOO BAR", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search("foo\\ ba*");

            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
        }
Example #7
        public void FilenameAnalyzer_field_treats_whitespace_underscore_and_dash_the_same()
        {
            var index = new TestIndex(new FilenameAnalyzer());
            var d     = new Document();

            d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field(index.Default, "foo bar-baz_boom", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search("foo_bar\\ baz-boom");

            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
        }
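FilenameAnalyzer is also project-specific. The test above passes when spaces, underscores and dashes all analyze to the same term boundaries; one way to get that behaviour with stock Lucene.Net parts is to map '_' and '-' to spaces ahead of a whitespace tokenizer, as in the hypothetical sketch below (the real class may well differ):

using System.IO;
using Lucene.Net.Analysis;

// Hypothetical sketch of FilenameAnalyzer: '_' and '-' are treated like whitespace.
public class FilenameAnalyzer : Analyzer {
    private static readonly NormalizeCharMap SeparatorMap = CreateMap();

    private static NormalizeCharMap CreateMap() {
        var map = new NormalizeCharMap();
        map.Add("_", " ");
        map.Add("-", " ");
        return map;
    }

    public override TokenStream TokenStream(string fieldName, TextReader reader) {
        // Fold '_' and '-' into spaces before tokenizing, so "foo bar-baz_boom" and
        // "foo_bar baz-boom" produce the same term sequence; then lower-case the terms.
        var normalized = new MappingCharFilter(SeparatorMap, CharReader.Get(reader));
        return new LowerCaseFilter(new WhitespaceTokenizer(normalized));
    }
}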
Example #8
 public void Can_inverse_score_search() {
     var index = new TestIndex();
     var d = new Document();
     d.Add(new Field(index.Default, "Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     d = new Document();
     d.Add(new Field(index.Default, "Foo Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     d = new Document();
     d.Add(new Field(index.Default, "Foo Foo Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("Foo",new Sort(new SortField(SortField.FIELD_SCORE.GetField(),SortField.SCORE,true)));
     Assert.AreEqual(3,hits.Length());
     Assert.Less(hits.Score(0), hits.Score(1));
     Assert.Less(hits.Score(1), hits.Score(2));
 }
Example #9
        public void UntokenizedAnalyzer_field_is_case_insensitive()
        {
            var index = new TestIndex(new UntokenizedAnalyzer());
            var d     = new Document();

            d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field(index.Default, "FOO", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search("foo");

            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
            hits = index.Search("FOO");
            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
        }
Example #10
        public void Can_use_update_for_new_document()
        {
            var index = new TestIndex();
            var doc   = new XDoc("doc").Elem("foo", "bar");
            var d     = new Document();

            d.Add(new Field("id", "123", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("foo", "bar", Field.Store.NO, Field.Index.ANALYZED));
            index.Update(new Term("id", "123"), d);
            var hits = index.Search(index.Parse("foo:bar"));

            Assert.AreEqual(1, hits.Count());
            var d2 = hits.First().Document;

            Assert.AreEqual("123", d2.Get("id"));
        }
Example #11
        public void Can_find_file_with_dash_in_it()
        {
            var index = new TestIndex(new UntokenizedAnalyzer());
            var d     = new Document();

            d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field(index.Default, "cube-teal", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            Console.WriteLine("query");
            var query = index.Parse("CUBE-*");

            Assert.AreEqual("content:cube-*", query.ToString());
            var hits = index.Search(query);

            Assert.AreEqual(1, hits.Count());
            Assert.AreEqual("a", hits.First().Document.Get("id"));
        }
Example #12
 public void Can_inverse_score_search() {
     var index = new TestIndex();
     var d = new Document();
     d.Add(new Field(index.Default, "Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     d = new Document();
     d.Add(new Field(index.Default, "Foo Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     d = new Document();
     d.Add(new Field(index.Default, "Foo Foo Foo", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("Foo", new Sort(new SortField(SortField.FIELD_SCORE.GetField(), SortField.SCORE, true)));
     Assert.AreEqual(3, hits.Count());
     var score0 = hits.ElementAt(0).Score;
     var score1 = hits.ElementAt(1).Score;
     var score2 = hits.ElementAt(2).Score;
     Assert.Less(score0, score1);
     Assert.Less(score1, score2);
 }
Example #13
        public void Update_does_not_keep_unstored_fields()
        {
            var index = new TestIndex();
            var doc   = new XDoc("doc").Elem("foo", "bar");
            var d     = new Document();

            d.Add(new Field("id", "123", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("foo", "bar", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search(index.Parse("foo:bar"));

            Assert.AreEqual(1, hits.Count());
            var d2 = hits.First().Document;

            index.Update(new Term("id", "123"), d2);
            hits = index.Search(index.Parse("foo:bar"));
            Assert.AreEqual(0, hits.Count());
        }
Example #14
        public void RemoveFields_removes_all_occurences()
        {
            var index = new TestIndex();
            var doc   = new XDoc("doc").Elem("foo", "bar");
            var d     = new Document();

            d.Add(new Field("id", "123", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("foo", "bar", Field.Store.YES, Field.Index.ANALYZED));
            d.Add(new Field("foo", "baz", Field.Store.YES, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search(index.Parse("foo:bar"));

            Assert.AreEqual(1, hits.Count());
            var d2     = hits.First().Document;
            var fields = d2.GetFields("foo");

            Assert.AreEqual(2, fields.Length);
            d2.RemoveFields("foo");
            fields = d2.GetFields("foo");
            Assert.AreEqual(0, fields.Length);
        }
Example #15
        public void Can_add_same_field_multiple_times()
        {
            var index = new TestIndex();
            var doc   = new XDoc("doc").Elem("foo", "bar");
            var d     = new Document();

            d.Add(new Field("id", "123", Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("doc", doc.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.Add(new Field("foo", "bar", Field.Store.NO, Field.Index.ANALYZED));
            d.Add(new Field("foo", "baz", Field.Store.NO, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search(index.Parse("foo:bar"));

            Assert.AreEqual(1, hits.Count());
            var d2 = hits.First().Document;

            Assert.AreEqual("123", d2.Get("id"));
            hits = index.Search(index.Parse("foo:baz"));
            Assert.AreEqual(1, hits.Count());
            var d3 = hits.First().Document;

            Assert.AreEqual("123", d3.Get("id"));
        }
Example #17
        public void Can_query_for_all_documents()
        {
            var index = new TestIndex();
            var d     = new Document();

            d.Add(new Field(index.Default, "Foo", Field.Store.YES, Field.Index.ANALYZED));
            index.Add(d);
            d = new Document();
            d.Add(new Field(index.Default, "Foo Foo", Field.Store.YES, Field.Index.ANALYZED));
            index.Add(d);
            d = new Document();
            d.Add(new Field(index.Default, "bar", Field.Store.YES, Field.Index.ANALYZED));
            index.Add(d);
            var hits = index.Search(new MatchAllDocsQuery());

            Assert.AreEqual(3, hits.Count());
            var matches = new List <string>();

            foreach (var result in hits)
            {
                matches.Add(result.Document.Get(index.Default));
            }
            Assert.AreEqual(new[] { "bar", "Foo", "Foo Foo" }, matches.OrderBy(x => x).ToArray());
        }
Example #20
 public void FilenameAnalyzer_field_treats_whitespace_underscore_and_dash_the_same() {
     var index = new TestIndex(new FilenameAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "foo bar-baz_boom", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("foo_bar\\ baz-boom");
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
 }
Example #21
 public void Can_find_file_with_dash_in_it() {
     var index = new TestIndex(new UntokenizedAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "cube-teal", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     Console.WriteLine("query");
     var query = index.Parse("CUBE-*");
     Assert.AreEqual("content:cube-*",query.ToString());
     var hits = index.Search(query);
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
 }
Example #22
 public void UntokenizedAnalyzer_field_accepts_whitespace_term_followed_by_wildcards() {
     var index = new TestIndex(new UntokenizedAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "FOO BAR", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("foo\\ ba*");
     Assert.AreEqual(1, hits.Length());
     Assert.AreEqual("a", hits.Doc(0).Get("id"));
 }
Example #26
 public void UntokenizedAnalyzer_field_accepts_wildcards() {
     var index = new TestIndex(new UntokenizedAnalyzer());
     var d = new Document();
     d.Add(new Field("id", "a", Field.Store.YES, Field.Index.UN_TOKENIZED));
     d.Add(new Field(index.Default, "FOO BAR", Field.Store.NO, Field.Index.ANALYZED));
     index.Add(d);
     var hits = index.Search("foo*");
     Assert.AreEqual(1, hits.Count());
     Assert.AreEqual("a", hits.First().Document.Get("id"));
 }