Example #1
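        // db4o metadata node: the label is the stored field's type (plus "[]" for arrays) followed by the field name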
        public StoredClassFieldNode(ExtObjectContainer store, StoredClass storedClass, StoredField storedField)
        {
            _store = store;
            _class = storedClass;
            _field = storedField;

            Text = _field.getStoredType().ToString();
            if (_field.isArray()) {
                Text += "[]";
            }
            Text += " " + _field.getName();
        }
Example #2
        public virtual void TestBinaryFieldInIndex()
        {
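            // a FieldType created with the default constructor is neither indexed nor stored; enabling Stored here yields a stored-only field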
            FieldType ft = new FieldType();
            ft.Stored = true;
            IndexableField binaryFldStored = new StoredField("binaryStored", (sbyte[])(Array)System.Text.UTF8Encoding.UTF8.GetBytes(BinaryValStored));
            IndexableField stringFldStored = new Field("stringStored", BinaryValStored, ft);

            Document doc = new Document();

            doc.Add(binaryFldStored);

            doc.Add(stringFldStored);

            // test for field count
            Assert.AreEqual(2, doc.Fields.Count);

            // add the doc to a ram index
            Directory dir = NewDirectory();
            Random r = new Random();
            RandomIndexWriter writer = new RandomIndexWriter(r, dir);
            writer.AddDocument(doc);

            // open a reader and fetch the document
            IndexReader reader = writer.Reader;
            Document docFromReader = reader.Document(0);
            Assert.IsTrue(docFromReader != null);

            // fetch the binary stored field and compare its content with the original one
            BytesRef bytes = docFromReader.GetBinaryValue("binaryStored");
            Assert.IsNotNull(bytes);

            string binaryFldStoredTest = Encoding.UTF8.GetString((byte[])(Array)bytes.Bytes, bytes.Offset, bytes.Length);
            //new string(bytes.Bytes, bytes.Offset, bytes.Length, IOUtils.CHARSET_UTF_8);
            Assert.IsTrue(binaryFldStoredTest.Equals(BinaryValStored));

            // fetch the string field and compare its content with the original one
            string stringFldStoredTest = docFromReader.Get("stringStored");
            Assert.IsTrue(stringFldStoredTest.Equals(BinaryValStored));

            writer.Dispose();
            reader.Dispose();
            dir.Dispose();
        }
Example #3
        public virtual void TestBinaryField()
        {
            Document doc = new Document();

            FieldType ft = new FieldType();
            ft.Stored = true;
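            // one stored string field plus two binary StoredFields that share the field name "binary"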
            IndexableField stringFld = new Field("string", BinaryVal, ft);
            IndexableField binaryFld = new StoredField("binary", BinaryVal.GetBytes(Encoding.UTF8));
            IndexableField binaryFld2 = new StoredField("binary", BinaryVal2.GetBytes(Encoding.UTF8));

            doc.Add(stringFld);
            doc.Add(binaryFld);

            Assert.AreEqual(2, doc.Fields.Count);

            Assert.IsTrue(binaryFld.BinaryValue() != null);
            Assert.IsTrue(binaryFld.FieldType().Stored);
            Assert.IsFalse(binaryFld.FieldType().Indexed);

            string binaryTest = doc.GetBinaryValue("binary").Utf8ToString();
            Assert.IsTrue(binaryTest.Equals(BinaryVal));

            string stringTest = doc.Get("string");
            Assert.IsTrue(binaryTest.Equals(stringTest));

            doc.Add(binaryFld2);

            Assert.AreEqual(3, doc.Fields.Count);

            BytesRef[] binaryTests = doc.GetBinaryValues("binary");

            Assert.AreEqual(2, binaryTests.Length);

            binaryTest = binaryTests[0].Utf8ToString();
            string binaryTest2 = binaryTests[1].Utf8ToString();

            Assert.IsFalse(binaryTest.Equals(binaryTest2));

            Assert.IsTrue(binaryTest.Equals(BinaryVal));
            Assert.IsTrue(binaryTest2.Equals(BinaryVal2));

            doc.RemoveField("string");
            Assert.AreEqual(2, doc.Fields.Count);

            doc.RemoveFields("binary");
            Assert.AreEqual(0, doc.Fields.Count);
        }
Example #4
        public void TestNumericField()
        {
            Directory dir     = NewDirectory();
            var       w       = new RandomIndexWriter(Random(), dir);
            var       numDocs = AtLeast(500);
            var       answers = new object[numDocs];

            FieldType.NumericType[] typeAnswers = new FieldType.NumericType[numDocs];
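            // for each doc: pick a numeric type at random and add the value twice, once as an indexed numeric field ("nf") and once as a plain StoredField with the same name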
            for (int id = 0; id < numDocs; id++)
            {
                Document doc = new Document();
                Field    nf;
                Field    sf;
                object   answer;
                FieldType.NumericType typeAnswer;
                if (Random().NextBoolean())
                {
                    // float/double
                    if (Random().NextBoolean())
                    {
                        float f = Random().NextFloat();
                        answer     = Convert.ToSingle(f);
                        nf         = new FloatField("nf", f, Field.Store.NO);
                        sf         = new StoredField("nf", f);
                        typeAnswer = FieldType.NumericType.FLOAT;
                    }
                    else
                    {
                        double d = Random().NextDouble();
                        answer     = Convert.ToDouble(d);
                        nf         = new DoubleField("nf", d, Field.Store.NO);
                        sf         = new StoredField("nf", d);
                        typeAnswer = FieldType.NumericType.DOUBLE;
                    }
                }
                else
                {
                    // int/long
                    if (Random().NextBoolean())
                    {
                        int i = Random().Next();
                        answer     = Convert.ToInt32(i);
                        nf         = new IntField("nf", i, Field.Store.NO);
                        sf         = new StoredField("nf", i);
                        typeAnswer = FieldType.NumericType.INT;
                    }
                    else
                    {
                        long l = Random().NextLong();
                        answer     = Convert.ToInt64(l);
                        nf         = new LongField("nf", l, Field.Store.NO);
                        sf         = new StoredField("nf", l);
                        typeAnswer = FieldType.NumericType.LONG;
                    }
                }
                doc.Add(nf);
                doc.Add(sf);
                answers[id]     = answer;
                typeAnswers[id] = typeAnswer;
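                // the "id" field is indexed with precisionStep = int.MaxValue so only the full-precision term is indexed; it is read back below through FieldCache to map each doc to its expected answer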
                FieldType ft = new FieldType(IntField.TYPE_STORED);
                ft.NumericPrecisionStep = int.MaxValue;
                doc.Add(new IntField("id", id, ft));
                w.AddDocument(doc);
            }
            DirectoryReader r = w.Reader;

            w.Dispose();

            Assert.AreEqual(numDocs, r.NumDocs);

            foreach (AtomicReaderContext ctx in r.Leaves)
            {
                AtomicReader    sub = (AtomicReader)ctx.Reader;
                FieldCache.Ints ids = FieldCache.DEFAULT.GetInts(sub, "id", false);
                for (int docID = 0; docID < sub.NumDocs; docID++)
                {
                    Document doc = sub.Document(docID);
                    Field    f   = (Field)doc.GetField("nf");
                    Assert.IsTrue(f is StoredField, "got f=" + f);
                    Assert.AreEqual(answers[ids.Get(docID)], f.NumericValue);
                }
            }
            r.Dispose();
            dir.Dispose();
        }
Example #5
        public virtual void TestCompressionTools()
        {
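            // round-trip through CompressionTools: the values are compressed into stored fields here and decompressed after being read back from the index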
            IndexableField binaryFldCompressed = new StoredField("binaryCompressed", (sbyte[])(Array)CompressionTools.Compress(BinaryValCompressed.GetBytes(Encoding.UTF8)));
            IndexableField stringFldCompressed = new StoredField("stringCompressed", (sbyte[])(Array)CompressionTools.CompressString(BinaryValCompressed));

            var doc = new Document {binaryFldCompressed, stringFldCompressed};

            using (Directory dir = NewDirectory())
            using (RandomIndexWriter writer = new RandomIndexWriter(Random(), dir))
            {
                writer.AddDocument(doc);

                using (IndexReader reader = writer.Reader)
                {
                    Document docFromReader = reader.Document(0);
                    Assert.IsTrue(docFromReader != null);

                    string binaryFldCompressedTest =
                        Encoding.UTF8.GetString(
                            CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed")));
                    //new string(CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed")), IOUtils.CHARSET_UTF_8);
                    Assert.IsTrue(binaryFldCompressedTest.Equals(BinaryValCompressed));
                    Assert.IsTrue(
                        CompressionTools.DecompressString(docFromReader.GetBinaryValue("stringCompressed"))
                            .Equals(BinaryValCompressed));
                }

            }
        }
Example #6
        public static LuceneSearcherDatabase Create(List <SearcherItem> items,
                                                    Dictionary <SearcherItem, IEnumerable <IIndexableField> > extraFields)
        {
            var luceneSearcherDatabase = new LuceneSearcherDatabase(items);

            var AppLuceneVersion = LuceneVersion.LUCENE_48;

            luceneSearcherDatabase.fsDirectory = new RAMDirectory();

            // create an analyzer to process the text
            var standardAnalyzer = new StandardAnalyzer(AppLuceneVersion, CharArraySet.EMPTY_SET);
            // NO stop words. we want to keep them for the title (so "wait for all triggers" is not indexed as "wait triggers")
            Analyzer analyzer = new PerFieldAnalyzerWrapper(standardAnalyzer,
                                                            new J2N.Collections.Generic.Dictionary <string, Analyzer>
            {
                {
                    // id field indexed as-is, no processing. that's what the keyword analyzer is made for
                    k_IdField, new KeywordAnalyzer()
                },
                {
                    k_DocField, new VSDocAnalyzer(AppLuceneVersion)
                }
            });

            // reuse fields/doc
            var idField           = new StoredField(k_IdField, 0);
            var nameField         = new TextField(k_TitleField, "", Field.Store.NO);
            var nameNoSpacesField = new TextField(k_TitleNoSpacesField, "", Field.Store.NO);
            var docField          = new TextField(k_DocField, "", Field.Store.NO);
            var document          = new Document()
            {
                idField,
                nameField,
                nameNoSpacesField,
                docField,
            };

            // create an index writer
            var indexConfig = new IndexWriterConfig(AppLuceneVersion, analyzer);
            var writer      = new IndexWriter(luceneSearcherDatabase.fsDirectory, indexConfig);

            Rec(items);
            writer.Flush(triggerMerge: false, applyAllDeletes: false);
            writer.Dispose();
            return(luceneSearcherDatabase);

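            // local function: depth-first walk over the item tree, reusing the pre-allocated fields and adding one document per searcher item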
            void Rec(List <SearcherItem> searcherItems)
            {
                foreach (var item in searcherItems)
                {
                    if (item.HasChildren)
                    {
                        Rec(item.Children);
                        // continue;
                    }

                    idField.SetInt32Value(item.Id);
                    nameField.SetStringValue(item.Path);
                    nameNoSpacesField.SetStringValue(item.Name.Replace(" ", null));
                    docField.SetStringValue(string.IsNullOrWhiteSpace(item.Help)
                        ? ""
                        : (item.Name + " " + (item.Help ?? "")));

                    if (extraFields != null && extraFields.TryGetValue(item, out var fields) && fields != null)
                    {
                        document.Fields.AddRange(fields);

                        writer.AddDocument(document);

                        foreach (var indexableField in fields)
                        {
                            document.Fields.Remove(indexableField);
                        }
                    }
                    else
                    {
                        writer.AddDocument(document);
                    }
                }
            }
        }
Example #7
        public override List <SearcherItem> Search(string query, out float localMaxScore)
        {
            Query q = null;

            if (string.IsNullOrWhiteSpace(query))
            {
                q = MakeFilterQuery(true);
            }
            else
            {
                query = query.ToLowerInvariant();
                // title: split prefixes ('on update' is indexed as 'on' 'update', so 'on* up*' would match)
                var prefixQuery = new BooleanQuery()
                {
                    Boost = 10
                };

                // search in doc field
                var docQuery = new NGramPhraseQuery(VSDocAnalyzer.k_MinGramSize);

                foreach (var queryPart in query.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries))
                {
                    // add 'on*' to the split prefix query
                    prefixQuery.Add(new PrefixQuery(new Term(k_TitleField, queryPart)), Occur.MUST);
                    // adding a doc query term longer than the max gram size gives no results
                    // the doc field is indexed with 3-to-5-grams, meaning "update" -> "updat", "xyz" -> "xyz", "xy" -> not indexed
                    // so we truncate to maxGramSize; otherwise a query for "update" would not match any indexed gram, since only "updat" was indexed
                    docQuery.Add(new Term(k_DocField, queryPart.Length > VSDocAnalyzer.k_MaxGramSize
                        ? queryPart.Substring(0, VSDocAnalyzer.k_MaxGramSize)
                        : queryPart));
                }

                var bq = new BooleanQuery
                {
                    // split prefixes
                    { prefixQuery, Occur.SHOULD },
                    // no-space abbreviation prefix query ('on update' is indexed as 'onupdate', so 'onup*' would match)
                    { new PrefixQuery(new Term(k_TitleNoSpacesField, query))
                      {
                          Boost = 10
                      }, Occur.SHOULD },
                    { docQuery, Occur.SHOULD }
                };

                // means at least one part of bq must occur. seems to be required?
                var fq = MakeFilterQuery(false);
                fq.Add(bq, Occur.MUST);
                q = fq;
            }

            if (q == null)
            {
                localMaxScore = 0;
                return(new List <SearcherItem>());
            }

            using (var directoryReader = DirectoryReader.Open(fsDirectory))
            {
                List <SearcherItem> results = new List <SearcherItem>();

                var     searcher = new IndexSearcher(directoryReader);
                TopDocs search   = searcher.Search(q, 10000);
                localMaxScore = search.MaxScore;

                ISet <string> fieldSet = new HashSet <string> {
                    k_IdField
                };
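                // for each hit, load only the fields in fieldSet (just the id) from the stored documents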
                foreach (ScoreDoc hit in search.ScoreDocs)
                {
                    Document    foundDoc = searcher.Doc(hit.Doc, fieldSet);
                    StoredField id       = (StoredField)foundDoc.GetField(k_IdField);
                    if (id == null)
                    {
                        continue;
                    }
                    results.Add(m_ItemList[id.GetInt32Value().Value]);
                }

                return(results);
            }

            BooleanQuery MakeFilterQuery(bool matchAll)
            {
                var filterQuery = new BooleanQuery();

                if (m_Filters == null || m_Filters.Count == 0 || matchAll)
                {
                    filterQuery.Add(new MatchAllDocsQuery(), Occur.MUST);
                }
                if (m_Filters != null)
                {
                    foreach (var filter in m_Filters)
                    {
                        if (filter.Value == null)
                        {
                            continue;
                        }
                        var filterClauseQuery = filter.Value is int i
                            ? (Query)NumericRangeQuery.NewInt32Range(filter.Field, i, i, true, true)
                            : new TermQuery(new Term(filter.Field, filter.Value.ToString()));
                        filterQuery.Add(new BooleanClause(filterClauseQuery,
                                                          filter.Type == FilterType.Must ? Occur.MUST : Occur.MUST_NOT));
                    }
                }

                return(filterQuery);
            }
        }
Example #8
        static DocHelper()
        {
            //Initialize the large Lazy Field
            StringBuilder buffer = new StringBuilder();

            for (int i = 0; i < 10000; i++)
            {
                buffer.Append("Lazily loading lengths of language in lieu of laughing ");
            }

            try
            {
                LAZY_FIELD_BINARY_BYTES = "These are some binary field bytes".GetBytes(Encoding.UTF8);
            }
#pragma warning disable 168
            catch (EncoderFallbackException e)
#pragma warning restore 168
            {
            }
            LazyFieldBinary           = new StoredField(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES);
            Fields[Fields.Length - 2] = LazyFieldBinary;
            LARGE_LAZY_FIELD_TEXT     = buffer.ToString();
            LargeLazyField            = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, CustomType);
            Fields[Fields.Length - 1] = LargeLazyField;
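            // classify each field into the helper sets: indexed/unindexed, term vector/no term vector, stored/unstored, DOCS_ONLY (no tf) and omit-norms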
            for (int i = 0; i < Fields.Length; i++)
            {
                IIndexableField f = Fields[i];
                Add(All, f);
                if (f.IndexableFieldType.IsIndexed)
                {
                    Add(Indexed, f);
                }
                else
                {
                    Add(Unindexed, f);
                }
                if (f.IndexableFieldType.StoreTermVectors)
                {
                    Add(Termvector, f);
                }
                if (f.IndexableFieldType.IsIndexed && !f.IndexableFieldType.StoreTermVectors)
                {
                    Add(Notermvector, f);
                }
                if (f.IndexableFieldType.IsStored)
                {
                    Add(Stored, f);
                }
                else
                {
                    Add(Unstored, f);
                }
                if (f.IndexableFieldType.IndexOptions == IndexOptions.DOCS_ONLY)
                {
                    Add(NoTf, f);
                }
                if (f.IndexableFieldType.OmitNorms)
                {
                    Add(NoNorms, f);
                }
                if (f.IndexableFieldType.IndexOptions == IndexOptions.DOCS_ONLY)
                {
                    Add(NoTf, f);
                }
                //if (f.isLazy()) add(lazy, f);
            }
            NameValues = new Dictionary <string, object>
            {
                { TEXT_FIELD_1_KEY, FIELD_1_TEXT },
                { TEXT_FIELD_2_KEY, FIELD_2_TEXT },
                { TEXT_FIELD_3_KEY, FIELD_3_TEXT },
                { KEYWORD_FIELD_KEY, KEYWORD_TEXT },
                { NO_NORMS_KEY, NO_NORMS_TEXT },
                { NO_TF_KEY, NO_TF_TEXT },
                { UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT },
                { UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT },
                { UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT },
                { LAZY_FIELD_KEY, LAZY_FIELD_TEXT },
                { LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES },
                { LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT },
                { TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT },
                { TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT }
            };
        }
Example #9
        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        public virtual void TestNumericField()
        {
            Directory dir     = NewDirectory();
            var       w       = new RandomIndexWriter(Random(), dir, ClassEnvRule.similarity, ClassEnvRule.timeZone);
            var       numDocs = AtLeast(500);
            var       answers = new object[numDocs];

            NumericType[] typeAnswers = new NumericType[numDocs];
            for (int id = 0; id < numDocs; id++)
            {
                Document    doc = new Document();
                Field       nf;
                Field       sf;
                object      answer;
                NumericType typeAnswer;
                if (Random().NextBoolean())
                {
                    // float/double
                    if (Random().NextBoolean())
                    {
                        float f = Random().NextFloat();
                        answer     = Convert.ToSingle(f, CultureInfo.InvariantCulture);
                        nf         = new SingleField("nf", f, Field.Store.NO);
                        sf         = new StoredField("nf", f);
                        typeAnswer = NumericType.SINGLE;
                    }
                    else
                    {
                        double d = Random().NextDouble();
                        answer     = Convert.ToDouble(d, CultureInfo.InvariantCulture);
                        nf         = new DoubleField("nf", d, Field.Store.NO);
                        sf         = new StoredField("nf", d);
                        typeAnswer = NumericType.DOUBLE;
                    }
                }
                else
                {
                    // int/long
                    if (Random().NextBoolean())
                    {
                        int i = Random().Next();
                        answer     = Convert.ToInt32(i, CultureInfo.InvariantCulture);
                        nf         = new Int32Field("nf", i, Field.Store.NO);
                        sf         = new StoredField("nf", i);
                        typeAnswer = NumericType.INT32;
                    }
                    else
                    {
                        long l = Random().NextLong();
                        answer     = Convert.ToInt64(l, CultureInfo.InvariantCulture);
                        nf         = new Int64Field("nf", l, Field.Store.NO);
                        sf         = new StoredField("nf", l);
                        typeAnswer = NumericType.INT64;
                    }
                }
                doc.Add(nf);
                doc.Add(sf);
                answers[id]     = answer;
                typeAnswers[id] = typeAnswer;
                FieldType ft = new FieldType(Int32Field.TYPE_STORED);
                ft.NumericPrecisionStep = int.MaxValue;
                doc.Add(new Int32Field("id", id, ft));
                w.AddDocument(doc);
            }
            DirectoryReader r = w.Reader;

            w.Dispose();

            Assert.AreEqual(numDocs, r.NumDocs);

            foreach (AtomicReaderContext ctx in r.Leaves)
            {
                AtomicReader      sub = (AtomicReader)ctx.Reader;
                FieldCache.Int32s ids = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
                for (int docID = 0; docID < sub.NumDocs; docID++)
                {
                    Document doc = sub.Document(docID);
                    Field    f   = doc.GetField <Field>("nf");
                    Assert.IsTrue(f is StoredField, "got f=" + f);
#pragma warning disable 612, 618
                    Assert.AreEqual(answers[ids.Get(docID)], f.GetNumericValue());
#pragma warning restore 612, 618
                }
            }
            r.Dispose();
            dir.Dispose();
        }
Example #10
        // Returns a pair: (list of invalid document terms, map of document term -> document)
        private KeyValuePair <IList <string>, IDictionary <string, Document> > GenerateIndexDocuments(int ndocs, bool requiresPayload, bool requiresContexts)
        {
            IDictionary <string, Document> docs = new JCG.Dictionary <string, Document>();
            IList <string> invalidDocTerms      = new JCG.List <string>();

            for (int i = 0; i < ndocs; i++)
            {
                Document doc        = new Document();
                bool     invalidDoc = false;
                Field    field      = null;
                // usually have valid term field in document
                if (Usually())
                {
                    field = new TextField(FIELD_NAME, "field_" + i, Field.Store.YES);
                    doc.Add(field);
                }
                else
                {
                    invalidDoc = true;
                }

                // even if payload is not required usually have it
                if (requiresPayload || Usually())
                {
                    // usually have valid payload field in document
                    if (Usually())
                    {
                        Field payload = new StoredField(PAYLOAD_FIELD_NAME, new BytesRef("payload_" + i));
                        doc.Add(payload);
                    }
                    else if (requiresPayload)
                    {
                        invalidDoc = true;
                    }
                }

                if (requiresContexts || Usually())
                {
                    if (Usually())
                    {
                        for (int j = 0; j < AtLeast(2); j++)
                        {
                            doc.Add(new StoredField(CONTEXT_FIELD_NAME, new BytesRef("context_" + i + "_" + j)));
                        }
                    }
                    // we should allow entries without context
                }

                // usually have valid weight field in document
                if (Usually())
                {
                    Field weight = (Rarely()) ?
                                   (Field) new StoredField(WEIGHT_FIELD_NAME, 100d + i) :
                                   (Field) new NumericDocValuesField(WEIGHT_FIELD_NAME, 100 + i);
                    doc.Add(weight);
                }

                string term = null;
                if (invalidDoc)
                {
                    term = (field != null) ? field.GetStringValue() : "invalid_" + i;
                    invalidDocTerms.Add(term);
                }
                else
                {
                    term = field.GetStringValue();
                }

                docs.Put(term, doc);
            }
            return(new KeyValuePair <IList <string>, IDictionary <string, Document> >(invalidDocTerms, docs));
        }
Example #11
        public virtual void TestNumericField()
        {
            using Directory dir = NewDirectory();
            DirectoryReader r = null;

            try
            {
                var numDocs = AtLeast(500);
                var answers = new Number[numDocs];
                using (var w = new RandomIndexWriter(Random, dir))
                {
                    NumericType[] typeAnswers = new NumericType[numDocs];
                    for (int id = 0; id < numDocs; id++)
                    {
                        Document    doc = new Document();
                        Field       nf;
                        Field       sf;
                        Number      answer;
                        NumericType typeAnswer;
                        if (Random.NextBoolean())
                        {
                            // float/double
                            if (Random.NextBoolean())
                            {
                                float f = Random.NextSingle();
                                answer     = Single.GetInstance(f);
                                nf         = new SingleField("nf", f, Field.Store.NO);
                                sf         = new StoredField("nf", f);
                                typeAnswer = NumericType.SINGLE;
                            }
                            else
                            {
                                double d = Random.NextDouble();
                                answer     = Double.GetInstance(d);
                                nf         = new DoubleField("nf", d, Field.Store.NO);
                                sf         = new StoredField("nf", d);
                                typeAnswer = NumericType.DOUBLE;
                            }
                        }
                        else
                        {
                            // int/long
                            if (Random.NextBoolean())
                            {
                                int i = Random.Next();
                                answer     = Int32.GetInstance(i);
                                nf         = new Int32Field("nf", i, Field.Store.NO);
                                sf         = new StoredField("nf", i);
                                typeAnswer = NumericType.INT32;
                            }
                            else
                            {
                                long l = Random.NextInt64();
                                answer     = Int64.GetInstance(l);
                                nf         = new Int64Field("nf", l, Field.Store.NO);
                                sf         = new StoredField("nf", l);
                                typeAnswer = NumericType.INT64;
                            }
                        }
                        doc.Add(nf);
                        doc.Add(sf);
                        answers[id]     = answer;
                        typeAnswers[id] = typeAnswer;
                        FieldType ft = new FieldType(Int32Field.TYPE_STORED);
                        ft.NumericPrecisionStep = int.MaxValue;
                        doc.Add(new Int32Field("id", id, ft));
                        w.AddDocument(doc);
                    }
                    r = w.GetReader();
                } // w.Dispose();

                Assert.AreEqual(numDocs, r.NumDocs);

                foreach (AtomicReaderContext ctx in r.Leaves)
                {
                    AtomicReader      sub = ctx.AtomicReader;
                    FieldCache.Int32s ids = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
                    for (int docID = 0; docID < sub.NumDocs; docID++)
                    {
                        Document doc = sub.Document(docID);
                        Field    f   = doc.GetField <Field>("nf");
                        Assert.IsTrue(f is StoredField, "got f=" + f);
#pragma warning disable 612, 618
                        Assert.AreEqual(answers[ids.Get(docID)], f.GetNumericValue());
#pragma warning restore 612, 618
                    }
                }
            }
            finally
            {
                r?.Dispose();
            }
        }
Example #12
        static DocHelper()
        {
            CustomType  = new FieldType(TextField.TYPE_STORED);
            TextField1  = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, CustomType);
            CustomType2 = new FieldType(TextField.TYPE_STORED);
            CustomType2.StoreTermVectors         = true;
            CustomType2.StoreTermVectorPositions = true;
            CustomType2.StoreTermVectorOffsets   = true;
            TextField2            = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, CustomType2);
            CustomType3           = new FieldType(TextField.TYPE_STORED);
            CustomType3.OmitNorms = true;
            TextField3            = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, CustomType3);
            KeyField                     = new StringField(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES);
            CustomType5                  = new FieldType(TextField.TYPE_STORED);
            CustomType5.OmitNorms        = true;
            CustomType5.IsTokenized      = false;
            NoNormsField                 = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, CustomType5);
            CustomType6                  = new FieldType(TextField.TYPE_STORED);
            CustomType6.IndexOptions     = IndexOptions.DOCS_ONLY;
            NoTFField                    = new Field(NO_TF_KEY, NO_TF_TEXT, CustomType6);
            CustomType7                  = new FieldType();
            CustomType7.IsStored         = true;
            UnIndField                   = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, CustomType7);
            CustomType8                  = new FieldType(TextField.TYPE_NOT_STORED);
            CustomType8.StoreTermVectors = true;
            UnStoredField2               = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, CustomType8);

            UnStoredField1 = new TextField(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO);
            LazyField      = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, CustomType);
            TextUtfField1  = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, CustomType);
            TextUtfField2  = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, CustomType2);

            Fields = new Field[] { TextField1, TextField2, TextField3, KeyField, NoNormsField, NoTFField, UnIndField, UnStoredField1, UnStoredField2, TextUtfField1, TextUtfField2, LazyField, LazyFieldBinary, LargeLazyField };

            //Initialize the large Lazy Field
            StringBuilder buffer = new StringBuilder();

            for (int i = 0; i < 10000; i++)
            {
                buffer.Append("Lazily loading lengths of language in lieu of laughing ");
            }

            try
            {
                LAZY_FIELD_BINARY_BYTES = "These are some binary field bytes".GetBytes(Encoding.UTF8);
            }
#pragma warning disable 168
            catch (EncoderFallbackException e)
#pragma warning restore 168
            {
            }
            LazyFieldBinary           = new StoredField(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES);
            Fields[Fields.Length - 2] = LazyFieldBinary;
            LARGE_LAZY_FIELD_TEXT     = buffer.ToString();
            LargeLazyField            = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, CustomType);
            Fields[Fields.Length - 1] = LargeLazyField;
            for (int i = 0; i < Fields.Length; i++)
            {
                IIndexableField f = Fields[i];
                Add(All, f);
                if (f.IndexableFieldType.IsIndexed)
                {
                    Add(Indexed, f);
                }
                else
                {
                    Add(Unindexed, f);
                }
                if (f.IndexableFieldType.StoreTermVectors)
                {
                    Add(Termvector, f);
                }
                if (f.IndexableFieldType.IsIndexed && !f.IndexableFieldType.StoreTermVectors)
                {
                    Add(Notermvector, f);
                }
                if (f.IndexableFieldType.IsStored)
                {
                    Add(Stored, f);
                }
                else
                {
                    Add(Unstored, f);
                }
                if (f.IndexableFieldType.IndexOptions == IndexOptions.DOCS_ONLY)
                {
                    Add(NoTf, f);
                }
                if (f.IndexableFieldType.OmitNorms)
                {
                    Add(NoNorms, f);
                }
                if (f.IndexableFieldType.IndexOptions == IndexOptions.DOCS_ONLY)
                {
                    Add(NoTf, f);
                }
                //if (f.isLazy()) add(lazy, f);
            }
            NameValues = new Dictionary <string, object>();
            NameValues[TEXT_FIELD_1_KEY]      = FIELD_1_TEXT;
            NameValues[TEXT_FIELD_2_KEY]      = FIELD_2_TEXT;
            NameValues[TEXT_FIELD_3_KEY]      = FIELD_3_TEXT;
            NameValues[KEYWORD_FIELD_KEY]     = KEYWORD_TEXT;
            NameValues[NO_NORMS_KEY]          = NO_NORMS_TEXT;
            NameValues[NO_TF_KEY]             = NO_TF_TEXT;
            NameValues[UNINDEXED_FIELD_KEY]   = UNINDEXED_FIELD_TEXT;
            NameValues[UNSTORED_FIELD_1_KEY]  = UNSTORED_1_FIELD_TEXT;
            NameValues[UNSTORED_FIELD_2_KEY]  = UNSTORED_2_FIELD_TEXT;
            NameValues[LAZY_FIELD_KEY]        = LAZY_FIELD_TEXT;
            NameValues[LAZY_FIELD_BINARY_KEY] = LAZY_FIELD_BINARY_BYTES;
            NameValues[LARGE_LAZY_FIELD_KEY]  = LARGE_LAZY_FIELD_TEXT;
            NameValues[TEXT_FIELD_UTF1_KEY]   = FIELD_UTF1_TEXT;
            NameValues[TEXT_FIELD_UTF2_KEY]   = FIELD_UTF2_TEXT;
        }