Freeze() public method

Prevents future changes to this instance. It is recommended to call this once the FieldType's properties have been set, to guard against unintentional state changes.
public Freeze ( ) : void
Returns void
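
Every example below follows the same configure-then-freeze pattern: set all of the FieldType's properties, call Freeze(), and only then expose the instance (usually via a static field). Here is a minimal sketch of that pattern, assuming the Lucene.NET 4.8 property names IsIndexed/IsStored (older versions, also represented below, used Indexed/Stored) and 4.8's behavior on mutation after freezing:

    using System;
    using Lucene.Net.Documents; // FieldType

    // Configure first, then freeze, then share the instance.
    var type = new FieldType
    {
        IsIndexed = true, // index the field's terms
        IsStored = true   // keep the original value retrievable
    };
    type.Freeze(); // from here on, the configuration is read-only

    // Mutating a frozen FieldType throws. In Lucene.NET 4.8 this surfaces as an
    // InvalidOperationException (the port of Java's IllegalStateException);
    // older ports may use a different exception type.
    try
    {
        type.IsStored = false;
    }
    catch (InvalidOperationException)
    {
        // expected: the instance is frozen
    }

Note that the copy constructor yields an unfrozen copy, so a frozen preset such as TextField.TYPE_NOT_STORED can still serve as a template: new FieldType(TextField.TYPE_NOT_STORED) can be modified and frozen again, as several of the examples below do.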
Example #1
 static StoredField()
 {
     TYPE = new FieldType {
         IsStored = true
     };
     TYPE.Freeze();
 }
Example #2
        private static FieldType LoadType() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var type = new FieldType
            {
                DocValueType = DocValuesType.NUMERIC
            };

            type.Freeze();
            return type;
        }
Example #3
        static TextField()
        {
            TYPE_NOT_STORED.Indexed   = true;
            TYPE_NOT_STORED.Tokenized = true;
            TYPE_NOT_STORED.Freeze();

            TYPE_STORED.Indexed   = true;
            TYPE_STORED.Tokenized = true;
            TYPE_STORED.Stored    = true;
            TYPE_STORED.Freeze();
        }
Example #4
        private static FieldType LoadTypeNotStored() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var typeNotStored = new FieldType
            {
                IsIndexed   = true,
                IsTokenized = true
            };

            typeNotStored.Freeze();
            return typeNotStored;
        }
Example #5
        private static FieldType LoadTypeNotStored() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var typeNotStored = new FieldType
            {
                IsIndexed    = true,
                IsTokenized  = true,
                OmitNorms    = true,
                IndexOptions = IndexOptions.DOCS_ONLY,
                NumericType  = Documents.NumericType.SINGLE
            };

            typeNotStored.Freeze();
            return typeNotStored;
        }
Example #6
        static StringField()
        {
            TYPE_NOT_STORED.Indexed      = true;
            TYPE_NOT_STORED.OmitNorms    = true;
            TYPE_NOT_STORED.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
            TYPE_NOT_STORED.Tokenized    = false;
            TYPE_NOT_STORED.Freeze();

            TYPE_STORED.Indexed      = true;
            TYPE_STORED.OmitNorms    = true;
            TYPE_STORED.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
            TYPE_STORED.Stored       = true;
            TYPE_STORED.Tokenized    = false;
            TYPE_STORED.Freeze();
        }
Example #7
 static LongField()
 {
     TYPE_NOT_STORED.Indexed          = true;
     TYPE_NOT_STORED.Tokenized        = true;
     TYPE_NOT_STORED.OmitNorms        = true;
     TYPE_NOT_STORED.IndexOptions     = FieldInfo.IndexOptions.DOCS_ONLY;
     TYPE_NOT_STORED.NumericTypeValue = Documents.FieldType.NumericType.LONG;
     TYPE_NOT_STORED.Freeze();
     TYPE_STORED.Indexed          = true;
     TYPE_STORED.Tokenized        = true;
     TYPE_STORED.OmitNorms        = true;
     TYPE_STORED.IndexOptions     = FieldInfo.IndexOptions.DOCS_ONLY;
     TYPE_STORED.NumericTypeValue = Documents.FieldType.NumericType.LONG;
     TYPE_STORED.Stored           = true;
     TYPE_STORED.Freeze();
 }
Example #8
        static SingleField()
        {
            TYPE_NOT_STORED.IsIndexed    = true;
            TYPE_NOT_STORED.IsTokenized  = true;
            TYPE_NOT_STORED.OmitNorms    = true;
            TYPE_NOT_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_NOT_STORED.NumericType  = NumericType.SINGLE;
            TYPE_NOT_STORED.Freeze();

            TYPE_STORED.IsIndexed    = true;
            TYPE_STORED.IsTokenized  = true;
            TYPE_STORED.OmitNorms    = true;
            TYPE_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_STORED.NumericType  = NumericType.SINGLE;
            TYPE_STORED.IsStored     = true;
            TYPE_STORED.Freeze();
        }
Example #9
        public virtual void Test()
        {
            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            try
            {
                FieldType ft = new FieldType();
                ft.Indexed = true;
                ft.Stored = Random().NextBoolean();
                ft.Freeze();

                Document doc = new Document();
                if (Random().NextBoolean())
                {
                    // totally ok short field value
                    doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
                }
                // problematic field
                string name = TestUtil.RandomSimpleString(Random(), 1, 50);
                string value = TestUtil.RandomSimpleString(Random(), MinTestTermLength, MaxTestTermLegnth);
                Field f = new Field(name, value, ft);
                if (Random().NextBoolean())
                {
                    // totally ok short field value
                    doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
                }
                doc.Add(f);

                try
                {
                    w.AddDocument(doc);
                    Assert.Fail("Did not get an exception from adding a monster term");
                }
                catch (System.ArgumentException e)
                {
                    string maxLengthMsg = Convert.ToString(IndexWriter.MAX_TERM_LENGTH);
                    string msg = e.Message;
                    Assert.IsTrue(msg.Contains("immense term"), "IllegalArgumentException didn't mention 'immense term': " + msg);
                    Assert.IsTrue(msg.Contains(maxLengthMsg), "IllegalArgumentException didn't mention max length (" + maxLengthMsg + "): " + msg);
                    Assert.IsTrue(msg.Contains(name), "IllegalArgumentException didn't mention field name (" + name + "): " + msg);
                }
            }
            finally
            {
                w.Dispose();
            }
        }
Example #10
        static Int64Field()
        {
            TYPE_NOT_STORED.IsIndexed    = true;
            TYPE_NOT_STORED.IsTokenized  = true;
            TYPE_NOT_STORED.OmitNorms    = true;
            TYPE_NOT_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_NOT_STORED.NumericType  = NumericType.INT64;
            TYPE_NOT_STORED.Freeze();

            TYPE_STORED.IsIndexed    = true;
            TYPE_STORED.IsTokenized  = true;
            TYPE_STORED.OmitNorms    = true;
            TYPE_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_STORED.NumericType  = NumericType.INT64;
            TYPE_STORED.IsStored     = true;
            TYPE_STORED.Freeze();
        }
Example #11
        static DoubleField()
        {
            TYPE_NOT_STORED.IsIndexed    = true;
            TYPE_NOT_STORED.IsTokenized  = true;
            TYPE_NOT_STORED.OmitNorms    = true;
            TYPE_NOT_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_NOT_STORED.NumericType  = Documents.NumericType.DOUBLE;
            TYPE_NOT_STORED.Freeze();

            TYPE_STORED.IsIndexed    = true;
            TYPE_STORED.IsTokenized  = true;
            TYPE_STORED.OmitNorms    = true;
            TYPE_STORED.IndexOptions = IndexOptions.DOCS_ONLY;
            TYPE_STORED.NumericType  = Documents.NumericType.DOUBLE;
            TYPE_STORED.IsStored     = true;
            TYPE_STORED.Freeze();
        }
Example #12
        public static FieldType TranslateFieldType(Store store, Index index, TermVector termVector)
        {
            FieldType ft = new FieldType();

            ft.Stored = store == Store.YES;

            switch (index)
            {
            case Index.ANALYZED:
                ft.Indexed   = true;
                ft.Tokenized = true;
                break;

            case Index.ANALYZED_NO_NORMS:
                ft.Indexed   = true;
                ft.Tokenized = true;
                ft.OmitNorms = true;
                break;

            case Index.NOT_ANALYZED:
                ft.Indexed   = true;
                ft.Tokenized = false;
                break;

            case Index.NOT_ANALYZED_NO_NORMS:
                ft.Indexed   = true;
                ft.Tokenized = false;
                ft.OmitNorms = true;
                break;

            case Index.NO:
                break;
            }

            switch (termVector)
            {
            case TermVector.NO:
                break;

            case TermVector.YES:
                ft.StoreTermVectors = true;
                break;

            case TermVector.WITH_POSITIONS:
                ft.StoreTermVectors         = true;
                ft.StoreTermVectorPositions = true;
                break;

            case TermVector.WITH_OFFSETS:
                ft.StoreTermVectors       = true;
                ft.StoreTermVectorOffsets = true;
                break;

            case TermVector.WITH_POSITIONS_OFFSETS:
                ft.StoreTermVectors         = true;
                ft.StoreTermVectorPositions = true;
                ft.StoreTermVectorOffsets   = true;
                break;
            }
            ft.Freeze();
            return ft;
        }
Example #13
        public void BeforeClass()
        {
            NoDocs = AtLeast(4096);
            Distance = (1 << 30) / NoDocs;
            Directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory,
                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
                    .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000))
                    .SetMergePolicy(NewLogMergePolicy()));

            FieldType storedInt = new FieldType(IntField.TYPE_NOT_STORED);
            storedInt.Stored = true;
            storedInt.Freeze();

            FieldType storedInt8 = new FieldType(storedInt);
            storedInt8.NumericPrecisionStep = 8;

            FieldType storedInt4 = new FieldType(storedInt);
            storedInt4.NumericPrecisionStep = 4;

            FieldType storedInt2 = new FieldType(storedInt);
            storedInt2.NumericPrecisionStep = 2;

            FieldType storedIntNone = new FieldType(storedInt);
            storedIntNone.NumericPrecisionStep = int.MaxValue;

            FieldType unstoredInt = IntField.TYPE_NOT_STORED;

            FieldType unstoredInt8 = new FieldType(unstoredInt);
            unstoredInt8.NumericPrecisionStep = 8;

            FieldType unstoredInt4 = new FieldType(unstoredInt);
            unstoredInt4.NumericPrecisionStep = 4;

            FieldType unstoredInt2 = new FieldType(unstoredInt);
            unstoredInt2.NumericPrecisionStep = 2;

            IntField field8 = new IntField("field8", 0, storedInt8),
                     field4 = new IntField("field4", 0, storedInt4),
                     field2 = new IntField("field2", 0, storedInt2),
                     fieldNoTrie = new IntField("field" + int.MaxValue, 0, storedIntNone),
                     ascfield8 = new IntField("ascfield8", 0, unstoredInt8),
                     ascfield4 = new IntField("ascfield4", 0, unstoredInt4),
                     ascfield2 = new IntField("ascfield2", 0, unstoredInt2);

            Document doc = new Document();
            // add fields, that have a distance to test general functionality
            doc.Add(field8);
            doc.Add(field4);
            doc.Add(field2);
            doc.Add(fieldNoTrie);
            // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
            doc.Add(ascfield8);
            doc.Add(ascfield4);
            doc.Add(ascfield2);

            // Add a series of noDocs docs with increasing int values
            for (int l = 0; l < NoDocs; l++)
            {
                int val = Distance * l + StartOffset;
                field8.IntValue = val;
                field4.IntValue = val;
                field2.IntValue = val;
                fieldNoTrie.IntValue = val;

                val = l - (NoDocs / 2);
                ascfield8.IntValue = val;
                ascfield4.IntValue = val;
                ascfield2.IntValue = val;
                writer.AddDocument(doc);
            }

            Reader = writer.Reader;
            Searcher = NewSearcher(Reader);
            writer.Dispose();
        }
Example #14
 protected internal virtual FieldType FieldType(Options options)
 {
     var ft = new FieldType(TextField.TYPE_NOT_STORED)
     {
         StoreTermVectors = true,
         StoreTermVectorPositions = (new OptionsWrapper(options)).positions,
         StoreTermVectorOffsets = (new OptionsWrapper(options)).offsets,
         StoreTermVectorPayloads = (new OptionsWrapper(options)).payloads
     };
     ft.Freeze();
     return ft;
 }
Example #15
        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
        {
            MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new MMapDirectory(CreateTempDir("4GBStoredFields")));
            dir.Throttling = MockDirectoryWrapper.Throttling_e.NEVER;

            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                            .SetRAMBufferSizeMB(256.0)
                            .SetMergeScheduler(scheduler)
                            .SetMergePolicy(NewLogMergePolicy(false, 10))
                            .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            MergePolicy mp = w.Config.MergePolicy;
            if (mp is LogByteSizeMergePolicy)
            {
                // 1 petabyte:
                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
            }

            Document doc = new Document();
            FieldType ft = new FieldType();
            ft.Indexed = false;
            ft.Stored = true;
            ft.Freeze();
            int valueLength = RandomInts.NextIntBetween(Random(), 1 << 13, 1 << 20);
            var value = new byte[valueLength];
            for (int i = 0; i < valueLength; ++i)
            {
                // random so that even compressing codecs can't compress it
                value[i] = (byte)Random().Next(256);
            }
            Field f = new Field("fld", value, ft);
            doc.Add(f);

            int numDocs = (int)((1L << 32) / valueLength + 100);
            for (int i = 0; i < numDocs; ++i)
            {
                w.AddDocument(doc);
                if (VERBOSE && i % (numDocs / 10) == 0)
                {
                    Console.WriteLine(i + " of " + numDocs + "...");
                }
            }
            w.ForceMerge(1);
            w.Dispose();
            if (VERBOSE)
            {
                bool found = false;
                foreach (string file in dir.ListAll())
                {
                    if (file.EndsWith(".fdt"))
                    {
                        long fileLength = dir.FileLength(file);
                        if (fileLength >= 1L << 32)
                        {
                            found = true;
                        }
                        Console.WriteLine("File length of " + file + " : " + fileLength);
                    }
                }
                if (!found)
                {
                    Console.WriteLine("No .fdt file larger than 4GB, test bug?");
                }
            }

            DirectoryReader rd = DirectoryReader.Open(dir);
            Document sd = rd.Document(numDocs - 1);
            Assert.IsNotNull(sd);
            Assert.AreEqual(1, sd.Fields.Count);
            BytesRef valueRef = sd.GetBinaryValue("fld");
            Assert.IsNotNull(valueRef);
            Assert.AreEqual(new BytesRef(value), valueRef);
            rd.Dispose();

            dir.Dispose();
        }
Example #16
 static NumericDocValuesField()
 {
     TYPE.DocValueType = FieldInfo.DocValuesType_e.NUMERIC;
     TYPE.Freeze();
 }
Example #17
 static SortedSetDocValuesField()
 {
     TYPE.DocValueType = FieldInfo.DocValuesType_e.SORTED_SET;
     TYPE.Freeze();
 }
Example #18
        public virtual void TestOmitTFAndNorms()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            ft.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
            ft.OmitNorms = true;
            ft.Freeze();
            Field f = NewField("foo", "bar", ft);
            doc.Add(f);
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                query.Add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }
Example #19
 protected internal override void AddRandomFields(Document doc)
 {
     foreach (FieldInfo.IndexOptions opts in Enum.GetValues(typeof(FieldInfo.IndexOptions)))
     {
         string field = "f_" + opts;
         string pf = TestUtil.GetPostingsFormat(Codec.Default, field);
         if (opts == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS && DoesntSupportOffsets.Contains(pf))
         {
             continue;
         }
         var ft = new FieldType {IndexOptions = opts, Indexed = true, OmitNorms = true};
         ft.Freeze();
         int numFields = Random().Next(5);
         for (int j = 0; j < numFields; ++j)
         {
             doc.Add(new Field("f_" + opts, TestUtil.RandomSimpleString(Random(), 2), ft));
         }
     }
 }
Example #20
        public virtual void TestWriteReadMerge()
        {
            // get another codec, other than the default: so we are merging segments across different codecs
            Codec otherCodec;
            /*if ("SimpleText".Equals(Codec.Default.Name))
            {*/
            otherCodec = new Lucene46Codec();
            /*}
            else
            {
              otherCodec = new SimpleTextCodec();
            }*/
            Directory dir = NewDirectory();
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());

            int docCount = AtLeast(200);
            var data = new byte[docCount][][];
            for (int i = 0; i < docCount; ++i)
            {
                int fieldCount = Rarely() ? RandomInts.NextIntBetween(Random(), 1, 500) : RandomInts.NextIntBetween(Random(), 1, 5);
                data[i] = new byte[fieldCount][];
                for (int j = 0; j < fieldCount; ++j)
                {
                    int length = Rarely() ? Random().Next(1000) : Random().Next(10);
                    int max = Rarely() ? 256 : 2;
                    data[i][j] = RandomByteArray(length, max);
                }
            }

            FieldType type = new FieldType(StringField.TYPE_STORED);
            type.Indexed = false;
            type.Freeze();
            IntField id = new IntField("id", 0, Field.Store.YES);
            for (int i = 0; i < data.Length; ++i)
            {
                Document doc = new Document();
                doc.Add(id);
                id.IntValue = i;
                for (int j = 0; j < data[i].Length; ++j)
                {
                    Field f = new Field("bytes" + j, data[i][j], type);
                    doc.Add(f);
                }
                iw.w.AddDocument(doc);
                if (Random().NextBoolean() && (i % (data.Length / 10) == 0))
                {
                    iw.w.Dispose();
                    // test merging against a non-compressing codec
                    if (iwConf.Codec == otherCodec)
                    {
                        iwConf.SetCodec(Codec.Default);
                    }
                    else
                    {
                        iwConf.SetCodec(otherCodec);
                    }
                    iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
                }
            }

            for (int i = 0; i < 10; ++i)
            {
                int min = Random().Next(data.Length);
                int max = min + Random().Next(20);
                iw.DeleteDocuments(NumericRangeQuery.NewIntRange("id", min, max, true, false));
            }

            iw.ForceMerge(2); // force merges with deletions

            iw.Commit();

            DirectoryReader ir = DirectoryReader.Open(dir);
            Assert.IsTrue(ir.NumDocs > 0);
            int numDocs = 0;
            for (int i = 0; i < ir.MaxDoc; ++i)
            {
                Document doc = ir.Document(i);
                if (doc == null)
                {
                    continue;
                }
                ++numDocs;
                int docId = (int)doc.GetField("id").NumericValue;
                Assert.AreEqual(data[docId].Length + 1, doc.Fields.Count);
                for (int j = 0; j < data[docId].Length; ++j)
                {
                    var arr = data[docId][j];
                    BytesRef arr2Ref = doc.GetBinaryValue("bytes" + j);
                    var arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
                    Assert.AreEqual(arr, arr2);
                }
            }
            Assert.IsTrue(ir.NumDocs <= numDocs);
            ir.Dispose();

            iw.DeleteAll();
            iw.Commit();
            iw.ForceMerge(1);

            iw.Dispose();
            dir.Dispose();
        }
Example #21
        public void TestReadSkip()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

            FieldType ft = new FieldType();
            ft.Stored = true;
            ft.Freeze();

            string @string = TestUtil.RandomSimpleString(Random(), 50);
            var bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8);
            long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
            int i = Random().NextBoolean() ? Random().Next(42) : Random().Next();
            float f = Random().NextFloat();
            double d = Random().NextDouble();

            IList<Field> fields = Arrays.AsList(
                new Field("bytes", bytes, ft),
                new Field("string", @string, ft),
                new LongField("long", l, Field.Store.YES),
                new IntField("int", i, Field.Store.YES),
                new FloatField("float", f, Field.Store.YES),
                new DoubleField("double", d, Field.Store.YES));

            for (int k = 0; k < 100; ++k)
            {
                Document doc = new Document();
                foreach (Field fld in fields)
                {
                    doc.Add(fld);
                }
                iw.w.AddDocument(doc);
            }
            iw.Commit();

            DirectoryReader reader = DirectoryReader.Open(dir);
            int docID = Random().Next(100);
            foreach (Field fld in fields)
            {
                string fldName = fld.Name();
                Document sDoc = reader.Document(docID, Collections.Singleton(fldName));
                IndexableField sField = sDoc.GetField(fldName);
                if (typeof(Field) == fld.GetType())
                {
                    Assert.AreEqual(fld.BinaryValue(), sField.BinaryValue());
                    Assert.AreEqual(fld.StringValue, sField.StringValue);
                }
                else
                {
                    Assert.AreEqual(fld.NumericValue, sField.NumericValue);
                }
            }
            reader.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Example #22
 static StoredField()
 {
     TYPE = new FieldType();
     TYPE.Stored = true;
     TYPE.Freeze();
 }
Example #23
 public virtual void TestStats()
 {
     Directory dir = NewDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
     Document doc = new Document();
     FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
     ft.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
     ft.Freeze();
     Field f = NewField("foo", "bar", ft);
     doc.Add(f);
     iw.AddDocument(doc);
     IndexReader ir = iw.Reader;
     iw.Dispose();
     Assert.AreEqual(-1, ir.TotalTermFreq(new Term("foo", new BytesRef("bar"))));
     Assert.AreEqual(-1, ir.GetSumTotalTermFreq("foo"));
     ir.Dispose();
     dir.Dispose();
 }
Example #24
 static BinaryDocValuesField()
 {
     fType.DocValueType = FieldInfo.DocValuesType_e.BINARY;
     fType.Freeze();
 }
Example #25
 static SortedDocValuesField()
 {
     TYPE.DocValueType = DocValuesType.SORTED;
     TYPE.Freeze();
 }
Example #26
        public static FieldType TranslateFieldType(Store store, Index index, TermVector termVector)
        {
            FieldType ft = new FieldType();

            ft.Stored = store == Store.YES;

            switch (index)
            {
                case Index.ANALYZED:
                    ft.Indexed = true;
                    ft.Tokenized = true;
                    break;

                case Index.ANALYZED_NO_NORMS:
                    ft.Indexed = true;
                    ft.Tokenized = true;
                    ft.OmitNorms = true;
                    break;

                case Index.NOT_ANALYZED:
                    ft.Indexed = true;
                    ft.Tokenized = false;
                    break;

                case Index.NOT_ANALYZED_NO_NORMS:
                    ft.Indexed = true;
                    ft.Tokenized = false;
                    ft.OmitNorms = true;
                    break;

                case Index.NO:
                    break;
            }

            switch (termVector)
            {
                case TermVector.NO:
                    break;

                case TermVector.YES:
                    ft.StoreTermVectors = true;
                    break;

                case TermVector.WITH_POSITIONS:
                    ft.StoreTermVectors = true;
                    ft.StoreTermVectorPositions = true;
                    break;

                case TermVector.WITH_OFFSETS:
                    ft.StoreTermVectors = true;
                    ft.StoreTermVectorOffsets = true;
                    break;

                case TermVector.WITH_POSITIONS_OFFSETS:
                    ft.StoreTermVectors = true;
                    ft.StoreTermVectorPositions = true;
                    ft.StoreTermVectorOffsets = true;
                    break;
            }
            ft.Freeze();
            return ft;
        }
Example #27
 static StoredField()
 {
     TYPE        = new FieldType();
     TYPE.Stored = true;
     TYPE.Freeze();
 }
Example #28
        public void BeforeClass()
        {
            ANALYZER = new MockAnalyzer(Random());

            qp = new StandardQueryParser(ANALYZER);

            HashMap<String, /*Number*/object> randomNumberMap = new HashMap<string, object>();

            /*SimpleDateFormat*/
            string dateFormat;
            long randomDate;
            bool dateFormatSanityCheckPass;
            int count = 0;
            do
            {
                if (count > 100)
                {
                    fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations.");
                }

                dateFormatSanityCheckPass = true;
                LOCALE = randomLocale(Random());
                TIMEZONE = randomTimeZone(Random());
                DATE_STYLE = randomDateStyle(Random());
                TIME_STYLE = randomDateStyle(Random());

                //// assumes localized date pattern will have at least year, month, day,
                //// hour, minute
                //dateFormat = (SimpleDateFormat)DateFormat.getDateTimeInstance(
                //    DATE_STYLE, TIME_STYLE, LOCALE);

                //// not all date patterns includes era, full year, timezone and second,
                //// so we add them here
                //dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy");
                //dateFormat.setTimeZone(TIMEZONE);

                DATE_FORMAT = new NumberDateFormat(DATE_STYLE, TIME_STYLE, LOCALE)
                {
                    TimeZone = TIMEZONE
                };
                dateFormat = DATE_FORMAT.GetDateFormat();

                do
                {
                    randomDate = Random().nextLong();

                    // prune date value so it doesn't pass in insane values to some
                    // calendars.
                    randomDate = randomDate % 3400000000000L;

                    // truncate to second
                    randomDate = (randomDate / 1000L) * 1000L;

                    // only positive values
                    randomDate = Math.Abs(randomDate);
                } while (randomDate == 0L);

                dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate);

                dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0);

                dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat,
                          -randomDate);

                count++;
            } while (!dateFormatSanityCheckPass);

            //NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
            //NUMBER_FORMAT.setMaximumFractionDigits((Random().nextInt() & 20) + 1);
            //NUMBER_FORMAT.setMinimumFractionDigits((Random().nextInt() & 20) + 1);
            //NUMBER_FORMAT.setMaximumIntegerDigits((Random().nextInt() & 20) + 1);
            //NUMBER_FORMAT.setMinimumIntegerDigits((Random().nextInt() & 20) + 1);

            NUMBER_FORMAT = new NumberFormat(LOCALE);

            double randomDouble;
            long randomLong;
            int randomInt;
            float randomFloat;

            while ((randomLong = Convert.ToInt64(NormalizeNumber(Math.Abs(Random().nextLong()))
                )) == 0L)
                ;
            while ((randomDouble = Convert.ToDouble(NormalizeNumber(Math.Abs(Random().NextDouble()))
                )) == 0.0)
                ;
            while ((randomFloat = Convert.ToSingle(NormalizeNumber(Math.Abs(Random().nextFloat()))
                )) == 0.0f)
                ;
            while ((randomInt = Convert.ToInt32(NormalizeNumber(Math.Abs(Random().nextInt())))) == 0)
                ;

            randomNumberMap.Put(FieldType.NumericType.LONG.ToString(), randomLong);
            randomNumberMap.Put(FieldType.NumericType.INT.ToString(), randomInt);
            randomNumberMap.Put(FieldType.NumericType.FLOAT.ToString(), randomFloat);
            randomNumberMap.Put(FieldType.NumericType.DOUBLE.ToString(), randomDouble);
            randomNumberMap.Put(DATE_FIELD_NAME, randomDate);

            RANDOM_NUMBER_MAP = Collections.UnmodifiableMap(randomNumberMap);

            directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory,
                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
                    .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000))
                    .SetMergePolicy(NewLogMergePolicy()));

            Document doc = new Document();
            HashMap<String, NumericConfig> numericConfigMap = new HashMap<String, NumericConfig>();
            HashMap<String, Field> numericFieldMap = new HashMap<String, Field>();
            qp.NumericConfigMap = (numericConfigMap);

            foreach (FieldType.NumericType type in Enum.GetValues(typeof(FieldType.NumericType)))
            {
                numericConfigMap.Put(type.ToString(), new NumericConfig(PRECISION_STEP,
                    NUMBER_FORMAT, type)); 

                FieldType ft2 = new FieldType(IntField.TYPE_NOT_STORED);
                ft2.NumericTypeValue = (type);
                ft2.Stored = (true);
                ft2.NumericPrecisionStep = (PRECISION_STEP);
                ft2.Freeze();
                Field field;

                switch (type)
                {
                    case FieldType.NumericType.INT:
                        field = new IntField(type.ToString(), 0, ft2);
                        break;
                    case FieldType.NumericType.FLOAT:
                        field = new FloatField(type.ToString(), 0.0f, ft2);
                        break;
                    case FieldType.NumericType.LONG:
                        field = new LongField(type.ToString(), 0L, ft2);
                        break;
                    case FieldType.NumericType.DOUBLE:
                        field = new DoubleField(type.ToString(), 0.0, ft2);
                        break;
                    default:
                        fail();
                        field = null;
                        break;
                }
                numericFieldMap.Put(type.ToString(), field);
                doc.Add(field);
            }

            numericConfigMap.Put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP,
                DATE_FORMAT, FieldType.NumericType.LONG));
            FieldType ft = new FieldType(LongField.TYPE_NOT_STORED);
            ft.Stored = (true);
            ft.NumericPrecisionStep = (PRECISION_STEP);
            LongField dateField = new LongField(DATE_FIELD_NAME, 0L, ft);
            numericFieldMap.Put(DATE_FIELD_NAME, dateField);
            doc.Add(dateField);

            foreach (NumberType numberType in Enum.GetValues(typeof(NumberType)))
            {
                setFieldValues(numberType, numericFieldMap);
                if (VERBOSE) Console.WriteLine("Indexing document: " + doc);
                writer.AddDocument(doc);
            }

            reader = writer.Reader;
            searcher = NewSearcher(reader);
            writer.Dispose();

        }