public void Two_default_instances_are_equal() { var io1 = new IndexOptions(); var io2 = new IndexOptions(); Assert.AreEqual(io1, io2); }
internal override bool ExecuteFullScan(BsonDocument doc, IndexOptions options) { var val = doc.Get(this.Field).Normalize(options); if(!val.IsString) return false; return val.AsString.StartsWith(_value.AsString); }
public void Two_index_options_with_different_attributes_are_not_equal() { var io1 = new IndexOptions(); io1.Attributes.Add("somekey", "somevalue"); var io2 = new IndexOptions(); io2.Attributes.Add("someotherkey", "someothervalue"); Assert.AreNotEqual(io1, io2); }
public void Two_index_options_with_same_attributes_but_diff_values_are_not_equal() { var io1 = new IndexOptions(); io1.Attributes.Add("somekey", "somevalue"); io1.Attributes.Add("somecount", 100); var io2 = new IndexOptions(); io2.Attributes.Add("somekey", "somevalue2"); io2.Attributes.Add("somecount", 150); Assert.AreNotEqual(io1, io2); }
public void OneWordSorts(IndexOptions indexOptions) { var users = new[] { new User {Name = "Abc"}, new User {Name = "Ghi"}, new User {Name = "Def"}, }; SetIndicesAndAssertWeCanRetrieveUsersOrderedByName(indexOptions, users); }
public void TwoWordSortsSameStartingWord(IndexOptions indexOptions) { var users = new[] { new User {Name = "Abc Xyz"}, new User {Name = "Abc Rst"}, new User {Name = "Abc Uvw"}, }; SetIndicesAndAssertWeCanRetrieveUsersOrderedByName(indexOptions, users); }
internal override void NormalizeValues(IndexOptions options) { var values = new List<BsonValue>(); foreach (var value in _values.Distinct()) { values.Add(value.Normalize(options)); } _values = values; }
internal override bool ExecuteFullScan(BsonDocument doc, IndexOptions options) { var val = doc.Get(this.Field).Normalize(options); foreach (var value in _values.Distinct()) { var diff = val.CompareTo(value); if (diff == 0) return true; } return false; }
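The full-scan snippets in this section all funnel comparisons through BsonValue.Normalize(options). A plausible sketch of that normalization, inferred from the option names used elsewhere in this section (an assumption, not the engine's actual code):

internal static class BsonValueNormalizeSketch
{
    // requires: using System.Globalization; using System.Text;
    // Sketch only: applies the string transforms the IndexOptions flags suggest.
    internal static BsonValue Normalize(this BsonValue value, IndexOptions options)
    {
        if (!value.IsString) return value;
        var text = value.AsString;
        if (options.TrimWhitespace) text = text.Trim();
        if (options.IgnoreCase) text = text.ToLowerInvariant();
        if (options.RemoveAccents) text = RemoveAccents(text);
        if (options.EmptyStringToNull && text.Length == 0) return BsonValue.Null;
        return new BsonValue(text);
    }

    private static string RemoveAccents(string text)
    {
        // decompose, then drop combining marks (e.g. "café" -> "cafe")
        var formD = text.Normalize(NormalizationForm.FormD);
        var sb = new StringBuilder(formD.Length);
        foreach (var ch in formD)
        {
            if (CharUnicodeInfo.GetUnicodeCategory(ch) != UnicodeCategory.NonSpacingMark) sb.Append(ch);
        }
        return sb.ToString().Normalize(NormalizationForm.FormC);
    }
}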
/// <summary>
/// Create a new index (or do nothing if it already exists) on a collection/field
/// </summary>
public bool EnsureIndex(string colName, string field, IndexOptions options)
{
    return this.Transaction<bool>(colName, true, (col) =>
    {
        // check if index already exists
        if (col.GetIndex(field) != null) return false;

        _log.Write(Logger.COMMAND, "create index on '{0}' :: '{1}' unique: {2}", colName, field, options.Unique);

        // create index head
        var index = _indexer.CreateIndex(col);

        index.Field = field;
        index.Options = options;

        // read all objects (via the PK index)
        foreach (var node in new QueryAll("_id", Query.Ascending).Run(col, _indexer))
        {
            var buffer = _data.Read(node.DataBlock);
            var dataBlock = _data.GetBlock(node.DataBlock);

            // mark datablock page as dirty
            _pager.SetDirty(dataBlock.Page);

            // read object
            var doc = BsonSerializer.Deserialize(buffer).AsDocument;

            // add an index node for this document's key
            var key = doc.Get(field);
            var newNode = _indexer.AddNode(index, key);

            // point the datablock's index reference at the new node
            dataBlock.IndexRef[index.Slot] = newNode.Position;

            // link the index node back to the datablock
            newNode.DataBlock = dataBlock.Position;

            _cache.CheckPoint();
        }

        return true;
    });
}
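A call into the engine method above might look like this; engine stands in for the DbEngine instance, and the collection, field and option values are illustrative:

// illustrative usage of EnsureIndex above; names and option values are examples
var created = engine.EnsureIndex("customers", "Name", new IndexOptions
{
    Unique = false,
    IgnoreCase = true,
    TrimWhitespace = true
});
// created == false when an index on "Name" already existed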
public DirectoryIndexWriter(DirectoryInfo indexLocation, Analyzer analyzer, bool recreateIndex = false) { if (indexLocation == null) { throw new ArgumentNullException("indexLocation"); } if (analyzer == null) { throw new ArgumentNullException("analyzer"); } IndexOptions = new IndexOptions { IndexLocation = new FileSystemIndexLocation(indexLocation), RecreateIndex = recreateIndex, Analyzer = analyzer }; }
public BsonValue Execute(DbEngine engine, StringScanner s) { var col = this.ReadCollection(engine, s); var field = s.Scan(this.FieldPattern).Trim().ThrowIfEmpty("Invalid field name"); var opts = JsonSerializer.Deserialize(s); var options = new IndexOptions(); if(opts.IsBoolean) { options.Unique = opts.AsBoolean; } else if(opts.IsDocument) { var doc = opts.AsDocument; if (doc["unique"].IsBoolean) options.Unique = doc["unique"].AsBoolean; if (doc["ignoreCase"].IsBoolean) options.IgnoreCase = doc["ignoreCase"].AsBoolean; if (doc["removeAccents"].IsBoolean) options.RemoveAccents = doc["removeAccents"].AsBoolean; if (doc["trimWhitespace"].IsBoolean) options.TrimWhitespace = doc["trimWhitespace"].AsBoolean; if (doc["emptyStringToNull"].IsBoolean) options.EmptyStringToNull = doc["emptyStringToNull"].AsBoolean; } return engine.EnsureIndex(col, field, options); }
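The parser above accepts either a bare boolean (shorthand for unique) or a JSON options document after the field name. Illustrative shell inputs, assuming the host shell routes db.<collection>.ensureIndex to this command (the verb itself is not defined by the snippet):

db.customers.ensureIndex Name true
db.customers.ensureIndex Name { "unique": true, "ignoreCase": true, "removeAccents": false }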
protected override void InitializeCollection(MongoDatabase database) { base.InitializeCollection(database); _collection.CreateIndex(IndexKeys.Ascending(FieldNames.Invites_Token), IndexOptions.SetBackground(true)); _collection.CreateIndex(IndexKeys.Ascending(FieldNames.Invites_EmailAddress), IndexOptions.SetBackground(true)); _collection.CreateIndex(IndexKeys.Ascending(FieldNames.StripeCustomerId), IndexOptions.SetUnique(true).SetSparse(true).SetBackground(true)); }
public void Two_index_options_with_different_recreate_index_are_not_equal() { var io1 = new IndexOptions { IndexLocation = new TestIndexLocation("products"), RecreateIndex = true }; var io2 = new IndexOptions { IndexLocation = new TestIndexLocation("products"), RecreateIndex = false }; Assert.AreNotEqual(io1, io2); }
public void TestHidden() { var options = IndexOptions.SetHidden(true); options.ToBsonDocument().Should().Be("{ hidden : true }"); }
public void TestTextIndexCreation() { if (_primary.InstanceType != MongoServerInstanceType.ShardRouter) { if (_primary.Supports(FeatureId.TextSearchCommand)) { using (_server.RequestStart(null, _primary)) { var collection = _database.GetCollection<Test>("test_text"); collection.Drop(); collection.CreateIndex(IndexKeys<Test>.Text(x => x.A, x => x.B).Ascending(x => x.C), IndexOptions.SetTextLanguageOverride("idioma").SetName("custom").SetTextDefaultLanguage("spanish")); var indexCollection = _database.GetCollection("system.indexes"); var result = indexCollection.FindOne(Query.EQ("name", "custom")); Assert.AreEqual("custom", result["name"].AsString); Assert.AreEqual("idioma", result["language_override"].AsString); Assert.AreEqual("spanish", result["default_language"].AsString); Assert.AreEqual(1, result["key"]["c"].AsInt32); } } } }
internal override bool ExecuteFullScan(BsonDocument doc, IndexOptions options) { var val = doc.Get(this.Field).Normalize(options); return val.CompareTo(_value) != 0; }
public virtual SimpleTextDocsAndPositionsEnum Reset(long fp, IBits liveDocs, IndexOptions indexOptions, int docFreq) { _liveDocs = liveDocs; _nextDocStart = fp; _docId = -1; _readPositions = indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; _readOffsets = indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; if (!_readOffsets) { _startOffset = -1; _endOffset = -1; } _cost = docFreq; return(this); }
public virtual TermStats Merge(MergeState mergeState, IndexOptions indexOptions, DocsEnum postings, FixedBitSet visitedDocs) { int df = 0; long totTF = 0; if (indexOptions == IndexOptions.DOCS_ONLY) { while (true) { int doc = postings.NextDoc(); if (doc == DocIdSetIterator.NO_MORE_DOCS) { break; } visitedDocs.Set(doc); this.StartDoc(doc, -1); this.FinishDoc(); df++; } totTF = -1; } else if (indexOptions == IndexOptions.DOCS_AND_FREQS) { while (true) { int doc = postings.NextDoc(); if (doc == DocIdSetIterator.NO_MORE_DOCS) { break; } visitedDocs.Set(doc); int freq = postings.Freq; this.StartDoc(doc, freq); this.FinishDoc(); df++; totTF += freq; } } else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) { var postingsEnum = (DocsAndPositionsEnum)postings; while (true) { int doc = postingsEnum.NextDoc(); if (doc == DocIdSetIterator.NO_MORE_DOCS) { break; } visitedDocs.Set(doc); int freq = postingsEnum.Freq; this.StartDoc(doc, freq); totTF += freq; for (int i = 0; i < freq; i++) { int position = postingsEnum.NextPosition(); BytesRef payload = postingsEnum.GetPayload(); this.AddPosition(position, payload, -1, -1); } this.FinishDoc(); df++; } } else { Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); var postingsEnum = (DocsAndPositionsEnum)postings; while (true) { int doc = postingsEnum.NextDoc(); if (doc == DocIdSetIterator.NO_MORE_DOCS) { break; } visitedDocs.Set(doc); int freq = postingsEnum.Freq; this.StartDoc(doc, freq); totTF += freq; for (int i = 0; i < freq; i++) { int position = postingsEnum.NextPosition(); BytesRef payload = postingsEnum.GetPayload(); this.AddPosition(position, payload, postingsEnum.StartOffset, postingsEnum.EndOffset); } this.FinishDoc(); df++; } } return(new TermStats(df, indexOptions == IndexOptions.DOCS_ONLY ? -1 : totTF)); }
public SimpleTextPostingsWriter(SimpleTextFieldsWriter outerInstance, FieldInfo field) { _outerInstance = outerInstance; _indexOptions = field.FieldIndexOptions.Value; _writePositions = _indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; _writeOffsets = _indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; }
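The Lucene snippets above rely on the IndexOptions enum being ordered from least to most postings detail, so a CompareTo (or >=) test answers "does this level include positions/offsets?". A small illustration:

// DOCS_ONLY < DOCS_AND_FREQS < DOCS_AND_FREQS_AND_POSITIONS < DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS
var opts = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
bool hasPositions = opts.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;           // true
bool hasOffsets = opts.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; // false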
public void EnsureIndexExists() { collection.CreateIndex(IndexKeys.TextAll(), IndexOptions.SetName("Searchable").SetBackground(true)); }
public void SetUp() { Configurator = new AntlerConfigurator(); Configurator.UseWindsorContainer() .UseStorage(MongoDbStorage.Use("mongodb://localhost:27017", "AntlerTest") .WithRecreatedDatabase(true) .WithIdentityGenerator(() => new Random().Next(1, int.MaxValue)) .WithEnsuredIndexes(MongoDbIndexBuilder.Add<Employee>(IndexKeys<Employee>.Ascending(_ => _.Id), IndexOptions<Employee>.SetUnique(true)) .ThenAdd<Team>(IndexKeys<Team>.Ascending(_ => _.Id), IndexOptions<Team>.SetUnique(true)))); }
public override void Initialize(string name, NameValueCollection config) { this.ApplicationName = config["applicationName"] ?? HostingEnvironment.ApplicationVirtualPath; this.enablePasswordReset = Boolean.Parse(config["enablePasswordReset"] ?? "true"); this.enablePasswordRetrieval = Boolean.Parse(config["enablePasswordRetrieval"] ?? "false"); this.maxInvalidPasswordAttempts = Int32.Parse(config["maxInvalidPasswordAttempts"] ?? "5"); this.minRequiredNonAlphanumericCharacters = Int32.Parse(config["minRequiredNonAlphanumericCharacters"] ?? "1"); this.minRequiredPasswordLength = Int32.Parse(config["minRequiredPasswordLength"] ?? "7"); this.passwordAttemptWindow = Int32.Parse(config["passwordAttemptWindow"] ?? "10"); this.passwordFormat = (MembershipPasswordFormat)Enum.Parse(typeof(MembershipPasswordFormat), config["passwordFormat"] ?? "Hashed"); this.passwordStrengthRegularExpression = config["passwordStrengthRegularExpression"] ?? String.Empty; this.requiresQuestionAndAnswer = Boolean.Parse(config["requiresQuestionAndAnswer"] ?? "false"); this.requiresUniqueEmail = Boolean.Parse(config["requiresUniqueEmail"] ?? "true"); if (this.PasswordFormat == MembershipPasswordFormat.Hashed && this.EnablePasswordRetrieval) { throw new ProviderException("Configured settings are invalid: Hashed passwords cannot be retrieved. Either set the password format to a different type, or set enablePasswordRetrieval to false."); } this.mongoCollection = MongoDatabase.Create(ConnectionHelper.GetDatabaseConnectionString(config)).GetCollection(config["collection"] ?? "Users"); this.mongoCollection.EnsureIndex("ApplicationName"); this.mongoCollection.EnsureIndex("ApplicationName", "Email"); this.mongoCollection.EnsureIndex("ApplicationName", "Username"); this.mongoCollection.EnsureIndex(IndexKeys.Ascending("ApplicationName", "Username"), IndexOptions.SetUnique(true).SetName("UniqueApplicationNameUserName")); base.Initialize(name, config); }
public void CreateIndex() { _hitCountCollection.CreateIndex(new IndexKeysBuilder().Ascending("URL"), IndexOptions.SetUnique(true)); }
public IndexCreationInfo(IndexToCreateType type, IndexOptions optionsToAdd) { this.type = type; this.optionsToAdd = optionsToAdd; }
public void Two_index_options_with_same_locations_are_equal() { var io1 = new IndexOptions { IndexLocation = new TestIndexLocation("products") }; var io2 = new IndexOptions { IndexLocation = new TestIndexLocation("products") }; Assert.AreEqual(io1, io2); }
public virtual void DoTestLongPostingsNoPositions(IndexOptions options)
{
    // Don't use TestUtil.getTempDir so that we own the
    // randomness (ie same seed will point to same dir):
    Directory dir = NewFSDirectory(CreateTempDir("longpostings" + "." + Random().NextLong()));

    int NUM_DOCS = AtLeast(2000);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS);
    }

    string s1 = GetRandomTerm(null);
    string s2 = GetRandomTerm(s1);

    if (VERBOSE)
    {
        Console.WriteLine("\nTEST: s1=" + s1 + " s2=" + s2);
        /*
         * for(int idx=0;idx<s1.Length();idx++) {
         *   System.out.println("  s1 ch=0x" + Integer.toHexString(s1.charAt(idx)));
         * }
         * for(int idx=0;idx<s2.Length();idx++) {
         *   System.out.println("  s2 ch=0x" + Integer.toHexString(s2.charAt(idx)));
         * }
         */
    }

    FixedBitSet isS1 = new FixedBitSet(NUM_DOCS);
    for (int idx = 0; idx < NUM_DOCS; idx++)
    {
        if (Random().NextBoolean())
        {
            isS1.Set(idx);
        }
    }

    IndexReader r;
    if (true)
    {
        IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy());
        iwc.SetRAMBufferSizeMB(16.0 + 16.0 * Random().NextDouble());
        iwc.SetMaxBufferedDocs(-1);
        RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, iwc);

        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        ft.IndexOptions = options;
        for (int idx = 0; idx < NUM_DOCS; idx++)
        {
            Document doc = new Document();
            string s = isS1.Get(idx) ? s1 : s2;
            Field f = NewField("field", s, ft);
            int count = TestUtil.NextInt(Random(), 1, 4);
            for (int ct = 0; ct < count; ct++)
            {
                doc.Add(f);
            }
            riw.AddDocument(doc);
        }

        r = riw.Reader;
        riw.Dispose();
    }
    else
    {
#pragma warning disable 162
        r = DirectoryReader.Open(dir);
#pragma warning restore 162
    }

    /*
     * if (VERBOSE) {
     *   System.out.println("TEST: terms");
     *   TermEnum termEnum = r.Terms();
     *   while(termEnum.Next()) {
     *     System.out.println("  term=" + termEnum.Term() + " len=" + termEnum.Term().Text().Length());
     *     Assert.IsTrue(termEnum.DocFreq() > 0);
     *     System.out.println("    s1?=" + (termEnum.Term().Text().equals(s1)) + " s1len=" + s1.Length());
     *     System.out.println("    s2?=" + (termEnum.Term().Text().equals(s2)) + " s2len=" + s2.Length());
     *     final String s = termEnum.Term().Text();
     *     for(int idx=0;idx<s.Length();idx++) {
     *       System.out.println("      ch=0x" + Integer.toHexString(s.charAt(idx)));
     *     }
     *   }
     * }
     */

    Assert.AreEqual(NUM_DOCS, r.NumDocs);
    Assert.IsTrue(r.DocFreq(new Term("field", s1)) > 0);
    Assert.IsTrue(r.DocFreq(new Term("field", s2)) > 0);

    int num = AtLeast(1000);
    for (int iter = 0; iter < num; iter++)
    {
        string term;
        bool doS1;
        if (Random().NextBoolean())
        {
            term = s1;
            doS1 = true;
        }
        else
        {
            term = s2;
            doS1 = false;
        }

        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: iter=" + iter + " doS1=" + doS1 + " term=" + term);
        }

        DocsEnum docs;
        DocsEnum postings;

        if (options == IndexOptions.DOCS_ONLY)
        {
            docs = TestUtil.Docs(Random(), r, "field", new BytesRef(term), null, null, DocsFlags.NONE);
            postings = null;
        }
        else
        {
            docs = postings = TestUtil.Docs(Random(), r, "field", new BytesRef(term), null, null, DocsFlags.FREQS);
            Debug.Assert(postings != null);
        }
        Debug.Assert(docs != null);

        int docID = -1;
        while (docID < DocIdSetIterator.NO_MORE_DOCS)
        {
            int what = Random().Next(3);
            if (what == 0)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: docID=" + docID + "; do next()");
                }
                // nextDoc
                int expected = docID + 1;
                while (true)
                {
                    if (expected == NUM_DOCS)
                    {
                        expected = int.MaxValue;
                        break;
                    }
                    else if (isS1.Get(expected) == doS1)
                    {
                        break;
                    }
                    else
                    {
                        expected++;
                    }
                }
                docID = docs.NextDoc();
                if (VERBOSE)
                {
                    Console.WriteLine("  got docID=" + docID);
                }
                Assert.AreEqual(expected, docID);
                if (docID == DocIdSetIterator.NO_MORE_DOCS)
                {
                    break;
                }

                if (Random().Next(6) == 3 && postings != null)
                {
                    int freq = postings.Freq;
                    Assert.IsTrue(freq >= 1 && freq <= 4);
                }
            }
            else
            {
                // advance
                int targetDocID;
                if (docID == -1)
                {
                    targetDocID = Random().Next(NUM_DOCS + 1);
                }
                else
                {
                    targetDocID = docID + TestUtil.NextInt(Random(), 1, NUM_DOCS - docID);
                }
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: docID=" + docID + "; do advance(" + targetDocID + ")");
                }
                int expected = targetDocID;
                while (true)
                {
                    if (expected == NUM_DOCS)
                    {
                        expected = int.MaxValue;
                        break;
                    }
                    else if (isS1.Get(expected) == doS1)
                    {
                        break;
                    }
                    else
                    {
                        expected++;
                    }
                }

                docID = docs.Advance(targetDocID);
                if (VERBOSE)
                {
                    Console.WriteLine("  got docID=" + docID);
                }
                Assert.AreEqual(expected, docID);
                if (docID == DocIdSetIterator.NO_MORE_DOCS)
                {
                    break;
                }

                if (Random().Next(6) == 3 && postings != null)
                {
                    int freq = postings.Freq;
                    Assert.IsTrue(freq >= 1 && freq <= 4, "got invalid freq=" + freq);
                }
            }
        }
    }
    r.Dispose();
    dir.Dispose();
}
private void SetIndicesAndAssertWeCanRetrieveUsersOrderedByName(IndexOptions indexOptions, User[] users)
{
    PutIndex(indexOptions);
    StoreItems(users);

    using (var s = DocumentStore.OpenSession())
    {
        IRavenQueryable<User> query;
        if (indexOptions.HasFlag(IndexOptions.Sort))
            query = s.Query<User, UserSortIndex>(); // use it if specified
        else
            query = s.Query<User>();

        query = query.OrderBy(dto => dto.Name);
        AssertRetrievedOrdered(query);
    }
}
public void TestTextIndexCreation() { if (_primary.InstanceType != MongoServerInstanceType.ShardRouter) { if (_primary.Supports(FeatureId.TextSearchCommand)) { var collection = _database.GetCollection<Test>("test_text"); collection.Drop(); collection.CreateIndex(IndexKeys<Test>.Text(x => x.A, x => x.B).Ascending(x => x.C), IndexOptions.SetTextLanguageOverride("idioma").SetName("custom").SetTextDefaultLanguage("spanish")); var indexes = collection.GetIndexes(); var index = indexes.RawDocuments.Single(i => i["name"].AsString == "custom"); Assert.AreEqual("idioma", index["language_override"].AsString); Assert.AreEqual("spanish", index["default_language"].AsString); Assert.AreEqual(1, index["key"]["c"].AsInt32); } } }
// should only be called by FieldInfos#addOrUpdate
internal void Update(bool indexed, bool storeTermVector, bool omitNorms, bool storePayloads, IndexOptions? indexOptions)
{
    //System.out.println("FI.update field=" + name + " indexed=" + indexed + " omitNorms=" + omitNorms + " this.omitNorms=" + this.omitNorms);
    if (this.indexed != indexed)
    {
        this.indexed = true; // once indexed, always indexed
    }
    if (indexed) // if updated field data is not for indexing, leave the updates out
    {
        if (this.StoreTermVector != storeTermVector)
        {
            this.StoreTermVector = true; // once vector, always vector
        }
        if (this.StorePayloads != storePayloads)
        {
            this.StorePayloads = true;
        }
        if (this.OmitNorms != omitNorms)
        {
            this.OmitNorms = true; // once norms are omitted, they stay omitted
            this.NormTypeValue = null;
        }
        if (this.IndexOptionsValue != indexOptions)
        {
            if (this.IndexOptionsValue == null)
            {
                this.IndexOptionsValue = indexOptions;
            }
            else
            {
                // downgrade
                IndexOptionsValue = (int)IndexOptionsValue.GetValueOrDefault() < (int)indexOptions ? IndexOptionsValue : indexOptions;
            }
            if ((int)IndexOptionsValue.GetValueOrDefault() < (int)FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
            {
                // cannot store payloads if we don't store positions:
                this.StorePayloads = false;
            }
        }
    }
    Debug.Assert(CheckConsistency());
}
public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, segmentSuffix, Lucene46FieldInfosFormat.EXTENSION);
    IndexOutput output = directory.CreateOutput(fileName, context);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(output, Lucene46FieldInfosFormat.CODEC_NAME, Lucene46FieldInfosFormat.FORMAT_CURRENT);
        output.WriteVInt32(infos.Count);
        foreach (FieldInfo fi in infos)
        {
            IndexOptions indexOptions = fi.IndexOptions;
            sbyte bits = 0x0;
            if (fi.HasVectors)
            {
                bits |= Lucene46FieldInfosFormat.STORE_TERMVECTOR;
            }
            if (fi.OmitsNorms)
            {
                bits |= Lucene46FieldInfosFormat.OMIT_NORMS;
            }
            if (fi.HasPayloads)
            {
                bits |= Lucene46FieldInfosFormat.STORE_PAYLOADS;
            }
            if (fi.IsIndexed)
            {
                bits |= Lucene46FieldInfosFormat.IS_INDEXED;
                Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                if (indexOptions == IndexOptions.DOCS_ONLY)
                {
                    bits |= Lucene46FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
                }
                else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
                {
                    bits |= Lucene46FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
                }
                else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
                {
                    bits |= Lucene46FieldInfosFormat.OMIT_POSITIONS;
                }
            }
            output.WriteString(fi.Name);
            output.WriteVInt32(fi.Number);
            output.WriteByte((byte)bits);

            // pack the DV types in one byte
            var dv = DocValuesByte(fi.DocValuesType);
            var nrm = DocValuesByte(fi.NormType);
            Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
            var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
            output.WriteByte(val);
            output.WriteInt64(fi.DocValuesGen);
            output.WriteStringStringMap(fi.Attributes);
        }
        CodecUtil.WriteFooter(output);
        success = true;
    }
    finally
    {
        if (success)
        {
            output.Dispose();
        }
        else
        {
            IOUtils.CloseWhileHandlingException(output);
        }
    }
}
/// <inheritdoc />
public Task AddIndexAsync(IndexOptions indexOptions) => AddIndexAsync(indexOptions, CancellationToken.None);
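The expression-bodied overload above just forwards with CancellationToken.None; callers that need cancellation pass a real token to the two-argument form. Illustrative only: indexer stands in for whatever object exposes AddIndexAsync, and indexOptions for a prepared options instance:

// illustrative: abandon index creation if it takes longer than 30 seconds
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await indexer.AddIndexAsync(indexOptions, cts.Token);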
public bool canReuse(IndexOptions indexOptions, bool storePayloads) { return indexOptions == this.indexOptions && storePayloads == this.storePayloads; }
internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState state)
{
    if (!fieldInfo.IsIndexed)
    {
        return; // nothing to flush, don't bother the codec with the unindexed field
    }

    TermsConsumer termsConsumer = consumer.AddField(fieldInfo);
    IComparer<BytesRef> termComp = termsConsumer.Comparer;

    // CONFUSING: this.indexOptions holds the index options
    // that were current when we first saw this field. But
    // it's possible this has changed, eg when other
    // documents are indexed that cause a "downgrade" of the
    // IndexOptions. So we must decode the in-RAM buffer
    // according to this.indexOptions, but then write the
    // new segment to the directory according to
    // currentFieldIndexOptions:
    IndexOptions currentFieldIndexOptions = fieldInfo.IndexOptions;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(currentFieldIndexOptions != IndexOptions.NONE);
    }

    bool writeTermFreq = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
    bool writePositions = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    bool writeOffsets = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;

    bool readTermFreq = this.hasFreq;
    bool readPositions = this.hasProx;
    bool readOffsets = this.hasOffsets;

    //System.out.println("flush readTF=" + readTermFreq + " readPos=" + readPositions + " readOffs=" + readOffsets);

    // Make sure FieldInfo.update is working correctly!:
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(!writeTermFreq || readTermFreq);
        Debugging.Assert(!writePositions || readPositions);
        Debugging.Assert(!writeOffsets || readOffsets);
        Debugging.Assert(!writeOffsets || writePositions);
    }

    IDictionary<Term, int?> segDeletes;
    if (state.SegUpdates != null && state.SegUpdates.terms.Count > 0)
    {
        segDeletes = state.SegUpdates.terms;
    }
    else
    {
        segDeletes = null;
    }

    int[] termIDs = termsHashPerField.SortPostings(termComp);
    int numTerms = termsHashPerField.bytesHash.Count;
    BytesRef text = new BytesRef();
    FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray;
    ByteSliceReader freq = new ByteSliceReader();
    ByteSliceReader prox = new ByteSliceReader();

    FixedBitSet visitedDocs = new FixedBitSet(state.SegmentInfo.DocCount);
    long sumTotalTermFreq = 0;
    long sumDocFreq = 0;

    Term protoTerm = new Term(fieldName);
    for (int i = 0; i < numTerms; i++)
    {
        int termID = termIDs[i];
        // Get BytesRef
        int textStart = postings.textStarts[termID];
        termsHashPerField.bytePool.SetBytesRef(text, textStart);

        termsHashPerField.InitReader(freq, termID, 0);
        if (readPositions || readOffsets)
        {
            termsHashPerField.InitReader(prox, termID, 1);
        }

        // TODO: really TermsHashPerField should take over most
        // of this loop, including merge sort of terms from
        // multiple threads and interacting with the
        // TermsConsumer, only calling out to us (passing us the
        // DocsConsumer) to handle delivery of docs/positions

        PostingsConsumer postingsConsumer = termsConsumer.StartTerm(text);

        int? delDocLimit;
        if (segDeletes != null)
        {
            protoTerm.Bytes = text;
            int? docIDUpto;
            segDeletes.TryGetValue(protoTerm, out docIDUpto);
            if (docIDUpto != null)
            {
                delDocLimit = docIDUpto;
            }
            else
            {
                delDocLimit = 0;
            }
        }
        else
        {
            delDocLimit = 0;
        }

        // Now termStates has numToMerge FieldMergeStates
        // which all share the same term. Now we must
        // interleave the docID streams.
        int docFreq = 0;
        long totalTermFreq = 0;
        int docID = 0;

        while (true)
        {
            //System.out.println("  cycle");
            int termFreq;
            if (freq.Eof())
            {
                if (postings.lastDocCodes[termID] != -1)
                {
                    // Return last doc
                    docID = postings.lastDocIDs[termID];
                    if (readTermFreq)
                    {
                        termFreq = postings.termFreqs[termID];
                    }
                    else
                    {
                        termFreq = -1;
                    }
                    postings.lastDocCodes[termID] = -1;
                }
                else
                {
                    // EOF
                    break;
                }
            }
            else
            {
                int code = freq.ReadVInt32();
                if (!readTermFreq)
                {
                    docID += code;
                    termFreq = -1;
                }
                else
                {
                    docID += (int)((uint)code >> 1);
                    if ((code & 1) != 0)
                    {
                        termFreq = 1;
                    }
                    else
                    {
                        termFreq = freq.ReadVInt32();
                    }
                }

                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(docID != postings.lastDocIDs[termID]);
                }
            }

            docFreq++;
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount);
            }

            // NOTE: we could check here if the docID was
            // deleted, and skip it. However, this is somewhat
            // dangerous because it can yield non-deterministic
            // behavior since we may see the docID before we see
            // the term that caused it to be deleted. this
            // would mean some (but not all) of its postings may
            // make it into the index, which'd alter the docFreq
            // for those terms. We could fix this by doing two
            // passes, ie first sweep marks all del docs, and
            // 2nd sweep does the real flush, but I suspect
            // that'd add too much time to flush.
            visitedDocs.Set(docID);
            postingsConsumer.StartDoc(docID, writeTermFreq ? termFreq : -1);
            if (docID < delDocLimit)
            {
                // Mark it deleted. TODO: we could also skip
                // writing its postings; this would be
                // deterministic (just for this Term's docs).

                // TODO: can we do this reach-around in a cleaner way????
                if (state.LiveDocs == null)
                {
                    state.LiveDocs = docState.docWriter.codec.LiveDocsFormat.NewLiveDocs(state.SegmentInfo.DocCount);
                }
                if (state.LiveDocs.Get(docID))
                {
                    state.DelCountOnFlush++;
                    state.LiveDocs.Clear(docID);
                }
            }

            totalTermFreq += termFreq;

            // Carefully copy over the prox + payload info,
            // changing the format to match Lucene's segment
            // format.

            if (readPositions || readOffsets)
            {
                // we did record positions (& maybe payload) and/or offsets
                int position = 0;
                int offset = 0;
                for (int j = 0; j < termFreq; j++)
                {
                    BytesRef thisPayload;

                    if (readPositions)
                    {
                        int code = prox.ReadVInt32();
                        position += (int)((uint)code >> 1);

                        if ((code & 1) != 0)
                        {
                            // this position has a payload
                            int payloadLength = prox.ReadVInt32();

                            if (payload == null)
                            {
                                payload = new BytesRef();
                                payload.Bytes = new byte[payloadLength];
                            }
                            else if (payload.Bytes.Length < payloadLength)
                            {
                                payload.Grow(payloadLength);
                            }

                            prox.ReadBytes(payload.Bytes, 0, payloadLength);
                            payload.Length = payloadLength;
                            thisPayload = payload;
                        }
                        else
                        {
                            thisPayload = null;
                        }

                        if (readOffsets)
                        {
                            int startOffset = offset + prox.ReadVInt32();
                            int endOffset = startOffset + prox.ReadVInt32();
                            if (writePositions)
                            {
                                if (writeOffsets)
                                {
                                    if (Debugging.AssertsEnabled)
                                    {
                                        Debugging.Assert(startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset);
                                    }
                                    postingsConsumer.AddPosition(position, thisPayload, startOffset, endOffset);
                                }
                                else
                                {
                                    postingsConsumer.AddPosition(position, thisPayload, -1, -1);
                                }
                            }
                            offset = startOffset;
                        }
                        else if (writePositions)
                        {
                            postingsConsumer.AddPosition(position, thisPayload, -1, -1);
                        }
                    }
                }
            }
            postingsConsumer.FinishDoc();
        }
        termsConsumer.FinishTerm(text, new TermStats(docFreq, writeTermFreq ? totalTermFreq : -1));
        sumTotalTermFreq += totalTermFreq;
        sumDocFreq += docFreq;
    }

    termsConsumer.Finish(writeTermFreq ? sumTotalTermFreq : -1, sumDocFreq, visitedDocs.Cardinality());
}
protected override void InitializeCollection(MongoCollection<Project> collection)
{
    base.InitializeCollection(collection);
    collection.CreateIndex(IndexKeys.Ascending(FieldNames.ApiKeys), IndexOptions.SetUnique(true).SetSparse(true)); // TODO: Should we set an index on project and configuration key name.
}
public FSTDocsEnum(IndexOptions indexOptions, bool storePayloads) { if (!InstanceFieldsInitialized) { InitializeInstanceFields(); InstanceFieldsInitialized = true; } this.indexOptions = indexOptions; this.storePayloads = storePayloads; }
internal virtual void SetIndexOptions(IndexOptions v) { _indexOptions = v; }
public void Two_references_to_the_same_object_are_equal() { var io1 = new IndexOptions { IndexLocation = new TestIndexLocation("products") }; var io2 = io1; Assert.AreEqual(io1, io2); }
// should only be called by FieldInfos#addOrUpdate
internal void Update(bool indexed, bool storeTermVector, bool omitNorms, bool storePayloads, IndexOptions indexOptions)
{
    //System.out.println("FI.update field=" + name + " indexed=" + indexed + " omitNorms=" + omitNorms + " this.omitNorms=" + this.omitNorms);
    if (this.indexed != indexed)
    {
        this.indexed = true; // once indexed, always indexed
    }
    if (indexed) // if updated field data is not for indexing, leave the updates out
    {
        if (this.storeTermVector != storeTermVector)
        {
            this.storeTermVector = true; // once vector, always vector
        }
        if (this.storePayloads != storePayloads)
        {
            this.storePayloads = true;
        }
        if (this.omitNorms != omitNorms)
        {
            this.omitNorms = true; // once norms are omitted, they stay omitted
            this.normType = DocValuesType.NONE;
        }
        if (this.indexOptions != indexOptions)
        {
            if (this.indexOptions == IndexOptions.NONE)
            {
                this.indexOptions = indexOptions;
            }
            else
            {
                // downgrade
                this.indexOptions = this.indexOptions.CompareTo(indexOptions) < 0 ? this.indexOptions : indexOptions;
            }
            if (this.indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0)
            {
                // cannot store payloads if we don't store positions:
                this.storePayloads = false;
            }
        }
    }
    Debug.Assert(CheckConsistency());
}
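The "downgrade" branch in both Update variants keeps the weaker of the two settings, so a field's index options can only lose detail over the life of a segment, and losing positions also forces payloads off. Illustrative effect (Update is internal and normally reached via FieldInfos#addOrUpdate):

// fi currently has IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS
fi.Update(indexed: true, storeTermVector: false, omitNorms: false, storePayloads: false, indexOptions: IndexOptions.DOCS_ONLY);
// fi.IndexOptions is now DOCS_ONLY, and storePayloads has been reset to false
// because positions are no longer recorded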
public void Two_index_options_with_different_analyzers_are_not_equal()
{
    var io1 = new IndexOptions { IndexLocation = new TestIndexLocation("products") }; // default Standard Analyzer
    var io2 = new IndexOptions { IndexLocation = new TestIndexLocation("products"), Analyzer = new SimpleAnalyzer() };
    Assert.AreNotEqual(io1, io2);
}
protected override void InitializeCollection(MongoDatabase database) { base.InitializeCollection(database); _collection.CreateIndex(IndexKeys<User>.Ascending(u => u.OrganizationIds), IndexOptions.SetBackground(true)); _collection.CreateIndex(IndexKeys<User>.Ascending(u => u.EmailAddress), IndexOptions.SetUnique(true).SetBackground(true)); _collection.CreateIndex(IndexKeys.Ascending(FieldNames.OAuthAccounts_Provider, FieldNames.OAuthAccounts_ProviderUserId), IndexOptions.SetUnique(true).SetSparse(true).SetBackground(true)); }
public void Two_index_options_with_different_locations_are_not_equal() { var io1 = new IndexOptions { IndexLocation = new TestIndexLocation("products") }; var io2 = new IndexOptions { IndexLocation = new TestIndexLocation("products2") }; Assert.AreNotEqual(io1, io2); }
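Taken together, the equality tests in this section pin down value semantics over IndexLocation, Analyzer, RecreateIndex and the Attributes dictionary. A minimal Equals sketch that would satisfy them, assuming they all exercise the same IndexOptions type (this is not the library's actual implementation, and GetHashCode would need a matching override):

// requires: using System.Linq;
public override bool Equals(object obj)
{
    if (ReferenceEquals(this, obj)) return true;
    var other = obj as IndexOptions;
    if (other == null) return false;
    return Equals(IndexLocation, other.IndexLocation)
        && RecreateIndex == other.RecreateIndex
        && Equals(Analyzer?.GetType(), other.Analyzer?.GetType()) // compare analyzers by type
        && Attributes.Count == other.Attributes.Count
        && Attributes.All(kv => other.Attributes.TryGetValue(kv.Key, out var v) && Equals(v, kv.Value));
}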
public void EnsureIndex(string fieldName, string collectionName = null) { var collection = collectionName ?? _collectionName; _database.GetCollection(collection).CreateIndex(IndexKeys.Ascending(fieldName), IndexOptions.SetBackground(true)); }
public bool CanReuse(IndexOptions indexOptions, bool storePayloads) { return(indexOptions == this.indexOptions && storePayloads == this.storePayloads); }
public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, TermsEnum termsEnum)
{
    BytesRef term;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(termsEnum != null);
    }
    long sumTotalTermFreq = 0;
    long sumDocFreq = 0;
    long sumDFsinceLastAbortCheck = 0;
    FixedBitSet visitedDocs = new FixedBitSet(mergeState.SegmentInfo.DocCount);

    if (indexOptions == IndexOptions.DOCS_ONLY)
    {
        if (docsEnum == null)
        {
            docsEnum = new MappingMultiDocsEnum();
        }
        docsEnum.MergeState = mergeState;

        MultiDocsEnum docsEnumIn = null;

        while ((term = termsEnum.Next()) != null)
        {
            // We can pass null for liveDocs, because the
            // mapping enum will skip the non-live docs:
            docsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsEnumIn, DocsFlags.NONE);
            if (docsEnumIn != null)
            {
                docsEnum.Reset(docsEnumIn);
                PostingsConsumer postingsConsumer = StartTerm(term);
                TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsEnum, visitedDocs);
                if (stats.DocFreq > 0)
                {
                    FinishTerm(term, stats);
                    sumTotalTermFreq += stats.DocFreq;
                    sumDFsinceLastAbortCheck += stats.DocFreq;
                    sumDocFreq += stats.DocFreq;
                    if (sumDFsinceLastAbortCheck > 60000)
                    {
                        mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                        sumDFsinceLastAbortCheck = 0;
                    }
                }
            }
        }
    }
    else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
    {
        if (docsAndFreqsEnum == null)
        {
            docsAndFreqsEnum = new MappingMultiDocsEnum();
        }
        docsAndFreqsEnum.MergeState = mergeState;

        MultiDocsEnum docsAndFreqsEnumIn = null;

        while ((term = termsEnum.Next()) != null)
        {
            // We can pass null for liveDocs, because the
            // mapping enum will skip the non-live docs:
            docsAndFreqsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsAndFreqsEnumIn);
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(docsAndFreqsEnumIn != null);
            }
            docsAndFreqsEnum.Reset(docsAndFreqsEnumIn);
            PostingsConsumer postingsConsumer = StartTerm(term);
            TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsAndFreqsEnum, visitedDocs);
            if (stats.DocFreq > 0)
            {
                FinishTerm(term, stats);
                sumTotalTermFreq += stats.TotalTermFreq;
                sumDFsinceLastAbortCheck += stats.DocFreq;
                sumDocFreq += stats.DocFreq;
                if (sumDFsinceLastAbortCheck > 60000)
                {
                    mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                    sumDFsinceLastAbortCheck = 0;
                }
            }
        }
    }
    else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
    {
        if (postingsEnum == null)
        {
            postingsEnum = new MappingMultiDocsAndPositionsEnum();
        }
        postingsEnum.MergeState = mergeState;

        MultiDocsAndPositionsEnum postingsEnumIn = null;

        while ((term = termsEnum.Next()) != null)
        {
            // We can pass null for liveDocs, because the
            // mapping enum will skip the non-live docs:
            postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn, DocsAndPositionsFlags.PAYLOADS);
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(postingsEnumIn != null);
            }
            postingsEnum.Reset(postingsEnumIn);
            PostingsConsumer postingsConsumer = StartTerm(term);
            TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, postingsEnum, visitedDocs);
            if (stats.DocFreq > 0)
            {
                FinishTerm(term, stats);
                sumTotalTermFreq += stats.TotalTermFreq;
                sumDFsinceLastAbortCheck += stats.DocFreq;
                sumDocFreq += stats.DocFreq;
                if (sumDFsinceLastAbortCheck > 60000)
                {
                    mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                    sumDFsinceLastAbortCheck = 0;
                }
            }
        }
    }
    else
    {
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        }
        if (postingsEnum == null)
        {
            postingsEnum = new MappingMultiDocsAndPositionsEnum();
        }
        postingsEnum.MergeState = mergeState;

        MultiDocsAndPositionsEnum postingsEnumIn = null;

        while ((term = termsEnum.Next()) != null)
        {
            // We can pass null for liveDocs, because the
            // mapping enum will skip the non-live docs:
            postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn);
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(postingsEnumIn != null);
            }
            postingsEnum.Reset(postingsEnumIn);
            PostingsConsumer postingsConsumer = StartTerm(term);
            TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, postingsEnum, visitedDocs);
            if (stats.DocFreq > 0)
            {
                FinishTerm(term, stats);
                sumTotalTermFreq += stats.TotalTermFreq;
                sumDFsinceLastAbortCheck += stats.DocFreq;
                sumDocFreq += stats.DocFreq;
                if (sumDFsinceLastAbortCheck > 60000)
                {
                    mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                    sumDFsinceLastAbortCheck = 0;
                }
            }
        }
    }
    Finish(indexOptions == IndexOptions.DOCS_ONLY ? -1 : sumTotalTermFreq, sumDocFreq, visitedDocs.Cardinality());
}
private void PutIndex(IndexOptions options) { if (options.HasFlag(IndexOptions.Search)) DocumentStore.ExecuteIndex(new UserSearchIndex()); if (options.HasFlag(IndexOptions.Sort)) DocumentStore.ExecuteIndex(new UserSortIndex()); }
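The HasFlag checks here and in the sorting helper above imply that this test suite's IndexOptions is a [Flags] enum whose values combine; a sketch of the implied declaration (assumed, not copied from the suite):

[Flags]
public enum IndexOptions
{
    None = 0,
    Search = 1,
    Sort = 2
}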
protected override void InitializeCollection(MongoDatabase database) { base.InitializeCollection(database); _collection.CreateIndex(IndexKeys.Ascending(FieldNames.ProjectId), IndexOptions.SetBackground(true)); }
public FieldOptions(StoreOptions store, IndexOptions index, TermVectorOptions termVector) { _store = store; _index = index; _termVector = termVector; }
private static void CreateIndexes(MongoCollection<BsonDocument> sourceCollection, MongoCollection<BsonDocument> targetCollection, FlexibleOptions options)
{
    if (options == null)
    {
        options = new FlexibleOptions();
    }
    var logger = NLog.LogManager.GetLogger("CreateIndexes");
    logger.Debug("{0}.{1} - Start index creation", sourceCollection.Database.Name, sourceCollection.Name);

    var command = new CommandDocument();
    command.Add("createIndexes", targetCollection.Name);
    var indexList = new BsonArray();
    command.Add("indexes", indexList);

    // copy indexes, if any
    foreach (IndexInfo idx in sourceCollection.GetIndexes().ToList())
    {
        // skip the default "_id_" index, since every MongoDB collection has one
        if (idx.Name == "_id_")
        {
            continue;
        }
        // recreate the index options based on the current index options
        var opts = IndexOptions.SetBackground(idx.IsBackground || options.Get("indexes-background", false))
            .SetSparse(idx.IsSparse || options.Get("indexes-sparse", false))
            .SetUnique(idx.IsUnique).SetName(idx.Name).SetDropDups(idx.DroppedDups);

        if (idx.TimeToLive < TimeSpan.MaxValue)
        {
            opts.SetTimeToLive(idx.TimeToLive);
        }

        // add the index
        try
        {
            // servers older than 2.6 do not support the createIndexes command
            var version = targetCollection.Database.Server.BuildInfo.Version;
            if (version.Major < 2 || (version.Major == 2 && version.Minor < 6))
            {
                logger.Debug("{0}.{1} - Creating index: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
                targetCollection.CreateIndex(idx.Key, opts);
            }
            else
            {
                logger.Debug("{0}.{1} - Prepare index creation: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
                // remove the namespace to allow MongoDB to generate the correct one...
                var doc = idx.RawDocument;
                doc.Remove("ns");
                if (options.Get("indexes-background", false))
                {
                    doc["background"] = true;
                }
                if (options.Get("indexes-sparse", false))
                {
                    doc["sparse"] = true;
                }
                indexList.Add(doc);
            }
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException || (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{0}.{1} - Timeout creating index {2}, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
                // wait for index creation....
                for (var i = 0; i < 30; i++)
                {
                    System.Threading.Thread.Sleep(10000);
                    try
                    {
                        if (targetCollection.IndexExists(idx.Name))
                        {
                            break;
                        }
                    }
                    catch { }
                }
            }
            else
            {
                logger.Error(ex, "{0}.{1} - Error creating index {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
            }
            logger.Warn("{0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.RawDocument.ToJson());
        }
    }

    if (indexList.Count > 0)
    {
        try
        {
            logger.Debug("{0}.{1} - Creating {2} indexes", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count);
            targetCollection.Database.RunCommand(command);
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException || (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{0}.{1} - Timeout creating {2} indexes, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count);
                logger.Warn("{0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson());
            }
            else
            {
                logger.Error(ex, "{0}.{1} - Error creating indexes", sourceCollection.Database.Name, sourceCollection.Name);
                logger.Error("{0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson());
            }
        }
    }
    logger.Debug("{0}.{1} - Index creation completed", sourceCollection.Database.Name, sourceCollection.Name);
}
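An illustrative call of the private helper above; the servers and collection names are stand-ins, and passing null options falls back to the defaults created inside the method:

var source = sourceServer.GetDatabase("app").GetCollection<BsonDocument>("events");
var target = targetServer.GetDatabase("app_copy").GetCollection<BsonDocument>("events");
CreateIndexes(source, target, null); // null -> new FlexibleOptions() internally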
/// <summary>
/// Sole constructor.
///
/// @lucene.experimental
/// </summary>
public FieldInfo(string name, bool indexed, int number, bool storeTermVector, bool omitNorms, bool storePayloads, IndexOptions? indexOptions, DocValuesType_e? docValues, DocValuesType_e? normsType, IDictionary<string, string> attributes)
{
    this.Name = name;
    this.indexed = indexed;
    this.Number = number;
    this.docValueType = docValues;
    if (indexed)
    {
        this.StoreTermVector = storeTermVector;
        this.StorePayloads = storePayloads;
        this.OmitNorms = omitNorms;
        this.IndexOptionsValue = indexOptions;
        this.NormTypeValue = !omitNorms ? normsType : null;
    }
    else // for non-indexed fields, leave defaults
    {
        this.StoreTermVector = false;
        this.StorePayloads = false;
        this.OmitNorms = false;
        this.IndexOptionsValue = null;
        this.NormTypeValue = null;
    }
    this.Attributes_Renamed = attributes;
    Debug.Assert(CheckConsistency());
}
public BsonIndexAttribute(IndexOptions options) { if (options == null) throw new ArgumentNullException("options"); this.Options = options; }
public StringPropertyDescriptor <T> IndexOptions(IndexOptions indexOptions) => Assign(a => a.IndexOptions = indexOptions);
public SimpleTextTermsEnum(SimpleTextFieldsReader outerInstance, FST<PairOutputs<long?, PairOutputs<long?, long?>.Pair>.Pair> fst, IndexOptions indexOptions) { _outerInstance = outerInstance; _indexOptions = indexOptions; _fstEnum = new BytesRefFSTEnum<PairOutputs<long?, PairOutputs<long?, long?>.Pair>.Pair>(fst); }
public KeywordPropertyDescriptor <T> IndexOptions(IndexOptions indexOptions) => Assign(a => a.IndexOptions = indexOptions);
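Both descriptor methods above feed NEST's fluent mapping API; a sketch of how the keyword variant is typically wired up, assuming a NEST 5/6-era client (the Product POCO and property are illustrative):

public class Product { public string Sku { get; set; } }

var response = client.CreateIndex("products", c => c
    .Mappings(ms => ms.Map<Product>(m => m
        .Properties(ps => ps
            .Keyword(k => k
                .Name(p => p.Sku)
                .IndexOptions(IndexOptions.Docs))))));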
/// <summary>
/// Constructs the timeout storage, connecting to the Mongo database pointed to by the given connection string,
/// storing the timeouts in the given collection
/// </summary>
public MongoDbTimeoutStorage(string connectionString, string collectionName)
{
    database = MongoHelper.GetDatabase(connectionString);
    collection = database.GetCollection(collectionName);
    collection.EnsureIndex(IndexKeys.Ascending(TimeProperty), IndexOptions.SetBackground(true).SetUnique(false));
}
internal override void NormalizeValues(IndexOptions options) { _value = _value.Normalize(options); }
private FieldInfo AddOrUpdateInternal(string name, int preferredFieldNumber, bool isIndexed, bool storeTermVector, bool omitNorms, bool storePayloads, IndexOptions indexOptions, DocValuesType docValues, DocValuesType normType)
{
    // LUCENENET: Bypass FieldInfo method so we can access the quick boolean check
    if (!TryGetFieldInfo(name, out FieldInfo fi) || fi is null)
    {
        // this field wasn't yet added to this in-RAM
        // segment's FieldInfo, so now we get a global
        // number for this field. If the field was seen
        // before then we'll get the same name and number,
        // else we'll allocate a new one:
        int fieldNumber = globalFieldNumbers.AddOrGet(name, preferredFieldNumber, docValues);
        fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, null);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(!byName.ContainsKey(fi.Name));
            Debugging.Assert(globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType));
        }
        byName[fi.Name] = fi;
    }
    else
    {
        fi.Update(isIndexed, storeTermVector, omitNorms, storePayloads, indexOptions);

        if (docValues != DocValuesType.NONE)
        {
            // only pay the synchronization cost if fi does not already have a DVType
            bool updateGlobal = !fi.HasDocValues;
            fi.DocValuesType = docValues; // this will also perform the consistency check.
            if (updateGlobal)
            {
                // must also update docValuesType map so it's
                // aware of this field's DocValueType
                globalFieldNumbers.SetDocValuesType(fi.Number, name, docValues);
            }
        }

        if (!fi.OmitsNorms && normType != DocValuesType.NONE)
        {
            fi.NormType = normType;
        }
    }
    return fi;
}
internal void SetIndexOptions(IndexOptions v) { indexOptions = v; }
private BsonIndexAttribute(IndexOptions options) { this.Options = options; }