/// <summary>
/// Draws a uniform random sample of up to <paramref name="size"/> terms across
/// all fields of the reader, returned in shuffled order.
/// </summary>
/// <param name="random">Source of randomness for the sample.</param>
/// <param name="reader">Reader whose terms are sampled.</param>
/// <param name="size">Maximum number of terms to return.</param>
/// <returns>A shuffled list of at most <paramref name="size"/> terms.</returns>
private static IList<Term> Sample(Random random, IndexReader reader, int size)
{
    IList<Term> sample = new List<Term>();
    // Total terms seen so far; needed for uniform reservoir sampling.
    int seen = 0;
    Fields fields = MultiFields.GetFields(reader);
    foreach (string field in fields)
    {
        Terms terms = fields.Terms(field);
        Assert.IsNotNull(terms);
        TermsEnum termsEnum = terms.Iterator(null);
        while (termsEnum.Next() != null)
        {
            seen++;
            if (sample.Count < size)
            {
                // Reservoir not full yet: always keep the term.
                sample.Add(new Term(field, termsEnum.Term()));
            }
            else
            {
                // Fix: the original always replaced a random slot once the
                // reservoir was full, biasing the sample toward late terms.
                // Algorithm R: keep the i-th term with probability size/seen.
                int pos = random.Next(seen);
                if (pos < size)
                {
                    sample[pos] = new Term(field, termsEnum.Term());
                }
            }
        }
    }
    sample = CollectionsHelper.Shuffle(sample);
    return(sample);
}
/// <summary>
/// Runs every sub-search in <paramref name="searchModel"/> against the index and
/// combines the per-search result lists with a union (OR) or intersection (AND).
/// </summary>
/// <param name="indexModel">Index to search against.</param>
/// <param name="searchModel">Sub-searches plus the combining query type.</param>
/// <returns>The combined result, sorted via <c>GetSorted</c>.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <c>searchModel.QueryType</c> is not a supported value.
/// </exception>
public async Task<List<DocumentModel>> MultiSearch(IndexModel indexModel, MultiSearchModel searchModel)
{
    var results = new List<List<DocumentModel>>();
    foreach (var search in searchModel.Searches)
    {
        // Each sub-search may require a different searcher implementation.
        SetSearcher(search.Type);
        results.Add(await _searcher.ExecuteSearch(indexModel, search));
    }

    List<DocumentModel> searchResult;
    switch (searchModel.QueryType)
    {
        case QueryType.Or:
            searchResult = CollectionsHelper.Union(new DocumentComparer(), results.ToArray()).ToList();
            break;

        case QueryType.And:
            searchResult = CollectionsHelper.Intersect(new DocumentComparer(), results.ToArray()).ToList();
            break;

        default:
            // Fix: include the parameter name and offending value so the
            // failure is diagnosable instead of a bare exception.
            throw new ArgumentOutOfRangeException(nameof(searchModel), searchModel.QueryType, "Unsupported query type.");
    }

    return(GetSorted(searchModel, searchResult));
}
/// <summary>
/// For several random regexps, seeks a TermsEnum to every indexed term the
/// regexp's automaton accepts, verifying both SeekExact and SeekCeil locate it.
/// </summary>
public virtual void TestSeeking()
{
    for (int i = 0; i < NumIterations; i++)
    {
        // Build a random regexp; its automaton decides which terms are "accepted".
        string reg = AutomatonTestUtil.RandomRegexp(Random());
        Automaton automaton = (new RegExp(reg, RegExp.NONE)).ToAutomaton();
        TermsEnum te = MultiFields.GetTerms(Reader, "field").Iterator(null);

        // Visit terms in random order so seeks are not monotonically increasing.
        IList<BytesRef> unsortedTerms = new List<BytesRef>(Terms);
        unsortedTerms = CollectionsHelper.Shuffle(unsortedTerms);

        foreach (BytesRef term in unsortedTerms)
        {
            if (BasicOperations.Run(automaton, term.Utf8ToString()))
            {
                // term is accepted
                if (Random().NextBoolean())
                {
                    // seek exact: must report the term as present.
                    Assert.IsTrue(te.SeekExact(term));
                }
                else
                {
                    // seek ceil: an existing term must be found exactly.
                    Assert.AreEqual(SeekStatus.FOUND, te.SeekCeil(term));
                    Assert.AreEqual(term, te.Term());
                }
            }
        }
    }
}
/// <summary> Tests that a query matches an expected set of documents using Hits.
///
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query)"></seealso>
/// <seealso cref="checkHitCollector"></seealso>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Sanity-check the query itself before comparing hit sets.
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }

    ScoreDoc[] hits = searcher.Search(query, null, 1000, null).ScoreDocs;

    // Deduplicate and sort the expected doc ids...
    System.Collections.ArrayList correct = new System.Collections.ArrayList();
    for (int i = 0; i < results.Length; i++)
    {
        CollectionsHelper.AddIfNotContains(correct, results[i]);
    }
    correct.Sort();

    // ...and the actual hit doc ids, so retrieval order does not matter.
    System.Collections.ArrayList actual = new System.Collections.ArrayList();
    for (int i = 0; i < hits.Length; i++)
    {
        CollectionsHelper.AddIfNotContains(actual, hits[i].Doc);
    }
    actual.Sort();

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    // Re-check after searching in case the search mutated query state.
    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Repositions this enum at the start of the postings for a new term: reads
/// per-field index options, seeks the freq stream to the term's offset, and
/// clears all per-term iteration state.
/// </summary>
internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState)
{
    // DOCS_ONLY fields store no term frequencies; every freq is implicitly 1.
    IndexOmitsTF = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY;
    StorePayloads = fieldInfo.HasPayloads();
    StoreOffsets = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    FreqOffset = termState.FreqOffset;
    SkipOffset = termState.SkipOffset;

    // TODO: for full enum case (eg segment merging) this
    // seek is unnecessary; maybe we can avoid in such
    // cases
    FreqIn.Seek(termState.FreqOffset);
    // Limit = how many docs this term's postings contain.
    Limit = termState.DocFreq;
    Debug.Assert(Limit > 0);

    // Reset per-term iteration state (doc -1 = not yet positioned).
    Ord = 0;
    Doc = -1;
    Accum = 0;
    // if (DEBUG) System.out.println("  sde limit=" + limit + " freqFP=" + freqOffset);
    Skipped = false;
    Start = -1;
    Count = 0;
    Freq_Renamed = 1;
    if (IndexOmitsTF)
    {
        // Pre-fill the freq buffer with the constant 1 since none are stored.
        CollectionsHelper.Fill(Freqs, 1);
    }
    MaxBufferedDocId = -1;
    return(this);
}
/// <summary>
/// Reloads the internal SPI list from the given <seealso cref="ClassLoader"/>.
/// Changes to the service list are visible after the method ends, all
/// iterators (<seealso cref="#iterator()"/>,...) stay consistent.
///
/// <p><b>NOTE:</b> Only new service providers are added, existing ones are
/// never removed or replaced.
///
/// <p><em>this method is expensive and should only be called for discovery
/// of new service providers on the given classpath/classloader!</em>
/// </summary>
public void Reload()
{
    // NOTE(review): locks on 'this'; a private gate object would be safer if
    // external code can lock this instance -- confirm before changing.
    lock (this)
    {
        // Copy-on-write: mutate a copy, then publish an unmodifiable map at the
        // end, so concurrent readers never see a partially updated list.
        IDictionary<string, S> services = new Dictionary<string, S>(this.Services);
        SPIClassIterator<S> loader = SPIClassIterator<S>.Get();

        // Ensure there is a default constructor (the SPIClassIterator contains types that don't)
        foreach (Type c in loader.Where(t => t.GetConstructor(Type.EmptyTypes) != null))
        {
            try
            {
                S service = (S)Activator.CreateInstance(c);
                string name = service.Name;
                // only add the first one for each name, later services will be ignored
                // this allows to place services before others in classpath to make
                // them used instead of others
                if (!services.ContainsKey(name))
                {
                    CheckServiceName(name);
                    services[name] = service;
                }
            }
            catch (Exception e)
            {
                throw new InvalidOperationException("Cannot instantiate SPI class: " + c.Name, e);
            }
        }
        this.Services = CollectionsHelper.UnmodifiableMap(services);
    }
}
/// <summary>
/// Cross-checks RamUsageEstimator.IdentityHashSet against a reference identity
/// set (NewSetFromMap over IdentityHashMap): both must agree on every
/// Contains/Add result and on the final contents.
/// </summary>
public virtual void TestCheck()
{
    Random rnd = Random();
    // Reference implementation with identity (reference-equality) semantics.
    ISet<object> jdk = CollectionsHelper.NewSetFromMap(new IdentityHashMap<object, bool?>());
    RamUsageEstimator.IdentityHashSet<object> us = new RamUsageEstimator.IdentityHashSet<object>();

    int max = 100000;
    // Small value range forces repeated values (and boxing collisions).
    int threshold = 256;
    for (int i = 0; i < max; i++)
    {
        // some of these will be interned and some will not so there will be collisions.
        int? v = rnd.Next(threshold);

        bool e1 = jdk.Contains(v);
        bool e2 = us.Contains(v);
        Assert.AreEqual(e1, e2);

        e1 = jdk.Add(v);
        e2 = us.Add(v);
        Assert.AreEqual(e1, e2);
    }

    // Finally both sets must contain exactly the same object identities.
    ISet<object> collected = CollectionsHelper.NewSetFromMap(new IdentityHashMap<object, bool?>());
    foreach (object o in us)
    {
        collected.Add(o);
    }
    Assert.AreEqual(collected, jdk);
}
/// <summary>
/// Called if we hit an exception at a bad time (when
/// updating the index files) and must discard all
/// currently buffered docs. this resets our state,
/// discarding any docs added since last flush.
/// </summary>
/// <param name="createdFiles">Receives the names of all files this DWPT created.</param>
internal virtual void Abort(ISet<string> createdFiles)
{
    //System.out.println(Thread.currentThread().getName() + ": now abort seg=" + segmentInfo.name);
    HasAborted = Aborting = true;
    try
    {
        if (InfoStream.IsEnabled("DWPT"))
        {
            InfoStream.Message("DWPT", "now abort");
        }
        try
        {
            Consumer.Abort();
        }
        catch (Exception)
        {
            // Fix: drop the unused exception variable (CS0168) and document
            // the deliberate swallow -- we are already aborting and must keep
            // tearing down state even if the consumer fails to abort cleanly.
        }

        PendingUpdates.Clear();
        CollectionsHelper.AddAll(createdFiles, Directory.CreatedFiles);
    }
    finally
    {
        // Always clear the Aborting flag, even if cleanup above threw.
        Aborting = false;
        if (InfoStream.IsEnabled("DWPT"))
        {
            InfoStream.Message("DWPT", "done abort");
        }
    }
}
/// <summary>
/// Worker thread body: after all threads are released together, repeatedly
/// re-runs every known query in random order and asserts the results are
/// identical to the precomputed answers.
/// </summary>
public override void Run()
{
    try
    {
        // Block until every thread is ready so they search concurrently.
        StartingGun.Wait();
        for (int i = 0; i < 20; i++)
        {
            // Fresh random order each pass.
            IList<KeyValuePair<BytesRef, TopDocs>> shuffled = new List<KeyValuePair<BytesRef, TopDocs>>(Answers.EntrySet());
            shuffled = CollectionsHelper.Shuffle(shuffled);
            foreach (KeyValuePair<BytesRef, TopDocs> ent in shuffled)
            {
                TopDocs actual = s.Search(new TermQuery(new Term("body", ent.Key)), 100);
                TopDocs expected = ent.Value;
                Assert.AreEqual(expected.TotalHits, actual.TotalHits);
                Assert.AreEqual(expected.ScoreDocs.Length, actual.ScoreDocs.Length, "query=" + ent.Key.Utf8ToString());
                for (int hit = 0; hit < expected.ScoreDocs.Length; hit++)
                {
                    Assert.AreEqual(expected.ScoreDocs[hit].Doc, actual.ScoreDocs[hit].Doc);
                    // Floats really should be identical:
                    Assert.IsTrue(expected.ScoreDocs[hit].Score == actual.ScoreDocs[hit].Score);
                }
            }
        }
    }
    catch (Exception e)
    {
        // NOTE(review): rewrapping as bare Exception loses the original type;
        // presumably intentional to surface assertion failures from this
        // thread -- confirm the harness's expectations before changing.
        throw new Exception(e.Message, e);
    }
}
/// <summary>
/// Random merge policy for tests: when more than one non-merging segment
/// exists, sometimes (always above 30 segments, otherwise 1-in-5) proposes a
/// single merge over a random-length prefix of the shuffled segment list.
/// Returns null when no merge is proposed.
/// </summary>
public override MergeSpecification FindMerges(MergeTrigger? mergeTrigger, SegmentInfos segmentInfos)
{
    MergeSpecification mergeSpec = null;
    //System.out.println("MRMP: findMerges sis=" + segmentInfos);
    int numSegments = segmentInfos.Size();

    // Only consider segments not already being merged.
    IList<SegmentCommitInfo> segments = new List<SegmentCommitInfo>();
    ICollection<SegmentCommitInfo> merging = Writer.Get().MergingSegments;
    foreach (SegmentCommitInfo sipc in segmentInfos.Segments)
    {
        if (!merging.Contains(sipc))
        {
            segments.Add(sipc);
        }
    }

    numSegments = segments.Count;
    if (numSegments > 1 && (numSegments > 30 || Random.Next(5) == 3))
    {
        // Shuffle so the merged prefix is a random subset, not just the oldest.
        segments = CollectionsHelper.Shuffle(segments);

        // TODO: sometimes make more than 1 merge?
        mergeSpec = new MergeSpecification();
        int segsToMerge = TestUtil.NextInt(Random, 1, numSegments);
        mergeSpec.Add(new OneMerge(segments.SubList(0, segsToMerge)));
    }
    return(mergeSpec);
}
/// <summary>
/// Rebuilds the device mapping list from SettingsManager, attaching it as a new
/// data source or synchronizing into the existing one when the count changed.
/// </summary>
/// <returns>A completed task (the work runs synchronously).</returns>
Task RefreshMapDeviceToList()
{
    var list = new SortableBindingList<UserDevice>();
    // Exclude System/Virtual devices.
    UserDevice[] devices;
    lock (SettingsManager.UserDevices.SyncRoot)
    {
        devices = SettingsManager.UserDevices.Items
            .Where(x => ShowSystemDevices || x.ConnectionClass != DEVCLASS.SYSTEM)
            .ToArray();
    }
    list.AddRange(devices);
    list.SynchronizingObject = ControlsHelper.MainTaskScheduler;
    // If new list, item added or removed then...
    if (_currentData == null)
    {
        AttachDataSource(list);
    }
    else if (_currentData.Count != list.Count)
    {
        CollectionsHelper.Synchronize(list, _currentData);
    }
    // Fix: returning null from a Task-returning method makes any 'await'
    // throw NullReferenceException; return a completed task instead.
    return Task.CompletedTask;
}
/// <summary>
/// Returns the symmetric difference of two file-name arrays: every name that
/// appears in exactly one of the inputs, stored as Hashtable keys.
/// </summary>
/// <param name="files1">First set of file names.</param>
/// <param name="files2">Second set of file names.</param>
/// <returns>A Hashtable whose keys are the names unique to either input.</returns>
private static System.Collections.Hashtable DifFiles(System.String[] files1, System.String[] files2)
{
    System.Collections.Hashtable set1 = new System.Collections.Hashtable();
    System.Collections.Hashtable set2 = new System.Collections.Hashtable();
    System.Collections.Hashtable extra = new System.Collections.Hashtable();

    for (int x = 0; x < files1.Length; x++)
    {
        CollectionsHelper.AddIfNotContains(set1, files1[x]);
    }
    for (int x = 0; x < files2.Length; x++)
    {
        CollectionsHelper.AddIfNotContains(set2, files2[x]);
    }

    // Fix: enumerating a Hashtable directly yields DictionaryEntry values, not
    // keys, so the Contains (= ContainsKey) lookups below could never match.
    // Enumerate the Keys collection instead (assumes AddIfNotContains stores
    // each file name as a key -- consistent with the Contains usage here).
    foreach (System.Object o in set1.Keys)
    {
        if (!set2.Contains(o))
        {
            CollectionsHelper.AddIfNotContains(extra, o);
        }
    }
    foreach (System.Object o in set2.Keys)
    {
        if (!set1.Contains(o))
        {
            CollectionsHelper.AddIfNotContains(extra, o);
        }
    }
    return(extra);
}
/// <summary>
/// Re-arms the match-finder state for a new window starting at offset
/// <paramref name="base"/>.
/// </summary>
internal void Reset(int @base)
{
    this.@base = @base;
    NextToUpdate = @base;
    // -1 marks an empty hash slot (no position recorded yet).
    CollectionsHelper.Fill(HashTable, -1);
    CollectionsHelper.Fill(ChainTable, (short)0);
}
/// <summary> Tests that a query matches an expected set of documents using a
/// HitCollector.
///
/// <p/>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query,HitCollector)"></seealso>
/// <seealso cref="checkHits"></seealso>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    QueryUtils.Check(query, searcher);

    // Expected doc ids as a set (Hashtable used as a set in this legacy code).
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        CollectionsHelper.AddIfNotContains(correct, (System.Int32)results[i]);
    }

    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    Collector c = new SetCollector(actual);

    searcher.Search(query, c, null);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Re-run with the searcher wrapped at various depths; results must not change.
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapSearcher(searcher, i).Search(query, c, null);
        Assert.AreEqual(correct, actual, "Wrap Searcher " + i + ": " + query.ToString(defaultFieldName));
    }

    // Reader-level wrapping only applies to IndexSearcher.
    if (!(searcher is IndexSearcher))
    {
        return;
    }

    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapUnderlyingReader((IndexSearcher)searcher, i).Search(query, c, null);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Verifies TermsFilter equality/hash-code is independent of term order and
/// duplicates: a filter built from the unique set must equal one built from the
/// shuffled (possibly duplicated) list, and differ from one missing a term.
/// </summary>
public void TestHashCodeAndEquals()
{
    int num = AtLeast(100);
    bool singleField = Random().NextBoolean();
    IList<Term> terms = new List<Term>();
    var uniqueTerms = new HashSet<Term>();
    for (int i = 0; i < num; i++)
    {
        string field = "field" + (singleField ? "1" : Random().Next(100).ToString());
        string @string = TestUtil.RandomRealisticUnicodeString(Random());
        terms.Add(new Term(field, @string));
        uniqueTerms.Add(new Term(field, @string));
        TermsFilter left = TermsFilter(singleField && Random().NextBoolean(), uniqueTerms);
        // Fix: Shuffle's return value was discarded; every other call site in
        // this codebase assigns the result, so 'terms' was never shuffled and
        // the order-independence of Equals went untested.
        terms = CollectionsHelper.Shuffle(terms);
        TermsFilter right = TermsFilter(singleField && Random().NextBoolean(), terms);
        assertEquals(right, left);
        assertEquals(right.GetHashCode(), left.GetHashCode());
        if (uniqueTerms.Count > 1)
        {
            // Removing one term must break equality with both filters.
            IList<Term> asList = new List<Term>(uniqueTerms);
            asList.RemoveAt(0);
            TermsFilter notEqual = TermsFilter(singleField && Random().NextBoolean(), asList);
            assertFalse(left.Equals(notEqual));
            assertFalse(right.Equals(notEqual));
        }
    }
}
/// <summary>
/// Builds a random document's facet list: 1-3 categories from dimension A,
/// 1-2 from dimension B, plus one NO_PARENT category from each of C and D.
/// </summary>
/// <param name="random">Source of randomness; used for all choices.</param>
/// <returns>The selected facet fields.</returns>
private static IList<FacetField> RandomCategories(Random random)
{
    // add random categories from the two dimensions, ensuring that the same
    // category is not added twice.
    int numFacetsA = random.Next(3) + 1; // 1-3
    int numFacetsB = random.Next(2) + 1; // 1-2
    List<FacetField> categories_a = new List<FacetField>();
    categories_a.AddRange(Arrays.AsList(CATEGORIES_A));
    List<FacetField> categories_b = new List<FacetField>();
    categories_b.AddRange(Arrays.AsList(CATEGORIES_B));
    categories_a = CollectionsHelper.Shuffle(categories_a).ToList();
    categories_b = CollectionsHelper.Shuffle(categories_b).ToList();

    List<FacetField> categories = new List<FacetField>();
    categories.AddRange(categories_a.SubList(0, numFacetsA));
    categories.AddRange(categories_b.SubList(0, numFacetsB));

    // add the NO_PARENT categories
    // Fix: use the supplied 'random' instead of the static Random() so the
    // whole method is driven by the caller's (seeded) source of randomness.
    categories.Add(CATEGORIES_C[random.Next(NUM_CHILDREN_CP_C)]);
    categories.Add(CATEGORIES_D[random.Next(NUM_CHILDREN_CP_D)]);
    return(categories);
}
/// <summary>
/// Maps an offset in the corrected (filtered) text back to the corresponding
/// offset in the original input, using the cumulative diffs in Corrections.
/// </summary>
protected override int Correct(int currentOff)
{
    // Greatest entry with key < currentOff + 1, i.e. at or before currentOff.
    KeyValuePair<int, int> lastEntry = CollectionsHelper.LowerEntry(Corrections, currentOff + 1);
    // NOTE(review): default(KeyValuePair<int,int>) == (0, 0) doubles as the
    // "no entry" sentinel, so a genuine correction entry of (0, 0) would be
    // ignored -- confirm Corrections can never contain that pair.
    int ret = lastEntry.Equals(default(KeyValuePair<int, int>)) ? currentOff : currentOff + lastEntry.Value;
    Debug.Assert(ret >= 0, "currentOff=" + currentOff + ",diff=" + (ret - currentOff));
    return(ret);
}
/// <summary>
/// Formats <paramref name="coll"/> via <c>NameValueCollectionAsString</c>, but
/// renders a null or empty collection as an empty string instead of a
/// header-only dump.
/// </summary>
/// <param name="coll">Collection to format; may be null or empty.</param>
/// <param name="sComment">Comment passed through to the formatter.</param>
/// <returns>The formatted collection, or "" when there is nothing to show.</returns>
public static string NotEmptyNameValueCollectionAsString(NameValueCollection coll, string sComment /* = "" */)
{
    return CollectionsHelper.IsNullOrEmpty(coll)
        ? ""
        : NameValueCollectionAsString(coll, sComment);
}
/// <summary>
/// Indexes documents concurrently with <paramref name="nThreads"/>
/// IndexingThreads sharing one MockIndexWriter, then returns the merged doc
/// map together with the still-open writer (for reader-vs-open-writer tests).
/// </summary>
public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.Hashtable docs = new System.Collections.Hashtable();
    IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
    w.UseCompoundFile = false;

    /*
     * w.setMaxMergeDocs(Integer.MAX_VALUE);
     * w.setMaxFieldLength(10000);
     * w.setRAMBufferSizeMB(1);
     * w.setMergeFactor(10);
     ***/

    // force many merges
    w.MergeFactor = mergeFactor;
    w.SetRAMBufferSizeMB(.1);
    w.SetMaxBufferedDocs(maxBufferedDocs);

    // Give each thread a disjoint doc-id space (base offsets 1,000,000 apart).
    threads = new IndexingThread[nThreads];
    for (int i = 0; i < threads.Length; i++)
    {
        IndexingThread th = new IndexingThread();
        th.w = w;
        th.base_Renamed = 1000000 * i;
        th.range = range;
        th.iterations = iterations;
        threads[i] = th;
    }

    for (int i = 0; i < threads.Length; i++)
    {
        threads[i].Start();
    }
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i].Join();
    }

    // w.optimize();
    //w.close();

    // Merge each thread's docs into one map; locking the thread object
    // presumably matches IndexingThread's own synchronization -- confirm.
    for (int i = 0; i < threads.Length; i++)
    {
        IndexingThread th = threads[i];
        lock (th)
        {
            CollectionsHelper.AddAllIfNotContains(docs, th.docs);
        }
    }

    _TestUtil.CheckIndex(dir);
    DocsAndWriter dw = new DocsAndWriter();
    dw.docs = docs;
    dw.writer = w;
    return(dw);
}
/// <summary>
/// Indexes 100 identical multi-field documents (forcing several flushes), then
/// loads a random document with a single-field fieldsToLoad set and verifies
/// each stored field type (binary, string, numeric) round-trips.
/// </summary>
public void TestReadSkip()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    // Small buffer forces multiple flushes/segments.
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    FieldType ft = new FieldType();
    ft.Stored = true;
    ft.Freeze();

    // One random value for each supported stored-field type.
    string @string = TestUtil.RandomSimpleString(Random(), 50);
    sbyte[] bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8);
    long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
    int i = Random().NextBoolean() ? Random().Next(42) : Random().Next();
    float f = Random().NextFloat();
    double d = Random().NextDouble();

    IList<Field> fields = Arrays.AsList(new Field("bytes", bytes, ft), new Field("string", @string, ft), new LongField("long", l, Field.Store.YES), new IntField("int", i, Field.Store.YES), new FloatField("float", f, Field.Store.YES), new DoubleField("double", d, Field.Store.YES));

    for (int k = 0; k < 100; ++k)
    {
        Document doc = new Document();
        foreach (Field fld in fields)
        {
            doc.Add(fld);
        }
        iw.w.AddDocument(doc);
    }
    iw.Commit();

    DirectoryReader reader = DirectoryReader.Open(dir);
    int docID = Random().Next(100);
    foreach (Field fld in fields)
    {
        string fldName = fld.Name();
        // Load only this one field, exercising the stored-fields skip path.
        Document sDoc = reader.Document(docID, CollectionsHelper.Singleton(fldName));
        IndexableField sField = sDoc.GetField(fldName);
        if (typeof(Field).Equals(fld.GetType()))
        {
            // Plain Field: compare both binary and string representations.
            Assert.AreEqual(fld.BinaryValue(), sField.BinaryValue());
            Assert.AreEqual(fld.StringValue, sField.StringValue);
        }
        else
        {
            Assert.AreEqual(fld.NumericValue, sField.NumericValue);
        }
    }
    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
/// <summary>
/// Picks a random per-field similarity configuration: a per-field seed, a
/// coord policy, a queryNorm flag, and a shuffled list of candidate
/// similarities.
/// </summary>
public RandomSimilarityProvider(Random random)
{
    PerFieldSeed = random.Next();
    // 0..2: selects the coord() behavior.
    CoordType = random.Next(3);
    ShouldQueryNorm = random.NextBoolean();
    KnownSims = new List<Similarity>(AllSims);
    // NOTE(review): unlike the original Collections.shuffle(KnownSims, random),
    // this Shuffle overload is not driven by the seeded 'random' -- confirm
    // whether reproducibility under a fixed test seed is required here.
    KnownSims = CollectionsHelper.Shuffle(KnownSims);
    //Collections.shuffle(KnownSims, random);
}
/// <summary>
/// Rolls the dungeon-key drop table on a monster kill and, when a rarity is
/// rolled, adds one key of that rarity to the player's collection.
/// </summary>
public override void OnKill()
{
    var position = CollectionsHelper.RandomizeFrequenciesListPosition(_dungeonKeyRarityChances);
    // Position 0 means "no key dropped"; positions 1..n map to Rarity 0..n-1.
    if (position != 0)
    {
        var dungeonKey = User.Instance.DungeonKeys.FirstOrDefault(x => x.Rarity == (Rarity)(position - 1));
        // Fix: FirstOrDefault can return null when no key of that rarity
        // exists in the collection; guard against a NullReferenceException.
        dungeonKey?.AddItem();
    }
}
/**
 * Loads terms and frequencies from Wikipedia (cached).
 */
public override void SetUp()
{
    // Deliberately fires when assertions are enabled: benchmark numbers are
    // meaningless in a checked build.
    Debug.Assert(false, "disable assertions before running benchmarks!");
    IList<Input> input = ReadTop50KWiki();
    // Shuffle twice, independently, so the dictionary build order and the
    // benchmark query order are unrelated.
    input = CollectionsHelper.Shuffle(input);
    dictionaryInput = input.ToArray();
    input = CollectionsHelper.Shuffle(input);
    benchmarkInput = input;
}
/// <summary>
/// Removes <paramref name="amount"/> ingots of this rarity from the user's
/// collection, shows a "-N" floating text for non-zero amounts, and refreshes
/// the ingot tab of the equipment panel.
/// </summary>
/// <param name="amount">Number of ingots to remove; defaults to 1.</param>
public override void RemoveItem(int amount = 1)
{
    CollectionsHelper.RemoveItemFromCollection(this, User.Instance.Ingots, amount);

    if (amount != 0)
    {
        // Fix: use '?.' after the 'as' cast (matching the AddItem counterpart)
        // so a missing or differently-typed main window cannot cause a
        // NullReferenceException.
        (Application.Current.MainWindow as GameWindow)?.CreateFloatingTextUtility($"-{amount}", ColorsHelper.GetRarityColor(Rarity), FloatingTextHelper.GetIngotRarityPosition(Rarity));
    }

    InterfaceHelper.RefreshSpecificEquipmentPanelTabOnCurrentPage(typeof(Ingot));
}
/// <summary>
/// Builds a string of <paramref name="nTokens"/> random UTF-8 tokens by
/// clearing the shared buffer and appending tokens via AddUTF8Token.
/// </summary>
/// <param name="nTokens">Number of tokens to generate.</param>
/// <returns>The generated string (buffer prefix of the final length).</returns>
public virtual System.String GetUTF8String(int nTokens)
{
    // Zero the scratch buffer before writing into it.
    CollectionsHelper.Fill(buffer, (char)0);
    int length = 0;
    for (int token = 0; token < nTokens; token++)
    {
        // AddUTF8Token appends one token and returns the new end offset.
        length = AddUTF8Token(length);
    }
    return new System.String(buffer, 0, length);
}
/// <summary>
/// Initializes skip-list reader state for a new term: records the payload
/// flag and seeds every skip level with the term's base freq/prox file
/// pointers and a zero payload length.
/// </summary>
public void Init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, bool storesPayloads)
{
    base.Init(skipPointer, df);
    this.CurrentFieldStoresPayloads = storesPayloads;
    LastFreqPointer = freqBasePointer;
    LastProxPointer = proxBasePointer;

    // Every skip level starts out at the term's base pointers.
    CollectionsHelper.Fill(FreqPointer_Renamed, freqBasePointer);
    CollectionsHelper.Fill(ProxPointer_Renamed, proxBasePointer);
    CollectionsHelper.Fill(PayloadLength_Renamed, 0);
}
/// <summary>
/// Verifies CollectionsHelper.Intersect (with a null comparer) returns exactly
/// the element common to all four input arrays.
/// </summary>
public void IntersectTest()
{
    // Four arrays that share exactly one element: "23".
    var first = new[] { "1", "23", "11", "32" };
    var second = new[] { "2", "23", "41", "32" };
    var third = new[] { "13", "23", "113", "31" };
    var fourth = new[] { "112", "23", "111", "312" };

    var intersection = CollectionsHelper.Intersect(null, first, second, third, fourth).ToArray();

    Assert.IsNotEmpty(intersection);
    Assert.AreEqual("23", intersection.First());
}
/// <summary>
/// Creates the three per-specialization counter tables (buffs, thresholds,
/// amounts), zero-initializes each, and applies the resulting specialization
/// state.
/// </summary>
public Specializations()
{
    SpecializationBuffs = new ObservableDictionary<SpecializationType, int>();
    CollectionsHelper.InitializeDictionary(SpecializationBuffs);

    SpecializationThresholds = new ObservableDictionary<SpecializationType, int>();
    CollectionsHelper.InitializeDictionary(SpecializationThresholds);

    SpecializationAmounts = new ObservableDictionary<SpecializationType, int>();
    CollectionsHelper.InitializeDictionary(SpecializationAmounts);

    UpdateSpecialization();
}
/// <summary>
/// Adds <paramref name="amount"/> copies of this recipe to the current hero's
/// recipe collection, optionally queues a loot notification, then records
/// achievement progress and refreshes the recipe tab of the equipment panel.
/// </summary>
/// <param name="amount">Number of copies to add; defaults to 1.</param>
/// <param name="displayFloatingText">Whether to queue a loot popup.</param>
public override void AddItem(int amount = 1, bool displayFloatingText = false)
{
    var heroRecipes = User.Instance.CurrentHero.Recipes;
    CollectionsHelper.AddItemToCollection(this, heroRecipes, amount);

    if (displayFloatingText)
    {
        LootQueueHelper.AddToQueue(Name, Rarity, PackIconKind.ScriptText);
    }

    AddAchievementProgress();
    InterfaceHelper.RefreshSpecificEquipmentPanelTabOnCurrentPage(typeof(Recipe));
}
/// <summary>
/// Adds <paramref name="amount"/> ingots of this rarity to the user's
/// collection, shows a "+N" floating text for non-zero amounts, then records
/// achievement progress and refreshes the ingot tab of the equipment panel.
/// </summary>
/// <param name="amount">Number of ingots to add; defaults to 1.</param>
/// <param name="displayFloatingText">Unused here; the popup is gated on a non-zero amount.</param>
public override void AddItem(int amount = 1, bool displayFloatingText = true)
{
    CollectionsHelper.AddItemToCollection(this, User.Instance.Ingots, amount);

    if (amount != 0)
    {
        var gameWindow = Application.Current.MainWindow as GameWindow;
        gameWindow?.CreateFloatingTextUtility($"+{amount}", ColorsHelper.GetRarityColor(Rarity), FloatingTextHelper.GetIngotRarityPosition(Rarity));
    }

    AddAchievementProgress();
    InterfaceHelper.RefreshSpecificEquipmentPanelTabOnCurrentPage(typeof(Ingot));
}