Subject<RedisValue> GetSubject(string channelName)
{
    channelName += Config.CatalogueName;
    Subject<RedisValue> subject = null;
    lock (subsync)
    {
        if (subscriptions.ContainsKey(channelName))
        {
            subject = subscriptions[channelName];
        }
        else
        {
            subject = new Subject<RedisValue>();
            subscriptions = subscriptions.Add(channelName, subject);
            Retry(() => redis.GetSubscriber().Subscribe(
                channelName,
                (channel, value) =>
                {
                    if (channel == channelName && !value.IsNullOrEmpty)
                    {
                        subject.OnNext(value);
                    }
                }));
        }
    }
    return subject;
}
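If a dependency on System.Collections.Concurrent is acceptable, the same subscribe-once-per-channel behaviour can be sketched without an explicit lock by caching Lazy-wrapped subjects in a ConcurrentDictionary. This is only an illustrative alternative to the snippet above, not its actual implementation; the SubjectCache type and the subscribeOnce delegate are hypothetical names.

using System;
using System.Collections.Concurrent;
using System.Reactive.Subjects;

// Hypothetical sketch: GetOrAdd plus Lazy<T> keeps the "subscribe exactly once
// per channel" guarantee, because only the first access to Lazy.Value runs the
// inner factory, even if several threads race on the same key.
class SubjectCache<T>
{
    private readonly ConcurrentDictionary<string, Lazy<Subject<T>>> _subjects =
        new ConcurrentDictionary<string, Lazy<Subject<T>>>();

    public Subject<T> GetSubject(string channelName, Action<Subject<T>> subscribeOnce)
    {
        return _subjects.GetOrAdd(channelName, _ => new Lazy<Subject<T>>(() =>
        {
            var subject = new Subject<T>();
            subscribeOnce(subject); // e.g. wire up the external subscription here
            return subject;
        })).Value;
    }
}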
public void ObjToObjStressTest()
{
    int numberOfIteration = 2000000;
    HashMap<BinaryArray, Int32> myHashMap = new HashMap<BinaryArray, Int32>(-1);
    Dictionary<BinaryArray, Int32> etalon = new Dictionary<BinaryArray, int>();
    Random rand = new Random(55);
    for (int i = 0; i < numberOfIteration; ++i)
    {
        if (rand.Next(3) != 0)
        {
            BinaryArray ar = new BinaryArray().Assign(i);
            etalon.Add(ar, i);
            myHashMap[ar] = i;
            Assert.AreEqual(etalon[ar], myHashMap[ar]);
        }
        else
        {
            int x = rand.Next(i);
            BinaryArray ar = new BinaryArray().Assign(x);
            Assert.AreEqual(etalon.ContainsKey(ar), myHashMap.ContainsKey(ar));
            if (myHashMap.ContainsKey(ar))
            {
                myHashMap.Remove(ar);
                etalon.Remove(ar);
            }
        }
        if (i % 1000000 == 0)
        {
            myHashMap.Clear();
            etalon.Clear();
        }
    }
}
public void ListedOnlyReturnsListed(string[] versionSet, int expectedNumSemVer1)
{
    var result = MakeVersionResult(versionSet, randomizeListing: true);
    var listedMap = new HashMap<string, VersionDetail>();
    var listedPackages = result.AllVersionDetails.Where(x => x.IsListed);
    foreach (var detail in listedPackages)
    {
        listedMap.Add(detail.FullVersion, detail);
    }

    var semVer2ListedResult = result.GetVersions(onlyListed: true, includeSemVer2: true);
    var semVer1ListedResult = result.GetVersions(onlyListed: true, includeSemVer2: false);

    Assert.True(semVer2ListedResult.Count() <= listedMap.Count);
    foreach (var version in semVer2ListedResult)
    {
        Assert.True(listedMap.ContainsKey(version));
    }

    Assert.True(semVer1ListedResult.Count() <= listedMap.Count);
    Assert.True(semVer1ListedResult.Count() <= semVer2ListedResult.Count());
    foreach (var version in semVer1ListedResult)
    {
        Assert.True(listedMap.ContainsKey(version));
        var versionResult = listedMap[version];
        Assert.True(versionResult.IsListed);
        Assert.True(!versionResult.IsSemVer2);
    }
}
/// <summary>
/// Called when a client connection is closed.
/// </summary>
/// <param name="socketID">Connection ID.</param>
/// <param name="localSID">Local connection ID.</param>
public virtual void onClientClose(int socketID, int localSID)
{
    if (m_compressTypes.ContainsKey(socketID))
    {
        m_compressTypes.Remove(socketID);
    }
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public boolean continueWithNextDecision(String symbol) throws org.maltparser.core.exception.MaltChainedException
public virtual bool continueWithNextDecision(string symbol)
{
    if (symbol2transitionMap.ContainsKey(symbol))
    {
        return symbol2transitionMap[symbol].Labeled;
    }
    return true;
}
public void TestPuttingObject()
{
    Int32 key = GetRandomKey();
    House house = GetRandomHouse();
    houseMap.Put(key, house);
    Assert.IsTrue(houseMap.ContainsKey(key));
    Assert.IsTrue(houseMap.ContainsValue(house));
}
internal static void RegisterActivity(InstanceReference instanceReference)
{
    var hashCode = instanceReference.GetHashCode();
    if (registeredInstances.ContainsKey(hashCode))
    {
        registeredInstances.Remove(hashCode);
    }
    registeredInstances.Put(hashCode, instanceReference.Instance);
}
public void test_PropertyChange_ParentChild_ToOne()
{
    MaterialType obj2 = EntityFactory.CreateEntity<MaterialType>();
    Material mat = EntityFactory.CreateEntity<Material>();
    CacheModification.Active = true;
    try
    {
        obj2.Id = 2;
        obj2.Name = "name2";
        obj2.Version = 1;
        mat.Id = 1;
        mat.Name = "name1";
        mat.Version = 1;
        mat.ChildMatType = obj2;
    }
    finally
    {
        CacheModification.Active = false;
    }

    HashMap<String, int> matCounter = new HashMap<String, int>();
    PropertyChangedEventHandler matHandler = GetPropertyChangeHandler(matCounter);
    HashMap<String, int> matTypeCounter = new HashMap<String, int>();
    PropertyChangedEventHandler matTypeHandler = GetPropertyChangeHandler(matTypeCounter);
    ((INotifyPropertyChanged)mat).PropertyChanged += matHandler;
    ((INotifyPropertyChanged)mat.ChildMatType).PropertyChanged += matTypeHandler;

    mat.ChildMatType.Name += "_change";
    WaitForUI();

    Assert.AssertEquals(2, matCounter.Count);
    Assert.AssertTrue(matCounter.ContainsKey("ToBeUpdated"));
    Assert.AssertTrue(matCounter.ContainsKey("HasPendingChanges"));
    Assert.AssertEquals(5, matCounter.Get("ToBeUpdated"));
    Assert.AssertEquals(5, matCounter.Get("HasPendingChanges"));

    Assert.AssertEquals(5, matTypeCounter.Count);
    Assert.AssertTrue(matTypeCounter.ContainsKey("Name"));
    Assert.AssertTrue(matTypeCounter.ContainsKey("Temp1"));
    Assert.AssertTrue(matTypeCounter.ContainsKey("Temp2"));
    Assert.AssertTrue(matTypeCounter.ContainsKey("ToBeUpdated"));
    Assert.AssertTrue(matTypeCounter.ContainsKey("HasPendingChanges"));
    Assert.AssertEquals(1, matTypeCounter.Get("Name"));
    Assert.AssertEquals(1, matTypeCounter.Get("Temp1"));
    Assert.AssertEquals(1, matTypeCounter.Get("Temp2"));
    Assert.AssertEquals(1, matTypeCounter.Get("ToBeUpdated"));
    Assert.AssertEquals(1, matTypeCounter.Get("HasPendingChanges"));
}
/// <summary>
/// Handles a message that another call is waiting for.
/// </summary>
/// <param name="message">The message.</param>
public virtual void onWaitMessageHandle(FCMessage message)
{
    if (m_waitMessages.Count > 0)
    {
        lock (m_waitMessages)
        {
            if (m_waitMessages.ContainsKey(message.m_requestID))
            {
                FCMessage waitMessage = m_waitMessages.get(message.m_requestID);
                waitMessage.copy(message);
            }
        }
    }
}
public SecondlifeWikiLibraryData(IDocumentationProvider documentationProvider, IEnumerable<string> subsets)
{
    _client = new CachedWebDownloader(WebCacheFileDirectory);
    _subsets = subsets.ToList();

    Log.WriteLine("============================");
    Log.WriteLine("Starting scrape of " + SecondlifeWikiDomain + " ... ");
    Log.WriteLine("============================");

    foreach (var lslLibraryConstantSignature in GetLSLConstants())
    {
        lslLibraryConstantSignature.DocumentationString =
            documentationProvider.DocumentConstant(lslLibraryConstantSignature);
        _constants.Add(lslLibraryConstantSignature.Name, lslLibraryConstantSignature);
    }

    foreach (var lslLibraryFunctionSignature in GetLSLFunctions())
    {
        lslLibraryFunctionSignature.DocumentationString =
            documentationProvider.DocumentFunction(lslLibraryFunctionSignature);
        if (_functions.ContainsKey(lslLibraryFunctionSignature.Name))
        {
            _functions[lslLibraryFunctionSignature.Name].Add(lslLibraryFunctionSignature);
        }
        else
        {
            _functions.Add(lslLibraryFunctionSignature.Name,
                new GenericArray<LSLLibraryFunctionSignature> { lslLibraryFunctionSignature });
        }
    }

    foreach (var lslLibraryEventSignature in GetLSLEvents())
    {
        lslLibraryEventSignature.DocumentationString =
            documentationProvider.DocumentEvent(lslLibraryEventSignature);
        _events.Add(lslLibraryEventSignature.Name, lslLibraryEventSignature);
    }

    Log.WriteLine("============================");
    Log.WriteLine("Finished scrape of " + SecondlifeWikiDomain);
    Log.WriteLine("============================");
}
public void Remove()
{
    HashMap<int, string> map = new HashMap<int, string>();
    map.Add(1, "a");
    map.Add(2, "b");
    map.Add(3, "c");
    Assert.True(map.ContainsKey(2));
    map.Remove(2);
    Assert.False(map.ContainsKey(2));
}
public void ObjToObjUnsafeIterationStressTest()
{
    int numberOfIteration = 2000000;
    HashMap<BinaryArray, Int32> myHashMap = new HashMap<BinaryArray, Int32>(-1);
    Dictionary<BinaryArray, Int32> etalon = new Dictionary<BinaryArray, int>();
    Random rand = new Random(55);
    for (int i = 0; i < numberOfIteration; ++i)
    {
        if (rand.Next(3) != 0)
        {
            BinaryArray ar = new BinaryArray().Assign(i);
            etalon.Add(ar, i);
            myHashMap[ar] = i;
            Assert.AreEqual(etalon[ar], myHashMap[ar]);
        }
        else
        {
            int x = rand.Next(i);
            BinaryArray ar = new BinaryArray().Assign(x);
            Assert.AreEqual(etalon.ContainsKey(ar), myHashMap.ContainsKey(ar));
            if (myHashMap.ContainsKey(ar))
            {
                myHashMap.Remove(ar);
                etalon.Remove(ar);
            }
        }
        if (i % 10000 == 0)
        {
            List<Int32> etalon1 = new List<int>();
            List<Int32> unsafeEnumeration = new List<int>();
            for (var item = myHashMap.First; item != null; item = item.Value.Next)
            {
                etalon1.Add(item.Value.Value);
            }
            for (var item = myHashMap.UnsafeFirst; item != null; item = item.Value.Next)
            {
                unsafeEnumeration.Add(item.Value.Value);
            }
            etalon1.Sort();
            unsafeEnumeration.Sort();
            Assert.AreEqual(etalon1.Count, unsafeEnumeration.Count);
            for (int j = 0; j < etalon1.Count; ++j)
            {
                Assert.AreEqual(etalon1[j], unsafeEnumeration[j]);
            }
        }
    }
}
public void Handle_StartStandingQuery(StartStandingQuery message)
{
    if (_statements.ContainsKey(message.Query.StandingQueryId) &&
        _statements[message.Query.StandingQueryId].IsStarted)
    {
        return;
    }
    var statement = _service.EPAdministrator.CreateEPL(
        message.Query.EplStatement, message.Query.StandingQueryId, message.Query);
    statement.AddEventHandlerWithReplay(SimpleEventHandler);
    _statements.Add(message.Query.StandingQueryId, statement);
    statement.Start();
    _log.Info("StandingQuery {0} - {1} Started", message.Query.StandingQueryId, message.Query.Description);
}
/// <summary>
/// Returns the set of all component properties which were tagged as mandatory but which are not set (or no default
/// value is given).
/// </summary>
/// <returns></returns>
public Collection<string> GetUndefinedMandatoryProps()
{
    var undefProps = new Collection<string>();
    foreach (var propName in GetRegisteredProperties())
    {
        var anno = _registeredProperties[propName].Annotation;
        var isMandatory = false;
        if (anno is S4Component)
        {
            isMandatory = ((S4Component)anno).Mandatory && ((S4Component)anno).DefaultClass == null;
        }
        else if (anno is S4String)
        {
            isMandatory = ((S4String)anno).Mandatory && ((S4String)anno).DefaultValue.Equals(S4String.NotDefined);
        }
        else if (anno is S4Integer)
        {
            isMandatory = ((S4Integer)anno).Mandatory && ((S4Integer)anno).DefaultValue == S4Integer.NotDefined;
        }
        else if (anno is S4Double)
        {
            isMandatory = ((S4Double)anno).Mandatory && ((S4Double)anno).DefaultValue == S4Double.NotDefined;
        }
        if (isMandatory &&
            !((_rawProps.ContainsKey(propName) && _rawProps[propName] != null) ||
              (_propValues.ContainsKey(propName) && _propValues[propName] != null)))
        {
            undefProps.Add(propName);
        }
    }
    return undefProps;
}
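As an aside, the repeated casts in the if/else chain above can be folded into type patterns. The following is only a minimal sketch, assuming the same S4Component/S4String/S4Integer/S4Double annotation types and a C# 8 or later compiler; the helper name is hypothetical and the behaviour is meant to match the original checks.

// Hypothetical helper, assuming the S4* annotation types above and C# 8+.
// Returns true when the annotation is mandatory and carries no default value.
private static bool IsMandatoryWithoutDefault(object anno) => anno switch
{
    S4Component c => c.Mandatory && c.DefaultClass == null,
    S4String s    => s.Mandatory && s.DefaultValue.Equals(S4String.NotDefined),
    S4Integer i   => i.Mandatory && i.DefaultValue == S4Integer.NotDefined,
    S4Double d    => d.Mandatory && d.DefaultValue == S4Double.NotDefined,
    _             => false
};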
public static void Main()
{
    Console.Write("Please, enter some text: ");
    string text = Console.ReadLine();
    var chars = text.AsEnumerable();

    var charCounts = new HashMap<char, int>();
    foreach (var character in chars)
    {
        if (charCounts.ContainsKey(character))
        {
            charCounts[character]++;
        }
        else
        {
            charCounts[character] = 1;
        }
    }

    var sortedChars = charCounts.Keys.OrderBy(k => k).ToList();
    foreach (var character in sortedChars)
    {
        Console.WriteLine("{0}: {1} time(s)", character, charCounts[character]);
    }
}
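The same counting loop can be sketched with the BCL Dictionary<TKey, TValue> and TryGetValue, which avoids the second lookup performed by the ContainsKey-then-indexer pattern above. This is a standalone illustration, not part of the original program.

using System;
using System.Collections.Generic;
using System.Linq;

class CharCountSketch
{
    static void Main()
    {
        Console.Write("Please, enter some text: ");
        string text = Console.ReadLine() ?? string.Empty;

        // TryGetValue leaves 'current' at 0 when the key is missing,
        // so a single indexer write covers both the insert and update cases.
        var counts = new Dictionary<char, int>();
        foreach (var c in text)
        {
            counts.TryGetValue(c, out var current);
            counts[c] = current + 1;
        }

        foreach (var pair in counts.OrderBy(p => p.Key))
        {
            Console.WriteLine("{0}: {1} time(s)", pair.Key, pair.Value);
        }
    }
}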
private Assembly _currentDomainOnAssemblyResolve(object sender, ResolveEventArgs loadArgs)
{
    var aname = new AssemblyName(loadArgs.Name);
    if (_loaded.ContainsKey(aname))
    {
        return _loaded[aname];
    }
    if (loadArgs.RequestingAssembly == null)
    {
        return null;
    }
    var path = Path.Combine(_openSimBinDirectory, aname.Name + ".dll");
    if (!File.Exists(path))
    {
        return null;
    }
    var assembly = Assembly.LoadFrom(path);
    _loaded.Add(aname, assembly);
    return assembly;
}
public ValueToken BinaryOperator(string op, ValueToken lhs, ValueToken rhs) =>
    Convert(rhs)
        .Map(rhsconv => BinaryOperators.ContainsKey(op)
            ? BinaryOperators[op](lhs, rhsconv)
            : failwith<ValueToken>($"binary operator '{op}' not supported for {Name}"))
        .IfNone(() => failwith<ValueToken>($"binary operator '{op}' used with incompatible types {lhs.Type} and {rhs.Type}"));
public override float GetProbability(WordSequence wordSequence)
{
    float prob;
    if (_logProbs.ContainsKey(wordSequence))
    {
        prob = _logProbs[wordSequence];
    }
    else if (wordSequence.Size > 1)
    {
        Float backoff = _logBackoffs[wordSequence.GetOldest()];
        if (backoff == null)
        {
            prob = LogMath.LogOne + GetProbability(wordSequence.GetNewest());
        }
        else
        {
            prob = backoff + GetProbability(wordSequence.GetNewest());
        }
    }
    else
    {
        prob = LogMath.LogZero;
    }
    return prob;
}
public void HashMapAllFunctions()
{
    HashMap<int, int> hashMap = new HashMap<int, int>();
    hashMap.Add(1, 5);
    hashMap.Add(19, 17);
    hashMap.Add(7, 14);
    hashMap.Add(46, 18);
    hashMap.Add(6, 13);
    hashMap.Add(13, 49);
    hashMap.Add(84, 82);
    hashMap.Add(92, 98);
    hashMap.Add(31, 92);
    hashMap.Add(57, 13);
    hashMap.Add(78, 78);
    hashMap.Remove(7);
    hashMap[3] = 2; //if key doesn't exist, add a k/v pair
    int value;
    Assert.IsTrue(hashMap.TryGetValue(19, out value));
    Assert.IsFalse(hashMap.ContainsKey(7));
    Assert.IsFalse(hashMap.pairs[13] == null);
    for (int i = 0; i < hashMap.pairs.Length; i++)
    {
        if (hashMap.pairs[i] == null)
        {
            continue;
        }
        foreach (var pair in hashMap)
        {
        }
    }
}
public void HashMapAddGet_RandomSequnce_Success()
{
    const int count = 1000;
    var totalCount = 0;
    var map = new HashMap<int, int>();
    var random = new Random();
    for (int i = 0; i < count; i++)
    {
        var next = random.Next();
        if (map.ContainsKey(next))
        {
            continue;
        }
        map.Add(next, next + 10000);
        totalCount++;
    }

    map.Count.Should().Be(totalCount);
    foreach (var key in map.Keys)
    {
        map[key].Should().Be(key + 10000);
    }
}
private static Map<string, string[]> stringToMap(string str)
{
    Map<string, string[]> parameterMap = new HashMap<string, string[]>();
    if (!string.IsNullOrEmpty(str))
    {
        string[] paramArray = str.Split('&');
        foreach (string param in paramArray)
        {
            string[] keyValue = param.Split('=');
            if (parameterMap.ContainsKey(keyValue[0]))
            {
                string[] array = parameterMap[keyValue[0]];
                string[] newArray = new string[array.Length + 1];
                Array.Copy(array, 0, newArray, 0, array.Length);
                newArray[array.Length] = keyValue[1];
                parameterMap.Add(keyValue[0], newArray);
            }
            else
            {
                parameterMap.Add(keyValue[0], new string[] { keyValue[1] });
            }
        }
    }
    return parameterMap;
}
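A hypothetical call illustrating the intended shape of the result; this assumes the Map/HashMap wrapper used here lets Add overwrite an existing key the way java.util.HashMap.put does, which is how repeated parameters end up appended to the same array.

// Hypothetical usage; "a" appears twice in the query string, so its values accumulate.
Map<string, string[]> parameters = stringToMap("a=1&b=2&a=3");
// parameters["a"] -> { "1", "3" }
// parameters["b"] -> { "2" }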
/// <summary>
/// Returns the property sheet for the given object instance.
/// </summary>
/// <param name="instanceName"></param>
/// <returns></returns>
public PropertySheet GetPropertySheet(String instanceName)
{
    if (!_symbolTable.ContainsKey(instanceName))
    {
        // it is not in the symbol table, so construct
        // it based upon our raw property data
        RawPropertyData rpd = null;
        if (_rawPropertyMap.ContainsKey(instanceName))
        {
            rpd = _rawPropertyMap[instanceName];
        }
        if (rpd != null)
        {
            var className = rpd.ClassName;
            try
            {
                // now load the property sheet by using the class annotation
                var propertySheet = new PropertySheet(Type.GetType(className, true), instanceName, this, rpd);
                _symbolTable.Put(instanceName, propertySheet);
            }
            catch (Exception)
            {
                Trace.Fail(string.Format("Class '{0}' not found in Assembly '{1}'", className, Assembly.GetCallingAssembly()));
                throw;
            }
        }
    }
    return _symbolTable.Get(instanceName);
}
/// <summary>
/// Creates a new configuration manager. Initial properties are loaded from the given URL. No need to keep the notion
/// of 'context' around anymore; we will just pass around this property manager.
/// </summary>
/// <param name="url">Path to config file.</param>
public ConfigurationManager(URL url)
{
    ConfigUrl = url;
    try
    {
        _rawPropertyMap = new SaxLoader(url, _globalProperties).Load();
    }
    catch (IOException e)
    {
        throw new SystemException(e.ToString());
    }

    ConfigurationManagerUtils.ApplySystemProperties(_rawPropertyMap, _globalProperties);
    //ConfigurationManagerUtils.ConfigureLogger(this);

    // We can't configure the configuration manager with itself, so we
    // do some of these configuration items manually.
    if (_globalProperties.ContainsKey("showCreations"))
    {
        var showCreations = _globalProperties["showCreations"];
        if (showCreations != null)
        {
            _showCreations = "true".Equals(showCreations);
        }
    }
}
public virtual int symbolToCode(string symbol)
{
    if (!symbol2CodeMap.ContainsKey(symbol))
    {
        return -1;
    }
    return symbol2CodeMap[symbol];
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public String nullvalueToSymbol(NullValueId nullValueIdentifier) throws org.maltparser.core.exception.MaltChainedException
public virtual string nullvalueToSymbol(NullValueId nullValueIdentifier)
{
    if (!nullValue2SymbolMap.ContainsKey(nullValueIdentifier))
    {
        throw new SymbolException("Illegal null-value identifier. ");
    }
    return nullValue2SymbolMap[nullValueIdentifier];
}
public void ContainsTest()
{
    HashMap<int, string> map = new HashMap<int, string>();
    map.Add(1, "a");
    map.Add(2, "b");
    map.Add(3, "c");
    KeyValuePair<int, string> containsA = new KeyValuePair<int, string>(1, "a");
    KeyValuePair<int, string> containsB = new KeyValuePair<int, string>(1, "b");
    Assert.True(map.Contains(containsA));
    Assert.False(map.Contains(containsB));
    Assert.True(map.ContainsKey(1));
    Assert.False(map.ContainsKey(4));
}
public bool ContainTeamID(string teamID)
{
    if (teamID == null)
    {
        return false;
    }
    return TeamIDData.ContainsKey(teamID);
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public int nullvalueToCode(NullValueId nullValueIdentifier) throws org.maltparser.core.exception.MaltChainedException
public virtual int nullvalueToCode(NullValueId nullValueIdentifier)
{
    if (!nullValue2CodeMap.ContainsKey(nullValueIdentifier))
    {
        throw new SymbolException("Illegal null-value identifier. ");
    }
    return nullValue2CodeMap[nullValueIdentifier];
}
/// <summary>
/// Returns true if the given key is in the {@link #kingSectionLikeMap} map,
/// and the value is the same as the given value.
/// </summary>
/// <param name="key">key to look for in the map.</param>
/// <param name="value">the value to match.</param>
/// <returns>true if it matches, or false if it does not or if the key is not
/// mapped to any value in the map.</returns>
private static bool InKingSectionLikeMap(string key, string value)
{
    if (KingSectionLikeMap.ContainsKey(key))
    {
        return KingSectionLikeMap.Get(key).Equals(value);
    }
    return false;
}
public void RemoveExistingKey()
{
    HashMap<string, string> hashMap = new HashMap<string, string>();
    hashMap.Put("The same key", "The different value 1");
    hashMap.Remove("The same key");
    Assert.IsTrue(!hashMap.ContainsKey("The same key"));
}
/// <summary>
/// Gets a context independent unit. There should only be one instance of any CI unit.
/// </summary>
/// <param name="name">The name of the unit.</param>
/// <param name="isFiller">if true, the unit is a filler unit</param>
/// <returns>The unit.</returns>
protected override Unit GetCIUnit(String name, bool isFiller)
{
    if (_mapping.ContainsKey(name))
    {
        name = _mapping.Get(name);
    }
    return unitManager.GetUnit(name, isFiller, Context.EmptyContext);
}
public static void Main()
{
    Console.WriteLine("Enter contacts (name - number) or search existing contacts (type 'search'):");
    HashMap<string, string> phonebook = new HashMap<string, string>();

    string entry = Console.ReadLine();
    while (entry != "search")
    {
        string[] contactInfo = entry
            .Split(new char[] { '-' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(s => s.Trim())
            .ToArray();
        if (!phonebook.ContainsKey(contactInfo[0]))
        {
            phonebook.Add(contactInfo[0], contactInfo[1]);
        }
        entry = Console.ReadLine();
    }

    List<string> searchWords = new List<string>();
    string word = Console.ReadLine();
    while (word != string.Empty)
    {
        searchWords.Add(word);
        word = Console.ReadLine();
    }

    foreach (var searchWord in searchWords)
    {
        if (phonebook.ContainsKey(searchWord))
        {
            Console.WriteLine("{0} -> {1}", searchWord, phonebook[searchWord]);
        }
        else
        {
            Console.WriteLine("Contact {0} does not exist.", searchWord);
        }
    }
}
static void Main(string[] args)
{
    IMap<string, int> map = new HashMap<string, int>();
    map.Put("yellow", 1);
    map.Put("blue", 10);
    map.Put("red", 67);

    while (true)
    {
        string[] command = Console.ReadLine().Split(' ');
        try
        {
            switch (command[0].ToLower())
            {
                case "clear":
                    map.Clear();
                    break;
                case "put":
                    map.Put(command[1], Convert.ToInt32(command[2]));
                    break;
                case "remove":
                    map.Remove(command[1]);
                    break;
                case "containskey":
                    Console.WriteLine(map.ContainsKey(command[1]));
                    break;
                case "containsvalue":
                    Console.WriteLine(map.ContainsValue(Convert.ToInt32(command[1])));
                    break;
                case "list":
                    foreach (IEntry<string, int> e in map)
                        Console.WriteLine(e.ToString());
                    break;
                case "keys":
                    foreach (string s in map.Keys)
                        Console.WriteLine(s);
                    break;
                case "values":
                    foreach (int i in map.Values)
                        Console.WriteLine(i.ToString());
                    break;
                case "testum": //test
                    UnmutableMap<string, int> um = new UnmutableMap<string, int>(map);
                    Console.WriteLine(um["red"].ToString());
                    um["red"] = 3;
                    break;
                case "testfind":
                    map = MapUtilsGeneric<string, int>.FindAll(
                        map,
                        new MapUtilsGeneric<string, int>.CheckDelegate(
                            (Entry<string, int> e) => { return e.Key[0] == 'r'; }),
                        MapUtilsGeneric<string, int>.ArrayMapConstructor);
                    break;
                default:
                    throw new Exception("Unknown command.");
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.GetType().ToString() + ": " + ex.Message);
        }
    }
}
private void ProcessTerms(System.String[] queryTerms)
{
    if (queryTerms != null)
    {
        System.Array.Sort(queryTerms);
        IDictionary<string, int> tmpSet = new HashMap<string, int>(queryTerms.Length);
        //filter out duplicates
        IList<string> tmpList = new List<string>(queryTerms.Length);
        IList<int> tmpFreqs = new List<int>(queryTerms.Length);
        int j = 0;
        for (int i = 0; i < queryTerms.Length; i++)
        {
            var term = queryTerms[i];
            var position = tmpSet[term];
            if (!tmpSet.ContainsKey(term)) // if temp_position == null
            {
                tmpSet[term] = j++;
                tmpList.Add(term);
                tmpFreqs.Add(1);
            }
            else
            {
                int integer = tmpFreqs[position];
                tmpFreqs[position] = (integer + 1);
            }
        }
        terms = tmpList.ToArray();
        //termFreqs = (int[])tmpFreqs.toArray(termFreqs);
        termFreqs = new int[tmpFreqs.Count];
        int i2 = 0;
        foreach (int integer in tmpFreqs)
        {
            termFreqs[i2++] = integer;
        }
    }
}
public void TestLazy()
{
    int id = Random().nextInt(NUM_DOCS);
    IndexReader reader = DirectoryReader.Open(dir);
    try
    {
        Query q = new TermQuery(new Term("docid", "" + id));
        IndexSearcher searcher = NewSearcher(reader);
        ScoreDoc[] hits = searcher.Search(q, 100).ScoreDocs;
        assertEquals("Too many docs", 1, hits.Length);
        LazyTestingStoredFieldVisitor visitor =
            new LazyTestingStoredFieldVisitor(new LazyDocument(reader, hits[0].Doc), FIELDS);
        reader.Document(hits[0].Doc, visitor);
        Document d = visitor.doc;
        int numFieldValues = 0;
        IDictionary<string, int> fieldValueCounts = new HashMap<string, int>();

        // at this point, all FIELDS should be Lazy and unrealized
        foreach (IndexableField f in d)
        {
            numFieldValues++;
            if (f.Name.equals("never_load"))
            {
                fail("never_load was loaded");
            }
            if (f.Name.equals("load_later"))
            {
                fail("load_later was loaded on first pass");
            }
            if (f.Name.equals("docid"))
            {
                assertFalse(f.Name, f is LazyDocument.LazyField);
            }
            else
            {
                int count = fieldValueCounts.ContainsKey(f.Name) ? fieldValueCounts[f.Name] : 0;
                count++;
                fieldValueCounts.Put(f.Name, count);
                assertTrue(f.Name + " is " + f.GetType(), f is LazyDocument.LazyField);
                LazyDocument.LazyField lf = (LazyDocument.LazyField)f;
                assertFalse(f.Name + " is loaded", lf.HasBeenLoaded);
            }
        }
        Console.WriteLine("numFieldValues == " + numFieldValues);
        // LUCENENET TODO: Failing here 1 too small, but what field is the + 1 here supposed to represent?
        assertEquals("numFieldValues", 1 + (NUM_VALUES * FIELDS.Length), numFieldValues);

        foreach (string field in fieldValueCounts.Keys)
        {
            assertEquals("fieldName count: " + field, NUM_VALUES, fieldValueCounts[field]);
        }

        // pick a single field name to load a single value
        string fieldName = FIELDS[Random().nextInt(FIELDS.Length)];
        IndexableField[] fieldValues = d.GetFields(fieldName);
        assertEquals("#vals in field: " + fieldName, NUM_VALUES, fieldValues.Length);
        int valNum = Random().nextInt(fieldValues.Length);
        assertEquals(id + "_" + fieldName + "_" + valNum, fieldValues[valNum].StringValue);

        // now every value of fieldName should be loaded
        foreach (IndexableField f in d)
        {
            if (f.Name.equals("never_load"))
            {
                fail("never_load was loaded");
            }
            if (f.Name.equals("load_later"))
            {
                fail("load_later was loaded too soon");
            }
            if (f.Name.equals("docid"))
            {
                assertFalse(f.Name, f is LazyDocument.LazyField);
            }
            else
            {
                assertTrue(f.Name + " is " + f.GetType(), f is LazyDocument.LazyField);
                LazyDocument.LazyField lf = (LazyDocument.LazyField)f;
                assertEquals(f.Name + " is loaded?", lf.Name.equals(fieldName), lf.HasBeenLoaded);
            }
        }

        // use the same LazyDoc to ask for one more lazy field
        visitor = new LazyTestingStoredFieldVisitor(new LazyDocument(reader, hits[0].Doc), "load_later");
        reader.Document(hits[0].Doc, visitor);
        d = visitor.doc;

        // ensure we have all the values we expect now, and that
        // adding one more lazy field didn't "unload" the existing LazyField's
        // we already loaded.
        foreach (IndexableField f in d)
        {
            if (f.Name.equals("never_load"))
            {
                fail("never_load was loaded");
            }
            if (f.Name.equals("docid"))
            {
                assertFalse(f.Name, f is LazyDocument.LazyField);
            }
            else
            {
                assertTrue(f.Name + " is " + f.GetType(), f is LazyDocument.LazyField);
                LazyDocument.LazyField lf = (LazyDocument.LazyField)f;
                assertEquals(f.Name + " is loaded?", lf.Name.equals(fieldName), lf.HasBeenLoaded);
            }
        }

        // even the underlying doc shouldn't have never_load
        assertNull("never_load was loaded in wrapped doc", visitor.lazyDoc.Document.GetField("never_load"));
    }
    finally
    {
        reader.Dispose();
    }
}
/// <summary>This constructor is only used for <see cref="Reopen()" /> </summary>
internal DirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts,
                         IEnumerable<KeyValuePair<string, byte[]>> oldNormsCache, bool readOnly, bool doClone,
                         int termInfosIndexDivisor)
{
    this.internalDirectory = directory;
    this.readOnly = readOnly;
    this.segmentInfos = infos;
    this.termInfosIndexDivisor = termInfosIndexDivisor;
    if (!readOnly)
    {
        // We assume that this segments_N was previously
        // properly sync'd:
        synced.UnionWith(infos.Files(directory, true));
    }

    // we put the old SegmentReaders in a map, that allows us
    // to lookup a reader using its segment name
    IDictionary<string, int> segmentReaders = new HashMap<string, int>();

    if (oldReaders != null)
    {
        // create a Map SegmentName->SegmentReader
        for (int i = 0; i < oldReaders.Length; i++)
        {
            segmentReaders[oldReaders[i].SegmentName] = i;
        }
    }

    var newReaders = new SegmentReader[infos.Count];

    // remember which readers are shared between the old and the re-opened
    // DirectoryReader - we have to incRef those readers
    var readerShared = new bool[infos.Count];

    for (int i = infos.Count - 1; i >= 0; i--)
    {
        // find SegmentReader for this segment
        if (!segmentReaders.ContainsKey(infos.Info(i).name))
        {
            // this is a new segment, no old SegmentReader can be reused
            newReaders[i] = null;
        }
        else
        {
            // there is an old reader for this segment - we'll try to reopen it
            newReaders[i] = oldReaders[segmentReaders[infos.Info(i).name]];
        }

        bool success = false;
        try
        {
            SegmentReader newReader;
            if (newReaders[i] == null ||
                infos.Info(i).GetUseCompoundFile() != newReaders[i].SegmentInfo.GetUseCompoundFile())
            {
                // We should never see a totally new segment during cloning
                System.Diagnostics.Debug.Assert(!doClone);

                // this is a new reader; in case we hit an exception we can close it safely
                newReader = SegmentReader.Get(readOnly, infos.Info(i), termInfosIndexDivisor);
            }
            else
            {
                newReader = newReaders[i].ReopenSegment(infos.Info(i), doClone, readOnly);
            }
            if (newReader == newReaders[i])
            {
                // this reader will be shared between the old and the new one,
                // so we must incRef it
                readerShared[i] = true;
                newReader.IncRef();
            }
            else
            {
                readerShared[i] = false;
                newReaders[i] = newReader;
            }
            success = true;
        }
        finally
        {
            if (!success)
            {
                for (i++; i < infos.Count; i++)
                {
                    if (newReaders[i] != null)
                    {
                        try
                        {
                            if (!readerShared[i])
                            {
                                // this is a new subReader that is not used by the old one,
                                // we can close it
                                newReaders[i].Close();
                            }
                            else
                            {
                                // this subReader is also used by the old reader, so instead
                                // closing we must decRef it
                                newReaders[i].DecRef();
                            }
                        }
                        catch (System.IO.IOException)
                        {
                            // keep going - we want to clean up as much as possible
                        }
                    }
                }
            }
        }
    }

    // initialize the readers to calculate maxDoc before we try to reuse the old normsCache
    Initialize(newReaders);

    // try to copy unchanged norms from the old normsCache to the new one
    if (oldNormsCache != null)
    {
        foreach (var entry in oldNormsCache)
        {
            String field = entry.Key;
            if (!HasNorms(field))
            {
                continue;
            }

            byte[] oldBytes = entry.Value;
            var bytes = new byte[MaxDoc];

            for (int i = 0; i < subReaders.Length; i++)
            {
                int oldReaderIndex = segmentReaders[subReaders[i].SegmentName];

                // this SegmentReader was not re-opened, we can copy all of its norms
                if (segmentReaders.ContainsKey(subReaders[i].SegmentName) &&
                    (oldReaders[oldReaderIndex] == subReaders[i] ||
                     oldReaders[oldReaderIndex].norms[field] == subReaders[i].norms[field]))
                {
                    // we don't have to synchronize here: either this constructor is called from a SegmentReader,
                    // in which case no old norms cache is present, or it is called from MultiReader.reopen(),
                    // which is synchronized
                    Array.Copy(oldBytes, oldStarts[oldReaderIndex], bytes, starts[i], starts[i + 1] - starts[i]);
                }
                else
                {
                    subReaders[i].Norms(field, bytes, starts[i]);
                }
            }

            normsCache[field] = bytes; // update cache
        }
    }
}