/// <summary>
/// Applies the given phoneme expression to all phonemes in this phoneme builder.
/// <para/>
/// This will lengthen phonemes that have compatible language sets to the expression, and drop those that are
/// incompatible.
/// </summary>
/// <param name="phonemeExpr">The expression to apply.</param>
/// <param name="maxPhonemes">The maximum number of phonemes to build up.</param>
public void Apply(IPhonemeExpr phonemeExpr, int maxPhonemes)
{
    ISet<Phoneme> newPhonemes = new JCG.LinkedHashSet<Phoneme>(maxPhonemes);

    //EXPR_continue:
    foreach (Phoneme left in this.phonemes)
    {
        foreach (Phoneme right in phonemeExpr.Phonemes)
        {
            LanguageSet languages = left.Languages.RestrictTo(right.Languages);
            if (!languages.IsEmpty)
            {
                Phoneme join = new Phoneme(left, right, languages);
                if (newPhonemes.Count < maxPhonemes)
                {
                    newPhonemes.Add(join);
                    if (newPhonemes.Count >= maxPhonemes)
                    {
                        goto EXPR_break;
                    }
                }
            }
        }
    }
EXPR_break: { }

    this.phonemes.Clear();
    this.phonemes.UnionWith(newPhonemes);
}
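// --- Illustrative sketch (not part of PhonemeBuilder) ---
// A minimal, self-contained example of the pattern Apply uses above: cross every existing
// element with every element of the expression, keep only "compatible" pairs, and stop once
// the cap is reached. The JCG alias is assumed to map to J2N.Collections.Generic as in the
// method above; Compatible() is a hypothetical stand-in for the LanguageSet.RestrictTo(...) check.
using System;
using System.Collections.Generic;
using JCG = J2N.Collections.Generic;

static class ApplySketch
{
    // Hypothetical compatibility test; the real code intersects LanguageSets.
    private static bool Compatible(string left, string right) => true;

    public static void Main()
    {
        var current = new List<string> { "a", "b" };
        var expression = new List<string> { "x", "y", "z" };
        const int maxItems = 4;

        // LinkedHashSet preserves insertion order, so surviving joins stay in the
        // order they were produced.
        var joined = new JCG.LinkedHashSet<string>();
        foreach (var left in current)
        {
            foreach (var right in expression)
            {
                if (!Compatible(left, right)) continue;
                joined.Add(left + right);
                if (joined.Count >= maxItems) goto Done;
            }
        }
    Done:
        Console.WriteLine(string.Join(", ", joined)); // ax, ay, az, bx
    }
}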
internal FieldQuery(Query query, IndexReader reader, bool phraseHighlight, bool fieldMatch)
{
    this.fieldMatch = fieldMatch;
    // LUCENENET NOTE: LinkedHashSet cares about insertion order
    ISet<Query> flatQueries = new JCG.LinkedHashSet<Query>();
    Flatten(query, reader, flatQueries);
    SaveTerms(flatQueries, reader);
    ICollection<Query> expandQueries = Expand(flatQueries);
    foreach (Query flatQuery in expandQueries)
    {
        QueryPhraseMap rootMap = GetRootMap(flatQuery);
        rootMap.Add(flatQuery /*, reader // LUCENENET: Never read */);
        if (!phraseHighlight && flatQuery is PhraseQuery pq)
        {
            if (pq.GetTerms().Length > 1)
            {
                foreach (Term term in pq.GetTerms())
                {
                    rootMap.AddTerm(term, flatQuery.Boost);
                }
            }
        }
    }
}
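// --- Illustrative sketch (not part of FieldQuery) ---
// A small example of the property the LUCENENET NOTE above relies on: J2N's LinkedHashSet
// enumerates elements in insertion order, whereas the BCL HashSet makes no ordering guarantee.
// The JCG/SCG aliases below are assumptions made for this sketch only.
using System;
using SCG = System.Collections.Generic;
using JCG = J2N.Collections.Generic;

static class InsertionOrderSketch
{
    public static void Main()
    {
        var linked = new JCG.LinkedHashSet<string> { "c", "a", "b" };
        Console.WriteLine(string.Join(" ", linked)); // c a b  (insertion order, guaranteed)

        var plain = new SCG.HashSet<string> { "c", "a", "b" };
        Console.WriteLine(string.Join(" ", plain));  // order is an implementation detail
    }
}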
/** Reads:
 *  typeInfo{
 *    deprecated{
 *      co{
 *        direct{"true"}
 *      }
 *      tz{
 *        camtr{"true"}
 *      }
 *    }
 *  }
 */
private static void GetTypeInfo(UResourceBundle typeInfoRes)
{
    IDictionary<string, ISet<string>> _deprecatedKeyTypes = new JCG.LinkedDictionary<string, ISet<string>>();
    foreach (var keyInfoEntry in typeInfoRes)
    {
        string key = keyInfoEntry.Key;
        TypeInfoType typeInfo = (TypeInfoType)Enum.Parse(typeof(TypeInfoType), key, true);
        foreach (var keyInfoEntry2 in keyInfoEntry)
        {
            string key2 = keyInfoEntry2.Key;
            ISet<string> _deprecatedTypes = new JCG.LinkedHashSet<string>();
            foreach (var keyInfoEntry3 in keyInfoEntry2)
            {
                string key3 = keyInfoEntry3.Key;
                switch (typeInfo)
                {
                    // allow for expansion
                    case TypeInfoType.deprecated:
                        _deprecatedTypes.Add(key3);
                        break;
                }
            }
            _deprecatedKeyTypes[key2] = _deprecatedTypes.AsReadOnly();
        }
    }
    DEPRECATED_KEY_TYPES = _deprecatedKeyTypes.AsReadOnly();
}
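// --- Illustrative sketch (not part of the ICU4N key/type data code) ---
// The "build mutable, publish read-only" pattern used by GetTypeInfo above, in isolation:
// insertion order is preserved by LinkedDictionary/LinkedHashSet, and AsReadOnly() wraps the
// collections so the cached map cannot be mutated later. The extensions namespace below is an
// assumption about where J2N's AsReadOnly() extension methods live.
using System;
using System.Collections.Generic;
using JCG = J2N.Collections.Generic;
using J2N.Collections.Generic.Extensions; // assumed home of the AsReadOnly() extensions

static class ReadOnlyCacheSketch
{
    public static void Main()
    {
        IDictionary<string, ISet<string>> deprecated = new JCG.LinkedDictionary<string, ISet<string>>();

        ISet<string> tzTypes = new JCG.LinkedHashSet<string> { "camtr" };
        deprecated["tz"] = tzTypes.AsReadOnly();

        IDictionary<string, ISet<string>> frozen = deprecated.AsReadOnly();
        Console.WriteLine(frozen["tz"].Contains("camtr")); // True
    }
}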
/// <summary>
/// Create expandQueries from <paramref name="flatQueries"/>.
///
/// <code>
/// expandQueries := flatQueries + overlapped phrase queries
///
/// ex1) flatQueries={a,b,c}
///      => expandQueries={a,b,c}
/// ex2) flatQueries={a,"b c","c d"}
///      => expandQueries={a,"b c","c d","b c d"}
/// </code>
/// </summary>
/// <param name="flatQueries"></param>
/// <returns></returns>
internal ICollection<Query> Expand(ICollection<Query> flatQueries)
{
    ISet<Query> expandQueries = new JCG.LinkedHashSet<Query>();
    for (int i = 0; i < flatQueries.Count;)
    {
        Query query = flatQueries.ElementAt(i);
        //i.Remove();
        if (!flatQueries.Remove(query))
        {
            i++;
        }
        expandQueries.Add(query);
        if (!(query is PhraseQuery))
        {
            continue;
        }
        using (IEnumerator<Query> j = flatQueries.GetEnumerator())
        {
            while (j.MoveNext())
            {
                Query qj = j.Current;
                if (!(qj is PhraseQuery))
                {
                    continue;
                }
                CheckOverlap(expandQueries, (PhraseQuery)query, (PhraseQuery)qj);
            }
        }
    }
    //for (IEnumerator<Query> i = flatQueries.GetEnumerator(); i.MoveNext();)
    //{
    //    Query query = i.Current;
    //    i.Remove();
    //    expandQueries.Add(query);
    //    if (!(query is PhraseQuery)) continue;
    //    for (IEnumerator<Query> j = flatQueries.GetEnumerator(); j.MoveNext();)
    //    {
    //        Query qj = j.Current;
    //        if (!(qj is PhraseQuery)) continue;
    //        CheckOverlap(expandQueries, (PhraseQuery)query, (PhraseQuery)qj);
    //    }
    //}
    return expandQueries;
}
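// --- Illustrative sketch (not the FieldQuery/CheckOverlap code) ---
// A toy version of the "overlapped phrase" idea documented in ex2) above: when the tail of one
// phrase equals the head of another, the two merge into a longer phrase, e.g. "b c" + "c d"
// => "b c d". MergeIfOverlapping is a hypothetical helper written for this sketch only.
using System;

static class PhraseOverlapSketch
{
    // Returns the merged phrase, or null when the phrases do not overlap.
    public static string MergeIfOverlapping(string left, string right)
    {
        string[] a = left.Split(' ');
        string[] b = right.Split(' ');
        for (int overlap = Math.Min(a.Length, b.Length) - 1; overlap > 0; overlap--)
        {
            bool match = true;
            for (int k = 0; k < overlap; k++)
            {
                if (a[a.Length - overlap + k] != b[k]) { match = false; break; }
            }
            if (match)
            {
                return left + " " + string.Join(" ", b, overlap, b.Length - overlap);
            }
        }
        return null;
    }

    public static void Main()
    {
        Console.WriteLine(MergeIfOverlapping("b c", "c d"));                   // b c d
        Console.WriteLine(MergeIfOverlapping("a b", "c d") ?? "(no overlap)"); // (no overlap)
    }
}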
/// <summary>
/// Helper function to create a LinkedHashSet fulfilling the given specific parameters. The function will
/// create a LinkedHashSet using the Comparer constructor and then add values
/// to it until it is full. It will begin by adding the desired number of matching elements,
/// followed by random (deterministic) elements until the desired count is reached.
/// </summary>
protected IEnumerable<T> CreateLinkedHashSet(IEnumerable<T> enumerableToMatchTo, int count, int numberOfMatchingElements)
{
    JCG.LinkedHashSet<T> set = new JCG.LinkedHashSet<T>(GetIEqualityComparer());
    int seed = 528;
    JCG.List<T> match = null;

    // Add matching elements
    if (enumerableToMatchTo != null)
    {
        match = enumerableToMatchTo.ToList();
        for (int i = 0; i < numberOfMatchingElements; i++)
        {
            set.Add(match[i]);
        }
    }

    // Add elements to reach the desired count
    while (set.Count < count)
    {
        T toAdd = CreateT(seed++);
        while (set.Contains(toAdd) || (match != null && match.Contains(toAdd, GetIEqualityComparer()))) // Don't want any unexpectedly duplicate values
        {
            toAdd = CreateT(seed++);
        }
        set.Add(toAdd);
    }

    // Validate that the Enumerable fits the guidelines as expected
    Debug.Assert(set.Count == count);
    if (match != null)
    {
        int actualMatchingCount = 0;
        foreach (T lookingFor in match)
        {
            actualMatchingCount += set.Contains(lookingFor) ? 1 : 0;
        }
        Assert.Equal(numberOfMatchingElements, actualMatchingCount);
    }
    return set;
}
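// --- Illustrative sketch (not part of the test helper) ---
// The helper above relies on the LinkedHashSet constructor that accepts an IEqualityComparer<T>:
// the set de-duplicates with the supplied comparer while still enumerating in insertion order.
// The JCG alias is assumed to map to J2N.Collections.Generic as elsewhere in these examples.
using System;
using JCG = J2N.Collections.Generic;

static class ComparerSketch
{
    public static void Main()
    {
        var set = new JCG.LinkedHashSet<string>(StringComparer.OrdinalIgnoreCase);
        set.Add("Alpha");
        set.Add("alpha"); // duplicate under the comparer, so it is not added again
        set.Add("Beta");
        Console.WriteLine(string.Join(", ", set)); // Alpha, Beta
    }
}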
private void doTest(SpatialOperation operation)
{
    //first show that when there's no data, a query will result in no results
    {
        Query query = strategy.MakeQuery(new SpatialArgs(operation, randomRectangle()));
        SearchResults searchResults = executeQuery(query, 1);
        assertEquals(0, searchResults.numFound);
    }

    bool biasContains = (operation == SpatialOperation.Contains);

    //Main index loop:
    IDictionary<String, IShape> indexedShapes = new JCG.LinkedDictionary<String, IShape>();
    IDictionary<String, IShape> indexedShapesGS = new JCG.LinkedDictionary<String, IShape>();//grid snapped
    int numIndexedShapes = randomIntBetween(1, 6);
#pragma warning disable 219
    bool indexedAtLeastOneShapePair = false;
#pragma warning restore 219
    for (int i = 0; i < numIndexedShapes; i++)
    {
        String id = "" + i;
        IShape indexedShape;
        int R = Random.nextInt(12);
        if (R == 0)
        {//1 in 12
            indexedShape = null;
        }
        else if (R == 1)
        {//1 in 12
            indexedShape = randomPoint();//just one point
        }
        else if (R <= 4)
        {//3 in 12
            //comprised of more than one shape
            indexedShape = randomShapePairRect(biasContains);
            indexedAtLeastOneShapePair = true;
        }
        else
        {
            indexedShape = randomRectangle();//just one rect
        }
        indexedShapes.Put(id, indexedShape);
        indexedShapesGS.Put(id, gridSnap(indexedShape));
        adoc(id, indexedShape);

        if (Random.nextInt(10) == 0)
        {
            Commit();//intermediate commit, produces extra segments
        }
    }

    //delete some documents randomly
    IEnumerator<String> idIter = indexedShapes.Keys.ToList().GetEnumerator();
    while (idIter.MoveNext())
    {
        String id = idIter.Current;
        if (Random.nextInt(10) == 0)
        {
            DeleteDoc(id);
            //idIter.Remove();
            indexedShapes.Remove(id);
            indexedShapesGS.Remove(id);
        }
    }

    Commit();

    //Main query loop:
    int numQueryShapes = AtLeast(20);
    for (int i = 0; i < numQueryShapes; i++)
    {
        int scanLevel = randomInt(grid.MaxLevels);
        ((RecursivePrefixTreeStrategy)strategy).PrefixGridScanLevel = (scanLevel);

        IShape queryShape;
        switch (randomInt(10))
        {
            case 0: queryShape = randomPoint(); break;
            // LUCENE-5549
            //TODO debug: -Dtests.method=testWithin -Dtests.multiplier=3 -Dtests.seed=5F5294CE2E075A3E:AAD2F0F79288CA64
            //case 1: case 2: case 3:
            //    if (!indexedAtLeastOneShapePair) { // avoids ShapePair.relate(ShapePair), which isn't reliable
            //        queryShape = randomShapePairRect(!biasContains);//invert biasContains for query side
            //        break;
            //    }
            default: queryShape = randomRectangle(); break;
        }
        IShape queryShapeGS = gridSnap(queryShape);

        bool opIsDisjoint = operation == SpatialOperation.IsDisjointTo;

        //Generate truth via brute force:
        // We ensure true-positive matches (if the predicate on the raw shapes match
        //  then the search should find those same matches).
        // approximations, false-positive matches
        ISet<string> expectedIds = new JCG.LinkedHashSet<string>();//true-positives
        ISet<string> secondaryIds = new JCG.LinkedHashSet<string>();//false-positives (unless disjoint)
        foreach (var entry in indexedShapes)
        {
            string id = entry.Key;
            IShape indexedShapeCompare = entry.Value;
            if (indexedShapeCompare == null)
            {
                continue;
            }
            IShape queryShapeCompare = queryShape;

            if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
            {
                expectedIds.Add(id);
                if (opIsDisjoint)
                {
                    //if no longer intersect after buffering them, for disjoint, remember this
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    if (!operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                    {
                        secondaryIds.Add(id);
                    }
                }
            }
            else if (!opIsDisjoint)
            {
                //buffer either the indexed or query shape (via gridSnap) and try again
                if (operation == SpatialOperation.Intersects)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    //TODO Unfortunately, grid-snapping both can result in intersections that otherwise
                    // wouldn't happen when the grids are adjacent. Not a big deal but our test is just a
                    // bit more lenient.
                }
                else if (operation == SpatialOperation.Contains)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                }
                else if (operation == SpatialOperation.IsWithin)
                {
                    queryShapeCompare = queryShapeGS;
                }
                if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                {
                    secondaryIds.Add(id);
                }
            }
        }

        //Search and verify results
        SpatialArgs args = new SpatialArgs(operation, queryShape);
        if (queryShape is ShapePair)
        {
            args.DistErrPct = (0.0);//a hack; we want to be more detailed than gridSnap(queryShape)
        }
        Query query = strategy.MakeQuery(args);
        SearchResults got = executeQuery(query, 100);
        ISet<String> remainingExpectedIds = new JCG.LinkedHashSet<string>(expectedIds);
        foreach (SearchResult result in got.results)
        {
            String id = result.GetId();
            bool removed = remainingExpectedIds.Remove(id);
            if (!removed && (!opIsDisjoint && !secondaryIds.Contains(id)))
            {
                fail("Shouldn't match", id, indexedShapes, indexedShapesGS, queryShape);
            }
        }
        if (opIsDisjoint)
        {
            remainingExpectedIds.ExceptWith(secondaryIds);
        }
        if (remainingExpectedIds.Count > 0)
        {
            var iter = remainingExpectedIds.GetEnumerator();
            iter.MoveNext();
            String id = iter.Current;
            fail("Should have matched", id, indexedShapes, indexedShapesGS, queryShape);
        }
    }
}
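// --- Illustrative sketch (not the Lucene.NET spatial test harness) ---
// The verification step above boils down to this set bookkeeping: every id the search returns
// must be either expected or a tolerated false positive, and every expected id must eventually
// be returned. All names below are invented for the sketch; the JCG alias is assumed to map to
// J2N.Collections.Generic as in the test itself.
using System;
using JCG = J2N.Collections.Generic;

static class VerificationSketch
{
    public static void Main()
    {
        var expectedIds  = new JCG.LinkedHashSet<string> { "1", "3", "4" }; // true positives
        var secondaryIds = new JCG.LinkedHashSet<string> { "2" };           // tolerated false positives
        var got          = new[] { "1", "2", "3", "4" };                    // what the search returned

        var remaining = new JCG.LinkedHashSet<string>(expectedIds);
        foreach (string id in got)
        {
            bool removed = remaining.Remove(id);
            if (!removed && !secondaryIds.Contains(id))
                throw new Exception($"Shouldn't match: {id}");
        }
        if (remaining.Count > 0)
            throw new Exception($"Should have matched: {string.Join(", ", remaining)}");

        Console.WriteLine("verified");
    }
}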
public virtual void Test()
{
#pragma warning disable 612, 618
    IFieldCache cache = FieldCache.DEFAULT;
    FieldCache.Doubles doubles = cache.GetDoubles(reader, "theDouble", Random.NextBoolean());
    Assert.AreSame(doubles, cache.GetDoubles(reader, "theDouble", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(doubles, cache.GetDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(doubles.Get(i) == (double.MaxValue - i), doubles.Get(i) + " does not equal: " + (double.MaxValue - i));
    }

    FieldCache.Int64s longs = cache.GetInt64s(reader, "theLong", Random.NextBoolean());
    Assert.AreSame(longs, cache.GetInt64s(reader, "theLong", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(longs, cache.GetInt64s(reader, "theLong", FieldCache.DEFAULT_INT64_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(longs.Get(i) == (long.MaxValue - i), longs.Get(i) + " does not equal: " + (long.MaxValue - i) + " i=" + i);
    }

    FieldCache.Bytes bytes = cache.GetBytes(reader, "theByte", Random.NextBoolean());
    Assert.AreSame(bytes, cache.GetBytes(reader, "theByte", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(bytes, cache.GetBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue((sbyte)bytes.Get(i) == (sbyte)(sbyte.MaxValue - i), (sbyte)bytes.Get(i) + " does not equal: " + (sbyte.MaxValue - i));
    }

    FieldCache.Int16s shorts = cache.GetInt16s(reader, "theShort", Random.NextBoolean());
    Assert.AreSame(shorts, cache.GetInt16s(reader, "theShort", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(shorts, cache.GetInt16s(reader, "theShort", FieldCache.DEFAULT_INT16_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(shorts.Get(i) == (short)(short.MaxValue - i), shorts.Get(i) + " does not equal: " + (short.MaxValue - i));
    }

    FieldCache.Int32s ints = cache.GetInt32s(reader, "theInt", Random.NextBoolean());
    Assert.AreSame(ints, cache.GetInt32s(reader, "theInt", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(ints, cache.GetInt32s(reader, "theInt", FieldCache.DEFAULT_INT32_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(ints.Get(i) == (int.MaxValue - i), ints.Get(i) + " does not equal: " + (int.MaxValue - i));
    }

    FieldCache.Singles floats = cache.GetSingles(reader, "theFloat", Random.NextBoolean());
    Assert.AreSame(floats, cache.GetSingles(reader, "theFloat", Random.NextBoolean()), "Second request to cache return same array");
    Assert.AreSame(floats, cache.GetSingles(reader, "theFloat", FieldCache.DEFAULT_SINGLE_PARSER, Random.NextBoolean()), "Second request with explicit parser return same array");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(floats.Get(i) == (float.MaxValue - i), floats.Get(i) + " does not equal: " + (float.MaxValue - i));
    }
#pragma warning restore 612, 618

    IBits docsWithField = cache.GetDocsWithField(reader, "theLong");
    Assert.AreSame(docsWithField, cache.GetDocsWithField(reader, "theLong"), "Second request to cache return same array");
    Assert.IsTrue(docsWithField is Bits.MatchAllBits, "docsWithField(theLong) must be class Bits.MatchAllBits");
    Assert.IsTrue(docsWithField.Length == NUM_DOCS, "docsWithField(theLong) Size: " + docsWithField.Length + " is not: " + NUM_DOCS);
    for (int i = 0; i < docsWithField.Length; i++)
    {
        Assert.IsTrue(docsWithField.Get(i));
    }

    docsWithField = cache.GetDocsWithField(reader, "sparse");
    Assert.AreSame(docsWithField, cache.GetDocsWithField(reader, "sparse"), "Second request to cache return same array");
    Assert.IsFalse(docsWithField is Bits.MatchAllBits, "docsWithField(sparse) must not be class Bits.MatchAllBits");
    Assert.IsTrue(docsWithField.Length == NUM_DOCS, "docsWithField(sparse) Size: " + docsWithField.Length + " is not: " + NUM_DOCS);
    for (int i = 0; i < docsWithField.Length; i++)
    {
        Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
    }

    // getTermsIndex
    SortedDocValues termsIndex = cache.GetTermsIndex(reader, "theRandomUnicodeString");
    Assert.AreSame(termsIndex, cache.GetTermsIndex(reader, "theRandomUnicodeString"), "Second request to cache return same array");
    BytesRef br = new BytesRef();
    for (int i = 0; i < NUM_DOCS; i++)
    {
        BytesRef term;
        int ord = termsIndex.GetOrd(i);
        if (ord == -1)
        {
            term = null;
        }
        else
        {
            termsIndex.LookupOrd(ord, br);
            term = br;
        }
        string s = term == null ? null : term.Utf8ToString();
        Assert.IsTrue(unicodeStrings[i] == null || unicodeStrings[i].Equals(s, StringComparison.Ordinal), "for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i]);
    }

    int nTerms = termsIndex.ValueCount;

    TermsEnum tenum = termsIndex.GetTermsEnum();
    BytesRef val = new BytesRef();
    for (int i = 0; i < nTerms; i++)
    {
        tenum.MoveNext();
        BytesRef val1 = tenum.Term;
        termsIndex.LookupOrd(i, val);
        // System.out.println("i="+i);
        Assert.AreEqual(val, val1);
    }

    // seek the enum around (note this isn't a great test here)
    int num = AtLeast(100);
    for (int i = 0; i < num; i++)
    {
        int k = Random.Next(nTerms);
        termsIndex.LookupOrd(k, val);
        Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
        Assert.AreEqual(val, tenum.Term);
    }

    for (int i = 0; i < nTerms; i++)
    {
        termsIndex.LookupOrd(i, val);
        Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
        Assert.AreEqual(val, tenum.Term);
    }

    // test bad field
    termsIndex = cache.GetTermsIndex(reader, "bogusfield");

    // getTerms
    BinaryDocValues terms = cache.GetTerms(reader, "theRandomUnicodeString", true);
    Assert.AreSame(terms, cache.GetTerms(reader, "theRandomUnicodeString", true), "Second request to cache return same array");
    IBits bits = cache.GetDocsWithField(reader, "theRandomUnicodeString");
    for (int i = 0; i < NUM_DOCS; i++)
    {
        terms.Get(i, br);
        BytesRef term;
        if (!bits.Get(i))
        {
            term = null;
        }
        else
        {
            term = br;
        }
        string s = term == null ? null : term.Utf8ToString();
        Assert.IsTrue(unicodeStrings[i] == null || unicodeStrings[i].Equals(s, StringComparison.Ordinal), "for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i]);
    }

    // test bad field
    terms = cache.GetTerms(reader, "bogusfield", false);

    // getDocTermOrds
    SortedSetDocValues termOrds = cache.GetDocTermOrds(reader, "theRandomUnicodeMultiValuedField");
    int numEntries = cache.GetCacheEntries().Length;
    // ask for it again, and check that we didnt create any additional entries:
    termOrds = cache.GetDocTermOrds(reader, "theRandomUnicodeMultiValuedField");
    Assert.AreEqual(numEntries, cache.GetCacheEntries().Length);

    for (int i = 0; i < NUM_DOCS; i++)
    {
        termOrds.SetDocument(i);
        // this will remove identical terms. A DocTermOrds doesn't return duplicate ords for a docId
        ISet<BytesRef> values = new JCG.LinkedHashSet<BytesRef>(multiValued[i]);
        foreach (BytesRef v in values)
        {
            if (v == null)
            {
                // why does this test use null values... instead of an empty list: confusing
                break;
            }
            long ord = termOrds.NextOrd();
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(ord != SortedSetDocValues.NO_MORE_ORDS);
            }
            BytesRef scratch = new BytesRef();
            termOrds.LookupOrd(ord, scratch);
            Assert.AreEqual(v, scratch);
        }
        Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, termOrds.NextOrd());
    }

    // test bad field
    termOrds = cache.GetDocTermOrds(reader, "bogusfield");
    Assert.IsTrue(termOrds.ValueCount == 0);

    FieldCache.DEFAULT.PurgeByCacheKey(reader.CoreCacheKey);
}
public void TestNameList()
{
    string[][][] tests = {
        /* name in French, name in self, minimized, modified */
        new string[][] { new string[] { "fr-Cyrl-BE", "fr-Cyrl-CA" },
            new string[] { "Français (cyrillique, Belgique)", "Français (cyrillique, Belgique)", "fr_Cyrl_BE", "fr_Cyrl_BE" },
            new string[] { "Français (cyrillique, Canada)", "Français (cyrillique, Canada)", "fr_Cyrl_CA", "fr_Cyrl_CA" },
        },
        new string[][] { new string[] { "en", "de", "fr", "zh" },
            new string[] { "Allemand", "Deutsch", "de", "de" },
            new string[] { "Anglais", "English", "en", "en" },
            new string[] { "Chinois", "中文", "zh", "zh" },
            new string[] { "Français", "Français", "fr", "fr" },
        },
        // some non-canonical names
        new string[][] { new string[] { "iw", "iw-US", "no", "no-Cyrl", "in", "in-YU" },
            new string[] { "Hébreu (États-Unis)", "עברית (ארצות הברית)", "iw_US", "iw_US" },
            new string[] { "Hébreu (Israël)", "עברית (ישראל)", "iw", "iw_IL" },
            new string[] { "Indonésien (Indonésie)", "Indonesia (Indonesia)", "in", "in_ID" },
            new string[] { "Indonésien (Serbie)", "Indonesia (Serbia)", "in_YU", "in_YU" },
            new string[] { "Norvégien (cyrillique)", "Norsk (kyrillisk)", "no_Cyrl", "no_Cyrl" },
            new string[] { "Norvégien (latin)", "Norsk (latinsk)", "no", "no_Latn" },
        },
        new string[][] { new string[] { "zh-Hant-TW", "en", "en-gb", "fr", "zh-Hant", "de", "de-CH", "zh-TW" },
            new string[] { "Allemand (Allemagne)", "Deutsch (Deutschland)", "de", "de_DE" },
            new string[] { "Allemand (Suisse)", "Deutsch (Schweiz)", "de_CH", "de_CH" },
            new string[] { "Anglais (États-Unis)", "English (United States)", "en", "en_US" },
            new string[] { "Anglais (Royaume-Uni)", "English (United Kingdom)", "en_GB", "en_GB" },
            new string[] { "Chinois (traditionnel)", "中文(繁體)", "zh_Hant", "zh_Hant" },
            new string[] { "Français", "Français", "fr", "fr" },
        },
        new string[][] { new string[] { "zh", "en-gb", "en-CA", "fr-Latn-FR" },
            new string[] { "Anglais (Canada)", "English (Canada)", "en_CA", "en_CA" },
            new string[] { "Anglais (Royaume-Uni)", "English (United Kingdom)", "en_GB", "en_GB" },
            new string[] { "Chinois", "中文", "zh", "zh" },
            new string[] { "Français", "Français", "fr", "fr" },
        },
        new string[][] { new string[] { "en-gb", "fr", "zh-Hant", "zh-SG", "sr", "sr-Latn" },
            new string[] { "Anglais (Royaume-Uni)", "English (United Kingdom)", "en_GB", "en_GB" },
            new string[] { "Chinois (simplifié, Singapour)", "中文(简体,新加坡)", "zh_SG", "zh_Hans_SG" },
            new string[] { "Chinois (traditionnel, Taïwan)", "中文(繁體,台灣)", "zh_Hant", "zh_Hant_TW" },
            new string[] { "Français", "Français", "fr", "fr" },
            new string[] { "Serbe (cyrillique)", "Српски (ћирилица)", "sr", "sr_Cyrl" },
            new string[] { "Serbe (latin)", "Srpski (latinica)", "sr_Latn", "sr_Latn" },
        },
        new string[][] { new string[] { "fr-Cyrl", "fr-Arab" },
            new string[] { "Français (arabe)", "Français (arabe)", "fr_Arab", "fr_Arab" },
            new string[] { "Français (cyrillique)", "Français (cyrillique)", "fr_Cyrl", "fr_Cyrl" },
        },
        new string[][] { new string[] { "fr-Cyrl-BE", "fr-Arab-CA" },
            new string[] { "Français (arabe, Canada)", "Français (arabe, Canada)", "fr_Arab_CA", "fr_Arab_CA" },
            new string[] { "Français (cyrillique, Belgique)", "Français (cyrillique, Belgique)", "fr_Cyrl_BE", "fr_Cyrl_BE" },
        }
    };
    UCultureInfo french = new UCultureInfo("fr");
    CultureDisplayNames names = CultureDisplayNames.GetInstance(french, new DisplayContextOptions { Capitalization = Capitalization.UIListOrMenu });
    Logln("Contexts: " + names.DisplayContextOptions.ToString());
    Collator collator = Collator.GetInstance(french);

    foreach (String[][] test in tests)
    {
        var list = new JCG.LinkedHashSet<UCultureInfo>();
        IList<UiListItem> expected = new JCG.List<UiListItem>();
        foreach (String item in test[0])
        {
            list.Add(new UCultureInfo(item));
        }
        for (int i = 1; i < test.Length; ++i)
        {
            String[] rawRow = test[i];
            expected.Add(new UiListItem(new UCultureInfo(rawRow[2]), new UCultureInfo(rawRow[3]), rawRow[0], rawRow[1]));
        }
        IList<UiListItem> newList = names.GetUiList(list, false, collator);
        if (!expected.Equals(newList)) // J2N's List compares the list contents
        {
            if (expected.Count != newList.Count)
            {
                Errln(string.Format(StringFormatter.CurrentCulture, "{0}", list) + ": wrong size" + expected + ", " + newList);
            }
            else
            {
                Errln(string.Format(StringFormatter.CurrentCulture, "{0}", list));
                for (int i = 0; i < expected.Count; ++i)
                {
                    assertEquals(i + "", expected[i], newList[i]);
                }
            }
        }
        else
        {
            assertEquals(string.Format(StringFormatter.CurrentCulture, "{0}", list), expected, newList);
        }
    }
}
private static void InitFromResourceBundle()
{
    UResourceBundle keyTypeDataRes = UResourceBundle.GetBundleInstance(
        ICUData.IcuBaseName,
        "keyTypeData",
        ICUResourceBundle.IcuDataAssembly);

    GetKeyInfo(keyTypeDataRes.Get("keyInfo"));
    GetTypeInfo(keyTypeDataRes.Get("typeInfo"));

    UResourceBundle keyMapRes = keyTypeDataRes.Get("keyMap");
    UResourceBundle typeMapRes = keyTypeDataRes.Get("typeMap");

    // alias data is optional
    UResourceBundle typeAliasRes = null;
    UResourceBundle bcpTypeAliasRes = null;

    try
    {
        typeAliasRes = keyTypeDataRes.Get("typeAlias");
    }
    catch (MissingManifestResourceException)
    {
        // fall through
    }

    try
    {
        bcpTypeAliasRes = keyTypeDataRes.Get("bcpTypeAlias");
    }
    catch (MissingManifestResourceException)
    {
        // fall through
    }

    // iterate through keyMap resource
    using (UResourceBundleEnumerator keyMapItr = keyMapRes.GetEnumerator())
    {
        // ICU4N NOTE: As long as we don't delete, Dictionary keeps insertion order the same as LinkedHashMap
        IDictionary<string, ISet<string>> _Bcp47Keys = new JCG.LinkedDictionary<string, ISet<string>>();

        while (keyMapItr.MoveNext())
        {
            UResourceBundle keyMapEntry = keyMapItr.Current;
            string legacyKeyId = keyMapEntry.Key;
            string bcpKeyId = keyMapEntry.GetString();

            bool hasSameKey = false;
            if (bcpKeyId.Length == 0)
            {
                // Empty value indicates that BCP key is same with the legacy key.
                bcpKeyId = legacyKeyId;
                hasSameKey = true;
            }
            ISet<string> _bcp47Types = new JCG.LinkedHashSet<string>();
            _Bcp47Keys[bcpKeyId] = _bcp47Types.AsReadOnly();

            bool isTZ = legacyKeyId.Equals("timezone");

            // reverse type alias map
            IDictionary<string, ISet<string>> typeAliasMap = null;
            if (typeAliasRes != null)
            {
                UResourceBundle typeAliasResByKey = null;
                try
                {
                    typeAliasResByKey = typeAliasRes.Get(legacyKeyId);
                }
                catch (MissingManifestResourceException)
                {
                    // fall through
                }
                if (typeAliasResByKey != null)
                {
                    typeAliasMap = new Dictionary<string, ISet<string>>();
                    using (UResourceBundleEnumerator typeAliasResItr = typeAliasResByKey.GetEnumerator())
                    {
                        while (typeAliasResItr.MoveNext())
                        {
                            UResourceBundle typeAliasDataEntry = typeAliasResItr.Current;
                            string from = typeAliasDataEntry.Key;
                            string to = typeAliasDataEntry.GetString();
                            if (isTZ)
                            {
                                from = from.Replace(':', '/');
                            }
                            if (!typeAliasMap.TryGetValue(to, out ISet<string> aliasSet) || aliasSet == null)
                            {
                                aliasSet = new JCG.HashSet<string>();
                                typeAliasMap[to] = aliasSet;
                            }
                            aliasSet.Add(from);
                        }
                    }
                }
            }

            // reverse bcp type alias map
            IDictionary<string, ISet<string>> bcpTypeAliasMap = null;
            if (bcpTypeAliasRes != null)
            {
                UResourceBundle bcpTypeAliasResByKey = null;
                try
                {
                    bcpTypeAliasResByKey = bcpTypeAliasRes.Get(bcpKeyId);
                }
                catch (MissingManifestResourceException)
                {
                    // fall through
                }
                if (bcpTypeAliasResByKey != null)
                {
                    bcpTypeAliasMap = new Dictionary<string, ISet<string>>();
                    using (UResourceBundleEnumerator bcpTypeAliasResItr = bcpTypeAliasResByKey.GetEnumerator())
                    {
                        while (bcpTypeAliasResItr.MoveNext())
                        {
                            UResourceBundle bcpTypeAliasDataEntry = bcpTypeAliasResItr.Current;
                            string from = bcpTypeAliasDataEntry.Key;
                            string to = bcpTypeAliasDataEntry.GetString();
                            if (!bcpTypeAliasMap.TryGetValue(to, out ISet<string> aliasSet) || aliasSet == null)
                            {
                                aliasSet = new JCG.HashSet<string>();
                                bcpTypeAliasMap[to] = aliasSet;
                            }
                            aliasSet.Add(from);
                        }
                    }
                }
            }

            IDictionary<string, Type> typeDataMap = new Dictionary<string, Type>();
            ISet<SpecialType> specialTypeSet = null;

            // look up type map for the key, and walk through the mapping data
            UResourceBundle typeMapResByKey = null;
            try
            {
                typeMapResByKey = typeMapRes.Get(legacyKeyId);
            }
            catch (MissingManifestResourceException)
            {
                // type map for each key must exist
                Debug.Assert(false);
            }
            if (typeMapResByKey != null)
            {
                using (UResourceBundleEnumerator typeMapResByKeyItr = typeMapResByKey.GetEnumerator())
                {
                    while (typeMapResByKeyItr.MoveNext())
                    {
                        UResourceBundle typeMapEntry = typeMapResByKeyItr.Current;
                        string legacyTypeId = typeMapEntry.Key;
                        string bcpTypeId = typeMapEntry.GetString();

                        // special types
                        char first = legacyTypeId[0];
                        bool isSpecialType = '9' < first && first < 'a' && bcpTypeId.Length == 0;
                        if (isSpecialType)
                        {
                            if (specialTypeSet == null)
                            {
                                specialTypeSet = new JCG.HashSet<SpecialType>();
                            }
                            specialTypeSet.Add((SpecialType)Enum.Parse(typeof(SpecialType), legacyTypeId, true));
                            _bcp47Types.Add(legacyTypeId);
                            continue;
                        }

                        if (isTZ)
                        {
                            // a timezone key uses a colon instead of a slash in the resource.
                            // e.g. America:Los_Angeles
                            legacyTypeId = legacyTypeId.Replace(':', '/');
                        }

                        bool hasSameType = false;
                        if (bcpTypeId.Length == 0)
                        {
                            // Empty value indicates that BCP type is same with the legacy type.
                            bcpTypeId = legacyTypeId;
                            hasSameType = true;
                        }
                        _bcp47Types.Add(bcpTypeId);

                        // Note: legacy type value should never be
                        // equivalent to bcp type value of a different
                        // type under the same key. So we use a single
                        // map for lookup.
                        Type t = new Type(legacyTypeId, bcpTypeId);
                        typeDataMap[AsciiUtil.ToLower(legacyTypeId)] = t;
                        if (!hasSameType)
                        {
                            typeDataMap[AsciiUtil.ToLower(bcpTypeId)] = t;
                        }

                        // Also put aliases in the map
                        if (typeAliasMap != null)
                        {
                            if (typeAliasMap.TryGetValue(legacyTypeId, out ISet<string> typeAliasSet) && typeAliasSet != null)
                            {
                                foreach (string alias in typeAliasSet)
                                {
                                    typeDataMap[AsciiUtil.ToLower(alias)] = t;
                                }
                            }
                        }
                        if (bcpTypeAliasMap != null)
                        {
                            if (bcpTypeAliasMap.TryGetValue(bcpTypeId, out ISet<string> bcpTypeAliasSet) && bcpTypeAliasSet != null)
                            {
                                foreach (string alias in bcpTypeAliasSet)
                                {
                                    typeDataMap[AsciiUtil.ToLower(alias)] = t;
                                }
                            }
                        }
                    }
                }
            }

            KeyData keyData = new KeyData(legacyKeyId, bcpKeyId, typeDataMap, specialTypeSet);

            KEYMAP[AsciiUtil.ToLower(legacyKeyId)] = keyData;
            if (!hasSameKey)
            {
                KEYMAP[AsciiUtil.ToLower(bcpKeyId)] = keyData;
            }
        }
        BCP47_KEYS = _Bcp47Keys.AsReadOnly();
    }
}