static TestRandomChains()
{
    // Constructors that produce broken TokenStreams, either always (ALWAYS)
    // or only for certain argument values (via a predicate).
    try
    {
        brokenConstructors[typeof(LimitTokenCountFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int) })] = ALWAYS;
        brokenConstructors[typeof(LimitTokenCountFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousInnerClassHelper2();
        brokenConstructors[typeof(LimitTokenPositionFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int) })] = ALWAYS;
        brokenConstructors[typeof(LimitTokenPositionFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousInnerClassHelper3();

        foreach (Type c in Arrays.AsList(
            // TODO: can we promote some of these to be only
            // offsets offenders?
            // doesn't actually reset itself:
            typeof(CachingTokenFilter),
            // Not broken, simulates brokenness:
            typeof(CrankyTokenFilter),
            // Not broken: we forcefully add this, so we shouldn't
            // also randomly pick it:
            typeof(ValidatingTokenFilter)))
        {
            foreach (ConstructorInfo ctor in c.GetConstructors())
            {
                brokenConstructors[ctor] = ALWAYS;
            }
        }
    }
    catch (Exception e)
    {
        throw new Exception(e.Message, e);
    }

    // Constructors of components that are known to produce broken offsets.
    try
    {
        foreach (Type c in Arrays.AsList(
            typeof(ReversePathHierarchyTokenizer),
            typeof(PathHierarchyTokenizer),
            // TODO: it seems to mess up offsets!?
            typeof(WikipediaTokenizer),
            // TODO: doesn't handle graph inputs
            typeof(CJKBigramFilter),
            // TODO: doesn't handle graph inputs (or even look at positionIncrement)
            typeof(HyphenatedWordsFilter),
            // TODO: LUCENE-4983
            typeof(CommonGramsFilter),
            // TODO: doesn't handle graph inputs
            typeof(CommonGramsQueryFilter),
            // TODO: probably doesn't handle graph inputs, too afraid to try
            typeof(WordDelimiterFilter)))
        {
            foreach (ConstructorInfo ctor in c.GetConstructors())
            {
                brokenOffsetsConstructors[ctor] = ALWAYS;
            }
        }
    }
    catch (Exception e)
    {
        throw new Exception(e.Message, e);
    }

    // Argument types we know how to supply when randomly constructing
    // Tokenizers, TokenFilters, and CharFilters.
    allowedTokenizerArgs = new IdentityHashSet<Type>(); // Collections.newSetFromMap(new IdentityHashMap<Type, bool?>());
    allowedTokenizerArgs.addAll(argProducers.Keys);
    allowedTokenizerArgs.Add(typeof(TextReader));
    allowedTokenizerArgs.Add(typeof(AttributeSource.AttributeFactory));
    allowedTokenizerArgs.Add(typeof(AttributeSource));

    allowedTokenFilterArgs = new IdentityHashSet<Type>(); // Collections.newSetFromMap(new IdentityHashMap<Type, bool?>());
    allowedTokenFilterArgs.addAll(argProducers.Keys);
    allowedTokenFilterArgs.Add(typeof(TokenStream));
    // TODO: fix this one, that's broken:
    allowedTokenFilterArgs.Add(typeof(CommonGramsFilter));

    allowedCharFilterArgs = new IdentityHashSet<Type>(); // Collections.newSetFromMap(new IdentityHashMap<Type, bool?>());
    allowedCharFilterArgs.addAll(argProducers.Keys);
    allowedCharFilterArgs.Add(typeof(TextReader));
}