/// <summary>
/// Asserts that two collections contain equal elements. By default this is a multiset
/// comparison (duplicates counted, order ignored); pass <paramref name="orderMatters"/> = true
/// to require identical element positions. On mismatch the assertion fails with a message
/// listing the values missing from each side followed by full JSON dumps of both collections.
/// Returns <paramref name="this"/> to allow fluent chaining.
/// </summary>
public static IEnumerable <T> CollectionShouldEqual <T>(this IEnumerable <T> @this, IEnumerable <T> that, string message = null, IEqualityComparer <T> comparer = null, bool orderMatters = false) {
    // A null on either side degenerates to the scalar equality assertion.
    if (@this == null || that == null) { return(@this.ShouldEqual(that)); }
    var cmp = comparer ?? EqualityComparer <T> .Default;
    // Pair each element with an index so duplicates stay distinguishable:
    // - orderMatters: the index is the element's global position (positional comparison)
    // - otherwise: the index is the occurrence number within its equality group
    //   (multiset comparison — the Nth duplicate on one side matches the Nth on the other)
    List <Tuple <T, int> > MakeTuples(IEnumerable <T> seq) => seq.Select((t, globalIndex) => new { t, globalIndex })
        .GroupBy(tt => tt.t, cmp)
        .SelectMany(g => g.Select((tt, groupIndex) => Tuple.Create(tt.t, orderMatters ? tt.globalIndex : groupIndex)))
        .ToList();
    var actualTuples = MakeTuples(@this);
    var expectedTuples = MakeTuples(that);
    var messageBuilder = new StringBuilder();
    if (message != null) { messageBuilder.AppendLine(message); }
    messageBuilder.AppendLine("The collections were not equal!");
    // Compare (element, index) pairs; element equality/hashing delegates to cmp.
    // NOTE(review): assumes cmp.GetHashCode tolerates every element value present
    // (a custom comparer may throw on null elements) — confirm with callers.
    var tupleComparer = EqualityComparers.Create <Tuple <T, int> >((t1, t2) => cmp.Equals(t1.Item1, t2.Item1) && t1.Item2 == t2.Item2, t => cmp.GetHashCode(t.Item1) ^ t.Item2);
    var missingFromActual = expectedTuples.Except(actualTuples, tupleComparer).ToList();
    if (missingFromActual.Any()) {
        messageBuilder.AppendLine("The following values were missing from actual:");
        missingFromActual.ForEach(t => messageBuilder.AppendLine(JsonConvert.SerializeObject(t.Item1)));
        messageBuilder.AppendLine();
    }
    var missingFromExpected = actualTuples.Except(expectedTuples, tupleComparer).ToList();
    if (missingFromExpected.Any()) {
        messageBuilder.AppendLine("The following values were missing from expected:");
        missingFromExpected.ForEach(t => messageBuilder.AppendLine(JsonConvert.SerializeObject(t.Item1)));
        messageBuilder.AppendLine();
    }
    // Any difference: dump both collections in full, then fail the assertion.
    if (missingFromActual.Any() || missingFromExpected.Any()) {
        messageBuilder.AppendLine("Actual:");
        actualTuples.ForEach(t => messageBuilder.AppendLine(JsonConvert.SerializeObject(t.Item1)));
        messageBuilder.AppendLine().AppendLine("Expected:");
        expectedTuples.ForEach(t => messageBuilder.AppendLine(JsonConvert.SerializeObject(t.Item1)));
        Assert.Fail(messageBuilder.ToString());
    }
    return(@this);
}
/// <summary>
/// Verifies reproducibility of the Gaussian stream: a java-style generator seeded with 0
/// must yield this exact 50-value sequence (compared with a 1e-15 absolute tolerance).
/// </summary>
public void TestNextGaussian() {
    var random = Rand.CreateJavaRandom(0);

    // Draw the first 50 Gaussian samples from the freshly seeded generator.
    var observed = new double[50];
    for (var i = 0; i < observed.Length; ++i) {
        observed[i] = random.NextGaussian();
    }

    Assert.Equal(
        actual: observed,
        expected: new[] { 0.8025330637390305, -0.9015460884175122, 2.080920790428163, 0.7637707684364894, 0.9845745328825128, -1.6834122587673428, -0.027290262907887285, 0.11524570286202315, -0.39016704137993785, -0.6433888131264491, 0.052460907198835775, 0.5213420769298895, -0.8239670026881707, 0.26071819402835644, -0.4529877558422544, 1.4031473817209366, 0.27113061707020236, -0.007054015349837401, 0.9049586994113287, 0.8568542481006806, 0.3723340814425109, 0.3976728390023819, 0.06294576961546386, 0.9414599976474556, 0.44110379103508873, -0.7318797311599887, -0.01176361185227962, -0.15736219614735453, -0.5822582291186266, -0.2059701784999411, -0.39990122591137445, 0.8913156150655253, 0.41076063425965825, -1.1712365002966285, -0.3905082189100106, 0.49014040388330665, 0.9597752538041666, 0.7523861952143763, -0.657956415573505, 0.6450323331598297, -0.3154523215417022, 1.054894794114192, 0.5957831787424875, 1.0225509680217193, -2.3561969031359187, -1.5250681153426493, 1.1808572722180044, 0.006140951070945433, -0.13698941007400853, -0.42220793207202106, },
        comparer: EqualityComparers.Create((double a, double b) => Math.Abs(a - b) < 1e-15)
    );
}
/// <summary>
/// Returns an equality comparer for <typeparamref name="T"/>. For non-string reference types
/// it builds a structural comparer that compares every public property, delegating each
/// property comparison to a comparer for the property's type (presumably via a non-generic
/// <c>GetComparer(Type)</c> overload elsewhere in this file — not visible here).
/// For all other types it falls back to <see cref="EqualityComparer{T}.Default"/>.
/// NOTE(review): recursive structural comparison will not terminate on cyclic object graphs.
/// </summary>
internal static EqualityComparer <T> GetComparer <T>() {
    if (typeof(T).IsClass && typeof(T) != typeof(string)) {
        var props = typeof(T).GetProperties();
        return(EqualityComparers.Create <T>((t1, t2) => {
            // Fix: guard nulls before reflective property access — the previous
            // implementation threw when either operand was null instead of
            // reporting inequality (or equality for null == null / same reference).
            if (ReferenceEquals(t1, t2)) { return true; }
            if (t1 == null || t2 == null) { return false; }
            var result = props.All(p => GetComparer(p.PropertyType).Equals(p.GetValue(t1), p.GetValue(t2)));
            return result;
        }));
    }
    return(EqualityComparer <T> .Default);
}
/// <summary>
/// Verifies NextSingle() reproducibility: after advancing a seed-2 java-style generator by
/// 1000 NextInt32() calls, the next 50 single-precision samples must match this fixed
/// sequence within an absolute tolerance of 1e-7.
/// </summary>
public void TestNextSingle() {
    var random = Rand.CreateJavaRandom(2);

    // Burn 1000 draws so we test the generator deep into its sequence.
    var remainingSpins = 1000;
    while (remainingSpins-- > 0) {
        random.NextInt32();
    }

    var observed = new float[50];
    for (var i = 0; i < observed.Length; ++i) {
        observed[i] = random.NextSingle();
    }

    Assert.Equal(
        actual: observed,
        expected: new[] { 0.94287306f, 0.8903044f, 0.97903967f, 0.012356639f, 0.17383873f, 0.6793508f, 0.8875068f, 0.06914234f, 0.7914038f, 0.9430413f, 0.25484967f, 0.9709232f, 0.2722515f, 0.008735001f, 0.5167838f, 0.17015874f, 0.8142477f, 0.3411495f, 0.2899512f, 0.29670966f, 0.358271f, 0.1576863f, 0.38329488f, 0.90439373f, 0.46732223f, 0.9526825f, 0.30414647f, 0.4749437f, 0.21356344f, 0.7139307f, 0.2368964f, 0.7848527f, 0.58981174f, 0.45460987f, 0.5398049f, 0.81236047f, 0.36067104f, 0.939894f, 0.22092265f, 0.9285346f, 0.33370495f, 0.74295545f, 0.66506624f, 0.89385194f, 0.28724986f, 0.68099236f, 0.6538195f, 0.08884382f, 0.90368855f, 0.24754298f, },
        comparer: EqualityComparers.Create((float a, float b) => Math.Abs(a - b) < 1e-7)
    );
}
/// <summary>
/// Seeds the table with <paramref name="testSize"/> saga rows arranged so that every
/// correlating value collides exactly twice (testSize / 2 distinct values), then runs the
/// duplicate indexer and asserts it reports one duplicate set per value, each containing
/// exactly two saga ids.
/// </summary>
public void When_duplicates_found_Should_raise_an_event_for_every_duplicate_set_found(int testSize) {
    const int NumberOfCollisionsForEach = 2;
    // modulo is the number of distinct correlating values; CreateSagaState presumably
    // derives the value as i % modulo, producing exactly 2 collisions per value — confirm.
    var modulo = testSize / NumberOfCollisionsForEach;
    var toCreate = Enumerable.Range(0, testSize).Select(i => Tuple.Create(i, Guid.NewGuid())).ToArray();
    // Fire off inserts with at most 100 in flight; each completion (success or failure)
    // releases one permit.
    const int concurrency = 100;
    var semaphore = new SemaphoreSlim(concurrency);
    foreach (var t in toCreate) {
        semaphore.Wait();
        cloudTable.ExecuteAsync(TableOperation.Insert(CreateSagaState(t, modulo)))
            .ContinueWith(task => {
                // Best-effort logging only — a failed insert does not fail the test here.
                if (task.Exception != null) { Console.WriteLine($"Exception occured {task.Exception}"); }
                semaphore.Release();
            });
    }
    // Drain all permits: once we hold every one, all in-flight inserts have completed.
    for (var i = 0; i < concurrency; i++) { semaphore.Wait(); }
    var comparer = EqualityComparers.GetValueComparer(EdmType.Int64);
    var indexer = new SagaIndexer(cloudTable, "CorrelatingId", o => (ulong)(long)o, comparer);
    var results = new List <Tuple <Guid, Guid[]> >();
    indexer.SearchForDuplicates((o, guids) => results.Add(Tuple.Create(o, guids.ToArray())));
    // The indexer may report the same key multiple times; merge all reported sets per key.
    var dict = results
        .GroupBy(t => t.Item1, t => t.Item2, comparer)
        .ToDictionary(g => g.Key, g => g.SelectMany(ids => ids).Distinct().ToArray(), comparer);
    Assert.AreEqual(modulo, dict.Count);
    foreach (var kvp in dict) { Assert.AreEqual(2, kvp.Value.Length); }
}
/// <summary>
/// Builds a chained comparer from the supplied ordering and equality comparers.
/// A null sequence is treated as empty, and null entries within a sequence are skipped.
/// </summary>
public ChainComparer(IEnumerable <IComparer <T> > comparers, IEnumerable <IEqualityComparer <T> > equalityComparers)
{
    foreach (var orderingComparer in comparers ?? Enumerable.Empty <IComparer <T> >())
    {
        if (orderingComparer != null)
        {
            Comparers.Add(orderingComparer);
        }
    }
    foreach (var equality in equalityComparers ?? Enumerable.Empty <IEqualityComparer <T> >())
    {
        if (equality != null)
        {
            EqualityComparers.Add(equality);
        }
    }
}
/// <summary>
/// Registers <paramref name="comparer"/> for comparisons of values of type
/// <typeparamref name="T"/>. Registering the same type twice throws (dictionary Add).
/// </summary>
/// <returns>This options instance, to allow fluent chaining.</returns>
public CompareOptions Use <T>(IEqualityComparer <T> comparer)
{
    EqualityComparers.Add(typeof(T), comparer);
    return this;
}
/// <summary>
/// Initializes builder state from the grammar rules and any explicit ambiguity
/// resolutions, pre-computing first/follow sets and queueing every non-terminal
/// for parser-node construction.
/// </summary>
private ParserBuilder(
    IEnumerable <Rule> rules,
    IEnumerable <KeyValuePair <IReadOnlyList <Symbol>, Rule> > ambiguityResolutions)
{
    // Index the rules by the non-terminal they produce.
    var rulesByProduced = rules.GroupBy(r => r.Produced)
        .ToDictionary(g => g.Key, g => (IReadOnlyList <Rule>)g.ToArray());
    this.rules = rulesByProduced;

    // Baseline first/follow sets computed over the flattened rule list.
    var allRules = rulesByProduced.SelectMany(kvp => kvp.Value).ToArray();
    this.baseFirstFollow = new FirstFollowCalculator(allRules);
    this.firstFollow = new InternalFirstFollowProvider(this.baseFirstFollow);

    // Every non-terminal still needs a node built for it.
    this.remainingSymbols = new Queue <NonTerminal>(this.baseFirstFollow.NonTerminals);

    // Resolution keys are symbol sequences, compared element-wise.
    this.ambiguityResolutions = ambiguityResolutions.ToDictionary(
        kvp => kvp.Key,
        kvp => kvp.Value,
        (IEqualityComparer <IReadOnlyList <Symbol> >)EqualityComparers.GetSequenceComparer <Symbol>());
}
/// <summary>
/// Builds a parser node that, after seeing <paramref name="lookaheadToken"/>, disambiguates
/// between <paramref name="rules"/> by parsing a "discriminator" non-terminal whose rules are
/// the distinct post-token suffixes of the given partial rules. Reuses an existing
/// discriminator symbol when one has the exact same suffix rules and compatible follow sets;
/// otherwise registers a new one and queues it for node construction.
/// </summary>
/// <exception cref="ArgumentException">If <paramref name="rules"/> contains duplicate rules.</exception>
/// <exception cref="NotSupportedException">If the grammar cannot be disambiguated this way
/// (empty suffix, or two rules sharing a common suffix).</exception>
private IParserNode CreateGrammarLookaheadParserNode(Token lookaheadToken, IReadOnlyList <PartialRule> rules, ImmutableList <Symbol> prefix) {
    // sanity checks
    if (rules.Select(r => r.Rule).Distinct().Count() != rules.Count) {
        // Fix: ArgumentException takes (message, paramName) — the original call had the
        // arguments swapped, so the param name showed up as the exception message.
        throw new ArgumentException("must be partials of distinct rules", nameof(rules));
    }
    Dictionary <IReadOnlyList <Symbol>, PartialRule> suffixToRuleMapping;
    try {
        suffixToRuleMapping = rules.SelectMany(r => this.GatherPostTokenSuffixes(lookaheadToken, r), (r, suffix) => new { r, suffix })
            // note: this will throw if two rules have the same suffix, but it's not very elegant
            // note: this could be resolvable in some cases. Basically it needs to be the case that:
            // (a) the rules with the same suffix have entirely disjoint follow sets
            // (b) we can successfully proceed by dropping the duplicate suffixes
            // this would require a new type of node or at least modification to the node tree
            .ToDictionary(t => t.suffix, t => t.r, (IEqualityComparer <IReadOnlyList <Symbol> >)EqualityComparers.GetSequenceComparer <Symbol>());
    }
    catch (Exception ex) when(
        (ex is NotSupportedException && ex.Message == "can't remove prefix from empty")
        // NOTE(review): matching the framework's duplicate-key message text is brittle —
        // it varies across .NET versions; left as-is to preserve behavior.
        || (ex is ArgumentException && ex.Message == "An item with the same key has already been added.")) {
        var context = this.GetFullContext(rules.Only(r => r.Produced));
        var additionalSuffixMessage = ex is ArgumentException ? ", [common suffix]" : string.Empty;
        throw new NotSupportedException($"Parsing {context.Item1} on {string.Join(", ", context.Item2.Concat(prefix).Append(lookaheadToken))}{additionalSuffixMessage}", ex);
    }
    NonTerminal discriminator;
    // first, see if we already have an equivalent discriminator
    var comparer = EqualityComparers.GetCollectionComparer(
        EqualityComparers.GetSequenceComparer <Symbol>()
    );
    var existing = this.discriminatorSymbols.Keys
        // must have an exact rule match (todo could also be a superset)
        .Where(d => comparer.Equals(this.rules[d].Select(s => s.Symbols), suffixToRuleMapping.Keys))
        // the matching rule follow sets must be a superset of the incoming rule follow sets
        .FirstOrDefault(d => suffixToRuleMapping.All(
            kvp => this.firstFollow.FollowOf(kvp.Value.Rule)
                .Except(this.firstFollow.FollowOf(this.rules[d].Single(r => r.Symbols.SequenceEqual(kvp.Key)))).Count == 0
        ));
    if (existing != null) {
        discriminator = existing;
        // todo we should probably register additional discriminator context here
    }
    else {
        // create the discriminator symbol
        discriminator = new NonTerminal("T" + this.discriminatorSymbols.Count);
        this.discriminatorSymbols.Add(discriminator, new List <DiscriminatorPrefixInfo>());
        // Each suffix becomes a rule of the discriminator; its follow set is inherited
        // from the original rule it was derived from.
        var rulesAndFollowSets = suffixToRuleMapping.ToDictionary(
            kvp => new Rule(discriminator, kvp.Key, kvp.Value.Rule.Action, kvp.Value.Rule.RequiredParserVariable),
            kvp => this.firstFollow.FollowOf(kvp.Value.Rule));
        foreach (var rule in rulesAndFollowSets.Keys) {
            this.discriminatorRuleMappings.Add(rule, suffixToRuleMapping.Single(kvp => kvp.Key.SequenceEqual(rule.Symbols)).Value.Rule);
        }
        this.rules.Add(discriminator, rulesAndFollowSets.Keys.ToArray());
        this.firstFollow.Register(rulesAndFollowSets);
        this.discriminatorContexts.Add(discriminator, new DiscriminatorContext(outerSymbol: rules.Only(r => r.Produced), prefix: prefix, lookaheadToken: lookaheadToken));
        this.remainingSymbols.Enqueue(discriminator);
    }
    return(new GrammarLookaheadNode(
        lookaheadToken,
        discriminator,
        // map each discriminator rule back to the corresponding original rule
        this.rules[discriminator].ToDictionary(
            r => r,
            r => suffixToRuleMapping[r.Symbols].Rule
        )
    ));
}