public void CountWithFilter()
{
    DataProducer<string> subject = new DataProducer<string>();
    IFuture<int> count = subject.Count(x => x.Length >= 2);
    subject.ProduceAndEnd("a", "bbb", "cc", "d");
    Assert.AreEqual(2, count.Value);
}
public void Count()
{
    DataProducer<string> subject = new DataProducer<string>();
    IFuture<int> count = subject.Count();
    subject.ProduceAndEnd("a", "b", "c", "d");
    Assert.AreEqual(4, count.Value);
}
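// A hand-rolled sketch of what a push-based count can look like, to show why
// count.Value is only meaningful after ProduceAndEnd has run. It assumes the
// MiscUtil-style IDataProducer<T> events DataProduced/EndOfData; those event
// names are an assumption here, not something taken from the tests above.
public void CountWiredByHand()
{
    DataProducer<string> subject = new DataProducer<string>();
    int count = 0;
    bool sourceEnded = false;

    subject.DataProduced += item => count++;        // one increment per pushed item
    subject.EndOfData += () => sourceEnded = true;  // the aggregate is only final here

    subject.ProduceAndEnd("a", "bbb", "cc", "d");

    Assert.IsTrue(sourceEnded);
    Assert.AreEqual(4, count);
}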
public void CountWithNulls()
{
    int?[] data = { 1, null, 4, null, 3, null, 2 };
    DataProducer<int?> subject = new DataProducer<int?>();
    IFuture<int> count = subject.Count();
    subject.ProduceAndEnd(data);
    Assert.AreEqual(data.Length, count.Value);
    Assert.AreEqual(7, count.Value); // to be sure...
}
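// A hypothetical companion test (not part of the original suite): the
// unfiltered Count() above counts every pushed element, nulls included,
// whereas the predicate overload shown in CountWithFilter can be used to
// count only the non-null values.
public void CountNonNullValues()
{
    int?[] data = { 1, null, 4, null, 3, null, 2 };
    DataProducer<int?> subject = new DataProducer<int?>();
    IFuture<int> all = subject.Count();
    IFuture<int> nonNull = subject.Count(x => x != null);
    subject.ProduceAndEnd(data);
    Assert.AreEqual(7, all.Value);
    Assert.AreEqual(4, nonNull.Value);
}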
public void ComplexFileParsing()
{
    // the push-LINQ start-point; a data-producer
    var source = new DataProducer<string>();

    // listen to data and log any comments
    // (note that we don't have to use a list here,
    // we could do something more interesting as
    // we see the lines)
    var comments = (from line in source
                    where line.StartsWith("#")
                    select line.TrimStart('#').Trim()).ToList();

    // listen to data and create entities from
    // lines that aren't comments
    var people = from line in source
                 where !line.StartsWith("#")
                 let fields = line.Split('~')
                 select new
                 {
                     Forename = fields[0],
                     Surname = fields[1],
                     Age = int.Parse(fields[2])
                 };

    // just for the fun of it, find the longest line-length etc
    var maxLen = source.Max(line => line.Length);
    var count = source.Count();

    // and while we're having fun, perform some aggregates
    // on the people *as we're reading them!*
    // (not afterwards, like how LINQ-to-objects works)
    var stats = (from person in people
                 group person by person.Surname into grp
                 let agg = new
                 {
                     Surname = grp.Key,
                     Count = grp.Count(),
                     MaxAge = grp.Max(p => p.Age)
                 }
                 orderby agg.Surname
                 select agg).ToList();

    // and we'll want to catch the people
    var peopleList = people.ToList();

    // now that we've set everything up,
    // read the file *once*
    //source.ProduceAndEnd(new LineReader(path));
    source.ProduceAndEnd(new LineReader(() => new StringReader(FileContents)));

    // sort the groups
    //stats.Sort(grp => grp.Surname);

    // show what we got
    TextWriter output = new StringWriter();
    output.WriteLine("Max Line Length: {0}", maxLen);
    output.WriteLine("Line Count: {0}", count);
    Write(output, "Comments", comments);
    Write(output, "People", peopleList);
    Write(output, "Stats", stats);
    Assert.AreEqual(ExpectedOutput, NormalizeLineEndings(output.ToString()));
}
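// The Write and NormalizeLineEndings helpers used above are referenced but not
// defined in this listing. The sketch below is one plausible shape for them,
// an assumption rather than the original implementation (the real Write must
// match whatever format ExpectedOutput encodes); it also assumes the results
// passed in, such as the ToList() outputs, are ordinary enumerable sequences.
static void Write<T>(TextWriter output, string title, IEnumerable<T> items)
{
    // print a caption followed by one indented line per item
    output.WriteLine("{0}:", title);
    foreach (T item in items)
    {
        output.WriteLine("  {0}", item);
    }
}

static string NormalizeLineEndings(string text)
{
    // make the comparison with ExpectedOutput independent of the platform newline
    return text.Replace("\r\n", "\n");
}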