/// <summary>
        /// Asserts all expectations for a single iteration of the control logic within the target
        /// <see cref="DataAggregationBackgroundWorker{TDataPointTypeEnum}"/>.  Returns when all
        /// expectations have been met.
        /// </summary>
        /// <param name="noRawData">
        /// Whether to simulate a situation where the pager is returning empty pages.
        /// </param>
        /// <param name="noAggregatedData">
        /// Whether to simulate a situation where the aggregation of the data provided by the
        /// pager consists of no aggregated data points.
        /// </param>
        /// <param name="expectPagerToFail">
        /// Whether to throw an exception from within the pager when asked to retrieve raw data point observations.
        /// </param>
        /// <param name="expectAggregationToFail">
        /// Whether to throw an exception from within the aggregator when asked to aggregate raw data point observations.
        /// </param>
        /// <param name="expectFailureStoringResults">
        /// Whether to throw an exception when asked to persist aggregation results.
        /// </param>
        /// <param name="seed">
        /// Seed to use for pseudo-random generation of sample data.
        /// </param>
        /// <remarks>
        /// At most one of the three failure flags may be set per call: a pager failure prevents
        /// aggregation from ever happening, and an aggregation failure prevents result storage
        /// (enforced by the asserts below). Each failure mode ends the iteration early.
        /// </remarks>
        private void AssertSingleIteration(
            bool noRawData = false,
            bool noAggregatedData = false,
            bool expectPagerToFail = false,
            bool expectAggregationToFail = false,
            bool expectFailureStoringResults = false,
            int seed = 0)
        {
            // The message expected to accompany every simulated failure registered via PushError.
            string errorMessage = 
                "Exception during aggregation of " + ArbitraryDataPointType + " data from PagerWrapper.FixedToStringValue" + 
                " using DAaVE.Samples.SampleDataPointAggregator";

            // Register the single expected error for this iteration, and verify the failure
            // flags are not combined in impossible ways.
            if (expectPagerToFail)
            {
                this.PushError(errorMessage, typeof(FormatException));
                Assert.IsFalse(expectAggregationToFail, "Aggregation won't happen, so cannot fail.");
                Assert.IsFalse(expectFailureStoringResults, "Result storage won't happen, so cannot fail.");
            }
            else if (expectAggregationToFail)
            {
                this.PushError(errorMessage, typeof(DivideByZeroException));
                Assert.IsFalse(expectFailureStoringResults, "Result storage won't happen, so cannot fail.");
            }
            else if (expectFailureStoringResults)
            {
                this.PushError(errorMessage, typeof(MissingMemberException));
            }

            // Deterministically generate the raw observations the pager will serve and the
            // aggregated points the aggregator will produce (either may be forced empty via
            // a zero maximum length).
            DataPointObservation[] sampleRawData;
            AggregatedDataPoint[] sampleAggregatedData;
            bool isPartial = GenerateSampleData(
                seed: seed,
                sampleRawDataMaximumLength: noRawData ? 0 : 50,
                sampleAggregatedDataMaximumLength: noAggregatedData ? 0 : 50,
                sampleRawData: out sampleRawData,
                sampleAggregatedData: out sampleAggregatedData);

            // Signaled when the pager-supplied data object receives the aggregation result.
            ManualResetEventSlim aggregationResultReceivedByPagerDataObject = new ManualResetEventSlim(false);
            SampleConsecutiveDataPointObservationsCollection dataObjectFromPager = new SampleConsecutiveDataPointObservationsCollection(
                sampleRawData.OrderBy(d => d.UtcTimestamp),
                aggregationResult =>
                {
                    Assert.IsTrue(
                        aggregationResult.SequenceEqual(sampleAggregatedData),
                        "Entire aggregation result should be sent verbatim to the original pager-supplied data set");
                    
                    // Simulate a storage failure at the moment results are handed back;
                    // note the event is deliberately NOT set in that case.
                    if (expectFailureStoringResults)
                    {
                        throw new MissingMemberException();
                    }

                    aggregationResultReceivedByPagerDataObject.Set();
                },
                isPartial);

            if (expectPagerToFail)
            {
                this.ExpectPagerRequest(exceptionToThrow: new FormatException());
            }
            else
            {
                this.ExpectPagerRequest(dataToReturn: dataObjectFromPager);
            }

            if (noRawData || expectPagerToFail)
            {
                // The aggregator should not be called; this iteration is now complete.
                return;
            }

            ConsecutiveDataPointObservationsCollection dataProvidedToAggregator = 
                expectAggregationToFail ?
                    this.ExpectAggregationRequestResponse(exceptionToThrow: new DivideByZeroException()) :
                    this.ExpectAggregationRequestResponse(response: sampleAggregatedData);

            Assert.IsTrue(
                dataProvidedToAggregator.SequenceEqual(sampleRawData.OrderBy(d => d.UtcTimestamp)),
                "Entire pager output should be passed verbatim as a single data-set to the aggregator for aggregation");

            if (noAggregatedData || expectAggregationToFail) 
            {
                // No aggregation results to report. Defer the "no result arrived" check until
                // after the test body finishes, to catch late unexpected deliveries.
                this.postTestVerifications.Enqueue(() => 
                {
                    Assert.IsFalse(aggregationResultReceivedByPagerDataObject.IsSet, "Unexpected aggregator results provided to pager data object");
                });
                return;
            }

            // Happy path: block until the result hand-off is observed. When a storage failure
            // is simulated the callback throws before signaling, so there is nothing to wait on.
            if (!expectFailureStoringResults)
            {
                Assert.IsTrue(aggregationResultReceivedByPagerDataObject.Wait(Timeout), "Aggregator results not provided to originating pager data object");
            }
        }
        /// <summary>
        /// Verifies that aggregation-result uploads are serialized: a second upload must not
        /// begin while the first is still in flight, and should proceed promptly once the
        /// first completes. Also checks the overall elapsed time matches the forced stall.
        /// </summary>
        public void AggregationUploadConcurrency()
        {
            Stopwatch stopwatch = Stopwatch.StartNew();
            using (DataAggregationBackgroundWorker<SampleDataPointType> worker = this.NewTarget())
            {
                DataPointObservation[] rawObservations;
                AggregatedDataPoint[] aggregatedPoints;
                bool partial = GenerateSampleData(
                    seed: 03291812,
                    sampleRawDataMaximumLength: 1,
                    sampleAggregatedDataMaximumLength: 1,
                    sampleRawData: out rawObservations,
                    sampleAggregatedData: out aggregatedPoints);

                // First page: its upload callback stalls until explicitly released.
                ManualResetEventSlim firstUploadReleased = new ManualResetEventSlim(initialState: false);
                SampleConsecutiveDataPointObservationsCollection firstPage = new SampleConsecutiveDataPointObservationsCollection(
                    rawObservations.OrderBy(d => d.UtcTimestamp),
                    aggregationResult => firstUploadReleased.Wait(),
                    partial);

                // Second page: its upload callback merely records that it started.
                ManualResetEventSlim secondUploadStarted = new ManualResetEventSlim(initialState: false);
                SampleConsecutiveDataPointObservationsCollection secondPage = new SampleConsecutiveDataPointObservationsCollection(
                    rawObservations.OrderBy(d => d.UtcTimestamp),
                    aggregationResult => secondUploadStarted.Set(),
                    partial);

                // Iteration 1:
                this.ExpectPagerRequest(dataToReturn: firstPage);
                this.ExpectAggregationRequestResponse(response: aggregatedPoints);

                // Iteration 2: must not reach its upload while iteration 1's is stalled.
                this.ExpectPagerRequest(dataToReturn: secondPage);
                this.ExpectAggregationRequestResponse(response: aggregatedPoints);
                Assert.IsFalse(secondUploadStarted.Wait(TimeSpan.FromSeconds(10.0)), "Only one upload should be allowed at a time.");

                firstUploadReleased.Set();

                Assert.IsTrue(secondUploadStarted.Wait(TimeSpan.FromSeconds(10.0)), "Second aggregation did not get unblocked when expected.");
            }

            // The deliberate stall above should dominate the test's wall-clock time.
            TimeSpan elapsed = stopwatch.Elapsed;
            AssertTimeSpanBetween(
                TimeSpan.FromSeconds(5.0),
                elapsed,
                TimeSpan.FromSeconds(15.0),
                "Test should be blocked for about 10 seconds, but took {0} to complete",
                elapsed);
        }
        /// <summary>
        /// Verifies that disposing the worker blocks while an aggregation-result upload is
        /// still in flight, and that disposal completes once that upload finishes.
        /// </summary>
        public void DisposalDuringUpload()
        {
            DataPointObservation[] rawObservations;
            AggregatedDataPoint[] aggregatedPoints;
            bool partial = GenerateSampleData(
                seed: 03291952,
                sampleRawDataMaximumLength: 1,
                sampleAggregatedDataMaximumLength: 1,
                sampleRawData: out rawObservations,
                sampleAggregatedData: out aggregatedPoints);

            DataAggregationBackgroundWorker<SampleDataPointType> worker = this.NewTarget();

            ManualResetEventSlim disposalShouldBeUnblocked = new ManualResetEventSlim(initialState: false);
            ManualResetEventSlim upload1Started = new ManualResetEventSlim(initialState: false);
            ManualResetEventSlim upload2Started = new ManualResetEventSlim(initialState: false);
            ManualResetEventSlim upload2Released = new ManualResetEventSlim(initialState: false);

            // First page: its upload completes immediately after recording that it started.
            SampleConsecutiveDataPointObservationsCollection firstPage = new SampleConsecutiveDataPointObservationsCollection(
                rawObservations.OrderBy(d => d.UtcTimestamp),
                aggregationResult => upload1Started.Set(),
                partial);

            // Second page: its upload stalls until released, which should keep Dispose blocked.
            SampleConsecutiveDataPointObservationsCollection secondPage = new SampleConsecutiveDataPointObservationsCollection(
                rawObservations.OrderBy(d => d.UtcTimestamp),
                aggregationResult =>
                {
                    upload2Started.Set();
                    upload2Released.Wait();
                    disposalShouldBeUnblocked.Set();
                },
                partial);

            this.ExpectPagerRequest(dataToReturn: firstPage);
            this.ExpectAggregationRequestResponse(response: aggregatedPoints);
            this.ExpectPagerRequest(dataToReturn: secondPage);
            this.ExpectAggregationRequestResponse(response: aggregatedPoints);
            Assert.IsTrue(upload1Started.Wait(TimeSpan.FromSeconds(5.0)), "Upload 1 should have begun.");
            Assert.IsTrue(upload2Started.Wait(TimeSpan.FromSeconds(5.0)), "Upload 2 should have begun.");

            // Dispose on a separate thread so its blocking behavior can be observed.
            using (Task disposalTask = Task.Run(() => worker.Dispose()))
            {
                Assert.IsFalse(disposalTask.Wait(TimeSpan.FromSeconds(5.0)), "Dispose should be blocked.");

                upload2Released.Set();

                Assert.IsTrue(disposalShouldBeUnblocked.Wait(TimeSpan.FromSeconds(5.0)), "Dispose should be unblocked.");
                Assert.IsTrue(disposalTask.Wait(TimeSpan.FromSeconds(5.0)), "Dispose should terminate.");
            }
        }
        /// <summary>
        /// Aggregates a set of mock observations through a freshly constructed
        /// <see cref="AverageBySecondDataPointAggregator"/>.
        /// </summary>
        /// <param name="observations">
        /// The (UTC-timestamped) mock observations to aggregate; one mock observation is used
        /// per element of this array.
        /// </param>
        /// <returns>The results of the aggregation.</returns>
        private static IEnumerable<AggregatedDataPoint> DummyAggregation(params DataPointObservation[] observations)
        {
            // Whether the collection is partial has no bearing on this direct aggregation call.
            const bool DoesntMatter = true;

            // Wrap the observations (ordered chronologically, as the collection requires);
            // the aggregation-receiver callback must never fire during a direct Aggregate call.
            ConsecutiveDataPointObservationsCollection aggregationInput =
                new SampleConsecutiveDataPointObservationsCollection(
                    observations: observations.OrderBy(o => o.UtcTimestamp),
                    aggregationReceiver: _ => Assert.Fail("ProvideCorrespondingAggregatedData should not be called"),
                    isPartial: DoesntMatter);

            return new AverageBySecondDataPointAggregator().Aggregate(aggregationInput);
        }