Example #1
        public void CombinedBucketsAreMergedCorrectly()
        {
            var ts1 = new DateTime(2014, 05, 10, 0, 0, 0);
            var ts2 = new DateTime(2014, 05, 10, 0, 5, 0);
            var sharedDimensions = new DimensionSpecification {
                { "one", "a1" }, { "two", "a2" }
            };
            var bucket1Dimensions = new DimensionSpecification {
                { "one", "b1" }, { "two", "b2" }
            };
            var bucket2Dimensions = new DimensionSpecification {
                { "one", "c1" }, { "two", "c2" }
            };

            var bucket1 =
                new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet), ts1,
                                                 TimeSpan.FromMinutes(5).Ticks, null,
                                                 this.properties.MemoryStreamManager);

            bucket1.AddValue(sharedDimensions, 867);
            bucket1.AddValue(bucket1Dimensions, 5309);

            var bucket2 =
                new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet), ts2,
                                                 TimeSpan.FromMinutes(5).Ticks, null,
                                                 this.properties.MemoryStreamManager);

            bucket2.AddValue(sharedDimensions, 867);
            bucket2.AddValue(bucket2Dimensions, 42);

            bucket1.Seal();
            bucket2.Seal();
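            // Merge the two sealed 5-minute buckets into a single 10-minute bucket;
            // the dimension combination present in both buckets should have its hit counts summed.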
            var bucket3 =
                new DataBucket<InternalHitCount>(new[] { bucket1, bucket2 },
                                                 new DimensionSet(this.twoDimensionSet), ts1,
                                                 TimeSpan.FromMinutes(10).Ticks, null,
                                                 this.properties.MemoryStreamManager);

            var match = bucket3.GetMatches(bucket1Dimensions).First().Data;

            Assert.AreEqual((ulong)5309, match.HitCount);
            match = bucket3.GetMatches(bucket2Dimensions).First().Data;
            Assert.AreEqual((ulong)42, match.HitCount);

            match = bucket3.GetMatches(sharedDimensions).First().Data;
            Assert.AreEqual((ulong)867 * 2, match.HitCount);

            bucket1.Dispose();
            bucket2.Dispose();
            bucket3.Dispose();
        }
Example #2
        public void HistogramCompactionIsCorrect()
        {
            const int numBuckets       = 5;
            const int samplesPerBucket = 100;
            var       buckets          = new DataBucket<InternalHistogram>[numBuckets];

            for (int i = 0; i < numBuckets; i++)
            {
                var newBucket =
                    new DataBucket<InternalHistogram>(this.dimensionSet, DateTime.Now,
                                                      TimeSpan.FromMinutes(1).Ticks,
                                                      this.storagePath,
                                                      this.properties.MemoryStreamManager);
                buckets[i] = newBucket;

                for (int sample = 0; sample < samplesPerBucket; sample++)
                {
                    newBucket.AddValue(this.dimensions, sample);
                }
                newBucket.Seal();
            }

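            // Compact the five sealed 1-minute buckets into a single 60-minute bucket.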
            var compactedBucket =
                new DataBucket<InternalHistogram>(buckets, this.dimensionSet, DateTime.Now,
                                                  TimeSpan.FromMinutes(60).Ticks,
                                                  this.storagePath,
                                                  this.properties.MemoryStreamManager);

            var data      = compactedBucket.GetMatches(this.dimensions);
            var singleVal = data.First().Data;

            Assert.IsNotNull(singleVal);
            Assert.AreEqual(numBuckets * samplesPerBucket, (int)singleVal.SampleCount);
            compactedBucket.Dispose();
        }
Example #3
        public void ExistingDataFilesAreCompactedOnCompactCall()
        {
            // Create one bucket at present time so that remaining buckets
            // being created are out of compaction
            var timeStamp = DateTime.UtcNow;
            var bucket    =
                new DataBucket<InternalHitCount>(this.dimensionSet, timeStamp, TimeSpan.TicksPerMinute,
                                                 this.storagePath,
                                                 this.properties.MemoryStreamManager);

            bucket.AddValue(this.dimensions, 5);
            bucket.Seal();
            bucket.Persist();

            // Create buckets manually at 1 min and 5 min quantum
            // but they actually belong at a 10 min once compacted
            var expectedNewBucketQuantum = customCompaction.Intervals.ElementAt(2).Interval.Ticks;
            var timeStamp2 = RoundTimeStamp(timeStamp.Subtract(new TimeSpan(2, 20, 0)), expectedNewBucketQuantum);

            // The buckets will have files written out but not be
            // part of the dataset until it is created
            var bucket1 =
                new DataBucket<InternalHitCount>(this.dimensionSet, timeStamp2, TimeSpan.TicksPerMinute,
                                                 this.storagePath,
                                                 this.properties.MemoryStreamManager);

            bucket1.AddValue(this.dimensions, 5);
            bucket1.Seal();
            bucket1.Persist();
            var timeStamp3 = timeStamp2.Add(new TimeSpan(0, 5, 0));
            var bucket2    =
                new DataBucket<InternalHitCount>(this.dimensionSet, timeStamp3,
                                                 TimeSpan.TicksPerMinute * 5,
                                                 this.storagePath,
                                                 this.properties.MemoryStreamManager);

            bucket2.AddValue(this.dimensions, 2);
            bucket2.Seal();
            bucket2.Persist();

            bucket.Dispose();
            bucket1.Dispose();
            bucket2.Dispose();

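            // Re-create the data set so it loads the persisted bucket files, then trigger compaction.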
            this.InitializeDataSet(null);
            this.dataSet.Compact();

            // Verify that a 10 minute compacted bucket was created
            var newBucket = this.dataSet.GetDataBucket(timeStamp2);

            Assert.AreEqual(timeStamp2.Ticks, newBucket.StartTicks);
            Assert.AreEqual(expectedNewBucketQuantum, newBucket.EndTicks - newBucket.StartTicks);
            var matches = newBucket.GetMatches(this.dimensions).ToList();

            Assert.AreEqual(1, matches.Count);
            var result = matches[0].Data;

            Assert.AreEqual((ulong)7, result.HitCount);
        }
Example #4
        public void SealAndReleaseAreThreadSafe()
        {
            var filterableDimension = new DimensionSet(new HashSet<Dimension> {
                new Dimension("thing")
            });

            using (
                var filterableBucket =
                    new DataBucket<InternalHitCount>(filterableDimension,
                                                     this.timestamp,
                                                     DefaultBucketTimeSpanTicks,
                                                     this.currentDirectory,
                                                     properties.MemoryStreamManager))
            {
                var allDims = new DimensionSpecification {
                    { "thing", "thing" }
                };
                Parallel.For(0, 10, (i) => filterableBucket.AddValue(allDims, i));

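                // Interleave concurrent reads, writes, and data releases to exercise thread safety.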
                Parallel.For(0, 100, (i) =>
                {
                    switch (i % 3)
                    {
                    case 0:
                        foreach (var item in filterableBucket.GetMatches(allDims))
                        {
                            Assert.IsNotNull(item);
                        }
                        break;

                    case 1:
                        filterableBucket.AddValue(allDims, 11);
                        break;

                    case 2:
                        filterableBucket.ReleaseData();
                        break;
                    }
                });
            }
        }
Example #5
        public void SplitByDimensionWithFiltersWorksProperly()
        {
            var filterableDimension = new DimensionSet(new HashSet<Dimension> {
                new Dimension("thing"), new Dimension("meat")
            });

            using (
                var filterableBucket =
                    new DataBucket<InternalHitCount>(filterableDimension, this.timestamp,
                                                     DefaultBucketTimeSpanTicks,
                                                     this.currentDirectory,
                                                     this.properties.MemoryStreamManager))
            {
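                // Write two samples under distinct combinations of the "thing" and "meat" dimensions.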
                var queryDimensions = new DimensionSpecification();
                queryDimensions["thing"] = "thingOne";
                queryDimensions["meat"]  = "bacon";
                filterableBucket.AddValue(queryDimensions, 100);

                queryDimensions["thing"] = "thingTwo";
                queryDimensions["meat"]  = "pepperoni";
                filterableBucket.AddValue(queryDimensions, 200);
                filterableBucket.Seal();

                // thingOne and thingTwo will match with no filter
                Assert.AreEqual(2,
                                filterableBucket.GetMatchesSplitByDimension(new DimensionSpecification(), "thing")
                                .Sum(match => match.DataCount));

                // only thingOne matches bacon
                var bestMatchFilter = new DimensionSpecification {
                    { "meat", "bacon" }
                };
                Assert.AreEqual(1,
                                filterableBucket.GetMatchesSplitByDimension(bestMatchFilter, "thing")
                                .Sum(match => match.DataCount));
            }
        }