Example #1
        public async Task<IHttpActionResult> GetRecentInvocationsTimeline(
            string functionId,
            [FromUri] PagingInfo pagingInfo,
            DateTime? start = null,
            DateTime? end   = null)
        {
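            // Default the time range to the last 7 days when the caller does not supply one.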
            if (end == null)
            {
                end = DateTime.UtcNow;
            }
            if (start == null)
            {
                start = end.Value.AddDays(-7);
            }
            if (pagingInfo == null)
            {
                return BadRequest();
            }

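            // Query the per-time-bucket aggregate statistics for this function over the requested window.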
            var segment = await _reader.GetAggregateStatsAsync(functionId, start.Value, end.Value, null);

            var entities = segment.Results;

            var result = Array.ConvertAll(entities, entity => new
            {
                StartBucket = entity.TimeBucket,
                Start       = entity.Time,
                TotalPass   = entity.TotalPass,
                TotalFail   = entity.TotalFail,
                TotalRun    = entity.TotalRun
            });

            return Ok(result);
        }
Example #2
        [Theory]
        [InlineData(20, 80)]     // three quarters of the entries should be dropped
        public async Task LogsAreDroppedWhenBufferIsFull(int maxBufferedEntryCount, int logItemCount)
        {
            List<Exception> caughtExceptions = new List<Exception>();
            LogWriter       writer           = (LogWriter)LogFactory.NewWriter(defaultHost, "c1", this, (ex) => caughtExceptions.Add(ex));

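            // Cap the writer's buffer so that entries logged beyond the cap are dropped and reported through the exception handler above.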
            writer.MaxBufferedEntryCount = maxBufferedEntryCount;
            ILogReader reader = LogFactory.NewReader(this);

            var logItems = new List<FunctionInstanceLogItem>();

            for (int i = 0; i < logItemCount; i++)
            {
                logItems.Add(new FunctionInstanceLogItem
                {
                    FunctionInstanceId = Guid.NewGuid(),
                    FunctionName       = "test",
                    StartTime          = DateTime.UtcNow - TimeSpan.FromMilliseconds(50),
                    EndTime            = DateTime.UtcNow,
                    LogOutput          = "output 1"
                });
            }

            foreach (var item in logItems)
            {
                await writer.AddAsync(item);
            }

            await writer.FlushAsync();

            var id = logItems[0].FunctionId;

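            // If more entries were logged than the buffer could hold, the writer should have reported the dropped entries.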
            if (maxBufferedEntryCount < logItemCount)
            {
                Assert.NotEmpty(caughtExceptions);
                Assert.StartsWith("The limit on the number of buffered log entries was reached.", caughtExceptions[0].Message);
            }

            // Counts should be intact
            var segment1 = await reader.GetAggregateStatsAsync(id, DateTime.MinValue, DateTime.MaxValue, null);

            var runs = segment1.Results.Sum(x => x.TotalRun);

            Assert.Equal(logItemCount, runs);

            // Some of the results should be missing
            var segmentRecent = await reader.GetRecentFunctionInstancesAsync(new RecentFunctionQuery
            {
                FunctionId     = id,
                Start          = DateTime.MinValue,
                End            = DateTime.MaxValue,
                MaximumResults = 1000
            }, null);

            int expectedLoggedCount = Math.Min(logItemCount, maxBufferedEntryCount);

            Assert.NotNull(segmentRecent);
            Assert.Equal(expectedLoggedCount, segmentRecent.Results.Length);
        }
Example #3
        private async Task<FunctionStatistics> LookupAsync(FunctionId functionId)
        {
            var total = new FunctionStatistics();

            // Summarize over the last 7 days.
            DateTime now   = DateTime.UtcNow;
            DateTime start = now.AddDays(-7);

            var segment = await _reader.GetAggregateStatsAsync(functionId, start, now, null);

            var items = segment.Results;

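            // Sum the per-bucket pass/fail counts into a single total.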
            foreach (var item in items)
            {
                total.SucceededCount += item.TotalPass;
                total.FailedCount    += item.TotalFail;
            }

            return total;
        }

        [Fact]
        public async Task LogExactWriteAndRead()
        {
            // Make some very precise writes and verify we read exactly what we'd expect.
            ILogWriter writer = LogFactory.NewWriter(defaultHost, "c1", this);
            ILogReader reader = LogFactory.NewReader(this);

            string Func1 = "alpha";
            string Func2 = "beta";

            var t1a = new DateTime(2010, 3, 6, 10, 11, 20);
            var t1b = new DateTime(2010, 3, 6, 10, 11, 21); // same time bucket as t1a
            var t2  = new DateTime(2010, 3, 7, 10, 11, 21);

            FunctionInstanceLogItem l1 = new FunctionInstanceLogItem
            {
                FunctionInstanceId = Guid.NewGuid(),
                FunctionName       = Func1,
                StartTime          = t1a,
                LogOutput          = "one"
            };

            await WriteAsync(writer, l1);

            await writer.FlushAsync(); // Multiple flushes; test starting & stopping the background worker.

            FunctionInstanceLogItem l2 = new FunctionInstanceLogItem
            {
                FunctionInstanceId = Guid.NewGuid(),
                FunctionName       = Func2,
                StartTime          = t1b,
                LogOutput          = "two"
            };

            await WriteAsync(writer, l2);

            FunctionInstanceLogItem l3 = new FunctionInstanceLogItem
            {
                FunctionInstanceId = Guid.NewGuid(),
                FunctionName       = Func1,
                StartTime          = t2,
                LogOutput          = "three",
                ErrorDetails       = "this failed"
            };

            await WriteAsync(writer, l3);

            await writer.FlushAsync();

            // Now read
            var definitionSegment = await reader.GetFunctionDefinitionsAsync(null, null);

            string[] functionNames = Array.ConvertAll(definitionSegment.Results, definition => definition.Name);
            Array.Sort(functionNames);
            Assert.Equal(Func1, functionNames[0]);
            Assert.Equal(Func2, functionNames[1]);

            // Read Func1
            {
                var segment1 = await reader.GetAggregateStatsAsync(l3.FunctionId, DateTime.MinValue, DateTime.MaxValue, null);

                Assert.Null(segment1.ContinuationToken);
                var stats1 = segment1.Results;
                Assert.Equal(2, stats1.Length); // one bucket for t1a, one for t2

                // First bucket has l1, second bucket has l3
                Assert.Equal(1, stats1[0].TotalPass);
                Assert.Equal(1, stats1[0].TotalRun);
                Assert.Equal(0, stats1[0].TotalFail);

                Assert.Equal(0, stats1[1].TotalPass);
                Assert.Equal(1, stats1[1].TotalRun);
                Assert.Equal(1, stats1[1].TotalFail);

                // Results come back in reverse chronological order, so l3 (the latest invocation) is listed first.
                var recent1 = await GetRecentAsync(reader, l3.FunctionId);

                Assert.Equal(2, recent1.Length);

                Assert.Equal(l3.FunctionInstanceId, recent1[0].FunctionInstanceId);
                Assert.Equal(l1.FunctionInstanceId, recent1[1].FunctionInstanceId);
            }

            // Read Func2
            {
                var segment2 = await reader.GetAggregateStatsAsync(l2.FunctionId, DateTime.MinValue, DateTime.MaxValue, null);

                var stats2 = segment2.Results;
                Assert.Single(stats2);
                Assert.Equal(1, stats2[0].TotalPass);
                Assert.Equal(1, stats2[0].TotalRun);
                Assert.Equal(0, stats2[0].TotalFail);

                var recent2 = await GetRecentAsync(reader, l2.FunctionId);

                Assert.Single(recent2);
                Assert.Equal(l2.FunctionInstanceId, recent2[0].FunctionInstanceId);
            }
        }