Example No. 1
        /// <summary>
        /// Run the processing of incoming requests
        /// </summary>
        private async Task Run()
        {
            var rs = new ReportSet();
            var tf = GetTempFilename();

            foreach (var f in GetAbandonedFiles(null))
            {
                await m_forward.WriteAsync(f);
            }

            while (true)
            {
                var forceSend = false;
                try
                {
                    // Block for the first item; once the batch is non-empty, wait at most WAIT_TIME
                    var item = await m_channel.ReadAsync(rs.Items.Count == 0 ? Timeout.Infinite : WAIT_TIME);

                    if (item != null)
                    {
                        forceSend = item.Type == ReportType.Crash;
                        rs.Items.Add(item);
                        File.WriteAllText(tf, JsonConvert.SerializeObject(rs));
                    }
                }
                catch (TimeoutException)
                {
                    forceSend = true;
                }

                if ((forceSend && rs.Items.Count > 0) || (rs.Items.Count > MAX_ITEMS_IN_SET))
                {
                    var nextFilename = GetTempFilename();
                    await m_forward.WriteAsync(tf);

                    rs = new ReportSet();

                    foreach (var f in GetAbandonedFiles(tf))
                    {
                        await m_forward.WriteAsync(f);
                    }

                    tf = nextFilename;
                }
            }
        }
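
Example No. 1 references several members that are not part of the listing: GetTempFilename(), GetAbandonedFiles(), the m_forward/m_channel fields and the WAIT_TIME/MAX_ITEMS_IN_SET constants. Purely as an aid to reading, here is a minimal sketch of what the two helpers might look like, assuming report sets are staged as JSON files in a dedicated folder; REPORT_FOLDER, the file-name pattern and the usual System.IO/System.Linq usings are assumptions, not part of the original code:

        // Hypothetical sketch only; the real helpers live elsewhere in the source project.
        private static string GetTempFilename()
        {
            // One staging file per pending ReportSet
            return Path.Combine(REPORT_FOLDER, "report-" + Guid.NewGuid().ToString("N") + ".json");
        }

        private static IEnumerable<string> GetAbandonedFiles(string current)
        {
            // Every staged report file except the one currently being filled;
            // passing null (as done at startup) therefore yields all leftover files
            return Directory.EnumerateFiles(REPORT_FOLDER, "report-*.json")
                            .Where(f => !string.Equals(f, current, StringComparison.OrdinalIgnoreCase));
        }
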
Example No. 2
        /// <summary>
        /// Runs the report processor
        /// </summary>
        /// <param name="forward">The channel accepting filenames with usage reports.</param>
        internal static Tuple<Task, IWriteChannel<ReportItem>> Run(IWriteChannel<string> forward)
        {
            var instanceid = System.Diagnostics.Process.GetCurrentProcess().Id.ToString();
            var channel    = ChannelManager.CreateChannel<ReportItem>(
                maxPendingWriters: MAX_QUEUE_SIZE,
                pendingWritersOverflowStrategy: QueueOverflowStrategy.LIFO
                );

            var task = AutomationExtensions.RunTask(
                new
            {
                Input  = channel.AsRead(),
                Output = forward
            },
                async (self) =>
            {
                // Wait 20 seconds before we start transmitting
                for (var i = 0; i < 20; i++)
                {
                    await Task.Delay(TimeSpan.FromSeconds(1)).ConfigureAwait(false);
                    if (await self.Input.IsRetiredAsync)
                    {
                        return;
                    }
                }

                await ProcessAbandonedFiles(self.Output, self.Input, null).ConfigureAwait(false);

                var rs = new ReportSet();
                var tf = GetTempFilename(instanceid);
                var nextTransmitTarget = new DateTime(0);

                while (true)
                {
                    var forceSend = false;
                    try
                    {
                        // We wait until we get an item, or WAIT_TIME from the last event
                        var waittime = rs.Items.Count == 0
                            ? Timeout.Infinite
                            : new TimeSpan(Math.Max(0, (nextTransmitTarget - DateTime.UtcNow).Ticks));

                        var item = await self.Input.ReadAsync(waittime).ConfigureAwait(false);
                        if (item != null)
                        {
                            if (rs.Items.Count == 0)
                            {
                                nextTransmitTarget = DateTime.UtcNow + WAIT_TIME;
                            }

                            forceSend = item.Type == ReportType.Crash;
                            rs.Items.Add(item);
                            File.WriteAllText(tf, JsonConvert.SerializeObject(rs));
                        }
                    }
                    catch (TimeoutException)
                    {
                        forceSend = true;
                    }

                    if ((forceSend && rs.Items.Count > 0) || (rs.Items.Count > MAX_ITEMS_IN_SET))
                    {
                        var nextFilename = GetTempFilename(instanceid);
                        self.Output.WriteNoWait(tf);
                        rs = new ReportSet();

                        await ProcessAbandonedFiles(self.Output, self.Input, null).ConfigureAwait(false);

                        tf = nextFilename;
                    }
                }
            });

            return new Tuple<Task, IWriteChannel<ReportItem>>(task, channel);
        }
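
Example No. 2 returns both the long-running processing task and the channel that producers push ReportItem values into, while the names of flushed temp files flow out on the forward channel. A possible caller is sketched below; the ReportProcessor class name, the ReportItem object initializer and the parameterless CreateChannel/ReadAsync overloads are assumptions not shown in the listing:

        // Hypothetical caller; the names and initializers below are illustrative only.
        internal static async Task StartReporting()
        {
            var uploadChannel = ChannelManager.CreateChannel<string>();   // carries temp-file names to an uploader stage
            var pair          = ReportProcessor.Run(uploadChannel);       // the created channel doubles as the IWriteChannel<string> argument
            var reportChannel = pair.Item2;

            // Queue a report; a ReportType.Crash item forces an immediate flush, after which
            // the name of the flushed temp file is written to uploadChannel.
            await reportChannel.WriteAsync(new ReportItem { Type = ReportType.Crash });

            var pendingFile = await uploadChannel.ReadAsync();
            Console.WriteLine("Ready to upload: " + pendingFile);
        }

Note that the processor deliberately idles for 20 seconds before transmitting anything, so the first file name only appears on uploadChannel after that start-up delay.
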
Example No. 3
        /// <summary>
        /// Run the processing of incoming requests
        /// </summary>
        private async Task Run()
        {
            var rs = new ReportSet();
            var tf = GetTempFilename();

            foreach(var f in GetAbandonedFiles(null))
                await m_forward.WriteAsync(f);

            while(true)
            {
                var forceSend = false;
                try
                {
                    var item = await m_channel.ReadAsync(rs.Items.Count == 0 ? Timeout.Infinite : WAIT_TIME);
                    if (item != null)
                    {
                        forceSend = item.Type == ReportType.Crash;
                        rs.Items.Add(item);
                        File.WriteAllText(tf, JsonConvert.SerializeObject(rs));
                    }
                }
                catch(TimeoutException)
                {
                    forceSend = true;
                }

                if ((forceSend && rs.Items.Count > 0) || (rs.Items.Count > MAX_ITEMS_IN_SET))
                {
                    var nextFilename = GetTempFilename();
                    await m_forward.WriteAsync(tf);
                    rs = new ReportSet();

                    foreach(var f in GetAbandonedFiles(tf))
                        await m_forward.WriteAsync(f);

                    tf = nextFilename;
                }
            }
        }
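
All three variants share the same flush triggers: a crash report, a timeout once the batch is non-empty, or the batch growing past MAX_ITEMS_IN_SET. The constants themselves are not shown in any of the listings; the values below are illustrative assumptions only, mainly to indicate the expected types (WAIT_TIME is presumably a TimeSpan, since Example No. 2 mixes Timeout.Infinite and new TimeSpan(...) in a single ternary):

        // Illustrative values only; the real constants are defined elsewhere in the source project.
        private const int MAX_ITEMS_IN_SET = 50;                              // flush once a batch grows past this many items
        private const int MAX_QUEUE_SIZE   = 1000;                            // upper bound on pending writers for the ReportItem channel
        private static readonly TimeSpan WAIT_TIME = TimeSpan.FromMinutes(5); // longest a non-empty batch waits before being flushed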