Example #1
        public void TestMethod1()
        {
            // Pool with 4 concurrent workers.
            var pool = new TaskQueuePool(4);

            pool.Start();

            // Queue a single action; the state argument ("first") is passed to the callback.
            pool.QueueAction((f) => {
                Thread.Sleep(100);

                Debug.Write(" first");
                Debug.Write(" queue count : " + pool.Count);
                Debug.WriteLine(" current tasks : " + pool.CurrentTasks);
            }, "first");

            Thread.Sleep(1000);

            // Queue 10 short tasks; with 4 workers, the rest wait in the queue.
            for (int i = 0; i < 10; i++)
            {
                pool.QueueAction((index) => {
                    Thread.Sleep(1000);

                    Debug.WriteLine("aaaa " + index + " queue count : " + pool.Count + " current tasks : " + pool.CurrentTasks);
                }, i);
            }

            // Queue 10 longer tasks behind them.
            for (int i = 10; i < 20; i++)
            {
                pool.QueueAction((index) => {
                    Thread.Sleep(2000);

                    Debug.WriteLine("bbbb " + index + " queue count : " + pool.Count + " current tasks : " + pool.CurrentTasks);
                }, i);
            }

            // Let the first two batches drain before the final burst.
            Thread.Sleep(10000);

            for (int i = 20; i < 50; i++)
            {
                pool.QueueAction((index) => {
                    Thread.Sleep(500);

                    Debug.WriteLine("cccc " + index + " queue count : " + pool.Count + " current tasks : " + pool.CurrentTasks);
                }, i);
            }

            // Crude wait for all queued work to finish; the pool exposes no Join/WaitAll
            // here, so the real verification is the Debug output, not the assertion.
            Thread.Sleep(1000000);
            Assert.True(true);
        }
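TaskQueuePool itself is not shown on this page. As a rough sketch of the surface the test above exercises (a Start method, QueueAction overloads with an optional state argument, and Count/CurrentTasks counters), a minimal pool over a BlockingCollection could look like the following; the internals are an assumption inferred from the test, not the library's actual implementation:

        using System;
        using System.Collections.Concurrent;
        using System.Threading;
        using System.Threading.Tasks;

        public class TaskQueuePool
        {
            private readonly BlockingCollection<Action> queue = new BlockingCollection<Action>();
            private readonly int workerCount;
            private int currentTasks;

            public TaskQueuePool(int workerCount)
            {
                this.workerCount = workerCount;
            }

            // Number of actions still waiting in the queue.
            public int Count => queue.Count;

            // Number of actions currently executing on worker tasks.
            public int CurrentTasks => currentTasks;

            public void Start()
            {
                // Workers run for the lifetime of the process; no shutdown is sketched here.
                for (int i = 0; i < workerCount; i++)
                {
                    Task.Run(() =>
                    {
                        foreach (var action in queue.GetConsumingEnumerable())
                        {
                            Interlocked.Increment(ref currentTasks);
                            try { action(); }
                            finally { Interlocked.Decrement(ref currentTasks); }
                        }
                    });
                }
            }

            public void QueueAction(Action action) => queue.Add(action);

            // Overload used by the test: the state argument is passed to the callback.
            public void QueueAction<T>(Action<T> action, T state) => queue.Add(() => action(state));
        }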
Example #2
        public Task Execute(IJobExecutionContext context)
        {
            // Pull the job parameters back out of the Quartz JobDataMap.
            baseUrl = context.JobDetail.JobDataMap.Get("baseUrl").ToString();
            var feedRequest = context.JobDetail.JobDataMap.Get("request") as FeedRequest;

            Logger.GetLogger(baseUrl).Info(" feed job " + context.JobDetail.Key + " added to feed crawl queue");

            // Hand the crawl off to the pool so the scheduler thread is not blocked.
            queuePool.QueueAction(() =>
            {
                Logger.GetLogger(baseUrl).Info(" feed job " + feedRequest.Request.Uri + " starting");

                var response = DoTask(feedRequest);
                Save(feedRequest, response);
            });

            // Nothing is awaited here, so return a completed task instead of
            // declaring the method async (avoids compiler warning CS1998).
            return Task.CompletedTask;
        }
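For context, a job like this would be scheduled with the baseUrl and request values placed in its JobDataMap. A hedged sketch using the standard Quartz.NET 3.x builders (the FeedJob class name and the parameter values are assumptions, not taken from this code):

        // Assumes: using Quartz; using Quartz.Impl;
        public static async Task ScheduleFeedJob(FeedRequest feedRequest)
        {
            var scheduler = await new StdSchedulerFactory().GetScheduler();
            await scheduler.Start();

            var job = JobBuilder.Create<FeedJob>()   // FeedJob: hypothetical name for the job class above
                .WithIdentity("feedJob", "crawl")
                .Build();

            // These two entries are what Execute reads back out of the JobDataMap.
            job.JobDataMap.Put("baseUrl", "https://example.com");
            job.JobDataMap.Put("request", feedRequest);

            var trigger = TriggerBuilder.Create()
                .WithIdentity("feedTrigger", "crawl")
                .StartNow()
                .Build();

            await scheduler.ScheduleJob(job, trigger);
        }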
Example #3
        public Task Execute(IJobExecutionContext context)
        {
            baseUrl = context.JobDetail.JobDataMap.Get("baseUrl").ToString();
            var feedRequest = context.JobDetail.JobDataMap.Get("request") as FeedRequest;

            Logger.GetLogger(baseUrl).Info(" feed job " + context.JobDetail.Key + " added to feed crawl queue");

            // Expand the feed url into one or more crawl addresses.
            var addrs = CodeCompilerManager.GetResult("url", feedRequest.Request.Uri.ToString());

            foreach (var addr in addrs)
            {
                // Clone per address *before* queueing: mutating the shared feedRequest
                // inside the lambdas would let concurrent workers overwrite each other's Uri.
                var request = feedRequest.Request.Clone() as Request;
                request.Uri = new Uri(addr.ToString());

                queuePool.QueueAction(() =>
                {
                    Logger.GetLogger(baseUrl).Info(" feed job " + request.Uri + " starting");

                    // Note: feedRequest itself is still shared across all queued actions.
                    feedRequest.Request = request;

                    var response = DoTask(feedRequest);
                    Save(feedRequest, response);
                });
            }

            return Task.CompletedTask;
        }
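Note the difference from Example #2: the per-address clone is hoisted out of the queued lambda so each action captures its own Request. Even so, every action still shares the single feedRequest instance, so two workers running concurrently can hand DoTask each other's request; a fully safe version would queue an independent FeedRequest per address.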
Example #4
        public void DoTask(string path)
        {
            try
            {
                var file = new FileInfo(path);

                // Snapshot files are named "{feedId}_...", so the id is the first segment.
                var sp      = file.Name.Split('_');
                var feedId  = Convert.ToInt32(sp[0]);
                var content = File.ReadAllText(path);

                var feed = JsonConvert.DeserializeObject<FeedSnapshot>(content);

                Logger.GetLogger(baseUrl).Info(" extract feed " + feed.Url + " address");

                var urls = ExtractAddress(feed);

                Logger.GetLogger(baseUrl).Info(" extract feed " + feed.Url + " address count: " + urls.Count);

                // Load the previously seen urls for this feed, if any.
                var hisFile     = Path.Combine(historyPath, feedId + ".txt");
                var urlsHistory = new string[0];
                if (File.Exists(hisFile))
                {
                    urlsHistory = File.ReadAllLines(hisFile, Encoding.UTF8);

                    Logger.GetLogger(baseUrl).Info(" read feed history : " + urlsHistory.Length);
                }

                // Overwrite the history with the full current list before filtering.
                File.WriteAllLines(hisFile, urls, Encoding.UTF8);

                // Keep only new, non-empty, well-formed absolute urls.
                urls.RemoveAll(m => urlsHistory.Contains(m));
                urls.RemoveAll(m => string.IsNullOrEmpty(m));
                urls.RemoveAll(m => !Uri.IsWellFormedUriString(m, UriKind.Absolute));

                Logger.GetLogger(baseUrl).Info("feed " + feed.Url + " new url count : " + urls.Count);

                foreach (var u in urls)
                {
                    Logger.GetLogger(baseUrl).Info(" extract job " + u + " added to feed extract queue");

                    queuePool.QueueAction(() => {
                        Logger.GetLogger(baseUrl).Info(" extract job " + u + " starting");

                        var result = NodeVisitor.Cooperater.GetResult(u);
                        if (result != null)
                        {
                            Save(feedId, u, result);
                        }
                        else
                        {
                            Logger.GetLogger(baseUrl).Info(" extract job " + u + " result is null");
                        }
                    });
                }

                // Move the processed snapshot into the "pre" folder, replacing any old copy.
                var destFile = path.Replace("snapshot", "pre").Replace(file.Name, feedId + ".txt");
                if (File.Exists(destFile))
                {
                    File.Delete(destFile);
                }

                File.Move(path, destFile);

                Logger.GetLogger(baseUrl).Info(" moved feed snapshot to pre folder " + destFile);
            }
            catch (Exception ex)
            {
                Logger.GetLogger(baseUrl).Error(" feed snapshot extract error " + ex.Message);
            }
        }
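The FeedSnapshot type deserialized above is not included in these examples. Only feed.Url is read directly here, with the rest consumed by ExtractAddress; a minimal DTO consistent with that usage might look like this (the Content property is a guess):

        public class FeedSnapshot
        {
            // The feed's source url; the only property DoTask reads directly.
            public string Url { get; set; }

            // Assumed: raw feed body captured by the crawler for ExtractAddress to parse.
            public string Content { get; set; }
        }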