Example No. 1
0
        /// <summary>
        /// Behavior while waiting for a crawl job to begin.
        /// Receive handlers are evaluated in registration order, so the guarded
        /// <c>CrawlCanStart</c> handler must stay registered before the unguarded fallback.
        /// </summary>
        private void Ready()
        {
            // kick off the job
            Receive<IStartJobV1>(start =>
            {
                Subscribers.Add(start.Requestor);

                // Poll every 20ms until the coordinator router reports routees.
                JobStarter = Context.System.Scheduler.ScheduleTellRepeatedlyCancelable(TimeSpan.FromMilliseconds(20),
                                                                                       TimeSpan.FromMilliseconds(20), Self, new AttemptToStartJob(start), Self);
            });

            Receive<AttemptToStartJob>(start =>
            {
                // Capture Self on the actor thread; the continuation below runs on a
                // pool thread where the ambient Self context is not valid.
                var self = Self;
                CoordinatorRouter.Ask<Routees>(new GetRoutees()).ContinueWith(tr =>
                {
                    // A faulted or cancelled Ask (e.g. timeout) would make tr.Result
                    // throw, faulting the continuation and piping a Status.Failure
                    // that ReceiveAny would stash forever. Report zero routees
                    // instead so the scheduled retry simply fires again.
                    var routeeCount = tr.IsCanceled || tr.IsFaulted ? 0 : tr.Result.Members.Count();
                    return new CrawlCanStart(start.Job, routeeCount);
                }).PipeTo(self);
            });

            // Routees are available — kick off the initial downloads and parsing.
            Receive<CrawlCanStart>(start => start.NodeCount > 0, start =>
            {
                var downloadRootDocument = new DownloadWorker.DownloadHtmlDocument(new CrawlDocument(start.Job.Job.Root));

                CoordinatorRouter.Tell(downloadRootDocument);
                JobStarter.Cancel();

                Become(Started);
                Stash.UnstashAll();
            });

            // NodeCount == 0 — keep waiting; the scheduler will retry.
            Receive<CrawlCanStart>(start =>
            {
                Log.Info("Can't start job yet. No routees.");
            });

            // Stash everything else until the job has started.
            ReceiveAny(o => Stash.Stash());
        }
        /// <summary>
        /// Behavior while waiting for a crawl job to begin.
        /// Receive handlers are evaluated in registration order, so the guarded
        /// <c>CrawlCanStart</c> handler must stay registered before the unguarded fallback.
        /// </summary>
        private void Ready()
        {
            // kick off the job
            Receive<IStartJobV1>(start =>
            {
                Subscribers.Add(start.Requestor);

                // Poll every 20ms until the coordinator router reports routees.
                JobStarter = Context.System.Scheduler.ScheduleTellRepeatedlyCancelable(TimeSpan.FromMilliseconds(20),
                    TimeSpan.FromMilliseconds(20), Self, new AttemptToStartJob(start), Self);
            });

            Receive<AttemptToStartJob>(start =>
            {
                // Capture Self on the actor thread; the continuation below runs on a
                // pool thread where the ambient Self context is not valid.
                var self = Self;
                CoordinatorRouter.Ask<Routees>(new GetRoutees()).ContinueWith(tr =>
                {
                    // A faulted or cancelled Ask (e.g. timeout) would make tr.Result
                    // throw, faulting the continuation and piping a Status.Failure
                    // that ReceiveAny would stash forever. Report zero routees
                    // instead so the scheduled retry simply fires again.
                    var routeeCount = tr.IsCanceled || tr.IsFaulted ? 0 : tr.Result.Members.Count();
                    return new CrawlCanStart(start.Job, routeeCount);
                }).PipeTo(self);
            });

            // Routees are available — kick off the initial downloads and parsing.
            Receive<CrawlCanStart>(start => start.NodeCount > 0, start =>
            {
                var downloadRootDocument = new DownloadWorker.DownloadHtmlDocument(new CrawlDocument(start.Job.Job.Root));

                CoordinatorRouter.Tell(downloadRootDocument);
                JobStarter.Cancel();

                Become(Started);
                Stash.UnstashAll();
            });

            // NodeCount == 0 — keep waiting; the scheduler will retry.
            Receive<CrawlCanStart>(start =>
            {
                Log.Info("Can't start job yet. No routees.");
            });

            // Stash everything else until the job has started.
            ReceiveAny(o => Stash.Stash());
        }