// Entry coroutine: hides the action buttons, spawns the player, wires the
// fight-end callback, prints the starting stats, then hands control to the
// character-selection step.
public IEnumerator Start()
{
    state = CrawlerState.BEGIN;

    // Action buttons stay hidden until the intro text has finished.
    buttonFight.SetActive(false);
    buttonMove.SetActive(false);
    buttonCapacity.SetActive(false);

    // Spawn the player prefab and hook it into the fight system.
    player = Instantiate(player);
    fightScript.onFightEnd.AddListener(OnFightEnd);
    fightScript.player = player;

    // Display the starting statistics one line at a time.
    yield return StartCoroutine(hpText.ChangeText($" HP : {player.HP}"));
    yield return StartCoroutine(strenghtText.ChangeText($" Strength : {player.strength}"));
    yield return StartCoroutine(defenseText.ChangeText($" Defense : {player.defense}"));
    yield return StartCoroutine(intelligenceText.ChangeText($" Intelligence : {player.intelligence}"));
    yield return StartCoroutine(moveText.ChangeText($" Move : {player.movementCase}"));
    yield return StartCoroutine(ChangeText("Info player"));
    //yield return StartCoroutine(ChangeText($"{player.Description}"));
    //yield return StartCoroutine(ChangeText(currentRoom.description));

    yield return new WaitForSeconds(1.0f);
    yield return StartCoroutine(SelectPerso());
    //buttons.SetActive(true);
}
// Publishes the new crawler state and mirrors it on the UI: status label
// text plus the caption/enabled flag of the start/stop toggle button.
void ChangeCurrentStatus(CrawlerState state)
{
    // Push the state to the shared manager first.
    CrawlerManager.Instance.SetState(state);

    // Refresh the status label (marshalled to the UI thread if required).
    string message = StringData.strCrawLerState[(Int32)state];
    label_CurrentState.InvokeIfNeeded(SetCurrentStatus, message);

    // Adjust the start/stop button for the new state.
    switch (state)
    {
        case CrawlerState.STOP:
            button_onoff.Text = "시작";      // "Start"
            button_onoff.Enabled = true;
            LastCrawlingTick_ = 0;
            break;
        case CrawlerState.STOPPING:
            button_onoff.Text = "중지중";    // "Stopping" — disabled while winding down
            button_onoff.Enabled = false;
            break;
        case CrawlerState.WORKING:
            button_onoff.Text = "중지";      // "Stop"
            button_onoff.Enabled = true;
            break;
    }
}
// Toggles the crawling process based on the current state:
// init/stopping -> inform the user; working -> request cancellation;
// stopped/error -> arm the crawler timer to begin a new run.
void StartCrawling()
{
    CrawlerState currentstate = CrawlerManager.Instance.GetState();
    switch (currentstate)
    {
        case CrawlerState.INITIATING:
        case CrawlerState.BEFORE_INIT:
            // Still initializing — ask the user to retry later.
            MessageBox.Show("초기화 중입니다. 잠시후 다시 요청하세요.");
            break;

        case CrawlerState.WORKING:
            if (crawler_worker_.IsBusy == true)
            {
                // Worker is running: request cooperative cancellation.
                ChangeCurrentStatus(CrawlerState.STOPPING);
                crawler_worker_.CancelAsync();
            }
            else
            {
                // State said WORKING but no worker is busy — just stop.
                ChangeCurrentStatus(CrawlerState.STOP);
            }
            break;

        case CrawlerState.ERROR:
        case CrawlerState.STOP:
            // Start (or restart) crawling: fire the timer almost immediately.
            ChangeCurrentStatus(CrawlerState.WORKING);
            AppManager.Instance.GetCrawlerTimer().Interval = 10;
            AppManager.Instance.GetCrawlerTimer().Enabled = true;
            break;

        case CrawlerState.STOPPING:
            // A stop is already in flight — ask the user to retry later.
            MessageBox.Show("크롤링 중지 중입니다. 잠시후 다시 요청하세요.");
            break;
    }
}
// BackgroundWorker completion handler for one crawl pass: reports errors,
// refreshes the counters, then either re-arms the timer for the next pass
// or finalizes a pending stop.
void worker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    if (e.Error != null)
    {
        MessageBox.Show(e.Error.Message, "Error");
        return;
    }
    // NOTE(review): forcing a full GC after every pass usually hurts
    // throughput; consider removing unless memory spikes were measured.
    GC.Collect();
    label_CrawlingFailedCount.InvokeIfNeeded(SetFailedCount);
    label_CrawlingCount.InvokeIfNeeded(SetTotalCrawlingCount);
    CrawlerState pState = CrawlerManager.Instance.GetState();
    if (pState == CrawlerState.WORKING)
    {
        // Still working: schedule the next crawl tick.
        AppManager.Instance.GetCrawlerTimer().Interval = CrawlerInfoManager.Instance.crawlingtick_;
        AppManager.Instance.GetCrawlerTimer().Enabled = true;
        LastCrawlingTick_ = Environment.TickCount;
    }
    else if (pState == CrawlerState.STOPPING)
    {
        // A stop was requested while the worker ran: complete it now.
        ChangeCurrentStatus(CrawlerState.STOP);
        LastCrawlingTick_ = 0;
    }
    else
    {
        LastCrawlingTick_ = 0;
    }
    label_NextCrawling.InvokeIfNeeded(SetNextCrawlingLeftTime);
}
/// <summary>
/// Monitors system resources and user activity, periodically updating the
/// shared variable 'crawlerState': Run when the machine has been idle for
/// more than 5 seconds AND CPU idle time exceeds 40%, Stop otherwise.
/// </summary>
public void Scheduler()
{
    PerformanceCounter pc = new PerformanceCounter("Processor", "% Idle Time", "_Total", true);
    try
    {
        LASTINPUTINFO info = new LASTINPUTINFO();
        info.cbSize = Marshal.SizeOf(typeof(LASTINPUTINFO));
        while (GlobalData.RunScheduler)
        {
            if (GetLastInputInfo(ref info))
            {
                // Hoisted: one idle-time and one counter sample per loop, so the
                // message shows the same CPU-idle value the decision used
                // (previously NextValue() was sampled twice with differing results).
                int idleSeconds = (Environment.TickCount - info.dwTime) / 1000;
                float cpuIdle = pc.NextValue();
                if (idleSeconds > 5 && (int)cpuIdle > 40)
                {
                    crawlerState = CrawlerState.Run;
                }
                else
                {
                    crawlerState = CrawlerState.Stop;
                    if (idleSeconds <= 5)
                    {
                        // NOTE(review): writes a UI control from this background
                        // thread — confirm the toolkit in use tolerates this.
                        GlobalData.lIndexingStatus.Text = string.Format("Indexing is paused and will be resumed in {0} sec of computer inactivity [ CPU Idle : {1:F2}% ]", 5 - idleSeconds, cpuIdle);
                    }
                }
            }
            Thread.Sleep(1000);
        }
    }
    finally
    {
        // Fix: previously the counter leaked if the loop threw; always release it.
        pc.Close();
    }
}
// Shared state flag read by the indexing worker; written only by Scheduler().
CrawlerState crawlerState = CrawlerState.Run;
/// <summary>
/// Monitors system resources and user activity, periodically updating the
/// shared variable 'crawlerState': Run when the machine has been idle for
/// more than 5 seconds AND CPU idle time exceeds 40%, Stop otherwise.
/// </summary>
public void Scheduler()
{
    PerformanceCounter pc = new PerformanceCounter("Processor", "% Idle Time", "_Total", true);
    try
    {
        LASTINPUTINFO info = new LASTINPUTINFO();
        info.cbSize = Marshal.SizeOf(typeof(LASTINPUTINFO));
        while (GlobalData.RunScheduler)
        {
            if (GetLastInputInfo(ref info))
            {
                // Hoisted: one idle-time and one counter sample per loop, so the
                // message shows the same CPU-idle value the decision used
                // (previously NextValue() was sampled twice with differing results).
                int idleSeconds = (Environment.TickCount - info.dwTime) / 1000;
                float cpuIdle = pc.NextValue();
                if (idleSeconds > 5 && (int)cpuIdle > 40)
                {
                    crawlerState = CrawlerState.Run;
                }
                else
                {
                    crawlerState = CrawlerState.Stop;
                    if (idleSeconds <= 5)
                    {
                        // NOTE(review): writes a UI control from this background
                        // thread — confirm the toolkit in use tolerates this.
                        GlobalData.lIndexingStatus.Text = string.Format("Indexing is paused and will be resumed in {0} sec of computer inactivity [ CPU Idle : {1:F2}% ]", 5 - idleSeconds, cpuIdle);
                    }
                }
            }
            Thread.Sleep(1000);
        }
    }
    finally
    {
        // Fix: previously the counter leaked if the loop threw; always release it.
        pc.Close();
    }
}
// Runs a single crawler agent over the texture: applies forces, moves the
// agent and paints its trail, yielding every few steps so the effect
// animates over time. Ends when the agent dies.
IEnumerator<WaitForSeconds> Crawl(Texture2D tex)
{
    CrawlerState walker = new CrawlerState(tex);
    int stepsSinceYield = 0;
    int stepsPerYield = Mathf.RoundToInt(0.01f / crawlSpeed);

    while (true)
    {
        List<Vector2> forces = walker.CalculateForces(tex);
        walker.ApplyForces(forces);
        walker.Move();

        if (!walker.Alive)
        {
            yield break; // agent died — stop crawling
        }

        walker.UpdateColor(tex);
        if (walker.Painting)
        {
            myTex.SetPixel(walker.x, walker.y, GetColor(walker.velocity.magnitude));
        }

        // Yield periodically so the crawl is spread across frames.
        stepsSinceYield++;
        if (stepsSinceYield > stepsPerYield)
        {
            stepsSinceYield = 0;
            yield return new WaitForSeconds(crawlSpeed);
        }
    }
}
// Character-action step: reveals the action buttons and, unless the
// player is dead (HP == 0), asks what to do next.
IEnumerator SelectPerso()
{
    state = CrawlerState.CURRENT_PERSO;

    buttonFight.SetActive(true);
    buttonMove.SetActive(true);
    buttonCapacity.SetActive(true);

    // A dead player gets no prompt (original had an empty HP == 0 branch).
    if (player.HP != 0)
    {
        yield return StartCoroutine(ChangeText("Que voulez-vous faire ?"));
    }
    // TODO (from original): victory condition and dead-character handling.
}
// Page handler that fires 1000 asynchronous crawl requests at the same URL
// as a load/smoke test of the crawler pipeline.
protected void Page_Load(object sender, EventArgs e)
{
    Crawler crawler = new Crawler();
    // Fix: removed the unused local `CrawlerState st` and added braces
    // around the loop body per convention.
    for (int i = 0; i < 1000; i++)
    {
        crawler.AsyncCrawl("http://www.hao123.com/", ResultCallback, null);
    }
}
// Records the new state; on transition to Crawled, stamps the crawl time.
public override void setState(CrawlerState state) // 2019103013
{
    this.state = state; // 2019103012
    if (state == CrawlerState.Crawled)
    {
        // NOTE(review): local time (DateTime.Now); confirm UTC is not required here.
        urlCrawledTime = DateTime.Now;
    }
}
// Entry coroutine: spawns the player, hides the buttons, shows the first
// room description and then asks the player to choose a door.
IEnumerator Start()
{
    state = CrawlerState.BEGIN;
    buttons.SetActive(false);

    player = Instantiate(player);
    fightScript.onFightEnd += OnFightEnd;
    fightScript.player = player;

    yield return StartCoroutine(ChangeText(currentRoom.description));
    yield return new WaitForSeconds(1.0f);
    yield return StartCoroutine(ChooseDoor());
}
// Door-choice step: ends the game when the current room is a dead end
// (no left or right room), otherwise prompts the player to pick a door.
IEnumerator ChooseDoor()
{
    state = CrawlerState.CHOOSE_ROOM;
    buttons.SetActive(true);

    bool isDeadEnd = currentRoom.roomLeft == null && currentRoom.roomRight == null;
    if (isDeadEnd)
    {
        EndGame();
    }
    else
    {
        yield return StartCoroutine(ChangeText("Choisissez une porte "));
    }
}
// Entry coroutine: hides the buttons, spawns the player, hooks the
// fight-end callback, shows the starting room and asks for a door.
IEnumerator Start()
{
    state = CrawlerState.BEGIN;
    buttons.SetActive(false);

    // TODO (from original): build the dungeon, show the crawling panel,
    // fetch the player.
    player = Instantiate(player);
    fightScript.onFightEnd.AddListener(OnFightEnd);
    fightScript.player = player;

    yield return StartCoroutine(ChangeText(currentRoom.description));
    yield return new WaitForSeconds(1.0f);
    yield return StartCoroutine(ChooseDoor());

    buttons.SetActive(true);
}
// Front-end window bootstrap: raises thread-pool and connection limits for
// aggressive parallel crawling, then sets up the two 250 ms UI refresh timers.
public CrawlerFrontend()
{
    InitializeComponent();

    // Very large pool limits so crawl work items are never queued behind each other.
    ThreadPool.SetMaxThreads(100000, 100000);
    ThreadPool.SetMinThreads(100000, 100000);
    ServicePointManager.DefaultConnectionLimit = 1000;

    crawlerBE = new CrawlerBackend();
    state = CrawlerState.Crawled;

    // URL-tab refresh timer, ticking every 250 ms.
    timerUpdateURLTab = new DispatcherTimer();
    timerUpdateURLTab.Tick += timerUpdateURLTab_Tick;
    timerUpdateURLTab.Interval = TimeSpan.FromMilliseconds(250);

    // Statistics-tab refresh timer, ticking every 250 ms.
    timerUpdateStatisticsTab = new DispatcherTimer();
    timerUpdateStatisticsTab.Tick += timerUpdateStatisticsTab_Tick;
    timerUpdateStatisticsTab.Interval = TimeSpan.FromMilliseconds(250);
}
// Character-action step: reveals the action buttons and asks the player
// what to do next.
IEnumerator SelectPerso()
{
    state = CrawlerState.CURRENT_PERSO;

    buttonFight.SetActive(true);
    buttonMove.SetActive(true);
    buttonCapacity.SetActive(true);

    yield return StartCoroutine(ChangeText("Que voulez-vous faire ?"));
    // TODO (from original): victory condition and dead-character (HP == 0) handling.
}
// Returns a snapshot of the URLs tracked under the given state;
// an unknown state yields an empty list.
public List<string> GetURLs(CrawlerState state)
{
    if (state == CrawlerState.Crawled)
    {
        return new List<string>(dCrawledURLs.Keys);
    }
    if (state == CrawlerState.Crawling)
    {
        return new List<string>(dCrawlingURLs.Keys);
    }
    if (state == CrawlerState.ToBeCrawled)
    {
        return new List<string>(dToBeCrawledURLs.Keys);
    }
    if (state == CrawlerState.Disabled)
    {
        return disabledURLs.Select(child => child.url).ToList(); // 2019103026
    }
    if (state == CrawlerState.Root)
    {
        return lRootURLs.Select(root => root.rootURL).ToList(); // 2019103026
    }
    return new List<string>();
}
// Radio-button handler for the URL-list filter: clears the list box and
// records which crawler state the user wants to inspect.
private void rb_Checked(object sender, RoutedEventArgs e)
{
    lb_URL.Items.Clear();

    // Exactly one radio button can be checked; map it to the filter state.
    if (rb_crawled.IsChecked == true)
    {
        state = CrawlerState.Crawled;
    }
    else if (rb_crawling.IsChecked == true)
    {
        state = CrawlerState.Crawling;
    }
    else if (rb_toBeCrawled.IsChecked == true)
    {
        state = CrawlerState.ToBeCrawled;
    }
    else if (rb_disabled.IsChecked == true)
    {
        state = CrawlerState.Disabled;
    }
    else if (rb_roots.IsChecked == true)
    {
        state = CrawlerState.Root;
    }
}
// Acquires a concurrency slot and starts the asynchronous HTTP exchange for
// the given job: a request-stream upload when there is data to send,
// otherwise a direct response fetch. Returns the wait handle of the async
// operation, or null when starting failed (in which case the slot is
// released here and the user callback is invoked with the exception
// recorded on the state).
static WaitHandle ProcessRequest(CrawlerState state)
{
    // Throttle: blocks until one of the limited job slots is free.
    // On success the slot is presumably released later by the callback
    // chain — TODO confirm against internalCallback's implementation.
    maxConcurrentJobControl.WaitOne();
    try
    {
        state.request = state.CreateRequest();
        if (state.hasDataToSend)
        {
            // Upload path: announce the body length, then begin streaming it.
            state.request.ContentLength = state.DataToSend.Length;
            IAsyncResult sendResult = state.request.BeginGetRequestStream(state.internalCallback, state);
            return sendResult.AsyncWaitHandle;
        }
        else
        {
            IAsyncResult result = state.request.BeginGetResponse(state.internalCallback, state);
            return result.AsyncWaitHandle;
        }
    }
    catch (Exception e)
    {
        // Failure before the async call took ownership: record the error,
        // notify the caller and free the concurrency slot ourselves.
        state.exception = e;
        state.userCallback(state);
        maxConcurrentJobControl.Release();
        return null;
    }
}
/// <summary>
/// Enables the appropriate buttons according to the crawler's state.
/// </summary>
/// <param name="state">The <see cref="CrawlerState"/> of the Client.</param>
private void EnableControls(CrawlerState state)
{
    switch (state)
    {
        case CrawlerState.Stopped:
            cmdStart.Enabled = true;
            cmdStop.Enabled = false;
            cmdPause.Enabled = false;
            cmdResume.Enabled = false;
            cmdTerminate.Enabled = true;
            break;
        case CrawlerState.Paused:
            cmdStart.Enabled = false;
            cmdStop.Enabled = false;
            cmdPause.Enabled = false;
            // Fix: Resume must be enabled while Paused — it was disabled,
            // leaving the user no way back to Running (the crawler's Resume()
            // only accepts the Paused state).
            cmdResume.Enabled = true;
            cmdTerminate.Enabled = true;
            break;
        case CrawlerState.Running:
            cmdStart.Enabled = false;
            cmdStop.Enabled = true;
            cmdPause.Enabled = true;
            cmdResume.Enabled = false;
            cmdTerminate.Enabled = true;
            break;
        default:
            DisableControls();
            break;
    }
}
// Base implementation: stores the crawler state on this object.
// Overridable so derived URL types can add transition side effects.
public virtual void setState(CrawlerState state) // 2019103013
{
    this.state = state; // 2019103006
}
/// <summary>
/// Stops the crawling process. If the crawler is already in the <see cref="CrawlerState.Stopped"/>
/// state it has no effect. If the crawling is in progress it is not stopped abruptly
/// but the method waits until the current working Url Set is processed.
/// </summary>
public void Stop()
{
    try
    {
        stopping = true;
        if(globals.Settings.LogLevel == CWLogLevel.LogInfo)
        {
            globals.FileLog.LogInfo("Crawler.Stop is trying to stop all threads.");
        }
        //wait for all the threads to finish
        // Drain phase 1: poll until the crawl queue empties.
        while(urlsToCrawl.Count>0)
        {
            Thread.Sleep(3000);//TODO ExponentialBackoff.DefaultBackoff);
        }
        mustStop = true;
        // Drain phase 2: wait until every worker observes mustStop and exits.
        while(runningThreads > 0)
        {
            Thread.Sleep(3000);//TODO ExponentialBackoff.DefaultBackoff);
        }
        stopping = false;
        //store the crawl results on disk
        StoreCrawlResults();
        //Stop all threads
        StopAllThreads();
        robotsFilter.SaveEntries();
        state = CrawlerState.Stopped;
        //notify the other classes
        OnStateChanged(EventArgs.Empty);
    }
    catch(Exception ex)
    {
        if(globals.Settings.LogLevel <= CWLogLevel.LogWarning)
        {
            globals.FileLog.LogWarning("Crawler.Stop failed: " + ex.ToString());
        }
    }
    finally
    {
        // Always reset queues and flags so a subsequent Start() begins clean,
        // even if the orderly shutdown above failed.
        queueSize = 0;
        urlsToCrawl.Clear();
        crawledUrls.Clear();
        stopping = false;
        mustStop = false;
    }
}
/// <summary>
/// Starts the crawling process. If the crawler is already in the <see cref="CrawlerState.Running"/>
/// state it has no effect.
/// </summary>
/// <exception cref="CWException">Thrown if the crawler is in the <see cref="CrawlerState.Paused"/> state.</exception>
public void Start()
{
    if(state == CrawlerState.Running)
    {
        return;
    }
    if(state == CrawlerState.Paused)
    {
        throw new CWException("The Crawler is in the Pause mode and cannot be started.");
    }
    try
    {
        mustStop = false;
        stopping = false;
        syncBackOff.Reset();
        downloadBackOff.Reset();
        //Initialize the results queue
        InitializeResultsQueue();
        //Initialize the Urls queue
        InitializeUrlsQueue();
        //create the thread that will be downloading new urls to crawl
        synchronizeThread = new Thread(new ThreadStart(SynchronizeProcess));
        synchronizeThread.IsBackground = true;
        synchronizeThread.Priority = ThreadPriority.Lowest;
        synchronizeThread.Name = "Synchronization Thread";
        synchronizeThread.Start();
        //create the thread that will be sending the results to the server
        sendResultsThread = new Thread(new ThreadStart(SendResultsToServer));
        sendResultsThread.IsBackground = true;
        sendResultsThread.Priority = ThreadPriority.Lowest;
        sendResultsThread.Name = "Results Thread";
        sendResultsThread.Start();
        //create the threads that will perform the crawling
        crawlingThreads = new Thread[numThreads];
        for(int i=0; i<numThreads; i++)
        {
            crawlingThreads[i] = new Thread(new ThreadStart(PerformCrawling));
            crawlingThreads[i].IsBackground = true;
            crawlingThreads[i].Priority = ThreadPriority.Lowest;
            crawlingThreads[i].Name = "Crawler Thread " + i.ToString();
            crawlingThreads[i].Start();
        }
        //Notify the clients that the crawling process has started
        state = CrawlerState.Running;
        OnStateChanged(EventArgs.Empty);
    }
    catch(Exception ex)
    {
        if(globals.Settings.LogLevel <= CWLogLevel.LogError)
        {
            globals.FileLog.LogError("The Crawler failed to start: " + ex.ToString());
        }
        // Roll back a partial start: signal workers to stop and report Stopped.
        mustStop = true;
        state = CrawlerState.Stopped;
        try
        {
            StopAllThreads();
        }
        catch(Exception exc)
        {
            globals.FileLog.LogError("The Crawler failed to stop all the threads: " + exc.ToString());
        }
    }
    finally
    {
    }
}
/// <summary>
/// Resumes the crawling process if it has been paused.
/// </summary>
/// <exception cref="CWException">
/// Thrown if the crawler is in the <see cref="CrawlerState.Stopped"/> or
/// <see cref="CrawlerState.Running"/> state, i.e. not Paused.
/// </exception>
public void Resume()
{
    bool notPaused = state == CrawlerState.Stopped || state == CrawlerState.Running;
    if (notPaused)
    {
        throw new CWException("The crawler is not in the Paused state and cannot be resumed.");
    }
    try
    {
        state = CrawlerState.Running;
        ResumeAllThreads();
        OnStateChanged(EventArgs.Empty);
    }
    catch
    {
        // Waking the threads failed — fall back to the Paused state.
        state = CrawlerState.Paused;
    }
}
/// <summary>
/// Pauses the crawling process by calling <see cref="Thread.Suspend"/> on all
/// the crawling threads. Calling it while already Paused has no effect.
/// </summary>
/// <exception cref="CWException">Thrown if the crawler is in the <see cref="CrawlerState.Stopped"/> state.</exception>
public void Pause()
{
    // Idempotent: a second Pause is a no-op.
    if (state == CrawlerState.Paused)
    {
        return;
    }
    if (state == CrawlerState.Stopped)
    {
        throw new CWException("The crawler is in the Stopped state and cannot be paused.");
    }
    try
    {
        state = CrawlerState.Paused;
        SuspendAllThreads();
        OnStateChanged(EventArgs.Empty);
    }
    catch
    {
        // Suspending the threads failed — fall back to the Running state.
        state = CrawlerState.Running;
    }
}
/// <summary>
/// Called at regular intervals in order to refresh the form status:
/// drains the event log queue, syncs the button row with the crawler state,
/// and updates the memory/speed gauges while the crawler is running.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void tmrTimer_Tick(object sender, System.EventArgs e)
{
    try
    {
        if (chkEnableLog.Checked)
        {
            // Drain all pending log entries into the log control.
            Queue<EventLoggerEntry> events = core.GetEventQueue();
            while (events.Count > 0)
            {
                EventLoggerEntry entry = events.Dequeue();
                rtblClientLog.LogEventEntry(entry);
            }
        }
        state = core.GetState();
        EnableControls(state);
        memory = core.GetMemoryUsage();
        if (memory < hlbMemory.Maximum)
        {
            hlbMemory.Value = memory;
        }
        hlgMemory.NextValue = memory;
        // Fix: reuse the state fetched above instead of a redundant second
        // core.GetState() call; the old `core != null` guard was also moot —
        // core had already been dereferenced earlier in this handler.
        if (state == CrawlerState.Running)
        {
            stats = core.GetStatistics();
            // stats[9] is cumulative bytes; convert the 5 s delta to KB/s.
            speed = (int)(Math.Round((double)((stats[9] - totalbytes) / 5120)));
            totalbytes = stats[9];
            hlbSpeed.Value = speed;
            hlgSpeed.NextValue = speed;
            UpdateStatistics();
        }
    }
    catch
    {
        // Best-effort UI refresh: swallow so the timer keeps ticking.
        // NOTE(review): consider logging the exception.
    }
}
// Builds the per-request state object that travels through the async crawl
// pipeline, binding the request URL to its callbacks and owning crawler.
protected virtual CrawlerState CreateState(string request, WaitCallback callback, object userState)
{
    return new CrawlerState
    {
        originalRequest = request,
        internalCallback = CrawlerCallback,
        userCallback = callback,
        userState = userState,
        crawler = this,
    };
}
// Aborts the in-flight web request of a crawl job, if any.
// Uses a check / lock / re-check sequence on state.request so that
// concurrent callers abort the request at most once before nulling it.
static void AbortJob(CrawlerState state)
{
    if (state.request != null)
    {
        // NOTE(review): locks on the state object itself; a private gate
        // object would be safer if other code also locks this instance.
        lock (state)
        {
            if (state.request != null)
            {
                state.request.Abort();
                state.request = null;
            }
        }
    }
}
/// <summary>
/// Constructs a new instance of the <see cref="Crawler"/> class and initializes its
/// properties with the default values. The constructor is private so that only the
/// class itself can create an instance (singleton-style access).
/// </summary>
private Crawler()
{
    //first of all get a reference to the global variables because they are needed
    //in order to initialize some variables.
    globals = Globals.Instance();
    // Run-state flags and counters.
    mustStop = false;
    stopping = false;
    state = CrawlerState.Stopped;
    stats = new long[10] {0,0,0,0,0,0,0,0,0,0};
    numThreads = (int)globals.Settings.ConnectionSpeed;
    runningThreads = 0;
    //sendResultsThread = null;
    synchronizeThread = null;
    crawlingThreads = null;
    // Backoff policies: slow/declining for server sync, fast for downloads.
    syncBackOff = new Backoff(BackoffSpeed.Declining, 30000);
    downloadBackOff = new Backoff(BackoffSpeed.Fast);
    // Work queues and result bookkeeping.
    urlsToCrawl = new Queue();
    resultFileNames = new Queue();
    crawledUrls = new ArrayList();
    queueSize = 0;
    dataFileName = String.Empty;
    // Text encodings: Greek defaults (ISO-8859-7 and Windows-1253).
    defaultEncoding = Encoding.GetEncoding("ISO-8859-7");
    defaultGreekEncoding = Encoding.GetEncoding(1253);
    // Pre-compiled regex that sniffs a charset out of an HTML meta tag.
    contentRegex = new Regex("<meta\\s*http-equiv=([^>])*charset\\s*=\\s*([^>])*(utf-7|utf-8|utf-16|windows-1253)([^>])*>",RegexOptions.CultureInvariant|RegexOptions.Multiline|RegexOptions.IgnoreCase|RegexOptions.Compiled);
    // Parser singletons for the supported content types.
    htmlParser = HtmlParser.Instance();
    textParser = TextParser.Instance();
    pdfParser = PdfParser.Instance();
    swfParser = SwfParser.Instance();
    nullParser = NullParser.Instance();
    // URL filters: robots.txt (with persisted entries), domain, host request/ban.
    robotsFilter = RobotsFilter.Instance();
    robotsFilter.LoadEntries();
    domainFilter = DomainFilter.Instance();
    hostRequestFilter = HostRequestFilter.Instance();
    hostBanFilter = HostBanFilter.Instance();
    //proxy = WebServiceProxy.Instance();
    proxy = CrawlWaveServerProxy.Instance(globals);
}
/// <summary>
/// Stops the crawling process immediately without waiting for the crawling threads
/// to finish. If the crawler is already in the <see cref="CrawlerState.Stopped"/>
/// state it has no effect.
/// </summary>
public void StopImmediately()
{
    try
    {
        mustStop = true;
        //Kill all the threads without waiting
        KillAllThreads();
    }
    catch
    {
        //reset runningThreads to 0 if something goes wrong
        if(runningThreads >0)
        {
            runningThreads = 0;
        }
    }
    finally
    {
        // Cleanup always runs: clear the queues, persist robots entries and
        // broadcast the Stopped state even if the kill itself failed.
        queueSize = 0;
        urlsToCrawl.Clear();
        crawledUrls.Clear();
        robotsFilter.SaveEntries();
        mustStop = false;
        state = CrawlerState.Stopped;
        OnStateChanged(EventArgs.Empty);
        if(globals.Settings.LogLevel == CWLogLevel.LogInfo)
        {
            globals.FileLog.LogInfo("Crawler.StopImmediately stopped all threads.");
        }
    }
}
// Ends the run: switches to the END state and shows the victory message.
void EndGame()
{
    state = CrawlerState.END;
    StartCoroutine(ChangeText("Vous avez terminé le donjon! Bravo!"));
}
// Queues an asynchronous crawl of the given URI. A cache hit completes
// immediately on the thread pool with the cached bytes; a miss is enqueued
// as a regular crawl job.
public void AsyncCrawl(string uriString, WaitCallback callback, object userState, int priority)
{
    byte[] cached = GetFromCache(uriString);
    if (cached == null)
    {
        // Not cached: build a job state and hand it to the job queue.
        CrawlerState job = CreateState(uriString, callback, userState);
        jobQueue.EnqueueJob(job);
        return;
    }

    // Cache hit: synthesize a completed state and invoke the callback asynchronously.
    CrawlerState hit = new CrawlerState();
    hit.originalRequest = uriString;
    hit.rowData = cached;
    hit.userState = userState;
    hit.priority = priority;
    ThreadPool.QueueUserWorkItem(callback, hit);
}
// Stores the given value as the current crawler state.
public void SetState(CrawlerState state)
{
    State_ = state;
}
/// <summary>
/// Constructs a new instance of the <see cref="Crawler"/> class and initializes its
/// properties with the default values. There should be only one instance of Crawler.
/// </summary>
public Crawler()
{
    //first of all get a reference to the global variables because they are needed
    //in order to initialize some variables.
    globals = Globals.Instance();
    // Run-state flags and counters.
    mustStop = false;
    stopping = false;
    state = CrawlerState.Stopped;
    stats = new long[10] {0,0,0,0,0,0,0,0,0,0};
    numThreads = (int)globals.Settings.ConnectionSpeed;
    runningThreads = 0;
    sendResultsThread = null;
    synchronizeThread = null;
    crawlingThreads = null;
    // Backoff policies: slow/declining for server sync, fast for downloads.
    syncBackOff = new ExponentialBackoff(BackoffSpeed.Declining);
    downloadBackOff = new ExponentialBackoff(BackoffSpeed.Fast);
    // Work queues and result bookkeeping.
    urlsToCrawl = new Queue();
    resultFileNames = new Queue();
    crawledUrls = new ArrayList();
    queueSize = 0;
    dataFileName = String.Empty;
    // Default text encoding (Greek, ISO-8859-7).
    defaultEncoding = Encoding.GetEncoding("ISO-8859-7");
    // Parser singletons for the supported content types.
    htmlParser = HtmlParser.Instance();
    textParser = TextParser.Instance();
    pdfParser = PdfParser.Instance();
    swfParser = SwfParser.Instance();
    // URL filters (robots.txt, domain, host request/ban).
    robotsFilter = RobotsFilter.Instance();
    domainFilter = DomainFilter.Instance();
    hostRequestFilter =HostRequestFilter.Instance();
    hostBanFilter = HostBanFilter.Instance();
    proxy = WebServiceProxy.Instance();
}
/// <summary>
/// Loads the <see cref="ClientSettings"/> of the Client and populates the fields.
/// </summary>
private void LoadSettings()
{
    settings = core.GetSettings();

    // User identity: once a username exists it can no longer be edited;
    // otherwise the statistics group and the controls stay disabled.
    txtUsername.Text = settings.UserName;
    if (settings.UserName != String.Empty)
    {
        grpUserInformation.Enabled = false;
    }
    else
    {
        grpUserStatistics.Enabled = false;
        DisableControls();
    }
    txtEmail.Text = settings.Email;

    // General behavior flags.
    chkLoadAtStartup.Checked = settings.LoadAtStartup;
    chkMinimizeToSystemTray.Checked = settings.MinimizeToTray;
    chkMinimizeOnExit.Checked = settings.MinimizeOnExit;
    SelectConnectionSpeed(settings.ConnectionSpeed);

    // Scheduler section: the four time controls follow the master checkbox.
    bool schedulerEnabled = settings.EnableScheduler;
    chkEnableScheduler.Checked = schedulerEnabled;
    dtStartTime.Enabled = schedulerEnabled;
    dtStopTime.Enabled = schedulerEnabled;
    lblStartTime.Enabled = schedulerEnabled;
    lblStopTime.Enabled = schedulerEnabled;
    dtStartTime.Value = settings.StartTime;
    dtStopTime.Value = settings.StopTime;

    // Sync the button row with the crawler's current state.
    state = core.GetState();
    EnableControls(state);
}