/// <summary>
/// Crawl-job-finished hook. Intentionally empty: the per-module and combined
/// summaries sketched below were never implemented — TODO confirm whether they
/// are still wanted or this override can be removed.
/// </summary>
/// <param name="__machine">The crawler domain task machine that finished.</param>
/// <param name="__tRecord">The crawler test record of the finished job.</param>
public override void eventCrawlJobFinished(crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
        {
            // per module summary


            // all three modules summary
        }
Esempio n. 2
0
        /// <summary>
        /// Carries out the per-tick action for the plugin's current state.
        /// Warm-up and cool-down count down by the number of samples elapsed since the
        /// previous tick; <c>preparing</c> waits for a CPU macro mean; <c>terminating</c>
        /// shuts the crawl machine down.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine being monitored.</param>
        /// <param name="tRecord">The crawler test record (not used by this method).</param>
        /// <param name="plugin">The owning workload report plugin, used for logging.</param>
        /// <param name="entry">The experiment session entry (not used by this method).</param>
        private void doPerform(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            // samples elapsed since the previous tick — used as the countdown step
            int elapsed = thisSampleID - lastSampleID;

            if (pluginState == workloadPluginState.cooldown)
            {
                if (cooldownIndex > 0)
                {
                    comment       = comment.add($"Coolingdown [{cooldownIndex}]");
                    cooldownIndex = cooldownIndex - elapsed;
                }
                else
                {
                    comment     = comment.add($"Cooldown finished");
                    pluginState = workloadPluginState.active;
                }
            }
            else if (pluginState == workloadPluginState.wormingUp)
            {
                if (wormingUpIndex > 0)
                {
                    comment        = comment.add($"WormingUp [{wormingUpIndex}]");
                    wormingUpIndex = wormingUpIndex - elapsed;
                }
                else
                {
                    comment     = comment.add($"WormingUp finished");
                    pluginState = workloadPluginState.active;
                }
            }
            else if (pluginState == workloadPluginState.preparing)
            {
                // stay in 'preparing' until the CPU trend has a macro mean to work with
                if (tCPU.SampleState.HasFlag(measureTrendSampleState.macroMean))
                {
                    pluginState = workloadPluginState.active;
                    plugin.loger.log("Workload plugin ready");
                }
            }
            else if (pluginState == workloadPluginState.terminating)
            {
                terminate(_machine);
            }

            // active, disabled, none and sampleTail require no per-tick action
        }
 /// <summary>
 /// Relays the crawl-job-finished event to every registered plugin.
 /// </summary>
 /// <param name="aJob">The analytic job that finished.</param>
 /// <param name="__machine">The crawler domain task machine.</param>
 /// <param name="__tRecord">The crawler test record.</param>
 public void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
 {
     foreach (var plugItem in allPlugins)
     {
         // same element cast the original foreach performed implicitly
         var plug = (IPlugInCommonBase <indexMaintenanceStageEnum, experimentSessionEntry>)plugItem;
         plug.eventCrawlJobFinished(aJob, __machine, __tRecord);
     }
 }
Esempio n. 4
0
 /// <summary>
 /// Relays the crawl-job-finished event to every registered reporting plugin.
 /// </summary>
 /// <param name="aJob">The analytic job that finished.</param>
 /// <param name="__machine">The crawler domain task machine.</param>
 /// <param name="__tRecord">The crawler test record.</param>
 public void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
 {
     foreach (IPlugInCommonBase <crawlReportingStageEnum, directReporterBase> plug in allPlugins)
     {
         plug.eventCrawlJobFinished(aJob, __machine, __tRecord);
     }
 }
 /// <summary>
 /// Relays the crawl-job-finished event to every registered spider plugin.
 /// </summary>
 /// <param name="aJob">The analytic job that finished.</param>
 /// <param name="__machine">The crawler domain task machine.</param>
 /// <param name="__tRecord">The crawler test record.</param>
 public void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
 {
     foreach (IPlugInCommonBase <crawlerDomainTaskIterationPhase, spiderEvaluatorBase> plug in allPlugins)
     {
         plug.eventCrawlJobFinished(aJob, __machine, __tRecord);
     }
 }
Esempio n. 6
0
        /// <summary>
        /// Creates or updates a workload record entry for the current sample tick and
        /// returns its record ID. A new entry is produced only when at least one new
        /// sample was taken since the previous tick (thisSampleID != lastSampleID).
        /// </summary>
        /// <param name="_machine">The crawler domain task machine; may be null, in which case the sample counter is not refreshed.</param>
        /// <param name="tRecord">The crawler test record (source of instance and signature).</param>
        /// <param name="plugin">The owning workload report plugin (owns the records table).</param>
        /// <param name="entry">The experiment session entry (source of the test state).</param>
        /// <returns>The ID of the entry written for this tick, or "" when no new sample arrived.</returns>
        public string doCreateEntry(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            string recID = "";

            if (_machine != null)
            {
                thisSampleID = _machine.dataLoadTaker.CountTakes();
            }
            // -1 marks "never sampled"; normalise so the first comparison works
            if (lastSampleID == -1)
            {
                lastSampleID = 0;
            }

            if (thisSampleID != lastSampleID)
            {
                RecordID++;

                // <------------------ RECORD CREATION
                recID                 = GetEntryID(RecordID, measureGroup);
                lastEntry             = plugin.records.GetOrCreate(recID);
                lastEntry.RecordID    = RecordID;
                lastEntry.pluginState = pluginState.ToString();

                // only samples taken while the plugin is active belong to a measure group;
                // -1 flags entries recorded in any other state
                lastEntry.measureGroup = (pluginState == workloadPluginState.active) ? measureGroup : -1;

                lastEntry.SetTestIDAndSignature(tRecord.instance, entry.state, tRecord);

                lastEntry.terminationWarning = terminationWarning;
                lastEntry.availableMemory    = mMemory;

                lastEntry.ContentPages     = tContentPages.MicroMean;
                lastEntry.cpuRateOfMachine = tCPUm.MicroMean;
                lastEntry.cpuRateOfProcess = tCPU.MicroMean;
                lastEntry.physicalMemory   = tMemory.MicroMean;


                lastEntry.CrawlerIterations = tIterations.MicroMean;
                lastEntry.DataLoad          = tDataLoad.MicroMean;

                // FIX: _machine was null-checked above but dereferenced unconditionally
                // here, which could throw NullReferenceException for a null machine
                if (_machine != null)
                {
                    lastEntry.dlcDone    = _machine.taskDone;
                    lastEntry.dlcRunning = _machine.taskRunning;
                    lastEntry.dlcWaiting = _machine.taskWaiting;
                    lastEntry.dlcMaximum = _machine.maxThreads;
                }

                plugin.records.AddOrUpdate(lastEntry);
            }
            lastSampleID = thisSampleID;
            return(recID);
        }
Esempio n. 7
0
        /// <summary>
        /// Forces the crawl job to shut down: aborts every DLC task, raises the global
        /// kill switch and collapses all time limits so the machine drains immediately.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine to terminate.</param>
        public void terminate(crawlerDomainTaskMachine _machine)
        {
            // snapshot first (as the original did) so concurrent changes to the
            // collection cannot break the enumeration, then flag each task aborted
            foreach (var dlcTask in _machine.items.items.ToList())
            {
                dlcTask.isStageAborted = true;
            }

            imbWEMManager.MASTERKILL_SWITCH = true;

            _machine.allTaskDone             = true;
            _machine.TimeLimitForCompleteJob = 1;
            _machine._timeLimitForDLC        = 1;
        }
Esempio n. 8
0
        /// <summary>
        /// Evaluates the termination / state-transition criteria for this tick:
        /// falls back to <c>preparing</c> without a CPU macro mean, terminates when the
        /// configured DLC-done limit is reached, and — while active — handles group
        /// step-up, sample-tail and memory-limit conditions.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine being monitored.</param>
        /// <param name="tRecord">The crawler test record.</param>
        /// <param name="plugin">The owning workload report plugin (settings and records).</param>
        /// <param name="entry">The experiment session entry.</param>
        public void doCheckCriteria(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            // no CPU macro mean yet -> not enough data, go (back) to preparing
            if (!tCPU.SampleState.HasFlag(measureTrendSampleState.macroMean))
            {
                pluginState = workloadPluginState.preparing;
            }

            if (plugin.plugin_settings.term_DLCFinished > 0) // <----- is this termination criterion enabled (original comment, translated)
            {
                if (_machine.taskDone >= plugin.plugin_settings.term_DLCFinished)
                {
                    terminate(_machine);
                }
            }



            if (pluginState == workloadPluginState.active)
            {
                // enough DLCs finished for the current measure group -> step up to the next
                if (_machine.taskDone >= DLCDoneForNext)
                {
                    doStartNextGroup(_machine, tRecord, plugin, entry);

                    stateUpdate(_machine, tRecord, plugin, entry);

                    plugin.records.Save(getWritableFileMode.overwrite);
                }

                // no DLCs waiting -> either terminate or just mark the sample tail
                if (isSampleTail)
                {
                    if (plugin.plugin_settings.term_JLCinTail)
                    {
                        terminate(_machine);
                    }
                    else
                    {
                        pluginState = workloadPluginState.sampleTail;
                    }
                }

                // memory pressure -> terminate if configured, otherwise cool down
                if (isMemoryLimit)
                {
                    if (plugin.plugin_settings.term_availableMemory > 0)
                    {
                        terminate(_machine);
                    }
                    else
                    {
                        pluginState = workloadPluginState.cooldown;

                        // NOTE(review): cooldown length reuses warmingUpTicks — confirm a
                        // dedicated cooldown setting is not intended
                        cooldownIndex = plugin.plugin_settings.warmingUpTicks;
                    }
                }
            }
        }
Esempio n. 9
0
        /// <summary>
        /// Appends a human-readable summary of a finished crawler run to this report:
        /// crawler identity, report folders, settings hash (the XML-serialized settings
        /// are also saved to the session crawler folder), configuration overview, timing
        /// and failure counts.
        /// </summary>
        /// <param name="tRecord">The crawler test record whose instance settings are reported.</param>
        /// <param name="cDTM">The crawler domain task machine that executed the run.</param>
        public void WriteAboutCrawlerRun(modelSpiderTestRecord tRecord, crawlerDomainTaskMachine cDTM)
        {
            AppendHorizontalLine();
            AppendLine("Crawler name:           " + cDTM.tRecord.instance.name);
            AppendLine("Crawler description:    " + cDTM.tRecord.instance.description);

            AppendLine("Session report folder:      " + cDTM.folder.path);
            AppendLine("Crawler report folder:      " + imbWEMManager.index.experimentEntry.sessionCrawlerFolder.path);
            AppendLine("--------------------------------------------------------------------------- ");

            // hash the serialized settings so runs with identical configuration can be matched
            string settings = objectSerialization.ObjectToXML(tRecord.instance.settings);
            string hash     = md5.GetMd5Hash(settings);

            // persist the settings XML next to the session's crawler reports
            var fileinfo = settings.saveStringToFile(
                imbWEMManager.index.experimentEntry.sessionCrawlerFolder.pathFor(imbWEMManager.index.experimentEntry.SessionID.getFilename() + "_settings.xml").getWritableFile().FullName);

            //state.setupHash_crawler = hash;

            AppendLine("Crawler settings hash:  " + hash);
            AppendLine("Crawler complete hash:  " + tRecord.instance.crawlerHash);
            //  AppendLine("Crawler settings file:  " + fileinfo.Name);

            AppendLine("--------------- Crawler configuration overview ---------------------------- ");



            AppendLine("PL_max    	   | PL         - Page Load max per domain    	        | : "+ tRecord.instance.settings.limitTotalPageLoad);
            AppendLine("LT_t    	   | LT         - Load Take per iteration    	        | : "+ tRecord.instance.settings.limitIterationNewLinks);
            AppendLine("I_max    	   | I_max      - Iteration limit per DLC	            | : "+ tRecord.instance.settings.limitIterations);

            AppendLine("PS_c *         |            - Page Select count (not used)          | : " + tRecord.instance.settings.primaryPageSetSize);
            AppendLine("--------------------------------------------------------------------------- ");
            AppendLine("-- * parameteers not used in by this version ------------------------------ ");
            AppendLine();

            AppendLine("--------------------------------------------------------------------------- ");



            var duration = DateTime.Now.Subtract(cDTM.startTime);

            AppendLine("Start time:         " + cDTM.startTime.ToShortTimeString());
            AppendLine("Finish time:        " + DateTime.Now.ToShortTimeString());
            AppendLine("Duration (minutes): " + duration.TotalMinutes);
            AppendLine("^-- includes post-crawl reporting and index database update");

            AppendLine("Failed domains:     " + cDTM.webLoaderControler.GetFailedDomains().Count());
            AppendLine("^-- includes domains that were accesable but no links discovered    ");
            AppendLine("Failed URLs:        " + cDTM.webLoaderControler.GetFailedURLsCount());

            AppendHorizontalLine();

            // cDTM.tRecord.instance.Describe(this);
        }
Esempio n. 10
0
        /// <summary>
        /// Per status-report tick driver: collects measurements, decides the next plugin
        /// state, performs the state's action, records a workload entry, re-checks the
        /// termination criteria and finally prints the status lines to the plugin loger.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine being monitored.</param>
        /// <param name="tRecord">The crawler test record.</param>
        /// <param name="plugin">The owning workload report plugin.</param>
        /// <param name="entry">The experiment session entry.</param>
        public void stateUpdate(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            comment = "";

            doReadData(_machine);

            // <------------------ DATA COLLECTION



            // <------------------ STATE DECISION
            doCheckFacts(_machine, tRecord, plugin, entry);

            doPerform(_machine, tRecord, plugin, entry);

            doCreateEntry(_machine, tRecord, plugin, entry);



            doCheckCriteria(_machine, tRecord, plugin, entry);



            // <------------------ PRINTING OUT ----------------------------

            plugin.loger.AppendHorizontalLine();

            if (pluginState != workloadPluginState.disabled)
            {
                // the active state is visually emphasised in the status line
                string st_in = pluginState.ToString();
                if (pluginState == workloadPluginState.active)
                {
                    st_in = "_" + st_in + "_";
                }
                plugin.loger.AppendLine(string.Format(STATUSLINE_ONE, st_in, lastEntry.RecordID.ToString("D3"), lastEntry.measureGroup, lastEntry.dlcMaximum, lastEntry.dlcRunning, lastEntry.dlcWaiting).toWidthExact(Console.BufferWidth - 11, "="));
            }

            // trend lines: memory / process CPU / machine CPU, then data load / pages / iterations
            plugin.loger.AppendLine(tMemory.GetTrendInline() + " | " + tCPU.GetTrendInline() + " | " + tCPUm.GetTrendInline());
            plugin.loger.AppendLine(tDataLoad.GetTrendInline() + " | " + tContentPages.GetTrendInline() + " | " + tIterations.GetTrendInline());

            //plugin.loger.AppendLine("--- Info: " );
            if (pluginState != workloadPluginState.disabled)
            {
                plugin.loger.AppendLine(string.Format(STATUSLINE_TWO, mMemory.ToString("P2"),
                                                      lastEntry.terminationWarning.ToString("D3"), lastEntry.dlcDone, DLCDoneForNext, thisSampleID, lastSampleID).toWidthExact(Console.BufferWidth - 11, "="));
                //  plugin.loger.AppendLine(String.Format(STATUSLINE_TWO, mMemory.ToString("P2"), g).toWidthExact(Console.BufferWidth-11, "="));
            }
        }
Esempio n. 11
0
 /// <summary>
 /// Dispatches the crawl-job-finished event to every reporting plugin, isolating each
 /// plugin in its own try/catch so one crashing handler cannot break the others.
 /// </summary>
 /// <param name="__machine">The crawler domain task machine that finished.</param>
 /// <param name="__tRecord">The crawler test record of the finished job.</param>
 public void eventCrawlJobFinished(crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord) // <---- original note (translated): this is never called
 {
     foreach (reportPlugIn_base plug in allPlugins)
     {
         try
         {
             plug.eventCrawlJobFinished(__machine, __tRecord);
         }
         catch (Exception ex)
         {
             // a failing plugin is logged and written to the crawler error log, never rethrown
             aceLog.log("Reporting Plugin [" + plug.name + "]:" + plug.GetType().Name + " at status report execution crashed: " + ex.Message);
             crawlerErrorLog cel = new crawlerErrorLog(ex, null, null, crawlerErrorEnum.indexPlugin);
             cel.SaveXML();
         }
     }
 }
Esempio n. 12
0
        /// <summary>
        /// Closes the current measure group and starts warm-up for the next one:
        /// schedules the next DLC-done boundary, bumps the group number and raises the
        /// machine's thread count by the configured step.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine being stepped up.</param>
        /// <param name="tRecord">The crawler test record (not used directly here).</param>
        /// <param name="plugin">The owning workload report plugin (settings and loger).</param>
        /// <param name="entry">The experiment session entry (not used directly here).</param>
        private void doStartNextGroup(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            doReadData(_machine);

            // schedule the next group boundary and enter the warm-up state
            DLCDoneForNext = _machine.taskDone + plugin.plugin_settings.stepUp_DLCCount;
            pluginState    = workloadPluginState.wormingUp;

            plugin.loger.log($"Measure group {measureGroup} completed -- DLCs done: {_machine.taskDone}");
            comment = comment.add($"Group {measureGroup} done");

            // a zero warm-up period means the next group becomes active immediately
            wormingUpIndex = plugin.plugin_settings.warmingUpTicks;
            if (wormingUpIndex == 0)
            {
                pluginState = workloadPluginState.active;
            }

            measureGroup         = measureGroup + 1;
            _machine.maxThreads += plugin.plugin_settings.stepUp_step;
        }
Esempio n. 13
0
        /// <summary>
        /// Refreshes all measurement trends and the available-memory ratio from the
        /// machine's measure, CPU and data-load takers.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine whose takers are read.</param>
        public void doReadData(crawlerDomainTaskMachine _machine)
        {
            // read the latest take first, exactly as the original did
            var lastTake = _machine.measureTaker.GetLastTake();

            // process-level resource trends
            tMemory   = _machine.measureTaker.GetTrend(trendMemory);
            tCPU      = _machine.measureTaker.GetTrend(trendCPU);
            tDataLoad = _machine.measureTaker.GetTrend(trendDataLoad);

            // machine-level CPU trend comes from a separate taker
            tCPUm = _machine.cpuTaker.GetTrend(trendCPUm);

            // crawl-progress trends from the data-load taker
            tContentPages = _machine.dataLoadTaker.GetTrend(trendContentPages);
            tContentTerms = _machine.dataLoadTaker.GetTrend(trendContentTerms);
            tIterations   = _machine.dataLoadTaker.GetTrend(trendIterations);

            // available-to-total memory ratio of the most recent take
            mMemory = lastTake.availableMemory.GetRatio(lastTake.totalMemory);
        }
Esempio n. 14
0
        /// <summary>
        /// Prepares the workload plugin at crawl-job start: loads the plugin settings,
        /// initialises the plugin state, opens the results table and — when step-up is
        /// enabled — enters the preparing state with the configured starting thread count;
        /// otherwise the plugin disables itself.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine about to run.</param>
        /// <param name="tRecord">The crawler test record (its instance name keys the results file).</param>
        public override void eventAtInitiationOfCrawlJob(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord)
        {
            plugin_settings = imbWEMManager.settings.supportEngine.plugIn_workload_settings;
            plugin_state.statePrepare(plugin_settings);

            // suffix encoding the step-up configuration, so each configuration gets its own results file
            string ad = plugin_settings.stepUp_start.ToString() + plugin_settings.stepUp_step.ToString();

            __recordPath = homeFolder.pathFor("results_" + tRecord.instance.name + ad + ".xml");

            // NOTE(review): assigns __recordPath but reads recordPath on the next line —
            // presumably recordPath is a property wrapping __recordPath; confirm
            records = new objectTable <reportPlugIn_workloadEntry>(recordPath, false, __recordKeyProperty, plugin_state.TestID);

            if (plugin_settings.stepUp_enabled)
            {
                plugin_state.pluginState = workloadPluginState.preparing;
                _machine.maxThreads      = plugin_settings.stepUp_start;
            }
            else
            {
                plugin_state.pluginState = workloadPluginState.disabled;
            }
        }
Esempio n. 15
0
        /// <summary>
        /// Dispatches the end-of-crawl-job event to every reporting plugin, isolating
        /// each plugin in its own try/catch so one crashing handler cannot break the
        /// others. Does nothing when reporting is disabled.
        /// </summary>
        /// <param name="crawlerDomainTaskMachine">The crawler domain task machine that finished.</param>
        /// <param name="tRecord">The crawler test record of the finished job.</param>
        public void eventAtEndOfCrawlJob(crawlerDomainTaskMachine crawlerDomainTaskMachine, modelSpiderTestRecord tRecord)
        {
            if (!IsEnabled)
            {
                return;
            }

            foreach (reportPlugIn_base plug in allPlugins)
            {
                try
                {
                    plug.eventAtEndOfCrawlJob(crawlerDomainTaskMachine, tRecord);
                    //if (plug is ISpiderPlugInForContent) ((ISpiderPlugInForContent)plug).processAfterResultReceived(wRecord, wTask);
                }
                catch (Exception ex)
                {
                    // a failing plugin is logged and written to the crawler error log, never rethrown
                    aceLog.log("Reporting Plugin [" + plug.name + "]:" + plug.GetType().Name + " at status report execution crashed: " + ex.Message);
                    crawlerErrorLog cel = new crawlerErrorLog(ex, null, null, crawlerErrorEnum.indexPlugin);
                    cel.SaveXML();
                }
            }
        }
Esempio n. 16
0
        /// <summary>
        /// Evaluates warning facts for this tick: low available memory and an empty DLC
        /// waiting queue raise termination warnings (setting <c>isMemoryLimit</c> /
        /// <c>isSampleTail</c> once the warning count is reached), and running more DLCs
        /// than TC_max forces a cooldown.
        /// </summary>
        /// <param name="_machine">The crawler domain task machine being monitored.</param>
        /// <param name="tRecord">The crawler test record (not used directly here).</param>
        /// <param name="plugin">The owning workload report plugin (settings and loger).</param>
        /// <param name="entry">The experiment session entry (not used directly here).</param>
        private void doCheckFacts(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord, reportPlugIn_workload plugin, experimentSessionEntry entry)
        {
            if (mMemory < plugin.plugin_settings.term_availableMemory)
            {
                warningUpDate("Available RAM [" + mMemory.ToString("P2") + "] is below the termination limit [" + plugin.plugin_settings.term_availableMemory.ToString("P2") + "]", true, plugin);
                // FIX: removed the dead empty else-branch that followed this if
                if (terminationWarning >= plugin.plugin_settings.term_warningCount)
                {
                    isMemoryLimit = true;
                }
            }
            else if (_machine.taskWaiting == 0)
            {
                warningUpDate("There is no DLCs waiting [" + _machine.taskWaiting + "] - no way to run DLCs up to TC_max [" + _machine.maxThreads + "]", true, plugin);
                if (terminationWarning >= plugin.plugin_settings.term_warningCount)
                {
                    isSampleTail = true;
                }
            }
            else
            {
                // all criteria clear: reset the warning streak if one was building up
                if (terminationWarning > 0)
                {
                    warningUpDate("All termination criteria clean", false, plugin);
                }
            }

            // over-subscription: more DLCs running than TC_max allows -> cool down
            if (_machine.taskRunning > _machine.maxThreads)
            {
                plugin.loger.log($" Running {_machine.taskRunning} more then TC_max {_machine.maxThreads} - switching to cooldown");
                // NOTE(review): cooldown length reuses warmingUpTicks — confirm a dedicated setting is not intended
                cooldownIndex = plugin.plugin_settings.warmingUpTicks;
                pluginState   = workloadPluginState.cooldown;
            }
        }
Esempio n. 17
0
 /// <summary>
 /// Called just before the crawl job completes, giving implementations a chance to
 /// flush or finalise their reports.
 /// </summary>
 /// <param name="crawlerDomainTaskMachine">The crawler domain task machine.</param>
 /// <param name="tRecord">The crawler test record.</param>
 public abstract void eventAtEndOfCrawlJob(crawlerDomainTaskMachine crawlerDomainTaskMachine, modelSpiderTestRecord tRecord);
Esempio n. 18
0
 /// <summary>
 /// Crawl-job initiation hook. Intentional no-op: this plugin needs no setup.
 /// </summary>
 /// <param name="crawlerDomainTaskMachine">The crawler domain task machine.</param>
 /// <param name="tRecord">The crawler test record.</param>
 public override void eventAtInitiationOfCrawlJob(crawlerDomainTaskMachine crawlerDomainTaskMachine, modelSpiderTestRecord tRecord)
 {
 }
Esempio n. 19
0
 /// <summary>
 /// End-of-crawl-job hook. Intentional no-op: this plugin has nothing to finalise.
 /// </summary>
 /// <param name="_machine">The crawler domain task machine.</param>
 /// <param name="tRecord">The crawler test record.</param>
 public override void eventAtEndOfCrawlJob(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord)
 {
 }
Esempio n. 20
0
 /// <summary>
 /// Status-report hook. Intentional no-op: this plugin reports nothing per tick.
 /// </summary>
 /// <param name="crawlerDomainTaskMachine">The crawler domain task machine.</param>
 /// <param name="tRecord">The crawler test record.</param>
 public override void eventStatusReport(crawlerDomainTaskMachine crawlerDomainTaskMachine, modelSpiderTestRecord tRecord)
 {
 }
Esempio n. 21
0
 /// <summary>
 /// Status-report hook: delegates the per-tick work to the plugin state's
 /// <c>stateUpdate</c>, passing the current experiment entry.
 /// </summary>
 /// <param name="_machine">The crawler domain task machine.</param>
 /// <param name="tRecord">The crawler test record.</param>
 public override void eventStatusReport(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord)
 {
     plugin_state.stateUpdate(_machine, tRecord, this, imbWEMManager.index.experimentEntry);
 }
Esempio n. 22
0
        /// <summary>
        /// Universal engine-stage handler. Only the <c>statusReport</c> stage is acted on:
        /// it measures per-task iteration latency and task age, applies time-out prevention
        /// (loader shutdown, thread-priority shaping) and, when enabled, auto-adjusts the
        /// machine's thread count (TC) toward the configured CPU target.
        /// </summary>
        /// <param name="stage">The engine stage that raised the event.</param>
        /// <param name="__machine">The crawler domain task machine.</param>
        /// <param name="__task">Stage-specific payload (unused by this handler).</param>
        /// <param name="__resource">Stage-specific payload (unused by this handler).</param>
        public override void eventUniversal <TFirst, TSecond>(crawlJobEngineStageEnum stage, crawlerDomainTaskMachine __machine, TFirst __task, TSecond __resource)
        {
            switch (stage)
            {
            case crawlJobEngineStageEnum.statusReport:

                int    change            = 0;
                int    newTC             = __machine.maxThreads;
                double maxLatencyToLimit = 0;
                double maxLatency        = 0;
                bool   doBoost           = false;
                string domainThatLates   = "";
                string threadId          = "";
                Thread criticalThread    = null;
                double average           = CPUAverageLast;
                double avgChange         = average - CPUAverageLast;

                double            maxAge     = 0;
                crawlerDomainTask taskOldest = null;

                // snapshot the running-task list so it cannot change while we scan it
                var tasks = __machine.task_running.ToList();

                // find the task with the highest iteration latency and the oldest task overall
                foreach (Task task in tasks)
                {
                    crawlerDomainTask taskInRun = task.AsyncState as crawlerDomainTask;
                    double            since     = taskInRun.sinceLastIterationStart;
                    double            tage      = DateTime.Now.Subtract(taskInRun.startTime).TotalMinutes;
                    maxLatency = Math.Max(maxLatency, since);
                    if (maxAge <= tage)
                    {
                        maxAge     = tage;
                        taskOldest = taskInRun;
                    }
                    if (maxLatency <= since)
                    {
                        domainThatLates = taskInRun.wRecord.domain;
                        if (taskInRun?.executionThread != null)
                        {
                            threadId = taskInRun.executionThread.ManagedThreadId.ToString() + " [" + taskInRun.executionThread.Priority.ToString() + "]";
                        }
                        criticalThread = taskInRun.executionThread;
                    }
                }

                // ratios of observed latency/age against the configured limits
                maxLatencyToLimit = maxLatency.GetRatio(__machine.TimeLimitForTask);
                double maxAgeLimit = maxAge.GetRatio(__machine._timeLimitForDLC);

                double totalAgeLimit = DateTime.Now.Subtract(__machine.startTime).TotalMinutes.GetRatio(__machine.TimeLimitForCompleteJob);


                loger.log("Max. latency:    [" + maxLatency.ToString("F2") + " min][" + maxLatencyToLimit.ToString("P2") + "] " + domainThatLates + " Thread: " + threadId);

                if (taskOldest != null)
                {
                    loger.log("Oldest DLC:      [" + maxAge.ToString("F2") + " min][" + maxAgeLimit.ToString("P2") + "] " + taskOldest.wRecord.domain + " Thread: " + taskOldest.executionThread.ManagedThreadId.ToString() + " [" + taskOldest.executionThread.Priority.ToString() + "]");
                }

                #region TIMEOUT PREVENTION -----------------------------------------
                if (imbWEMManager.settings.crawlerJobEngine.doTaskTimeOutPrevention)
                {
                    // near the total-job time limit: disable the loader on every running task
                    if (totalAgeLimit > 0.9)
                    {
                        bool newDisable = false;
                        foreach (Task task in tasks)
                        {
                            crawlerDomainTask t = task.AsyncState as crawlerDomainTask;
                            if (!t.isLoaderDisabled)
                            {
                                t.isLoaderDisabled = true;
                                newDisable         = true;
                                loger.log("Time Limit Critical: loader is disabled for: " + t.wRecord.domain + " due execution time limit for Thread: " + t.executionThread.ManagedThreadId.ToString());
                            }
                        }


                        if (newDisable)
                        {
                            aceTerminalInput.doBeepViaConsole(1200, 250, 5);
                        }
                    }

                    // oldest DLC near its own time limit: disable just its loader
                    // FIX: guard taskOldest against null (no running tasks) before dereferencing
                    if (maxAgeLimit > 0.9)
                    {
                        if (taskOldest != null && !taskOldest.isLoaderDisabled)
                        {
                            taskOldest.isLoaderDisabled = true;
                            loger.consoleAltColorToggle();
                            loger.log("DLC Time Limit Critical: loader is disabled for: " + taskOldest.wRecord.domain + " due execution time limit for Thread: " + taskOldest.executionThread.ManagedThreadId.ToString());
                            loger.consoleAltColorToggle();
                            aceTerminalInput.doBeepViaConsole();
                        }
                    }


                    doBoost = false;

                    // FIX: the thresholds were originally tested in ascending order
                    // (> 0.5, then > 0.70, then > 0.90), which made the 0.70 and 0.90
                    // branches unreachable — any value above 0.70 was already captured by
                    // the 0.5 branch. Test in descending order so each severity applies.
                    if (maxLatencyToLimit > 0.90)
                    {
                        loger.log("Max. latency critical :: REDUCING TO SINGLE THREAD : ");

                        // demote everything, boost only the critical thread, drop TC to 1
                        foreach (Task task in tasks)
                        {
                            crawlerDomainTask taskInRun = task.AsyncState as crawlerDomainTask;
                            if (taskInRun?.executionThread != null)
                            {
                                taskInRun.executionThread.Priority = ThreadPriority.BelowNormal;
                            }
                        }

                        if (criticalThread != null)
                        {
                            criticalThread.Priority = ThreadPriority.Highest;
                        }

                        newTC = 1;
                    }
                    else if (maxLatencyToLimit > 0.70)
                    {
                        if (criticalThread != null)
                        {
                            criticalThread.Priority = ThreadPriority.Highest;
                        }
                        change = -4;
                    }
                    else if (maxLatencyToLimit > 0.5)
                    {
                        if (criticalThread != null)
                        {
                            criticalThread.Priority = ThreadPriority.AboveNormal;
                        }
                        change = -2;
                    }
                    else
                    {
                        // latency healthy: favour the oldest task, normalise the rest, allow TC boost
                        foreach (Task task in tasks)
                        {
                            crawlerDomainTask taskInRun = task.AsyncState as crawlerDomainTask;
                            if (taskOldest == taskInRun)
                            {
                                if (taskInRun?.executionThread != null)
                                {
                                    taskInRun.executionThread.Priority = ThreadPriority.AboveNormal;
                                }
                            }
                            else
                            {
                                if (taskInRun?.executionThread != null)
                                {
                                    taskInRun.executionThread.Priority = ThreadPriority.Normal;
                                }
                            }
                        }

                        doBoost = true;
                    }
                }
                #endregion --------------------------- ^ timeout prevention ^^

                if (imbWEMManager.settings.crawlerJobEngine.doAutoAdjustTC)
                {
                    if (doBoost)     // <------ TC adjust
                    {
                        var takes = __machine.cpuTaker.GetLastSamples(imbWEMManager.settings.crawlerJobEngine.CPUSampleForAutoAdjustMax);


                        // not enough CPU samples yet to make a sound decision
                        if (takes.Count < imbWEMManager.settings.crawlerJobEngine.CPUSampleForAutoAdjust)
                        {
                            return;
                        }

                        average = (takes.Average(x => x.reading) / 100);

                        avgChange = average - CPUAverageLast;

                        double CPUMargin = imbWEMManager.settings.crawlerJobEngine.CPUMargin;
                        int    dlc       = __machine.taskRunning;
                        CPUAverageDefendLine = Math.Max(average, CPUAverageLast);

                        // skip adjustment while the machine is not saturated with DLCs
                        if (dlc < (__machine.maxThreads - 1))
                        {
                            return;
                        }

                        if (average < imbWEMManager.settings.crawlerJobEngine.CPUTarget)
                        {
                            // below target: grow TC unless CPU fell off the defend line
                            if (average < (CPUAverageDefendLine - CPUMargin))
                            {
                                change = -1;
                            }
                            else
                            {
                                change = 1;
                            }
                        }
                        else if (average > imbWEMManager.settings.crawlerJobEngine.CPULimit)
                        {
                            change = -1;
                        }

                        newTC = Math.Min(__machine.maxThreads + change, imbWEMManager.settings.crawlerJobEngine.TCAutoLimit);
                        if (newTC < 0)
                        {
                            newTC = 1;
                        }

                        CPUAverageLast = average;
                    }
                    else
                    {
                        // apply the reduction decided by the time-out prevention above
                        if (change != 0)
                        {
                            newTC = Math.Min(__machine.maxThreads + change, imbWEMManager.settings.crawlerJobEngine.TCAutoLimit);
                        }
                        if (newTC < 0)
                        {
                            newTC = 1;
                        }
                    }
                }

                int e_change = newTC - __machine.maxThreads;
                __machine.maxThreads = newTC;


                loger.log("CPU average [" + average.ToString("P2") + "][" + avgChange.ToString("P2") + "] - (change: " + e_change + ") TC: " + __machine.maxThreads.ToString("D3") + " DLC:[" + __machine.taskRunning.ToString("D3") + "]");

                break;
            }
        }
Esempio n. 23
0
        // public builderForLog loger { get; set; }



        //public String InstanceHash
        //{
        //    get
        //    {
        //        return name + "_" + GetHashCode();
        //    }
        //}



        /// <summary>
        /// Generic hook raised by the crawl job engine at various stages; implementations
        /// decide which <paramref name="stage"/> values they act on.
        /// </summary>
        /// <param name="stage">The engine stage that raised the event.</param>
        /// <param name="__machine">The crawler domain task machine.</param>
        /// <param name="__task">Stage-specific payload.</param>
        /// <param name="__resource">Stage-specific payload.</param>
        public abstract void eventUniversal <TFirst, TSecond>(crawlJobEngineStageEnum stage, crawlerDomainTaskMachine __machine, TFirst __task, TSecond __resource);
Esempio n. 24
0
        //public override void eventDLCFinished<TParent>(TParent __parent, crawlerDomainTask __task, modelSpiderSiteRecord __wRecord) => eventDLCFinished(__parent as directReporterBase, __task, __wRecord);

        //public override void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord) => eventCrawlJobFinished(__machine, __tRecord);


        //public override void eventDLCInitiated<TParent>(TParent __parent, crawlerDomainTask __task, modelSpiderSiteRecord __wRecord) => eventDLCInitiated(__parent as directReporterBase, __task, __wRecord);


        /// <summary>
        /// Crawl-job-finished hook — intentionally left empty: this reporter produces no
        /// per-job output when the job completes.
        /// </summary>
        /// <param name="aJob">The analytic job that finished</param>
        /// <param name="__machine">The task machine that executed the job</param>
        /// <param name="__tRecord">Test record of the crawler instance</param>
        public override void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
        {
        }
Esempio n. 25
0
 /// <summary>
 /// Crawl-job-finished hook — effectively a no-op: the index recheck that used to run
 /// here is currently disabled (kept below for reference).
 /// </summary>
 /// <param name="aJob">The analytic job that finished</param>
 /// <param name="__machine">The task machine that executed the job</param>
 /// <param name="__tRecord">Test record of the crawler instance</param>
 public override void eventCrawlJobFinished(analyticJob aJob, crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord)
 {
     // disabled index recheck — re-enable if post-job index validation is needed
     //  imbWEMManager.index.Recheck(loger);
 }
Esempio n. 26
0
 /// <summary>
 /// Hook invoked when the crawl job finishes; implementations handle any end-of-job
 /// reporting or cleanup for the given task machine and test record.
 /// </summary>
 /// <param name="__machine">The task machine that executed the job</param>
 /// <param name="__tRecord">Test record of the crawler instance</param>
 public abstract void eventCrawlJobFinished(crawlerDomainTaskMachine __machine, modelSpiderTestRecord __tRecord);
Esempio n. 27
0
 /// <summary>
 /// DLC-initiated hook (presumably fired when a per-domain crawl task starts — TODO confirm) —
 /// intentionally empty: this implementation takes no action on DLC start.
 /// </summary>
 /// <param name="__parent">Task machine owning the task</param>
 /// <param name="__task">The domain task being initiated</param>
 /// <param name="__wRecord">Site record associated with the task</param>
 public void eventDLCInitiated(crawlerDomainTaskMachine __parent, crawlerDomainTask __task, modelSpiderSiteRecord __wRecord)
 {
 }
Esempio n. 28
0
 /// <summary>
 /// On crawl-job completion: persists the collected workload records, then exports them
 /// as a report table (file prefix "workload_") into the session crawler folder.
 /// </summary>
 /// <param name="_machine">The task machine that executed the job</param>
 /// <param name="tRecord">Test record of the crawler instance</param>
 public override void eventCrawlJobFinished(crawlerDomainTaskMachine _machine, modelSpiderTestRecord tRecord)
 {
     // persist samples first, then build and save the report for this TestID
     records.Save();
     records.GetDataTable(null, plugin_state.TestID).GetReportAndSave(imbWEMManager.index.experimentEntry.sessionCrawlerFolder, imbWEMManager.authorNotation, "workload_", true);
 }
Esempio n. 29
0
        /// <summary>Runs the current crawl job</summary>
        /// <remarks><para>Starts crawl execution: requires a Job (context.aRecord) to be defined first.
        /// For each crawler instance in the job it builds a <c>crawlerDomainTaskMachine</c> configured
        /// with the engine limits (Tdl/Tll/TC), installs the plugin stack, runs the crawl in
        /// auto-parallel mode, saves the web-loader state and analytic reports, and finally
        /// closes the index session and writes a run note with the total duration.</para></remarks>
        /// <seealso cref="aceOperationSetExecutorBase"/>
        public void aceOperation_runRun()
        {
            // console is optional; NOTE(review): console.workspace is used below without a null
            // check even though a null check appears later — confirm parent is always a console here
            IAceAdvancedConsole console = parent as IAceAdvancedConsole;

            // wall-clock start, used for the final duration line
            DateTime start = DateTime.Now;

            // guard: a Job must be defined before this command can run
            if (context.aRecord == null)
            {
                output.log("Error: define Job before calling this command.");
                return;
            }

            // engine limits from the job configuration (documented in the note table below)
            int Tdl_max = context.crawlerJobEngineSettings.Tdl_max;   // time limit per domain, minutes
            int Tll_max = context.crawlerJobEngineSettings.Tll_max;   // inactivity time limit, minutes
            int TC_max  = context.crawlerJobEngineSettings.TC_max;    // max number of JLC threads

            var spiderEvals = context.aRecord.GetChildRecords();

            // (re)initialize the job record against the current sample list
            context.aRecord.initializeSoft(context.sampleList);


            // per-crawler counter (incremented in the loop below)
            int c = 0;


            // session report folder — passed to each task machine as its output directory
            DirectoryInfo di = imbWEMManager.index.experimentManager.CurrentSession.sessionReportFolder;


            var notation = appManager.AppInfo;

            // ------------------ note creation -------------------
            // human-readable run log saved alongside the crawl output
            analyticJobNote note = new analyticJobNote(imbWEMManager.index.experimentEntry.sessionCrawlerFolder);

            note.WriteAboutJob(context, console.workspace, console);

            note.AppendLine("--------------- Crawl Job configuration overview -------------------------- ");
            note.AppendLine("   Script var | Article    - Description                             ");
            note.AppendLine("--------------------------------------------------------------------------- ");
            note.AppendLine("   Tdl_max    | Tdl        - Time limit per domain - in minutes     | : " + Tdl_max);
            note.AppendLine("   Tll_max    | Tac        - Time limit for inactivity - in minutes | : " + Tll_max);
            note.AppendLine("   TC_max     | TC         - Maximum number of JLC threads allowed  | : " + TC_max);
            note.AppendLine("--------------------------------------------------------------------------- ");
            note.AppendHorizontalLine();
            note.AppendLine("-- if the test was finished without problem at the last line it will be message [RunJob completed] ---");
            note.AppendLine("-- if not: something went wrong - check the logs ---");
            note.AppendHorizontalLine();
            note.SaveNote();

            // run each crawler instance of the job sequentially
            foreach (modelSpiderTestRecord tRecord in spiderEvals)
            {
                c++;
                spiderWebLoaderControler controler = null;

                // reporter collecting analytic output for this crawler run
                directAnalyticReporter reporter = new directAnalyticReporter(imbWEMManager.index.experimentEntry.CrawlID, imbWEMManager.index.experimentEntry.sessionCrawlerFolder, notation);



                // index-level plugins are (re)installed for every crawler run
                context.pluginStack.InstallTo(imbWEMManager.index.plugins, plugInGroupEnum.index, true);

                tRecord.performance = imbWEMManager.index.experimentEntry;

                output.log(tRecord.instance.name + " crawl start");


                // task machine configured with the engine limits resolved above
                crawlerDomainTaskMachine cDTM = new crawlerDomainTaskMachine(tRecord, context.aRecord.sample, reporter, di)
                {
                    maxThreads       = TC_max,
                    _timeLimitForDLC = Tdl_max,
                    TimeLimitForTask = Tll_max
                };

                // install engine / crawler / report plugin groups onto their respective hosts
                context.pluginStack.InstallTo(cDTM.plugins, plugInGroupEnum.engine, false);
                context.pluginStack.InstallTo(tRecord.instance.plugins, plugInGroupEnum.crawler, false);
                context.pluginStack.InstallTo(cDTM.reportPlugins, plugInGroupEnum.report, false);

                cDTM.startAutoParallel(true); // ----- execution (blocks until the crawl completes)

                output.log(tRecord.instance.name + " crawl finished");

                // persist the loader state before reporting
                cDTM.webLoaderControler.Save();

                controler = cDTM.webLoaderControler;

                // write analytic report for this crawler
                reporter.reportCrawler(tRecord);

                // append per-crawler summary to the run note
                note.WriteAboutCrawlerRun(tRecord, cDTM);

                // snapshot the currently-running script into the session folder, if a console hosts us
                if (console != null)
                {
                    console.scriptRunning.getContent().saveStringToFile(imbWEMManager.index.experimentEntry.sessionCrawlerFolder.pathFor("script.ace"));
                }



                // optionally publish this run into the experiment session table
                if (imbWEMManager.settings.directReportEngine.doPublishExperimentSessionTable)
                {
                    imbWEMManager.index.experimentManager.AddOrUpdate(tRecord.performance as experimentSessionEntry);
                }
            }

            // finalize the index session for all crawler records
            imbWEMManager.index.CloseSession(spiderEvals);



            output.AppendLine("RunJob done in: " + DateTime.Now.Subtract(start).TotalMinutes.ToString("#0.0##") + " min");

            // closing marker — its presence in the note signals a clean run (see header lines above)
            note.AppendLine("[RunJob completed]");
            note.SaveNote();



            // imbWEMManager.settings.Save(imbWEMManager.index.experimentEntry.sessionCrawlerFolder.pathFor("imbAnalyticEngineSettings.xml"));

            // save the sample list used for this run next to the note
            var sl = context.sampleList.ToList();

            sl.saveContentOnFilePath(note.folder.pathFor("sample.txt"));
        }
Esempio n. 30
0
 /// <summary>
 /// Status-report hook; implementations emit progress/status information for the
 /// running crawl described by the given task machine and test record.
 /// </summary>
 /// <param name="crawlerDomainTaskMachine">The task machine currently executing the crawl</param>
 /// <param name="tRecord">Test record of the crawler instance being reported on</param>
 public abstract void eventStatusReport(crawlerDomainTaskMachine crawlerDomainTaskMachine, modelSpiderTestRecord tRecord);