/// <summary>
/// Forwards the domain-finished notification to every registered spider module.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the domain that just finished</param>
public override void reportDomainFinished(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord)
{
    foreach (ISpiderModuleBase spiderModule in modules)
    {
        spiderModule.reportDomainFinished(reporter, wRecord);
    }
}
/// <summary>
/// Builds the crawl task machine for one spider test record: wires console logging,
/// creates the domain task collection from the sample, and starts performance sampling.
/// </summary>
/// <param name="__tRecord">Spider test record this machine will execute</param>
/// <param name="sample">Web site profiles turned into the crawl task collection</param>
/// <param name="__reporter">Reporter receiving the analytic output</param>
/// <param name="__folder">Working folder, also passed to the web-loader controler</param>
public crawlerDomainTaskMachine(modelSpiderTestRecord __tRecord, List<webSiteProfile> sample, directAnalyticReporter __reporter, folderNode __folder)
{
    reporter = __reporter;
    folder = __folder;
    tRecord = __tRecord;

    // Route this machine's log output to the console under the record's name.
    logger = new builderForLog();
    aceLog.consoleControl.setAsOutput(logger, tRecord.name);

    SetWebLoaderControler(__folder);

    items = new crawlerDomainTaskCollection(tRecord, sample, this);

    // Performance sampling: CPU, data-load and resource takers, each primed
    // with an initial take() so later deltas have a baseline.
    cpuTaker = new performanceCpu(tRecord.name);
    dataLoadTaker = new performanceDataLoad(tRecord.name);
    measureTaker = new performanceResources(tRecord.name, this);
    cpuTaker.take();
    dataLoadTaker.take();
    measureTaker.take();

    // Expose the takers on the test record so reporting code can read them.
    tRecord.cpuTaker = cpuTaker;
    tRecord.dataLoadTaker = dataLoadTaker;
    tRecord.measureTaker = measureTaker;

    // Plug-in collections must exist before InstallTo() is called by the job runner.
    plugins = new enginePlugInCollection(this);
    reportPlugins = new reportingPlugInCollection(reporter, this);
}
/// <summary>
/// Forwards the crawl-finished notification to every registered spider module.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="tRecord">Test record for the crawl that just finished</param>
public override void reportCrawlFinished(directAnalyticReporter reporter, modelSpiderTestRecord tRecord)
{
    foreach (ISpiderModuleBase spiderModule in modules)
    {
        spiderModule.reportCrawlFinished(reporter, tRecord);
    }
}
/// <summary>
/// Forwards the per-iteration notification to every registered spider module.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the iteration being reported</param>
public override void reportIteration(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord)
{
    foreach (ISpiderModuleBase spiderModule in modules)
    {
        spiderModule.reportIteration(reporter, wRecord);
    }
}
/// <summary>
/// Per-iteration report hook — intentionally a no-op for this module.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the iteration being reported</param>
public override void reportIteration(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord)
{
    // A large block of commented-out layer-dump diagnostics (writing layers.txt
    // into the iteration folder) was removed here; recover it from version
    // control if per-iteration layer logging is ever needed again.
}
/// <summary>Runs the current crawl job</summary>
/// <remarks><para>Starts crawl execution: writes the session note, spins up one
/// crawlerDomainTaskMachine per spider test record, runs each crawl and publishes
/// the reports.</para></remarks>
/// <seealso cref="aceOperationSetExecutorBase"/>
public void aceOperation_runRun()
{
    IAceAdvancedConsole console = parent as IAceAdvancedConsole;

    DateTime start = DateTime.Now;

    // A job must be defined before the crawl can run.
    if (context.aRecord == null)
    {
        output.log("Error: define Job before calling this command.");
        return;
    }

    // Crawl limits taken from the job engine settings.
    int Tdl_max = context.crawlerJobEngineSettings.Tdl_max; // Tdl - time limit per domain, minutes
    int Tll_max = context.crawlerJobEngineSettings.Tll_max; // Tac - inactivity time limit, minutes
    int TC_max = context.crawlerJobEngineSettings.TC_max;   // TC  - max number of JLC threads

    var spiderEvals = context.aRecord.GetChildRecords();

    context.aRecord.initializeSoft(context.sampleList);

    DirectoryInfo di = imbWEMManager.index.experimentManager.CurrentSession.sessionReportFolder;

    var notation = appManager.AppInfo;

    // ------------------ note creation -------------------
    analyticJobNote note = new analyticJobNote(imbWEMManager.index.experimentEntry.sessionCrawlerFolder);

    // NOTE(review): console is null-checked before the script save further below,
    // but dereferenced unconditionally here — confirm parent always implements
    // IAceAdvancedConsole when this command is invoked.
    note.WriteAboutJob(context, console.workspace, console);

    note.AppendLine("--------------- Crawl Job configuration overview -------------------------- ");
    note.AppendLine(" Script var | Article - Description ");
    note.AppendLine("--------------------------------------------------------------------------- ");
    note.AppendLine(" Tdl_max | Tdl - Time limit per domain - in minutes | : " + Tdl_max);
    note.AppendLine(" Tll_max | Tac - Time limit for inactivity - in minutes | : " + Tll_max);
    note.AppendLine(" TC_max | TC - Maximum number of JLC threads allowed | : " + TC_max);
    note.AppendLine("--------------------------------------------------------------------------- ");
    note.AppendHorizontalLine();
    note.AppendLine("-- if the test was finished without problem at the last line it will be message [RunJob completed] ---");
    note.AppendLine("-- if not: something went wrong - check the logs ---");
    note.AppendHorizontalLine();
    note.SaveNote();

    foreach (modelSpiderTestRecord tRecord in spiderEvals)
    {
        directAnalyticReporter reporter = new directAnalyticReporter(
            imbWEMManager.index.experimentEntry.CrawlID,
            imbWEMManager.index.experimentEntry.sessionCrawlerFolder,
            notation);

        context.pluginStack.InstallTo(imbWEMManager.index.plugins, plugInGroupEnum.index, true);

        tRecord.performance = imbWEMManager.index.experimentEntry;

        output.log(tRecord.instance.name + " crawl start");

        crawlerDomainTaskMachine cDTM = new crawlerDomainTaskMachine(tRecord, context.aRecord.sample, reporter, di)
        {
            maxThreads = TC_max,
            _timeLimitForDLC = Tdl_max,
            TimeLimitForTask = Tll_max
        };

        //state.pluginStack
        context.pluginStack.InstallTo(cDTM.plugins, plugInGroupEnum.engine, false);
        context.pluginStack.InstallTo(tRecord.instance.plugins, plugInGroupEnum.crawler, false);
        context.pluginStack.InstallTo(cDTM.reportPlugins, plugInGroupEnum.report, false);

        // ----- execution
        cDTM.startAutoParallel(true);

        output.log(tRecord.instance.name + " crawl finished");

        cDTM.webLoaderControler.Save();

        reporter.reportCrawler(tRecord);

        note.WriteAboutCrawlerRun(tRecord, cDTM);

        if (console != null)
        {
            console.scriptRunning.getContent().saveStringToFile(imbWEMManager.index.experimentEntry.sessionCrawlerFolder.pathFor("script.ace"));
        }

        if (imbWEMManager.settings.directReportEngine.doPublishExperimentSessionTable)
        {
            imbWEMManager.index.experimentManager.AddOrUpdate(tRecord.performance as experimentSessionEntry);
        }
    }

    imbWEMManager.index.CloseSession(spiderEvals);

    output.AppendLine("RunJob done in: " + DateTime.Now.Subtract(start).TotalMinutes.ToString("#0.0##") + " min");

    note.AppendLine("[RunJob completed]");
    note.SaveNote();

    // imbWEMManager.settings.Save(imbWEMManager.index.experimentEntry.sessionCrawlerFolder.pathFor("imbAnalyticEngineSettings.xml"));
    var sl = context.sampleList.ToList();
    sl.saveContentOnFilePath(note.folder.pathFor("sample.txt"));
}
/// <summary>
/// Domain-finished report hook — intentionally a no-op for this implementation.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output (unused)</param>
/// <param name="wRecord">Site record for the domain that finished (unused)</param>
public override void reportDomainFinished(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord) { }
/// <summary>
/// Crawl-finished report hook — intentionally a no-op for this implementation.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output (unused)</param>
/// <param name="tRecord">Test record for the crawl that finished (unused)</param>
public override void reportCrawlFinished(directAnalyticReporter reporter, modelSpiderTestRecord tRecord) { }
/// <summary>
/// Per-iteration report hook — intentionally a no-op for this implementation.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output (unused)</param>
/// <param name="wRecord">Site record for the iteration being reported (unused)</param>
public override void reportIteration(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord) { }
/// <summary>
/// Called once when the whole crawl job has finished, so the implementation can emit its final report.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="tRecord">Test record for the finished crawl</param>
public abstract void reportCrawlFinished(directAnalyticReporter reporter, modelSpiderTestRecord tRecord);
/// <summary>
/// Called when crawling of a single domain has finished, so the implementation can report on it.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the finished domain</param>
public abstract void reportDomainFinished(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord);
/// <summary>
/// Called after each crawl iteration of a domain, so the implementation can report intermediate state.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the iteration being reported</param>
public abstract void reportIteration(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord);
/// <summary>
/// Per-iteration report hook — delegates to the base implementation only.
/// </summary>
/// <param name="reporter">Reporter collecting the analytic output</param>
/// <param name="wRecord">Site record for the iteration being reported</param>
public override void reportIteration(directAnalyticReporter reporter, modelSpiderSiteRecord wRecord)
{
    // Removed a commented-out leftover that cast layerActiveRules[0] to a
    // graph rule; recover from version control if that diagnostic is needed.
    base.reportIteration(reporter, wRecord);
}
/// <summary>
/// Creates a new analytic reporter for the given crawler, targeting the reports sub-folder.
/// </summary>
/// <param name="crawlerName">Name of the crawler the reporter is built for</param>
/// <param name="notation">Author notation attached to the generated reports</param>
/// <returns>Newly constructed reporter instance</returns>
public directAnalyticReporter makeReporter(string crawlerName, aceAuthorNotation notation)
{
    var reportsFolder = folder[ACFolders.reports];
    return new directAnalyticReporter(crawlerName, reportsFolder, notation);
}