/// <summary>
/// Loads the specified folder: every experimentSetup.xml found below it marks one
/// experiment whose secondary report is loaded and aggregated.
/// </summary>
/// <param name="_folder">The folder - to be scanned for experiment data</param>
/// <param name="logger">The logger.</param>
public void Load(folderNode _folder, ILogBuilder logger)
{
    folder = _folder;
    try
    {
        var setupFiles = folder.findFiles("experimentSetup.xml", SearchOption.AllDirectories, false);
        foreach (var setupPath in setupFiles)
        {
            try
            {
                var report = new secondaryReportOnExperiment(setupPath, logger, folder);
                items.Add(report);
                allItems.AddRange(report.items);
                allTopItems.Add(report.topPerformer);
            }
            catch (Exception ex)
            {
                // one broken experiment report must not abort the rest of the scan
                logger.log(ex.LogException("secondaryReport.Load", "SECREP_LOAD=>" + setupPath));
            }
        }
    }
    catch (Exception ex)
    {
        logger.log(ex.LogException("secondaryReport.Load", "SECREP_LOAD"));
    }
}
/// <summary>
/// Loads all HTML sources from the folder itself, then from each subfolder that contains
/// matching files, attaching the latter as subcollections of the result.
/// </summary>
/// <param name="folder">Root folder to load from</param>
/// <param name="filename">Filename prefix of the HTML source files</param>
/// <param name="removeIncompleteEntries">Passed through to <c>LoadAll</c></param>
/// <param name="removeEmptyDocuments">Passed through to <c>LoadAll</c></param>
/// <returns>Collection for the root folder with one subcollection per populated subfolder</returns>
public static HtmlSourceAndUrlCollection LoadAllInSubfolders(this folderNode folder, String filename = "htmlsource", Boolean removeIncompleteEntries = true, Boolean removeEmptyDocuments = true)
{
    HtmlSourceAndUrlCollection output = LoadAll(folder, filename, removeIncompleteEntries, removeEmptyDocuments);
    DirectoryInfo directory = folder;
    var allFiles = directory.GetFiles(filename + "*.html", SearchOption.AllDirectories);

    // collect each distinct subdirectory (other than the root) that holds matching files;
    // HashSet membership replaces the original O(n^2) List.Any scan over directory paths
    List<DirectoryInfo> subdirectories = new List<DirectoryInfo>();
    HashSet<String> seenDirectoryPaths = new HashSet<String>();
    foreach (FileInfo fi in allFiles)
    {
        if (seenDirectoryPaths.Add(fi.DirectoryName))
        {
            if (fi.Directory.FullName != directory.FullName)
            {
                subdirectories.Add(fi.Directory);
            }
        }
    }

    foreach (folderNode subfolder in subdirectories)
    {
        output.SubCollections.Add(LoadAll(subfolder, filename, removeIncompleteEntries, removeEmptyDocuments));
    }
    return (output);
}
/// <summary>
/// Loads the semantic clouds of the specified FVE
/// </summary>
/// <param name="fve">The fve.</param>
/// <param name="logger">The logger.</param>
public void LoadSemanticClouds(semanticFVExtractor fve, ILogBuilder logger)
{
    DirectoryInfo di = folder;
    var dirs = di.GetDirectories(fve.name);
    if (!dirs.Any())
    {
        logger.log("Failed to find subfolder for FVE [" + fve.name + "]");
        return;
    }

    folderNode fveFolder = dirs.First();
    var allCloudFiles = fveFolder.findFiles("*Cloud.xml", SearchOption.AllDirectories);

    Int32 loadedCount = 0;
    foreach (String cloudFile in allCloudFiles)
    {
        // "General" and "SharedKnowledge" clouds are deliberately skipped
        if (!cloudFile.Contains("General") && !cloudFile.Contains("SharedKnowledge"))
        {
            semanticClouds.Add(fve.name, objectSerialization.loadObjectFromXML<lemmaSemanticCloud>(cloudFile, logger));
            loadedCount++;
        }
    }
    logger.log("Semantic clouds loaded [" + loadedCount + "] for " + fve.name);
}
/// <summary>
/// Builds the crawler domain task machine for one spider test record: wires the reporter,
/// logging, the web-loader controller, the domain task collection, performance takers
/// and the plug-in collections.
/// </summary>
/// <param name="__tRecord">Spider test record this machine executes</param>
/// <param name="sample">Web site profiles forming the crawl sample</param>
/// <param name="__reporter">Reporter receiving analytic output</param>
/// <param name="__folder">Working folder for the machine</param>
public crawlerDomainTaskMachine(modelSpiderTestRecord __tRecord, List<webSiteProfile> sample, directAnalyticReporter __reporter, folderNode __folder)
{
    reporter = __reporter;
    folder = __folder;
    tRecord = __tRecord;

    // route this machine's log into the console under the record's name
    logger = new builderForLog();
    aceLog.consoleControl.setAsOutput(logger, tRecord.name);

    SetWebLoaderControler(__folder);

    items = new crawlerDomainTaskCollection(tRecord, sample, this);

    // performance takers: cpu, data load and resource usage, sampled once immediately
    cpuTaker = new performanceCpu(tRecord.name);
    dataLoadTaker = new performanceDataLoad(tRecord.name);
    measureTaker = new performanceResources(tRecord.name, this);
    cpuTaker.take();
    dataLoadTaker.take();
    measureTaker.take();

    // expose the takers on the record so other components can read the measurements
    tRecord.cpuTaker = cpuTaker;
    tRecord.dataLoadTaker = dataLoadTaker;
    tRecord.measureTaker = measureTaker;

    plugins = new enginePlugInCollection(this);
    reportPlugins = new reportingPlugInCollection(reporter, this);
}
/// <summary>
/// Resolves the path of the selected-features data file ("*_selected.xml") derived from the output filename.
/// </summary>
/// <param name="outputfilename">Base output filename</param>
/// <param name="folder">Folder the path is resolved against</param>
/// <returns>Full path for the selected-features XML file</returns>
public static String GetSelectedFeaturesDataFilename(String outputfilename, folderNode folder)
{
    String selectedFilename = outputfilename.ensureEndsWith("_selected.xml");
    return (folder.pathFor(selectedFilename, imbSCI.Data.enums.getWritableFileMode.none));
}
/// <summary>
/// Builds the direct analytic reporter: optional per-site and per-iteration record folders,
/// report toggles copied from the directReportEngine settings, and a log-file writer.
/// </summary>
/// <param name="reportName">Name of the report</param>
/// <param name="reportRootDir">Root directory for report output</param>
/// <param name="__notation">Author notation passed to the base reporter</param>
public directAnalyticReporter(string reportName, folderNode reportRootDir, aceAuthorNotation __notation) : base(reportName, reportRootDir, __notation)
{
    // per-site record folders, only when domain reporting is enabled
    if (imbWEMManager.settings.directReportEngine.doDomainReport)
    {
        siteRecords = new folderNodeForInstances<modelSpiderSiteRecord>(folder[DRFolderEnum.sites], getSiteName);
    }
    // per-iteration record folders, only when iteration reporting is enabled
    if (imbWEMManager.settings.directReportEngine.doIterationReport)
    {
        iterationRecords = new folderNodeForInstances<int>(folder[DRFolderEnum.it], getIterationName);
    }

    // snapshot the report toggles from global settings
    REPORT_DOMAIN_PAGES = imbWEMManager.settings.directReportEngine.DR_ReportDomainPages;
    REPORT_DOMAIN_TERMS = imbWEMManager.settings.directReportEngine.DR_ReportDomainTerms;
    REPORT_ITERATION_TERMS = imbWEMManager.settings.directReportEngine.DR_ReportIterationTerms;
    REPORT_ITERATION_URLS = imbWEMManager.settings.directReportEngine.DR_ReportIterationUrls;
    REPORT_WRECORD_LOG = imbWEMManager.settings.directReportEngine.DR_ReportWRecordLog;
    REPORT_TIMELINE = imbWEMManager.settings.directReportEngine.DR_ReportTimeline;
    REPORT_MODULES = imbWEMManager.settings.directReportEngine.DR_ReportModules;

    // send console log output into log.txt inside the report's logs folder
    aceLog.consoleControl.setLogFileWriter(folder[DRFolderEnum.logs].pathFor("log.txt"));

    /*
     * TextWriter logOut = File.CreateText();
     * aceLog.consoleControl.logWritter = logOut;
     * aceLog.consoleControl.logFileWriteOn = true;
     */
}
/// <summary>
/// Saves all content; a non-null argument redirects output to the given folder first.
/// </summary>
/// <param name="_folder">Optional replacement target folder; null keeps the current one</param>
public void SaveAll(folderNode _folder = null)
{
    // null means: keep using the folder already assigned
    folder = _folder ?? folder;
}
/// <summary>
/// Publishes the information set: tabular reports for records and changes,
/// plus two textual reports built from the same entries.
/// </summary>
/// <param name="info">The information set to publish</param>
/// <param name="folder">Target folder for all reports</param>
/// <param name="name">Base name used in report titles and filenames</param>
/// <param name="notation">Optional author notation for the tabular reports</param>
public static void Publish(this StructureGraphInformationSet info, folderNode folder, String name, aceAuthorNotation notation = null)
{
    // tabular report: records
    DataTable recordTable = info.items.ReportToDataTable<StructureGraphInformation>(true);
    recordTable.SetTitle(name + " records");
    recordTable.GetReportAndSave(folder, notation);

    // tabular report: changes
    DataTable changeTable = info.changes.ReportToDataTable<StructureGraphInformation>(true);
    changeTable.SetTitle(name + " changes");
    changeTable.GetReportAndSave(folder, notation);

    // textual report: records
    builderForText recordText = new builderForText();
    foreach (StructureGraphInformation entry in info.items)
    {
        entry.Report(null, recordText);
    }
    recordText.ReportSave(folder, name + "_records", "Structure graph entries");

    // textual report: changes
    builderForText changeText = new builderForText();
    foreach (var entry in info.changes)
    {
        entry.Report(null, changeText);
    }
    changeText.ReportSave(folder, name + "_changes", "Structure graph changes log");
}
/// <summary>
/// Generates overlap matrixes and a blended weight-distribution histogram report
/// for each feature-selection test size.
/// </summary>
/// <param name="log">The logger.</param>
/// <param name="folder">Target folder for the generated reports.</param>
public void GenerateOverlapMatrixes(ILogBuilder log, folderNode folder)
{
    // (the original declared an unused List<histogramModel> here — removed)
    foreach (Int32 atSize in FSTests.Get1stKeys())
    {
        ConcurrentDictionary<string, FeatureSelectionAnalysis> concurrentDictionary = FSTests[atSize];
        String prefix = concurrentDictionary.Keys.toCsvInLine() + "_" + atSize;

        GenerateOverlapMatrixes(prefix, concurrentDictionary, log, folder);

        // one histogram of descending feature weights per selection collection at this size
        List<histogramModel> models = new List<histogramModel>();
        foreach (var selcol in concurrentDictionary.Values)
        {
            var freq = selcol.weightedFeatures.index.Values.OrderByDescending(x => x.weight);
            histogramModel model = histogramModelExtensions.GetHistogramModel(freq, "Weights", x => x.weight, 20);
            models.Add(model);
        }
        models.BlendHistogramModels(prefix).GetReportAndSave(folder, null, "histogram" + prefix);
    }
}
/// <summary>
/// Initializes a new instance of the cache response for the given instance identifier and directory.
/// </summary>
/// <param name="__instanceID">The instance identifier.</param>
/// <param name="__directory">The directory.</param>
/// <param name="__createNewOnNotFound">Presumably controls creating a new instance on cache miss — confirm at usage site.</param>
/// <param name="__instanceType">Type of the cached instance; may be null.</param>
public cacheResponseForType(String __instanceID, folderNode __directory, Boolean __createNewOnNotFound = false, Type __instanceType = null)
{
    instanceID = __instanceID;
    directory = __directory;
    createNewOnNotFound = __createNewOnNotFound;
    instanceType = __instanceType;
}
/// <summary>
/// Builds the CWP analysis extension and its three sub-extensions
/// (document rendering, feature vectors, weighting model), then binds them.
/// </summary>
/// <param name="__folder">Working folder passed to the base and the sub-extensions</param>
/// <param name="__parent">Parent operation-set executor</param>
public becFeatureCWPAnalysisExtension(folderNode __folder, IAceOperationSetExecutor __parent) : base(__folder, __parent)
{
    // NOTE(review): argument order here is (__parent, __folder) — reversed relative to the two
    // constructors below. Confirm becDocumentRenderingExtension's signature really differs.
    _rendering = new becDocumentRenderingExtension(__parent, __folder);
    _features = new becFeatureVectorExtension(__folder, __parent);
    _weight = new becWeightingModelExtension(__folder, __parent);
    SetSubBinding();
}
/// <summary>
/// Merges weight dictionaries - searches folder for specified input names or search pattern
/// </summary>
/// <param name="folder">The folder.</param>
/// <param name="inputNames">The input names.</param>
/// <param name="output">The output.</param>
/// <param name="searchPattern">The search pattern.</param>
/// <returns>Feature vector dictionary merged from all loaded weight dictionaries</returns>
public static FeatureVectorDictionaryWithDimensions MergeWeightDictionaries(folderNode folder, String inputNames, ILogBuilder output, String searchPattern = "*_wt.xml")
{
    List<string> filepaths = folder.GetOrFindFiles(inputNames, searchPattern);
    List<WeightDictionary> results = new List<WeightDictionary>();
    // (the original also declared an unused tmpOutputName accumulator — removed)
    Int32 c = 0;
    foreach (var fp in filepaths)
    {
        var lr = WeightDictionary.LoadFile(fp, output);
        lr.description += "Source name: " + lr.name;
        String fn = Path.GetFileNameWithoutExtension(fp);
        // suffix with a sequence number so names stay unique across merged sources
        lr.name = fn + c.ToString("D3");
        c++;
        results.Add(lr);
    }
    FeatureVectorDictionaryWithDimensions featureDict = MergeWeightDictionaries(results);
    return (featureDict);
}
/// <summary>
/// Saves the expanded-data report: XML-valued entries go into their own files,
/// plain values are appended to a text report that is then saved.
/// </summary>
/// <param name="report">The report to save</param>
/// <param name="folder">Target folder</param>
/// <param name="filename">Base filename; falls back to "expandedData" when empty</param>
/// <param name="fileDescription">Description attached to the saved text report</param>
/// <returns>Path of the saved text report</returns>
public static String ReportSave(this reportExpandedData report, folderNode folder, String filename, String fileDescription = "")
{
    builderForText reporter = new builderForText();
    if (filename.isNullOrEmpty())
    {
        filename = "expandedData";
    }
    filename = Path.GetFileNameWithoutExtension(filename);
    foreach (reportExpandedDataPair entry in report)
    {
        // "<?xml" is a machine marker, not linguistic text — Ordinal avoids the
        // culture-sensitive default of StartsWith(string) (CA1310)
        if (entry.value.StartsWith("<?xml", StringComparison.Ordinal))
        {
            String filepath = folder.pathFor(filename + "_" + entry.key + ".xml", Data.enums.getWritableFileMode.overwrite, "Stored type value from [" + filename + "]");
            File.WriteAllText(filepath, entry.value);
        }
        else
        {
            reporter.AppendLine($"{entry.key} = {entry.value} \t\t\t //{entry.description}");
        }
    }
    return (reporter.ReportSave(folder, filename, fileDescription));
}
/// <summary>
/// Writes a plain-text report describing this junction point (signature, template and items)
/// into the given folder.
/// </summary>
/// <param name="folder">Folder the report file is written into</param>
public void Report(folderNode folder)
{
    builderForText report = new builderForText();

    report.AppendLine($"Signature: \t\t\t {Signature}");
    report.AppendLine($"XPathRoot: \t\t\t {XPathRoot}");
    report.AppendLine($"JunctionSize: \t\t {JunctionSize}");
    report.AppendLine($"JunctionSizeFrequency: \t {JunctionSizeFrequency}");
    report.AppendLine($"Level: \t {Level}");
    report.AppendLine($"Junction type: \t {type}");

    report.AppendLine($"Template:");
    report.AppendLine($"XSubPath: \t\t {Template.SubXPath}");
    report.AppendLine($"Signature: \t\t {Template.Signature}");
    report.AppendLine($"Query: \t\t {Template.BuildXPathQuery()}");

    report.AppendLine($"Items: \t\t ");
    foreach (T item in items)
    {
        report.AppendLine(item.path);
    }

    String reportPath = folder.pathFor("JunctionPoint_" + Signature + ".txt", imbSCI.Data.enums.getWritableFileMode.overwrite);
    File.WriteAllText(reportPath, report.GetContent());
}
/// <summary>
/// Before saving, exports the names of primary and secondary term-category nodes
/// of this semantic cloud into text files.
/// </summary>
/// <param name="folder">Folder the export files are resolved against</param>
public override void OnBeforeSave(folderNode folder)
{
    base.OnBeforeSave(folder);

    if (primaryNodes.Any())
    {
        String primaryName = (className + "_" + name + "_primary.txt").getCleanFileName(false);
        String primaryPath = folder.pathFor(primaryName, imbSCI.Data.enums.getWritableFileMode.none, "List of lemma nodes in the Semantic Cloud [" + name + "] that are in Primary Term category");
        File.WriteAllLines(primaryPath, primaryNodes.Select(x => x.name).ToList());
    }

    if (secondaryNodes.Any())
    {
        String secondaryName = (className + "_" + name + "_secondary.txt").getCleanFileName(false);
        String secondaryPath = folder.pathFor(secondaryName, imbSCI.Data.enums.getWritableFileMode.none, "List of lemma nodes in the Semantic Cloud [" + name + "] that are in Secondary Term category");
        File.WriteAllLines(secondaryPath, secondaryNodes.Select(x => x.name).ToList());
    }
}
/// <summary>
/// Merges the ds rankings - searches folder for specified input names or search pattern
/// </summary>
/// <param name="folder">The folder.</param>
/// <param name="inputNames">The input names.</param>
/// <param name="output">The output.</param>
/// <param name="searchPattern">The search pattern.</param>
/// <returns>Feature vector dictionary built from all loaded ranking results</returns>
public static FeatureVectorDictionaryWithDimensions MergeDSRankings(folderNode folder, String inputNames, ILogBuilder output, String searchPattern = "DS_*_ranking.xml")
{
    List<string> filepaths = folder.GetOrFindFiles(inputNames, searchPattern);
    // (the original declared two unused locals here — resultOut and tmpOutputName — removed)
    List<DocumentSelectResult> results = new List<DocumentSelectResult>();
    List<String> existingNames = new List<string>();
    foreach (var fp in filepaths)
    {
        var lr = DocumentSelectResult.LoadFromFile(fp, output);
        String fn = Path.GetFileNameWithoutExtension(fp);
        // on a name collision, fall back to the file name to keep result names unique
        if (existingNames.Contains(lr.name))
        {
            lr.name = fn;
        }
        existingNames.Add(lr.name);
        results.Add(lr);
    }
    FeatureVectorDictionaryWithDimensions featureDict = DocumentRankingExtensions.TransformToFVDictionary(results);
    return (featureDict);
}
/// <summary>
/// Writes a per-page report folder for one spider target: page content, outgoing links,
/// and (when present) the relevance evaluation.
/// </summary>
/// <param name="t">Target page to report on</param>
/// <param name="fn">Parent folder the page folder is created under</param>
/// <param name="c">Ordinal of the page, used in the folder name</param>
private void reportTarget(spiderTarget t, folderNode fn, int c)
{
    string pageFolder = "P" + c.ToString("D3") + "_" + t.IsRelevant.ToString();
    folderNode pfn = fn.Add(pageFolder, "Page " + c.ToString(), "Report on page " + t.url + " crawled by " + name + ". Target.IsRelevant: " + t.IsRelevant + ".".addLine(pageDescription));

    fileunit content = new fileunit(pfn.pathFor("content.txt"), false);
    fileunit links = new fileunit(pfn.pathFor("links.txt"), false);

    if (t.evaluation != null)
    {
        t.evaluation.saveObjectToXML(pfn.pathFor("relevance.xml"));
    }

    content.setContent(t.pageText);

    if (t.page != null)
    {
        // one line per outgoing link (the original also assigned an unused local here — removed)
        foreach (spiderLink ln in t.page.relationship.outflowLinks.items.Values)
        {
            links.Append(ln.url);
        }
    }

    content.Save();
    links.Save();
}
/// <summary>
/// Saves the subcategories.
/// </summary>
/// <param name="category">The category whose direct subcategories are saved.</param>
/// <param name="rootFolder">The root folder.</param>
/// <param name="options">Formatting options passed through to SaveWebSites.</param>
/// <param name="logger">NOTE(review): currently unused — not forwarded to SaveWebSites.</param>
protected void SaveSubcategories(WebDocumentsCategory category, folderNode rootFolder, WebDomainCategoryFormatOptions options, ILogBuilder logger = null)
{
    foreach (WebDocumentsCategory subCategory in category)
    {
        SaveWebSites(subCategory, rootFolder, options);
    }
}
/// <summary>
/// Publishes the task: XML declaration, property-validation report with a log of every
/// invalid property, and the extractor customization settings.
/// </summary>
/// <param name="folder">Target folder for all published files</param>
/// <param name="notation">Author notation (currently not used by this method body)</param>
public void Publish(folderNode folder, aceAuthorNotation notation)
{
    // serialize the task declaration
    objectSerialization.saveObjectToXML(task, folder.pathFor(task.name + "_declaration.xml", imbSCI.Data.enums.getWritableFileMode.overwrite, "Declaration of task [" + task.name + "]"));

    ReportTable_PropertyValudation = PropertyValidation.GetReportTable(this);

    // append details of every invalid property to the validation log
    var validationResults = PropertyValidation.GetResults();
    foreach (var result in validationResults[ValidationOutcome.Invalid])
    {
        reporter.AppendLine(result.item.PropertyName + " : " + result.item.DisplayName);
        reporter.AppendLine(result.Outcome + " : " + result.Message);
        reporter.nextTabLevel();
        reporter.AppendParagraph(result.reporter.GetContent());
        reporter.prevTabLevel();
    }
    reporter.ReportSave(folder, task.name + "_validation_log.txt");

    task.ExtractorCustomizationSettings.ReportSave(folder, task.name + "_settings", "Custom settings for extractor [" + task.ExtractorName + "] stored in task [" + task.name + "]");
}
/// <summary>
/// Loads the bin.
/// </summary>
/// <param name="folder">The folder.</param>
/// <param name="logger">The logger.</param>
/// <param name="filenamePrefix">The filename prefix.</param>
/// <returns>True when at least one binary lexic file was loaded</returns>
public Boolean LoadBin(folderNode folder, ILogBuilder logger, String filenamePrefix = "lexicResource")
{
    var files = folder.findFiles(filenamePrefix + "_*.bin", SearchOption.TopDirectoryOnly);
    Int32 loaded = 0;
    foreach (var binPath in files)
    {
        String filename = Path.GetFileNameWithoutExtension(binPath);
        // the letter index is whatever follows the "<prefix>_" part of the filename
        String letter = filename.Replace(filenamePrefix + "_", "");
        ConcurrentDictionary<String, lexicInflection> dict = Accord.IO.Serializer.Load<ConcurrentDictionary<String, lexicInflection>>(binPath);
        if (!dict.Any())
        {
            continue; // empty files are silently ignored, as in the original behavior
        }
        loaded++;
        logger.log("File [" + filename + "] loaded --> index [" + letter + "]");
        if (items.ContainsKey(letter))
        {
            // letter already present: replace its content with the loaded entries
            items[letter].Clear();
            items[letter].AddRange(dict);
        }
        else
        {
            items.TryAdd(letter, dict);
        }
    }
    if (loaded > 0)
    {
        logger.log("[" + loaded + "] lexic files loaded from [" + folder.path + "]");
        return (true);
    }
    return (false);
}
/// <summary> /// Saves the data structure: its properties marked with <see cref="fileDataAttribute"/> attribute and it self /// </summary>
/// <param name="instance">The instance that has to be saved</param>
/// <param name="parentFolder">The parent folder in which this instance will be saved - if not specified the application current folder is used</param>
/// <param name="output">Logger</param>
/// <returns>Path of the file the instance itself was saved to</returns>
/// <exception cref="System.NotImplementedException">Can't have File Data Structure loaded if no file structure mode specified</exception>
internal String SaveDataStructure(IFileDataStructure instance, folderNode parentFolder = null, ILogBuilder output = null)
{
    if (parentFolder == null)
    {
        parentFolder = new folderNode();
    }
    // fall back to this descriptor's description when the instance carries none
    String desc = instance.description;
    if (desc.isNullOrEmpty())
    {
        desc = description;
    }
    // presumably resolves the target filename and assigns instance.folder under parentFolder — confirm in GetFilenameAndSetInstanceFolder
    String filename = GetFilenameAndSetInstanceFolder(instance, parentFolder, output);
    // let the instance prepare itself before any data is written
    instance.OnBeforeSave();
    // persist every property marked with fileDataAttribute into the instance folder
    foreach (var pair in fileDataProperties)
    {
        fileDataPropertyDescriptor pDesc = pair.Value;
        pDesc.SaveData(instance, instance.folder, output);
    }
    String filepath = instance.folder.pathFor(filename, getWritableFileMode.overwrite, desc, true);
    // finally serialize the instance itself
    SaveDataFile(instance, filepath, output);
    return (filepath);
}
/// <summary>
/// Saves the bin.
/// </summary>
/// <param name="folder">The folder.</param>
/// <param name="logger">The logger.</param>
/// <param name="skipExisting">if set to <c>true</c> [skip existing].</param>
/// <param name="filenamePrefix">The filename prefix.</param>
/// <returns>True when at least one binary file was written</returns>
public Boolean SaveBin(folderNode folder, ILogBuilder logger, Boolean skipExisting = true, String filenamePrefix = "lexicResource")
{
    Int32 written = 0;
    foreach (var pair in items)
    {
        String binPath = folder.pathFor(filenamePrefix + "_" + pair.Key + ".bin", imbSCI.Data.enums.getWritableFileMode.none, "Binary serialized lexic entries starting with [" + pair.Key + "]");
        if (skipExisting && File.Exists(binPath))
        {
            logger.log("File [" + binPath + "] exists. Skipping binary serialization");
            continue;
        }
        ConcurrentDictionary<String, lexicInflection> dict = pair.Value;
        dict.Save(binPath);
        written++;
    }
    if (written == 0)
    {
        return (false);
    }
    logger.log("[" + written + "] lexic files serialized to [" + folder.path + "]");
    return (true);
}
/// <summary>
/// Resolves the path of the model definition file ("*_model.xml") derived from the output filename.
/// </summary>
/// <param name="outputfilename">Base output filename</param>
/// <param name="folder">Folder the path is resolved against</param>
/// <returns>Full path for the model definition XML file</returns>
public static String GetModelDefinitionFilename(String outputfilename, folderNode folder)
{
    String modelFilename = outputfilename.ensureEndsWith("_model.xml");
    return (folder.pathFor(modelFilename, imbSCI.Data.enums.getWritableFileMode.none));
}
/// <summary>
/// Call this method to execute all test methods, declared in child class.
/// Each parameterless public instance method declared directly on the child class
/// is invoked inside its own results subfolder; exceptions are logged, not rethrown.
/// </summary>
/// <param name="resultsNode">The results node; when given, replaces the current results folder.</param>
public void ExecuteTest(folderNode resultsNode = null)
{
    imbSCI.Core.screenOutputControl.logToConsoleControl.setAsOutput(log, GetType().Name);

    if (resultsNode != null)
    {
        folderResults = resultsNode;
    }

    log.log("-- Starting test [" + GetType().Name + "]");

    var methodFlags = System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.DeclaredOnly | System.Reflection.BindingFlags.Instance;
    foreach (MethodInfo testMethod in GetType().GetMethods(methodFlags))
    {
        // each test method gets its own subfolder; the previous folder is restored afterwards
        folderNode parentResults = folderResults;
        folderResults = parentResults.Add(testMethod.Name, testMethod.Name.imbTitleCamelOperation(true), "Results of test method [" + testMethod.Name + "]");

        if (!testMethod.GetParameters().Any())
        {
            log.log("-- Starting test method [" + testMethod.Name + "]");
            try
            {
                testMethod.Invoke(this, null);
            }
            catch (Exception ex)
            {
                // a failing test is logged and the run continues
                log.log("-- : " + ex.LogException("Test failed[" + testMethod.Name + "]", GetType().Name + " -"));
            }
        }

        folderResults = parentResults;
    }
    Done();
}
/// <summary>
/// Resolves and validates this state's folder under the parent's folder, retrying setup once
/// when the resolved folder is not a child of the parent folder.
/// </summary>
/// <param name="i">Internal retry counter; callers use the default of 0.</param>
/// <exception cref="ArgumentException">When parent or the parent's folder is not set.</exception>
/// <exception cref="Exception">When the folder is still not a child of the parent folder after retry.</exception>
private void SetFolder(Int32 i = 0)
{
    // ArgumentException's constructor is (message, paramName) — the original call had the
    // arguments reversed, producing exceptions whose message was just the parameter name.
    if (parent == null)
    {
        throw new ArgumentException("Parent state or Project must be set with construction of this state object!", nameof(parent));
    }
    folderNode parentFolder = parent.folder;
    if (parentFolder == null)
    {
        throw new ArgumentException("Parent state or Project must be set with construction of this state object!", nameof(Project));
    }
    if (_folder == null)
    {
        if (HasSubfolder)
        {
            _folder = parentFolder.Add(name, name, "Project state [" + Info.displayName + "] data." + Info.description);
        }
        else
        {
            _folder = parentFolder;
        }
    }
    if (_folder != null)
    {
        // sanity check: the resolved folder must live under the parent folder
        if (!_folder.path.StartsWith(parentFolder.path))
        {
            if (i > 0)
            {
                throw new Exception("Folder [" + _folder.path + "] is not child of [" + parentFolder.path + "] -- after [" + i.ToString() + "] iterations of autosetup");
            }
            // retry the setup exactly once
            SetFolder(i + 1);
        }
    }
}
/// <summary>
/// Creates a knowledge instance of type <typeparamref name="T"/> and deploys it into the
/// folder appropriate for its knowledge type (case folder, collection folder, or the
/// experiment-shared folder). Deployment failures are recorded on the validation case,
/// and the (possibly undeployed) instance is still returned.
/// </summary>
/// <param name="name">Name assigned to the new knowledge instance</param>
/// <param name="validationCase">Fold validation case whose folder is the default target</param>
/// <param name="type">Knowledge type, controls which folder is used</param>
/// <param name="logger">The logger passed to Deploy</param>
protected T CreateKnowledgeInstance <T>(String name, kFoldValidationCase validationCase, WebFVExtractorKnowledgeType type, ILogBuilder logger) where T : class, IWebFVExtractorKnowledge, new()
{
    // default: knowledge lives in this fold's case folder
    folderNode folder = validationCase.caseFolder;
    if (type == WebFVExtractorKnowledgeType.aboutDocumentSet)
    {
        // document-set level knowledge goes to the collection folder,
        // or to the experiment-shared folder when case knowledge is shared
        folder = validationCollection.caseFolder;
        if (DoShareCaseKnowledge)
        {
            folder = ExperimentSharedCasesFolder;
        }
    }
    T knowledge = new T();
    knowledge.name = name;
    knowledge.type = type;
    try
    {
        knowledge.Deploy(folder, logger);
    }
    catch (aceGeneralException ex)
    {
        // deployment failure does not abort — it is logged into the case's error notes
        validationCase.context.errorNotes.LogException("Create Knowledge Instance (" + typeof(T).Name + ") error for [" + name + "] in fold [" + validationCase.name + "]:" + ex.title, ex);
    }
    return (knowledge);
}
/// <summary>
/// Saves all HTML sources of the collection into the folder, then recursively saves
/// each subcollection into its own subfolder.
/// </summary>
/// <param name="sources">Collection to save</param>
/// <param name="filename">Base filename; when empty a running counter is used instead</param>
/// <param name="deleteExisting">When true, existing files in the folder are deleted first</param>
public static void Save(this HtmlSourceAndUrlCollection sources, folderNode folder, String filename = "htmlsource", Boolean deleteExisting = true)
{
    if (deleteExisting)
    {
        folder.deleteFiles();
    }

    if (filename.isNullOrEmpty())
    {
        // no filename given: name files by a running counter
        Int32 index = 0;
        foreach (var source in sources.items)
        {
            source.Save(folder, index.ToString());
            index++;
        }
    }
    else
    {
        foreach (var source in sources.items)
        {
            source.Save(folder, filename);
        }
    }

    // recurse into subcollections, each in its own subfolder
    foreach (var subCollection in sources.SubCollections)
    {
        var subFolder = folder.Add(subCollection.name, subCollection.name, "HTML sources subcollection of " + sources.name + ".");
        subCollection.Save(subFolder, filename);
    }
}
/// <summary>
/// Writes sample listings: one document list per category, plus combined lists of
/// all documents, web pages and domains in the dataset.
/// </summary>
/// <param name="folder">Target folder for the listing files</param>
/// <param name="name">Dataset name used in filenames</param>
/// <param name="limit">NOTE(review): currently unused by this method body</param>
public void ReportSample(folderNode folder, String name, Int32 limit)
{
    List<String> allDocumentIDS = new List<string>();

    // one file per category with the names of its documents
    foreach (KeyValuePair<string, List<TextDocument>> pair in categoryNameVsDocumentText)
    {
        String categoryPath = folder.pathFor("documents_" + name + "_" + pair.Key + ".txt", imbSCI.Data.enums.getWritableFileMode.overwrite, "List of documents in category [" + pair.Key + "]");
        var documentNames = pair.Value.Select(x => x.name).ToList();
        File.WriteAllLines(categoryPath, documentNames);
        allDocumentIDS.AddRange(documentNames);
    }

    // combined lists
    String listPath = folder.pathFor("documents_" + name + "_All" + ".txt", imbSCI.Data.enums.getWritableFileMode.overwrite, "List of documents in dataset");
    File.WriteAllLines(listPath, allDocumentIDS);

    listPath = folder.pathFor("webpages_" + name + ".txt", imbSCI.Data.enums.getWritableFileMode.overwrite, "List of documents in dataset");
    File.WriteAllLines(listPath, pages);

    listPath = folder.pathFor("webdomains_" + name + ".txt", imbSCI.Data.enums.getWritableFileMode.overwrite, "List of domains in dataset");
    File.WriteAllLines(listPath, domains);
}
/// <summary>
/// Saves the collection.
/// </summary>
/// <param name="collection">The collection.</param>
/// <param name="delete_all_existing_records">if set to <c>true</c> [delete all existing records].</param>
/// <param name="response">The response.</param>
/// <returns>Response describing where records were stored and the save status</returns>
public override RecordProviderResponse SaveCollection(TCollection collection, Boolean delete_all_existing_records = false, RecordProviderResponse response = null)
{
    if (response == null)
    {
        response = new RecordProviderResponse();
    }

    folderNode targetFolder = GetFolderForCollection(collection.name);

    if (delete_all_existing_records)
    {
        // wipe every previously stored record file for this collection
        var existing_records = targetFolder.findFiles(GetRecordFilename("*"));
        foreach (var file in existing_records)
        {
            File.Delete(file);
        }
    }

    // serialize each record to its own XML file
    foreach (TRecord record in collection.items)
    {
        String filename = GetRecordFilename(record.GetUID());
        String path = targetFolder.pathFor(filename, Data.enums.getWritableFileMode.overwrite, "Serialized data for [" + record.GetType().Name + "]");
        objectSerialization.saveObjectToXML((TRecord)record, path);
    }

    // response is guaranteed non-null here (created above when the caller passed none),
    // so the original trailing null check was redundant and has been removed
    response.Path = targetFolder.path;
    response.status |= RecordProviderResponseStatus.saved;

    return (response);
}
/// <summary>
/// Scans the subdirectories of given folder and creates summaries for each experiment group,
/// where groups are formed following the structure of subdirectory tree
/// </summary>
/// <param name="_folder">The folder.</param>
/// <param name="reportSubDirectory">The report sub directory - name of the subdirectory where the summary secondary report is stored</param>
/// <param name="logger">The logger.</param>
public static void ScanSubdirectories(folderNode _folder, String reportSubDirectory, ILogBuilder logger)
{
    List<String> setupFiles = _folder.findFiles("experimentSetup.xml", SearchOption.AllDirectories);
    // (the original declared an unused List<DirectoryInfo> here — removed)

    // each distinct parent directory of an experimentSetup.xml defines one experiment group
    Dictionary<String, DirectoryInfo> groupDirectories = new Dictionary<string, DirectoryInfo>();
    foreach (String setupFile in setupFiles)
    {
        DirectoryInfo setupDir = new DirectoryInfo(Path.GetDirectoryName(setupFile));
        if (setupDir.Parent != null)
        {
            if (!groupDirectories.ContainsKey(setupDir.Parent.FullName))
            {
                groupDirectories.Add(setupDir.Parent.FullName, setupDir.Parent);
            }
        }
    }

    logger.log("Experiment groups detected: " + groupDirectories.Count);

    // build and save a secondary report for every group, in parallel
    Parallel.ForEach(groupDirectories, pair =>
    {
        var groupReport = new secondaryReport(pair.Value);
        groupReport.Load(pair.Value, logger);
        groupReport.GetAndSaveDataTable(reportSubDirectory, logger);
    });
}