/// <summary>
/// Applies the context properties to the specified <paramref name="builder" />.
/// </summary>
/// <param name="builder">The builder to copy the properties to.</param>
public void Apply(ILogBuilder builder)
{
    // Nothing to copy when no property dictionary has been created yet.
    var properties = GetDictionary();
    if (properties == null)
    {
        return;
    }

    foreach (var entry in properties)
    {
        builder.Property(entry.Key, entry.Value);
    }
}
/// <summary>
/// Loads settings from the path or returns default
/// </summary>
/// <param name="path">The path; the sentinel value "*" forces the defaults.</param>
/// <param name="logger">The logger.</param>
/// <returns>Deserialized settings, or the default settings when the path is "*" or missing.</returns>
public static WebSiteDataSetReductionSettings LoadOrDefault(String path, ILogBuilder logger)
{
    // "*" is a sentinel meaning "use built-in defaults"; a missing file
    // falls back to the same defaults.
    if (path == "*" || !File.Exists(path))
    {
        return GetDefaultReductionSettings();
    }

    return objectSerialization.loadObjectFromXML<WebSiteDataSetReductionSettings>(path, logger);
}
/// <summary>
/// Copies this context's defaults (logger name and properties) onto the given builder.
/// </summary>
/// <param name="builder">The builder to receive the defaults.</param>
/// <returns>The same <paramref name="builder"/> instance, for chaining.</returns>
private ILogBuilder MergeDefaults(ILogBuilder builder)
{
    // Carry over the configured logger name, when there is one.
    if (!String.IsNullOrEmpty(Name))
    {
        builder.Logger(Name);
    }

    // Only touch the lazy property bag if it was ever materialized —
    // probing .Value here would force its creation as a side effect.
    if (_properties.IsValueCreated)
    {
        _properties.Value.Apply(builder);
    }

    return builder;
}
/// <summary>
/// Writes a numbered description of the given dimensions under the supplied heading.
/// </summary>
/// <param name="logger">Target log builder.</param>
/// <param name="dimensions">Dimensions to describe; nothing is emitted when empty.</param>
/// <param name="heading">Heading line printed before the list.</param>
protected void DescribeDimensions(ILogBuilder logger, List<dimensionSpecification> dimensions, string heading)
{
    // FIX: the original tested dimensions.Any() but then iterated the
    // labelDimensions field, so the parameter was never the list described.
    if (dimensions.Any())
    {
        logger.AppendLine(heading);
        logger.nextTabLevel();
        for (int i = 0; i < dimensions.Count; i++)
        {
            dimensionSpecification ds = dimensions[i];
            logger.AppendPair("[" + i.ToString("D2") + "]", ds.functionName, true, "\t\t\t");
        }
        logger.prevTabLevel();
    }
}
// Executes this constructor stage: splits scheduled tasks into files, buffers
// the produced lines, removes them from the schedule and ticks the session.
protected override void stageExecute(ILogBuilder response)
{
    // NOTE(review): splitPath is computed but never used below — confirm whether
    // it was meant to be passed to Split()/saveSlits() instead of the paths used there.
    string splitPath = semanticLexiconManager.manager.constructor.projectFolderStructure[lexiconConstructorProjectFolder.splits].path;

    // Split the scheduled tasks into "<splits>\{0}.txt" files, filtered by language evaluation.
    fileTextSplitResultSet splits = state.scheduledTasks.Split(lexiconConstructorProjectFolder.splits.ToString().add("{0}.txt", "\\"), corpusFilterLanguageEval, false);

    // Persist the split files (overwrites any previous output).
    splits.saveSlits(semanticLexiconManager.manager.constructor.projectFolderStructure.path, getWritableFileMode.overwrite);

    // Buffer the split lines and drop them from the pending schedule.
    state.processedBuffer.AddRange(splits.getLines());
    state.scheduledTasks.Remove(splits.getLineNumbers(true));

    // Record progress and mark the stage finished.
    state.stateSessionTick(this, true);
    stageComplete(response);
}
/// <summary>
/// 2013a: Preparation call.
/// The content is not important — only that this method is called, because the call
/// guarantees that all of the given static collections are instantiated.
/// </summary>
/// <param name="dbSource">Optional database source; when provided, tables are (re)loaded from it and saved back.</param>
/// <param name="logger">Optional logger passed through to the table loads.</param>
public static void prepare(dataBaseTarget dbSource = null, ILogBuilder logger = null)
{
    //Int32 entries = topLevelDomains.loadItems();
    if (dbSource != null)
    {
        // Load from database tables, overwriting any in-memory entries, then persist.
        // NOTE(review): countries uses nameof() as-is while the other two lowercase it — confirm the table naming is intentional.
        countries.Load(dbSource.GetTable(nameof(countries)), logger, objectTableUpdatePolicy.overwrite);
        topLevelDomains.Load(dbSource.GetTable(nameof(topLevelDomains).ToLower()), logger, objectTableUpdatePolicy.overwrite);
        whoIsServers.Load(dbSource.GetTable(nameof(whoIsServers).ToLower()), logger, objectTableUpdatePolicy.overwrite);
        countries.Save();
        topLevelDomains.Save();
        whoIsServers.Save();
    }
    else
    {
        // No database source: load the collections from their default storage.
        countries.Load();
        topLevelDomains.Load();
        whoIsServers.Load();
        //string tld_path = appManager.Application.folder_resources.findFile("tld.xlsx", SearchOption.AllDirectories);
        //DataTable dt = tld_path.deserializeDataTable(imbSCI.Data.enums.reporting.dataTableExportEnum.excel);
        //foreach(DataRow dr in dt.Rows)
        //{
        //    imbTopLevelDomain tld = new imbTopLevelDomain();
        //    tld.domainName = dr[0].toStringSafe();
        //    tld.nic = dr[3].toStringSafe("");
        //    tld.domainName = dr[1].toStringSafe();
        //    tld.countryName = dr[4].toStringSafe("international");
        //}
    }

    // Feed every loaded TLD into the domain manager and finalize it.
    imbDomainManager.prepare();
    List<imbTopLevelDomain> tlds = topLevelDomains.GetList(); //.selectItems<imbTopLevelDomain>("doPreload=1");
    foreach (imbTopLevelDomain t in tlds)
    {
        imbDomainManager.AllDomains.Add(t);
    }
    imbDomainManager.afterLoad();
}
/// <summary>
/// Loads C# XML documentation from the file
/// </summary>
/// <param name="source">The source.</param>
/// <param name="log">The log.</param>
public void LoadXML(XmlDocument source, ILogBuilder log = null)
{
    XmlNodeList members = source.SelectNodes("member");
    foreach (XmlNode node in members)
    {
        String stringPath = node.Attributes[nameof(memberRegistryEntry.name)].Value;
        Match mch = memberRegistryTools.regex_SelectMethodPath.Match(stringPath);
        memberRegistryEntryType memberType = memberRegistryEntryType.entry_unknown;
        String _path = "";
        String _name = "";

        // FIX: the original switched on Group.Index, which is the capture's
        // character position in the input string — not the group number.
        // Iterate by group number so cases 1/2/3 mean "regex group 1/2/3".
        for (Int32 g = 1; g < mch.Groups.Count; g++)
        {
            Group mc = mch.Groups[g];
            switch (g)
            {
                case 1:
                    // Group 1: member-kind prefix (M:, T:, P:, ...).
                    memberType = memberRegistryTools.GetEnum(mc.Value);
                    break;

                case 2:
                    // Group 2: declaring type / namespace path.
                    _path = mc.Value;
                    break;

                case 3:
                    // Group 3: member name; normalized so it can be used as a path segment.
                    _name = mc.Value.Replace(".", "_");
                    _name = _name.Replace(",", "_");
                    _path += "_" + _name;
                    break;
            }
        }

        memberRegistryEntry entry = AddOrGetByPath(_path);
        if (entry.member == null)
        {
            entry.memberType = memberType;
            entry.deployNode(node);
        }

        if (log != null)
        {
            log.log("XML documentation loaded for: " + stringPath);
        }
    }
}
/// <summary>
/// Prepares weight computation task for lemmas provided
/// </summary>
/// <param name="_lemmas">The lemmas.</param>
/// <param name="_loger">Log builder used during computation.</param>
/// <param name="_forSingleWebsite">True when the task covers a single web site.</param>
/// <param name="_settings">Constructor settings for the weighting model.</param>
public kernelComputeWeightTask(List<webLemmaTerm> _lemmas, ILogBuilder _loger, Boolean _forSingleWebsite, wlfConstructorSettings _settings)
{
    lemmas = _lemmas;
    loger = _loger;
    forSingleWebSite = _forSingleWebsite;
    settings = _settings;

    // Single pass over the lemmas: track the three frequency maxima and the
    // running total of term frequencies.
    foreach (webLemmaTerm term in lemmas)
    {
        if (term.documentSetFrequency > documentSetFrequencyMax)
        {
            documentSetFrequencyMax = term.documentSetFrequency;
        }
        if (term.documentFrequency > documentFrequencyMax)
        {
            documentFrequencyMax = term.documentFrequency;
        }
        if (term.termFrequency > termFrequencyMax)
        {
            termFrequencyMax = term.termFrequency;
        }
        termFrequencyTotal += term.termFrequency;
    }

    termCount = lemmas.Count;
}
/// <summary>
/// Resolves the plugin by name or directory.name path
/// </summary>
/// <param name="plugin_name">Name of the plugin.</param>
/// <param name="output">The output; suppressed entirely when DOSHOWLOGS is off.</param>
/// <returns>The resolved plugin <see cref="Type"/>, or null when no match is found.</returns>
protected Type resolvePlugin(String plugin_name, ILogBuilder output)
{
    if (!DOSHOWLOGS)
    {
        output = null;
    }

    // 1) Exact short-name match — unless the short name is banned (ambiguous).
    // TryGetValue avoids the ContainsKey + indexer double lookup.
    if (!bannedShortNames.ContainsKey(plugin_name)
        && pluginTypesByName.TryGetValue(plugin_name, out Type byName))
    {
        if (output != null)
        {
            output.log("Plugin class [" + plugin_name + "] class resolved. ");
        }
        return byName;
    }

    // 2) directory.name path match.
    if (pluginTypesByPathName.TryGetValue(plugin_name, out Type byPath))
    {
        if (output != null)
        {
            output.log("Plugin class [" + plugin_name + "] class resolved. ");
        }
        return byPath;
    }

    // 3) Last resort: normalized ("dirty") form of the name.
    if (supportDirtyNaming)
    {
        String dirtyName = getDirtyForm(plugin_name);
        if (dirtyDictionary.TryGetValue(dirtyName, out Type dirty))
        {
            return dirty;
        }
    }

    if (output != null)
    {
        output.log("Plugin class [" + plugin_name + "] not found.");
    }
    return null;
}
/// <summary>
/// Deploys the filter, runs feature selection over the space model and saves
/// both the weighted (pre-filter) and selected dictionaries to the folder.
/// </summary>
/// <param name="log">Log builder for progress output.</param>
/// <param name="_space">Space model to select features from.</param>
/// <param name="folder">Folder to deploy into and save the dictionaries to.</param>
public void DeployAndRun(ILogBuilder log, SpaceModel _space, folderNode folder)
{
    filter.Deploy(log, folder);

    weightedFeatures = new WeightDictionary(name + "_weg" + filter.limit.ToString(), "weighted features, before filter");
    // FIX: corrected "featyres" typo in the stored description text.
    selectedFeatures = new WeightDictionary(name + "_sel" + filter.limit.ToString(), "selected weighted features");

    // Run the selection; the filter also populates weightedFeatures as it goes.
    var selected = filter.SelectFeatures(_space, log, folder, weightedFeatures);
    foreach (var pair in selected)
    {
        selectedFeatures.AddEntry(pair.Key, pair.Value);
    }

    weightedFeatures.Save(folder, log, WeightDictionary.GetDictionaryFilename(weightedFeatures.name, folder));
    selectedFeatures.Save(folder, log, WeightDictionary.GetDictionaryFilename(selectedFeatures.name, folder));
}
/// <summary>
/// Completes the stage: dumps all lemma names to the task output file, copies
/// the failed-task log into the splits folder and ticks the session state.
/// </summary>
/// <param name="response">Log builder for the stage (unused here).</param>
public override void stageComplete(ILogBuilder response)
{
    // Collect the name of every lemma currently in the lexicon context.
    List<string> lemmas = new List<string>();
    foreach (ITermLemma term in semanticLexiconManager.manager.lexiconContext.TermLemmas)
    {
        lemmas.Add(term.name);
    }
    lemmas.saveContentOnFilePath(state.folder.pathFor(taskOutputPath));

    // Preserve the failure log next to the split files (overwrite allowed).
    state.failedTasks.file.CopyTo(semanticLexiconManager.manager.constructor.projectFolderStructure[lexiconConstructorProjectFolder.splits].pathFor("corpus_explore_fails.csv"), true);
    //state.failedTasks.sa

    state.stateSessionTick(this, true);
}
/// <summary>
/// Loads every validation case named in <c>valCaseNames</c> from the folder,
/// wires it back to this master instance, deploys it and adds it to the items.
/// </summary>
/// <param name="folder">Folder to load from; when null, the current folder is kept.</param>
/// <param name="output">Log builder for load diagnostics.</param>
public void OnLoad(folderNode folder, ILogBuilder output)
{
    if (folder != null)
    {
        this.folder = folder;
    }

    foreach (String caseName in valCaseNames)
    {
        kFoldValidationCase validationCase = caseName.LoadDataStructure<kFoldValidationCase>(this.folder, output);

        // Re-attach the deserialized case (and its sub-collections) to this master.
        validationCase.kFoldMaster = this;
        validationCase.evaluationCases.kFoldMaster = this;
        validationCase.trainingCases.kFoldMaster = this;
        validationCase.id = items.Count;

        DeployCase(validationCase);
        items.Add(validationCase);
    }
}
/// <summary>
/// Loads a <see cref="dimensionSpecificationSet"/> from the given XML file.
/// </summary>
/// <param name="filepath">Path of the XML file to load.</param>
/// <param name="logger">Log builder passed to the deserializer.</param>
/// <returns>The deserialized set.</returns>
/// <exception cref="ArgumentNullException">When the path is null/empty or the file does not exist.</exception>
public static dimensionSpecificationSet Load(String filepath, ILogBuilder logger)
{
    if (filepath.isNullOrEmpty())
    {
        // FIX: this overload is (paramName, message) — the original had the
        // two arguments swapped, producing a garbled exception.
        throw new ArgumentNullException(nameof(filepath), "Filepath for dimensionSpecificationSet.Load is null or empty");
    }
    if (!File.Exists(filepath))
    {
        // FIX: swapped arguments corrected and message de-garbled.
        // NOTE(review): FileNotFoundException would be more accurate here; the
        // type is kept so existing catch blocks keep working.
        throw new ArgumentNullException(nameof(filepath), "File [" + filepath + "] not found, for dimensionSpecificationSet.Load");
    }

    var output = objectSerialization.loadObjectFromXML<dimensionSpecificationSet>(filepath, logger);
    return output;
}
/// <summary>
/// Persists all log content: an optional external (console) log, the
/// constructor output log, and — when present — the console help as markdown.
/// </summary>
/// <param name="externalLog">Optional external log whose content is saved alongside the constructor log.</param>
public void saveAll(ILogBuilder externalLog = null)
{
    // External/console log is only written when one was supplied.
    if (externalLog != null)
    {
        saveBase.saveToFile(projectFolderStructure[lexiconConstructorProjectFolder.logs].pathFor("console_response_log.txt"), externalLog.logContent);
    }

    // The constructor's own log always overwrites the previous file.
    output.logContent.saveStringToFile(projectFolderStructure[lexiconConstructorProjectFolder.logs].pathFor("constructor_log.txt"), getWritableFileMode.overwrite);

    // Save the console help only when there is any content to write.
    string help = manager.console.helpContent;
    if (!help.isNullOrEmpty())
    {
        help.saveStringToFile(projectFolderStructure[lexiconConstructorProjectFolder.scripts].pathFor("help.md"));
    }
}
/// <summary>
/// Describes the specified logger.
/// </summary>
/// <param name="logger">The logger.</param>
public override void Describe(ILogBuilder logger)
{
    if (!instructions.Any())
    {
        logger.AppendLine("No rendering instructions");
        return;
    }

    logger.AppendLine("Rendering instructions:");

    // 1-based, zero-padded ordinal for each instruction line.
    Int32 ordinal = 1;
    foreach (var instruction in instructions)
    {
        logger.AppendLine($"[{ordinal:D2}] {instruction.name} Code=[{instruction.code}] Flags=[{instruction.instructionFlags}]");
        ordinal++;
    }
}
/// <summary>
/// Sends the specified message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="loger">The loger.</param>
/// <returns>True when the message was handed to the SMTP server; false when sending threw.</returns>
public Boolean Send(emailMessage message, ILogBuilder loger)
{
    try
    {
        smtpServer.Send(message.createMailObject());
        loger.log(message.address + " sent");
        return true;
    }
    catch (Exception ex)
    {
        // Best-effort delivery: failures are logged per-address, not rethrown.
        loger.log(message.address + " failed [" + ex.Message + "]");
        return false;
    }
}
/// <summary>
/// Logs a multiline description of the gramCaseSet
/// </summary>
/// <param name="log">The log.</param>
/// <param name="expanded">When true, each set's flags are described on extra lines.</param>
public void ToString(ILogBuilder log, bool expanded = false)
{
    log.AppendLine("Grammatical sets");

    int index = 0;
    foreach (KeyValuePair<string, gramFlags> item in items)
    {
        log.AppendLine("[" + index + "] " + item.Key);
        if (expanded)
        {
            // NOTE: !expanded is always false here — preserved from the
            // original call (compact form of the flags description).
            log.AppendLine(item.Value.ToString(!expanded));
            log.AppendLine("");
        }
        index++;
    }
}
// Loads the lexicon cache files: the negative-query list and the encoding
// twins; rebuilds the twins from the context when none were loaded.
protected void LoadCacheFiles(ILogBuilder loger, semanticLexiconContext context)
{
    // Negative cache: queries already known to have no result.
    failedQueries = new fileunit(folder.pathFor("lexiconCache_negatives.txt"), true);
    loger.log("Negative queries loaded");

    // Seed the temp structures with the known-failed queries.
    AddTemp(failedQueries.contentLines, loger, true, true);

    loger.log("Loading encoding twins");
    twins.Load(twinsSavePath, loger);
    loger.log("Encoding twins loaded");

    // Empty twins file (or first run): rebuild the twins from the lexicon context.
    if (twins.Count == 0)
    {
        rebuildEncodedTwins(loger, context);
    }

    // Persist the negative cache (fileunit may normalize/flush on save).
    failedQueries.Save();
}
/// <summary>
/// Adds every non-null-keyed pair from the collection as a property on the builder.
/// </summary>
/// <param name="builder">The builder to add properties to.</param>
/// <param name="collection">Key/value pairs to copy; may be null.</param>
/// <returns>The same <paramref name="builder"/>, for chaining.</returns>
public static ILogBuilder Properties(this ILogBuilder builder, ICollection<KeyValuePair<string, string>> collection)
{
    if (collection != null)
    {
        foreach (var pair in collection)
        {
            // Pairs with a null key cannot be stored as properties — skip them.
            if (pair.Key == null)
            {
                continue;
            }
            builder.Property(pair.Key, pair.Value);
        }
    }

    return builder;
}
/// <summary>
/// Marks the event as being a unhandled occurrence and sets the submission method.
/// </summary>
/// <param name="builder">The log builder object.</param>
/// <param name="submissionMethod">The submission method.</param>
public static ILogBuilder MarkUnhandled(this ILogBuilder builder, string submissionMethod = null)
{
    var contextData = builder.GetContextData();

    // No context data attached — nothing to mark.
    if (contextData == null)
    {
        return builder;
    }

    contextData.MarkAsUnhandledError();

    if (!String.IsNullOrEmpty(submissionMethod))
    {
        contextData.SetSubmissionMethod(submissionMethod);
    }

    return builder;
}
/// <summary>
/// Gets the SSRM - computes the SSRM Similarity
/// </summary>
/// <param name="lemmas">The lemmas.</param>
/// <param name="logger">The logger; may be null (warnings are then skipped).</param>
/// <param name="debug">The debug.</param>
/// <returns>Ratio of the weighted-overlap sum to the pair-weight sum; 0 when nothing matched.</returns>
public Double GetSSRM(webLemmaTermPairCollection lemmas, ILogBuilder logger = null, SSRMComputation debug = null)
{
    Double upper = 0;
    Double lowerA = 0;
    Int32 i = 0;

    foreach (webLemmaTermPair wlta in lemmas)
    {
        // Only pairs whose A-term exists as a node in this graph contribute.
        if (ContainsNode(wlta.entryA.name))
        {
            i++;
            var node = GetNode(wlta.entryA.name);

            // Numerator: pair weight scaled by the node's own weight.
            upper += wlta.entryA.weight * wlta.entryB.weight * node.weight;
            lowerA += wlta.entryA.weight * wlta.entryB.weight;

            if (debug != null)
            {
                debug.printTerm(i, wlta.entryA.name, wlta.entryA.weight, wlta.entryB.weight, node.weight, upper, lowerA);
            }
        }
    }

    Double output = upper.GetRatio(lowerA);

    if (debug != null)
    {
        debug.upper = upper;
        debug.lower = lowerA;
        debug.similarity = output;
        debug.terms = i;
        debug.printFinale();
    }

    if (output == 0)
    {
        // FIX: logger defaults to null — the original dereferenced it here
        // unconditionally, throwing NullReferenceException on a 0 score.
        if (logger != null)
        {
            logger.log("Semantic similarity returned 0 score!");
        }
    }

    return output;
}
/// <summary>
/// Writes, per fold, a per-category list of training ([T]) and evaluation ([E])
/// cases into the notes builder; stores the note and its MD5 hash and returns the note.
/// </summary>
/// <param name="modelNotes">Notes builder; a fresh one is created when null.</param>
/// <returns>The distribution note generated from the start of this call.</returns>
public String DescribeSampleDistribution(ILogBuilder modelNotes)
{
    if (modelNotes == null)
    {
        modelNotes = new builderForLog();
    }

    // Remember where this call started writing, so only the new content is captured.
    var startLength = modelNotes.Length;

    foreach (var fold in GetCases())
    {
        modelNotes.AppendHeading("Fold: " + fold.name, 2);

        // Group case names by category, tagging each with its role.
        var casesByCategory = new aceDictionarySet<String, String>();
        foreach (validationCaseCollection collection in fold.trainingCases)
        {
            foreach (string caseName in collection)
            {
                casesByCategory.Add(collection.className, "[T] " + caseName);
            }
        }
        foreach (validationCaseCollection collection in fold.evaluationCases)
        {
            foreach (string caseName in collection)
            {
                casesByCategory.Add(collection.className, "[E] " + caseName);
            }
        }

        foreach (var category in casesByCategory.Keys)
        {
            modelNotes.AppendHeading("Category: " + category, 3);
            foreach (var line in casesByCategory[category])
            {
                modelNotes.AppendLine(line);
            }
        }
    }

    SampleDistributionNote = modelNotes.GetContent(startLength);
    SampleDistributionHash = md5.GetMd5Hash(SampleDistributionNote);
    return SampleDistributionNote;
}
/// <summary>
/// Processes the specified input.
/// </summary>
/// <param name="_input">The input.</param>
/// <param name="logger">The logger.</param>
/// <returns>The chunks produced by the final (or aborted) iteration — or all
/// iterations' chunks when <c>settings.keepAllInOutput</c> is set.</returns>
public virtual List<T> process(IEnumerable<T> _input, ILogBuilder logger)
{
    List<T> output = new List<T>();
    List<T> next = new List<T>();
    next = _input.ToList();

    while (currentIteration > 0)
    {
        // Expand every pending item into its next-iteration chunks.
        List<T> MCNext = new List<T>();
        foreach (T sub in next)
        {
            MCNext.AddRange(processIteration(sub), true);
        }

        if (settings.keepAllInOutput)
        {
            output.AddRange(MCNext, true);
        }
        else
        {
            output = MCNext;
        }

        logger.log("[" + currentIteration + "] chunk construction in[" + next.Count + "] new[" + MCNext.Count + "] out[" + output.Count + "]");

        // NOTE(review): this compares counts, not contents — confirm that equal
        // counts reliably mean "no changes" for this pipeline.
        if (next.Count == output.Count)
        {
            // FIX: corrected "iteation" typo in the log message.
            logger.log("Aborting the process since last iteration produced no changes");
            break;
        }

        next = MCNext.ToList();
        if (MCNext.Count == 0)
        {
            break;
        }
        currentIteration--;
    }

    return output;
}
/// <summary>
/// Builds vectors from selected features and feature weighting model
/// </summary>
/// <param name="context">The context.</param>
/// <param name="log">The log.</param>
/// <param name="constructCategories">When true, category vectors are built as well.</param>
public void VectorSpaceConstruction(OperationContext context, ILogBuilder log, Boolean constructCategories = false)
{
    List<string> FV = context.SelectedFeatures.GetKeys(); //.entries.Select(x => x.name).ToList();
    //FV.AddRange();

    log.log("Preparing Weight model [" + weightModel.GetSignature() + "] - feature selection [" + FV.Count() + "]");

    // preparing the model
    weightModel.PrepareTheModel(context.spaceModel, log);

    Int32 i = 0;
    // FIX: for fewer than 10 documents the original step was 0, making
    // "i % s" throw DivideByZeroException. Clamp the progress step to >= 1.
    Int32 s = Math.Max(1, context.spaceModel.documents.Count / 10);

    // building document VSM
    foreach (SpaceDocumentModel docModel in context.spaceModel.documents)
    {
        var wd = weightModel.GetWeights(FV, docModel, context.spaceModel);
        VectorDocument docVec = new VectorDocument(docModel.name);
        docVec.terms = wd;
        context.vectorSpace.documents.Add(docVec);

        // Progress log roughly every 10% of documents.
        if (i % s == 0)
        {
            Double r = i.GetRatio(context.spaceModel.documents.Count);
            log.log("[" + r.ToString("F2") + "]");
        }
        i++;
    }

    if (constructCategories)
    {
        // logger.log(":: Creating VectorSpace instances for categories");
        // building category VSM
        foreach (SpaceCategoryModel catModel in context.spaceModel.categories)
        {
            var wd = weightModel.GetWeights(FV, catModel, context.spaceModel);
            VectorLabel catVec = new VectorLabel(catModel.name);
            catVec.terms = wd;
            context.vectorSpace.labels.Add(catVec);
        }
    }
}
/// <summary>
/// Describes the unit via specified loger
/// </summary>
/// <param name="loger">The loger.</param>
public void describe(ILogBuilder loger)
{
    // loger.log("deliveryUnit describe() call started");
    loger.AppendHeading("Delivery unit (" + GetType().Name + ")", 2);
    loger.AppendLine("Logical name: " + name);

    // Section 1: flat list of every delivery item with its type, location and description.
    loger.open("items", "Delivery items", "List of all deliveryUnit items contained here");
    foreach (IDeliveryUnitItem item in items)
    {
        //loger.AppendHeading(this.name + " (" + this.GetType().Name + ")", 3);
        loger.AppendLine(" > " + item.name + ":" + item.itemType.ToString());
        loger.AppendLine(" > > Location: " + item.location.ToString());
        loger.AppendLine(" > > Description: " + item.description);
    }
    loger.close();

    // Section 2: items grouped by the report scope level that triggers them.
    // NOTE(review): all three sections reuse the "items" id in open() — confirm
    // whether distinct section ids were intended.
    loger.open("items", "Items by level", "Showing items triggered by scope level");
    reportElementLevel lev = reportElementLevel.none;
    foreach (KeyValuePair<reportElementLevel, List<deliveryUnitItem>> pair in itemByLevel)
    {
        lev = pair.Key;
        foreach (deliveryUnitItem it in pair.Value)
        {
            loger.AppendLine(lev.ToString() + " --> " + it.name + " (" + it.GetType().Name + ")");
        }
    }
    loger.close();

    // Section 3: the subset designated as output items, again grouped by scope level.
    loger.open("items", "Output by level", "Showing items designated as output items and triggered by scope level");
    foreach (KeyValuePair<reportElementLevel, List<deliveryUnitItem>> pair in outputByLevel)
    {
        lev = pair.Key;
        foreach (deliveryUnitItem it in pair.Value)
        {
            loger.AppendLine(lev.ToString() + " --> " + it.name + " (" + it.GetType().Name + ")");
        }
    }
    loger.close();
    // loger.log("deliveryUnit describe() call finished");
}
/// <summary>
/// Builds the specified clouds.
/// </summary>
/// <param name="clouds">The clouds.</param>
/// <param name="logger">Log builder for the summary lines.</param>
public void build(IEnumerable<lemmaSemanticCloud> clouds, ILogBuilder logger)
{
    var matrix = this;
    MaxOverlap = Int32.MinValue;
    MinOverlap = Int32.MaxValue;
    Int32 ci = 0;
    Int32 oc = 0;

    foreach (lemmaSemanticCloud x in clouds)
    {
        foreach (lemmaSemanticCloud y in clouds)
        {
            if (matrix[x, y] == null)
            {
                // FIX: overlap is symmetric — compute it once and mirror it
                // into both cells (the original called GetOverlap twice on
                // the diagonal for the same result).
                List<freeGraphNodeBase> overlap = x.GetOverlap(y);
                matrix[x, y] = overlap;
                matrix[y, x] = overlap;

                // Self-overlap is excluded from the min/max statistics.
                if (x != y)
                {
                    MinOverlap = Math.Min(MinOverlap, overlap.Count);
                    MaxOverlap = Math.Max(MaxOverlap, overlap.Count);
                    oc++;
                }
            }
        }
        ci++;
        numberOfLemmas.Add(x, x.CountNodes());
        numberOfLinks.Add(x, x.CountLinks());
    }

    // A matrix with fewer than 3 clouds is considered degenerate (debug builds only).
    if (ci < 3)
    {
#if DEBUG
        throw new aceScienceException("MATRIX CLOUD PROBLEM", null, this, "MATRIX");
#endif
    }

    logger.log("Semantic Cloud matrix built from [" + ci + "] clouds - overlaps counted [" + oc + "]");
    logger.log("Max. overlap [" + MaxOverlap + "] - Min. overlap [" + MinOverlap + "]");
}
/// <summary>
/// Spaces the model categories.
/// </summary>
/// <param name="context">The context.</param>
/// <param name="log">The log.</param>
public void SpaceModelCategories(OperationContext context, ILogBuilder log)
{
    log.log("Space model categories");

    foreach (SpaceLabel label in context.spaceModel.labels)
    {
        // The UNKNOWN pseudo-label never gets a category model.
        if (label.name == SpaceLabel.UNKNOWN)
        {
            continue;
        }

        var linkedDocuments = context.spaceModel.LabelToDocumentLinks.GetAllLinked(label);
        SpaceCategoryModel categoryModel = new SpaceCategoryModel(label, linkedDocuments);

        context.spaceModel.LabelToCategoryLinks.Add(label, categoryModel, 1);
        context.spaceModel.categories.Add(categoryModel);
        // notes.log("Class [" + categoryModel.name + "] BoW model created - terms[" + categoryModel.terms.Count + "] ");
    }
}
/// <summary>
/// Classifies the target vector with the machine matching the configured model.
/// </summary>
/// <param name="target">Vector whose dimensions are fed to the SVM.</param>
/// <param name="logger">Log builder (unused here).</param>
/// <returns>The decided class index; 0 for an unrecognized model value.</returns>
public override int DoSelect(FeatureVector target, ILogBuilder logger)
{
    switch (model)
    {
        case mSVMModels.linear:
            return machine.Decide(target.dimensions);

        case mSVMModels.gaussian:
            return machineGaussian.Decide(target.dimensions);

        default:
            // Unknown model: fall back to the default label, as the original did.
            return 0;
    }
}
/// <summary>
/// Wires the feed generator's dependencies and creates its logger.
/// </summary>
/// <param name="categoryService">Category lookup service.</param>
/// <param name="logBuilder">Factory used to create this type's logger.</param>
/// <param name="orderCalculator">Order price calculator.</param>
/// <param name="storeInformationSettings">Store information settings.</param>
/// <param name="mediaSettings">Media settings.</param>
public AffiliateWindowFeedGenerator(
    ICategoryService categoryService,
    ILogBuilder logBuilder,
    IOrderCalculator orderCalculator,
    StoreInformationSettings storeInformationSettings,
    MediaSettings mediaSettings)
{
    _categoryService = categoryService;
    _storeInformationSettings = storeInformationSettings;
    _mediaSettings = mediaSettings;
    _orderCalculator = orderCalculator;

    _logger = logBuilder.CreateLogger(GetType().FullName);
    if (_logger == null)
    {
        // FIX: throwing NullReferenceException directly is an anti-pattern
        // (it implies a runtime fault, not a failed precondition) — report
        // the failed logger creation as an invalid state instead.
        throw new InvalidOperationException("Object logger is null as Object logBuilder was failed to create logger.");
    }
}
/// <summary>
/// Gets the domain list.
/// </summary>
/// <param name="options">The options.</param>
/// <param name="logger">Optional logger (currently unused).</param>
/// <returns>One domain per line, optionally normalized.</returns>
public String GetDomainList(WebDomainCategoryFormatOptions options, ILogBuilder logger = null)
{
    StringBuilder builder = new StringBuilder();

    // Normalization is decided once, outside the loop.
    bool normalize = options.HasFlag(WebDomainCategoryFormatOptions.normalizeDomainname);

    foreach (String site in sites)
    {
        String line = site;
        if (normalize)
        {
            // Replace the raw entry with its proper-url form.
            domainAnalysis analysis = new domainAnalysis(site);
            line = analysis.urlProper;
        }
        builder.AppendLine(line);
    }

    return builder.ToString();
}
/// <summary>
/// Sets object instance properties.
/// </summary>
/// <param name="entry">The entry.</param>
/// <param name="log">The log - if null, log is off</param>
public void SetFromEntry(BibTexEntryBase entry, ILogBuilder log = null)
{
    SetDictionary();

    foreach (var tag in entry.Tags)
    {
        // Tags without a mapped property are reported (when logging is on) and skipped.
        if (!propDictionary.ContainsKey(tag.Key))
        {
            if (log != null)
            {
                log.log(entry.Key + "[" + tag.Key + "] - property not declared at [" + GetType().Name + "] - consider using your own class, derived from BibTexEntryModel");
            }
            continue;
        }

        this.imbSetPropertyConvertSafe(propDictionary[tag.Key], tag.Value.Value);
    }
}
/// <summary>
/// Applies the context properties to the specified <paramref name="builder" />.
/// </summary>
/// <param name="builder">The builder to copy the properties to.</param>
public void Apply(ILogBuilder builder)
{
    // FIX: guard against a missing dictionary, matching the other Apply
    // overload in this file — the original would throw NullReferenceException.
    if (_dictionary == null)
        return;

    foreach (var pair in _dictionary)
        builder.Property(pair.Key, pair.Value);
}