/// <summary>
/// Builds the cloud - the part of the algorithm common to case and class clouds
/// </summary>
/// <param name="c">Semantic construct holding the node names, lemma lists and chunks collected for the cloud.</param>
/// <param name="chunkTable">Web lemma table with chunk weights.</param>
/// <param name="termTable">Web lemma table with single-term weights, used when <c>assignTermTableWeightToNode</c> is set.</param>
/// <param name="output">Cloud instance that receives the created nodes and links.</param>
/// <param name="logger">Log builder for diagnostic output.</param>
/// <param name="resolver">Text resource resolver.</param>
/// <returns>The populated semantic cloud (<paramref name="output"/>).</returns>
/// <exception cref="aceScienceException">Thrown when cloudConstructor settings are inconsistent: <c>assignChunkTableWeightToLink</c> is true, but no new link is created when the lemmas are already linked, so the resulting link weight would depend on processing order.</exception>
protected lemmaSemanticCloud BuildCloud(lemmaSemanticConstruct c, webLemmaTermTable chunkTable, webLemmaTermTable termTable, lemmaSemanticCloud output, ILogBuilder logger, ITextResourceResolver resolver)
{
    c.TrashBin.ForEach(x => c.nodeNames.Remove(x));

    foreach (String n in c.nodeNames) // <------------ creating nodes
    {
        Double weight = 0;

        if (settings.assignTermTableWeightToNode)
        {
            var lemma = termTable[n];
            if (lemma != null)
            {
                weight = lemma.weight;
            }
        }
        else
        {
            weight = 1;
        }

        if (weight > 0)
        {
            if (c.isCaseCloud)
            {
                if (settings.doFactorToCaseClouds)
                {
                    if (c.PrimaryTerms.Contains(n))
                    {
                        output.AddNode(n, weight * settings.PrimaryTermWeightFactor, 2);
                    }
                    else if (c.SecondaryTerms.Contains(n))
                    {
                        output.AddNode(n, weight * settings.SecondaryTermWeightFactor, 1);
                    }
                    else
                    {
                        output.AddNode(n, weight * settings.ReserveTermWeightFactor, 0);
                    }
                }
                else
                {
                    output.AddNode(n, weight);
                }
            }
            else
            {
                // class cloud
                if (settings.doFactorToClassClouds)
                {
                    if (c.PrimaryTerms.Contains(n))
                    {
                        output.AddNode(n, weight * settings.PrimaryTermWeightFactor, 2);
                    }
                    else if (c.SecondaryTerms.Contains(n))
                    {
                        output.AddNode(n, weight * settings.SecondaryTermWeightFactor, 1);
                    }
                    else
                    {
                        output.AddNode(n, weight * settings.ReserveTermWeightFactor, 0);
                    }
                }
                else
                {
                    output.AddNode(n, weight);
                }
            }
        }
    }

    foreach (List<String> n in c.lemmasList) // <-------- creating links
    {
        String first = n[0];

        if (c.TrashBin.Contains(first)) { continue; }

        if (output.ContainsNode(first, true))
        {
            foreach (String m in n)
            {
                if (c.TrashBin.Contains(m)) { continue; }

                if (m != first)
                {
                    if (output.ContainsNode(m, true))
                    {
                        Double weight = 1;

                        if (settings.assignChunkTableWeightToLink)
                        {
                            weight = c.weightDict[n].weight;
                        }
                        else
                        {
                            if (settings.doAdjustLinkWeightByChunkSize)
                            {
                                weight = (n.Count - 1).GetRatio(1);
                            }
                            else
                            {
                                weight = 1;
                            }
                        }

                        var link = output.GetLink(first, m);

                        if (link == null)
                        {
                            output.AddLink(first, m, weight);
                        }
                        else
                        {
                            if (settings.doSumExistingLinkWeights)
                            {
                                link.weight += weight;
                            }
                            else
                            {
                                // no new link is created because it already exists -
                                // this is inconsistent when settings.assignChunkTableWeightToLink is true
                                if (settings.assignChunkTableWeightToLink)
                                {
                                    throw new aceScienceException("cloudConstructor settings are inconsistent: assignChunkTableWeightToLink=true, but no new link is created when the lemmas are already linked," +
                                        " so the resulting weight is assigned by chance. Adjust the cloudConstructor settings.", null, this, "cloudConstructor has irrational settings", settings);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    c.primaryChunks.ForEach(x => output.primaryChunks.Add(x.nominalForm));
    c.secondaryChunks.ForEach(x => output.secondaryChunks.Add(x.nominalForm));

    return output;
}
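// Illustrative usage sketch (not part of the original source): BuildCloud is protected, so it is
// normally reached from a construction entry point on this class or a derived type. The helper
// below only shows how a fresh cloud instance might be handed to BuildCloud; the method name
// and the idea that the caller prepares the construct, tables, logger and resolver are assumptions.
private lemmaSemanticCloud BuildCloudSketch(lemmaSemanticConstruct construct, webLemmaTermTable chunkTable, webLemmaTermTable termTable, ILogBuilder logger, ITextResourceResolver resolver)
{
    // a new, empty cloud receives the nodes and links built from the construct
    lemmaSemanticCloud freshCloud = new lemmaSemanticCloud();
    return BuildCloud(construct, chunkTable, termTable, freshCloud, logger, resolver);
}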
/// <summary>
/// Returns an expanded cloud built from the given lemma list - only lemmas matched in this cloud are expanded
/// </summary>
/// <param name="lemmas">Lemmas to match against the cloud; lemmas without a matching node are ignored.</param>
/// <param name="expansionSteps">Number of expansion iterations (link hops) to follow from the matched lemmas.</param>
/// <param name="typeToMin">If true, a merged link keeps the minimum of the existing and incoming type; otherwise the maximum.</param>
/// <param name="options">Options controlling how node weights are derived from parent nodes and link weights.</param>
/// <returns>The expanded cloud subset.</returns>
public lemmaSemanticCloud ExpandTermsToCloud(IEnumerable<String> lemmas, Int32 expansionSteps, Boolean typeToMin = true, lemmaExpansionOptions options = lemmaExpansionOptions.initialWeightFromParent | lemmaExpansionOptions.weightAsSemanticDistanceFromParent)
{
    lemmaSemanticCloud output = new lemmaSemanticCloud();
    output.name = name + "_subset_exp" + expansionSteps;
    output.DisableCheck = true;

    StringBuilder sb = new StringBuilder();
    sb.Append("Subset expanded from matched query lemmas [");

    List<String> nextTerms = new List<string>();
    List<String> allTerms = new List<string>();

    // seeding: only lemmas that already exist as nodes in this cloud become starting points
    foreach (String t in lemmas)
    {
        if (ContainsNode(t))
        {
            sb.Append(t + " ");
            var l = GetNode(t);
            output.AddNode(l.name, l.weight, 0).distance = 1;
            nextTerms.Add(t);
            allTerms.Add(t);
        }
    }

    sb.Append("] using cloud [" + name + "]");
    output.description = sb.ToString();

    Int32 exp_i = 1;

    while (nextTerms.Any())
    {
        List<String> newNextTerms = new List<string>();

        foreach (String t in nextTerms)
        {
            freeGraphNodeAndLinks links = new freeGraphNodeAndLinks();

            if (options.HasFlag(lemmaExpansionOptions.weightAsSemanticDistanceFromParent))
            {
                links = GetLinks(t, true, false, (1).GetRatio(exp_i), exp_i, true, options.HasFlag(lemmaExpansionOptions.initialWeightFromParent));
            }
            else if (options.HasFlag(lemmaExpansionOptions.weightAsSemanticDistanceThatIsSumOfLinkWeights))
            {
                var nd = output.GetNode(t, true); // note: nd is retrieved but not used in this branch
                links = GetLinks(t, true, false, 1, exp_i, true, options.HasFlag(lemmaExpansionOptions.initialWeightFromParent));
            }
            else
            {
                links = GetLinks(t, true, false, 1, exp_i, true, options.HasFlag(lemmaExpansionOptions.initialWeightFromParent));
            }

            foreach (freeGraphLink link in links)
            {
                if (!allTerms.Contains(link.nodeA.name))
                {
                    newNextTerms.Add(link.nodeA.name);
                    allTerms.Add(link.nodeA.name);
                }

                if (link.nodeA.name != t)
                {
                    output.AddOrUpdateNode(link.nodeA, link, links, typeToMin, options);
                }

                if (!allTerms.Contains(link.nodeB.name))
                {
                    newNextTerms.Add(link.nodeB.name);
                    allTerms.Add(link.nodeB.name);
                }

                if (link.nodeB.name != t)
                {
                    output.AddOrUpdateNode(link.nodeB, link, links, typeToMin, options);
                }
            }

            foreach (freeGraphLink link in links)
            {
                if (!output.ContainsLink(link.linkBase.nodeNameA, link.linkBase.nodeNameB))
                {
                    output.AddLink(link.linkBase.nodeNameA, link.linkBase.nodeNameB, Math.Max(link.linkBase.weight, 1), link.linkBase.type);

                    //var nd = output.GetNode(link.nodeB.name);
                    //nd.weight = nd.weight + (link.nodeB.weight.GetRatio(links.linkedNodeClones.Count));
                    //nd.type = Math.Max(nd.type, link.nodeB.type);
                }
                else
                {
                    var lnk = output.GetLink(link.linkBase.nodeNameA, link.linkBase.nodeNameB);
                    lnk.weight += link.linkBase.weight;

                    if (typeToMin)
                    {
                        lnk.type = Math.Min(link.linkBase.type, lnk.type);
                    }
                    else
                    {
                        lnk.type = Math.Max(link.linkBase.type, lnk.type);
                    }
                }
            }
        }

        nextTerms = newNextTerms;
        exp_i++;

        if (exp_i > expansionSteps) break;
    }

    output.DisableCheck = false;
    output.RebuildIndex();

    return output;
}
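// Illustrative usage sketch (not part of the original source): a caller holding this cloud might
// expand the lemmas matched from a query two link hops into the graph, keeping the default
// weighting options. The helper name, the two-step depth and the queryLemmas parameter are
// hypothetical choices made only for this example.
public lemmaSemanticCloud ExpandQuerySketch(IEnumerable<String> queryLemmas)
{
    // expand only matched lemmas, two steps, taking the minimum type when links are merged
    return ExpandTermsToCloud(queryLemmas, 2, true,
        lemmaExpansionOptions.initialWeightFromParent | lemmaExpansionOptions.weightAsSemanticDistanceFromParent);
}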