/// <summary>Checks GetMaxedMarginals() against a brute-force max over every assignment in the factor.</summary>
/// <param name="factor">the factor under test</param>
/// <param name="marginalizeTo">the variable whose maxed marginal is checked; silently skipped if absent from the factor</param>
public virtual void TestGetMaxedMarginals(TableFactor factor, int marginalizeTo)
{
    // Locate the slot of the requested variable inside the factor's neighbor list.
    int variableSlot = -1;
    for (int slot = 0; slot < factor.neighborIndices.Length; slot++)
    {
        if (factor.neighborIndices[slot] == marginalizeTo)
        {
            variableSlot = slot;
            break;
        }
    }
    if (variableSlot == -1)
    {
        // The variable isn't part of this factor, so there is nothing to check.
        return;
    }
    Assume.AssumeTrue(variableSlot > -1);
    // Brute-force reference: the max value seen for each setting of the variable.
    double[] expectedMarginals = new double[factor.GetDimensions()[variableSlot]];
    for (int j = 0; j < expectedMarginals.Length; j++)
    {
        expectedMarginals[j] = double.NegativeInfinity;
    }
    foreach (int[] assignment in factor)
    {
        int setting = assignment[variableSlot];
        expectedMarginals[setting] = Math.Max(expectedMarginals[setting], factor.GetAssignmentValue(assignment));
    }
    Normalize(expectedMarginals);
    Assert.AssertArrayEquals(factor.GetMaxedMarginals()[variableSlot], 1.0e-5, expectedMarginals);
}
/// <summary>Checks SumOut() against sums computed directly from the original factor's assignments.</summary>
/// <param name="factor">the factor under test</param>
/// <param name="marginalize">the variable to sum out; silently skipped if absent or the factor has a single variable</param>
public virtual void TestSumOut(TableFactor factor, int marginalize)
{
    // Nothing to test when the variable is absent or summing out would empty the factor.
    bool present = false;
    foreach (int neighbor in factor.neighborIndices)
    {
        if (neighbor == marginalize)
        {
            present = true;
            break;
        }
    }
    if (!present)
    {
        return;
    }
    if (factor.neighborIndices.Length <= 1)
    {
        return;
    }
    TableFactor summedOut = factor.SumOut(marginalize);
    // The summed-out factor must drop exactly one variable: the marginalized one.
    NUnit.Framework.Assert.AreEqual(factor.neighborIndices.Length - 1, summedOut.neighborIndices.Length);
    bool stillPresent = false;
    foreach (int neighbor in summedOut.neighborIndices)
    {
        if (neighbor == marginalize)
        {
            stillPresent = true;
            break;
        }
    }
    NUnit.Framework.Assert.IsTrue(!stillPresent);
    // Each summed-out entry must equal the brute-force sum over its superset assignments.
    IDictionary<IList<int>, IList<int[]>> subsetToSuperset = SubsetToSupersetAssignments(factor, summedOut);
    foreach (IList<int> subsetKey in subsetToSuperset.Keys)
    {
        double expectedSum = 0.0;
        foreach (int[] superset in subsetToSuperset[subsetKey])
        {
            expectedSum += factor.GetAssignmentValue(superset);
        }
        int[] subsetAssignment = new int[subsetKey.Count];
        for (int j = 0; j < subsetAssignment.Length; j++)
        {
            subsetAssignment[j] = subsetKey[j];
        }
        NUnit.Framework.Assert.AreEqual(summedOut.GetAssignmentValue(subsetAssignment), 1.0e-5, expectedSum);
    }
}
/// <summary>Checks Observe() by comparing each surviving assignment's value against the original factor.</summary>
/// <param name="factor">the factor under test</param>
/// <param name="observe">the variable to observe; silently skipped if absent or the factor has only one variable</param>
/// <param name="value">the value the observed variable is fixed to</param>
public virtual void TestObserve(TableFactor factor, int observe, int value)
{
    // Locate the slot of the observed variable (keeps the last match, like the scan it replaces).
    int observeIndex = -1;
    for (int slot = 0; slot < factor.neighborIndices.Length; slot++)
    {
        if (factor.neighborIndices[slot] == observe)
        {
            observeIndex = slot;
        }
    }
    if (observeIndex == -1)
    {
        return;
    }
    if (factor.neighborIndices.Length == 1)
    {
        return;
    }
    TableFactor observedOut = factor.Observe(observe, value);
    // Every assignment consistent with the observation must keep its original value in the observed factor.
    foreach (int[] fullAssignment in factor)
    {
        if (fullAssignment[observeIndex] != value)
        {
            continue;
        }
        int[] projected = SubsetAssignment(fullAssignment, factor, observedOut);
        NUnit.Framework.Assert.AreEqual(observedOut.GetAssignmentValue(projected), 1.0e-7, factor.GetAssignmentValue(fullAssignment));
    }
}
/// <summary>Checks MaxOut() against maxima computed directly from the original factor's assignments.</summary>
/// <param name="factor">the factor under test</param>
/// <param name="marginalize">the variable to max out; silently skipped if absent or the factor has a single variable</param>
public virtual void TestMaxOut(TableFactor factor, int marginalize)
{
    // Nothing to test when the variable is absent or maxing out would empty the factor.
    bool present = false;
    foreach (int neighbor in factor.neighborIndices)
    {
        if (neighbor == marginalize)
        {
            present = true;
            break;
        }
    }
    if (!present)
    {
        return;
    }
    if (factor.neighborIndices.Length <= 1)
    {
        return;
    }
    TableFactor maxedOut = factor.MaxOut(marginalize);
    // The maxed-out factor must drop exactly one variable: the marginalized one.
    NUnit.Framework.Assert.AreEqual(factor.neighborIndices.Length - 1, maxedOut.neighborIndices.Length);
    bool stillPresent = false;
    foreach (int neighbor in maxedOut.neighborIndices)
    {
        if (neighbor == marginalize)
        {
            stillPresent = true;
            break;
        }
    }
    NUnit.Framework.Assert.IsTrue(!stillPresent);
    // Every original entry must be bounded above by the corresponding maxed-out entry.
    foreach (int[] assignment in factor)
    {
        double originalValue = factor.GetAssignmentValue(assignment);
        NUnit.Framework.Assert.IsTrue(originalValue >= double.NegativeInfinity);
        double ceiling = maxedOut.GetAssignmentValue(SubsetAssignment(assignment, factor, maxedOut));
        NUnit.Framework.Assert.IsTrue(originalValue <= ceiling);
    }
    // Each maxed-out entry must equal the brute-force max over its superset assignments.
    IDictionary<IList<int>, IList<int[]>> subsetToSuperset = SubsetToSupersetAssignments(factor, maxedOut);
    foreach (IList<int> subsetKey in subsetToSuperset.Keys)
    {
        double expectedMax = double.NegativeInfinity;
        foreach (int[] superset in subsetToSuperset[subsetKey])
        {
            expectedMax = Math.Max(expectedMax, factor.GetAssignmentValue(superset));
        }
        int[] subsetAssignment = new int[subsetKey.Count];
        for (int j = 0; j < subsetAssignment.Length; j++)
        {
            subsetAssignment[j] = subsetKey[j];
        }
        NUnit.Framework.Assert.AreEqual(maxedOut.GetAssignmentValue(subsetAssignment), 1.0e-5, expectedMax);
    }
}
/// <summary>Validates CalculateMAP() by building the full joint table and finding its argmax by exhaustive search.</summary>
/// <param name="model">the model whose MAP assignment is being checked</param>
/// <param name="weights">unused in this method body; presumably kept for symmetry with sibling checks — TODO confirm</param>
/// <param name="inference">the clique-tree inference object under test</param>
public virtual void CheckMAPAgainstBruteForce(GraphicalModel model, ConcatVector weights, CliqueTree inference)
{
    int[] map = inference.CalculateMAP();
    // NOTE(review): Map(null) looks like a lambda lost in automated translation (would throw at runtime) — confirm against the original source.
    ICollection<TableFactor> tableFactors = model.factors.Stream().Map(null).Collect(Collectors.ToSet());
    // this is the super slow but obviously correct way to get global marginals
    TableFactor bruteForce = null;
    foreach (TableFactor factor in tableFactors)
    {
        if (bruteForce == null)
        {
            bruteForce = factor;
        }
        else
        {
            bruteForce = bruteForce.Multiply(factor);
        }
    }
    System.Diagnostics.Debug.Assert((bruteForce != null));
    // observe out all variables that have been registered
    TableFactor observed = bruteForce;
    foreach (int n in bruteForce.neighborIndices)
    {
        if (model.GetVariableMetaDataByReference(n).Contains(CliqueTree.VariableObservedValue))
        {
            int value = System.Convert.ToInt32(model.GetVariableMetaDataByReference(n)[CliqueTree.VariableObservedValue]);
            if (observed.neighborIndices.Length > 1)
            {
                observed = observed.Observe(n, value);
            }
            else
            {
                // If we've observed everything, then just quit
                return;
            }
        }
    }
    bruteForce = observed;
    // Find the largest variable id so the assignment arrays below can be indexed by true variable number.
    int largestVariableNum = 0;
    foreach (GraphicalModel.Factor f in model.factors)
    {
        foreach (int i in f.neigborIndices)
        {
            if (i > largestVariableNum)
            {
                largestVariableNum = i;
            }
        }
    }
    // this is presented in true order, where 0 corresponds to var 0
    int[] mapValueAssignment = new int[largestVariableNum + 1];
    // this is kept in the order that the factor presents to us
    int[] highestValueAssignment = new int[bruteForce.neighborIndices.Length];
    // Exhaustively scan the joint table for its highest-valued assignment.
    foreach (int[] assignment in bruteForce)
    {
        if (bruteForce.GetAssignmentValue(assignment) > bruteForce.GetAssignmentValue(highestValueAssignment))
        {
            highestValueAssignment = assignment;
            for (int i = 0; i < assignment.Length; i++)
            {
                mapValueAssignment[bruteForce.neighborIndices[i]] = assignment[i];
            }
        }
    }
    // Overwrite observed variables with their forced values, since they were observed out of the table above.
    int[] forcedAssignments = new int[largestVariableNum + 1];
    for (int i_1 = 0; i_1 < mapValueAssignment.Length; i_1++)
    {
        if (model.GetVariableMetaDataByReference(i_1).Contains(CliqueTree.VariableObservedValue))
        {
            mapValueAssignment[i_1] = System.Convert.ToInt32(model.GetVariableMetaDataByReference(i_1)[CliqueTree.VariableObservedValue]);
            forcedAssignments[i_1] = mapValueAssignment[i_1];
        }
    }
    // Dump diagnostics to stderr before the hard assertions below, so a failure is debuggable.
    if (!Arrays.Equals(mapValueAssignment, map))
    {
        System.Console.Error.WriteLine("---");
        System.Console.Error.WriteLine("Relevant variables: " + Arrays.ToString(bruteForce.neighborIndices));
        System.Console.Error.WriteLine("Var Sizes: " + Arrays.ToString(bruteForce.GetDimensions()));
        System.Console.Error.WriteLine("MAP: " + Arrays.ToString(map));
        System.Console.Error.WriteLine("Brute force map: " + Arrays.ToString(mapValueAssignment));
        System.Console.Error.WriteLine("Forced assignments: " + Arrays.ToString(forcedAssignments));
    }
    foreach (int i_2 in bruteForce.neighborIndices)
    {
        // Only check defined variables
        NUnit.Framework.Assert.AreEqual(mapValueAssignment[i_2], map[i_2]);
    }
}
/// <summary>Validates CalculateMarginals() — per-variable marginals, the partition function, and joint marginals — against brute-force computation over the full joint table.</summary>
/// <param name="model">the model whose marginals are being checked</param>
/// <param name="weights">unused in this method body; presumably kept for symmetry with sibling checks — TODO confirm</param>
/// <param name="inference">the clique-tree inference object under test</param>
private void CheckMarginalsAgainstBruteForce(GraphicalModel model, ConcatVector weights, CliqueTree inference)
{
    CliqueTree.MarginalResult result = inference.CalculateMarginals();
    double[][] marginals = result.marginals;
    // NOTE(review): Map(null) looks like a lambda lost in automated translation (would throw at runtime) — confirm against the original source.
    ICollection<TableFactor> tableFactors = model.factors.Stream().Map(null).Collect(Collectors.ToSet());
    System.Diagnostics.Debug.Assert((tableFactors.Count == model.factors.Count));
    // this is the super slow but obviously correct way to get global marginals
    TableFactor bruteForce = null;
    foreach (TableFactor factor in tableFactors)
    {
        if (bruteForce == null)
        {
            bruteForce = factor;
        }
        else
        {
            bruteForce = bruteForce.Multiply(factor);
        }
    }
    if (bruteForce != null)
    {
        // observe out all variables that have been registered
        TableFactor observed = bruteForce;
        for (int i = 0; i < bruteForce.neighborIndices.Length; i++)
        {
            int n = bruteForce.neighborIndices[i];
            if (model.GetVariableMetaDataByReference(n).Contains(CliqueTree.VariableObservedValue))
            {
                int value = System.Convert.ToInt32(model.GetVariableMetaDataByReference(n)[CliqueTree.VariableObservedValue]);
                // Check that the marginals reflect the observation
                for (int j = 0; j < marginals[n].Length; j++)
                {
                    NUnit.Framework.Assert.AreEqual(marginals[n][j], 1.0e-9, j == value ? 1.0 : 0.0);
                }
                if (observed.neighborIndices.Length > 1)
                {
                    observed = observed.Observe(n, value);
                }
                else
                {
                    // If we've observed everything, then just quit
                    return;
                }
            }
        }
        bruteForce = observed;
        // Spot check each of the marginals in the brute force calculation
        double[][] bruteMarginals = bruteForce.GetSummedMarginals();
        int index = 0;
        foreach (int i_1 in bruteForce.neighborIndices)
        {
            bool isEqual = true;
            double[] brute = bruteMarginals[index];
            index++;
            System.Diagnostics.Debug.Assert((brute != null));
            System.Diagnostics.Debug.Assert((marginals[i_1] != null));
            // Pre-compare so a mismatch can be dumped to stderr before the hard assertion fires.
            for (int j = 0; j < brute.Length; j++)
            {
                if (double.IsNaN(brute[j]))
                {
                    isEqual = false;
                    break;
                }
                if (Math.Abs(brute[j] - marginals[i_1][j]) > 3.0e-2)
                {
                    isEqual = false;
                    break;
                }
            }
            if (!isEqual)
            {
                System.Console.Error.WriteLine("Arrays not equal! Variable " + i_1);
                System.Console.Error.WriteLine("\tGold: " + Arrays.ToString(brute));
                System.Console.Error.WriteLine("\tResult: " + Arrays.ToString(marginals[i_1]));
            }
            Assert.AssertArrayEquals(marginals[i_1], 3.0e-2, brute);
        }
        // Spot check the partition function
        double goldPartitionFunction = bruteForce.ValueSum();
        // Correct to within 3%
        NUnit.Framework.Assert.AreEqual(result.partitionFunction, goldPartitionFunction * 3.0e-2, goldPartitionFunction);
        // Check the joint marginals
        foreach (GraphicalModel.Factor f in model.factors)
        {
            NUnit.Framework.Assert.IsTrue(result.jointMarginals.Contains(f));
            // Sum out every joint-table variable that does not appear in this factor.
            TableFactor bruteForceJointMarginal = bruteForce;
            foreach (int n in bruteForce.neighborIndices)
            {
                foreach (int i_2 in f.neigborIndices)
                {
                    if (i_2 == n)
                    {
                        // NOTE(review): goto outer_continue has no matching label in this method (only outer_break labels exist);
                        // this looks like a mistranslated labeled continue from the original Java — confirm against the original source.
                        goto outer_continue;
                    }
                }
                if (bruteForceJointMarginal.neighborIndices.Length > 1)
                {
                    bruteForceJointMarginal = bruteForceJointMarginal.SumOut(n);
                }
                else
                {
                    // Down to one variable: every variable of f must have been observed, so the
                    // reported joint marginal should be a point mass on the observed assignment.
                    int[] fixedAssignment = new int[f.neigborIndices.Length];
                    for (int i_3 = 0; i_3 < fixedAssignment.Length; i_3++)
                    {
                        fixedAssignment[i_3] = System.Convert.ToInt32(model.GetVariableMetaDataByReference(f.neigborIndices[i_3])[CliqueTree.VariableObservedValue]);
                    }
                    foreach (int[] assn in result.jointMarginals[f])
                    {
                        if (Arrays.Equals(assn, fixedAssignment))
                        {
                            NUnit.Framework.Assert.AreEqual(result.jointMarginals[f].GetAssignmentValue(assn), 1.0e-7, 1.0);
                        }
                        else
                        {
                            // Dump the whole table before failing so the mismatch is debuggable.
                            if (result.jointMarginals[f].GetAssignmentValue(assn) != 0)
                            {
                                TableFactor j = result.jointMarginals[f];
                                foreach (int[] assignment in j)
                                {
                                    System.Console.Error.WriteLine(Arrays.ToString(assignment) + ": " + j.GetAssignmentValue(assignment));
                                }
                            }
                            NUnit.Framework.Assert.AreEqual(result.jointMarginals[f].GetAssignmentValue(assn), 1.0e-7, 0.0);
                        }
                    }
                    // NOTE(review): goto marginals_continue has no matching label (only marginals_break) — likely a mistranslated
                    // labeled continue of the enclosing factor loop; confirm against the original source.
                    goto marginals_continue;
                }
            }
            outer_break :;
            // Find the correspondence between the brute force joint marginal, which may be missing variables
            // because they were observed out of the table, and the output joint marginals, which are always an exact
            // match for the original factor
            int[] backPointers = new int[f.neigborIndices.Length];
            int[] observedValue = new int[f.neigborIndices.Length];
            for (int i_4 = 0; i_4 < backPointers.Length; i_4++)
            {
                if (model.GetVariableMetaDataByReference(f.neigborIndices[i_4]).Contains(CliqueTree.VariableObservedValue))
                {
                    observedValue[i_4] = System.Convert.ToInt32(model.GetVariableMetaDataByReference(f.neigborIndices[i_4])[CliqueTree.VariableObservedValue]);
                    backPointers[i_4] = -1;
                }
                else
                {
                    observedValue[i_4] = -1;
                    backPointers[i_4] = -1;
                    for (int j = 0; j < bruteForceJointMarginal.neighborIndices.Length; j++)
                    {
                        if (bruteForceJointMarginal.neighborIndices[j] == f.neigborIndices[i_4])
                        {
                            backPointers[i_4] = j;
                        }
                    }
                    System.Diagnostics.Debug.Assert((backPointers[i_4] != -1));
                }
            }
            // Normalize by the brute-force total mass; guard against dividing by zero on an all-zero table.
            double sum = bruteForceJointMarginal.ValueSum();
            if (sum == 0.0)
            {
                sum = 1;
            }
            foreach (int[] assignment_1 in result.jointMarginals[f])
            {
                int[] bruteForceMarginalAssignment = new int[bruteForceJointMarginal.neighborIndices.Length];
                for (int i_2 = 0; i_2 < assignment_1.Length; i_2++)
                {
                    if (backPointers[i_2] != -1)
                    {
                        bruteForceMarginalAssignment[backPointers[i_2]] = assignment_1[i_2];
                    }
                    else
                    {
                        // Make sure all assignments that don't square with observations get 0 weight
                        System.Diagnostics.Debug.Assert((observedValue[i_2] != -1));
                        if (assignment_1[i_2] != observedValue[i_2])
                        {
                            if (result.jointMarginals[f].GetAssignmentValue(assignment_1) != 0)
                            {
                                System.Console.Error.WriteLine("Joint marginals: " + Arrays.ToString(result.jointMarginals[f].neighborIndices));
                                System.Console.Error.WriteLine("Assignment: " + Arrays.ToString(assignment_1));
                                System.Console.Error.WriteLine("Observed Value: " + Arrays.ToString(observedValue));
                                foreach (int[] assn in result.jointMarginals[f])
                                {
                                    System.Console.Error.WriteLine("\t" + Arrays.ToString(assn) + ":" + result.jointMarginals[f].GetAssignmentValue(assn));
                                }
                            }
                            NUnit.Framework.Assert.AreEqual(result.jointMarginals[f].GetAssignmentValue(assignment_1), 1.0e-7, 0.0);
                            goto outer_continue;
                        }
                    }
                }
                NUnit.Framework.Assert.AreEqual(result.jointMarginals[f].GetAssignmentValue(assignment_1), 1.0e-3, bruteForceJointMarginal.GetAssignmentValue(bruteForceMarginalAssignment) / sum);
            }
            // NOTE(review): duplicate outer_break label in the same scope — another translation artifact; confirm against the original source.
            outer_break :;
        }
        marginals_break :;
    }
    else
    {
        // No factors at all: every reported marginal must be uniform.
        foreach (double[] marginal in marginals)
        {
            foreach (double d in marginal)
            {
                NUnit.Framework.Assert.AreEqual(d, 3.0e-2, 1.0 / marginal.Length);
            }
        }
    }
}
/// <summary>Builds a per-document view of coreference scoring data: scores, labeled mention pairs, gold clusters, and derived mention indices.</summary>
/// <param name="id">document id</param>
/// <param name="classificationScores">pairwise classification scores</param>
/// <param name="rankingScores">pairwise ranking scores</param>
/// <param name="anaphoricityScores">per-mention anaphoricity scores</param>
/// <param name="labeledPairs">labeled mention pairs; mention ids are harvested from its keys</param>
/// <param name="goldClusters">gold clusters, or null when no gold annotation is available</param>
/// <param name="mentionTypes">mention id to mention-type string</param>
public ClustererDoc(int id, ICounter<Pair<int, int>> classificationScores, ICounter<Pair<int, int>> rankingScores, ICounter<int> anaphoricityScores, IDictionary<Pair<int, int>, bool> labeledPairs, IList<IList<int>> goldClusters, IDictionary<int, string> mentionTypes)
{
    this.id = id;
    this.classificationScores = classificationScores;
    this.rankingScores = rankingScores;
    this.goldClusters = goldClusters;
    this.mentionTypes = mentionTypes;
    this.anaphoricityScores = anaphoricityScores;
    // NOTE(review): Filter(null) looks like a lambda lost in automated translation — confirm against the original source.
    positivePairs = labeledPairs.Keys.Stream().Filter(null).Collect(Collectors.ToSet());
    // Harvest every mention id that appears in any labeled pair.
    ICollection<int> mentionIds = new HashSet<int>();
    foreach (Pair<int, int> labeledPair in labeledPairs.Keys)
    {
        mentionIds.Add(labeledPair.first);
        mentionIds.Add(labeledPair.second);
    }
    mentions = new List<int>(mentionIds);
    mentions.Sort(null);
    // Map each mention id to its position in the sorted mention list.
    mentionIndices = new Dictionary<int, int>();
    for (int position = 0; position < mentions.Count; position++)
    {
        mentionIndices[mentions[position]] = position;
    }
    // Map each mention to the gold cluster containing it, when gold clusters are provided.
    mentionToGold = new Dictionary<int, IList<int>>();
    if (goldClusters != null)
    {
        foreach (IList<int> cluster in goldClusters)
        {
            foreach (int mention in cluster)
            {
                mentionToGold[mention] = cluster;
            }
        }
    }
}
/// <summary>Adds surface-level relation-extraction features — sentence unigrams, lemma n-grams, NER/lemma bigrams, mention distance buckets, punctuation parity, interceding NER tags, context words, and skip-word patterns — to <paramref name="feats"/>.</summary>
/// <param name="input">the subject/object mention pair and its sentence</param>
/// <param name="simpleSentence">unused in this method body — TODO confirm whether it can be dropped upstream</param>
/// <param name="feats">the feature counter that indicator features are accumulated into</param>
private static void SurfaceFeatures(KBPRelationExtractor.KBPInput input, Sentence simpleSentence, ClassicCounter<string> feats)
{
    // NOTE(review): the null arguments below look like lambdas/selectors lost in automated translation — confirm against the original source.
    IList<string> lemmaSpan = SpanBetweenMentions(input, null);
    IList<string> nerSpan = SpanBetweenMentions(input, null);
    IList<string> posSpan = SpanBetweenMentions(input, null);
    // Unigram features of the sentence
    IList<CoreLabel> tokens = input.sentence.AsCoreLabels(null, null);
    foreach (CoreLabel token in tokens)
    {
        Indicator(feats, "sentence_unigram", token.Lemma());
    }
    // Full lemma span ( -0.3 F1 )
    // if (lemmaSpan.size() <= 5) {
    // indicator(feats, "full_lemma_span", withMentionsPositioned(input, StringUtils.join(lemmaSpan, " ")));
    // }
    // Lemma n-grams
    // "_^_" / "_$_" act as sentence-boundary sentinels for the bigrams.
    string lastLemma = "_^_";
    foreach (string lemma in lemmaSpan)
    {
        Indicator(feats, "lemma_bigram", WithMentionsPositioned(input, lastLemma + " " + lemma));
        Indicator(feats, "lemma_unigram", WithMentionsPositioned(input, lemma));
        lastLemma = lemma;
    }
    Indicator(feats, "lemma_bigram", WithMentionsPositioned(input, lastLemma + " _$_"));
    // NER + lemma bi-grams
    // Fires only across an entity boundary where the non-entity side is a preposition ("IN").
    for (int i = 0; i < lemmaSpan.Count - 1; ++i)
    {
        if (!"O".Equals(nerSpan[i]) && "O".Equals(nerSpan[i + 1]) && "IN".Equals(posSpan[i + 1]))
        {
            Indicator(feats, "ner/lemma_bigram", WithMentionsPositioned(input, nerSpan[i] + " " + lemmaSpan[i + 1]));
        }
        if (!"O".Equals(nerSpan[i + 1]) && "O".Equals(nerSpan[i]) && "IN".Equals(posSpan[i]))
        {
            Indicator(feats, "ner/lemma_bigram", WithMentionsPositioned(input, lemmaSpan[i] + " " + nerSpan[i + 1]));
        }
    }
    // Distance between mentions
    // Bucketed token distance between the two mentions; default is ">10" (note: 11-15 falls into "<=15").
    string distanceBucket = ">10";
    if (lemmaSpan.Count == 0)
    {
        distanceBucket = "0";
    }
    else
    {
        if (lemmaSpan.Count <= 3)
        {
            distanceBucket = "<=3";
        }
        else
        {
            if (lemmaSpan.Count <= 5)
            {
                distanceBucket = "<=5";
            }
            else
            {
                if (lemmaSpan.Count <= 10)
                {
                    distanceBucket = "<=10";
                }
                else
                {
                    if (lemmaSpan.Count <= 15)
                    {
                        distanceBucket = "<=15";
                    }
                }
            }
        }
    }
    Indicator(feats, "distance_between_entities_bucket", distanceBucket);
    // Punctuation features
    int numCommasInSpan = 0;
    int numQuotesInSpan = 0;
    int parenParity = 0;
    foreach (string lemma_1 in lemmaSpan)
    {
        if (lemma_1.Equals(","))
        {
            numCommasInSpan += 1;
        }
        if (lemma_1.Equals("\"") || lemma_1.Equals("``") || lemma_1.Equals("''"))
        {
            numQuotesInSpan += 1;
        }
        if (lemma_1.Equals("(") || lemma_1.Equals("-LRB-"))
        {
            parenParity += 1;
        }
        if (lemma_1.Equals(")") || lemma_1.Equals("-RRB-"))
        {
            parenParity -= 1;
        }
    }
    Indicator(feats, "comma_parity", numCommasInSpan % 2 == 0 ? "even" : "odd");
    Indicator(feats, "quote_parity", numQuotesInSpan % 2 == 0 ? "even" : "odd");
    Indicator(feats, "paren_parity", string.Empty + parenParity);
    // Is broken by entity
    ICollection<string> intercedingNERTags = nerSpan.Stream().Filter(null).Collect(Collectors.ToSet());
    if (!intercedingNERTags.IsEmpty())
    {
        Indicator(feats, "has_interceding_ner", "t");
    }
    foreach (string ner in intercedingNERTags)
    {
        Indicator(feats, "interceding_ner", ner);
    }
    // Left and right context
    // "^" and "$" mark sentence start/end when a mention sits at the boundary.
    IList<CoreLabel> sentence = input.sentence.AsCoreLabels(null);
    if (input.subjectSpan.Start() == 0)
    {
        Indicator(feats, "subj_left", "^");
    }
    else
    {
        Indicator(feats, "subj_left", sentence[input.subjectSpan.Start() - 1].Lemma());
    }
    if (input.subjectSpan.End() == sentence.Count)
    {
        Indicator(feats, "subj_right", "$");
    }
    else
    {
        Indicator(feats, "subj_right", sentence[input.subjectSpan.End()].Lemma());
    }
    if (input.objectSpan.Start() == 0)
    {
        Indicator(feats, "obj_left", "^");
    }
    else
    {
        Indicator(feats, "obj_left", sentence[input.objectSpan.Start() - 1].Lemma());
    }
    if (input.objectSpan.End() == sentence.Count)
    {
        Indicator(feats, "obj_right", "$");
    }
    else
    {
        Indicator(feats, "obj_right", sentence[input.objectSpan.End()].Lemma());
    }
    // Skip-word patterns
    // Only fires when exactly one token separates subject from object, in that order.
    if (lemmaSpan.Count == 1 && input.subjectSpan.IsBefore(input.objectSpan))
    {
        string left = input.subjectSpan.Start() == 0 ? "^" : sentence[input.subjectSpan.Start() - 1].Lemma();
        Indicator(feats, "X<subj>Y<obj>", left + "_" + lemmaSpan[0]);
    }
}
/// <summary>Print an Annotation to an output stream.</summary>
/// <remarks>
/// Print an Annotation to an output stream.
/// The target OutputStream is assumed to already by buffered.
/// Emits one line per token (CoNLL-style, via Line()) with its basic-dependency head and relation,
/// followed by a blank line after each sentence.
/// </remarks>
/// <param name="doc">the annotated document to print</param>
/// <param name="target">the (already buffered) stream written to</param>
/// <param name="options">output options; only the encoding is read here</param>
/// <exception cref="System.IO.IOException"/>
public override void Print(Annotation doc, OutputStream target, AnnotationOutputter.Options options)
{
    PrintWriter writer = new PrintWriter(IOUtils.EncodedOutputStreamWriter(target, options.encoding));
    // vv A bunch of nonsense to get tokens vv
    if (doc.Get(typeof(CoreAnnotations.SentencesAnnotation)) != null)
    {
        foreach (ICoreMap sentence in doc.Get(typeof(CoreAnnotations.SentencesAnnotation)))
        {
            if (sentence.Get(typeof(CoreAnnotations.TokensAnnotation)) != null)
            {
                IList<CoreLabel> tokens = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));
                SemanticGraph depTree = sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation));
                for (int i = 0; i < tokens.Count; ++i)
                {
                    // ^^ end nonsense to get tokens ^^
                    // Try to get the incoming dependency edge
                    // head stays -1 / deprel stays null when no dependency information is available.
                    int head = -1;
                    string deprel = null;
                    if (depTree != null)
                    {
                        // NOTE(review): Map(null) looks like a lambda lost in automated translation (presumably IndexedWord -> index) — confirm against the original source.
                        ICollection<int> rootSet = depTree.GetRoots().Stream().Map(null).Collect(Collectors.ToSet());
                        // Dependency nodes are 1-indexed, hence i + 1.
                        IndexedWord node = depTree.GetNodeByIndexSafe(i + 1);
                        if (node != null)
                        {
                            IList<SemanticGraphEdge> edgeList = depTree.GetIncomingEdgesSorted(node);
                            if (!edgeList.IsEmpty())
                            {
                                // A non-root token is expected to have exactly one governor.
                                System.Diagnostics.Debug.Assert(edgeList.Count == 1);
                                head = edgeList[0].GetGovernor().Index();
                                deprel = edgeList[0].GetRelation().ToString();
                            }
                            else
                            {
                                if (rootSet.Contains(i + 1))
                                {
                                    head = 0;
                                    deprel = "ROOT";
                                }
                            }
                        }
                    }
                    // Write the token
                    writer.Print(Line(i + 1, tokens[i], head, deprel));
                    writer.Println();
                }
            }
            writer.Println();
        }
    }
    // extra blank line at end of sentence
    writer.Flush();
}
/// <summary>The slowest, but obviously correct way to get log likelihood.</summary>
/// <remarks>
/// The slowest, but obviously correct way to get log likelihood. We've already tested the partition function in
/// the CliqueTreeTest, but in the interest of making things as different as possible to catch any lurking bugs or
/// numerical issues, we use the brute force approach here: multiply every factor into one joint table, condition
/// on observed variables, then read off the (unnormalized) probability of the training assignment.
/// </remarks>
/// <param name="model">the model to get the log-likelihood of, assumes labels for assignments</param>
/// <param name="weights">the weights to get the log-likelihood at</param>
/// <returns>the log-likelihood</returns>
private double LogLikelihood(GraphicalModel model, ConcatVector weights)
{
    // NOTE(review): Map(null) looks like a lambda lost in automated translation — confirm against the original source.
    ICollection<TableFactor> tableFactors = model.factors.Stream().Map(null).Collect(Collectors.ToSet());
    System.Diagnostics.Debug.Assert((tableFactors.Count == model.factors.Count));
    // Multiply every factor together to get the (slow but correct) global joint table.
    TableFactor bruteForce = null;
    foreach (TableFactor factor in tableFactors)
    {
        bruteForce = (bruteForce == null) ? factor : bruteForce.Multiply(factor);
    }
    System.Diagnostics.Debug.Assert((bruteForce != null));
    // Condition the joint table on every variable that carries an observed value.
    TableFactor observed = bruteForce;
    foreach (int n in bruteForce.neighborIndices)
    {
        if (!model.GetVariableMetaDataByReference(n).Contains(CliqueTree.VariableObservedValue))
        {
            continue;
        }
        int value = System.Convert.ToInt32(model.GetVariableMetaDataByReference(n)[CliqueTree.VariableObservedValue]);
        if (observed.neighborIndices.Length <= 1)
        {
            // Everything is observed, so the log-likelihood is trivially 0.
            return 0.0;
        }
        observed = observed.Observe(n, value);
    }
    bruteForce = observed;
    // Now we can get a partition function
    double partitionFunction = bruteForce.ValueSum();
    // For now, we'll assume that all the variables are given for training. EM is another problem altogether.
    int[] assignment = new int[bruteForce.neighborIndices.Length];
    for (int i = 0; i < assignment.Length; i++)
    {
        System.Diagnostics.Debug.Assert((!model.GetVariableMetaDataByReference(bruteForce.neighborIndices[i]).Contains(CliqueTree.VariableObservedValue)));
        assignment[i] = System.Convert.ToInt32(model.GetVariableMetaDataByReference(bruteForce.neighborIndices[i])[LogLikelihoodDifferentiableFunction.VariableTrainingValue]);
    }
    double unnormalized = bruteForce.GetAssignmentValue(assignment);
    if (unnormalized == 0 || partitionFunction == 0)
    {
        // Log of zero mass on either side is negative infinity.
        return double.NegativeInfinity;
    }
    return Math.Log(unnormalized) - Math.Log(partitionFunction);
}