Code Example #1
        /// <summary>
        /// Try to find which quantifier we matched, given that we matched the head of a quantifier at the given IndexedWord, and that
        /// this whole deal is taking place in the given sentence.
        /// </summary>
        /// <param name="sentence">The sentence we are matching.</param>
        /// <param name="quantifier">The word at which we matched a quantifier.</param>
        /// <param name="isUnary">If true, this is a unary quantifier</param>
        /// <returns>An optional triple consisting of the particular quantifier we matched, as well as the span of that quantifier in the sentence.</returns>
        private static Optional <Triple <Operator, int, int> > ValidateQuantifierByHead(ICoreMap sentence, IndexedWord quantifier, bool isUnary)
        {
            // Some useful variables
            IList <CoreLabel>             tokens  = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));
            // NOTE: the gloss delegate was dropped by the automatic Java-to-C# conversion.
            // In the original it maps each CoreLabel to its surface text so that the candidate
            // span can be glossed below; an IFunction<CoreLabel, string> that returns the
            // token's word appears to be the intended behavior here.
            IFunction <CoreLabel, string> glossFn = null;
            int quantIndex = quantifier.Index();

            // Look forward a bit too, if the head is a number.
            int[] positiveOffsetToCheck = "CD".Equals(tokens[quantIndex - 1].Tag()) ? new int[] { 2, 1, 0 } : new int[] { 0 };
            // Try searching backwards for the right quantifier
            foreach (int offsetEnd in positiveOffsetToCheck)
            {
                int end = quantIndex + offsetEnd;
                for (int start = Math.Max(0, quantIndex - 10); start < quantIndex; ++start)
                {
                    string gloss = StringUtils.Join(tokens, " ", glossFn, start, end).ToLower();
                    foreach (Operator q in Operator.valuesByLengthDesc)
                    {
                        if (q.surfaceForm.Equals(gloss) && (!q.IsUnary() || isUnary))
                        {
                            return(Optional.Of(Triple.MakeTriple(q, start + 1, end + 1)));
                        }
                    }
                }
            }
            return(Optional.Empty());
        }
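For intuition, the following standalone sketch (illustrative only; it does not use the Stanford types) enumerates the candidate spans that the backwards search above would gloss for a hypothetical quantifier head at token index 5 whose head token is tagged CD:

        // Hypothetical walk-through of the span enumeration in ValidateQuantifierByHead.
        int quantIndex = 5;                            // 1-based index of the matched head
        int[] positiveOffsetToCheck = { 2, 1, 0 };     // head tagged "CD", so look ahead a bit too
        foreach (int offsetEnd in positiveOffsetToCheck)
        {
            int end = quantIndex + offsetEnd;
            for (int start = System.Math.Max(0, quantIndex - 10); start < quantIndex; ++start)
            {
                // Each (start, end) pair below would be glossed and compared against
                // Operator.valuesByLengthDesc in the real method.
                System.Console.WriteLine($"candidate span: tokens [{start}, {end})");
            }
        }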
Code Example #2
        public static Triple <string, IFileFilter, double> GetWeightedTreebankDescription(string[] args, int argIndex, string flag)
        {
            string      path   = null;
            IFileFilter filter = null;
            double      weight = 1.0;
            // the next arguments are the treebank path and maybe the range for testing
            int numSubArgs = NumSubArgs(args, argIndex);

            if (numSubArgs > 0 && numSubArgs < 4)
            {
                argIndex++;
                path = args[argIndex++];
                bool hasWeight = false;
                if (numSubArgs > 1 && DoublePattern.Matcher(args[argIndex + numSubArgs - 2]).Matches())
                {
                    weight    = double.Parse(args[argIndex + numSubArgs - 2]);
                    hasWeight = true;
                    numSubArgs--;
                }
                if (numSubArgs == 2)
                {
                    filter = new NumberRangesFileFilter(args[argIndex++], true);
                }
                else
                {
                    if (numSubArgs == 3)
                    {
                        try
                        {
                            int low  = System.Convert.ToInt32(args[argIndex]);
                            int high = System.Convert.ToInt32(args[argIndex + 1]);
                            filter    = new NumberRangeFileFilter(low, high, true);
                            argIndex += 2;
                        }
                        catch (NumberFormatException)
                        {
                            // maybe it's a ranges expression?
                            filter = new NumberRangesFileFilter(args[argIndex++], true);
                        }
                    }
                }
                if (hasWeight)
                {
                    argIndex++;
                }
            }
            else
            {
                throw new ArgumentException("Bad arguments after " + flag);
            }
            return(Triple.MakeTriple(path, filter, weight));
        }
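A hedged usage sketch (the flag name and argument values are invented for illustration): given three sub-arguments after the flag, the method should return the path, a range filter, and the trailing weight.

        // Hypothetical call; assumes NumSubArgs counts the three non-flag tokens after "-treebank".
        string[] args = { "-treebank", "/path/to/treebank", "200-300", "0.5" };
        Triple <string, IFileFilter, double> desc = GetWeightedTreebankDescription(args, 0, "-treebank");
        // Expected: desc.first  == "/path/to/treebank"
        //           desc.second == a NumberRangesFileFilter over "200-300"
        //           desc.third  == 0.5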
Code Example #3
        /// <summary>Segment a double into a mantissa and exponent.</summary>
        public static Triple <bool, long, int> SegmentDouble(double d)
        {
            if (double.IsInfinite(d) || double.IsNaN(d))
            {
                throw new ArgumentException("Cannot handle weird double: " + d);
            }
            bool negative = d < 0;

            d = System.Math.Abs(d);
            int exponent = 0;

            while (d >= 10.0)
            {
                exponent += 1;
                d         = d / 10.0;
            }
            while (d < 1.0)
            {
                exponent -= 1;
                d         = d * 10.0;
            }
            return(Triple.MakeTriple(negative, (long)(d * 10000000000000000.0), exponent));
        }
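A worked example of the decomposition (hypothetical caller): the mantissa is the value normalized into [1, 10) and scaled by 1e16, so the original double can be rebuilt up to rounding.

        // Hypothetical round-trip through SegmentDouble.
        Triple <bool, long, int> parts = SegmentDouble(-123.456);
        bool negative = parts.first;    // true
        long mantissa = parts.second;   // ≈ 12345600000000000, i.e. 1.23456 scaled by 1e16
        int  exponent = parts.third;    // 2
        double rebuilt = (negative ? -1.0 : 1.0) * (mantissa / 1e16) * System.Math.Pow(10, exponent);
        // rebuilt ≈ -123.456 (up to floating-point rounding)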
Code Example #4
        /// <exception cref="System.IO.IOException"/>
        public NaturalLogicWeights(string affinityModels, double upperProbabilityCap)
        {
            this.upperProbabilityCap = upperProbabilityCap;
            string line;

            // Simple PP attachments
            using (BufferedReader ppReader = IOUtils.ReaderFromString(affinityModels + "/pp.tab.gz", "utf8"))
            {
                while ((line = ppReader.ReadLine()) != null)
                {
                    string[] fields           = line.Split("\t");
                    Pair <string, string> key = Pair.MakePair(string.Intern(fields[0]), string.Intern(fields[1]));
                    verbPPAffinity[key] = double.Parse(fields[2]);
                }
            }
            // Subj PP attachments
            using (BufferedReader subjPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp.tab.gz", "utf8"))
            {
                while ((line = subjPPReader.ReadLine()) != null)
                {
                    string[] fields = line.Split("\t");
                    Triple <string, string, string> key = Triple.MakeTriple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]));
                    verbSubjPPAffinity[key] = double.Parse(fields[3]);
                }
            }
            // Subj Obj PP attachments
            using (BufferedReader subjObjPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_obj_pp.tab.gz", "utf8"))
            {
                while ((line = subjObjPPReader.ReadLine()) != null)
                {
                    string[] fields = line.Split("\t");
                    Quadruple <string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
                    verbSubjObjPPAffinity[key] = double.Parse(fields[4]);
                }
            }
            // Subj PP PP attachments
            using (BufferedReader subjPPPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp_pp.tab.gz", "utf8"))
            {
                while ((line = subjPPPPReader.ReadLine()) != null)
                {
                    string[] fields = line.Split("\t");
                    Quadruple <string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
                    verbSubjPPPPAffinity[key] = double.Parse(fields[4]);
                }
            }
            // Subj PP Obj attachments
            using (BufferedReader subjPPObjReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp_obj.tab.gz", "utf8"))
            {
                while ((line = subjPPObjReader.ReadLine()) != null)
                {
                    string[] fields = line.Split("\t");
                    Quadruple <string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
                    verbSubjPPObjAffinity[key] = double.Parse(fields[4]);
                }
            }
            // Simple object attachments
            using (BufferedReader objReader = IOUtils.ReaderFromString(affinityModels + "/obj.tab.gz", "utf8"))
            {
                while ((line = objReader.ReadLine()) != null)
                {
                    string[] fields = line.Split("\t");
                    verbObjAffinity[fields[0]] = double.Parse(fields[1]);
                }
            }
        }
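A minimal construction sketch (the model directory and probability cap below are illustrative, not the library defaults):

        // Hypothetical construction of the weights. The directory is expected to contain the
        // gzipped TSV affinity tables read above: pp.tab.gz, subj_pp.tab.gz, subj_obj_pp.tab.gz,
        // subj_pp_pp.tab.gz, subj_pp_obj.tab.gz and obj.tab.gz.
        NaturalLogicWeights weights = new NaturalLogicWeights("/models/naturalli/affinity_models", 1.0 / 3.0);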
Code Example #5
        public virtual double PpDeletionProbability(SemanticGraphEdge edge, IEnumerable <SemanticGraphEdge> neighbors)
        {
            // Get information about the neighbors
            // (in a totally not-creepy-stalker sort of way)
            Optional <string> subj = Optional.Empty();
            Optional <string> obj  = Optional.Empty();
            Optional <string> pp   = Optional.Empty();

            foreach (SemanticGraphEdge neighbor in neighbors)
            {
                if (neighbor != edge)
                {
                    string neighborRel = neighbor.GetRelation().ToString();
                    if (neighborRel.Contains("subj"))
                    {
                        subj = Optional.Of(neighbor.GetDependent().OriginalText().ToLower());
                    }
                    if (neighborRel.Contains("obj"))
                    {
                        obj = Optional.Of(neighbor.GetDependent().OriginalText().ToLower());
                    }
                    if (neighborRel.Contains("prep"))
                    {
                        pp = Optional.Of(neighborRel);
                    }
                }
            }
            string prep = edge.GetRelation().ToString();
            string verb = edge.GetGovernor().OriginalText().ToLower();
            // Compute the most informative drop probability we can
            // Mirror the Java Map.get convention: the affinity lookups below are assumed to
            // yield null when a key is absent, so track the score with a nullable double.
            double? rawScore = null;

            if (subj.IsPresent())
            {
                if (obj.IsPresent())
                {
                    // Case: subj+obj
                    rawScore = verbSubjObjPPAffinity[Quadruple.MakeQuadruple(verb, subj.Get(), obj.Get(), prep)];
                }
                if (rawScore == null && pp.IsPresent())
                {
                    // Case: subj+other_pp
                    rawScore = verbSubjPPPPAffinity[Quadruple.MakeQuadruple(verb, subj.Get(), pp.Get(), prep)];
                }
                if (rawScore == null)
                {
                    // Case: subj
                    rawScore = verbSubjPPAffinity[Triple.MakeTriple(verb, subj.Get(), prep)];
                }
            }
            if (rawScore == null)
            {
                // Case: just the original pp
                rawScore = verbPPAffinity[Pair.MakePair(verb, prep)];
            }
            if (rawScore == null)
            {
                return(DeletionProbability(prep));
            }
            else
            {
                return(1.0 - Math.Min(1.0, rawScore.Value / upperProbabilityCap));
            }
        }
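A hedged sketch of a caller (the graph, verb node, weights instance, and threshold are assumed to be in scope and are purely illustrative): score each prepositional edge attached to a verb and flag the ones that look safe to drop.

        // Hypothetical use of PpDeletionProbability; 'weights' is a NaturalLogicWeights instance,
        // 'tree' a SemanticGraph and 'verb' one of its IndexedWord vertices.
        foreach (SemanticGraphEdge candidate in tree.OutgoingEdgeIterable(verb))
        {
            string rel = candidate.GetRelation().ToString();
            if (rel.StartsWith("nmod") || rel.StartsWith("prep"))
            {
                double pDrop = weights.PpDeletionProbability(candidate, tree.OutgoingEdgeIterable(verb));
                if (pDrop > 0.9)   // illustrative threshold
                {
                    // This prepositional phrase is a good candidate for deletion in a shortened clause.
                }
            }
        }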
Code Example #6
        /// <summary>Annotate every token for its polarity, based on the operators found.</summary>
        /// <remarks>
        /// Annotate every token for its polarity, based on the operators found. This function will set the
        /// <see cref="PolarityAnnotation"/>
        /// for every token.
        /// </remarks>
        /// <param name="sentence">
        /// As in
        /// <see cref="DoOneSentence(Edu.Stanford.Nlp.Pipeline.Annotation, Edu.Stanford.Nlp.Util.ICoreMap)"/>
        /// </param>
        private static void AnnotatePolarity(ICoreMap sentence)
        {
            // Collect all the operators in this sentence
            IList <OperatorSpec> operators = new List <OperatorSpec>();
            IList <CoreLabel>    tokens    = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));

            foreach (CoreLabel token in tokens)
            {
                OperatorSpec specOrNull = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation));
                if (specOrNull != null)
                {
                    operators.Add(specOrNull);
                }
            }
            // Make sure every node of the dependency tree has a polarity.
            // This is separate from the code below in case the tokens in the dependency
            // tree don't correspond to the tokens in the sentence. This happens at least
            // when the constituency parser craps out on a long sentence, and the
            // dependency tree is put together haphazardly.
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.EnhancedPlusPlusDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.EnhancedPlusPlusDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            // Set polarity for each token
            for (int i = 0; i < tokens.Count; ++i)
            {
                CoreLabel token_1 = tokens[i];
                // Get operators in scope
                IList <Triple <int, Monotonicity, MonotonicityType> > inScope = new List <Triple <int, Monotonicity, MonotonicityType> >(4);
                foreach (OperatorSpec @operator in operators)
                {
                    if (i >= @operator.subjectBegin && i < @operator.subjectEnd)
                    {
                        inScope.Add(Triple.MakeTriple(@operator.subjectEnd - @operator.subjectBegin, @operator.instance.subjMono, @operator.instance.subjType));
                    }
                    else
                    {
                        if (i >= @operator.objectBegin && i < @operator.objectEnd)
                        {
                            inScope.Add(Triple.MakeTriple(@operator.objectEnd - @operator.objectBegin, @operator.instance.objMono, @operator.instance.objType));
                        }
                    }
                }
                // Sort the operators by their scope (approximated by the size of their argument span)
                // The comparator was lost in the Java-to-C# conversion; restore the original
                // ordering (descending by the size of the argument span, i.e. widest scope first).
                inScope.Sort((x, y) => y.first - x.first);
                // Create polarity
                IList <Pair <Monotonicity, MonotonicityType> > info = new List <Pair <Monotonicity, MonotonicityType> >(inScope.Count);
                foreach (Triple <int, Monotonicity, MonotonicityType> term in inScope)
                {
                    info.Add(Pair.MakePair(term.second, term.third));
                }
                Polarity polarity = new Polarity(info);
                // Set polarity
                token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), polarity);
            }
            // Set the PolarityDirectionAnnotation
            foreach (CoreLabel token_2 in tokens)
            {
                Polarity polarity = token_2.Get(typeof(NaturalLogicAnnotations.PolarityAnnotation));
                if (polarity != null)
                {
                    if (polarity.IsUpwards())
                    {
                        token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "up");
                    }
                    else
                    {
                        if (polarity.IsDownwards())
                        {
                            token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "down");
                        }
                        else
                        {
                            token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "flat");
                        }
                    }
                }
            }
        }
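Once AnnotatePolarity has run, the per-token results can be read back off the sentence. A hedged inspection sketch (assumes 'sentence' is an already-annotated ICoreMap in scope):

        // Hypothetical inspection of the annotations set above.
        foreach (CoreLabel token in sentence.Get(typeof(CoreAnnotations.TokensAnnotation)))
        {
            Polarity polarity  = token.Get(typeof(NaturalLogicAnnotations.PolarityAnnotation));
            string   direction = token.Get(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation));
            System.Console.WriteLine(token.Word() + "\t" + direction + "\t" + polarity);
        }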
Code Example #7
        /// <summary>
        /// Find the operators in this sentence, annotating the head word (only!) of each operator with the
        /// <see cref="OperatorAnnotation"/>
        /// .
        /// </summary>
        /// <param name="sentence">
        /// As in
        /// <see cref="DoOneSentence(Edu.Stanford.Nlp.Pipeline.Annotation, Edu.Stanford.Nlp.Util.ICoreMap)"/>
        /// </param>
        private void AnnotateOperators(ICoreMap sentence)
        {
            SemanticGraph     tree   = sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation));
            IList <CoreLabel> tokens = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));

            if (tree == null)
            {
                tree = sentence.Get(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation));
            }
            foreach (SemgrexPattern pattern in Patterns)
            {
                SemgrexMatcher matcher = pattern.Matcher(tree);
                while (matcher.Find())
                {
                    // Get terms
                    IndexedWord properSubject = matcher.GetNode("Subject");
                    IndexedWord quantifier;
                    IndexedWord subject;
                    bool        namedEntityQuantifier = false;
                    if (properSubject != null)
                    {
                        quantifier            = subject = properSubject;
                        namedEntityQuantifier = true;
                    }
                    else
                    {
                        quantifier = matcher.GetNode("quantifier");
                        subject    = matcher.GetNode("subject");
                    }
                    IndexedWord @object = matcher.GetNode("object");
                    // Validate quantifier
                    // At the end of this block, quantifierInfo is present iff we found a valid quantifier (and its span).
                    Optional <Triple <Operator, int, int> > quantifierInfo;
                    if (namedEntityQuantifier)
                    {
                        // named entities have the "all" semantics by default.
                        if (!neQuantifiers)
                        {
                            continue;
                        }
                        quantifierInfo = Optional.Of(Triple.MakeTriple(Operator.ImplicitNamedEntity, quantifier.Index(), quantifier.Index()));
                    }
                    else
                    {
                        // note: empty quantifier span given
                        // find the quantifier, and return some info about it.
                        quantifierInfo = ValidateQuantifierByHead(sentence, quantifier, @object == null || subject == null);
                    }
                    // Awful hacks to regularize the subject of things like "one of" and "there are"
                    // (fix up 'there are')
                    if ("be".Equals(subject == null ? null : subject.Lemma()))
                    {
                        bool        hasExpl    = false;
                        IndexedWord newSubject = null;
                        foreach (SemanticGraphEdge outgoingEdge in tree.OutgoingEdgeIterable(subject))
                        {
                            if ("nsubj".Equals(outgoingEdge.GetRelation().ToString()))
                            {
                                newSubject = outgoingEdge.GetDependent();
                            }
                            else
                            {
                                if ("expl".Equals(outgoingEdge.GetRelation().ToString()))
                                {
                                    hasExpl = true;
                                }
                            }
                        }
                        if (hasExpl)
                        {
                            subject = newSubject;
                        }
                    }
                    // (fix up '$n$ of')
                    if ("CD".Equals(subject == null ? null : subject.Tag()))
                    {
                        foreach (SemanticGraphEdge outgoingEdge in tree.OutgoingEdgeIterable(subject))
                        {
                            string rel = outgoingEdge.GetRelation().ToString();
                            if (rel.StartsWith("nmod"))
                            {
                                subject = outgoingEdge.GetDependent();
                            }
                        }
                    }
                    // Set tokens
                    if (quantifierInfo.IsPresent())
                    {
                        // Compute span
                        IndexedWord pivot = matcher.GetNode("pivot");
                        if (pivot == null)
                        {
                            pivot = @object;
                        }
                        OperatorSpec scope = ComputeScope(tree, quantifierInfo.Get().first, pivot, Pair.MakePair(quantifierInfo.Get().second, quantifierInfo.Get().third), subject, namedEntityQuantifier, @object, tokens.Count);
                        // Set annotation
                        CoreLabel    token    = sentence.Get(typeof(CoreAnnotations.TokensAnnotation))[quantifier.Index() - 1];
                        OperatorSpec oldScope = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation));
                        if (oldScope == null || oldScope.QuantifierLength() < scope.QuantifierLength() || oldScope.instance != scope.instance)
                        {
                            token.Set(typeof(NaturalLogicAnnotations.OperatorAnnotation), scope);
                        }
                        else
                        {
                            token.Set(typeof(NaturalLogicAnnotations.OperatorAnnotation), OperatorSpec.Merge(oldScope, scope));
                        }
                    }
                }
            }
            // Ensure we didn't select overlapping quantifiers. For example, "a" and "a few" can often overlap.
            // In these cases, take the longer quantifier match.
            IList <OperatorSpec> quantifiers = new List <OperatorSpec>();

            for (int i = 0; i < tokens.Count; ++i)
            {
                CoreLabel    token = tokens[i];
                OperatorSpec @operator;
                if ((@operator = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation))) != null)
                {
                    if (i == 0 && @operator.instance == Operator.No && tokens.Count > 2 && "PRP".Equals(tokens[1].Get(typeof(CoreAnnotations.PartOfSpeechAnnotation))))
                    {
                        // This is pragmatically not a negation -- ignore it
                        // For example, "no I don't like candy" or "no you like cats"
                        token.Remove(typeof(NaturalLogicAnnotations.OperatorAnnotation));
                    }
                    else
                    {
                        quantifiers.Add(@operator);
                    }
                }
            }
            // The comparator was lost in the Java-to-C# conversion; the original sorts the
            // operators with the longest quantifier span first.
            quantifiers.Sort((x, y) => y.QuantifierLength() - x.QuantifierLength());
            foreach (OperatorSpec quantifier_1 in quantifiers)
            {
                for (int i_1 = quantifier_1.quantifierBegin; i_1 < quantifier_1.quantifierEnd; ++i_1)
                {
                    if (i_1 != quantifier_1.quantifierHead)
                    {
                        tokens[i_1].Remove(typeof(NaturalLogicAnnotations.OperatorAnnotation));
                    }
                }
            }
        }
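After AnnotateOperators finishes, each operator's head token carries an OperatorSpec. A hedged sketch of reading them back (assumes 'sentence' is in scope):

        // Hypothetical collection of the operators found above; only head tokens keep the annotation.
        foreach (CoreLabel token in sentence.Get(typeof(CoreAnnotations.TokensAnnotation)))
        {
            OperatorSpec spec = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation));
            if (spec != null)
            {
                System.Console.WriteLine("operator '" + spec.instance + "' headed at '" + token.Word() + "'");
            }
        }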
Code Example #8
        /// <summary>Fix some bizarre peculiarities with certain trees.</summary>
        /// <remarks>
        /// Fix some bizarre peculiarities with certain trees.
        /// So far, these include:
        /// <ul>
        /// <li>Sometimes there's an edge from a word to itself. This seems wrong.</li>
        /// </ul>
        /// </remarks>
        /// <param name="tree">The tree to clean (in place!).</param>
        /// <returns>A list of extra edges, which are valid but were removed.</returns>
        public static IList <SemanticGraphEdge> CleanTree(SemanticGraph tree)
        {
            //    assert !isCyclic(tree);
            // Clean nodes
            IList <IndexedWord> toDelete = new List <IndexedWord>();

            foreach (IndexedWord vertex in tree.VertexSet())
            {
                // Clean punctuation
                if (vertex.Tag() == null)
                {
                    continue;
                }
                char tag = vertex.BackingLabel().Tag()[0];
                if (tag == '.' || tag == ',' || tag == '(' || tag == ')' || tag == ':')
                {
                    if (!tree.OutgoingEdgeIterator(vertex).MoveNext())
                    {
                        // This should really never happen, but it does.
                        toDelete.Add(vertex);
                    }
                }
            }
            // (remove the collected punctuation vertices; the delegate was lost in conversion)
            toDelete.ForEach(v => tree.RemoveVertex(v));
            // Clean edges
            IEnumerator <SemanticGraphEdge> iter = tree.EdgeIterable().GetEnumerator();
            IList <Triple <IndexedWord, IndexedWord, SemanticGraphEdge> > toAdd = new List <Triple <IndexedWord, IndexedWord, SemanticGraphEdge> >();

            toDelete.Clear();
            while (iter.MoveNext())
            {
                SemanticGraphEdge edge = iter.Current;
                if (edge.GetDependent().Index() == edge.GetGovernor().Index())
                {
                    // Clean up copy-edges
                    if (edge.GetDependent().IsCopy(edge.GetGovernor()))
                    {
                        foreach (SemanticGraphEdge toCopy in tree.OutgoingEdgeIterable(edge.GetDependent()))
                        {
                            toAdd.Add(Triple.MakeTriple(edge.GetGovernor(), toCopy.GetDependent(), toCopy));
                        }
                        toDelete.Add(edge.GetDependent());
                    }
                    if (edge.GetGovernor().IsCopy(edge.GetDependent()))
                    {
                        foreach (SemanticGraphEdge toCopy in tree.OutgoingEdgeIterable(edge.GetGovernor()))
                        {
                            toAdd.Add(Triple.MakeTriple(edge.GetDependent(), toCopy.GetDependent(), toCopy));
                        }
                        toDelete.Add(edge.GetGovernor());
                    }
                    // Clean self-edges
                    iter.Remove();
                }
                else
                {
                    if (edge.GetRelation().ToString().Equals("punct"))
                    {
                        // Clean punctuation (again)
                        if (!tree.OutgoingEdgeIterator(edge.GetDependent()).MoveNext())
                        {
                            // This should really never happen, but it does.
                            iter.Remove();
                        }
                    }
                }
            }
            // (remove the copy vertices collected above; the delegate was lost in conversion)
            toDelete.ForEach(v => tree.RemoveVertex(v));
            // (add edges we wanted to add)
            foreach (Triple <IndexedWord, IndexedWord, SemanticGraphEdge> edge_1 in toAdd)
            {
                tree.AddEdge(edge_1.first, edge_1.second, edge_1.third.GetRelation(), edge_1.third.GetWeight(), edge_1.third.IsExtra());
            }
            // Handle extra edges.
            // Two cases:
            // (1) the extra edge is a subj/obj edge and the main edge is a conj:.*
            //     in this case, keep the extra
            // (2) otherwise, delete the extra
            IList <SemanticGraphEdge> extraEdges = new List <SemanticGraphEdge>();

            foreach (SemanticGraphEdge edge_2 in tree.EdgeIterable())
            {
                if (edge_2.IsExtra())
                {
                    IList <SemanticGraphEdge> incomingEdges = tree.IncomingEdgeList(edge_2.GetDependent());
                    SemanticGraphEdge         toKeep        = null;
                    foreach (SemanticGraphEdge candidate in incomingEdges)
                    {
                        if (toKeep == null)
                        {
                            toKeep = candidate;
                        }
                        else
                        {
                            if (toKeep.GetRelation().ToString().StartsWith("conj") && candidate.GetRelation().ToString().Matches(".subj.*|.obj.*"))
                            {
                                toKeep = candidate;
                            }
                            else
                            {
                                if (!candidate.IsExtra() && !(candidate.GetRelation().ToString().StartsWith("conj") && toKeep.GetRelation().ToString().Matches(".subj.*|.obj.*")))
                                {
                                    toKeep = candidate;
                                }
                            }
                        }
                    }
                    foreach (SemanticGraphEdge candidate_1 in incomingEdges)
                    {
                        if (candidate_1 != toKeep)
                        {
                            extraEdges.Add(candidate_1);
                        }
                    }
                }
            }
            // (delete the extra edges from the tree; the delegate was lost in conversion)
            extraEdges.ForEach(e => tree.RemoveEdge(e));
            // Add apposition edges (simple coref)
            foreach (SemanticGraphEdge extraEdge in new List <SemanticGraphEdge>(extraEdges))
            {
                // note[gabor] prevent concurrent modification exception
                foreach (SemanticGraphEdge candidateAppos in tree.IncomingEdgeIterable(extraEdge.GetDependent()))
                {
                    if (candidateAppos.GetRelation().ToString().Equals("appos"))
                    {
                        extraEdges.Add(new SemanticGraphEdge(extraEdge.GetGovernor(), candidateAppos.GetGovernor(), extraEdge.GetRelation(), extraEdge.GetWeight(), extraEdge.IsExtra()));
                    }
                }
                foreach (SemanticGraphEdge candidateAppos_1 in tree.OutgoingEdgeIterable(extraEdge.GetDependent()))
                {
                    if (candidateAppos_1.GetRelation().ToString().Equals("appos"))
                    {
                        extraEdges.Add(new SemanticGraphEdge(extraEdge.GetGovernor(), candidateAppos_1.GetDependent(), extraEdge.GetRelation(), extraEdge.GetWeight(), extraEdge.IsExtra()));
                    }
                }
            }
            // Brute force ensure tree
            // Remove incoming edges from roots
            IList <SemanticGraphEdge> rootIncomingEdges = new List <SemanticGraphEdge>();

            foreach (IndexedWord root in tree.GetRoots())
            {
                foreach (SemanticGraphEdge incomingEdge in tree.IncomingEdgeIterable(root))
                {
                    rootIncomingEdges.Add(incomingEdge);
                }
            }
            // (remove incoming edges into roots; the delegate was lost in conversion)
            rootIncomingEdges.ForEach(e => tree.RemoveEdge(e));
            // Loop until it becomes a tree.
            bool changed = true;

            while (changed)
            {
                // I just want trees to be trees; is that so much to ask!?
                changed = false;
                IList <IndexedWord>       danglingNodes = new List <IndexedWord>();
                IList <SemanticGraphEdge> invalidEdges  = new List <SemanticGraphEdge>();
                foreach (IndexedWord vertex_1 in tree.VertexSet())
                {
                    // Collect statistics
                    IEnumerator <SemanticGraphEdge> incomingIter = tree.IncomingEdgeIterator(vertex_1);
                    bool hasIncoming         = incomingIter.MoveNext();
                    bool hasMultipleIncoming = false;
                    if (hasIncoming)
                    {
                        // The first incoming edge was already consumed by MoveNext() above;
                        // a second successful MoveNext() means this vertex has multiple parents.
                        hasMultipleIncoming = incomingIter.MoveNext();
                    }
                    // Register actions
                    if (!hasIncoming && !tree.GetRoots().Contains(vertex_1))
                    {
                        danglingNodes.Add(vertex_1);
                    }
                    else
                    {
                        if (hasMultipleIncoming)
                        {
                            foreach (SemanticGraphEdge edge in new IterableIterator <SemanticGraphEdge>(incomingIter))
                            {
                                invalidEdges.Add(edge);
                            }
                        }
                    }
                }
                // Perform actions
                foreach (IndexedWord vertex_2 in danglingNodes)
                {
                    tree.RemoveVertex(vertex_2);
                    changed = true;
                }
                foreach (SemanticGraphEdge edge_3 in invalidEdges)
                {
                    tree.RemoveEdge(edge_3);
                    changed = true;
                }
            }
            // Edge case: remove duplicate dobj to "that."
            //            This is a common parse error.
            foreach (IndexedWord vertex_3 in tree.VertexSet())
            {
                SemanticGraphEdge thatEdge = null;
                int dobjCount = 0;
                foreach (SemanticGraphEdge edge in tree.OutgoingEdgeIterable(vertex_3))
                {
                    if (Sharpen.Runtime.EqualsIgnoreCase("that", edge.GetDependent().Word()))
                    {
                        thatEdge = edge;
                    }
                    if ("dobj".Equals(edge.GetRelation().ToString()))
                    {
                        dobjCount += 1;
                    }
                }
                if (dobjCount > 1 && thatEdge != null)
                {
                    // Case: there are two dobj edges, one of which goes to the word "that"
                    // Action: rewrite the dobj edge to "that" to be a "mark" edge.
                    tree.RemoveEdge(thatEdge);
                    tree.AddEdge(thatEdge.GetGovernor(), thatEdge.GetDependent(), GrammaticalRelation.ValueOf(thatEdge.GetRelation().GetLanguage(), "mark"), thatEdge.GetWeight(), thatEdge.IsExtra());
                }
            }
            // Return
            System.Diagnostics.Debug.Assert(IsTree(tree));
            return(extraEdges);
        }
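A hedged usage sketch: clean a copy of the dependency graph so the sentence's original annotation is left untouched, and keep the returned edges for downstream use (the copy constructor call mirrors the CoreNLP API but is illustrative here).

        // Hypothetical call; 'dependencies' is an existing SemanticGraph.
        SemanticGraph working = new SemanticGraph(dependencies);        // defensive copy (copy constructor assumed)
        IList <SemanticGraphEdge> removedButValid = CleanTree(working);
        // 'working' is now guaranteed to be a tree (see the assertion above);
        // 'removedButValid' holds the valid extra edges that were stripped out.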