Example #1
        /// <summary>The search algorithm, starting with a full sentence and iteratively shortening it to its entailed sentences.</summary>
        /// <returns>A list of search results, corresponding to shortenings of the sentence.</returns>
        private IList <ForwardEntailerSearchProblem.SearchResult> SearchImplementation()
        {
            // Pre-process the tree
            SemanticGraph parseTree = new SemanticGraph(this.parseTree);

            System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(parseTree));
            // (remove common determiners: delete determiner leaves such as "the", "a", "an", "this", "that",
            //  "these", "those", recording each removal as "det")
            IList <string> determinerRemovals = new List <string>();

            foreach (IndexedWord leaf in new List <IndexedWord>(parseTree.GetLeafVertices()))
            {
                string word = leaf.Word() == null ? null : leaf.Word().ToLower();
                if (word == "the" || word == "a" || word == "an" || word == "this" || word == "that" || word == "these" || word == "those")
                {
                    parseTree.RemoveVertex(leaf);
                    determinerRemovals.Add("det");
                }
            }
            // (cut conj_and nodes)
            ICollection <SemanticGraphEdge> andsToAdd = new HashSet <SemanticGraphEdge>();

            foreach (IndexedWord vertex in parseTree.VertexSet())
            {
                if (parseTree.InDegree(vertex) > 1)
                {
                    SemanticGraphEdge conjAnd = null;
                    foreach (SemanticGraphEdge edge in parseTree.IncomingEdgeIterable(vertex))
                    {
                        if ("conj:and".Equals(edge.GetRelation().ToString()))
                        {
                            conjAnd = edge;
                        }
                    }
                    if (conjAnd != null)
                    {
                        parseTree.RemoveEdge(conjAnd);
                        System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(parseTree));
                        andsToAdd.Add(conjAnd);
                    }
                }
            }
            // Clean the tree
            Edu.Stanford.Nlp.Naturalli.Util.CleanTree(parseTree);
            System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(parseTree));
            // Find the subject / object split
            // This takes max O(n^2) time, expected O(n*log(n)) time.
            // Optimal is O(n), but I'm too lazy to implement it.
            BitSet isSubject = new BitSet(256);
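            // (A bit of isSubject is set at position index-1 when some arc on that word's path to the root ends
            //  in "subj"; it is consulted below when computing the deletion relation for a dependent.)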

            foreach (IndexedWord vertex_1 in parseTree.VertexSet())
            {
                // Search up the tree for a subj node; if found, mark that vertex as a subject.
                IEnumerator <SemanticGraphEdge> incomingEdges = parseTree.IncomingEdgeIterator(vertex_1);
                SemanticGraphEdge edge = null;
                if (incomingEdges.MoveNext())
                {
                    edge = incomingEdges.Current;
                }
                int numIters = 0;
                while (edge != null)
                {
                    if (edge.GetRelation().ToString().EndsWith("subj"))
                    {
                        System.Diagnostics.Debug.Assert(vertex_1.Index() > 0);
                        isSubject.Set(vertex_1.Index() - 1);
                        break;
                    }
                    incomingEdges = parseTree.IncomingEdgeIterator(edge.GetGovernor());
                    if (incomingEdges.MoveNext())
                    {
                        edge = incomingEdges.Current;
                    }
                    else
                    {
                        edge = null;
                    }
                    numIters += 1;
                    if (numIters > 100)
                    {
                        //          log.error("tree has apparent depth > 100");
                        return(Java.Util.Collections.EmptyList);
                    }
                }
            }
            // Outputs
            IList <ForwardEntailerSearchProblem.SearchResult> results = new List <ForwardEntailerSearchProblem.SearchResult>();

            if (!determinerRemovals.IsEmpty())
            {
                if (andsToAdd.IsEmpty())
                {
                    double score = Math.Pow(weights.DeletionProbability("det"), (double)determinerRemovals.Count);
                    System.Diagnostics.Debug.Assert(!double.IsNaN(score));
                    System.Diagnostics.Debug.Assert(!double.IsInfinite(score));
                    results.Add(new ForwardEntailerSearchProblem.SearchResult(parseTree, determinerRemovals, score));
                }
                else
                {
                    SemanticGraph treeWithAnds = new SemanticGraph(parseTree);
                    System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(treeWithAnds));
                    foreach (SemanticGraphEdge and in andsToAdd)
                    {
                        treeWithAnds.AddEdge(and.GetGovernor(), and.GetDependent(), and.GetRelation(), double.NegativeInfinity, false);
                    }
                    System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(treeWithAnds));
                    results.Add(new ForwardEntailerSearchProblem.SearchResult(treeWithAnds, determinerRemovals, Math.Pow(weights.DeletionProbability("det"), (double)determinerRemovals.Count)));
                }
            }
            // Initialize the search
            System.Diagnostics.Debug.Assert(Edu.Stanford.Nlp.Naturalli.Util.IsTree(parseTree));
            IList <IndexedWord> topologicalVertices;

            try
            {
                topologicalVertices = parseTree.TopologicalSort();
            }
            catch (InvalidOperationException)
            {
                //      log.info("Could not topologically sort the vertices! Using left-to-right traversal.");
                topologicalVertices = parseTree.VertexListSorted();
            }
            if (topologicalVertices.IsEmpty())
            {
                return(results);
            }
            Stack <ForwardEntailerSearchProblem.SearchState> fringe = new Stack <ForwardEntailerSearchProblem.SearchState>();

            fringe.Push(new ForwardEntailerSearchProblem.SearchState(new BitSet(256), 0, parseTree, null, null, 1.0));
            // Start the search
            int numTicks = 0;

            while (!fringe.IsEmpty())
            {
                // Overhead with popping a node.
                if (numTicks >= maxTicks)
                {
                    return(results);
                }
                numTicks += 1;
                if (results.Count >= maxResults)
                {
                    return(results);
                }
                ForwardEntailerSearchProblem.SearchState state = fringe.Pop();
                System.Diagnostics.Debug.Assert(state.score > 0.0);
                IndexedWord currentWord = topologicalVertices[state.currentIndex];
                // Push the case where we don't delete
                int nextIndex = state.currentIndex + 1;
                int numIters  = 0;
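                // (advance past words already marked deleted in this state's mask, then push the next surviving index)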
                while (nextIndex < topologicalVertices.Count)
                {
                    IndexedWord nextWord = topologicalVertices[nextIndex];
                    System.Diagnostics.Debug.Assert(nextWord.Index() > 0);
                    if (!state.deletionMask.Get(nextWord.Index() - 1))
                    {
                        fringe.Push(new ForwardEntailerSearchProblem.SearchState(state.deletionMask, nextIndex, state.tree, null, state, state.score));
                        break;
                    }
                    else
                    {
                        nextIndex += 1;
                    }
                    numIters += 1;
                    if (numIters > 10000)
                    {
                        //          log.error("logic error (apparent infinite loop); returning");
                        return(results);
                    }
                }
                // Check if we can delete this subtree
                bool canDelete = !state.tree.GetFirstRoot().Equals(currentWord);
                foreach (SemanticGraphEdge edge in state.tree.IncomingEdgeIterable(currentWord))
                {
                    if ("CD".Equals(edge.GetGovernor().Tag()))
                    {
                        canDelete = false;
                    }
                    else
                    {
                        // Get token information
                        CoreLabel            token = edge.GetDependent().BackingLabel();
                        OperatorSpec         @operator;
                        NaturalLogicRelation lexicalRelation;
                        Polarity             tokenPolarity = token.Get(typeof(NaturalLogicAnnotations.PolarityAnnotation));
                        if (tokenPolarity == null)
                        {
                            tokenPolarity = Polarity.Default;
                        }
                        // Get the relation for this deletion
                        if ((@operator = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation))) != null)
                        {
                            lexicalRelation = @operator.instance.deleteRelation;
                        }
                        else
                        {
                            System.Diagnostics.Debug.Assert(edge.GetDependent().Index() > 0);
                            lexicalRelation = NaturalLogicRelation.ForDependencyDeletion(edge.GetRelation().ToString(), isSubject.Get(edge.GetDependent().Index() - 1));
                        }
                        NaturalLogicRelation projectedRelation = tokenPolarity.ProjectLexicalRelation(lexicalRelation);
                        // Make sure this is a valid entailment
                        if (!projectedRelation.ApplyToTruthValue(truthOfPremise).IsTrue())
                        {
                            canDelete = false;
                        }
                    }
                }
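                // (canDelete now records whether this subtree may be removed: the word must not be the root, its
                //  governor must not be tagged CD, and the deletion relation, projected through the token's
                //  polarity, must preserve the truth of the premise.)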
                if (canDelete)
                {
                    // Register the deletion (the shortened tree and updated deletion mask are built below, once the score is known to be positive)
                    // Compute the score of the sentence
                    double newScore = state.score;
                    foreach (SemanticGraphEdge edge_1 in state.tree.IncomingEdgeIterable(currentWord))
                    {
                        double multiplier = weights.DeletionProbability(edge_1, state.tree.OutgoingEdgeIterable(edge_1.GetGovernor()));
                        System.Diagnostics.Debug.Assert(!double.IsNaN(multiplier));
                        System.Diagnostics.Debug.Assert(!double.IsInfinite(multiplier));
                        newScore *= multiplier;
                    }
                    // Register the result
                    if (newScore > 0.0)
                    {
                        // Build the tree with this subtree removed and mark the removed words in the deletion mask
                        SemanticGraph treeWithDeletions = new SemanticGraph(state.tree);
                        BitSet        newMask           = state.deletionMask;
                        foreach (IndexedWord vertex_2 in state.tree.Descendants(currentWord))
                        {
                            treeWithDeletions.RemoveVertex(vertex_2);
                            newMask.Set(vertex_2.Index() - 1);
                        }
                        SemanticGraph resultTree = new SemanticGraph(treeWithDeletions);
                        // Re-attach any cut conj:and edges whose endpoints both survived the deletion
                        foreach (SemanticGraphEdge and in andsToAdd)
                        {
                            if (resultTree.ContainsVertex(and.GetGovernor()) && resultTree.ContainsVertex(and.GetDependent()))
                            {
                                resultTree.AddEdge(and.GetGovernor(), and.GetDependent(), and.GetRelation(), double.NegativeInfinity, false);
                            }
                        }
                        results.Add(new ForwardEntailerSearchProblem.SearchResult(resultTree, AggregateDeletedEdges(state, state.tree.IncomingEdgeIterable(currentWord), determinerRemovals), newScore));
                        // Push the state with this subtree deleted
                        nextIndex = state.currentIndex + 1;
                        numIters  = 0;
                        while (nextIndex < topologicalVertices.Count)
                        {
                            IndexedWord nextWord = topologicalVertices[nextIndex];
                            if (!newMask.Get(nextWord.Index() - 1))
                            {
                                System.Diagnostics.Debug.Assert(treeWithDeletions.ContainsVertex(topologicalVertices[nextIndex]));
                                fringe.Push(new ForwardEntailerSearchProblem.SearchState(newMask, nextIndex, treeWithDeletions, null, state, newScore));
                                break;
                            }
                            else
                            {
                                nextIndex += 1;
                            }
                            numIters += 1;
                            if (numIters > 10000)
                            {
                                //              log.error("logic error (apparent infinite loop); returning");
                                return(results);
                            }
                        }
                    }
                }
            }
            // Return
            return(results);
        }
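        // The helper below is not part of the library; it is a minimal, self-contained sketch of how
        // SearchImplementation above scores a shortened sentence: each deleted edge contributes a multiplicative
        // deletion probability (the weights.DeletionProbability calls), and removed determiners contribute
        // Math.Pow(detDeletionProbability, count). The parameters are hypothetical placeholders, not values
        // produced by the real model.
        private static double SketchShorteningScore(IEnumerable<double> edgeDeletionProbabilities, double detDeletionProbability, int determinerRemovals)
        {
            double score = 1.0;
            foreach (double p in edgeDeletionProbabilities)
            {
                // each subtree deletion multiplies the running score, as in newScore *= multiplier above
                score *= p;
            }
            // determiner removals are scored as a power of a single "det" deletion probability
            return score * Math.Pow(detDeletionProbability, (double)determinerRemovals);
        }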
        /// <summary>Annotate every token for its polarity, based on the operators found.</summary>
        /// <remarks>
        /// Annotate every token for its polarity, based on the operators found. This function will set the
        /// <see cref="PolarityAnnotation"/>
        /// for every token.
        /// </remarks>
        /// <param name="sentence">
        /// As in
        /// <see cref="DoOneSentence(Edu.Stanford.Nlp.Pipeline.Annotation, Edu.Stanford.Nlp.Util.ICoreMap)"/>
        /// </param>
        private static void AnnotatePolarity(ICoreMap sentence)
        {
            // Collect all the operators in this sentence
            IList <OperatorSpec> operators = new List <OperatorSpec>();
            IList <CoreLabel>    tokens    = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));

            foreach (CoreLabel token in tokens)
            {
                OperatorSpec specOrNull = token.Get(typeof(NaturalLogicAnnotations.OperatorAnnotation));
                if (specOrNull != null)
                {
                    operators.Add(specOrNull);
                }
            }
            // Make sure every node of the dependency tree has a polarity.
            // This is separate from the code below in case the tokens in the dependency
            // tree don't correspond to the tokens in the sentence. This happens at least
            // when the constituency parser craps out on a long sentence, and the
            // dependency tree is put together haphazardly.
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            if (sentence.ContainsKey(typeof(SemanticGraphCoreAnnotations.EnhancedPlusPlusDependenciesAnnotation)))
            {
                foreach (IndexedWord token_1 in sentence.Get(typeof(SemanticGraphCoreAnnotations.EnhancedPlusPlusDependenciesAnnotation)).VertexSet())
                {
                    token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), Polarity.Default);
                }
            }
            // Set polarity for each token
            for (int i = 0; i < tokens.Count; ++i)
            {
                CoreLabel token_1 = tokens[i];
                // Get operators in scope
                List <Triple <int, Monotonicity, MonotonicityType> > inScope = new List <Triple <int, Monotonicity, MonotonicityType> >(4);
                foreach (OperatorSpec @operator in operators)
                {
                    if (i >= @operator.subjectBegin && i < @operator.subjectEnd)
                    {
                        inScope.Add(Triple.MakeTriple(@operator.subjectEnd - @operator.subjectBegin, @operator.instance.subjMono, @operator.instance.subjType));
                    }
                    else
                    {
                        if (i >= @operator.objectBegin && i < @operator.objectEnd)
                        {
                            inScope.Add(Triple.MakeTriple(@operator.objectEnd - @operator.objectBegin, @operator.instance.objMono, @operator.instance.objType));
                        }
                    }
                }
                // Sort the operators by their scope (approximated by the size of their argument span), wider spans first
                inScope.Sort((x, y) => y.first - x.first);
                // Create polarity
                IList <Pair <Monotonicity, MonotonicityType> > info = new List <Pair <Monotonicity, MonotonicityType> >(inScope.Count);
                foreach (Triple <int, Monotonicity, MonotonicityType> term in inScope)
                {
                    info.Add(Pair.MakePair(term.second, term.third));
                }
                Polarity polarity = new Polarity(info);
                // Set polarity
                token_1.Set(typeof(NaturalLogicAnnotations.PolarityAnnotation), polarity);
            }
            // Set the PolarityDirectionAnnotation
            foreach (CoreLabel token_2 in tokens)
            {
                Polarity polarity = token_2.Get(typeof(NaturalLogicAnnotations.PolarityAnnotation));
                if (polarity != null)
                {
                    if (polarity.IsUpwards())
                    {
                        token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "up");
                    }
                    else
                    {
                        if (polarity.IsDownwards())
                        {
                            token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "down");
                        }
                        else
                        {
                            token_2.Set(typeof(NaturalLogicAnnotations.PolarityDirectionAnnotation), "flat");
                        }
                    }
                }
            }
        }
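        // The helper below is not part of the library; it is a minimal, self-contained sketch of the per-token
        // "operators in scope" step in AnnotatePolarity above, with plain tuples standing in for OperatorSpec and
        // Triple. An operator covers a token when the token index falls in the half-open span [begin, end); the
        // covering operators are then ordered by span length, wider spans (outer operators) first, before their
        // monotonicity information is handed to Polarity.
        private static List<(int spanLength, string monotonicity)> SketchOperatorsInScope(int tokenIndex, IEnumerable<(int begin, int end, string monotonicity)> operatorSpans)
        {
            var inScope = new List<(int spanLength, string monotonicity)>();
            foreach (var op in operatorSpans)
            {
                if (tokenIndex >= op.begin && tokenIndex < op.end)
                {
                    // record the span length as a proxy for the operator's scope
                    inScope.Add((op.end - op.begin, op.monotonicity));
                }
            }
            // wider argument spans sort first, mirroring the Sort call above
            inScope.Sort((x, y) => y.spanLength - x.spanLength);
            return inScope;
        }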