Example #1
        public static Vocabulary LoadFromStream(TextModelReader sr)
        {
            var v = new Vocabulary();
            int knownWordCount = 0;
            int sigCount = 0;
            string name = typeof(Vocabulary).FullName;
            int startLvl = 0;

            string line = sr.Read();
            startLvl = sr.NestLevel;
            if (line != name)
            {
                throw new Exception("error in model file!");
            }

            var xsig = sr.ReadOptionUInt64("SIG");
            var xver = sr.ReadOptionUInt64("VER");
            if (xsig != SIG || xver != VER)
            {
                throw new Exception("Signiture or version does not match!");
            }
            knownWordCount = sr.ReadOptionInt("knownWordCount");
            sigCount = sr.ReadOptionInt("sigCount");
            v.vocab = CodeBook32.LoadFromStream(sr);
            v.signitureVocab = CodeBook32.LoadFromStream(sr);

            if (v.vocab.Count != knownWordCount || v.signitureVocab.Count != sigCount)
            {
                throw new Exception("vocab size does not match");
            }

            string closeline = sr.Read();

            if (sr.NestLevel != startLvl || closeline != name)
            {
                throw new Exception("model is not closed!");
            }

            return v;
        }
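
A minimal usage sketch for the loader above. The TextModelReader constructor and the file name are assumptions for illustration; only LoadFromStream itself comes from the project:

    // Hypothetical call site: constructing the reader over a model file is an assumption.
    var reader = new TextModelReader("parser.model");
    Vocabulary vocab = Vocabulary.LoadFromStream(reader);
    // LoadFromStream throws if the header line, SIG/VER options, word counts, or closing tag do not match.
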
Example #2
        private static bool[][] AssignTagConstraints(Vocabulary vocab, TagSet tagSet, string[] words, int[] wids)
        {
            bool[][] allowedTags = new bool[wids.Length][];

            for (int i = 0; i < wids.Length; ++i)
            {
                //allowedTags[i] = new bool[tagSet.PTCount];

                //allowedTags[i][tagSet.GetID(tags[i])] = true;

                //continue;
                if (vocab.IsRareOrUNK(wids[i]))
                {
                    var lemmas = EMorph.EnglishMorph.GetBaseForm(words[i]);

                    if (lemmas == null || lemmas.Count == 0)
                    {
                        continue;
                    }

                    allowedTags[i] = new bool[tagSet.PTCount];

                    if (char.IsUpper(words[i][0]))
                    {
                        allowedTags[i][tagSet.GetID("NNP")] = true;
                        allowedTags[i][tagSet.GetID("NNPS")] = true;
                    }

                    foreach (var lemma in lemmas)
                    {
                        switch (lemma.PoS)
                        {
                            case EMorph.MorphPoS.NN:
                                allowedTags[i][tagSet.GetID("NN")] = true;
                                var w = words[i].ToLower();
                                if (EMorph.EnglishMorph.IsNoChangeNoun(w)
                                    || w.EndsWith("ese") || w.EndsWith("ise"))
                                {
                                    allowedTags[i][tagSet.GetID("NNS")] = true;
                                }
                                break;
                            case EMorph.MorphPoS.NNS:
                                allowedTags[i][tagSet.GetID("NNS")] = true;
                                //allowedTags[i][tagSet.GetID("NN")] = true;
                                break;
                            case EMorph.MorphPoS.JJ:
                                allowedTags[i][tagSet.GetID("JJ")] = true;
                                break;
                            case EMorph.MorphPoS.JJR:
                                allowedTags[i][tagSet.GetID("JJR")] = true;
                                break;
                            case EMorph.MorphPoS.JJS:
                                allowedTags[i][tagSet.GetID("JJS")] = true;
                                break;
                            case EMorph.MorphPoS.RB:
                                allowedTags[i][tagSet.GetID("RB")] = true;
                                break;
                            case EMorph.MorphPoS.RBR:
                                allowedTags[i][tagSet.GetID("RBR")] = true;
                                break;
                            case EMorph.MorphPoS.RBS:
                                allowedTags[i][tagSet.GetID("RBS")] = true;
                                break;
                            case EMorph.MorphPoS.VB:
                                allowedTags[i][tagSet.GetID("VB")] = true;
                                allowedTags[i][tagSet.GetID("VBP")] = true;
                                break;
                            case EMorph.MorphPoS.VBD:
                                allowedTags[i][tagSet.GetID("VBD")] = true;
                                allowedTags[i][tagSet.GetID("VBN")] = true;
                                //allowedTags[i][tagSet.GetID("JJ")] = true;
                                break;
                            case EMorph.MorphPoS.VBG:
                                allowedTags[i][tagSet.GetID("VBG")] = true;
                                //allowedTags[i][tagSet.GetID("JJ")] = true;
                                break;
                            case EMorph.MorphPoS.VBZ:
                                allowedTags[i][tagSet.GetID("VBZ")] = true;
                                break;
                            default:
                                throw new Exception("not recognized morph lemma!");
                        }
                    }

                    //if(!allowedTags[i][tagSet.GetID(tags[i])])
                    //{
                    //    Console.Error.WriteLine("!");
                    //}
                }
            }
            return allowedTags;
        }
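
The returned array is sparse: a row stays null when the word is not rare/unknown or has no morphological analysis, which presumably means "any tag allowed" for that position. A sketch of how a caller might check it (this helper is an assumption, not project code):

    // Assumed consumption pattern: a null row places no constraint on the word's tag.
    static bool IsTagAllowed(bool[][] allowedTags, int position, int tagId)
    {
        return allowedTags[position] == null || allowedTags[position][tagId];
    }
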
Example #3
        private static double ParseGraphs(int nthread,
            List<PhrasalTree> treebank,
            LAPCFGrammar rules,
            Vocabulary vocab,
            TagSet tagSet,
            out int failed)
        {
            double llhd = 0;
            failed = 0;

            int xfail = 0;
            var handle = new object();
            Parallel.For(0, nthread, threadid =>
            {
                int fail = 0;
                double xllhd = 0;
                var parser = new HyperGraphParser(vocab, tagSet, rules);
                for (int i = threadid; i < treebank.Count; i += nthread)
                {
                    try
                    {
                        var graph = parser.BuildHyperGraph(treebank [i]);

                        graph.SumForward();
                        graph.SumBackward();

                        if (double.IsInfinity(graph.RootScore) || double.IsNaN(graph.RootScore))
                        {
                            fail += 1;
                            continue;
                        }
                        xllhd += graph.RootScore;
                    } catch
                    {
                        fail += 1;
                    }

                }

                lock (handle)
                {
                    xfail += fail;
                    llhd += xllhd;
                }
            }
            );
            failed = xfail;
            return llhd;
        }
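
The threading pattern above recurs throughout these examples: each worker takes every nthread-th tree (round-robin), accumulates into thread-local variables, and merges them once under a lock. A stripped-down sketch of the same pattern, with Score and items as placeholders:

    // Round-robin Parallel.For with per-thread accumulators (System.Threading.Tasks).
    double total = 0;
    var gate = new object();
    Parallel.For(0, nthread, threadid =>
    {
        double local = 0;
        for (int i = threadid; i < items.Count; i += nthread)
        {
            local += Score(items[i]);   // placeholder per-item work
        }
        lock (gate) { total += local; } // one merge per thread
    });
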
Example #4
        private static double ParseGraphAndCollect(int nthread,
            List<PhrasalTree> treebank,
            LAPCFGrammar rules,
            Vocabulary vocab,
            TagSet tagSet,
            out int failed)
        {
            double llhd = 0;
            failed = 0;

            int xfail = 0;
            var handle = new object();
            var rulelist = new List<LAPCFGrammar>();
            rulelist.Add(rules);
            while (rulelist.Count < nthread)
            {
                rulelist.Add(rules.CloneWithSharedParameters());
            }
            Parallel.For(0, nthread, threadid =>
            {
                int fail = 0;
                double xllhd = 0;
                var parser = new HyperGraphParser(vocab, tagSet, rulelist [threadid]);
                for (int i = threadid; i < treebank.Count; i += nthread)
                {
                    try
                    {
                        var graph = parser.BuildHyperGraph(treebank [i]);

                        graph.SumForward();
                        graph.SumBackward();

                        if (double.IsInfinity(graph.RootScore) || double.IsNaN(graph.RootScore))
                        {
                            fail += 1;
                            continue;
                        }

                        graph.CollectExpectedCount();
                        xllhd += graph.RootScore;
                    } catch
                    {
                        fail += 1;
                    }

                }

                lock (handle)
                {
                    xfail += fail;
                    llhd += xllhd;
                }
            }
            );

            for (int i = 1; i < rulelist.Count; ++i)
            {
                LAPCFGrammar.ApplyToRules((x, y) => x.Add(y), rules.tposteriorCounts, rulelist [i].tposteriorCounts);
                LAPCFGrammar.ApplyToRules((x, y) => x.Add(y), rules.uposteriorCounts, rulelist [i].uposteriorCounts);
                LAPCFGrammar.ApplyToRules((x, y) => x.Add(y), rules.bposteriorCounts, rulelist [i].bposteriorCounts);
            }
            failed = xfail;
            //Console.Error.WriteLine("fail: {0}\tllhd: {1}", failed, llhd);
            return llhd;
        }
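
Unlike ParseGraphs, each worker here parses with its own grammar obtained from CloneWithSharedParameters, presumably so CollectExpectedCount can accumulate posterior counts without contention; the loop after Parallel.For then folds every clone's tposteriorCounts, uposteriorCounts, and bposteriorCounts back into the original rules object.
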
Example #5
        public static double[][] SubtagExpectedCounts(
            int nthread,
            Vocabulary vocab,
            TagSet tagset,
            //LALexiconBuilder lexicon,
            LAPCFGrammar rules,
            List<PhrasalTree> treebank)
        {
            var parser = new HyperGraphParser (vocab, tagset, rules);

            double[][][] tagExpectsArray = new double[nthread][][];

            for (int tid = 0; tid < nthread; ++tid) {
                tagExpectsArray [tid] = new double[rules.TotalTagCount][];
                var tagExpects = tagExpectsArray [tid];
                for (int i = 0; i < tagExpects.Length; ++i) {
                    tagExpects [i] = new double[rules.GetSubTagCount (i)];
                }
                ArrayHelper.Fill (tagExpects, double.NegativeInfinity);
            }

            Parallel.For (0, nthread, threadid =>
            {
                var tagExpects = tagExpectsArray [threadid];

                for (int treeId = threadid; treeId < treebank.Count; treeId += nthread) {
                    var tree = treebank [treeId];
                    var g = parser.BuildHyperGraph (tree);

                    g.SumForward ();
                    g.SumBackward ();

                    double sentS = g.RootScore;

                    if (double.IsNaN (sentS) || double.IsInfinity (sentS)) {
                        continue;
                    }
                    foreach (var v in g.Vs) {
                        if (v.TYPE == VTYPE.TERMINAL) {
                            continue;
                        }

                        int t = v.tag;

                        for (int st = 0; st < v.subtagCount; ++st) {
                            if (double.IsNaN (v.alpha.v [st]) || double.IsInfinity (v.alpha.v [st])
                                || double.IsNaN (v.beta.v [st]) || double.IsInfinity (v.beta.v [st])
                                || v.alpha.pruned [st] || v.beta.pruned [st]) {
                                continue;
                            }

                            tagExpects [t] [st] = MathHelper.LogAdd (v.alpha.v [st] + v.beta.v [st] - sentS, tagExpects [t] [st]);
                        }
                    }
                }
            }
            );

            var te = tagExpectsArray [0];
            for (int i = 1; i < nthread; ++i) {
                for (int j = 0; j < te.Length; ++j) {
                    for (int k = 0; k < te[j].Length; ++k) {
                        te [j] [k] = MathHelper.LogAdd (te [j] [k], tagExpectsArray [i] [j] [k]);
                    }
                }
            }

            return te;
        }
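
Per-thread expected counts are combined with MathHelper.LogAdd rather than +, since everything is kept in log space (the arrays start at NegativeInfinity, i.e. log 0). Assuming LogAdd is the usual numerically stable two-argument log-sum-exp, it would look roughly like this sketch (the actual MathHelper implementation is not shown in these examples):

    // Assumed semantics of MathHelper.LogAdd: log(exp(a) + exp(b)), computed stably.
    static double LogAdd(double a, double b)
    {
        if (double.IsNegativeInfinity(a)) return b;
        if (double.IsNegativeInfinity(b)) return a;
        double max = Math.Max(a, b);
        return max + Math.Log(Math.Exp(a - max) + Math.Exp(b - max));
    }
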
Example #6
        private static double[][] CollectMergeLoss(int nthread, Vocabulary vocab, TagSet tagset, LAPCFGrammar rules, List<PhrasalTree> treebank, double[][] tagProb)
        {
            double[][][] mlossList = new double[nthread][][];
            for (int tid = 0; tid < nthread; ++tid) {
                double[][] mergeLoss = new double[rules.TotalTagCount][];

                for (int i = 0; i < mergeLoss.Length; ++i) {
                    if (tagProb [i].Length == 1) {
                        continue;
                    }

                    mergeLoss [i] = new double[tagProb [i].Length / 2];
                }

                ArrayHelper.Fill (mergeLoss, 0);

                mlossList [tid] = mergeLoss;
            }

            var parser = new HyperGraphParser (vocab, tagset, rules);

            Parallel.For (0, nthread, threadid =>
            {
                var mergeLoss = mlossList [threadid];
                for (int treeId = threadid; treeId < treebank.Count; treeId += nthread) {
                    var tree = treebank [treeId];

                    var g = parser.BuildHyperGraph (tree);

                    g.SumForward ();
                    g.SumBackward ();

                    double sentS = g.RootScore;

                    if (double.IsNaN (sentS) || double.IsInfinity (sentS)) {
                        continue;
                    }
                    foreach (var v in g.Vs) {
                        if (v.TYPE == VTYPE.TERMINAL) {
                            continue;
                        }

                        int t = v.tag;

                        if (v.subtagCount == 1) {
                            continue;
                        }

                        double[] marginals = new double[v.subtagCount];

                        for (int st = 0; st < v.subtagCount; ++st) {
                            if (!v.alpha.pruned [st]) {
                                marginals [st] = v.alpha.v [st] + v.beta.v [st];
                            }
                        }

                        for (int st = 0; st < v.subtagCount / 2; ++st) {
                            int l = st * 2;
                            int r = st * 2 + 1;
                            if (double.IsNaN (v.alpha.v [l]) || double.IsInfinity (v.alpha.v [l])
                                || double.IsNaN (v.beta.v [l]) || double.IsInfinity (v.beta.v [l])
                                || double.IsNaN (v.alpha.v [r]) || double.IsInfinity (v.alpha.v [r])
                                || double.IsNaN (v.beta.v [r]) || double.IsInfinity (v.beta.v [r])
                                || v.alpha.pruned [l] || v.alpha.pruned [r]) {
                                continue;
                            }

                            double lllhd = marginals [l];
                            double rllhd = marginals [r];

                            double mllhd = MathHelper.LogAdd (tagProb [t] [l] + v.alpha.v [l], tagProb [t] [r] + v.alpha.v [r])
                                + MathHelper.LogAdd (v.beta.v [l], v.beta.v [r]);

                            marginals [l] = mllhd;
                            marginals [r] = double.NegativeInfinity;

                            double xSentScore = MathHelper.LogAdd (marginals);

                            double sentScore = g.RootScore;

                            mergeLoss [t] [st] += sentScore - xSentScore;
                            //MathHelper.LogAdd(xSentScore - sentScore, mergeLoss[t][st]);

                            marginals [l] = lllhd;
                            marginals [r] = rllhd;
                        }
                    }
                }
            }
            );

            var ml = mlossList [0];

            for (int threadid = 1; threadid < mlossList.Length; ++threadid) {
                var xl = mlossList [threadid];
                for (int i = 0; i < ml.Length; ++i) {
                    if (ml [i] == null) {
                        continue;
                    }

                    for (int j = 0; j < ml[i].Length; ++j) {
                        ml [i] [j] += xl [i] [j];
                    }

                }
            }

            return ml;
        }
Example #7
        public static double[][] BuildLexSimple(List<PhrasalTree> treebank, TagSet tagSet, Vocabulary vocab, Random RNG)
        {
            int PTCount = tagSet.PTCount;
            int vocabCount = vocab.VocabSize;
            double[][] tagWordCounts = ArrayHelper.AllocateArray<double>(PTCount, vocabCount);
            double[] tagCounts = new double[PTCount];

            HashSet<string>[] tagTypeSets = new HashSet<string>[PTCount];

            for (int i = 0; i < tagTypeSets.Length; ++i)
            {
                tagTypeSets[i] = new HashSet<string>();
            }

            foreach (var tree in treebank)
            {
                tree.ComputeStartEnd();
                foreach (var node in tree.TreeNodes)
                {
                    if (node.Children.Count == 0)
                    {
                        string word = SimpleTokenizor.ETokenize(node.Lex);
                        string tag = node.Tag;

                        int tagId = tagSet.GetPTID(tag);
                        tagTypeSets[tagId].Add(word);

                        int wordId = vocab.GetId(word, node.Start == 0);

                        double weight = RNG == null ? 1.0 : 1.0 + (RNG.NextDouble() - 0.5) / 100;

                        tagWordCounts[tagId][wordId] += weight;
                        tagCounts[tagId] += weight;
                    }
                }
            }

            double[][] scores = ArrayHelper.AllocateArray<double>(vocabCount, PTCount);

            ArrayHelper.Fill(scores, double.NegativeInfinity);

            for (int word = 0; word < scores.Length; ++word)
            {
                for (int tag = 0; tag < scores[word].Length; ++tag)
                {
                    if (tagWordCounts[tag][word] > 0)
                    {
                        //scores[i][j] = new double[1];
                        //expectedCounts[i][j] = new double[1];
                        scores[word][tag] = (double)Math.Log(tagWordCounts[tag][word] / tagCounts[tag]);
                        //expectedCounts[i][j][0] = double.NegativeInfinity;
                    }
                }
            }

            return scores;
        }
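
The score is simply the log relative frequency P(word | tag) = count(tag, word) / count(tag), optionally jittered by RNG. For example, a word seen 3 times under a tag observed 300 times in total would score log(3 / 300) ≈ -4.61; unseen (tag, word) pairs stay at NegativeInfinity.
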
Example #8
        public static LAPCFGrammar MergeSymbols(double percentage,
            Vocabulary vocab,
            TagSet tagset,
            LAPCFGrammar rules,
            List<PhrasalTree> treebank,
            int nthread)
        {
            rules.InitializeExpectedCounts ();
            double[][] tagProb = SubtagExpectedCounts (nthread, vocab, tagset, rules, treebank);

            bool[] isSplit = new bool[tagProb.Length];

            for (int i = 0; i < tagProb.Length; ++i) {
                if (tagProb [i].Length == 1) {
                    tagProb [i] [0] = 0;
                    isSplit [i] = false;
                } else {
                    isSplit [i] = true;
                    for (int j = 0; j < tagProb[i].Length / 2; ++j) {
                        double z = MathHelper.LogAdd (tagProb [i] [2 * j], tagProb [i] [2 * j + 1]);
                        tagProb [i] [2 * j] -= z;
                        tagProb [i] [2 * j + 1] -= z;
                    }
                }
            }

            double[][] mergeLoss = CollectMergeLoss (nthread, vocab, tagset, rules, treebank, tagProb);

            var mergeCands = new List<MergeHelper> ();
            for (int t = 0; t < mergeLoss.Length; ++t) {
                if (mergeLoss [t] == null) {
                    continue;
                }

                for (int st = 0; st < mergeLoss[t].Length; ++st) {
                    mergeCands.Add (new MergeHelper (t, st, mergeLoss [t] [st]));
                }
            }

            mergeCands.Sort ((a, b) => {
                return a.loss.CompareTo (b.loss); }
            );

            //mergeCands.Reverse();

            int[][] subtagMap;
            bool[][] isMerged;
            int[] newSubTagCounts;

            CreateMergeMapping (rules, mergeCands, out subtagMap, out isMerged, out newSubTagCounts);

            var newRules = MergeRuleTable (rules, tagProb, subtagMap, isMerged, newSubTagCounts);

            newRules.InitializeExpectedCounts ();

            return newRules;
        }
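
The pairwise loop rescales each split pair so that tagProb[i][2j] and tagProb[i][2j + 1] hold the log probability of each subtag within its merge pair (they sum to 1 after exponentiation). Merge candidates are then sorted by ascending loss, so the least damaging merges come first; which of them actually get merged is decided inside CreateMergeMapping, which is not shown in these examples.
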
Example #9
        public static double[][] BuildLex(List<PhrasalTree> treebank, TagSet tagSet, Vocabulary vocab)
        {
            int PTCount = tagSet.PTCount;
            int vocabCount = vocab.VocabSize;
            double[][] tagWordCounts = ArrayHelper.AllocateArray<double> (PTCount, vocabCount);
            double[][] wordTagCounts = ArrayHelper.AllocateArray<double> (vocabCount, PTCount);
            double[] tagCounts = new double[PTCount];
            double[] wordCounts = new double[vocabCount];

            HashSet<string>[] tagTypeSets = new HashSet<string>[PTCount];

            for (int i = 0; i < tagTypeSets.Length; ++i) {
                tagTypeSets [i] = new HashSet<string> ();
            }

            foreach (var tree in treebank) {
                tree.ComputeStartEnd ();
                foreach (var node in tree.TreeNodes) {
                    if (node.Children.Count == 0) {
                        string word = SimpleTokenizor.ETokenize (node.Lex);
                        string tag = node.Tag;

                        int tagId = tagSet.GetPTID (tag);
                        tagTypeSets [tagId].Add (word);

                        int wordId = vocab.GetId (word, node.Start == 0);

                        tagWordCounts [tagId] [wordId] += 1.0f;
                        wordTagCounts [wordId] [tagId] += 1.0f;

                        tagCounts [tagId] += 1.0f;
                        wordCounts [wordId] += 1.0f;
                    }
                }
            }

            double[] typeTagCount = new double[PTCount];

            for (int i = 0; i < typeTagCount.Length; ++i) {
                typeTagCount [i] = tagTypeSets [i].Count;
            }

            // for smoothing
            for (int wordId = 0; wordId < wordTagCounts.Length; ++wordId) {
                var wt = wordTagCounts [wordId];
                double wc = wordCounts [wordId];

                //bool isRare = vocab.IsRareOrUNK (wordId);

                //if (isRare) {
                //    for (int tid = 0; tid < wt.Length; ++tid) {
                //        if (wt [tid] > 0 || typeTagCount [tid] >= openTagClassThr) {
                //            wt [tid] += addXSmoothing;
                //            wc += addXSmoothing;
                //        }
                //    }
                //}

                for (int i = 0; i < wt.Length; ++i) {
                    wt [i] /= wc;
                }
            }

            double totalwc = MathHelper.Sum (wordCounts);

            for (int i = 0; i < wordCounts.Length; ++i) {
                wordCounts [i] /= totalwc;
            }

            double totaltc = MathHelper.Sum (tagCounts);

            for (int i = 0; i < tagCounts.Length; ++i) {
                tagCounts [i] /= totaltc;
            }

            for (int tagId = 0; tagId < tagCounts.Length; ++tagId) {
                for (int wordId = 0; wordId < wordCounts.Length; ++wordId) {
                    tagWordCounts [tagId] [wordId] = wordTagCounts [wordId] [tagId] * wordCounts [wordId] / tagCounts [tagId];
                }
            }

            double[][] scores = ArrayHelper.AllocateArray<double> (vocabCount, PTCount);

            ArrayHelper.Fill (scores, double.NegativeInfinity);

            for (int word = 0; word < scores.Length; ++word) {
                for (int tag = 0; tag < scores[word].Length; ++tag) {
                    if (tagWordCounts [tag] [word] > 0) {
                        //scores[i][j] = new double[1];
                        //expectedCounts[i][j] = new double[1];
                        scores [word] [tag] = (double)Math.Log (tagWordCounts [tag] [word]);
                        //expectedCounts[i][j][0] = double.NegativeInfinity;
                    }
                }
            }

            return scores;
        }
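
BuildLex first estimates P(tag | word) by relative frequency per word, then inverts it with the word and tag priors: tagWordCounts[tag][word] = P(tag | word) * P(word) / P(tag), i.e. Bayes' rule, giving the P(word | tag) scores the lexicon needs. As a purely illustrative check: with P(NN | dog) = 0.9, P(dog) = 0.001, and P(NN) = 0.15, this yields P(dog | NN) = 0.9 * 0.001 / 0.15 = 0.006.
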
Example #10
        public static void Build(
            Vocabulary vocab,
            TagSet tagset,
            List<PhrasalTree> treebank,
            out LAPCFGrammar rules,
            Random RNG = null)
        {
            int tagCount = tagset.NTCount + tagset.PTCount;

            double[] pmass = new double[tagset.NTCount + tagset.PTCount];

            double[][] unaries = ArrayHelper.AllocateArray<double> (tagCount, tagCount);

            double[][][] binaries = ArrayHelper.AllocateArray<double> (tagCount, tagCount, tagCount);

            foreach (var tree in treebank) {
                foreach (var node in tree.TreeNodes) {
                    if (node.Children.Count == 0) {
                        // terminals
                        continue;
                    } else if (node.Children.Count == 1) {
                        int pt = tagset.GetID (node.Tag);
                        int ct = tagset.GetID (node.Children [0].Tag);

                        pmass [pt] += 1.0f;
                        unaries [ct] [pt] += 1.0f;
                    } else if (node.Children.Count == 2) {
                        int pt = tagset.GetID (node.Tag);
                        int lt = tagset.GetID (node.Children [0].Tag);
                        int rt = tagset.GetID (node.Children [1].Tag);

                        pmass [pt] += 1.0f;
                        binaries [lt] [rt] [pt] += 1.0f;
                    } else {
                        throw new Exception ("tree node with more than 2 children!");
                    }
                }
            }

            for (int c = 0; c < unaries.Length; ++c)
            {
                bool csurvive = false;
                for (int p = 0; p < unaries[c].Length; ++p)
                {
                    if (unaries[c][p] > 0)
                    {
                        csurvive = true;
                        break;
                    }
                }

                if (!csurvive)
                {
                    unaries[c] = null;
                }
            }

            for (int l = 0; l < binaries.Length; ++l)
            {
                bool lsurvive = false;
                for (int r = 0; r < binaries[l].Length; ++r)
                {
                    bool rsurvive = false;

                    for (int p = 0; p < binaries[l][r].Length; ++p)
                    {
                        if (binaries[l][r][p] > 0)
                        {
                            rsurvive = true;
                            break;
                        }
                    }

                    if (rsurvive)
                    {
                        lsurvive = true;
                    }
                    else
                    {
                        binaries[l][r] = null;
                    }
                }

                if (!lsurvive)
                {
                    binaries[l] = null;
                }
            }

            foreach (var x in unaries.Where(x => x != null))
            {
                for (int p = 0; p < x.Length; ++p)
                {
                    // RNG may be null (the parameter defaults to null); treat that as "no noise",
                    // matching how BuildLexSimple handles it.
                    double noise = RNG == null ? 0.0 : RNG.NextDouble();
                    x[p] += noise;
                    pmass[p] += noise;
                }
            }

            foreach (var x in binaries.Where(x => x != null))
            {
                foreach (var y in x.Where(y => y != null))
                {
                    for (int p = 0; p < y.Length; ++p)
                    {
                        double noise = RNG == null ? 0.0 : RNG.NextDouble();
                        y[p] += noise;
                        pmass[p] += noise;
                    }
                }
            }

            for (int c = 0; c < tagCount; ++c) {
                for (int p = 0; p < tagCount; ++p) {
                    if (pmass [p] == 0) {
                        continue;
                    }
                    if (unaries[c] == null)
                    {
                        continue;
                    }
                    unaries [c] [p] /= pmass [p];
                }
            }

            for (int c = 0; c < tagCount; ++c) {
                if (unaries [c] == null) {
                    continue;
                }
                for (int p = 0; p < tagCount; ++p) {
                    if (unaries [c] [p] <= 0) {
                        unaries [c] [p] = double.NegativeInfinity;
                    } else {
                        unaries [c] [p] = (double)Math.Log (unaries [c] [p]);
                    }
                }
            }

            for (int l = 0; l < tagCount; ++l) {
                if (binaries[l] == null)
                {
                    continue;
                }
                for (int r = 0; r < tagCount; ++r) {
                    for (int p = 0; p < tagCount; ++p) {
                        if (pmass [p] == 0) {
                            continue;
                        }

                        if (binaries[l][r] == null)
                        {
                            continue;
                        }

                        binaries [l] [r] [p] /= pmass [p];
                    }
                }
            }

            for (int l = 0; l < tagCount; ++l) {
                if (binaries [l] == null) {
                    continue;
                }
                for (int r = 0; r < tagCount; ++r) {
                    if (binaries [l] [r] == null) {
                        continue;
                    }

                    for (int p = 0; p < tagCount; ++p) {

                        if (binaries [l] [r] [p] <= 0) {
                            binaries [l] [r] [p] = double.NegativeInfinity;
                        } else {
                            binaries [l] [r] [p] = (double)Math.Log (binaries [l] [r] [p]);
                        }
                    }

                }
            }

            var terminals = BuildLexSimple (treebank, tagset, vocab, RNG);

            rules = new LAPCFGrammar (tagset, binaries, unaries, terminals);
        }
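
The grammar estimate is a plain relative frequency: each unary count unaries[c][p] and binary count binaries[l][r][p] is divided by pmass[p] (how often parent p was observed) and then logged, with never-observed combinations left as null rows or NegativeInfinity. The optional RNG noise presumably just perturbs the counts slightly so that later subtag splits do not start from perfectly symmetric parameters.
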
Example #11
 public HyperGraphParser(
     Vocabulary vocab,
     TagSet tagset,
     LAPCFGrammar rules)
 {
     this.vocab = vocab;
     this.tagset = tagset;
     this.rules = rules;
 }
Example #12
        public void DumpToStream(TextModelWriter sw, TagSet tagSet, Vocabulary vocab)
        {
            var name = typeof(LAPCFGrammar).FullName;

            sw.Write(name);
            sw.WriteOption("VER", VER);
            sw.WriteOption("NTCount", NTCount);
            sw.WriteOption("PTCount", PTCount);
            sw.WriteOption("ROOTID", ROOTID);
            sw.Write("TerminalRule");
            sw.NestLevel += 1;
            foreach (var x in trules)
            {
                if (x != null)
                {
                    foreach (var y in x)
                    {
                        if (y != null)
                        {
                            var word = vocab.GetWordString(y.word);
                            var tag = tagSet.GetTagString(y.tag);
                            for (int p = 0; p < y.scores.Length; ++p)
                            {
                                if (!double.IsInfinity(y.scores [p]) && !double.IsNaN(y.scores [p]))
                                {
                                    sw.Write(string.Format("{0}_{1}\t{2}\t{3}", tag, p, word, y.scores [p]));
                                }
                            }
                        }
                    }
                }
            }
            sw.NestLevel -= 1;
            sw.Write("UnaryRule");
            sw.NestLevel += 1;
            foreach (var x in urules)
            {
                if (x != null)
                {
                    foreach (var y in x)
                    {
                        if (y != null)
                        {
                            var ptag = tagSet.GetTagString(y.ptag);
                            var ctag = tagSet.GetTagString(y.ctag);
                            for (int c = 0; c < y.scores.Length; ++c)
                            {
                                for (int p = 0; p < y.scores[c].Length; ++p)
                                {
                                    if (!double.IsInfinity(y.scores [c] [p]) && !double.IsNaN(y.scores [c] [p]))
                                    {
                                        sw.Write(string.Format("{0}_{1}\t{2}_{3}\t{4}", ptag, p, ctag, c, y.scores [c] [p]));
                                    }
                                }
                            }
                        }
                    }
                }
            }
            sw.NestLevel -= 1;
            sw.Write("BinaryRule");
            sw.NestLevel += 1;
            foreach (var x in brules)
            {
                if (x != null)
                {
                    foreach (var y in x)
                    {
                        if (y != null)
                        {
                            foreach (var z in y)
                            {
                                if (z != null)
                                {
                                    var ptag = tagSet.GetTagString(z.ptag);
                                    var ltag = tagSet.GetTagString(z.ltag);
                                    var rtag = tagSet.GetTagString(z.rtag);
                                    for (int l = 0; l < z.scores.Length; ++l)
                                    {
                                        for (int r = 0; r < z.scores[l].Length; ++r)
                                        {
                                            for (int p = 0; p < z.scores[l][r].Length; ++p)
                                            {
                                                if (!double.IsInfinity(z.scores [l] [r] [p]) && !double.IsNaN(z.scores [l] [r] [p]))
                                                {
                                                    sw.Write(
                                                        string.Format("{0}_{1}\t{2}_{3}\t{4}_{5}\t{6}",
                                                        ptag, p, ltag, l, rtag, r, z.scores [l] [r] [p])
                                                    );
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            sw.NestLevel -= 1;

            sw.WriteOption("TraceCount", subtagTraces.Count);
            foreach (var trace in subtagTraces)
            {
                sw.WriteOption("TRACE", trace.Length);
                sw.NestLevel += 1;
                foreach (var t in trace)
                {
                    sw.Write(string.Join(" ", t));
                }
                sw.NestLevel -= 1;
            }

            sw.Write(name);
        }
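
Going by the Format strings above, the dumped model lists one rule per line under its section header: a terminal rule as "NN_0\tdog\t-2.41", a unary rule as "S_0\tVP_1\t-0.69" (parent_subtag, child_subtag, score), and a binary rule as "NP_0\tDT_0\tNN_1\t-1.20" (parent, left, right, score). Tag names and scores here are illustrative; non-finite scores are skipped.
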
Example #13
        public static LAPCFGrammar LoadFromStream(TextModelReader sr, Vocabulary vocab, TagSet tagSet)
        {
            var grammar = new LAPCFGrammar();
            var name = typeof(LAPCFGrammar).FullName;

            sr.Require(name);
            sr.Require("VER", VER);

            grammar.NTCount = sr.ReadOptionInt("NTCount");
            grammar.PTCount = sr.ReadOptionInt("PTCount");
            grammar.ROOTID = sr.ReadOptionInt("ROOTID");

            sr.Require("TerminalRule");

            int lvl = sr.NestLevel;
            var truleStrings = new HashSet<string>();
            var uruleStrings = new HashSet<string>();
            var bruleStrings = new HashSet<string>();

            string line = sr.Read();
            while (sr.NestLevel > lvl)
            {
                truleStrings.Add(line);
                line = sr.Read();
            }

            if (line != "UnaryRule")
            {
                throw new Exception("wrong model!");
            }
            line = sr.Read();
            while (sr.NestLevel > lvl)
            {
                uruleStrings.Add(line);
                line = sr.Read();
            }

            if (line != "BinaryRule")
            {
                throw new Exception("wrong model!");
            }
            line = sr.Read();
            while (sr.NestLevel > lvl)
            {
                bruleStrings.Add(line);
                line = sr.Read();
            }

            string[] parts = line.Split('\t');

            if (parts [0] != "TraceCount")
            {
                throw new Exception("error in model");
            }

            int subtraceCount = int.Parse(parts [1]);

            grammar.subtagTraces = new List<int[][]>();

            for (int i = 0; i < subtraceCount; ++i)
            {
                int tlen = sr.ReadOptionInt("TRACE");
                int[][] trace = new int[tlen][];

                for (int j = 0; j < tlen; ++j)
                {
                    trace [j] = sr.ReadIntArray();
                }

                grammar.subtagTraces.Add(trace);
            }

            if (grammar.subtagTraces.Count == 0)
            {
                grammar.subTagCounts = new int[grammar.TotalTagCount];
                ArrayHelper.Fill(grammar.subTagCounts, 1);
            } else
            {
                var trace = grammar.subtagTraces [grammar.subtagTraces.Count - 1];
                grammar.subTagCounts = trace.Select(x => x.Length).ToArray();
            }

            sr.Require(name);

            foreach (var str in uruleStrings)
            {
                grammar.BuildUnaryRule(str, tagSet);
            }

            foreach (var str in truleStrings)
            {
                grammar.BuildTerminalRule(str, vocab, tagSet);
            }

            foreach (var str in bruleStrings)
            {
                grammar.BuildBinaryRule(str, tagSet);
            }

            return grammar;
        }
Example #14
        private void BuildTerminalRule(string ruleString, Vocabulary vocab, TagSet tagSet)
        {
            string[] parts = ruleString.Split(new string[] { "\t", "_" }, StringSplitOptions.RemoveEmptyEntries);

            int tag = tagSet.GetID(parts [0]);
            int subtag = int.Parse(parts [1]);
            int word = vocab.GetId(parts [2]);
            double s = double.Parse(parts [3]);

            if (trules == null)
            {
                trules = new TerminalRule[vocab.VocabSize][];
            }

            if (trules [word] == null)
            {
                trules [word] = new TerminalRule[tagSet.PTCount];
            }

            if (trules [word] [tag] == null)
            {
                trules [word] [tag] = new TerminalRule(new double[subTagCounts [tag]], tag, word);
                trules [word] [tag].ClearScore();
            }

            trules [word] [tag].scores [subtag] = s;
        }
Example #15
        public LAPCFGrammar(TagSet tagset, Vocabulary vocab, string lexfile, string rulefile)
        {
            NTCount = tagset.NTCount;
            PTCount = tagset.PTCount;
            ROOTID = tagset.ROOTID;

            subTagCounts = new int[TotalTagCount];
            ArrayHelper.Fill(subTagCounts, 2);

            subTagCounts[ROOTID] = 1;

            var trace = subTagCounts.Select(x => new int[x]).ToArray();

            subtagTraces.Add(trace);

            using (var sr = new System.IO.StreamReader(lexfile))
            {
                while (!sr.EndOfStream)
                {
                    string line = sr.ReadLine();
                    if (string.IsNullOrWhiteSpace(line))
                    {
                        continue;
                    }
                    string[] parts = line.Split(new string[] { " ", "\t" }, StringSplitOptions.RemoveEmptyEntries);

                    string tag = parts[0];
                    string word = parts[1];

                    for (int i = 0; i < parts.Length - 2; ++i)
                    {
                        double p = double.Parse(parts[i + 2]);
                        if (p <= 0)
                        {
                            continue;
                        }

                        string pline = string.Format("{0}_{1}\t{2}\t{3}", tag, i, word, Math.Log(p));

                        BuildTerminalRule(pline, vocab, tagset);
                    }
                }
            }

            using (var sr = new System.IO.StreamReader(rulefile))
            {
                while (!sr.EndOfStream)
                {
                    string line = sr.ReadLine();

                    if (string.IsNullOrWhiteSpace(line))
                    {
                        continue;
                    }

                    string[] parts = line.Split(new string[] { " ", "\t", "->" }, StringSplitOptions.RemoveEmptyEntries);

                    int xlen = parts.Length - 1;

                    parts[xlen] = Math.Log(double.Parse(parts[xlen])).ToString();

                    string pline = string.Join("\t", parts);

                    if (parts.Length == 3)
                    {
                        BuildUnaryRule(pline, tagset);
                    }
                    else
                    {
                        BuildBinaryRule(pline, tagset);
                    }

                }
            }
        }
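
Judging from the parsing above, a lexfile line looks like "NN\tdog\t0.0007\t0.0005" (tag, word, then one probability per subtag), and a rulefile line looks like "VP -> VBZ 0.02" for a unary rule or "NP -> DT NN 0.43" for a binary rule, with plain probabilities that the constructor converts to log scores before handing them to BuildTerminalRule, BuildUnaryRule, or BuildBinaryRule. All symbols and numbers here are illustrative.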