Example #1
 public model(model m, bool wCopy)
 {
     _nTag = m.NTag;
     _w    = new float[m.W.Length];//stays all-zero when wCopy is false
     if (wCopy)
     {
         m.W.CopyTo(_w, 0);
     }
 }
Example #2
        //return the gradient of -log{P(y*|x,w)} as follows: E_{P(y|x)}(F(x,y)) - F(x,y*)
        virtual public double getGrad(List <double> vecGrad, model m, dataSeq x, baseHashSet <int> idSet)
        {
            if (idSet != null)
            {
                idSet.Clear();
            }
            int nTag = m.NTag;
            //compute beliefs
            belief bel       = new belief(x.Count, nTag);
            belief belMasked = new belief(x.Count, nTag);

            _inf.getBeliefs(bel, m, x, false);
            _inf.getBeliefs(belMasked, m, x, true);
            double ZGold = belMasked.Z;
            double Z     = bel.Z;

            List <featureTemp> fList;

            for (int i = 0; i < x.Count; i++)
            {
                fList = _fGene.getFeatureTemp(x, i);
                for (int j = 0; j < fList.Count; j++)
                {
                    featureTemp im = fList[j];
                    int         id = im.id;
                    double      v  = im.val;
                    for (int s = 0; s < nTag; s++)
                    {
                        int f = _fGene.getNodeFeatID(id, s);
                        if (idSet != null)
                        {
                            idSet.Add(f);
                        }
                        vecGrad[f] += bel.belState[i][s] * v;
                        vecGrad[f] -= belMasked.belState[i][s] * v;
                    }
                }
            }

            for (int i = 1; i < x.Count; i++)
            {
                for (int s = 0; s < nTag; s++)
                {
                    for (int sPre = 0; sPre < nTag; sPre++)
                    {
                        int f = _fGene.getEdgeFeatID(sPre, s);
                        if (idSet != null)
                        {
                            idSet.Add(f);
                        }
                        vecGrad[f] += bel.belEdge[i][sPre, s];
                        vecGrad[f] -= belMasked.belEdge[i][sPre, s];
                    }
                }
            }
            return(Z - ZGold);
        }
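Note: reading the code, Z and ZGold are the log partition values of the unconstrained and gold-constrained lattices, so the return value is the negative log-likelihood: -log{P(y*|x,w)} = log Z(x) - log Z_gold(x) = Z - ZGold. Its gradient with respect to each weight w_f is E_{P(y|x)}(F_f(x,y)) - F_f(x,y*), which is exactly the bel-minus-belMasked belief difference accumulated into vecGrad above.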
Example #3
 public model(model m, bool wCopy)
 {
     _nTag = m.NTag;
     if (wCopy)
     {
         _w = new List <double>(m.W);
     }
     else
     {
         _w = new List <double>(new double[m.W.Count]);
     }
 }
Example #4
        //fast viterbi decode without probability
        public void decodeViterbi_train(model m, dataSeq x, List <dMatrix> YYlist, List <List <double> > Ylist, List <int> tags)
        {
            int     nNode  = x.Count;
            int     nState = m.NState;
            Viterbi viter  = new Viterbi(nNode, nState);

            for (int i = 0; i < nNode; i++)
            {
                viter.setScores(i, Ylist[i], YYlist[i]);
            }

            viter.runViterbi(ref tags, false);//the returned path score is not needed here
        }
Example #5
        //the scalar version
        virtual public void getLogYY(double scalar, model m, dataSeq x, int i, ref dMatrix YY, ref List <double> Y, bool takeExp, bool mask)
        {
            YY.set(0);
            listTool.listSet(ref Y, 0);

            List <double>      w     = m.W;
            List <featureTemp> fList = _fGene.getFeatureTemp(x, i);
            int nTag = m.NTag;

            for (int j = 0; j < fList.Count; j++)
            {
                featureTemp ptr = fList[j];
                int         id  = ptr.id;
                double      v   = ptr.val;
                for (int s = 0; s < nTag; s++)
                {
                    int f = _fGene.getNodeFeatID(id, s);
                    Y[s] += w[f] * scalar * v;
                }
            }
            if (i > 0)
            {
                for (int s = 0; s < nTag; s++)
                {
                    for (int sPre = 0; sPre < nTag; sPre++)
                    {
                        int f = _fGene.getEdgeFeatID(sPre, s);
                        YY[sPre, s] += w[f] * scalar;
                    }
                }
            }
            double maskValue = double.MinValue;

            if (takeExp)
            {
                listTool.listExp(ref Y);
                YY.eltExp();
                maskValue = 0;
            }
            if (mask)
            {
                List <int> tagList = x.getTags();
                for (int s = 0; s < Y.Count; s++)
                {
                    if (tagList[i] != s)
                    {
                        Y[s] = maskValue;
                    }
                }
            }
        }
Example #6
        override public void getLogYY(model m, dataSeq x, int i, ref dMatrix YY, ref List <double> Y, bool takeExp, bool mask)
        {
            YY.set(0);
            listTool.listSet(ref Y, 0);

            float[]            w     = m.W;
            List <featureTemp> fList = _fGene.getFeatureTemp(x, i);
            int nState = m.NState;

            foreach (featureTemp ft in fList)
            {
                for (int s = 0; s < nState; s++)
                {
                    int f = _fGene.getNodeFeatID(ft.id, s);
                    Y[s] += w[f] * ft.val;
                }
            }
            if (i > 0)
            {
                foreach (featureTemp im in fList)
                {
                    for (int s = 0; s < nState; s++)
                    {
                        for (int sPre = 0; sPre < nState; sPre++)
                        {
                            int f = _fGene.getEdgeFeatID(im.id, sPre, s);
                            YY[sPre, s] += w[f] * im.val;
                        }
                    }
                }
            }
            double maskValue = double.MinValue;

            if (takeExp)
            {
                listTool.listExp(ref Y);
                YY.eltExp();
                maskValue = 0;
            }
            if (mask)
            {
                dMatrix statesPerNodes = m.getStatesPerNode(x);
                for (int s = 0; s < Y.Count; s++)
                {
                    if (statesPerNodes[i, s] == 0)
                    {
                        Y[s] = maskValue;
                    }
                }
            }
        }
Example #7
        //for test
        public featureGenerator(dataSet X, model m)
        {
            _nState       = m.NState;
            _nFeatureTemp = X.NFeatureTemp;
            Global.swLog.WriteLine("feature templates: {0}", _nFeatureTemp);

            _nTag = X.NTag;
            int nNodeFeature = _nFeatureTemp * _nState;
            int nEdgeFeature = _nState * _nState;

            _backoffEdge      = nNodeFeature;
            _nCompleteFeature = nNodeFeature + nEdgeFeature;
            Global.swLog.WriteLine("complete features: {0}", _nCompleteFeature);
        }
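The counts above imply a flat feature-ID layout: the _nFeatureTemp * _nState node features come first and the _nState * _nState edge features start at _backoffEdge. The actual getNodeFeatID/getEdgeFeatID are not shown in this snippet, so the following is only a sketch consistent with that layout, not the toolkit's real code:

        //hypothetical ID mapping implied by the constructor above
        public int getNodeFeatID(int id, int s)
        {
            return(id * _nState + s);//in [0, _nFeatureTemp * _nState)
        }

        public int getEdgeFeatID(int sPre, int s)
        {
            return(_backoffEdge + sPre * _nState + s);//in [_backoffEdge, _nCompleteFeature)
        }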
Example #8
        public double getGrad_SGD(List <double> g, model m, dataSeq x, baseHashSet <int> idset)
        {
            if (idset != null)
            {
                idset.Clear();
            }

            if (x == null)
            {
                return(0);
            }

            return(getGradCRF(g, m, x, idset));
        }
Example #9
        public void getYYandY(model m, dataSeq x, List <dMatrix> YYlist, List <List <double> > Ylist, List <dMatrix> maskYYlist, List <List <double> > maskYlist)
        {
            int nNodes = x.Count;
            //int nTag = m.NTag;
            int nState = m.NState;

            double[] dAry = new double[nState];
            bool     mask = false;

            try
            {
                //Global.rwlock.AcquireReaderLock(Global.readWaitTime);

                for (int i = 0; i < nNodes; i++)
                {
                    dMatrix       YYi = new dMatrix(nState, nState);
                    List <double> Yi  = new List <double>(dAry);
                    //compute the Mi matrix
                    getLogYY(m, x, i, ref YYi, ref Yi, false, mask);
                    YYlist.Add(YYi);
                    Ylist.Add(Yi);

                    maskYYlist.Add(new dMatrix(YYi));
                    maskYlist.Add(new List <double>(Yi));
                }

                //Global.rwlock.ReleaseReaderLock();
            }
            catch (ApplicationException)
            {
                Console.WriteLine("read out time!");
            }

            //get the masked YY and Y
            double  maskValue      = double.MinValue;
            dMatrix statesPerNodes = m.getStatesPerNode(x);

            for (int i = 0; i < nNodes; i++)
            {
                List <double> Y = maskYlist[i];
                for (int s = 0; s < Y.Count; s++)
                {
                    if (statesPerNodes[i, s] == 0)
                    {
                        Y[s] = maskValue;
                    }
                }
            }
        }
Example #10
        //the scalar version
        virtual public double getGradCRF(List <double> vecGrad, double scalar, model m, dataSeq x, baseHashSet <int> idSet)
        {
            idSet.Clear();
            int nTag = m.NTag;
            //compute beliefs
            belief bel       = new belief(x.Count, nTag);
            belief belMasked = new belief(x.Count, nTag);

            _inf.getBeliefs(bel, m, x, scalar, false);
            _inf.getBeliefs(belMasked, m, x, scalar, true);
            double ZGold = belMasked.Z;
            double Z     = bel.Z;

            List <featureTemp> fList;

            //Loop over nodes to compute features and update the gradient
            for (int i = 0; i < x.Count; i++)
            {
                fList = _fGene.getFeatureTemp(x, i);
                foreach (featureTemp im in fList)
                {
                    for (int s = 0; s < nTag; s++)
                    {
                        int f = _fGene.getNodeFeatID(im.id, s);
                        idSet.Add(f);

                        vecGrad[f] += bel.belState[i][s] * im.val;
                        vecGrad[f] -= belMasked.belState[i][s] * im.val;
                    }
                }
            }

            //Loop over edges to compute features and update the gradient
            for (int i = 1; i < x.Count; i++)
            {
                for (int s = 0; s < nTag; s++)
                {
                    for (int sPre = 0; sPre < nTag; sPre++)
                    {
                        int f = _fGene.getEdgeFeatID(sPre, s);
                        idSet.Add(f);

                        vecGrad[f] += bel.belEdge[i][sPre, s];
                        vecGrad[f] -= belMasked.belEdge[i][sPre, s];
                    }
                }
            }
            return(Z - ZGold);//-log{P(y*|x,w)}
        }
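Because every touched feature ID is recorded in idSet, a caller can apply a sparse SGD update instead of scanning the full weight vector. A minimal sketch of such a caller, assuming a learning rate lr and a List<double> weight vector (both assumptions, not the toolkit's actual optimizer):

        //hypothetical sparse SGD step over only the touched dimensions
        baseHashSet <int> idSet = new baseHashSet <int>();
        List <double>     grad  = new List <double>(new double[m.W.Count]);
        double            nll   = getGradCRF(grad, 1.0, m, x, idSet);
        foreach (int f in idSet)
        {
            m.W[f] -= lr * grad[f];//update only where the gradient is nonzero
            grad[f] = 0;           //reset so grad can be reused for the next sample
        }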
Example #11
        virtual public void getLogYY(model m, dataSeq x, int i, ref dMatrix YY, ref List <double> Y, bool takeExp, bool mask)
        {
            YY.set(0);
            listTool.listSet(ref Y, 0);

            float[]            w     = m.W;
            List <featureTemp> fList = _fGene.getFeatureTemp(x, i);
            int nTag = m.NTag;

            foreach (featureTemp ft in fList)
            {
                for (int s = 0; s < nTag; s++)
                {
                    int f = _fGene.getNodeFeatID(ft.id, s);
                    Y[s] += w[f] * ft.val;
                }
            }
            if (i > 0)
            {
                for (int s = 0; s < nTag; s++)
                {
                    for (int sPre = 0; sPre < nTag; sPre++)
                    {
                        int f = _fGene.getEdgeFeatID(sPre, s);
                        YY[sPre, s] += w[f];
                    }
                }
            }
            double maskValue = double.MinValue;

            if (takeExp)
            {
                listTool.listExp(ref Y);
                YY.eltExp();
                maskValue = 0;
            }
            if (mask)
            {
                List <int> tagList = x.getTags();
                for (int s = 0; s < Y.Count; s++)
                {
                    if (tagList[i] != s)
                    {
                        Y[s] = maskValue;
                    }
                }
            }
        }
Example #12
        public double getNBest(model m, inference inf, dataSeq x, int N, ref List <List <int> > nBestTaggings, ref List <double> scores)
        {
            nBestTaggings.Clear();
            _w       = x.Count;
            _h       = m.NTag;
            _lattice = new Lattice(m, inf, x);
            setStartAndGoal(-1, 0, _w, 0);//a virtual begin node & a virtual end node

            for (int n = 0; n < N; n++)
            {
                List <int> tagging  = new List <int>();
                double     logNumer = searchForPath(ref tagging);
                if (logNumer == -2)//search failed
                {
                    break;
                }

                nBestTaggings.Add(tagging);
                scores.Add(logNumer);//log numerator

                double check = Math.Exp(scores[0] - scores[n]);
                if (check >= Global.stopSearchFactor)//stop once the best tagging is stopSearchFactor times more probable
                {
                    break;
                }
            }

            double Z = logSum(scores);

            listTool.listAdd(ref scores, -Z);
            listTool.listExp(ref scores);//prob
            //error
            double error = Z - _lattice.ZGold;


            //update the profiler
            Global.nbestCount += scores.Count;
            Global.nbestNorm++;
            int small = scores.Count < 10 ? scores.Count : 10;

            for (int i = 0; i < small; i++)
            {
                Global.nbestProbList[i] += scores[i];
            }

            return(error);
        }
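logSum must turn the collected log numerators into a log normalizer, so that listAdd(-Z) followed by listExp yields probabilities. A standard log-sum-exp sketch, assuming that is its semantics:

        //log-sum-exp: log(sum_i exp(a[i])), computed without overflow
        static double logSum(List <double> a)
        {
            double mx = double.NegativeInfinity;
            foreach (double v in a)
            {
                if (v > mx)
                {
                    mx = v;
                }
            }
            double sum = 0;
            foreach (double v in a)
            {
                sum += Math.Exp(v - mx);
            }
            return(mx + Math.Log(sum));
        }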
Example #13
        //string accuracy
        public List <double> decode_strAcc(dataSet X, model m, double iter)
        {
            double xsize = X.Count;
            double corr  = 0;

            //multi thread
            List <dataSeqTest> X2 = new List <dataSeqTest>();

            multiThreading(X, X2);

            foreach (dataSeqTest x in X2)
            {
                //output tag results
                if (Global.swOutput != null)
                {
                    for (int i = 0; i < x._x.Count; i++)
                    {
                        Global.swOutput.Write(x._yOutput[i].ToString() + ",");
                    }
                    Global.swOutput.WriteLine();
                }

                List <int> goldTags = x._x.getTags();
                bool       ck       = true;
                for (int i = 0; i < x._x.Count; i++)
                {
                    if (goldTags[i] != x._yOutput[i])
                    {
                        ck = false;
                        break;
                    }
                }
                if (ck)
                {
                    corr++;
                }
            }
            double acc = corr / xsize * 100.0;

            Global.swLog.WriteLine("total-tag-strings={0}  correct-tag-strings={1}  string-accuracy={2}%", xsize, corr, acc);
            List <double> scoreList = new List <double>();

            scoreList.Add(acc);
            return(scoreList);
        }
Example #14
        //test3 mode in multi-task test: all models vote
        public List <double> test3_multi_mtl(List <List <double> > vecList, List <dataSet> XXList, double iter, List <StreamWriter> swOutputList)
        {
            List <double> scoreList = new List <double>();

            for (int i = 0; i < XXList.Count; i++)
            {
                dataSet       X      = XXList[i];
                List <double> vec    = MainClass.getVecFromX(X);
                model         m      = new model(_modelList[0], false);
                double        ttlCos = 0;
                for (int j = 0; j < vecList.Count; j++)
                {
                    double cos = mathTool.cos(vecList[j], vec);
                    for (int k = 0; k < m.W.Count; k++)
                    {
                        m.W[k] += cos * _modelList[j].W[k];
                    }
                    ttlCos += cos;//accumulate the normalizer once per model, not once per weight
                }
                for (int k = 0; k < m.W.Count; k++)
                {
                    m.W[k] /= ttlCos;
                }

                List <double> scoreList_i;
                if (Global.evalMetric == "tok.acc")
                {
                    scoreList_i = decode_tokAcc(X, m, iter, swOutputList[i]);
                }
                else if (Global.evalMetric == "str.acc")
                {
                    scoreList_i = decode_strAcc(X, m, iter, swOutputList[i]);
                }
                else if (Global.evalMetric == "f1")
                {
                    scoreList_i = decode_fscore(X, m, iter, swOutputList[i]);
                }
                else
                {
                    throw new Exception("error");
                }
                scoreList.Add(scoreList_i[0]);
            }
            return(scoreList);
        }
Example #15
 public toolbox(dataSet X, bool train = true)
 {
     if (train)//for training
     {
         _X     = X;
         _fGene = new featureGenerator(X);
         _model = new model(X, _fGene);
         _inf   = new inference(this);
         initOptimizer();
     }
     else//for test
     {
         _X     = X;
         _model = new model(Global.fModel);
         _fGene = new featureGenerator(X);
         _inf   = new inference(this);
     }
 }
Example #16
        public List <double> decode_fscore(dataSet XX, model m, double iter, StreamWriter swOutput)
        {
            int    nTag = m.NTag;
            double ttl  = XX.Count;

            List <string> goldTagList = new List <string>();
            List <string> resTagList  = new List <string>();

            foreach (dataSeq x in XX)
            {
                //compute detected tags
                List <int> tags = new List <int>();
                double     prob = _inf.decodeViterbi(m, x, tags);

                string res = "";
                foreach (int im in tags)
                {
                    res += im.ToString() + ",";
                }
                resTagList.Add(res);

                //output result tags
                if (swOutput != null)
                {
                    for (int i = 0; i < x.Count; i++)
                    {
                        swOutput.Write(tags[i] + ",");
                    }
                    swOutput.WriteLine();
                }

                List <int> goldTags = x.getTags();
                string     gold     = "";
                foreach (int im in goldTags)
                {
                    gold += im.ToString() + ",";
                }
                goldTagList.Add(gold);
            }
            List <double> infoList  = new List <double>();
            List <double> scoreList = fscore.getFscore(goldTagList, resTagList, infoList);

            return(scoreList);
        }
Example #17
        //f-score
        public List <double> decode_fscore(dataSet X, model m, double iter)
        {
            //multi thread
            List <dataSeqTest> X2 = new List <dataSeqTest>();

            multiThreading(X, X2);

            List <string> goldTagList = new List <string>();
            List <string> resTagList  = new List <string>();

            foreach (dataSeqTest x in X2)
            {
                string res = "";
                foreach (int im in x._yOutput)
                {
                    res += im.ToString() + ",";
                }
                resTagList.Add(res);

                //output tag results
                if (Global.swOutput != null)
                {
                    for (int i = 0; i < x._yOutput.Count; i++)
                    {
                        Global.swOutput.Write(x._yOutput[i] + ",");
                    }
                    Global.swOutput.WriteLine();
                }

                List <int> goldTags = x._x.getTags();
                string     gold     = "";
                foreach (int im in goldTags)
                {
                    gold += im.ToString() + ",";
                }
                goldTagList.Add(gold);
            }

            List <double> infoList  = new List <double>();
            List <double> scoreList = fscore.getFscore(goldTagList, resTagList, infoList);

            Global.swLog.WriteLine("#gold-chunk={0}  #output-chunk={1}  #correct-output-chunk={2}  precision={3}  recall={4}  f-score={5}", infoList[0], infoList[1], infoList[2], scoreList[1].ToString("f2"), scoreList[2].ToString("f2"), scoreList[0].ToString("f2"));
            return(scoreList);
        }
Example #18
        public List <double> decode_strAcc(dataSet XX, model m, double iter, StreamWriter swOutput)
        {
            int    nTag    = m.NTag;
            double ttl     = XX.Count;
            double correct = 0;

            foreach (dataSeq x in XX)
            {
                //compute detected tags
                List <int> tags = new List <int>();
                double     prob = _inf.decodeViterbi(m, x, tags);

                //output result tags
                if (swOutput != null)
                {
                    for (int i = 0; i < x.Count; i++)
                    {
                        swOutput.Write(tags[i] + ",");
                    }
                    swOutput.WriteLine();
                }

                List <int> goldTags = x.getTags();
                bool       ck       = true;
                for (int i = 0; i < x.Count; i++)
                {
                    if (goldTags[i] != tags[i])
                    {
                        ck = false;
                        break;
                    }
                }
                if (ck)
                {
                    correct++;
                }
            }
            double        acc       = correct / ttl;
            List <double> scoreList = new List <double>();

            scoreList.Add(acc);
            return(scoreList);
        }
Example #19
        //test2 mode in multi-task test: get the most similar task/model for test
        public List <double> test2_multi_mtl(List <List <double> > vecList, List <dataSet> XXList, double iter, List <StreamWriter> swOutputList)
        {
            List <double> scoreList = new List <double>();

            for (int i = 0; i < XXList.Count; i++)
            {
                dataSet       X   = XXList[i];
                List <double> vec = MainClass.getVecFromX(X);
                double        cos = -2;
                int           idx = -1;
                for (int j = 0; j < vecList.Count; j++)
                {
                    double newCos = mathTool.cos(vecList[j], vec);
                    if (newCos > cos)
                    {
                        idx = j;
                        cos = newCos;
                    }
                }

                model         m = _modelList[idx];
                List <double> scoreList_i;
                if (Global.evalMetric == "tokAcc")
                {
                    scoreList_i = decode_tokAcc(X, m, iter, swOutputList[i]);
                }
                else if (Global.evalMetric == "strAcc")
                {
                    scoreList_i = decode_strAcc(X, m, iter, swOutputList[i]);
                }
                else if (Global.evalMetric == "f1")
                {
                    scoreList_i = decode_fscore(X, m, iter, swOutputList[i]);
                }
                else
                {
                    throw new Exception("error");
                }
                scoreList.Add(scoreList_i[0]);
            }
            return(scoreList);
        }
Example #20
        //fast viterbi decode without probability
        public void decodeViterbi_test(model m, dataSeq x, List <int> tags)
        {
            tags.Clear();

            int     nNode = x.Count;
            int     nTag  = m.NTag;
            dMatrix YY    = new dMatrix(nTag, nTag);

            double[]      dAry  = new double[nTag];
            List <double> Y     = new List <double>(dAry);
            Viterbi       viter = new Viterbi(nNode, nTag);

            for (int i = 0; i < nNode; i++)
            {
                getLogYY(m, x, i, ref YY, ref Y, false, false);
                viter.setScores(i, Y, YY);
            }

            viter.runViterbi(ref tags);
        }
Example #21
        public double reg(model m, int nFeatures, double r_k)
        {
            double error = 0;

            if (Global.reg != 0.0)
            {
                //gradient step of the L2 penalty: grad_i = w_i / sigma^2
                for (int i = 0; i < nFeatures; i++)
                {
                    double grad_i = m.W[i] / (Global.reg * Global.reg);
                    m.W[i] -= r_k * grad_i;
                }

                //the penalty term itself: ||w||^2 / (2 * sigma^2)
                List <double> tmpWeights = m.W;
                double        sum        = listTool.squareSum(tmpWeights);
                error += sum / (2.0 * Global.reg * Global.reg);
            }
            return(error);
        }
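This matches a Gaussian prior with sigma = Global.reg: the objective gains a penalty ||w||^2 / (2 * sigma^2), whose per-weight gradient is w_i / sigma^2, which is exactly grad_i above; r_k is the step size.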
Example #22
        //the mini-batch version
        public double getGrad_SGD_miniBatch(List <double> g, model m, List <dataSeq> X, baseHashSet <int> idset)
        {
            if (idset != null)
            {
                idset.Clear();
            }
            double error = 0;

            foreach (dataSeq x in X)
            {
                baseHashSet <int> idset2 = new baseHashSet <int>();

                error += getGradCRF(g, m, x, idset2);

                if (idset != null)
                {
                    foreach (int i in idset2)
                    {
                        idset.Add(i);
                    }
                }
            }
            return(error);
        }
Example #23
 //for single-task
 public toolbox(dataSet X, bool train = true)
 {
     if (train)//to train
     {
         _XList     = null;
         _modelList = null;
         _X         = X;
         _fGene     = new featureGenerator(X);
         _model     = new model(X, _fGene);
         _inf       = new inference(this);
         _grad      = new gradient(this);
         initOptimizer();
     }
     else//to test
     {
         _XList     = null;
         _modelList = null;
         _X         = X;
         _model     = new model(Global.modelDir + Global.fModel);
         _fGene     = new featureGenerator(X);
         _inf       = new inference(this);
         _grad      = new gradient(this);
     }
 }
Example #24
        //token accuracy
        public List <double> decode_tokAcc(dataSet X, model m, double iter)
        {
            int nTag = m.NTag;

            int[]      tmpAry     = new int[nTag];
            List <int> corrOutput = new List <int>(tmpAry);
            List <int> gold       = new List <int>(tmpAry);
            List <int> output     = new List <int>(tmpAry);

            //multi thread
            List <dataSeqTest> X2 = new List <dataSeqTest>();

            multiThreading(X, X2);

            foreach (dataSeqTest x in X2)
            {
                List <int> outTags  = x._yOutput;
                List <int> goldTags = x._x.getTags();

                //output tag results
                if (Global.swOutput != null)
                {
                    for (int i = 0; i < outTags.Count; i++)
                    {
                        Global.swOutput.Write(outTags[i].ToString() + ",");
                    }
                    Global.swOutput.WriteLine();
                }

                //count
                for (int i = 0; i < outTags.Count; i++)
                {
                    gold[goldTags[i]]++;
                    output[outTags[i]]++;

                    if (outTags[i] == goldTags[i])
                    {
                        corrOutput[outTags[i]]++;
                    }
                }
            }

            Global.swLog.WriteLine("% tag-type  #gold  #output  #correct-output  token-precision  token-recall  token-f-score");
            double prec, rec;
            int    sumGold = 0, sumOutput = 0, sumCorrOutput = 0;

            for (int i = 0; i < nTag; i++)
            {
                sumCorrOutput += corrOutput[i];
                sumGold       += gold[i];
                sumOutput     += output[i];
                if (gold[i] == 0)
                {
                    rec = 0;
                }
                else
                {
                    rec = ((double)corrOutput[i]) * 100.0 / (double)gold[i];
                }
                if (output[i] == 0)
                {
                    prec = 0;
                }
                else
                {
                    prec = ((double)corrOutput[i]) * 100.0 / (double)output[i];
                }

                Global.swLog.WriteLine("% {0}:  {1}  {2}  {3}  {4}  {5}  {6}", i, gold[i], output[i], corrOutput[i], prec.ToString("f2"), rec.ToString("f2"), (2 * prec * rec / (prec + rec)).ToString("f2"));
            }
            if (sumGold == 0)
            {
                rec = 0;
            }
            else
            {
                rec = ((double)sumCorrOutput) * 100.0 / (double)sumGold;
            }
            if (sumOutput == 0)
            {
                prec = 0;
            }
            else
            {
                prec = ((double)sumCorrOutput) * 100.0 / (double)sumOutput;
            }

            double fscore;

            if (prec == 0 && rec == 0)
            {
                fscore = 0;
            }
            else
            {
                fscore = 2 * prec * rec / (prec + rec);//this token-based overall-f-score is also the token-based-accuracy
            }
            Global.swLog.WriteLine("% overall-tags:  {0}  {1}  {2}  {3}  {4}  {5}", sumGold, sumOutput, sumCorrOutput, prec.ToString("f2"), rec.ToString("f2"), fscore.ToString("f2"));
            Global.swLog.Flush();
            List <double> scoreList = new List <double>();

            scoreList.Add(fscore);
            return(scoreList);
        }
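The overall token F-score equals token accuracy here because every token has exactly one gold tag and one output tag: sumGold = sumOutput = #tokens, so prec = rec = sumCorrOutput / #tokens, and 2 * prec * rec / (prec + rec) collapses to that same ratio.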
Example #25
        public void getBeliefs(belief bel, model m, dataSeq x, bool mask)
        {
            int nNodes  = x.Count;
            int nStates = m.NTag;

            dMatrix YY = new dMatrix(nStates, nStates);

            double[]      dAry       = new double[nStates];
            List <double> Y          = new List <double>(dAry);
            List <double> alpha_Y    = new List <double>(dAry);
            List <double> newAlpha_Y = new List <double>(dAry);
            List <double> tmp_Y      = new List <double>(dAry);

            //compute beta values in a backward scan
            for (int i = nNodes - 1; i > 0; i--)
            {
                getLogYY(m, x, i, ref YY, ref Y, false, mask);
                listTool.listSet(ref tmp_Y, bel.belState[i]);
                listTool.listAdd(ref tmp_Y, Y);
                logMultiply(YY, tmp_Y, bel.belState[i - 1]);
            }
            //compute alpha values in a forward scan
            for (int i = 0; i < nNodes; i++)
            {
                getLogYY(m, x, i, ref YY, ref Y, false, mask);
                if (i > 0)
                {
                    listTool.listSet(ref tmp_Y, alpha_Y);
                    YY.transpose();
                    logMultiply(YY, tmp_Y, newAlpha_Y);
                    listTool.listAdd(ref newAlpha_Y, Y);
                }
                else
                {
                    listTool.listSet(ref newAlpha_Y, Y);
                }
                if (i > 0)
                {
                    listTool.listSet(ref tmp_Y, Y);
                    listTool.listAdd(ref tmp_Y, bel.belState[i]);
                    YY.transpose();
                    bel.belEdge[i].set(YY);
                    for (int yPre = 0; yPre < nStates; yPre++)
                    {
                        for (int y = 0; y < nStates; y++)
                        {
                            bel.belEdge[i][yPre, y] += tmp_Y[y] + alpha_Y[yPre];
                        }
                    }
                }
                List <double> tmp = bel.belState[i];
                listTool.listAdd(ref tmp, newAlpha_Y);
                listTool.listSet(ref alpha_Y, newAlpha_Y);
            }
            double Z = logSum(alpha_Y);

            for (int i = 0; i < nNodes; i++)
            {
                List <double> tmp = bel.belState[i];
                listTool.listAdd(ref tmp, -Z);
                listTool.listExp(ref tmp);
            }
            for (int i = 1; i < nNodes; i++)
            {
                bel.belEdge[i].add(-Z);
                bel.belEdge[i].eltExp();
            }
            bel.Z = Z;
        }
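Both scans rely on logMultiply as a log-space matrix-vector product; a sketch of the presumed semantics (an assumption, not the toolkit's actual implementation):

        //hypothetical sketch: result[a] = log(sum_b exp(YY[a, b] + v[b]))
        static void logMultiply(dMatrix YY, List <double> v, List <double> result)
        {
            for (int a = 0; a < result.Count; a++)
            {
                double mx = double.NegativeInfinity;
                for (int b = 0; b < v.Count; b++)
                {
                    mx = Math.Max(mx, YY[a, b] + v[b]);
                }
                double sum = 0;
                for (int b = 0; b < v.Count; b++)
                {
                    sum += Math.Exp(YY[a, b] + v[b] - mx);
                }
                result[a] = mx + Math.Log(sum);
            }
        }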
Example #26
 //get n-best
 public void decodeNbest_train(model m, dataSeq x, List <dMatrix> YYlist, List <List <double> > Ylist, List <List <int> > nBestTags)
 {
 }
Example #27
        public List <double> decode_tokAcc(dataSet XX, model m, double iter, StreamWriter swOutput)
        {
            int nTag = m.NTag;

            int[]      tmpAry     = new int[nTag];
            List <int> corrOutput = new List <int>(tmpAry);
            List <int> gold       = new List <int>(tmpAry);
            List <int> output     = new List <int>(tmpAry);

            foreach (dataSeq x in XX)
            {
                List <int> tags = new List <int>();
                double     prob = _inf.decodeViterbi(m, x, tags);

                //output result tags
                if (swOutput != null)
                {
                    for (int i = 0; i < x.Count; i++)
                    {
                        swOutput.Write(tags[i] + ",");
                    }
                    swOutput.WriteLine();
                }

                //count tags for the sample
                for (int i = 0; i < x.Count; i++)
                {
                    gold[x.getTags(i)]++;
                    output[tags[i]]++;

                    if (tags[i] == x.getTags(i))
                    {
                        corrOutput[tags[i]]++;
                    }
                }
            }

            double prec, recall;
            int    sumGold = 0, sumOutput = 0, sumCorrOutput = 0;

            for (int i = 0; i < nTag; i++)
            {
                sumCorrOutput += corrOutput[i];
                sumGold       += gold[i];
                sumOutput     += output[i];
            }
            if (sumGold == 0)
            {
                recall = 0;
            }
            else
            {
                recall = ((double)sumCorrOutput) * 100.0 / (double)sumGold;
            }
            if (sumOutput == 0)
            {
                prec = 0;
            }
            else
            {
                prec = ((double)sumCorrOutput) * 100.0 / (double)sumOutput;
            }
            double fscore;

            if (prec == 0 && recall == 0)
            {
                fscore = 0;
            }
            else
            {
                fscore = 2 * prec * recall / (prec + recall);
            }
            List <double> scoreList = new List <double>();

            scoreList.Add(fscore);
            return(scoreList);
        }
Example #28
 //the scalar version
 public double getGrad_SGD(List <double> g, double scalar, model m, dataSeq x, baseHashSet <int> idset)
 {
     return(getGradCRF(g, scalar, m, x, idset));
 }
Example #29
        public void getLogYY(model m, dataSeq x, int i, ref dMatrix YY, ref List <double> Y, bool takeExp, bool mask)
        {
            YY.set(0);
            listTool.listSet(ref Y, 0);

            float[]            w     = m.W;
            List <featureTemp> fList = _fGene.getFeatureTemp(x, i);
            int nTag = m.NTag;

            //node feature
            foreach (featureTemp im in fList)
            {
                nodeFeature[] features = Global.idNodeFeatures[im.id];
                foreach (nodeFeature feat in features)
                {
                    int f = feat._id;
                    int s = feat._s;

                    Y[s] += w[f] * im.val;
                }
            }

            if (i > 0)
            {
                //non-rich edge
                if (Global.useTraditionalEdge)
                {
                    for (int s = 0; s < nTag; s++)
                    {
                        for (int sPre = 0; sPre < nTag; sPre++)
                        {
                            int f = _fGene.getEdgeFeatID(sPre, s);
                            YY[sPre, s] += w[f];
                        }
                    }
                }

                //rich edge
                foreach (featureTemp im in fList)
                {
                    edgeFeature[] features = Global.idEdgeFeatures[im.id];
                    foreach (edgeFeature feat in features)
                    {
                        YY[feat._sPre, feat._s] += w[feat._id] * im.val;
                    }
                }

                //rich2
                if (Global.richFeat2)
                {
                    List <featureTemp> fList2 = _fGene.getFeatureTemp(x, i - 1);
                    foreach (featureTemp im in fList2)
                    {
                        edgeFeature[] features = Global.idEdgeFeatures2[im.id];
                        foreach (edgeFeature feat in features)
                        {
                            YY[feat._sPre, feat._s] += w[feat._id] * im.val;
                        }
                    }
                }
            }
            double maskValue = double.MinValue;

            if (takeExp)
            {
                listTool.listExp(ref Y);
                YY.eltExp();
                maskValue = 0;
            }
            if (mask)
            {
                dMatrix statesPerNodes = m.getStatesPerNode(x);
                for (int s = 0; s < Y.Count; s++)
                {
                    if (statesPerNodes[i, s] == 0)
                    {
                        Y[s] = maskValue;
                    }
                }
            }
        }
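The rich-feature tables map each template id to precomputed (weight index, tag) records; a sketch of the shapes this code appears to assume (field names taken from the usage above, everything else hypothetical):

        //assumed record types behind Global.idNodeFeatures / Global.idEdgeFeatures
        class nodeFeature
        {
            public int _id;//index into the weight vector w
            public int _s; //the tag this feature fires for
        }

        class edgeFeature
        {
            public int _id;  //index into the weight vector w
            public int _s;   //current tag
            public int _sPre;//previous tag
        }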
Example #30
        //get beliefs (marginal probabilities)
        public void getBeliefs(belief bel, model m, dataSeq x, List <dMatrix> YYlist, List <List <double> > Ylist)
        {
            int nNodes = x.Count;
            int nTag   = m.NTag;

            //dMatrix YY = new dMatrix(nTag, nTag);
            double[] dAry = new double[nTag];
            //List<double> Y = new List<double>(dAry);
            List <double> alpha_Y    = new List <double>(dAry);
            List <double> newAlpha_Y = new List <double>(dAry);//marginal probability from left to current node (including values of the current node)
            List <double> tmp_Y      = new List <double>(dAry);

            //compute beta values in a backward scan
            for (int i = nNodes - 1; i > 0; i--)
            {
                dMatrix       YY = YYlist[i];
                List <double> Y  = Ylist[i];
                //compute the Mi matrix
                //getLogYY(m, x, i, ref YY, ref Y, false, mask);
                listTool.listSet(ref tmp_Y, bel.belState[i]);//this is meaningful from the 2nd round
                listTool.listAdd(ref tmp_Y, Y);
                logMultiply(YY, tmp_Y, bel.belState[i - 1]);
            }
            //compute alpha values
            for (int i = 0; i < nNodes; i++)
            {
                dMatrix YY = null;
                if (i > 0)
                {
                    YY = new dMatrix(YYlist[i]);//should use the copy to avoid change
                }
                List <double> Y = Ylist[i];
                //compute the Mi matrix
                //getLogYY(m, x, i, ref YY, ref Y, false, mask);
                if (i > 0)
                {
                    listTool.listSet(ref tmp_Y, alpha_Y);//this is meaningful from the 2nd round
                    YY.transpose();
                    logMultiply(YY, tmp_Y, newAlpha_Y);
                    listTool.listAdd(ref newAlpha_Y, Y);
                }
                else
                {
                    listTool.listSet(ref newAlpha_Y, Y);
                }
                //setting marginal probability on edges
                if (i > 0)
                {
                    //beta + Y
                    listTool.listSet(ref tmp_Y, Y);
                    listTool.listAdd(ref tmp_Y, bel.belState[i]);
                    //YY
                    YY.transpose();
                    bel.belEdge[i].set(YY);
                    //belief = alpha + YY + beta + Y
                    for (int yPre = 0; yPre < nTag; yPre++)
                    {
                        for (int y = 0; y < nTag; y++)
                        {
                            bel.belEdge[i][yPre, y] += tmp_Y[y] + alpha_Y[yPre];
                        }
                    }
                }
                //setting marginal probability on nodes
                List <double> tmp = bel.belState[i];   //beta
                listTool.listAdd(ref tmp, newAlpha_Y); //belief = alpha + beta
                listTool.listSet(ref alpha_Y, newAlpha_Y);
            }
            double Z = logSum(alpha_Y);

            for (int i = 0; i < nNodes; i++)
            {
                List <double> tmp = bel.belState[i];
                listTool.listAdd(ref tmp, -Z);
                listTool.listExp(ref tmp);
            }
            for (int i = 1; i < nNodes; i++)
            {
                bel.belEdge[i].add(-Z);
                bel.belEdge[i].eltExp();
            }
            bel.Z = Z;//the overall potential function value
        }
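In log space the two scans implement the standard forward-backward recursions: alpha_i(y) = Y_i(y) + log sum_{y'} exp(YY_i(y', y) + alpha_{i-1}(y')) and beta_{i-1}(y') = log sum_y exp(YY_i(y', y) + Y_i(y) + beta_i(y)). After subtracting Z = log sum_y exp(alpha_{n-1}(y)) and exponentiating, belState[i][y] is the node marginal P(y_i = y | x) and belEdge[i][yPre, y] is the edge marginal P(y_{i-1} = yPre, y_i = y | x).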