Example #1
        public Lattice(model m, inference inf, dataSeq x)
        {
            _w = x.Count;
            _h = m.NTag;

            _logBel = new belief(_w, _h);

            List <dMatrix>        YYlist = new List <dMatrix>();
            List <List <double> > Ylist  = new List <List <double> >();

            inf.getYYandY(m, x, YYlist, Ylist);

            for (int i = 0; i < _w; i++)
            {
                _logBel.belState[i] = new List <double>(Ylist[i]);

                if (i > 0)
                {
                    _logBel.belEdge[i] = new dMatrix(YYlist[i]);
                }
            }

            _heuListList = new List <List <double> >();
            for (int i = 0; i < _w; i++)
            {
                _heuListList.Add(new List <double>(new double[_h]));
            }

            //backward Viterbi: for every (position, tag) pair, compute the best
            //achievable completion score, stored below as the search heuristic
            Viterbi _bwdViterbi = new Viterbi(_w, _h);

            for (int i = 0; i < _w; i++)
            {
                _bwdViterbi.setScores(i, Ylist[i], YYlist[i]);
            }
            List <int> tags = new List <int>();

            _bwdViterbi.runViterbi(ref tags);
            //update the viterbiHeuristicMap
            for (int i = 0; i < _w; i++)
            {
                for (int j = 0; j < _h; j++)
                {
                    double h = _bwdViterbi.getPathScore(i, j);
                    setHeuMap(i, j, h);
                }
            }

            //compute ZGold: the unnormalized log-score of the gold tag sequence
            ZGold = 0;
            for (int i = 0; i < x.Count; i++)
            {
                int s = x.getTags(i);
                ZGold += Ylist[i][s];
                if (i > 0)
                {
                    int sPre = x.getTags(i - 1);
                    ZGold += YYlist[i][sPre, s];
                }
            }
        }
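
A minimal usage sketch (not part of the original source), assuming m, inf, and x are already provided by the surrounding toolbox; the variable names here are illustrative:

            //hypothetical usage: m, inf, and x come from the enclosing toolbox
            Lattice lattice = new Lattice(m, inf, x);
            //the constructor has filled the belief lattice and the heuristic map,
            //and ZGold holds the log-score of the gold tagging of x
            double goldScore = lattice.ZGold;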
Example #2
 //for multi-task
 public toolbox(dataSet X, List <dataSet> XList, bool train = true)
 {
     if (train)//to train
     {
         _X         = X;
         _XList     = XList;
         _fGene     = new featureGenerator(X);
         _model     = null;
         _modelList = new List <model>();
         for (int i = 0; i < Global.nTask; i++)
         {
             model m = new model(XList[i], _fGene);
             _modelList.Add(m);
         }
         _inf  = new inference(this);
         _grad = new gradient(this);
         initOptimizer();
     }
     else//to test
     {
         _X         = X;
         _XList     = XList;
         _model     = null;
         _modelList = new List <model>();
         for (int i = 0; i < Global.nTask; i++)
         {
             model m = new model(Global.modelDir + i.ToString() + Global.fModel);
             _modelList.Add(m);
         }
         _fGene = new featureGenerator(X);
         _inf   = new inference(this);
         _grad  = new gradient(this);
     }
 }
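
A sketch of how this multi-task constructor might be called; the data-loading step is assumed, not shown. Training builds one fresh model per task, while testing loads the serialized model files instead:

     //hypothetical usage; X and XList (one dataSet per task) are assumed loaded elsewhere
     toolbox tbTrain = new toolbox(X, XList);        //train = true by default: fresh models + optimizer
     toolbox tbTest  = new toolbox(X, XList, false); //loads Global.nTask model files from Global.modelDir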
Example #3
        public double getNBest(model m, inference inf, dataSeq x, int N, ref List <List <int> > nBestTaggings, ref List <double> scores)
        {
            nBestTaggings.Clear();
            _w       = x.Count;
            _h       = m.NTag;
            _lattice = new Lattice(m, inf, x);
            setStartAndGoal(-1, 0, _w, 0);//a virtual begin node & a virtual end node

            for (int n = 0; n < N; n++)
            {
                List <int> tagging  = new List <int>();
                double     logNumer = searchForPath(ref tagging);
                if (logNumer == -2)//search failed: no further path available
                {
                    break;
                }

                nBestTaggings.Add(tagging);
                scores.Add(logNumer);//log numerator

                double check = Math.Exp(scores[0] - scores[n]);
                if (check >= Global.stopSearchFactor)//stop once the best tagging is stopSearchFactor times more probable than the current one
                {
                    break;
                }
            }

            double Z = logSum(scores);

            listTool.listAdd(ref scores, -Z);
            listTool.listExp(ref scores);//prob
            //error: gap between the n-best estimate of log Z and the gold-path log-score
            double error = Z - _lattice.ZGold;


            //update the profiler
            Global.nbestCount += scores.Count;
            Global.nbestNorm++;
            int small = scores.Count < 10 ? scores.Count : 10;//profile at most the top-10 probabilities

            for (int i = 0; i < small; i++)
            {
                Global.nbestProbList[i] += scores[i];
            }

            return error;
        }
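
A hedged calling sketch (the decoder instance, here called nbest, and the other names are illustrative, not from the source): the caller owns the two ref lists, which come back filled with up to N taggings and their normalized probabilities.

            //hypothetical usage of getNBest; nbest, m, inf, and x are assumed to exist
            List <List <int> > taggings = new List <List <int> >();
            List <double>      probs    = new List <double>();
            double err = nbest.getNBest(m, inf, x, 10, ref taggings, ref probs);
            //taggings[0] is the highest-scoring sequence; probs sums to 1 over the returned candidates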
Example #4
 public toolbox(dataSet X, bool train = true)
 {
     if (train)//for training
     {
         _X     = X;
         _fGene = new featureGenerator(X);
         _model = new model(X, _fGene);
         _inf   = new inference(this);
         initOptimizer();
     }
     else//for test
     {
         _X     = X;
         _model = new model(Global.fModel);
         _fGene = new featureGenerator(X);
         _inf   = new inference(this);
     }
 }
Example #5
 //for single-task
 public toolbox(dataSet X, bool train = true)
 {
     if (train)//to train
     {
         _XList     = null;
         _modelList = null;
         _X         = X;
         _fGene     = new featureGenerator(X);
         _model     = new model(X, _fGene);
         _inf       = new inference(this);
         _grad      = new gradient(this);
         initOptimizer();
     }
     else//to test
     {
         _XList     = null;
         _modelList = null;
         _X         = X;
         _model     = new model(Global.modelDir + Global.fModel);
         _fGene     = new featureGenerator(X);
         _inf       = new inference(this);
         _grad      = new gradient(this);
     }
 }
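
For symmetry with the multi-task sketch after Example #2, a hypothetical single-task call (X assumed loaded elsewhere):

     //hypothetical usage of the single-task constructor
     toolbox tbTrain = new toolbox(X);        //train = true by default: new model + optimizer
     toolbox tbTest  = new toolbox(X, false); //deserializes the model from Global.modelDir + Global.fModel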
Example #6
 //wire the gradient calculator to the components shared through the toolbox
 public gradient(toolbox tb)
 {
     _optim = tb.Optim;
     _inf   = tb.Inf;
     _fGene = tb.FGene;
 }
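
One observation worth verifying against the full source: gradient caches these references at construction time, and in the train branch of Example #2 initOptimizer() runs only after gradient(this), so the cached _optim may predate the optimizer's initialization. A sketch of that ordering:

     //call order taken from the train branch of Example #2
     _inf  = new inference(this);
     _grad = new gradient(this); //caches tb.Optim, tb.Inf, tb.FGene as they are at this moment
     initOptimizer();            //if this replaces the optimizer, the gradient's cached copy is stale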