Example #1
 /**
  * <summary>Merges the given N-gram model into this one, provided both models have the same order N.</summary>
  * <param name="toBeMerged">N-Gram model to be merged into this model.</param>
  */
 public void Merge(NGram<TSymbol> toBeMerged)
 {
     if (_n != toBeMerged.GetN())
     {
         // Models of a different order cannot be merged; do nothing.
         return;
     }
     _vocabulary.UnionWith(toBeMerged._vocabulary);
     rootNode.Merge(toBeMerged.rootNode);
 }
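A minimal usage sketch (not part of the original example): it assumes an NGram<TSymbol> constructor that builds a model from a corpus and an order N, and shows two bigram models being merged. Merge is a no-op when the orders differ.

 // Hedged usage sketch: the constructor signature NGram<TSymbol>(corpus, n) is an assumption.
 var corpusA = new List<List<string>> { new List<string> { "the", "cat", "sat" } };
 var corpusB = new List<List<string>> { new List<string> { "the", "dog", "ran" } };

 var modelA = new NGram<string>(corpusA, 2); // bigram model built from corpus A
 var modelB = new NGram<string>(corpusB, 2); // bigram model built from corpus B

 modelA.Merge(modelB); // modelA now contains the vocabulary and counts of both corpora
 // Merging a model of a different order (e.g. a trigram model) would silently do nothing.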
Example #2
 /**
  * <summary>Wrapper function to set the N-gram probabilities with interpolated smoothing.</summary>
  * <param name="nGram">N-Gram for which the probabilities will be set.</param>
  * <param name="level">Level for which the N-Gram probabilities will be set. Probabilities for different levels of the
  *              N-gram can be set with this function. If level = 1, the N-Gram is treated as a unigram; if level = 2,
  *              as a bigram, and so on.</param>
  */
 public override void SetProbabilities(NGram<TSymbol> nGram, int level)
 {
     // Compute probabilities for every higher-order level first ...
     for (var j = 2; j <= nGram.GetN(); j++)
     {
         nGram.CalculateNGramProbabilities(_simpleSmoothing, j);
     }
     // ... then for the unigram level.
     nGram.CalculateNGramProbabilities(_simpleSmoothing, 1);
     // Attach the learned interpolation weights to the model.
     switch (nGram.GetN())
     {
         case 2:
             nGram.SetLambda(_lambda1);
             break;
         case 3:
             nGram.SetLambda(_lambda1, _lambda2);
             break;
     }
 }
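For context, the lambdas set above are the weights of standard linear interpolation. The helper below is a hypothetical sketch, not part of the library, showing how a trigram probability is typically combined from the per-level probabilities computed by SetProbabilities.

 // Hypothetical helper illustrating linear interpolation for a trigram model:
 // P(w3 | w1 w2) = lambda1 * P_trigram + lambda2 * P_bigram + (1 - lambda1 - lambda2) * P_unigram
 static double InterpolatedTrigramProbability(double pTrigram, double pBigram, double pUnigram,
                                              double lambda1, double lambda2)
 {
     // The remaining mass (1 - lambda1 - lambda2) falls back to the unigram probability.
     return lambda1 * pTrigram + lambda2 * pBigram + (1 - lambda1 - lambda2) * pUnigram;
 }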
Example #3
 /**
  * <summary>Wrapper function to learn parameters of the smoothing method and set the N-gram probabilities.</summary>
  *
  * <param name="corpus">Train corpus used to optimize parameters of the smoothing method.</param>
  * <param name="nGram">N-Gram for which the probabilities will be set.</param>
  */
 public void Train(List<List<TSymbol>> corpus, NGram<TSymbol> nGram)
 {
     LearnParameters(corpus, nGram.GetN());
     SetProbabilities(nGram);
 }
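A hedged end-to-end sketch: the class name InterpolatedSmoothing<TSymbol> and the NGram<TSymbol>(corpus, n) constructor are assumptions about the surrounding library and do not appear in the snippet above.

 // Hypothetical usage: learn the interpolation weights on a corpus and set trigram probabilities.
 var corpus = new List<List<string>>
 {
     new List<string> { "<s>", "the", "cat", "sat", "</s>" },
     new List<string> { "<s>", "the", "dog", "ran", "</s>" }
 };

 var trigram = new NGram<string>(corpus, 3);          // raw counts for N = 3 (assumed constructor)
 var smoothing = new InterpolatedSmoothing<string>(); // assumed class exposing the Train method above
 smoothing.Train(corpus, trigram);                    // learns the lambdas, then sets the probabilities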