/// <summary>
/// Builds an HMM graph from a context-dependent unit graph: every phone or
/// silence-with-loopback node is expanded in place into the model graph of
/// its nearest HMM; all other nodes are left untouched.
/// </summary>
/// <param name="cdGraph">source context-dependent graph; not modified</param>
/// <returns>a copy of <paramref name="cdGraph"/> with phone/silence nodes expanded</returns>
public virtual Graph buildHMMGraph(Graph cdGraph)
{
    Graph graph = new Graph();
    graph.copyGraph(cdGraph);
    // Iterate over a snapshot of the nodes so insertions below don't affect the walk.
    foreach (Node node in graph.nodeToArray())
    {
        Unit unit;
        if (node.getType().equals(NodeType.__PHONE))
        {
            unit = this.unitManager.getUnit(node.getID());
        }
        else if (node.getType().equals(NodeType.__SILENCE_WITH_LOOPBACK))
        {
            // Silence-with-loopback nodes always map to the "SIL" unit.
            unit = this.unitManager.getUnit("SIL");
        }
        else
        {
            // Decompiled original used goto labels for this skip; plain continue is equivalent.
            continue;
        }
        HMM hmm = this.acousticModel.lookupNearestHMM(unit, HMMPosition.__UNDEFINED, false);
        Graph modelGraph = this.buildModelGraph((SenoneHMM)hmm);
        modelGraph.validate();
        graph.insertGraph(modelGraph, node);
    }
    return (graph);
}
/// <summary>Creates Fisher-kernel features over a positive and a negative HMM.</summary>
public FKFeatures(int size, HMM p, HMM n)
    : this(modshogunPINVOKE.new_FKFeatures__SWIG_1(size, HMM.getCPtr(p), HMM.getCPtr(n)), true)
{
    // Surface any exception the native constructor left pending.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Builds the phone-loop search graph: an unknown-word entry state branches
/// out to every context-independent unit, and each unit's expanded HMM tree
/// feeds a final loop-back state.
/// </summary>
public PhoneLoopSearchGraph(CIPhoneLoop this_0)
{
    this.this_0 = this_0;
    this.__existingStates = new HashMap();
    this.__firstState = new UnknownWordState();
    BranchOutState branchState = new BranchOutState(this.__firstState);
    this.attachState(this.__firstState, branchState, 0f, 0f);
    LoopBackState loopBack = new LoopBackState(this.__firstState);
    loopBack.setFinalState(true);
    this.attachState(loopBack, branchState, 0f, 0f);
    // One unit state per context-independent unit, each expanded into its HMM tree.
    for (Iterator it = this_0.__model.getContextIndependentUnitIterator(); it.hasNext();)
    {
        UnitState unitState = new UnitState((Unit)it.next(), HMMPosition.__UNDEFINED);
        this.attachState(branchState, unitState, 0f, CIPhoneLoop.access_000(this_0));
        HMM hmm = this_0.__model.lookupNearestHMM(unitState.getUnit(), unitState.getPosition(), false);
        HMMStateState entryState = new HMMStateState(unitState, hmm.getInitialState());
        this.addStateToCache(entryState);
        this.attachState(unitState, entryState, 0f, 0f);
        HMMStateState exitState = this.expandHMMTree(unitState, entryState);
        this.attachState(exitState, loopBack, 0f, 0f);
    }
}
/// <summary>Creates TOP-kernel features from positive/negative HMMs with linearity flags.</summary>
public TOPFeatures(int size, HMM p, HMM n, bool neglin, bool poslin)
    : this(modshogunPINVOKE.new_TOPFeatures__SWIG_1(size, HMM.getCPtr(p), HMM.getCPtr(n), neglin, poslin), true)
{
    // Surface any exception the native constructor left pending.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Builds the entry-point map: for every left-context unit an entry node is
/// created whose successors cover each entry-point right context (sharing
/// HMM nodes through a per-HMM cache), then single-unit words are wired in.
/// </summary>
internal void createEntryPointMap()
{
    // Cache of HMM -> shared successor node, reused across all entry points.
    HashMap hashMap = new HashMap();
    // Passed through to connectSingleUnitWords — presumably its own HMM-node cache; TODO confirm.
    HashMap hashMap2 = new HashMap();
    Iterator iterator = HMMTree.access_000(this.this_0).iterator();
    while (iterator.hasNext())
    {
        // One entry node per left-context unit, seeded with the lowest probability.
        Unit unit = (Unit)iterator.next();
        Node node = new Node(float.MinValue);
        Iterator iterator2 = this.getEntryPointRC().iterator();
        while (iterator2.hasNext())
        {
            Unit unit2 = (Unit)iterator2.next();
            HMM hmm = HMMTree.access_100(this.this_0).getHMM(this.baseUnit, unit, unit2, HMMPosition.__BEGIN);
            Node node2;
            if ((node2 = (Node)hashMap.get(hmm)) == null)
            {
                node2 = node.addSuccessor(hmm, this.getProbability());
                hashMap.put(hmm, node2);
            }
            else
            {
                // Reuse the node already created for this HMM.
                node.putSuccessor(hmm, node2);
            }
            this.nodeCount++;
            this.connectEntryPointNode(node2, unit2);
        }
        this.connectSingleUnitWords(unit, node, hashMap2);
        this.unitToEntryPointMap.put(unit, node);
    }
}
/// <summary>
/// Returns the HMM nodes for the given end node, building and caching them
/// on first request: one node per distinct HMM over all entry-point right
/// contexts, each linked to the end node's word successors.
/// </summary>
public HMMNode[] getHMMNodes(EndNode endNode)
{
    HMMNode[] cached = (HMMNode[])this.endNodeMap.get(endNode.getKey());
    if (cached != null)
    {
        return (cached);
    }
    HashMap byHmm = new HashMap();
    Unit baseUnit = endNode.getBaseUnit();
    Unit leftContext = endNode.getLeftContext();
    for (Iterator it = this.entryPoints.iterator(); it.hasNext();)
    {
        Unit rightContext = (Unit)it.next();
        HMM hmm = this.hmmPool.getHMM(baseUnit, leftContext, rightContext, HMMPosition.__END);
        HMMNode hmmNode = (HMMNode)byHmm.get(hmm);
        if (hmmNode == null)
        {
            hmmNode = new HMMNode(hmm, 0f);
            byHmm.put(hmm, hmmNode);
        }
        hmmNode.addRC(rightContext);
        // Every word successor of the end node becomes a successor of the HMM node.
        foreach (Node successor in endNode.getSuccessors())
        {
            hmmNode.addSuccessor((WordNode)successor);
        }
    }
    cached = (HMMNode[])byHmm.values().toArray(new HMMNode[byHmm.size()]);
    this.endNodeMap.put(endNode.getKey(), cached);
    return (cached);
}
/// <summary>Copy constructor: clones the native HMM behind <paramref name="h"/>.</summary>
public HMM(HMM h)
    : this(modshogunPINVOKE.new_HMM__SWIG_6(HMM.getCPtr(h)), true)
{
    // Surface any exception the native constructor left pending.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Sets the observation features of this model, sharing state with <paramref name="hmm"/>.</summary>
public void set_observations(StringWordFeatures obs, HMM hmm)
{
    modshogunPINVOKE.HMM_set_observations__SWIG_0(swigCPtr, StringWordFeatures.getCPtr(obs), HMM.getCPtr(hmm));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>Runs Viterbi-defined model estimation against <paramref name="train"/>.</summary>
public void estimate_model_viterbi_defined(HMM train)
{
    modshogunPINVOKE.HMM_estimate_model_viterbi_defined(swigCPtr, HMM.getCPtr(train));
    // Surface any exception raised inside the native call.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Runs Baum-Welch estimation of transition parameters against <paramref name="train"/>.</summary>
public void estimate_model_baum_welch_trans(HMM train)
{
    modshogunPINVOKE.HMM_estimate_model_baum_welch_trans(swigCPtr, HMM.getCPtr(train));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>Sets the positive and negative HMMs used by these FK features.</summary>
public void set_models(HMM p, HMM n)
{
    modshogunPINVOKE.FKFeatures_set_models(swigCPtr, HMM.getCPtr(p), HMM.getCPtr(n));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>Copies the model parameters from <paramref name="l"/> into this HMM.</summary>
public void copy_model(HMM l)
{
    modshogunPINVOKE.HMM_copy_model(swigCPtr, HMM.getCPtr(l));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>
/// HMM demo (the three-dice example from http://www.zhihu.com/question/20962240):
/// enumerates hidden-state chains, runs Viterbi and the forward algorithm, and
/// sets up a Baum-Welch learning call (left commented out).
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    // Modeling study of hidden Markov models
    // http://www.zhihu.com/question/20962240
    var pS = new double[] { 1.0 / 3, 1.0 / 3, 1.0 / 3 };
    var p4N = new double[] { 1.0 / 4, 1.0 / 4, 1.0 / 4, 1.0 / 4 };
    var p6N = new double[] { 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6 };
    var p8N = new double[] { 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8 };
    var pN = new double[][] { p4N, p6N, p8N };
    // Problem 0
    // state chain: pairs of (die index, observed face)
    var sn0 = new int[,] { { 1, 1 }, { 2, 6 }, { 2, 3 } };
    var pI = CountProbability(pS, pN, sn0);
    // Problem 1: seeing the unseen — decode the die sequence (take 3 observations; compute
    // the most likely hidden chain and its probability; only the probability is computed here,
    // the most likely chain follows the same scheme).
    // Solution 1: enumerate every possible die sequence.
    var pList = new List<double>();
    for (int i = 0; i < 3; i++)
    {
        for (int j = 0; j < 3; j++)
        {
            for (int k = 0; k < 3; k++)
            {
                // candidate state chain
                var sn1 = new int[,] { { i, 1 }, { j, 6 }, { k, 3 } };
                var pI1 = CountProbability(pS, pN, sn1);
                pList.Add(pI1);
            }
        }
    }
    // Solution 2: Viterbi algorithm (accumulate the maximum probability over steps 1..n).
    var sn2 = new int[] { 1, 6, 3 };
    var pI2 = ViterbiAlgorithm(pS, pN, sn2);
    // Problem 2: who touched my dice? (probability of the observed chain itself)
    // Solution 1: enumerate all die sequences (done above, so omitted).
    // Solution 2: forward algorithm.
    var sn3 = new int[] { 1, 6, 3 };
    var pI3 = ForwardAlgorithm(pS, pN, sn3, sn3.Length).Sum();
    // Problem 3: Baum-Welch algorithm
    // http://www.52nlp.cn/hmm-learn-best-practices-seven-forward-backward-algorithm-5
    Double LogProbInit = 0.0;
    Double LogProbFinal = 0.0;
    var sn4 = new int[] { 1, 6, 3 };
    var hmm = new HMM(3, 8);
    // var round = hmm.BaumWelch(sn4, out LogProbInit, out LogProbFinal);
    // Result = round.ToString() + ":" + LogProbInit + ":" + LogProbFinal;
    // Result = pI3.ToString();
    // Result = string.Join(" ", pList.OrderByDescending(p => p)) + "---:" + pI2;
    // Result = total.ToString();
}
/// <summary>Computes the relevant HMM parameter indices for the TOP kernel.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool compute_relevant_indizes(HMM hmm, SWIGTYPE_p_shogun__T_HMM_INDIZES hmm_idx)
{
    bool ok = modshogunPINVOKE.TOPFeatures_compute_relevant_indizes(swigCPtr, HMM.getCPtr(hmm), SWIGTYPE_p_shogun__T_HMM_INDIZES.getCPtr(hmm_idx));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return (ok);
}
/// <summary>Appends another model to this HMM, with explicit output buffers.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool append_model(HMM append_model, SWIGTYPE_p_double cur_out, SWIGTYPE_p_double app_out)
{
    bool ok = modshogunPINVOKE.HMM_append_model__SWIG_0(swigCPtr, HMM.getCPtr(append_model), SWIGTYPE_p_double.getCPtr(cur_out), SWIGTYPE_p_double.getCPtr(app_out));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return (ok);
}
/// <summary>Appends another model to this HMM.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool append_model(HMM append_model)
{
    bool ok = modshogunPINVOKE.HMM_append_model__SWIG_1(swigCPtr, HMM.getCPtr(append_model));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return (ok);
}
/// <summary>
/// Looks up the context-independent HMM for a unit's base form. Returns null
/// when an exact match is demanded for a positioned unit, or when the base
/// unit has no entry in the HMM map.
/// </summary>
public override HMM lookupNearestHMM(Unit unit, HMMPosition position, bool exactMatch)
{
    // An exact match on a real position cannot be satisfied by a CI model.
    if (exactMatch && position != HMMPosition.__UNDEFINED)
    {
        return (null);
    }
    return ((HMM)this.hmmMap.get(unit.getBaseUnit()));
}
/// <summary>
/// Creates the HMM state chain for a unit state: looks up its nearest HMM,
/// attaches and caches the initial HMM state, then expands the rest of the
/// HMM tree and returns its last state.
/// </summary>
private HMMStateState getHMMStates(UnitState unitState)
{
    HMM hmm = FlatLinguist.access_700(this.this_0).lookupNearestHMM(unitState.getUnit(), unitState.getPosition(), false);
    HMMStateState firstState = new HMMStateState(unitState, hmm.getInitialState());
    this.attachState(unitState, firstState, 0f, 0f);
    this.addStateToCache(firstState);
    return (this.expandHMMTree(unitState, firstState));
}
/// <summary>
/// Partitions the acoustic model's HMMs: fillers, HMMs whose left context is
/// silence, and a two-level map (left-context unit -> base unit -> HMM list)
/// for the remaining context-dependent HMMs. Also records which units share
/// each senone sequence.
/// </summary>
private void createContextDependentSuccessors()
{
    this.cdHMMs = new HashMap();
    this.senonesToUnits = new HashMap();
    this.fillerHMMs = new ArrayList();
    this.leftContextSilHMMs = new ArrayList();
    Iterator hmmiterator = this.acousticModel.getHMMIterator();
    while (hmmiterator.hasNext())
    {
        HMM hmm = (HMM)hmmiterator.next();
        // Group units by their senone sequence.
        SenoneSequence senoneSequence = ((SenoneHMM)hmm).getSenoneSequence();
        ArrayList arrayList;
        if ((arrayList = (ArrayList)this.senonesToUnits.get(senoneSequence)) == null)
        {
            arrayList = new ArrayList();
            this.senonesToUnits.put(senoneSequence, arrayList);
        }
        arrayList.add(hmm.getUnit());
        if (hmm.getUnit().isFiller())
        {
            this.fillerHMMs.add(hmm);
        }
        else if (hmm.getUnit().isContextDependent())
        {
            // Only the first left-context unit is considered.
            LeftRightContext leftRightContext = (LeftRightContext)hmm.getUnit().getContext();
            Unit unit = leftRightContext.getLeftContext()[0];
            if (unit == UnitManager.__SILENCE)
            {
                this.leftContextSilHMMs.add(hmm);
            }
            else
            {
                // Index by left-context unit, then by base unit.
                Unit baseUnit = hmm.getUnit().getBaseUnit();
                HashMap hashMap;
                if ((hashMap = (HashMap)this.cdHMMs.get(unit)) == null)
                {
                    hashMap = new HashMap();
                    this.cdHMMs.put(unit, hashMap);
                }
                ArrayList arrayList2;
                if ((arrayList2 = (ArrayList)hashMap.get(baseUnit)) == null)
                {
                    arrayList2 = new ArrayList();
                    hashMap.put(baseUnit, arrayList2);
                }
                arrayList2.add(hmm);
            }
        }
    }
    // Fillers are also valid after a silence left context.
    this.leftContextSilHMMs.addAll(this.fillerHMMs);
}
/// <summary>
/// Trains an HMM on cube observation strings and queries its log-derivatives,
/// best paths and log-likelihoods (shogun distribution_hmm example).
/// </summary>
/// <param name="para">boxed parameters: [N, M, pseudo, order, gap]</param>
/// <returns>[per-example log-likelihood matrix, sample log-likelihood, trained HMM]</returns>
internal static ArrayList run(IList para)
{
    bool reverse = false;
    modshogun.init_shogun_with_defaults();
    int N = (int)((int?)para[0]);
    int M = (int)((int?)para[1]);
    double pseudo = (double)((double?)para[2]);
    int order = (int)((int?)para[3]);
    int gap = (int)((int?)para[4]);
    string[] fm_train_dna = Load.load_cubes("../data/fm_train_cube.dat");
    StringCharFeatures charfeat = new StringCharFeatures(fm_train_dna, CUBE);
    StringWordFeatures feats = new StringWordFeatures(charfeat.get_alphabet());
    feats.obtain_from_char(charfeat, order-1, order, gap, reverse);
    HMM hmm = new HMM(feats, N, M, pseudo);
    hmm.train();
    hmm.baum_welch_viterbi_train(BW_NORMAL);
    // Touch every (parameter, example) log-derivative.
    int num_examples = feats.get_num_vectors();
    int num_param = hmm.get_num_model_parameters();
    for (int i = 0; i < num_examples; i++)
    {
        for(int j = 0; j < num_param; j++)
        {
            hmm.get_log_derivative(j, i);
        }
    }
    // Accumulate best-path scores and best-path states over all examples.
    int best_path = 0;
    int best_path_state = 0;
    for(int i = 0; i < num_examples; i++)
    {
        best_path += hmm.best_path(i);
        for(int j = 0; j < N; j++)
        {
            best_path_state += hmm.get_best_path_state(i, j);
        }
    }
    DoubleMatrix lik_example = hmm.get_log_likelihood();
    double lik_sample = hmm.get_log_likelihood_sample();
    ArrayList result = new ArrayList();
    result.Add(lik_example);
    result.Add(lik_sample);
    result.Add(hmm);
    modshogun.exit_shogun();
    return result;
}
// Test of the Viterbi algorithm.
/// <summary>
/// Runs Viterbi decoding (plain double arithmetic and log-space) on a fixed
/// weather/seaweed HMM and prints the decoded hidden-state sequence with its
/// probability.
/// </summary>
public static void CheckViterbi()
{
    // State transition matrix
    Double[,] A = { { 0.500, 0.250, 0.250 },
                    { 0.375, 0.125, 0.375 },
                    { 0.125, 0.675, 0.375 } };
    // Confusion (emission) matrix
    Double[,] B = { { 0.60, 0.20, 0.15, 0.05 },
                    { 0.25, 0.25, 0.25, 0.25 },
                    { 0.05, 0.10, 0.35, 0.50 } };
    // Initial probability vector
    Double[] PI = { 0.63, 0.17, 0.20 };
    // Observation sequence
    Int32[] OB = { (Int32)Seaweed.Dry, (Int32)Seaweed.Damp, (Int32)Seaweed.Soggy, (Int32)Seaweed.Dryish, (Int32)Seaweed.Dry };
    // Initialize the HMM model
    HMM hmm = new HMM(A.GetLength(0), B.GetLength(1));
    hmm.A = A;
    hmm.B = B;
    hmm.PI = PI;
    // Find the most likely hidden-state sequence
    Double Probability;
    Console.WriteLine("------------维特比算法:双精度运算-----------------");
    Int32[] Q = hmm.Viterbi(OB, out Probability);
    Console.WriteLine("Probability =" + Probability.ToString("0.###E+0"));
    foreach (Int32 Value in Q)
    {
        Console.WriteLine(((Weather)Value).ToString());
    }
    Console.WriteLine();
    Console.WriteLine("------------维特比算法:对数运算-----------------");
    // Log-space variant avoids underflow on longer sequences.
    Q = hmm.ViterbiLog(OB, out Probability);
    Console.WriteLine("Probability =" + Probability.ToString("0.###E+0"));
    foreach (Int32 Value in Q)
    {
        Console.WriteLine(((Weather)Value).ToString());
    }
}
/// <summary>
/// Distribution-HMM example: trains an HMM on cube observation strings, then
/// queries log-derivatives, best paths and log-likelihoods.
/// </summary>
public static void Main()
{
    bool reverse = false;
    modshogun.init_shogun_with_defaults();
    int N = 1;
    int M = 512;
    double pseudo = 1e-5;
    int order = 3;
    int gap = 0;

    string[] trainCubes = Load.load_cubes("../data/fm_train_cube.dat");
    StringCharFeatures charFeatures = new StringCharFeatures(trainCubes, EAlphabet.CUBE);
    StringWordFeatures wordFeatures = new StringWordFeatures(charFeatures.get_alphabet());
    wordFeatures.obtain_from_char(charFeatures, order - 1, order, gap, reverse);

    HMM model = new HMM(wordFeatures, N, M, pseudo);
    model.train();
    model.baum_welch_viterbi_train(BaumWelchViterbiType.BW_NORMAL);

    // Touch every (parameter, example) log-derivative.
    int exampleCount = wordFeatures.get_num_vectors();
    int paramCount = model.get_num_model_parameters();
    for (int example = 0; example < exampleCount; example++)
    {
        for (int param = 0; param < paramCount; param++)
        {
            model.get_log_derivative(param, example);
        }
    }

    // Accumulate best-path scores and states over all examples.
    int bestPath = 0;
    int bestPathState = 0;
    for (int example = 0; example < exampleCount; example++)
    {
        bestPath += (int)model.best_path(example);
        for (int state = 0; state < N; state++)
        {
            bestPathState += model.get_best_path_state(example, state);
        }
    }

    double[] likExample = model.get_log_likelihood();
    double likSample = model.get_log_likelihood_sample();
    modshogun.exit_shogun();
}
enum Seaweed { Dry, Dryish, Damp, Soggy }; // observed states (seaweed moisture)

//static void Main(string[] args)
//{
// Test the forward and backward algorithms
//CheckForwardAndBackward();
//Console.WriteLine();
//// Test the Viterbi algorithm
//CheckViterbi();
//Console.WriteLine();
//// Test the HMM learning algorithm (Baum-Welch)
//CheckBaumWelch();
//}

// Test of the forward and backward algorithms.
/// <summary>
/// Evaluates the same observation sequence with both the forward and the
/// backward algorithm on a fixed weather/seaweed HMM and returns both
/// probabilities formatted as "forward|backward".
/// </summary>
public static string CheckForwardAndBackward()
{
    // State transition matrix
    Double[,] A = { { 0.500, 0.375, 0.125 },
                    { 0.250, 0.125, 0.625 },
                    { 0.250, 0.375, 0.375 } };
    // Confusion (emission) matrix
    Double[,] B = { { 0.60, 0.20, 0.15, 0.05 },
                    { 0.25, 0.25, 0.25, 0.25 },
                    { 0.05, 0.10, 0.35, 0.50 } };
    // Initial probability vector
    Double[] PI = { 0.63, 0.17, 0.20 };
    // Observation sequence
    Int32[] OB = { (Int32)Seaweed.Dry, (Int32)Seaweed.Damp, (Int32)Seaweed.Soggy };
    // Initialize the HMM model
    HMM hmm = new HMM(A.GetLength(0), B.GetLength(1));
    hmm.A = A;
    hmm.B = B;
    hmm.PI = PI;
    // Probability of the observation sequence (forward)
    //Console.WriteLine("------------前向算法:双精度运算-----------------");
    Double ProbabilityFor = hmm.Forward(OB);
    //Console.WriteLine("Probability =" + Probability.ToString("0.###E+0"));
    //Console.WriteLine();
    // Probability of the observation sequence (backward)
    //Console.WriteLine("------------后向算法:双精度运算-----------------");
    Double ProbabilityBack = hmm.Backward(OB);
    //Console.WriteLine("Probability =" + Probability.ToString("0.###E+0"));
    return(ProbabilityFor.ToString("0.###E+0") + "|" + ProbabilityBack.ToString("0.###E+0"));
}
/// <summary>
/// Wires single-unit words into an entry-point node: for every entry-point
/// right context an HMM node (HMMPosition.__SINGLE) is created or reused,
/// and each single-unit pronunciation is attached as a word successor;
/// sentence start/end words get their dedicated nodes recorded on the tree.
/// </summary>
private void connectSingleUnitWords(Unit lc, Node node, HashMap hashMap)
{
    if (!this.singleUnitWords.isEmpty())
    {
        Iterator iterator = HMMTree.access_200(this.this_0).iterator();
        while (iterator.hasNext())
        {
            Unit unit = (Unit)iterator.next();
            HMM hmm = HMMTree.access_100(this.this_0).getHMM(this.baseUnit, lc, unit, HMMPosition.__SINGLE);
            HMMNode hmmnode;
            if ((hmmnode = (HMMNode)hashMap.get(hmm)) == null)
            {
                hmmnode = (HMMNode)node.addSuccessor(hmm, this.getProbability());
                hashMap.put(hmm, hmmnode);
            }
            else
            {
                // Reuse the shared node already created for this HMM.
                node.putSuccessor(hmm, hmmnode);
            }
            hmmnode.addRC(unit);
            this.nodeCount++;
            Iterator iterator2 = this.singleUnitWords.iterator();
            while (iterator2.hasNext())
            {
                Pronunciation pronunciation = (Pronunciation)iterator2.next();
                if (pronunciation.getWord() == HMMTree.access_300(this.this_0).getSentenceStartWord())
                {
                    // Sentence-start word gets a dedicated initial word node.
                    HMMTree.access_402(this.this_0, new InitialWordNode(pronunciation, hmmnode));
                }
                else
                {
                    float num = HMMTree.access_500(this.this_0, pronunciation.getWord());
                    WordNode wordNode = hmmnode.addSuccessor(pronunciation, num, HMMTree.access_600(this.this_0));
                    if (pronunciation.getWord() == HMMTree.access_300(this.this_0).getSentenceEndWord())
                    {
                        // Remember the sentence-end word node on the tree.
                        HMMTree.access_702(this.this_0, wordNode);
                    }
                }
                this.nodeCount++;
            }
        }
    }
}
/// <summary>
/// Accumulates transition statistics. When <paramref name="num"/> is -1,
/// every HMM in the manager accumulates each of its states against the score
/// at index <paramref name="num2"/>; otherwise the two score arrays are
/// combined (skipped when the second array is null).
/// </summary>
private void accumulateTransition(int num, int num2, TrainerScore[] array, TrainerScore[] array2)
{
    if (num == -1)
    {
        for (Iterator it = this.hmmManager.iterator(); it.hasNext();)
        {
            HMM hmm = (HMM)it.next();
            for (int state = 0; state < hmm.getOrder(); state++)
            {
                this.accumulateStateTransition(state, (SenoneHMM)hmm, array[num2].getScore());
            }
        }
        return;
    }
    if (array2 != null)
    {
        this.accumulateStateTransition(num2, array, array2);
    }
}
/// <summary>
/// Returns the HMM for the unit at the given position, synthesizing and
/// caching a three-state SenoneHMM on a miss: symbol ids for
/// [left, center, right] (silence for missing contexts) are mapped through
/// the event map to the senone ids of the three HMM states.
/// </summary>
public override HMM get(HMMPosition position, Unit unit)
{
    HMM hmm = base.get(position, unit);
    if (null != hmm)
    {
        return(hmm);
    }
    // array = symbol ids for [left context, center unit, right context].
    int[] array = new int[3];
    array[1] = ((Integer)this.symbolTable.get(unit.getName())).intValue();
    if (unit.isContextDependent())
    {
        LeftRightContext leftRightContext = (LeftRightContext)unit.getContext();
        Unit unit2 = leftRightContext.getLeftContext()[0];
        Unit unit3 = leftRightContext.getRightContext()[0];
        array[0] = ((Integer)this.symbolTable.get(unit2.getName())).intValue();
        array[2] = ((Integer)this.symbolTable.get(unit3.getName())).intValue();
    }
    else
    {
        // Context-independent units default both contexts to silence.
        array[0] = ((Integer)this.symbolTable.get("SIL")).intValue();
        array[2] = ((Integer)this.symbolTable.get("SIL")).intValue();
    }
    // array2 = one mapped senone id per HMM state (0, 1, 2).
    int[] array2 = new int[] { this.eventMap.map(0, array), this.eventMap.map(1, array), this.eventMap.map(2, array) };
    SenoneSequence senoneSequence = new SenoneSequence(new Senone[] { (Senone)this.senonePool.get(array2[0]), (Senone)this.senonePool.get(array2[1]), (Senone)this.senonePool.get(array2[2]) });
    float[][] transitionMatrix = this.transitionModel.getTransitionMatrix(array[1], array2);
    SenoneHMM senoneHMM = new SenoneHMM(unit, senoneSequence, transitionMatrix, position);
    // Cache for subsequent lookups.
    this.put(senoneHMM);
    return(senoneHMM);
}
/** Constructs a phone loop search graph. */
/// <summary>
/// Builds a loop over all context-independent units of the acoustic model:
/// an unknown-word first state branches out to every unit's expanded HMM
/// tree, whose final state feeds a loop-back state attached to the given
/// initial state.
/// </summary>
/// <param name="initState">state the loop-back state is attached to</param>
/// <param name="model">acoustic model supplying CI units and their HMMs</param>
/// <param name="logPhoneInsertionProbability">log-domain per-phone insertion penalty</param>
public PhoneLoopSearchGraph(SentenceHMMState initState, AcousticModel model, float logPhoneInsertionProbability)
{
    // NOTE(review): field name "inititalState" (sic) is kept as declared elsewhere.
    this.inititalState = initState;
    this.model = model;
    this.logPhoneInsertionProbability = logPhoneInsertionProbability;
    existingStates = new Dictionary <string, SearchState>();
    firstState = new UnknownWordState();
    SentenceHMMState branchState = new BranchOutState(firstState);
    attachState(firstState, branchState, logOne, logOne);
    SentenceHMMState lastState = new LoopBackState(firstState);
    //lastState.setFinalState(true);
    //attachState(lastState, branchState, LogMath.getLogZero(),
    //            LogMath.getLogZero());
    attachState(lastState, inititalState, logOne, logOne);
    for (java.util.Iterator i = model.getContextIndependentUnitIterator(); i.hasNext();)
    {
        Unit unit = (Unit)i.next();
        UnitState unitState = new UnitState(unit, HMMPosition.UNDEFINED);
        // attach unit state to the branch out state
        attachState(branchState, unitState, logOne, logPhoneInsertionProbability);
        HMM hmm = model.lookupNearestHMM (unitState.getUnit(), unitState.getPosition(), false);
        HMMState initialState = hmm.getInitialState();
        HMMStateState hmmTree = new HMMStateState(unitState, initialState);
        addStateToCache(hmmTree);
        // attach first HMM state to the unit state
        attachState(unitState, hmmTree, logOne, logOne);
        // expand the HMM tree
        HMMStateState finalState = expandHMMTree(unitState, hmmTree);
        // attach final state of HMM tree to the loopback state
        attachState(finalState, lastState, logOne, logOne);
    }
}
/// <summary>
/// Creates an HMM tree node for the given HMM; the node type is derived
/// from the base unit: silence = 3, filler = 4, word-beginning = 2,
/// everything else = 1.
/// </summary>
internal HMMNode(HMM hmm, float num) : base(num)
{
    this.hmm = hmm;
    Unit baseUnit = this.getBaseUnit();
    int type;
    if (baseUnit.isSilence())
    {
        type = 3;
    }
    else if (baseUnit.isFiller())
    {
        type = 4;
    }
    else if (hmm.getPosition().isWordBeginning())
    {
        type = 2;
    }
    else
    {
        type = 1;
    }
    this.setType(type);
}
/// <summary>
/// Expands this state into one arc per successor HMM, using context-dependent
/// successors (keyed by base unit and right context, defaulting to silence)
/// when the linguist is configured for them.
/// </summary>
public override SearchStateArc[] getSuccessors()
{
    Unit rightContext = UnitManager.__SILENCE;
    if (this.unit.isContextDependent())
    {
        rightContext = ((LeftRightContext)this.unit.getContext()).getRightContext()[0];
    }
    Unit baseUnit = this.unit.getBaseUnit();
    ArrayList successors = this.linguist.useContextDependentPhones()
        ? this.linguist.getCDSuccessors(baseUnit, rightContext)
        : this.linguist.getCISuccessors();
    ArrayList arcs = new ArrayList();
    for (Iterator it = successors.iterator(); it.hasNext();)
    {
        HMM hmm = (HMM)it.next();
        arcs.add(new PhoneHmmSearchState(hmm.getInitialState(), this.linguist, this.linguist.getPhoneInsertionProb(), 0f));
    }
    return ((SearchStateArc[])arcs.toArray(new SearchStateArc[arcs.size()]));
}
/// <summary>
/// Adds (or revisits) the successor node for the given HMM, keeping the
/// highest unigram probability seen so far, and returns that node.
/// </summary>
internal virtual Node addSuccessor(HMM hmm, float num)
{
    Node existing = this.getSuccessor(hmm);
    if (existing == null)
    {
        Node created = new HMMNode(hmm, num);
        this.putSuccessor(hmm, (HMMNode)created);
        return (created);
    }
    if (existing.getUnigramProbability() < num)
    {
        existing.setUnigramProbability(num);
    }
    return (existing);
}
/// <summary>
/// Distribution-HMM example (duplicate variant): trains an HMM on cube
/// observation strings, then reads log-derivatives, best paths and
/// log-likelihoods.
/// </summary>
public static void Main()
{
    bool reverse = false;
    modshogun.init_shogun_with_defaults();
    int N = 1;
    int M = 512;
    double pseudo = 1e-5;
    int order = 3;
    int gap = 0;

    string[] cubeStrings = Load.load_cubes("../data/fm_train_cube.dat");
    StringCharFeatures rawChars = new StringCharFeatures(cubeStrings, EAlphabet.CUBE);
    StringWordFeatures observations = new StringWordFeatures(rawChars.get_alphabet());
    observations.obtain_from_char(rawChars, order - 1, order, gap, reverse);

    HMM markovModel = new HMM(observations, N, M, pseudo);
    markovModel.train();
    markovModel.baum_welch_viterbi_train(BaumWelchViterbiType.BW_NORMAL);

    // Touch every (parameter, example) log-derivative.
    int numExamples = observations.get_num_vectors();
    int numParams = markovModel.get_num_model_parameters();
    for (int ex = 0; ex < numExamples; ex++)
    {
        for (int p = 0; p < numParams; p++)
        {
            markovModel.get_log_derivative(p, ex);
        }
    }

    // Accumulate best-path scores and states over all examples.
    int bestPath = 0;
    int bestPathState = 0;
    for (int ex = 0; ex < numExamples; ex++)
    {
        bestPath += (int)markovModel.best_path(ex);
        for (int s = 0; s < N; s++)
        {
            bestPathState += markovModel.get_best_path_state(ex, s);
        }
    }

    double[] likExample = markovModel.get_log_likelihood();
    double likSample = markovModel.get_log_likelihood_sample();
    modshogun.exit_shogun();
}
/// <summary>
/// Classifies a token by its search-state type and bumps the matching
/// counter; HMM states are further broken down by their position within
/// the word (begin / end / single / internal).
/// </summary>
public virtual void add(Token t)
{
    this.numTokens++;
    SearchState state = t.getSearchState();
    if (state is WordSearchState)
    {
        this.numWords++;
        return;
    }
    if (state is UnitSearchState)
    {
        this.numUnits++;
        return;
    }
    if (!(state is HMMSearchState))
    {
        this.numOthers++;
        return;
    }
    HMM hmm = ((HMMSearchState)state).getHMMState().getHMM();
    // Decompiled switch map translating HMMPosition ordinals to case labels.
    switch (TokenTypeTracker_1._SwitchMap_edu_cmu_sphinx_linguist_acoustic_HMMPosition[hmm.getPosition().ordinal()])
    {
        case 1:
            this.numHMMBegin++;
            break;
        case 2:
            this.numHMMEnd++;
            break;
        case 3:
            this.numHMMSingle++;
            break;
        case 4:
            this.numHMMInternal++;
            break;
    }
}
/// <summary>
/// Adds a pronunciation to the HMM tree: the first unit goes through its
/// entry point; interior units become internal HMM nodes (missing HMMs are
/// logged and skipped); the last unit becomes an EndNode followed by the
/// word node. Single-unit words are delegated entirely to the entry point.
/// </summary>
private void addPronunciation(Pronunciation pronunciation, float num)
{
    Unit[] units = pronunciation.getUnits();
    Unit unit = units[0];
    HMMTree.EntryPoint entryPoint = this.entryPointTable.getEntryPoint(unit);
    entryPoint.addProbability(num);
    if (units.Length > 1)
    {
        Node node = entryPoint.getNode();
        // unit2 tracks the left context as we walk the interior units.
        Unit unit2 = unit;
        for (int i = 1; i < units.Length - 1; i++)
        {
            unit = units[i];
            Unit unit3 = units[i + 1];
            HMM hmm = this.hmmPool.getHMM(unit, unit2, unit3, HMMPosition.__INTERNAL);
            if (hmm == null)
            {
                // A missing triphone HMM is logged but does not abort the walk.
                this.logger.severe(new StringBuilder().append("Missing HMM for unit ").append(unit.getName()).append(" with lc=").append(unit2.getName()).append(" rc=").append(unit3.getName()).toString());
            }
            else
            {
                node = node.addSuccessor(hmm, num);
            }
            unit2 = unit;
        }
        // The last unit becomes an end node, followed by the word node itself.
        unit = units[units.Length - 1];
        EndNode endNode = new EndNode(unit, unit2, num);
        EndNode endNode2 = node.addSuccessor(endNode, num);
        WordNode wordNode = endNode2.addSuccessor(pronunciation, num, this.wordNodeMap);
        if (wordNode.getWord().isSentenceEndWord())
        {
            this.sentenceEndWordNode = wordNode;
        }
    }
    else
    {
        entryPoint.addSingleUnitWord(pronunciation);
    }
}
/// <summary>Computes the relevant HMM parameter indices for the TOP kernel.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool compute_relevant_indizes(HMM hmm, SWIGTYPE_p_shogun__T_HMM_INDIZES hmm_idx)
{
    bool succeeded = modshogunPINVOKE.TOPFeatures_compute_relevant_indizes(swigCPtr, HMM.getCPtr(hmm), SWIGTYPE_p_shogun__T_HMM_INDIZES.getCPtr(hmm_idx));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return succeeded;
}
/// <summary>Copies the model parameters from <paramref name="l"/> into this HMM.</summary>
public void copy_model(HMM l)
{
    modshogunPINVOKE.HMM_copy_model(swigCPtr, HMM.getCPtr(l));
    // Surface any exception raised inside the native call.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Creates TOP-kernel features from positive/negative HMMs with linearity flags.</summary>
public TOPFeatures(int size, HMM p, HMM n, bool neglin, bool poslin)
    : this(modshogunPINVOKE.new_TOPFeatures__SWIG_1(size, HMM.getCPtr(p), HMM.getCPtr(n), neglin, poslin), true)
{
    // Re-raise any exception the native constructor left pending.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Sets the positive and negative HMMs used by these TOP features.</summary>
public void set_models(HMM p, HMM n)
{
    modshogunPINVOKE.TOPFeatures_set_models(swigCPtr, HMM.getCPtr(p), HMM.getCPtr(n));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>
/// Returns the native handle wrapped by an HMM instance, or a null handle
/// when the wrapper itself is null.
/// </summary>
internal static HandleRef getCPtr(HMM obj)
{
    if (obj == null)
    {
        return new HandleRef(null, IntPtr.Zero);
    }
    return obj.swigCPtr;
}
/// <summary>Creates Fisher-kernel features over a positive and a negative HMM.</summary>
public FKFeatures(int size, HMM p, HMM n)
    : this(modshogunPINVOKE.new_FKFeatures__SWIG_1(size, HMM.getCPtr(p), HMM.getCPtr(n)), true)
{
    // Re-raise any exception the native constructor left pending.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Runs Baum-Welch estimation of transition parameters against <paramref name="train"/>.</summary>
public void estimate_model_baum_welch_trans(HMM train)
{
    modshogunPINVOKE.HMM_estimate_model_baum_welch_trans(swigCPtr, HMM.getCPtr(train));
    // Surface any exception raised inside the native call.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Runs Viterbi-defined model estimation against <paramref name="train"/>.</summary>
public void estimate_model_viterbi_defined(HMM train)
{
    modshogunPINVOKE.HMM_estimate_model_viterbi_defined(swigCPtr, HMM.getCPtr(train));
    if (!modshogunPINVOKE.SWIGPendingException.Pending)
    {
        return;
    }
    // Re-raise the exception the native call left pending.
    throw modshogunPINVOKE.SWIGPendingException.Retrieve();
}
/// <summary>Copy constructor: clones the native HMM behind <paramref name="h"/>.</summary>
public HMM(HMM h)
    : this(modshogunPINVOKE.new_HMM__SWIG_6(HMM.getCPtr(h)), true)
{
    // Re-raise any exception the native constructor left pending.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>Appends another model to this HMM, with explicit output buffers.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool append_model(HMM append_model, SWIGTYPE_p_double cur_out, SWIGTYPE_p_double app_out)
{
    bool succeeded = modshogunPINVOKE.HMM_append_model__SWIG_0(swigCPtr, HMM.getCPtr(append_model), SWIGTYPE_p_double.getCPtr(cur_out), SWIGTYPE_p_double.getCPtr(app_out));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return succeeded;
}
/// <summary>Appends another model to this HMM.</summary>
/// <returns>true on success (as reported by the native call)</returns>
public bool append_model(HMM append_model)
{
    bool succeeded = modshogunPINVOKE.HMM_append_model__SWIG_1(swigCPtr, HMM.getCPtr(append_model));
    // Surface any exception raised inside the native call.
    if (modshogunPINVOKE.SWIGPendingException.Pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
    return succeeded;
}
/// <summary>Sets the observation features of this model, sharing state with <paramref name="hmm"/>.</summary>
public void set_observations(StringWordFeatures obs, HMM hmm)
{
    modshogunPINVOKE.HMM_set_observations__SWIG_0(swigCPtr, StringWordFeatures.getCPtr(obs), HMM.getCPtr(hmm));
    // Surface any exception raised inside the native call.
    bool pending = modshogunPINVOKE.SWIGPendingException.Pending;
    if (pending)
    {
        throw modshogunPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Trains positive and negative HMMs on label-split DNA sequences, then
/// computes Fisher-kernel train/test kernel matrices with a PolyKernel
/// (shogun kernel_fisher example).
/// </summary>
/// <param name="para">boxed parameters: [N, M, pseudo, order, gap]</param>
/// <returns>[train kernel matrix, test kernel matrix, kernel]</returns>
internal static ArrayList run(IList para)
{
    bool reverse = false;
    modshogun.init_shogun_with_defaults();
    int N = (int)((int?)para[0]);
    int M = (int)((int?)para[1]);
    double pseudo = (double)((double?)para[2]);
    int order = (int)((int?)para[3]);
    int gap = (int)((int?)para[4]);

    string[] fm_train_dna = Load.load_dna("../data/fm_train_dna.dat");
    string[] fm_test_dna = Load.load_dna("../data/fm_test_dna.dat");
    DoubleMatrix label_train_dna = Load.load_labels("../data/label_train_dna.dat");

    // Split training strings into positive / negative sets by label.
    ArrayList fm_hmm_pos_builder = new ArrayList();
    ArrayList fm_hmm_neg_builder = new ArrayList();
    for (int i = 0; i < label_train_dna.Columns; i++)
    {
        if (label_train_dna.get(i) == 1)
        {
            fm_hmm_pos_builder.Add(fm_train_dna[i]);
        }
        else
        {
            fm_hmm_neg_builder.Add(fm_train_dna[i]);
        }
    }
    int pos_size = fm_hmm_pos_builder.Count;
    int neg_size = fm_hmm_neg_builder.Count;
    string[] fm_hmm_pos = new string[pos_size];
    string[] fm_hmm_neg = new string[neg_size];
    for (int i = 0; i < pos_size; i++)
    {
        fm_hmm_pos[i] = (string)fm_hmm_pos_builder[i];
    }
    for (int i = 0; i < neg_size; i++)
    {
        // BUG FIX: this loop previously wrote into fm_hmm_pos, leaving
        // fm_hmm_neg full of nulls and corrupting the positive set.
        fm_hmm_neg[i] = (string)fm_hmm_neg_builder[i];
    }

    // Train one HMM per class on its word-feature encoding.
    StringCharFeatures charfeat = new StringCharFeatures(fm_hmm_pos, DNA);
    StringWordFeatures hmm_pos_train = new StringWordFeatures(charfeat.get_alphabet());
    hmm_pos_train.obtain_from_char(charfeat, order - 1, order, gap, reverse);
    HMM pos = new HMM(hmm_pos_train, N, M, pseudo);
    pos.baum_welch_viterbi_train(BW_NORMAL);

    charfeat = new StringCharFeatures(fm_hmm_neg, DNA);
    StringWordFeatures hmm_neg_train = new StringWordFeatures(charfeat.get_alphabet());
    hmm_neg_train.obtain_from_char(charfeat, order - 1, order, gap, reverse);
    HMM neg = new HMM(hmm_neg_train, N, M, pseudo);
    neg.baum_welch_viterbi_train(BW_NORMAL);

    // Encode the full train and test sets as word features.
    charfeat = new StringCharFeatures(fm_train_dna, DNA);
    StringWordFeatures wordfeats_train = new StringWordFeatures(charfeat.get_alphabet());
    wordfeats_train.obtain_from_char(charfeat, order - 1, order, gap, reverse);

    charfeat = new StringCharFeatures(fm_test_dna, DNA);
    StringWordFeatures wordfeats_test = new StringWordFeatures(charfeat.get_alphabet());
    wordfeats_test.obtain_from_char(charfeat, order - 1, order, gap, reverse);

    // Fisher-kernel features over the training observations.
    pos.set_observations(wordfeats_train);
    neg.set_observations(wordfeats_train);
    FKFeatures feats_train = new FKFeatures(10, pos, neg);
    feats_train.set_opt_a(-1);
    PolyKernel kernel = new PolyKernel(feats_train, feats_train, 1, true);
    DoubleMatrix km_train = kernel.get_kernel_matrix();

    // Clone the models so assigning test observations leaves the trained pair intact.
    HMM pos_clone = new HMM(pos);
    HMM neg_clone = new HMM(neg);
    pos_clone.set_observations(wordfeats_test);
    neg_clone.set_observations(wordfeats_test);
    FKFeatures feats_test = new FKFeatures(10, pos_clone, neg_clone);
    feats_test.set_a(feats_train.get_a());
    kernel.init(feats_train, feats_test);
    DoubleMatrix km_test = kernel.get_kernel_matrix();

    ArrayList result = new ArrayList();
    result.Add(km_train);
    result.Add(km_test);
    result.Add(kernel);
    modshogun.exit_shogun();
    return result;
}