// Extract word embedding features from the current context
public VectorBase ExtractDenseFeature(int currentState, int numStates, List<string[]> features)
{
    var fc = featureContext;
    if (fc.ContainsKey(WORDEMBEDDING_CONTEXT))
    {
        var v = fc[WORDEMBEDDING_CONTEXT];

        // Single offset: clamp the position into [0, numStates) and return that token's embedding
        if (v.Count == 1)
        {
            var strKey = features[TruncPosition(currentState + v[0], 0, numStates)][preTrainedModelColumn];
            return preTrainedModel.GetTermVector(strKey);
        }

        // Multiple offsets: concatenate the embedding of each in-range position,
        // falling back to the unknown-word embedding when the offset is out of range
        var dense = new CombinedVector();
        for (var j = 0; j < v.Count; j++)
        {
            var offset = currentState + v[j];
            if (offset >= 0 && offset < numStates)
            {
                var strKey = features[offset][preTrainedModelColumn];
                dense.Append(preTrainedModel.GetTermVector(strKey));
            }
            else
            {
                dense.Append(preTrainedModel.m_UnkEmbedding);
            }
        }

        return dense;
    }

    // No word-embedding context configured: return an empty vector
    return new SingleVector();
}
// Extract word embedding features from the current context
public VectorBase ExtractDenseFeature(int currentState, int numStates, List<string[]> features)
{
    var fc = m_FeatureConfiguration;
    if (fc.ContainsKey(WORDEMBEDDING_CONTEXT))
    {
        List<int> v = fc[WORDEMBEDDING_CONTEXT];

        // Single offset: clamp the position into [0, numStates) and return that token's embedding
        if (v.Count == 1)
        {
            string strKey = features[TruncPosition(currentState + v[0], 0, numStates)][m_WordEmbeddingCloumn];
            return m_WordEmbedding.GetTermVector(strKey);
        }

        // Multiple offsets: concatenate the embedding of each in-range position,
        // falling back to the unknown-word embedding when the offset is out of range
        CombinedVector dense = new CombinedVector();
        for (int j = 0; j < v.Count; j++)
        {
            int offset = currentState + v[j];
            if (offset >= 0 && offset < numStates)
            {
                string strKey = features[offset][m_WordEmbeddingCloumn];
                dense.Append(m_WordEmbedding.GetTermVector(strKey));
            }
            else
            {
                dense.Append(m_WordEmbedding.m_UnkEmbedding);
            }
        }

        return dense;
    }

    // No word-embedding context configured: return an empty vector
    return new SingleVector();
}
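Both variants call a TruncPosition helper that is not shown in this excerpt. A minimal sketch of what it presumably does, assuming it simply clamps a token position into the half-open range [lower, upper), is below; the name, signature, and body are inferred from how it is used above, not taken from the actual class.

// Hypothetical helper (assumption): clamp a token position into [lower, upper).
// Inferred from the call sites above; not the original implementation.
private static int TruncPosition(int position, int lower, int upper)
{
    if (position < lower)
    {
        return lower;
    }

    if (position >= upper)
    {
        return upper - 1;
    }

    return position;
}

Under this clamping assumption, the single-offset branch always resolves to a valid row of features, which would explain why it has no explicit unknown-word fallback, unlike the multi-offset loop.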