/// <summary>
/// Loads a binary Txt2Vec model from <paramref name="filename"/> and builds the
/// word-embedding lookup table. Each term's vector is wrapped in a
/// <see cref="SingleVector"/> and normalized; terms with no vector are skipped.
/// The unknown-word embedding is left as an all-default vector of the model's size.
/// </summary>
public WordEMWrapFeaturizer(string filename)
{
    Txt2Vec.Decoder decoder = new Txt2Vec.Decoder();
    decoder.LoadBinaryModel(filename);

    vectorSize = decoder.GetVectorSize();
    m_WordEmbedding = new Dictionary<string, SingleVector>();
    m_UnkEmbedding = new SingleVector(vectorSize);

    foreach (string word in decoder.GetAllTerms())
    {
        double[] rawVector = decoder.GetVector(word);
        if (rawVector == null)
        {
            // Term has no embedding in the model; skip it.
            continue;
        }

        SingleVector embedding = new SingleVector(vectorSize, rawVector);
        embedding.Normalize();
        m_WordEmbedding.Add(word, embedding);
    }
}
/// <summary>
/// Loads a Txt2Vec model (binary by default, text when <paramref name="textFormat"/>
/// is true) and builds the word-embedding lookup table. The vector size is rounded
/// up to a multiple of <c>Vector&lt;float&gt;.Count</c> so SIMD operations can process
/// whole lanes; terms without a vector are skipped.
/// </summary>
public WordEMWrapFeaturizer(string filename, bool textFormat = false)
{
    var model = new Model();
    model.LoadModel(filename, textFormat);

    vectorSize = model.VectorSize;

    // Pad the dimension up to a full SIMD lane multiple.
    int simdWidth = Vector<float>.Count;
    int remainder = vectorSize % simdWidth;
    if (remainder != 0)
    {
        vectorSize += simdWidth - remainder;
    }

    m_WordEmbedding = new Dictionary<string, SingleVector>();
    m_UnkEmbedding = new SingleVector(vectorSize);

    foreach (var term in model.GetAllTerms())
    {
        var rawVector = model.GetVector(term);
        if (rawVector == null)
        {
            continue;
        }

        m_WordEmbedding.Add(term, new SingleVector(rawVector, vectorSize));
    }
}
/// <summary>
/// Appends <paramref name="vector"/> as the next block. All blocks must share the
/// same dimension, which is established by the first appended vector.
/// </summary>
/// <param name="vector">The block to append; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="vector"/> is null.</exception>
/// <exception cref="ArgumentException">
/// Thrown when the vector's dimension differs from previously appended blocks.
/// (Was a bare <see cref="Exception"/>; <see cref="ArgumentException"/> is more specific
/// and still caught by existing <c>catch (Exception)</c> handlers.)
/// </exception>
public void Append(SingleVector vector)
{
    // Fail fast instead of storing a null block and crashing later.
    if (vector == null)
    {
        throw new ArgumentNullException(nameof(vector));
    }

    // m_nLenPerBlock == 0 means no block has been appended yet, so any dimension is accepted.
    if (m_nLenPerBlock > 0 && m_nLenPerBlock != vector.Length)
    {
        throw new ArgumentException(
            "The dimension of appending vector is not the same as the previous one",
            nameof(vector));
    }

    m_innerData.Add(vector);
    m_nLenPerBlock = vector.Length;
    m_nLen += m_nLenPerBlock;
}
/// <summary>
/// Appends <paramref name="vector"/> as the next block. Once the first block fixes
/// the per-block dimension, every later block must match it.
/// </summary>
/// <param name="vector">The block to append.</param>
/// <exception cref="Exception">Thrown when the dimension differs from earlier blocks.</exception>
public void Append(SingleVector vector)
{
    // Short-circuit && replaces the original nested ifs: the dimension is only
    // checked once a previous block has established it (m_nLenPerBlock > 0).
    if (m_nLenPerBlock > 0 && m_nLenPerBlock != vector.GetDimension())
    {
        throw new Exception("The dimension of appending vector is not the same as the previous one");
    }

    m_innerData.Add(vector);
    m_nLenPerBlock = vector.GetDimension();
    m_nLen += m_nLenPerBlock;
}
/// <summary>
/// Loads a binary Txt2Vec model from <paramref name="filename"/> and builds the
/// word-embedding lookup table. Terms without a vector are skipped; the
/// unknown-word embedding stays an all-default vector of the model's size.
/// </summary>
public WordEMWrapFeaturizer(string filename)
{
    Txt2Vec.Model model = new Txt2Vec.Model();
    model.LoadBinaryModel(filename);

    vectorSize = model.VectorSize;
    m_WordEmbedding = new Dictionary<string, SingleVector>();
    m_UnkEmbedding = new SingleVector(vectorSize);

    foreach (string word in model.GetAllTerms())
    {
        float[] rawVector = model.GetVector(word);
        if (rawVector == null)
        {
            // No embedding stored for this term.
            continue;
        }

        m_WordEmbedding.Add(word, new SingleVector(vectorSize, rawVector));
    }
}
/// <summary>
/// Loads a Txt2Vec model (binary by default, text when <paramref name="textFormat"/>
/// is true) and builds the word-embedding lookup table. Terms without a vector
/// are skipped.
/// </summary>
public WordEMWrapFeaturizer(string filename, bool textFormat = false)
{
    Txt2Vec.Model model = new Txt2Vec.Model();
    model.LoadModel(filename, textFormat);

    vectorSize = model.VectorSize;
    m_WordEmbedding = new Dictionary<string, SingleVector>();
    m_UnkEmbedding = new SingleVector(vectorSize);

    foreach (string word in model.GetAllTerms())
    {
        float[] rawVector = model.GetVector(word);
        if (rawVector == null)
        {
            continue;
        }

        m_WordEmbedding.Add(word, new SingleVector(vectorSize, rawVector));
    }
}
/// <summary>
/// Allocates the network buffers: the dense feature vector plus the hidden (L1)
/// and output (L2) neuron arrays. The three allocations are independent.
/// </summary>
private void CreateCells()
{
    neuHidden = new neuron[L1];
    OutputLayer = new neuron[L2];
    neuFeatures = new SingleVector(DenseFeatureSize);
}
/// <summary>
/// Copies every element of <paramref name="rhs"/> into this vector starting at
/// <paramref name="startOffset"/>, then returns <c>this</c> for call chaining.
/// No bounds checking beyond the underlying storage's own.
/// </summary>
public SingleVector Set(SingleVector rhs, int startOffset)
{
    int count = rhs.GetDimension();
    for (int idx = 0; idx < count; idx++)
    {
        m_innerData[startOffset + idx] = rhs.m_innerData[idx];
    }
    return this;
}
/// <summary>
/// Allocates and initializes the network cells: dense feature buffer, zeroed
/// output layer, and L1 LSTM hidden cells. If <paramref name="br"/> is non-null
/// the three per-cell gate weights are read from it; otherwise they are
/// randomly initialized via RandInitWeight().
/// </summary>
/// <param name="br">Open reader positioned at the saved cell weights, or null to random-init.</param>
private void CreateCell(BinaryReader br)
{
    neuFeatures = new SingleVector(DenseFeatureSize);

    // Zero the output layer. Field assignment without `new` suggests `neuron`
    // is a value type — NOTE(review): confirm, a reference type would NRE here.
    OutputLayer = new neuron[L2];
    for (int a = 0; a < L2; a++)
    {
        OutputLayer[a].cellOutput = 0;
        OutputLayer[a].er = 0;
    }

    neuHidden = new LSTMCell[L1];
    for (int i = 0; i < L1; i++)
    {
        neuHidden[i] = new LSTMCell();
        LSTMCellInit(neuHidden[i]);
    }

    if (br != null)
    {
        //Load weight from input file
        // NOTE(review): read order (wCellIn, wCellForget, wCellOut) must match
        // the serializer's write order — verify against the save routine.
        for (int i = 0; i < L1; i++)
        {
            neuHidden[i].wCellIn = br.ReadSingle();
            neuHidden[i].wCellForget = br.ReadSingle();
            neuHidden[i].wCellOut = br.ReadSingle();
        }
    }
    else
    {
        //Initialize weight by random number
        for (int i = 0; i < L1; i++)
        {
            //internal weights, also important
            neuHidden[i].wCellIn = RandInitWeight();
            neuHidden[i].wCellForget = RandInitWeight();
            neuHidden[i].wCellOut = RandInitWeight();
        }
    }
}