/// <summary>
/// Forward computation over grouped features: projects the dense feature
/// groups onto the cells, then accumulates sparse feature contributions
/// into every cell of the layer.
/// </summary>
public virtual void ForwardPass(List<SparseVector> sparseFeatureGroups, List<float[]> denseFeatureGroups)
{
    if (DenseFeatureSize > 0)
    {
        DenseFeatureGroups = denseFeatureGroups;
        RNNHelper.matrixXvectorADD(Cells, DenseFeatureGroups, DenseWeights, LayerSize);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features: each cell sums value * weight over every
        // entry of every sparse feature group.
        SparseFeatureGroups = sparseFeatureGroups;
        for (var cell = 0; cell < LayerSize; cell++)
        {
            var weights = SparseWeights[cell];
            float sum = 0;
            foreach (var group in SparseFeatureGroups)
            {
                foreach (var feature in group)
                {
                    sum += feature.Value * weights[feature.Key];
                }
            }
            Cells[cell] += sum;
        }
    }
}
/// <summary>
/// Forward computation for one layer: accumulates sparse feature
/// contributions per cell (in parallel), then projects the dense
/// feature vector onto the cell outputs.
/// </summary>
public virtual void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    if (SparseFeatureSize > 0)
    {
        // Apply sparse features: each cell is handled by its own parallel task.
        SparseFeature = sparseFeature;
        Parallel.For(0, LayerSize, parallelOption, cell =>
        {
            var weights = SparseWeights[cell];
            double sum = 0;
            for (var i = 0; i < SparseFeature.Count; i++)
            {
                var entry = SparseFeature.GetEntry(i);
                sum += entry.Value * weights[entry.Key];
            }
            cellOutput[cell] += sum;
        });
    }

    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, LayerSize, DenseFeatureSize);
    }
}
/// <summary>
/// Training-time forward pass with negative sampling: computes output
/// scores only for the gold label plus NegativeSampleSize randomly drawn
/// words. At inference time it falls back to the full base-class pass.
/// </summary>
public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    if (!isTrain)
    {
        base.computeLayer(sparseFeature, denseFeature, isTrain);
        return;
    }

    // Rebuild the sample set: the current gold label first, then random
    // words drawn by frequency from the accumulated term table.
    negativeSampleWordList.Clear();
    negativeSampleWordList.Add(CurrentLabelId);
    for (var i = 0; i < NegativeSampleSize; i++)
    {
        var wordId = SearchAccTermTable(rand.Next((int)accTotalFreq));
        // Linear probing keeps the sampled word ids unique.
        while (negativeSampleWordList.Contains(wordId))
        {
            wordId = (wordId + 1) % vocab_size;
        }
        negativeSampleWordList.Add(wordId);
    }

    DenseFeature = denseFeature;
    RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, negativeSampleWordList, DenseFeatureSize, true);
}
/// <summary>
/// Training-time forward pass with negative sampling over the label
/// shortlist: scores are computed only for shortlist labels plus
/// NegativeSampleSize random words. Falls back to the full base-class
/// pass when not training.
/// </summary>
public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    if (!isTrain)
    {
        base.computeLayer(sparseFeature, denseFeature, isTrain);
        return;
    }

    // Sample set = every label in the shortlist + random frequency-drawn words.
    negativeSampleWordList.Clear();
    foreach (var labelId in LabelShortList)
    {
        negativeSampleWordList.Add(labelId);
    }
    for (var i = 0; i < NegativeSampleSize; i++)
    {
        var wordId = SearchAccTermTable(rand.Next((int)accTotalFreq));
        // Linear probing keeps the sampled word ids unique.
        while (negativeSampleWordList.Contains(wordId))
        {
            wordId = (wordId + 1) % vocab_size;
        }
        negativeSampleWordList.Add(wordId);
    }

    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, negativeSampleWordList, DenseFeatureSize, true);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features to the sampled output cells only.
        SparseFeature = sparseFeature;
        Parallel.ForEach(negativeSampleWordList, cell =>
        {
            var weights = SparseWeights[cell];
            double sum = 0;
            foreach (var pair in SparseFeature)
            {
                sum += pair.Value * weights[pair.Key];
            }
            cellOutput[cell] += sum;
        });
    }
}
/// <summary>
/// Training-time forward pass with negative sampling (float version):
/// computes cell scores only for shortlist labels plus NegativeSampleSize
/// uniformly sampled words. Falls back to the full base-class pass when
/// not training.
/// </summary>
public override void ForwardPass(SparseVector sparseFeature, float[] denseFeature, bool isTrain = true)
{
    if (!isTrain)
    {
        base.ForwardPass(sparseFeature, denseFeature, isTrain);
        return;
    }

    // Sample set = every label in the shortlist + uniformly random cells.
    negativeSampleWordList.Clear();
    foreach (var labelId in LabelShortList)
    {
        negativeSampleWordList.Add(labelId);
    }
    for (var i = 0; i < NegativeSampleSize; i++)
    {
        var wordId = rand.Next() % LayerSize;
        // Linear probing keeps the sampled word ids unique.
        while (negativeSampleWordList.Contains(wordId))
        {
            wordId = (wordId + 1) % LayerSize;
        }
        negativeSampleWordList.Add(wordId);
    }

    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(Cell, denseFeature, DenseWeights, negativeSampleWordList, DenseFeatureSize, true);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features to the sampled cells only.
        SparseFeature = sparseFeature;
        Parallel.ForEach(negativeSampleWordList, cell =>
        {
            var weights = SparseWeights[cell];
            float sum = 0;
            foreach (var pair in SparseFeature)
            {
                sum += pair.Value * weights[pair.Key];
            }
            Cell[cell] += sum;
        });
    }
}
/// <summary>
/// Forward pass for one time step of the recurrent (BPTT) hidden layer:
/// carries hidden(t-1) into hidden(t) through the recurrent weights, adds
/// sparse and dense feature contributions, then applies the activation
/// function via <c>activityLayer()</c>.
/// </summary>
public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    // Keep the previous hidden state before the cell outputs are overwritten.
    cellOutput.CopyTo(previousCellOutput, 0);

    // Recurrent connection: hidden(t-1) -> hidden(t).
    RNNHelper.matrixXvectorADD(cellOutput, previousCellOutput, BpttWeights, LayerSize, LayerSize);

    // Apply features on hidden layer.
    SparseFeature = sparseFeature;
    DenseFeature = denseFeature;

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features. (A second, always-true SparseFeatureSize
        // check used to be nested inside this loop; it was redundant and
        // has been removed.)
        Parallel.For(0, LayerSize, parallelOption, b =>
        {
            double score = 0;
            double[] vector_b = SparseWeights[b];
            for (int i = 0; i < SparseFeature.Count; i++)
            {
                var entry = SparseFeature.GetEntry(i);
                score += entry.Value * vector_b[entry.Key];
            }
            cellOutput[b] += score;
        });
    }

    if (DenseFeatureSize > 0)
    {
        // Apply dense features.
        RNNHelper.matrixXvectorADD(cellOutput, DenseFeature, DenseWeights, LayerSize, DenseFeatureSize, false);
    }

    // Activate the layer.
    activityLayer();
}
/// <summary>
/// Forward pass for one time step of the recurrent (BPTT) hidden layer
/// (float version): carries hidden(t-1) into hidden(t) through the
/// recurrent weights, adds sparse and dense feature contributions, then
/// applies the activation function via <c>activityLayer()</c>.
/// </summary>
public override void ForwardPass(SparseVector sparseFeature, float[] denseFeature, bool isTrain = true)
{
    // Keep the previous hidden state before the cells are overwritten.
    Cell.CopyTo(previousCellOutput, 0);

    // Recurrent connection: hidden(t-1) -> hidden(t).
    RNNHelper.matrixXvectorADD(Cell, previousCellOutput, BpttWeights, LayerSize, LayerSize);

    // Apply features on hidden layer.
    SparseFeature = sparseFeature;
    DenseFeature = denseFeature;

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features. (A second, always-true SparseFeatureSize
        // check used to be nested inside this loop; it was redundant and
        // has been removed.)
        Parallel.For(0, LayerSize, parallelOption, b =>
        {
            float score = 0;
            var vector_b = SparseWeights[b];
            foreach (var pair in SparseFeature)
            {
                score += pair.Value * vector_b[pair.Key];
            }
            Cell[b] += score;
        });
    }

    if (DenseFeatureSize > 0)
    {
        // Apply dense features.
        RNNHelper.matrixXvectorADD(Cell, DenseFeature, DenseWeights, LayerSize, DenseFeatureSize, false);
    }

    // Activate the layer.
    activityLayer();
}
/// <summary>
/// Forward computation for one layer: projects the dense feature vector
/// onto the cell outputs, then accumulates sparse feature contributions
/// per cell in parallel.
/// </summary>
public virtual void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, LayerSize, DenseFeatureSize);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features: one parallel task per output cell.
        SparseFeature = sparseFeature;
        Parallel.For(0, LayerSize, parallelOption, cell =>
        {
            var weights = SparseWeights[cell];
            double sum = 0;
            foreach (KeyValuePair<int, float> pair in SparseFeature)
            {
                sum += pair.Value * weights[pair.Key];
            }
            cellOutput[cell] += sum;
        });
    }
}
/// <summary>
/// Forward computation for one layer (float version): projects the dense
/// feature vector onto the cells, then accumulates sparse feature
/// contributions per cell in parallel.
/// </summary>
public virtual void ForwardPass(SparseVector sparseFeature, float[] denseFeature, bool isTrain = true)
{
    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(Cell, denseFeature, DenseWeights, LayerSize, DenseFeatureSize);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features: one parallel task per cell.
        SparseFeature = sparseFeature;
        Parallel.For(0, LayerSize, parallelOption, cell =>
        {
            var weights = SparseWeights[cell];
            float sum = 0;
            foreach (var pair in SparseFeature)
            {
                sum += pair.Value * weights[pair.Key];
            }
            Cell[cell] += sum;
        });
    }
}
/// <summary>
/// Training-time forward pass with negative sampling and a softmax
/// restricted to the sampled cells: scores the shortlist labels plus
/// NegativeSampleSize random cells, then normalizes only those cells.
/// Outside of training it delegates to the full base-class pass.
/// </summary>
public override void ForwardPass(SparseVector sparseFeature, float[] denseFeature)
{
    if (runningMode != RunningMode.Training)
    {
        base.ForwardPass(sparseFeature, denseFeature);
        return;
    }

    // Sample set = every label in the shortlist + uniformly random cells.
    negativeSampleWordList.Clear();
    foreach (var labelId in LabelShortList)
    {
        negativeSampleWordList.Add(labelId);
    }
    for (var i = 0; i < NegativeSampleSize; i++)
    {
        var wordId = rand.Next() % LayerSize;
        // Linear probing keeps the sampled word ids unique.
        while (negativeSampleWordList.Contains(wordId))
        {
            wordId = (wordId + 1) % LayerSize;
        }
        negativeSampleWordList.Add(wordId);
    }

    if (DenseFeatureSize > 0)
    {
        DenseFeature = denseFeature;
        RNNHelper.matrixXvectorADD(Cells, denseFeature, DenseWeights, negativeSampleWordList, DenseFeatureSize);
    }

    if (SparseFeatureSize > 0)
    {
        // Apply sparse features to the sampled cells only.
        SparseFeature = sparseFeature;
        foreach (var cell in negativeSampleWordList)
        {
            var weights = SparseWeights[cell];
            float sum = 0;
            foreach (var pair in SparseFeature)
            {
                sum += pair.Value * weights[pair.Key];
            }
            Cells[cell] += sum;
        }
    }

    // Softmax over the sampled cells, with scores clipped to [-50, 50]
    // before exponentiation to avoid overflow.
    double total = 0;
    foreach (var c in negativeSampleWordList)
    {
        var score = Cells[c];
        if (score > 50)
        {
            score = 50;
        }
        if (score < -50)
        {
            score = -50;
        }
        var expScore = (float)Math.Exp(score);
        total += expScore;
        Cells[c] = expScore;
    }
    foreach (var c in negativeSampleWordList)
    {
        Cells[c] /= (float)total;
    }
}
/// <summary>
/// Forward computation for a dense-only layer: stores the incoming dense
/// feature vector and projects it onto the cell outputs. The sparse
/// feature argument is accepted for interface compatibility but unused.
/// </summary>
public virtual void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
{
    DenseFeature = denseFeature;
    RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, LayerSize, DenseFeatureSize);
}