// Backpropagates deltaFull through both the binary transform W and the tensor
// Wt, returning the delta for the concatenated [left; right] child vector.
private static SimpleMatrix ComputeTensorDeltaDown(SimpleMatrix deltaFull, SimpleMatrix leftVector, SimpleMatrix rightVector, SimpleMatrix W, SimpleTensor Wt)
{
    SimpleMatrix WTDelta = W.Transpose().Mult(deltaFull);
    SimpleMatrix WTDeltaNoBias = WTDelta.ExtractMatrix(0, deltaFull.NumRows() * 2, 0, 1);
    int size = deltaFull.GetNumElements();
    SimpleMatrix deltaTensor = new SimpleMatrix(size * 2, 1);
    SimpleMatrix fullVector = NeuralUtils.Concatenate(leftVector, rightVector);
    for (int slice = 0; slice < size; ++slice)
    {
        SimpleMatrix scaledFullVector = fullVector.Scale(deltaFull.Get(slice));
        deltaTensor = deltaTensor.Plus(Wt.GetSlice(slice).Plus(Wt.GetSlice(slice).Transpose()).Mult(scaledFullVector));
    }
    return deltaTensor.Plus(WTDeltaNoBias);
}
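// Illustrative sketch (added here, not part of the original source): with
// hidden size h, the loop above computes
//   deltaTensor = sum_{k=0}^{h-1} deltaFull[k] * (S_k + S_k^T) * [left; right],
// where S_k = Wt.GetSlice(k), and the return value adds W^T * deltaFull with
// the bias row dropped. The hypothetical helper below isolates the
// contribution of a single slice k, using only calls that appear above.
private static SimpleMatrix SingleSliceDelta(SimpleMatrix deltaFull, SimpleMatrix leftVector, SimpleMatrix rightVector, SimpleTensor Wt, int k)
{
    SimpleMatrix children = NeuralUtils.Concatenate(leftVector, rightVector);
    SimpleMatrix slice = Wt.GetSlice(k);
    // (S_k + S_k^T) * [left; right], scaled by the k-th entry of deltaFull
    return slice.Plus(slice.Transpose()).Mult(children).Scale(deltaFull.Get(k));
}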
public static IDictionary<string, SimpleMatrix> AverageUnaryMatrices(IList<IDictionary<string, SimpleMatrix>> maps)
{
    IDictionary<string, SimpleMatrix> averages = Generics.NewTreeMap();
    foreach (string name in GetUnaryMatrixNames(maps))
    {
        int count = 0;
        SimpleMatrix matrix = null;
        foreach (IDictionary<string, SimpleMatrix> map in maps)
        {
            if (!map.Contains(name))
            {
                continue;
            }
            SimpleMatrix original = map[name];
            ++count;
            if (matrix == null)
            {
                matrix = original;
            }
            else
            {
                matrix = matrix.Plus(original);
            }
        }
        matrix = matrix.Divide(count);
        averages[name] = matrix;
    }
    return averages;
}
public static TwoDimensionalMap<string, string, SimpleMatrix> AverageBinaryMatrices(IList<TwoDimensionalMap<string, string, SimpleMatrix>> maps)
{
    TwoDimensionalMap<string, string, SimpleMatrix> averages = TwoDimensionalMap.TreeMap();
    foreach (Pair<string, string> binary in GetBinaryMatrixNames(maps))
    {
        int count = 0;
        SimpleMatrix matrix = null;
        foreach (TwoDimensionalMap<string, string, SimpleMatrix> map in maps)
        {
            if (!map.Contains(binary.First(), binary.Second()))
            {
                continue;
            }
            SimpleMatrix original = map.Get(binary.First(), binary.Second());
            ++count;
            if (matrix == null)
            {
                matrix = original;
            }
            else
            {
                matrix = matrix.Plus(original);
            }
        }
        matrix = matrix.Divide(count);
        averages.Put(binary.First(), binary.Second(), matrix);
    }
    return averages;
}
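// Hedged usage sketch (the model objects and their transform maps below are
// hypothetical): both averaging helpers skip maps that lack a given key and
// divide the running sum only by the number of maps that actually contained
// it, so a matrix present in a single map is passed through unchanged rather
// than shrunk toward zero.
IList<IDictionary<string, SimpleMatrix>> unaryMaps = new List<IDictionary<string, SimpleMatrix>>();
unaryMaps.Add(firstModelUnaryTransforms);    // hypothetical map of category -> matrix
unaryMaps.Add(secondModelUnaryTransforms);   // hypothetical map of category -> matrix
IDictionary<string, SimpleMatrix> averagedUnary = AverageUnaryMatrices(unaryMaps);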
/// <summary>Creates a random context matrix.</summary>
/// <remarks>
/// Creates a random context matrix. This will be numRows x 2*numCols big.
/// These can be appended to the end of either a unary or binary transform
/// matrix to get the transform matrix which uses context words.
/// </remarks>
private SimpleMatrix RandomContextMatrix()
{
    SimpleMatrix matrix = new SimpleMatrix(numRows, numCols * 2);
    matrix.InsertIntoThis(0, 0, identity.Scale(op.trainOptions.scalingForInit * 0.1));
    matrix.InsertIntoThis(0, numCols, identity.Scale(op.trainOptions.scalingForInit * 0.1));
    matrix = matrix.Plus(SimpleMatrix.Random(numRows, numCols * 2, -1.0 / Math.Sqrt((double)numCols * 100.0), 1.0 / Math.Sqrt((double)numCols * 100.0), rand));
    return matrix;
}
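// Hedged note on the initialization above (a reading of the code, not an
// official derivation): the returned matrix is
//   [ c*I | c*I ] + U(-1/sqrt(100*numCols), +1/sqrt(100*numCols)),
// with c = op.trainOptions.scalingForInit * 0.1, i.e. two scaled identity
// blocks side by side plus small uniform noise. The identity blocks bias the
// new context columns toward initially passing each context word's embedding
// through nearly unchanged instead of injecting purely random signal.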
private SimpleMatrix GetAverageEmbedding(IList<CoreLabel> words)
{
    SimpleMatrix emb = new SimpleMatrix(staticWordEmbeddings.GetEmbeddingSize(), 1);
    foreach (CoreLabel word in words)
    {
        emb = emb.Plus(GetStaticWordEmbedding(word.Word()));
    }
    return emb.Divide(Math.Max(1, words.Count));
}
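// Minimal usage sketch (mentionWords is a hypothetical token list; any
// accessor producing IList<CoreLabel> would do): note the Math.Max(1, ...)
// guard above returns the zero vector for an empty list instead of dividing
// by zero.
SimpleMatrix mentionEmbedding = GetAverageEmbedding(mentionWords);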
public virtual double GetPairwiseScore(SimpleMatrix antecedentEmbedding, SimpleMatrix anaphorEmbedding, SimpleMatrix pairFeatures)
{
    SimpleMatrix firstLayerOutput = NeuralUtils.ElementwiseApplyReLU(antecedentEmbedding.Plus(anaphorEmbedding).Plus(pairFeaturesMatrix.Mult(pairFeatures)).Plus(pairwiseFirstLayerBias));
    return Score(firstLayerOutput, pairwiseModel);
}
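// Hedged restatement of the computation above: with antecedent embedding a,
// anaphor embedding b, and pairwise feature vector f,
//   h = ReLU(a + b + W_f * f + bias_1),   score = Score(h, pairwiseModel),
// where W_f is pairFeaturesMatrix and bias_1 is pairwiseFirstLayerBias.
// A direct call, assuming the three column vectors were built elsewhere:
double pairScore = GetPairwiseScore(antecedentEmbedding, anaphorEmbedding, pairFeatures);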
private void BackpropDerivativesAndError(Tree tree, TwoDimensionalMap<string, string, SimpleMatrix> binaryTD, TwoDimensionalMap<string, string, SimpleMatrix> binaryCD, TwoDimensionalMap<string, string, SimpleTensor> binaryTensorTD, IDictionary<string, SimpleMatrix> unaryCD, IDictionary<string, SimpleMatrix> wordVectorD, SimpleMatrix deltaUp)
{
    if (tree.IsLeaf())
    {
        return;
    }
    SimpleMatrix currentVector = RNNCoreAnnotations.GetNodeVector(tree);
    string category = tree.Label().Value();
    category = model.BasicCategory(category);
    // Build a vector that looks like 0,0,1,0,0 with an indicator for the correct class
    SimpleMatrix goldLabel = new SimpleMatrix(model.numClasses, 1);
    int goldClass = RNNCoreAnnotations.GetGoldClass(tree);
    if (goldClass >= 0)
    {
        goldLabel.Set(goldClass, 1.0);
    }
    double nodeWeight = model.op.trainOptions.GetClassWeight(goldClass);
    SimpleMatrix predictions = RNNCoreAnnotations.GetPredictions(tree);
    // If this is an unlabeled class, set deltaClass to 0. We could make this
    // more efficient by skipping some of the calculations below, but this is
    // the easiest way to handle the unlabeled class.
    SimpleMatrix deltaClass = goldClass >= 0 ? predictions.Minus(goldLabel).Scale(nodeWeight) : new SimpleMatrix(predictions.NumRows(), predictions.NumCols());
    SimpleMatrix localCD = deltaClass.Mult(NeuralUtils.ConcatenateWithBias(currentVector).Transpose());
    double error = -(NeuralUtils.ElementwiseApplyLog(predictions).ElementMult(goldLabel).ElementSum());
    error = error * nodeWeight;
    RNNCoreAnnotations.SetPredictionError(tree, error);
    if (tree.IsPreTerminal())
    {
        // below us is a word vector
        unaryCD[category] = unaryCD[category].Plus(localCD);
        string word = tree.Children()[0].Label().Value();
        word = model.GetVocabWord(word);
        // SimpleMatrix currentVectorDerivative = NeuralUtils.elementwiseApplyTanhDerivative(currentVector);
        // SimpleMatrix deltaFromClass = model.getUnaryClassification(category).transpose().mult(deltaClass);
        // SimpleMatrix deltaFull = deltaFromClass.extractMatrix(0, model.op.numHid, 0, 1).plus(deltaUp);
        // SimpleMatrix wordDerivative = deltaFull.elementMult(currentVectorDerivative);
        // wordVectorD.put(word, wordVectorD.get(word).plus(wordDerivative));
        SimpleMatrix currentVectorDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(currentVector);
        SimpleMatrix deltaFromClass = model.GetUnaryClassification(category).Transpose().Mult(deltaClass);
        deltaFromClass = deltaFromClass.ExtractMatrix(0, model.op.numHid, 0, 1).ElementMult(currentVectorDerivative);
        SimpleMatrix deltaFull = deltaFromClass.Plus(deltaUp);
        SimpleMatrix oldWordVectorD = wordVectorD[word];
        if (oldWordVectorD == null)
        {
            wordVectorD[word] = deltaFull;
        }
        else
        {
            wordVectorD[word] = oldWordVectorD.Plus(deltaFull);
        }
    }
    else
    {
        // Otherwise, this must be a binary node
        string leftCategory = model.BasicCategory(tree.Children()[0].Label().Value());
        string rightCategory = model.BasicCategory(tree.Children()[1].Label().Value());
        if (model.op.combineClassification)
        {
            unaryCD[string.Empty] = unaryCD[string.Empty].Plus(localCD);
        }
        else
        {
            binaryCD.Put(leftCategory, rightCategory, binaryCD.Get(leftCategory, rightCategory).Plus(localCD));
        }
        SimpleMatrix currentVectorDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(currentVector);
        SimpleMatrix deltaFromClass = model.GetBinaryClassification(leftCategory, rightCategory).Transpose().Mult(deltaClass);
        deltaFromClass = deltaFromClass.ExtractMatrix(0, model.op.numHid, 0, 1).ElementMult(currentVectorDerivative);
        SimpleMatrix deltaFull = deltaFromClass.Plus(deltaUp);
        SimpleMatrix leftVector = RNNCoreAnnotations.GetNodeVector(tree.Children()[0]);
        SimpleMatrix rightVector = RNNCoreAnnotations.GetNodeVector(tree.Children()[1]);
        SimpleMatrix childrenVector = NeuralUtils.ConcatenateWithBias(leftVector, rightVector);
        SimpleMatrix W_df = deltaFull.Mult(childrenVector.Transpose());
        binaryTD.Put(leftCategory, rightCategory, binaryTD.Get(leftCategory, rightCategory).Plus(W_df));
        SimpleMatrix deltaDown;
        if (model.op.useTensors)
        {
            SimpleTensor Wt_df = GetTensorGradient(deltaFull, leftVector, rightVector);
            binaryTensorTD.Put(leftCategory, rightCategory, binaryTensorTD.Get(leftCategory, rightCategory).Plus(Wt_df));
            deltaDown = ComputeTensorDeltaDown(deltaFull, leftVector, rightVector, model.GetBinaryTransform(leftCategory, rightCategory), model.GetBinaryTensor(leftCategory, rightCategory));
        }
        else
        {
            deltaDown = model.GetBinaryTransform(leftCategory, rightCategory).Transpose().Mult(deltaFull);
        }
        SimpleMatrix leftDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(leftVector);
        SimpleMatrix rightDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(rightVector);
        SimpleMatrix leftDeltaDown = deltaDown.ExtractMatrix(0, deltaFull.NumRows(), 0, 1);
        SimpleMatrix rightDeltaDown = deltaDown.ExtractMatrix(deltaFull.NumRows(), deltaFull.NumRows() * 2, 0, 1);
        BackpropDerivativesAndError(tree.Children()[0], binaryTD, binaryCD, binaryTensorTD, unaryCD, wordVectorD, leftDerivative.ElementMult(leftDeltaDown));
        BackpropDerivativesAndError(tree.Children()[1], binaryTD, binaryCD, binaryTensorTD, unaryCD, wordVectorD, rightDerivative.ElementMult(rightDeltaDown));
    }
}
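// Hedged aside (a standard softmax/cross-entropy identity, not specific to
// this code): with predictions p = softmax(z) and one-hot gold label y, the
// error E = -sum_i y_i * log(p_i) has gradient dE/dz = p - y, which is
// exactly the deltaClass computed above before the class-weight scaling.
// A concrete three-class check with gold class 1:
SimpleMatrix p = new SimpleMatrix(3, 1);
p.Set(0, 0.2);
p.Set(1, 0.5);
p.Set(2, 0.3);
SimpleMatrix y = new SimpleMatrix(3, 1);
y.Set(1, 1.0);
SimpleMatrix delta = p.Minus(y);   // (0.2, -0.5, 0.3)^T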
public virtual void BackpropDerivative(Tree tree, IList<string> words, IdentityHashMap<Tree, SimpleMatrix> nodeVectors, TwoDimensionalMap<string, string, SimpleMatrix> binaryW_dfs, IDictionary<string, SimpleMatrix> unaryW_dfs, TwoDimensionalMap<string, string, SimpleMatrix> binaryScoreDerivatives, IDictionary<string, SimpleMatrix> unaryScoreDerivatives, IDictionary<string, SimpleMatrix> wordVectorDerivatives, SimpleMatrix deltaUp)
{
    if (tree.IsLeaf())
    {
        return;
    }
    if (tree.IsPreTerminal())
    {
        if (op.trainOptions.trainWordVectors)
        {
            string word = tree.Children()[0].Label().Value();
            word = dvModel.GetVocabWord(word);
            // SimpleMatrix currentVector = nodeVectors.get(tree);
            // SimpleMatrix currentVectorDerivative = nonlinearityVectorToDerivative(currentVector);
            // SimpleMatrix derivative = deltaUp.elementMult(currentVectorDerivative);
            SimpleMatrix derivative = deltaUp;
            wordVectorDerivatives[word] = wordVectorDerivatives[word].Plus(derivative);
        }
        return;
    }
    SimpleMatrix currentVector = nodeVectors[tree];
    SimpleMatrix currentVectorDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(currentVector);
    SimpleMatrix scoreW = dvModel.GetScoreWForNode(tree);
    currentVectorDerivative = currentVectorDerivative.ElementMult(scoreW.Transpose());
    // the delta that is used at the current node
    SimpleMatrix deltaCurrent = deltaUp.Plus(currentVectorDerivative);
    SimpleMatrix W = dvModel.GetWForNode(tree);
    SimpleMatrix WTdelta = W.Transpose().Mult(deltaCurrent);
    if (tree.Children().Length == 2)
    {
        // TODO: RS: Change to the nice "getWForNode" setup?
        string leftLabel = dvModel.BasicCategory(tree.Children()[0].Label().Value());
        string rightLabel = dvModel.BasicCategory(tree.Children()[1].Label().Value());
        binaryScoreDerivatives.Put(leftLabel, rightLabel, binaryScoreDerivatives.Get(leftLabel, rightLabel).Plus(currentVector.Transpose()));
        SimpleMatrix leftVector = nodeVectors[tree.Children()[0]];
        SimpleMatrix rightVector = nodeVectors[tree.Children()[1]];
        SimpleMatrix childrenVector = NeuralUtils.ConcatenateWithBias(leftVector, rightVector);
        if (op.trainOptions.useContextWords)
        {
            childrenVector = ConcatenateContextWords(childrenVector, tree.GetSpan(), words);
        }
        SimpleMatrix W_df = deltaCurrent.Mult(childrenVector.Transpose());
        binaryW_dfs.Put(leftLabel, rightLabel, binaryW_dfs.Get(leftLabel, rightLabel).Plus(W_df));
        // and then recurse
        SimpleMatrix leftDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(leftVector);
        SimpleMatrix rightDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(rightVector);
        SimpleMatrix leftWTDelta = WTdelta.ExtractMatrix(0, deltaCurrent.NumRows(), 0, 1);
        SimpleMatrix rightWTDelta = WTdelta.ExtractMatrix(deltaCurrent.NumRows(), deltaCurrent.NumRows() * 2, 0, 1);
        BackpropDerivative(tree.Children()[0], words, nodeVectors, binaryW_dfs, unaryW_dfs, binaryScoreDerivatives, unaryScoreDerivatives, wordVectorDerivatives, leftDerivative.ElementMult(leftWTDelta));
        BackpropDerivative(tree.Children()[1], words, nodeVectors, binaryW_dfs, unaryW_dfs, binaryScoreDerivatives, unaryScoreDerivatives, wordVectorDerivatives, rightDerivative.ElementMult(rightWTDelta));
    }
    else if (tree.Children().Length == 1)
    {
        string childLabel = dvModel.BasicCategory(tree.Children()[0].Label().Value());
        unaryScoreDerivatives[childLabel] = unaryScoreDerivatives[childLabel].Plus(currentVector.Transpose());
        SimpleMatrix childVector = nodeVectors[tree.Children()[0]];
        SimpleMatrix childVectorWithBias = NeuralUtils.ConcatenateWithBias(childVector);
        if (op.trainOptions.useContextWords)
        {
            childVectorWithBias = ConcatenateContextWords(childVectorWithBias, tree.GetSpan(), words);
        }
        SimpleMatrix W_df = deltaCurrent.Mult(childVectorWithBias.Transpose());
        unaryW_dfs[childLabel] = unaryW_dfs[childLabel].Plus(W_df);
        // and then recurse
        SimpleMatrix childDerivative = NeuralUtils.ElementwiseApplyTanhDerivative(childVector);
        SimpleMatrix childWTDelta = WTdelta.ExtractMatrix(0, deltaCurrent.NumRows(), 0, 1);
        BackpropDerivative(tree.Children()[0], words, nodeVectors, binaryW_dfs, unaryW_dfs, binaryScoreDerivatives, unaryScoreDerivatives, wordVectorDerivatives, childDerivative.ElementMult(childWTDelta));
    }
}
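// Hedged summary of the recursion above (a reading of this code, not an
// official derivation): at each internal node with vector v and per-node
// score row vector w,
//   deltaCurrent = deltaUp + tanh'(v) .* w^T
//   W_df        += deltaCurrent * childrenVector^T
// and W^T * deltaCurrent is split row-wise into per-child deltas, each
// multiplied elementwise by that child's tanh derivative before recursing.
// Preterminals only accumulate word-vector gradients (when enabled), so the
// recursion bottoms out there.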