/// <summary>
/// Verifies back-propagation through element-wise tensor addition: with the
/// sum's own weights copied into its gradients as the seed, Add should pass
/// the upstream gradient through unchanged to both operands.
/// </summary>
public void TestAddTensorTensorBP()
{
    // Run on CPU device 0 so the test needs no GPU.
    TensorAllocator.InitDevices(ProcessorTypeEnums.CPU, new int[] { 0 });
    var graph = new ComputeGraphTensor(new WeightTensorFactory(), 0, true);

    // 2x2 tensors filled with the constants 1 and 2 respectively.
    var tensorA = new WeightTensor(new long[2] { 2, 2 }, 1, 0, name: "tensorA", isTrainable: true);
    var tensorB = new WeightTensor(new long[2] { 2, 2 }, 2, 0, name: "tensorB", isTrainable: true);

    var tensorSum = graph.Add(tensorA, tensorB);

    // Seed the backward pass with the sum's own weights (all elements are 3.0f).
    tensorSum.CopyWeightsToGradients(tensorSum);
    graph.Backward();

    float gA = tensorA.GetGradientAt(new long[] { 1, 1 });
    float gB = tensorB.GetGradientAt(new long[] { 1, 1 });

    // Add back-propagates the upstream gradient unchanged to both inputs.
    // Use a tolerance instead of == for floating-point comparison; AreEqual
    // also reports expected/actual values on failure.
    Assert.AreEqual(3.0f, gA, 1e-6f);
    Assert.AreEqual(3.0f, gB, 1e-6f);
}
/// <summary>
/// Verifies the forward pass of element-wise tensor addition:
/// a tensor of 1s plus a tensor of 2s yields 3 at every position.
/// </summary>
public void TestAddTensorTensor()
{
    // Run on CPU device 0 so the test needs no GPU.
    TensorAllocator.InitDevices(ProcessorTypeEnums.CPU, new int[] { 0 });
    var graph = new ComputeGraphTensor(new WeightTensorFactory(), 0, true);

    // 2x2 tensors filled with the constants 1 and 2 respectively.
    var tensorA = new WeightTensor(new long[2] { 2, 2 }, 1, 0, name: "tensorA", isTrainable: true);
    var tensorB = new WeightTensor(new long[2] { 2, 2 }, 2, 0, name: "tensorB", isTrainable: true);

    var tensorSum = graph.Add(tensorA, tensorB);

    float v = tensorSum.GetWeightAt(new long[] { 1, 1 });

    // Use a tolerance instead of == for floating-point comparison; AreEqual
    // also reports expected/actual values on failure.
    Assert.AreEqual(3.0f, v, 1e-6f);
}
/// <summary>
/// Verifies that the two mathematically identical expressions (-A + 100) and
/// (100 - B) produce the same forward values and, after a softmax +
/// cross-entropy backward pass, the same gradients — i.e. scalar Add, Sub and
/// Mul back-propagation are mutually consistent.
/// </summary>
public void TestAddSubGradients()
{
    int batchSize = 5;
    int vocabSize = 20;

    // Run on CPU device 0 so the test needs no GPU.
    TensorAllocator.InitDevices(ProcessorTypeEnums.CPU, new int[] { 0 });
    var graph = new ComputeGraphTensor(new WeightTensorFactory(), 0, true);

    // Both tensors start filled with 1.0f so the two branches see identical input,
    // and share the same random label tensor for the loss.
    var tensorA = new WeightTensor(new long[2] { batchSize, vocabSize }, 1, 0, name: "tensorA", isTrainable: true);
    var tensorB = new WeightTensor(new long[2] { batchSize, vocabSize }, 1, 0, name: "tensorB", isTrainable: true);
    var tensorIdx = BuildRandomLabelTensor(batchSize, vocabSize, "tensorIdx");

    // Branch 1: (A * -1) + 100.  Branch 2: 100 - B.  Mathematically identical.
    var tensorANeg = graph.Mul(tensorA, -1.0f);
    var tensorANegSum = graph.Add(tensorANeg, 100.0f);
    var tensorSub = graph.Sub(100.0f, tensorB);

    // Forward values of the two branches must agree.
    float v1 = tensorANegSum.GetWeightAt(new long[] { 1, 1 });
    float v2 = tensorSub.GetWeightAt(new long[] { 1, 1 });
    Assert.AreEqual(v1, v2, 1e-6f);

    // Drive both branches through the same loss so gradients flow back to A and B.
    var softmax1 = graph.Softmax(tensorANegSum);
    var softmax2 = graph.Softmax(tensorSub);
    graph.CrossEntropyLoss(softmax1, tensorIdx);
    graph.CrossEntropyLoss(softmax2, tensorIdx);
    graph.Backward();

    float gA = tensorA.GetGradientAt(new long[] { 1, 1 });
    float gB = tensorB.GetGradientAt(new long[] { 1, 1 });

    // Use a tolerance instead of == for floating-point comparison; AreEqual
    // also reports expected/actual values on failure.
    Assert.AreEqual(gA, gB, 1e-6f);
}