public void ConcatOpDerivative()
{
    var cns = new ConvNetSharp<T>();
    var leftShape = new Shape(2, 2);
    var rightShape = new Shape(2);
    var left = cns.Const(NewVolume(new[] { 1.0, 2.0, 3.0, 4.0 }, leftShape), "left");
    var right = cns.Const(NewVolume(new[] { 4.0, 5.0 }, rightShape), "right");
    var op = cns.Concat(left, right);

    using (var session = new Session<T>())
    {
        session.Differentiate(op);

        // Feed an all-ones gradient into the concat output and verify that the
        // derivative flowing back to each input keeps that input's shape.
        var total = (int)(leftShape.TotalLength + rightShape.TotalLength);
        op.Derivate = cns.Const(NewVolume(new double[total].Populate(1.0), new Shape(total)), "Gradient");

        var result = left.Derivate.Evaluate(session);
        Assert.AreEqual(result.Shape, leftShape);

        result = right.Derivate.Evaluate(session);
        Assert.AreEqual(result.Shape, rightShape);
    }
}
public void ConcatGradientCheck()
{
    var leftShape = new Shape(2, 2, 1, 1);
    var rightShape = new Shape(3, 1, 1, 1);
    var cns = new ConvNetSharp<T>();

    // Numerically check the gradient of Concat with respect to the placeholder "x";
    // the second input is a constant volume filled with ones.
    var location = NewVolume(RandomUtilities.RandomDoubleArray(leftShape.TotalLength), leftShape);
    var x = cns.PlaceHolder("x");
    var z = cns.Const(NewVolume(new double[rightShape.TotalLength].Populate(1.0), rightShape), "z");
    var fun = cns.Concat(x, z);

    GradientCheck(cns, fun, location, 1e-5);
}
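// Both tests above build all-ones volumes via a Populate extension that is not defined
// in this file. The sketch below is an illustration only: the class name ArrayExtensions,
// the generic parameter, and the exact signature are assumptions inferred from the call
// sites (new double[total].Populate(1.0)), not the library's actual helper. It fills an
// array in place with a single value and returns the same instance, so it can be chained
// inside a NewVolume(...) call.
public static class ArrayExtensions
{
    public static TValue[] Populate<TValue>(this TValue[] array, TValue value)
    {
        // Fill every slot with the given value, then hand the array back for chaining.
        for (var i = 0; i < array.Length; i++)
        {
            array[i] = value;
        }

        return array;
    }
}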