/// <summary>
/// Gradient for the elementwise Mul op in eager mode:
/// d(x*y)/dx = grad * y and d(x*y)/dy = grad * x.
/// </summary>
/// <param name="op">The forward Mul operation; inputs 0 and 1 are x and y.</param>
/// <param name="grads">Incoming gradients; only grads[0] is consumed.</param>
/// <returns>A two-element array with the gradients w.r.t. x and y.</returns>
/// <exception cref="NotImplementedException">
/// Thrown when the inputs' shapes are not fully specified and equal — the
/// general (broadcasting) case is not ported yet.
/// </exception>
public static Tensor[] _MulGrad(EagerOperation op, IntPtr[] grads)
{
    var x = op.InputHandles[0];
    var y = op.InputHandles[1];
    var grad = grads[0];

    if (op.SkipInputIndices.Contains(1) && EagerTensor.GetRank(grad) == 0)
    {
        // NOTE(review): upstream TensorFlow returns
        // gen_math_ops.mul(grad, math_ops.conj(y)) for the first slot here;
        // this port returns null, silently dropping the gradient w.r.t. x.
        // Left as-is pending math_ops.conj availability — TODO confirm.
        return (new Tensor[]
        {
            null, //gen_math_ops.mul(grad, math_ops.conj(y)),
            null
        });
    }

    if (_ShapesFullySpecifiedAndEqual(x, y, grad))
    {
        // Same static shapes: no broadcasting reduction needed, so the
        // gradients are plain elementwise products.
        return (new Tensor[]
        {
            gen_math_ops.mul(grad, y),
            gen_math_ops.mul(grad, x)
        });
    }

    // Broadcasting case (unequal or unknown shapes) is not implemented yet.
    throw new NotImplementedException("_MulGrad: gradient for Mul with broadcasting or unknown shapes is not implemented.");
}
/// <summary>
/// Looks up the registered eager-mode gradient function for the given
/// operation's type, lazily populating the registry from the assembly first.
/// </summary>
/// <param name="op">The operation whose gradient function is requested.</param>
/// <returns>
/// The registered gradient delegate, or null when the op has no inputs
/// (nothing to differentiate).
/// </returns>
/// <exception cref="LookupError">
/// Thrown when no gradient function is registered for <c>op.type</c>.
/// </exception>
public static Func<EagerOperation, IntPtr[], EagerTensor[]> get_gradient_function_eager(EagerOperation op)
{
    // Ops without inputs have no gradient to compute.
    if (op.inputs == null)
    {
        return (null);
    }

    // Ensure the registry has been filled from the assembly's
    // [RegisterGradientEager]-style registrations.
    RegisterFromAssemblyEager();

    // Single dictionary lookup instead of ContainsKey + indexer.
    if (gradientFunctionsEager.TryGetValue(op.type, out var gradientFunction))
    {
        return (gradientFunction);
    }

    throw new LookupError($"can't get gradient function through get_gradient_function_eager {op.type}");
}