/// <summary>
/// Gradient for batch normalization (V3), dispatched through the unified
/// eager/graph <c>ExecuteOp</c> path.
/// </summary>
/// <param name="params">Inputs and attributes for the <c>FusedBatchNormGradV3</c> op.</param>
/// <returns>The output tensors produced by the op.</returns>
public static Tensor[] fused_batch_norm_grad_v3(FusedBatchNormParams @params)
{
    // Positional op inputs, in the order the op definition expects.
    var op_args = new ExecuteOpArgs(
        @params.YBackprop,
        @params.X,
        @params.Scale,
        @params.ReserveSpace1,
        @params.ReserveSpace2,
        @params.ReserveSpace3).SetAttributes(new
        {
            epsilon = @params.Epsilon,
            data_format = @params.DataFormat,
            is_training = @params.IsTraining
        });

    return tf.Context.ExecuteOp("FusedBatchNormGradV3", @params.Name, op_args);
}
/// <summary>
/// Gradient for batch normalization.
/// </summary>
/// <param name="params">Inputs and attributes for the <c>FusedBatchNormGrad</c> op.</param>
/// <returns>The outputs of the constructed graph op.</returns>
public static Tensor[] fused_batch_norm_grad(FusedBatchNormParams @params)
{
    // Graph-mode construction: build the op node via the op-def library.
    var op = tf.OpDefLib._apply_op_helper("FusedBatchNormGrad",
        name: @params.Name,
        args: new
        {
            y_backprop = @params.YBackprop,
            x = @params.X,
            scale = @params.Scale,
            reserve_space_1 = @params.ReserveSpace1,
            reserve_space_2 = @params.ReserveSpace2,
            epsilon = @params.Epsilon,
            data_format = @params.DataFormat,
            is_training = @params.IsTraining
        });

    return op.outputs;
}
/// <summary>
/// Gradient for batch normalization (V3). Selects between graph-mode op
/// construction and the eager fast-path executor depending on the current
/// context.
/// NOTE(review): this method has the same signature as the ExecuteOp-based
/// fused_batch_norm_grad_v3 earlier in this file — if both live in the same
/// class, only one can compile; confirm which overload is intended to remain.
/// </summary>
/// <param name="params">Inputs and attributes for the <c>FusedBatchNormGradV3</c> op.</param>
/// <returns>The output tensors produced by the op.</returns>
public static Tensor[] fused_batch_norm_grad_v3(FusedBatchNormParams @params)
{
    // Graph mode: build the op node and hand back its outputs.
    Func<Tensor[]> graph_fn = () => tf.OpDefLib._apply_op_helper("FusedBatchNormGradV3",
        name: @params.Name,
        args: new
        {
            y_backprop = @params.YBackprop,
            x = @params.X,
            scale = @params.Scale,
            reserve_space_1 = @params.ReserveSpace1,
            reserve_space_2 = @params.ReserveSpace2,
            reserve_space_3 = @params.ReserveSpace3,
            epsilon = @params.Epsilon,
            data_format = @params.DataFormat,
            is_training = @params.IsTraining
        }).outputs;

    // Eager mode: flat argument list of inputs followed by attr name/value
    // pairs, exactly as the fast-path runner expects.
    Func<Tensor[]> eager_fn = () => tf.Runner.TFE_FastPathExecute(
        tf.Context, tf.Context.DeviceName,
        "FusedBatchNormGradV3", @params.Name, null,
        @params.YBackprop, @params.X, @params.Scale,
        @params.ReserveSpace1, @params.ReserveSpace2, @params.ReserveSpace3,
        "epsilon", @params.Epsilon,
        "data_format", @params.DataFormat,
        "is_training", @params.IsTraining);

    return tf.Context.RunInAutoMode(graph_fn, eager_fn, @params.YBackprop);
}