/// <summary>
/// Splits the tensor <paramref name="x"/> along the dimension <paramref name="axis"/>
/// into one tensor per slice, recording the inverse (stack) operation for
/// back-propagation when gradients are being calculated.
/// </summary>
/// <param name="session">The session that scopes and executes this operation.</param>
/// <param name="x">The tensor to split.</param>
/// <param name="axis">The zero-based index of the axis along which to split.</param>
/// <returns>The tensors that contain the slices of <paramref name="x"/>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="x"/> is <c>null</c>.</exception>
/// <exception cref="ArgumentException"><paramref name="axis"/> is negative.</exception>
public static Tensor[] Unstack(this Session session, Tensor x, int axis)
{
    const string ActionName = "unstack";

    if (x == null)
    {
        throw new ArgumentNullException(nameof(x));
    }

    if (axis < 0)
    {
        throw new ArgumentException(Properties.Resources.E_NegativeAxisIndex, nameof(axis));
    }

    return session.RunOperation(
        ActionName,
        () =>
        {
            bool calculateGradient = session.CalculateGradients && x.CalculateGradient;

            // allocate destination: one tensor per slice along 'axis',
            // each shaped like x with that axis removed
            Tensor[] ys = session.AllocateTensors(ActionName, x.Axes[axis], x.Shape.RemoveAxis(axis), calculateGradient);

            ArrayOperations.Unstack(x, axis, ys, false);

#if !NOLEARNING
            if (calculateGradient)
            {
                // backward pass re-assembles the slice gradients into x's gradient
                session.Push(ActionName, () => ArrayOperations.Stack(ys, axis, x, true));

                // return copy of the array; calling method can replace its content;
                // our closure keeps the original array, not its items
                return ys.ToArray();
            }
#endif

            return ys;
        });
}
/// <summary>
/// Concatenates the tensors <paramref name="xs"/> along a new dimension inserted at
/// <paramref name="axis"/>, recording the inverse (unstack) operation for
/// back-propagation when gradients are being calculated.
/// </summary>
/// <param name="session">The session that scopes and executes this operation.</param>
/// <param name="xs">The tensors to concatenate. All must have the same rank and shape.</param>
/// <param name="axis">The zero-based index at which to insert the new axis.</param>
/// <returns>The tensor that contains the stacked sources.</returns>
/// <exception cref="ArgumentNullException"><paramref name="xs"/> is <c>null</c>.</exception>
/// <exception cref="ArgumentException">
/// <paramref name="axis"/> is negative, <paramref name="xs"/> is empty,
/// or the source tensors do not all have the same rank and shape.
/// </exception>
public static Tensor Stack(this Session session, IList<Tensor> xs, int axis)
{
    const string ActionName = "stack";

    if (xs == null)
    {
        throw new ArgumentNullException(nameof(xs));
    }

    if (axis < 0)
    {
        throw new ArgumentException(Properties.Resources.E_NegativeAxisIndex, nameof(axis));
    }

    return session.RunOperation(
        ActionName,
        () =>
        {
            // snapshot the sources; the calling method can replace the list's content;
            // the forward call and the gradient closure must keep using the original
            // tensors (mirrors the copy-on-return guard in Unstack)
            Tensor[] sources = xs.ToArray();

            // check source
            int xdim = sources.Length;
            if (xdim == 0)
            {
                throw new ArgumentException("There should be at least one source tensor.");
            }

            if (!Shape.AreSame(sources))
            {
                throw new ArgumentException("All source tensors must have the same rank and shape.");
            }

            bool calculateGradient = session.CalculateGradients && sources.Any(x => x.CalculateGradient);

            Tensor y = session.AllocateTensor(ActionName, sources[0].Shape.InsertAxis(axis, xdim), calculateGradient);

            ArrayOperations.Stack(sources, axis, y, false);

#if !NOLEARNING
            if (calculateGradient)
            {
                // backward pass scatters y's gradient back into the source tensors
                session.Push(ActionName, () => ArrayOperations.Unstack(y, axis, sources, true));
            }
#endif

            y.Validate();

            return y;
        });
}