/// <summary>
/// Advanced (fancy) indexing of <paramref name="x"/> by a list of integer index tensors.
/// The result shape is the (common) shape of the index tensors followed by the trailing
/// axes of <paramref name="x"/> that are not consumed by an index tensor
/// (NumPy-style advanced indexing: result.NDim = indexDim + x.NDim - indices.Count).
/// </summary>
/// <param name="x">the tensor being indexed</param>
/// <param name="indices">one index tensor per leading axis of <paramref name="x"/>; all must share the same shape</param>
/// <exception cref="RankException">if the index tensors do not all have the same shape</exception>
private Indexing(Tensor <Type> x, TensorList indices) : base("IndexWith", x, indices)
{
    this.Indices = indices;
    // All index tensors share one shape; its rank drives the leading axes of the result.
    var indexDim = Indices[0].NDim;
    // Each index tensor consumes one axis of x; the index tensors' own axes replace them.
    this.Shape = new Dim[indexDim + x.NDim - Indices.Count];
    // Validate every index tensor against the first one (i starts at 1: [0] is the reference).
    for (int i = 1; i < Indices.Count; ++i)
    {
        if (!Indices[i].Shape.CanEqualTo(Indices[0].Shape))
        {
            throw new RankException("In advanced indexing all index array must have the same size");
        }
    }
    // TODO : still TODO ?
    // Leading axes of the result come from the (shared) index shape.
    for (int i = 0; i < indexDim; ++i)
    {
        this.Shape[i] = Indices[0].Shape[i];
    }
    // Trailing axes come from the non-indexed tail of x; at i = indexDim this reads
    // x.Shape[Indices.Count], i.e. the first axis of x not consumed by an index tensor.
    // NOTE(review): assumes NDim here is this.Shape.Length — confirm against the base class.
    for (int i = NDim - 1; i >= indexDim; --i)
    {
        this.Shape[i] = x.Shape[x.NDim + i - NDim];
    }
}
/// <summary>
/// Builds a "Deindex" node: scatters the values of <paramref name="content"/> back into an
/// array of the given <paramref name="shape"/>, at the positions given by <paramref name="indices"/>.
/// </summary>
/// <param name="content">the tensor whose values are written back</param>
/// <param name="shape">the shape of the original (destination) array</param>
/// <param name="indices">the destination index of each value of <paramref name="content"/></param>
private Deindexing(Tensor <T> content, XList <Scalar <int>, int> shape, TensorList indices)
    : base("Deindex", content, shape, indices)
{
    this.Content = content;
    this.Indices = indices;
    this.Shape = shape;
}
/// <summary>
/// Builds a "Dispatch" node: fills an array of the given <paramref name="shape"/> with the
/// scalar <paramref name="content"/> at the positions given by <paramref name="indices"/>.
/// </summary>
/// <param name="content">the scalar written at every indexed position</param>
/// <param name="shape">the shape of the destination array</param>
/// <param name="indices">the positions to fill</param>
private Dispatch(Scalar <T> content, XList <Scalar <int>, int> shape, TensorList indices)
    : base("Dispatch", content, shape, indices)
{
    this.Content = content;
    this.Indices = indices;
    this.Shape = shape;
}
/// <summary>Creates a Dispatch node filling an array with a scalar at the given positions.</summary>
/// <param name="x">the scalar expression used to fill the array</param>
/// <param name="shape">the shape of the array</param>
/// <param name="indices">the indices to fill</param>
public static Tensor <T> Create(Scalar <T> x, XList <Scalar <int>, int> shape, TensorList indices)
{
    return new Dispatch<T>(x, shape, indices);
}
/// <summary>Builds a fresh Indexing node over the given operands (same op, new inputs).</summary>
/// <param name="x">the tensor to index</param>
/// <param name="y">the index tensors</param>
public override Binary <Tensor <Type>, Array <Type>, TensorList, Array <int>[]> Clone(Tensor <Type> x, TensorList y)
{
    return new Indexing<Type>(x, y);
}
/// <summary>Creates a Deindexing node, simplifying when the source is a constant fill.</summary>
/// <param name="x">the array to read values from</param>
/// <param name="shape">the shape of the original array</param>
/// <param name="indices">the indices of each values from `x`</param>
public static Tensor <T> Create(Tensor <T> x, XList <Scalar <int>, int> shape, TensorList indices)
{
    // Deindexing a constant-filled tensor is just dispatching that constant directly.
    if (x is Fill <T> fill)
    {
        return Dispatch<T>.Create(fill.x, shape, indices);
    }
    return new Deindexing<T>(x, shape, indices);
}