public void TestTensorConst()
{
    // A constant 2x2 tensor compiled into a function must evaluate to that
    // constant, and must differ from a constant holding another value.
    var constant = Op.Const(2.0f, 2, 2);
    var evaluate = Op.Function(output: constant);

    AssertArray.AreEqual(evaluate(), NN.Const(2.0f, 2, 2));
    AssertArray.AreNotEqual(evaluate(), NN.Const(3.0f, 2, 2));
}
/// <summary>
/// Builds a reshape of <paramref name="x"/> to <paramref name="shape"/>.
/// At most one entry of <paramref name="shape"/> may be the wildcard -1,
/// which is inferred from the total size of <paramref name="x"/>.
/// Returns <paramref name="x"/> unchanged when the target shape is provably
/// the same, and folds a reshape of a Fill into a new constant.
/// </summary>
/// <param name="x">The tensor to reshape.</param>
/// <param name="shape">Target shape; one dimension may be -1 (inferred).</param>
/// <returns>A tensor expression with the requested shape.</returns>
/// <exception cref="ArgumentException">More than one -1 wildcard in <paramref name="shape"/>.</exception>
public static Tensor<Type> Create(Tensor<Type> x, Dim[] shape)
{
    // Locate the optional -1 wildcard and accumulate the product of the
    // explicitly given dimensions.
    int resizePos = -1;
    Dim size = 1;
    for (int i = 0; i < shape.Length; ++i)
    {
        if (shape[i].Check((Int c) => c.Value == -1))
        {
            if (resizePos >= 0)
            {
                // Only one dimension can be left for the reshape to infer.
                throw new ArgumentException("Can't reshape to: [" + string.Join(", ", shape.Select(a => a.ToString())) + "]");
            }
            resizePos = i;
        }
        else
        {
            size *= shape[i];
        }
    }

    if (resizePos >= 0)
    {
        // Resolve the wildcard on a copy so the caller's array is not
        // mutated as a side effect of building the expression.
        var resolved = (Dim[])shape.Clone();
        var originalSize = x.Shape.Aggregate((Dim)1, (s, d) => s * d);
        resolved[resizePos] = originalSize / size;
        shape = resolved;
    }

    // If every target dimension will provably equal the current one, the
    // reshape is a no-op: return the input itself.
    if (x.NDim == shape.Length)
    {
        bool sameShape = true;
        for (int i = 0; i < shape.Length && sameShape; ++i)
        {
            if (!ShapeExtension.WillEqualTo(x.Shape[i], shape[i]))
            {
                sameShape = false;
            }
        }
        if (sameShape)
        {
            return x;
        }
    }

    switch (x)
    {
        // Reshaping a constant fill is just the same fill with the new shape.
        case Fill<Type> fill:
            return Op.Const(fill.x, shape);
        default:
            return new Reshaping<Type>(x, shape);
    }
}
/// <summary>
/// Builds a "one-hot point" tensor of the given shape: zero everywhere
/// except at the position selected by <paramref name="indexes"/>, which
/// holds <paramref name="content"/> (defaults to one).
/// </summary>
/// <param name="shape">Shape of the resulting tensor.</param>
/// <param name="indexes">One index per dimension selecting the hot position.</param>
/// <param name="content">Value stored at the hot position; null means one.</param>
/// <returns>A tensor expression for the one-hot point.</returns>
/// <exception cref="ArgumentException">Rank of <paramref name="shape"/> and number of <paramref name="indexes"/> disagree.</exception>
public static Tensor<Type> Create(Dim[] shape, Scalar<int>[] indexes, Scalar<Type> content = null)
{
    content = content ?? Numeric<Type>.One;
    // Degenerate single-element tensor: the hot point is the whole tensor,
    // so a plain constant suffices.
    if (shape.Length == 1 && shape[0].IsOne)
    {
        return Op.Const(content, shape);
    }
    if (shape.Length != indexes.Length)
    {
        // Was a bare ArgumentException with no message; say what went wrong.
        throw new ArgumentException(
            $"Expected one index per dimension: got {indexes.Length} indexes for a rank-{shape.Length} shape.",
            nameof(indexes));
    }
    return new OneHotPoint<Type>(shape, indexes, content);
}
/// <summary>
/// Builds a softmax of <paramref name="x"/> along <paramref name="axis"/>.
/// When <paramref name="x"/> is a broadcast along that very axis, the
/// softmax is uniform (1 / axis length) and is folded into a constant;
/// a broadcast along other axes pushes the softmax inside the broadcast.
/// </summary>
/// <param name="x">Input tensor.</param>
/// <param name="axis">Axis to normalize over (default -1).</param>
/// <returns>A tensor expression for the softmax.</returns>
public static Tensor<float> Create(Tensor<float> x, int axis = -1)
{
    if (x is BroadCast<float> broadcast)
    {
        // NOTE(review): Shape[axis] is indexed with the raw axis value;
        // assumes the Shape indexer handles negative axes — confirm.
        if (broadcast.broadcast.Contains(axis))
        {
            return Op.Const(1f / broadcast.Shape[axis].As<float>(), broadcast.Shape);
        }
        return Op.Softmax(broadcast.x, axis).BroadcastTo(broadcast.Shape);
    }
    return new Softmax(x, axis);
}
/// <summary>Wraps a permutation given as raw ints into constant scalars.</summary>
private static Perm ToAxes(int[] perm) =>
    new Perm(Array.ConvertAll(perm, p => Op.Const(p)));
/// <summary>Wraps an axis list given as raw ints into constant scalars.</summary>
private static Axes ToAxes(int[] axes)
{
    var scalars = axes.Select(axis => Op.Const(axis)).ToArray();
    return new Axes(scalars);
}