protected NdArray ForwardCpu(NdArray x)
{
    int[] inputShape = x.Shape;
    int[] outputShape = this.Bias.Shape;

    // Pad the bias shape with leading 1s (before Axis) and trailing 1s
    // so it has the same rank as the input.
    List<int> shapeList = new List<int>();

    for (int i = 0; i < this.Axis; i++)
    {
        shapeList.Add(1);
    }

    shapeList.AddRange(outputShape);

    for (int i = 0; i < inputShape.Length - this.Axis - outputShape.Length; i++)
    {
        shapeList.Add(1);
    }

    int[] y1Shape = shapeList.ToArray();

    // Reshape the bias to the padded shape, then broadcast it to the input shape.
    NdArray y1 = new Reshape(y1Shape).Forward(this.Bias)[0];
    NdArray y2 = new Broadcast(inputShape).Forward(y1)[0];

    return x + y2;
}
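The shape-padding step above is easier to follow in isolation. Below is a minimal stand-alone sketch of that alignment logic in plain C# (it is not part of the library above; the method name and the example shapes are purely illustrative):

using System;
using System.Collections.Generic;

static class ReshapeShapeDemo
{
    // Pads biasShape with 1s so it lines up with inputShape starting at axis.
    static int[] PadToInputRank(int[] inputShape, int[] biasShape, int axis)
    {
        var shape = new List<int>();
        for (int i = 0; i < axis; i++) shape.Add(1);            // leading 1s
        shape.AddRange(biasShape);                               // the bias dims
        for (int i = 0; i < inputShape.Length - axis - biasShape.Length; i++)
            shape.Add(1);                                        // trailing 1s
        return shape.ToArray();
    }

    static void Main()
    {
        // e.g. input [N, C, H, W] = [2, 3, 4, 5], bias [C] = [3], axis = 1
        int[] padded = PadToInputRank(new[] { 2, 3, 4, 5 }, new[] { 3 }, 1);
        Console.WriteLine(string.Join(",", padded)); // 1,3,1,1 -> broadcastable to 2,3,4,5
    }
}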
private NdArray ForwardCpu([NotNull] NdArray x)
{
    int[] inputShape = x.Shape;
    int[] outputShape = Weight.Shape;

    // Pad the weight shape with 1s so it has the same rank as the input.
    List<int> shapeList = new List<int>();

    for (int i = 0; i < Axis; i++)
    {
        shapeList.Add(1);
    }

    shapeList.AddRange(outputShape);

    for (int i = 0; i < inputShape.Length - Axis - outputShape.Length; i++)
    {
        shapeList.Add(1);
    }

    int[] preShape = shapeList.ToArray();

    // Reshape and broadcast the weight, then optionally the bias, to the input shape.
    NdArray y1 = new Reshape(preShape).Forward(false, Weight)[0];
    NdArray y2 = new Broadcast(inputShape).Forward(false, y1)[0];

    if (BiasTerm)
    {
        NdArray b1 = new Reshape(preShape).Forward(false, Bias)[0];
        NdArray b2 = new Broadcast(inputShape).Forward(false, b1)[0];

        return x * y2 + b2;
    }

    return x * y2;
}
public void Reshape()
{
    var x = new PlaceHolder<T>("x");
    var op = new Reshape<T>(x, new Shape(1, 1, -1, 1));

    using (var session = new Session<T>())
    {
        // [4] -> [1,1,4,1]
        var result = session.Run(op, new Dictionary<string, Volume<T>>
        {
            { "x", NewVolume(new[] { 1.0, 2.0, 3.0, 4.0 }, Volume.Shape.From(4)) }
        });
        Assert.AreEqual(new Shape(1, 1, 4, 1), result.Shape);

        // [8] -> [1,1,8,1]
        result = session.Run(op, new Dictionary<string, Volume<T>>
        {
            { "x", NewVolume(new[] { 1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0, 4.0 }, Volume.Shape.From(8)) }
        });
        Assert.AreEqual(new Shape(1, 1, 8, 1), result.Shape);
    }
}
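The -1 entry in the target shape acts as a wildcard: the runtime infers it so the total element count is preserved, which is why the same op maps [4] to [1,1,4,1] and [8] to [1,1,8,1]. A minimal stand-alone sketch of that inference in plain C# (an assumption about the general rule, not the library's actual implementation):

using System;
using System.Linq;

static class InferDimDemo
{
    // Replaces a single -1 entry with totalElements / (product of the known dims).
    static int[] ResolveShape(int[] targetShape, int totalElements)
    {
        int known = targetShape.Where(d => d != -1).Aggregate(1, (a, b) => a * b);
        return targetShape.Select(d => d == -1 ? totalElements / known : d).ToArray();
    }

    static void Main()
    {
        Console.WriteLine(string.Join(",", ResolveShape(new[] { 1, 1, -1, 1 }, 4))); // 1,1,4,1
        Console.WriteLine(string.Join(",", ResolveShape(new[] { 1, 1, -1, 1 }, 8))); // 1,1,8,1
    }
}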
public void ReshapeDerivate()
{
    var x = new PlaceHolder<T>("x");
    var op = new Reshape<T>(x, new Shape(1, 1, -1, 1));
    var grad = new PlaceHolder<T>("grad");

    using (var session = new Session<T>())
    {
        op.Derivate = grad;
        op.Differentiate();
        var diff = x.Derivate;

        // Forward pass: [4,1,1,1] -> [1,1,4,1]
        var result = session.Run(op, new Dictionary<string, Volume<T>>
        {
            { "x", NewVolume(new[] { 1.0, 2.0, 3.0, 4.0 }, Volume.Shape.From(4, 1, 1, 1)) }
        });

        // Backward pass reshapes the gradient back: [1,1,4,1] -> [4,1,1,1]
        result = session.Run(diff, new Dictionary<string, Volume<T>>
        {
            { "x", NewVolume(new[] { 1.0, 2.0, 3.0, 4.0 }, Volume.Shape.From(4)) },
            { "grad", NewVolume(new[] { 1.0, 1.0, 1.0, 1.0 }, Volume.Shape.From(1, 1, 4, 1)) }
        });
        Assert.AreEqual(new Shape(4, 1, 1, 1), result.Shape);
    }
}
public static Model ConvolutionalNeuralNetworkModel()
{
    var images = Variable<float>();
    var labels = Variable<float>();

    // Reshape the flat input images into NCHW tensors: [-1, 1, 28, 28].
    ILayer<float> net = new Reshape<float>(images, PartialShape.Create(-1, 1, 28, 28));
    net = new Convolution2D<float>(net.Output, 5, 5, 16);
    net = new ActivationReLU<float>(net.Output);
    net = new Pooling2D<float>(net.Output, PoolingMode.MAX, 2, 2, 2, 2);

    net = new Convolution2D<float>(net.Output, 5, 5, 32);
    net = new ActivationTanh<float>(net.Output);
    net = new Pooling2D<float>(net.Output, PoolingMode.MAX, 2, 2, 2, 2);

    // Flatten everything after the batch dimension before the fully connected layers.
    net = new Reshape<float>(net.Output, PartialShape.Create(-1, net.Output.Shape.Skip(1).Aggregate(ScalarOps.Mul)));
    net = new FullyConnected<float>(net.Output, 50);
    net = new ActivationTanh<float>(net.Output);
    net = new FullyConnected<float>(net.Output, 10);

    return new Model
    {
        Loss = new SoftmaxCrossEntropy<float>(net.Output, labels),
        Images = images,
        Labels = labels
    };
}
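The second Reshape above collapses all dimensions after the batch dimension into a single feature dimension. A small stand-alone sketch of that size computation with plain C# LINQ (the example shape is an assumption; ScalarOps.Mul is replaced by an ordinary multiplication lambda):

using System;
using System.Linq;

static class FlattenDemo
{
    static void Main()
    {
        // Example pooled feature-map shape: [batch, channels, height, width]
        int[] shape = { -1, 32, 4, 4 };

        // Product of everything after the batch dimension: 32 * 4 * 4 = 512
        int features = shape.Skip(1).Aggregate(1, (a, b) => a * b);

        Console.WriteLine($"flatten to [-1, {features}]"); // flatten to [-1, 512]
    }
}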
public override void Save(XmlElement layer)
{
    layer.SetAttribute("normalize", Normalize.ToString().ToLowerInvariant());
    layer.SetAttribute("activation", Activation.ToString());
    layer.SetAttribute("reshape", Reshape.ToString().ToLowerInvariant());

    if (Reshape)
    {
        layer.SetAttribute("shape", Shape.Serialize());
    }
}
private Layer ConvertShape2(paddle.OpDesc op)
{
    var x = GetParameter(op.Inputs, "X").Arguments[0];
    var output = GetParameter(op.Outputs, "Out").Arguments[0];
    var shape = GetAttr(op, "shape").Ints.ToArray();

    var layer = new Reshape(GetVarShape(x), shape);
    _inputs.Add(layer.Input, x);
    _outputs.Add(output, layer.Output);
    return layer;
}
public void Reshape1()
{
    var x = new Const<double>(1.0, "x");
    var op = new Reshape<double>(x, new Shape(1, 2, 3, 4));
    var xml = op.ToXml();

    var deserialized = SerializationExtensions.FromXml<double>(xml) as Reshape<double>;

    Assert.IsNotNull(deserialized);
    Assert.AreEqual(1, deserialized.Parents.Count);
    Assert.AreEqual("x", (deserialized.Parents[0] as Const<double>).Name);
    Assert.AreEqual(op.OutputShape, deserialized.OutputShape);
}
public void Reshape2()
{
    var x = new Const<double>(1.0, "x");
    var shape = new Const<double>(new[] { 1.0, 2.0, 3.0, 4.0 }, "shape");
    var op = new Reshape<double>(x, shape);
    var xml = op.ToXml();

    var deserialized = SerializationExtensions.FromXml<double>(xml) as Reshape<double>;

    Assert.IsNotNull(deserialized);
    Assert.AreEqual(2, deserialized.Parents.Count);
    Assert.AreEqual("x", (deserialized.Parents[0] as Const<double>).Name);
    Assert.AreEqual("shape", (deserialized.Parents[1] as Const<double>).Name);
}
private Layer ConvertMean(tflite.Operator op)
{
    var inputs = op.GetInputsArray();
    var input = _graph.Tensors(inputs[0]).Value;
    var axes = _model.GetTensor<int>(_graph.Tensors(inputs[1]).Value);

    if (axes.ToArray().SequenceEqual(new[] { 1, 2 }))
    {
        // A mean over the spatial axes is converted to a global average pool,
        // followed by a Reshape down to [batch, channels].
        var layer = new GlobalAveragePool(input.GetShapeArray().ToNCHW());
        _inputs.Add(layer.Input, inputs[0]);

        var reshape = new Reshape(layer.Output.Dimensions, new[] { -1, layer.Output.Dimensions[1] });
        reshape.Input.SetConnection(layer.Output);
        _outputs.Add(op.Outputs(0), layer.Output);
        return reshape;
    }
    else
    {
        throw new LayerNotSupportedException(op.ToString(), "Only [1,2] axis mean is supported");
    }
}
private Layer ConvertInnerProduct(LayerParameter layerParam)
{
    var input = _outputs[layerParam.Bottom[0]];
    var param = layerParam.InnerProductParam;
    var weights = LoadBlob(layerParam.Blobs[0]);

    if (input.Dimensions.Length == 4 && (input.Dimensions[2] != 1 || input.Dimensions[3] != 1))
    {
        // 4-D inputs with spatial extent are flattened with a Reshape
        // before feeding the fully connected layer.
        var flatten = new Reshape(input.Dimensions, new[] { -1, input.Dimensions.GetSize() });
        var layer = new FullyConnected(flatten.Output.Dimensions, weights, null, ActivationFunctionType.Linear);

        flatten.Input.SetConnection(input);
        layer.Input.SetConnection(flatten.Output);
        _outputs[layerParam.Top[0]] = layer.Output;
        return layer;
    }
    else
    {
        var layer = new FullyConnected(input.Dimensions, weights, null, ActivationFunctionType.Linear);

        layer.Input.SetConnection(input);
        _outputs[layerParam.Top[0]] = layer.Output;
        return layer;
    }
}
protected NdArray ForwardCpu(NdArray x)
{
    int[] inputShape = x.Shape;
    int[] outputShape = this.Weight.Shape;

    // Pad the weight shape with 1s so it has the same rank as the input
    // (the same alignment as in the bias-only version above).
    List<int> shapeList = new List<int>();

    for (int i = 0; i < this.Axis; i++)
    {
        shapeList.Add(1);
    }

    shapeList.AddRange(outputShape);

    for (int i = 0; i < inputShape.Length - this.Axis - outputShape.Length; i++)
    {
        shapeList.Add(1);
    }

    int[] preShape = shapeList.ToArray();

    NdArray y1 = new Reshape(preShape).Forward(this.Weight)[0];
    NdArray y2 = new Broadcast(inputShape).Forward(y1)[0];

    if (BiasTerm)
    {
        NdArray b1 = new Reshape(preShape).Forward(this.Bias)[0];
        NdArray b2 = new Broadcast(inputShape).Forward(b1)[0];

        return x * y2 + b2;
    }
    else
    {
        return x * y2;
    }
}
public void AllocateInputMemory(Reshape layer, OutputConnector input, InferenceContext context)
{
    // Reshape does not move data, so its output aliases the memory already
    // allocated for the connector feeding its input.
    context.MainMemoryMap.Add(layer.Output, context.GetOrAllocateMainMemory(layer.Input.Connection.From));
}
protected override bool HandleMessage(WidgetMessage message, Widget widget, IntPtr param1, IntPtr param2)
{
    bool handled = false;

    switch (message)
    {
    case WidgetMessage.Create:
        Created?.Invoke(widget, param1 != default, ref handled);
        break;
    case WidgetMessage.Destroy:
        Destroyed?.Invoke(widget, param1 != default, ref handled);
        break;
    case WidgetMessage.Paint:
        Paint?.Invoke(widget, ref handled);
        break;
    case WidgetMessage.Draw:
        Draw?.Invoke(widget, ref handled);
        break;
    case WidgetMessage.KeyPress:
        KeyPressed?.Invoke(widget, ref AsRef<KeyState>(param1), ref handled);
        break;
    case WidgetMessage.KeyTakeFocus:
        TakingFocus?.Invoke(widget, param1 != default, ref handled);
        break;
    case WidgetMessage.KeyLoseFocus:
        LostFocus?.Invoke(widget, param1 != default, ref handled);
        break;
    case WidgetMessage.MouseDown:
        MouseDown?.Invoke(widget, ref AsRef<MouseState>(param1), ref handled);
        break;
    case WidgetMessage.MouseDrag:
        MouseDrag?.Invoke(widget, ref AsRef<MouseState>(param1), ref handled);
        break;
    case WidgetMessage.MouseUp:
        MouseUp?.Invoke(widget, ref AsRef<MouseState>(param1), ref handled);
        break;
    case WidgetMessage.Reshape:
        Reshape?.Invoke(widget, Widget.GetOrCreate(param1), ref AsRef<WidgetGeometryChange>(param2), ref handled);
        break;
    case WidgetMessage.ExposedChanged:
        ExposedChanged?.Invoke(widget, ref handled);
        break;
    case WidgetMessage.AcceptChild:
        ChildAdded?.Invoke(widget, Widget.GetOrCreate(param1), ref handled);
        break;
    case WidgetMessage.LoseChild:
        ChildRemoved?.Invoke(widget, Widget.GetOrCreate(param1), ref handled);
        break;
    case WidgetMessage.AcceptParent:
        ParentChanged?.Invoke(widget, param1 != default ? Widget.GetOrCreate(param1) : null, ref handled);
        break;
    case WidgetMessage.Shown:
        Shown?.Invoke(widget, Widget.GetOrCreate(param1), ref handled);
        break;
    case WidgetMessage.Hidden:
        Hidden?.Invoke(widget, Widget.GetOrCreate(param1), ref handled);
        break;
    case WidgetMessage.DescriptorChanged:
        DescriptorChanged?.Invoke(widget, ref handled);
        break;
    case WidgetMessage.PropertyChanged:
        PropertyChanged?.Invoke(widget, param1.ToInt32(), param2, ref handled);
        break;
    case WidgetMessage.MouseWheel:
        MouseWheel?.Invoke(widget, ref AsRef<MouseState>(param1), ref handled);
        break;
    case WidgetMessage.CursorAdjust:
        CursorAdjust?.Invoke(widget, ref AsRef<MouseState>(param1), ref AsRef<CursorStatus>(param2), ref handled);
        break;
    }

    return handled;
}