/// <summary>
/// Backward pass for the 1D convolution layer: computes the gradients with
/// respect to the input (<c>Input.Grad</c>), the kernel weights ("w") and,
/// when <c>UseBias</c> is set, the bias ("b").
/// </summary>
/// <param name="outputgrad">Gradient of the loss w.r.t. this layer's output.</param>
public override void Backward(SuperArray outputgrad)
{
    // Padding offset must mirror the mapping used in Forward so Col2Im
    // reconstructs the input gradient at the right positions.
    int pad = 0;
    if (Padding == PaddingType.Same)
    {
        pad = 1;
    }
    else if (Padding == PaddingType.Full)
    {
        pad = 2;
    }

    // Flatten the output gradient to (Filters, columns) so the weight
    // gradient is a single matrix product against the cached im2col matrix.
    var dout_flat = outputgrad.Transpose(2, 0, 1).Reshape(Filters, -1);
    var dW = Ops.Dot(dout_flat, xCols.Transpose());
    dW = dW.Reshape(Params["w"].Data.Shape);
    // NOTE(review): summing over axes {0,1,2} reduces every axis of a 3D
    // gradient, which would collapse the filter axis too before the
    // Reshape(Filters, -1) — verify the intended axes (presumably batch and
    // spatial only) against Ops.Sum's semantics.
    var db = Ops.Sum(outputgrad, new uint[] { 0, 1, 2 }).Reshape(Filters, -1);
    // Input gradient: project the output gradient back through the flattened
    // weights, then fold the columns back into the input's layout.
    var W_flat = Params["w"].Data.Reshape(Filters, -1);
    var dX_col = Ops.Dot(W_flat.Transpose(), dout_flat);
    // NOTE(review): Forward dilates the kernel extent before Im2Col; this
    // call relies on KernalSize already holding the dilated value when
    // DilationRate > 1 — TODO confirm forward/backward stay in sync.
    Input.Grad = ImUtil.Col2Im(dX_col, Input.Data.Shape.Dims, Tuple.Create(KernalSize, KernalSize), pad, Strides);
    Params["w"].Grad = dW;
    if (UseBias)
    {
        Params["b"].Grad = db;
    }
}
/// <summary>
/// Forward pass for the 2D max-pooling layer: lowers each pooling window to a
/// column (im2col), takes the per-window maximum, and restores the
/// (n, c, outH, outW) layout.
/// </summary>
/// <param name="x">Input array; decomposed via <c>GetConv2DShape</c>.</param>
public override void Forward(SuperArray x)
{
    base.Forward(x);
    var (n, c, h, w) = x.GetConv2DShape();

    // Map the padding mode to its im2col offset.
    int pad;
    switch (Padding)
    {
        case PaddingType.Same:
            pad = 1;
            break;
        case PaddingType.Full:
            pad = 2;
            break;
        default:
            pad = 0;
            break;
    }

    // Spatial extent of the pooled output.
    var outH = (h - PoolSize.Item1 + 2 * pad) / Strides + 1;
    var outW = (w - PoolSize.Item2 + 2 * pad) / Strides + 1;

    // Treat every channel as an independent single-channel image so each
    // pooling window becomes one column of the im2col matrix.
    var perChannel = x.Reshape(n * c, 1, h, w);
    xCols = ImUtil.Im2Col(perChannel, PoolSize, pad, Strides);

    // Maximum over each window (axis 0), then back to (n, c, outH, outW).
    Output = Ops.Max(xCols, 0).Reshape(outH, outW, n, c).Transpose(2, 3, 0, 1);
}
/// <summary>
/// Forward pass for the 2D convolution layer: builds (or fetches) the
/// trainable parameters, lowers the input to a column matrix (im2col) and
/// performs the convolution as a single matrix product.
/// </summary>
/// <param name="x">Input array; decomposed via <c>GetConv2DShape</c>.</param>
public override void Forward(SuperArray x)
{
    base.Forward(x);
    var (n, c, h, w) = x.GetConv2DShape();

    // Lazily create/fetch the kernel and (optionally) the bias parameters.
    Parameter weight = BuildParam("w", new Shape(Filters, c, KernalSize.Item1, KernalSize.Item2), KernalInitializer, KernalConstraint, KernalRegularizer);
    Parameter bias = UseBias
        ? BuildParam("b", new Shape(Filters, 1), BiasInitializer, BiasConstraint, BiasRegularizer)
        : null;

    // Map the padding mode to its im2col offset.
    int pad;
    switch (Padding)
    {
        case PaddingType.Same:
            pad = 1;
            break;
        case PaddingType.Full:
            pad = 2;
            break;
        default:
            pad = 0;
            break;
    }

    // Effective (dilated) kernel extent along each spatial axis.
    var dilatedH = (KernalSize.Item1 - 1) * DialationRate.Item1 + 1;
    var dilatedW = (KernalSize.Item2 - 1) * DialationRate.Item2 + 1;
    var dialatedKernel = Tuple.Create(dilatedH, dilatedW);

    var outH = (h - dilatedH + 2 * pad) / Strides + 1;
    var outW = (w - dilatedW + 2 * pad) / Strides + 1;

    // Convolution as a matrix product: (Filters, k*k*c) x (k*k*c, columns).
    xCols = ImUtil.Im2Col(x, dialatedKernel, pad, Strides);
    var kernelRows = weight.Data.Reshape(Filters, -1);
    var result = Ops.Dot(kernelRows, xCols);
    if (UseBias)
    {
        result = result + bias.Data;
    }

    // (Filters, outH, outW, n) -> (n, Filters, outH, outW)
    Output = result.Reshape(Filters, outH, outW, n);
    Output = Output.Transpose(3, 0, 1, 2);
}
/// <summary>
/// Backward pass for the 2D max-pooling layer: folds the output gradient back
/// into the input's shape via Col2Im.
/// Fix: removed the dead <c>dX_col = Ops.Constant(0, xCols.Shape)</c>
/// allocation — it was never read.
/// </summary>
/// <param name="outputgrad">Gradient of the loss w.r.t. this layer's output.</param>
public override void Backward(SuperArray outputgrad)
{
    var (n, c, h, w) = Input.Data.GetConv2DShape();

    // Padding offset must mirror the mapping used in Forward.
    int pad = 0;
    if (Padding == PaddingType.Same)
    {
        pad = 1;
    }
    else if (Padding == PaddingType.Full)
    {
        pad = 2;
    }

    // Bring the gradient back to the (h, w, n, c) layout that existed before
    // the final transpose in Forward.
    var dout_flat = outputgrad.Transpose(2, 3, 0, 1);

    // NOTE(review): a max-pool backward normally routes each window's
    // gradient only to its arg-max element; this implementation passes the
    // gradient through Col2Im without a max mask — verify intended semantics.
    // NOTE(review): Forward ran Im2Col on x reshaped to (n*c, 1, h, w), but
    // Col2Im here receives the original (n, c, h, w) dims — confirm agreement.
    var dX = ImUtil.Col2Im(dout_flat, Input.Data.Shape.Dims, PoolSize, pad, Strides);
    Input.Grad = dX.Reshape(n, c, h, w);
}
/// <summary>
/// Forward pass for the 1D convolution layer (im2col + matrix product).
/// Fix: the original wrote the dilated extent back into the
/// <c>KernalSize</c> field (<c>KernalSize = (KernalSize - 1) * DilationRate + 1</c>),
/// so every Forward call re-dilated an already-dilated value — compounding
/// for <c>DilationRate &gt; 1</c> — and the weight shape built earlier in the
/// same call drifted out of sync. The dilated extent is now a local.
/// </summary>
/// <param name="x">Input array; decomposed via <c>GetConv1DShape</c>.</param>
public override void Forward(SuperArray x)
{
    base.Forward(x);
    var (n, c, s) = x.GetConv1DShape();

    // Lazily create/fetch the kernel and (optionally) the bias parameters.
    Parameter weight = BuildParam("w", new Shape(Filters, c, KernalSize), KernalInitializer, KernalConstraint, KernalRegularizer);
    Parameter bias = null;
    if (UseBias)
    {
        bias = BuildParam("b", new Shape(Filters, 1), BiasInitializer, BiasConstraint, BiasRegularizer);
    }

    // Map the padding mode to its im2col offset.
    int pad = 0;
    if (Padding == PaddingType.Same)
    {
        pad = 1;
    }
    else if (Padding == PaddingType.Full)
    {
        pad = 2;
    }

    // Effective (dilated) kernel extent; kept local so the layer's
    // configured KernalSize is never mutated.
    var dilatedKernal = (KernalSize - 1) * DilationRate + 1;
    var steps_out = (s - dilatedKernal + 2 * pad) / Strides + 1;

    // Convolution as a matrix product over the im2col columns.
    xCols = ImUtil.Im2Col(x, Tuple.Create(dilatedKernal, dilatedKernal), pad, Strides);
    var wRows = weight.Data.Reshape(Filters, -1);
    Output = Ops.Dot(wRows, xCols);
    if (UseBias)
    {
        Output = Output + bias.Data;
    }

    // (Filters, steps_out, n) -> (n, Filters, steps_out)
    Output = Output.Reshape(Filters, steps_out, n);
    Output = Output.Transpose(2, 0, 1);
}
/// <summary>
/// Forward pass for the 1D average-pooling layer: lowers each pooling window
/// to a column (im2col), averages each window, and restores (n, c, s_out).
/// Fix: the output length now accounts for the padding passed to Im2Col
/// (<c>(s - PoolSize + 2 * pad) / Strides + 1</c>, matching the 2D pooling
/// layer); the original omitted <c>2 * pad</c>, so the final Reshape
/// disagreed with the number of columns whenever Padding was Same or Full.
/// </summary>
/// <param name="x">Input array; decomposed via <c>GetConv1DShape</c>.</param>
public override void Forward(SuperArray x)
{
    base.Forward(x);
    var (n, c, s) = x.GetConv1DShape();

    // Map the padding mode to its im2col offset.
    int pad = 0;
    if (Padding == PaddingType.Same)
    {
        pad = 1;
    }
    else if (Padding == PaddingType.Full)
    {
        pad = 2;
    }

    // Pooled output length — must include the padding used by Im2Col below.
    var s_out = (s - PoolSize + 2 * pad) / Strides + 1;

    // Treat every channel as an independent single-channel sequence so each
    // pooling window becomes one column of the im2col matrix.
    var x_reshaped = x.Reshape(n * c, 1, s);
    xCols = ImUtil.Im2Col(x_reshaped, Tuple.Create(PoolSize, PoolSize), pad, Strides);

    // Mean over each window (axis 0), then back to (n, c, s_out).
    Output = Ops.Mean(xCols, 0);
    Output = Output.Reshape(s_out, n, c).Transpose(2, 0, 1);
}