public override void Setup(TensorCollection bottom, TensorCollection top)
{
    base.Setup(bottom, top);

    this.n = Parameters.Outputs;
    this.m = bottom[0].Num;
    this.k = bottom[0].Count / bottom[0].Num;

    // Reshape the output
    top[0].Reshape(m, n, 1, 1);

    if (this.weights == null || this.bias == null)
    {
        // Fill the weights
        this.weights = new Tensor(1, 1, n, k);
        var weightsFiller = FillerFactory.Create(Parameters.WeightsFiller);
        weightsFiller.Fill(weights);

        // If necessary, initialize and fill the bias term
        if (Parameters.UseBias)
        {
            this.bias = new Tensor(1, 1, 1, n);
            var biasFiller = FillerFactory.Create(Parameters.BiasFiller);
            biasFiller.Fill(bias);
        }
    }
    else
    {
        // LOG: we are skipping the parameter initialization
    }

    if (Parameters.UseBias)
    {
        this.biasMultiplier = new Tensor(1, 1, 1, m);
        using (var biasMultiplierCpu = this.biasMultiplier.OnCpu())
        {
            biasMultiplierCpu.Data.Map(v => 1, biasMultiplierCpu.Data, Zeros.Include);
        }
    }
}
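The dimensions set up above drive the forward pass of the fully connected layer as a single matrix product: m rows (one per image), k flattened input features, and n outputs. As a minimal, standalone sketch (not part of the layer code; the batch size, input shape, and output count below are purely hypothetical), the same shapes can be derived like this:

using System;

class InnerProductDims
{
    static void Main()
    {
        // Hypothetical input: a batch of 64 images, each 3 x 28 x 28, with 100 outputs.
        int num = 64, channels = 3, height = 28, width = 28;
        int outputs = 100;

        int m = num;                        // one row per image
        int k = channels * height * width;  // flattened features per image (Count / Num)
        int n = outputs;                    // one column per output neuron

        // The forward pass is effectively an (m x k) * (k x n) product, which is
        // why the weights tensor is allocated as (1, 1, n, k) and the output is
        // reshaped to (m, n, 1, 1).
        Console.WriteLine($"output: {m} x {n}, weights: {n} x {k}");
    }
}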
public override void Setup(IList<Blob> bottom, IList<Blob> top)
{
    base.Setup(bottom, top);

    Guard.That(() => this.Parameters.Kernel.Width).IsGreaterThan(0);
    Guard.That(() => this.Parameters.Kernel.Height).IsGreaterThan(0);
    Guard.That(() => this.Parameters.Kernel.Depth).IsEqual(0);
    Guard.That(() => this.Parameters.NumberOfOutputs).IsGreaterThan(0);

    int num = bottom[0].Num;
    int channels = bottom[0].Channels;
    int height = bottom[0].Height;
    int width = bottom[0].Width;

    // All bottom blobs must share the same dimensions.
    foreach (var bottomBlob in bottom)
    {
        Guard.That(() => bottomBlob.Num).IsEqual(num);
        Guard.That(() => bottomBlob.Channels).IsEqual(channels);
        Guard.That(() => bottomBlob.Height).IsEqual(height);
        Guard.That(() => bottomBlob.Width).IsEqual(width);
    }

    Size padding = this.Parameters.Padding;
    Size stride = this.Parameters.Stride;
    Size kernel = this.Parameters.Kernel;

    if (this.IsScaleKernel)
    {
        throw new NotSupportedException("1x1 kernels are not supported yet.");
    }

    // The number of outputs must be a multiple of the group count.
    Guard.That(() => this.Parameters.NumberOfOutputs % this.Parameters.Groups).IsEqual(0);

    // The number of channels must be a multiple of the group count.
    Guard.That(() => channels % this.Parameters.Groups).IsEqual(0);

    // We are going to work one image at a time to avoid overly large memory usage.
    int outputHeight = (height + 2 * padding.Height - kernel.Height) / stride.Height + 1;
    int outputWidth = (width + 2 * padding.Width - kernel.Width) / stride.Width + 1;
    this.imageBuffer = new Blob(1, channels * kernel.Height * kernel.Width, outputHeight, outputWidth);

    // Figure out the dimensions for the individual multiplications.
    this.m = this.Parameters.NumberOfOutputs / this.Parameters.Groups;
    this.k = channels * kernel.Height * kernel.Width / this.Parameters.Groups;
    this.n = outputHeight * outputWidth;

    // Resize the output
    foreach (var topBlob in top)
    {
        topBlob.Reshape(num, this.Parameters.NumberOfOutputs, outputHeight, outputWidth);
    }

    // Initialize the weights
    this.weights = new Blob(this.Parameters.NumberOfOutputs, channels / this.Parameters.Groups, this.Parameters.Kernel.Height, this.Parameters.Kernel.Width);
    var weightsFiller = FillerFactory.Create(this.Parameters.WeightsFiller);
    weightsFiller.Fill(this.weights);
    this.SetPropagateDownForParameter(0, true);

    if (this.Parameters.UseBias)
    {
        // Initialize the bias
        this.bias = new Blob(1, 1, 1, this.Parameters.NumberOfOutputs);
        var biasFiller = FillerFactory.Create(this.Parameters.BiasFiller);
        biasFiller.Fill(this.bias);
        this.SetPropagateDownForParameter(1, true);
    }

    this.biasMultiplier = new Blob(1, 1, 1, this.n);
    this.biasMultiplier.InitializeWith(1, 0);
}
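To make the convolution bookkeeping above concrete, the short standalone sketch below (not part of the layer; the AlexNet-style numbers are only an illustrative assumption) evaluates the output-size formula and the per-group GEMM dimensions for a 3x227x227 input with an 11x11 kernel, stride 4, no padding, and a single group:

using System;

class ConvolutionDims
{
    static void Main()
    {
        // Hypothetical configuration, chosen only for illustration.
        int channels = 3, height = 227, width = 227;
        int kernelSize = 11, stride = 4, padding = 0;
        int numberOfOutputs = 96, groups = 1;

        // Same formula as in Setup: (input + 2 * padding - kernel) / stride + 1.
        int outputHeight = (height + 2 * padding - kernelSize) / stride + 1; // 55
        int outputWidth = (width + 2 * padding - kernelSize) / stride + 1;   // 55

        // Per-group GEMM dimensions, mirroring m, k, and n above.
        int m = numberOfOutputs / groups;                    // output feature maps per group
        int k = channels * kernelSize * kernelSize / groups; // unrolled patch size (one row of the image buffer)
        int n = outputHeight * outputWidth;                  // number of spatial positions

        Console.WriteLine($"output: {numberOfOutputs} x {outputHeight} x {outputWidth}, GEMM: {m} x {k} x {n}");
    }
}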