/// <summary>
/// Reshape the bottom (input) and top (output) blobs.
/// </summary>
/// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
/// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    BiasParameter biasParam = m_param.bias_param;

    // The bias comes either from a second bottom blob, or from the learned parameter.
    Blob<T> bias = (colBottom.Count > 1) ? colBottom[1] : m_colBlobs[0];

    // Always set axis == 0 in the special case where the bias is a scalar
    // (num_axes == 0).  Mathematically equivalent for any choice of axis, so the
    // actual setting can be safely ignored; and computation is most efficient
    // with axis == 0 and (therefore) outer_dim == 1.
    int nAxis = (bias.num_axes == 0) ? 0 : colBottom[0].CanonicalAxisIndex(biasParam.axis);

    m_log.CHECK_GE(colBottom[0].num_axes, nAxis + bias.num_axes, "bias blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + nAxis.ToString());

    // Every axis of the bias blob must exactly match the corresponding bottom[0] axis.
    for (int i = 0; i < bias.num_axes; i++)
    {
        m_log.CHECK_EQ(colBottom[0].shape(nAxis + i), bias.shape(i), "dimension mismatch between bottom[0]->shape(" + (nAxis + i).ToString() + ") and bias->shape(" + i.ToString() + ")");
    }

    // Cache the dimension products used by Forward/Backward.
    m_nOuterDim = colBottom[0].count(0, nAxis);
    m_nBiasDim = bias.count();
    m_nInnerDim = colBottom[0].count(nAxis + bias.num_axes);
    m_nDim = m_nBiasDim * m_nInnerDim;

    // In-place operation (bottom == top) needs no top reshape.
    if (colBottom[0] != colTop[0])
        colTop[0].ReshapeLike(colBottom[0]);

    // The multiplier is a vector of ones used to broadcast the bias across inner_dim.
    m_blobBiasMultiplier.Reshape(new List<int>() { m_nInnerDim });
    m_blobBiasMultiplier.SetData(1.0);
}
/// <summary>
/// Setup the layer: when no second bottom supplies the bias, create and
/// initialize the learnable bias parameter blob.
/// </summary>
/// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
/// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    if (colBottom.Count == 1 && m_colBlobs.Count > 0)
    {
        // Parameters already exist (e.g. loaded from a saved state).
        m_log.WriteLine("Skipping parameter initialization.");
    }
    else if (colBottom.Count == 1)
    {
        // bias is a learned parameter; initialize it.
        BiasParameter p = m_param.bias_param;
        int nAxis = colBottom[0].CanonicalAxisIndex(p.axis);
        int nNumAxes = p.num_axes;

        m_log.CHECK_GE(nNumAxes, -1, "num_axes must be non-negative, or -1 to extend to end of bottom[0].");

        if (nNumAxes >= 0)
            m_log.CHECK_GE(colBottom[0].num_axes, nAxis + nNumAxes, "bias blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + nAxis.ToString());

        m_colBlobs = new BlobCollection<T>();

        // The bias shape is bottom[0]'s shape over axes [nStart, nEnd);
        // num_axes == -1 extends to the last axis of bottom[0].
        List<int> rgBiasShape = new List<int>();
        int nStart = nAxis;
        int nEnd = (nNumAxes == -1) ? colBottom[0].shape().Count : nStart + nNumAxes;

        for (int i = nStart; i < nEnd; i++)
        {
            rgBiasShape.Add(colBottom[0].shape(i));
        }

        Blob<T> blobBias = new Blob<T>(m_cuda, m_log);
        blobBias.Name = m_param.name + " bias";
        // BUGFIX: the original assigned BLOB_TYPE.INTERNAL and then immediately
        // overwrote it with BLOB_TYPE.WEIGHT; the dead first assignment is removed.
        blobBias.type = BLOB_TYPE.WEIGHT;

        // Only fill the blob when it is not shared with another layer's parameter.
        if (!shareParameter(blobBias, rgBiasShape))
        {
            blobBias.Reshape(rgBiasShape);

            // Default to a constant(0) filler when none is specified.
            FillerParameter fp = p.filler;
            if (fp == null)
                fp = new FillerParameter("constant", 0.0);

            Filler<T> filler = Filler<T>.Create(m_cuda, m_log, fp);
            filler.Fill(blobBias);
        }

        m_colBlobs.Add(blobBias);
    }

    // Gradients are propagated to all learned parameters by default.
    m_rgbParamPropagateDown = new DictionaryMap<bool>(m_colBlobs.Count, true);
}