Example #1
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            // Internal softmax layer.
            LayerParameter softmax_param = new LayerParameter(LayerParameter.LayerType.SOFTMAX);

            softmax_param.softmax_param.axis = m_param.infogain_loss_param.axis;
            softmax_param.loss_weight.Clear();
            softmax_param.loss_weight.Add(1);
            m_softmaxLayer = new SoftmaxLayer<T>(m_cuda, m_log, softmax_param);
            m_colSoftmaxBottomVec.Clear();
            m_colSoftmaxBottomVec.Add(colBottom[0]);
            m_colSoftmaxTopVec.Clear();
            m_colSoftmaxTopVec.Add(m_blobProb);
            m_softmaxLayer.Setup(m_colSoftmaxBottomVec, m_colSoftmaxTopVec);

            // Optional label to ignore when computing the loss.
            m_nIgnoreLabel = m_param.loss_param.ignore_label;

            // normalization
            m_log.CHECK(!m_param.loss_param.normalize, "normalize is deprecated; use 'normalization' instead.");
            m_normalization = m_param.loss_param.normalization;

            // Matrix H: when not supplied as a third bottom blob, load it from the source file.
            if (colBottom.Count < 3)
            {
                m_log.CHECK(m_param.infogain_loss_param.source != null, "Infogain matrix source must be specified.");
                PersistCaffe<T> persist = new PersistCaffe<T>(m_log, true);
                BlobProto blobProto = persist.LoadBlobProto(m_param.infogain_loss_param.source, 1);
                m_blobInfoGain.FromProto(blobProto);
            }
        }
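
A minimal usage sketch of the setup path above. This is an assumption-laden illustration, not code from the library: the InfogainLossLayer&lt;float&gt; class name, the INFOGAIN_LOSS layer type, the blobPred/blobLabel/blobLoss variables, and the cuda/log handles are all presumed from context.

        // Hypothetical wiring; assumes 'cuda' (a CudaDnn<float>) and 'log' (a Log)
        // are already initialized, and that the prediction/label/loss blobs exist.
        LayerParameter p = new LayerParameter(LayerParameter.LayerType.INFOGAIN_LOSS);
        p.infogain_loss_param.axis = 1;  // run the internal softmax over the channel axis
        p.infogain_loss_param.source = "infogain_h.binaryproto";  // file holding matrix H (assumed path)

        InfogainLossLayer<float> layer = new InfogainLossLayer<float>(cuda, log, p);

        BlobCollection<float> colBottom = new BlobCollection<float>();
        colBottom.Add(blobPred);   // bottom[0]: predictions fed to the internal softmax
        colBottom.Add(blobLabel);  // bottom[1]: labels

        BlobCollection<float> colTop = new BlobCollection<float>();
        colTop.Add(blobLoss);      // top[0]: loss output

        // With fewer than three bottom blobs, LayerSetUp loads H from 'source'.
        layer.Setup(colBottom, colTop);
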
        /// <summary>
        /// The SoftmaxCrossEntropyLossLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter of type SOFTMAXCROSSENTROPY_LOSS.
        /// </param>
        public SoftmaxCrossEntropyLossLayer(CudaDnn<T> cuda, Log log, LayerParameter p)
            : base(cuda, log, p)
        {
            m_type = LayerParameter.LayerType.SOFTMAXCROSSENTROPY_LOSS;
            m_blobSoftmaxOutput = new Blob<T>(cuda, log);
            m_blobSoftmaxOutput.Name = m_param.name + " softmax out";
            m_blobLoss = new Blob<T>(cuda, log);
            m_blobLoss.Name = m_param.name + " loss";

            // Clone the parameter for the internal softmax layer; its loss weights are cleared below.
            LayerParameter param_softmax = p.Clone(false);

            param_softmax.loss_weight.Clear();
            m_softmaxLayer = new SoftmaxLayer<T>(cuda, log, param_softmax);
        }
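
A corresponding construction sketch for the loss layer above, again under assumptions: the cuda and log handles and the layer name "xent" are illustrative only.

        // Hypothetical construction; 'cuda' (a CudaDnn<float>) and 'log' (a Log)
        // are assumed to be initialized elsewhere. The layer name feeds the blob names.
        LayerParameter p = new LayerParameter(LayerParameter.LayerType.SOFTMAXCROSSENTROPY_LOSS);
        p.name = "xent";

        SoftmaxCrossEntropyLossLayer<float> layer = new SoftmaxCrossEntropyLossLayer<float>(cuda, log, p);
        // The constructor allocates the "xent softmax out" and "xent loss" working blobs
        // and clones 'p' (with loss weights cleared) for the internal SoftmaxLayer.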