Example No. 1
        private void setupBtmTop(Blob<T> btm, Blob<T> top)
        {
            m_rgBtm.Clear();
            m_rgBtm.Add(btm);
            m_rgTop.Clear();
            m_rgTop.Add(top);
        }
Example No. 2
        private void addInternal(Blob <T> bottom, Blob <T> top)
        {
            m_colInternalBottom.Clear();
            m_colInternalBottom.Add(bottom);

            m_colInternalTop.Clear();
            m_colInternalTop.Add(top);
        }
Example No. 3
        public ActionResult SendPDF(string formXml, string xsnName, string viewName, string toEmail, string emailBody)
        {
            var spContext = SharePointContextProvider.Current.GetSharePointContext(System.Web.HttpContext.Current);

            using (var clientContext = spContext.CreateUserClientContextForSPHost())
            {
                SP.User spUser = GetSharePointUser(clientContext);

                string internalUserID = null;

                // Store data for processing
                string            tenantID = TokenHelper.GetRealmFromTargetUrl(new Uri(clientContext.Url));
                RequestIdentifier rid      = RequestUtil.AddRequestEntity(PdfRequestType.SendPDF, PdfRequestStatus.InProgress, tenantID, internalUserID);

                PDFRequest response = new PDFRequest();
                response.RequestID   = rid.ID;
                response.RequestType = PdfRequestType.SendPDF;
                response.Status      = PdfRequestStatus.InProgress;
                response.Message     = "";

                BlobUtil bu = null;
                try
                {
                    bu = new BlobUtil();

                    ParameterCollection plist = new ParameterCollection();
                    plist.Add(Parameters.Api, "SendPDF");
                    plist.Add(Parameters.ViewName, viewName ?? "");
                    plist.Add(Parameters.UserID, internalUserID);
                    plist.Add(Parameters.XsnName, xsnName ?? "");
                    plist.Add(Parameters.FromEmail, spUser.Email ?? "");
                    plist.Add(Parameters.ToEmail, toEmail ?? "");
                    plist.Add(Parameters.EmailBody, emailBody ?? "");

                    BlobCollection bc = new BlobCollection();
                    bc.Add("xml", formXml);
                    bc.Add("parameters", plist);
                    bu.StoreRequestArguments(rid.ID, bc);

                    // post to queue
                    PdfServiceQueues.XmlToHtmlClient.AddMessage(rid.ID, internalUserID);
                }
                catch (Exception ex)
                {
                    // Update request status
                    response.Status  = PdfRequestStatus.Error;
                    response.Message = ex.Message;
                    RequestUtil.UpdateRequestStatus(rid.ID, PdfRequestStatus.Error, ex.Message);
                    //PdfServiceQueues.EmailSendClient.AddErrorMessage(requestID, internalUserID.Value, ex.Message);
                }
                return(new ObjectResult <PDFRequest>(response));
            }
        }
Example No. 4
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            DataNormalizerParameter p = m_param.data_normalizer_param;

            if (p.steps.Count == 0)
            {
                m_log.WriteLine("WARNING: No normalization steps are specified, data will just pass through in its normal form.");
            }

            if (p.steps.Contains(DataNormalizerParameter.NORMALIZATION_STEP.RANGE))
            {
                double dfRange = p.output_max - p.output_min;
                m_log.CHECK_GT(dfRange, 0, "The output data range must be greater than 0!");
            }

            if (p.steps.Contains(DataNormalizerParameter.NORMALIZATION_STEP.STDEV))
            {
                if (p.input_stdev.HasValue)
                {
                    m_log.CHECK_NE(p.input_stdev.Value, 0, "The standard deviation cannot be zero!");
                }
            }

            for (int i = 0; i < colBottom.Count; i++)
            {
                m_colWork1.Add(new Blob <T>(m_cuda, m_log, colBottom[i]));
                m_colWork2.Add(new Blob <T>(m_cuda, m_log, colBottom[i]));
            }
        }
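For orientation, a minimal sketch of how a LayerSetUp override like this is normally driven; the cuda, log and layer instances are assumed for illustration (the same pattern appears in Examples No. 10 and No. 25):

        BlobCollection<T> colBottom = new BlobCollection<T>();
        BlobCollection<T> colTop = new BlobCollection<T>();
        colBottom.Add(new Blob<T>(cuda, log, new List<int>() { 2, 3, 4, 5 }));
        colTop.Add(new Blob<T>(cuda, log));
        // Setup invokes LayerSetUp and then reshapes the tops.
        layer.Setup(colBottom, colTop);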
Example No. 5
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            if (m_phase == Phase.TEST || m_phase == Phase.TRAIN)
            {
                m_log.CHECK_EQ(2, colBottom.Count, "There should be two bottom items: data (embeddings) and labels.");
            }

            m_nBatchSize = colBottom[0].shape(0);

            // Allocate the temp batch storage.
            for (int i = 0; i < m_nMaxBatches; i++)
            {
                Blob <T> data = new Blob <T>(m_cuda, m_log, false);
                data.ReshapeLike(colBottom[0]);
                m_rgBatchData.Add(data);
            }

            // Setup the weights (which stores the centroid embedding for each class)
            Blob <T>   blobCentroid = new Blob <T>(m_cuda, m_log);
            List <int> rgShape      = Utility.Clone <int>(colBottom[0].shape());

            rgShape[0] = m_nNumOutput;
            blobCentroid.Reshape(rgShape);
            m_colBlobs.Add(blobCentroid);

            for (int i = 0; i < colBottom.Count; i++)
            {
                m_param.propagate_down.Add(false);
            }
        }
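A note on the centroid shape above: if the embeddings in colBottom[0] have shape (batch, dim), cloning that shape and overwriting dimension 0 with m_nNumOutput yields a centroid blob of shape (num_output, dim), i.e. one stored centroid embedding per class.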
Example No. 6
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            // Internal softmax layer.
            LayerParameter softmax_param = new LayerParameter(LayerParameter.LayerType.SOFTMAX);

            softmax_param.softmax_param.axis = m_param.infogain_loss_param.axis;
            softmax_param.loss_weight.Clear();
            softmax_param.loss_weight.Add(1);
            m_softmaxLayer = new SoftmaxLayer <T>(m_cuda, m_log, softmax_param);
            m_colSoftmaxBottomVec.Clear();
            m_colSoftmaxBottomVec.Add(colBottom[0]);
            m_colSoftmaxTopVec.Clear();
            m_colSoftmaxTopVec.Add(m_blobProb);
            m_softmaxLayer.Setup(m_colSoftmaxBottomVec, m_colSoftmaxTopVec);

            // ignore label.
            m_nIgnoreLabel = m_param.loss_param.ignore_label;

            // normalization
            m_log.CHECK(!m_param.loss_param.normalize, "normalize is deprecated; use 'normalization' instead.");
            m_normalization = m_param.loss_param.normalization;

            // matrix H
            if (colBottom.Count < 3)
            {
                m_log.CHECK(m_param.infogain_loss_param.source != null, "Infogain matrix source must be specified.");
                PersistCaffe <T> persist   = new PersistCaffe <T>(m_log, true);
                BlobProto        blobProto = persist.LoadBlobProto(m_param.infogain_loss_param.source, 1);
                m_blobInfoGain.FromProto(blobProto);
            }
        }
Example No. 7
        /// <summary>
        /// The PreProcessInput method allows derivative data layers to convert a property set of
        /// input data into the bottom blob collection used as input.
        /// </summary>
        /// <param name="customInput">Specifies the custom input data.</param>
        /// <param name="colBottom">Optionally, specifies the bottom data to fill.</param>
        /// <returns>The bottom data is returned.</returns>
        /// <remarks>The blobs returned should match the blob descriptions returned in the LayerParameter's
        /// overrides for 'PrepareRunModelInputs' and 'PrepareRunModel'.</remarks>
        public override BlobCollection <T> PreProcessInput(PropertySet customInput, BlobCollection <T> colBottom = null)
        {
            if (colBottom == null)
            {
                string   strInput = m_param.PrepareRunModelInputs();
                RawProto proto    = RawProto.Parse(strInput);
                Dictionary <string, BlobShape> rgInput = NetParameter.InputFromProto(proto);
                colBottom = new BlobCollection <T>();

                foreach (KeyValuePair <string, BlobShape> kv in rgInput)
                {
                    Blob <T> blob = new Blob <T>(m_cuda, m_log);
                    blob.Name = kv.Key;
                    blob.Reshape(kv.Value);
                    colBottom.Add(blob);
                }
            }

            string strEncInput = customInput.GetProperty("InputData");

            if (strEncInput == null)
            {
                throw new Exception("Could not find the expected input property 'InputData'!");
            }

            PreProcessInput(strEncInput, null, colBottom);

            return(colBottom);
        }
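A hypothetical usage sketch; the SetProperty call is an assumption about PropertySet (only GetProperty appears above), so treat it as illustrative:

        PropertySet customInput = new PropertySet();
        customInput.SetProperty("InputData", "hello world");   // assumed setter
        BlobCollection<T> colBottom = layer.PreProcessInput(customInput);
        // colBottom now holds blobs shaped per PrepareRunModelInputs().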
Example No. 8
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            LayerParameter p = m_param.Clone(false);

            p.type             = LayerParameter.LayerType.SOFTMAX;
            m_softmaxLayer     = new SoftmaxLayer <T>(m_cuda, m_log, p);
            m_colSoftmaxBottom = new BlobCollection <T>();
            m_colSoftmaxTop    = new BlobCollection <T>();

            m_colSoftmaxBottom.Add(colBottom[0]);
            m_colSoftmaxTop.Add(m_blobProb);

            m_softmaxLayer.Setup(m_colSoftmaxBottom, m_colSoftmaxTop);

            m_nIgnoreLabel = m_param.loss_param.ignore_label;

            if (m_param.loss_param.normalization == LossParameter.NormalizationMode.NONE)
            {
                m_normalization = (m_param.loss_param.normalize) ? LossParameter.NormalizationMode.VALID : LossParameter.NormalizationMode.BATCH_SIZE;
            }
            else
            {
                m_normalization = m_param.loss_param.normalization;
            }
        }
Example No. 9
        public void SaveFavorites()
        {
            BlobCollection blobs = new BlobCollection();

            foreach (string applicationName in _favoritesUpdateCache.Keys)
            {
                ShellDrillDownMenuItem favoritesMenu = _favoritesUpdateCache[applicationName];
                XmlDocument            xmlDocument   = XmlToShellDrillDownMenuItemTransformer.Transform(favoritesMenu);

                Blob blob = CreateBlob(applicationName, xmlDocument);

                if (blob != null)
                {
                    blobs.Add(blob);
                }
            }

            CreateOrUpdateBlobParameters parameters = new CreateOrUpdateBlobParameters
            {
                ContainerName = GetContainerName(),
                Blobs         = blobs
            };

            CreateOrUpdateBlobRequest request = new CreateOrUpdateBlobRequest();

            request.CreateOrUpdateBlobParameters = parameters;

            SettingsService.CreateOrUpdateBlob(request);
        }
Example No. 10
        public InputLayerEx(CudaDnn <T> cuda, Log log, Layer <T> layer) : base(cuda, log, layer.layer_param)
        {
            string strPath = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);

            strPath += "\\mycaffe\\test_data\\data\\text\\";
            LayerParameter text_param = new LayerParameter(LayerParameter.LayerType.TEXT_DATA);

            text_param.text_data_param.batch_size     = 1;
            text_param.text_data_param.decoder_source = strPath + "robot_text.txt";
            text_param.text_data_param.encoder_source = strPath + "human_text.txt";
            text_param.text_data_param.enable_normal_encoder_output  = true;
            text_param.text_data_param.enable_reverse_encoder_output = true;
            text_param.text_data_param.sample_size = 1000;
            text_param.text_data_param.shuffle     = false;
            text_param.text_data_param.time_steps  = 80;
            text_param.phase = Phase.TEST;

            m_dataLayer = new TextDataLayer <T>(cuda, log, text_param);
            BlobCollection <T> colBottom = new BlobCollection <T>();
            BlobCollection <T> colTop    = new BlobCollection <T>();

            // Pre-allocate the seven top blobs the text data layer's Setup expects.
            for (int i = 0; i < 7; i++)
            {
                colTop.Add(new Blob<T>(cuda, log));
            }

            m_dataLayer.Setup(colBottom, colTop);

            colTop.Dispose();
        }
Example No. 11
        protected virtual void LoadHDF5FileData(string strFile)
        {
            m_log.WriteLine("Loading HDF5 file: '" + strFile + "'");
            HDF5 <T> hdf5 = new HDF5 <T>(m_cuda, m_log, strFile);

            int nTopCount = m_param.top.Count;

            for (int i = 0; i < nTopCount; i++)
            {
                // Allow reshape here, as we are loading data not params.
                Blob <T> blob = null;

                if (m_colHdfBlobs.Count < nTopCount)
                {
                    blob = new Blob <T>(m_cuda, m_log, false);
                    m_colHdfBlobs.Add(blob);
                }
                else
                {
                    blob = m_colHdfBlobs[i];
                }

                blob.Name = m_param.top[i];

                hdf5.load_nd_dataset(blob, m_param.top[i], true);
            }

            hdf5.Dispose();

            // MinTopBlobs=1 guarantees at least one top blob
            m_log.CHECK_GE(m_colHdfBlobs[0].num_axes, 1, "Input must have at least 1 axis.");
            int nNum = m_colHdfBlobs[0].shape(0);

            for (int i = 1; i < nTopCount; i++)
            {
                m_log.CHECK_EQ(m_colHdfBlobs[i].shape(0), nNum, "The 'num' on all blobs must be equal.");
            }

            // Default to identity permutation.
            m_rgDataPermutation = new List <int>();
            for (int i = 0; i < nNum; i++)
            {
                m_rgDataPermutation.Add(i);
            }

            // Shuffle if needed
            if (m_param.hdf5_data_param.shuffle)
            {
                m_rgDataPermutation = Utility.RandomShuffle(m_rgDataPermutation);
                m_log.WriteLine("Successfully loaded " + nNum.ToString() + " rows (shuffled).");
            }
            else
            {
                m_log.WriteLine("Successfully loaded " + nNum.ToString() + " rows.");
            }
        }
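The identity-permutation-then-shuffle idiom above can be reproduced without the framework's Utility.RandomShuffle; a standalone sketch:

        // Build the identity permutation, then Fisher-Yates shuffle it.
        List<int> rgPerm = new List<int>();
        for (int i = 0; i < nNum; i++)
        {
            rgPerm.Add(i);
        }

        Random rand = new Random();
        for (int i = rgPerm.Count - 1; i > 0; i--)
        {
            int j = rand.Next(i + 1);   // uniform on [0, i]
            int nTmp = rgPerm[i];
            rgPerm[i] = rgPerm[j];
            rgPerm[j] = nTmp;
        }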
Example No. 12
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            m_colSigmoidBottom.Clear();
            m_colSigmoidBottom.Add(m_blobSigmoidInput);
            m_colSigmoidTop.Clear();
            m_colSigmoidTop.Add(m_blobSigmoidOutput);
            m_sigmoidLayer.LayerSetUp(m_colSigmoidBottom, m_colSigmoidTop);
        }
Example No. 13
        /// <summary>
        /// Load the data into the model.
        /// </summary>
        /// <param name="strInput">Specifies the encoder input sentence.</param>
        /// <param name="nIxInput">Specifies the decoder current input word index.</param>
        private BlobCollection <float> loadData(string strInput, int nIxInput)
        {
            Net <float>            net          = m_mycaffe.GetInternalNet(Phase.RUN);
            TextDataLayer <float>  layer        = net.FindLayer("TextData", "data") as TextDataLayer <float>;
            Blob <float>           blobData     = net.FindBlob("ienc");
            Blob <float>           blobDatar    = net.FindBlob("iencr");
            Blob <float>           blobClipE    = net.FindBlob("iencc");
            Blob <float>           blobDecInput = net.FindBlob("idec");
            BlobCollection <float> colBtm       = new BlobCollection <float>();

            colBtm.Add(blobDecInput);
            colBtm.Add(blobData);
            colBtm.Add(blobDatar);
            colBtm.Add(blobClipE);

            layer.PreProcessInput(strInput, nIxInput, colBtm);

            return(colBtm);
        }
Example No. 14
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            m_colSoftmaxBottomVec = new BlobCollection <T>();
            m_colSoftmaxBottomVec.Add(colBottom[0]);
            m_colSoftmaxTopVec = new BlobCollection <T>();
            m_colSoftmaxTopVec.Add(m_blobSoftmaxOutput);
            m_softmaxLayer.Setup(m_colSoftmaxBottomVec, m_colSoftmaxTopVec);
        }
Example No. 15
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            if (colBottom.Count == 1 && m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else if (colBottom.Count == 1)
            {
                // bias is a learned parameter; initialize it.
                BiasParameter p        = m_param.bias_param;
                int           nAxis    = colBottom[0].CanonicalAxisIndex(p.axis);
                int           nNumAxes = p.num_axes;

                m_log.CHECK_GE(nNumAxes, -1, "num_axes must be non-negative, or -1 to extend to end of bottom[0].");

                if (nNumAxes >= 0)
                {
                    m_log.CHECK_GE(colBottom[0].num_axes, nAxis + nNumAxes, "bias blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + nAxis.ToString());
                }

                m_colBlobs = new BlobCollection <T>();

                List <int> rgBiasShape = new List <int>();
                int        nStart      = nAxis;
                int        nEnd        = (nNumAxes == -1) ? colBottom[0].shape().Count : nStart + nNumAxes;

                for (int i = nStart; i < nEnd; i++)
                {
                    rgBiasShape.Add(colBottom[0].shape(i));
                }

                Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                blobBias.Name = m_param.name + " bias";
                blobBias.type = BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobBias, rgBiasShape))
                {
                    blobBias.Reshape(rgBiasShape);
                    FillerParameter fp = p.filler;
                    if (fp == null)
                    {
                        fp = new FillerParameter("constant", 0.0);
                    }

                    Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                    filler.Fill(blobBias);
                }
                m_colBlobs.Add(blobBias);
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);
        }
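The loop above encodes the bias broadcast rule: the bias blob takes the slice of bottom[0]'s shape from axis through axis + num_axes (or through the last axis when num_axes == -1). For example, with bottom[0] of shape (N, C, H, W), axis = 1 and num_axes = 1 give a bias of shape (C,), broadcast across N, H and W.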
Example No. 16
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            m_colSigmoidBottomVec.Clear();
            m_colSigmoidBottomVec.Add(colBottom[0]);
            m_colSigmoidTopVec.Clear();
            m_colSigmoidTopVec.Add(m_blobSigmoidOutput);
            m_sigmoidLayer.Setup(m_colSigmoidBottomVec, m_colSigmoidTopVec);

            m_nIgnoreLabel = m_param.loss_param.ignore_label;
        }
Example No. 17
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_log.CHECK_GE(colBottom.Count, 2, "There should be at least two bottom items: data (embeddings) and labels.");
            m_log.CHECK_EQ(colTop.Count, colBottom.Count - 1, "The top count should equal the bottom count - 1");

            // Allocate the temp batch storage.
            for (int i = 0; i < m_nMaxBatches; i++)
            {
                Blob <T> data = new Blob <T>(m_cuda, m_log);
                data.ReshapeLike(colBottom[0]);
                m_rgBatchData.Add(data);

                Blob <T> label = new common.Blob <T>(m_cuda, m_log, false);
                label.ReshapeLike(colBottom[1]);
                m_rgBatchLabels.Add(label);
            }
        }
Example No. 18
        /// <summary>
        /// Runs the pre-solve, which prepares the Solver to start solving.
        /// </summary>
        public void PreSolve()
        {
            BlobCollection <T> colNetParams = m_net.learnable_parameters;

            m_colHistory.Clear(true);
//            m_colUpdate.Clear(true);
            m_colTemp.Clear(true);

            for (int i = 0; i < colNetParams.Count; i++)
            {
                List <int> rgShape = colNetParams[i].shape();

                m_colHistory.Add(new Blob <T>(m_cuda, m_log, rgShape, false));   // diff never used
//                m_colUpdate.Add(new Blob<T>(m_cuda, m_log, rgShape, false));
                m_colTemp.Add(new Blob <T>(m_cuda, m_log, rgShape, false));      // diff never used
            }
        }
Example No. 19
        /// <summary>
        /// Runs the pre-solve, which prepares the Solver to start solving.
        /// </summary>
        public void PreSolve()
        {
            BlobCollection <T> net_params = m_net.learnable_parameters;

            m_nN = 0;

            for (int i = 0; i < net_params.Count; i++)
            {
                if (m_net.params_lr[i] != 0)
                {
                    m_nN += net_params[i].count();
                }
            }

            // Nothing to do, all learnable parameters have lr_mult = 0
            if (m_nN == 0)
            {
                return;
            }

            List <int> rgShape = new List <int>()
            {
                m_nN
            };

            m_colBlobHistoryS.Clear();
            m_colBlobHistoryY.Clear();
            m_rgRhoHistory.Clear();
            m_nStart = 0;
            m_nEnd   = -1;

            m_blobGradients          = new Blob <T>(m_cuda, m_log, rgShape, false);
            m_blobGradients.Name     = "gradients";
            m_blobGradientsPrev      = new Blob <T>(m_cuda, m_log, rgShape, false);
            m_blobGradientsPrev.Name = "gradients prev";
            m_blobDirection          = new Blob <T>(m_cuda, m_log, rgShape, false);
            m_blobDirection.Name     = "direction";

            for (int i = 0; i < m_param.lbgfs_corrections; i++)
            {
                m_colBlobHistoryS.Add(new Blob <T>(m_cuda, m_log, rgShape, false));
                m_colBlobHistoryY.Add(new Blob <T>(m_cuda, m_log, rgShape, false));
                m_rgRhoHistory.Add(0);
            }
        }
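For reference, the buffers above follow the standard L-BFGS bookkeeping; what they eventually hold is inferred from the algorithm rather than shown in this snippet:

        // Each stored correction pair (s_i, y_i) = (x_{i+1} - x_i, grad_{i+1} - grad_i)
        // contributes rho_i = 1.0 / dot(y_i, s_i) to the two-loop recursion;
        // m_rgRhoHistory is zero-filled here and overwritten as pairs are recorded.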
Example No. 20
        /// <summary>
        /// The Layer constructor.
        /// </summary>
        /// <remarks>
        /// Setup code for derivative classes should go into an override of the LayerSetUp function, where the
        /// dimensions of the Blobs are provided to the Layer.
        /// </remarks>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter that contains the settings of the Layer.</param>
        public Layer(CudaDnn <T> cuda, Log log, LayerParameter p)
        {
            m_cuda  = cuda;
            m_log   = log;
            m_param = p.Clone(true);
            m_phase = p.phase;
            m_rgbParamPropagateDown = new DictionaryMap <bool>(false);
            m_rgLoss   = new DictionaryMap <double>(0.0);
            m_colBlobs = new BlobCollection <T>();

            for (int i = 0; i < p.blobs.Count; i++)
            {
                m_colBlobs.Add(new Blob <T>(cuda, log, p.blobs[i]));
            }

            m_tOne  = (T)Convert.ChangeType(1, typeof(T));
            m_tZero = (T)Convert.ChangeType(0, typeof(T));
        }
Example No. 21
        protected void SaveOrUpdateDefaultUser(string user)
        {
            BlobCollection blobs = new BlobCollection();

            blobs.Add(CreateBlob(user));

            CreateOrUpdateBlobParameters parameters = new CreateOrUpdateBlobParameters
            {
                ContainerName = GetContainerName(),
                Blobs         = blobs
            };

            CreateOrUpdateBlobRequest request = new CreateOrUpdateBlobRequest();

            request.CreateOrUpdateBlobParameters = parameters;

            SettingsService.CreateOrUpdateBlob(request);
        }
Example No. 22
        public void SaveSettings()
        {
            if (string.IsNullOrEmpty(ContainerName))
            {
                throw new ArgumentNullException("ContainerName");
            }

            BlobCollection blobs = new BlobCollection();

            foreach (KeyValuePair <object, ISettingsProvider> providerEntry in _providerDictionary)
            {
                object settings = providerEntry.Value.SaveSettings(providerEntry.Key);

                Blob blob = CreateBlob(providerEntry.Value.GetKey(providerEntry.Key), settings);

                if (blob != null)
                {
                    blobs.Add(blob);
                }
            }

            if (_useBackgroundThread)
            {
                ThreadPool.QueueUserWorkItem((s) =>
                {
                    try
                    {
                        CreateOrUpdateBlob(blobs);
                    }
                    catch (Exception ex)
                    {
                        _synchronizationContext.Send((state) =>
                        {
                            throw ex;
                        }, null);
                    }
                });
            }
            else
            {
                CreateOrUpdateBlob(blobs);
            }
        }
Example No. 23
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            LayerParameter param_softmax = m_param.Clone(false);

            param_softmax.SetType(LayerParameter.LayerType.SOFTMAX);
            param_softmax.softmax_param = m_param.softmax_param.Clone() as SoftmaxParameter;
            param_softmax.loss_weight.Clear();

            m_softmaxLayer     = new SoftmaxLayer <T>(m_cuda, m_log, param_softmax);
            m_colSoftmaxBottom = new BlobCollection <T>();
            m_colSoftmaxTop    = new BlobCollection <T>();

            m_colSoftmaxBottom.Add(colBottom[0]);
            m_colSoftmaxTop.Add(m_blobProb);
            m_softmaxLayer.Setup(m_colSoftmaxBottom, m_colSoftmaxTop);

            m_nIgnoreLabel = m_param.loss_param.ignore_label;
        }
Example No. 24
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            m_colSigmoidBottomVec = new BlobCollection <T>();
            m_colSigmoidBottomVec.Add(colBottom[0]);
            m_colSigmoidTopVec = new BlobCollection <T>();
            m_colSigmoidTopVec.Add(m_blobSigmoidOutput);
            m_sigmoidLayer.Setup(m_colSigmoidBottomVec, m_colSigmoidTopVec);

            m_nIgnoreLabel = m_param.loss_param.ignore_label;

            if (m_param.loss_param.normalization != LossParameter.NormalizationMode.NONE)
            {
                m_normalization = m_param.loss_param.normalization;
            }
            else
            {
                m_normalization = (m_param.loss_param.normalize) ? LossParameter.NormalizationMode.VALID : LossParameter.NormalizationMode.BATCH_SIZE;
            }
        }
Example No. 25
        public TestEx(string strName, List <int> rgBottomShape = null, int nDeviceID = TestBase.DEFAULT_DEVICE_ID)
            : base(strName, nDeviceID)
        {
            if (rgBottomShape == null)
            {
                rgBottomShape = new List<int>() { 2, 3, 4, 5 };
            }

            m_blob_bottom = new Blob <T>(m_cuda, m_log, rgBottomShape);
            m_blob_top    = new Blob <T>(m_cuda, m_log);
            m_colBottom.Add(m_blob_bottom);
            m_colTop.Add(m_blob_top);

            FillerParameter fp = getFillerParam();

            m_filler = Filler <T> .Create(m_cuda, m_log, fp);

            m_filler.Fill(m_blob_bottom);
        }
Example No. 26
        /// <summary>
        /// Setup the layer for both Engine.CUDNN and Engine.CAFFE modes.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_nSize = (int)m_param.lrn_param.local_size;
            m_log.CHECK_EQ(m_nSize % 2, 1, "LRN only supports odd values for local_size.");
            m_nPrePad = (m_nSize - 1) / 2;
            m_dfAlpha = m_param.lrn_param.alpha;
            m_dfBeta  = m_param.lrn_param.beta;
            m_dfK     = m_param.lrn_param.k;

            if (m_param.lrn_param.norm_region == LRNParameter.NormRegion.WITHIN_CHANNEL)
            {
                // Set up split_layer to use in the numerator and denominator.
                m_colSplitTopVec = new BlobCollection <T>();
                m_colSplitTopVec.Add(m_blobProductInput);
                m_colSplitTopVec.Add(m_blobSquareInput);
                LayerParameter split_param = new LayerParameter(LayerParameter.LayerType.SPLIT, "split");
                m_splitLayer = new SplitLayer <T>(m_cuda, m_log, split_param);
                m_splitLayer.Setup(colBottom, m_colSplitTopVec);

                // Set up square_layer to square the inputs.
                m_colSquareBottomVec = new BlobCollection <T>();
                m_colSquareTopVec    = new BlobCollection <T>();
                m_colSquareBottomVec.Add(m_blobSquareInput);
                m_colSquareTopVec.Add(m_blobSquareOutput);
                LayerParameter square_param = new LayerParameter(LayerParameter.LayerType.POWER, "square");
                square_param.power_param.power = 2.0;
                m_squareLayer = new PowerLayer <T>(m_cuda, m_log, square_param);
                m_squareLayer.Setup(m_colSquareBottomVec, m_colSquareTopVec);

                // Set up pool_layer to sum over square neighborhoods of the input.
                m_colPoolTopVec = new BlobCollection <T>();
                m_colPoolTopVec.Add(m_blobPoolOutput);
                LayerParameter pool_param = new LayerParameter(LayerParameter.LayerType.POOLING, "pool");
                pool_param.pooling_param.pool = PoolingParameter.PoolingMethod.AVE;
                pool_param.pooling_param.pad.Add((uint)m_nPrePad);
                pool_param.pooling_param.kernel_size.Add((uint)m_nSize);
                m_poolLayer = new PoolingLayer <T>(m_cuda, m_log, pool_param);
                m_poolLayer.Setup(m_colSquareTopVec, m_colPoolTopVec);

                // Set up power_layer to compute (1 + alpha/N^2 s)^-beta, where s is
                // the sum of the squared neighborhood (the output of pool_layer)
                m_colPowerTopVec = new BlobCollection <T>();
                m_colPowerTopVec.Add(m_blobPowerOutput);
                LayerParameter power_param = new LayerParameter(LayerParameter.LayerType.POWER, "power");
                power_param.power_param.power = -m_dfBeta;
                power_param.power_param.scale = m_dfAlpha;
                power_param.power_param.shift = 1.0;
                m_powerLayer = new PowerLayer <T>(m_cuda, m_log, power_param);
                m_powerLayer.Setup(m_colPoolTopVec, m_colPowerTopVec);

                // Set up a product_layer to compute outputs by multiplying inputs by the
                // inverse denominator computed by the power layer.
                m_colProductBottomVec = new BlobCollection <T>();
                m_colProductBottomVec.Add(m_blobProductInput);
                m_colProductBottomVec.Add(m_blobPowerOutput);
                LayerParameter product_param = new LayerParameter(LayerParameter.LayerType.ELTWISE, "product");
                product_param.eltwise_param.operation = EltwiseParameter.EltwiseOp.PROD;
                m_productLayer = new EltwiseLayer <T>(m_cuda, m_log, product_param);
                m_productLayer.Setup(m_colProductBottomVec, colTop);
            }

            if (!m_param.lrn_param.useCudnn())
            {
                return;
            }

            m_hCuDnn      = m_cuda.CreateCuDNN();
            m_hNormDesc   = m_cuda.CreateLRNDesc();
            m_hBottomDesc = m_cuda.CreateTensorDesc();
            m_hTopDesc    = m_cuda.CreateTensorDesc();
        }
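Putting the within-channel pipeline together, each output is computed by the following chain (a summary of the layers wired up above, with s the pooled average of the squared inputs):

        // split   : x -> (x, x)
        // square  : x^2                                   (power layer, power = 2)
        // pool    : s = average of x^2 over the local_size window  (AVE pooling)
        // power   : d = (1 + alpha * s)^(-beta)           (shift = 1, scale = alpha)
        // product : y = x * d                             (eltwise PROD)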
Example No. 27
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_dfClippingThreshold = m_param.lstm_simple_param.clipping_threshold;
            m_nN = (int)m_param.lstm_simple_param.batch_size;              // batch size.
            m_nH = (int)m_param.lstm_simple_param.num_output;              // number of hidden units.
            m_nI = (int)(colBottom[0].count() / colBottom[0].num);         // input dimension.

            // Check if we need to set up the weights.
            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs = new BlobCollection <T>();

                Filler <T> weight_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.weight_filler);

                Filler <T> bias_filler = Filler <T> .Create(m_cuda, m_log, m_param.lstm_simple_param.bias_filler);

                // input-to-hidden weights
                // Initialize the weight.
                List <int> rgShape1 = new List <int>()
                {
                    4 * m_nH, m_nI
                };
                Blob <T> blobWeights_I_H = new Blob <T>(m_cuda, m_log);
                blobWeights_I_H.Name = m_param.name + " weights I to H";
                blobWeights_I_H.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobWeights_I_H, rgShape1))
                {
                    blobWeights_I_H.Reshape(rgShape1);
                    weight_filler.Fill(blobWeights_I_H);
                }
                m_colBlobs.Add(blobWeights_I_H);

                // hidden-to-hidden weights
                // Initialize the weight.
                List <int> rgShape2 = new List <int>()
                {
                    4 * m_nH, m_nH
                };
                Blob <T> blobWeights_H_H = new Blob <T>(m_cuda, m_log);
                blobWeights_H_H.Name = m_param.name + " weights H to H";
                blobWeights_H_H.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobWeights_H_H, rgShape2))
                {
                    blobWeights_H_H.Reshape(rgShape2);
                    weight_filler.Fill(blobWeights_H_H);
                }
                m_colBlobs.Add(blobWeights_H_H);

                // If necessary, initialize and fill the bias term.
                List <int> rgShape3 = new List <int>()
                {
                    4 * m_nH
                };
                Blob <T> blobBias = new Blob <T>(m_cuda, m_log);
                blobBias.Name = m_param.name + " bias weights";
                blobBias.type = Blob <T> .BLOB_TYPE.WEIGHT;

                if (!shareParameter(blobBias, rgShape3))
                {
                    blobBias.Reshape(rgShape3);
                    bias_filler.Fill(blobBias);
                }
                m_colBlobs.Add(blobBias);

                // Initialize the bias for the forget gate to 5.0 as described in the
                // Clockwork RNN paper:
                // [1] Koutnik, J., Greff, K., Gomez, F., Schmidhuber, J., 'A Clockwork RNN', 2014
                if (m_param.lstm_simple_param.enable_clockwork_forgetgate_bias)
                {
                    double[] rgBias = convertD(blobBias.mutable_cpu_data);

                    for (int i = m_nH; i < 2 * m_nH; i++)
                    {
                        rgBias[i] = 5.0;
                    }

                    blobBias.mutable_cpu_data = convert(rgBias);
                }
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);

            List <int> rgCellShape = new List <int>()
            {
                m_nN, m_nH
            };

            m_blob_C_0.Reshape(rgCellShape);
            m_blob_H_0.Reshape(rgCellShape);
            m_blob_C_T.Reshape(rgCellShape);
            m_blob_H_T.Reshape(rgCellShape);
            m_blob_H_to_H.Reshape(rgCellShape);

            List <int> rgGateShape = new List <int>()
            {
                m_nN, 4, m_nH
            };

            m_blob_H_to_Gate.Reshape(rgGateShape);
        }
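The forget-gate loop above writes 5.0 into bias indices [m_nH, 2*m_nH), which implies the 4*m_nH gate pre-activations are laid out in m_nH-sized blocks with the forget gate second; that layout is inferred from the loop itself and is not otherwise visible in this snippet.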
Example No. 28
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            ScaleParameter p = m_param.scale_param;

            if (colBottom.Count == 1 && blobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else if (colBottom.Count == 1)
            {
                // scale is a learned parameter; initialize it.
                m_nAxis = colBottom[0].CanonicalAxisIndex(p.axis);
                int nNumAxes = p.num_axes;
                m_log.CHECK_GE(nNumAxes, -1, "num_axes must be non-negative, or -1 to extend to the end of bottom[0].");

                if (nNumAxes >= 0)
                {
                    m_log.CHECK_GE(colBottom[0].num_axes, m_nAxis + nNumAxes, "scale blob's shape extends past bottom[0]'s shape when applied starting with bottom[0] axis = " + m_nAxis.ToString());
                }

                m_colBlobs = new BlobCollection <T>();

                List <int> rgShape = new List <int>();
                int        nStart  = m_nAxis;
                int        nEnd    = (nNumAxes == -1) ? colBottom[0].shape().Count : nStart + nNumAxes;

                for (int i = nStart; i < nEnd; i++)
                {
                    rgShape.Add(colBottom[0].shape(i));
                }

                Blob <T> blobScale = new Blob <T>(m_cuda, m_log, rgShape);
                blobScale.Name = "scale";
                FillerParameter fp = p.filler;

                // Default to unit (1) filler for identity operation.
                if (fp == null)
                {
                    fp = new FillerParameter("constant", 1.0);
                }

                Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                filler.Fill(blobScale);

                m_colBlobs.Add(blobScale);
            }

            if (p.bias_term)
            {
                LayerParameter pb = new LayerParameter(LayerParameter.LayerType.BIAS);
                pb.bias_param.axis     = p.axis;
                pb.bias_param.num_axes = (colBottom.Count > 1) ? colBottom[1].num_axes : p.num_axes;
                pb.bias_param.filler   = p.bias_filler;

                m_colBiasBottomVec = new BlobCollection <T>();
                m_colBiasBottomVec.Add(colBottom[0]);

                m_biasLayer = new BiasLayer <T>(m_cuda, m_log, pb);
                m_biasLayer.Setup(m_colBiasBottomVec, colTop);
                m_nBiasParamId = m_colBlobs.Count;
                m_colBlobs.Add(m_biasLayer.blobs[0]);
                m_rgbBiasPropagateDown = Utility.Create <bool>(1, false);
            }

            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count(), true);
        }
Example No. 29
        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            m_log.CHECK_GE(colBottom[0].num_axes, 2, "Number of axes of bottom must be >= 2");
            PReLUParameter p         = m_param.prelu_param;
            int            nChannels = colBottom[0].channels;

            m_bChannelShared = p.channel_shared;

            if (m_colBlobs.Count > 0)
            {
                m_log.WriteLine("Skipping parameter initialization.");
            }
            else
            {
                m_colBlobs = new BlobCollection <T>();

                List <int> rgSlopeShape = new List <int>();
                if (!m_bChannelShared)
                {
                    rgSlopeShape.Add(nChannels);
                }

                Blob <T> blobSlope = new Blob <T>(m_cuda, m_log);
                blobSlope.Name = m_param.name + " slope";
                blobSlope.type = BLOB_TYPE.INTERNAL;

                if (!shareParameter(blobSlope, rgSlopeShape))
                {
                    blobSlope.Reshape(rgSlopeShape);
                    FillerParameter fp = p.filler;

                    if (fp == null)
                    {
                        fp = new FillerParameter("constant", 0.25);
                    }

                    Filler <T> filler = Filler <T> .Create(m_cuda, m_log, fp);

                    filler.Fill(blobSlope);
                }
                m_colBlobs.Add(blobSlope);
            }

            if (m_bChannelShared)
            {
                m_log.CHECK_EQ(m_colBlobs[0].count(), 1, "Negative slope size is inconsistent with prototxt config.");
            }
            else
            {
                m_log.CHECK_EQ(m_colBlobs[0].count(), nChannels, "Negative slope size is inconsistent with prototxt config.");
            }

            // Propagate gradients to the parameters (as directed by backward pass)
            m_rgbParamPropagateDown = new DictionaryMap <bool>(m_colBlobs.Count, true);

            List <int> rgShape = new List <int>()
            {
                colBottom[0].count(1)
            };

            m_blobMultiplier.Reshape(rgShape);
            m_blobBackwardBuff.Reshape(rgShape);
            m_blobMultiplier.SetData(1.0);
        }
Example No. 30
        /// <summary>
        /// Checks the gradient of a single output with respect to particular input
        /// blob(s).  If check_bottom = i >= 0, check only the i-th bottom Blob<T>.
        /// If check_bottom == -1, check everything -- all bottom Blobs and all
        /// param Blobs.  Otherwise (if check_bottom < -1), check only param Blobs.
        /// </summary>
        public void CheckGradientSingle(Layer <T> layer, BlobCollection <T> colBottom, BlobCollection <T> colTop, int nCheckBottom, int nTopID, int nTopDataID, bool bElementwise = false)
        {
            if (bElementwise)
            {
                m_log.CHECK_EQ(0, layer.blobs.Count(), "Cannot have blobs in the layer checked for element-wise checking.");
                m_log.CHECK_LE(0, nTopID, "The top ID '" + nTopID.ToString() + "' must be zero or greater with element-wise checking.");
                m_log.CHECK_LE(0, nTopDataID, "The top data ID '" + nTopDataID.ToString() + "' must be zero or greater with element-wise checking.");

                int nTopCount = colTop[nTopID].count();

                for (int nBlobID = 0; nBlobID < colBottom.Count(); nBlobID++)
                {
                    m_log.CHECK_EQ(nTopCount, colBottom[nBlobID].count(), "The top count and blob counts must be equal for element-wise checking.");
                }
            }

            // First, figure out what blobs we need to check against, and zero init
            // parameter blobs.
            BlobCollection <T> colBlobsToCheck = new BlobCollection <T>();
            List <bool>        rgPropagateDown = new List <bool>();

            for (int i = 0; i < colBottom.Count; i++)
            {
                rgPropagateDown.Add((nCheckBottom == -1) ? true : false);
            }

            for (int i = 0; i < layer.blobs.Count; i++)
            {
                Blob <T> blob = layer.blobs[i];

                blob.SetDiff(0);
                colBlobsToCheck.Add(blob);
            }

            if (nCheckBottom == -1)
            {
                for (int i = 0; i < colBottom.Count; i++)
                {
                    colBlobsToCheck.Add(colBottom[i]);
                }
            }
            else if (nCheckBottom >= 0)
            {
                m_log.CHECK_LT(nCheckBottom, colBottom.Count, "The check bottom value '" + nCheckBottom.ToString() + "' must be less than the number of bottom blobs.");
                colBlobsToCheck.Add(colBottom[nCheckBottom]);
                rgPropagateDown[nCheckBottom] = true;
            }

            m_log.CHECK_GT(colBlobsToCheck.Count, 0, "No blobs to check!");

            // Compute the gradient analytically using Backward.
            m_cuda.rng_setseed(m_uiSeed);

            // Ignore the loss from the layer (it's just the weighted sum of the losses
            // from the top blobs, whose gradients we may want to test individually).
            layer.Forward(colBottom, colTop);

            // Get additional loss from the objective.
            GetObjAndGradient(layer, colTop, nTopID, nTopDataID);
            layer.Backward(colTop, rgPropagateDown, colBottom);

            // Store computed gradients for all checked blobs
            BlobCollection <T> colComputedGradientBlobs = new BlobCollection <T>();

            for (int nBlobID = 0; nBlobID < colBlobsToCheck.Count; nBlobID++)
            {
                Blob <T> current_blob = colBlobsToCheck[nBlobID];
                Blob <T> new_blob     = new Blob <T>(m_cuda, m_log);

                if (current_blob.DiffExists)
                {
                    new_blob.ReshapeLike(current_blob);
                    m_cuda.copy(current_blob.count(), current_blob.gpu_diff, new_blob.mutable_gpu_data);
                }

                colComputedGradientBlobs.Add(new_blob);
            }

            // Compute derivative of top w.r.t. each bottom and parameter input using
            // finite differencing.

            for (int nBlobID = 0; nBlobID < colBlobsToCheck.Count; nBlobID++)
            {
                Blob <T> current_blob = colBlobsToCheck[nBlobID];

                if (!current_blob.DiffExists)
                {
                    continue;
                }

                T[]    rgdfComputedGradients = colComputedGradientBlobs[nBlobID].update_cpu_data();
                double dfData;

                for (int nFeatID = 0; nFeatID < current_blob.count(); nFeatID++)
                {
                    if (m_evtCancel.WaitOne(0))
                    {
                        throw new Exception("Aborted!");
                    }

                    // For an element-wise layer, we only need to do finite differencing to
                    // compute the derivative of top[nTopID][nTopDataID] w.r.t.
                    // bottom[nBlobID][i] only for i == nTopDataID.  For any other
                    // i != nTopDataID, we know the derivative is 0 by definition, and simply
                    // check that that's true.
                    double dfEstimateGradient  = 0;
                    double dfPositiveObjective = 0;
                    double dfNegativeObjective = 0;

                    if (!bElementwise || (nFeatID == nTopDataID))
                    {
                        // Do finite differencing.
                        // Compute loss with stepwise added to input.
                        dfData  = (double)Convert.ChangeType(current_blob.GetData(nFeatID), typeof(double));
                        dfData += m_dfStepsize;
                        current_blob.SetData(dfData, nFeatID);
                        m_cuda.rng_setseed(m_uiSeed);

                        layer.Forward(colBottom, colTop);
                        dfPositiveObjective = GetObjAndGradient(layer, colTop, nTopID, nTopDataID);

                        // Compute loss with stepsize subtracted from input.
                        dfData  = (double)Convert.ChangeType(current_blob.GetData(nFeatID), typeof(double));
                        dfData -= (m_dfStepsize * 2);
                        current_blob.SetData(dfData, nFeatID);
                        m_cuda.rng_setseed(m_uiSeed);

                        layer.Forward(colBottom, colTop);
                        dfNegativeObjective = GetObjAndGradient(layer, colTop, nTopID, nTopDataID);

                        // Recover original input value.
                        dfData  = (double)Convert.ChangeType(current_blob.GetData(nFeatID), typeof(double));
                        dfData += m_dfStepsize;
                        current_blob.SetData(dfData, nFeatID);

                        dfEstimateGradient = (dfPositiveObjective - dfNegativeObjective) / m_dfStepsize / 2.0;
                    }

                    double dfComputedGradient = (double)Convert.ChangeType(rgdfComputedGradients[nFeatID], typeof(double));
                    double dfFeature          = (double)Convert.ChangeType(current_blob.GetData(nFeatID), typeof(double));

                    if (m_dfKink - m_dfKinkRange > Math.Abs(dfFeature) ||
                        Math.Abs(dfFeature) > m_dfKink + m_dfKinkRange)
                    {
                        // We check the relative accuracy, but for too small values, we threshold
                        // the scale factor by 1.
                        double dfScale = Math.Max(Math.Max(Math.Abs(dfComputedGradient), Math.Abs(dfEstimateGradient)), 1.0);

                        m_log.EXPECT_NEAR(dfComputedGradient, dfEstimateGradient, m_dfThreshold * dfScale, "DEBUG: (nTopID, nTopDataID, nBlobID, nFeatID)=" + nTopID.ToString() + ", " + nTopDataID.ToString() + ", " + nBlobID.ToString() + ", " + nFeatID.ToString() + "; feat = " + dfFeature.ToString() + "; objective+ = " + dfPositiveObjective.ToString() + "; objective- = " + dfNegativeObjective.ToString());
                    }
                }
            }
        }
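The finite-differencing step above is the standard central-difference estimate, restated for clarity:

        // dL/dx ~= (L(x + h) - L(x - h)) / (2 * h),  with h = m_dfStepsize,
        // i.e. (dfPositiveObjective - dfNegativeObjective) / m_dfStepsize / 2.0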