Example #1
0
        /// <summary>
        /// Runs the backward pass of the unrolled recurrent net (CUDA path).
        /// </summary>
        /// <param name="colTop">Specifies the top (output) blobs of this layer.</param>
        /// <param name="rgbPropagateDown">Specifies per-bottom flags indicating which bottoms need gradients;
        /// index 1 (the sequence indicators) must be false.</param>
        /// <param name="colBottom">Specifies the bottom (input) blobs of this layer.</param>
        private void backward_cuda(BlobCollection <T> colTop, List <bool> rgbPropagateDown, BlobCollection <T> colBottom)
        {
            // Gradients cannot flow into the sequence-indicator input (bottom[1]).
            m_log.CHECK(!rgbPropagateDown[1], "Cannot backpropagate to sequence indicators.");

            // TODO: skip backpropagation to inputs and parameters inside the unrolled
            // net according to propagate_down[0] and propagate_down[2].  For now just
            // backprop to inputs and parameters unconditionally, as either the inputs or
            // the parameters do need backward (or Net would have set
            // layer_needs_backward[i] = false for this layer).
            m_unrolledNet.Backward(m_nLastLayerIndex);
        }
Example #2
0
        /// <summary>
        /// Performs a single gradient-ascent step on the input image, driving it to increase
        /// activations of the layer named <paramref name="strLayer"/>.
        /// </summary>
        /// <param name="strLayer">Specifies the name of the layer/blob whose activations are maximized.</param>
        /// <param name="dfSigma">Specifies the gaussian blur sigma applied to the image after the step; 0 disables blurring.</param>
        /// <param name="dfStepSize">Specifies the base step size; the actual step is normalized by the mean absolute gradient.</param>
        /// <param name="nFocusLabel">Optionally specifies a single output index to focus on; &lt; 0 maximizes all outputs.</param>
        /// <param name="rgDirectInputs">Optionally specifies explicit per-output gradient values; when non-null this overrides <paramref name="nFocusLabel"/>.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="nFocusLabel"/> exceeds the output count of the target blob.</exception>
        private void make_step(string strLayer, double dfSigma, double dfStepSize = 1.5, int nFocusLabel = -1, float[] rgDirectInputs = null)
        {
            Blob <T> blobSrc = m_net.blob_by_name("data"); // input image is stored in Net's 'data' blob
            Blob <T> blobDst = m_net.blob_by_name(strLayer);
            int      nDstIdx = m_net.layer_index_by_name(strLayer);
            double   dfLoss;

            m_net.Forward(out dfLoss);

            if (nFocusLabel < 0 && rgbDirectInputsIsNull(rgDirectInputs))
            {
                // Maximize all activations of the target layer by using the activations
                // themselves as the gradient seed.
                m_cuda.copy(blobDst.count(), blobDst.gpu_data, blobDst.mutable_gpu_diff);
            }
            else if (rgDirectInputs != null)
            {
                // Seed only the caller-specified outputs with explicit gradient values.
                blobDst.SetDiff(0);

                for (int i = 0; i < rgDirectInputs.Length && i < blobDst.count(); i++)
                {
                    if (rgDirectInputs[i] != 0)
                    {
                        blobDst.SetDiff(rgDirectInputs[i], i);
                    }
                }
            }
            else
            {
                // Focus the ascent on a single output (e.g. one class label).
                if (nFocusLabel >= blobDst.count())
                {
                    throw new ArgumentOutOfRangeException(nameof(nFocusLabel), "The focus label '" + nFocusLabel + "' is greater than the number of outputs for blob '" + blobDst.Name + "' -- it only has '" + blobDst.count().ToString() + "' outputs!");
                }

                blobDst.SetDiff(0);
                blobDst.SetDiff(1.0, nFocusLabel);
            }

            m_net.Backward(nDstIdx);

            // Apply normalized ascent step to the input image.
            double dfAsum = Utility.ConvertVal <T>(blobSrc.asum_diff());
            double dfMean = dfAsum / blobSrc.count();

            // Guard against a zero gradient: dividing by a zero mean would yield an
            // infinite step and fill the image with NaN/Inf values.  With no gradient
            // there is nothing to ascend, so skip the update entirely.
            if (dfMean != 0)
            {
                double dfStep = dfStepSize / dfMean;

                m_cuda.scal(blobSrc.count(), dfStep, blobSrc.mutable_gpu_diff);
                m_cuda.add(blobSrc.count(), blobSrc.gpu_diff, blobSrc.gpu_data, blobSrc.mutable_gpu_data);
            }

            if (dfSigma != 0)
            {
                // Blur the updated image in-place (via m_blobBlur scratch) to regularize the ascent.
                m_cuda.gaussian_blur(blobSrc.count(), blobSrc.channels, blobSrc.height, blobSrc.width, dfSigma, blobSrc.gpu_data, m_blobBlur.mutable_gpu_data);
                blobSrc.CopyFrom(m_blobBlur);
            }
        }

        /// <summary>
        /// Helper returning whether the direct-inputs array is null (keeps the branch condition readable).
        /// </summary>
        /// <param name="rg">Specifies the array to test.</param>
        /// <returns>Returns true when <paramref name="rg"/> is null.</returns>
        private static bool rgbDirectInputsIsNull(float[] rg)
        {
            return rg == null;
        }
Example #3
0
        //-----------------------------------------------------------------------------------------
        //  Simple Classification (using direct net surgery)
        //-----------------------------------------------------------------------------------------

        /// <summary>
        /// The SimpleClassification sample is designed to show how to manually train the MNIST dataset using raw image data stored
        /// in the \ProgramData\MyCaffe\test_data\images\mnist\training directory (previously loaded with the 'Export Images'
        /// sample above).
        /// </summary>
        /// <remarks>
        /// IMPORTANT: This sample is for demonstration, using the Simplest Classification method is the fastest recommended method that uses the Image Database.
        ///
        /// This sample requires that you have already loaded the MNIST dataset into SQL (or SQLEXPRESS) using the MyCaffe
        /// Test Application by selecting its 'Database | Load MNIST...' menu item.
        /// </remarks>
        /// <param name="sender">Specifies the event sender.</param>
        /// <param name="e">Specifies the event argument.</param>
        private void btnSimpleClassification_Click(object sender, EventArgs e)
        {
            Stopwatch     sw         = new Stopwatch();
            int           nBatchSize = 32;
            SettingsCaffe settings   = new SettingsCaffe();

            settings.GpuIds = "0";

            if (!Directory.Exists(m_strImageDirTesting) || !Directory.Exists(m_strImageDirTraining))
            {
                MessageBox.Show("You must first export the MNIST images by pressing the Export button!", "Export Needed", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            // Collect only the exported *.png image files.  EndsWith is used (instead of
            // Contains) so that '.png' appearing elsewhere in a path cannot cause a false match.
            m_rgstrTrainingFiles = Directory.GetFiles(m_strImageDirTraining);
            m_rgstrTrainingFiles = m_rgstrTrainingFiles.Where(p => p.EndsWith(".png", StringComparison.OrdinalIgnoreCase)).ToArray();
            m_rgstrTestingFiles  = Directory.GetFiles(m_strImageDirTesting);
            m_rgstrTestingFiles  = m_rgstrTestingFiles.Where(p => p.EndsWith(".png", StringComparison.OrdinalIgnoreCase)).ToArray();

            string strSolver;
            string strModel;

            load_descriptors("mnist", out strSolver, out strModel); // Load the descriptors from their respective files (installed by MyCaffe Test Application install)
            strModel = fixup_model(strModel, nBatchSize);

            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, m_log, m_evtCancel);

            try
            {
                mycaffe.Load(Phase.TRAIN,                                  // using the training phase.
                             strSolver,                                    // solver descriptor, that specifies to use the SGD solver.
                             strModel,                                     // simple LENET model descriptor.
                             null, null, null, false, null, false, false); // no weights are loaded and no image database is used.

                // Perform your own training
                Solver <float> solver    = mycaffe.GetInternalSolver();
                Net <float>    net       = mycaffe.GetInternalNet(Phase.TRAIN);
                Blob <float>   dataBlob  = net.blob_by_name("data");
                Blob <float>   labelBlob = net.blob_by_name("label");

                sw.Start();

                int nIterations = 5000;

                for (int i = 0; i < nIterations; i++)
                {
                    // Load the data into the data and label blobs.
                    loadData(m_rgstrTrainingFiles, nBatchSize, dataBlob, labelBlob);

                    // Run the forward and backward passes.
                    double dfLoss;
                    net.Forward(out dfLoss);
                    net.ClearParamDiffs();
                    net.Backward();

                    // Apply the gradients calculated during Backward.
                    solver.ApplyUpdate(i);

                    // Output the loss roughly once per second.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        m_log.Progress = (double)i / (double)nIterations;
                        m_log.WriteLine("Loss = " + dfLoss.ToString());
                        sw.Restart();
                    }
                }

                // Run testing using the MyCaffe control (whose internal Run net is already updated,
                // for it shares its weight memory with the training net).
                net       = mycaffe.GetInternalNet(Phase.TEST);
                dataBlob  = net.blob_by_name("data");
                labelBlob = net.blob_by_name("label");

                float fTotalAccuracy = 0;

                nIterations = 100;
                for (int i = 0; i < nIterations; i++)
                {
                    // Load the data into the data and label blobs.
                    loadData(m_rgstrTestingFiles, nBatchSize, dataBlob, labelBlob);

                    // Run the forward pass.
                    double dfLoss;
                    BlobCollection <float> res = net.Forward(out dfLoss);
                    fTotalAccuracy += res[0].GetData(0);

                    // Output the testing progress roughly once per second.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        m_log.Progress = (double)i / (double)nIterations;
                        m_log.WriteLine("testing...");
                        sw.Restart();
                    }
                }

                double dfAccuracy = (double)fTotalAccuracy / (double)nIterations;

                m_log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

                MessageBox.Show("Average Accuracy = " + dfAccuracy.ToString("P"), "Train/Test on MNIST Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
            finally
            {
                // Release the GPU and host resources held by the control (the original
                // version leaked these by never disposing mycaffe).
                mycaffe.Dispose();
            }
        }
Example #4
0
        //-----------------------------------------------------------------------------------------
        //  Simple Classification (using direct net surgery)
        //-----------------------------------------------------------------------------------------

        /// <summary>
        /// The SimpleClassification sample is designed to show how to manually train the MNIST dataset using raw image data stored
        /// in the \ProgramData\MyCaffe\test_data\images\mnist\training directory (previously loaded with the 'Export Images'
        /// sample above).
        /// </summary>
        /// <remarks>
        /// This sample requires that you have already loaded the MNIST dataset into SQL (or SQLEXPRESS) using the MyCaffe
        /// Test Application by selecting its 'Database | Load MNIST...' menu item.
        /// </remarks>
        /// <param name="sender">Specifies the event sender.</param>
        /// <param name="e">Specifies the event argument.</param>
        private void btnSimpleClassification_Click(object sender, EventArgs e)
        {
            Stopwatch     sw         = new Stopwatch();
            int           nBatchSize = 32;
            SettingsCaffe settings   = new SettingsCaffe();

            settings.GpuIds = "0";

            if (!Directory.Exists(m_strImageDirTraining) || !Directory.Exists(m_strImageDirTesting))
            {
                string strMsg = "You must first expand the MNIST dataset into the following directories:" + Environment.NewLine;
                strMsg += "Training Images: '" + m_strImageDirTraining + "'" + Environment.NewLine;
                strMsg += "Testing Images: '" + m_strImageDirTesting + "'" + Environment.NewLine + Environment.NewLine;

                strMsg += "If you have Microsoft SQL or SQL Express installed, selecting the 'Export' button from the 'ImageClassification' project will export these images for you." + Environment.NewLine + Environment.NewLine;

                strMsg += "If you DO NOT have Microsoft SQL or SQL Express, running the MyCaffe Test Application and selecting the 'Database | Load MNIST...' menu item with the 'Export to file only' check box checked, will export the images for you without SQL." + Environment.NewLine + Environment.NewLine;

                strMsg += "To get the MNIST *.gz data files, please see http://yann.lecun.com/exdb/mnist/";

                MessageBox.Show(strMsg, "Images Not Found", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            // Collect only the exported *.png image files.  EndsWith is used (instead of
            // Contains) so that '.png' appearing elsewhere in a path cannot cause a false match.
            m_rgstrTrainingFiles = Directory.GetFiles(m_strImageDirTraining);
            m_rgstrTrainingFiles = m_rgstrTrainingFiles.Where(p => p.EndsWith(".png", StringComparison.OrdinalIgnoreCase)).ToArray();
            m_rgstrTestingFiles  = Directory.GetFiles(m_strImageDirTesting);
            m_rgstrTestingFiles  = m_rgstrTestingFiles.Where(p => p.EndsWith(".png", StringComparison.OrdinalIgnoreCase)).ToArray();

            string strSolver;
            string strModel;

            load_descriptors("mnist", out strSolver, out strModel); // Load the descriptors from their respective files (installed by MyCaffe Test Application install)
            strModel = fixup_model(strModel, nBatchSize);

            MyCaffeControl <float> mycaffe  = new MyCaffeControl <float>(settings, m_log, m_evtCancel);
            MyCaffeControl <float> mycaffe2 = null;

            try
            {
                mycaffe.LoadLite(Phase.TRAIN, // using the training phase.
                                 strSolver,   // solver descriptor, that specifies to use the SGD solver.
                                 strModel,    // simple LENET model descriptor.
                                 null);       // no weights are loaded.

                // Perform your own training
                Solver <float> solver    = mycaffe.GetInternalSolver();
                Net <float>    net       = mycaffe.GetInternalNet(Phase.TRAIN);
                Blob <float>   dataBlob  = net.blob_by_name("data");
                Blob <float>   labelBlob = net.blob_by_name("label");

                sw.Start();

                int nIterations = 5000;

                for (int i = 0; i < nIterations; i++)
                {
                    // Load the data into the data and label blobs.
                    loadData(m_rgstrTrainingFiles, nBatchSize, dataBlob, labelBlob);

                    // Run the forward and backward passes.
                    double dfLoss;
                    net.Forward(out dfLoss);
                    net.ClearParamDiffs();
                    net.Backward();

                    // Apply the gradients calculated during Backward.
                    solver.ApplyUpdate(i);

                    // Output the loss roughly once per second.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        m_log.Progress = (double)i / (double)nIterations;
                        m_log.WriteLine("Loss = " + dfLoss.ToString());
                        sw.Restart();
                    }
                }

                // Run testing using the MyCaffe control (whose internal Run net is already updated,
                // for it shares its weight memory with the training net).
                net       = mycaffe.GetInternalNet(Phase.TEST);
                dataBlob  = net.blob_by_name("data");
                labelBlob = net.blob_by_name("label");

                float fTotalAccuracy = 0;

                nIterations = 100;
                for (int i = 0; i < nIterations; i++)
                {
                    // Load the data into the data and label blobs.
                    loadData(m_rgstrTestingFiles, nBatchSize, dataBlob, labelBlob);

                    // Run the forward pass.
                    double dfLoss;
                    BlobCollection <float> res = net.Forward(out dfLoss);
                    fTotalAccuracy += res[0].GetData(0);

                    // Output the testing progress roughly once per second.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        m_log.Progress = (double)i / (double)nIterations;
                        m_log.WriteLine("testing...");
                        sw.Restart();
                    }
                }

                double dfAccuracy = (double)fTotalAccuracy / (double)nIterations;

                m_log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

                MessageBox.Show("Average Accuracy = " + dfAccuracy.ToString("P"), "Train/Test on MNIST Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);

                // Save the trained weights for use later.
                saveWeights(mycaffe, "my_weights");

                // Run a single classification on the first test image with both the
                // original control and a clone, disposing the bitmap when done.
                using (Bitmap bmp = new Bitmap(m_rgstrTestingFiles[0]))
                {
                    ResultCollection results = mycaffe.Run(bmp);

                    mycaffe2 = mycaffe.Clone(0);
                    ResultCollection results2 = mycaffe2.Run(bmp);
                }
            }
            finally
            {
                // Release resources used (the original version leaked them on exception,
                // and never disposed the Bitmap at all).
                mycaffe.Dispose();

                if (mycaffe2 != null)
                    mycaffe2.Dispose();
            }
        }