Example #1
        /// <summary>
        /// Releases all resources (GPU and Host) used by the Solver.
        /// </summary>
        protected override void dispose()
        {
            if (m_blobGradients != null)
            {
                m_blobGradients.Dispose();
                m_blobGradients = null;
            }

            if (m_blobGradientsPrev != null)
            {
                m_blobGradientsPrev.Dispose();
                m_blobGradientsPrev = null;
            }

            if (m_blobDirection != null)
            {
                m_blobDirection.Dispose();
                m_blobDirection = null;
            }

            if (m_colBlobHistoryY != null)
            {
                m_colBlobHistoryY.Dispose();
                m_colBlobHistoryY = null;
            }

            if (m_colBlobHistoryS != null)
            {
                m_colBlobHistoryS.Dispose();
                m_colBlobHistoryS = null;
            }

            base.dispose();
        }
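Each guard above repeats the same null-check / Dispose / null-out sequence. Examples #4 and #8 below push that sequence into a dispose(ref ...) helper instead; a minimal sketch of such a helper (illustrative only, assuming the fields implement IDisposable, and not taken from the library source) could look like this:

        /// <summary>
        /// Disposes the object, if set, and clears the reference.
        /// </summary>
        protected void dispose<TItem>(ref TItem obj) where TItem : class, IDisposable
        {
            if (obj != null)
            {
                obj.Dispose();
                obj = null;
            }
        }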
Example #2
 /** @copydoc Layer::dispose */
 protected override void dispose()
 {
     m_rgBatchData.Dispose();
     m_blobCompare.Dispose();
     m_blobItem.Dispose();
     base.dispose();
 }
Example #3
        public InputLayerEx(CudaDnn<T> cuda, Log log, Layer<T> layer) : base(cuda, log, layer.layer_param)
        {
            string strPath = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);

            strPath += "\\mycaffe\\test_data\\data\\text\\";
            LayerParameter text_param = new LayerParameter(LayerParameter.LayerType.TEXT_DATA);

            text_param.text_data_param.batch_size     = 1;
            text_param.text_data_param.decoder_source = strPath + "robot_text.txt";
            text_param.text_data_param.encoder_source = strPath + "human_text.txt";
            text_param.text_data_param.enable_normal_encoder_output  = true;
            text_param.text_data_param.enable_reverse_encoder_output = true;
            text_param.text_data_param.sample_size = 1000;
            text_param.text_data_param.shuffle     = false;
            text_param.text_data_param.time_steps  = 80;
            text_param.phase = Phase.TEST;

            m_dataLayer = new TextDataLayer<T>(cuda, log, text_param);
            BlobCollection<T> colBottom = new BlobCollection<T>();
            BlobCollection<T> colTop    = new BlobCollection<T>();

            // The TextDataLayer produces seven top blobs, so allocate one for each before Setup.
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));
            colTop.Add(new Blob<T>(cuda, log));

            m_dataLayer.Setup(colBottom, colTop);

            colTop.Dispose();
        }
Example #4
        public void Dispose()
        {
            m_memLoss.OnGetLoss -= memLoss_OnGetLoss;
            dispose(ref m_blobDiscountedR);
            dispose(ref m_blobPolicyGradient);
            dispose(ref m_blobActionOneHot);
            dispose(ref m_blobLoss);

            if (m_colAccumulatedGradients != null)
            {
                m_colAccumulatedGradients.Dispose();
                m_colAccumulatedGradients = null;
            }
        }
Example #5
        /// <summary>
        /// Releases all resources (GPU and Host) used by the Solver.
        /// </summary>
        protected override void dispose()
        {
            if (m_colHistory != null)
            {
                m_colHistory.Dispose();
                m_colHistory = null;
            }

            if (m_colTemp != null)
            {
                m_colTemp.Dispose();
                m_colTemp = null;
            }

            base.dispose();
        }
Example #6
        /// <summary>
        /// Clean up any resources used.
        /// </summary>
        protected override void dispose()
        {
            if (m_colWork1 != null)
            {
                m_colWork1.Dispose();
                m_colWork1 = null;
            }

            if (m_colWork2 != null)
            {
                m_colWork2.Dispose();
                m_colWork2 = null;
            }

            base.dispose();
        }
Example #7
        /** @copydoc Layer::dispose */
        protected override void dispose()
        {
            foreach (PoolingLayer<T> layer in m_rgPoolingLayers)
            {
                layer.Dispose();
            }

            m_rgPoolingLayers.Clear();

            if (m_colBlobSplitTopVec != null)
            {
                m_colBlobSplitTopVec.Dispose();
                m_colBlobSplitTopVec = null;
            }

            if (m_split_layer != null)
            {
                m_split_layer.Dispose();
                m_split_layer = null;
            }

            if (m_colBlobPoolingOutputs != null)
            {
                m_colBlobPoolingOutputs.Dispose();
                m_colBlobPoolingOutputs = null;
            }

            if (m_colBlobFlattenOutputs != null)
            {
                m_colBlobFlattenOutputs.Dispose();
                m_colBlobFlattenOutputs = null;
            }

            foreach (FlattenLayer<T> layer in m_rgFlattenLayers)
            {
                layer.Dispose();
            }

            m_rgFlattenLayers.Clear();

            m_rgPoolingBottomVec.Clear();
            m_rgPoolingTopVecs.Clear();
            m_rgFlattenLayerTopVecs.Clear();

            base.dispose();
        }
Example #8
        /// <summary>
        /// Release all resources used by the Brain.
        /// </summary>
        public void Dispose()
        {
            dispose(ref m_blobActions);
            dispose(ref m_blobQValue);
            dispose(ref m_blobNextQValue);
            dispose(ref m_blobExpectedQValue);
            dispose(ref m_blobDone);
            dispose(ref m_blobLoss);
            dispose(ref m_blobWeights);

            if (m_colAccumulatedGradients != null)
            {
                m_colAccumulatedGradients.Dispose();
                m_colAccumulatedGradients = null;
            }

            if (m_netTarget != null)
            {
                m_netTarget.Dispose();
                m_netTarget = null;
            }

            if (m_font != null)
            {
                m_font.Dispose();
                m_font = null;
            }

            foreach (KeyValuePair<Color, Tuple<Brush, Brush, Pen, Brush>> kv in m_rgStyle)
            {
                kv.Value.Item1.Dispose();
                kv.Value.Item2.Dispose();
                kv.Value.Item3.Dispose();
                kv.Value.Item4.Dispose();
            }

            m_rgStyle.Clear();
        }
Example #9
 /** @copydoc Layer::dispose */
 protected override void dispose()
 {
     m_rgBatchData.Dispose();
     m_rgBatchLabels.Dispose();
     base.dispose();
 }
Example #10
 /// <summary>
 /// Release all internal blobs.
 /// </summary>
 protected override void dispose()
 {
     m_colHdfBlobs.Dispose();
     base.dispose();
 }
Example #11
        /// <summary>
        /// Process the content image by applying the style learned from the style image.
        /// </summary>
        /// <param name="bmpStyle">Specifies the image from which the style to apply to the content is learned.</param>
        /// <param name="bmpContent">Specifies the content image to which the style is to be applied.</param>
        /// <param name="nIterations">Specifies the number of training iterations.</param>
        /// <param name="strResultDir">Optionally, specifies an output directory where intermediate images are stored.</param>
        /// <param name="nIntermediateOutput">Optionally, specifies how often to output an intermediate image.</param>
        /// <param name="dfTvLoss">Optionally, specifies the TV-Loss weight for smoothing (default = 0, which disables this loss).</param>
        /// <returns>The resulting image is returned.</returns>
        public Bitmap Process(Bitmap bmpStyle, Bitmap bmpContent, int nIterations, string strResultDir = null, int nIntermediateOutput = -1, double dfTvLoss = 0)
        {
            Solver<T>         solver = null;
            Net<T>            net    = null;
            BlobCollection<T> colContentActivations = new BlobCollection<T>();
            BlobCollection<T> colGramActivations    = new BlobCollection<T>();
            double             dfLoss;

            try
            {
                m_dfTVLossWeight = dfTvLoss;
                m_nIterations    = nIterations;

                if (bmpStyle.Width != bmpContent.Width ||
                    bmpStyle.Height != bmpContent.Height)
                {
                    bmpStyle = ImageTools.ResizeImage(bmpStyle, bmpContent.Width, bmpContent.Height);
                }

                m_log.WriteLine("Creating input network...");
                m_log.Enable = false;
                net          = new Net<T>(m_cuda, m_log, m_param, m_evtCancel, null, Phase.TEST);
                m_log.Enable = true;

                if (m_rgWeights != null)
                {
                    net.LoadWeights(m_rgWeights, m_persist);
                }

                //-----------------------------------------
                //  Get style and content activations.
                //-----------------------------------------

                prepare_data_blob(net, bmpStyle);
                net.Forward(out dfLoss);

                foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"])
                {
                    string   strGram  = kvGram.Key;
                    Blob<T>  blobGram = net.blob_by_name(strGram);
                    colGramActivations.Add(blobGram.Clone());
                }

                prepare_data_blob(net, bmpContent);
                net.Forward(out dfLoss);

                foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
                {
                    string   strContent  = kvContent.Key;
                    Blob<T>  blobContent = net.blob_by_name(strContent);
                    colContentActivations.Add(blobContent.Clone());
                }


                //-----------------------------------------
                //  Prepare the network by adding new layers.
                //-----------------------------------------

                NetParameter net_param = m_param;

                foreach (KeyValuePair<string, double> kvInput in m_rgLayers["input"])
                {
                    string         strName = kvInput.Key;
                    LayerParameter p       = new LayerParameter(LayerParameter.LayerType.INPUT);
                    p.name = "input_" + strName;
                    p.top.Add(p.name);

                    Blob<T> blob = net.blob_by_name(strName);
                    p.input_param.shape.Add(new BlobShape(blob.shape()));

                    net_param.layer.Add(p);
                }

                foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
                {
                    string strName   = kvContent.Key;
                    string strScale1 = "input_" + strName;
                    string strScale2 = strName;

                    if (m_dfContentDataScale != 1.0)
                    {
                        strScale1 += "b";
                        LayerParameter ps1 = new LayerParameter(LayerParameter.LayerType.SCALAR);
                        ps1.scalar_param.value                = m_dfContentDataScale;
                        ps1.scalar_param.operation            = ScalarParameter.ScalarOp.MUL;
                        ps1.scalar_param.passthrough_gradient = true;
                        ps1.bottom.Add("input_" + strName);
                        ps1.top.Add(strScale1);

                        net_param.layer.Add(ps1);

                        strScale2 += "b";
                        LayerParameter ps2 = new LayerParameter(LayerParameter.LayerType.SCALAR);
                        ps2.scalar_param.value                = m_dfContentDataScale;
                        ps2.scalar_param.operation            = ScalarParameter.ScalarOp.MUL;
                        ps2.scalar_param.passthrough_gradient = true;
                        ps2.bottom.Add(strName);
                        ps2.top.Add(strScale2);

                        net_param.layer.Add(ps2);
                    }

                    LayerParameter event_param = new LayerParameter(LayerParameter.LayerType.EVENT);
                    event_param.name = "event_" + strName;
                    event_param.bottom.Add(strScale2);
                    event_param.bottom.Add(strScale1);
                    event_param.top.Add("event_" + strName);

                    net_param.layer.Add(event_param);

                    LayerParameter p = new LayerParameter(LayerParameter.LayerType.EUCLIDEAN_LOSS);
                    p.name = "loss_" + strName;

                    Blob<T>  blobContent = colContentActivations[strName];
                    double   dfScale     = get_content_scale(blobContent);
                    p.loss_weight.Add(kvContent.Value * dfScale);

                    p.bottom.Add("event_" + strName);
                    p.bottom.Add(strScale1);
                    p.top.Add("loss_" + strName);

                    net_param.layer.Add(p);
                }

                foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"].ToList())
                {
                    string strGramName = kvGram.Key;

                    LayerParameter event_param = new LayerParameter(LayerParameter.LayerType.EVENT);
                    event_param.name = "event_" + strGramName;
                    event_param.bottom.Add(strGramName);
                    event_param.bottom.Add("input_" + strGramName);
                    event_param.top.Add("event_" + strGramName);

                    net_param.layer.Add(event_param);

                    LayerParameter p = new LayerParameter(LayerParameter.LayerType.EUCLIDEAN_LOSS);
                    p.name = "loss_" + strGramName;

                    Blob<T>  blobGram = colGramActivations[strGramName];
                    double   dfScale  = get_style_scale(blobGram);
                    p.loss_weight.Add(kvGram.Value * dfScale);

                    p.bottom.Add("input_" + strGramName);
                    p.bottom.Add("event_" + strGramName);
                    p.top.Add("loss_" + strGramName);

                    net_param.layer.Add(p);
                }

                // Add TV loss.
                if (m_dfTVLossWeight != 0)
                {
                    LayerParameter p = new LayerParameter(LayerParameter.LayerType.TV_LOSS);
                    p.name = "loss_tv";

                    double dfWeight = m_dfTVLossWeight;
                    p.loss_weight.Add(dfWeight);

                    p.bottom.Add("data");
                    p.top.Add("loss_tv");

                    net_param.layer.Add(p);
                }

                // Replace InputLayer with ParameterLayer,
                // so that we'll be able to backprop into the image.
                Blob<T> data = net.blob_by_name("data");
                for (int i = 0; i < net_param.layer.Count; i++)
                {
                    LayerParameter p = net_param.layer[i];

                    if (p.name == "input1")
                    {
                        net_param.layer[i].SetType(LayerParameter.LayerType.PARAMETER);
                        net_param.layer[i].parameter_param.shape = new BlobShape(data.shape());
                        break;
                    }
                }

                // Disable weights learning.
                List<LayerParameter.LayerType> rgTypes = new List<LayerParameter.LayerType>();
                rgTypes.Add(LayerParameter.LayerType.CONVOLUTION);
                rgTypes.Add(LayerParameter.LayerType.DECONVOLUTION);
                rgTypes.Add(LayerParameter.LayerType.INNERPRODUCT);
                rgTypes.Add(LayerParameter.LayerType.PRELU);
                rgTypes.Add(LayerParameter.LayerType.BIAS);
                rgTypes.Add(LayerParameter.LayerType.EMBED);
                rgTypes.Add(LayerParameter.LayerType.LSTM);
                rgTypes.Add(LayerParameter.LayerType.LSTM_SIMPLE);
                rgTypes.Add(LayerParameter.LayerType.RNN);

                foreach (LayerParameter layer in net_param.layer)
                {
                    if (rgTypes.Contains(layer.type))
                    {
                        // ParamSpec(0, 0) zeroes the learning-rate and decay multipliers, freezing the layer's weights.
                        layer.parameters = new List<ParamSpec>();
                        layer.parameters.Add(new ParamSpec(0, 0));
                        layer.parameters.Add(new ParamSpec(0, 0));
                    }
                }

                net.Dispose();
                net = null;


                //-----------------------------------------
                //  Create solver and assign inputs.
                //-----------------------------------------

                RawProto proto1 = net_param.ToProto("root");
                string   str    = proto1.ToString();    // textual form of the net, handy for debugging

                SolverParameter solver_param = new SolverParameter();
                solver_param.display         = m_nDisplayEvery;
                solver_param.train_net_param = net_param;
                solver_param.test_iter.Clear();
                solver_param.test_interval       = 0;
                solver_param.test_initialization = false;
                solver_param.base_lr             = m_dfLearningRate;
                solver_param.type = m_solverType;

                m_log.WriteLine("Creating " + m_solverType.ToString() + " solver with learning rate = " + m_dfLearningRate.ToString() + "...");
                m_log.Enable = false;

                if (m_solverType == SolverParameter.SolverType.LBFGS)
                {
                    solver = new LBFGSSolver <T>(m_cuda, m_log, solver_param, m_evtCancel, null, null, null, m_persist);
                }
                else
                {
                    solver = Solver<T>.Create(m_cuda, m_log, solver_param, m_evtCancel, null, null, null, m_persist);
                }

                m_log.Enable                = true;
                solver.OnSnapshot          += Solver_OnSnapshot;
                solver.OnTrainingIteration += Solver_OnTrainingIteration;

                foreach (Layer<T> layer in solver.net.layers)
                {
                    if (layer.type == LayerParameter.LayerType.EVENT)
                    {
                        EventLayer<T> eventLayer = layer as EventLayer<T>;
                        eventLayer.OnBackward += EventLayer_OnBackward;
                    }
                }

                prepare_input_param(solver.net, bmpContent);

                foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
                {
                    string   strName = kvContent.Key;
                    Blob<T>  blobDst = solver.net.blob_by_name("input_" + strName);
                    Blob<T>  blobSrc = colContentActivations[strName];
                    blobDst.CopyFrom(blobSrc);
                }

                foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"])
                {
                    string   strName = kvGram.Key;
                    Blob<T>  blobDst = solver.net.blob_by_name("input_" + strName);
                    Blob<T>  blobSrc = colGramActivations[strName];
                    blobDst.CopyFrom(blobSrc);
                }

                //-----------------------------------------
                //  Optimize.
                //-----------------------------------------

                int nIterations1 = m_nIterations;
                if (strResultDir != null && nIntermediateOutput > 0)
                {
                    nIterations1 /= nIntermediateOutput;
                }

                if (m_rgWeights != null)
                {
                    // Temporarily pull the image parameter blob out of the learnable parameters so the
                    // saved weights map onto the network weights, then put it back.
                    Blob<T> blobInput = solver.net.learnable_parameters[0];
                    solver.net.learnable_parameters.RemoveAt(0);
                    solver.net.LoadWeights(m_rgWeights, m_persist);
                    solver.net.learnable_parameters.Insert(0, blobInput);
                }

                if (strResultDir != null)
                {
                    strResultDir  = strResultDir.TrimEnd('\\');
                    strResultDir += "\\";
                }

                for (int i = 0; i < nIterations1; i++)
                {
                    if (m_evtCancel.WaitOne(0))
                    {
                        break;
                    }

                    solver.Step(nIntermediateOutput, TRAIN_STEP.NONE, true, true, true);

                    if (strResultDir != null)
                    {
                        Bitmap bmpTemp = save(solver.net);

                        string strFile = strResultDir + i.ToString() + "_temp.png";
                        if (File.Exists(strFile))
                        {
                            File.Delete(strFile);
                        }

                        bmpTemp.Save(strFile);
                    }
                }

                Bitmap bmpOutput = save(solver.net);

                return bmpOutput;
            }
            finally
            {
                if (net != null)
                {
                    net.Dispose();
                }

                if (solver != null)
                {
                    solver.Dispose();
                }

                colGramActivations.Dispose();
                colContentActivations.Dispose();
            }
        }
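For orientation, here is a sketch of how Process might be driven from calling code. The styleTransfer instance and its construction are assumptions for illustration; only the Process signature above is taken from the example.

        // Hypothetical caller -- the wrapper instance 'styleTransfer' exposing Process
        // is assumed; only the Process(...) signature is taken from the example above.
        using (Bitmap bmpStyle = new Bitmap("style.png"))
        using (Bitmap bmpContent = new Bitmap("content.png"))
        {
            // Run 200 iterations, writing an intermediate image every 20 iterations.
            Bitmap bmpResult = styleTransfer.Process(bmpStyle, bmpContent, 200, "c:\\temp\\results", 20);
            bmpResult.Save("c:\\temp\\results\\final.png");
        }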