Code example #1
File: MemoryDataLayer.cs Project: maplewei/MyCaffe
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="blobData">Specifies the blob data to fill with the ordered data.</param>
 /// <param name="blobClip">Specifies the clip data to fill with the ordered data.</param>
 /// <param name="blobLabel">Specifies the labeld ata to fill with ordered data.</param>
 /// <param name="rgData">Specifies the raw data to use to fill.</param>
 /// <param name="rgClip">Specifies the raw clip data to use to fill.</param>
 /// <param name="type">Specifies the LSTM type.</param>
 public MemoryDataLayerPackDataArgs(Blob <T> blobData, Blob <T> blobClip, Blob <T> blobLabel, List <Datum> rgData, List <Datum> rgClip, LayerParameter.LayerType type = LayerParameter.LayerType.LSTM)
 {
     m_blobData  = blobData;
     m_blobClip  = blobClip;
     m_blobLabel = blobLabel;
     m_rgData    = rgData;
     m_rgClip    = rgClip;
     m_lstmType  = type;
 }
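A minimal usage sketch follows; the blobs and datum lists are assumed to be allocated by the caller, and every variable name here is hypothetical:

 // Hypothetical call site: bundle pre-allocated blobs and raw datums so a
 // handler can pack them in the LSTM ordering.
 MemoryDataLayerPackDataArgs<float> args = new MemoryDataLayerPackDataArgs<float>(
     blobData, blobClip, blobLabel, rgData, rgClip,
     LayerParameter.LayerType.LSTM);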
Code example #2
        /// <summary>
        /// The ResultCollection constructor.
        /// </summary>
        /// <param name="rgResults">Specifies the results.</param>
        /// <param name="outputLayerType">Specifies the output layer type.</param>
        public ResultCollection(List <Result> rgResults, LayerParameter.LayerType outputLayerType)
        {
            m_resultType        = GetResultType(outputLayerType);
            m_rgResultsOriginal = rgResults;

            foreach (Result item in rgResults)
            {
                m_rgResultsSorted.Add(item);
            }

            m_rgResultsSorted = m_rgResultsSorted.OrderByDescending(p => p.Score).ToList();
        }
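A usage sketch, assuming the Result list was filled elsewhere by a forward pass (names hypothetical):

        // Hypothetical call site: wrap raw softmax results; the collection keeps
        // the original order plus a copy sorted by descending score.
        List<Result> rgResults = new List<Result>();   // filled from a forward pass elsewhere
        ResultCollection res = new ResultCollection(rgResults, LayerParameter.LayerType.SOFTMAX);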
Code example #3
        //-----------------------------------------------------------------------------------------
        //  Create descriptors programmatically
        //-----------------------------------------------------------------------------------------

        private bool verifyInputType(LayerParameter.LayerType inputType)
        {
            if (inputType == LayerParameter.LayerType.INPUT)
            {
                return(true);
            }

            if (inputType == LayerParameter.LayerType.IMAGE_DATA)
            {
                return(true);
            }

            return(false);
        }
Code example #4
        /// <summary>
        /// Locates a layer based on a layer type and phase.
        /// </summary>
        /// <param name="type">Specifies the LayerParameter.LayerType to look for.</param>
        /// <param name="phase">Optionally, specifies a phase to look for.</param>
        /// <returns>The first layer matching the type (and phase, when specified) is returned, or null when no match is found.</returns>
        public LayerParameter FindLayer(LayerParameter.LayerType type, Phase phase = Phase.NONE)
        {
            foreach (LayerParameter p in m_rgLayers)
            {
                if (p.type == type)
                {
                    if (p.MeetsPhase(phase))
                    {
                        return(p);
                    }
                }
            }

            return(null);
        }
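A call-site sketch; the object exposing FindLayer (here assumed to be a NetParameter named net_param) is an assumption:

        // Hypothetical: prefer the SOFTMAX layer active in the TEST phase and
        // fall back to a phase-agnostic search when none matches.
        LayerParameter p = net_param.FindLayer(LayerParameter.LayerType.SOFTMAX, Phase.TEST);
        if (p == null)
        {
            p = net_param.FindLayer(LayerParameter.LayerType.SOFTMAX);
        }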
Code example #5
        public static RESULT_TYPE GetResultType(LayerParameter.LayerType type)
        {
            switch (type)
            {
            case LayerParameter.LayerType.SOFTMAX:
                return(RESULT_TYPE.PROBABILITIES);

            case LayerParameter.LayerType.DECODE:
                return(RESULT_TYPE.DISTANCES);

            case LayerParameter.LayerType.DETECTION_OUTPUT:
                return(RESULT_TYPE.MULTIBOX);

            default:
                return(RESULT_TYPE.NONE);
            }
        }
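The mapping is a direct switch on the output layer type; a short sketch of what it yields (assuming the method lives on ResultCollection, as example #2 suggests):

        // SOFTMAX tops are read as probabilities; any type without a case
        // falls through to RESULT_TYPE.NONE.
        RESULT_TYPE rt1 = ResultCollection.GetResultType(LayerParameter.LayerType.SOFTMAX); // PROBABILITIES
        RESULT_TYPE rt2 = ResultCollection.GetResultType(LayerParameter.LayerType.RELU);    // NONE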
Code example #6
        public Brain(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallbackRNN icallback, Phase phase, BucketCollection rgVocabulary, bool bUsePreloadData, string strRunProperties = null)
        {
            string strOutputBlob = null;

            if (strRunProperties != null)
            {
                m_runProperties = new PropertySet(strRunProperties);
            }

            m_icallback             = icallback;
            m_mycaffe               = mycaffe;
            m_properties            = properties;
            m_random                = random;
            m_rgVocabulary          = rgVocabulary;
            m_bUsePreloadData       = bUsePreloadData;
            m_nSolverSequenceLength = m_properties.GetPropertyAsInt("SequenceLength", -1);
            m_bDisableVocabulary    = m_properties.GetPropertyAsBool("DisableVocabulary", false);
            m_nThreads              = m_properties.GetPropertyAsInt("Threads", 1);
            m_dfScale               = m_properties.GetPropertyAsDouble("Scale", 1.0);

            if (m_nThreads > 1)
            {
                m_dataPool.Initialize(m_nThreads, icallback);
            }

            if (m_runProperties != null)
            {
                m_dfTemperature = Math.Abs(m_runProperties.GetPropertyAsDouble("Temperature", 0));
                if (m_dfTemperature > 1.0)
                {
                    m_dfTemperature = 1.0;
                }

                string strPhaseOnRun = m_runProperties.GetProperty("PhaseOnRun", false);
                switch (strPhaseOnRun)
                {
                case "RUN":
                    m_phaseOnRun = Phase.RUN;
                    break;

                case "TEST":
                    m_phaseOnRun = Phase.TEST;
                    break;

                case "TRAIN":
                    m_phaseOnRun = Phase.TRAIN;
                    break;
                }

                if (phase == Phase.RUN && m_phaseOnRun != Phase.NONE)
                {
                    if (m_phaseOnRun != Phase.RUN)
                    {
                        m_mycaffe.Log.WriteLine("Warning: Running on the '" + m_phaseOnRun.ToString() + "' network.");
                    }

                    strOutputBlob = m_runProperties.GetProperty("OutputBlob", false);
                    if (strOutputBlob == null)
                    {
                        throw new Exception("You must specify the 'OutputBlob' when Running with a phase other than RUN.");
                    }

                    strOutputBlob = Utility.Replace(strOutputBlob, '~', ';');

                    phase = m_phaseOnRun;
                }
            }

            m_net = mycaffe.GetInternalNet(phase);
            if (m_net == null)
            {
                mycaffe.Log.WriteLine("WARNING: Test net does not exist, set test_iteration > 0.  Using TRAIN phase instead.");
                m_net = mycaffe.GetInternalNet(Phase.TRAIN);
            }

            // Find the first LSTM layer to determine how to load the data.
            // NOTE: Only LSTM has a special loading order, other layers use the standard N, C, H, W ordering.
            LSTMLayer <T>       lstmLayer       = null;
            LSTMSimpleLayer <T> lstmSimpleLayer = null;

            foreach (Layer <T> layer1 in m_net.layers)
            {
                if (layer1.layer_param.type == LayerParameter.LayerType.LSTM)
                {
                    lstmLayer  = layer1 as LSTMLayer <T>;
                    m_lstmType = LayerParameter.LayerType.LSTM;
                    break;
                }
                else if (layer1.layer_param.type == LayerParameter.LayerType.LSTM_SIMPLE)
                {
                    lstmSimpleLayer = layer1 as LSTMSimpleLayer <T>;
                    m_lstmType      = LayerParameter.LayerType.LSTM_SIMPLE;
                    break;
                }
            }

            if (lstmLayer == null && lstmSimpleLayer == null)
            {
                throw new Exception("Could not find the required LSTM or LSTM_SIMPLE layer!");
            }

            if (m_phaseOnRun != Phase.NONE && m_phaseOnRun != Phase.RUN && strOutputBlob != null)
            {
                if ((m_blobOutput = m_net.FindBlob(strOutputBlob)) == null)
                {
                    throw new Exception("Could not find the 'Output' layer top named '" + strOutputBlob + "'!");
                }
            }

            if ((m_blobData = m_net.FindBlob("data")) == null)
            {
                throw new Exception("Could not find the 'Input' layer top named 'data'!");
            }

            if ((m_blobClip = m_net.FindBlob("clip")) == null)
            {
                throw new Exception("Could not find the 'Input' layer top named 'clip'!");
            }

            Layer <T> layer = m_net.FindLastLayer(LayerParameter.LayerType.INNERPRODUCT);

            m_mycaffe.Log.CHECK(layer != null, "Could not find an ending INNERPRODUCT layer!");

            if (!m_bDisableVocabulary)
            {
                m_nVocabSize = (int)layer.layer_param.inner_product_param.num_output;
                if (rgVocabulary != null)
                {
                    m_mycaffe.Log.CHECK_EQ(m_nVocabSize, rgVocabulary.Count, "The vocabulary count = '" + rgVocabulary.Count.ToString() + "' and last inner product output count = '" + m_nVocabSize.ToString() + "' - these do not match but they should!");
                }
            }

            if (m_lstmType == LayerParameter.LayerType.LSTM)
            {
                m_nSequenceLength = m_blobData.shape(0);
                m_nBatchSize      = m_blobData.shape(1);
            }
            else
            {
                m_nBatchSize      = (int)lstmSimpleLayer.layer_param.lstm_simple_param.batch_size;
                m_nSequenceLength = m_blobData.shape(0) / m_nBatchSize;

                if (phase == Phase.RUN)
                {
                    m_nBatchSize = 1;

                    List <int> rgNewShape = new List <int>()
                    {
                        m_nSequenceLength, 1
                    };
                    m_blobData.Reshape(rgNewShape);
                    m_blobClip.Reshape(rgNewShape);
                    m_net.Reshape();
                }
            }

            m_mycaffe.Log.CHECK_EQ(m_nSequenceLength, m_blobData.num, "The data num must equal the sequence length of " + m_nSequenceLength.ToString());

            m_rgDataInput = new T[m_nSequenceLength * m_nBatchSize];

            T[] rgClipInput = new T[m_nSequenceLength * m_nBatchSize];
            m_mycaffe.Log.CHECK_EQ(rgClipInput.Length, m_blobClip.count(), "The clip count must equal the sequence length * batch size: " + rgClipInput.Length.ToString());
            m_tZero = (T)Convert.ChangeType(0, typeof(T));
            m_tOne  = (T)Convert.ChangeType(1, typeof(T));

            for (int i = 0; i < rgClipInput.Length; i++)
            {
                if (m_lstmType == LayerParameter.LayerType.LSTM)
                {
                    rgClipInput[i] = (i < m_nBatchSize) ? m_tZero : m_tOne;
                }
                else
                {
                    rgClipInput[i] = (i % m_nSequenceLength == 0) ? m_tZero : m_tOne;
                }
            }

            m_blobClip.mutable_cpu_data = rgClipInput;

            if (phase != Phase.RUN)
            {
                m_solver                      = mycaffe.GetInternalSolver();
                m_solver.OnStart             += m_solver_OnStart;
                m_solver.OnTestStart         += m_solver_OnTestStart;
                m_solver.OnTestingIteration  += m_solver_OnTestingIteration;
                m_solver.OnTrainingIteration += m_solver_OnTrainingIteration;

                if ((m_blobLabel = m_net.FindBlob("label")) == null)
                {
                    throw new Exception("Could not find the 'Input' layer top named 'label'!");
                }

                m_nSequenceLengthLabel = m_blobLabel.count(0, 2);
                m_rgLabelInput         = new T[m_nSequenceLengthLabel];
                m_mycaffe.Log.CHECK_EQ(m_rgLabelInput.Length, m_blobLabel.count(), "The label count must equal the label sequence length * batch size: " + m_rgLabelInput.Length.ToString());
                m_mycaffe.Log.CHECK(m_nSequenceLengthLabel == m_nSequenceLength * m_nBatchSize || m_nSequenceLengthLabel == 1, "The label sequence length must be 1 or equal to the length of the sequence: " + m_nSequenceLength.ToString());
            }
        }
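The clip initialization above is the subtle part: with LSTM the data blob is sequence-major (shape T x N, as the shape(0)/shape(1) reads show), so only the first nBatchSize entries (time step 0 of every batch item) are zeroed, while with LSTM_SIMPLE the layout is batch-major, so a zero marks every sequence start. A standalone sketch with hypothetical sizes:

        // Hypothetical sizes: T = 4 time steps, N = 2 batch items.
        int nSequenceLength = 4;
        int nBatchSize = 2;
        float[] rgClipLstm = new float[nSequenceLength * nBatchSize];
        float[] rgClipSimple = new float[nSequenceLength * nBatchSize];

        for (int i = 0; i < nSequenceLength * nBatchSize; i++)
        {
            rgClipLstm[i] = (i < nBatchSize) ? 0 : 1;               // -> 0,0,1,1,1,1,1,1
            rgClipSimple[i] = (i % nSequenceLength == 0) ? 0 : 1;   // -> 0,1,1,1,0,1,1,1
        }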
Code example #7
        public Brain(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallbackRNN icallback, Phase phase, BucketCollection rgVocabulary, string strRunProperties = null)
        {
            string strOutputBlob = null;

            if (strRunProperties != null)
            {
                m_runProperties = new PropertySet(strRunProperties);
            }

            m_icallback    = icallback;
            m_mycaffe      = mycaffe;
            m_properties   = properties;
            m_random       = random;
            m_rgVocabulary = rgVocabulary;

            if (m_runProperties != null)
            {
                m_dfTemperature = m_runProperties.GetPropertyAsDouble("Temperature", 0);
                string strPhaseOnRun = m_runProperties.GetProperty("PhaseOnRun", false);
                switch (strPhaseOnRun)
                {
                case "RUN":
                    m_phaseOnRun = Phase.RUN;
                    break;

                case "TEST":
                    m_phaseOnRun = Phase.TEST;
                    break;

                case "TRAIN":
                    m_phaseOnRun = Phase.TRAIN;
                    break;
                }

                if (phase == Phase.RUN && m_phaseOnRun != Phase.NONE)
                {
                    if (m_phaseOnRun != Phase.RUN)
                    {
                        m_mycaffe.Log.WriteLine("Warning: Running on the '" + m_phaseOnRun.ToString() + "' network.");
                    }

                    strOutputBlob = m_runProperties.GetProperty("OutputBlob", false);
                    if (strOutputBlob == null)
                    {
                        throw new Exception("You must specify the 'OutputBlob' when Running with a phase other than RUN.");
                    }

                    strOutputBlob = Utility.Replace(strOutputBlob, '~', ';');

                    phase = m_phaseOnRun;
                }
            }

            m_net = mycaffe.GetInternalNet(phase);

            // Find the first LSTM layer to determine how to load the data.
            // NOTE: Only LSTM has a special loading order, other layers use the standard N, C, H, W ordering.
            LSTMLayer <T>       lstmLayer       = null;
            LSTMSimpleLayer <T> lstmSimpleLayer = null;

            foreach (Layer <T> layer1 in m_net.layers)
            {
                if (layer1.layer_param.type == LayerParameter.LayerType.LSTM)
                {
                    lstmLayer  = layer1 as LSTMLayer <T>;
                    m_lstmType = LayerParameter.LayerType.LSTM;
                    break;
                }
                else if (layer1.layer_param.type == LayerParameter.LayerType.LSTM_SIMPLE)
                {
                    lstmSimpleLayer = layer1 as LSTMSimpleLayer <T>;
                    m_lstmType      = LayerParameter.LayerType.LSTM_SIMPLE;
                    break;
                }
            }

            if (lstmLayer == null && lstmSimpleLayer == null)
            {
                throw new Exception("Could not find the required LSTM or LSTM_SIMPLE layer!");
            }

            if (m_phaseOnRun != Phase.NONE && m_phaseOnRun != Phase.RUN && strOutputBlob != null)
            {
                if ((m_blobOutput = m_net.FindBlob(strOutputBlob)) == null)
                {
                    throw new Exception("Could not find the 'Output' layer top named '" + strOutputBlob + "'!");
                }
            }

            if ((m_blobData = m_net.FindBlob("data")) == null)
            {
                throw new Exception("Could not find the 'Input' layer top named 'data'!");
            }

            if ((m_blobClip = m_net.FindBlob("clip")) == null)
            {
                throw new Exception("Could not find the 'Input' layer top named 'clip'!");
            }

            Layer <T> layer = m_net.FindLastLayer(LayerParameter.LayerType.INNERPRODUCT);

            m_mycaffe.Log.CHECK(layer != null, "Could not find an ending INNERPRODUCT layer!");

            m_nVocabSize = (int)layer.layer_param.inner_product_param.num_output;
            if (rgVocabulary != null)
            {
                m_mycaffe.Log.CHECK_EQ(m_nVocabSize, rgVocabulary.Count, "The vocabulary count and last inner product output count should match!");
            }

            if (m_lstmType == LayerParameter.LayerType.LSTM)
            {
                m_nSequenceLength = m_blobData.shape(0);
                m_nBatchSize      = m_blobData.shape(1);
            }
            else
            {
                m_nBatchSize      = (int)lstmSimpleLayer.layer_param.lstm_simple_param.batch_size;
                m_nSequenceLength = m_blobData.shape(0) / m_nBatchSize;

                if (phase == Phase.RUN)
                {
                    m_nBatchSize = 1;

                    List <int> rgNewShape = new List <int>()
                    {
                        m_nSequenceLength, 1
                    };
                    m_blobData.Reshape(rgNewShape);
                    m_blobClip.Reshape(rgNewShape);
                    m_net.Reshape();
                }
            }

            m_mycaffe.Log.CHECK_EQ(m_blobData.count(), m_blobClip.count(), "The data and clip blobs must have the same count!");

            m_rgDataInput = new T[m_nSequenceLength * m_nBatchSize];

            T[] rgClipInput = new T[m_nSequenceLength * m_nBatchSize];
            m_tZero = (T)Convert.ChangeType(0, typeof(T));
            m_tOne  = (T)Convert.ChangeType(1, typeof(T));

            for (int i = 0; i < rgClipInput.Length; i++)
            {
                if (m_lstmType == LayerParameter.LayerType.LSTM)
                {
                    rgClipInput[i] = (i < m_nBatchSize) ? m_tZero : m_tOne;
                }
                else
                {
                    rgClipInput[i] = (i % m_nSequenceLength == 0) ? m_tZero : m_tOne;
                }
            }

            m_blobClip.mutable_cpu_data = rgClipInput;

            if (phase != Phase.RUN)
            {
                m_solver                      = mycaffe.GetInternalSolver();
                m_solver.OnStart             += m_solver_OnStart;
                m_solver.OnTestStart         += m_solver_OnTestStart;
                m_solver.OnTestingIteration  += m_solver_OnTestingIteration;
                m_solver.OnTrainingIteration += m_solver_OnTrainingIteration;

                if ((m_blobLabel = m_net.FindBlob("label")) == null)
                {
                    throw new Exception("Could not find the 'Input' layer top named 'label'!");
                }

                m_rgLabelInput = new T[m_nSequenceLength * m_nBatchSize];
                m_mycaffe.Log.CHECK_EQ(m_blobData.count(), m_blobLabel.count(), "The data and label blobs must have the same count!");
            }
        }
Code example #8
        /// <summary>
        /// Create the LeNet_train_test prototxt programmatically.
        /// </summary>
        /// <param name="strDataName">Specifies the dataset name.</param>
        /// <param name="nBatchSize">Specifies the batch size.</param>
        /// <returns>The model descriptor is returned as text.</returns>
        private string create_model_descriptor_programmatically(string strDataName, int nBatchSize, LayerParameter.LayerType inputType)
        {
            if (!verifyInputType(inputType))
            {
                throw new Exception("The input type " + inputType.ToString() + " is not supported by this sample.");
            }

            NetParameter net_param = new NetParameter();

            net_param.name = "LeNet";

            if (inputType == LayerParameter.LayerType.INPUT)
            {
                LayerParameter input_param_train = new LayerParameter(LayerParameter.LayerType.INPUT);
                input_param_train.name = strDataName;
                input_param_train.top.Add("data");
                input_param_train.top.Add("label");
                input_param_train.include.Add(new NetStateRule(Phase.TRAIN));
                input_param_train.transform_param       = new TransformationParameter();
                input_param_train.transform_param.scale = 1.0 / 256.0;
                input_param_train.input_param.shape     = new List <BlobShape>()
                {
                    new BlobShape(nBatchSize, 1, 28, 28),   // data (the images)
                    new BlobShape(nBatchSize, 1, 1, 1)
                };                                          // label
                net_param.layer.Add(input_param_train);

                LayerParameter input_param_test = new LayerParameter(LayerParameter.LayerType.INPUT);
                input_param_test.name = strDataName;
                input_param_test.top.Add("data");
                input_param_test.top.Add("label");
                input_param_test.include.Add(new NetStateRule(Phase.TEST));
                input_param_test.transform_param       = new TransformationParameter();
                input_param_test.transform_param.scale = 1.0 / 256.0;
                input_param_test.input_param.shape     = new List <BlobShape>()
                {
                    new BlobShape(nBatchSize, 1, 28, 28),   // data (the images)
                    new BlobShape(nBatchSize, 1, 1, 1)
                };                                          // label
                net_param.layer.Add(input_param_test);
            }
            else if (inputType == LayerParameter.LayerType.IMAGE_DATA)
            {
                LayerParameter input_param_train = new LayerParameter(LayerParameter.LayerType.IMAGE_DATA);
                input_param_train.name = strDataName;
                input_param_train.top.Add("data");
                input_param_train.top.Add("label");
                input_param_train.include.Add(new NetStateRule(Phase.TRAIN));
                input_param_train.transform_param           = new TransformationParameter();
                input_param_train.transform_param.scale     = 1.0 / 256.0;
                input_param_train.data_param.batch_size     = (uint)nBatchSize;
                input_param_train.data_param.source         = m_strImageDirTraining + "\\file_list.txt";
                input_param_train.image_data_param.is_color = false;
                net_param.layer.Add(input_param_train);

                LayerParameter input_param_test = new LayerParameter(LayerParameter.LayerType.IMAGE_DATA);
                input_param_test.name = strDataName;
                input_param_test.top.Add("data");
                input_param_test.top.Add("label");
                input_param_test.include.Add(new NetStateRule(Phase.TEST));
                input_param_test.transform_param           = new TransformationParameter();
                input_param_test.transform_param.scale     = 1.0 / 256.0;
                input_param_test.data_param.batch_size     = (uint)nBatchSize;
                input_param_test.data_param.source         = m_strImageDirTesting + "\\file_list.txt";
                input_param_test.image_data_param.is_color = false;
                net_param.layer.Add(input_param_test);
            }

            LayerParameter conv1 = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);

            conv1.name = "conv1";
            conv1.bottom.Add("data");
            conv1.top.Add("conv1");
            conv1.parameters.Add(new ParamSpec(1, 2));
            conv1.convolution_param.num_output = 20;
            conv1.convolution_param.kernel_size.Add(5);
            conv1.convolution_param.stride.Add(1);
            conv1.convolution_param.weight_filler = new FillerParameter("xavier");
            conv1.convolution_param.bias_filler   = new FillerParameter("constant");
            net_param.layer.Add(conv1);

            LayerParameter pool1 = new LayerParameter(LayerParameter.LayerType.POOLING);

            pool1.name = "pool1";
            pool1.bottom.Add("conv1");
            pool1.top.Add("pool1");
            pool1.pooling_param.pool = PoolingParameter.PoolingMethod.MAX;
            pool1.pooling_param.kernel_size.Add(2);
            pool1.pooling_param.stride.Add(2);
            net_param.layer.Add(pool1);

            LayerParameter conv2 = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);

            conv2.name = "conv2";
            conv2.bottom.Add("pool1");
            conv2.top.Add("conv2");
            conv2.parameters.Add(new ParamSpec(1, 2));
            conv2.convolution_param.num_output = 50;
            conv2.convolution_param.kernel_size.Add(5);
            conv2.convolution_param.stride.Add(1);
            conv2.convolution_param.weight_filler = new FillerParameter("xavier");
            conv2.convolution_param.bias_filler   = new FillerParameter("constant");
            net_param.layer.Add(conv2);

            LayerParameter pool2 = new LayerParameter(LayerParameter.LayerType.POOLING);

            pool2.name = "pool2";
            pool2.bottom.Add("conv2");
            pool2.top.Add("pool2");
            pool2.pooling_param.pool = PoolingParameter.PoolingMethod.MAX;
            pool2.pooling_param.kernel_size.Add(2);
            pool2.pooling_param.stride.Add(2);
            net_param.layer.Add(pool2);

            LayerParameter ip1 = new LayerParameter(LayerParameter.LayerType.INNERPRODUCT);

            ip1.name = "ip1";
            ip1.bottom.Add("pool2");
            ip1.top.Add("ip1");
            ip1.parameters.Add(new ParamSpec(1, 2));
            ip1.inner_product_param.num_output    = 500;
            ip1.inner_product_param.weight_filler = new FillerParameter("xavier");
            ip1.inner_product_param.bias_filler   = new FillerParameter("constant");
            net_param.layer.Add(ip1);

            LayerParameter relu1 = new LayerParameter(LayerParameter.LayerType.RELU);

            relu1.name = "relu1";
            relu1.bottom.Add("ip1");
            relu1.top.Add("ip1"); // inline.
            net_param.layer.Add(relu1);

            LayerParameter ip2 = new LayerParameter(LayerParameter.LayerType.INNERPRODUCT);

            ip2.name = "ip2";
            ip2.bottom.Add("ip1");
            ip2.top.Add("ip2");
            ip2.parameters.Add(new ParamSpec(1, 2));
            ip2.inner_product_param.num_output    = 10;
            ip2.inner_product_param.weight_filler = new FillerParameter("xavier");
            ip2.inner_product_param.bias_filler   = new FillerParameter("constant");
            net_param.layer.Add(ip2);

            LayerParameter accuracy = new LayerParameter(LayerParameter.LayerType.ACCURACY);

            accuracy.name = "accuracy";
            accuracy.bottom.Add("ip2");
            accuracy.bottom.Add("label");
            accuracy.top.Add("accuracy");
            accuracy.include.Add(new NetStateRule(Phase.TEST));
            net_param.layer.Add(accuracy);

            LayerParameter loss = new LayerParameter(LayerParameter.LayerType.SOFTMAXWITH_LOSS);

            loss.name = "loss";
            loss.bottom.Add("ip2");
            loss.bottom.Add("label");
            loss.top.Add("loss");
            net_param.layer.Add(loss);

            // Convert model to text descriptor.
            RawProto proto = net_param.ToProto("root");

            return(proto.ToString());
        }
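A call-site sketch (the dataset name, batch size, and use of Console are hypothetical):

        // Hypothetical: build the LeNet descriptor for a 64-image batch using a
        // simple INPUT layer, then inspect the generated prototxt text.
        string strModel = create_model_descriptor_programmatically("MNIST", 64, LayerParameter.LayerType.INPUT);
        Console.WriteLine(strModel);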
Code example #9
        /// <summary>
        /// Add octave convolution, batch-norm, and ReLU layers.
        /// </summary>
        /// <param name="strInputLayer">Specifies the first input layer.</param>
        /// <param name="strOutputLayer">Specifies the output layer.</param>
        /// <param name="nNumOutput">Specifies the number of output.</param>
        /// <param name="nKernelSize">Specifies the kernel size.</param>
        /// <param name="nPad">Specifies the pad.</param>
        /// <param name="nStride">Specifies the stride.</param>
        /// <param name="dfAlphaIn">Specifies the alpha in.</param>
        /// <param name="dfAlphaOut">Specifies the alpha out.</param>
        /// <param name="bUseBias">Optionally, specifies to use bias (default = true).</param>
        /// <param name="strConvPrefix">Optionally, specifies the convolution layer name prefix (default = "").</param>
        /// <param name="strConvPostfix">Optionally, specifies the convolution layer name postfix (default = "").</param>
        /// <param name="strBnPrefix">Optionally, specifies the batch-norm layer name prefix (default = "").</param>
        /// <param name="strBnPostfix">Optionally, specifies the batch-norm layer name postfix (default = "_bn").</param>
        /// <param name="strLayerPostfix">Optionally, specifies a layer name postfix (default = "").</param>
        /// <param name="phaseExclude">Optionally, specifies a phase to exclude (default = NONE).</param>
        /// <returns>The last layer added is returned.</returns>
        protected LayerParameter addOctConvBNLayer(string strInputLayer, string strOutputLayer, int nNumOutput, int nKernelSize, int nPad, int nStride, double dfAlphaIn, double dfAlphaOut, bool bUseBias = true, string strConvPrefix = "", string strConvPostfix = "", string strBnPrefix = "", string strBnPostfix = "_bn", string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
        {
            double dfLrMult     = 1;
            bool   bNamedParams = false;

            LayerParameter lastLayer1;
            string         strName = strConvPrefix + strOutputLayer + strConvPostfix;

            LayerParameter.LayerType type      = LayerParameter.LayerType.CONVOLUTION_OCTAVE;
            LayerParameter           convLayer = new LayerParameter(type);

            convLayer.convolution_param.weight_filler = new FillerParameter("xavier");
            convLayer.convolution_param.bias_filler   = new FillerParameter("constant", 0);
            convLayer.convolution_param.bias_term     = bUseBias;
            convLayer.name = strName + strLayerPostfix;
            convLayer.convolution_param.kernel_size.Add((uint)nKernelSize);
            convLayer.convolution_param.pad.Add((uint)nPad);
            convLayer.convolution_param.stride.Add((uint)nStride);
            convLayer.convolution_param.dilation.Add((uint)1);
            convLayer.convolution_param.num_output = (uint)nNumOutput;
            convLayer.top.Add(convLayer.name);

            convLayer.convolution_octave_param.alpha_in  = dfAlphaIn;
            convLayer.convolution_octave_param.alpha_out = dfAlphaOut;

            if (dfAlphaOut > 0)
            {
                convLayer.top.Add(convLayer.name + "_l");
            }

            addExclusion(convLayer, phaseExclude);

            // Set up for the BatchNorm layer that follows: switch the weight
            // filler to gaussian and drop the conv bias (batch-norm adds its own shift).
            convLayer.parameters.Add(new ParamSpec(dfLrMult, 1.0, (bNamedParams) ? strName + "_w" : null));
            convLayer.convolution_param.weight_filler = new FillerParameter("gaussian", 0, 0, 0.01);
            convLayer.convolution_param.bias_term     = false;

            LayerParameter bnLayer = new LayerParameter(LayerParameter.LayerType.BATCHNORM);

            strName      = strBnPrefix + strOutputLayer + strBnPostfix;
            bnLayer.name = strName + strLayerPostfix;
            bnLayer.batch_norm_param.eps = 0.001;
            bnLayer.batch_norm_param.moving_average_fraction = 0.999;
            bnLayer.batch_norm_param.use_global_stats        = false;
            bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w1" : null));
            bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w2" : null));
            bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w3" : null));
            bnLayer.top.Add(bnLayer.name);
            addExclusion(bnLayer, phaseExclude);

            string strInputLayer2 = null;

            if (dfAlphaIn > 0)
            {
                strInputLayer2 = strInputLayer + "_l";
            }

            lastLayer1 = connectAndAddLayer(strInputLayer, convLayer, strInputLayer2);
            lastLayer1 = connectAndAddLayer(lastLayer1, bnLayer, true, true);

            LayerParameter reluLayer = new LayerParameter(LayerParameter.LayerType.RELU);

            reluLayer.name = convLayer.name + "_relu";
            addExclusion(reluLayer, phaseExclude);
            lastLayer1 = connectAndAddLayer(lastLayer1, reluLayer, true, true);

            if (dfAlphaOut > 0)
            {
                bnLayer      = new LayerParameter(LayerParameter.LayerType.BATCHNORM);
                strName      = strBnPrefix + strOutputLayer + strBnPostfix;
                bnLayer.name = strName + strLayerPostfix + "_l";
                bnLayer.batch_norm_param.eps = 0.001;
                bnLayer.batch_norm_param.moving_average_fraction = 0.999;
                bnLayer.batch_norm_param.use_global_stats        = false;
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w1" : null));
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w2" : null));
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w3" : null));
                bnLayer.top.Add(bnLayer.name + "_l");
                addExclusion(bnLayer, phaseExclude);

                LayerParameter lastLayer2 = connectAndAddLayer(convLayer, bnLayer, true, true, 1);

                reluLayer      = new LayerParameter(LayerParameter.LayerType.RELU);
                reluLayer.name = convLayer.name + "_relu_l";
                addExclusion(reluLayer, phaseExclude);
                connectAndAddLayer(lastLayer2, reluLayer, true, true);
            }

            return(convLayer);
        }
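A call-site sketch (all names and values hypothetical; an alpha_in of 0 marks a first block with no incoming low-frequency path, while an alpha_out of 0.25 routes a quarter of the outputs through it):

            // Hypothetical: add a 3x3, 64-output octave conv + batch-norm + ReLU
            // block on top of the "data" input.
            LayerParameter lastLayer = addOctConvBNLayer("data", "conv1", 64, 3, 1, 1, 0.0, 0.25);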