Example #1
        public void TestCreateTrainingModel()
        {
            ModelBuilder builder = create();

            NetParameter net_param = builder.CreateModel();
            RawProto     proto     = net_param.ToProto("root");
            string       strNet    = proto.ToString();

            RawProto     proto2     = RawProto.Parse(strNet);
            NetParameter net_param2 = NetParameter.FromProto(proto2);

            m_log.CHECK(net_param2.Compare(net_param), "The two net parameters should be the same!");

            // verify creating the model.
            SolverParameter solver      = builder.CreateSolver();
            RawProto        protoSolver = solver.ToProto("root");
            string          strSolver   = protoSolver.ToString();

            SettingsCaffe      settings  = new SettingsCaffe();
            CancelEvent        evtCancel = new CancelEvent();
            MyCaffeControl <T> mycaffe   = new MyCaffeControl <T>(settings, m_log, evtCancel);

            save(strNet, strSolver, false);

            //            mycaffe.LoadLite(Phase.TRAIN, strSolver, strNet, null);
            mycaffe.Dispose();
        }
Example #2
        /// <summary>
        /// Load the MNIST LeNet model and set its sources to the MNIST dataset (already loaded
        /// in the database using the MyCaffeTestApplication).
        /// </summary>
        /// <param name="ds">Specifies the MNIST dataset descriptor.</param>
        /// <returns>The NetParameter for the LeNet is returned.</returns>
        public NetParameter CreateMnistModel(DatasetDescriptor ds)
        {
            string       str      = System.Text.Encoding.Default.GetString(Properties.Resources.lenet_train_test);
            RawProto     proto    = RawProto.Parse(str);
            NetParameter netParam = NetParameter.FromProto(proto);

            for (int i = 0; i < netParam.layer.Count; i++)
            {
                LayerParameter layer = netParam.layer[i];

                if (layer.type == LayerParameter.LayerType.DATA)
                {
                    if (layer.include[0].phase == Phase.TRAIN)
                    {
                        layer.data_param.source = ds.TrainingSourceName;
                    }
                    else
                    {
                        layer.data_param.source = ds.TestingSourceName;
                    }
                }
            }

            return(netParam);
        }
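
For context, a minimal usage sketch of the method above, assuming the MNIST dataset was already loaded into the database (as the summary notes); the DatasetFactory and LoadDataset calls mirror Example #9.

        // Minimal usage sketch (assumes MNIST is already in the database).
        DatasetFactory    factory = new DatasetFactory();
        DatasetDescriptor ds      = factory.LoadDataset("MNIST");

        // Build the LeNet descriptor wired to the MNIST data sources,
        // then serialize it back to its prototxt text form.
        NetParameter netParam = CreateMnistModel(ds);
        string       strModel = netParam.ToProto("root").ToString();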
Example #3
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
        /// <param name="log">Specifies the output log.</param>
        /// <param name="evtCancel">Specifies the cancel event used to abort processing.</param>
        /// <param name="strModelType">Specifies the model type: 'vgg19', 'vgg16'</param>
        /// <param name="strModel">Specifies the network model to use.</param>
        /// <param name="rgWeights">Optionally, specifies the weights to use (or <i>null</i> to ignore).</param>
        /// <param name="bCaffeModel">Specifies whether or not the weights are in the caffe (<i>true</i>) or mycaffe (<i>false</i>) format.</param>
        /// <param name="solverType">Optionally, specifies the solver type to use (default = LBFGS).</param>
        /// <param name="dfLearningRate">Optionally, specifies the solver learning rate (default = 1.0).</param>
        public NeuralStyleTransfer(CudaDnn <T> cuda, Log log, CancelEvent evtCancel, string strModelType, string strModel, byte[] rgWeights, bool bCaffeModel, SolverParameter.SolverType solverType = SolverParameter.SolverType.LBFGS, double dfLearningRate = 1.0)
        {
            m_cuda           = cuda;
            m_log            = log;
            m_evtCancel      = evtCancel;
            m_rgWeights      = rgWeights;
            m_solverType     = solverType;
            m_dfLearningRate = dfLearningRate;

            if (m_evtCancel != null)
            {
                m_evtCancel.Reset();
            }

            RawProto proto = RawProto.Parse(strModel);

            m_param = NetParameter.FromProto(proto);

            add_input_layer(m_param);
            m_rgstrUsedLayers = load_layers(strModelType);
            prune(m_param, m_rgstrUsedLayers);
            add_gram_layers(m_param);

            m_transformationParam             = new TransformationParameter();
            m_transformationParam.color_order = (bCaffeModel) ? TransformationParameter.COLOR_ORDER.BGR : TransformationParameter.COLOR_ORDER.RGB;
            m_transformationParam.scale       = 1.0;
            m_transformationParam.mean_value  = m_rgMeanValues;

            m_persist = new PersistCaffe <T>(m_log, false);
        }
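
A hedged construction sketch for the constructor above. The 'float' base type, the origin of strModel and rgWeights, and the variable names are assumptions for illustration; only the parameter order comes from the signature shown.

        // Illustrative only: strModel holds the VGG19 prototxt text and
        // rgWeights the caffe-format weights, both loaded by the caller.
        NeuralStyleTransfer<float> nst = new NeuralStyleTransfer<float>(
            cuda, log, evtCancel, "vgg19", strModel, rgWeights, true);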
Example #4
        /// <summary>
        /// The ResizeModel method gives the custom trainer the opportunity to resize the model if needed.
        /// </summary>
        /// <param name="strModel">Specifies the model descriptor.</param>
        /// <param name="rgVocabulary">Specifies the vocabulary.</param>
        /// <param name="log">Specifies the output log.</param>
        /// <returns>A new model descriptor is returned (or the same 'strModel' if no changes were made).</returns>
        /// <remarks>Note, this method is called after PreloadData.</remarks>
        string IXMyCaffeCustomTrainerRNN.ResizeModel(Log log, string strModel, BucketCollection rgVocabulary)
        {
            if (rgVocabulary == null || rgVocabulary.Count == 0)
            {
                return(strModel);
            }

            int                   nVocabCount  = rgVocabulary.Count;
            NetParameter          p            = NetParameter.FromProto(RawProto.Parse(strModel));
            string                strEmbedName = "";
            EmbedParameter        embed        = null;
            string                strIpName    = "";
            InnerProductParameter ip           = null;

            foreach (LayerParameter layer in p.layer)
            {
                if (layer.type == LayerParameter.LayerType.EMBED)
                {
                    strEmbedName = layer.name;
                    embed        = layer.embed_param;
                }
                else if (layer.type == LayerParameter.LayerType.INNERPRODUCT)
                {
                    strIpName = layer.name;
                    ip        = layer.inner_product_param;
                }
            }

            if (embed != null)
            {
                if (embed.input_dim != (uint)nVocabCount)
                {
                    log.WriteLine("WARNING: Embed layer '" + strEmbedName + "' input dim changed from " + embed.input_dim.ToString() + " to " + nVocabCount.ToString() + " to accomodate for the vocabulary count.");
                    embed.input_dim = (uint)nVocabCount;
                }
            }

            if (ip != null && ip.num_output != (uint)nVocabCount)
            {
                log.WriteLine("WARNING: InnerProduct layer '" + strIpName + "' num_output changed from " + ip.num_output.ToString() + " to " + nVocabCount.ToString() + " to accommodate the vocabulary count.");
                ip.num_output = (uint)nVocabCount;
            }

            m_rgVocabulary = rgVocabulary;

            RawProto proto = p.ToProto("root");

            return(proto.ToString());
        }
Example #5
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
        /// <param name="log">Specifies the output log.</param>
        /// <param name="evtCancel">Specifies the cancel event used to abort processing.</param>
        /// <param name="rgLayers">Specifies the layers along with their style and content weights.</param>
        /// <param name="strModelDesc">Specifies the network model descriptor to use.</param>
        /// <param name="rgWeights">Optionally, specifies the weights to use (or <i>null</i> to ignore).</param>
        /// <param name="bCaffeModel">Specifies whether or not the weights are in the caffe (<i>true</i>) or mycaffe (<i>false</i>) format.</param>
        /// <param name="solverType">Optionally, specifies the solver type to use (default = LBFGS).</param>
        /// <param name="dfLearningRate">Optionally, specifies the solver learning rate (default = 1.0).</param>
        /// <param name="nMaxImageSize">Optionally, specifies the default maximum image size (default = 840).</param>
        /// <param name="nLBFGSCorrections">Optionally, specifies the LBFGS Corrections (only used when using the LBFGS solver, default = 100).</param>
        /// <param name="netShare">Optionally, specifies a net to share.</param>
        public NeuralStyleTransfer(CudaDnn <T> cuda, Log log, CancelEvent evtCancel, Dictionary <string, Tuple <double, double> > rgLayers, string strModelDesc, byte[] rgWeights, bool bCaffeModel, SolverParameter.SolverType solverType = SolverParameter.SolverType.LBFGS, double dfLearningRate = 1.0, int nMaxImageSize = 840, int nLBFGSCorrections = 100, Net <T> netShare = null)
        {
            m_log                  = log;
            m_evtCancel            = evtCancel;
            m_rgWeights            = rgWeights;
            m_solverType           = solverType;
            m_dfLearningRate       = dfLearningRate;
            m_nDefaultMaxImageSize = nMaxImageSize;
            m_nLBFGSCorrections    = nLBFGSCorrections;

            setupNetShare(netShare, cuda);

            if (m_evtCancel != null)
            {
                m_evtCancel.Reset();
            }

            RawProto proto = RawProto.Parse(strModelDesc);

            m_param = NetParameter.FromProto(proto);

            Dictionary <string, double> rgStyle   = new Dictionary <string, double>();
            Dictionary <string, double> rgContent = new Dictionary <string, double>();

            foreach (KeyValuePair <string, Tuple <double, double> > kv in rgLayers)
            {
                if (kv.Value.Item1 != 0)
                {
                    rgStyle.Add(kv.Key, kv.Value.Item1);
                }

                if (kv.Value.Item2 != 0)
                {
                    rgContent.Add(kv.Key, kv.Value.Item2);
                }
            }

            add_input_layer(m_param);
            m_rgstrUsedLayers = load_layers(rgStyle, rgContent);
            prune(m_param, m_rgstrUsedLayers);
            add_gram_layers(m_param);

            m_transformationParam             = new TransformationParameter();
            m_transformationParam.color_order = (bCaffeModel) ? TransformationParameter.COLOR_ORDER.BGR : TransformationParameter.COLOR_ORDER.RGB;
            m_transformationParam.scale       = 1.0;
            m_transformationParam.mean_value  = m_rgMeanValues;

            m_persist = new PersistCaffe <T>(m_log, false);
        }
Example #6
        public void TestCreateDeployModel()
        {
            ModelBuilder builder = create();

            NetParameter net_param = builder.CreateDeployModel();
            RawProto     proto     = net_param.ToProto("root");
            string       strNet    = proto.ToString();

            RawProto     proto2     = RawProto.Parse(strNet);
            NetParameter net_param2 = NetParameter.FromProto(proto2);

            m_log.CHECK(net_param2.Compare(net_param), "The two net parameters should be the same!");

            // verify creating the model.
            SettingsCaffe      settings  = new SettingsCaffe();
            CancelEvent        evtCancel = new CancelEvent();
            MyCaffeControl <T> mycaffe   = new MyCaffeControl <T>(settings, m_log, evtCancel);

            save(strNet, null, true);

            //            mycaffe.LoadToRun(strNet, null, new BlobShape(1, 3, 300, 300));
            mycaffe.Dispose();
        }
Example #7
        /// <summary>
        /// Replace each Data input layer with an Input layer (the code below creates INPUT layers, not MemoryData layers).
        /// </summary>
        /// <param name="strModel">Specifies the model descriptor to change.</param>
        /// <param name="nBatchSize">Specifies the batch size.</param>
        /// <returns>The new model descriptor with the Input layer is returned.</returns>
        private string fixup_model(string strModel, int nBatchSize)
        {
            RawProto     proto     = RawProto.Parse(strModel);
            NetParameter net_param = NetParameter.FromProto(proto);

            for (int i = 0; i < net_param.layer.Count; i++)
            {
                if (net_param.layer[i].type == LayerParameter.LayerType.DATA)
                {
                    LayerParameter layer = new LayerParameter(LayerParameter.LayerType.INPUT);
                    layer.name    = net_param.layer[i].name;
                    layer.top     = net_param.layer[i].top;
                    layer.bottom  = net_param.layer[i].bottom;
                    layer.include = net_param.layer[i].include;

                    layer.input_param.shape.Add(new BlobShape(nBatchSize, 1, 28, 28));
                    layer.input_param.shape.Add(new BlobShape(nBatchSize, 1, 1, 1));
                    net_param.layer[i] = layer;
                }
            }

            return(net_param.ToProto("root").ToString());
        }
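
An illustrative call site for the helper above; the batch size of 64 and the strModel variable are assumptions.

        // Hypothetical call: swap each DATA layer for an INPUT layer shaped
        // for 64 single-channel 28x28 images plus a 64x1x1x1 label blob.
        string strRunModel = fixup_model(strModel, 64);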
Example #8
        /// <summary>
        /// The ResizeModel method gives the custom trainer the opportunity to resize the model if needed.
        /// </summary>
        /// <param name="strModel">Specifies the model descriptor.</param>
        /// <param name="rgVocabulary">Specifies the vocabulary.</param>
        /// <returns>A new model descriptor is returned (or the same 'strModel' if no changes were made).</returns>
        /// <remarks>Note, this method is called after PreloadData.</remarks>
        public string ResizeModel(string strModel, BucketCollection rgVocabulary)
        {
            if (rgVocabulary == null || rgVocabulary.Count == 0)
            {
                return(strModel);
            }

            int                   nVocabCount = rgVocabulary.Count;
            NetParameter          p           = NetParameter.FromProto(RawProto.Parse(strModel));
            EmbedParameter        embed       = null;
            InnerProductParameter ip          = null;

            foreach (LayerParameter layer in p.layer)
            {
                if (layer.type == LayerParameter.LayerType.EMBED)
                {
                    embed = layer.embed_param;
                }
                else if (layer.type == LayerParameter.LayerType.INNERPRODUCT)
                {
                    ip = layer.inner_product_param;
                }
            }

            if (embed != null)
            {
                embed.input_dim = (uint)nVocabCount;
            }

            if (ip != null)
            {
                ip.num_output = (uint)nVocabCount;
            }

            m_rgVocabulary = rgVocabulary;

            RawProto proto = p.ToProto("root");

            return(proto.ToString());
        }
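
A hedged sketch of calling the method above. Where rgVocabulary comes from is outside these examples (it is produced during data preloading, per the remarks), so it is taken as given here.

        // Illustrative flow: resize the descriptor to the vocabulary, then
        // re-parse it to confirm the result is still a valid NetParameter.
        string       strResized = ResizeModel(strModel, rgVocabulary);
        NetParameter check      = NetParameter.FromProto(RawProto.Parse(strResized));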
Example #9
        static void Main(string[] args)
        {
            if (!sqlCheck())
            {
                return;
            }

            Log log = new Log("test");

            log.OnWriteLine += Log_OnWriteLine;
            CancelEvent   cancel   = new CancelEvent();
            SettingsCaffe settings = new SettingsCaffe();

            // Load all images into memory before training.
            settings.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
            // Use GPU ID = 0
            settings.GpuIds = "0";

            // Load the descriptors from their respective files
            string strSolver = load_file("C:\\ProgramData\\MyCaffe\\test_data\\models\\siamese\\mnist\\solver.prototxt");
            string strModel  = load_file("C:\\ProgramData\\MyCaffe\\test_data\\models\\siamese\\mnist\\train_val.prototxt");

            RawProto       proto     = RawProto.Parse(strModel);
            NetParameter   net_param = NetParameter.FromProto(proto);
            LayerParameter layer     = net_param.FindLayer(LayerParameter.LayerType.DECODE);

            layer.decode_param.target = DecodeParameter.TARGET.CENTROID;
            proto    = net_param.ToProto("root");
            strModel = proto.ToString();

            // Load the MNIST data descriptor.
            DatasetFactory    factory = new DatasetFactory();
            DatasetDescriptor ds      = factory.LoadDataset("MNIST");

            // Create a test project with the dataset and descriptors
            ProjectEx project = new ProjectEx("Test");

            project.SetDataset(ds);
            project.ModelDescription  = strModel;
            project.SolverDescription = strSolver;

            // Create the MyCaffeControl (with the 'float' base type)
            string strCudaPath             = "C:\\Program Files\\SignalPop\\MyCaffe\\cuda_11.3\\CudaDnnDll.11.3.dll";
            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, log, cancel, null, null, null, null, strCudaPath);

            // Load the project, using the TRAIN phase.
            mycaffe.Load(Phase.TRAIN, project);

            // Train the model for 4000 iterations
            // (which uses the internal solver and internal training net)
            int nIterations = 4000;

            mycaffe.Train(nIterations);

            // Test the model for 100 iterations
            // (which uses the internal testing net)
            nIterations = 100;
            double dfAccuracy = mycaffe.Test(nIterations);

            // Report the testing accuracy.
            log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

            mycaffe.Dispose();

            Console.Write("Press any key...");
            Console.ReadKey();
        }