Code example #1
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="mycaffe">Specifies the MyCaffeControl to use for learning and prediction.</param>
 /// <param name="properties">Specifies the property set containing the key/value pairs of property settings.</param>
 /// <param name="random">Specifies a Random number generator used for random selection.</param>
 /// <param name="icallback">Specifies the callback for parent notifications and queries.</param>
 public TrainerNoisyDqn(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallback icallback)
 {
     m_icallback  = icallback;
     m_mycaffe    = mycaffe;
     m_properties = properties;
     m_random     = random;
 }
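
For orientation, here is a minimal construction sketch for this trainer. It is only a sketch: m_log stands in for an existing Log, myCallback for an existing IxTrainerCallback implementation, and it assumes that PropertySet accepts a semicolon-delimited key=value string and that CryptoRandom has a parameterless constructor, none of which is shown above.

 // Construction sketch (placeholder names and assumed constructors noted below).
 SettingsCaffe settings = new SettingsCaffe();
 CancelEvent evtCancel = new CancelEvent();
 MyCaffeControl<float> mycaffe = new MyCaffeControl<float>(settings, m_log, evtCancel);

 // Assumed: PropertySet parses a semicolon-delimited key=value list.
 PropertySet properties = new PropertySet("Gamma=0.99;MiniBatch=1");
 // Assumed: CryptoRandom exposes a parameterless constructor.
 CryptoRandom random = new CryptoRandom();

 // 'myCallback' is a placeholder for the caller's IxTrainerCallback implementation.
 TrainerNoisyDqn<float> trainer = new TrainerNoisyDqn<float>(mycaffe, properties, random, myCallback);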
Code example #2
        public void TestCreateTrainingModel()
        {
            ModelBuilder builder = create();

            NetParameter net_param = builder.CreateModel();
            RawProto     proto     = net_param.ToProto("root");
            string       strNet    = proto.ToString();

            RawProto     proto2     = RawProto.Parse(strNet);
            NetParameter net_param2 = NetParameter.FromProto(proto2);

            m_log.CHECK(net_param2.Compare(net_param), "The two net parameters should be the same!");

            // verify creating the model.
            SolverParameter solver      = builder.CreateSolver();
            RawProto        protoSolver = solver.ToProto("root");
            string          strSolver   = protoSolver.ToString();

            SettingsCaffe      settings  = new SettingsCaffe();
            CancelEvent        evtCancel = new CancelEvent();
            MyCaffeControl <T> mycaffe   = new MyCaffeControl <T>(settings, m_log, evtCancel);

            save(strNet, strSolver, false);

            //            mycaffe.LoadLite(Phase.TRAIN, strSolver, strNet, null);
            mycaffe.Dispose();
        }
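
The test round-trips the net descriptor through RawProto but only serializes the solver descriptor. A parallel solver check could look like the sketch below; it assumes SolverParameter exposes FromProto and Compare counterparts to the NetParameter methods used above, which this snippet does not confirm.

            // Sketch: round-trip the solver descriptor the same way as the net.
            // SolverParameter.FromProto and SolverParameter.Compare are assumed to
            // mirror the NetParameter API used above.
            RawProto        protoSolver2 = RawProto.Parse(strSolver);
            SolverParameter solver2      = SolverParameter.FromProto(protoSolver2);

            m_log.CHECK(solver2.Compare(solver), "The two solver parameters should be the same!");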
Code example #3
        public void TestAlexNetCiFar()
        {
            CancelEvent   evtCancel = new CancelEvent();
            SettingsCaffe settings  = new SettingsCaffe();

            settings.ImageDbLoadMethod          = IMAGEDB_LOAD_METHOD.LOAD_ON_DEMAND;
            settings.EnableRandomInputSelection = true;
            settings.GpuIds = getGpuIds();

            Trace.WriteLine("Running TestAlexNetCiFar on GPU " + settings.GpuIds);

            ProjectEx          p    = getProject();
            MyCaffeControl <T> ctrl = new MyCaffeControl <T>(settings, m_log, evtCancel);

            try
            {
                ctrl.Load(Phase.TRAIN, p);
                ctrl.OnTrainingIteration += ctrl_OnTrainingIteration;
                ctrl.Train();
            }
            catch (Exception excpt)
            {
                throw;
            }
            finally
            {
                ctrl.Dispose();
            }
        }
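
The test subscribes ctrl_OnTrainingIteration but the handler itself is not shown. Below is a minimal sketch of such a handler; the TrainingIterationArgs<T> argument type and its Iteration and Loss properties are assumptions, not taken from the snippet above.

        // Sketch of the training-iteration handler wired up above. The argument
        // type and its property names are assumptions.
        private void ctrl_OnTrainingIteration(object sender, TrainingIterationArgs<T> e)
        {
            Trace.WriteLine("Iteration " + e.Iteration.ToString() + " - Loss = " + e.Loss.ToString());
        }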
Code example #4
        /// <summary>
        /// Optionally overridden to return a new type of trainer.
        /// </summary>
        /// <remarks>
        /// Override this method when using the MyCaffeControl that uses the <i>double</i> base type.
        /// </remarks>
        /// <param name="caffe">Specifies the MyCaffeControl used.</param>
        /// <returns>The IxTrainer interface implemented by the new trainer is returned.</returns>
        protected virtual IxTrainerRL create_trainerF(Component caffe)
        {
            MyCaffeControl <float> mycaffe = caffe as MyCaffeControl <float>;

            m_nProjectID = mycaffe.CurrentProject.OriginalID;
            m_dsCi       = mycaffe.DatasetConnectInfo;

            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("max_iter"), out m_nItertions);
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("snapshot"), out m_nSnapshot);

            switch (m_trainerType)
            {
            case TRAINER_TYPE.PG_SIMPLE:
                return(new pg.simple.TrainerPG <float>(mycaffe, m_properties, m_random, this));

            case TRAINER_TYPE.PG_ST:
                return(new pg.st.TrainerPG <float>(mycaffe, m_properties, m_random, this));

            case TRAINER_TYPE.PG_MT:
                return(new pg.mt.TrainerPG <float>(mycaffe, m_properties, m_random, this));

            default:
                throw new Exception("Unknown trainer type '" + m_trainerType.ToString() + "'!");
            }
        }
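
As the remarks note, a derived trainer can override this factory to return a different trainer type. The sketch below always returns the multi-threaded policy-gradient trainer; the base class name MyCaffeTrainerRL and the accessibility of m_properties and m_random are assumptions based on the members used above.

        // Sketch: a derived trainer that always returns the multi-threaded
        // policy-gradient trainer. 'MyCaffeTrainerRL' and the protected members
        // used here are assumptions.
        public class PgOnlyTrainer : MyCaffeTrainerRL
        {
            protected override IxTrainerRL create_trainerF(Component caffe)
            {
                MyCaffeControl<float> mycaffe = caffe as MyCaffeControl<float>;

                return new pg.mt.TrainerPG<float>(mycaffe, m_properties, m_random, this);
            }
        }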
Code example #5
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="mycaffe">Specifies the instance of MyCaffe associated with the open project - when using more than one Brain, this is the master project.</param>
        /// <param name="properties">Specifies the properties passed into the trainer.</param>
        /// <param name="random">Specifies the random number generator used.</param>
        /// <param name="phase">Specifies the phase under which to run.</param>
        public Brain(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_mycaffe    = mycaffe;
            m_solver     = mycaffe.GetInternalSolver();
            m_netOutput  = mycaffe.GetInternalNet(phase);
            m_netTarget  = new Net <T>(m_mycaffe.Cuda, m_mycaffe.Log, m_netOutput.net_param, m_mycaffe.CancelEvent, null, phase);
            m_properties = properties;
            m_random     = random;

            Blob <T> data = m_netOutput.blob_by_name("data");

            if (data == null)
            {
                m_mycaffe.Log.FAIL("Missing the expected input 'data' blob!");
            }

            m_nBatchSize = data.num;

            Blob <T> logits = m_netOutput.blob_by_name("logits");

            if (logits == null)
            {
                m_mycaffe.Log.FAIL("Missing the expected input 'logits' blob!");
            }

            m_nActionCount = logits.channels;

            m_transformer        = m_mycaffe.DataTransformer;
            m_blobActions        = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log, false);
            m_blobQValue         = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
            m_blobNextQValue     = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
            m_blobExpectedQValue = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
            m_blobDone           = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log, false);
            m_blobLoss           = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
            m_blobWeights        = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log, false);

            m_fGamma = (float)properties.GetPropertyAsDouble("Gamma", m_fGamma);

            m_memLoss = m_netOutput.FindLastLayer(LayerParameter.LayerType.MEMORY_LOSS) as MemoryLossLayer <T>;
            if (m_memLoss == null)
            {
                m_mycaffe.Log.FAIL("Missing the expected MEMORY_LOSS layer!");
            }

            double? dfRate = mycaffe.CurrentProject.GetSolverSettingAsNumeric("base_lr");

            if (dfRate.HasValue)
            {
                m_dfLearningRate = dfRate.Value;
            }

            m_nMiniBatch = m_properties.GetPropertyAsInt("MiniBatch", m_nMiniBatch);
            m_bUseAcceleratedTraining = properties.GetPropertyAsBool("UseAcceleratedTraining", false);

            if (m_nMiniBatch > 1)
            {
                m_colAccumulatedGradients = m_netOutput.learnable_parameters.Clone();
                m_colAccumulatedGradients.SetDiff(0);
            }
        }
Code example #6
File: TrainerPG.cs  Project: SuperDaveWhite/MyCaffe
        public Brain(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_mycaffe    = mycaffe;
            m_net        = mycaffe.GetInternalNet(phase);
            m_solver     = mycaffe.GetInternalSolver();
            m_properties = properties;
            m_random     = random;

            m_memData = m_net.FindLayer(LayerParameter.LayerType.MEMORYDATA, null) as MemoryDataLayer <T>;
            m_memLoss = m_net.FindLayer(LayerParameter.LayerType.MEMORY_LOSS, null) as MemoryLossLayer <T>;
            SoftmaxLayer <T> softmax = m_net.FindLayer(LayerParameter.LayerType.SOFTMAX, null) as SoftmaxLayer <T>;

            if (softmax != null)
            {
                throw new Exception("The PG.SIMPLE trainer does not support the Softmax layer, use the 'PG.ST' or 'PG.MT' trainer instead.");
            }

            if (m_memData == null)
            {
                throw new Exception("Could not find the MemoryData Layer!");
            }

            if (m_memLoss == null)
            {
                throw new Exception("Could not find the MemoryLoss Layer!");
            }

            m_memLoss.OnGetLoss += memLoss_OnGetLoss;

            m_blobDiscountedR    = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient = new Blob <T>(mycaffe.Cuda, mycaffe.Log);

            m_nMiniBatch = mycaffe.CurrentProject.GetBatchSize(phase);
        }
Code example #7
        /// <summary>
        /// Load the previously saved lstm state (saved from the trained encoder/decoder
        /// model).  The encoder cy and hy are loaded along with the ip1a, decoder and ip1
        /// layer weights.  The Encoder state cy and hy are used to initialize the Decoder's
        /// initial state which is why the Decoder uses a Clip = 1 from the start.
        /// </summary>
        /// <param name="mycaffe">Specifies the instance of the MyCaffeControl.</param>
        private void loadLstmState(MyCaffeControl <float> mycaffe)
        {
            Net <float>   net          = mycaffe.GetInternalNet(Phase.RUN);
            Layer <float> decoderLayer = net.FindLayer(LayerParameter.LayerType.LSTM, "decoder");
            Layer <float> layerIp1a    = net.FindLayer(LayerParameter.LayerType.INNERPRODUCT, "ip1a");
            Layer <float> layerIp1     = net.FindLayer(LayerParameter.LayerType.INNERPRODUCT, "ip1");

            string strFile = getWeightFileName("sin.lstm_state");

            Console.WriteLine("Loading LSTM state from '" + strFile + "'...");
            using (FileStream fs = File.Open(strFile, FileMode.Open, FileAccess.Read))
                using (BinaryReader br = new BinaryReader(fs))
                {
                    for (int i = 0; i < decoderLayer.internal_blobs.Count; i++)
                    {
                        loadBlob(br, decoderLayer.internal_blobs[i]);
                    }

                    for (int i = 0; i < decoderLayer.blobs.Count; i++)
                    {
                        loadBlob(br, decoderLayer.blobs[i]);
                    }

                    for (int i = 0; i < layerIp1a.blobs.Count; i++)
                    {
                        loadBlob(br, layerIp1a.blobs[i]);
                    }

                    for (int i = 0; i < layerIp1.blobs.Count; i++)
                    {
                        loadBlob(br, layerIp1.blobs[i]);
                    }
                }
        }
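
The loadBlob helper called above is not shown. A minimal sketch appears below; it assumes each blob was written as an item count followed by its float values, and that Blob<float> exposes a settable mutable_cpu_data array, neither of which is confirmed by this snippet.

        // Sketch: read one blob back, assuming the count-prefixed float layout
        // written by saveBlob. The mutable_cpu_data setter is an assumption
        // about the Blob<float> API.
        private void loadBlob(BinaryReader br, Blob<float> blob)
        {
            int     nCount = br.ReadInt32();
            float[] rgData = new float[nCount];

            for (int i = 0; i < nCount; i++)
            {
                rgData[i] = br.ReadSingle();
            }

            blob.mutable_cpu_data = rgData;
        }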
Code example #8
        /// <summary>
        /// Save the LSTM state (cy and hy) and associated weights from the ip1a, decoder and ip1
        /// layers.
        /// </summary>
        /// <param name="mycaffe">Specifies the instance of the MyCaffeControl used.</param>
        private void saveLstmState(MyCaffeControl <float> mycaffe)
        {
            Net <float>   net          = mycaffe.GetInternalNet(Phase.TRAIN);
            Layer <float> encoderLayer = net.FindLayer(LayerParameter.LayerType.LSTM, "encoder");
            Layer <float> decoderLayer = net.FindLayer(LayerParameter.LayerType.LSTM, "decoder");
            Layer <float> layerIp1a    = net.FindLayer(LayerParameter.LayerType.INNERPRODUCT, "ip1a");
            Layer <float> layerIp1     = net.FindLayer(LayerParameter.LayerType.INNERPRODUCT, "ip1");

            string strFile = getWeightFileName("sin.lstm_state");

            Console.WriteLine("Saving LSTM state to '" + strFile + "'...");
            using (FileStream fs = File.Create(strFile))
                using (BinaryWriter bw = new BinaryWriter(fs))
                {
                    for (int i = 0; i < encoderLayer.internal_blobs.Count; i++)
                    {
                        saveBlob(bw, encoderLayer.internal_blobs[i]);
                    }

                    for (int i = 0; i < decoderLayer.blobs.Count; i++)
                    {
                        saveBlob(bw, decoderLayer.blobs[i]);
                    }

                    for (int i = 0; i < layerIp1a.blobs.Count; i++)
                    {
                        saveBlob(bw, layerIp1a.blobs[i]);
                    }

                    for (int i = 0; i < layerIp1.blobs.Count; i++)
                    {
                        saveBlob(bw, layerIp1.blobs[i]);
                    }
                }
        }
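
The saveBlob helper is the counterpart to loadBlob and is likewise not shown. The sketch below uses the same assumed file layout (an item count followed by the float values) and assumes the blob data can be read through mutable_cpu_data.

        // Sketch: write one blob as an item count followed by its float values,
        // matching the loadBlob sketch above. Reading the values through
        // mutable_cpu_data is an assumption about the Blob<float> API.
        private void saveBlob(BinaryWriter bw, Blob<float> blob)
        {
            float[] rgData = blob.mutable_cpu_data;

            bw.Write(rgData.Length);

            for (int i = 0; i < rgData.Length; i++)
            {
                bw.Write(rgData[i]);
            }
        }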
Code example #9
 public Agent(IxTrainerCallback icallback, MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase, BucketCollection rgVocabulary, bool bUsePreloadData, string strRunProperties = null)
 {
     m_icallback  = icallback;
     m_brain      = new Brain <T>(mycaffe, properties, random, icallback as IxTrainerCallbackRNN, phase, rgVocabulary, bUsePreloadData, strRunProperties);
     m_properties = properties;
     m_random     = random;
 }
Code example #10
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="icallback">Specifies the callback used for update notifications sent to the parent.</param>
        /// <param name="mycaffe">Specifies the instance of MyCaffe with the open project.</param>
        /// <param name="properties">Specifies the properties passed into the trainer.</param>
        /// <param name="random">Specifies the random number generator used.</param>
        /// <param name="phase">Specifies the phase of the internal network to use.</param>
        public DqnAgent(IxTrainerCallback icallback, MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_icallback  = icallback;
            m_brain      = new Brain <T>(mycaffe, properties, random, phase);
            m_properties = properties;
            m_random     = random;

            m_fGamma              = (float)properties.GetPropertyAsDouble("Gamma", m_fGamma);
            m_bUseRawInput        = properties.GetPropertyAsBool("UseRawInput", m_bUseRawInput);
            m_nMaxMemory          = properties.GetPropertyAsInt("MaxMemory", m_nMaxMemory);
            m_nTrainingUpdateFreq = properties.GetPropertyAsInt("TrainingUpdateFreq", m_nTrainingUpdateFreq);
            m_nExplorationNum     = properties.GetPropertyAsInt("ExplorationNum", m_nExplorationNum);
            m_nEpsSteps           = properties.GetPropertyAsInt("EpsSteps", m_nEpsSteps);
            m_dfEpsStart          = properties.GetPropertyAsDouble("EpsStart", m_dfEpsStart);
            m_dfEpsEnd            = properties.GetPropertyAsDouble("EpsEnd", m_dfEpsEnd);
            m_dfEpsDelta          = (m_dfEpsStart - m_dfEpsEnd) / m_nEpsSteps;
            m_dfExplorationRate   = m_dfEpsStart;

            if (m_dfEpsStart < 0 || m_dfEpsStart > 1)
            {
                throw new Exception("The 'EpsStart' is out of range - please specify a real number in the range [0,1]");
            }

            if (m_dfEpsEnd < 0 || m_dfEpsEnd > 1)
            {
                throw new Exception("The 'EpsEnd' is out of range - please specify a real number in the range [0,1]");
            }

            if (m_dfEpsEnd > m_dfEpsStart)
            {
                throw new Exception("The 'EpsEnd' must be less than the 'EpsStart' value.");
            }
        }
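
The epsilon-greedy schedule above is driven entirely by the property set passed to the constructor. The sketch below shows one way those values might be supplied; the semicolon-delimited constructor string is an assumption about PropertySet that this snippet does not itself demonstrate.

            // Sketch: one possible property set for the DqnAgent (the semicolon-
            // delimited constructor string is an assumption about PropertySet).
            PropertySet properties = new PropertySet(
                "Gamma=0.99;UseRawInput=True;MaxMemory=100000;TrainingUpdateFreq=1000;" +
                "ExplorationNum=50000;EpsSteps=1000000;EpsStart=1.0;EpsEnd=0.1");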
Code example #11
        /// <summary>
        /// Run the trained model.
        /// </summary>
        /// <param name="mycaffe">Specifies the mycaffe instance running the sequence run model.</param>
        /// <param name="bw">Specifies the background worker.</param>
        /// <param name="data">Specifies the data to run the model on.</param>
        private void runModel(MyCaffeControl <float> mycaffe, BackgroundWorker bw, Data data)
        {
            Net <float>  net               = m_mycaffe.GetInternalNet(Phase.RUN);
            Blob <float> blobDecInput      = net.FindBlob("dec_input");
            Blob <float> blobDecClip       = net.FindBlob("clipD");
            Blob <float> blobIp1           = net.FindBlob("ip1");
            int          nDecInputLayerIdx = net.layer_index_by_name("dec_input_embed");

            try
            {
                Tuple <List <int>, int> input = data.GetInputData();
                List <int> rgInput            = input.Item1;
                int        nIxInput           = input.Item2;

                loadData(Phase.RUN, rgInput, nIxInput, null, 0);

                net.Forward();

                int nCount = 0;

                long lPos;
                m_mycaffe.Cuda.max(blobIp1.count(), blobIp1.gpu_data, out lPos);
                nIxInput = (int)lPos;

                string strOut = "";

                while (nIxInput != 0)
                {
                    string strWord = data.Vocabulary.IndexToWord(nIxInput);

                    if (strWord.Length == 0)
                    {
                        break;
                    }

                    strOut += strWord + " ";

                    blobDecInput.SetData(nIxInput, 0);
                    net.Forward();

                    m_mycaffe.Cuda.max(blobIp1.count(), blobIp1.gpu_data, out lPos);
                    nIxInput = (int)lPos;

                    nCount++;
                    if (nCount > 80)
                    {
                        break;
                    }

                    blobDecClip.SetData(1, 0);
                }

                m_log.WriteLine("Robot: " + strOut.Trim(), true);
            }
            catch (Exception excpt)
            {
                m_log.WriteLine("Robot: " + excpt.Message);
            }
        }
Code example #12
File: MgrPreprocessor.cs  Project: maplewei/MyCaffe
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="imycaffe">Specifies the instance of the MyCaffeControl to use.</param>
 /// <param name="idb">Specifies the instance of the streaming database to use.</param>
 public MgrPreprocessor(IXMyCaffe <T> imycaffe, IXStreamDatabase idb)
 {
     m_mycaffe    = (MyCaffeControl <T>)imycaffe;
     m_idb        = idb;
     m_extension  = new Extension <T>(imycaffe as IXMyCaffeExtension <T>);
     m_blobInput  = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
     m_blobOutput = new Blob <T>(m_mycaffe.Cuda, m_mycaffe.Log);
 }
Code example #13
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="mycaffe">Specifies the MyCaffeControl to use for learning and prediction.</param>
 /// <param name="properties">Specifies the property set containing the key/value pairs of property settings.</param>
 /// <param name="random">Specifies the random number generator to use.</param>
 /// <param name="icallback">Specifies the callback for parent notifications and queries.</param>
 /// <param name="rgVocabulary">Specifies the vocabulary to use.</param>
 public TrainerRNN(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallback icallback, BucketCollection rgVocabulary)
 {
     m_icallback    = icallback;
     m_mycaffe      = mycaffe;
     m_properties   = properties;
     m_random       = random;
     m_rgVocabulary = rgVocabulary;
 }
Code example #14
        private void FormAbout_Load(object sender, EventArgs e)
        {
            this.lblProduct.Text = AssemblyProduct;
            this.lblVersion.Text = String.Format("Version {0}", AssemblyVersion);
            edtLicense.Text      = MyCaffeControl <float> .GetLicenseTextEx(" ");

            edtLicense.SelectionLength = 0;
            edtLicense.SelectionStart  = 0;
            edtLicense.ScrollToCaret();
        }
Code example #15
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="icallback">Specifies the callback used for update notifications sent to the parent.</param>
        /// <param name="mycaffe">Specifies the instance of MyCaffe with the open project.</param>
        /// <param name="properties">Specifies the properties passed into the trainer.</param>
        /// <param name="random">Specifies the random number generator used.</param>
        /// <param name="phase">Specifies the phase of the internal network to use.</param>
        public DqnAgent(IxTrainerCallback icallback, MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_icallback  = icallback;
            m_brain      = new Brain <T>(mycaffe, properties, random, phase);
            m_properties = properties;
            m_random     = random;

            m_fGamma       = (float)properties.GetPropertyAsDouble("Gamma", m_fGamma);
            m_bUseRawInput = properties.GetPropertyAsBool("UseRawInput", m_bUseRawInput);
        }
Code example #16
        public Brain(MyCaffeControl <T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_mycaffe    = mycaffe;
            m_net        = mycaffe.GetInternalNet(phase);
            m_solver     = mycaffe.GetInternalSolver();
            m_properties = properties;
            m_random     = random;

            m_memData = m_net.FindLayer(LayerParameter.LayerType.MEMORYDATA, null) as MemoryDataLayer <T>;
            m_memLoss = m_net.FindLayer(LayerParameter.LayerType.MEMORY_LOSS, null) as MemoryLossLayer <T>;
            m_softmax = m_net.FindLayer(LayerParameter.LayerType.SOFTMAX, null) as SoftmaxLayer <T>;

            if (m_memData == null)
            {
                throw new Exception("Could not find the MemoryData Layer!");
            }

            if (m_memLoss == null)
            {
                throw new Exception("Could not find the MemoryLoss Layer!");
            }

            m_memData.OnDataPack += memData_OnDataPack;
            m_memLoss.OnGetLoss  += memLoss_OnGetLoss;

            m_blobDiscountedR     = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient  = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobActionOneHot    = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobDiscountedR1    = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient1 = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobActionOneHot1   = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobLoss            = new Blob <T>(mycaffe.Cuda, mycaffe.Log);
            m_blobAprobLogit      = new Blob <T>(mycaffe.Cuda, mycaffe.Log);

            if (m_softmax != null)
            {
                LayerParameter p = new LayerParameter(LayerParameter.LayerType.SOFTMAXCROSSENTROPY_LOSS);
                p.loss_weight.Add(1);
                p.loss_weight.Add(0);
                p.loss_param.normalization = LossParameter.NormalizationMode.NONE;
                m_softmaxCe = new SoftmaxCrossEntropyLossLayer <T>(mycaffe.Cuda, mycaffe.Log, p);
            }

            m_colAccumulatedGradients = m_net.learnable_parameters.Clone();
            m_colAccumulatedGradients.SetDiff(0);

            int nMiniBatch = mycaffe.CurrentProject.GetBatchSize(phase);

            if (nMiniBatch != 0)
            {
                m_nMiniBatch = nMiniBatch;
            }

            m_nMiniBatch = m_properties.GetPropertyAsInt("MiniBatch", m_nMiniBatch);
        }
Code example #17
        /// <summary>
        /// Save the weights to a file.
        /// </summary>
        /// <param name="strTag">Specifies the tag applied to the filename.</param>
        /// <param name="mycaffe">Specifies the instance of mycaffe whose weights are to be saved.</param>
        private void saveWeights(string strTag, MyCaffeControl <float> mycaffe)
        {
            string strFile = getWeightFileName(strTag);

            byte[] rgWts = mycaffe.GetWeights();

            using (FileStream fs = File.Create(strFile))
                using (BinaryWriter bw = new BinaryWriter(fs))
                {
                    bw.Write(rgWts);
                }
        }
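
A matching load step would read the raw bytes back and hand them to MyCaffe. The sketch below reads the file with File.ReadAllBytes; applying the bytes through UpdateWeights is an assumption about the MyCaffeControl API and is marked as such in the code.

        // Sketch: counterpart to saveWeights. Reading the bytes is standard .NET;
        // UpdateWeights is an assumed MyCaffeControl method for applying them.
        private void loadWeights(string strTag, MyCaffeControl<float> mycaffe)
        {
            string strFile = getWeightFileName(strTag);

            if (!File.Exists(strFile))
            {
                return;
            }

            byte[] rgWts = File.ReadAllBytes(strFile);

            mycaffe.UpdateWeights(rgWts);   // assumed API
        }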
Code example #18
        //-----------------------------------------------------------------------------------------
        //  Simpler Classification (using solver)
        //-----------------------------------------------------------------------------------------

        /// <summary>
        /// The SimplerClassification shows how to use the solver directly and load data via its OnStart (for training) and
        /// OnTestStart (for testing) events.
        /// </summary>
        /// <remarks>
        /// IMPORTANT: This sample is for demonstration only; the Simplest Classification method is the fastest recommended method that uses the Image Database.
        ///
        /// This sample requires that you have already loaded the MNIST dataset into SQL (or SQLEXPRESS) using the MyCaffe
        /// Test Application by selecting its 'Database | Load MNIST...' menu item.
        /// </remarks>
        /// <param name="sender">Specifies the event sender.</param>
        /// <param name="e">Specifies the event args.</param>
        private void btnSimplerClassification_Click(object sender, EventArgs e)
        {
            Stopwatch     sw         = new Stopwatch();
            int           nBatchSize = 32;
            SettingsCaffe settings   = new SettingsCaffe();

            settings.GpuIds = "0";

            if (!Directory.Exists(m_strImageDirTesting) || !Directory.Exists(m_strImageDirTraining))
            {
                MessageBox.Show("You must first export the MNIST images by pressing the Export button!", "Export Needed", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            m_rgstrTrainingFiles = Directory.GetFiles(m_strImageDirTraining);
            m_rgstrTrainingFiles = m_rgstrTrainingFiles.Where(p => p.Contains(".png")).ToArray();
            m_rgstrTestingFiles  = Directory.GetFiles(m_strImageDirTesting);
            m_rgstrTestingFiles  = m_rgstrTestingFiles.Where(p => p.Contains(".png")).ToArray();

            string strSolver;
            string strModel;

            load_descriptors("mnist", out strSolver, out strModel); // Load the descriptors from their respective files (installed by MyCaffe Test Application install)
            strModel  = fixup_model(strModel, nBatchSize);
            strSolver = fixup_solver(strSolver, 10000);             // set the interval beyond the iterations to skip testing during solving.

            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, m_log, m_evtCancel);

            mycaffe.Load(Phase.TRAIN,                                  // using the training phase.
                         strSolver,                                    // solver descriptor, that specifies to use the SGD solver.
                         strModel,                                     // simple LENET model descriptor.
                         null, null, null, false, null, false, false); // no weights are loaded and no image database is used.

            // Perform your own training
            Solver <float> solver = mycaffe.GetInternalSolver();

            solver.OnStart     += Solver_OnStart;
            solver.OnTestStart += Solver_OnTestStart;

            // Run the solver to train the net.
            int nIterations = 5000;

            solver.Solve(nIterations);

            // Run the solver to test the net (using its internal test net)
            nIterations = 100;
            double dfAccuracy = solver.TestClassification(nIterations);

            m_log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

            MessageBox.Show("Average Accuracy = " + dfAccuracy.ToString("P"), "Train/Test on MNIST Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
Code example #19
        //-----------------------------------------------------------------------------------------
        //  Simplest Classification (using MyCaffeControl and MyCaffeImageDatabase)
        //-----------------------------------------------------------------------------------------

        /// <summary>
        /// The SimplestClassification shows how to use the MyCaffeControl (and internal MyCaffeImageDatabase) to train and test on the MNIST dataset.
        /// </summary>
        /// <param name="sender">Specifies the event sender.</param>
        /// <param name="e">Specifies the event args.</param>
        private void btnSimplestClassification_Click(object sender, EventArgs e)
        {
            DatasetFactory factory  = new DatasetFactory();
            Stopwatch      sw       = new Stopwatch();
            SettingsCaffe  settings = new SettingsCaffe();

            // Load all images into memory before training.
            settings.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
            // Use GPU ID = 0.
            settings.GpuIds = "0";

            string strSolver;
            string strModel;

            // Load the descriptors from their respective files (installed by MyCaffe Test Application install)
            load_descriptors("mnist", out strSolver, out strModel);

            // NOTE: model fixup not needed for we will use the DATA layer which pulls data from SQL or SQLEXPRESS via the MyCaffeImageDatabase.
            // Set the interval beyond the iterations to skip testing during solving.
            strSolver = fixup_solver(strSolver, 10000);

            // Load the MNIST dataset descriptor.
            DatasetDescriptor ds = factory.LoadDataset("MNIST");

            // Create a test project with the dataset and descriptors.
            ProjectEx project = new ProjectEx("Test");

            project.SetDataset(ds);
            project.ModelDescription  = strModel;
            project.SolverDescription = strSolver;
            project.WeightsState      = null;

            // Create the MyCaffeControl
            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, m_log, m_evtCancel);

            // Load the project, using the TRAIN phase.
            mycaffe.Load(Phase.TRAIN, project);

            // Train the model for 5000 iterations (which uses the internal solver and internal training net)
            int nIterations = 5000;

            mycaffe.Train(nIterations);

            // Test the model for 100 iterations (which uses the internal solver and internal testing net)
            nIterations = 100;
            double dfAccuracy = mycaffe.Test(nIterations);

            // Report the testing accuracy.
            m_log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

            MessageBox.Show("Average Accuracy = " + dfAccuracy.ToString("P"), "Train/Test on MNIST Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
Code example #20
        /// <summary>
        /// Optionally overridden to return a new type of trainer.
        /// </summary>
        /// <remarks>
        /// Override this method when using the MyCaffeControl that uses the <i>double</i> base type.
        /// </remarks>
        /// <param name="caffe">Specifies the MyCaffeControl used.</param>
        /// <param name="stage">Specifies the stage under which the trainer is created.</param>
        /// <returns>The IxTrainer interface implemented by the new trainer is returned.</returns>
        protected virtual IxTrainer create_trainerF(Component caffe, Stage stage)
        {
            MyCaffeControl <float> mycaffe = caffe as MyCaffeControl <float>;

            m_nProjectID = mycaffe.CurrentProject.OriginalID;
            m_dsCi       = mycaffe.DatasetConnectInfo;

            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("max_iter"), out m_nIterations);
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("snapshot"), out m_nSnapshot);

            m_properties.SetProperty("UsePreLoadData", m_bUsePreloadData.ToString());

            if (stage == Stage.RNN)
            {
                switch (m_trainerType)
                {
                case TRAINER_TYPE.RNN_SIMPLE:
                    return(new rnn.simple.TrainerRNN <float>(mycaffe, m_properties, m_random, this, m_rgVocabulary));

                default:
                    throw new Exception("The trainer type '" + m_trainerType.ToString() + "' is not supported in the RNN stage!");
                }
            }
            else
            {
                switch (m_trainerType)
                {
                case TRAINER_TYPE.PG_SIMPLE:
                    return(new pg.simple.TrainerPG <float>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.PG_ST:
                    return(new pg.st.TrainerPG <float>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.PG_MT:
                    return(new pg.mt.TrainerPG <float>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.C51_ST:
                    return(new dqn.c51.st.TrainerC51 <float>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.DQN_ST:
                    return(new dqn.noisy.st.TrainerNoisyDqn <float>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.DQN_SIMPLE:
                    return(new dqn.noisy.simple.TrainerNoisyDqn <float>(mycaffe, m_properties, m_random, this));

                default:
                    throw new Exception("The trainer type '" + m_trainerType.ToString() + "' is not supported in the RL stage!");
                }
            }
        }
Code example #21
 /// <summary>
 /// Run the trained model.
 /// </summary>
 /// <param name="mycaffe">Specifies the mycaffe instance running the sequence run model.</param>
 /// <param name="bw">Specifies the background worker.</param>
 /// <param name="strInput">Specifies the input data to run the model on.</param>
 private void runModel(MyCaffeControl <float> mycaffe, BackgroundWorker bw, string strInput)
 {
     try
     {
         m_log.WriteLine("You: " + strInput);
         int         nK      = (m_input.UseBeamSearch) ? 3 : 1;
         PropertySet input   = new PropertySet("InputData=" + strInput);
         PropertySet results = m_mycaffe.Run(input, nK);
         m_log.WriteLine("Robot: " + results.GetProperty("Results").TrimEnd(' ', '|'), true);
     }
     catch (Exception excpt)
     {
         m_log.WriteLine("Robot: " + excpt.Message);
     }
 }
Code example #22
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="p">Specifies the parameters that define how to train and run the model.</param>
        public Trainer(Parameters p)
        {
            m_strOutputPath = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
            m_param         = p;

            if (p.Mode == Parameters.MODE.TRAIN)
            {
                m_mycaffeTrain = new MyCaffeControl <float>(m_settings, m_log, m_evtCancel);
                m_mycaffeTrain.OnTrainingIteration += m_mycaffe_OnTrainingIteration;
            }

            m_mycaffeRun = new MyCaffeControl <float>(m_settings, m_log, m_evtCancel);

            m_swTraining.Start();
        }
Code example #23
File: EventArgs.cs  Project: alllucky1996/MyCaffe
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="mycaffe">Specifies the MyCaffeControl used.</param>
        public InitializeArgs(Component mycaffe)
        {
            m_caffe = mycaffe;

            if (mycaffe is MyCaffeControl <double> )
            {
                MyCaffeControl <double> mycaffe1 = mycaffe as MyCaffeControl <double>;
                m_log           = mycaffe1.Log;
                m_nOriginalDsId = mycaffe1.CurrentProject.Dataset.ID;
            }
            else
            {
                MyCaffeControl <float> mycaffe1 = mycaffe as MyCaffeControl <float>;
                m_log           = mycaffe1.Log;
                m_nOriginalDsId = mycaffe1.CurrentProject.Dataset.ID;
            }
        }
Code example #24
        /// <summary>
        /// Optionally overridden to return a new type of trainer.
        /// </summary>
        /// <remarks>
        /// Override this method when using the MyCaffeControl that uses the <i>double</i> base type.
        /// </remarks>
        /// <param name="caffe">Specifies the MyCaffeControl used.</param>
        /// <returns>The IxTrainer interface implemented by the new trainer is returned.</returns>
        protected virtual IxTrainerRNN create_trainerF(Component caffe)
        {
            MyCaffeControl <float> mycaffe = caffe as MyCaffeControl <float>;

            m_nProjectID = mycaffe.CurrentProject.ID;
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("max_iter"), out m_nIterations);
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("snapshot"), out m_nSnapshot);

            switch (m_trainerType)
            {
            case TRAINER_TYPE.RNN_SIMPLE:
                return(new rnn.simple.TrainerRNN <float>(mycaffe, m_properties, m_random, this, m_rgVocabulary, true));

            default:
                throw new Exception("Unknown trainer type '" + m_trainerType.ToString() + "'!");
            }
        }
Code example #25
        /// <summary>
        /// Shutdown and close the gym.
        /// </summary>
        public void Close()
        {
            if (m_blobWork != null)
            {
                m_blobWork.Dispose();
                m_blobWork = null;
            }

            if (m_mycaffe != null)
            {
                m_mycaffe.Dispose();
                m_mycaffe = null;
            }

            if (m_imgdb != null)
            {
                ((MyCaffeImageDatabase2)m_imgdb).Dispose();
                m_imgdb = null;
            }
        }
Code example #26
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="gym">Optionally, specifies another Gym to copy.</param>
        public ModelGym(ModelGym gym = null)
        {
            if (gym != null)
            {
                m_strName      = gym.m_strName;
                m_strModelDesc = gym.m_strModelDesc;
                m_strDataset   = gym.m_strDataset;
                m_nGpuID       = gym.m_nGpuID;
                m_rgWeights    = gym.m_rgWeights;
                m_log          = gym.m_log;

                m_mycaffe     = gym.m_mycaffe;
                gym.m_mycaffe = null;

                m_imgdb     = gym.m_imgdb;
                gym.m_imgdb = null;

                m_evtCancel     = gym.m_evtCancel;
                gym.m_evtCancel = null;
            }
        }
Code example #27
        private void saveWeights(MyCaffeControl <float> mycaffe, string strName)
        {
            string strDir  = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + "\\MyCaffe\\test_data\\models\\mnist\\";
            string strFile = strDir + strName + ".mycaffemodel";

//          byte[] rgWeights = mycaffe.GetWeights();
            Net <float> net = mycaffe.GetInternalNet(Phase.TRAIN);

            byte[] rgWeights = net.SaveWeights(mycaffe.Persist, false);

            if (File.Exists(strFile))
            {
                File.Delete(strFile);
            }

            using (FileStream fs = File.OpenWrite(strFile))
                using (BinaryWriter bw = new BinaryWriter(fs))
                {
                    bw.Write(rgWeights);
                }
        }
Code example #28
        /// <summary>
        /// Initialize the gym with the specified properties.
        /// </summary>
        /// <param name="log">Specifies the output log to use.</param>
        /// <param name="properties">Specifies the properties containing Gym specific initialization parameters.</param>
        /// <remarks>
        /// The ModelGym uses the following initialization properties.
        ///
        /// 'GpuID' - the GPU to run on.
        /// 'ModelDescription' - the model description of the model to use.
        /// 'Dataset' - the name of the dataset to use.
        /// 'Weights' - the model trained weights.
        /// 'CudaPath' - the path of the CudaDnnDLL to use.
        /// 'BatchSize' - the batch size used when running images through the model (default = 16).
        /// 'RecreateData' - when 'True' the data is re-run through the model, otherwise if already run the data is loaded from file (faster).
        /// </remarks>
        public void Initialize(Log log, PropertySet properties)
        {
            m_nGpuID        = properties.GetPropertyAsInt("GpuID");
            m_strModelDesc  = properties.GetProperty("ModelDescription");
            m_strDataset    = properties.GetProperty("Dataset");
            m_rgWeights     = properties.GetPropertyBlob("Weights");
            m_nBatchSize    = properties.GetPropertyAsInt("BatchSize", 16);
            m_bRecreateData = properties.GetPropertyAsBool("RecreateData", false);
            m_strProject    = properties.GetProperty("ProjectName");
            if (string.IsNullOrEmpty(m_strProject))
            {
                m_strProject = "default";
            }

            string strCudaPath = properties.GetProperty("CudaPath");

            SettingsCaffe s = new SettingsCaffe();

            s.GpuIds            = m_nGpuID.ToString();
            s.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ON_DEMAND_BACKGROUND;

            m_imgdb = new MyCaffeImageDatabase2(log);
            m_imgdb.InitializeWithDsName1(s, m_strDataset);
            m_ds = m_imgdb.GetDatasetByName(m_strDataset);

            SimpleDatum sd    = m_imgdb.QueryImage(m_ds.TrainingSource.ID, 0, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
            BlobShape   shape = new BlobShape(1, sd.Channels, sd.Height, sd.Width);

            if (m_evtCancel == null)
            {
                m_evtCancel = new CancelEvent();
            }

            m_mycaffe = new MyCaffeControl <float>(s, log, m_evtCancel, null, null, null, null, strCudaPath);
            m_mycaffe.LoadToRun(m_strModelDesc, m_rgWeights, shape);

            m_log = log;
        }
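
The remarks above list the initialization properties the ModelGym reads. The sketch below shows how such a property set might be assembled by a caller; the semicolon-delimited constructor string and the SetPropertyBlob call used to attach the 'Weights' bytes are both assumptions about the PropertySet API, and strModelDesc, rgWeights, strCudaPath, gym and m_log are placeholders for the caller's own values.

            // Sketch: assembling the ModelGym initialization properties. The
            // string constructor form and SetPropertyBlob are assumed APIs;
            // the variables are caller-supplied placeholders.
            PropertySet properties = new PropertySet(
                "GpuID=0;ModelDescription=" + strModelDesc +
                ";Dataset=MNIST;BatchSize=16;RecreateData=False;CudaPath=" + strCudaPath);
            properties.SetPropertyBlob("Weights", rgWeights);   // assumed API

            gym.Initialize(m_log, properties);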
Code example #29
        public void TestCreateDeployModel()
        {
            ModelBuilder builder = create();

            NetParameter net_param = builder.CreateDeployModel();
            RawProto     proto     = net_param.ToProto("root");
            string       strNet    = proto.ToString();

            RawProto     proto2     = RawProto.Parse(strNet);
            NetParameter net_param2 = NetParameter.FromProto(proto2);

            m_log.CHECK(net_param2.Compare(net_param), "The two net parameters should be the same!");

            // verify creating the model.
            SettingsCaffe      settings  = new SettingsCaffe();
            CancelEvent        evtCancel = new CancelEvent();
            MyCaffeControl <T> mycaffe   = new MyCaffeControl <T>(settings, m_log, evtCancel);

            save(strNet, null, true);

            //            mycaffe.LoadToRun(strNet, null, new BlobShape(1, 3, 300, 300));
            mycaffe.Dispose();
        }
Code example #30
File: MyCaffeTrainerDual.cs  Project: lulzzz/MyCaffe
        /// <summary>
        /// Optionally overridden to return a new type of trainer.
        /// </summary>
        /// <remarks>
        /// Override this method when using the MyCaffeControl that uses the <i>double</i> base type.
        /// </remarks>
        /// <param name="caffe">Specifies the MyCaffeControl used.</param>
        /// <param name="stage">Specifies the stage under which the trainer is created.</param>
        /// <returns>The IxTrainer interface implemented by the new trainer is returned.</returns>
        protected virtual IxTrainer create_trainerD(Component caffe, Stage stage)
        {
            MyCaffeControl <double> mycaffe = caffe as MyCaffeControl <double>;

            m_nProjectID = mycaffe.CurrentProject.ID;
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("max_iter"), out m_nIterations);
            int.TryParse(mycaffe.CurrentProject.GetSolverSetting("snapshot"), out m_nSnapshot);

            if (stage == Stage.RNN)
            {
                switch (m_trainerType)
                {
                case TRAINER_TYPE.RNN_SIMPLE:
                    return(new rnn.simple.TrainerRNN <double>(mycaffe, m_properties, m_random, this, m_rgVocabulary, m_bUsePreloadData));

                default:
                    throw new Exception("The trainer type '" + m_trainerType.ToString() + "' is not supported in the RNN stage!");
                }
            }
            else
            {
                switch (m_trainerType)
                {
                case TRAINER_TYPE.PG_SIMPLE:
                    return(new pg.simple.TrainerPG <double>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.PG_ST:
                    return(new pg.st.TrainerPG <double>(mycaffe, m_properties, m_random, this));

                case TRAINER_TYPE.PG_MT:
                    return(new pg.mt.TrainerPG <double>(mycaffe, m_properties, m_random, this));

                default:
                    throw new Exception("The trainer type '" + m_trainerType.ToString() + "' is not supported in the RL stage!");
                }
            }
        }