Example #1
        public InputLayerEx(CudaDnn<T> cuda, Log log, Layer<T> layer) : base(cuda, log, layer.layer_param)
        {
            string strPath = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);
            strPath += "\\mycaffe\\test_data\\data\\text\\";

            // Configure a TEXT_DATA layer that reads the encoder/decoder source
            // files and produces un-shuffled batches of size 1 for testing.
            LayerParameter text_param = new LayerParameter(LayerParameter.LayerType.TEXT_DATA);

            text_param.text_data_param.batch_size     = 1;
            text_param.text_data_param.decoder_source = strPath + "robot_text.txt";
            text_param.text_data_param.encoder_source = strPath + "human_text.txt";
            text_param.text_data_param.enable_normal_encoder_output  = true;
            text_param.text_data_param.enable_reverse_encoder_output = true;
            text_param.text_data_param.sample_size = 1000;
            text_param.text_data_param.shuffle     = false;
            text_param.text_data_param.time_steps  = 80;
            text_param.phase = Phase.TEST;

            m_dataLayer = new TextDataLayer<T>(cuda, log, text_param);

            // The TEXT_DATA layer takes no bottom blobs and produces seven top blobs.
            BlobCollection<T> colBottom = new BlobCollection<T>();
            BlobCollection<T> colTop    = new BlobCollection<T>();

            for (int i = 0; i < 7; i++)
            {
                colTop.Add(new Blob<T>(cuda, log));
            }

            m_dataLayer.Setup(colBottom, colTop);

            // The temporary top blobs are only needed for setup.
            colTop.Dispose();
        }
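Once set up, the wrapped data layer can serve batches through an ordinary forward pass. Below is a minimal sketch of that continuation, assuming Layer<T> exposes the Caffe-style Forward(colBottom, colTop) call (the call is not shown in the snippet above, so treat it as an assumption):

        // Hypothetical follow-on: pull one batch from the text files.
        // Assumes Layer<T>.Forward(colBottom, colTop) exists with the
        // usual Caffe-style signature.
        BlobCollection<T> colBottom = new BlobCollection<T>();
        BlobCollection<T> colTop    = new BlobCollection<T>();

        for (int i = 0; i < 7; i++)
        {
            colTop.Add(new Blob<T>(cuda, log));
        }

        m_dataLayer.Setup(colBottom, colTop);
        m_dataLayer.Forward(colBottom, colTop);  // colTop now holds the layer's seven outputs.
        colTop.Dispose();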
Example #2
        /// <summary>
        /// Load the data into the model.
        /// </summary>
        /// <param name="strInput">Specifies the encoder input sentence.</param>
        /// <param name="nIxInput">Specifies the index of the decoder's current input word.</param>
        private BlobCollection<float> loadData(string strInput, int nIxInput)
        {
            Net<float>           net   = m_mycaffe.GetInternalNet(Phase.RUN);
            TextDataLayer<float> layer = net.FindLayer("TextData", "data") as TextDataLayer<float>;

            // Locate the run-time net's input blobs: encoder input, reversed
            // encoder input, encoder clip and decoder input.
            Blob<float> blobData     = net.FindBlob("ienc");
            Blob<float> blobDatar    = net.FindBlob("iencr");
            Blob<float> blobClipE    = net.FindBlob("iencc");
            Blob<float> blobDecInput = net.FindBlob("idec");

            // Collect the bottom blobs in the order the layer expects, then
            // let the TextDataLayer fill them from the input sentence.
            BlobCollection<float> colBtm = new BlobCollection<float>();

            colBtm.Add(blobDecInput);
            colBtm.Add(blobData);
            colBtm.Add(blobDatar);
            colBtm.Add(blobClipE);

            layer.PreProcessInput(strInput, nIxInput, colBtm);

            return colBtm;
        }
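Because the returned collection wraps the net's own input blobs, a caller can run the network directly after pre-processing. A minimal sketch, assuming Net<float> offers a Forward(out double) overload that returns the output blobs (the overload is not shown above, so it is an assumption):

        // Hypothetical caller: pre-process one decoder step, then run the net.
        BlobCollection<float> colBtm = loadData("hello there", 1);
        Net<float> net = m_mycaffe.GetInternalNet(Phase.RUN);

        double dfLoss;
        BlobCollection<float> colOut = net.Forward(out dfLoss);  // assumed overload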
Example #3
        /// <summary>
        /// The DoWork thread is the main thread used to train or run the model, depending on the operation selected.
        /// </summary>
        /// <param name="sender">Specifies the sender.</param>
        /// <param name="e">Specifies the arguments.</param>
        private void m_bw_DoWork(object sender, DoWorkEventArgs e)
        {
            BackgroundWorker bw = sender as BackgroundWorker;

            m_input = e.Argument as InputData;
            SettingsCaffe s = new SettingsCaffe();

            s.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;

            try
            {
                m_model.Batch = m_input.Batch;
                m_mycaffe     = new MyCaffeControl<float>(s, m_log, m_evtCancel);

                // Train the model.
                if (m_input.Operation == InputData.OPERATION.TRAIN)
                {
                    // Convert epochs to iterations, assuming 7,000 training samples per epoch.
                    m_model.Iterations = (int)((m_input.Epochs * 7000) / m_model.Batch);
                    m_log.WriteLine("Training for " + m_input.Epochs.ToString() + " epochs (" + m_model.Iterations.ToString("N0") + " iterations).", true);
                    m_log.WriteLine("INFO: " + m_model.Iterations.ToString("N0") + " iterations.", true);
                    m_log.WriteLine("Using hidden = " + m_input.HiddenSize.ToString() + ", and word size = " + m_input.WordSize.ToString() + ".", true);

                    // Load the Seq2Seq training model.
                    NetParameter    netParam    = m_model.CreateModel(m_input.InputFileName, m_input.TargetFileName, m_input.HiddenSize, m_input.WordSize, m_input.UseSoftmax, m_input.UseExternalIp);
                    string          strModel    = netParam.ToProto("root").ToString();
                    SolverParameter solverParam = m_model.CreateSolver(m_input.LearningRate);
                    string          strSolver   = solverParam.ToProto("root").ToString();
                    byte[]          rgWts       = loadWeights("sequence");

                    m_strModel  = strModel;
                    m_strSolver = strSolver;

                    m_mycaffe.OnTrainingIteration += m_mycaffe_OnTrainingIteration;
                    m_mycaffe.OnTestingIteration  += m_mycaffe_OnTestingIteration;
                    m_mycaffe.LoadLite(Phase.TRAIN, strSolver, strModel, rgWts, false, false);

                    // When not using softmax, the loss is computed externally
                    // via the MemoryLoss layers' OnGetLoss events.
                    if (!m_input.UseSoftmax)
                    {
                        MemoryLossLayer<float> lossLayerTraining = m_mycaffe.GetInternalNet(Phase.TRAIN).FindLayer(LayerParameter.LayerType.MEMORY_LOSS, "loss") as MemoryLossLayer<float>;
                        if (lossLayerTraining != null)
                        {
                            lossLayerTraining.OnGetLoss += LossLayer_OnGetLossTraining;
                        }

                        MemoryLossLayer<float> lossLayerTesting = m_mycaffe.GetInternalNet(Phase.TEST).FindLayer(LayerParameter.LayerType.MEMORY_LOSS, "loss") as MemoryLossLayer<float>;
                        if (lossLayerTesting != null)
                        {
                            lossLayerTesting.OnGetLoss += LossLayer_OnGetLossTesting;
                        }
                    }

                    m_blobProbs = new Blob<float>(m_mycaffe.Cuda, m_mycaffe.Log);
                    m_blobScale = new Blob<float>(m_mycaffe.Cuda, m_mycaffe.Log);

                    TextDataLayer<float> dataLayerTraining = m_mycaffe.GetInternalNet(Phase.TRAIN).FindLayer(LayerParameter.LayerType.TEXT_DATA, "data") as TextDataLayer<float>;
                    if (dataLayerTraining != null)
                    {
                        dataLayerTraining.OnGetData += DataLayerTraining_OnGetDataTraining;
                    }

                    // Train the Seq2Seq model.
                    m_plotsSequenceLoss          = new PlotCollection("Sequence Loss");
                    m_plotsSequenceAccuracyTest  = new PlotCollection("Sequence Accuracy Test");
                    m_plotsSequenceAccuracyTrain = new PlotCollection("Sequence Accuracy Train");
                    m_mycaffe.Train(m_model.Iterations);
                    saveWeights("sequence", m_mycaffe);
                }

                // Run a trained model.
                else
                {
                    NetParameter netParam = m_model.CreateModel(m_input.InputFileName, m_input.TargetFileName, m_input.HiddenSize, m_input.WordSize, m_input.UseSoftmax, m_input.UseExternalIp, Phase.RUN);
                    string       strModel = netParam.ToProto("root").ToString();
                    byte[]       rgWts    = loadWeights("sequence");

                    strModel = m_model.PrependInput(strModel);

                    m_strModelRun = strModel;

                    int nN = m_model.TimeSteps;
                    m_mycaffe.LoadToRun(strModel, rgWts, new BlobShape(new List<int>() { nN, 1, 1, 1 }), null, null, false, false);

                    m_blobProbs = new Blob<float>(m_mycaffe.Cuda, m_mycaffe.Log);
                    m_blobScale = new Blob<float>(m_mycaffe.Cuda, m_mycaffe.Log);

                    runModel(m_mycaffe, bw, m_input.InputText);
                }
            }
            catch (Exception excpt)
            {
                m_log.WriteLine("ERROR: " + excpt.Message, true);
                throw;  // Rethrow without resetting the stack trace.
            }
            finally
            {
                // Cleanup.
                if (m_mycaffe != null)
                {
                    m_mycaffe.Dispose();
                    m_mycaffe = null;
                }
            }
        }
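The handlers wired above (m_mycaffe_OnTrainingIteration and the others) sit outside this snippet. A minimal sketch of what the training-iteration handler might look like, assuming TrainingIterationArgs<float> exposes Iteration and SmoothedLoss properties (names not confirmed by this snippet):

        // Hypothetical handler for the OnTrainingIteration event wired above.
        // 'Iteration' and 'SmoothedLoss' are assumed property names on
        // TrainingIterationArgs<float>; verify against the MyCaffe API.
        private void m_mycaffe_OnTrainingIteration(object sender, TrainingIterationArgs<float> e)
        {
            m_log.WriteLine("Iteration " + e.Iteration.ToString("N0") + ", loss = " + e.SmoothedLoss.ToString("N5"), true);
        }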