Example #1
        public void FeedNet(bool bTrain)
        {
            bool  bFound;
            int   nIdx;
            Phase phase = (bTrain) ? Phase.TRAIN : Phase.TEST;

            // Real Data
            if (m_bIsDataReal)
            {
                if (m_bUsePreloadData)
                {
                    double[] rgdfData = (bTrain) ? m_rgdfTrainData : m_rgdfTestData;

                    // Re-order the data according to caffe input specification for LSTM layer.
                    for (int i = 0; i < m_nBatchSize; i++)
                    {
                        int nCurrentValIdx = m_random.Next(rgdfData.Length - m_nSequenceLength - 1);

                        for (int j = 0; j < m_nSequenceLength; j++)
                        {
                            // Feed the net with input data and labels (clips are always the same)
                            double dfData = rgdfData[nCurrentValIdx + j];
                            // Labels are the same with an offset of +1
                            double dfLabel   = rgdfData[nCurrentValIdx + j + 1]; // predict next value
                            float  fDataIdx  = findIndex(dfData, out bFound);
                            float  fLabelIdx = findIndex(dfLabel, out bFound);

                            // LSTM: Create input data, the data must be in the order
                            // seq1_val1, seq2_val1, ..., seqBatch_Size_val1, seq1_val2, seq2_val2, ..., seqBatch_Size_valSequence_Length
                            if (m_lstmType == LayerParameter.LayerType.LSTM)
                            {
                                nIdx = m_nBatchSize * j + i;
                            }

                            // LSTM_SIMPLE: Create input data, the data must be in the order
                            // seq1_val1, seq1_val2, ..., seq1_valBatchSize, seq2_val1, seq2_val2, ..., seqSequenceLength_valBatchSize
                            else
                            {
                                nIdx = i * m_nBatchSize + j;
                            }

                            m_rgDataInput[nIdx] = (T)Convert.ChangeType(fDataIdx, typeof(T));

                            if (m_nSequenceLengthLabel == (m_nSequenceLength * m_nBatchSize) || j == m_nSequenceLength - 1)
                            {
                                m_rgLabelInput[nIdx] = (T)Convert.ChangeType(fLabelIdx, typeof(T));
                            }
                        }
                    }

                    m_blobData.mutable_cpu_data  = m_rgDataInput;
                    m_blobLabel.mutable_cpu_data = m_rgLabelInput;
                }
                else
                {
                    m_mycaffe.Log.CHECK_EQ(m_nBatchSize, m_nThreads, "The 'Threads' setting of " + m_nThreads.ToString() + " must match the batch size = " + m_nBatchSize.ToString() + "!");

                    List <GetDataArgs> rgDataArgs = new List <GetDataArgs>();

                    if (m_nBatchSize == 1)
                    {
                        GetDataArgs e = getDataArgs(phase, 0, 0, true, m_nBatchSize);
                        m_icallback.OnGetData(e);
                        rgDataArgs.Add(e);
                    }
                    else
                    {
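                        // Queue one data request per batch item, then collect them in parallel through the data pool below.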
                        for (int i = 0; i < m_nBatchSize; i++)
                        {
                            rgDataArgs.Add(getDataArgs(phase, i, 0, true, m_nBatchSize));
                        }

                        if (!m_dataPool.Run(rgDataArgs))
                        {
                            m_mycaffe.Log.FAIL("Data Time Out - Failed to collect all data to build the RNN batch!");
                        }
                    }

                    double[] rgData  = rgDataArgs[0].State.Data.GetData <double>();
                    double[] rgLabel = rgDataArgs[0].State.Label.GetData <double>();
                    double[] rgClip  = rgDataArgs[0].State.Clip.GetData <double>();

                    int nDataLen  = rgData.Length;
                    int nLabelLen = rgLabel.Length;
                    int nClipLen  = rgClip.Length;
                    int nDataItem = nDataLen / nLabelLen; // number of data values per label (i.e. per time step)

                    if (m_nBatchSize > 1)
                    {
                        rgData  = new double[nDataLen * m_nBatchSize];
                        rgLabel = new double[nLabelLen * m_nBatchSize];
                        rgClip  = new double[nClipLen * m_nBatchSize];

                        for (int i = 0; i < m_nBatchSize; i++)
                        {
                            for (int j = 0; j < m_nSequenceLength; j++)
                            {
                                // LSTM: Create input data, the data must be in the order
                                // seq1_val1, seq2_val1, ..., seqBatch_Size_val1, seq1_val2, seq2_val2, ..., seqBatch_Size_valSequence_Length
                                if (m_lstmType == LayerParameter.LayerType.LSTM)
                                {
                                    nIdx = m_nBatchSize * j + i;
                                }

                                // LSTM_SIMPLE: Create input data, the data must be in the order
                                // seq1_val1, seq1_val2, ..., seq1_valBatchSize, seq2_val1, seq2_val2, ..., seqSequenceLength_valBatchSize
                                else
                                {
                                    nIdx = i * m_nBatchSize + j;
                                }

                                Array.Copy(rgDataArgs[i].State.Data.GetData <double>(), 0, rgData, nIdx * nDataItem, nDataItem);
                                rgLabel[nIdx] = rgDataArgs[i].State.Label.GetDataAtD(j);
                                rgClip[nIdx]  = rgDataArgs[i].State.Clip.GetDataAtD(j);
                            }
                        }
                    }

                    string strSolverErr = "";
                    if (m_nSolverSequenceLength >= 0 && m_nSolverSequenceLength != m_nSequenceLength)
                    {
                        strSolverErr = "The solver parameter 'SequenceLength' length of " + m_nSolverSequenceLength.ToString() + " must match the model sequence length of " + m_nSequenceLength.ToString() + ".  ";
                    }

                    int nExpectedCount = m_blobData.count();
                    m_mycaffe.Log.CHECK_EQ(nExpectedCount, rgData.Length, strSolverErr + "The size of the data received ('" + rgData.Length.ToString() + "') does not match the expected data count of '" + nExpectedCount.ToString() + "'!");
                    m_blobData.mutable_cpu_data = Utility.ConvertVec <T>(rgData);

                    nExpectedCount = m_blobLabel.count();
                    m_mycaffe.Log.CHECK_EQ(nExpectedCount, rgLabel.Length, strSolverErr + "The size of the label received ('" + rgLabel.Length.ToString() + "') does not match the expected label count of '" + nExpectedCount.ToString() + "'!");
                    m_blobLabel.mutable_cpu_data = Utility.ConvertVec <T>(rgLabel);

                    nExpectedCount = m_blobClip.count();
                    m_mycaffe.Log.CHECK_EQ(nExpectedCount, rgClip.Length, strSolverErr + "The size of the clip received ('" + rgClip.Length.ToString() + "') does not match the expected clip count of '" + nExpectedCount.ToString() + "'!");
                    m_blobClip.mutable_cpu_data = Utility.ConvertVec <T>(rgClip);
                }
            }
            // Byte Data (uses a vocabulary if available)
            else
            {
                byte[] rgData = (bTrain) ? m_rgTrainData : m_rgTestData;
                // Create input data, the data must be in the order
                // seq1_char1, seq2_char1, ..., seqBatch_Size_char1, seq1_char2, seq2_char2, ..., seqBatch_Size_charSequence_Length
                // As seq1_charSequence_Length == seq2_charSequence_Length-1 == seq3_charSequence_Length-2 == ... we can perform a block copy for efficiency.
                // Labels are the same with an offset of +1

                // Re-order the data according to caffe input specification for LSTM layer.
                for (int i = 0; i < m_nBatchSize; i++)
                {
                    int nCurrentCharIdx = m_random.Next(rgData.Length - m_nSequenceLength - 2);

                    for (int j = 0; j < m_nSequenceLength; j++)
                    {
                        // Feed the net with input data and labels (clips are always the same)
                        byte bData = rgData[nCurrentCharIdx + j];
                        // Labels are the same with an offset of +1
                        byte  bLabel    = rgData[nCurrentCharIdx + j + 1]; // predict next character
                        float fDataIdx  = findIndex(bData, out bFound);
                        float fLabelIdx = findIndex(bLabel, out bFound);

                        // LSTM: Create input data, the data must be in the order
                        // seq1_val1, seq2_val1, ..., seqBatch_Size_val1, seq1_val2, seq2_val2, ..., seqBatch_Size_valSequence_Length
                        if (m_lstmType == LayerParameter.LayerType.LSTM)
                        {
                            nIdx = m_nBatchSize * j + i;
                        }

                        // LSTM_SIMPLE: Create input data, the data must be in the order
                        // seq1_val1, seq1_val2, ..., seq1_valBatchSize, seq2_val1, seq2_val2, ..., seqSequenceLength_valBatchSize
                        else
                        {
                            nIdx = i * m_nBatchSize + j;
                        }

                        m_rgDataInput[nIdx] = (T)Convert.ChangeType(fDataIdx, typeof(T));

                        if (m_nSequenceLengthLabel == (m_nSequenceLength * m_nBatchSize) || j == m_nSequenceLength - 1)
                        {
                            m_rgLabelInput[nIdx] = (T)Convert.ChangeType(fLabelIdx, typeof(T));
                        }
                    }
                }

                m_blobData.mutable_cpu_data  = m_rgDataInput;
                m_blobLabel.mutable_cpu_data = m_rgLabelInput;
            }
        }
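
The interleaving described in the comments above can be easier to see in isolation. The standalone sketch below (plain C# with an assumed batch size of 2 and sequence length of 3; it uses no MyCaffe types) prints the flat index that each (sequence, time step) pair maps to under the two orderings:

        using System;

        static class OrderingDemo
        {
            // Illustration only: prints where each sequence value lands in the flat
            // input array under the LSTM and LSTM_SIMPLE orderings used in FeedNet above.
            static void Main()
            {
                int nBatchSize = 2;        // assumed values for the demo
                int nSequenceLength = 3;

                for (int i = 0; i < nBatchSize; i++)             // i = sequence within the batch
                {
                    for (int j = 0; j < nSequenceLength; j++)    // j = time step within the sequence
                    {
                        int nLstmIdx   = nBatchSize * j + i;     // LSTM: all step-1 values first, then all step-2 values, ...
                        int nSimpleIdx = i * nBatchSize + j;     // LSTM_SIMPLE: the indexing used in the else branch above

                        Console.WriteLine("seq" + (i + 1) + "_val" + (j + 1) +
                                          " -> LSTM idx " + nLstmIdx + ", LSTM_SIMPLE idx " + nSimpleIdx);
                    }
                }
            }
        }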
Example #2
        public void FeedNet(bool bTrain)
        {
            bool bFound;
            int  nIdx;

            // Real Data
            if (m_bIsDataReal)
            {
                if (m_bUsePreloadData)
                {
                    double[] rgdfData = (bTrain) ? m_rgdfTrainData : m_rgdfTestData;

                    // Re-order the data according to caffe input specification for LSTM layer.
                    for (int i = 0; i < m_nBatchSize; i++)
                    {
                        int nCurrentValIdx = m_random.Next(rgdfData.Length - m_nSequenceLength - 1);

                        for (int j = 0; j < m_nSequenceLength; j++)
                        {
                            // Feed the net with input data and labels (clips are always the same)
                            double dfData = rgdfData[nCurrentValIdx + j];
                            // Labels are the same with an offset of +1
                            double dfLabel   = rgdfData[nCurrentValIdx + j + 1]; // predict next value
                            float  fDataIdx  = findIndex(dfData, out bFound);
                            float  fLabelIdx = findIndex(dfLabel, out bFound);

                            // LSTM: Create input data, the data must be in the order
                            // seq1_val1, seq2_val1, ..., seqBatch_Size_val1, seq1_val2, seq2_val2, ..., seqBatch_Size_valSequence_Length
                            if (m_lstmType == LayerParameter.LayerType.LSTM)
                            {
                                nIdx = m_nBatchSize * j + i;
                            }

                            // LSTM_SIMPLE: Create input data, the data must be in the order
                            // seq1_val1, seq1_val2, ..., seq1_valBatchSize, seq2_val1, seq2_val2, ..., seqSequenceLength_valBatchSize
                            else
                            {
                                nIdx = i * m_nBatchSize + j;
                            }

                            m_rgDataInput[nIdx] = (T)Convert.ChangeType(fDataIdx, typeof(T));

                            if (m_nSequenceLengthLabel == (m_nSequenceLength * m_nBatchSize) || j == m_nSequenceLength - 1)
                            {
                                m_rgLabelInput[nIdx] = (T)Convert.ChangeType(fLabelIdx, typeof(T));
                            }
                        }
                    }

                    m_blobData.mutable_cpu_data  = m_rgDataInput;
                    m_blobLabel.mutable_cpu_data = m_rgLabelInput;
                }
                else
                {
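                    // Request the data for a single item directly from the training callback (no batching across threads in this variant).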
                    GetDataArgs e = getDataArgs(0, true);
                    m_icallback.OnGetData(e);

                    string strSolverErr = "";
                    if (m_nSolverSequenceLength >= 0 && m_nSolverSequenceLength != m_nSequenceLength)
                    {
                        strSolverErr = "The solver parameter 'SequenceLength' length of " + m_nSolverSequenceLength.ToString() + " must match the model sequence length of " + m_nSequenceLength.ToString() + ".  ";
                    }

                    int nExpectedCount = m_blobData.count();
                    m_mycaffe.Log.CHECK_EQ(nExpectedCount, e.State.Data.ItemCount, strSolverErr + "The size of the data received ('" + e.State.Data.ItemCount.ToString() + "') does not match the expected data count of '" + nExpectedCount.ToString() + "'!");
                    m_blobData.mutable_cpu_data = Utility.ConvertVec <T>(e.State.Data.RealData);

                    nExpectedCount = m_blobLabel.count();
                    m_mycaffe.Log.CHECK_EQ(nExpectedCount, e.State.Label.ItemCount, strSolverErr + "The size of the label received ('" + e.State.Label.ItemCount.ToString() + "') does not match the expected label count of '" + nExpectedCount.ToString() + "'!");
                    m_blobLabel.mutable_cpu_data = Utility.ConvertVec <T>(e.State.Label.RealData);
                }
            }
            // Byte Data (uses a vocabulary if available)
            else
            {
                byte[] rgData = (bTrain) ? m_rgTrainData : m_rgTestData;
                // Create input data, the data must be in the order
                // seq1_char1, seq2_char1, ..., seqBatch_Size_char1, seq1_char2, seq2_char2, ..., seqBatch_Size_charSequence_Length
                // As seq1_charSequence_Length == seq2_charSequence_Length-1 == seq3_charSequence_Length-2 == ... we can perform a block copy for efficiency.
                // Labels are the same with an offset of +1

                // Re-order the data according to caffe input specification for LSTM layer.
                for (int i = 0; i < m_nBatchSize; i++)
                {
                    int nCurrentCharIdx = m_random.Next(rgData.Length - m_nSequenceLength - 1);

                    for (int j = 0; j < m_nSequenceLength; j++)
                    {
                        // Feed the net with input data and labels (clips are always the same)
                        byte bData = rgData[nCurrentCharIdx + j];
                        // Labels are the same with an offset of +1
                        byte  bLabel    = rgData[nCurrentCharIdx + j + 1]; // predict next character
                        float fDataIdx  = findIndex(bData, out bFound);
                        float fLabelIdx = findIndex(bLabel, out bFound);

                        // LSTM: Create input data, the data must be in the order
                        // seq1_val1, seq2_val1, ..., seqBatch_Size_val1, seq1_val2, seq2_val2, ..., seqBatch_Size_valSequence_Length
                        if (m_lstmType == LayerParameter.LayerType.LSTM)
                        {
                            nIdx = m_nBatchSize * j + i;
                        }

                        // LSTM_SIMPLE: Create input data, the data must be in the order
                        // seq1_val1, seq1_val2, ..., seq1_valBatchSize, seq2_val1, seq2_val2, ..., seqSequenceLength_valBatchSize
                        else
                        {
                            nIdx = i * m_nBatchSize + j;
                        }

                        m_rgDataInput[nIdx] = (T)Convert.ChangeType(fDataIdx, typeof(T));

                        if (m_nSequenceLengthLabel == (m_nSequenceLength * m_nBatchSize) || j == m_nSequenceLength - 1)
                        {
                            m_rgLabelInput[nIdx] = (T)Convert.ChangeType(fLabelIdx, typeof(T));
                        }
                    }
                }

                m_blobData.mutable_cpu_data  = m_rgDataInput;
                m_blobLabel.mutable_cpu_data = m_rgLabelInput;
            }
        }
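
Either version of FeedNet is intended to be called once per iteration so that a freshly sampled batch sits in the data, label and clip blobs before the solver runs. The loop below is only a usage sketch under that assumption; the solver variable, its Step(1) call and the iteration count are illustrative placeholders, not part of the code above.

        // Hypothetical training loop: only FeedNet comes from the examples above;
        // the 'solver' object and its Step(1) call are assumed for illustration.
        int nMaxIterations = 1000;

        for (int nIter = 0; nIter < nMaxIterations; nIter++)
        {
            FeedNet(true);      // load a new randomly sampled training batch into the input blobs
            solver.Step(1);     // run one forward/backward pass over the batch just fed

            // A test-phase batch can be fed the same way when evaluating:
            // FeedNet(false);
        }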