Example No. 1
0
        /// <summary>
        /// Decodes a sequence with the bi-directional RNN: runs the forward and backward
        /// passes concurrently, averages the two hidden layers per time step, and projects
        /// the merged hidden layer through the output weights.
        /// </summary>
        /// <param name="pSequence">Input sequence; one hidden/output layer is produced per state.</param>
        /// <param name="outputHiddenLayer">Merged (averaged) forward+backward hidden layer per state.</param>
        /// <param name="rawOutputLayer">Pre-softmax output activations per state (numStates x L2).</param>
        /// <returns>Softmax-normalized output layer per state.</returns>
        public SimpleLayer[] InnerDecode(Sequence pSequence, out SimpleLayer[] outputHiddenLayer, out Matrix <double> rawOutputLayer)
        {
            int numStates = pSequence.States.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;

            // The two directional passes are independent, so run them in parallel.
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardRNN.netReset(false);
                mForward = new SimpleLayer[numStates];
                for (int curState = 0; curState < numStates; curState++)
                {
                    State state = pSequence.States[curState];
                    forwardRNN.SetInputLayer(state, curState, numStates, null);
                    forwardRNN.computeHiddenLayer(state);

                    mForward[curState] = forwardRNN.GetHiddenLayer();
                }
            },
                            () =>
            {
                //Computing backward RNN
                backwardRNN.netReset(false);
                mBackward = new SimpleLayer[numStates];
                for (int curState = numStates - 1; curState >= 0; curState--)
                {
                    State state = pSequence.States[curState];
                    backwardRNN.SetInputLayer(state, curState, numStates, null, false);
                    backwardRNN.computeHiddenLayer(state);       //compute probability distribution

                    mBackward[curState] = backwardRNN.GetHiddenLayer();
                }
            });

            //Merge forward and backward hidden layers by element-wise averaging.
            SimpleLayer[] mergedHiddenLayer = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                mergedHiddenLayer[curState] = new SimpleLayer(L1);
                SimpleLayer cells           = mergedHiddenLayer[curState];
                SimpleLayer forwardCells    = mForward[curState];
                SimpleLayer backwardCells   = mBackward[curState];

                // SIMD loop over full Vector<double>-sized chunks.
                // BUG FIX: the step was Vector<float>.Count, which is twice
                // Vector<double>.Count, so every other chunk was skipped and left
                // as zero (the scalar tail started from the overshot index).
                // Step must match the element width used by the loads/stores.
                int i = 0;
                while (i <= forwardRNN.L1 - Vector <double> .Count)
                {
                    Vector <double> v1 = new Vector <double>(forwardCells.cellOutput, i);
                    Vector <double> v2 = new Vector <double>(backwardCells.cellOutput, i);
                    // vecConst2 is presumably a Vector<double> of 2.0 (matches the
                    // scalar tail's divide-by-2.0) — NOTE(review): confirm at its declaration.
                    Vector <double> v  = (v1 + v2) / vecConst2;

                    v.CopyTo(cells.cellOutput, i);

                    i += Vector <double> .Count;
                }

                // Scalar tail for the remaining (< Vector<double>.Count) elements.
                while (i < forwardRNN.L1)
                {
                    cells.cellOutput[i] = (forwardCells.cellOutput[i] + backwardCells.cellOutput[i]) / 2.0;
                    i++;
                }
            });

            //Calculate output layer: project merged hidden layer through Hidden2OutputWeight.
            Matrix <double> tmp_rawOutputLayer = new Matrix <double>(numStates, L2);

            SimpleLayer[] seqOutput = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                seqOutput[curState]     = new SimpleLayer(L2);
                SimpleLayer outputCells = seqOutput[curState];

                matrixXvectorADD(outputCells, mergedHiddenLayer[curState], Hidden2OutputWeight, L2, L1, 0);

                // Capture raw (pre-softmax) activations before normalizing in place.
                double[] tmp_vector = tmp_rawOutputLayer[curState];
                outputCells.cellOutput.CopyTo(tmp_vector, 0);

                //Activation on output layer
                SoftmaxLayer(outputCells);
            });

            outputHiddenLayer = mergedHiddenLayer;
            rawOutputLayer    = tmp_rawOutputLayer;

            return(seqOutput);
        }