Example #1
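        /// <summary>
        ///     Compute the output of a middle layer: run the forward and backward RNN layers over
        ///     the previous layer's cells and average their hidden states for each time step.
        /// </summary>
        /// <param name="pSequence">The input sequence</param>
        /// <param name="lastLayers">Per-state output of the previous (lower) layer</param>
        /// <param name="forwardLayer">Layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Layer run right-to-left over the sequence</param>
        /// <returns>The merged per-state output of this layer</returns>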
        private SimpleLayer[] ComputeMiddleLayers(Sequence pSequence, SimpleLayer[] lastLayers, SimpleLayer forwardLayer,
                                                  SimpleLayer backwardLayer)
        {
            var numStates = lastLayers.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                // Compute the forward RNN, left to right
                forwardLayer.Reset(false);
                mForward = new SimpleLayer[lastLayers.Length];
                for (var curState = 0; curState < lastLayers.Length; curState++)
                {
                    var state = pSequence.States[curState];
                    forwardLayer.ForwardPass(state.SparseFeature, lastLayers[curState].Cell);
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
            () =>
            {
                // Compute the backward RNN, right to left
                backwardLayer.Reset(false);
                mBackward = new SimpleLayer[lastLayers.Length];
                for (var curState = lastLayers.Length - 1; curState >= 0; curState--)
                {
                    var state = pSequence.States[curState];
                    backwardLayer.ForwardPass(state.SparseFeature, lastLayers[curState].Cell);
                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            // Merge the forward and backward passes: average their cells for each state
            var mergedLayer = new SimpleLayer[numStates];

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                var state             = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerConfig)
                {
                    SparseFeature = state.SparseFeature,
                    DenseFeature  = lastLayers[curState].Cell
                };

                var forwardCells  = mForward[curState];
                var backwardCells = mBackward[curState];

                var i = 0;

                // SIMD path: average the forward and backward cells Vector<float>.Count
                // elements at a time (vecConst2 plays the role of the constant 2 here,
                // mirroring the scalar branch below).
                while (i < forwardLayer.LayerSize - Vector<float>.Count)
                {
                    var v1 = new Vector<float>(forwardCells.Cell, i);
                    var v2 = new Vector<float>(backwardCells.Cell, i);
                    var v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].Cell, i);

                    i += Vector<float>.Count;
                }

                // Scalar path for the remaining tail elements.
                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].Cell[i] =
                        (float)((forwardCells.Cell[i] + backwardCells.Cell[i]) / 2.0);
                    i++;
                }
            });

            return mergedLayer;
        }
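
Both examples merge the two passes the same way: an element-wise average of the forward and backward cell arrays, vectorized with System.Numerics for the bulk of the layer and scalar for the tail. The sketch below isolates just that step, assuming vecConst2 is a Vector<float> holding 2.0f (the scalar branch divides by 2.0, so the operation is an average either way); the class and method names here are illustrative, not RNNSharp APIs.

// Minimal, standalone sketch of the merge step above: average two equally sized
// float arrays (the forward and backward hidden states) with System.Numerics SIMD,
// falling back to scalar arithmetic for the tail. Names are illustrative only.
using System;
using System.Numerics;

static class MergeSketch
{
    public static float[] AverageCells(float[] forward, float[] backward)
    {
        var merged = new float[forward.Length];
        var two = new Vector<float>(2.0f);   // plays the role of vecConst2 above

        var i = 0;
        // SIMD path: handle Vector<float>.Count elements per iteration.
        while (i <= forward.Length - Vector<float>.Count)
        {
            var v1 = new Vector<float>(forward, i);
            var v2 = new Vector<float>(backward, i);
            ((v1 + v2) / two).CopyTo(merged, i);
            i += Vector<float>.Count;
        }

        // Scalar tail for whatever does not fill a full vector.
        for (; i < forward.Length; i++)
        {
            merged[i] = (forward[i] + backward[i]) / 2.0f;
        }

        return merged;
    }

    static void Main()
    {
        var f = new float[] { 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f };
        var b = new float[] { 9f, 8f, 7f, 6f, 5f, 4f, 3f, 2f, 1f };
        Console.WriteLine(string.Join(", ", AverageCells(f, b)));   // prints all 5s
    }
}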
Example #2
        /// <summary>
        ///     Compute the output of the bottom layer: run the forward and backward RNN layers over
        ///     the sequence's own sparse and dense features and average their hidden states for each time step.
        /// </summary>
        /// <param name="pSequence">The input sequence</param>
        /// <param name="forwardLayer">Layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Layer run right-to-left over the sequence</param>
        /// <returns>The merged per-state output of the bottom layer</returns>
        private SimpleLayer[] ComputeBottomLayer(Sequence pSequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            var numStates = pSequence.States.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                // Compute the forward RNN, left to right
                forwardLayer.Reset(false);
                mForward = new SimpleLayer[numStates];
                for (var curState = 0; curState < numStates; curState++)
                {
                    var state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null);
                    forwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
            () =>
            {
                // Compute the backward RNN, right to left
                backwardLayer.Reset(false);
                mBackward = new SimpleLayer[numStates];
                for (var curState = numStates - 1; curState >= 0; curState--)
                {
                    var state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null, false);
                    backwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            var mergedLayer = new SimpleLayer[numStates];

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                var state             = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerConfig)
                {
                    SparseFeature = state.SparseFeature,
                    DenseFeature  = state.DenseFeature.CopyTo()
                };

                var forwardCells  = mForward[curState];
                var backwardCells = mBackward[curState];

                var i = 0;

                // SIMD path: average the forward and backward cells Vector<float>.Count
                // elements at a time (vecConst2 plays the role of the constant 2 here,
                // mirroring the scalar branch below).
                while (i < forwardLayer.LayerSize - Vector<float>.Count)
                {
                    var v1 = new Vector<float>(forwardCells.Cell, i);
                    var v2 = new Vector<float>(backwardCells.Cell, i);
                    var v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].Cell, i);

                    i += Vector<float>.Count;
                }

                // Scalar path for the remaining tail elements.
                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].Cell[i] =
                        (float)((forwardCells.Cell[i] + backwardCells.Cell[i]) / 2.0);
                    i++;
                }
            });

            return mergedLayer;
        }
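
For context, a caller would typically chain the two methods: the bottom layer consumes the sequence's raw features, and each higher layer consumes the merged output of the layer below. The sketch below assumes the surrounding class keeps its per-depth layers in lists named forwardHiddenLayers and backwardHiddenLayers; those fields and the method name ComputeLayers are illustrative assumptions, not confirmed RNNSharp members.

        // Hypothetical driver: stack the bottom layer and the middle layers into one
        // forward pass over the whole network. forwardHiddenLayers/backwardHiddenLayers
        // are assumed to be List<SimpleLayer> fields, one pair per depth.
        private SimpleLayer[] ComputeLayers(Sequence pSequence)
        {
            // Bottom layer: driven directly by the sequence's sparse/dense features.
            var layerOutputs = ComputeBottomLayer(pSequence, forwardHiddenLayers[0], backwardHiddenLayers[0]);

            // Middle layers: each consumes the merged cells of the layer below.
            for (var depth = 1; depth < forwardHiddenLayers.Count; depth++)
            {
                layerOutputs = ComputeMiddleLayers(pSequence, layerOutputs,
                                                   forwardHiddenLayers[depth], backwardHiddenLayers[depth]);
            }

            return layerOutputs;
        }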