Example #1
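        /// <summary>
        /// Compute the output of a middle (stacked) bidirectional layer from the outputs of the layer below
        /// </summary>
        /// <param name="pSequence">Input sequence providing per-state sparse features</param>
        /// <param name="lastLayers">Per-state outputs of the previous layer, used as the dense input</param>
        /// <param name="forwardLayer">Hidden layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Hidden layer run right-to-left over the sequence</param>
        /// <returns>Per-state outputs with forward and backward activations averaged</returns>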
        private SimpleLayer[] ComputeMiddleLayers(Sequence pSequence, SimpleLayer[] lastLayers, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            int numStates = lastLayers.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.netReset(false);
                mForward = new SimpleLayer[lastLayers.Length];
                for (int curState = 0; curState < lastLayers.Length; curState++)
                {
                    State state = pSequence.States[curState];
                    forwardLayer.computeLayer(state.SparseFeature, lastLayers[curState].cellOutput);
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
                            () =>
            {
                //Computing backward RNN
                backwardLayer.netReset(false);
                mBackward = new SimpleLayer[lastLayers.Length];
                for (int curState = lastLayers.Length - 1; curState >= 0; curState--)
                {
                    State state = pSequence.States[curState];
                    backwardLayer.computeLayer(state.SparseFeature, lastLayers[curState].cellOutput);
                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            //Merge forward and backward
            SimpleLayer[] mergedLayer = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                State state           = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerSize);
                mergedLayer[curState].SparseFeature = state.SparseFeature;
                mergedLayer[curState].DenseFeature  = lastLayers[curState].cellOutput;

                SimpleLayer forwardCells  = mForward[curState];
                SimpleLayer backwardCells = mBackward[curState];

                int i = 0;
                while (i < forwardLayer.LayerSize - Vector<double>.Count)
                {
                    Vector<double> v1 = new Vector<double>(forwardCells.cellOutput, i);
                    Vector<double> v2 = new Vector<double>(backwardCells.cellOutput, i);
                    Vector<double> v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].cellOutput, i);

                    i += Vector<double>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].cellOutput[i] = (forwardCells.cellOutput[i] + backwardCells.cellOutput[i]) / 2.0;
                    i++;
                }
            });

            return mergedLayer;
        }
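The merge step above averages the forward and backward activations element-wise, vectorizing the bulk of each array with System.Numerics.Vector<T> and finishing the remainder with a scalar loop. The standalone sketch below isolates that pattern; the MergeAverage name and the demo arrays are illustrative and not part of RNNSharp.

using System;
using System.Numerics;

static class MergeDemo
{
    // Element-wise average of two equally sized arrays: vectorized main loop
    // plus a scalar tail for the elements that do not fill a whole vector.
    static float[] MergeAverage(float[] forward, float[] backward)
    {
        var merged = new float[forward.Length];
        var two = new Vector<float>(2.0f);   // plays the role of vecConst2 above

        int i = 0;
        for (; i <= forward.Length - Vector<float>.Count; i += Vector<float>.Count)
        {
            var v1 = new Vector<float>(forward, i);
            var v2 = new Vector<float>(backward, i);
            ((v1 + v2) / two).CopyTo(merged, i);
        }

        for (; i < forward.Length; i++)
        {
            merged[i] = (forward[i] + backward[i]) / 2.0f;
        }

        return merged;
    }

    static void Main()
    {
        var f = new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
        var b = new float[] { 9, 8, 7, 6, 5, 4, 3, 2, 1 };
        Console.WriteLine(string.Join(", ", MergeAverage(f, b)));   // prints all 5s
    }
}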
Example #2
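        /// <summary>
        /// Compute the output of a middle (stacked) bidirectional layer from the outputs of the layer below
        /// </summary>
        /// <param name="pSequence">Input sequence providing per-state sparse features</param>
        /// <param name="lastLayers">Per-state outputs of the previous layer, used as the dense input</param>
        /// <param name="forwardLayer">Hidden layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Hidden layer run right-to-left over the sequence</param>
        /// <returns>Per-state outputs with forward and backward activations averaged</returns>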
        private SimpleLayer[] ComputeMiddleLayers(Sequence pSequence, SimpleLayer[] lastLayers, SimpleLayer forwardLayer,
                                                  SimpleLayer backwardLayer)
        {
            var numStates = lastLayers.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.Reset(false);
                mForward = new SimpleLayer[lastLayers.Length];
                for (var curState = 0; curState < lastLayers.Length; curState++)
                {
                    var state = pSequence.States[curState];
                    forwardLayer.ForwardPass(state.SparseFeature, lastLayers[curState].Cell);
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
                            () =>
            {
                //Computing backward RNN
                backwardLayer.Reset(false);
                mBackward = new SimpleLayer[lastLayers.Length];
                for (var curState = lastLayers.Length - 1; curState >= 0; curState--)
                {
                    var state = pSequence.States[curState];
                    backwardLayer.ForwardPass(state.SparseFeature, lastLayers[curState].Cell);
                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            //Merge forward and backward
            var mergedLayer = new SimpleLayer[numStates];

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                var state             = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerConfig)
                {
                    SparseFeature = state.SparseFeature,
                    DenseFeature  = lastLayers[curState].Cell
                };

                var forwardCells  = mForward[curState];
                var backwardCells = mBackward[curState];

                var i = 0;
                while (i < forwardLayer.LayerSize - Vector<float>.Count)
                {
                    var v1 = new Vector<float>(forwardCells.Cell, i);
                    var v2 = new Vector<float>(backwardCells.Cell, i);
                    var v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].Cell, i);

                    i += Vector<float>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].Cell[i] =
                        (float)((forwardCells.Cell[i] + backwardCells.Cell[i]) / 2.0);
                    i++;
                }
            });

            return mergedLayer;
        }
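ComputeMiddleLayers consumes the previous layer's outputs as its dense input, while ComputeBottomLayer (Examples #3 and #4 below) reads features directly from the sequence, so a deep bidirectional network can be evaluated by chaining the two. The sketch below shows that chaining under the assumption that forwardHiddenLayers and backwardHiddenLayers are parallel per-layer lists; the real fields of the containing RNNSharp class may differ.

        // Hedged sketch: run the bottom layer on the raw sequence features, then feed
        // each merged result into the next stacked layer. The two layer lists are an
        // assumption about how the containing class stores its hidden layers.
        private SimpleLayer[] ComputeAllLayers(Sequence pSequence)
        {
            var output = ComputeBottomLayer(pSequence,
                forwardHiddenLayers[0], backwardHiddenLayers[0]);

            for (var layer = 1; layer < forwardHiddenLayers.Count; layer++)
            {
                output = ComputeMiddleLayers(pSequence, output,
                    forwardHiddenLayers[layer], backwardHiddenLayers[layer]);
            }

            return output;
        }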
Example #3
        /// <summary>
        /// Compute the output of the bottom bidirectional layer from the sequence features
        /// </summary>
        /// <param name="pSequence">Input sequence providing per-state sparse and dense features</param>
        /// <param name="forwardLayer">Hidden layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Hidden layer run right-to-left over the sequence</param>
        /// <returns>Per-state outputs with forward and backward activations averaged</returns>
        private SimpleLayer[] ComputeBottomLayer(Sequence pSequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            int numStates = pSequence.States.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.netReset(false);
                mForward = new SimpleLayer[numStates];
                for (int curState = 0; curState < numStates; curState++)
                {
                    State state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null);
                    forwardLayer.computeLayer(state.SparseFeature, state.DenseFeature.CopyTo());
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
                            () =>
            {
                //Computing backward RNN
                backwardLayer.netReset(false);
                mBackward = new SimpleLayer[numStates];
                for (int curState = numStates - 1; curState >= 0; curState--)
                {
                    State state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null, false);
                    backwardLayer.computeLayer(state.SparseFeature, state.DenseFeature.CopyTo());       //compute probability distribution

                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            SimpleLayer[] mergedLayer = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                State state           = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerSize);
                mergedLayer[curState].SparseFeature = state.SparseFeature;
                mergedLayer[curState].DenseFeature  = state.DenseFeature.CopyTo();

                SimpleLayer forwardCells  = mForward[curState];
                SimpleLayer backwardCells = mBackward[curState];

                int i = 0;
                while (i < forwardLayer.LayerSize - Vector<double>.Count)
                {
                    Vector<double> v1 = new Vector<double>(forwardCells.cellOutput, i);
                    Vector<double> v2 = new Vector<double>(backwardCells.cellOutput, i);
                    Vector<double> v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].cellOutput, i);

                    i += Vector<double>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].cellOutput[i] = (forwardCells.cellOutput[i] + backwardCells.cellOutput[i]) / 2.0;
                    i++;
                }
            });

            return mergedLayer;
        }
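Each directional pass reuses a single layer object for every time step, so the per-step activations must be copied out before the next step overwrites them; that is what CloneHiddenLayer does in the loops above. The toy program below shows the same reuse-then-snapshot pattern in isolation; ToyLayer, Step and Snapshot are hypothetical stand-ins for SimpleLayer, the forward-pass call and CloneHiddenLayer.

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for SimpleLayer: a single output buffer that is
// overwritten in place on every time step.
class ToyLayer
{
    public float[] Output = new float[4];
    private float carry;

    public void Reset() => carry = 0f;

    // Stand-in for the forward-pass call: mix the input with the previous
    // step's state and overwrite Output.
    public void Step(float input)
    {
        carry = 0.5f * carry + input;
        for (int i = 0; i < Output.Length; i++)
        {
            Output[i] = carry + i;
        }
    }

    // Stand-in for CloneHiddenLayer: snapshot the buffer before the next
    // Step call overwrites it.
    public float[] Snapshot() => (float[])Output.Clone();
}

static class CloneDemo
{
    static void Main()
    {
        var layer = new ToyLayer();
        layer.Reset();

        var perStep = new List<float[]>();
        foreach (var x in new[] { 1f, 2f, 3f })
        {
            layer.Step(x);
            perStep.Add(layer.Snapshot());   // without the copy, every entry
                                             // would alias the last-written buffer
        }

        Console.WriteLine(string.Join(" | ",
            perStep.Select(step => string.Join(",", step))));
    }
}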
Example #4
        /// <summary>
        ///     Compute the output of the bottom bidirectional layer from the sequence features
        /// </summary>
        /// <param name="pSequence">Input sequence providing per-state sparse and dense features</param>
        /// <param name="forwardLayer">Hidden layer run left-to-right over the sequence</param>
        /// <param name="backwardLayer">Hidden layer run right-to-left over the sequence</param>
        /// <returns>Per-state outputs with forward and backward activations averaged</returns>
        private SimpleLayer[] ComputeBottomLayer(Sequence pSequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            var numStates = pSequence.States.Length;

            SimpleLayer[] mForward  = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.Reset(false);
                mForward = new SimpleLayer[numStates];
                for (var curState = 0; curState < numStates; curState++)
                {
                    var state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null);
                    forwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                    mForward[curState] = forwardLayer.CloneHiddenLayer();
                }
            },
                            () =>
            {
                //Computing backward RNN
                backwardLayer.Reset(false);
                mBackward = new SimpleLayer[numStates];
                for (var curState = numStates - 1; curState >= 0; curState--)
                {
                    var state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null, false);
                    backwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                    //compute probability distribution

                    mBackward[curState] = backwardLayer.CloneHiddenLayer();
                }
            });

            var mergedLayer = new SimpleLayer[numStates];

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                var state             = pSequence.States[curState];
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerConfig)
                {
                    SparseFeature = state.SparseFeature,
                    DenseFeature  = state.DenseFeature.CopyTo()
                };

                var forwardCells  = mForward[curState];
                var backwardCells = mBackward[curState];

                var i = 0;
                while (i < forwardLayer.LayerSize - Vector<float>.Count)
                {
                    var v1 = new Vector<float>(forwardCells.Cell, i);
                    var v2 = new Vector<float>(backwardCells.Cell, i);
                    var v  = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].Cell, i);

                    i += Vector<float>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].Cell[i] =
                        (float)((forwardCells.Cell[i] + backwardCells.Cell[i]) / 2.0);
                    i++;
                }
            });

            return mergedLayer;
        }
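In all four examples the forward and backward passes only read shared inputs and write to their own output arrays, and Parallel.Invoke does not return until both actions have completed, which is why mForward and mBackward can be read safely in the merge step. A stripped-down version of that pattern is sketched below; the array names are illustrative.

using System;
using System.Threading.Tasks;

static class InvokeDemo
{
    static void Main()
    {
        var input = new[] { 1.0f, 2.0f, 3.0f, 4.0f };
        float[] forward = null;
        float[] backward = null;

        // Each action fills its own array; neither touches the other's output,
        // and Parallel.Invoke blocks until both actions are done.
        Parallel.Invoke(
            () =>
            {
                forward = new float[input.Length];
                for (var t = 0; t < input.Length; t++)
                    forward[t] = input[t] * 10f;            // left-to-right pass
            },
            () =>
            {
                backward = new float[input.Length];
                for (var t = input.Length - 1; t >= 0; t--)
                    backward[t] = input[t] * 100f;          // right-to-left pass
            });

        // Both arrays are fully populated here.
        for (var t = 0; t < input.Length; t++)
            Console.WriteLine($"{forward[t]} {backward[t]}");
    }
}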