Example #1
        public virtual void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
        {
            if (SparseFeatureSize > 0)
            {
                //Apply sparse features
                SparseFeature = sparseFeature;
                Parallel.For(0, LayerSize, parallelOption, b =>
                {
                    double score      = 0;
                    double[] vector_b = SparseWeights[b];
                    for (int i = 0; i < SparseFeature.Count; i++)
                    {
                        var entry = SparseFeature.GetEntry(i);
                        score    += entry.Value * vector_b[entry.Key];
                    }
                    cellOutput[b] += score;
                });
            }

            if (DenseFeatureSize > 0)
            {
                DenseFeature = denseFeature;
                RNNHelper.matrixXvectorADD(cellOutput, denseFeature, DenseWeights, LayerSize, DenseFeatureSize);
            }
        }
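
The sparse branch above walks only the non-zero entries of the input vector, so each output neuron pays for the number of active features rather than for the full SparseFeatureSize. A minimal, self-contained sketch of the same accumulation pattern follows; the list-of-pairs input and all names are illustrative stand-ins, not RNNSharp's SparseVector API.

        using System;
        using System.Collections.Generic;
        using System.Threading.Tasks;

        class SparseAccumulationSketch
        {
            static void Main()
            {
                int layerSize = 4, featureDim = 8;
                var rng = new Random(7);

                //weights[b][k]: weight from sparse input k to output neuron b
                double[][] weights = new double[layerSize][];
                for (int b = 0; b < layerSize; b++)
                {
                    weights[b] = new double[featureDim];
                    for (int k = 0; k < featureDim; k++)
                    {
                        weights[b][k] = rng.NextDouble() - 0.5;
                    }
                }

                //sparse input: only two of the eight features are active
                var sparse = new List<KeyValuePair<int, double>>
                {
                    new KeyValuePair<int, double>(2, 1.0),
                    new KeyValuePair<int, double>(5, 0.5)
                };

                double[] cellOutput = new double[layerSize];
                Parallel.For(0, layerSize, b =>
                {
                    double score = 0;
                    double[] vector_b = weights[b];
                    foreach (var entry in sparse)
                    {
                        //accumulate only over the non-zero inputs
                        score += entry.Value * vector_b[entry.Key];
                    }
                    //safe in parallel: each b is written by exactly one iteration
                    cellOutput[b] += score;
                });

                Console.WriteLine(string.Join(", ", cellOutput));
            }
        }
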
Example #2
        public virtual void LearnFeatureWeights(int numStates, int curState)
        {
            if (DenseFeatureSize > 0)
            {
                //Update dense feature weights
                Parallel.For(0, LayerSize, parallelOption, c =>
                {
                    double er2        = er[c];
                    double[] vector_c = DenseWeights[c];
                    for (int a = 0; a < DenseFeatureSize; a++)
                    {
                        double delta           = RNNHelper.NormalizeGradient(er2 * DenseFeature[a]);
                        double newLearningRate = RNNHelper.UpdateLearningRate(DenseWeightsLearningRate, c, a, delta);
                        vector_c[a]           += newLearningRate * delta;
                    }
                });
            }

            if (SparseFeatureSize > 0)
            {
                //Update sparse feature weights
                Parallel.For(0, LayerSize, parallelOption, c =>
                {
                    double er2        = er[c];
                    double[] vector_c = SparseWeights[c];
                    for (int a = 0; a < SparseFeature.Count; a++)
                    {
                        var entry              = SparseFeature.GetEntry(a);
                        int pos                = entry.Key;
                        double delta           = RNNHelper.NormalizeGradient(er2 * entry.Value);
                        double newLearningRate = RNNHelper.UpdateLearningRate(SparseWeightsLearningRate, c, pos, delta);
                        vector_c[pos]         += newLearningRate * delta;
                    }
                });
            }
        }
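
Both branches scale each clipped gradient by a rate that RNNHelper.UpdateLearningRate derives per weight coordinate (c, a). Its body is not shown here; one common scheme that fits this call shape is an AdaGrad-style rate that shrinks as squared gradients accumulate for that weight. The sketch below illustrates that idea under that assumption; the names and constants are hypothetical, not RNNSharp's implementation.

        using System;

        class PerWeightLearningRateSketch
        {
            const double BaseLearningRate = 0.1;

            //AdaGrad-style per-weight rate (hypothetical stand-in for
            //RNNHelper.UpdateLearningRate): accumulate this coordinate's
            //squared gradient and shrink the step as the sum grows.
            static double UpdateLearningRate(double[][] sumSquaredGrad, int row, int col, double delta)
            {
                sumSquaredGrad[row][col] += delta * delta;
                return BaseLearningRate / (1.0 + Math.Sqrt(sumSquaredGrad[row][col]));
            }

            static void Main()
            {
                double[][] weights = { new double[] { 0.1, -0.2 }, new double[] { 0.3, 0.0 } };
                double[][] cache = { new double[2], new double[2] };

                //apply the same gradient twice: the second step is smaller
                for (int step = 0; step < 2; step++)
                {
                    double delta = 0.5; //gradient for weights[0][1]
                    double lr = UpdateLearningRate(cache, 0, 1, delta);
                    weights[0][1] += lr * delta;
                    Console.WriteLine($"step {step}: lr={lr:F4}, w={weights[0][1]:F4}");
                }
            }
        }
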
Example #3
        // Forward pass: compute this layer's output for the current time step
        public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
        {
            //save the previous hidden state (hidden(t-1)) before computing hidden(t)
            cellOutput.CopyTo(previousCellOutput, 0);

            //Apply previous feature to current time
            //hidden(t-1) -> hidden(t)
            RNNHelper.matrixXvectorADD(cellOutput, previousCellOutput, BpttWeights, LayerSize, LayerSize);

            //Apply features on hidden layer
            SparseFeature = sparseFeature;
            DenseFeature  = denseFeature;

            if (SparseFeatureSize > 0)
            {
                //Apply sparse features
                Parallel.For(0, LayerSize, parallelOption, b =>
                {
                    double score      = 0;
                    double[] vector_b = SparseWeights[b];
                    for (int i = 0; i < SparseFeature.Count; i++)
                    {
                        var entry = SparseFeature.GetEntry(i);
                        score    += entry.Value * vector_b[entry.Key];
                    }
                    cellOutput[b] += score;
                });
            }

            if (DenseFeatureSize > 0)
            {
                //Apply dense features
                RNNHelper.matrixXvectorADD(cellOutput, DenseFeature, DenseWeights, LayerSize, DenseFeatureSize, false);
            }

            //activate layer
            activityLayer();
        }
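
Everything in this layer funnels through RNNHelper.matrixXvectorADD: hidden(t) collects BpttWeights * hidden(t-1) and then the dense contribution before the non-linearity. A minimal sketch of a matrix-vector accumulate of that shape, assuming dest[i] += sum_j m[i][j] * src[j] (the real helper may overwrite rather than add depending on its final flag, and likely uses SIMD):

        using System;
        using System.Threading.Tasks;

        class MatrixXVectorAddSketch
        {
            //dest[i] += sum over j of m[i][j] * src[j]; one task per output row
            static void MatrixXVectorAdd(double[] dest, double[] src, double[][] m, int rows, int cols)
            {
                Parallel.For(0, rows, i =>
                {
                    double sum = 0;
                    double[] row = m[i];
                    for (int j = 0; j < cols; j++)
                    {
                        sum += row[j] * src[j];
                    }
                    dest[i] += sum;
                });
            }

            static void Main()
            {
                double[][] m = { new double[] { 1, 2 }, new double[] { 3, 4 } };
                double[] src = { 1.0, 0.5 };
                double[] dest = { 0.1, 0.1 };

                MatrixXVectorAdd(dest, src, m, 2, 2);
                Console.WriteLine($"{dest[0]}, {dest[1]}"); //2.1, 5.1
            }
        }
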
Example #4
        // Forward pass: compute this layer's output for the current time step
        public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
        {
            //save the previous hidden state (hidden(t-1)) before computing hidden(t)
            cellOutput.CopyTo(previousCellOutput, 0);

            //Apply previous feature to current time
            //hidden(t-1) -> hidden(t)
            RNNHelper.matrixXvectorADD(cellOutput, previousCellOutput, BpttWeights, LayerSize, LayerSize);

            //Apply features on hidden layer
            SparseFeature = sparseFeature;
            DenseFeature = denseFeature;

            if (SparseFeatureSize > 0)
            {
                //Apply sparse features
                Parallel.For(0, LayerSize, parallelOption, b =>
                {
                    double score = 0;
                    double[] vector_b = SparseWeights[b];
                    for (int i = 0; i < SparseFeature.Count; i++)
                    {
                        var entry = SparseFeature.GetEntry(i);
                        score += entry.Value * vector_b[entry.Key];
                    }
                    cellOutput[b] += score;
                });
            }

            if (DenseFeatureSize > 0)
            {
                //Apply dense features
                RNNHelper.matrixXvectorADD(cellOutput, DenseFeature, DenseWeights, LayerSize, DenseFeatureSize, false);
            }

            //activate layer
            activityLayer(isTrain);
        }
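
The only change from the previous example is that activityLayer now receives the isTrain flag, which lets the activation step behave differently during training, typically by applying dropout only then. What follows is a hedged sketch of what such an overload might look like; the tanh choice, the Dropout constant, and the rng field are assumptions for illustration, not RNNSharp's code.

        using System;

        class ActivationWithDropoutSketch
        {
            const double Dropout = 0.3;
            static readonly Random rng = new Random(42);

            //squash each unit, then (train time only) zero a random subset
            static void ActivityLayer(double[] cellOutput, bool isTrain)
            {
                for (int i = 0; i < cellOutput.Length; i++)
                {
                    cellOutput[i] = Math.Tanh(cellOutput[i]);
                    if (isTrain && rng.NextDouble() < Dropout)
                    {
                        cellOutput[i] = 0; //dropped for this forward pass
                    }
                }
            }

            static void Main()
            {
                double[] output = { 0.2, -1.0, 0.7, 3.0 };
                ActivityLayer(output, isTrain: true);
                Console.WriteLine(string.Join(", ", output));
            }
        }
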
Example #5
        // Forward pass: compute this layer's output for the current time step
        public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
        {
            //inputs(t) -> hidden(t)
            //Get sparse and dense features and apply them to the hidden layer
            SparseFeature = sparseFeature;
            DenseFeature  = denseFeature;

            Parallel.For(0, LayerSize, parallelOption, j =>
            {
                LSTMCell cell_j = cell[j];

                //hidden(t-1) -> hidden(t)
                cell_j.previousCellState = cell_j.cellState;
                previousCellOutput[j]    = cellOutput[j];

                Vector4 vecCell_j = Vector4.Zero;

                if (SparseFeatureSize > 0)
                {
                    //Apply sparse weights
                    Vector4[] weights = input2hidden[j];
                    for (int i = 0; i < SparseFeature.Count; i++)
                    {
                        var entry  = SparseFeature.GetEntry(i);
                        vecCell_j += weights[entry.Key] * entry.Value;
                    }
                }

                //Apply dense weights
                if (DenseFeatureSize > 0)
                {
                    Vector4[] weights = feature2hidden[j];
                    for (int i = 0; i < DenseFeatureSize; i++)
                    {
                        vecCell_j += weights[i] * (float)DenseFeature[i];
                    }
                }

                //load the accumulated pre-activations into the four gate inputs
                cell_j.netIn        = vecCell_j.X;
                cell_j.netForget    = vecCell_j.Y;
                cell_j.netCellState = vecCell_j.Z;
                cell_j.netOut       = vecCell_j.W;

                double cell_j_previousCellOutput = previousCellOutput[j];

                //include internal connection multiplied by the previous cell state
                cell_j.netIn += cell_j.previousCellState * cell_j.wPeepholeIn + cell_j_previousCellOutput * cell_j.wCellIn;
                //squash input
                cell_j.yIn = Sigmoid(cell_j.netIn);

                //include internal connection multiplied by the previous cell state
                cell_j.netForget += cell_j.previousCellState * cell_j.wPeepholeForget + cell_j_previousCellOutput * cell_j.wCellForget;
                cell_j.yForget    = Sigmoid(cell_j.netForget);

                cell_j.netCellState += cell_j_previousCellOutput * cell_j.wCellState;
                cell_j.yCellState    = TanH(cell_j.netCellState);

                //cell state is equal to the previous cell state multiplied by the forget gate plus the cell input multiplied by the input gate
                cell_j.cellState = cell_j.yForget * cell_j.previousCellState + cell_j.yIn * cell_j.yCellState;

                //include the internal connection multiplied by the CURRENT cell state
                cell_j.netOut += cell_j.cellState * cell_j.wPeepholeOut + cell_j_previousCellOutput * cell_j.wCellOut;

                //squash output gate
                cell_j.yOut = Sigmoid(cell_j.netOut);

                cellOutput[j] = TanH(cell_j.cellState) * cell_j.yOut;

                cell[j] = cell_j;
            });
        }
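
Packing the four gate pre-activations into one Vector4 lets a single SIMD multiply-add per weight feed the input gate (X), forget gate (Y), cell candidate (Z), and output gate (W) at once. Per cell, the step above is the standard peephole LSTM; the scalar sketch below restates it with demo values (the names mirror the fields above, but this is an illustration of the equations, not the library code):

        using System;

        class LstmCellStepSketch
        {
            static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

            static void Main()
            {
                //pre-activations accumulated from sparse + dense inputs (vecCell_j above)
                double netIn = 0.3, netForget = -0.1, netCellState = 0.8, netOut = 0.2;
                //recurrent state from the previous time step
                double previousCellState = 0.5, previousOutput = 0.4;
                //peephole and recurrent weights (arbitrary demo values)
                double wPeepholeIn = 0.1, wPeepholeForget = 0.1, wPeepholeOut = 0.1;
                double wCellIn = 0.2, wCellForget = 0.2, wCellState = 0.2, wCellOut = 0.2;

                //input and forget gates peek at the PREVIOUS cell state
                double yIn = Sigmoid(netIn + previousCellState * wPeepholeIn + previousOutput * wCellIn);
                double yForget = Sigmoid(netForget + previousCellState * wPeepholeForget + previousOutput * wCellForget);
                //candidate state
                double yCellState = Math.Tanh(netCellState + previousOutput * wCellState);
                //new cell state: forget part of the old state, admit part of the candidate
                double cellState = yForget * previousCellState + yIn * yCellState;
                //output gate peeks at the CURRENT cell state
                double yOut = Sigmoid(netOut + cellState * wPeepholeOut + previousOutput * wCellOut);
                double output = Math.Tanh(cellState) * yOut;

                Console.WriteLine($"cellState={cellState:F4}, output={output:F4}");
            }
        }
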
Example #6
        // Forward pass: compute this layer's output for the current time step
        public override void computeLayer(SparseVector sparseFeature, double[] denseFeature, bool isTrain = true)
        {
            //inputs(t) -> hidden(t)
            //Get sparse and dense features and apply them to the hidden layer
            SparseFeature = sparseFeature;
            DenseFeature = denseFeature;

            Parallel.For(0, LayerSize, parallelOption, j =>
            {
                LSTMCell cell_j = cell[j];

                //hidden(t-1) -> hidden(t)
                cell_j.previousCellState = cell_j.cellState;
                previousCellOutput[j] = cellOutput[j];

                Vector4 vecCell_j = Vector4.Zero;

                if (SparseFeatureSize > 0)
                {
                    //Apply sparse weights
                    Vector4[] weights = input2hidden[j];
                    for (int i = 0; i < SparseFeature.Count; i++)
                    {
                        var entry = SparseFeature.GetEntry(i);
                        vecCell_j += weights[entry.Key] * entry.Value;
                    }
                }

                //Apply dense weights
                if (DenseFeatureSize > 0)
                {
                    Vector4[] weights = feature2hidden[j];
                    for (int i = 0; i < DenseFeatureSize; i++)
                    {
                        vecCell_j += weights[i] * (float)DenseFeature[i];
                    }
                }

                //load the accumulated pre-activations into the four gate inputs
                cell_j.netIn = vecCell_j.X;
                cell_j.netForget = vecCell_j.Y;
                cell_j.netCellState = vecCell_j.Z;
                cell_j.netOut = vecCell_j.W;

                double cell_j_previousCellOutput = previousCellOutput[j];

                //include internal connection multiplied by the previous cell state
                cell_j.netIn += cell_j.previousCellState * cell_j.wPeepholeIn + cell_j_previousCellOutput * cell_j.wCellIn;
                //squash input
                cell_j.yIn = Sigmoid(cell_j.netIn);

                //include internal connection multiplied by the previous cell state
                cell_j.netForget += cell_j.previousCellState * cell_j.wPeepholeForget + cell_j_previousCellOutput * cell_j.wCellForget;
                cell_j.yForget = Sigmoid(cell_j.netForget);

                cell_j.netCellState += cell_j_previousCellOutput * cell_j.wCellState;
                cell_j.yCellState = TanH(cell_j.netCellState);

                if (mask[j])
                {
                    //dropped unit: zero out its cell state during training
                    cell_j.cellState = 0;
                }
                else
                {
                    //cell state is equal to the previous cell state multiplied by the forget gate plus the cell input multiplied by the input gate
                    cell_j.cellState = cell_j.yForget * cell_j.previousCellState + cell_j.yIn * cell_j.yCellState;
                }

                if (!isTrain)
                {
                    //at inference, scale by (1 - Dropout) so the expected state
                    //magnitude matches training (standard, non-inverted dropout)
                    cell_j.cellState = cell_j.cellState * (1.0 - Dropout);
                }

                //include the internal connection multiplied by the CURRENT cell state
                cell_j.netOut += cell_j.cellState * cell_j.wPeepholeOut + cell_j_previousCellOutput * cell_j.wCellOut;

                //squash output gate
                cell_j.yOut = Sigmoid(cell_j.netOut);

                cellOutput[j] = TanH(cell_j.cellState) * cell_j.yOut;

                cell[j] = cell_j;
            });
        }
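
This variant adds dropout to the cell state: during training, mask[j] zeroes a randomly chosen subset of cells, and at inference every state is scaled by (1 - Dropout) so its expected magnitude matches what later layers saw during training. This is the standard (non-inverted) formulation; inverted dropout would instead scale the surviving units by 1 / (1 - Dropout) at train time and leave inference untouched. The example does not show where mask is populated; below is a small sketch of how such a per-unit mask might be sampled once per forward pass, with illustrative names:

        using System;

        class DropoutMaskSketch
        {
            static void Main()
            {
                int layerSize = 8;
                double dropout = 0.3; //probability of dropping each unit
                var rng = new Random(42);

                //true => this unit's cell state is zeroed for the current pass
                bool[] mask = new bool[layerSize];
                for (int j = 0; j < layerSize; j++)
                {
                    mask[j] = rng.NextDouble() < dropout;
                }

                Console.WriteLine(string.Join(" ", mask));
            }
        }
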