Example #1
        public SimpleLayer CloneHiddenLayer()
        {
            SimpleLayer m = new SimpleLayer(LayerSize);

            int j = 0;

            while (j < LayerSize - Vector<double>.Count)
            {
                Vector<double> vCellOutput = new Vector<double>(cellOutput, j);
                vCellOutput.CopyTo(m.cellOutput, j);
                Vector<double> vEr = new Vector<double>(er, j);
                vEr.CopyTo(m.er, j);

                j += Vector<double>.Count;
            }

            while (j < LayerSize)
            {
                m.cellOutput[j] = cellOutput[j];
                m.er[j]         = er[j];
                j++;
            }

            return m;
        }
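Note: the loop above copies Vector<double>.Count elements per iteration and leaves at most one full vector's worth of elements to the scalar tail loop. A minimal standalone sketch of the same SIMD copy-with-tail pattern (hypothetical names, not part of RNNSharp):

        using System;
        using System.Numerics;

        static class SimdCopySketch
        {
            // Copy src into dst in SIMD-width chunks, then finish the tail element-wise.
            static void VectorizedCopy(double[] src, double[] dst)
            {
                int j = 0;
                while (j <= src.Length - Vector<double>.Count)
                {
                    new Vector<double>(src, j).CopyTo(dst, j);
                    j += Vector<double>.Count;
                }
                while (j < src.Length)   // elements that don't fill a full vector
                {
                    dst[j] = src[j];
                    j++;
                }
            }

            static void Main()
            {
                var src = new double[] { 1, 2, 3, 4, 5, 6, 7 };
                var dst = new double[src.Length];
                VectorizedCopy(src, dst);
                Console.WriteLine(string.Join(", ", dst));   // 1, 2, 3, 4, 5, 6, 7
            }
        }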
Example #2
        public static SimpleLayer Load(BinaryReader br, LayerType layerType)
        {
            LayerConfig config = new LayerConfig();

            config.LayerSize = br.ReadInt32();
            config.LayerType = layerType;
            SimpleLayer layer = new SimpleLayer(config);

            layer.SparseFeatureSize = br.ReadInt32();
            layer.DenseFeatureSize  = br.ReadInt32();

            if (layer.SparseFeatureSize > 0)
            {
                Logger.WriteLine("Loading sparse feature weights...");
                layer.SparseWeights = RNNHelper.LoadMatrix(br);
            }

            if (layer.DenseFeatureSize > 0)
            {
                Logger.WriteLine("Loading dense feature weights...");
                layer.DenseWeights = RNNHelper.LoadMatrix(br);
            }

            return layer;
        }
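Note: the read order above implies the matching write order below. This is a hedged sketch only; RNNSharp's actual save routine may differ, and RNNHelper.SaveMatrix is assumed here:

        // Hypothetical counterpart to Load, mirroring the read order above.
        public static void Save(BinaryWriter bw, SimpleLayer layer)
        {
            bw.Write(layer.LayerSize);            // read back via br.ReadInt32()
            bw.Write(layer.SparseFeatureSize);
            bw.Write(layer.DenseFeatureSize);

            if (layer.SparseFeatureSize > 0)
            {
                RNNHelper.SaveMatrix(layer.SparseWeights, bw);   // assumed helper
            }

            if (layer.DenseFeatureSize > 0)
            {
                RNNHelper.SaveMatrix(layer.DenseWeights, bw);    // assumed helper
            }
        }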
Example #3
        public ForwardRNN(List<SimpleLayer> hiddenLayerList, int outputLayerSize)
        {
            HiddenLayerList = hiddenLayerList;

            OutputLayer = new SimpleLayer(outputLayerSize);
            OutputLayer.InitializeWeights(0, HiddenLayerList[HiddenLayerList.Count - 1].LayerSize);
        }
Example #4
        public virtual void ComputeLayerErr(SimpleLayer nextLayer, float[] destErrLayer, float[] srcErrLayer)
        {
            var sampledSoftmaxLayer = nextLayer as SampledSoftmaxLayer;

            if (sampledSoftmaxLayer != null)
            {
                RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, sampledSoftmaxLayer.DenseWeights, LayerSize,
                                              sampledSoftmaxLayer.negativeSampleWordList);
            }
            else
            {
                var lstmLayer = nextLayer as LSTMLayer;
                if (lstmLayer != null)
                {
                    for (var i = 0; i < LayerSize; i++)
                    {
                        var err = 0.0f;
                        for (var k = 0; k < nextLayer.LayerSize; k++)
                        {
                            err += srcErrLayer[k] * lstmLayer.wDenseOutputGate.weights[k][i];
                        }
                        destErrLayer[i] = err; // RNNHelper.NormalizeGradient(err);
                    }
                }
                else
                {
                    //error output->hidden for words from specific class
                    RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, nextLayer.DenseWeights, LayerSize,
                                                  nextLayer.LayerSize);
                }
            }
        }
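Note: both RNNHelper.matrixXvectorADDErr calls above compute the same transposed product that the LSTM branch spells out element by element: destErr = W^T * srcErr. A standalone sketch of that core computation (signature illustrative, not RNNSharp's actual helper):

        // destErr[i] = sum over k of srcErr[k] * weights[k][i]
        static void BackpropThroughWeights(float[] destErr, float[] srcErr,
                                           float[][] weights, int destSize, int srcSize)
        {
            for (int i = 0; i < destSize; i++)
            {
                float err = 0;
                for (int k = 0; k < srcSize; k++)
                {
                    err += srcErr[k] * weights[k][i];
                }
                destErr[i] = err;
            }
        }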
Example #5
        // Note: the misspelled "Wegiths" is the method's actual name in the RNNSharp source.
        public virtual SimpleLayer CreateLayerSharedWegiths()
        {
            SimpleLayer layer = new SimpleLayer(LayerConfig);

            ShallowCopyWeightTo(layer);
            return layer;
        }
Example #6
File: RNN.cs Project: My-Khan/RNNSharp
        public void matrixXvectorADD(SimpleLayer dest, SimpleCell[] srcvec, Matrix<double> srcmatrix, int DestSize, int SrcSize, int type)
        {
            if (type == 0)
            {
                //ac mod
                Parallel.For(0, DestSize, parallelOption, i =>
                {
                    double[] vector_i = srcmatrix[i];
                    double cellOutput = 0;
                    for (int j = 0; j < SrcSize; j++)
                    {
                        cellOutput += srcvec[j].cellOutput * vector_i[j];
                    }

                    dest.cellOutput[i] = cellOutput;
                });
            }
            else
            {
                Parallel.For(0, DestSize, parallelOption, i =>
                {
                    double er = 0;
                    for (int j = 0; j < SrcSize; j++)
                    {
                        er += srcvec[j].er * srcmatrix[j][i];
                    }

                    dest.er[i] = NormalizeGradient(er);
                });
            }
        }
Example #7
File: BiRNN.cs Project: dmit25/RNNSharp
        private SimpleLayer[] ComputeTopLayer(SimpleLayer[] lastLayer, out Matrix<double> rawOutputLayer, bool isTrain)
        {
            int numStates = lastLayer.Length;

            //Calculate output layer
            Matrix<double> tmp_rawOutputLayer = new Matrix<double>(numStates, OutputLayer.LayerSize);

            SimpleLayer[] seqFinalOutput = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                seqFinalOutput[curState] = new SimpleLayer(OutputLayer.LayerSize);
                SimpleLayer outputCells  = seqFinalOutput[curState];

                outputCells.DenseWeights             = OutputLayer.DenseWeights;
                outputCells.DenseWeightsLearningRate = OutputLayer.DenseWeightsLearningRate;
                outputCells.DenseFeatureSize         = OutputLayer.DenseFeatureSize;
                outputCells.computeLayer(null, lastLayer[curState].cellOutput, isTrain);
                outputCells.cellOutput.CopyTo(tmp_rawOutputLayer[curState], 0);
                Softmax(outputCells);
            });

            rawOutputLayer = tmp_rawOutputLayer;

            return seqFinalOutput;
        }
Example #8
        public static DropoutLayer Load(BinaryReader br, LayerType layerType)
        {
            DropoutLayer       dropoutLayer;
            DropoutLayerConfig config      = new DropoutLayerConfig();
            SimpleLayer        simpleLayer = SimpleLayer.Load(br, layerType);

            config.DropoutRatio = br.ReadSingle();
            config.LayerSize    = simpleLayer.LayerSize;

            dropoutLayer = new DropoutLayer(config);
            dropoutLayer.SparseFeatureSize = simpleLayer.SparseFeatureSize;
            dropoutLayer.DenseFeatureSize  = simpleLayer.DenseFeatureSize;

            if (dropoutLayer.SparseFeatureSize > 0)
            {
                dropoutLayer.SparseWeights = simpleLayer.SparseWeights;
            }

            if (dropoutLayer.DenseFeatureSize > 0)
            {
                dropoutLayer.DenseWeights = simpleLayer.DenseWeights;
            }

            return dropoutLayer;
        }
Example #9
        public SimpleLayer CloneHiddenLayer()
        {
            var m = new SimpleLayer(LayerConfig);

            var j = 0;

            while (j < LayerSize - Vector<float>.Count)
            {
                var vCellOutput = new Vector<float>(Cell, j);
                vCellOutput.CopyTo(m.Cell, j);
                var vEr = new Vector<float>(Err, j);
                vEr.CopyTo(m.Err, j);

                j += Vector<float>.Count;
            }

            while (j < LayerSize)
            {
                m.Cell[j] = Cell[j];
                m.Err[j]  = Err[j];
                j++;
            }

            return m;
        }
Example #10
        public BiRNN(List<SimpleLayer> s_forwardRNN, List<SimpleLayer> s_backwardRNN, SimpleLayer outputLayer)
        {
            forwardHiddenLayers  = s_forwardRNN;
            backwardHiddenLayers = s_backwardRNN;

            //Initialize output layer
            OutputLayer = outputLayer;
        }
Example #11
File: BiRNN.cs Project: dmit25/RNNSharp
        public BiRNN(List<SimpleLayer> s_forwardRNN, List<SimpleLayer> s_backwardRNN, int outputLayerSize)
        {
            forwardHiddenLayers = s_forwardRNN;
            backwardHiddenLayers = s_backwardRNN;

            //Initialize output layer
            OutputLayer = new SimpleLayer(outputLayerSize);
            OutputLayer.InitializeWeights(0, forwardHiddenLayers[forwardHiddenLayers.Count - 1].LayerSize);
        }
Example #12
        private SimpleLayer[] ComputeTopLayer(Sequence pSequence, SimpleLayer[] lastLayer, out Matrix<double> rawOutputLayer, bool isTraining, bool outputRawScore, out int[] seqBestOutput)
        {
            int numStates = lastLayer.Length;

            seqBestOutput = new int[numStates];

            //Calculate output layer
            Matrix<double> tmp_rawOutputLayer = null;

            if (outputRawScore == true)
            {
                tmp_rawOutputLayer = new Matrix<double>(numStates, OutputLayer.LayerSize);
            }

            List<int> labelSet = new List<int>();

            foreach (State state in pSequence.States)
            {
                labelSet.Add(state.Label);
            }

            //Initialize output layer or reallocate it
            if (seqFinalOutput == null || seqFinalOutput.Length < numStates)
            {
                seqFinalOutput = Array.CreateInstance(OutputLayer.GetType(), numStates);
                for (int i = 0; i < numStates; i++)
                {
                    seqFinalOutput.SetValue(Activator.CreateInstance(OutputLayer.GetType(), OutputLayer.LayerSize), i);
                    OutputLayer.ShallowCopyWeightTo((SimpleLayer)seqFinalOutput.GetValue(i));
                }
            }

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                State state                = pSequence.States[curState];
                var outputCells            = (SimpleLayer)seqFinalOutput.GetValue(curState);
                outputCells.LabelShortList = labelSet;
                outputCells.computeLayer(state.SparseFeature, lastLayer[curState].cellOutput, isTraining);

                if (outputRawScore == true)
                {
                    outputCells.cellOutput.CopyTo(tmp_rawOutputLayer[curState], 0);
                }
                outputCells.Softmax(isTraining);
            });
            SimpleLayer[] tmpSeqFinalOutput = new SimpleLayer[numStates];
            for (int i = 0; i < numStates; i++)
            {
                tmpSeqFinalOutput[i] = (SimpleLayer)seqFinalOutput.GetValue(i);
                seqBestOutput[i]     = tmpSeqFinalOutput[i].GetBestOutputIndex(isTraining);
            }

            rawOutputLayer = tmp_rawOutputLayer;

            return tmpSeqFinalOutput;
        }
Example #13
        public virtual void ShallowCopyWeightTo(SimpleLayer destLayer)
        {
            destLayer.DenseWeights             = DenseWeights;
            destLayer.DenseWeightsLearningRate = DenseWeightsLearningRate;
            destLayer.DenseFeatureSize         = DenseFeatureSize;

            destLayer.SparseWeights             = SparseWeights;
            destLayer.SparseWeightsLearningRate = SparseWeightsLearningRate;
            destLayer.SparseFeatureSize         = SparseFeatureSize;
        }
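Note: "shallow" means only references are copied, so every copy shares one set of weight matrices and an update made through any copy is visible through the others. A tiny standalone demonstration (plain arrays stand in for the weight matrices):

        using System;

        double[][] weights = { new[] { 0.1, 0.2 } };
        double[][] shared  = weights;        // like destLayer.DenseWeights = DenseWeights
        shared[0][0] = 9.9;
        Console.WriteLine(weights[0][0]);    // prints 9.9: the original sees the update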
Example #14
        public override void ShallowCopyWeightTo(SimpleLayer destLayer)
        {
            NCEOutputLayer layer = destLayer as NCEOutputLayer;

            layer.accFreqTable  = accFreqTable;
            layer.accTagIdTable = accTagIdTable;
            layer.vocab_size    = vocab_size;
            layer.accTotalFreq  = accTotalFreq;

            base.ShallowCopyWeightTo(layer);
        }
Example #15
        private SimpleLayer[] ComputeTopLayer(Sequence pSequence, SimpleLayer[] lastLayer,
                                              out Matrix<float> rawOutputLayer, bool isTraining, bool outputRawScore, out int[] seqBestOutput)
        {
            var numStates = lastLayer.Length;

            seqBestOutput = new int[numStates];

            //Calculate output layer
            Matrix<float> tmp_rawOutputLayer = null;

            if (outputRawScore)
            {
                tmp_rawOutputLayer = new Matrix<float>(numStates, OutputLayer.LayerSize);
            }

            var labelSet = pSequence.States.Select(state => state.Label).ToList();

            //Initialize output layer or reallocate it
            if (seqFinalOutput == null || seqFinalOutput.Length < numStates)
            {
                seqFinalOutput = Array.CreateInstance(OutputLayer.GetType(), numStates);
                for (var i = 0; i < numStates; i++)
                {
                    seqFinalOutput.SetValue(Activator.CreateInstance(OutputLayer.GetType(), OutputLayer.LayerConfig), i);
                    OutputLayer.ShallowCopyWeightTo((SimpleLayer)seqFinalOutput.GetValue(i));
                }
            }

            Parallel.For(0, numStates, parallelOption, curState =>
            {
                var state                  = pSequence.States[curState];
                var outputCells            = (SimpleLayer)seqFinalOutput.GetValue(curState);
                outputCells.LabelShortList = labelSet;
                outputCells.ForwardPass(state.SparseFeature, lastLayer[curState].Cell, isTraining);

                if (outputRawScore)
                {
                    outputCells.Cell.CopyTo(tmp_rawOutputLayer[curState], 0);
                }
                outputCells.Softmax(isTraining);
            });
            var tmpSeqFinalOutput = new SimpleLayer[numStates];

            for (var i = 0; i < numStates; i++)
            {
                tmpSeqFinalOutput[i] = (SimpleLayer)seqFinalOutput.GetValue(i);
                seqBestOutput[i]     = tmpSeqFinalOutput[i].GetBestOutputIndex(isTraining);
            }

            rawOutputLayer = tmp_rawOutputLayer;

            return tmpSeqFinalOutput;
        }
Example #16
        private void ResetBpttMem()
        {
            bptt_inputs = new SparseVector[MAX_RNN_HIST];

            bptt_hidden = new SimpleLayer[bptt + bptt_block + 1];
            for (var i = 0; i < bptt + bptt_block + 1; i++)
            {
                bptt_hidden[i] = new SimpleLayer(LayerConfig);
            }

            bptt_fea = new float[bptt + bptt_block + 2][];
        }
Example #17
        public void resetBpttMem()
        {
            bptt_inputs = new SparseVector[MAX_RNN_HIST];

            bptt_hidden = new SimpleLayer[bptt + bptt_block + 1];
            for (int i = 0; i < bptt + bptt_block + 1; i++)
            {
                bptt_hidden[i] = new SimpleLayer(LayerSize);
            }

            bptt_fea = new double[bptt + bptt_block + 2][];
        }
Example #18
        public override void LoadModel(string filename)
        {
            Logger.WriteLine("Loading SimpleRNN model: {0}", filename);

            StreamReader sr = new StreamReader(filename);
            BinaryReader br = new BinaryReader(sr.BaseStream);

            int modelType = br.ReadInt32();

            ModelDirection = (MODELDIRECTION)br.ReadInt32();

            int iflag = br.ReadInt32();

            if (iflag == 1)
            {
                IsCRFTraining = true;
            }
            else
            {
                IsCRFTraining = false;
            }

            //Create cells of each layer
            int layerSize = br.ReadInt32();

            HiddenLayerList = new List<SimpleLayer>();
            for (int i = 0; i < layerSize; i++)
            {
                SimpleLayer layer = null;
                if (modelType == 0)
                {
                    layer = new BPTTLayer();
                }
                else
                {
                    layer = new LSTMLayer();
                }

                layer.Load(br);
                HiddenLayerList.Add(layer);
            }

            OutputLayer = new SimpleLayer();
            OutputLayer.Load(br);

            if (iflag == 1)
            {
                Logger.WriteLine("Loading CRF tag trans weights...");
                CRFTagTransWeights = RNNHelper.LoadMatrix(br);
            }

            sr.Close();
        }
Example #19
        public SimpleLayer GetHiddenLayer()
        {
            SimpleLayer m = new SimpleLayer(LayerSize);

            for (int i = 0; i < LayerSize; i++)
            {
                m.cellOutput[i] = cellOutput[i];
                m.er[i]         = er[i];
            }

            return m;
        }
Example #20
        public override void ComputeLayerErr(SimpleLayer nextLayer)
        {
            base.ComputeLayerErr(nextLayer);
            //Apply drop out on error in hidden layer
            for (var i = 0; i < LayerSize; i++)
            {
                if (mask[i])
                {
                    Errs[i] = 0;
                }
            }
        }
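Note: the mask consulted above is populated during the forward pass. A hedged sketch of how such a boolean dropout mask is typically drawn (names and values are illustrative, not RNNSharp's exact code):

        using System;

        var rnd = new Random();
        int layerSize = 200;                  // illustrative
        double dropoutRatio = 0.5;            // illustrative
        var mask = new bool[layerSize];
        for (int i = 0; i < layerSize; i++)
        {
            mask[i] = rnd.NextDouble() < dropoutRatio;   // true => unit is dropped
        }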
Example #21
        public override void ComputeLayerErr(SimpleLayer nextLayer, double[] destErrLayer, double[] srcErrLayer)
        {
            //error output->hidden for words from specific class
            RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, nextLayer.DenseWeights, LayerSize, nextLayer.LayerSize);

            for (int i = 0; i < LayerSize; i++)
            {
                if (mask[i] == true)
                {
                    destErrLayer[i] = 0;
                }
            }
        }
Example #22
        public override SimpleLayer GetHiddenLayer()
        {
            SimpleLayer m = new SimpleLayer(L1);

            for (int i = 0; i < L1; i++)
            {
                m.cellOutput[i] = neuHidden[i].cellOutput;
                m.er[i]         = neuHidden[i].er;
                m.mask[i]       = neuHidden[i].mask;
            }

            return m;
        }
Example #23
        public virtual void ComputeLayerErr(SimpleLayer nextLayer)
        {
            NCEOutputLayer largeOutputLayer = nextLayer as NCEOutputLayer;

            if (largeOutputLayer != null)
            {
                RNNHelper.matrixXvectorADDErr(er, largeOutputLayer.er, largeOutputLayer.DenseWeights, LayerSize, largeOutputLayer.negativeSampleWordList);
            }
            else
            {
                //error output->hidden for words from specific class
                RNNHelper.matrixXvectorADDErr(er, nextLayer.er, nextLayer.DenseWeights, LayerSize, nextLayer.LayerSize);
            }
        }
Example #24
        public virtual void ShallowCopyWeightTo(SimpleLayer destLayer)
        {
            destLayer.DenseWeights             = DenseWeights;
            destLayer.DenseWeightsDelta        = DenseWeightsDelta;
            destLayer.DenseWeightsLearningRate = DenseWeightsLearningRate;
            destLayer.DenseFeatureSize         = DenseFeatureSize;

            destLayer.SparseWeights             = SparseWeights;
            destLayer.SparseWeightsDelta        = SparseWeightsDelta;
            destLayer.SparseWeightsLearningRate = SparseWeightsLearningRate;
            destLayer.SparseFeatureSize         = SparseFeatureSize;

            destLayer.InitializeInternalTrainingParameters();
        }
Example #25
        public override void ComputeLayerErr(SimpleLayer nextLayer)
        {
            //error output->hidden for words from specific class
            RNNHelper.matrixXvectorADDErr(er, nextLayer.er, nextLayer.DenseWeights, LayerSize, nextLayer.LayerSize);

            //Apply drop out on error in hidden layer
            for (int i = 0; i < LayerSize; i++)
            {
                if (mask[i] == true)
                {
                    er[i] = 0;
                }
            }
        }
Example #26
File: BiRNN.cs Project: My-Khan/RNNSharp
        public override int[] PredictSentenceCRF(Sequence pSequence, RunningMode runningMode)
        {
            //Reset the network
            int numStates = pSequence.States.Length;

            //Predict output
            SimpleLayer[]  mergedHiddenLayer = null;
            Matrix<double> rawOutputLayer    = null;

            SimpleLayer[] seqOutput = InnerDecode(pSequence, out mergedHiddenLayer, out rawOutputLayer);

            ForwardBackward(numStates, rawOutputLayer);

            if (runningMode != RunningMode.Test)
            {
                //Get the best result
                for (int i = 0; i < numStates; i++)
                {
                    logp += Math.Log10(CRFSeqOutput[i][pSequence.States[i].Label] + 0.0001);
                }
            }

            int[] predict = Viterbi(rawOutputLayer, numStates);

            if (runningMode == RunningMode.Train)
            {
                UpdateBigramTransition(pSequence);

                //Update hidden-output layer weights
                for (int curState = 0; curState < numStates; curState++)
                {
                    int         label          = pSequence.States[curState].Label;
                    SimpleLayer layer          = seqOutput[curState];
                    double[]    CRFOutputLayer = CRFSeqOutput[curState];

                    //For standard RNN
                    for (int c = 0; c < L2; c++)
                    {
                        layer.er[c] = -CRFOutputLayer[c];
                    }
                    layer.er[label] = 1 - CRFOutputLayer[label];
                }

                LearnTwoRNN(pSequence, mergedHiddenLayer, seqOutput);
            }

            return predict;
        }
Example #27
        public virtual void ComputeLayerErr(SimpleLayer nextLayer, float[] destErrLayer, float[] srcErrLayer)
        {
            var sampledSoftmaxLayer = nextLayer as SampledSoftmaxLayer;

            if (sampledSoftmaxLayer != null)
            {
                RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, sampledSoftmaxLayer.DenseWeights, LayerSize,
                                              sampledSoftmaxLayer.negativeSampleWordList);
            }
            else
            {
                var lstmLayer = nextLayer as LSTMLayer;
                if (lstmLayer != null)
                {
                    Array.Clear(destErrLayer, 0, destErrLayer.Length);
                    for (var k = 0; k < nextLayer.LayerSize; k++)
                    {
                        int     i       = 0;
                        float[] weights = lstmLayer.wDenseOutputGate.weights[k];
                        float   err     = srcErrLayer[k];

                        var moreItems = (LayerSize % Vector<float>.Count);
                        while (i < LayerSize - moreItems)
                        {
                            Vector<float> vecWeights = new Vector<float>(weights, i);
                            Vector<float> vecErrs    = new Vector<float>(destErrLayer, i);
                            vecErrs += err * vecWeights;

                            vecErrs.CopyTo(destErrLayer, i);
                            i += Vector<float>.Count;
                        }

                        while (i < LayerSize)
                        {
                            destErrLayer[i] += err * weights[i];
                            i++;
                        }
                    }
                }
                else
                {
                    //error output->hidden for words from specific class
                    RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, nextLayer.DenseWeights, LayerSize,
                                                  nextLayer.LayerSize);
                }
            }
        }
Example #28
        public virtual void ComputeLayerErr(SimpleLayer nextLayer, double[] destErrLayer, double[] srcErrLayer)
        {
            //error output->hidden for words from specific class
            RNNHelper.matrixXvectorADDErr(destErrLayer, srcErrLayer, nextLayer.DenseWeights, LayerSize, nextLayer.LayerSize);

            if (Dropout > 0)
            {
                //Apply drop out on error in hidden layer
                for (int i = 0; i < LayerSize; i++)
                {
                    if (mask[i] == true)
                    {
                        destErrLayer[i] = 0;
                    }
                }
            }
        }
Example #29
        public override void LearnNet(State state, int numStates, int curState)
        {
            int maxBptt = 0;

            for (maxBptt = 0; maxBptt < bptt + bptt_block - 1; maxBptt++)
            {
                if (bptt_inputs[maxBptt] == null)
                {
                    break;
                }
            }

            //Shift memory needed for bptt to next time step,
            //and save current hidden and feature layer nodes values for bptt
            SimpleLayer last_bptt_hidden = bptt_hidden[maxBptt];

            double[] last_bptt_fea = bptt_fea[maxBptt];
            for (int a = maxBptt; a > 0; a--)
            {
                bptt_inputs[a] = bptt_inputs[a - 1];
                bptt_hidden[a] = bptt_hidden[a - 1];
                bptt_fea[a]    = bptt_fea[a - 1];
            }

            bptt_inputs[0] = state.SparseData;
            bptt_hidden[0] = last_bptt_hidden;
            bptt_fea[0]    = last_bptt_fea;
            for (int i = 0; i < L1; i++)
            {
                last_bptt_hidden.cellOutput[i] = neuHidden.cellOutput[i];
                last_bptt_hidden.er[i]         = neuHidden.er[i];
                last_bptt_hidden.mask[i]       = neuHidden.mask[i];
            }

            for (int i = 0; i < DenseFeatureSize; i++)
            {
                last_bptt_fea[i] = neuFeatures[i];
            }

            // time to learn bptt
            if (curState > 0 && ((curState % bptt_block) == 0 || curState == (numStates - 1)))
            {
                learnBptt(state);
            }
        }
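Note: the shift block above is a rotate-by-one that recycles the oldest history slot's storage for the newest time step. A generic standalone sketch of the same pattern (hypothetical helper, not RNNSharp code):

        static void ShiftHistory<T>(T[] history, int used)
        {
            T recycled = history[used];        // oldest slot, about to be overwritten
            for (int a = used; a > 0; a--)
            {
                history[a] = history[a - 1];   // move each entry one step back in time
            }
            history[0] = recycled;             // reuse its storage for the newest entry
        }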
Example #30
        // forward process. output layer consists of tag value
        public override void computeHiddenLayer(State state, bool isTrain = true)
        {
            //keep last hidden layer and erase activations
            neuLastHidden = neuHidden;

            //hidden(t-1) -> hidden(t)
            neuHidden = new SimpleLayer(L1);
            matrixXvectorADD(neuHidden, neuLastHidden, HiddenBpttWeights, L1, L1, 0);

            //Apply feature values on hidden layer
            var sparse = state.SparseData;
            int n      = sparse.Count;

            Parallel.For(0, L1, parallelOption, b =>
            {
                //Sparse features:
                //inputs(t) -> hidden(t)
                //Get sparse feature and apply it into hidden layer

                double[] vector_b = Input2HiddenWeights[b];
                double cellOutput = 0;
                for (int i = 0; i < n; i++)
                {
                    var entry   = sparse.GetEntry(i);
                    cellOutput += entry.Value * vector_b[entry.Key];
                }


                //Dense features:
                //fea(t) -> hidden(t)
                if (DenseFeatureSize > 0)
                {
                    vector_b = Feature2HiddenWeights[b];
                    for (int j = 0; j < DenseFeatureSize; j++)
                    {
                        cellOutput += neuFeatures[j] * vector_b[j];
                    }
                }

                neuHidden.cellOutput[b] += cellOutput;
            });

            //activate 1      --sigmoid
            computeHiddenActivity(isTrain);
        }
Example #31
        public SimpleLayer GetHiddenLayer()
        {
            SimpleLayer m = new SimpleLayer(LayerSize);
            for (int i = 0; i < LayerSize; i++)
            {
                m.cellOutput[i] = cellOutput[i];
                m.er[i] = er[i];
                m.mask[i] = mask[i];
            }

            return m;
        }
Example #32
        public override void ComputeLayerErr(SimpleLayer nextLayer)
        {
            LSTMLayer layer = nextLayer as LSTMLayer;

            if (layer != null)
            {
                Parallel.For(0, LayerSize, parallelOption, i =>
                {
                    er[i] = 0.0;
                    if (mask[i] == false)
                    {
                        for (int k = 0; k < nextLayer.LayerSize; k++)
                        {
                            er[i] += layer.er[k] * layer.feature2hidden[k][i].W;
                        }
                    }
                });
            }
            else
            {
                base.ComputeLayerErr(nextLayer);
            }
        }
Example #33
File: RNN.cs Project: dmit25/RNNSharp
        public void Softmax(SimpleLayer outputLayer)
        {
            double sum = 0;
            for (int c = 0; c < outputLayer.LayerSize; c++)
            {
                double cellOutput = outputLayer.cellOutput[c];
                if (cellOutput > 50) cellOutput = 50;
                if (cellOutput < -50) cellOutput = -50;
                double val = Math.Exp(cellOutput);
                sum += val;
                outputLayer.cellOutput[c] = val;
            }
            int i = 0;
            Vector<double> vecSum = new Vector<double>(sum);
            while (i < outputLayer.LayerSize - Vector<double>.Count)
            {
                Vector<double> v = new Vector<double>(outputLayer.cellOutput, i);
                v /= vecSum;
                v.CopyTo(outputLayer.cellOutput, i);
                i += Vector<double>.Count;
            }

            while (i < outputLayer.LayerSize)
            {
                outputLayer.cellOutput[i] /= sum;
                i++;
            }
        }
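Note: clamping cellOutput to [-50, 50] keeps Math.Exp from overflowing. A common alternative is to subtract the maximum score before exponentiating, which avoids overflow without truncating large scores; standalone sketch, not RNNSharp code:

        using System;

        static class StableSoftmaxSketch
        {
            static void StableSoftmax(double[] x)
            {
                double max = double.NegativeInfinity;
                foreach (double v in x)
                {
                    if (v > max) max = v;
                }

                double sum = 0;
                for (int i = 0; i < x.Length; i++)
                {
                    x[i] = Math.Exp(x[i] - max);   // largest exponent becomes exp(0) = 1
                    sum += x[i];
                }

                for (int i = 0; i < x.Length; i++)
                {
                    x[i] /= sum;
                }
            }

            static void Main()
            {
                var scores = new[] { 1000.0, 1001.0, 1002.0 };   // plain exp would overflow
                StableSoftmax(scores);
                Console.WriteLine(string.Join(", ", scores));    // ~0.09, 0.24, 0.67
            }
        }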
Example #34
File: BiRNN.cs Project: dmit25/RNNSharp
        /// <summary>
        /// Compute the output of bottom layer
        /// </summary>
        /// <param name="pSequence"></param>
        /// <param name="forwardLayer"></param>
        /// <param name="backwardLayer"></param>
        /// <returns></returns>
        private SimpleLayer[] ComputeBottomLayer(Sequence pSequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            int numStates = pSequence.States.Length;
            SimpleLayer[] mForward = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.netReset(false);
                mForward = new SimpleLayer[numStates];
                for (int curState = 0; curState < numStates; curState++)
                {
                    State state = pSequence.States[curState];
                    SetInputLayer(state, curState, numStates, null);
                    forwardLayer.computeLayer(state.SparseData, state.DenseData.CopyTo());
                    mForward[curState] = forwardLayer.GetHiddenLayer();
                }
            },
             () =>
             {
                 //Computing backward RNN
                 backwardLayer.netReset(false);
                 mBackward = new SimpleLayer[numStates];
                 for (int curState = numStates - 1; curState >= 0; curState--)
                 {
                     State state = pSequence.States[curState];
                     SetInputLayer(state, curState, numStates, null, false);
                     backwardLayer.computeLayer(state.SparseData, state.DenseData.CopyTo());      //compute probability distribution

                     mBackward[curState] = backwardLayer.GetHiddenLayer();
                 }
             });

            SimpleLayer[] mergedLayer = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                State state = pSequence.States[curState];

                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerSize);
                mergedLayer[curState].SparseFeature = state.SparseData;
                mergedLayer[curState].DenseFeature = state.DenseData.CopyTo();

                SimpleLayer forwardCells = mForward[curState];
                SimpleLayer backwardCells = mBackward[curState];

                int i = 0;
                while (i < forwardLayer.LayerSize - Vector<double>.Count)
                {
                    Vector<double> v1 = new Vector<double>(forwardCells.cellOutput, i);
                    Vector<double> v2 = new Vector<double>(backwardCells.cellOutput, i);
                    Vector<double> v = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].cellOutput, i);

                    i += Vector<double>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].cellOutput[i] = (forwardCells.cellOutput[i] + backwardCells.cellOutput[i]) / 2.0;
                    i++;
                }
            });

            return mergedLayer;
        }
Example #35
File: BiRNN.cs Project: dmit25/RNNSharp
        /// <summary>
        /// Pass error from the last layer to the first layer
        /// </summary>
        /// <param name="pSequence"></param>
        /// <param name="seqFinalOutput"></param>
        /// <param name="isCRF"></param>
        /// <returns></returns>
        private void ComputeDeepErr(Sequence pSequence, SimpleLayer[] seqFinalOutput, out List<double[][]> fErrLayers, out List<double[][]> bErrLayers, bool isCRF = false)
        {
            int numStates = pSequence.States.Length;
            int numLayers = forwardHiddenLayers.Count;

            //Calculate output layer error
            for (int curState = 0; curState < numStates; curState++)
            {
                int label = pSequence.States[curState].Label;
                SimpleLayer layer = seqFinalOutput[curState];

                if (isCRF == false)
                {
                    for (int c = 0; c < layer.LayerSize; c++)
                    {
                        layer.er[c] = -layer.cellOutput[c];
                    }
                    layer.er[label] = 1.0 - layer.cellOutput[label];
                }
                else
                {
                    double[] CRFOutputLayer = CRFSeqOutput[curState];
                    for (int c = 0; c < layer.LayerSize; c++)
                    {
                        layer.er[c] = -CRFOutputLayer[c];
                    }
                    layer.er[label] = 1 - CRFOutputLayer[label];
                }
            }

            //Now we already have err in output layer, let's pass them back to other layers
            fErrLayers = new List<double[][]>();
            bErrLayers = new List<double[][]>();
            for (int i = 0; i < numLayers; i++)
            {
                fErrLayers.Add(null);
                bErrLayers.Add(null);
            }

            //Pass error from i+1 to i layer
            SimpleLayer forwardLayer = forwardHiddenLayers[numLayers - 1];
            SimpleLayer backwardLayer = backwardHiddenLayers[numLayers - 1];

            double[][] errLayer = new double[numStates][];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                errLayer[curState] = new double[forwardLayer.LayerSize];
                forwardLayer.ComputeLayerErr(seqFinalOutput[curState], errLayer[curState], seqFinalOutput[curState].er);
            });
            fErrLayers[numLayers - 1] = errLayer;
            bErrLayers[numLayers - 1] = errLayer;

            // Forward
            for (int i = numLayers - 2; i >= 0; i--)
            {
                forwardLayer = forwardHiddenLayers[i];
                errLayer = new double[numStates][];
                double[][] srcErrLayer = fErrLayers[i + 1];
                Parallel.For(0, numStates, parallelOption, curState =>
                {
                    int curState2 = numStates - curState - 1;

                    errLayer[curState2] = new double[forwardLayer.LayerSize];
                    forwardLayer.ComputeLayerErr(forwardHiddenLayers[i + 1], errLayer[curState2], srcErrLayer[curState2]);
                });

                fErrLayers[i] = errLayer;
            }

            // Backward
            for (int i = numLayers - 2; i >= 0; i--)
            {
                backwardLayer = backwardHiddenLayers[i];
                errLayer = new double[numStates][];
                double[][] srcErrLayer = bErrLayers[i + 1];
                Parallel.For(0, numStates, parallelOption, curState =>
                {
                    errLayer[curState] = new double[backwardLayer.LayerSize];
                    backwardLayer.ComputeLayerErr(backwardHiddenLayers[i + 1], errLayer[curState], srcErrLayer[curState]);
                });

                bErrLayers[i] = errLayer;
            }
        }
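Note: in both branches above, the target label receives 1 - p and every other class receives -p, i.e. the standard softmax cross-entropy gradient er = onehot(label) - p. Minimal standalone sketch:

        // er[c] = (c == label ? 1 : 0) - p[c]
        static void OutputError(double[] p, int label, double[] er)
        {
            for (int c = 0; c < p.Length; c++)
            {
                er[c] = -p[c];
            }
            er[label] += 1.0;
        }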
Example #36
File: BiRNN.cs Project: dmit25/RNNSharp
        private SimpleLayer[] ComputeMiddleLayers(SimpleLayer[] lastLayers, SimpleLayer forwardLayer, SimpleLayer backwardLayer)
        {
            int numStates = lastLayers.Length;

            SimpleLayer[] mForward = null;
            SimpleLayer[] mBackward = null;
            Parallel.Invoke(() =>
            {
                //Computing forward RNN
                forwardLayer.netReset(false);
                mForward = new SimpleLayer[lastLayers.Length];
                for (int curState = 0; curState < lastLayers.Length; curState++)
                {
                    forwardLayer.computeLayer(null, lastLayers[curState].cellOutput);
                    mForward[curState] = forwardLayer.GetHiddenLayer();
                }
            },
             () =>
             {
                 //Computing backward RNN
                 backwardLayer.netReset(false);
                 mBackward = new SimpleLayer[lastLayers.Length];
                 for (int curState = lastLayers.Length - 1; curState >= 0; curState--)
                 {
                     backwardLayer.computeLayer(null, lastLayers[curState].cellOutput);
                     mBackward[curState] = backwardLayer.GetHiddenLayer();
                 }
             });

            //Merge forward and backward
            SimpleLayer[] mergedLayer = new SimpleLayer[numStates];
            Parallel.For(0, numStates, parallelOption, curState =>
            {
                mergedLayer[curState] = new SimpleLayer(forwardLayer.LayerSize);
                mergedLayer[curState].SparseFeature = null;
                mergedLayer[curState].DenseFeature = lastLayers[curState].cellOutput;

                SimpleLayer forwardCells = mForward[curState];
                SimpleLayer backwardCells = mBackward[curState];

                int i = 0;
                while (i < forwardLayer.LayerSize - Vector<double>.Count)
                {
                    Vector<double> v1 = new Vector<double>(forwardCells.cellOutput, i);
                    Vector<double> v2 = new Vector<double>(backwardCells.cellOutput, i);
                    Vector<double> v = (v1 + v2) / vecConst2;

                    v.CopyTo(mergedLayer[curState].cellOutput, i);

                    i += Vector<double>.Count;
                }

                while (i < forwardLayer.LayerSize)
                {
                    mergedLayer[curState].cellOutput[i] = (forwardCells.cellOutput[i] + backwardCells.cellOutput[i]) / 2.0;
                    i++;
                }
            });

            return mergedLayer;
        }
Example #37
File: BiRNN.cs Project: dmit25/RNNSharp
        private void DeepLearningNet(Sequence pSequence, SimpleLayer[] seqOutput, List<double[][]> fErrLayers, 
            List<double[][]> bErrLayers, List<SimpleLayer[]> layerList)
        {
            int numStates = pSequence.States.Length;
            int numLayers = forwardHiddenLayers.Count;

            //Learning output layer
            Parallel.Invoke(() =>
            {
                for (int curState = 0; curState < numStates; curState++)
                {
                    seqOutput[curState].LearnFeatureWeights(numStates, curState);
                }
            },
            () =>
            {
                Parallel.For(0, numLayers, parallelOption, i =>
                {
                    Parallel.Invoke(() =>
                    {
                        SimpleLayer forwardLayer = forwardHiddenLayers[i];
                        forwardLayer.netReset(true);
                        for (int curState = 0; curState < numStates; curState++)
                        {
                            forwardLayer.computeLayer(layerList[i][curState].SparseFeature, layerList[i][curState].DenseFeature, true);
                            forwardLayer.er = fErrLayers[i][curState];
                            forwardLayer.LearnFeatureWeights(numStates, curState);
                        }
                    },
                    () =>
                    {
                        SimpleLayer backwardLayer = backwardHiddenLayers[i];
                        backwardLayer.netReset(true);
                        for (int curState = 0; curState < numStates; curState++)
                        {
                            int curState2 = numStates - curState - 1;
                            backwardLayer.computeLayer(layerList[i][curState2].SparseFeature, layerList[i][curState2].DenseFeature, true);
                            backwardLayer.er = bErrLayers[i][curState2];
                            backwardLayer.LearnFeatureWeights(numStates, curState);
                        }
                    });
                });
            });
        }
Example #38
File: BiRNN.cs Project: dmit25/RNNSharp
        public override void LoadModel(string filename)
        {
            Logger.WriteLine(Logger.Level.info, "Loading bi-directional model: {0}", filename);

            using (StreamReader sr = new StreamReader(filename))
            {
                BinaryReader br = new BinaryReader(sr.BaseStream);

                int modelType = br.ReadInt32();
                ModelDirection = (MODELDIRECTION)br.ReadInt32();

                int iflag = br.ReadInt32();
                if (iflag == 1)
                {
                    IsCRFTraining = true;
                }
                else
                {
                    IsCRFTraining = false;
                }

                int layerSize = br.ReadInt32();

                //Load forward layers from file
                forwardHiddenLayers = new List<SimpleLayer>();
                for (int i = 0; i < layerSize; i++)
                {
                    SimpleLayer layer = null;
                    if (modelType == 0)
                    {
                        Logger.WriteLine("Create BPTT hidden layer");
                        layer = new BPTTLayer();
                    }
                    else
                    {
                        Logger.WriteLine("Crate LSTM hidden layer");
                        layer = new LSTMLayer();
                    }

                    layer.Load(br);
                    forwardHiddenLayers.Add(layer);
                }

                //Load backward layers from file
                backwardHiddenLayers = new List<SimpleLayer>();
                for (int i = 0; i < layerSize; i++)
                {
                    SimpleLayer layer = null;
                    if (modelType == 0)
                    {
                        Logger.WriteLine("Create BPTT hidden layer");
                        layer = new BPTTLayer();
                    }
                    else
                    {
                        Logger.WriteLine("Crate LSTM hidden layer");
                        layer = new LSTMLayer();
                    }

                    layer.Load(br);
                    backwardHiddenLayers.Add(layer);
                }

                OutputLayer = new SimpleLayer();
                OutputLayer.Load(br);

                if (iflag == 1)
                {
                    Logger.WriteLine("Loading CRF tag trans weights...");
                    CRFTagTransWeights = RNNHelper.LoadMatrix(br);
                }
            }
        }
Example #39
        public virtual void ComputeLayerErr(SimpleLayer nextLayer)
        {
            //error output->hidden for words from specific class
            RNNHelper.matrixXvectorADDErr(er, nextLayer.er, nextLayer.DenseWeights, LayerSize, nextLayer.LayerSize);

            if (Dropout > 0)
            {
                //Apply drop out on error in hidden layer
                for (int i = 0; i < LayerSize; i++)
                {
                    if (mask[i] == true)
                    {
                        er[i] = 0;
                    }
                }
            }
        }