Code example #1
0
        /// <summary>
        /// Applies a gradient-descent step by subtracting each gradient from the
        /// corresponding weight of this neuron, in place.
        /// </summary>
        /// <param name="grads">Gradient container whose <c>weigths</c> list must have the same count as this neuron's <c>Weigths</c>.</param>
        /// <exception cref="ArgumentException">Thrown when the gradient weight count does not match this neuron's weight count.</exception>
        public void SubtractGrads(NeuronValues grads)
        {
            if (grads.weigths.Count != Weigths.Count)
            {
                // A parameterless ArgumentException gives no diagnostic info; name the offending argument.
                throw new ArgumentException("Gradient weight count must match the neuron's weight count.", nameof(grads));
            }

            for (int i = 0; i < Weigths.Count; i++)
            {
                Weigths[i] -= grads.weigths[i];
            }
        }
Code example #2
0
File: LSTMCell.cs  Project: GGasset/NeuronalNetwork
        /// <summary>
        /// Applies a gradient-descent step: subtracts each gradient from the corresponding
        /// input weight, then subtracts the LSTM gate-weight gradients (output, store, forget)
        /// from the recurrent connection weights. Hides the base implementation to add the
        /// recurrent-weight updates.
        /// </summary>
        /// <param name="grads">Gradient container; its <c>weigths</c> list must match this cell's <c>Weigths</c> count, and its <c>lstmWeigths</c> must be populated.</param>
        /// <exception cref="ArgumentException">Thrown when the gradient weight count does not match this cell's weight count.</exception>
        public new void SubtractGrads(NeuronValues grads)
        {
            if (grads.weigths.Count != Weigths.Count)
            {
                // A parameterless ArgumentException gives no diagnostic info; name the offending argument.
                throw new ArgumentException("Gradient weight count must match the cell's weight count.", nameof(grads));
            }

            for (int i = 0; i < Weigths.Count; i++)
            {
                Weigths[i] -= grads.weigths[i];
            }

            recurrent.outputWeigth -= grads.lstmWeigths.outputWeigth;
            recurrent.storeWeigth  -= grads.lstmWeigths.storeWeigth;
            recurrent.forgetWeigth -= grads.lstmWeigths.forgetWeigth;
        }
Code example #3
0
        /// <summary>
        /// Copies weights (and, for recurrent/LSTM neurons, hidden/cell state) from
        /// <paramref name="neuronValues"/> into this layer's neurons, matching entries
        /// by position against <c>NeuronsInfo</c>. When called with <c>null</c>, builds
        /// one freshly-constructed <c>NeuronValues</c> per neuron and applies that
        /// (i.e. re-initializes the layer), via a single recursive call.
        /// </summary>
        /// <param name="neuronValues">Per-neuron values; extra entries beyond the layer size are ignored (only the first min(count, layer size) are applied).</param>
        /// <exception cref="NotImplementedException">Thrown when a neuron has an unrecognized <c>neuronType</c>.</exception>
        internal void SetWeigthsAndStates(List <NeuronValues> neuronValues = null)
        {
            if (neuronValues != null)
            {
                for (int i = 0; i < Math.Min(neuronValues.Count, NeuronsInfo.Count); i++)
                {
                    NeuronInfo   info   = NeuronsInfo[i];
                    NeuronValues values = neuronValues[i];
                    switch (info.neuronType)
                    {
                    case NeuronTypes.feedForward:
                        // Use the bound local `values` like every other case (was `neuronValues[i].weigths`).
                        Neurons[info.ArrayIndex].Weigths = values.weigths;
                        break;

                    case NeuronTypes.lSTM:
                        LSTMCells[info.ArrayIndex].Weigths = values.weigths;
                        LSTMCells[info.ArrayIndex].recurrent.storeWeigth  = values.lstmWeigths.storeWeigth;
                        LSTMCells[info.ArrayIndex].recurrent.forgetWeigth = values.lstmWeigths.forgetWeigth;
                        LSTMCells[info.ArrayIndex].recurrent.outputWeigth = values.lstmWeigths.outputWeigth;
                        LSTMCells[info.ArrayIndex].recurrent.hiddenState  = values.lstmWeigths.hiddenState;
                        LSTMCells[info.ArrayIndex].recurrent.cellState    = values.lstmWeigths.cellState;
                        break;

                    case NeuronTypes.recurrent:
                        RecurrentNeurons[info.ArrayIndex].recurrentWeigth = values.recurrentWeigth;
                        // lstmWeigths doubles as the hidden-state carrier for plain recurrent neurons; may be absent.
                        if (values.lstmWeigths != null)
                        {
                            RecurrentNeurons[info.ArrayIndex].hiddenState = values.lstmWeigths.hiddenState;
                        }
                        break;

                    default:
                        throw new NotImplementedException();
                    }
                }
            }
            else
            {
                // No values supplied: build defaults for every neuron, then apply them.
                neuronValues = new List <NeuronValues>();
                for (int i = 0; i < NeuronsInfo.Count; i++)
                {
                    neuronValues.Add(new NeuronValues(previousLayerLenght, NeuronsInfo[i].neuronType));
                }
                SetWeigthsAndStates(neuronValues);
            }
        }
Code example #4
0
        // TODO: add a function in Layer to sum NeuronValues, and at the temporal level build a grid of them.
        // TODO: check that endValues is compatible with temporalStates.
        /// <summary>
        /// Backpropagates through the layer over all timesteps. Computes per-neuron
        /// gradients (weights, recurrent/LSTM gate weights), the accumulated bias
        /// gradient, and the gradients w.r.t. the previous layer's activations.
        /// </summary>
        /// <param name="costs">Per-timestep cost derivatives, one array of length <c>Lenght</c> per timestep.</param>
        /// <param name="prevActivations">Per-timestep activations of the previous layer.</param>
        /// <param name="temporalStates">Per-timestep recorded neuron states (hidden/cell state etc.).</param>
        /// <param name="activation">Activation function used by the feed-forward/recurrent neurons.</param>
        /// <param name="prevActivationGrads">Out: per-timestep gradient w.r.t. the previous layer's activations.</param>
        /// <param name="biasGradient">Out: bias gradient summed over all neurons and timesteps.</param>
        /// <param name="gradValues">Out: per-timestep NeuronValues, each the sum of that timestep's per-neuron gradients.</param>
        /// <exception cref="Exception">Thrown when <paramref name="costs"/> or <paramref name="temporalStates"/> length does not match the layer length.</exception>
        /// <exception cref="NotImplementedException">Thrown for an unrecognized neuron type.</exception>
        public void GetGrads(List <double[]> costs, List <double[]> prevActivations, List <List <NeuronValues> > temporalStates, ActivationFunctions activation
                             , out List <double[]> prevActivationGrads, out double biasGradient, out List <NeuronValues> gradValues)
        {
            if (costs.Count != Lenght)
            {
                throw new Exception("incorrect layer length");
            }
            else if (temporalStates.Count != Lenght)
            {
                throw new Exception();
            }
            biasGradient = 0;
            List <List <NeuronValues> > Grads = new List <List <NeuronValues> >();

            for (int t = 0; t < costs.Count; t++)//GetInitialGrads
            {
                Grads.Add(new List <NeuronValues>());
                for (int i = 0; i < costs[t].Length; i++)
                {
                    int arrayIndex = NeuronsInfo[i].ArrayIndex;
                    switch (NeuronsInfo[i].neuronType)
                    {
                    case NeuronTypes.lSTM:
                        LSTMCells[arrayIndex].GetInitialGrads(costs[t][i], prevActivations[t], bias, temporalStates[t][i].lstmWeigths, out double cellGrad, out double outputWeigthGrad);
                        Grads[t].Add(new NeuronValues(NeuronTypes.lSTM));
                        Grads[t][i].lstmWeigths.outputWeigth = outputWeigthGrad;
                        Grads[t][i].lstmWeigths.cellState    = cellGrad;
                        break;

                    case NeuronTypes.feedForward:
                        Grads[t].Add(new NeuronValues(NeuronTypes.feedForward));
                        break;

                    case NeuronTypes.recurrent:
                        Grads[t].Add(new NeuronValues(NeuronTypes.recurrent));
                        break;

                    default:
                        throw new NotImplementedException();
                    }
                }
            }
            // Pre-size the output: one zeroed gradient array per timestep, matching the
            // previous layer's activation length. (Previously this list was left empty,
            // so the accumulation loop below never ran and the out param was always empty.)
            prevActivationGrads = new List <double[]>();
            for (int t = 0; t < costs.Count; t++)
            {
                prevActivationGrads.Add(new double[prevActivations[t].Length]);
            }
            // NOTE(review): temporalindex is never reset between timesteps, so for t < costs.Count - 1
            // it indexes past the per-timestep LSTM entries — looks wrong; confirm intended indexing.
            int temporalindex = 0;

            for (int t = costs.Count - 1; t >= 0; t--)//Get layer gradients
            {
                for (int i = 0; i < Lenght; i++)
                {
                    List <double> tempPrevActivationGrads;
                    List <double> tempWeigthGrads;
                    double        biasGrad;
                    int           arrayIndex = NeuronsInfo[i].ArrayIndex;
                    switch (NeuronsInfo[i].neuronType)
                    {
                    case NeuronTypes.feedForward:
                        Neurons[arrayIndex].GetGradients(costs[t][i], prevActivations[t], activation, bias, out tempWeigthGrads, out tempPrevActivationGrads, out biasGrad);
                        break;

                    case NeuronTypes.lSTM:
                        // Use arrayIndex (was LSTMCells[i]) for consistency with the other cases.
                        // NOTE(review): the same expression Grads[t-1][i].lstmWeigths.cellState is passed
                        // for two consecutive arguments — one of them is likely meant to be hiddenState;
                        // confirm against LSTMCell.GetGrads' signature.
                        LSTMCells[arrayIndex].GetGrads(prevActivations[t], bias, temporalStates[t][temporalindex].lstmWeigths
                                              , Grads[t][i].lstmWeigths.cellState, t == 0 ? 0 : Grads[t - 1][i].lstmWeigths.cellState, t == 0 ? 0 : Grads[t - 1][i].lstmWeigths.cellState, Grads[t][i].lstmWeigths.outputWeigth
                                              , out tempPrevActivationGrads, out tempWeigthGrads, out biasGrad, out LSTMCell.LSTMWeigths lSTMWeigthsGrads);
                        Grads[t][i].lstmWeigths = lSTMWeigthsGrads;
                        temporalindex++;
                        break;

                    case NeuronTypes.recurrent:
                        // Use arrayIndex (was RecurrentNeurons[i]) for consistency with the other cases.
                        RecurrentNeurons[arrayIndex].GetGradients(costs[t][i], bias, temporalStates[t][i].lstmWeigths.hiddenState, t == 0? 0: costs[t - 1][i], prevActivations[t], activation
                                                         , out tempPrevActivationGrads, out tempWeigthGrads, out double recWeigthGrad, out biasGrad);
                        Grads[t][i].recurrentWeigth = recWeigthGrad;
                        break;

                    default:
                        throw new NotImplementedException();
                    }
                    Grads[t][i].weigths = tempWeigthGrads;
                    biasGradient       += biasGrad;
                    // Accumulate this neuron's contribution to the previous layer's activation
                    // gradients for timestep t. (Previously iterated over the empty outer list
                    // and indexed tempPrevActivationGrads with the wrong variable.)
                    for (int preActI = 0; preActI < tempPrevActivationGrads.Count; preActI++)
                    {
                        prevActivationGrads[t][preActI] += tempPrevActivationGrads[preActI];
                    }
                }
            }

            gradValues = new List <NeuronValues>();
            for (int t = 0; t < Grads.Count; t++)
            {
                NeuronValues tempVal = new NeuronValues(Grads[t][0].NeuronType);
                for (int i = 0; i < Grads[t].Count; i++)
                {
                    tempVal += Grads[t][i];
                }
                gradValues.Add(tempVal);
            }
        }