Example #1
        public FeedForwardController Clone()
        {
            HiddenLayer newHiddenLayer = HiddenLayer.Clone();
            OutputLayer newOutputLayer = OutputLayer.Clone();

            return new FeedForwardController(newHiddenLayer, newOutputLayer);
        }
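Clone() produces a deep copy: each layer is cloned and the copies are handed to the private constructor shown in Example #4 below, so the copy shares no mutable state with the original (assuming the layer Clone() methods themselves deep-copy their weights). A minimal, self-contained sketch of why that matters; the SimpleLayer type here is hypothetical, not from the source:

    using System;

    // Hypothetical stand-in for HiddenLayer/OutputLayer, illustration only.
    class SimpleLayer
    {
        public double[] Weights;
        public SimpleLayer(double[] weights) { Weights = weights; }

        // Deep copy: the clone gets its own weight array.
        public SimpleLayer Clone() => new SimpleLayer((double[])Weights.Clone());
    }

    class CloneDemo
    {
        static void Main()
        {
            var original = new SimpleLayer(new[] { 0.1, 0.2 });
            var copy     = original.Clone();

            copy.Weights[0] = 99.0;                 // mutate the clone only
            Console.WriteLine(original.Weights[0]); // still prints 0.1
        }
    }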
Example #2
        public void ForwardPropagation(HiddenLayer hiddenLayer)
        {
            // For each neuron in the classic output layer
            for (int i = 0; i < _outputSize; i++)
            {
                double sum     = 0;
                Unit[] weights = _hiddenToOutputLayerWeights[i];

                // For each input coming from the hidden layer
                for (int j = 0; j < _controllerSize; j++)
                {
                    sum += weights[j].Value * hiddenLayer.HiddenLayerNeurons[j].Value;
                }

                // Add the bias (threshold) term stored in the extra weight slot
                sum += weights[_controllerSize].Value;
                OutputLayerNeurons[i].Value = Sigmoid.GetValue(sum);
            }

            // For each head in the heads output layer
            for (int i = 0; i < _headCount; i++)
            {
                Unit[][] headsWeights = _hiddenToHeadsWeights[i];
                Head     head         = HeadsNeurons[i];

                for (int j = 0; j < headsWeights.Length; j++)
                {
                    double sum         = 0;
                    Unit[] headWeights = headsWeights[j];
                    // For each input coming from the hidden layer
                    for (int k = 0; k < _controllerSize; k++)
                    {
                        sum += headWeights[k].Value * hiddenLayer.HiddenLayerNeurons[k].Value;
                    }
                    // Add the bias (threshold) term
                    sum           += headWeights[_controllerSize].Value;
                    head[j].Value += sum; // accumulate raw sum; head outputs are not squashed
                }
            }
        }
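Stripped of the Unit and Head wrappers, each classic output neuron computes a weighted sum of the hidden activations plus a bias (the extra weight stored at index _controllerSize) and squashes it with a sigmoid; the head path accumulates the raw sums without squashing. A self-contained sketch of the classic path over plain arrays; all names here are illustrative, not from the source:

    using System;

    class ForwardSketch
    {
        static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        // weights[i] holds controllerSize weights followed by one bias term,
        // mirroring the weights[_controllerSize] slot in the example above.
        static double[] Forward(double[][] weights, double[] hidden)
        {
            int controllerSize = hidden.Length;
            var output = new double[weights.Length];
            for (int i = 0; i < weights.Length; i++)
            {
                double sum = 0;
                for (int j = 0; j < controllerSize; j++)
                    sum += weights[i][j] * hidden[j];
                sum += weights[i][controllerSize]; // bias ("threshold")
                output[i] = Sigmoid(sum);
            }
            return output;
        }

        static void Main()
        {
            var w = new[] { new[] { 0.5, -0.25, 0.1 } }; // 2 weights + 1 bias
            var h = new[] { 1.0, 2.0 };
            Console.WriteLine(Forward(w, h)[0]);         // sigmoid(0.1) ≈ 0.525
        }
    }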
Example #3
        public void BackwardErrorPropagation(double[] knownOutput, HiddenLayer hiddenLayer)
        {
            for (int j = 0; j < _outputSize; j++)
            {
                // Delta: network output minus target
                OutputLayerNeurons[j].Gradient = OutputLayerNeurons[j].Value - knownOutput[j];
            }

            // Propagate the output error back to the hidden layer
            for (int j = 0; j < _outputSize; j++)
            {
                Unit unit = OutputLayerNeurons[j];
                Unit[] weights = _hiddenToOutputLayerWeights[j];
                for (int i = 0; i < _controllerSize; i++)
                {
                    hiddenLayer.HiddenLayerNeurons[i].Gradient += weights[i].Value * unit.Gradient;
                }
            }

            // Propagate the heads' error back to the hidden layer
            for (int j = 0; j < _headCount; j++)
            {
                Head head = HeadsNeurons[j];
                Unit[][] weights = _hiddenToHeadsWeights[j];
                for (int k = 0; k < _headUnitSize; k++)
                {
                    Unit unit = head[k];
                    Unit[] weightsK = weights[k];
                    for (int i = 0; i < _controllerSize; i++)
                    {
                        hiddenLayer.HiddenLayerNeurons[i].Gradient += unit.Gradient * weightsK[i].Value;
                    }
                }
            }

            // Gradients of the hidden-to-output weights (Wyh1), bias included
            for (int i = 0; i < _outputSize; i++)
            {
                Unit[] wyh1I = _hiddenToOutputLayerWeights[i];
                double yGrad = OutputLayerNeurons[i].Gradient;
                for (int j = 0; j < _controllerSize; j++)
                {
                    wyh1I[j].Gradient += yGrad * hiddenLayer.HiddenLayerNeurons[j].Value;
                }
                wyh1I[_controllerSize].Gradient += yGrad;
            }

            //TODO refactor names
            // Gradients of the hidden-to-heads weights (Wuh1), bias included
            for (int i = 0; i < _headCount; i++)
            {
                Head head = HeadsNeurons[i];
                Unit[][] units = _hiddenToHeadsWeights[i];
                for (int j = 0; j < _headUnitSize; j++)
                {
                    Unit headUnit = head[j];
                    Unit[] wuh1ij = units[j];

                    for (int k = 0; k < _controllerSize; k++)
                    {
                        Unit unit = hiddenLayer.HiddenLayerNeurons[k];
                        wuh1ij[k].Gradient += headUnit.Gradient * unit.Value;
                    }
                    wuh1ij[_controllerSize].Gradient += headUnit.Gradient;
                }
            }
        }
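In plain-array form the flow above is: the output delta (output minus target, the gradient a sigmoid output yields under cross-entropy loss) is pushed back into the hidden activations through the transposed weights, each weight gradient accumulates delta_i * hidden_j, and the bias slot receives the bare delta. A hypothetical sketch covering the output path; names are not from the source:

    using System;

    class BackwardSketch
    {
        // Mirrors the "Output error backpropagation" and Wyh1 loops above.
        static void Backward(double[][] w, double[][] wGrad,
                             double[] hidden, double[] hiddenGrad,
                             double[] output, double[] target)
        {
            int controllerSize = hidden.Length;
            for (int i = 0; i < output.Length; i++)
            {
                double delta = output[i] - target[i];   // dL/dz_i
                for (int j = 0; j < controllerSize; j++)
                {
                    hiddenGrad[j] += w[i][j] * delta;   // dL/dh_j
                    wGrad[i][j]   += delta * hidden[j]; // dL/dW_ij
                }
                wGrad[i][controllerSize] += delta;      // bias gradient
            }
        }

        static void Main()
        {
            var w     = new[] { new[] { 0.5, -0.25, 0.1 } };
            var wGrad = new[] { new double[3] };
            var hGrad = new double[2];
            Backward(w, wGrad, new[] { 1.0, 2.0 }, hGrad,
                     new[] { 0.8 }, new[] { 1.0 });
            Console.WriteLine($"{hGrad[0]}, {wGrad[0][0]}, {wGrad[0][2]}");
            // -0.1, -0.2, -0.2
        }
    }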
Example #4
 private FeedForwardController(HiddenLayer hiddenLayer, OutputLayer outputLayer)
 {
     HiddenLayer = hiddenLayer;
     OutputLayer = outputLayer;
 }
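This private constructor takes already-built layers; it appears to exist so that Clone() in Example #1 can assemble a copy from cloned layers without re-running the weight initialization done by the public constructor in Example #5.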
Example #5
 public FeedForwardController(int controllerSize, int inputSize, int outputSize, int headCount, int memoryUnitSizeM)
 {
     HiddenLayer = new HiddenLayer(controllerSize, inputSize, headCount, memoryUnitSizeM);
     OutputLayer = new OutputLayer(outputSize, controllerSize, headCount, memoryUnitSizeM);
 }
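Illustrative usage of the public constructor; the sizes below are hypothetical, chosen only to show what each parameter controls:

    // Hypothetical sizes, illustration only.
    var controller = new FeedForwardController(
        controllerSize: 100,  // hidden-layer neurons
        inputSize: 8,         // input vector length
        outputSize: 8,        // output vector length
        headCount: 1,         // number of memory heads
        memoryUnitSizeM: 20); // width M of each memory cell

    var snapshot = controller.Clone(); // independent deep copy, see Example #1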