Example #1
        internal static float[] GetDeltasHiddenLayer(float[] outputSignals, float[][] weightsNextLayer, float[] deltasErrorsNextLayer, ActivationFunc activationFunc)
        {
            int _neuronsCountThisLayer = outputSignals.Length;
            int _neuronsCountNextLayer = deltasErrorsNextLayer.Length;

            float[] _deltasErrors = new float[outputSignals.Length];
            float   _sumWeightsAndDeltasNextLayer;
            float   _derivativeActivFunc;

            // For each neuron in this layer: delta = (sum of next-layer weights * next-layer deltas) * f'(output).
            for (int _neuron = 0; _neuron < _neuronsCountThisLayer; _neuron++)
            {
                _sumWeightsAndDeltasNextLayer = GetSumWeightsAndDeltasNextLayer(_neuron);
                _derivativeActivFunc          = NeuralMath.GetDerivativeByFunction(outputSignals[_neuron], activationFunc);
                _deltasErrors[_neuron]        = _sumWeightsAndDeltasNextLayer * _derivativeActivFunc;
            }

            return(_deltasErrors);

            float GetSumWeightsAndDeltasNextLayer(int neuronPrevLayer)
            {
                float _sumWeightsAndDeltas = 0;

                for (int _neuronNextLayer = 0; _neuronNextLayer < _neuronsCountNextLayer; _neuronNextLayer++)
                {
                    _sumWeightsAndDeltas += deltasErrorsNextLayer[_neuronNextLayer] * weightsNextLayer[neuronPrevLayer][_neuronNextLayer];
                }
                return(_sumWeightsAndDeltas);
            }
        }
Example #2
        private void UpdateMiniBatch(List <Tuple <double[, ], double[, ]> > miniBatch, double eta)
        {
            List <double[, ]> nablaB = new List <double[, ]>();
            List <double[, ]> nablaW = new List <double[, ]>();

            // Start with zeroed gradient accumulators shaped like the biases and weights.
            for (int i = 0; i < numberLayers - 1; i++)
            {
                nablaB.Add(new double[biases[i].GetLength(0), biases[i].GetLength(1)]);
                nablaW.Add(new double[weights[i].GetLength(0), weights[i].GetLength(1)]);
            }

            // Accumulate the gradient contribution of every sample in the mini-batch.
            foreach (Tuple <double[, ], double[, ]> batch in miniBatch)
            {
                Tuple <List <double[, ]>, List <double[, ]> > deltaNablas = BackPropogation(batch);
                for (int j = 0; j < numberLayers - 1; j++)
                {
                    nablaB[j] = NeuralMath.AddMatrix(nablaB[j], deltaNablas.Item1[j]);
                    nablaW[j] = NeuralMath.AddMatrix(nablaW[j], deltaNablas.Item2[j]);
                }
            }

            // Take one gradient-descent step using the batch-averaged gradients.
            for (int i = 0; i < numberLayers - 1; i++)
            {
                biases[i]  = NeuralMath.SubtractMatrix(biases[i], NeuralMath.ScaleMatrix(nablaB[i], eta / miniBatch.Count));
                weights[i] = NeuralMath.SubtractMatrix(weights[i], NeuralMath.ScaleMatrix(nablaW[i], eta / miniBatch.Count));
            }
        }
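
To see where UpdateMiniBatch fits, here is a minimal driver sketch that shuffles the training pairs each epoch, slices them into mini-batches, and hands each batch to the method from Example #2. The StochasticGradientDescent name, its parameters, and the System.Linq OrderBy shuffle are illustrative assumptions; only the field layout and sample format follow the example above.

        // Hypothetical SGD driver (not part of the original examples); assumes the same
        // class fields and the Tuple<double[,], double[,]> sample format as Example #2.
        private void StochasticGradientDescent(List <Tuple <double[, ], double[, ]> > trainingData,
                                               int epochs, int miniBatchSize, double eta)
        {
            Random rng = new Random();

            for (int epoch = 0; epoch < epochs; epoch++)
            {
                // New random batch order every epoch (simple OrderBy-based shuffle).
                List <Tuple <double[, ], double[, ]> > shuffled = trainingData.OrderBy(_ => rng.Next()).ToList();

                for (int start = 0; start < shuffled.Count; start += miniBatchSize)
                {
                    int count = Math.Min(miniBatchSize, shuffled.Count - start);
                    UpdateMiniBatch(shuffled.GetRange(start, count), eta);
                }
            }
        }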
Example #3
        private double[,] FeedForward(double[,] a)
        {
            double[,] output = a;

            // Repeatedly apply a = sigmoid(W·a + b), one layer at a time.
            for (int layer = 0; layer < numberLayers - 1; layer++)
            {
                double[,] wa = NeuralMath.DotMatrix(weights[layer], output);
                output       = NeuralMath.Sigmoid(NeuralMath.AddMatrix(wa, biases[layer]));
            }

            return(output);
        }
Example #4
        internal static float[] GetDeltasOutputLayer(float[] outputSignals, float[] expectedSignals, ActivationFunc activationFunc)
        {
            int _neuronsCountThisLayer = outputSignals.Length;

            float[] _deltasErrors = new float[_neuronsCountThisLayer];
            float   _errorOutSignal;
            float   _derivativeActivFunc;

            // For each output neuron: delta = (expected - actual) * f'(output).
            for (int _neuron = 0; _neuron < _neuronsCountThisLayer; _neuron++)
            {
                _errorOutSignal        = expectedSignals[_neuron] - outputSignals[_neuron];
                _derivativeActivFunc   = NeuralMath.GetDerivativeByFunction(outputSignals[_neuron], activationFunc);
                _deltasErrors[_neuron] = _errorOutSignal * _derivativeActivFunc;
            }

            return(_deltasErrors);
        }
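
The two delta helpers (Examples #1 and #4) chain during the backward pass: the output-layer deltas are computed first and then propagated back to the preceding hidden layer. The fragment below is a hedged illustration; the local variable names and the origin of the layer outputs and weights are assumptions, not part of the original code.

            // Hypothetical backward-pass fragment: output-layer deltas from Example #4
            // feed the hidden-layer computation from Example #1. weightsHiddenToOutput is
            // assumed to be indexed [hiddenNeuron][outputNeuron], matching weightsNextLayer.
            float[] _deltasOutput = GetDeltasOutputLayer(outputLayerSignals, expectedSignals, activationFunc);
            float[] _deltasHidden = GetDeltasHiddenLayer(hiddenLayerSignals, weightsHiddenToOutput, _deltasOutput, activationFunc);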
Example #5
        private Tuple <List <double[, ]>, List <double[, ]> > BackPropogation(Tuple <double[, ], double[, ]> batch)
        {
            List <double[, ]> nablaB = new List <double[, ]>();
            List <double[, ]> nablaW = new List <double[, ]>();

            for (int i = 0; i < numberLayers - 1; i++)
            {
                nablaB.Add(new double[biases[i].GetLength(0), biases[i].GetLength(1)]);
                nablaW.Add(new double[weights[i].GetLength(0), weights[i].GetLength(1)]);
            }

            // feed forward
            double[,] activation = batch.Item1;
            List <double[, ]> activations = new List <double[, ]> {
                batch.Item1
            };
            List <double[, ]> zs = new List <double[, ]>();

            for (int i = 0; i < numberLayers - 1; i++)
            {
                double[,] z = NeuralMath.AddMatrix(NeuralMath.DotMatrix(weights[i], activation), biases[i]);
                zs.Add(z);
                activation = NeuralMath.Sigmoid(z);
                activations.Add(activation);
            }

            // backward pass
            double[,] delta = NeuralMath.MultiplyMatrix(NeuralMath.CostDerivative(activations[activations.Count - 1], batch.Item2),
                                                        NeuralMath.SigmoidPrime(zs[zs.Count - 1]));
            nablaB[nablaB.Count - 1] = delta;
            nablaW[nablaW.Count - 1] = NeuralMath.DotMatrix(delta, NeuralMath.Transpose(activations[activations.Count - 2]));

            // Propagate the error back through the remaining layers (l counts from the end).
            for (int l = 2; l < numberLayers; l++)
            {
                double[,] z  = zs[zs.Count - l];
                double[,] sp = NeuralMath.SigmoidPrime(z);
                delta        = NeuralMath.MultiplyMatrix(NeuralMath.DotMatrix(
                                                             NeuralMath.Transpose(weights[weights.Count - l + 1]), delta), sp);
                nablaB[nablaB.Count - l] = delta;
                nablaW[nablaW.Count - l] = NeuralMath.DotMatrix(delta, NeuralMath.Transpose(activations[activations.Count - l - 1]));
            }
            return(Tuple.Create(nablaB, nablaW));
        }
Example #6
        internal static float[] GetOutputSignalOfSample(float[] inputSignal, float[][] neuronWeights, ActivationFunc activationFunc, float[] isBiasNeuron)
        {
            int _synapsesCount = neuronWeights.Length;
            int _neuronsCount  = neuronWeights[0].Length;

            float[]   _outputSignals  = new float[_neuronsCount];
            float[][] _neuronWeightsT = TransposeArray(neuronWeights);

            for (int neuron = 0; neuron < _neuronsCount; neuron++)
            {
                // Weighted sum of all incoming synapses for this neuron.
                float _inputSignalForNeuron = 0;
                for (int synapse = 0; synapse < _synapsesCount; synapse++)
                {
                    _inputSignalForNeuron += inputSignal[synapse] * _neuronWeightsT[neuron][synapse];
                }
                // The bias is added once per neuron, not once per synapse.
                if (isBiasNeuron != null)
                {
                    _inputSignalForNeuron += isBiasNeuron[neuron];
                }
                _outputSignals[neuron] = NeuralMath.GetApproximateByFunction(_inputSignalForNeuron, activationFunc);
            }

            return(_outputSignals);
        }
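
For context, a single sample can be pushed through two layers by calling GetOutputSignalOfSample once per layer, feeding the first layer's outputs into the second. This is a minimal sketch; the weight and bias array names are invented for the illustration and the second call assumes a layer without bias neurons.

            // Hypothetical two-layer forward pass built from Example #6 (names are illustrative).
            float[] _hiddenSignals = GetOutputSignalOfSample(inputSignal, weightsInputToHidden, activationFunc, hiddenBiases);
            float[] _outputSignals = GetOutputSignalOfSample(_hiddenSignals, weightsHiddenToOutput, activationFunc, null);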
Example #7
        internal static float[][] GetOutputSignalsOfAllSamples(float[][] inputSignals, float[][] neuronWeights, ActivationFunc activationFunc, float[] isBiasNeuron)
        {
            int _synapsesCount = neuronWeights.Length;
            int _neuronsCount  = neuronWeights[0].Length;

            float[][] _outputSignals  = new float[inputSignals.Length][];
            float[][] _neuronWeightsT = TransposeArray(neuronWeights);

            Parallel.For(0, inputSignals.Length, numberOfActiveDataset =>
            {
                float _inputSignalForNeuron;
                if (_outputSignals[numberOfActiveDataset] == null)
                {
                    _outputSignals[numberOfActiveDataset] = new float[_neuronsCount];
                }

                for (int neuron = 0; neuron < _neuronsCount; neuron++)
                {
                    _inputSignalForNeuron = SumAllSynapseSignals(numberOfActiveDataset, neuron, _synapsesCount);
                    _outputSignals[numberOfActiveDataset][neuron] = NeuralMath.GetApproximateByFunction(_inputSignalForNeuron, activationFunc);
                }
            });

            return(_outputSignals);

            float SumAllSynapseSignals(int currentSet, int neuron, int synapsesOfNeuron)
            {
                float _inputSignal = 0;

                // Weighted sum of all incoming synapses for this neuron.
                for (int synapse = 0; synapse < synapsesOfNeuron; synapse++)
                {
                    _inputSignal += inputSignals[currentSet][synapse] * _neuronWeightsT[neuron][synapse];
                }
                // The bias is added once per neuron, not once per synapse.
                if (isBiasNeuron != null)
                {
                    _inputSignal += isBiasNeuron[neuron];
                }
                return(_inputSignal);
            }
        }