Example No. 1
        /// <summary>
        /// Calculates the outputs of the layer for the given inputs.
        /// </summary>
        /// <param name="inputVector">Vector of inputs.</param>
        /// <param name="resultVector">Vector of resulting outputs of the layer's neurons.</param>
        /// <returns>An empty string on success, or an error message.</returns>
        public string React(float[] inputVector, ref float[] resultVector)
        {
            // CHECKING INPUT VECTOR LENGTH TO MATCH THE INPUT NUMBER
            if (inputVector.Length != coeffs[0].Length)
            {
                return("\n-> Error calculating layer reaction on input vector with length " + inputVector.Length.ToString() + " instead of " + coeffs[0].Length.ToString() + " .");
            }

            // CHECKING RESULT VECTOR LENGTH TO MATCH THE NEURON NUMBER
            if (resultVector.Length != neuronDeltas.Length)
            {
                return("\n-> Error calculating layer reaction with neurons number " + neuronDeltas.Length.ToString() + " to vector wiht length " + resultVector.Length.ToString() + " .");
            }

            // CALCULATING REACTIONS OF EACH NEURON
            float softMaxSum = 0;

            for (int neuronInd = 0; neuronInd < neuronDeltas.Length; neuronInd++)
            {
                // CALCULATING SUM OF WEIGHTED INPUTS FOR THE CURRENT NEURON
                float sum = biases[neuronInd];
                for (int inputInd = 0; inputInd < coeffs[neuronInd].Length; inputInd++)
                {
                    sum += inputVector[inputInd] * coeffs[neuronInd][inputInd];
                }

                resultVector[neuronInd] = AFType.CalcActivation(sum, ANNetwork.SayNetworkAlpha());
                softMaxSum += resultVector[neuronInd];
            }

            // ADDITIONAL NORMALIZATION STEP FOR THE SOFTMAX FUNCTION
            if (AFType.ToString() == AFTypeSoftMax.name)
            {
                for (int neuronInd = 0; neuronInd < resultVector.Length; neuronInd++)
                {
                    resultVector[neuronInd] /= softMaxSum;
                }
            }

            return("");
        }
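
A minimal, self-contained sketch of the same forward-pass math (all values hypothetical; a sigmoid stands in for whatever AFType.CalcActivation implements, which this excerpt does not show):

using System;

class ReactSketch
{
    // STAND-IN ACTIVATION - THE REAL AFType.CalcActivation IS NOT SHOWN IN THE EXCERPT
    static float Sigmoid(float x) => 1f / (1f + (float)Math.Exp(-x));

    static void Main()
    {
        // HYPOTHETICAL LAYER: 2 NEURONS, 3 INPUTS EACH
        float[][] coeffs = { new[] { 0.1f, 0.2f, 0.3f }, new[] { -0.4f, 0.5f, -0.6f } };
        float[] biases = { 0.1f, -0.1f };
        float[] inputVector = { 1f, 0.5f, -1f };
        float[] resultVector = new float[2];

        // SAME LOOP STRUCTURE AS React: BIAS PLUS WEIGHTED INPUTS, THEN ACTIVATION
        for (int neuronInd = 0; neuronInd < resultVector.Length; neuronInd++)
        {
            float sum = biases[neuronInd];
            for (int inputInd = 0; inputInd < coeffs[neuronInd].Length; inputInd++)
            {
                sum += inputVector[inputInd] * coeffs[neuronInd][inputInd];
            }
            resultVector[neuronInd] = Sigmoid(sum);
        }

        Console.WriteLine(string.Join(", ", resultVector)); // TWO ACTIVATIONS IN (0, 1)
    }
}

For a softmax layer, React additionally accumulates the raw activations into softMaxSum and divides each output by it, as in the final loop of the method above.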
Example No. 2
        /// <summary>
        /// Performs a gradient descent backpropagation pass through the layer.
        /// </summary>
        /// <param name="layerInputs">Vector of inputs (outputs of previous layers or inputs of the example).</param>
        /// <param name="layerOutputs">Vector of outputs of the layer, already calculated previously.</param>
        /// <param name="inDerivatives">Vector of incomming from next (in network architecture) layer or from cost function derivatives for their back propogation.</param>
        /// <param name="outDerivatives">Vector of outcomming derivatives for previous (in network architecture) layer.</param>
        /// <param name="lSpeed">Learning speed for deltas calculations.</param>
        /// <param name="examplesNum">Number of examples in a batch for delta sums scaling.</param>
        /// <returns>An empty string on success, or an error message.</returns>
        public string BPropogate(float[] layerInputs, float[] layerOutputs, float[] inDerivatives, ref float[] outDerivatives, float lSpeed, int examplesNum)
        {
            // CHECKING INPUT VECTOR LENGTHS TO MATCH THE LAYER'S COEFFICIENT AND NEURON NUMBERS
            if (layerInputs.Length != coeffs[0].Length)
            {
                return("Inputs vector length (" + layerInputs.Length.ToString() + ") don't match layer neurons' coefficients number (" + coeffs[0].Length.ToString() + ").");
            }

            if (layerOutputs.Length != neuronDeltas.Length)
            {
                return("Outputs vector length (" + layerOutputs.Length.ToString() + ") don't match layers neuron number (" + neuronDeltas.Length.ToString() + ").");
            }

            if (inDerivatives.Length != neuronDeltas.Length)
            {
                return("Derivatives vector length (" + inDerivatives.Length.ToString() + ") don't match layers neuron number (" + neuronDeltas.Length.ToString() + ").");
            }

            // PREPARING VECTOR OF DERIVATIVES FOR THE NEXT (ACTUALLY PREVIOUS IN NETWORK ARCHITECTURE) LAYER - NUMBER OF SUMS = NUMBER OF INPUTS (NEURONS IN THE PREVIOUS LAYER)
            // C# ZERO-INITIALIZES NEW ARRAYS, SO NO EXPLICIT CLEARING LOOP IS NEEDED
            float[] nextDerivatives = new float[coeffs[0].Length];

            // CALCULATING DELTAS OF CURRENT LAYER NEURONS AND DERIVATIVES FOR NEXT LAYER
            for (int neuronInd = 0; neuronInd < neuronDeltas.Length; neuronInd++)
            {
                // CALCULATING THE ACTIVATION FUNCTION DERIVATIVE USING THIS LAYER'S OUTPUT AND ALPHA
                try
                {
                    neuronDeltas[neuronInd] = AFType.CalcDerivative(layerOutputs[neuronInd], ANNetwork.SayNetworkAlpha()) * inDerivatives[neuronInd];
                }
                catch (Exception e)
                {
                    return("Error calculating activation function for neuron " + neuronInd.ToString() + " . " + e.Message);
                }

                // ACCUMULATING THE DELTA SUM FOR EACH COEFFICIENT OF THE CURRENT NEURON
                for (int inputInd = 0; inputInd < coeffDeltaSums[neuronInd].Length; inputInd++)
                {
                    // A SINGLE COEFFICIENT'S DELTA INCREASE IS CALCULATED FROM THE NEURON DELTA, THE INPUT VALUE FED INTO THIS COEFFICIENT, THE LEARNING SPEED AND THE NUMBER OF EXAMPLES (TO GET A MEAN)
                    coeffDeltaSums[neuronInd][inputInd] += neuronDeltas[neuronInd] * layerInputs[inputInd] * lSpeed / examplesNum;

                    // INCREASING THE DERIVATIVE SUM FOR THE CURRENT INPUT NEURON OF THE NEXT (PREVIOUS IN NETWORK ARCHITECTURE) LAYER
                    nextDerivatives[inputInd] += coeffs[neuronInd][inputInd] * neuronDeltas[neuronInd];
                }

                // THE BIAS DELTA INCREASE IS CALCULATED FROM THE NEURON DELTA, THE LEARNING SPEED AND THE NUMBER OF EXAMPLES (TO GET A MEAN)
                biasDeltaSums[neuronInd] += neuronDeltas[neuronInd] * lSpeed / examplesNum;
            }

            outDerivatives = nextDerivatives;

            return("");
        }
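
The delta bookkeeping in BPropogate can be illustrated standalone for a single neuron (all values hypothetical; the sigmoid derivative y * (1 - y) stands in for AFType.CalcDerivative, which this excerpt does not show):

using System;

class BPropSketch
{
    static void Main()
    {
        // HYPOTHETICAL: ONE NEURON WITH 2 COEFFICIENTS, ONE EXAMPLE IN THE BATCH
        float[] coeffs = { 0.3f, -0.2f };
        float[] layerInputs = { 1f, 0.5f };
        float layerOutput = 0.6f;   // ASSUMED FORWARD-PASS RESULT FOR THIS NEURON
        float inDerivative = 0.25f; // ASSUMED DERIVATIVE FROM THE NEXT LAYER OR COST FUNCTION
        float lSpeed = 0.1f;
        int examplesNum = 1;

        // NEURON DELTA: ACTIVATION DERIVATIVE (SIGMOID: y * (1 - y)) TIMES THE INCOMING DERIVATIVE
        float neuronDelta = layerOutput * (1f - layerOutput) * inDerivative;

        // SAME UPDATE RULES AS BPropogate: SCALED DELTA SUMS AND OUTGOING DERIVATIVES
        float[] coeffDeltaSums = new float[2];
        float[] outDerivatives = new float[2];
        for (int inputInd = 0; inputInd < coeffs.Length; inputInd++)
        {
            coeffDeltaSums[inputInd] += neuronDelta * layerInputs[inputInd] * lSpeed / examplesNum;
            outDerivatives[inputInd] += coeffs[inputInd] * neuronDelta;
        }
        float biasDeltaSum = neuronDelta * lSpeed / examplesNum;

        Console.WriteLine(string.Join(", ", coeffDeltaSums));
        Console.WriteLine(string.Join(", ", outDerivatives));
        Console.WriteLine(biasDeltaSum);
    }
}

Over a batch, these sums accumulate across examples; the division by examplesNum means the eventual weight update applies the mean delta rather than the raw sum.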