Example #1
0
        /// <summary>
        /// Runs one forward pass. Activation functions: tanh for the hidden
        /// nodes, softmax for the output nodes. The returned result carries the
        /// bias-augmented input, the hidden activations, and the output vector.
        /// </summary>
        /// <param name="inputs">Input vector; must have length <c>NumInputs</c>.</param>
        /// <returns>A <see cref="FeedForwardResult"/> with intermediate and final activations.</returns>
        /// <exception cref="NeuralNetworkException">Thrown when the input length does not match <c>NumInputs</c>.</exception>
        public FeedForwardResult FeedForward(double[] inputs)
        {
            if (inputs.Length != NumInputs)
            {
                throw new NeuralNetworkException($"Expected input length to be {NumInputs} but got {inputs.Length}.");
            }

            var result         = new FeedForwardResult();
            var inputsWithBias = GetInputsWithBias(inputs);

            result.InputWithBias = inputsWithBias;

            var hidden = new double[NumHidden + 1];

            hidden[NumHidden]   = 1; // Bias node is always 1.
            result.HiddenLayers = new [] { hidden };

            // Hidden layer: weights for node j are laid out contiguously,
            // (NumInputs + 1) values per node with the bias weight last.
            for (var j = 0; j < NumHidden; j++)
            {
                var preActivation = 0.0;
                var offset        = j * (NumInputs + 1);

                for (var i = 0; i <= NumInputs; i++)
                {
                    preActivation += HiddenWeights[offset + i] * inputsWithBias[i];
                }

                hidden[j] = Math.Tanh(preActivation);
            }

            // Output layer (softmax). Compute the raw pre-activations first,
            // then shift by their maximum before exponentiating (the standard
            // log-sum-exp trick). Without the shift, Math.Exp overflows to
            // PositiveInfinity for large pre-activations and the normalized
            // outputs all become NaN.
            var preOutputs   = new double[NumOutputs];
            var maxPreOutput = double.NegativeInfinity;

            for (var k = 0; k < NumOutputs; k++)
            {
                var preOutput = 0.0;
                var offset    = k * (NumHidden + 1);

                for (var j = 0; j < NumHidden + 1; j++)
                {
                    preOutput += OutputWeights[offset + j] * hidden[j];
                }

                preOutputs[k] = preOutput;

                if (preOutput > maxPreOutput)
                {
                    maxPreOutput = preOutput;
                }
            }

            var sumOfExps = 0.0;
            var outputs   = new double[NumOutputs];

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] = Math.Exp(preOutputs[k] - maxPreOutput);
                sumOfExps += outputs[k];
            }

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] /= sumOfExps;
            }

            result.Output = outputs;

            return result;
        }
Example #2
0
        /// <summary>
        /// Runs one forward pass. Activation functions: tanh for the hidden
        /// nodes, softmax for the output nodes.
        /// </summary>
        /// <param name="inputs">Input vector; must have length <c>NumInputs</c>.</param>
        /// <returns>A <see cref="FeedForwardResult"/> with intermediate and final activations.</returns>
        /// <exception cref="NeuralNetworkException">Thrown when the input length does not match <c>NumInputs</c>.</exception>
        public FeedForwardResult FeedForward(double[] inputs)
        {
            if (inputs.Length != NumInputs)
                throw new NeuralNetworkException($"Expected input length to be {NumInputs} but got {inputs.Length}.");

            var result = new FeedForwardResult();
            var inputsWithBias = GetInputsWithBias(inputs);
            result.InputWithBias = inputsWithBias;

            var hidden = new double[NumHidden + 1];
            hidden[NumHidden] = 1; // Bias.
            result.HiddenLayers = new [] {hidden};

            // Hidden layer: weights for node j are stored contiguously,
            // (NumInputs + 1) values per node with the bias weight last.
            for (var j = 0; j < NumHidden; j++)
            {
                var preActivation = 0.0;
                var offset = j * (NumInputs + 1);

                for (var i = 0; i <= NumInputs; i++)
                    preActivation += HiddenWeights[offset + i] * inputsWithBias[i];

                hidden[j] = Math.Tanh(preActivation);
            }

            // Softmax output. Shift by the maximum pre-activation before
            // exponentiating (log-sum-exp trick); otherwise Math.Exp overflows
            // to PositiveInfinity for large inputs and the outputs become NaN.
            var preOutputs = new double[NumOutputs];
            var maxPreOutput = double.NegativeInfinity;

            for (var k = 0; k < NumOutputs; k++)
            {
                var preOutput = 0.0;
                var offset = k * (NumHidden + 1);

                for (var j = 0; j < NumHidden + 1; j++)
                    preOutput += OutputWeights[offset + j] * hidden[j];

                preOutputs[k] = preOutput;

                if (preOutput > maxPreOutput)
                    maxPreOutput = preOutput;
            }

            var sumOfExps = 0.0;
            var outputs = new double[NumOutputs];

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] = Math.Exp(preOutputs[k] - maxPreOutput);
                sumOfExps += outputs[k];
            }

            for (var k = 0; k < NumOutputs; k++)
                outputs[k] /= sumOfExps;

            result.Output = outputs;

            return result;
        }
Example #3
0
        /// <summary>
        /// Runs one forward pass through all hidden layers and the output
        /// layer. Activation functions: tanh for hidden nodes, softmax for
        /// output nodes. Each hidden layer carries a trailing bias node
        /// (always 1); the input is likewise bias-augmented.
        /// </summary>
        /// <param name="inputs">Input vector; must have length <c>NumInputs</c>.</param>
        /// <returns>A <see cref="FeedForwardResult"/> with the bias-augmented input, all hidden layers, and the output vector.</returns>
        /// <exception cref="NeuralNetworkException">Thrown when the input length does not match <c>NumInputs</c>.</exception>
        public FeedForwardResult FeedForward(double[] inputs)
        {
            if (inputs.Length != NumInputs)
                throw new NeuralNetworkException($"Argument 'inputs' should have width {NumInputs}; was {inputs.Length}.");

            // Add bias input node (always = 1).
            var inputsWithBias = new double[NumInputs + 1];
            Array.Copy(inputs, inputsWithBias, inputs.Length);
            inputsWithBias[inputsWithBias.Length - 1] = 1;

            // Prepare hidden nodes. Include bias node (always = 1) as the last node in each layer.
            var hidden = new double[HiddenLayerSizes.Count][];

            for (var i = 0; i < HiddenLayerSizes.Count; i++)
            {
                var length = HiddenLayerSizes[i] + 1;
                hidden[i] = new double[length];
                hidden[i][length - 1] = 1;
            }

            // Calculate first hidden layer. Weights for node j occupy
            // (NumInputs + 1) consecutive slots, bias weight last.
            for (var j = 0; j < HiddenLayerSizes[0]; j++)
            {
                var preActivation = 0.0;
                var offset = j * (NumInputs + 1);

                for (var i = 0; i < NumInputs + 1; i++)
                    preActivation += inputsWithBias[i] * Weights[0][offset + i];

                hidden[0][j] = Math.Tanh(preActivation);
            }

            // Calculate the rest of the hidden layers.
            for (var l = 1; l < HiddenLayerSizes.Count; l++)
            {
                for (var j = 0; j < HiddenLayerSizes[l]; j++)
                {
                    var preActivation = 0.0;
                    var prevLayerSize = HiddenLayerSizes[l - 1] + 1;
                    var offset = j * prevLayerSize;

                    for (var i = 0; i < prevLayerSize; i++)
                        preActivation += hidden[l-1][i] * Weights[l][offset + i];

                    hidden[l][j] = Math.Tanh(preActivation);
                }
            }

            // Calculate the output layer (softmax). Compute the raw
            // pre-activations first, then shift by their maximum before
            // exponentiating (log-sum-exp trick) — without the shift Math.Exp
            // overflows to PositiveInfinity for large values and the
            // normalized outputs all become NaN.
            var lastHiddenLayer = hidden[HiddenLayerSizes.Count - 1];
            var lastHiddenLayerSize = lastHiddenLayer.Length;
            var outputWeights = Weights[Weights.Length - 1];
            var preOutputs = new double[NumOutputs];
            var maxPreOutput = double.NegativeInfinity;

            for (var k = 0; k < NumOutputs; k++)
            {
                var preOutput = 0.0;
                var offset = k * lastHiddenLayerSize;

                for (var j = 0; j < lastHiddenLayerSize; j++)
                    preOutput += outputWeights[offset + j] * lastHiddenLayer[j];

                preOutputs[k] = preOutput;

                if (preOutput > maxPreOutput)
                    maxPreOutput = preOutput;
            }

            var sumOfExps = 0.0;
            var outputs = new double[NumOutputs];

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] = Math.Exp(preOutputs[k] - maxPreOutput);
                sumOfExps += outputs[k];
            }

            for (var k = 0; k < NumOutputs; k++)
                outputs[k] /= sumOfExps;

            var result = new FeedForwardResult();
            result.InputWithBias = inputsWithBias;
            result.HiddenLayers = hidden;
            result.Output = outputs;

            return result;
        }
Example #4
0
        /// <summary>
        /// Runs one forward pass through all hidden layers and the output
        /// layer. Activation functions: tanh for hidden nodes, softmax for
        /// output nodes. Each hidden layer carries a trailing bias node
        /// (always 1); the input is likewise bias-augmented.
        /// </summary>
        /// <param name="inputs">Input vector; must have length <c>NumInputs</c>.</param>
        /// <returns>A <see cref="FeedForwardResult"/> with the bias-augmented input, all hidden layers, and the output vector.</returns>
        /// <exception cref="NeuralNetworkException">Thrown when the input length does not match <c>NumInputs</c>.</exception>
        public FeedForwardResult FeedForward(double[] inputs)
        {
            if (inputs.Length != NumInputs)
            {
                throw new NeuralNetworkException($"Argument 'inputs' should have width {NumInputs}; was {inputs.Length}.");
            }

            // Add bias input node (always = 1).
            var inputsWithBias = new double[NumInputs + 1];

            Array.Copy(inputs, inputsWithBias, inputs.Length);
            inputsWithBias[inputsWithBias.Length - 1] = 1;

            // Prepare hidden nodes. Include bias node (always = 1) as the last node in each layer.
            var hidden = new double[HiddenLayerSizes.Count][];

            for (var i = 0; i < HiddenLayerSizes.Count; i++)
            {
                var length = HiddenLayerSizes[i] + 1;
                hidden[i]             = new double[length];
                hidden[i][length - 1] = 1;
            }

            // Calculate first hidden layer. Weights for node j occupy
            // (NumInputs + 1) consecutive slots, bias weight last.
            for (var j = 0; j < HiddenLayerSizes[0]; j++)
            {
                var preActivation = 0.0;
                var offset        = j * (NumInputs + 1);

                for (var i = 0; i < NumInputs + 1; i++)
                {
                    preActivation += inputsWithBias[i] * Weights[0][offset + i];
                }

                hidden[0][j] = Math.Tanh(preActivation);
            }

            // Calculate the rest of the hidden layers.
            for (var l = 1; l < HiddenLayerSizes.Count; l++)
            {
                for (var j = 0; j < HiddenLayerSizes[l]; j++)
                {
                    var preActivation = 0.0;
                    var prevLayerSize = HiddenLayerSizes[l - 1] + 1;
                    var offset        = j * prevLayerSize;

                    for (var i = 0; i < prevLayerSize; i++)
                    {
                        preActivation += hidden[l - 1][i] * Weights[l][offset + i];
                    }

                    hidden[l][j] = Math.Tanh(preActivation);
                }
            }

            // Calculate the output layer (softmax). Compute the raw
            // pre-activations first, then shift by their maximum before
            // exponentiating (log-sum-exp trick) — without the shift Math.Exp
            // overflows to PositiveInfinity for large values and the
            // normalized outputs all become NaN.
            var lastHiddenLayer     = hidden[HiddenLayerSizes.Count - 1];
            var lastHiddenLayerSize = lastHiddenLayer.Length;
            var outputWeights       = Weights[Weights.Length - 1];
            var preOutputs          = new double[NumOutputs];
            var maxPreOutput        = double.NegativeInfinity;

            for (var k = 0; k < NumOutputs; k++)
            {
                var preOutput = 0.0;
                var offset    = k * lastHiddenLayerSize;

                for (var j = 0; j < lastHiddenLayerSize; j++)
                {
                    preOutput += outputWeights[offset + j] * lastHiddenLayer[j];
                }

                preOutputs[k] = preOutput;

                if (preOutput > maxPreOutput)
                {
                    maxPreOutput = preOutput;
                }
            }

            var sumOfExps = 0.0;
            var outputs   = new double[NumOutputs];

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] = Math.Exp(preOutputs[k] - maxPreOutput);
                sumOfExps += outputs[k];
            }

            for (var k = 0; k < NumOutputs; k++)
            {
                outputs[k] /= sumOfExps;
            }

            var result = new FeedForwardResult();

            result.InputWithBias = inputsWithBias;
            result.HiddenLayers  = hidden;
            result.Output        = outputs;

            return result;
        }