Example #1
        internal virtual void Initialize()
        {
            Random random;

            unchecked
            {
                random = new Random((int)DateTime.UtcNow.Ticks);
            }
            // TODO: add support for bias values
            for (int li = 0; li < this.layers.Count - 1; li++)
            {
                NeuralNetworkLayer currentLayer = this.layers[li];
                NeuralNetworkLayer nextLayer    = this.layers[li + 1];

                for (int i = 0; i < currentLayer.Inputs.Length; i++)
                {
                    // the outer loop already stops before the last layer, so every
                    // currentLayer reached here has an outgoing weight matrix
                    for (int j = 0; j < nextLayer.Inputs.Length; j++)
                    {
                        // uniform random weights in [0, 1); the commented-out line would
                        // instead spread them over [-2, 2)
                        //currentLayer.Weights[i, j] = random.NextDouble() * 4 - 2;
                        currentLayer.Weights[i, j]         = random.NextDouble();
                        currentLayer.LastWeightDelta[i, j] = 0.0;
                    }

                    // sentinel error value for node i
                    currentLayer.Errors[i] = double.MinValue;
                }
            }
        }
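
The commented-out line in Example #1 hints that a wider, zero-centred initialization range was also tried. A minimal sketch of that variant, using plain 2-D arrays as stand-ins for a layer's Weights and LastWeightDelta fields (the helper name InitializeWeights is hypothetical, not part of the examples above):

        // Sketch only: zero-centred variant of the weight initialization, assuming the same
        // [currentLayerInputs, nextLayerInputs] layout as NeuralNetworkLayer.Weights above.
        internal static void InitializeWeights(double[,] weights, double[,] lastWeightDelta, Random random)
        {
            for (int i = 0; i < weights.GetLength(0); i++)
            {
                for (int j = 0; j < weights.GetLength(1); j++)
                {
                    weights[i, j]         = random.NextDouble() * 4.0 - 2.0; // uniform in [-2, 2)
                    lastWeightDelta[i, j] = 0.0;                             // no accumulated momentum yet
                }
            }
        }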
Example #2
        public NeuralNetworkBuilder AddHiddenLayer(int numberOfNodes, ActivationFunction activationFunction, bool addBias)
        {
            if (this.currentNetwork.InputLayer == null)
            {
                throw new NeuralNetworkConfigurationException("You can't add a hidden layer before adding an input one!");
            }

            if (this.currentNetwork.OutputLayer != null)
            {
                throw new NeuralNetworkConfigurationException("You can't add a hidden layer after the output layer has been defined!");
            }

            var previousLayer = this.currentNetwork.Layers.Last();

            // size the previous layer's weight matrix so it feeds every node of the new
            // hidden layer (plus the optional bias node)
            previousLayer.Weights         = new double[previousLayer.Inputs.Length, numberOfNodes + (addBias ? 1 : 0)];
            previousLayer.LastWeightDelta = new double[previousLayer.Inputs.Length, numberOfNodes + (addBias ? 1 : 0)];

            NeuralNetworkLayer hiddenLayer = new NeuralNetworkLayer();

            hiddenLayer.Inputs             = new double[numberOfNodes + (addBias ? 1 : 0)];
            hiddenLayer.Errors             = new double[numberOfNodes + (addBias ? 1 : 0)];
            hiddenLayer.ActivationFunction = activationFunction;
            this.currentNetwork.Layers.Add(hiddenLayer);

            return this;
        }
Example #3
        public NeuralNetworkBuilder AddOutputLayer(int numberOfOutputs, ActivationFunction activationFunction)
        {
            if (this.currentNetwork.InputLayer == null)
            {
                throw new NeuralNetworkConfigurationException("You can't add an output layer before adding an input one!");
            }

            if (this.currentNetwork.OutputLayer != null)
            {
                throw new NeuralNetworkConfigurationException("You can't add more than one output layer!");
            }

            var previousLayer = this.currentNetwork.Layers.Last();

            previousLayer.Weights         = new double[previousLayer.Inputs.Length, numberOfOutputs];
            previousLayer.LastWeightDelta = new double[previousLayer.Inputs.Length, numberOfOutputs];

            NeuralNetworkLayer outputLayer = new NeuralNetworkLayer();

            outputLayer.Inputs             = new double[numberOfOutputs];
            outputLayer.Errors             = new double[numberOfOutputs];
            outputLayer.ActivationFunction = activationFunction;
            outputLayer.Weights            = null; // the output layer has no outgoing connections
            this.currentNetwork.Layers.Add(outputLayer);
            this.currentNetwork.OutputLayer = outputLayer;

            return this;
        }
Example #4
        public NeuralNetworkBuilder AddInputLayer(int numberOfInputs, ActivationFunction activationFunction, bool addBias)
        {
            if (this.currentNetwork.InputLayer != null)
            {
                throw new NeuralNetworkConfigurationException("You can't have more than one input layer!");
            }

            NeuralNetworkLayer inputLayer = new NeuralNetworkLayer();

            inputLayer.Inputs             = new double[numberOfInputs + (addBias ? 1 : 0)];
            inputLayer.Errors             = new double[numberOfInputs + (addBias ? 1 : 0)];
            inputLayer.ActivationFunction = activationFunction;
            // postpone allocation of the weights until the next layer is added
            this.currentNetwork.Layers.Add(inputLayer);
            this.currentNetwork.InputLayer = inputLayer;

            return this;
        }
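
Taken together, the builder methods in Examples #2-#4 are meant to be chained, sizing each layer's weight matrix as the next layer is added. A minimal usage sketch; SigmoidActivationFunction (any ActivationFunction implementation would do) and the way the finished network is retrieved from the builder are assumptions, not shown in the examples above:

        // sketch only: a 2-3-1 network with bias nodes on the input and hidden layers
        var builder = new NeuralNetworkBuilder();

        builder
            .AddInputLayer(2, new SigmoidActivationFunction(), addBias: true)   // hypothetical activation type
            .AddHiddenLayer(3, new SigmoidActivationFunction(), addBias: true)
            .AddOutputLayer(1, new SigmoidActivationFunction());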
Example #5
        public double[] Run(double[] inputValues)
        {
            // copy the caller's values into the input layer
            for (int i = 0; i < inputValues.Length; i++)
            {
                this.inputLayer.Inputs[i] = inputValues[i];
            }

            for (int li = 0; li < this.layers.Count - 1; li++)
            {
                NeuralNetworkLayer currentLayer = this.layers[li];
                NeuralNetworkLayer nextLayer    = this.layers[li + 1];

                for (int i = 0; i < nextLayer.Inputs.Length; i++)
                {
                    // weighted sum of the current layer's values feeding node i of the next layer
                    double sum = 0.0;
                    for (int j = 0; j < currentLayer.Inputs.Length; j++)
                    {
                        sum += currentLayer.Inputs[j] * currentLayer.Weights[j, i];
                    }

                    // squash the sum through the current layer's activation function
                    nextLayer.Inputs[i] = currentLayer.ActivationFunction.Forward(sum);
                }
            }
            return this.outputLayer.Inputs;
        }
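
Run performs a single forward pass: layer by layer, each node's value in the next layer is the activation of the weighted sum of the current layer's values, and the output layer's values are returned. A minimal usage sketch, assuming the weights have already been populated by Initialize (Example #1) and that network refers to the built network object (how it is obtained from the builder is not shown above):

        // sketch only: one forward pass over an already-built, already-initialized network
        double[] outputs = network.Run(new double[] { 0.0, 1.0 });

        Console.WriteLine(string.Join(", ", outputs)); // e.g. a single value for a 2-3-1 network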