private void CalculateAgain(ActivationNeuron neuron, int edge)
        {
            double randomWeight = GetRandomNumber(-0.5, 0.5);

            randomWeighUpdateG = randomWeight;
            neuron[edge]      += randomWeight;
        }
        /// <summary>
        ///  Update network's weights.
        /// </summary>
        ///
        /// <returns>The sum of squared weights divided by 2.</returns>
        ///
        private double loadArrayIntoNetwork()
        {
            double w, sumOfSquaredWeights = 0.0;

            // For each layer in the network
            for (int li = 0, cur = 0; li < network.Layers.Length; li++)
            {
                ActivationLayer layer = network.Layers[li] as ActivationLayer;

                // for each neuron in the layer
                for (int ni = 0; ni < layer.Neurons.Length; ni++, cur++)
                {
                    ActivationNeuron neuron = layer.Neurons[ni] as ActivationNeuron;

                    // for each weight in the neuron
                    for (int wi = 0; wi < neuron.Weights.Length; wi++, cur++)
                    {
                        neuron.Weights[wi]   = w = weights[cur] + deltas[cur];
                        sumOfSquaredWeights += w * w;
                    }

                    // for each threshold value (bias):
                    neuron.Threshold     = w = weights[cur] + deltas[cur];
                    sumOfSquaredWeights += w * w;
                }
            }

            return(sumOfSquaredWeights / 2.0);
        }
Example #3
        public ILayer CreateLayer(int type, int numberOfNeurons, IActivationFunction activationFunction)
        {
            ILayer layer = null;

            switch (type)
            {
            case TYPE_ACTIVATION:
                layer = new ActivationLayer();
                for (int i = 0; i < numberOfNeurons; i++)
                {
                    var neuron = new ActivationNeuron(numberOfNeurons, activationFunction);
                    layer.AddLayer(neuron);
                }
                break;

            case TYPE_DISTANCE:
                layer = new DistanceLayer();
                for (int i = 0; i < numberOfNeurons; i++)
                {
                    var neuron = new DistanceNeuron(numberOfNeurons);
                    layer.AddLayer(neuron);
                }
                break;

            default:
                throw new ArgumentException();
            }


            return(layer);
        }
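A minimal usage sketch for the factory method above (hedged: the enclosing class is assumed to be the LayerFactory exercised in the test further below, and TYPE_ACTIVATION is assumed to be an accessible constant on it):

        // build a three-neuron activation layer with a sigmoid activation function
        var factory = new LayerFactory();                        // assumed enclosing class
        IActivationFunction sigmoid = new SigmoidFunction();
        ILayer layer = factory.CreateLayer(LayerFactory.TYPE_ACTIVATION, 3, sigmoid);
        // each neuron is constructed with numberOfNeurons inputs and added via AddLayer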
Example #4
            /// <summary>
            /// Set the best solution (weights) to the network.
            /// </summary>
            private void SetBestWeightsToTheNetwork()
            {
                if (Equals(Network, null))
                {
                    return;
                }
                if (Equals(BestWeights, null))
                {
                    return;
                }

                //Copy the weights to the network:
                double[] chromosomeGenes = BestWeights;
                // put best chromosome's value into neural network's weights
                int v = 0;

                for (int i = 0; i < Network.Layers.Length; i++)
                {
                    Layer layer = Network.Layers[i];

                    for (int j = 0; j < layer.Neurons.Length; j++)
                    {
                        ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                        for (int k = 0; k < neuron.Weights.Length; k++)
                        {
                            neuron.Weights[k] = chromosomeGenes[v++];
                        }
                        neuron.Threshold = chromosomeGenes[v++];
                    }
                }
            }
        /// <summary>
        ///   Creates the initial weight vector w
        /// </summary>
        ///
        /// <returns>The sum of squared weights divided by 2.</returns>
        ///
        private double saveNetworkToArray()
        {
            double w, sumOfSquaredWeights = 0.0;

            // for each layer in the network
            for (int li = 0, cur = 0; li < network.Layers.Length; li++)
            {
                ActivationLayer layer = network.Layers[li] as ActivationLayer;

                // for each neuron in the layer
                for (int ni = 0; ni < layer.Neurons.Length; ni++, cur++)
                {
                    ActivationNeuron neuron = layer.Neurons[ni] as ActivationNeuron;

                    // for each weight in the neuron
                    for (int wi = 0; wi < neuron.InputsCount; wi++, cur++)
                    {
                        // We copy it to the starting weights vector
                        w = weights[cur] = (float)neuron.Weights[wi];
                        sumOfSquaredWeights += w * w;
                    }

                    // and also for the threshold value (bias):
                    w = weights[cur] = (float)neuron.Threshold;
                    sumOfSquaredWeights += w * w;
                }
            }
            return(sumOfSquaredWeights / 2.0);
        }
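Both saveNetworkToArray above and loadArrayIntoNetwork earlier walk the same flat layout: for every neuron, all of its weights first, then one threshold (bias) slot. A small sketch (an illustrative helper, not part of the source) that computes the expected length of that vector:

        private static int CountParameters(Network network)
        {
            int count = 0;

            // each neuron contributes its weights plus one bias slot
            foreach (Layer layer in network.Layers)
            {
                foreach (Neuron neuron in layer.Neurons)
                {
                    count += neuron.Weights.Length + 1;
                }
            }

            return count;
        }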
Example #6
        public DoubleArrayChromosome ToDoubleArrarChromosome()
        {
            double[] chromosomeGenes = new double[totalNumberOfWeights];

            // assign new weights and thresholds to the network from the given chromosome
            int count = 0;

            for (int i = 0, layersCount = Layers.Length; i < layersCount; i++)
            {
                Layer layer = Layers[i];

                for (int j = 0; j < layer.Neurons.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    for (int k = 0; k < neuron.Weights.Length; k++)
                    {
                        chromosomeGenes[count++] = neuron.Weights[k];
                    }
                    chromosomeGenes[count++] = neuron.Threshold;
                }
            }

            return(new DoubleArrayChromosome(new UniformGenerator(new Range(-1f, 1f)),
                                             new ExponentialGenerator(1),
                                             new UniformGenerator(new Range(-0.5f, 0.5f)),
                                             chromosomeGenes));
        }
        public void SutCallsOnNextWithTransformedInput(decimal input, long output, [Frozen]Mock<IObserver<long>> mockBackend, [Frozen]Mock<ITransformingFunction<decimal, long>> mockFunction, ActivationNeuron<decimal, long> sut)
        {
            mockFunction.Setup(function => function.Evaluate(input)).Returns(output);

            sut.OnNext(input);

            mockBackend.Verify(backend => backend.OnNext(output), Times.Once());
        }
 public ActivationLayer(int neuronsCount, int inputsCount, IActivationFunction function)
     : base(neuronsCount, inputsCount)
 {
     for (int i = 0; i < neuronsCount; i++)
     {
         neurons[i] = new ActivationNeuron(inputsCount, function);
     }
 }
        /// <summary>
        /// Runs learning epoch.
        /// </summary>
        ///
        /// <param name="input">Array of input vectors.</param>
        /// <param name="output">Array of output vectors.</param>
        ///
        /// <returns>Returns summary squared learning error for the entire epoch.</returns>
        ///
        /// <remarks><para><note>The same <paramref name="input"/> and <paramref name="output"/> values
        /// must be passed on every epoch of the learning process. The very first call to this method
        /// initializes the evolutionary fitness function with the given input/output, so changing them
        /// in the middle of the learning process will break it.</note></para></remarks>
        ///
        public double RunEpoch(double[][] input, double[][] output)
        {
            Debug.Assert(input.Length > 0);
            Debug.Assert(output.Length > 0);
            Debug.Assert(input.Length == output.Length);
            Debug.Assert(network.InputsCount == input[0].Length);

            // check if it is a first run and create population if so
            if (population == null)
            {
                // sample chromosome
                DoubleArrayChromosome chromosomeExample = new DoubleArrayChromosome(
                    chromosomeGenerator, mutationMultiplierGenerator, mutationAdditionGenerator,
                    numberOfNetworksWeights);

                // create population ...
                population = new Population(populationSize, chromosomeExample,
                                            new EvolutionaryFitness(network, input, output), selectionMethod);
                // ... and configure it
                population.CrossoverRate          = crossOverRate;
                population.MutationRate           = mutationRate;
                population.RandomSelectionPortion = randomSelectionRate;
            }

            // run genetic epoch
            population.RunEpoch();

            // get best chromosome of the population
            DoubleArrayChromosome chromosome = (DoubleArrayChromosome)population.BestChromosome;

            double[] chromosomeGenes = chromosome.Value;

            // put best chromosome's value into neural network's weights
            int v = 0;

            for (int i = 0; i < network.Layers.Length; i++)
            {
                Layer layer = network.Layers[i];

                for (int j = 0; j < layer.Neurons.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    for (int k = 0; k < neuron.Weights.Length; k++)
                    {
                        neuron.Weights[k] = chromosomeGenes[v++];
                    }
                    neuron.Threshold = chromosomeGenes[v++];
                }
            }

            Debug.Assert(v == numberOfNetworksWeights);

            return(1.0 / chromosome.Fitness);
        }
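A hedged usage sketch for RunEpoch, assuming it belongs to an AForge/Accord-style evolutionary teacher (the class name EvolutionaryLearning and its constructor arguments are assumptions). As the remarks note, the same input/output arrays must be passed on every call, because the first call captures them inside the fitness function:

        // input and output are the fixed training arrays (double[][]) defined elsewhere
        var network = new ActivationNetwork(new BipolarSigmoidFunction(), 2, 5, 1);
        var teacher = new EvolutionaryLearning(network, 100);   // assumed class and constructor

        for (int epoch = 0; epoch < 1000; epoch++)
        {
            // pass the same arrays on every epoch
            double error = teacher.RunEpoch(input, output);
        }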
Example #10
        public void AddLayer(object neuron)
        {
            if (!(neuron is ActivationNeuron))
            {
                throw new ArgumentException("Only ActivationNeuron instances can be added to this layer.");
            }

            ActivationNeuron ActNeuron = (ActivationNeuron)neuron;

            Neurons.Add(ActNeuron);
        }
Example #11
        public RBMLayer(int neuronsCount, int inputsCount, IActivationFunction function)
            : base(neuronsCount, inputsCount, function)
        {
            neurons_ = new Neuron[inputsCount];

            output_ = new double[inputsCount];

            for (int i = 0; i < inputsCount; i++)
            {
                neurons_[i] = new ActivationNeuron(neuronsCount, function);
            }
        }
        public void NeuronInitalizeActivationSigmoid()
        {
            int    InputCount  = 3;
            double outputValue = 0;
            IActivationFunction ActivationSigmoid = new SigmoidFunction();
            Neuron ActNeuron = new ActivationNeuron(InputCount, ActivationSigmoid);

            ActNeuron.FeedForward(InputValues);
            outputValue = ActNeuron.Compute();

            Assert.True(outputValue != 0);
        }
        /// <summary>
        ///   Calculates the Jacobian Matrix using Finite Differences
        /// </summary>
        ///
        /// <returns>Returns the sum of squared errors of the network divided by 2.</returns>
        ///
        private double JacobianByFiniteDifference(double[][] input, double[][] desiredOutput)
        {
            double e, sumOfSquaredErrors = 0;

            // for each input training sample
            for (int i = 0, row = 0; i < input.Length; i++)
            {
                // Compute a forward pass
                double[] networkOutput = network.Compute(input[i]);

                // for each output respective to the input
                for (int j = 0; j < networkOutput.Length; j++, row++)
                {
                    // Calculate network error to build the residuals vector
                    e = errors[row] = desiredOutput[i][j] - networkOutput[j];
                    sumOfSquaredErrors += e * e;

                    // Computation of one of the Jacobian Matrix rows by numerical differentiation:
                    // for each weight w_j in the network, we have to compute its partial derivative
                    // to build the Jacobian matrix.

                    // So, for each layer:
                    for (int li = 0, col = 0; li < network.Layers.Length; li++)
                    {
                        ActivationLayer layer = network.Layers[li] as ActivationLayer;

                        // for each neuron:
                        for (int ni = 0; ni < layer.Neurons.Length; ni++, col++)
                        {
                            ActivationNeuron neuron = layer.Neurons[ni] as ActivationNeuron;

                            // for each weight:
                            for (int wi = 0; wi < neuron.InputsCount; wi++, col++)
                            {
                                // Compute its partial derivative
                                jacobian[col][row] = (float)ComputeDerivative(input[i], li, ni,
                                                                              wi, ref derivativeStepSize[col], networkOutput[j], j);
                            }

                            // and also for each threshold value (bias)
                            jacobian[col][row] = (float)ComputeDerivative(input[i], li, ni,
                                                                          -1, ref derivativeStepSize[col], networkOutput[j], j);
                        }
                    }
                }
            }

            // returns the sum of squared errors / 2
            return(sumOfSquaredErrors / 2.0);
        }
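The per-weight derivatives above come from ComputeDerivative, whose body is not shown here; the underlying idea is an ordinary finite difference. A generic sketch of that idea (this helper is hypothetical, not the actual ComputeDerivative):

        // Perturb a single weight by `step` and approximate d(output)/d(weight) with a
        // forward difference; outputForWeight is a hypothetical delegate that re-runs the
        // forward pass with the given weight value substituted in.
        private static double ForwardDifference(Func<double, double> outputForWeight,
                                                double weight, double step)
        {
            double y0 = outputForWeight(weight);
            double y1 = outputForWeight(weight + step);
            return (y1 - y0) / step;
        }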
        /// <summary>
        /// Evaluates chromosome.
        /// </summary>
        ///
        /// <param name="chromosome">Chromosome to evaluate.</param>
        ///
        /// <returns>Returns chromosome's fitness value.</returns>
        ///
        /// <remarks>The method calculates fitness value of the specified
        /// chromosome.</remarks>
        ///
        public double Evaluate(IChromosome chromosome)
        {
            DoubleArrayChromosome daChromosome = (DoubleArrayChromosome)chromosome;

            double[] chromosomeGenes = daChromosome.Value;
            // total number of weights in the neural network
            int totalNumberOfWeights = 0;

            // assign new weights and thresholds to the network from the given chromosome
            for (int i = 0, layersCount = network.Layers.Length; i < layersCount; i++)
            {
                Layer layer = network.Layers[i];

                for (int j = 0; j < layer.Neurons.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    for (int k = 0; k < neuron.Weights.Length; k++)
                    {
                        neuron.Weights[k] = chromosomeGenes[totalNumberOfWeights++];
                    }
                    neuron.Threshold = chromosomeGenes[totalNumberOfWeights++];
                }
            }

            // post-check that all values were processed and that the chromosome
            // length matches the network size
            Debug.Assert(totalNumberOfWeights == daChromosome.Length);

            double totalError = 0;

            for (int i = 0, inputVectorsAmount = input.Length; i < inputVectorsAmount; i++)
            {
                double[] computedOutput = network.Compute(input[i]);

                for (int j = 0, outputLength = output[0].Length; j < outputLength; j++)
                {
                    double error = output[i][j] - computedOutput[j];
                    totalError += error * error;
                }
            }

            if (totalError > 0)
            {
                return(1.0 / totalError);
            }

            // zero error means the best fitness
            return(double.MaxValue);
        }
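A short worked example of the fitness mapping used above (illustrative numbers only): fitness is the reciprocal of the summed squared error, so smaller errors mean larger fitness.

        double totalError = 0.3 * 0.3 + 0.4 * 0.4;   // = 0.25 for two residuals of 0.3 and 0.4
        double fitness    = 1.0 / totalError;        // = 4.0
        // a chromosome with zero error would instead receive double.MaxValue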
        public void Xor(bool val1, bool val2)
        {
            // Arrange
            var boolTransformer = new BoolDecimalTransformer();

            var receiver = new ValueStoringNeuron<bool>();
            var wrappedReceiver = new OneToManyBufferingDecorator<decimal>(
                new ManyInputNeuron<decimal, decimal>(
                    new DecimalSumFunction(),
                    new ActivationNeuron<decimal, decimal>(
                        new DecimalThresholdFunction
                        {
                            Threshold = 0.5M
                        }, new ActivationNeuron<decimal, bool>(
                            boolTransformer, receiver)))
                , 3);

            var pivotNeuron = new OneToManyBufferingDecorator<decimal>(
                new ManyInputNeuron<decimal, decimal>(
                    new DecimalSumFunction(),
                    new ActivationNeuron<decimal, decimal>(
                        new DecimalThresholdFunction
                        {
                            Threshold = 1.5M
                        },
                        new DecimalWeight(wrappedReceiver, -2M)))
                , 2);

            var leftNeuron = new ActivationNeuron<bool, decimal>(
                boolTransformer,
                new CompositeObserver<decimal>(
                    new DecimalWeight(wrappedReceiver, 1),
                    new DecimalWeight(pivotNeuron, 1)));

            var rightNeuron = new ActivationNeuron<bool, decimal>(
                boolTransformer,
                new CompositeObserver<decimal>(
                    new DecimalWeight(wrappedReceiver, 1),
                    new DecimalWeight(pivotNeuron, 1)));

            // Act
            leftNeuron.OnNext(val1);
            rightNeuron.OnNext(val2);

            // Assert
            var result = receiver.LastValue;
            var expected = val1 ^ val2;
            result.Should().Be(expected);
        }
Example #17
        /// <summary>
        /// Calculate weights updates
        /// </summary>
        ///
        /// <param name="input">Network's input vector.</param>
        ///
        private void CalculateGradient(double[] input)
        {
            // 1. calculate updates for the first layer
            ActivationLayer layer = network.Layers[0] as ActivationLayer;

            double[]   weightErrors              = neuronErrors[0];
            double[][] layerWeightsDerivatives   = weightsDerivatives[0];
            double[]   layerThresholdDerivatives = thresholdsDerivatives[0];

            // So, for each neuron of the first layer:
            for (int i = 0; i < layer.Neurons.Length; i++)
            {
                ActivationNeuron neuron = layer.Neurons[i] as ActivationNeuron;
                double[]         neuronWeightDerivatives = layerWeightsDerivatives[i];

                // for each weight of the neuron:
                for (int j = 0; j < neuron.InputsCount; j++)
                {
                    neuronWeightDerivatives[j] += weightErrors[i] * input[j];
                }
                layerThresholdDerivatives[i] += weightErrors[i];
            }

            // 2. for all other layers
            for (int k = 1; k < network.Layers.Length; k++)
            {
                layer                     = network.Layers[k] as ActivationLayer;
                weightErrors              = neuronErrors[k];
                layerWeightsDerivatives   = weightsDerivatives[k];
                layerThresholdDerivatives = thresholdsDerivatives[k];

                ActivationLayer layerPrev = network.Layers[k - 1] as ActivationLayer;

                // for each neuron of the layer
                for (int i = 0; i < layer.Neurons.Length; i++)
                {
                    ActivationNeuron neuron = layer.Neurons[i] as ActivationNeuron;
                    double[]         neuronWeightDerivatives = layerWeightsDerivatives[i];

                    // for each weight of the neuron
                    for (int j = 0; j < layerPrev.Neurons.Length; j++)
                    {
                        neuronWeightDerivatives[j] += weightErrors[i] * layerPrev.Neurons[j].Output;
                    }
                    layerThresholdDerivatives[i] += weightErrors[i];
                }
            }
        }
Example #18
        public BackpropagationSynapse(
            ActivationNeuron sourceNeuron, ActivationNeuron targetNeuron, ConexionBackpropagation parent)
        {
            Helper.ValidateNotNull(sourceNeuron, "sourceNeuron");
            Helper.ValidateNotNull(targetNeuron, "targetNeuron");
            Helper.ValidateNotNull(parent, "parent");

            this.weight = 1f;
            this.delta  = 0f;

            sourceNeuron.TargetSynapses.Add(this);
            targetNeuron.SourceSynapses.Add(this);

            this.sourceNeuron = sourceNeuron;
            this.targetNeuron = targetNeuron;
            this.parent       = parent;
        }
        /// <summary>
        /// Creates a new Backpropagation Synapse connecting the given neurons
        /// </summary>
        /// <param name="sourceNeuron">
        /// The source neuron
        /// </param>
        /// <param name="targetNeuron">
        /// The target neuron
        /// </param>
        /// <param name="parent">
        /// Parent connector containing this syanpse
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// If any of the arguments is <c>null</c>.
        /// </exception>
        public BackpropagationSynapse(
            ActivationNeuron sourceNeuron, ActivationNeuron targetNeuron, BackpropagationConnector parent)
        {
            Helper.ValidateNotNull(sourceNeuron, "sourceNeuron");
            Helper.ValidateNotNull(targetNeuron, "targetNeuron");
            Helper.ValidateNotNull(parent, "parent");

            this.weight = 1f;
            this.delta = 0f;

            sourceNeuron.TargetSynapses.Add(this);
            targetNeuron.SourceSynapses.Add(this);

            this.sourceNeuron = sourceNeuron;
            this.targetNeuron = targetNeuron;
            this.parent = parent;
        }
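A minimal wiring sketch for the constructor above. The source, target and connector instances are hypothetical placeholders; the point is only that the constructor registers itself on both neurons, so no further bookkeeping is required:

        // source and target are ActivationNeuron instances owned by adjacent layers,
        // connector is the BackpropagationConnector that will own the new synapse
        var synapse = new BackpropagationSynapse(source, target, connector);

        // after construction:
        //   source.TargetSynapses contains the synapse
        //   target.SourceSynapses contains the synapse
        //   the synapse starts with weight = 1f and delta = 0f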
Example #20
        private void RgaOptimizer_CalculateFitnessValues(ref List <Genome> genomes)
        {
            // The sum of error
            double SumErr = 0;

            double[] outpts;
            //Evaluation of each chromosome error
            foreach (Genome chromosome in genomes)
            {
                double[] chromosomeGenes = chromosome.TheArray.ToArray();
                // copy this chromosome's genes into the neural network's weights
                int v = 0;

                for (int i = 0; i < network.Layers.Length; i++)
                {
                    Layer layer = network.Layers[i];

                    for (int j = 0; j < layer.Neurons.Length; j++)
                    {
                        ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                        for (int k = 0; k < neuron.Weights.Length; k++)
                        {
                            neuron.Weights[k] = chromosomeGenes[v++];
                        }
                        neuron.Threshold = chromosomeGenes[v++];
                    }
                }

                // evaluate the error over the training data:
                SumErr = 0;

                for (int k = 0; k < this.Inputs.GetLength(0); k++)
                {
                    outpts = this.network.Compute(this.Inputs[k]);

                    for (int l = 0; l < outpts.Length; l++)
                    {
                        SumErr += Math.Pow((Outputs[k][l] - outpts[l]), 2);
                    }
                }
                chromosome.CurrentFitness = (1 / SumErr);
            }
        }
        public void NeuronUpdateWeights()
        {
            double learningRate = 0.3;
            double delta        = -0.3;

            int    InputCount        = 3;
            double outputValue       = 0;
            double outputAfterUpdate = 0;
            IActivationFunction ActivationSigmoid = new SigmoidFunction();
            Neuron ActNeuron = new ActivationNeuron(InputCount, ActivationSigmoid);

            ActNeuron.FeedForward(InputValues);
            outputValue = ActNeuron.Compute();
            Assert.True(outputValue != 0);

            ActNeuron.UpdateWeight(learningRate, delta);
            outputAfterUpdate = ActNeuron.Compute();
            Assert.NotEqual(outputValue, outputAfterUpdate);
        }
Example #22
        private void SetNeurons()
        {
            int v = 0;

            for (int i = 0, layersCount = _activationNetwork.Layers.Length; i < layersCount; i++)
            {
                Layer layer = _activationNetwork.Layers[i];
                for (int j = 0, neuronsCount = layer.Neurons.Length; j < neuronsCount; j++)
                {
                    ActivationNeuron neuron = (ActivationNeuron)layer.Neurons[j];
                    for (int k = 0, weightsCount = neuron.Weights.Length; k < weightsCount; k++)
                    {
                        neuron.Weights[k] = _genes[v++];
                    }

                    neuron.Threshold = _genes[v++];
                }
            }
        }
Example #23
            //Evaluate fitness of solutions (weights in the network)
            private void Optimizer_ObjectiveFunction(double[] positions, ref double fitnessValue)
            {
                // The sum of error
                double SumErr = 0;

                double[] outpts;
                // evaluate the error of this candidate solution
                double[] chromosomeGenes = positions;
                // copy the candidate solution's values into the neural network's weights
                int v = 0;

                for (int i = 0; i < network.Layers.Length; i++)
                {
                    Layer layer = network.Layers[i];

                    for (int j = 0; j < layer.Neurons.Length; j++)
                    {
                        ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                        for (int k = 0; k < neuron.Weights.Length; k++)
                        {
                            neuron.Weights[k] = chromosomeGenes[v++];
                        }
                        neuron.Threshold = chromosomeGenes[v++];
                    }
                }

                // evaluate the error over the training data: sum(e) = sum[(Yi - Si)^2]:

                for (int k = 0; k < this.Inputs.GetLength(0); k++)
                {
                    outpts = this.network.Compute(this.Inputs[k]);

                    for (int l = 0; l < outpts.Length; l++)
                    {
                        SumErr += Math.Pow((Outputs[k][l] - outpts[l]), 2);
                    }
                }

                fitnessValue = SumErr;
            }
Example #24
        public void CreateActivationLayerTest()
        {
            LayerFactory        factory             = new LayerFactory();
            IActivationFunction ActivationThresHold = new ThresholdFunction();

            ActivationLayer layer = (ActivationLayer)factory.CreateLayer(NETWORK_TYPE_ACTIVATION, INPUT_COUNT, ActivationThresHold);

            Assert.Equal(3, layer.GetNeuronCount());
            ActivationNeuron actNeuron = (ActivationNeuron)layer.GetNeuron(1);

            Assert.NotNull(actNeuron);

            List <ISynapse> inputs = actNeuron.FetchInputs();

            Assert.NotNull(inputs);
            Assert.Equal(INPUT_COUNT, inputs.Count);

            ISynapse input = inputs[1];

            Assert.True(input.Weight != 0);
        }
Example #25
        /// <summary>
        /// Creates a new activation layer.
        /// </summary>
        ///
        /// <param name="blueprint">The blueprint specifying the neuron count and the activation function.</param>
        /// <param name="parentNetwork">The parent network.</param>
        public ActivationLayer(ActivationLayerBlueprint blueprint, INetwork parentNetwork)
        {
            // Create the neurons.
            neurons = new List <IActivationNeuron>(blueprint.NeuronCount);
            for (int i = 0; i < blueprint.NeuronCount; i++)
            {
                IActivationNeuron neuron = new ActivationNeuron(this);
                neurons.Add(neuron);
            }

            sourceConnectors = new List <IConnector>();
            targetConnectors = new List <IConnector>();

            // Validate the activation function.
            Utilities.ObjectNotNull(blueprint.ActivationFunction, "activationFunction");
            this.activationFunction = blueprint.ActivationFunction;

            // Validate the parent network.
            Utilities.ObjectNotNull(parentNetwork, "parentNetwork");
            this.parentNetwork = parentNetwork;
        }
        /// <summary>
        ///   Creates a new <see cref="ActivationNetwork"/> from this instance.
        /// </summary>
        ///
        /// <param name="outputs">The number of output neurons in the last layer.</param>
        /// <param name="function">The activation function to use in the last layer.</param>
        ///
        /// <returns>An <see cref="ActivationNetwork"/> containing this network.</returns>
        ///
        public ActivationNetwork ToActivationNetwork(IActivationFunction function, int outputs)
        {
            ActivationNetwork ann = new ActivationNetwork(function,
                                                          inputsCount, hidden.Neurons.Length, outputs);

            // For each neuron
            for (int i = 0; i < hidden.Neurons.Length; i++)
            {
                ActivationNeuron aneuron = ann.Layers[0].Neurons[i] as ActivationNeuron;
                StochasticNeuron sneuron = hidden.Neurons[i];

                // For each weight
                for (int j = 0; j < sneuron.Weights.Length; j++)
                {
                    aneuron.Weights[j] = sneuron.Weights[j];
                }
                aneuron.Threshold          = sneuron.Threshold;
                aneuron.ActivationFunction = sneuron.ActivationFunction;
            }

            return(ann);
        }
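A hedged usage sketch for ToActivationNetwork: once the hidden layer has been trained (for example by the contrastive-divergence update shown in a later example), the learned weights can be wrapped into a feed-forward ActivationNetwork with a fresh output layer. The variable names below are assumptions:

        // rbm is assumed to be the instance exposing ToActivationNetwork above,
        // sampleInput an arbitrary double[] feature vector defined elsewhere
        ActivationNetwork classifier = rbm.ToActivationNetwork(new SigmoidFunction(), 10);

        // the hidden layer's weights, thresholds and activation function were copied over,
        // so the resulting network can be fine-tuned with any supervised teacher
        double[] prediction = classifier.Compute(sampleInput);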
Example #27
        public void SetWithDoubleArrayChromosome(DoubleArrayChromosome daChromosome)
        {
            int count = 0;

            double[] chromosomeGenes = daChromosome.Value;
            // assign new weights and thresholds to the network from the given chromosome
            for (int i = 0, layersCount = Layers.Length; i < layersCount; i++)
            {
                Layer layer = Layers[i];

                for (int j = 0; j < layer.Neurons.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    for (int k = 0; k < neuron.Weights.Length; k++)
                    {
                        neuron.Weights[k] = chromosomeGenes[count++];
                    }
                    neuron.Threshold = chromosomeGenes[count++];
                }
            }
        }
Example #28
        /// <summary>
        /// Runs learning iteration
        /// </summary>
        ///
        /// <param name="input">input vector</param>
        /// <param name="output">desired output vector</param>
        ///
        /// <returns>Returns the sum of squared errors divided by 2.</returns>
        ///
        /// <remarks>Runs one learning iteration and updates neuron's
        /// weights.</remarks>
        ///
        public double Run(double[] input, double[] output)
        {
            // compute output of network
            double[] networkOutput = network.Compute(input);

            // get the only layer of the network
            ActivationLayer layer = network[0];
            // get activation function of the layer
            IActivationFunction activationFunction = layer[0].ActivationFunction;

            // summary network absolute error
            double error = 0.0;

            // update weights of each neuron
            for (int j = 0, k = layer.NeuronsCount; j < k; j++)
            {
                // get neuron of the layer
                ActivationNeuron neuron = layer[j];
                // calculate neuron's error
                double e = output[j] - networkOutput[j];
                // get activation function's derivative
                double functionDerivative = activationFunction.Derivative2(networkOutput[j]);

                // update weights
                for (int i = 0, n = neuron.InputsCount; i < n; i++)
                {
                    neuron[i] += learningRate * e * functionDerivative * input[i];
                }

                // update threshold value
                neuron.Threshold += learningRate * e * functionDerivative;

                // sum error
                error += (e * e);
            }

            return(error / 2);
        }
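A hedged usage sketch for the delta-rule Run above, assuming it lives in a single-layer teacher in the AForge style (the class name DeltaRuleLearning and its LearningRate property are assumptions):

        // single-layer network: 2 inputs, 1 sigmoid output neuron
        var network = new ActivationNetwork(new SigmoidFunction(), 2, 1);
        var teacher = new DeltaRuleLearning(network) { LearningRate = 0.1 };   // assumed class

        // iterate over single samples until the returned squared error is small enough
        double error;
        do
        {
            error = teacher.Run(new double[] { 1, 0 }, new double[] { 1 });
        }
        while (error > 0.001);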
Example #29
        private void UpdateRBM()
        {
            for (int i = 0; i < rbm.NeuronsCount; i++)
            {
                ActivationNeuron neuron = rbm.Neurons[i];

                for (int j = 0; j < neuron.InputsCount; j++)
                {
                    neuron.Weights[j] = neuron.Weights[j] + learningRate * (x1[j] * h1[i] - Q[i] * x2[j]);

                    rbm.Neurons_[j].Weights[i] = neuron.Weights[j];
                }
            }

            for (int i = 0; i < rbm.NeuronsCount; i++)
            {
                rbm.Neurons[i].Threshold = rbm.Neurons[i].Threshold + learningRate * (h1[i] - Q[i]);
            }

            for (int i = 0; i < rbm.InputsCount; i++)
            {
                rbm.Neurons_[i].Threshold = rbm.Neurons_[i].Threshold + learningRate * (x1[i] - x2[i]);
            }
        }
Example #30
        /// <summary>
        /// Runs learning iteration.
        /// </summary>
        ///
        /// <param name="input">Input vector.</param>
        /// <param name="output">Desired output vector.</param>
        ///
        /// <returns>Returns absolute error - difference between current network's output and
        /// desired output.</returns>
        ///
        /// <remarks><para>Runs one learning iteration and updates neuron's
        /// weights in the case if neuron's output is not equal to the
        /// desired output.</para></remarks>
        ///
        public double Run(double[] input, double[] output)
        {
            // compute output of network
            double[] networkOutput = network.Compute(input);

            // get the only layer of the network
            Layer layer = network.Layers[0];

            // summary network absolute error
            double error = 0.0;

            // check output of each neuron and update weights
            for (int j = 0; j < layer.Neurons.Length; j++)
            {
                double e = output[j] - networkOutput[j];

                if (e != 0)
                {
                    ActivationNeuron perceptron = layer.Neurons[j] as ActivationNeuron;

                    // update weights
                    for (int i = 0; i < perceptron.Weights.Length; i++)
                    {
                        perceptron.Weights[i] += learningRate * e * input[i];
                    }

                    // update threshold value
                    perceptron.Threshold += learningRate * e;

                    // make error to be absolute
                    error += Math.Abs(e);
                }
            }

            return(error);
        }
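A similar hedged sketch for the perceptron rule above (the class name PerceptronLearning and the threshold activation are assumptions). Because the method returns the absolute error, a return value of zero means the sample is already classified correctly:

        var network = new ActivationNetwork(new ThresholdFunction(), 2, 1);
        var teacher = new PerceptronLearning(network) { LearningRate = 0.1 };   // assumed class

        // one AND-gate sample; Run returns 0 once the sample is classified correctly
        double error = teacher.Run(new double[] { 1, 1 }, new double[] { 1 });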
        public void And(bool val1, bool val2)
        {
            // Arrange
            var boolTransformer = new BoolDecimalTransformer();

            var receiver = new ValueStoringNeuron<bool>();
            var perceptron = new OneToManyBufferingDecorator<decimal>(
                new ManyInputNeuron<decimal, decimal>(
                    new DecimalSumFunction(),
                    new ActivationNeuron<decimal, decimal>(
                        new DecimalThresholdFunction
                        {
                            Threshold = 2M
                        }, new ActivationNeuron<decimal, bool>(
                            boolTransformer, receiver)))
                , 2);

            var leftNeuron = new ActivationNeuron<bool, decimal>(
                boolTransformer,
                perceptron);

            var rightNeuron = new ActivationNeuron<bool, decimal>(
                boolTransformer,
                perceptron);

            // Act
            leftNeuron.OnNext(val1);
            rightNeuron.OnNext(val2);

            // Assert
            var result = receiver.LastValue;
            var expected = val1 && val2;
            result.Should().Be(expected);
        }
 /// <summary>
 ///   Initializes a new instance of the <see cref="ActivationMaximization"/> class.
 /// </summary>
 /// 
 /// <param name="neuron">The neuron to be visualized.</param>
 /// 
 public ActivationMaximization(ActivationNeuron neuron)
 {
     this.neuron = neuron;
 }
 public void SutIsEvaluatable(ActivationNeuron<decimal, long> sut)
 {
     Assert.IsAssignableFrom<IObserver<decimal>>(sut);
 }
Example #34
        /// <summary>
        ///   Update network weights.
        /// </summary>
        ///
        private void UpdateNetwork(bool errIncrease = false)
        {
            // For each layer of the network
            for (int i = 0; i < weightsUpdates.Length; i++)
            {
                ActivationLayer layer = this.network.Layers[i] as ActivationLayer;
                double[][]      layerWeightsUpdates   = weightsUpdates[i];
                double[]        layerThresholdUpdates = thresholdsUpdates[i];

                double[][] layerWeightsDerivatives   = weightsDerivatives[i];
                double[]   layerThresholdDerivatives = thresholdsDerivatives[i];

                double[][] layerPreviousWeightsDerivatives   = weightsPreviousDerivatives[i];
                double[]   layerPreviousThresholdDerivatives = thresholdsPreviousDerivatives[i];

                // For each neuron in the current layer
                for (int j = 0; j < layerWeightsUpdates.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    double[] neuronWeightUpdates             = layerWeightsUpdates[j];
                    double[] neuronWeightDerivatives         = layerWeightsDerivatives[j];
                    double[] neuronPreviousWeightDerivatives = layerPreviousWeightsDerivatives[j];

                    double S;

                    // For each weight in the current neuron
                    for (int k = 0; k < neuronPreviousWeightDerivatives.Length; k++)
                    {
                        S = neuronPreviousWeightDerivatives[k] * neuronWeightDerivatives[k];

                        if (S > 0.0)
                        {
                            neuronWeightUpdates[k]             = Math.Min(neuronWeightUpdates[k] * etaPlus, deltaMax);
                            neuron.Weights[k]                 -= Math.Sign(neuronWeightDerivatives[k]) * neuronWeightUpdates[k];
                            neuronPreviousWeightDerivatives[k] = neuronWeightDerivatives[k];
                        }
                        else if (S < 0.0)
                        {
                            var delta = Math.Max(neuronWeightUpdates[k] * etaMinus, deltaMin);
                            if (errIncrease)
                            {
                                neuron.Weights[k] -= neuronWeightUpdates[k]; // revert previous update
                            }
                            neuronWeightUpdates[k]             = delta;
                            neuronPreviousWeightDerivatives[k] = 0.0;
                        }
                        else
                        {
                            neuron.Weights[k] -= Math.Sign(neuronWeightDerivatives[k]) * neuronWeightUpdates[k];
                            neuronPreviousWeightDerivatives[k] = neuronWeightDerivatives[k];
                        }
                    }

                    S = layerPreviousThresholdDerivatives[j] * layerThresholdDerivatives[j];

                    if (S > 0.0)
                    {
                        layerThresholdUpdates[j]             = Math.Min(layerThresholdUpdates[j] * etaPlus, deltaMax);
                        neuron.Threshold                    -= Math.Sign(layerThresholdDerivatives[j]) * layerThresholdUpdates[j];
                        layerPreviousThresholdDerivatives[j] = layerThresholdDerivatives[j];
                    }
                    else if (S < 0.0)
                    {
                        var delta = Math.Max(layerThresholdUpdates[j] * etaMinus, deltaMin);
                        if (errIncrease)
                        {
                            neuron.Threshold -= layerThresholdUpdates[j]; // revert previous update
                        }
                        layerThresholdUpdates[j]             = delta;
                        layerPreviousThresholdDerivatives[j] = 0.0;
                    }
                    else
                    {
                        neuron.Threshold -= Math.Sign(layerThresholdDerivatives[j]) * layerThresholdUpdates[j];
                        layerPreviousThresholdDerivatives[j] = layerThresholdDerivatives[j];
                    }
                }
            }
        }
        /// <summary>
        ///   Calculates partial derivatives for all weights of the network.
        /// </summary>
        ///
        /// <param name="input">The input vector.</param>
        /// <param name="desiredOutput">Desired output vector.</param>
        /// <param name="outputIndex">The current output location (index) in the desired output vector.</param>
        ///
        /// <returns>Returns summary squared error of the last layer.</returns>
        ///
        private double CalculateDerivatives(double[] input, double[] desiredOutput, int outputIndex)
        {
            // Assume all network neurons have the same activation function
            var function = (network.Layers[0].Neurons[0] as ActivationNeuron).ActivationFunction;


            // Start by the output layer first
            int             outputLayerIndex = network.Layers.Length - 1;
            ActivationLayer outputLayer      = network.Layers[outputLayerIndex] as ActivationLayer;

            double[] previousLayerOutput;

            // If there is only a single layer, the previous layer's output is just the input vector
            previousLayerOutput = (outputLayerIndex == 0) ? input : network.Layers[outputLayerIndex - 1].Output;

            // Clear the output layer's threshold derivatives
            for (int i = 0; i < thresholdsDerivatives[outputLayerIndex].Length; i++)
            {
                thresholdsDerivatives[outputLayerIndex][i] = 0;
            }

            for (int i = 0; i < weightDerivatives[outputLayerIndex].Length; i++)
            {
                for (int j = 0; j < weightDerivatives[outputLayerIndex][i].Length; j++)
                {
                    weightDerivatives[outputLayerIndex][i][j] = 0;
                }
            }

            // Retrieve current desired output neuron
            ActivationNeuron outputNeuron = outputLayer.Neurons[outputIndex] as ActivationNeuron;

            float[] neuronWeightDerivatives = weightDerivatives[outputLayerIndex][outputIndex];

            double output     = outputNeuron.Output;
            double error      = desiredOutput[outputIndex] - output;
            double derivative = function.Derivative2(output);

            // Set derivative for each weight in the neuron
            for (int i = 0; i < neuronWeightDerivatives.Length; i++)
            {
                neuronWeightDerivatives[i] = (float)(derivative * previousLayerOutput[i]);
            }

            // Set derivative for the current threshold (bias) term
            thresholdsDerivatives[outputLayerIndex][outputIndex] = (float)derivative;


            // Now, proceed to the next hidden layers
            for (int li = network.Layers.Length - 2; li >= 0; li--)
            {
                int nextLayerIndex = li + 1;

                ActivationLayer layer     = network.Layers[li] as ActivationLayer;
                ActivationLayer nextLayer = network.Layers[nextLayerIndex] as ActivationLayer;

                // If we are in the first layer, the previous layer is just the input layer
                previousLayerOutput = (li == 0) ? input : network.Layers[li - 1].Output;

                // Now, we will compute the derivatives for the current layer applying the chain
                //  rule. To apply the chain-rule, we will make use of the previous derivatives
                //  computed for the inner layers (forming a calculation chain, hence the name).

                // So, for each neuron in the current layer:
                for (int ni = 0; ni < layer.Neurons.Length; ni++)
                {
                    ActivationNeuron neuron = layer.Neurons[ni] as ActivationNeuron;

                    neuronWeightDerivatives = weightDerivatives[li][ni];

                    float[] layerDerivatives     = thresholdsDerivatives[li];
                    float[] nextLayerDerivatives = thresholdsDerivatives[li + 1];

                    double sum = 0;

                    // The chain rule can be stated as (f(w*g(x)))' = f'(w*g(x)) * w * g'(x)
                    //
                    // We will start computing the second part of the product. Since the g'
                    //  derivatives have already been computed in the previous computation,
                    //  we will be summing all previous function derivatives and weighting
                    //  them using their connection weight (synapses).
                    //
                    // So, for each neuron in the next layer:
                    for (int nj = 0; nj < nextLayerDerivatives.Length; nj++)
                    {
                        // retrieve the weight connecting the output of the current
                        //   neuron and the activation function of the next neuron.
                        double weight = nextLayer.Neurons[nj].Weights[ni];

                        // accumulate the synapse weight * next layer derivative
                        sum += weight * nextLayerDerivatives[nj];
                    }

                    // Continue forming the chain-rule statement
                    derivative = sum * function.Derivative2(neuron.Output);

                    // Set derivative for each weight in the neuron
                    for (int wi = 0; wi < neuronWeightDerivatives.Length; wi++)
                    {
                        neuronWeightDerivatives[wi] = (float)(derivative * previousLayerOutput[wi]);
                    }

                    // Set derivative for the current threshold
                    layerDerivatives[ni] = (float)(derivative);

                    // The threshold derivatives also gather the derivatives for
                    // the layer, and thus can be re-used in next calculations.
                }
            }

            // return error
            return(error);
        }
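One detail worth noting in the code above: Derivative2 is called with the neuron's output, not its weighted input. In AForge-style activation functions that is exactly what Derivative2 expects, since for functions like the sigmoid the derivative can be written purely in terms of the output y. A small illustrative sketch (the alpha value is an assumption):

        // sigmoid: f(x) = 1 / (1 + exp(-alpha * x)), with f'(x) = alpha * y * (1 - y) where y = f(x)
        double alpha = 2.0;                                     // assumed steepness
        double y = 1.0 / (1.0 + Math.Exp(-alpha * 0.5));        // output for input x = 0.5
        double derivativeFromOutput = alpha * y * (1.0 - y);    // what Derivative2(y) computes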
        public void SutChainsOnCompleted([Frozen]Mock<IObserver<long>> mockBackend, ActivationNeuron<decimal, long> sut)
        {
            sut.OnCompleted();

            mockBackend.Verify(backend => backend.OnCompleted(), Times.Once());
        }
        public void SutTransformsInputOnNext(decimal input, [Frozen]Mock<ITransformingFunction<decimal, long>> mockFunction, ActivationNeuron<decimal, long> sut)
        {
            sut.OnNext(input);

            mockFunction.Verify(function => function.Evaluate(input), Times.AtLeastOnce());
        }
 internal ActivationNeuronBackpropagator(ActivationNeuron neuron)
     : base(neuron)
 {
     Neuron = neuron;
     biasConn = (IBackwardConnection)neuron;
 }
        /// <summary>
        /// Update network's weights.
        /// </summary>
        ///
        private void UpdateNetwork()
        {
            double[][] layerWeightsUpdates;
            double[]   layerThresholdUpdates;
            double[]   neuronWeightUpdates;

            double[][] layerWeightsDerivatives;
            double[]   layerThresholdDerivatives;
            double[]   neuronWeightDerivatives;

            double[][] layerPreviousWeightsDerivatives;
            double[]   layerPreviousThresholdDerivatives;
            double[]   neuronPreviousWeightDerivatives;

            // for each layer of the network
            for (int i = 0; i < network.Layers.Length; i++)
            {
                ActivationLayer layer = network.Layers[i] as ActivationLayer;

                layerWeightsUpdates   = weightsUpdates[i];
                layerThresholdUpdates = thresholdsUpdates[i];

                layerWeightsDerivatives   = weightsDerivatives[i];
                layerThresholdDerivatives = thresholdsDerivatives[i];

                layerPreviousWeightsDerivatives   = weightsPreviousDerivatives[i];
                layerPreviousThresholdDerivatives = thresholdsPreviousDerivatives[i];

                // for each neuron of the layer
                for (int j = 0; j < layer.Neurons.Length; j++)
                {
                    ActivationNeuron neuron = layer.Neurons[j] as ActivationNeuron;

                    neuronWeightUpdates             = layerWeightsUpdates[j];
                    neuronWeightDerivatives         = layerWeightsDerivatives[j];
                    neuronPreviousWeightDerivatives = layerPreviousWeightsDerivatives[j];

                    double S = 0;

                    // for each weight of the neuron
                    for (int k = 0; k < neuron.InputsCount; k++)
                    {
                        S = neuronPreviousWeightDerivatives[k] * neuronWeightDerivatives[k];

                        if (S > 0)
                        {
                            neuronWeightUpdates[k]             = Math.Min(neuronWeightUpdates[k] * etaPlus, deltaMax);
                            neuron.Weights[k]                 -= Math.Sign(neuronWeightDerivatives[k]) * neuronWeightUpdates[k];
                            neuronPreviousWeightDerivatives[k] = neuronWeightDerivatives[k];
                        }
                        else if (S < 0)
                        {
                            neuronWeightUpdates[k]             = Math.Max(neuronWeightUpdates[k] * etaMinus, deltaMin);
                            neuronPreviousWeightDerivatives[k] = 0;
                        }
                        else
                        {
                            neuron.Weights[k] -= Math.Sign(neuronWeightDerivatives[k]) * neuronWeightUpdates[k];
                            neuronPreviousWeightDerivatives[k] = neuronWeightDerivatives[k];
                        }
                    }

                    // update threshold
                    S = layerPreviousThresholdDerivatives[j] * layerThresholdDerivatives[j];

                    if (S > 0)
                    {
                        layerThresholdUpdates[j]             = Math.Min(layerThresholdUpdates[j] * etaPlus, deltaMax);
                        neuron.Threshold                    -= Math.Sign(layerThresholdDerivatives[j]) * layerThresholdUpdates[j];
                        layerPreviousThresholdDerivatives[j] = layerThresholdDerivatives[j];
                    }
                    else if (S < 0)
                    {
                        layerThresholdUpdates[j]             = Math.Max(layerThresholdUpdates[j] * etaMinus, deltaMin);
                        layerPreviousThresholdDerivatives[j] = 0;
                    }
                    else
                    {
                        neuron.Threshold -= Math.Sign(layerThresholdDerivatives[j]) * layerThresholdUpdates[j];
                        layerPreviousThresholdDerivatives[j] = layerThresholdDerivatives[j];
                    }
                }
            }
        }
        public void SutChainsOnNext(decimal input, [Frozen]Mock<IObserver<long>> mockBackend, ActivationNeuron<decimal, long> sut)
        {
            sut.OnNext(input);

            mockBackend.Verify(backend => backend.OnNext(It.IsAny<long>()), Times.Once());
        }