Example no. 1
        private float[] _weights; // synapse weights

        #endregion Fields

        #region Constructors

        /// <summary>
        ///   Constructor
        /// </summary>
        /// <param name = "inputs">Number of inputs of the neuron</param>
        public Neuron(int inputs)
        {
            _function = new SigmoidFunction();
            _inputsCount = Math.Max(1, inputs);
            _weights = new float[_inputsCount];
            Randomize();
        }
        /// <summary>
        ///   Normalizes the desired output in place
        /// </summary>
        /// <param name = "function">Activation function used</param>
        /// <param name = "output">Output to normalize</param>
        /// <returns>Normalized output (same array instance)</returns>
        public static double[] NormalizeOneDesiredOutputInPlace(IActivationFunction function, double[] output)
        {
            if (function is ActivationSigmoid)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0 ? 0.8 : 0.2);
                }
            }
            else if (function is ActivationTANH)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0.5 ? 0.5 : -0.5);
                }
            }
            else if (function is ActivationLinear)
            {
                /*do nothing*/
            }
            else
            {
                throw new ArgumentException("Unknown activation function");
            }

            return output;
        }
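
A minimal usage sketch for the helper above. Assumptions: the method lives on the Neuron class shown in this example, and ActivationSigmoid is the IActivationFunction implementation referenced in the code.

        // Hypothetical usage: clamp raw target values into the sigmoid's trainable range.
        double[] desired = { -1.0, 0.0, 1.0 };
        double[] normalized = Neuron.NormalizeOneDesiredOutputInPlace(new ActivationSigmoid(), desired);
        // The array is modified in place: values > 0 become 0.8, the rest 0.2,
        // so both 'desired' and 'normalized' now hold { 0.2, 0.2, 0.8 }.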
Example no. 3
 /// <summary>
 ///   Constructor
 /// </summary>
 /// <param name = "inputs">Number of inputs of the neuron</param>
 /// <param name = "function">Activation function of the neuron</param>
 public Neuron(int inputs, IActivationFunction function)
 {
     _function = function;
     _inputsCount = Math.Max(1, inputs);
     _weights = new float[_inputsCount];
     Randomize();
 }
 /// <summary>
 ///   Normalizes the desired input in place
 /// </summary>
 /// <param name = "function">Activation function</param>
 /// <param name = "input">Input to normalize</param>
 /// <returns>Reference to normalized input</returns>
 public static double[] NormalizeDesiredInputInPlace(IActivationFunction function, double[] input)
 {
     if (function is ActivationTANH)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? -0.8f : 0.8f));
         }
     }
     else if (function is ActivationSigmoid)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? 0.2f : 0.8f));
         }
     }
     else if (function is ActivationLinear)
     {
         /*do nothing*/
     }
     else
     {
         throw new ArgumentException("Unknown activation function");
     }
     return input;
 }
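
A companion sketch for the input normalization above, under the same assumptions, here with an Encog-style ActivationTANH:

     // Hypothetical usage: map the sign of each raw input into the tanh training range.
     double[] raw = { -3.0, 0.0, 2.0 };
     Neuron.NormalizeDesiredInputInPlace(new ActivationTANH(), raw);
     // raw is modified in place: zeros stay 0, negatives become -0.8, positives 0.8
     // (approximately, since the snippet assigns float literals into a double array).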
Example no. 5
        public Substrate(uint input, uint output, uint hidden, IActivationFunction function)
        {
            weightRange = HyperNEATParameters.weightRange;
            threshold = HyperNEATParameters.threshold;

            inputCount = input;
            outputCount = output;
            hiddenCount = hidden;
            activationFunction = function;

            inputDelta = 2.0f / (inputCount);
            if (hiddenCount != 0)
                hiddenDelta = 2.0f / (hiddenCount);
            else
                hiddenDelta = 0;
            outputDelta = 2.0f / (outputCount);

            //SharpNEAT requires that the neuronlist be input|bias|output|hidden
            neurons=new NeuronGeneList((int)(inputCount + outputCount+ hiddenCount));
            //setup the inputs
            for (uint a = 0; a < inputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, activationFunction));
            }

            //setup the outputs
            for (uint a = 0; a < outputCount; a++)
            {
                neurons.Add(new NeuronGene(a + inputCount, NeuronType.Output, activationFunction));
            }
            for (uint a = 0; a < hiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + inputCount+outputCount, NeuronType.Hidden, activationFunction));
            }
        }
		static public INetwork DecodeToConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
		{
		//----- Loop the neuronGenes. Create Neuron for each one.
			// Store a table of neurons keyed by their id.
			Hashtable neuronTable = new Hashtable(g.NeuronGeneList.Count);
			NeuronList neuronList = new NeuronList();

			foreach(NeuronGene neuronGene in g.NeuronGeneList)
			{
				Neuron newNeuron = new Neuron(activationFn, neuronGene.NeuronType, neuronGene.InnovationId);
				neuronTable.Add(newNeuron.Id, newNeuron);
				neuronList.Add(newNeuron);
			}

		//----- Loop the connection genes. Create a Connection for each one and bind them to the relevant Neurons.
			foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
			{
				Connection newConnection = new Connection(connectionGene.SourceNeuronId, connectionGene.TargetNeuronId, connectionGene.Weight);

				// Bind the connection to its source neuron.
				newConnection.SetSourceNeuron((Neuron)neuronTable[connectionGene.SourceNeuronId]);

				// Store the new connection against its target neuron.
				((Neuron)(neuronTable[connectionGene.TargetNeuronId])).ConnectionList.Add(newConnection);
			}

			return new ConcurrentNetwork(neuronList);
		}
Example no. 7
        /// <summary>
        /// Construct a starting NEAT population.
        /// </summary>
        ///
        /// <param name="inputCount">The input neuron count.</param>
        /// <param name="outputCount">The output neuron count.</param>
        /// <param name="populationSize">The population size.</param>
        public NEATPopulation(int inputCount, int outputCount,
                              int populationSize)
            : base(populationSize)
        {
            _neatActivationFunction = new ActivationSigmoid();
            _outputActivationFunction = new ActivationLinear();
            InputCount = inputCount;
            OutputCount = outputCount;

            if (populationSize == 0)
            {
                throw new NeuralNetworkError(
                    "Population must have more than zero genomes.");
            }

            // create the initial population
            for (int i = 0; i < populationSize; i++)
            {
                var genome = new NEATGenome(AssignGenomeID(), inputCount,
                                            outputCount);
                Add(genome);
            }

            // create initial innovations
            var genome2 = (NEATGenome) Genomes[0];
            Innovations = new NEATInnovationList(this, genome2.Links,
                                                 genome2.Neurons);
        }
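
A brief construction sketch for the population above. Assumption: the Encog-style NEATPopulation shown here; the 2-input/1-output sizing is only illustrative.

        // Hypothetical usage: 2 inputs, 1 output, 100 genomes in the initial population.
        var population = new NEATPopulation(2, 1, 100);
        // The constructor seeds 100 NEATGenome instances and then builds the initial
        // NEATInnovationList from the first genome's links and neurons.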
Example no. 8
 public Neuron(Neuron neuron)
 {
     m_inputsCount = neuron.m_inputsCount;
     m_weights = (double[])neuron.m_weights.Clone();
     m_function = neuron.m_function;
     m_threshold = neuron.m_threshold;
 }
Example no. 9
 public Neuron(int inputCount, double[] weights, IActivationFunction function, double threshold)
 {
     m_inputsCount = inputCount;
     m_weights = weights;
     m_function = function;
     m_threshold = threshold;
 }
Example no. 10
 public Neuron(int inputCount, IActivationFunction function)
 {
     m_inputsCount = inputCount;
     m_function = function;
     m_weights = new double[inputCount + 1];
     SetRandomWeights();
 }
Example no. 11
 public FlatLayer(IActivationFunction activation, int count, double biasActivation)
 {
     this.Activation = activation;
     this._x10f4d88af727adbc = count;
     this._x25922738b86264c8 = biasActivation;
     this._x4d51c0aa16352a14 = null;
 }
 /// <summary>
 /// Set new activation function for all neurons of the network.
 /// </summary>
 /// 
 /// <param name="function">Activation function to set.</param>
 /// 
 /// <remarks><para>The method sets a new activation function for all neurons by calling the
 /// <see cref="ActivationLayer.SetActivationFunction"/> method for each layer of the network.</para></remarks>
 /// 
 public void SetActivationFunction( IActivationFunction function )
 {
     for ( int i = 0; i < layers.Length; i++ )
     {
         ( (ActivationLayer) layers[i] ).SetActivationFunction( function );
     }
 }
 /// <summary>
 /// Construct with a single IActivationFunction.
 /// </summary>
 /// <param name="activationFn"></param>
 public NeatActivationFunctionLibrary(IActivationFunction activationFn)
 {
     _activationFn = activationFn;
     _activationFnInfo = new ActivationFunctionInfo(0, 1.0, activationFn);
     _activationFnInfoList = new List<ActivationFunctionInfo>(1);
     _activationFnInfoList.Add(_activationFnInfo);
 }
        /// <summary>
        /// Constructs a FastCyclicNetwork with the provided pre-built FastConnection array and 
        /// associated data.
        /// </summary>
        public FastCyclicNetwork(FastConnection[] connectionArray,
                                 IActivationFunction[] neuronActivationFnArray,
                                 double[][] neuronAuxArgsArray,
                                 int neuronCount,
                                 int inputNeuronCount,
                                 int outputNeuronCount,
                                 int timestepsPerActivation)
        {
            _connectionArray = connectionArray;
            _neuronActivationFnArray = neuronActivationFnArray;
            _neuronAuxArgsArray = neuronAuxArgsArray;

            // Create neuron pre- and post-activation signal arrays.
            _preActivationArray = new double[neuronCount];
            _postActivationArray = new double[neuronCount];

            // Wrap sub-ranges of the neuron signal arrays as input and output arrays for IBlackBox.
            // Offset is 1 to skip bias neuron (The value at index 1 is the first black box input).
            _inputSignalArrayWrapper = new SignalArray(_postActivationArray, 1, inputNeuronCount);

            // Offset to skip bias and input neurons. Output neurons follow input neurons in the arrays.
            _outputSignalArrayWrapper = new SignalArray(_postActivationArray, inputNeuronCount+1, outputNeuronCount);

            // Store counts for use during activation.
            _inputNeuronCount = inputNeuronCount;
            _inputAndBiasNeuronCount = inputNeuronCount+1;
            _outputNeuronCount = outputNeuronCount;
            _timestepsPerActivation = timestepsPerActivation;

            // Initialise the bias neuron's fixed output value.
            _postActivationArray[0] = 1.0;
        }
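
For orientation, a standalone sketch of the post-activation array layout implied by the offsets above. The counts (3 inputs, 2 outputs) are illustrative only; no SharpNEAT types are assumed.

            // Hypothetical layout for 3 inputs and 2 outputs (hidden neurons would follow):
            double[] post = new double[1 + 3 + 2];
            post[0] = 1.0;   // bias neuron, fixed output
            // post[1..3] are the black-box inputs  -> SignalArray(post, 1, 3) above
            // post[4..5] are the black-box outputs -> SignalArray(post, 4, 2) above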
Example no. 15
 public Neuron(IActivationFunction activationFunc, double charge)
 {
     Charge = charge;
     Error = 0;
     Activation = activationFunc;
     In = new Dictionary<Neuron, double>();
 }
Example no. 16
 /// <summary>
 /// Copy constructor.
 /// </summary>
 /// <param name="copyFrom"></param>
 public NeuronGene(NeuronGene copyFrom)
 {
     this.innovationId = copyFrom.innovationId;
     this.neuronType = copyFrom.neuronType;
     this.activationFunction = copyFrom.activationFunction;
     this.Layer = copyFrom.Layer;
 }
		/// <summary>
		/// Set new activation function for all neurons of the layer.
		/// </summary>
		/// 
		/// <param name="function">Activation function to set.</param>
		/// 
		/// <remarks><para>The method sets the new activation function for each neuron by setting
		/// their <see cref="ActivationNeuron.ActivationFunction"/> property.</para></remarks>
		/// 
		public void SetActivationFunction( IActivationFunction function )
		{
			for ( int i = 0; i < neurons.Length; i++ )
			{
				( (ActivationNeuron) neurons[i] ).ActivationFunction = function;
			}
		}
		/// <summary>
		/// Initializes a new instance of the <see cref="ActivationLayer"/> class.
		/// </summary>
		/// 
		/// <param name="neuronsCount">Layer's neurons count.</param>
		/// <param name="inputsCount">Layer's inputs count.</param>
		/// <param name="function">Activation function of neurons of the layer.</param>
		/// 
		/// <remarks>The new layer is randomized (see <see cref="ActivationNeuron.Randomize"/>
		/// method) after it is created.</remarks>
		/// 
		public ActivationLayer( int neuronsCount, int inputsCount, IActivationFunction function )
			: base( neuronsCount, inputsCount )
		{
			// create each neuron
			for ( int i = 0; i < neurons.Length; i++ )
				neurons[i] = new ActivationNeuron( inputsCount, function );
		}
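
A short usage sketch for the two members above. Assumption: AForge.NET-style SigmoidFunction and BipolarSigmoidFunction implementations of IActivationFunction.

			// Hypothetical usage: a layer of 5 neurons with 3 inputs each,
			// later switched to a bipolar sigmoid for every neuron.
			var layer = new ActivationLayer( 5, 3, new SigmoidFunction( ) );
			layer.SetActivationFunction( new BipolarSigmoidFunction( ) );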
Example no. 19
 /// <summary>
 /// Construct a neuron gene.
 /// </summary>
 /// <param name="type">The neuron type.</param>
 /// <param name="theActivationFunction">The activation function.</param>
 /// <param name="id">The neuron id.</param>
 /// <param name="innovationId">The innovation id.</param>
 public NEATNeuronGene(NEATNeuronType type, IActivationFunction theActivationFunction, long id, long innovationId)
 {
     NeuronType = type;
     InnovationId = innovationId;
     Id = id;
     ActivationFunction = theActivationFunction;
 }
 /// <summary>
 /// Not used for this type of plugin.
 /// </summary>
 ///
 /// <param name="gradients">Not used.</param>
 /// <param name="layerOutput">Not used.</param>
 /// <param name="weights">Not used.</param>
 /// <param name="layerDelta">Not used.</param>
 /// <param name="af">Not used.</param>
 /// <param name="index">Not used.</param>
 /// <param name="fromLayerIndex">Not used.</param>
 /// <param name="fromLayerSize">Not used.</param>
 /// <param name="toLayerIndex">Not used.</param>
 /// <param name="toLayerSize">Not used.</param>
 public void CalculateGradient(double[] gradients,
                               double[] layerOutput, double[] weights,
                               double[] layerDelta, IActivationFunction af,
                               int index, int fromLayerIndex, int fromLayerSize,
                               int toLayerIndex, int toLayerSize)
 {
 }
		public FloatFastConcurrentNetwork(	int biasNeuronCount, 
										int inputNeuronCount,
                                        int outputNeuronCount,
                                        int outputsPerPolicy, // Schrum: Added
										int totalNeuronCount,
										FloatFastConnection[] connectionArray, 
										IActivationFunction[] activationFnArray)
		{
			this.biasNeuronCount = biasNeuronCount;
			this.inputNeuronCount = inputNeuronCount;
			this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
            this.outputNeuronCount = outputNeuronCount;
            this.outputsPerPolicy = outputsPerPolicy; // Schrum: Added

			this.connectionArray = connectionArray;
			this.activationFnArray = activationFnArray;
			
			//----- Allocate the arrays that make up the neural network.
			// The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
			neuronSignalArray = new float[totalNeuronCount];
			_neuronSignalArray = new float[totalNeuronCount];

			for(int i=0; i<biasNeuronCount; i++)
				neuronSignalArray[i] = 1.0F;
		}
Example no. 22
        public static FastConcurrentMultiplicativeNetwork DecodeToFastConcurrentMultiplicativeNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
        {
            int outputNeuronCount = g.OutputNeuronCount;
            int neuronGeneCount = g.NeuronGeneList.Count;

            // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
            // any reason to ever have more than one bias node - although there may be 0.
            int neuronGeneIdx=0;
            for(; neuronGeneIdx<neuronGeneCount; neuronGeneIdx++)
            {
                if(g.NeuronGeneList[neuronGeneIdx].NeuronType != NeuronType.Bias)
                    break;
            }
            int biasNodeCount = neuronGeneIdx;
            int inputNeuronCount = g.InputNeuronCount;

            // ConnectionGenes point to a neuron ID. We need to map this ID to a 0 based index for
            // efficiency. To do this we build a table of indexes (ints) keyed on neuron ID.
            // TODO: An alternative here would be to forgo the building of a table and do a binary
            // search directly on the NeuronGeneList - probably a good idea to use a heuristic based upon
            // neuroncount*connectioncount that decides on which technique to use. Small networks will
            // likely be faster to decode using the binary search.

            // Actually we can partly achieve the above optimization by using HybridDictionary instead of Hashtable.
            // Although creating a table is a bit expensive.
            HybridDictionary neuronIndexTable = new HybridDictionary(neuronGeneCount);
            for(int i=0; i<neuronGeneCount; i++)
                neuronIndexTable.Add(g.NeuronGeneList[i].InnovationId, i);

            // Count how many of the connections are actually enabled. TODO: make faster - store disable count?
            int connectionGeneCount = g.ConnectionGeneList.Count;
            int connectionCount=connectionGeneCount;
            //			for(int i=0; i<connectionGeneCount; i++)
            //			{
            //				if(g.ConnectionGeneList[i].Enabled)
            //					connectionCount++;
            //			}

            // Now we can build the connection array(s).
            FloatFastConnection[] connectionArray = new FloatFastConnection[connectionCount];
            int connectionIdx=0;
            for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionIdx];
                connectionArray[connectionIdx].sourceNeuronIdx = (int)neuronIndexTable[connectionGene.SourceNeuronId];
                connectionArray[connectionIdx].targetNeuronIdx = (int)neuronIndexTable[connectionGene.TargetNeuronId];
                connectionArray[connectionIdx].weight = (float)connectionGene.Weight;
                connectionIdx++;
            }

            // Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
            // TODO: custom sort routine to prevent boxing/unboxing required by Array.Sort(ValueType[])
            //Array.Sort(connectionArray, fastConnectionComparer);
            QuickSortFastConnections(0, fastConnectionArray.Length-1);

            return new FastConcurrentMultiplicativeNetwork(
                biasNodeCount, inputNeuronCount,
                outputNeuronCount, neuronGeneCount,
                connectionArray, activationFn);
        }
Example no. 23
        /// <param name="activationFn">Not strictly part of a genome. But it is useful to document which function
        /// the genome is supposed to run against when decoded into a network.</param>
        public static void Write(XmlNode parentNode, NeatGenome genome, IActivationFunction activationFn)
        {
            //----- Start writing. Create document root node.
            XmlElement xmlGenome = XmlUtilities.AddElement(parentNode, "genome");
            XmlUtilities.AddAttribute(xmlGenome, "id", genome.GenomeId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "species-id", genome.SpeciesId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "age", genome.GenomeAge.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "fitness", genome.Fitness.ToString("0.00"));
            XmlUtilities.AddAttribute(xmlGenome, "activation-fn-id", activationFn.FunctionId);

            //----- Write neurons.
            XmlElement xmlNeurons = XmlUtilities.AddElement(xmlGenome, "neurons");
            foreach(NeuronGene neuronGene in genome.NeuronGeneList)
                WriteNeuron(xmlNeurons, neuronGene);

            //----- Write modules.
            XmlElement xmlModules = XmlUtilities.AddElement(xmlGenome, "modules");
            foreach (ModuleGene moduleGene in genome.ModuleGeneList)
                WriteModule(xmlModules, moduleGene);

            //----- Write Connections.
            XmlElement xmlConnections = XmlUtilities.AddElement(xmlGenome, "connections");
            foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
                WriteConnectionGene(xmlConnections, connectionGene);

            //----- Write behavior
            if(genome.Behavior!=null)
            {
                if(genome.Behavior.behaviorList!=null)
                {
                    XmlElement xmlBehavior = XmlUtilities.AddElement(xmlGenome, "behavior");
                    WriteBehavior(xmlBehavior,genome.Behavior);
                }
            }
        }
Example no. 24
 /// <summary>
 ///     Construct a 2D convolution layer.
 /// </summary>
 /// <param name="theActivation">The activation function.</param>
 /// <param name="theNumFilters">The number of filters.</param>
 /// <param name="theFilterRows">The rows in each filter.</param>
 /// <param name="theFilterColumns">The columns in each filter.</param>
 public Conv2DLayer(IActivationFunction theActivation, int theNumFilters, int theFilterRows, int theFilterColumns)
 {
     Activation = theActivation;
     FilterRows = theFilterRows;
     FilterColumns = theFilterColumns;
     _numFilters = theNumFilters;
 }
Example no. 25
        public ActivationNeuron(double bias, IActivationFunction activationFunction)
        {
            Contract.Requires(activationFunction != null);

            ActivationFunction = activationFunction;
            Bias = bias;
        }
 /// <summary>
 /// Construct with the provided id, selection probability and activation function.
 /// </summary>
 public ActivationFunctionInfo(int id, 
                               double selectionProbability,
                               IActivationFunction activationFn)
 {
     _id = id;
     _selectionProbability = selectionProbability;
     _activationFn = activationFn;
 }
 public ActivationLayer(int neuronsCount, int inputsCount, IActivationFunction function)
     : base(neuronsCount, inputsCount)
 {
     for (int i = 0; i < neuronsCount; i++)
     {
         this.neurons[i] = new ActivationNeuron(inputsCount, function);
     }
 }
Example no. 28
 /// <summary>
 /// Construct a flat layer.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation.</param>
 public FlatLayer(IActivationFunction activation, int count,
     double biasActivation)
 {
     Activation = activation;
     _count = count;
     _biasActivation = biasActivation;
     _contextFedBy = null;
 }
Example no. 29
 public Neuron(int inputsCount, IActivationFunction function)
 {
     this.Output = 0.0;
     this.Threshold = 0.0;
     this.InputsCount = Math.Max(1, inputsCount);
     this.Weights = new List<double>();
     this.ActivationFunction = function;
 }
Example no. 30
 /// <summary>
 /// Construct a flat layer.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation.</param>
 /// <param name="paras">The parameters.</param>
 public FlatLayer(IActivationFunction activation, int count,
         double biasActivation, double[] paras)
 {
     this.activation = activation;
     this.count = count;
     this.biasActivation = biasActivation;
     this.contextFedBy = null;
 }
Example no. 31
        public void SingleLayerTest(int neuronCount, float[] inputs, float[] weightsAndBiases, IActivationFunction activationFunction)
        {
            var inCnt     = inputs.Length;
            var outputs   = new float[neuronCount];
            var memIn     = inputs.AsMemory();
            var weMemFlat = weightsAndBiases.AsMemory();
            var memOut    = outputs.AsMemory();
            var weMem     = HelpersMisc.SliceArray(ref weMemFlat, inCnt + 1, neuronCount);
            var layer     = new Layer(neuronCount, ref weMem, ref memIn, ref memOut, activationFunction);

            layer.CalculateWithoutNeuronParallel();

            Assert.Multiple(() =>
            {
                for (var i = 0; i < neuronCount; i++)
                {
                    var wAndB = weMem[i].ToArray();
                    //var total = inputs.Select((t, j) => wAndB[j] * t).Sum();
                    var total = PrivateSum(inputs.Select((t, j) => wAndB[j] * t));

                    total   += wAndB[inputs.Length];
                    var nOut = activationFunction.Forward(ref total);
                    Assert.That(outputs[i], Is.EqualTo(nOut) /*.Within(.0001f)*/, $"Output is not as expected on neuron #{i}");
                }
            });
        }
Example no. 32
 public Neuron(IActivationFunction transferFunc, IWeightInputOperation operateFunc)
 {
     this.transferFunc = transferFunc;
     this.operateFunc  = operateFunc;
     this.weights      = new List <double>();
 }
Example no. 33
 public void ResetEvaluator(IActivationFunction activationFn)
 {
     // populationEvaluator = new SingleFilePopulationEvaluator(new RobotDualNetworkEvaluator(), activationFn);
     populationEvaluator = new FoodGathererPopulationEvaluator(new FoodGathererNetworkEvaluator(), activationFn);
 }
Example no. 34
 /// <summary>
 /// Construct this layer with a non-default activation function and
 /// specify whether a bias is desired.
 /// </summary>
 ///
 /// <param name="activationFunction">The activation function to use.</param>
 /// <param name="neuronCount">How many neurons in this layer.</param>
 /// <param name="hasBias">True if this layer has a bias.</param>
 public BasicLayer(IActivationFunction activationFunction,
                   bool hasBias, int neuronCount)
     : base(activationFunction, neuronCount, (hasBias) ? 1.0d : 0.0d)
 {
 }
        /// <summary>
        /// Back Propagate
        /// </summary>
        /// <param name="neuralNetwork">Neural Network</param>
        /// <param name="expectedResults">Expected Results</param>
        /// <param name="activationFn">Activation Function</param>
        /// <param name="learningRate">Learning Rate (Not yet implemented)</param>
        private static void BackPropagate(NeuralNetwork neuralNetwork, double[] expectedResults, IActivationFunction activationFn, double learningRate)
        {
            // Calculate the error of each output neuron vs target
            for (int i = 0; i < neuralNetwork.OutputLayer.Count; i++)
            {
                neuralNetwork.OutputLayer[i].Error = activationFn.derivative(neuralNetwork.OutputLayer[i].Output) * (expectedResults[i] - neuralNetwork.OutputLayer[i].Output);
            }

            // Calc the global network error
            neuralNetwork.GlobalError = neuralNetwork.OutputLayer.Sum(x => x.Error);

            // Re-calc Output Layer weights
            for (int i = 0; i < neuralNetwork.OutputLayer.Count; i++)
            {
                AdjustNeuronWeights(neuralNetwork.OutputLayer[i], neuralNetwork.GlobalError);
            }

            // Recalc Hidden Layer weights
            // Calc Hidden Layer Errors
            for (int i = 0; i < neuralNetwork.HiddenLayer.Count; i++)
            {
                double totalError = 0;

                for (int j = 0; j < neuralNetwork.OutputLayer.Count; j++)
                {
                    totalError += activationFn.derivative(neuralNetwork.HiddenLayer[i].Output) * neuralNetwork.GlobalError * neuralNetwork.OutputLayer[j].Weights[i];
                }

                neuralNetwork.HiddenLayer[i].Error = totalError;
            }

            // Then adjust the hidden neurons' weights based on their errors
            for (int i = 0; i < neuralNetwork.HiddenLayer.Count; i++)
            {
                AdjustNeuronWeights(neuralNetwork.HiddenLayer[i]);
            }
        }
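
A worked sketch of the output-neuron error rule used above. Assumptions: the standalone helper is hypothetical and hard-codes the sigmoid derivative o * (1 - o), whereas the code above calls activationFn.derivative.

            // Hypothetical helper mirroring OutputLayer[i].Error for a sigmoid output neuron.
            static double OutputError(double output, double expected)
            {
                double derivative = output * (1.0 - output); // sigmoid derivative at the neuron's output
                return derivative * (expected - output);
            }
            // Example: output 0.7, expected 1.0  ->  0.21 * 0.3 = 0.063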
Example no. 36
        static public List <Layer> BuildDense(int inputSize, int outputSize, int[] hiddenSize, int deep, double[] matrixInit, double[] biasInit, IActivationFunction <double> hidden, IActivationFunction <double> input = null, IActivationFunction <double> output = null)
        {
            List <Layer> layers = new List <Layer>(deep);

            input ??= new ReLU();
            output ??= new ReLU();

            if (deep < 1)
            {
                throw new Exception("Too few layers!");
            }
            else if (deep == 1)
            {
                layers.Add(new Layer(inputSize, outputSize, matrixInit[0], biasInit[0], hidden));
            }
            else
            {
                layers.Add(new Layer(hiddenSize[0], inputSize, matrixInit[0], biasInit[0], input));
                for (int i = 1; i < deep - 1; i++)
                {
                    layers.Add(new Layer(hiddenSize[i], hiddenSize[i - 1], matrixInit[i], biasInit[i], hidden));
                }
                layers.Add(new Layer(outputSize, hiddenSize[deep - 2], matrixInit[deep - 1], biasInit[deep - 1], output));
            }
            return(layers);
        }
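
A call sketch for BuildDense above. Assumptions: the containing class name NetworkBuilder is hypothetical, and ReLU/Layer are the types referenced in the snippet; with deep = 3 the method reads hiddenSize[0..1] and matrixInit/biasInit[0..2].

            // Hypothetical call: 4 inputs -> 8 -> 8 -> 2 outputs, ReLU throughout.
            List<Layer> layers = NetworkBuilder.BuildDense(
                inputSize: 4, outputSize: 2,
                hiddenSize: new[] { 8, 8 }, deep: 3,
                matrixInit: new[] { 0.1, 0.1, 0.1 },
                biasInit: new[] { 0.0, 0.0, 0.0 },
                hidden: new ReLU());
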
 private static void AssertMonotonic(IActivationFunction <double> actFn, bool strict)
 {
     Assert.IsTrue(TestUtils.IsMonotonicIncreasing(actFn.Fn, -6, 6, 0.01, strict));
 }
Example no. 38
 private AxonFactory(IActivationFunction activationFunction)
 {
     _activationFunction = activationFunction;
 }
Example no. 39
 public static IAxonFactory GetInstance(IActivationFunction activationFunction)
 {
     return(new AxonFactory(activationFunction));
 }
Example no. 40
 /// <summary>
 ///     Construct a single dimension layer, this is usually used for non-convolutional neural networks.
 /// </summary>
 /// <param name="activation">The activation function.  All layers, except input will have activation functions.</param>
 /// <param name="theHasBias">True, if this layer has a bias, all layers except the output have bias.</param>
 /// <param name="theCount">The neuron count.</param>
 public BasicLayer(IActivationFunction activation, bool theHasBias, int theCount)
     : this(activation, theHasBias, new[] { theCount })
 {
 }
Example no. 41
 /// <summary>
 /// Initializes a new instance of the <see cref="Activation"/> class.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 ///
 public Activation(IActivationFunction activation)
 {
     this.supports_masking = true;
     this.activation       = activation;
 }
Example no. 42
 public RadialBasisFunctionLayer(IActivationFunction activationFunction, int neurons, int layer, int p)
     : this(activationFunction, neurons, layer)
 {
     this.p = p;
 }
Example no. 43
 public RadialBasisFunctionLayer(IActivationFunction activationFunction, int neurons, int layer)
     : this(neurons, layer)
 {
     this.activationFunction_ = activationFunction;
 }
 /// <summary>
 /// Initializes an ANeuronLayer.
 /// </summary>
 /// <param name="passedNumberOfNeurons">The Number of Neurons in the Layer.</param>
 /// <param name="passedNumberOfWeightsPerNeuron">The Number of Weights Per Neuron in the Layer.</param>
 /// <param name="passedActivationFunction">The Activation Function of all Neurons in the Layer.</param>
 public ANeuronLayer(uint passedNumberOfNeurons, uint passedNumberOfWeightsPerNeuron, IActivationFunction passedActivationFunction)
 {
     this._Neurons = new List <INeuron>((int)passedNumberOfNeurons);
     this._NumberOfWeightsPerNeuron = passedNumberOfWeightsPerNeuron;
     this._NumberOfNeurons          = passedNumberOfNeurons;
     this._ActivationFunction       = passedActivationFunction;
     CreateNeurons();
 }
Example no. 45
 public Activation(int nodes, IActivationFunction activationFunction) : base(nodes, nodes)
 {
     this.activationFunction = activationFunction;
 }
Example no. 46
 static public Layer BuildDense(int inputSize, int outputSize, double matrixInit, double biasInit, IActivationFunction <double> f)
 {
     return(new Layer(outputSize, inputSize, matrixInit, biasInit, f));
 }
Example no. 47
 public Activation(IActivationFunction activationFunction, Layer layer) : base(layer.Inputs, layer)
 {
     this.activationFunction = activationFunction;
 }
 /// <summary>
 /// Construct a data description with an activation function, but no range.
 /// </summary>
 /// <param name="activationFunction">The activation function.</param>
 /// <param name="type">The type of data.</param>
 /// <param name="input">Used for input?</param>
 /// <param name="predict">Used for prediction?</param>
 public TemporalDataDescription(IActivationFunction activationFunction,
                                Type type, bool input, bool predict)
     : this(activationFunction, 0, 0, type, input, predict)
 {
 }
Example no. 49
        static public List <Layer> BuildRandom(int inputSize, int outputSize, int hiddenSize, int deep, double mCenter = 0, double mOffset = 1, double bCenter = 0, double bOffset = 1, IActivationFunction <double> hidden = null, IActivationFunction <double> input = null, IActivationFunction <double> output = null)
        {
            List <Layer> layers = new List <Layer>(deep);

            input ??= new ReLU();
            output ??= new ReLU();
            hidden ??= new ReLU();

            if (deep < 1)
            {
                throw new Exception("Too few layers!");
            }
            else if (deep == 1)
            {
                layers.Add(BuildRandom(inputSize, outputSize, output, mCenter, mOffset, bCenter, bOffset));
            }
            else
            {
                layers.Add(BuildRandom(inputSize, hiddenSize, input, mCenter, mOffset, bCenter, bOffset));
                for (int i = 1; i < deep - 1; i++)
                {
                    layers.Add(BuildRandom(hiddenSize, hiddenSize, hidden, mCenter, mOffset, bCenter, bOffset));
                }
                layers.Add(BuildRandom(hiddenSize, outputSize, output, mCenter, mOffset, bCenter, bOffset));
            }
            return(layers);
        }
Example no. 50
 /// <summary>
 /// Copy constructor.
 /// </summary>
 /// <param name="copyFrom"></param>
 public NeuronGene(NeuronGene copyFrom)
 {
     this.innovationId       = copyFrom.innovationId;
     this.neuronType         = copyFrom.neuronType;
     this.activationFunction = copyFrom.activationFunction;
 }
Example no. 51
 /// <summary>
 /// Constructs a neuron from a weight vector
 /// </summary>
 /// <param name="weights">Weight vector</param>
 /// <param name="bias">Bias (weight number zero)</param>
 /// <param name="af">Activation function interface</param>
 public Neuron(Vector weights, double bias, IActivationFunction af)
 {
     Weights            = weights;
     Bias               = bias;
     activationFunction = af;
 }
Example no. 52
 public NeuronGene(uint innovationId, NeuronType neuronType, IActivationFunction activationFunction)
 {
     this.innovationId       = innovationId;
     this.neuronType         = neuronType;
     this.activationFunction = activationFunction;
 }
        /// <summary>
        /// Train Neural Network (N.B Do not train for just 1 example, always randomize training data)
        /// </summary>
        /// <param name="neuralNetwork">Neural Network</param>
        /// <param name="trainingData">Training Data</param>
        /// <param name="epochs">Number of Training Epochs</param>
        /// <param name="activationFn">Activation Function</param>
        /// <param name="learningRate">Learning Rate</param>
        /// <returns>Results Array</returns>
        public double[][] Train(NeuralNetwork neuralNetwork, TrainingDataDTO trainingData, int epochs, IActivationFunction activationFn, double learningRate)
        {
            // Get / create some initial vars for the results object
            int arrayWidth = trainingData.Inputs.GetLength(0) + trainingData.ExpectedResults.GetLength(0);

            double[][] results = new double[epochs * trainingData.Inputs.Length][];
            int        counter = 0;

            for (int i = 0; i < epochs; i++)
            {
                for (int j = 0; j < trainingData.Inputs.Length; j++)
                {
                    double[] inputs          = trainingData.Inputs[j];
                    double[] expectedResults = trainingData.ExpectedResults[j];
                    _neuralNetworkProcessingService.ProcessFeedForwardNeuralNetwork(neuralNetwork, inputs, activationFn);

                    BackPropagate(neuralNetwork, expectedResults, activationFn, learningRate);

                    // Populate results obj
                    results[counter] =
                        inputs.Concat(expectedResults)                           // Inputs and Expected Results
                        .Concat(neuralNetwork.OutputLayer.Select(x => x.Output)) // Actual Outputs
                        .Concat(new double[1] {
                        neuralNetwork.GlobalError
                    }).ToArray();                                                       // Overall Global Network Error

                    counter++;
                }
            }

            return(results);
        }
Example no. 54
            public Builder Activation(IActivationFunction activation)
            {
                this.activation = activation ?? throw new ArgumentNullException(nameof(activation));

                return(this);
            }
Example no. 55
        /// <summary>
        /// Initializes an ANeuralNetwork.
        /// </summary>
        /// <param name="passedNumberOfInputNeurons">The Number of Neurons in the Input Layer.</param>
        /// <param name="passedNumberOfNeuronsPerHiddenLayer">The Number of Neurons in the Hidden Layers.</param>
        /// <param name="passedNumberOfHiddenLayers">The Number of Hidden Layers.</param>
        /// <param name="passedNumberOfOutputNeurons">The Number of Neurons in the Output Layer.</param>
        /// <param name="passedNumberOfWeightsForInput">The Number of Weights for an Input Neuron.</param>
        /// <param name="passedActivationFunction">The Activation Function for all Neurons in the Network.</param>
        public ANeuralNetwork(uint passedNumberOfInputNeurons, uint passedNumberOfNeuronsPerHiddenLayer, uint passedNumberOfHiddenLayers, uint passedNumberOfOutputNeurons, uint passedNumberOfWeightsForInput, IActivationFunction passedActivationFunction)
        {
            //Initialize and Assign Input and Output Layers.
            INeuronLayer inputNeuronLayer  = new NeuronLayer(passedNumberOfInputNeurons, passedNumberOfWeightsForInput, passedActivationFunction);
            INeuronLayer outputNeuronLayer = new NeuronLayer(passedNumberOfOutputNeurons, passedNumberOfNeuronsPerHiddenLayer, passedActivationFunction);

            IList <INeuronLayer> neuronLayers = new List <INeuronLayer>((int)(2 + passedNumberOfHiddenLayers));

            neuronLayers.Add(inputNeuronLayer);

            for (int i = 1; i < passedNumberOfHiddenLayers; i++)
            {
                neuronLayers.Add(new NeuronLayer(passedNumberOfNeuronsPerHiddenLayer, neuronLayers[i - 1].NumberOfNeurons, passedActivationFunction));
            }

            neuronLayers.Add(outputNeuronLayer);

            INeuronLayers l = new NeuronLayers(neuronLayers);
        }
Example no. 56
 /// <summary>
 /// Construct the activation summation.
 /// </summary>
 /// <param name="theActivationFunction">The activation function.</param>
 public BasicActivationSummation(
     IActivationFunction theActivationFunction)
 {
     ActivationFunction = theActivationFunction;
 }
Example no. 57
 public Neuron(IActivationFunction transferFunc)
     : this(transferFunc, MultiplyOperationFunction.Instance)
 {
 }
Example no. 58
 /// <summary>
 /// Initializes a new instance of the <see cref="ActivationNeuron"/> class.
 /// </summary>
 ///
 /// <param name="inputs">Neuron's inputs count.</param>
 /// <param name="function">Neuron's activation function.</param>
 ///
 public ActivationNeuron(int inputs, IActivationFunction function)
     : base(inputs)
 {
     this.function = function;
 }
 public ActivationLayerBlueprint(int neuronCount, IActivationFunction activationFunction)
     : base(neuronCount)
 {
     this.activationFunction = activationFunction;
 }
 public void ResetEvaluator(IActivationFunction activationFn)
 {
      populationEvaluator = new EveryonePopulationEvaluator(new CCEAGeomCtrlNetworkEvaluator(m_neatQTable));
 }