Example #1
        public void ApplyLearning(INeuralLayer layer, ref double learningRate)
        {
            foreach (KeyValuePair<INeuronSignal, NeuralFactor> m in m_input)
                m.Value.ApplyWeightChange(ref learningRate);

            m_bias.ApplyWeightChange(ref learningRate);
        }
Example #2
        public void InitializeLearning(INeuralLayer layer)
        {
            foreach (KeyValuePair<INeuronSignal, NeuralFactor> m in m_input)
                m.Value.ResetWeightChange();

            m_bias.ResetWeightChange();
        }
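Examples #1 and #2 only make sense together with the NeuralFactor type they operate on, which is not part of the listing. Below is a minimal sketch, assuming the usual pattern of a connection weight plus an accumulated change that learning applies and then clears; the member names are illustrative, not taken from the original repository.
        public class NeuralFactor
        {
            private double m_weight;
            private double m_delta;

            public NeuralFactor(double weight)
            {
                m_weight = weight;
                m_delta  = 0;
            }

            public double Weight
            {
                get { return m_weight; }
                set { m_weight = value; }
            }

            public double Delta
            {
                get { return m_delta; }
                set { m_delta = value; }
            }

            // Fold the accumulated change into the weight, scaled by the learning rate.
            public void ApplyWeightChange(ref double learningRate)
            {
                m_weight += m_delta * learningRate;
            }

            // Clear the accumulated change before the next training pass.
            public void ResetWeightChange()
            {
                m_delta = 0;
            }
        }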
Example #3
        public NeuralCell(INeuralLayer layer, Type func, int axonsCount, double skew)
        {
            _layer      = layer;
            _axonsCount = axonsCount;

            _weights = new double[axonsCount];
            for (var i = 0; i < axonsCount; i++)
            {
                _weights[i] = 0;
            }

            _function = ((IActivationFunction)Activator.CreateInstance(func))?.Create(this, skew) ?? throw new NeuralTypeException(typeof(IActivationFunction), func);
        }
Example #4
        public void Pulse(INeuralLayer layer)
        {
            lock (this)
            {
                m_output = 0;

                foreach (KeyValuePair <INeuronSignal, NeuralFactor> item in m_input)
                {
                    m_output += item.Key.Output * item.Value.Weight;
                }

                m_output += m_bias.Weight * BiasWeight;

                m_output = Sigmoid(m_output);
            }
        }
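The Sigmoid helper called at the end of Example #4 (and again in Example #13) is not shown in the listing. The conventional choice is the logistic function, which squashes the weighted sum into the range (0, 1); a minimal sketch assuming exactly that:
        private static double Sigmoid(double value)
        {
            // Logistic function: 1 / (1 + e^-x), using System.Math.
            return 1.0 / (1.0 + Math.Exp(-value));
        }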
Example #5
        public void Pulse(INeuralLayer layer)
        {
            lock (this)
            {
                m_output = 0;

                foreach (KeyValuePair <INeuronSignal, NeuralFactor> item in m_input)
                {
                    m_output += item.Key.Output * item.Value.Weight;
                }

                m_output += m_bias.Weight;

                m_output = aktivationFunction.ActivateFunction(m_output);
            }
        }
Example #6
        public void Initialize(int randomSeed, int inputNeuronCount,
                               int hiddenNeuronCount, int outputNeuronCount)
        {
            int    i, j;
            Random rand;

            // initializations
            rand          = new Random(randomSeed);
            m_inputLayer  = new NeuralLayer();
            m_outputLayer = new NeuralLayer();
            m_hiddenLayer = new NeuralLayer();

            for (i = 0; i < inputNeuronCount; i++)
            {
                m_inputLayer.Add(new Neuron(0));
            }

            for (i = 0; i < outputNeuronCount; i++)
            {
                m_outputLayer.Add(new Neuron(rand.NextDouble()));
            }

            for (i = 0; i < hiddenNeuronCount; i++)
            {
                m_hiddenLayer.Add(new Neuron(rand.NextDouble()));
            }

            // wire-up input layer to hidden layer
            for (i = 0; i < m_hiddenLayer.Count; i++)
            {
                for (j = 0; j < m_inputLayer.Count; j++)
                {
                    m_hiddenLayer[i].Input.Add(m_inputLayer[j],
                                               new NeuralFactor(rand.NextDouble()));
                }
            }

            // wire-up output layer to hidden layer
            for (i = 0; i < m_outputLayer.Count; i++)
            {
                for (j = 0; j < m_hiddenLayer.Count; j++)
                {
                    m_outputLayer[i].Input.Add(m_hiddenLayer[j],
                                               new NeuralFactor(rand.NextDouble()));
                }
            }
        }
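A hypothetical call to Example #6: the containing network class (here called NeuralNet) is an assumption and not shown in the listing; only the parameter list comes from the example. With a fixed seed, the random hidden and output weights are reproducible.
        var net = new NeuralNet();                 // assumed container class, not shown above
        net.Initialize(randomSeed: 42,             // fixed seed for reproducible weights
                       inputNeuronCount: 2,
                       hiddenNeuronCount: 2,
                       outputNeuronCount: 1);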
Example #7
 protected virtual void InitializeData(INeuralLayer <double> layer, double[] values)
 {
     if (values == null)
     {
         for (int i = 0; i < layer.Count; ++i)
         {
             layer[i].Value = 0;
         }
     }
     else
     {
         for (int i = 0; i < layer.Count; ++i)
         {
             layer[i].Value = values[i];
         }
     }
 }
Example #8
        public override void Pulse(INeuralLayer <double> source)
        {
            lock (key)
            {
                double newValue = 0;

                foreach (IDendrite <double> dendrite in this.Inputs)
                {
                    newValue += dendrite.Output.Value * dendrite.DendriteWeight.Weight;
                }

                newValue += this.Bias.Output.Value * this.Bias.DendriteWeight.Weight;

                // Run Event before setting new value
                this.OnNeuronValueChange?.Invoke(this, new NNEventArgs(null, source, this, null, newValue));
                this.Value = newValue;
            }
        }
Example #9
        public override void BuildNetwork(int inputNeurons, IEnumerable <int> hiddenLayersNeurons, int outputNeurons)
        {
            // Prepare Input
            this.InputLayer = this.BuildLayer(inputNeurons);

            // Prepare Hidden Layers
            this.HiddenLayers = new List <INeuralLayer <double> >();
            if (hiddenLayersNeurons != null)
            {
                foreach (int neuronsInLayer in hiddenLayersNeurons)
                {
                    INeuralLayer <double> hiddenLayer = this.BuildLayer(neuronsInLayer);
                    this.HiddenLayers.Add(hiddenLayer);
                }
            }

            // Prepare Output
            this.OutputLayer = this.BuildLayer(outputNeurons);
        }
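A hypothetical call to Example #9; the network instance is an assumption. The hidden layer sizes are passed as an IEnumerable<int>, so a null argument (handled above) or an array both work.
        // Assumed instance of the class that defines BuildNetwork.
        network.BuildNetwork(inputNeurons: 2,
                             hiddenLayersNeurons: new[] { 3, 3 },
                             outputNeurons: 1);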
Example #10
        public void Initialize(int seed, int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount)
        {
            int          i, layerCount;
            Random       rnd;
            INeuralLayer layer;

            rnd           = new Random(seed);
            m_inputLayer  = new NeuralLayer();
            m_outputLayer = new NeuralLayer();
            m_hiddenLayer = new NeuralLayer();

            for (i = 0; i < inputNeuronCount; i++)
            {
                m_inputLayer.Add(new Neuron());
            }
            for (i = 0; i < outputNeuronCount; i++)
            {
                m_outputLayer.Add(new Neuron());
            }
            for (i = 0; i < hiddenNeuronCount; i++)
            {
                m_hiddenLayer.Add(new Neuron());
            }

            // input layer to hidden layer
            foreach (INeuron hiddenNeuron in m_hiddenLayer)
            {
                foreach (INeuron inputNeuron in m_inputLayer)
                {
                    hiddenNeuron.Input.Add(inputNeuron, new NeuralFactor(rnd.NextDouble()));
                }
            }
            // output neuron to hidden layer
            foreach (INeuron outputNeuron in m_outputLayer)
            {
                foreach (INeuron hiddenNeuron in m_hiddenLayer)
                {
                    outputNeuron.Input.Add(hiddenNeuron, new NeuralFactor(rnd.NextDouble()));
                }
            }
        }
Example #11
        private void AdjustOutputLayerWeightChange(INeuralLayer <double> layer)
        {
            INeuron <double> outputNeuron, neuron;

            for (int i = 0; i < layer.Count; ++i)
            {
                neuron = layer[i];

                for (int j = 0; j < this.Output.Count; ++j)
                {
                    outputNeuron = this.Output[j /*neuron*/];
                    double newDendriteWeight = outputNeuron[i].DendriteWeight.Weight + (this.LearningRate * this.Output[j].Error * neuron.Value);

                    // Run an Event.
                    outputNeuron.OnDendriteWeightChange?.Invoke(this, new NNEventArgs(this, layer, outputNeuron, outputNeuron[i], newDendriteWeight));

                    outputNeuron[i].DendriteWeight.Weight   = newDendriteWeight;
                    outputNeuron.Bias.DendriteWeight.Delta += this.LearningRate * this.Output[j].Error * outputNeuron.Bias.DendriteWeight.Weight;
                }
            }
        }
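Restated in isolation, the weight update inside Example #11 is plain gradient descent: the new weight moves by the learning rate times the downstream neuron's error times the upstream activation. A standalone restatement (the helper name is mine, not from the listing):
        private static double UpdatedWeight(double weight, double learningRate,
                                            double outputError, double inputValue)
        {
            // weight_new = weight_old + learningRate * error * input
            return weight + learningRate * outputError * inputValue;
        }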
Example #12
 public virtual INeuralCell Create(INeuralLayer layer, Type func, int axonsCount, double skew) => new NeuralCell(layer, func, axonsCount, skew);
Example #13
        public void Pulse(INeuralLayer layer)
        {
            lock (this)
            {
                m_output = 0;

                foreach (KeyValuePair<INeuronSignal, NeuralFactor> item in m_input)
                    m_output += item.Key.Output * item.Value.Weight;

                m_output += m_bias.Weight;

                m_output = Sigmoid(m_output);
            }
        }
Example #14
 public void OnDeserialize(INeuralLayer layer)
 {
     _layer = layer;
 }
Example #15
        private void BackPropagation(double[] wantedResults)
        {
            int    i, j;
            double currentValue, error;

            INeuron <double> outputNeuron, inputNeuron, hiddenNeuron, neuron;

            // Calculate output error values
            i = j = 0;
            for (i = 0; i < this.Output.Count; ++i)
            {
                INeuron <double> localNeuron = this.Output[i];
                currentValue      = localNeuron.Value;
                localNeuron.Error = (wantedResults[i] - currentValue) * currentValue * (1.0F - currentValue);
            }

            // Calculate hidden layers error values
            i = j = 0;
            foreach (INeuralLayer <double> layer in this.HiddenLayers)
            {
                for (i = 0; i < layer.Count; ++i)
                {
                    neuron = layer[i];
                    error  = 0;

                    for (j = 0; j < this.Output.Count; ++j)
                    {
                        outputNeuron = this.Output[j];
                        error       += outputNeuron.Error * outputNeuron[i /*neuron*/].DendriteWeight.Weight * neuron.Value * (1.0 - neuron.Value);
                    }

                    neuron.Error = error;
                }
                error = 0;
            }

            // Adjust output layer weight change
            i = j = 0;
            if (this.HiddenLayers.Count > 0)
            {
                foreach (INeuralLayer <double> layer in this.HiddenLayers)
                {
                    this.AdjustOutputLayerWeightChange(layer);
                }
            }
            else
            {
                this.AdjustOutputLayerWeightChange(this.Output);
            }

            // Adjust first hidden layer weight change
            if (this.HiddenLayers.Count > 0)
            {
                INeuralLayer <double> hiddenLayer = this.HiddenLayers.ElementAt(0);

                for (i = 0; i < this.Input.Count; ++i)
                {
                    inputNeuron = this.Input[i];

                    for (j = 0; j < hiddenLayer.Count; ++j)
                    {
                        hiddenNeuron = hiddenLayer[j];
                        hiddenNeuron[i /*inputNeuron*/].DendriteWeight.Weight += this.LearningRate * hiddenNeuron.Error * inputNeuron.Value;
                        hiddenNeuron.Bias.DendriteWeight.Delta += this.LearningRate * hiddenNeuron.Error * inputNeuron.Bias.DendriteWeight.Weight;
                    }
                }
            }
        }
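The output-layer error computed at the top of Example #15 is the usual delta rule for a sigmoid unit: the logistic function's derivative at output o is o * (1 - o), so the difference to the target gets scaled by it. As a standalone helper (the name is mine):
        private static double OutputError(double target, double output)
        {
            // (target - o) * o * (1 - o): the sigmoid-derivative form of the delta rule
            return (target - output) * output * (1.0 - output);
        }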
Example #16
 public NeuralLayerException(INeuralLayer layer, double[] input, string message) : base(message)
 {
     Layer = layer;
 }
Example #17
 public virtual INeuralBias Create(INeuralLayer layer) => new NeuralBias(layer);
Example #18
 public NNEventArgs(INeuralNetwork <double> network, INeuralLayer <double> layer, INeuron <double> neuron, IDendrite <double> dendrite, double value) :
     base(network, layer, neuron, dendrite, value)
 {
 }
Example #19
        public override void Initialize(INeuralLayer <double> input, ICollection <INeuralLayer <double> > hiddenLayers, INeuralLayer <double> output)
        {
            this.Input        = input;
            this.HiddenLayers = hiddenLayers;
            this.Output       = output;

            // Connect layers
            if (this.HiddenLayers != null && this.HiddenLayers.Count > 0) // There is at least one hidden layer
            {
                this.ConnectLayers(this.Input, this.HiddenLayers.ElementAt(0));

                for (int i = 1; i < this.HiddenLayers.Count; ++i)
                {
                    var layerInput  = this.HiddenLayers.ElementAt(i - 1);
                    var layerOutput = this.HiddenLayers.ElementAt(i);
                    this.ConnectLayers(layerInput, layerOutput);
                }

                // Connect last hidden with output
                this.ConnectLayers(this.HiddenLayers.ElementAt(this.HiddenLayers.Count - 1), this.Output);
            }
            else
            {
                this.ConnectLayers(this.Input, this.Output);
            }
        }
Example #20
 public abstract void Initialize(INeuralLayer <TNeuronDataType> input, ICollection <INeuralLayer <TNeuronDataType> > hiddenLayers, INeuralLayer <TNeuronDataType> output);
Example #21
 public void ApplyLearning(INeuralLayer layer)
 {
 }
Example #22
 public override void ApplyLearning(INeuralLayer <double> source)
 {
     // Nothing here
 }
Example #23
        public void Initialize(int randomSeed)
        {
            var random = new Random(randomSeed);

            _inputLayer = new NeuralLayer {
                Name = "InputLayer"
            };
            _outputLayer = new NeuralLayer {
                Name = "OutputLayer"
            };
            _hiddenLayers = new List <INeuralLayer>();
            foreach (var hiddenLayer in NetConfiguration.HiddenLayerNodes)
            {
                _hiddenLayers.Add(new NeuralLayer {
                    Name = $"HiddenLayer{hiddenLayer.Key}"
                });
            }


            for (var i = 0; i < NetConfiguration.NrOfInputNeurons; i++)
            {
                _inputLayer.Add(new Neuron(0));
            }

            for (var i = 0; i < NetConfiguration.NumberOfOutputNeurons; i++)
            {
                _outputLayer.Add(new Neuron(random.NextDouble()));
            }

            _inputLayer.OutputLayer = _hiddenLayers.First();


            for (var i = 0; i < NetConfiguration.HiddenLayerNodes.Count; i++)
            {
                var nrOfNodesInLayer = NetConfiguration.HiddenLayerNodes.Values.ToArray()[i];
                for (var j = 0; j < nrOfNodesInLayer; j++)
                {
                    _hiddenLayers[i].Add(new Neuron(random.NextDouble()));
                }
            }

            // wire-up the hidden layers
            foreach (var hiddenLayer in _hiddenLayers)
            {
                var index = _hiddenLayers.IndexOf(hiddenLayer);
                hiddenLayer.InputLayer  = index == 0 ? _inputLayer : _hiddenLayers[index - 1];
                hiddenLayer.OutputLayer = index < _hiddenLayers.Count - 1 ? _hiddenLayers[index + 1] : _outputLayer;

                foreach (var hiddenNode in hiddenLayer)
                {
                    foreach (var inputNode in hiddenLayer.InputLayer)
                    {
                        hiddenNode.Input.Add(inputNode, new NeuralFactor(random.NextDouble()));
                    }
                }
            }

            _outputLayer.InputLayer = _hiddenLayers.Last();

            // wire-up output layer to hidden layer
            foreach (var outputNode in _outputLayer)
            {
                foreach (var hiddenNode in _outputLayer.InputLayer)
                {
                    outputNode.Input.Add(hiddenNode, new NeuralFactor(random.NextDouble()));
                }
            }
        }
Example #24
 public NeuralBias(INeuralLayer layer)
 {
     _layer = layer;
 }