public KLayer(int inputsCount, int neuronsCount)
{
    // Allocate the per-neuron output buffer and the neuron array,
    // then give every neuron the same number of inputs.
    _Output = new double[neuronsCount];
    _Neurons = new KNeuron[neuronsCount];

    for (int index = 0; index < neuronsCount; index++)
    {
        _Neurons[index] = new KNeuron(inputsCount);
    }
}
public KLayer(int inputsCount, int neuronsCount)
{
    // Neuron array and matching output vector share the same length.
    _Neurons = new KNeuron[neuronsCount];
    _Output = new double[neuronsCount];

    // Populate the layer; each neuron is wired to all inputs.
    int count = _Neurons.Length;
    for (int i = 0; i < count; ++i)
    {
        _Neurons[i] = new KNeuron(inputsCount);
    }
}
public double Run(KLearnData sample)
{
    // Presents one sample to the network, moves weights toward the
    // sample (winner-only or Gaussian neighborhood, depending on
    // _LearningRadius) and returns the accumulated absolute error.
    double error = 0.0;

    _Network.Compute(sample._Input);
    int winner = _Network.GetWinner();

    // Get layer of the network (single-layer SOM) and record the
    // sample's symbol against the winning neuron.
    KLayer layer = _Network[0];
    layer[winner].AddSymbol(sample._Symbol);

    // Check learning radius.
    if (_LearningRadius == 0)
    {
        // Zero radius: update weights of the winner only.
        KNeuron neuron = layer[winner];
        for (int i = 0, n = neuron._Weights.Length; i < n; i++)
        {
            // FIX: was a hard-coded 0.01 step, inconsistent with the
            // neighborhood branch below; also accumulate the error so
            // the returned value is meaningful in this mode too.
            double e = sample._Input[i] - neuron[i];
            error += Math.Abs(e);
            neuron[i] += e * _LearningRate;
        }
    }
    else
    {
        // Winner's X and Y on the 2-D map (row-major, _Width columns).
        int wx = winner % _Width;
        int wy = winner / _Width;

        // Walk through all neurons of the layer.
        for (int j = 0, m = layer._Neurons.Length; j < m; j++)
        {
            KNeuron neuron = layer[j];

            int dx = (j % _Width) - wx;
            int dy = (j / _Width) - wy;

            // Update factor (Gaussian, based on squared map distance).
            // NOTE(review): assumes _SquaredRadius2 == 2 * radius^2 —
            // confirm where the field is assigned.
            double factor = Math.Exp(-(double)(dx * dx + dy * dy) / _SquaredRadius2);

            // Update weights of the neuron, scaled by the factor.
            for (int i = 0, n = neuron._Weights.Length; i < n; i++)
            {
                double e = (sample._Input[i] - neuron[i]) * factor;
                error += Math.Abs(e);
                neuron[i] += e * _LearningRate;
            }
        }
    }

    // FIX: was "return (0)" — the computed error was always discarded.
    return error;
}