/// <summary>
/// Forward pass for a fully-activated layer: each neuron's input is its bias
/// plus the weighted sum of its incoming synapses, pushed through the
/// layer's activation function. <paramref name="isTraining"/> is unused here.
/// </summary>
public override void Forward(bool isTraining)
{
    Neurons.ForEach((neuron, idx) =>
    {
        // Accumulate bias + Σ(weight * upstream output).
        double weightedSum = neuron.Bias;
        neuron.InSynapses.ForEach(synapse => weightedSum += synapse.Weight * synapse.InNeuron.OutVal);

        neuron.InVal = weightedSum;
        neuron.OutVal = Activation.Activate(neuron.InVal);
    });
}
/// <summary>
/// Output-layer backward pass: a neuron's gradient is the activation
/// derivative multiplied by the loss derivative w.r.t. its output; each
/// incoming synapse's gradient is that value scaled by the upstream output.
/// </summary>
/// <param name="loss">Loss function providing the derivative w.r.t. the output.</param>
/// <param name="targets">Target value per neuron, indexed like <c>Neurons</c>.</param>
public override void CalcGrads(ILoss loss, Array <double> targets)
{
    Neurons.ForEach((neuron, idx) =>
    {
        double lossGrad = loss.Derivative(targets[idx], neuron.OutVal);
        neuron.Gradient = Activation.Derivative(neuron.InVal, neuron.OutVal) * lossGrad;

        neuron.InSynapses.ForEach(synapse => synapse.Gradient = neuron.Gradient * synapse.InNeuron.OutVal);
    });
}
/// <summary>
/// Runs every neuron in this layer, then recursively runs the next layer
/// in the chain (if one is attached).
/// </summary>
public void Run()
{
    Neurons.ForEach(neuron => neuron.Run());
    NextLayer?.Run();
}
/// <summary>
/// Hidden-layer backward pass: a neuron's gradient is the weighted sum of
/// downstream gradients, scaled by the activation derivative; each incoming
/// synapse's gradient is that value scaled by the upstream output.
/// </summary>
public override void CalcGrads()
{
    Neurons.ForEach((neuron, idx) =>
    {
        // Backpropagated error from the next layer.
        double downstream = neuron.OutSynapses.Sum(synapse => synapse.Weight * synapse.OutNeuron.Gradient);
        neuron.Gradient = downstream * Activation.Derivative(neuron.InVal, neuron.OutVal);

        neuron.InSynapses.ForEach(synapse => synapse.Gradient = neuron.Gradient * synapse.InNeuron.OutVal);
    });
}
/// <summary>
/// Identity forward pass: each neuron passes through the output of its single
/// (first) incoming synapse's source neuron, unweighted and unactivated.
/// <paramref name="isTraining"/> is unused here.
/// </summary>
public override void Forward(bool isTraining)
{
    Neurons.ForEach((neuron, idx) =>
    {
        double passthrough = neuron.InSynapses[0].InNeuron.OutVal;
        neuron.InVal = passthrough;
        neuron.OutVal = passthrough;
    });
}
/// <summary>
/// Assigns each neuron's incoming synapse weights from the merge operation's
/// computed weight vector (one weight per incoming synapse, in order).
/// </summary>
private void InitWeights()
{
    Neurons.ForEach(neuron =>
    {
        Array <double> weights = MergeOp.CalcWeights(neuron.InSynapses);
        neuron.InSynapses.ForEach((synapse, idx) => synapse.Weight = weights[idx]);
    });
}
/// <summary>
/// Accumulates optimizer deltas into the per-batch buffers of every neuron's
/// bias and incoming synapse weights. No-op when the layer is frozen.
/// </summary>
/// <param name="optimizer">Optimizer producing a delta for each trainable value.</param>
public sealed override void Optimize(IOptimizer optimizer)
{
    if (!IsTrainable)
    {
        return;
    }

    Neurons.ForEach(neuron =>
    {
        neuron.BatchDelta += optimizer.Optimize(neuron);
        neuron.InSynapses.ForEach(synapse => synapse.BatchDelta += optimizer.Optimize(synapse));
    });
}
/// <summary>
/// Softmax forward pass. Logits are shifted by their maximum before
/// exponentiation — softmax is invariant under a constant shift, and the
/// shift prevents <c>Exp</c> from overflowing to Infinity (and the outputs
/// from collapsing to NaN) for large inputs, which the unshifted form did.
/// <paramref name="isTraining"/> is unused here.
/// </summary>
public override void Forward(bool isTraining)
{
    // Pass 1: compute raw logits and track their maximum for stability.
    double maxIn = double.NegativeInfinity;
    Neurons.ForEach((N, i) =>
    {
        N.InVal = N.Bias + N.InSynapses.Sum(S => S.Weight * S.InNeuron.OutVal);
        if (N.InVal > maxIn)
        {
            maxIn = N.InVal;
        }
    });

    // Pass 2: exponentiate shifted logits and accumulate the normalizer.
    double eSum = 0.0;
    Neurons.ForEach(N =>
    {
        N.OutVal = Exp(N.InVal - maxIn);
        eSum += N.OutVal;
    });

    // Pass 3: normalize so the outputs sum to 1.
    Neurons.ForEach(N => N.OutVal /= eSum);
}
/// <summary>
/// Merge-layer forward pass: refreshes merge weights when the merge operation
/// requests it, then sets each neuron to the weighted sum of its inputs
/// (no bias, no activation). <paramref name="isTraining"/> is unused here.
/// </summary>
public override void Forward(bool isTraining)
{
    if (MergeOp.RequiresUpdate)
    {
        InitWeights();
    }

    Neurons.ForEach(neuron =>
    {
        double merged = neuron.InSynapses.Sum(synapse => synapse.Weight * synapse.InNeuron.OutVal);
        neuron.InVal = merged;
        neuron.OutVal = merged;
    });
}
/// <summary>
/// Connects several equally-shaped input layers to this layer. Output neuron
/// <c>i</c> receives one synapse per input layer, each wired from that
/// layer's neuron <c>i</c>. The SAME <see cref="Synapse"/> instance is
/// registered on both ends (out-neuron's InSynapses and in-neuron's
/// OutSynapses) so weight and gradient updates stay in sync — previously two
/// distinct Synapse objects were created per connection, silently decoupling
/// the forward and backward views (sibling single-layer Connect shares one).
/// </summary>
/// <param name="inLayers">Input layers; each must match this layer's shape.</param>
/// <exception cref="ShapeMismatchException">An input layer's shape differs from this layer's.</exception>
public override void Connect(Array <Layer> inLayers)
{
    inLayers.ForEach((L, i) =>
    {
        if (L.Shape != Shape)
        {
            throw new ShapeMismatchException($"{nameof(inLayers)}[{i}] shape mismatch.");
        }
    });

    // Create each connection exactly once, owned by the output neuron.
    Neurons.ForEach((outN, i) => outN.InSynapses = inLayers.Select(inL => new Synapse(inL.Neurons[i], outN)));

    // Register the same instances on the input side: input layer j's neuron i
    // feeds output neuron i via that neuron's j-th incoming synapse.
    inLayers.ForEach((inL, j) =>
        inL.Neurons.ForEach((inN, i) => inN.OutSynapses = new Array <Synapse>(Neurons[i].InSynapses[j])));
}
/// <summary>
/// Dropout-style forward pass: while training, each neuron passes through its
/// single incoming synapse's source output scaled by the synapse weight (the
/// mask); outside training it defers to the base identity forward pass.
/// </summary>
public override void Forward(bool isTraining)
{
    if (!isTraining)
    {
        base.Forward(false);
        return;
    }

    Neurons.ForEach((neuron, idx) =>
    {
        Synapse masked = neuron.InSynapses[0];
        double value = masked.Weight * masked.InNeuron.OutVal;
        neuron.InVal = value;
        neuron.OutVal = value;
    });
}
/// <summary>
/// Softmax backward pass (hidden form): first collects each neuron's raw
/// downstream gradient and accumulates Σ(grad · out), then applies the
/// softmax Jacobian — grad' = out · (grad − Σ(grad · out)) — and propagates
/// to the incoming synapses. The two passes must stay separate: the sum must
/// be complete before any gradient is adjusted.
/// </summary>
public override void CalcGrads()
{
    double weightedGradSum = 0.0;

    // Pass 1: raw backpropagated gradients and their output-weighted sum.
    Neurons.ForEach((neuron, idx) =>
    {
        neuron.Gradient = neuron.OutSynapses.Sum(synapse => synapse.Weight * synapse.OutNeuron.Gradient);
        weightedGradSum += neuron.Gradient * neuron.OutVal;
    });

    // Pass 2: apply the softmax Jacobian and fan out to incoming synapses.
    Neurons.ForEach(neuron =>
    {
        neuron.Gradient = neuron.OutVal * (neuron.Gradient - weightedGradSum);
        neuron.InSynapses.ForEach(synapse => synapse.Gradient = neuron.Gradient * synapse.InNeuron.OutVal);
    });
}
/// <summary>
/// Connects a single input layer one-to-one: neuron <c>i</c> here is linked
/// to input neuron <c>i</c> by a single shared <see cref="Synapse"/> instance
/// registered on both ends, so weight/gradient updates stay in sync.
/// </summary>
/// <param name="inLayer">Input layer; its shape volume must equal this layer's.</param>
/// <exception cref="ShapeMismatchException">Shape volumes differ.</exception>
public override void Connect(Layer inLayer)
{
    if (inLayer.Shape.Volume != Shape.Volume)
    {
        throw new ShapeMismatchException($"{nameof(inLayer)} shape volume mismatch.");
    }

    Neurons.ForEach((outNeuron, idx) =>
    {
        Neuron inNeuron = inLayer.Neurons[idx];
        var shared = new Array <Synapse>(new Synapse(inNeuron, outNeuron));
        outNeuron.InSynapses = shared;
        inNeuron.OutSynapses = shared;
    });
}
/// <summary>
/// Softmax output-layer backward pass: first collects each neuron's raw loss
/// derivative and accumulates Σ(grad · out), then applies the softmax
/// Jacobian — grad' = out · (grad − Σ(grad · out)) — and propagates to the
/// incoming synapses. The sum must be complete before any gradient is adjusted.
/// </summary>
/// <param name="loss">Loss function providing the derivative w.r.t. the output.</param>
/// <param name="targets">Target value per neuron, indexed like <c>Neurons</c>.</param>
public override void CalcGrads(ILoss loss, Array <double> targets)
{
    double weightedGradSum = 0.0;

    // Pass 1: raw loss derivatives and their output-weighted sum.
    Neurons.ForEach((neuron, idx) =>
    {
        neuron.Gradient = loss.Derivative(targets[idx], neuron.OutVal);
        weightedGradSum += neuron.Gradient * neuron.OutVal;
    });

    // Pass 2: apply the softmax Jacobian and fan out to incoming synapses.
    Neurons.ForEach(neuron =>
    {
        neuron.Gradient = neuron.OutVal * (neuron.Gradient - weightedGradSum);
        neuron.InSynapses.ForEach(synapse => synapse.Gradient = neuron.Gradient * synapse.InNeuron.OutVal);
    });
}
/// <summary>
/// Applies the accumulated batch deltas to every neuron bias and incoming
/// synapse weight, then resets the accumulators to zero. No-op when frozen.
/// </summary>
public sealed override void Update()
{
    if (!IsTrainable)
    {
        return;
    }

    Neurons.ForEach(neuron =>
    {
        neuron.Bias += neuron.BatchDelta;
        neuron.BatchDelta = 0.0;

        neuron.InSynapses.ForEach(synapse =>
        {
            synapse.Weight += synapse.BatchDelta;
            synapse.BatchDelta = 0.0;
        });
    });
}
/// <summary>
/// Resamples the dropout mask: each neuron's single incoming synapse weight
/// becomes 0.0 (dropped) or 1.0 (kept) with probability <c>DropChance</c>.
/// In spatial mode one draw is shared by all neurons of the same feature map
/// (a run of <c>Volume / Dims[0]</c> consecutive neurons); otherwise every
/// neuron is drawn independently.
/// </summary>
public override void Update()
{
    if (!Spatial)
    {
        Neurons.ForEach(neuron =>
            neuron.InSynapses[0].Weight = GRandom.Uniform(0, 1) <= DropChance ? 0.0 : 1.0);
        return;
    }

    int featureVol = Shape.Volume / Shape.Dims[0];
    bool dropFeature = false;

    Neurons.ForEach((neuron, idx) =>
    {
        // Draw once at the start of each feature map; reuse for the rest of it.
        if (idx % featureVol == 0)
        {
            dropFeature = GRandom.Uniform(0, 1) <= DropChance;
        }

        neuron.InSynapses[0].Weight = dropFeature ? 0.0 : 1.0;
    });
}
/// <summary>
/// Initializes the mask to "keep everything": every neuron's single incoming
/// synapse weight is set to 1.0.
/// </summary>
public override void Initialize()
{
    Neurons.ForEach(neuron =>
    {
        neuron.InSynapses[0].Weight = 1.0;
    });
}
/// <summary>
/// Randomizes the weights of every neuron in the layer.
/// </summary>
/// <returns>This layer, to allow fluent chaining.</returns>
public Layer RandomizeWeights()
{
    Neurons.ForEach(neuron => neuron.RandomizeWeights());
    return this;
}
/// <summary>
/// Assigns the given activation function to every neuron in every
/// neuron group of this container.
/// </summary>
/// <param name="function">Activation function applied element-wise.</param>
public void SetActivationFunction(Func <T, T> function)
{
    Neurons.ForEach(group =>
    {
        group.ForEach(neuron => neuron.ActivationFunction = function);
    });
}
/// <summary>
/// Nudges the shared weight by <c>learningRate * delta</c> and pushes the
/// updated value down to every neuron.
/// </summary>
/// <param name="learningRate">Step-size multiplier.</param>
/// <param name="delta">Raw adjustment before scaling.</param>
public void CalibrateWeights(double learningRate, double delta)
{
    double step = learningRate * delta;
    Weight += step;
    Neurons.ForEach(n => n.UpdateWeights(Weight));
}
/// <summary>
/// Fires every neuron in the layer, propagating its signal forward.
/// </summary>
public void ForwardSignal()
{
    Neurons.ForEach(n => n.Fire());
}
/// <summary>
/// Gives every neuron a fresh list of <paramref name="dendritesPerNeuron"/>
/// dendrites, each pointing at neuron <c>i</c> of the previous layer
/// (<c>Id - 1</c>) and carrying <paramref name="defaultWeight"/>.
/// </summary>
internal void BuildDendrites(int dendritesPerNeuron, double defaultWeight)
{
    Neurons.ForEach(neuron =>
    {
        var dendrites = new List<Dendrite>(dendritesPerNeuron);
        for (int i = 0; i < dendritesPerNeuron; i++)
        {
            dendrites.Add(new Dendrite
            {
                SourceNeuronId = new Tuple <int, int>(Id - 1, i),
                Weight = defaultWeight
            });
        }
        neuron.Dendrites = dendrites;
    });
}