Example #1
0
 /// <summary>
 /// Dense forward pass: each neuron's net input is its bias plus the weighted
 /// sum over its incoming synapses; the output is that input pushed through
 /// the layer's activation function.
 /// </summary>
 /// <param name="isTraining">Unused here; present to satisfy the base contract.</param>
 public override void Forward(bool isTraining)
 {
     Neurons.ForEach((neuron, index) =>
     {
         double weightedSum = neuron.Bias;

         neuron.InSynapses.ForEach(syn => weightedSum += syn.Weight * syn.InNeuron.OutVal);

         neuron.InVal  = weightedSum;
         neuron.OutVal = Activation.Activate(neuron.InVal);
     });
 }
Example #2
0
 /// <summary>
 /// Output-layer gradients: chain rule of the loss derivative with the
 /// activation derivative, then per-synapse gradients from the input activations.
 /// </summary>
 public override void CalcGrads(ILoss loss, Array <double> targets)
 {
     Neurons.ForEach((neuron, index) =>
     {
         double dLoss = loss.Derivative(targets[index], neuron.OutVal);
         double dAct  = Activation.Derivative(neuron.InVal, neuron.OutVal);

         neuron.Gradient = dAct * dLoss;
         neuron.InSynapses.ForEach(syn => syn.Gradient = neuron.Gradient * syn.InNeuron.OutVal);
     });
 }
Example #3
0
File: Layer.cs  Project: aboody2009/TPM
 // Fires every neuron in this layer, then recursively runs the next layer, if any.
 public void Run()
 {
     Neurons.ForEach(neuron => neuron.Run());

     NextLayer?.Run();
 }
Example #4
0
 /// <summary>
 /// Hidden-layer gradients: the downstream gradient flows back through the
 /// outgoing synapse weights, scaled by this layer's activation derivative,
 /// and is then distributed onto each incoming synapse.
 /// </summary>
 public override void CalcGrads()
 {
     Neurons.ForEach((neuron, index) =>
     {
         double downstream = neuron.OutSynapses.Sum(syn => syn.Weight * syn.OutNeuron.Gradient);

         neuron.Gradient = downstream * Activation.Derivative(neuron.InVal, neuron.OutVal);
         neuron.InSynapses.ForEach(syn => syn.Gradient = neuron.Gradient * syn.InNeuron.OutVal);
     });
 }
Example #5
0
 /// <summary>
 /// Identity pass-through: each neuron copies the output of its single
 /// upstream neuron unchanged (synapse weight is deliberately ignored here).
 /// </summary>
 /// <param name="isTraining">Unused here; present to satisfy the base contract.</param>
 public override void Forward(bool isTraining)
 {
     Neurons.ForEach((neuron, index) =>
     {
         double passThrough = neuron.InSynapses[0].InNeuron.OutVal;

         neuron.InVal  = passThrough;
         neuron.OutVal = passThrough;
     });
 }
Example #6
0
        // Asks the merge operation for per-synapse weights and writes them
        // back onto each neuron's incoming synapses, position by position.
        private void InitWeights()
        {
            Neurons.ForEach(neuron =>
            {
                Array <double> weights = MergeOp.CalcWeights(neuron.InSynapses);

                neuron.InSynapses.ForEach((syn, k) => syn.Weight = weights[k]);
            });
        }
Example #7
0
        /// <summary>
        /// Accumulates optimizer deltas for every bias and weight into the batch
        /// buffers; Update() later applies and clears them. No-op for frozen layers.
        /// </summary>
        public sealed override void Optimize(IOptimizer optimizer)
        {
            if (!IsTrainable)
            {
                return;
            }

            Neurons.ForEach(neuron =>
            {
                neuron.BatchDelta += optimizer.Optimize(neuron);
                neuron.InSynapses.ForEach(syn => syn.BatchDelta += optimizer.Optimize(syn));
            });
        }
Example #8
0
        /// <summary>
        /// Softmax forward pass. The maximum net input is subtracted before
        /// exponentiation so that Exp cannot overflow to Infinity for large
        /// inputs; softmax(x) == softmax(x - max(x)), so the result is
        /// mathematically unchanged.
        /// </summary>
        /// <param name="isTraining">Unused here; present to satisfy the base contract.</param>
        public override void Forward(bool isTraining)
        {
            // Pass 1: net inputs and the running maximum (the stabilizer).
            double maxIn = double.NegativeInfinity;

            Neurons.ForEach((N, i) =>
            {
                N.InVal = N.Bias + N.InSynapses.Sum(S => S.Weight * S.InNeuron.OutVal);

                if (N.InVal > maxIn)
                {
                    maxIn = N.InVal;
                }
            });

            // Pass 2: shifted exponentials and their sum.
            double eSum = 0.0;

            Neurons.ForEach(N =>
            {
                N.OutVal = Exp(N.InVal - maxIn);
                eSum    += N.OutVal;
            });

            // Pass 3: normalize to a probability distribution.
            Neurons.ForEach(N => N.OutVal /= eSum);
        }
Example #9
0
        /// <summary>
        /// Merge-layer forward pass: refreshes the merge weights when the merge
        /// operation marks them stale, then computes each neuron's output as the
        /// linear (weighted, bias-free) combination of its inputs.
        /// </summary>
        /// <param name="isTraining">Unused here; present to satisfy the base contract.</param>
        public override void Forward(bool isTraining)
        {
            if (MergeOp.RequiresUpdate)
            {
                InitWeights();
            }

            Neurons.ForEach(neuron =>
            {
                double merged = neuron.InSynapses.Sum(syn => syn.Weight * syn.InNeuron.OutVal);

                neuron.InVal  = merged;
                neuron.OutVal = merged;
            });
        }
Example #10
0
        /// <summary>
        /// Connects several equal-shaped input layers element-wise to this layer:
        /// output neuron i receives one synapse from neuron i of every input layer.
        /// </summary>
        /// <exception cref="ShapeMismatchException">An input layer's shape differs from this layer's.</exception>
        public override void Connect(Array <Layer> inLayers)
        {
            inLayers.ForEach((L, i) =>
            {
                if (L.Shape != Shape)
                {
                    throw new ShapeMismatchException($"{nameof(inLayers)}[{i}] shape mismatch.");
                }
            });

            Neurons.ForEach((outN, i) => outN.InSynapses = inLayers.Select(inL => new Synapse(inL.Neurons[i], outN)));

            // Reuse the Synapse instances created above: InSynapses[j] of output
            // neuron i is the synapse from layer j's neuron i. The original code
            // allocated FRESH Synapse objects here, so the input side and the
            // output side held different instances and weight/gradient updates on
            // one were invisible to the other (compare the shared-instance wiring
            // in Connect(Layer)).
            inLayers.ForEach((inL, j) => inL.Neurons.ForEach((inN, i) => inN.OutSynapses = new Array <Synapse>(Neurons[i].InSynapses[j])));
        }
Example #11
0
 /// <summary>
 /// Training mode: each neuron forwards its single input scaled by the synapse
 /// weight (which acts as the per-unit mask). Inference delegates to the base pass.
 /// </summary>
 public override void Forward(bool isTraining)
 {
     if (!isTraining)
     {
         base.Forward(false);
         return;
     }

     Neurons.ForEach((neuron, index) =>
     {
         Synapse mask = neuron.InSynapses[0];

         neuron.InVal  = mask.Weight * mask.InNeuron.OutVal;
         neuron.OutVal = neuron.InVal;
     });
 }
Example #12
0
        /// <summary>
        /// Softmax Jacobian backprop: dL/dx_i = y_i * (g_i - sum_j g_j * y_j),
        /// where g is the gradient arriving from the next layer and y the softmax output.
        /// </summary>
        public override void CalcGrads()
        {
            double weightedGradSum = 0.0;

            // Pass 1: gather incoming gradients and the softmax-weighted gradient sum.
            Neurons.ForEach((neuron, index) =>
            {
                neuron.Gradient  = neuron.OutSynapses.Sum(syn => syn.Weight * syn.OutNeuron.Gradient);
                weightedGradSum += neuron.Gradient * neuron.OutVal;
            });

            // Pass 2: apply the Jacobian and push gradients onto the incoming synapses.
            Neurons.ForEach(neuron =>
            {
                neuron.Gradient = neuron.OutVal * (neuron.Gradient - weightedGradSum);
                neuron.InSynapses.ForEach(syn => syn.Gradient = neuron.Gradient * syn.InNeuron.OutVal);
            });
        }
Example #13
0
        /// <summary>
        /// One-to-one wiring: neuron i of the input layer is linked to neuron i of
        /// this layer through a single Synapse instance shared by both sides.
        /// </summary>
        /// <exception cref="ShapeMismatchException">The layers' shape volumes differ.</exception>
        public override void Connect(Layer inLayer)
        {
            if (inLayer.Shape.Volume != Shape.Volume)
            {
                throw new ShapeMismatchException($"{nameof(inLayer)} shape volume mismatch.");
            }

            Neurons.ForEach((outNeuron, i) =>
            {
                Neuron inNeuron = inLayer.Neurons[i];
                var shared      = new Array <Synapse>(new Synapse(inNeuron, outNeuron));

                outNeuron.InSynapses = shared;
                inNeuron.OutSynapses = shared;
            });
        }
Example #14
0
        /// <summary>
        /// Softmax output-layer backprop: seeds each gradient from the loss
        /// derivative, then applies the softmax Jacobian
        /// dL/dx_i = y_i * (g_i - sum_j g_j * y_j) and fills the synapse gradients.
        /// </summary>
        public override void CalcGrads(ILoss loss, Array <double> targets)
        {
            double weightedGradSum = 0.0;

            // Pass 1: loss derivatives and the softmax-weighted gradient sum.
            Neurons.ForEach((neuron, index) =>
            {
                neuron.Gradient  = loss.Derivative(targets[index], neuron.OutVal);
                weightedGradSum += neuron.Gradient * neuron.OutVal;
            });

            // Pass 2: Jacobian correction, then per-synapse gradients.
            Neurons.ForEach(neuron =>
            {
                neuron.Gradient = neuron.OutVal * (neuron.Gradient - weightedGradSum);
                neuron.InSynapses.ForEach(syn => syn.Gradient = neuron.Gradient * syn.InNeuron.OutVal);
            });
        }
Example #15
0
        /// <summary>
        /// Applies the accumulated batch deltas to every bias and weight, then
        /// clears the accumulators for the next batch. No-op for frozen layers.
        /// </summary>
        public sealed override void Update()
        {
            if (!IsTrainable)
            {
                return;
            }

            Neurons.ForEach(neuron =>
            {
                neuron.Bias      += neuron.BatchDelta;
                neuron.BatchDelta = 0.0;

                neuron.InSynapses.ForEach(syn =>
                {
                    syn.Weight    += syn.BatchDelta;
                    syn.BatchDelta = 0.0;
                });
            });
        }
Example #16
0
        /// <summary>
        /// Redraws the dropout mask (synapse weight 0.0 = dropped, 1.0 = kept).
        /// Non-spatial mode draws per neuron; spatial mode draws one decision per
        /// feature map and shares it across all of that map's neurons.
        /// </summary>
        public override void Update()
        {
            if (!Spatial)
            {
                Neurons.ForEach(neuron => neuron.InSynapses[0].Weight = GRandom.Uniform(0, 1) <= DropChance ? 0.0 : 1.0);
                return;
            }

            // One random draw at the start of each feature map; the decision
            // then applies to every neuron in that map.
            int  featureVol = Shape.Volume / Shape.Dims[0];
            bool dropped    = false;

            Neurons.ForEach((neuron, index) =>
            {
                if (index % featureVol == 0)
                {
                    dropped = GRandom.Uniform(0, 1) <= DropChance;
                }

                neuron.InSynapses[0].Weight = dropped ? 0.0 : 1.0;
            });
        }
Example #17
0
 // Starts every mask weight at 1.0, i.e. no unit is dropped before the first Update().
 public override void Initialize()
 {
     Neurons.ForEach(neuron => neuron.InSynapses[0].Weight = 1.0);
 }
Example #18
0
File: Layer.cs  Project: obarlik/Nevrona
        // Randomizes every neuron's weights; returns this layer to allow call chaining.
        public Layer RandomizeWeights()
        {
            Neurons.ForEach(neuron => neuron.RandomizeWeights());

            return this;
        }
Example #19
0
 // Assigns the given activation function to every neuron in this nested neuron collection.
 public void SetActivationFunction(Func <T, T> function)
 {
     Neurons.ForEach(group => group.ForEach(member => member.ActivationFunction = function));
 }
Example #20
0
        // Nudges the shared weight by learningRate * delta, then pushes the new
        // value down to every neuron.
        public void CalibrateWeights(double learningRate, double delta)
        {
            Weight += learningRate * delta;

            Neurons.ForEach(n => n.UpdateWeights(Weight));
        }
Example #21
0
 // Propagates the signal through this layer by firing each neuron in turn.
 public void ForwardSignal()
 {
     Neurons.ForEach(n => n.Fire());
 }
Example #22
0
 // Gives every neuron a fresh list of dendrites wired to the previous layer
 // (Id - 1), each starting at the supplied default weight.
 internal void BuildDendrites(int dendritesPerNeuron, double defaultWeight)
 {
     Neurons.ForEach(neuron =>
     {
         var dendrites = new List<Dendrite>(dendritesPerNeuron);

         for (int i = 0; i < dendritesPerNeuron; i++)
         {
             dendrites.Add(new Dendrite
             {
                 SourceNeuronId = new Tuple <int, int>(Id - 1, i),
                 Weight         = defaultWeight
             });
         }

         neuron.Dendrites = dendrites;
     });
 }