Example #1
 protected void FinaliseOutputs(Step downstream)
 {
     for (int i = 0; i < downstream.WeightedInputs.Length; i++)
     {
         downstream.Output[i] = downstream.CalculateActivation(downstream.WeightedInputs[i]);
     }
 }
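CalculateActivation itself is not shown in these examples. A minimal sketch of what it and its companion CalculateActivationDerivative might look like, assuming the scaled hyperbolic tangent f(a) = 1.7159 * tanh(2a / 3) traditionally used in LeNet-style networks (the actual squashing function may differ):

 public double CalculateActivation(double weightedInput)
 {
     // Assumed squashing function: f(a) = A * tanh(S * a) with A = 1.7159, S = 2/3.
     return 1.7159 * Math.Tanh((2.0 / 3.0) * weightedInput);
 }

 public double CalculateActivationDerivative(double weightedInput)
 {
     // f'(a) = A * S * (1 - tanh^2(S * a)).
     double t = Math.Tanh((2.0 / 3.0) * weightedInput);
     return 1.7159 * (2.0 / 3.0) * (1.0 - t * t);
 }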
Example #2
 protected void FinaliseErrorSecondDerivatives(Step downstream)
 {
     for (int i = 0; i < downstream.ErrorDerivative.Length; i++)
     {
         double weightedInputs = downstream.WeightedInputs[i];
         double activationDerivative = downstream.CalculateActivationDerivative(weightedInputs);
         downstream.ErrorDerivative[i] *= activationDerivative * activationDerivative;
     }
 }
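On entry ErrorDerivative holds the second derivative of the error with respect to each unit's output; on exit it holds the derivative with respect to the unit's weighted input. Multiplying by the squared first derivative of the activation is the usual Gauss-Newton style approximation, in which the term involving f''(a_k) is dropped:

\[
\frac{\partial^2 E}{\partial a_k^2} \;\approx\; \bigl(f'(a_k)\bigr)^2 \, \frac{\partial^2 E}{\partial o_k^2},
\qquad o_k = f(a_k).
\]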
Example #3
 protected void EstimateBiasSecondDerivative(Step downstream)
 {
     for (int i = 0; i < downstream.Length; i++)
     {
         // Accumulate the sum of the second derivatives of the error with respect to the bias weight.
         // Note that the bias is implemented as an always-on neuron sharing a single weight to the
         // output neurons, hence the multiplication by 1.0 * 1.0 (its state squared).
         BiasStepSize += downstream.ErrorDerivative[i] * 1.0 * 1.0;
     }
 }
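Because the bias behaves like a weight from an always-on neuron whose output is fixed at 1.0, the accumulated second derivative is just the sum of the downstream terms times that state squared, which is what the 1.0 * 1.0 factor expresses:

\[
\frac{\partial^2 E}{\partial b^2} \;\approx\; \sum_k \frac{\partial^2 E}{\partial a_k^2} \cdot 1^2 .
\]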
Example #4
        protected override void TrainCore(Step downstream)
        {
            RectangularStep step = (RectangularStep)downstream;
            Debug.Assert(MapCount == downstream.Upstream.Count);

            for (int i = 0; i < MapCount; i++)
            {
                PropogateError(step, i);
            }
        }
Example #5
        protected override void PreTrainCore(Step downstream)
        {
            RectangularStep step = (RectangularStep)downstream;
            Debug.Assert(MapCount == downstream.Upstream.Count);

            for (int i = 0; i < MapCount; i++)
            {
                PropogateUnitSecondDerivatives(step, i);
            }
            EstimateBiasSecondDerivative(step);
        }
Example #6
        protected void FinaliseErrorFirstDerivatives(Step downstream)
        {
            // Calculating dEj/dWij and dEi/dOi both require multiplying by the derivative of the activation
            // function; it is done here once so it does not have to be repeated for each individual calculation.

            // This turns dEk/dOk into dEk/dAk by multiplying it by dOk/dAk
            for (int i = 0; i < downstream.ErrorDerivative.Length; i++)
            {
                double weightedInputs = downstream.WeightedInputs[i];
                double activationDerivative = downstream.CalculateActivationDerivative(weightedInputs);
                downstream.ErrorDerivative[i] *= activationDerivative;
            }
        }
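The loop performs one chain-rule step, turning the stored derivative with respect to each output into the derivative with respect to its weighted input:

\[
\frac{\partial E}{\partial a_k} \;=\; f'(a_k)\,\frac{\partial E}{\partial o_k}.
\]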
Example #7
        protected override void PreTrainCore(Step downstream)
        {
            int inputIndex = 0;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(inputIndex + upstream.Length <= InputLength);
                for (int i = 0; i < upstream.Length; i++)
                {
                    // The error's second derivative with respect to each output is a constant 2.0,
                    // since the first derivative is 2.0 * (state - desiredState).
                    upstream.ErrorDerivative[inputIndex] = 2.0;

                    inputIndex += 1;
                }
            }
        }
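With a squared-error contribution of (o_i - d_i)^2 per output, the first derivative is 2(o_i - d_i) and the second derivative is the constant 2.0 seeded above:

\[
E_i = (o_i - d_i)^2, \qquad
\frac{\partial E_i}{\partial o_i} = 2\,(o_i - d_i), \qquad
\frac{\partial^2 E_i}{\partial o_i^2} = 2 .
\]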
Example #8
        protected override void PreTrainCore(Step downstream)
        {
            Debug.Assert(InputNeurons % downstream.Upstream.Count == 0);
            int neuronsPerUpstream = InputNeurons / downstream.Upstream.Count;

            int inputNeuron = 0;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(upstream.Length == neuronsPerUpstream);

                for (int i = 0; i < neuronsPerUpstream; i++)
                {
                    PropogateSecondDerivatives(downstream, upstream, i, inputNeuron++);
                }
            }
            EstimateBiasSecondDerivative(downstream);
        }
Example #9
        protected void PropogateForward(Step downstream, int output)
        {
            double sumSquaredError = 0;
            int inputIndex = 0;
            int definitionIndex = output * InputLength;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(inputIndex + upstream.Length <= InputLength);
                for (int i = 0; i < upstream.Length; i++)
                {
                    double difference = upstream.Output[i] - ClassStateDefinitions[definitionIndex];
                    sumSquaredError += difference * difference;

                    inputIndex += 1;
                    definitionIndex += 1;
                }
            }
            downstream.Output[output] = sumSquaredError;
        }
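Each output is therefore the squared Euclidean distance between the incoming activations and the prototype states defined for that class:

\[
y_k \;=\; \sum_i \bigl(o_i - c_{k,i}\bigr)^2,
\qquad c_{k,i} = \texttt{ClassStateDefinitions}[\,k \cdot \texttt{InputLength} + i\,].
\]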
Example #10
        protected override void TrainCore(Step downstream)
        {
            int inputIndex = 0;
            int definitionIndex = correctClass * InputLength;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(inputIndex + upstream.Length <= InputLength);
                for (int i = 0; i < upstream.Length; i++)
                {
                    double desiredState = ClassStateDefinitions[definitionIndex];

                    double firstDerivative = 2.0 * (upstream.Output[inputIndex] - desiredState);
                    upstream.ErrorDerivative[inputIndex] = firstDerivative;

                    inputIndex += 1;
                    definitionIndex += 1;
                }
            }
        }
Example #11
 public override void ProprogateForward(Step downstream)
 {
     base.ProprogateForward(downstream);
     FinaliseOutputs(downstream);
 }
Example #12
 public override void PreTrain(Step downstream)
 {
     FinaliseErrorSecondDerivatives(downstream);
     base.PreTrain(downstream);
 }
Example #13
 public MarkingStep(Step upstream, LeNetConfiguration configuration)
     : this(new[] { upstream }, configuration)
 {
 }
Example #14
 public virtual void ProprogateForward(Step downstream)
 {
     PropogateForwardCore(downstream);
 }
Example #15
 public override void ProprogateForward(Step downstream)
 {
     base.ProprogateForward(downstream);
     FinaliseOutputs(downstream);
 }
Example #16
 public override void Train(Step downstream)
 {
     FinaliseErrorFirstDerivatives(downstream);
     base.Train(downstream);
 }
Example #17
        protected void PropogateError(Step downstream, Step upstream, int upstreamNeuron, int inputNeuron)
        {
            int weightIndex = inputNeuron * OutputNeurons;

            double upstreamState = upstream.Output[upstreamNeuron];

            double inputError = 0.0;
            for (int output = 0; output < OutputNeurons; output++)
            {
                double downstreamErrorDerivative = downstream.ErrorDerivative[output];

                // Accumulate the input's error gradient: the sum over all outputs j of
                // dE/dAj multiplied by dAj/dOi (the weight), giving dE/dOi.
                inputError += (downstreamErrorDerivative * Weight[weightIndex]);

                // Calculate the error's first derivative with respect to this weight: dE/dWij = dE/dAj * Oi.
                double weightErrorGradient = downstreamErrorDerivative * upstreamState;
                double deltaWeight = WeightStepSize[weightIndex] * weightErrorGradient;
                Weight[weightIndex] -= deltaWeight;

                weightIndex += 1;
            }
            upstream.ErrorDerivative[upstreamNeuron] = inputError;
        }
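For each downstream output j the loop does two things: it accumulates the gradient to pass back to the upstream neuron, and it applies a gradient-descent update scaled by the per-weight step size:

\[
\frac{\partial E}{\partial o_i} \;=\; \sum_j w_{ij}\,\frac{\partial E}{\partial a_j},
\qquad
\Delta w_{ij} \;=\; -\,\eta_{ij}\,\frac{\partial E}{\partial a_j}\, o_i ,
\]

where o_i is the upstream state, dE/dAj is the downstream ErrorDerivative (already multiplied by the activation derivative in FinaliseErrorFirstDerivatives), and eta_ij is WeightStepSize[weightIndex]. The value written to upstream.ErrorDerivative is dE/dOi, which the upstream layer in turn converts to dE/dAi.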
Example #18
        protected override void TrainCore(Step downstream)
        {
            Debug.Assert(InputNeurons % downstream.Upstream.Count == 0);
            int neuronsPerUpstream = InputNeurons / downstream.Upstream.Count;

            int inputNeuron = 0;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(upstream.Length == neuronsPerUpstream);
                for (int i = 0; i < neuronsPerUpstream; i++)
                {
                    PropogateError(downstream, upstream, i, inputNeuron++);
                }
            }
        }
Example #19
 protected abstract void PropogateForwardCore(Step step);
Example #20
        protected void PropogateSecondDerivatives(Step downstream, Step upstream, int upstreamNeuron, int inputNeuron)
        {
            int weightIndex = inputNeuron * OutputNeurons;

            double upstreamState = upstream.Output[upstreamNeuron];
            double upstreamErrorSecondDerivative = 0.0;

            for (int output = 0; output < OutputNeurons; output++)
            {
                double downstreamErrorSecondDerivative = downstream.ErrorDerivative[output]; // (d^2)E/(dAj)^2, where Aj is the sum of inputs to this downstream unit.

                // Here we calculate (d^2)Ej/(dWij)^2 by multiplying the 2nd derivative of E with respect to the sum of inputs, Aj
                // by the state of Oi, the upstream unit, squared. Refer to Equation 25 in document.
                // The summing happening here is described by equation 23.
                double weight2ndDerivative = downstreamErrorSecondDerivative * upstreamState * upstreamState;

                WeightStepSize[weightIndex] = weight2ndDerivative;

                double weight = Weight[weightIndex];

                // This is implementing the last sigma of Equation 27.
                // This propagates the error second derivatives back to the previous layer; the result
                // still needs to be multiplied by the squared first derivative of the activation function
                // at the previous layer (see FinaliseErrorSecondDerivatives).
                upstreamErrorSecondDerivative += weight * weight * downstreamErrorSecondDerivative;

                weightIndex += 1;

            }

            upstream.ErrorDerivative[upstreamNeuron] += upstreamErrorSecondDerivative;
        }
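For each weight this stores the diagonal second-derivative estimate (Equation 25) and accumulates the term propagated to the upstream unit (Equation 27), which still has to be multiplied by the squared activation derivative at that layer, as in FinaliseErrorSecondDerivatives:

\[
\frac{\partial^2 E}{\partial w_{ij}^2} \;\approx\; \frac{\partial^2 E}{\partial a_j^2}\, o_i^2,
\qquad
\frac{\partial^2 E}{\partial o_i^2} \;\approx\; \sum_j w_{ij}^2\,\frac{\partial^2 E}{\partial a_j^2} .
\]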
Example #21
        protected override void PropogateForwardCore(Step downstream)
        {
            Debug.Assert(InputNeurons % downstream.Upstream.Count == 0);
            int neuronsPerUpstream = InputNeurons / downstream.Upstream.Count;

            int inputIndex = 0;
            foreach (Step upstream in downstream.Upstream)
            {
                Debug.Assert(inputIndex + upstream.Length <= InputNeurons);

                for (int i = 0; i < neuronsPerUpstream; i++)
                {
                    PropogateForward(downstream, upstream, i, inputIndex++);
                }
            }
        }
Example #22
        protected void PropogateForward(Step downstream, Step upstream, int upstreamNeuron, int inputNeuron)
        {
            int weightIndex = inputNeuron * OutputNeurons;

            double upstreamNeuronOutput = upstream.Output[upstreamNeuron];
            // Note: the Bias term is not applied in this loop; this method runs once per upstream
            // neuron, so adding Bias here would count it once per input. It is presumably folded
            // into WeightedInputs elsewhere (not shown in these examples).
            for (int o = 0; o < OutputNeurons; o++)
            {
                downstream.WeightedInputs[o] += upstreamNeuronOutput * Weight[weightIndex++];
            }
        }
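This method runs once per upstream neuron, so each call adds that neuron's contribution to every downstream weighted input; across all upstream neurons the accumulation builds

\[
a_j \;=\; b + \sum_i o_i\, w_{ij},
\]

although the bias term itself is not added inside this loop and is presumably applied to WeightedInputs elsewhere (not shown in these examples).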
Example #23
        protected override void PropogateForwardCore(Step downstream)
        {
            Debug.Assert(downstream.Upstream.Count == 1);

            for (int o = 0; o < ClassCount; o++)
            {
                PropogateForward(downstream, o);
            }
        }
Example #24
 public StepSnapshot(Step step, int width)
 {
     this.Step = step;
     OutputSnapshot = new double[step.Output.Length];
     Width = width;
 }
Example #25
 public virtual void PreTrain(Step step)
 {
     PreTrainingSamples += 1;
     PreTrainCore(step);
 }
Example #26
 public MarkingStep(Step upstream, LeNetConfiguration configuration)
     : this(new[] { upstream }, configuration)
 {
 }
Example #27
 public virtual void Train(Step downstream)
 {
     TrainCore(downstream);
 }
Example #28
 public override void Train(Step downstream)
 {
     FinaliseErrorFirstDerivatives(downstream);
     base.Train(downstream);
 }
Example #29
 protected abstract void TrainCore(Step downstream);
Example #30
 public override void PreTrain(Step downstream)
 {
     FinaliseErrorSecondDerivatives(downstream);
     base.PreTrain(downstream);
 }