/// <summary>
/// Runs one simulation step: forward-propagates every sample of the step and,
/// when the network is in training mode, back-propagates after each forward pass.
/// </summary>
public override void Execute()
{
    // Start from the first input sample of this step.
    Owner.ResetSample();

    for (uint sample = 0; sample < Owner.ForwardSamplesPerStep; sample++)
    {
        // Forward pass for the current sample.
        Owner.ForwardPropagation();

        // In training mode, follow every forward pass with a backward pass.
        if (NetworkMode == MyFeedForwardMode.TRAINING)
        {
            BackPropAgent.Execute(m_trainingStep);
        }

        // Advance to the next sample, except after the last one of the step.
        if (sample < Owner.ForwardSamplesPerStep - 1)
        {
            Owner.NextSample();
        }

        // NOTE(review): the step counter advances even outside training mode —
        // presumably intentional so the schedule keeps pace; confirm if in doubt.
        m_trainingStep++;
    }
}
/// <summary>
/// Runs one step of a two-phase execution. First, a training phase over
/// <c>Owner.TrainingData</c> (forward + backward pass per sample), gated by the
/// training signal. Second, an inference phase over <c>Owner.DataInput</c>
/// (forward-only, publishing the result), gated by the forward signal. Finally,
/// each signal is re-raised when its Repeat* flag is set and the signal arrived
/// raised; otherwise it is dropped.
/// </summary>
public override void Execute()
{
    // --- Training phase: forward + backward over the training data. ---
    // NOTE(review): the sample reset and input rebinding happen even when the
    // training signal is not raised — preserved as-is; confirm it is intentional.
    Owner.ResetSample();
    Owner.InputLayer.SetInputMemoryBlock(Owner.TrainingData);

    if (!UseTrainingSignal || Owner.TrainingSignal.IsIncomingRised())
    {
        for (uint sample = 0; sample < Owner.ForwardSamplesPerStep; sample++)
        {
            // Forward pass, then a backward pass for this training step.
            Owner.ForwardPropagation();
            BackPropAgent.Execute(m_trainingStep);

            // Advance to the next sample, except after the last one.
            if (sample < Owner.ForwardSamplesPerStep - 1)
            {
                Owner.NextSample();
            }

            m_trainingStep++;
        }
    }

    // --- Inference phase: forward-only pass over the primary data input. ---
    if (!UseForwardSignal || Owner.ForwardSignal.IsIncomingRised())
    {
        Owner.ResetSample();
        Owner.InputLayer.SetInputMemoryBlock(Owner.DataInput);

        for (uint sample = 0; sample < Owner.ForwardSamplesPerStep; sample++)
        {
            Owner.ForwardPropagation();

            // Publish the end layer's output as this node's output.
            Owner.CopyResult();

            // Advance to the next sample, except after the last one.
            if (sample < Owner.ForwardSamplesPerStep - 1)
            {
                Owner.NextSample();
            }
        }
    }

    // --- Signal bookkeeping: optionally keep a raised signal raised. ---
    if (RepeatTrainingSignal && UseTrainingSignal && Owner.TrainingSignal.IsIncomingRised())
    {
        Owner.TrainingSignal.Raise();
    }
    else
    {
        Owner.TrainingSignal.Drop();
    }

    if (RepeatForwardSignal && UseForwardSignal && Owner.ForwardSignal.IsIncomingRised())
    {
        Owner.ForwardSignal.Raise();
    }
    else
    {
        Owner.ForwardSignal.Drop();
    }
}