/// <summary>
 /// All units in this layer will use a leaky rectified linear unit (leaky ReLU) activation function
 /// </summary>
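 /// <remarks>
 /// Sketch of the usual leaky variant (an assumption, not confirmed by this code):
 /// f(x) = x for x > 0, otherwise a*x for some small slope a (commonly 0.01).
 /// The exact slope used by ReluUnitActivationTraining is not shown here.
 /// </remarks>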
 public static IInventoryAndChaining ReluActivation(this IOutputUnitActivationCreator unitActivationCreator)
 {
     return unitActivationCreator.OutputUnitActivation<ReluUnitActivationTraining>();
 }
 /// <summary>
 /// All units in this layer will use a hyperbolic tangent (tanh) activation function
 /// </summary>
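 /// <remarks>
 /// tanh(x) = (e^x - e^-x) / (e^x + e^-x), squashing the net input to the range (-1, 1).
 /// </remarks>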
 public static IInventoryAndChaining HyperbolicTangentActivation(this IOutputUnitActivationCreator unitActivationCreator)
 {
     return unitActivationCreator.OutputUnitActivation<HyperbolicTangentUnitActivationTraining>();
 }
 /// <summary>
 /// All units in this layer will use a bipolar activation function
 /// </summary>
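 /// <remarks>
 /// "Bipolar" commonly means either a threshold to {-1, +1} or the bipolar sigmoid
 /// 2 / (1 + e^-x) - 1; which variant BipolarUnitActivationTraining implements is not
 /// shown here.
 /// </remarks>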
 public static IInventoryAndChaining BipolarActivation(this IOutputUnitActivationCreator unitActivationCreator)
 {
     return unitActivationCreator.OutputUnitActivation<BipolarUnitActivationTraining>();
 }
 /// <summary>
 /// All units in this layer will use an identity activation function (returns the same value it was given). The output equals Σ(weight(n) * netInput(n)) over all n units in the layer below.
 /// </summary>
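 /// <remarks>
 /// Identity activations are typically used on output layers for regression tasks,
 /// where the raw weighted sum is the desired prediction.
 /// </remarks>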
 public static IInventoryAndChaining IdentityActivation(this IOutputUnitActivationCreator unitActivationCreator)
 {
     return unitActivationCreator.OutputUnitActivation<IdentityUnitActivationTraining>();
 }
 /// <summary>
 /// All units in this layer will use a sigmoid activation function
 /// </summary>
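 /// <remarks>
 /// σ(x) = 1 / (1 + e^-x), squashing the net input to the range (0, 1).
 /// </remarks>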
 public static IInventoryAndChaining SigmoidActivation(this IOutputUnitActivationCreator unitActivationCreator)
 {
     return unitActivationCreator.OutputUnitActivation<SigmoidUnitActivationTraining>();
 }
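 // Usage sketch (illustrative only): the chain below assumes some builder entry point
 // that yields an IOutputUnitActivationCreator per layer. The names Builder, Create,
 // HiddenLayer, OutputLayer, and Build are hypothetical and may differ from this
 // library's actual API; only the activation extension methods above are real.
 //
 //     var network = Builder.Create()
 //         .HiddenLayer(units: 64).ReluActivation()
 //         .HiddenLayer(units: 32).HyperbolicTangentActivation()
 //         .OutputLayer(units: 1).SigmoidActivation()
 //         .Build();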