// Builds a layer from its definition; weights and biases are optional and can be supplied later.
public Layer(LayerDefinition layerDefinition, Matrix<double> weights = null, Vector<double> biases = null)
{
    NodeCount = layerDefinition.NodeCount;
    Activation = layerDefinition.Activation;
    Weights = weights;
    Biases = biases;
}
// Applies the chosen activation element-wise to X, writing into result.
// Leaky ReLU (slope 0.2) doubles as the fallback for any unrecognised value.
public static void Activate(Vector<double> X, EnmActivations activation, Vector<double> result)
{
    switch (activation)
    {
        default:
        case EnmActivations.LRelu:
            Activations.LeakyReLU(X, 0.2, result);
            return;
        case EnmActivations.Sigmoid:
            Activations.Sigmoid(X, result);
            return;
    }
}
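For context, here is a minimal sketch of what the Activations helpers called above might look like. The class name and call signatures come from Activate itself, but the element-wise bodies below are an assumption (standard leaky ReLU and logistic sigmoid over MathNet.Numerics vectors), not the project's actual implementation.

using System;
using MathNet.Numerics.LinearAlgebra;

public static class Activations
{
    // Leaky ReLU: f(x) = x for x >= 0, alpha * x otherwise (assumed implementation).
    public static void LeakyReLU(Vector<double> X, double alpha, Vector<double> result)
    {
        for (int i = 0; i < X.Count; i++)
            result[i] = X[i] >= 0 ? X[i] : alpha * X[i];
    }

    // Logistic sigmoid: f(x) = 1 / (1 + e^(-x)) (assumed implementation).
    public static void Sigmoid(Vector<double> X, Vector<double> result)
    {
        for (int i = 0; i < X.Count; i++)
            result[i] = 1.0 / (1.0 + Math.Exp(-X[i]));
    }
}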
// Describes a layer before it is built: its node count and which activation it uses.
public LayerDefinition(int nodeCount, EnmActivations activation)
{
    NodeCount = nodeCount;
    Activation = activation;
}
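As a hedged usage sketch, the pieces above could be wired together as follows. It assumes MathNet.Numerics for Matrix<double>/Vector<double>, that NodeCount, Activation, Weights, and Biases are public properties, and that the static Activate method is declared on Layer; the dimensions and variable names are illustrative only.

using MathNet.Numerics.LinearAlgebra;

// Describe a 4-node sigmoid layer, then build it with random weights and constant biases.
var definition = new LayerDefinition(nodeCount: 4, activation: EnmActivations.Sigmoid);
var weights = Matrix<double>.Build.Random(4, 3);        // 4 nodes, 3 inputs
var biases = Vector<double>.Build.Dense(4, 0.1);
var layer = new Layer(definition, weights, biases);

// Forward pass for a single input: z = W * x + b, then the element-wise activation.
var input = Vector<double>.Build.Dense(new[] { 0.5, -1.0, 2.0 });
var z = layer.Weights * input + layer.Biases;
var output = Vector<double>.Build.Dense(definition.NodeCount);
Layer.Activate(z, layer.Activation, output);            // assumes Activate lives on Layer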