/// <summary>
/// Builds a dense (fully connected) layer from the supplied neurons.
/// </summary>
/// <param name="neurons">Neurons making up this layer; their weight arrays are shared, not copied.</param>
/// <param name="activationFunctionType">Activation applied to each neuron's output.</param>
public NeuralNetworkDenseLayer(NeuralNetworkNeuron[] neurons, NeuralNetworkActivationFunctionType activationFunctionType)
{
    Type = NeuralNetworkLayerType.Dense;
    Neurons = neurons;
    ActivationFunctionType = activationFunctionType;

    // Mirror each neuron's weight vector into a jagged matrix so the layer
    // can hand all weights to the forward pass in one structure. Note the
    // inner arrays are aliased (same references as the neurons' own arrays).
    int neuronCount = neurons.Length;
    Weights = new float[neuronCount][];
    for (int index = 0; index < neuronCount; index++)
    {
        Weights[index] = neurons[index].Weights;
    }
}
/// <summary>
/// Computes this neuron's output: the weighted sum of <paramref name="inputs"/>
/// plus <c>Bias</c>, passed through the given activation function.
/// </summary>
/// <param name="inputs">Input values; assumed to have at least <c>Weights.Length</c> elements — TODO confirm caller guarantees this.</param>
/// <param name="activationFunctionType">Activation applied to the biased sum.</param>
/// <returns>The activated output value.</returns>
public float ForwardPropagation(float[] inputs, NeuralNetworkActivationFunctionType activationFunctionType)
{
    float sum = 0;
#if NETFRAMEWORK
    // Delegate the dot product to the native/optimized API on .NET Framework builds.
    NeuralNetworkAPI.Neuron(inputs, Weights, Weights.Length, ref sum);
#else
    // Managed fallback: plain dot product of inputs and weights.
    for (int i = 0; i < Weights.Length; i++)
    {
        sum += inputs[i] * Weights[i];
    }
#endif
    return NeuralNetworkAPI.ActivationFunction(activationFunctionType, sum + Bias);
}
/// <summary>
/// Creates a 2D convolution layer with the given kernels and geometry.
/// </summary>
/// <param name="kernels">Convolution kernels (filters) applied by this layer.</param>
/// <param name="width">Input width in elements.</param>
/// <param name="height">Input height in elements.</param>
/// <param name="strideX">Horizontal step between kernel applications.</param>
/// <param name="strideY">Vertical step between kernel applications.</param>
/// <param name="activationFunctionType">Activation applied to each output element.</param>
/// <param name="paddingType">How the input borders are padded.</param>
public NeuralNetworkConv2DLayer(
    NeuralNetworkKernel[] kernels,
    int width,
    int height,
    int strideX,
    int strideY,
    NeuralNetworkActivationFunctionType activationFunctionType,
    NeuralNetworkPaddingType paddingType
)
{
    Type = NeuralNetworkLayerType.Conv2D;

    // Spatial configuration.
    Width = width;
    Height = height;
    StrideX = strideX;
    StrideY = strideY;

    // Filters and per-element behaviour.
    Kernels = kernels;
    ActivationFunctionType = activationFunctionType;
    PaddingType = paddingType;
}
/// <summary>
/// Applies the scalar activation <paramref name="type"/> to a single value.
/// </summary>
/// <param name="type">Which activation function to apply.</param>
/// <param name="value">The pre-activation value.</param>
/// <returns>The activated value.</returns>
/// <exception cref="NotImplementedException">Thrown for activation types without an implementation.</exception>
public static float ActivationFunction(NeuralNetworkActivationFunctionType type, float value)
{
    switch (type)
    {
        case NeuralNetworkActivationFunctionType.ReLU:
            // Clamp negative inputs to zero.
            return value < 0 ? 0 : value;

        case NeuralNetworkActivationFunctionType.Sigmoid:
            // 1 / (1 + e^-x), computed in double precision then narrowed.
            return (float)(1 / (1 + Math.Exp(-value)));

        // Both are identity at the per-element level. NOTE(review): Softmax
        // returns the raw value here — presumably the vector-wide
        // normalisation happens in the caller; confirm.
        case NeuralNetworkActivationFunctionType.Softmax:
        case NeuralNetworkActivationFunctionType.Linear:
            return value;

        default:
            throw new NotImplementedException(type.ToString());
    }
}