public float[] Feed(float[] input, bool pushForward = true)
{
    float[] output = new float[OutputSize];

    // Weighted sum plus bias for each output neuron, followed by the activation function.
    for (int y = 0; y < OutputSize; y++)
    {
        int start = y * InputSize;   // first weight of row y in the flattened weight matrix
        float z = Bias[y];
        int w = start;
        for (int x = 0; x < InputSize; x++, w++)
        {
            z += Weight[w] * input[x];
        }
        output[y] = FlatFunction(z);
    }

    // Either propagate the activations to the next layer or return them directly.
    if (pushForward && nextLayer != null)
    {
        return nextLayer.Feed(output);
    }

    return output;
}
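Reading the loop directly: the weights are stored as a single flattened array in row-major order, one row of InputSize weights per output neuron, so each output value is

\[
\text{output}[y] = f\!\Big(\text{Bias}[y] + \sum_{x=0}^{\text{InputSize}-1} \text{Weight}[y \cdot \text{InputSize} + x] \cdot \text{input}[x]\Big)
\]

where \(f\) is FlatFunction, the layer's activation.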
public NeuralTrainLayer(NeuralNetworkLayer layer, float[] input)
{
    // Run a single forward step without pushing to the next layer,
    // caching the input/output pair this layer needs for backpropagation.
    float[] result = layer.Feed(input, false);

    Input = input;
    Output = result;
    Layer = layer;

    // Gradient buffers: one dC/db entry per output neuron and one dC/dw entry per weight.
    DCDB = new float[Output.Length];
    DCDW = new float[input.Length * Output.Length];
}
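For context, a minimal sketch of how these training wrappers might be assembled for a whole network during one forward pass; the BuildTrainLayers helper and the layers list are hypothetical illustrations, not part of the original source.

using System.Collections.Generic;

// Hypothetical helper (not from the original source): walk the network layer by layer,
// feeding each layer's cached Output into the next layer's constructor so every
// NeuralTrainLayer holds the Input/Output pair its DCDB and DCDW gradients refer to.
static List<NeuralTrainLayer> BuildTrainLayers(List<NeuralNetworkLayer> layers, float[] input)
{
    var trainLayers = new List<NeuralTrainLayer>();
    float[] current = input;
    foreach (NeuralNetworkLayer layer in layers)
    {
        var trainLayer = new NeuralTrainLayer(layer, current); // calls layer.Feed(current, false) internally
        trainLayers.Add(trainLayer);
        current = trainLayer.Output; // the next layer consumes this layer's activation
    }
    return trainLayers;
}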