// Verifies a single dense layer against a manual forward pass: for each neuron,
// the dot product of inputs and weights plus the bias, run through the activation function.
public void SingleLayerTest(int neuronCount, float[] inputs, float[] weightsAndBiases, IActivationFunction activationFunction)
{
    var inCnt = inputs.Length;
    var outputs = new float[neuronCount];
    var memIn = inputs.AsMemory();
    var weMemFlat = weightsAndBiases.AsMemory();
    var memOut = outputs.AsMemory();

    // Each neuron owns (inputCount + 1) consecutive values: its weights followed by its bias.
    var weMem = HelpersMisc.SliceArray(ref weMemFlat, inCnt + 1, neuronCount);
    var layer = new Layer(neuronCount, ref weMem, ref memIn, ref memOut, activationFunction);
    layer.CalculateWithoutNeuronParallel();

    Assert.Multiple(() =>
    {
        for (var i = 0; i < neuronCount; i++)
        {
            var wAndB = weMem[i].ToArray();
            //var total = inputs.Select((t, j) => wAndB[j] * t).Sum();
            var total = PrivateSum(inputs.Select((t, j) => wAndB[j] * t));
            total += wAndB[inputs.Length];
            var nOut = activationFunction.Forward(ref total);
            Assert.That(outputs[i], Is.EqualTo(nOut) /*.Within(.0001f)*/, $"Output is not as expected on neuron #{i}");
        }
    });
}
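// PrivateSum is not shown in this snippet. A minimal sketch, assuming it exists only to
// accumulate the products left-to-right in the same order the layer does, so the expected
// value matches the layer's output exactly without falling back to .Within() tolerances.
// This is an assumption about the helper, not its verified implementation.
// (Assumes using System.Collections.Generic; at the top of the file.)
private static float PrivateSum(IEnumerable<float> values)
{
    // Plain sequential accumulation in iteration order.
    var total = 0f;
    foreach (var v in values)
        total += v;
    return total;
}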
// Smallest possible case: one neuron, one input, linear activation.
// Expected output: input * weight + bias = .5f * .5f + 1f.
public void SingleLayerSingleNeuron()
{
    var inputs = new[] { .5f };
    var weightsAndBiases = new[] { .5f, 1f };
    var outputs = new[] { 0f };
    var actFunc = new Linear();
    var memIn = inputs.AsMemory();
    var weMemFlat = weightsAndBiases.AsMemory();
    var memOut = outputs.AsMemory();
    var weMem = HelpersMisc.SliceArray(ref weMemFlat, 2, 1);
    var layer = new Layer(1, ref weMem, ref memIn, ref memOut, actFunc);
    layer.CalculateWithoutNeuronParallel();

    Assert.That(outputs[0], Is.EqualTo(.5f * .5f + 1f));
}
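// HelpersMisc.SliceArray lives outside this file. A minimal sketch of what both tests
// assume it does: split the flat weights-and-biases memory into `count` row slices of
// `width` elements each (one neuron's weights followed by its bias). The method is named
// SliceArraySketch here to make clear it is a hypothetical stand-in, not the real helper.
private static Memory<float>[] SliceArraySketch(ref Memory<float> flat, int width, int count)
{
    var rows = new Memory<float>[count];
    for (var i = 0; i < count; i++)
    {
        // Row i covers elements [i * width, (i + 1) * width) of the flat buffer.
        rows[i] = flat.Slice(i * width, width);
    }
    return rows;
}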