/// <summary>
/// Runs a forward pass through every layer on the given compute device.
/// When <paramref name="zValues"/> is non-null, each layer's raw (pre-activation)
/// output is recorded there and the activation function is applied on the CPU;
/// otherwise the activation is applied inside the layer computation itself.
/// </summary>
/// <param name="mathLib">Compute backend used to evaluate each layer.</param>
/// <param name="input">Input vector fed to the first layer.</param>
/// <param name="activations">Optional sink receiving each layer's activated output.</param>
/// <param name="zValues">Optional sink receiving a clone of each layer's pre-activation output.</param>
/// <param name="flushMathlibWorkingCache">True to release the device's working cache before returning.</param>
/// <returns>The activated output of the final layer.</returns>
internal float[] Compute(ComputeDevice mathLib, float[] input, ref List <float[]> activations, ref List <float[]> zValues, bool flushMathlibWorkingCache)
{
    bool collectZ = zValues != null;
    // When collecting z values, ask the layers for raw weighted sums via a
    // pass-through activation and apply the real activation ourselves below.
    PasstroughActivation rawOutput = collectZ ? new PasstroughActivation() : null;

    var signal = input;
    foreach (var layer in layers)
    {
        signal = layer.Compute(mathLib, signal, collectZ ? rawOutput : activationFunction);
        if (collectZ)
        {
            zValues.Add((float[])signal.Clone());
            for (int j = 0; j < signal.Length; ++j)
            {
                signal[j] = activationFunction.Calculate(signal[j]);
            }
        }
        activations?.Add(signal);
    }

    if (flushMathlibWorkingCache)
    {
        mathLib.FlushWorkingCache();
    }
    return signal;
}
/// <summary>
/// Evaluates <paramref name="network"/> on <paramref name="input"/> on the CPU,
/// recording each layer's pre-activation sums (z values) and activated outputs.
/// Entry i of both lists corresponds to layer i.
/// </summary>
/// <param name="input">Input vector fed to the first layer.</param>
/// <param name="network">Network whose layers are evaluated.</param>
/// <param name="z_values">Receives each layer's raw weighted sums; must be empty on entry.</param>
/// <param name="activations">Receives each layer's activated outputs; must be empty on entry.</param>
public void _EvaluateNetworkZValues(float[] input, Network network, ref List <float[]> z_values, ref List <float[]> activations)
{
    Utils.Assert(activations.Count == 0);
    Utils.Assert(z_values.Count == 0);

    // A pass-through activation makes CalculateLayer return the raw sums (z).
    var identity = new PasstroughActivation();
    var previous = input;
    foreach (var layer in network.layers)
    {
        float[] z = CalculateLayer(layer.weightMx, layer.biases, previous, identity);
        var activated = new float[z.Length];
        for (int n = 0; n < z.Length; n++)
        {
            activated[n] = layer.activationFunction.Calculate(z[n]);
        }
        z_values.Add(z);
        activations.Add(activated);
        previous = activated;
    }
}