/// <summary>
/// Runs this layer's forward pass by delegating to the given compute backend.
/// </summary>
/// <param name="calculator">Device (CPU/GPU) that performs the layer math.</param>
/// <param name="input">Activation values produced by the previous layer.</param>
/// <param name="activationFunction">Activation applied to each neuron's weighted sum.</param>
/// <returns>The activation values of this layer, one per neuron.</returns>
public float[] Compute(ComputeDevice calculator, float[] input, IActivationFunction activationFunction)
{
    // Thin wrapper: all math lives in the ComputeDevice implementation.
    var layerOutput = calculator.CalculateLayer(weightMx, biases, input, activationFunction);
    return layerOutput;
}
/// <summary>
/// CPU reference implementation of a fully connected layer:
/// for each neuron m, computes f(sum_k W[m,k] * a[k] + b[m]).
/// </summary>
/// <param name="weightMx">Weight matrix, [neurons in this layer, neurons in previous layer].</param>
/// <param name="bias">Bias vector, one entry per neuron in this layer.</param>
/// <param name="prevActivations">Activations of the previous layer; length must match weightMx's second dimension.</param>
/// <param name="sigmoidFunction">Activation function applied to each weighted sum.</param>
/// <returns>Activation values of this layer, one per row of <paramref name="weightMx"/>.</returns>
public override float[] CalculateLayer(float[,] weightMx, float[] bias, float[] prevActivations, IActivationFunction sigmoidFunction)
{
    int neuronCount = weightMx.GetLength(0);
    int inputCount = weightMx.GetLength(1);
    float[] activations = new float[neuronCount];

    for (int neuron = 0; neuron < neuronCount; neuron++)
    {
        // Accumulate the dot product first, then add the bias, so the
        // floating-point summation order matches the original exactly.
        float weightedSum = 0.0f;
        for (int input = 0; input < inputCount; input++)
        {
            weightedSum += weightMx[neuron, input] * prevActivations[input];
        }
        weightedSum += bias[neuron];
        activations[neuron] = sigmoidFunction.Calculate(weightedSum);
    }

    return activations;
}
/// <summary>
/// Copy constructor: duplicates another layer, deep-copying its weight
/// matrix and bias vector so the copies can be mutated independently.
/// </summary>
/// <param name="o">Layer to copy.</param>
internal Layer(Layer o)
{
    activationFunction = o.activationFunction;
    // Clone() on arrays produces an independent copy of the elements
    // (floats are value types, so this is a full deep copy here).
    weightMx = (float[,])o.weightMx.Clone();
    biases = (float[])o.biases.Clone();
}
/// <summary>
/// Computes one fully connected layer's forward pass on this device:
/// implementations return f(W * a + b) for each neuron.
/// </summary>
/// <param name="weightMx">Weight matrix, [neurons in this layer, neurons in previous layer].</param>
/// <param name="bias">Bias vector, one entry per neuron in this layer.</param>
/// <param name="prevActivations">Activations of the previous layer.</param>
/// <param name="sigmoidFunction">Activation function applied to each weighted sum.</param>
/// <returns>Activation values of this layer.</returns>
// NOTE(review): declared 'unsafe' presumably so device implementations can use
// pointer arithmetic over the arrays — confirm against the concrete overrides.
public abstract unsafe float[] CalculateLayer(float[,] weightMx, float[] bias, float[] prevActivations, IActivationFunction sigmoidFunction);
/// <summary>
/// Create a network with specific weights and biases
/// </summary>
/// <param name="inputLayers">A structure containing, per layer, one (weights, bias) tuple per neuron</param>
/// <param name="activationFunction">The activation function used by the network</param>
/// <returns>A network built from the given layer configuration</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="inputLayers"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the layer configuration is empty or inconsistent.</exception>
public static Network CreateNetwork(List<List<Tuple<List<float>, float>>> inputLayers, IActivationFunction activationFunction)
{
    if (inputLayers == null)
    {
        throw new ArgumentNullException(nameof(inputLayers));
    }

    List<Layer> layers = new List<Layer>();
    foreach (var layerData in inputLayers)
    {
        if (layerData == null || layerData.Count == 0)
        {
            throw new ArgumentException("Invalid layer config! Layer #" + layers.Count + " is empty!");
        }

        int neuronCountInLayer = layerData.Count;
        int weightsPerNeuron = layerData[0].Item1.Count;

        // Each neuron must have exactly one weight per neuron of the previous
        // layer (whose neuron count equals its bias count).
        // The original also compared against weightMx.GetLength(0) of the
        // previous layer, but that always equals biases.Length by construction,
        // so a single check suffices.
        if (layers.Count > 0 && weightsPerNeuron != layers.Last().biases.Length)
        {
            throw new ArgumentException("Invalid layer config! Layer #" + layers.Count + " doesnt have the number of weights required by the previous layer!");
        }

        float[,] weightMx = new float[neuronCountInLayer, weightsPerNeuron];
        float[] biases = new float[neuronCountInLayer];
        for (int i = 0; i < neuronCountInLayer; ++i)
        {
            // Guard against jagged input: the original indexed blindly, which
            // either threw IndexOutOfRangeException or silently dropped weights.
            if (layerData[i].Item1.Count != weightsPerNeuron)
            {
                throw new ArgumentException("Invalid layer config! Layer #" + layers.Count + " neuron #" + i + " has an inconsistent weight count!");
            }
            for (int j = 0; j < weightsPerNeuron; ++j)
            {
                weightMx[i, j] = layerData[i].Item1[j];
            }
            biases[i] = layerData[i].Item2;
        }

        layers.Add(new Layer(weightMx, biases));
    }

    return new Network(layers, activationFunction);
}
/// <summary>
/// Builds a network from pre-constructed layers and the activation
/// function they share; caches the activation function's serialized name.
/// </summary>
/// <param name="layers">Layers of the network, input-to-output order.</param>
/// <param name="activationFunction">Activation function used by every layer.</param>
Network(List<Layer> layers, IActivationFunction activationFunction)
{
    this.layers = layers;
    this.activationFunction = activationFunction;
    // Cached so serialization doesn't need to query the function again.
    activationFunctionName = activationFunction.GetSerializedName();
}
/// <summary>
/// Output-layer error delta for this cost function: simply (a - desiredOutput).
/// NOTE(review): omitting the activation-derivative term is characteristic of a
/// cross-entropy cost paired with a sigmoid output — confirm against the class
/// this override belongs to. The z and activationFunction parameters are
/// intentionally unused here.
/// </summary>
/// <param name="z">Weighted input of the neuron (unused).</param>
/// <param name="a">Actual activation of the neuron.</param>
/// <param name="desiredOutput">Target activation of the neuron.</param>
/// <param name="activationFunction">Activation function (unused).</param>
/// <returns>The error delta for the neuron.</returns>
public override float CalculateDelta(float z, float a, float desiredOutput, IActivationFunction activationFunction) => a - desiredOutput;
/// <summary>
/// Computes the output-layer error delta for a single neuron under this
/// cost function, given its weighted input, activation, and target value.
/// </summary>
/// <param name="z">Weighted input of the neuron (pre-activation).</param>
/// <param name="a">Actual activation of the neuron.</param>
/// <param name="desiredOutput">Target activation of the neuron.</param>
/// <param name="activationFunction">Activation function, for implementations that need its derivative.</param>
/// <returns>The error delta for the neuron.</returns>
public abstract float CalculateDelta(float z, float a, float desiredOutput, IActivationFunction activationFunction);
/// <summary>
/// Layer calculation is not supported on this device; this override is a
/// deliberate stub and always throws <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="weightMx">Unused.</param>
/// <param name="bias">Unused.</param>
/// <param name="prevActivations">Unused.</param>
/// <param name="sigmoidFunction">Unused.</param>
/// <returns>Never returns.</returns>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override float[] CalculateLayer(float[,] weightMx, float[] bias, float[] prevActivations, IActivationFunction sigmoidFunction)
{
    throw new NotImplementedException();
}