/// <summary>
/// Copy constructor: builds a structurally identical NeuralNetwork from an
/// existing one, cloning all layers and rebuilding the layer connections.
/// </summary>
/// <param name="src">Existing NeuralNetwork to copy.</param>
public NeuralNetwork(NeuralNetwork src)
{
    // NOTE(review): inputLayer is not Init()'d here while hidden/output layers are — confirm intentional.
    inputLayer = new NeuralNetworkLayer(src.inputLayer);

    // Clone and initialise every hidden layer.
    hiddenLayers = new NeuralNetworkLayer[src.hiddenLayers.Length];
    for (int layer = 0; layer < hiddenLayers.Length; layer++)
    {
        hiddenLayers[layer] = new NeuralNetworkLayer(src.hiddenLayers[layer]);
        hiddenLayers[layer].Init();
    }

    outputLayer = new NeuralNetworkLayer(src.outputLayer);
    outputLayer.Init();

    maxNumberOfHiddenNeurons = 0;
    maxNumberOfSynapses = 0;

    if (hiddenLayers.Length == 0)
    {
        // No hidden layers: wire the input layer straight to the output layer.
        outputConnection = new NeuralNetworkLayerConnection(inputLayer, outputLayer);
        if (outputConnection.numberOfSynapses > maxNumberOfSynapses)
        {
            maxNumberOfSynapses = outputConnection.numberOfSynapses;
        }
        return;
    }

    // Rebuild the feed-forward (and optional recurrent) connections between layers.
    hiddenConnections = new NeuralNetworkLayerConnection[hiddenLayers.Length];
    hiddenRecurringConnections = new NeuralNetworkLayerConnection[hiddenLayers.Length];
    for (int layer = 0; layer < hiddenLayers.Length; layer++)
    {
        // First hidden layer is fed by the input layer, the rest by their predecessor.
        NeuralNetworkLayer previous = layer == 0 ? inputLayer : hiddenLayers[layer - 1];
        hiddenConnections[layer] = new NeuralNetworkLayerConnection(previous, hiddenLayers[layer]);

        // A recurring layer gets an extra self-connection; others get null.
        hiddenRecurringConnections[layer] = hiddenLayers[layer].recurring
            ? new NeuralNetworkLayerConnection(hiddenLayers[layer], hiddenLayers[layer])
            : null;

        // Track the widest hidden layer and the largest connection seen so far.
        if (hiddenLayers[layer].numberOfNeurons > maxNumberOfHiddenNeurons)
        {
            maxNumberOfHiddenNeurons = hiddenLayers[layer].numberOfNeurons;
        }
        if (hiddenConnections[layer].numberOfSynapses > maxNumberOfSynapses)
        {
            maxNumberOfSynapses = hiddenConnections[layer].numberOfSynapses;
        }
    }

    // The last hidden layer feeds the output layer.
    outputConnection = new NeuralNetworkLayerConnection(hiddenLayers[hiddenLayers.Length - 1], outputLayer);
    if (outputConnection.numberOfSynapses > maxNumberOfSynapses)
    {
        maxNumberOfSynapses = outputConnection.numberOfSynapses;
    }
}
/// <summary>
/// Allocate this instance's execution buffers (input, output, hidden and
/// recurring-hidden data arrays) from the given network, then reset them.
/// </summary>
/// <param name="nn">Source network whose topology sizes the arrays.</param>
public void Setup(NeuralNetwork nn)
{
    nn.SetupExecutionArrays(out inputData, out outputData, out hiddenData, out hiddenRecurringData);
    Reset(true);
}
/// <summary>
/// Compile the NeuralNetwork into a single 1-4 channel shader function.
/// Only the feed-forward pass is emitted; recurring hidden-layer state is
/// not compiled into the shader.
/// </summary>
/// <param name="nn">Network to compile.</param>
/// <param name="fname">Name of the generated shader function.</param>
/// <param name="glsl">True for GLSL ("vecN" types); false replaces "vec" with "float" (HLSL-style "floatN").</param>
/// <returns>Shader source text, or null if the input or output channel count is outside 1..4.</returns>
public static string AsShader(NeuralNetwork nn, string fname, bool glsl)
{
    // FIX: use the invariant culture so floats always print with '.'; with the
    // previous culture-sensitive ToString, comma-decimal locales (e.g. de-DE)
    // emitted "0,5" and produced shaders that do not compile.
    var inv = System.Globalization.CultureInfo.InvariantCulture;

    // Output channel count selects the return type (float / vec2 / vec3 / vec4).
    int channels = nn.outputLayer.numberOfNeurons;
    if (channels < 1 || channels > 4)
    {
        return null; // invalid, max 4 outputs
    }
    string chn = channels == 1 ? "float" : "vec" + channels;

    // Input channel count selects the parameter type.
    int ichannels = nn.inputLayer.numberOfNeurons;
    if (ichannels < 1 || ichannels > 4)
    {
        return null; // invalid, max 4 inputs
    }
    string ichn = ichannels == 1 ? "float" : "vec" + ichannels;

    string[] inNames = new string[] { "uv.x", "uv.y", "uv.z", "uv.w" };

    // StringBuilder instead of repeated string concatenation in the loops.
    var code = new System.Text.StringBuilder();
    code.Append(chn).Append(' ').Append(fname).Append('(').Append(ichn).Append(" uv) {");

    int lastNumNeurons = ichannels; // width of the layer feeding the current one
    int baseId = 0;                 // id of the first temp variable of the current layer
    int lastId = 0;                 // id of the first temp variable of the previous layer
    // (Removed: dead "stateIds" array that was written but never read.)

    for (int i = 0; i < nn.hiddenLayers.Length; i++)
    {
        string activeFunc = GetActivationFunctionGLSLName(nn.hiddenLayers[i].activationFunction);
        if (i != 0)
        {
            lastId = baseId - lastNumNeurons;
        }

        int weightIndex = 0;
        int k = nn.hiddenLayers[i].numberOfNeurons;
        while (k-- > 0)
        {
            // Emit: float v<id> = activation(bias + sum(prevValue * weight));
            code.Append("float v").Append(baseId + k).Append(" = ").Append(activeFunc).Append('(')
                .Append(nn.hiddenLayers[i].biases[k].ToString(".0######", inv)).Append('+');
            int j = lastNumNeurons;
            while (j-- > 0)
            {
                // First layer reads the shader input; later layers read prior temps.
                string term = i == 0 ? inNames[j] : "v" + (lastId + j);
                code.Append(term).Append('*')
                    .Append(nn.hiddenConnections[i].weights[weightIndex++].ToString(".0######", inv))
                    .Append(j == 0 ? "" : "+");
            }
            code.Append(");");
        }
        lastNumNeurons = nn.hiddenLayers[i].numberOfNeurons;
        baseId += lastNumNeurons;
    }

    // Hidden (or input, when there are no hidden layers) -> output.
    lastId = baseId - lastNumNeurons;
    string oactiveFunc = GetActivationFunctionGLSLName(nn.outputLayer.activationFunction);
    string[] ocs = new string[channels];
    int c = channels;
    int outWeightIndex = 0; // shared across all output channels, as in the layer storage
    while (c-- > 0)
    {
        var ostr = new System.Text.StringBuilder();
        ostr.Append(oactiveFunc).Append('(')
            .Append(nn.outputLayer.biases[c].ToString(".0######", inv)).Append('+');
        if (nn.hiddenLayers.Length == 0)
        {
            int j = ichannels;
            while (j-- > 0)
            {
                ostr.Append(inNames[j]).Append('*')
                    .Append(nn.outputConnection.weights[outWeightIndex++].ToString(".0######", inv))
                    .Append(j == 0 ? "" : "+");
            }
        }
        else
        {
            int j = lastNumNeurons;
            while (j-- > 0)
            {
                ostr.Append('v').Append(lastId + j).Append('*')
                    .Append(nn.outputConnection.weights[outWeightIndex++].ToString(".0######", inv))
                    .Append(j == 0 ? "" : "+");
            }
        }
        ocs[c] = ostr.Append(')').ToString();
    }

    // Return statement and end of function.
    if (channels == 1)
    {
        code.Append("return ").Append(ocs[0]).Append(";}");
    }
    else
    {
        code.Append("return ").Append(chn).Append('(');
        for (int i = 0; i < channels; i++)
        {
            code.Append(i == 0 ? "" : ",").Append(ocs[i]);
        }
        code.Append(");}");
    }

    string result = code.ToString();
    // Non-GLSL output rewrites vector type names: vec2 -> float2, etc.
    return glsl ? result : result.Replace("vec", "float");
}
//a few util functions used
/// <summary>
/// Compile the NeuralNetwork into an image-processing GLSL fragment. Only the
/// clamp(...,0,1) rectifier activation is emitted and every layer size must be
/// divisible by 3 (neurons are packed three per vec3). The fragment expects its
/// input in a vec3 array named 'is' and leaves the result in 'os'.
/// </summary>
/// <param name="neuralNet">Network to compile.</param>
/// <returns>GLSL source declaring and filling hs0..hsN and os.</returns>
public static string AsShader(NeuralNetwork neuralNet)
{
    // FIX: invariant culture so floats always print with '.'; the previous
    // culture-sensitive ToString broke the generated shader on comma-decimal locales.
    var inv = System.Globalization.CultureInfo.InvariantCulture;
    int KERNEL_AREA = neuralNet.inputLayer.numberOfNeurons / 3;

    // StringBuilder instead of O(n^2) string concatenation in the nested loops.
    var src = new System.Text.StringBuilder();

    // One pass per hidden layer, plus one final pass for the output layer.
    for (int h = 0; h < neuralNet.hiddenLayers.Length + 1; h++)
    {
        NeuralNetworkLayer layer;
        NeuralNetworkLayerConnection connect;
        int isize;
        string ov, iv;

        // Input array for this pass: 'is' on the first pass, previous hidden otherwise.
        if (h == 0)
        {
            iv = "is";
            isize = KERNEL_AREA;
        }
        else
        {
            iv = "hs" + (h - 1);
            isize = neuralNet.hiddenLayers[h - 1].numberOfNeurons / 3;
        }

        // Output array: 'os' for the output layer, hs<h> for hidden layers.
        if (h >= neuralNet.hiddenLayers.Length)
        {
            ov = "os";
            layer = neuralNet.outputLayer;
            connect = neuralNet.outputConnection;
        }
        else
        {
            ov = "hs" + h;
            layer = neuralNet.hiddenLayers[h];
            connect = neuralNet.hiddenConnections[h];
        }

        int osize = layer.numberOfNeurons / 3;
        src.Append("vec3 ").Append(ov).Append('[').Append(osize).Append("];").Append(Environment.NewLine);

        int weightIndex = 0, k = osize;
        while (k-- > 0)
        {
            // ov[k] = clamp(vec3(bias0 + dots..., bias1 + dots..., bias2 + dots...), 0., 1.);
            src.Append(ov).Append('[').Append(k).Append("] = clamp(vec3(");
            int c = 3;
            while (c-- > 0)
            {
                src.Append(layer.biases[(k * 3 + 2) - c].ToString(".0######", inv)).Append('+');
                int w = isize;
                while (w-- > 0)
                {
                    // Per input vec3, the lane weights are read at weightIndex+2, +1, +0.
                    src.Append("dot(").Append(iv).Append('[').Append(w).Append("],vec3(")
                       .Append(connect.weights[weightIndex + 2].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex + 1].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex].ToString(".0######", inv)).Append("))");
                    if (w != 0)
                    {
                        src.Append('+');
                    }
                    weightIndex += 3;
                }
                if (c != 0)
                {
                    src.Append(',');
                }
                else
                {
                    src.Append("),0.,1.);").Append(Environment.NewLine);
                }
            }
            if (k == 0)
            {
                src.Append(Environment.NewLine);
            }
        }
    }
    return src.ToString();
}
/// <summary>
/// Compile a forward + backward propagating version of the NeuralNetwork into an
/// image-processing GLSL fragment. Only the clamp(...,0,1) rectifier activation is
/// emitted and every layer size must be divisible by 3. The fragment expects its
/// input in a vec3 array 'is' and the target output in 'ts'; it produces the
/// forward activations (hs*/os) and per-layer deltas (td/ds*).
/// </summary>
/// <param name="neuralNet">Network to compile.</param>
/// <returns>GLSL source for the forward and backward passes.</returns>
public static string GenerationAsShader(NeuralNetwork neuralNet)
{
    // FIX: invariant culture so floats always print with '.'; the previous
    // culture-sensitive ToString broke the generated shader on comma-decimal locales.
    var inv = System.Globalization.CultureInfo.InvariantCulture;
    int KERNEL_AREA = neuralNet.inputLayer.numberOfNeurons / 3;

    var src = new System.Text.StringBuilder();
    src.Append("//forward propagation").Append(Environment.NewLine);

    // ---- Forward pass: one block per hidden layer plus the output layer. ----
    for (int h = 0; h < neuralNet.hiddenLayers.Length + 1; h++)
    {
        NeuralNetworkLayer layer;
        NeuralNetworkLayerConnection connect;
        int isize;
        string ov, iv;

        // Input array: 'is' on the first pass, previous hidden otherwise.
        if (h == 0)
        {
            iv = "is";
            isize = KERNEL_AREA;
        }
        else
        {
            iv = "hs" + (h - 1);
            isize = neuralNet.hiddenLayers[h - 1].numberOfNeurons / 3;
        }

        // Output array: 'os' for the output layer, hs<h> for hidden layers.
        if (h >= neuralNet.hiddenLayers.Length)
        {
            ov = "os";
            layer = neuralNet.outputLayer;
            connect = neuralNet.outputConnection;
        }
        else
        {
            ov = "hs" + h;
            layer = neuralNet.hiddenLayers[h];
            connect = neuralNet.hiddenConnections[h];
        }

        int osize = layer.numberOfNeurons / 3;
        src.Append("vec3 ").Append(ov).Append('[').Append(osize).Append("];").Append(Environment.NewLine);

        int weightIndex = 0, k = osize;
        while (k-- > 0)
        {
            // ov[k] = clamp(vec3(bias + dots..., ...), 0., 1.);
            src.Append(ov).Append('[').Append(k).Append("] = clamp(vec3(");
            int c = 3;
            while (c-- > 0)
            {
                src.Append(layer.biases[(k * 3 + 2) - c].ToString(".0######", inv)).Append('+');
                int w = isize;
                while (w-- > 0)
                {
                    // Per input vec3, the lane weights are read at weightIndex+2, +1, +0.
                    src.Append("dot(").Append(iv).Append('[').Append(w).Append("],vec3(")
                       .Append(connect.weights[weightIndex + 2].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex + 1].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex].ToString(".0######", inv)).Append("))");
                    if (w != 0)
                    {
                        src.Append('+');
                    }
                    weightIndex += 3;
                }
                if (c != 0)
                {
                    src.Append(',');
                }
                else
                {
                    src.Append("),0.,1.);").Append(Environment.NewLine);
                }
            }
            if (k == 0)
            {
                src.Append(Environment.NewLine);
            }
        }
    }

    // ---- Output error: td[i] = os[i] - ts[i]. ----
    int nout = neuralNet.outputLayer.numberOfNeurons / 3;
    src.Append(Environment.NewLine).Append("//target output difference/deriv").Append(Environment.NewLine)
       .Append("vec3 td[").Append(nout).Append("];").Append(Environment.NewLine);
    for (int i = 0; i < nout; i++)
    {
        src.Append("td[").Append(i).Append("] = os[").Append(i).Append("]-ts[").Append(i).Append("];").Append(Environment.NewLine);
    }

    // ---- Backward pass: deltas ds<h> from the layer above's deltas. ----
    src.Append("//back propagation").Append(Environment.NewLine);
    for (int h = neuralNet.hiddenLayers.Length; h > -1; h--)
    {
        NeuralNetworkLayer layer;
        NeuralNetworkLayerConnection connect;
        int isize, osize;
        string ov, iv, dv;

        // Forward activations feeding this level (also sizes the emitted delta array).
        if (h == 0)
        {
            iv = "is";
            osize = KERNEL_AREA;
        }
        else
        {
            iv = "hs" + (h - 1);
            osize = neuralNet.hiddenLayers[h - 1].numberOfNeurons / 3;
        }

        // Delta source: 'td' at the top, ds<h+1> below it.
        if (h >= neuralNet.hiddenLayers.Length)
        {
            dv = "td";
            layer = neuralNet.outputLayer;
            connect = neuralNet.outputConnection;
        }
        else
        {
            dv = "ds" + (h + 1);
            layer = neuralNet.hiddenLayers[h];
            connect = neuralNet.hiddenConnections[h];
        }

        ov = "ds" + h;
        isize = layer.numberOfNeurons / 3;
        src.Append("vec3 ").Append(ov).Append('[').Append(osize).Append("];").Append(Environment.NewLine);

        // NOTE(review): the weights are traversed in the same linear order as the
        // forward pass (no transposed indexing) — confirm this matches the intended
        // gradient computation for NeuralNetworkLayerConnection's weight layout.
        int weightIndex = 0, k = osize;
        while (k-- > 0)
        {
            // ds<h>[k] = (1 - activation[k]) * vec3(sum of dot(delta, weights) per lane);
            src.Append(ov).Append('[').Append(k).Append("] = (vec3(1.0,1.0,1.0)-").Append(iv).Append('[').Append(k).Append("])*vec3(");
            int c = 3;
            while (c-- > 0)
            {
                int w = isize;
                while (w-- > 0)
                {
                    src.Append("dot(").Append(dv).Append('[').Append(w).Append("],vec3(")
                       .Append(connect.weights[weightIndex + 2].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex + 1].ToString(".0######", inv)).Append(',')
                       .Append(connect.weights[weightIndex].ToString(".0######", inv)).Append("))");
                    if (w != 0)
                    {
                        src.Append('+');
                    }
                    weightIndex += 3;
                }
                if (c != 0)
                {
                    src.Append(',');
                }
                else
                {
                    src.Append(");").Append(Environment.NewLine);
                }
            }
            if (k == 0)
            {
                src.Append(Environment.NewLine);
            }
        }
    }
    return src.ToString();
}
/// <summary>
/// Record the loss achieved by <paramref name="nn"/> and produce the network to
/// evaluate next: a bred/mutated copy of the best performers, or a randomized
/// explorer when breeding is unavailable or the run failed.
/// </summary>
/// <param name="nn">Network that was just evaluated.</param>
/// <param name="loss">Loss achieved; values above 1.0 are treated as failed runs.</param>
/// <returns>Next generation NeuralNetwork.</returns>
public NeuralNetwork NextGeneration(NeuralNetwork nn, float loss)
{
    NeuralNetwork next = nn;

    if (first)
    {
        // Very first call: seed from the source network's weights.
        next.CopyWeightsAndBiases(sourceNetwork);
        first = false;
    }
    else if (loss > 1.0f)
    {
        // Failed run: discard it and explore from a random starting point.
        next.RandomizeWeightsAndBiases();
    }
    else
    {
        lock (breedLock)
        {
            // Mutation strength creeps upward while no new best is found, capped at max.
            lossDelta += mutationIncreaseRate;
            if (lossDelta > maxMutationRate)
            {
                lossDelta = maxMutationRate;
            }

            if (loss < bestLoss)
            {
                // New best: reset mutation strength and demote the old best to runner-up.
                lossDelta = minMutationRate;
                if (breeding)
                {
                    secondBestLoss = bestLoss;
                    secondBestNetwork = bestNetwork;
                }
                bestLoss = loss;
                bestNetwork = nn;
                next = new NeuralNetwork(nn); // keep the record holder itself untouched
            }
            else if (breeding && loss < secondBestLoss)
            {
                // New runner-up.
                secondBestLoss = loss;
                secondBestNetwork = nn;
                next = new NeuralNetwork(nn);
            }

            // Breed only when enabled, the best loss is good enough, and both parents exist.
            bool canBreed = breeding && bestLoss <= maxBreedingLoss
                && bestNetwork != null && secondBestNetwork != null;
            if (canBreed)
            {
                next.CopyWeightsAndBiases(bestNetwork);
                next.Breed(secondBestNetwork);
                if (lossDelta > 0.0f)
                {
                    next.Mutate(Utils.NextFloat01() * lossDelta);
                }
            }
            else
            {
                next.RandomizeWeightsAndBiases();
            }
        }
    }

    // NOTE(review): incremented outside the lock and 'first' is read unlocked —
    // confirm callers are single-threaded here or accept the benign race.
    generations++;
    return next;
}