/// <summary>
/// Create new NeuralNetworkEvolver to train on specific input/target data.
/// Uses the built-in premadePerfFunc to feed samples and accumulate loss.
/// </summary>
/// <param name="breed">Breeding flag.</param>
/// <param name="sourcenn">NeuralNetwork to evolve.</param>
/// <param name="nThreads">Number of threads to simulate evolution on.</param>
/// <param name="inData">Input data.</param>
/// <param name="targetDat">Target output data.</param>
public NeuralNetworkEvolver(bool breed, NeuralNetwork sourcenn, int nThreads, float[][] inData, float[][] targetDat)
{
    numThreads = nThreads;
    sourceNetwork = sourcenn;
    inputOutputLossFunction = premadePerfFunc;
    inputData = inData;
    targetData = targetDat;
    breeding = breed;

    threads = new Thread[numThreads];
    subjects = new NeuralNetworkProgram[numThreads];
    readyNextData = new bool[numThreads];

    for (int t = 0; t < nThreads; t++)
    {
        // Capture a per-iteration copy so each thread closure sees its own id.
        int workerId = t;
        threads[t] = new Thread(() => evolverThread(workerId));
        readyNextData[t] = false;

        // Each subject gets its own network copy seeded from the source weights.
        NeuralNetworkProgram program = new NeuralNetworkProgram(sourcenn);
        program.neuralNetwork = new NeuralNetwork(sourceNetwork);
        program.neuralNetwork.CopyWeightsAndBiases(sourceNetwork);
        subjects[t] = program;
    }
}
/// <summary>
/// Built-in input/output processing step for data-driven evolution:
/// scores the subject's last output against the current target sample,
/// advances the sample index (nnp.state), and loads the next input.
/// A state of -1 marks "epoch finished / not started"; the first call in
/// that state resets the counters and restarts from sample 0.
/// </summary>
/// <param name="nnp">Subject program whose loss/state fields are updated in place.</param>
private void premadePerfFunc(NeuralNetworkProgram nnp) {
    nnp.hasOutput = false;
    if (nnp.state != -1) {
        //calculate loss
        float perf = 0.0f;
        float[] odat = nnp.context.outputData, tdat = targetData[nnp.state];
        int i = odat.Length;
        while (i-- > 0) {
            // Per-element absolute error between output and target.
            float amax = Math.Abs(odat[i] - tdat[i]);
            if (lossType == NeuralNetworkTrainer.LOSS_TYPE_AVERAGE) {
                // AVERAGE mode: sum errors; divided by output length below.
                perf += amax;
            } else {
                // Otherwise: track the maximum single-element error.
                if (amax > perf) { perf = amax; }
            }
        }
        if (lossType == NeuralNetworkTrainer.LOSS_TYPE_AVERAGE) {
            // Accumulate this sample's mean error into the running epoch loss.
            nnp.loss += perf / (float)odat.Length;
        } else {
            // Max mode: epoch loss is the worst per-sample error seen.
            if (perf > nnp.loss) { nnp.loss = perf; }
        }
        //advance state
        nnp.state++;
        if (nnp.state >= targetData.Length) {
            // Epoch complete: state equals the number of samples processed here,
            // so total accumulates the sample count (used for averaging later).
            nnp.total += nnp.state;
            nnp.state = -1;
        }
    } else {
        // First call of a new epoch: restart from sample 0.
        nnp.state = 0;
        nnp.total = 0;
    }
    if (nnp.state != -1) {
        //put next input data
        // NOTE(review): assumes inputData[state] fits in context.inputData — confirm sizes match.
        Array.Copy(inputData[nnp.state], nnp.context.inputData, inputData[nnp.state].Length);
        nnp.hasInput = true;
    }
}
/// <summary>
/// Sample 1D neural network from xMin to xMax building array of 2D points.
/// Each result element is { x, network(x) } with x spaced evenly across the range.
/// </summary>
/// <param name="numSamples">Number of points to sample; 0 yields an empty array.</param>
/// <param name="nn">Network to evaluate (single input, single output used).</param>
/// <param name="xMin">Start of the sampling range (inclusive).</param>
/// <param name="xMax">End of the sampling range (inclusive).</param>
/// <returns>Array of [x, y] pairs of length numSamples.</returns>
public static float[][] SampleNeuralNetwork(int numSamples, NeuralNetwork nn, float xMin, float xMax)
{
    NeuralNetworkProgram nnp = new NeuralNetworkProgram(nn);
    nnp.context.Reset(true);
    float[][] res = new float[numSamples][];
    for (int i = 0; i < numSamples; i++)
    {
        // BUG FIX: with numSamples == 1 the original computed 0f/0f = NaN;
        // a single sample is now taken at xMin instead.
        float sx = numSamples > 1
            ? (i / (float)(numSamples - 1)) * (xMax - xMin) + xMin
            : xMin;
        nnp.context.inputData[0] = sx;
        nnp.Execute();
        res[i] = new float[] { sx, nnp.context.outputData[0] };
    }
    return res;
}
/// <summary>
/// Create new NeuralNetworkEvolver to train using a custom performance function.
/// </summary>
/// <param name="breed">Breeding flag.</param>
/// <param name="sourcenn">NeuralNetwork to evolve.</param>
/// <param name="nThreads">Number of threads to simulate evolution on.</param>
/// <param name="lossFunc">Performance/InputOutput processing function.</param>
public NeuralNetworkEvolver(bool breed, NeuralNetwork sourcenn, int nThreads, ProcessOutputInputGetLoss lossFunc)
{
    // DOC FIX: the <param> tag previously named "perfFunc", which does not
    // match the actual parameter name "lossFunc" (compiler doc warning).
    numThreads = nThreads;
    sourceNetwork = sourcenn;
    inputOutputLossFunction = lossFunc;
    breeding = breed;

    threads = new Thread[numThreads];
    subjects = new NeuralNetworkProgram[numThreads];
    readyNextData = new bool[numThreads];

    for (int i = 0; i < nThreads; i++)
    {
        // Capture a per-iteration copy so each thread closure sees its own id.
        int ci = i;
        threads[i] = new Thread(() => evolverThread(ci));
        readyNextData[i] = false;

        // Each subject gets its own network copy seeded from the source weights.
        subjects[i] = new NeuralNetworkProgram(sourcenn);
        subjects[i].neuralNetwork = new NeuralNetwork(sourceNetwork);
        subjects[i].neuralNetwork.CopyWeightsAndBiases(sourceNetwork);
    }
}
/// <summary>
/// Worker loop for one evolution subject. Repeatedly runs the subject's
/// loss function and network, and at each epoch boundary (state == -1)
/// finalizes the epoch loss and spawns the next generation. When a data
/// streaming callback is set, thread 0 acts as coordinator: it waits for
/// all workers to flag ready, fetches the next data batch, then releases them.
/// </summary>
/// <param name="id">Index of this worker's subject/thread slot.</param>
private void evolverThread(int id) {
    NeuralNetworkProgram subject = subjects[id];
    while (running) {
        // loss stays -1 unless an epoch just completed; used as a sentinel below.
        float loss = -1.0f;
        if (subject.state == -1) {
            bool reset = true;
            if (onStreamNextData != null) {
                // Signal this worker has finished its epoch and wants new data.
                // NOTE(review): readyNextData is read/written across threads with no
                // locks or volatile — relies on Thread.Sleep-polled visibility; confirm.
                readyNextData[id] = true;
                if (id == 0) {
                    // Coordinator: spin until every worker is ready.
                    while (true) {
                        bool ready = true;
                        for (int i = 0; i < numThreads; i++) { ready &= readyNextData[i]; }
                        if (ready) { break; }
                        Thread.Sleep(1);
                    }
                    // Fetch the next batch; callback's return controls whether loss resets.
                    reset = onStreamNextData(ref inputData, ref targetData);
                    // Release all workers (including self).
                    for (int i = 0; i < numThreads; i++) { readyNextData[i] = false; }
                } else {
                    // Non-coordinator: wait until thread 0 clears our flag.
                    while (readyNextData[id]) { Thread.Sleep(1); }
                }
            }
            if (reset) {
                // Finalize the completed epoch's loss (average over total samples
                // when in AVERAGE mode); total > 0 guards the very first iteration.
                if (subject.total > 0) {
                    loss = subject.loss;
                    if (lossType == NeuralNetworkTrainer.LOSS_TYPE_AVERAGE) {
                        loss /= (float)subject.total;
                    }
                }
                subject.loss = 0.0f;
            }
        }
        if (loss > -1.0f) {
            // Epoch completed: breed/select the next network from this one's score.
            subject.neuralNetwork = NextGeneration(subject.neuralNetwork, loss);
            //reset state
            subject.context.Reset(false);
            if (loss <= desiredLoss) {
                //hit performance goal, done!
                if (onReachedGoal != null) { onReachedGoal(); }
                //clean up
                // NOTE(review): setting running = false here stops ALL worker threads,
                // not just this one — appears intentional (global goal reached).
                running = false;
                return;
            }
        }
        // Feed inputs / score outputs, then advance the network one step.
        inputOutputLossFunction(subject);
        subject.Execute();
        // Optional throttle for visualization/CPU relief.
        if (evolverDelay != 0) { Thread.Sleep(evolverDelay); }
    }
}