/// <summary>
/// Accumulates the cost of a single input/output pair into each candidate's running total.
/// After the first learning iteration the first <c>selectionSize</c> individuals are skipped —
/// NOTE(review): presumably survivors whose costs carry over; confirm against the selection step.
/// </summary>
/// <param name="inputOutputPair">The training pair to evaluate every candidate model against.</param>
private void CalculatePopulationCostsForInputOutputPair(InputOutputPairModel inputOutputPair)
{
    var startIndex = isFirstLearningIteration ? 0 : selectionSize;

    for (var candidateIndex = startIndex; candidateIndex < population.Length; candidateIndex++)
    {
        var activations = modelExecuter.Execute(population[candidateIndex].Model, inputOutputPair.Inputs);
        var pairCost = CalculateCost(inputOutputPair.Outputs, activations.Last());

        // Per-individual lock: this method may run concurrently for different pairs,
        // so each cost accumulation is guarded independently.
        lock (populationCostLocks[candidateIndex])
        {
            population[candidateIndex].Cost += pairCost;
        }
    }
}
/// <summary>
/// Runs one learning step over the batch: resets the delta accumulators, backpropagates
/// every pair in parallel, then applies the accumulated layer deltas to the model.
/// </summary>
/// <param name="batch">The input/output pairs to learn from in this step.</param>
public void Learn(InputOutputPairModel[] batch)
{
    // Fresh accumulators for this batch; inner arrays are left null here and are
    // presumably allocated downstream — TODO confirm in PropagateBackwards.
    layerWeightDeltaTotals = new double[Model.WeightLayers.Length][,];
    layerBiasDeltaTotals = new double[Model.BiasLayers.Length][];

    Parallel.ForEach(batch, pair =>
    {
        var activations = executer.Execute(Model, pair.Inputs);
        var outputLayer = activations.Last();

        // Derivative of the cost with respect to each output activation.
        var outputDeltas = new double[outputLayer.Length];
        for (var i = 0; i < outputLayer.Length; i++)
        {
            outputDeltas[i] = CalculateDerivativeCost(outputLayer[i], pair.Outputs[i]);
        }

        // Every activation layer except the final (output) layer feeds the backward pass.
        var inputActivationLayers = activations.Take(activations.Length - 1).ToArray();
        PropagateBackwards(inputActivationLayers, Model, outputDeltas);
    });

    ApplyLayerDeltas(batch);
}
/// <summary>
/// Scores the model on the test set as classification accuracy: the fraction of pairs
/// whose argmax of the predicted outputs matches the argmax of the expected outputs.
/// </summary>
/// <returns>
/// Accuracy in [0, 1]. Returns 0 for an empty test set (the original expression
/// <c>(double)correct / (correct + incorrect)</c> would yield NaN via 0.0 / 0).
/// </returns>
public double GetModelScore()
{
    var correct = 0;
    var incorrect = 0;

    Parallel.ForEach(testInputOutputPairs, pair =>
    {
        var outputs = executer.Execute(learner.Model, pair.Inputs).Last();

        // Argmax of predicted vs. expected activations decides the classification.
        var outputLabel = Array.IndexOf(outputs, outputs.Max());
        var expectedLabel = Array.IndexOf(pair.Outputs, pair.Outputs.Max());

        // Interlocked: counters are shared across the parallel loop's worker threads.
        if (outputLabel == expectedLabel)
        {
            Interlocked.Increment(ref correct);
        }
        else
        {
            Interlocked.Increment(ref incorrect);
        }
    });

    var total = correct + incorrect;

    // Guard against an empty test set: 0.0 / 0 is NaN, which would poison any
    // downstream comparison or logging of the score.
    return total == 0 ? 0.0 : (double)correct / total;
}