/// <summary>
/// Trains a backpropagation network with the given hyper-parameters, scores it
/// against the verification set, and persists one <see cref="TicTacToeResult"/>
/// row. Configurations already present in the database are skipped.
/// </summary>
/// <param name="inputData">Training pairs fed to the network.</param>
/// <param name="verfData">Held-out pairs used to count good/bad predictions.</param>
/// <param name="hiddenLayerCount">Number of hidden layers in the network.</param>
/// <param name="neuronCount">Neurons per hidden layer.</param>
/// <param name="actFunc">Activation function instance; its type name is stored as the identifier.</param>
/// <param name="learnRate">Backpropagation learning rate.</param>
/// <param name="momentum">Backpropagation momentum.</param>
/// <param name="batchSize">Training batch size.</param>
/// <param name="maxEpochs">Exact number of training iterations to run.</param>
private void CanItLearnRulesWith(IList<IMLDataPair> inputData, IList<IMLDataPair> verfData, int hiddenLayerCount, int neuronCount, IActivationFunction actFunc, double learnRate, double momentum, int batchSize, int maxEpochs)
{
    // DbModel is used as an EF context (Any/Add/SaveChanges) and is IDisposable;
    // dispose it deterministically instead of leaking the connection.
    using (var model = new DbModel())
    {
        var funcName = actFunc.GetType().Name;
        var tdCount = inputData.Count; // IList<T> exposes Count directly; no LINQ Count() needed

        // Skip configurations that have already been evaluated and stored.
        if (model.TicTacToeResult.Any(r =>
                r.HiddenLayerCount == hiddenLayerCount &&
                r.NeuronPerLayercount == neuronCount &&
                r.ActivationFunction == funcName &&
                r.LearningRate == learnRate &&
                r.BatchSize == batchSize &&
                r.Momentum == momentum &&
                r.Name == Name &&
                r.Epochs == maxEpochs &&
                r.TrainingDataCount == tdCount))
        {
            return;
        }

        var nn = CreateNetwork(inputData, hiddenLayerCount, neuronCount, actFunc);
        var train = new Backpropagation(nn, new BasicMLDataSet(inputData), learnRate, momentum)
        {
            BatchSize = batchSize,
        };

        // Run exactly maxEpochs training iterations.
        // (The previous do/while started epoch at 1 and exited at epoch == maxEpochs,
        // so it performed only maxEpochs - 1 iterations while still recording
        // maxEpochs in the result row.)
        for (int epoch = 0; epoch < maxEpochs; epoch++)
        {
            train.Iteration();
        }

        // A verification pair counts as "good" when all 9 rounded outputs
        // match the rounded ideal values.
        int good = verfData.Count(verf =>
        {
            var output = nn.Compute(verf.Input);
            return Enumerable.Range(0, 9).All(i => Math.Round(output[i]) == Math.Round(verf.Ideal[i]));
        });

        // Derive "bad" from the set actually evaluated here, not from the external
        // VerfDataCount property, so Good + Bad always equals verfData.Count even
        // if that property and this argument ever diverge.
        int bad = verfData.Count - good;

        var result = new TicTacToeResult
        {
            HiddenLayerCount = hiddenLayerCount,
            NeuronPerLayercount = neuronCount,
            ActivationFunction = funcName,
            Bad = bad,
            Good = good,
            TrainingDataCount = tdCount,
            Momentum = momentum,
            LearningRate = learnRate,
            BatchSize = batchSize,
            Epochs = maxEpochs, // matches both the iterations run and the dedup check above
            Error = train.Error,
            Name = Name,
        };

        model.TicTacToeResult.Add(result);
        model.SaveChanges();
    }
}