/// <summary>
/// Runs a simple epoch-based training loop: randomizes <c>Net</c>, then for up to
/// <c>EpochsCount</c> epochs activates the net on every training pair, accumulates the
/// mean squared output error, and lets the net adjust toward the expected output.
/// Stops early once the epoch error drops to <c>TolerableError</c> or below.
/// </summary>
/// <returns>
/// A populated <c>NeuralTrainResult</c> (timings, epoch count, error history, final error),
/// or <c>null</c> when there is no net or no training data to work with.
/// </returns>
public NeuralTrainResult SimpleTrain()
{
    // No net or no data (null OR empty) — nothing to train. The empty-array check also
    // prevents a divide-by-zero (NaN) in the per-epoch error normalization below.
    if (DataSets == null || Net == null || DataSets.Length == 0)
    {
        return null;
    }

    Net.Randomize();

    var result = new NeuralTrainResult
    {
        EpochCount = EpochsCount,
        ErrorWriteCycle = ErrorWriteCycle,
        Start = DateTime.UtcNow,
        TolerableError = TolerableError,
        TrainSets = DataSets,
        TargetNet = Net,
        ErrorHistory = new List<double>(),
        EpochFinished = 0,
        LearnRate = LearnRate
    };

    var totalError = 0d;
    for (var counter = EpochsCount; counter > 0; counter--)
    {
        result.EpochFinished++;
        totalError = 0d;

        for (var i = 0; i < DataSets.Length; i++)
        {
            var input = DataSets[i].Item1;
            var correctOut = DataSets[i].Item2;
            var currentOut = Net.Activate(input);

            // Sum the squared per-output error directly. (The original allocated a
            // scratch delta array per sample per epoch that was never used elsewhere.)
            for (var j = 0; j < currentOut.Length; j++)
            {
                var delta = correctOut[j] - currentOut[j];
                totalError += delta * delta;
            }

            // NOTE(review): Adjust receives the expected output and the learn rate;
            // the net presumably computes its own deltas internally — confirm.
            Net.Adjust(correctOut, LearnRate);
        }

        // Mean squared error over the epoch (DataSets.Length > 0 guaranteed above).
        totalError /= DataSets.Length;

        if (counter % ErrorWriteCycle == 0)
        {
            // Record history on every write cycle regardless of the log callback.
            // (Previously the history was silently empty unless Log was attached.)
            result.ErrorHistory.Add(totalError);
            Log?.Invoke(EpochsCount, EpochsCount - counter, TolerableError, LearnRate, totalError);
        }

        if (totalError <= TolerableError)
        {
            break; // converged early
        }
    }

    result.ResultError = totalError;
    result.Stop = DateTime.UtcNow;
    return result;
}