public void BackpropError_LearnedTargetValue(double targetValue)
{
    // A single synthetic rule addressed by a random one-element key.
    var ruleKey = new[] { Guid.NewGuid() };
    var inference = _ruleset.GetInferenceForRule(Array.Empty<double>(), ruleKey);

    // Repeatedly feed the residual error back into the rule; its output
    // should converge toward the requested target value.
    for (var step = 0; step < 100; step++)
    {
        _ruleset.BackpropError(Array.Empty<double>(), ruleKey, targetValue - inference, 1.0);
        inference = _ruleset.GetInferenceForRule(Array.Empty<double>(), ruleKey);
    }

    Assert.AreEqual(targetValue, inference, 1e-2);
}
/// <summary>
/// Computes the fuzzy inference for <paramref name="inputX"/> and, when
/// <paramref name="expectedValue"/> is supplied, performs one online learning
/// step: rule backprop, error-statistics update, per-layer term adaptation /
/// creation, and term/rule elimination.
/// </summary>
/// <param name="inputX">One crisp input value per term layer.</param>
/// <param name="expectedValue">Optional supervision target; when null the call is inference-only.</param>
/// <returns>The inferred output value.</returns>
/// <exception cref="ArgumentNullException"><paramref name="inputX"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="inputX"/> length differs from the number of term layers.</exception>
public double Inference(IReadOnlyList<double> inputX, double?expectedValue = null)
{
    if (inputX is null)
    {
        throw new ArgumentNullException(nameof(inputX));
    }

    if (inputX.Count != _termLayers.Length)
    {
        // BUG FIX: a size mismatch previously threw ArgumentNullException even
        // though the argument was present; ArgumentException is the correct
        // (and base) type, so existing catch blocks for it still work.
        throw new ArgumentException(
            $"Expected {_termLayers.Length} input values but got {inputX.Count}.", nameof(inputX));
    }

    var learningData = new List <(IReadOnlyList <Guid> TermsCombo, double Firing)>();
    var retval = RecursiveComputeOutput(
        inputX, new Guid[inputX.Count], new double[inputX.Count], 1.0, 0, learningData, expectedValue);

    if (!expectedValue.HasValue)
    {
        return(retval);
    }

    var error = expectedValue.Value - retval;

    // Distribute the output error over every rule that fired for this input.
    foreach (var(termsCombo, generalFiring) in learningData)
    {
        _ruleset.BackpropError(inputX, termsCombo, error, generalFiring);
    }

    // Exponential moving estimates over |error|. NOTE: the variance-like
    // update must read _generalErrorAverage BEFORE the mean is updated on
    // the following line — do not reorder these two statements.
    _generalErrorStd = (1 - _smoothingAverageRate)
        * (_generalErrorStd
           + _smoothingAverageRate
             * (_generalErrorAverage - Math.Abs(error))
             * (_generalErrorAverage - Math.Abs(error)));
    _generalErrorAverage -= _smoothingAverageRate * (_generalErrorAverage - Math.Abs(error));

    // Adapt each layer's terms toward this sample and possibly create new terms.
    for (var layer = 0; layer < _termLayers.Length; layer++)
    {
        _termLayers[layer].BackpropError(inputX[layer], Math.Abs(error));
        _termLayers[layer].CreationStep(inputX[layer], _generalErrorAverage, _generalErrorStd);
    }

    // Prune under-performing terms and drop every rule that referenced them.
    for (var layer = 0; layer < _termLayers.Length; layer++)
    {
        if (_termLayers[layer].TryEliminateTerm(out var eliminatingTerm, _generalErrorAverage, _generalErrorStd))
        {
            _ruleset.EliminateRules(layer, eliminatingTerm);
        }
    }

    return(retval);
}