/// <summary>
/// Score a candidate by how far the mean of its long-term memory drifts from
/// the target value of 10. A perfect score is 0; larger values are worse.
/// </summary>
/// <param name="algo">The algorithm whose long-term memory is evaluated.</param>
/// <returns>The absolute distance between the memory mean and 10.</returns>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    double mean = algo.LongTermMemory.Sum() / algo.LongTermMemory.Length;
    return Math.Abs(10 - mean);
}
/// <summary>
/// Construct a Nelder-Mead trainer.
/// </summary>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
/// <param name="stepValue">The initial step size applied to every dimension.</param>
public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double stepValue)
{
    _algorithm = theAlgorithm;
    _score = theScore;

    // Work from private copies of the weights so the simplex can be perturbed freely.
    _start = (double[])_algorithm.LongTermMemory.Clone();
    _trainedWeights = (double[])_algorithm.LongTermMemory.Clone();

    int dimensions = _start.Length;
    _p = new double[dimensions * (dimensions + 1)]; // simplex storage: n+1 vertices of n coordinates each
    _pstar = new double[dimensions];
    _p2Star = new double[dimensions];
    _pbar = new double[dimensions];
    _y = new double[dimensions + 1];
    _nn = dimensions + 1;
    _del = 1.0;
    _rq = 0.000001 * dimensions; // convergence tolerance scales with dimensionality
    _jcount = _konvge = 500;

    // Every dimension starts with the same step size.
    _step = new double[dimensions];
    for (int i = 0; i < _step.Length; i++)
    {
        _step[i] = stepValue;
    }
}
/// <summary>
/// Construct a greedy random trainer.
/// </summary>
/// <param name="theShouldMinimize">True, if we should minimize.</param>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
public TrainGreedyRandom(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
{
    _algorithm = theAlgorithm;
    _score = theScore;
    _shouldMinimize = theShouldMinimize;

    // Seed the last error with the worst possible value so the first
    // iteration always replaces it.
    _lastError = _shouldMinimize
        ? double.PositiveInfinity
        : double.NegativeInfinity;
}
/// <summary>
/// Construct the simulated annealing trainer.
/// </summary>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
/// <param name="theKMax">The max number of iterations.</param>
/// <param name="theStartingTemperature">The starting temperature.</param>
/// <param name="theEndingTemperature">The ending temperature.</param>
public TrainAnneal(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, int theKMax, double theStartingTemperature, double theEndingTemperature)
{
    _algorithm = theAlgorithm;
    _score = theScore;
    _kMax = theKMax;
    _startingTemperature = theStartingTemperature;
    _endingTemperature = theEndingTemperature;

    // Score the starting weights and remember a private copy of them as the
    // best solution seen so far.
    _currentError = _score.CalculateScore(_algorithm);
    _globalBest = (double[])_algorithm.LongTermMemory.Clone();
}
/// <inheritdoc/>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    var regression = (IRegressionAlgorithm)algo;

    // Reset the accumulator, evaluate every training pair, then report the error.
    _errorCalc.Clear();
    foreach (var pair in _trainingData)
    {
        _errorCalc.UpdateError(regression.ComputeRegression(pair.Input), pair.Ideal, 1.0);
    }
    return _errorCalc.Calculate();
}
/// <inheritdoc/>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    var ralgo = (IRegressionAlgorithm)algo;

    // Evaluate the regression over the entire training set, accumulating
    // the error as we go.
    _errorCalc.Clear();
    foreach (var dataPair in _trainingData)
    {
        double[] predicted = ralgo.ComputeRegression(dataPair.Input);
        _errorCalc.UpdateError(predicted, dataPair.Ideal, 1.0);
    }
    return _errorCalc.Calculate();
}
/// <inheritdoc/>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    var classifier = (IClassificationAlgorithm)algo;

    // Count misclassified items; the score is the misclassification rate.
    int total = 0;
    int wrong = 0;
    foreach (var item in _trainingData)
    {
        total++;
        var predicted = classifier.ComputeClassification(item.Input);
        if (predicted != (int)item.Ideal[0])
        {
            wrong++;
        }
    }
    return (double)wrong / total;
}
/// <inheritdoc/>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    var classifier = (IClassificationAlgorithm)algo;

    // The score is the fraction of training samples the classifier gets wrong.
    int sampleCount = 0;
    int incorrect = 0;
    foreach (var sample in _trainingData)
    {
        sampleCount++;
        var actual = classifier.ComputeClassification(sample.Input);
        var expected = (int)sample.Ideal[0];
        if (actual != expected)
        {
            incorrect++;
        }
    }
    return incorrect / (double)sampleCount;
}
/// <summary>
/// Construct a hill climbing trainer.
/// </summary>
/// <param name="theShouldMinimize">True, if we should minimize.</param>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The scoring function.</param>
/// <param name="acceleration">The acceleration for step sizes.</param>
/// <param name="stepSize">The initial step sizes.</param>
public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double acceleration, double stepSize)
{
    _algorithm = theAlgorithm;
    _score = theScore;
    _shouldMinimize = theShouldMinimize;

    // One step size per weight, all starting at the same value.
    int weightCount = theAlgorithm.LongTermMemory.Length;
    _stepSize = new double[weightCount];
    for (int i = 0; i < weightCount; i++)
    {
        _stepSize[i] = stepSize;
    }

    // Candidate multipliers tried each iteration, from strong shrink to strong growth.
    // NOTE(review): assumes _candidate is allocated (length >= 5) at its field
    // declaration — not visible in this chunk, confirm.
    _candidate[0] = -acceleration;
    _candidate[1] = -1 / acceleration;
    _candidate[2] = 0;
    _candidate[3] = 1 / acceleration;
    _candidate[4] = acceleration;

    // Seed the last error with the worst possible value so the first
    // iteration always replaces it.
    _lastError = _shouldMinimize ? double.PositiveInfinity : double.NegativeInfinity;
}
/// <summary>
/// Construct a hill climbing trainer.
/// </summary>
/// <param name="theShouldMinimize">True, if we should minimize.</param>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The scoring function.</param>
/// <param name="acceleration">The acceleration for step sizes.</param>
/// <param name="stepSize">The initial step sizes.</param>
public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double acceleration, double stepSize)
{
    _algorithm = theAlgorithm;
    _score = theScore;
    _shouldMinimize = theShouldMinimize;

    // Give every weight the same initial step size.
    _stepSize = new double[theAlgorithm.LongTermMemory.Length];
    for (int index = 0; index < _stepSize.Length; index++)
    {
        _stepSize[index] = stepSize;
    }

    // Step multipliers evaluated each iteration: negative/positive moves
    // at both the accelerated and decelerated magnitude, plus "no move".
    // NOTE(review): _candidate is presumed allocated at its field declaration — confirm.
    _candidate[0] = -acceleration;
    _candidate[1] = -1 / acceleration;
    _candidate[2] = 0;
    _candidate[3] = 1 / acceleration;
    _candidate[4] = acceleration;

    // Start from the worst possible error so the first iteration resets it.
    _lastError = _shouldMinimize ? double.PositiveInfinity : double.NegativeInfinity;
}
/// <summary>
/// Construct a hill climbing trainer with the default tuning: an
/// acceleration of 1.2 and an initial step size of 1 for every weight.
/// </summary>
/// <param name="theShouldMinimize">True, if we should minimize.</param>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The scoring function.</param>
public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
    : this(theShouldMinimize, theAlgorithm, theScore, 1.2, 1)
{
}
/// <summary>
/// Construct the simulated annealing trainer with default settings:
/// 1000 iterations, cooling from a temperature of 400 down to 0.0001.
/// </summary>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
public TrainAnneal(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
    : this(theAlgorithm, theScore, 1000, 400, 0.0001)
{
}
/// <summary>
/// Score a candidate solution as the absolute difference between the average
/// of the algorithm's long-term memory and the target value 10 (0 is best).
/// </summary>
/// <param name="algo">The algorithm whose long-term memory is evaluated.</param>
/// <returns>How far the memory average is from 10.</returns>
public double CalculateScore(IMachineLearningAlgorithm algo)
{
    double total = algo.LongTermMemory.Sum();
    double average = total / algo.LongTermMemory.Length;
    return Math.Abs(10 - average);
}
/// <summary>
/// Construct a Nelder-Mead trainer.
/// </summary>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
/// <param name="stepValue">The initial step value used for every dimension.</param>
public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double stepValue)
{
    _algorithm = theAlgorithm;
    _score = theScore;

    // Snapshot the current weights; the trainer mutates copies, not the originals.
    _start = (double[])_algorithm.LongTermMemory.Clone();
    _trainedWeights = (double[])_algorithm.LongTermMemory.Clone();

    int n = _start.Length;
    _nn = n + 1;                    // simplex vertex count
    _p = new double[n * (n + 1)];   // flattened simplex: n+1 points of n coordinates
    _pstar = new double[n];
    _p2Star = new double[n];
    _pbar = new double[n];
    _y = new double[n + 1];
    _del = 1.0;
    _rq = 0.000001 * n;             // convergence threshold grows with dimension count
    _jcount = _konvge = 500;

    // Uniform initial step in every dimension.
    _step = new double[n];
    for (int i = 0; i < n; i++)
    {
        _step[i] = stepValue;
    }
}
/// <summary>
/// Construct a Nelder-Mead trainer using the default step value of 100.
/// </summary>
/// <param name="theAlgorithm">The algorithm to optimize.</param>
/// <param name="theScore">The score function.</param>
public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
    : this(theAlgorithm, theScore, 100)
{
}