Example #1
        /// <summary>
        ///     Performs gradient descent to optimize theta parameters.
        /// </summary>
        /// <param name="theta">Initial Theta (Zeros)</param>
        /// <param name="x">Training set</param>
        /// <param name="y">Training labels</param>
        /// <param name="maxIterations">Maximum number of iterations to run gradient descent</param>
        /// <param name="learningRateAlpha">The learning rate (Alpha)</param>
        /// <param name="costFunction">Cost function to use for gradient descent</param>
        /// <param name="lambda">The regularization constant to apply</param>
        /// <param name="regularizer">The regularization function to apply</param>
        /// <returns>A tuple of the best (lowest) cost found and the corresponding theta vector.</returns>
        public static Tuple<double, Vector> Run(
            Vector theta, 
            Matrix x, 
            Vector y, 
            int maxIterations, 
            double learningRateAlpha, 
            ICostFunction costFunction, 
            double lambda, 
            IRegularizer regularizer)
        {
            var bestTheta = theta.Copy();
            var bestCost = double.PositiveInfinity;

            double currentCost = 0;
            var currentGradient = theta.Copy();

            for (var i = 0; i < maxIterations; i++)
            {
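                // Evaluate the cost and gradient at the current best theta.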
                currentCost = costFunction.ComputeCost(bestTheta, x, y, lambda, regularizer);
                currentGradient = costFunction.ComputeGradient(bestTheta, x, y, lambda, regularizer);

                if (currentCost < bestCost)
                {
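                    // Cost improved: take a gradient step from the current best theta.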
                    bestTheta = bestTheta - learningRateAlpha * currentGradient;
                    bestCost = currentCost;
                }
                else
                {
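                    // No improvement: shrink the learning rate by 1% and try again.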
                    learningRateAlpha = learningRateAlpha * 0.99;
                }
            }

            return new Tuple<double, Vector>(bestCost, bestTheta);
        }
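
For illustration, here is a minimal, self-contained sketch of the same step-and-decay scheme applied to a least-squares fit on plain arrays. The GradientDescentSketch class, the ComputeCost, ComputeGradient, and Dot helpers, and the sample data are illustrative stand-ins for ICostFunction and the Matrix/Vector types, not part of the API above.

    using System;

    public static class GradientDescentSketch
    {
        // Stand-in for ICostFunction.ComputeCost: least-squares cost J = (1/2m) * sum((x*theta - y)^2).
        private static double ComputeCost(double[] theta, double[][] x, double[] y)
        {
            double sum = 0;
            for (var i = 0; i < x.Length; i++)
            {
                var residual = Dot(x[i], theta) - y[i];
                sum += residual * residual;
            }
            return sum / (2.0 * x.Length);
        }

        // Stand-in for ICostFunction.ComputeGradient: (1/m) * x^T * (x*theta - y).
        private static double[] ComputeGradient(double[] theta, double[][] x, double[] y)
        {
            var gradient = new double[theta.Length];
            for (var i = 0; i < x.Length; i++)
            {
                var residual = Dot(x[i], theta) - y[i];
                for (var j = 0; j < theta.Length; j++)
                    gradient[j] += residual * x[i][j] / x.Length;
            }
            return gradient;
        }

        private static double Dot(double[] a, double[] b)
        {
            double sum = 0;
            for (var i = 0; i < a.Length; i++)
                sum += a[i] * b[i];
            return sum;
        }

        public static void Main()
        {
            // Tiny training set with a bias column: y = 2 * x, so theta should approach [0, 2].
            var x = new[] { new[] { 1.0, 1.0 }, new[] { 1.0, 2.0 }, new[] { 1.0, 3.0 } };
            var y = new[] { 2.0, 4.0, 6.0 };
            var theta = new double[2];
            var alpha = 0.1;
            var bestCost = double.PositiveInfinity;

            for (var i = 0; i < 1000; i++)
            {
                var cost = ComputeCost(theta, x, y);
                if (cost < bestCost)
                {
                    // Cost improved: take a gradient step, as in Run above.
                    var gradient = ComputeGradient(theta, x, y);
                    for (var j = 0; j < theta.Length; j++)
                        theta[j] -= alpha * gradient[j];
                    bestCost = cost;
                }
                else
                {
                    // No improvement: decay the learning rate, as in Run above.
                    alpha *= 0.99;
                }
            }

            Console.WriteLine($"cost={bestCost:F6}, theta=[{theta[0]:F3}, {theta[1]:F3}]");
        }
    }

Decaying alpha only on non-improving iterations lets the routine start with an aggressive step size and back off automatically, without a separate line search.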
Example #2
        /// <summary>
        ///     Update and return the cost.
        /// </summary>
        /// <param name="costFunction">The cost function to optimize.</param>
        /// <param name="properties">Properties for the optimization routine.</param>
        /// <returns>The computed cost.</returns>
        public virtual double UpdateCost(ICostFunction costFunction, OptimizerProperties properties)
        {
            return costFunction.ComputeCost(properties.Theta);
        }
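
As one way this virtual hook might be used, the sketch below derives an optimizer that records the cost at every call. The Optimizer base-class name and the CostTrackingOptimizer type are assumptions for illustration; only the UpdateCost signature is taken from the snippet above.

    using System.Collections.Generic;

    // Hypothetical subclass; assumes the method above lives in a base class named Optimizer.
    public class CostTrackingOptimizer : Optimizer
    {
        public List<double> CostHistory { get; } = new List<double>();

        public override double UpdateCost(ICostFunction costFunction, OptimizerProperties properties)
        {
            // Delegate to the base implementation, then record the cost for later inspection.
            var cost = base.UpdateCost(costFunction, properties);
            CostHistory.Add(cost);
            return cost;
        }
    }

Because UpdateCost is virtual, a subclass can layer in logging or convergence tracking without touching the rest of the optimization routine.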