Example #1
        public void Minimize(IHypothesis hypothesis, Vector[] inputs, double[] outputs)
        {
            if (iterationCount < 1) return;

            // first iteration: plain step with the base learning rate (no previous gradient yet)
            var prevGrad  = hypothesis.BatchGradient(inputs, outputs);
            hypothesis.Weights = hypothesis.Weights - learningRate * prevGrad;

            // remaining iterations: adapt the step size to the gradient change
            for (var iteration = 1; iteration < iterationCount; ++iteration)
            {
                // compute the gradient of the cost (loss) function
                var grad = hypothesis.BatchGradient(inputs, outputs);
                // Euclidean norm of the difference between the last two gradients
                var dGrad = grad - prevGrad;
                var dGradNorm = dGrad.EuclideanNorm;
                // adaptive learning rate: scale the base rate by the gradient change
                var rate = learningRate * dGradNorm;
                // take the gradient step with the adapted rate
                hypothesis.Weights = hypothesis.Weights - rate * grad;

                // keep this gradient so the next iteration can compute its delta
                prevGrad = grad;
            }
        }
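
Both examples call into the same hypothesis abstraction. Below is a minimal sketch of the contract they appear to assume: the names IHypothesis, Vector, Weights, BatchGradient, and Gradient come from the snippets themselves, but the exact signatures and the minimizer fields (learningRate, iterationCount, refreshRate) are inferred from usage and are assumptions, not a confirmed API.

        // Sketch of the assumed contract; member signatures are inferred from usage.
        public interface IHypothesis
        {
            // Current weight vector; read, stepped, and written back by Minimize.
            Vector Weights { get; set; }
            // Gradient of the cost over the whole training set (used in Example #1).
            Vector BatchGradient(Vector[] inputs, double[] outputs);
            // Gradient of the cost for a single training example (used in Example #2).
            Vector Gradient(Vector input, double output);
        }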
Example #2
        public void Minimize(IHypothesis hypothesis, Vector[] inputs, double[] outputs)
        {
            if (inputs.Length == 0)
                return;

            var inputCount   = inputs.Length;
            var inputSize    = inputs[0].Size;

            //  clamp the refresh rate so small input sets still trigger a weight update
            var currentRefreshRate = System.Math.Min(refreshRate, inputCount);
            //  averaging multiplier: divide the accumulated gradient by the mini-batch size
            var contribution = 1.0d / currentRefreshRate;

            for (var iteration = 0; iteration < iterationCount; ++iteration)
            {
                var gradAcc = new Vector(inputSize);

                for (var i = 0; i < inputCount; ++i)
                {
                    // accumulate the per-example gradient of the cost (loss) function
                    gradAcc += hypothesis.Gradient(inputs[i], outputs[i]);

                    // step once a full mini-batch has been accumulated
                    if (((i + 1) % currentRefreshRate) == 0)
                    {
                        // take the gradient step with the averaged accumulator
                        hypothesis.Weights = hypothesis.Weights - learningRate * contribution * gradAcc;
                        // reset the accumulator for the next mini-batch
                        gradAcc.Transform(x => 0.0d);
                    }
                }
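                // flush any remainder that did not fill a complete mini-batch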
                hypothesis.Weights = hypothesis.Weights - learningRate * contribution * gradAcc;
            }
        }
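
A hypothetical call site for either minimizer is sketched below. LinearHypothesis, the StochasticGradientDescent class name, and its constructor parameters are illustrative assumptions; only the Minimize signature and the Vector(int) constructor come from the examples above.

        // Hypothetical usage; type names and constructors are assumptions.
        var inputs  = new[] { new Vector(2), new Vector(2) };   // two 2-dimensional samples
        var outputs = new[] { 1.0d, 0.0d };                     // target values

        IHypothesis hypothesis = new LinearHypothesis(inputSize: 2);  // assumed implementation
        var sgd = new StochasticGradientDescent(learningRate: 0.01d, iterationCount: 100, refreshRate: 16);
        sgd.Minimize(hypothesis, inputs, outputs);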