Example No. 1
            private protected AveragedTrainStateBase(IChannel ch, int numFeatures, LinearModelParameters predictor, AveragedLinearTrainer<TTransformer, TModel> parent)
                : base(ch, numFeatures, predictor, parent)
            {
                // Do the other initializations by setting the setters as if the user had set them.
                // Initialize the averaged weights if needed (i.e., do what happens when Averaged is set).
                Averaged = parent.AveragedLinearTrainerOptions.Averaged;
                if (Averaged)
                {
                    if (parent.AveragedLinearTrainerOptions.AveragedTolerance > 0)
                    {
                        VBufferUtils.Densify(ref Weights);
                    }
                    Weights.CopyTo(ref TotalWeights);
                }
                else
                {
                    // It is definitely advantageous to keep weights dense if we aren't adding them
                    // to another vector with each update.
                    VBufferUtils.Densify(ref Weights);
                }
                _resetWeightsAfterXExamples = parent.AveragedLinearTrainerOptions.ResetWeightsAfterXExamples ?? 0;
                _args = parent.AveragedLinearTrainerOptions;
                _loss = parent.LossFunction;

                Gain = 1;
            }
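
For context, the averaging this constructor sets up follows the averaged-perceptron idea: after each weight update, the current weights are folded into a running sum (TotalWeights), and the final model reports that sum divided by the number of updates, which tends to generalize better than the last iterate alone. Below is a minimal standalone sketch of that idea; the class AveragedWeightsSketch and its members are hypothetical names for illustration, not ML.NET's actual update code.

    using System;

    class AveragedWeightsSketch
    {
        private readonly double[] _weights;      // current weights, kept dense as in the constructor above
        private readonly double[] _totalWeights; // running sum, playing the role of TotalWeights
        private long _numUpdates;                // how many times the sum has been taken

        public AveragedWeightsSketch(int numFeatures)
        {
            _weights = new double[numFeatures];
            _totalWeights = new double[numFeatures];
        }

        // After each perceptron-style update to _weights, fold them into the running sum.
        public void AccumulateAfterUpdate()
        {
            for (int i = 0; i < _weights.Length; i++)
                _totalWeights[i] += _weights[i];
            _numUpdates++;
        }

        // The averaged model: the running sum divided by the number of updates.
        public double[] AveragedWeights()
        {
            var avg = new double[_totalWeights.Length];
            for (int i = 0; i < avg.Length; i++)
                avg[i] = _totalWeights[i] / Math.Max(1, _numUpdates);
            return avg;
        }
    }

In use, AccumulateAfterUpdate() would be called once per training update, and AveragedWeights() read once training finishes.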
Example No. 2
        /// <summary>
        /// A small helper for comparing a loss's computations to expected values.
        /// </summary>
        /// <param name="lossFunc">The training loss.</param>
        /// <param name="label">The ideal labeled output.</param>
        /// <param name="output">The actual output.</param>
        /// <param name="expectedLoss">The expected value of this loss, given
        /// <c>label</c> and <c>output</c></param>
        /// <param name="expectedUpdate">The expected value of the update
        /// step, given <c>label</c> and <c>output</c></param>
        /// <param name="differentiable">Whether the loss function is differentiable
        /// w.r.t. the output in the vicinity of the output value</param>
        private void TestHelper(IScalarLoss lossFunc, double label, double output, double expectedLoss, double expectedUpdate, bool differentiable = true)
        {
            double loss = lossFunc.Loss((float)output, (float)label);
            float derivative = lossFunc.Derivative((float)output, (float)label);

            Assert.Equal(expectedLoss, loss, 5);
            Assert.Equal(expectedUpdate, -derivative, 5);

            if (differentiable)
            {
                // In principle, the update should be the negative of the first derivative of the loss.
                // Use a simple finite difference method to see if it's in the right ballpark.
                float almostOutput = Math.Max((float)output * (1 + _epsilon), (float)output + _epsilon);
                double almostLoss = lossFunc.Loss(almostOutput, (float)label);
                Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 1);
            }
        }
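
The finite-difference block above relies on the approximation dL/do ≈ (L(o + ε) − L(o)) / ε for a differentiable loss L; the perturbation is taken as max(o·(1 + ε), o + ε) so it stays meaningful for both large and near-zero outputs. Here is a minimal standalone sketch of the same check, assuming a squared-error loss; FiniteDifferenceSketch and its members are hypothetical names for illustration, not the ML.NET test API.

    using System;

    class FiniteDifferenceSketch
    {
        const float Eps = 1e-3f; // plays the role of _epsilon in the test above

        // Stand-in loss: L(o, y) = (o - y)^2, with analytic derivative dL/do = 2(o - y).
        static double Loss(float output, float label) => (output - label) * (output - label);
        static float Derivative(float output, float label) => 2f * (output - label);

        static void Main()
        {
            float label = 1f, output = 2.5f;

            double loss = Loss(output, label);
            float derivative = Derivative(output, label);

            // Same perturbation rule as the test: scale for large outputs, shift for small ones.
            float almostOutput = Math.Max(output * (1 + Eps), output + Eps);
            double almostLoss = Loss(almostOutput, label);
            double approx = (almostLoss - loss) / (almostOutput - output);

            // Prints analytic = 3, finite-difference ≈ 3.0025: they agree to the loose
            // tolerance the test asserts (one decimal place).
            Console.WriteLine($"analytic = {derivative}, finite-difference = {approx}");
        }
    }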