Example #1
        /// <summary>
        /// Calculate an approximate gradient of a given function, at a number of discrete sample points.
        /// </summary>
        /// <param name="paramSamplingInfo">Sampling metadata.</param>
        /// <param name="yArr">The function output/result at a number of discrete sample points.</param>
        /// <param name="gradientArr">An array to store the calculated gradients within.</param>
        public static void CalcGradients(
            ParamSamplingInfo paramSamplingInfo,
            double[] yArr,
            double[] gradientArr)
        {
            // Notes.
            // The gradient at a sample point is approximated by taking the gradient of the line between the two
            // sample points either side of that point. For the first and last sample points we take the gradient
            // of the line between the sample point and its single adjacent sample point (as an alternative we
            // could sample an additional point at each end that doesn't get used for the function regression
            // evaluation).
            //
            // This approach is rather crude, but fast. A better approach might be to fit a polynomial regression
            // to the sample point and its two nearest adjacent samples, and then take the gradient of the fitted
            // polynomial at the required point; obviously that would require more computational work, and so may
            // not be beneficial in the overall context of an evolutionary algorithm.
            //
            // Furthermore, the difference between this gradient approximation and the true gradient decreases
            // with increasing sample density, therefore this is a reasonable approach *if* the sample density is
            // sufficiently high.

            // Handle the end points as special cases.
            // First point.
            double[] xArr = paramSamplingInfo.XArr;
            gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

            // Intermediate points.
            int width = Vector<double>.Count;
            int i     = 1;

            for (; i < xArr.Length - width - 1; i += width)
            {
                // Calc a block of x deltas.
                var vecLeft   = new Vector<double>(xArr, i - 1);
                var vecRight  = new Vector<double>(xArr, i + 1);
                var xVecDelta = vecRight - vecLeft;

                // Calc a block of y deltas.
                vecLeft  = new Vector<double>(yArr, i - 1);
                vecRight = new Vector<double>(yArr, i + 1);
                var yVecDelta = vecRight - vecLeft;

                // Divide the y deltas by the x deltas to obtain the gradients.
                var gradientVec = yVecDelta / xVecDelta;

                gradientVec.CopyTo(gradientArr, i);
            }

            // Calc gradients for remaining intermediate points (if any).
            for (; i < xArr.Length - 1; i++)
            {
                gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i + 1], yArr[i + 1]);
            }

            // Last point.
            gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i], yArr[i]);
        }
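
The CalcGradient helper called above is not included in this example. A minimal sketch consistent with its call sites, assuming it is nothing more than a two-point finite difference, would be:

        /// <summary>
        /// Hypothetical sketch (not shown in this example): gradient of the straight
        /// line through the points (x1, y1) and (x2, y2).
        /// </summary>
        private static double CalcGradient(double x1, double y1, double x2, double y2)
        {
            return (y2 - y1) / (x2 - x1);
        }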
        private static BlackBoxProbe CreateBlackBoxProbe(
            Func<double, double> fn,
            ParamSamplingInfo paramSamplingInfo)
        {
            // Determine the mid output value of the function (over the specified sample points) and a scaling
            // factor to apply to the neural network response for it to be able to recreate the function (because
            // the neural net output range is [0,1] when using the logistic function as the neuron activation
            // function).
            FuncRegressionUtils.CalcFunctionMidAndScale(fn, paramSamplingInfo, out double mid, out double scale);

            return new BlackBoxProbe(paramSamplingInfo, mid, scale);
        }
Example #3
        /// <summary>
        /// Probe the given function by taking samples of it at a number of discrete sample points.
        /// </summary>
        /// <param name="fn">The function to probe/sample.</param>
        /// <param name="paramSamplingInfo">Sampling metadata.</param>
        /// <param name="responseArr">An array to store the sample results within.</param>
        public static void Probe(
            Func<double, double> fn,
            ParamSamplingInfo paramSamplingInfo,
            double[] responseArr)
        {
            Debug.Assert(responseArr.Length == paramSamplingInfo.SampleResolution);

            double[] xArr = paramSamplingInfo.XArr;

            for (int i = 0; i < xArr.Length; i++)
            {
                responseArr[i] = fn(xArr[i]);
            }
        }
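
For illustration, Probe might be exercised as follows. The ParamSamplingInfo constructor used here (min, max, sample resolution) is an assumption, inferred from how XArr and SampleResolution are used above:

        // Hypothetical usage sketch; assumes ParamSamplingInfo(min, max, sampleResolution)
        // precomputes an evenly spaced XArr over [min, max].
        var psi = new ParamSamplingInfo(-Math.PI, Math.PI, 100);
        var responseArr = new double[psi.SampleResolution];
        FuncRegressionUtils.Probe(Math.Sin, psi, responseArr);
        // responseArr[i] now holds Math.Sin(psi.XArr[i]) for each sample point.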
        /// <summary>
        /// Construct a new instance.
        /// </summary>
        /// <param name="fn">The target function.</param>
        /// <param name="paramSamplingInfo">Sampling (defines the x range and sampling density).</param>
        /// <param name="gradientMseWeight">The fitness weighting to assign to the gradient mean squared error (MSE) score.</param>
        public FuncRegressionEvaluationScheme(
            Func<double, double> fn,
            ParamSamplingInfo paramSamplingInfo,
            double gradientMseWeight)
        {
            _paramSamplingInfo = paramSamplingInfo;
            _gradientMseWeight = gradientMseWeight;

            // Alloc arrays.
            int sampleCount = _paramSamplingInfo.SampleResolution;

            _yArrTarget        = new double[sampleCount];
            _gradientArrTarget = new double[sampleCount];

            // Predetermine target responses.
            FuncRegressionUtils.Probe(fn, paramSamplingInfo, _yArrTarget);
            FuncRegressionUtils.CalcGradients(paramSamplingInfo, _yArrTarget, _gradientArrTarget);

            // Create blackbox probe.
            _blackBoxProbe = CreateBlackBoxProbe(fn, paramSamplingInfo);
        }
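
A hypothetical construction call, again assuming a ParamSamplingInfo(min, max, sampleResolution) constructor:

        // Hypothetical usage sketch: sample sin(x) over [-PI, PI] at 100 points,
        // weighting the gradient MSE and y MSE equally (the evaluator below sets
        // the y MSE weight to 1.0 - gradientMseWeight).
        var scheme = new FuncRegressionEvaluationScheme(
            x => Math.Sin(x),
            new ParamSamplingInfo(-Math.PI, Math.PI, 100),
            gradientMseWeight: 0.5);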
        /// <summary>
        /// Construct a new instance.
        /// </summary>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <param name="gradientMseWeight">Fitness weighting to apply to the gradient fitness score.</param>
        /// <param name="yArrTarget">Array of target y values (function output values).</param>
        /// <param name="gradientArrTarget">Array of target gradient values.</param>
        /// <param name="blackBoxProbe">Black box probe. For obtaining the y value response array from an instance of <see cref="IBlackBox{T}"/>.</param>
        internal FuncRegressionEvaluator(
            ParamSamplingInfo paramSamplingInfo,
            double gradientMseWeight,
            double[] yArrTarget,
            double[] gradientArrTarget,
            IBlackBoxProbe blackBoxProbe)
        {
            _paramSamplingInfo = paramSamplingInfo;
            _gradientMseWeight = gradientMseWeight;
            _yMseWeight        = 1.0 - gradientMseWeight;

            _yArrTarget        = yArrTarget;
            _gradientArrTarget = gradientArrTarget;

            // Alloc working arrays for receiving black box outputs.
            int sampleCount = _paramSamplingInfo.SampleResolution;

            _yArr        = new double[sampleCount];
            _gradientArr = new double[sampleCount];

            _blackBoxProbe = blackBoxProbe;
        }
Example #6
        /// <summary>
        /// Determine the mid output value of the function (over the specified sample points) and a scaling factor
        /// to apply to the neural network response for it to be able to recreate the function (because the neural
        /// net output range is [0,1] when using the logistic function as the neuron activation function).
        /// </summary>
        /// <param name="fn">The function to be sampled.</param>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <param name="mid">Returns the mid value of the function (halfway between min and max).</param>
        /// <param name="scale">Returns the scale of the function.</param>
        public static void CalcFunctionMidAndScale(
            Func<double, double> fn,
            ParamSamplingInfo paramSamplingInfo,
            out double mid, out double scale)
        {
            double[] xArr = paramSamplingInfo.XArr;
            double   min  = fn(xArr[0]);
            double   max  = min;

            for (int i = 1; i < xArr.Length; i++)
            {
                double y = fn(xArr[i]);
                min = Math.Min(y, min);
                max = Math.Max(y, max);
            }

            // Note: 0.8 is the usable portion of the logistic function's output range (i.e. [0.1, 0.9]), and 0.5
            // is the logistic function's output value when its input is zero; the function's range is therefore
            // scaled to fit within that 0.8 band, centred on the mid value.
            double range = max - min;

            scale = range / 0.8;
            mid   = (min + max) / 2.0;
        }
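
A worked example of the arithmetic above: for a function whose sampled min is -1 and max is 3, range = 4, so scale = 4 / 0.8 = 5 and mid = (min + max) / 2 = 1. A logistic network output o in the usable band [0.1, 0.9] can then be mapped back onto the function's range; a sketch of that inverse mapping (an assumption, consistent with mid and scale being passed to BlackBoxProbe as offset and scale in Example #1):

        // Sketch of the presumed inverse mapping: centre the logistic output on
        // zero, rescale, then shift by mid. With mid = 1 and scale = 5:
        //   o = 0.1 maps to -1 (the sampled min); o = 0.9 maps to 3 (the max).
        static double MapResponse(double o, double mid, double scale)
            => mid + ((o - 0.5) * scale);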
        public static void CalcGradients(
            ParamSamplingInfo paramSamplingInfo,
            double[] yArr,
            double[] gradientArr)
        {
            // TODO: Can this be vectorized? (See Example #1 above for a vectorized variant of this method.)

            // Handle the end points as special cases.
            // First point.
            double[] xArr = paramSamplingInfo.XArr;
            gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

            // Intermediate points.
            for (int i = 1; i < xArr.Length - 1; i++)
            {
                gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i + 1], yArr[i + 1]);
            }

            // Last point.
            int lastIdx = xArr.Length - 1;

            gradientArr[lastIdx] = CalcGradient(xArr[lastIdx - 1], yArr[lastIdx - 1], xArr[lastIdx], yArr[lastIdx]);
        }
        /// <summary>
        /// Construct a new instance.
        /// </summary>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <param name="offset">Offset to apply to each neural network output response.</param>
        /// <param name="scale">Scaling factor to apply to each neural network output response.</param>
        public BlackBoxProbe(ParamSamplingInfo paramSamplingInfo, double offset, double scale)
        {
            _paramSamplingInfo = paramSamplingInfo;
            _offset            = offset;
            _scale             = scale;
        }
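
The probe's sampling logic is not shown in this example; presumably each raw black box output is transformed with the stored offset and scale before being written to the response array, along these lines (a sketch, not the library's actual implementation):

        // Hypothetical sketch of the per-sample response transform implied by the
        // constructor above (offset is the function's mid value, per Example #1).
        private double MapOutput(double rawOutput)
        {
            return ((rawOutput - 0.5) * _scale) + _offset;
        }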