Code Example #1
        /// <summary>
        /// Initialize the experiment with some optional XML configuration data.
        /// </summary>
        public void Initialize(string name, XmlElement xmlConfig)
        {
            _name                    = name;
            _populationSize          = XmlUtils.GetValueAsInt(xmlConfig, "PopulationSize");
            _specieCount             = XmlUtils.GetValueAsInt(xmlConfig, "SpecieCount");
            _activationScheme        = ExperimentUtils.CreateActivationScheme(xmlConfig, "Activation");
            _complexityRegulationStr = XmlUtils.TryGetValueAsString(xmlConfig, "ComplexityRegulationStrategy");
            _complexityThreshold     = XmlUtils.TryGetValueAsInt(xmlConfig, "ComplexityThreshold");
            _description             = XmlUtils.TryGetValueAsString(xmlConfig, "Description");
            _parallelOptions         = ExperimentUtils.ReadParallelOptions(xmlConfig);

            _eaParams             = new NeatEvolutionAlgorithmParameters();
            _eaParams.SpecieCount = _specieCount;

            _neatGenomeParams = new NeatGenomeParameters();
            _neatGenomeParams.FeedforwardOnly = _activationScheme.AcyclicNetwork;

            // Determine what function to regress.
            string     fnIdStr = XmlUtils.GetValueAsString(xmlConfig, "Function");
            FunctionId fnId    = (FunctionId)Enum.Parse(typeof(FunctionId), fnIdStr);

            _fn = FunctionUtils.GetFunction(fnId);

            // Read parameter sampling scheme settings.
            int    sampleResolution = XmlUtils.GetValueAsInt(xmlConfig, "SampleResolution");
            double sampleMin        = XmlUtils.GetValueAsDouble(xmlConfig, "SampleMin");
            double sampleMax        = XmlUtils.GetValueAsDouble(xmlConfig, "SampleMax");

            _paramSamplingInfo = new ParamSamplingInfo(sampleMin, sampleMax, sampleResolution);
        }
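The Initialize method above reads each setting by element name, so a compatible XML fragment can be inferred directly from those calls. The following usage sketch is illustrative only: the element values, the inner structure of the Activation element (Scheme/Iters), and the experiment variable standing in for whatever class defines Initialize are assumptions, not part of the excerpt.

        // Usage sketch (assumed config layout; element names taken from Initialize above).
        var doc = new System.Xml.XmlDocument();
        doc.LoadXml(@"<Config>
            <PopulationSize>150</PopulationSize>
            <SpecieCount>10</SpecieCount>
            <Activation>
                <Scheme>CyclicFixedIters</Scheme>
                <Iters>3</Iters>
            </Activation>
            <ComplexityRegulationStrategy>Absolute</ComplexityRegulationStrategy>
            <ComplexityThreshold>10</ComplexityThreshold>
            <Description>Function regression experiment.</Description>
            <Function>Sin</Function>
            <SampleResolution>21</SampleResolution>
            <SampleMin>0</SampleMin>
            <SampleMax>6.283185</SampleMax>
        </Config>");

        experiment.Initialize("FunctionRegression", doc.DocumentElement);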
Code Example #2
    private static void ReadEvaluationSchemeConfig(
        JsonElement configElem,
        out Func <double, double> fn,
        out ParamSamplingInfo paramSamplingInfo,
        out double gradientMseWeight)
    {
        // Get the customEvaluationSchemeConfig section.
        if (!configElem.TryGetProperty("customEvaluationSchemeConfig", out JsonElement evalSchemeElem))
        {
            throw new Exception("customEvaluationSchemeConfig not defined.");
        }

        // Read function ID.
        string     functionIdStr = JsonReadMandatoryUtils.ReadStringMandatory(evalSchemeElem, "functionId");
        FunctionId functionId    = (FunctionId)Enum.Parse(typeof(FunctionId), functionIdStr);

        fn = FunctionFactory.GetFunction(functionId);

        // Read sample interval min and max, and sample resolution.
        double sampleIntervalMin = JsonReadMandatoryUtils.ReadDoubleMandatory(evalSchemeElem, "sampleIntervalMin");
        double sampleIntervalMax = JsonReadMandatoryUtils.ReadDoubleMandatory(evalSchemeElem, "sampleIntervalMax");
        int    sampleResolution  = JsonReadMandatoryUtils.ReadIntMandatory(evalSchemeElem, "sampleResolution");

        paramSamplingInfo = new ParamSamplingInfo(sampleIntervalMin, sampleIntervalMax, sampleResolution);

        // Read the weight to apply to the gradientMse readings in the final fitness score.
        // 0 means don't use the gradient measurements, 1 means give them equal weight to the y position readings at each x sample point.
        gradientMseWeight = JsonReadMandatoryUtils.ReadDoubleMandatory(evalSchemeElem, "gradientMseWeight");
    }
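ReadEvaluationSchemeConfig expects a customEvaluationSchemeConfig object containing the five keys read above. The following is a minimal sketch, assuming a System.Text.Json document is the input and that Sin is a valid FunctionId value; neither detail is confirmed by the excerpt, and the sample values are illustrative only.

    // Minimal sketch of a config fragment containing the keys read above.
    string json = @"{
        ""customEvaluationSchemeConfig"": {
            ""functionId"": ""Sin"",
            ""sampleIntervalMin"": 0.0,
            ""sampleIntervalMax"": 6.283185,
            ""sampleResolution"": 100,
            ""gradientMseWeight"": 0.5
        }
    }";

    using JsonDocument doc = JsonDocument.Parse(json);
    JsonElement configElem = doc.RootElement;
    // configElem can now be passed as the first argument to ReadEvaluationSchemeConfig above.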
Code Example #3
    /// <summary>
    /// Calculate an approximate gradient of a given function, at a number of discrete sample points.
    /// </summary>
    /// <param name="paramSamplingInfo">Sampling metadata.</param>
    /// <param name="yArr">The function output/result at a number of discrete sample points.</param>
    /// <param name="gradientArr">An array to store the calculated gradients within.</param>
    public static void CalcGradients(
        ParamSamplingInfo paramSamplingInfo,
        double[] yArr,
        double[] gradientArr)
    {
        // Notes.
        // The gradient at a sample point is approximated by taking the gradient of the line between the two
        // sample points either side of that point. For the first and last sample points we take the gradient
        // of the line between the sample point and its single adjacent sample point (as an alternative we could
        // sample an additional point at each end that doesn't get used for the function regression evaluation).
        //
        // This approach is rather crude, but fast. A better approach might be to do a polynomial regression on
        // the sample point and its nearest two adjacent samples, and then take the gradient of the polynomial
        // regression at the required point; obviously that would require more computational work, and so may
        // not be beneficial in the overall context of an evolutionary algorithm.
        //
        // Furthermore, the difference between this gradient approximation and the true gradient decreases with
        // increasing sample density; therefore this is a reasonable approach *if* the sample density is
        // sufficiently high.

        // Handle the end points as special cases.
        // First point.
        double[] xArr = paramSamplingInfo.XArr;
        gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

        // Intermediate points.
        int width = Vector<double>.Count;
        int i     = 1;

        for (; i < xArr.Length - width - 1; i += width)
        {
            // Calc a block of x deltas.
            var vecLeft   = new Vector<double>(xArr, i - 1);
            var vecRight  = new Vector<double>(xArr, i + 1);
            var xVecDelta = vecRight - vecLeft;

            // Calc a block of y deltas.
            vecLeft  = new Vector<double>(yArr, i - 1);
            vecRight = new Vector<double>(yArr, i + 1);
            var yVecDelta = vecRight - vecLeft;

            // Divide the y's by x's to obtain the gradients.
            var gradientVec = yVecDelta / xVecDelta;

            gradientVec.CopyTo(gradientArr, i);
        }

        // Calc gradients for remaining intermediate points (if any).
        for (; i < xArr.Length - 1; i++)
        {
            gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i + 1], yArr[i + 1]);
        }

        // Last point.
        gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i], yArr[i]);
    }
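For reference, the same central-difference rule spelled out on a tiny hand-worked data set (the values are purely illustrative and not from the source):

    // Three sample points; the middle point uses both neighbours, the end points use their single neighbour.
    double[] x = { 0.0, 0.5, 1.0 };
    double[] y = { 0.0, 0.25, 1.0 };
    double gradFirst  = (y[1] - y[0]) / (x[1] - x[0]);   // 0.5
    double gradMiddle = (y[2] - y[0]) / (x[2] - x[0]);   // 1.0
    double gradLast   = (y[2] - y[1]) / (x[2] - x[1]);   // 1.5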
Code Example #4
        /// <summary>
        /// Create a System.Windows.Forms derived object for displaying output for a domain (e.g. show best genome's output/performance/behaviour in the domain).
        /// </summary>
        public AbstractDomainView CreateDomainView()
        {
            if (1 == InputCount)
            {
                ParamSamplingInfo paramInfo = _paramSamplingInfo;
                return(new FnRegressionView2D(_fn, paramInfo, true, CreateGenomeDecoder()));
            }

            return(null);
        }
Code Example #5
    private static BlackBoxProbe CreateBlackBoxProbe(
        Func <double, double> fn,
        ParamSamplingInfo paramSamplingInfo)
    {
        // Determine the mid output value of the function (over the specified sample points) and a scaling factor
        // to apply to the neural network response for it to be able to recreate the function (because the neural net
        // output range is [0,1] when using the logistic function as the neuron activation function).
        FuncRegressionUtils.CalcFunctionMidAndScale(fn, paramSamplingInfo, out double mid, out double scale);

        return(new BlackBoxProbe(paramSamplingInfo, mid, scale));
    }
Code Example #6
    /// <summary>
    /// Probe the given function by taking samples of it at a number of discrete sample points.
    /// </summary>
    /// <param name="fn">The function to probe/sample.</param>
    /// <param name="paramSamplingInfo">Sampling metadata.</param>
    /// <param name="responseArr">An array to store the sample results within.</param>
    public static void Probe(
        Func <double, double> fn,
        ParamSamplingInfo paramSamplingInfo,
        double[] responseArr)
    {
        Debug.Assert(responseArr.Length == paramSamplingInfo.SampleResolution);

        double[] xArr = paramSamplingInfo.XArr;

        for (int i = 0; i < xArr.Length; i++)
        {
            responseArr[i] = fn(xArr[i]);
        }
    }
Code Example #7
        public void CalcGradients()
        {
            const int         sampleCount = 100;
            ParamSamplingInfo psi         = new ParamSamplingInfo(0, 2 * Math.PI, sampleCount);

            double[] yArr = new double[sampleCount];
            FuncRegressionUtils.Probe((x) => Math.Sin(x), psi, yArr);

            // Calc gradients.
            double[] gradientArr = new double[sampleCount];
            FuncRegressionUtils.CalcGradients(psi, yArr, gradientArr);

            // Calc expected gradients (using simple non-vectorized logic).
            double[] gradientArrExpected = new double[sampleCount];
            CalcGradients_IndependentImpl(psi, yArr, gradientArrExpected);

            // Compare results.
            Assert.Equal(gradientArrExpected, gradientArr);
        }
Code Example #8
        private static void CalcGradients_IndependentImpl(
            ParamSamplingInfo paramSamplingInfo,
            double[] yArr,
            double[] gradientArr)
        {
            // Handle the end points as special cases.
            // First point.
            double[] xArr = paramSamplingInfo.XArr;
            gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

            // Intermediate points.
            int i = 1;

            for (; i < xArr.Length - 1; i++)
            {
                gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i + 1], yArr[i + 1]);
            }

            // Last point.
            gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i], yArr[i]);
        }
Code Example #9
    /// <summary>
    /// Construct a new instance.
    /// </summary>
    /// <param name="fn">The target function.</param>
    /// <param name="paramSamplingInfo">Sampling (defines the x range and sampling density).</param>
    /// <param name="gradientMseWeight">The fitness weighting to assign to the gradient mean squared error (MSE) score.</param>
    public GenerativeFnRegressionEvaluationScheme(
        Func <double, double> fn,
        ParamSamplingInfo paramSamplingInfo,
        double gradientMseWeight)
    {
        _paramSamplingInfo = paramSamplingInfo;
        _gradientMseWeight = gradientMseWeight;

        // Alloc arrays.
        int sampleCount = _paramSamplingInfo.SampleResolution;

        _yArrTarget        = new double[sampleCount];
        _gradientArrTarget = new double[sampleCount];

        // Calculate the target responses (the expected/correct responses).
        FuncRegressionUtils.Probe(fn, paramSamplingInfo, _yArrTarget);
        FuncRegressionUtils.CalcGradients(paramSamplingInfo, _yArrTarget, _gradientArrTarget);

        // Create blackbox probe.
        _blackBoxProbe = CreateBlackBoxProbe(fn, paramSamplingInfo);
    }
Code Example #10
    /// <summary>
    /// Construct a new instance.
    /// </summary>
    /// <param name="paramSamplingInfo">Parameter sampling info.</param>
    /// <param name="gradientMseWeight">Fitness weighting to apply to the gradient fitness score.</param>
    /// <param name="yArrTarget">Array of target y values (function output values).</param>
    /// <param name="gradientArrTarget">Array of target gradient values.</param>
    /// <param name="blackBoxProbe">Black box probe. For obtaining the y value response array from an instance of <see cref="IBlackBox{T}"/>.</param>
    internal FuncRegressionEvaluator(
        ParamSamplingInfo paramSamplingInfo,
        double gradientMseWeight,
        double[] yArrTarget,
        double[] gradientArrTarget,
        IBlackBoxProbe blackBoxProbe)
    {
        _paramSamplingInfo = paramSamplingInfo;
        _gradientMseWeight = gradientMseWeight;
        _yMseWeight        = 1.0 - gradientMseWeight;

        _yArrTarget        = yArrTarget;
        _gradientArrTarget = gradientArrTarget;

        // Alloc working arrays for receiving black box outputs.
        int sampleCount = _paramSamplingInfo.SampleResolution;

        _yArr        = new double[sampleCount];
        _gradientArr = new double[sampleCount];

        _blackBoxProbe = blackBoxProbe;
    }
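The constructor stores complementary weights (_yMseWeight = 1.0 - gradientMseWeight), which suggests the evaluator blends a y-value error and a gradient error into a single score. The Evaluate method itself is not part of this excerpt, so the following is only a sketch of that blending; CalcWeightedError and CalcMse are hypothetical helpers, not names from the source.

    // Hypothetical sketch of how the two stored weights could combine the error measures.
    static double CalcWeightedError(
        double[] yArrTarget, double[] yArr,
        double[] gradientArrTarget, double[] gradientArr,
        double yMseWeight, double gradientMseWeight)
    {
        double yMse        = CalcMse(yArrTarget, yArr);
        double gradientMse = CalcMse(gradientArrTarget, gradientArr);
        return (yMseWeight * yMse) + (gradientMseWeight * gradientMse);
    }

    // Mean squared error between a target array and an actual (observed) array.
    static double CalcMse(double[] target, double[] actual)
    {
        double total = 0.0;
        for (int i = 0; i < target.Length; i++)
        {
            double e = target[i] - actual[i];
            total += e * e;
        }
        return total / target.Length;
    }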
Code Example #11
    /// <summary>
    /// Determine the mid output value of the function (over the specified sample points) and a scaling factor
    /// to apply to the neural network response for it to be able to recreate the function (because the neural net
    /// output range is [0,1] when using the logistic function as the neuron activation function).
    /// </summary>
    /// <param name="fn">The function to be sampled.</param>
    /// <param name="paramSamplingInfo">Parameter sampling info.</param>
    /// <param name="mid">Returns the mid value of the function (halfway between min and max).</param>
    /// <param name="scale">Returns the scale of the function.</param>
    public static void CalcFunctionMidAndScale(
        Func <double, double> fn,
        ParamSamplingInfo paramSamplingInfo,
        out double mid, out double scale)
    {
        double[] xArr = paramSamplingInfo.XArr;
        double   min  = fn(xArr[0]);
        double   max  = min;

        for (int i = 0; i < xArr.Length; i++)
        {
            double y = fn(xArr[i]);
            min = Math.Min(y, min);
            max = Math.Max(y, max);
        }

        // TODO: explain this (0.8 is logistic function range, 0.5 is the logistic function output value when its input is zero).
        double range = max - min;

        scale = range / 0.8;
        mid   = (min + max) / 2.0;
    }
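For example, sampling Math.Sin over [0, 2*PI] (as the test and benchmark code elsewhere in this section does) gives sampled min/max values close to -1 and 1, so range is roughly 2, scale roughly 2 / 0.8 = 2.5, and mid roughly 0:

    var psi = new ParamSamplingInfo(0, 2 * Math.PI, 100);
    FuncRegressionUtils.CalcFunctionMidAndScale(x => Math.Sin(x), psi, out double mid, out double scale);
    // mid is approximately 0.0 and scale approximately 2.5 for this sampling.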
Code Example #12
 /// <summary>
 /// Construct a generative function regression evaluator with the provided parameter sampling info and function to regress.
 /// </summary>
 public GenerativeFnRegressionEvaluator(IFunction fn, ParamSamplingInfo paramSamplingInfo, double gradientMseWeighting)
     : base(fn, paramSamplingInfo, gradientMseWeighting, CreateGenerativeBlackBoxProbe(fn, paramSamplingInfo))
 {
 }
Code Example #13
        public FuncRegressionUtilsBenchmarks()
        {
            var psi = new ParamSamplingInfo(0, 2 * Math.PI, __sampleCount);

            FuncRegressionUtils.Probe((x) => Math.Sin(x), psi, _yArr);
        }
Code Example #14
 /// <summary>
 /// Construct a new instance.
 /// </summary>
 /// <param name="paramSamplingInfo">Parameter sampling info.</param>
 /// <param name="offset">Offset to apply to each neural network output response.</param>
 /// <param name="scale">Scaling factor to apply to each neural network output response.</param>
 public BlackBoxProbe(ParamSamplingInfo paramSamplingInfo, double offset, double scale)
 {
     _paramSamplingInfo = paramSamplingInfo;
     _offset            = offset;
     _scale             = scale;
 }
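The constructor above only stores the offset and scale; the probe method that applies them is not part of this excerpt. A plausible sketch, assuming each logistic network output in [0,1] is mapped back onto the target function's range using the 0.5 midpoint noted in CalcFunctionMidAndScale (MapOutput is a hypothetical name):

 // Hypothetical mapping from a network output back to a function value (an assumption,
 // not shown in the excerpt).
 static double MapOutput(double networkOutput, double offset, double scale)
 {
     return ((networkOutput - 0.5) * scale) + offset;
 }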