/// <summary>
        /// Initialize the experiment with some optional XML configuration data.
        /// </summary>
        /// <param name="name">Experiment name.</param>
        /// <param name="xmlConfig">XML element holding the experiment's configuration settings.</param>
        public void Initialize(string name, XmlElement xmlConfig)
        {
            _name                    = name;
            _populationSize          = XmlUtils.GetValueAsInt(xmlConfig, "PopulationSize");
            _specieCount             = XmlUtils.GetValueAsInt(xmlConfig, "SpecieCount");
            _activationScheme        = ExperimentUtils.CreateActivationScheme(xmlConfig, "Activation");
            _complexityRegulationStr = XmlUtils.TryGetValueAsString(xmlConfig, "ComplexityRegulationStrategy");
            _complexityThreshold     = XmlUtils.TryGetValueAsInt(xmlConfig, "ComplexityThreshold");
            _description             = XmlUtils.TryGetValueAsString(xmlConfig, "Description");
            _parallelOptions         = ExperimentUtils.ReadParallelOptions(xmlConfig);
            ExperimentUtils.ReadRbfAuxArgMutationConfig(xmlConfig, out _rbfMutationSigmaCenter, out _rbfMutationSigmaRadius);

            _eaParams                         = new NeatEvolutionAlgorithmParameters();
            _eaParams.SpecieCount             = _specieCount;
            _neatGenomeParams                 = new NeatGenomeParameters();
            _neatGenomeParams.FeedforwardOnly = _activationScheme.AcyclicNetwork;
            _neatGenomeParams.ConnectionWeightMutationProbability = 0.788;
            // NOTE(review): the original code assigned AddConnectionMutationProbability twice
            // (0.001 immediately overwritten by 0.01). The dead first assignment has been removed;
            // the 0.001 value may have been intended for a different parameter (e.g.
            // AddNodeMutationProbability) — confirm against the intended mutation settings.
            _neatGenomeParams.AddConnectionMutationProbability    = 0.01;
            _neatGenomeParams.NodeAuxStateMutationProbability     = 0.2;
            _neatGenomeParams.DeleteConnectionMutationProbability = 0.001;

            // Determine what function to regress.
            string     fnIdStr = XmlUtils.GetValueAsString(xmlConfig, "Function");
            FunctionId fnId    = (FunctionId)Enum.Parse(typeof(FunctionId), fnIdStr);

            _fn = FunctionUtils.GetFunction(fnId);

            // Read parameter sampling scheme settings: the number of sample points and the
            // [min,max] interval they are taken over.
            int    sampleResolution = XmlUtils.GetValueAsInt(xmlConfig, "SampleResolution");
            double sampleMin        = XmlUtils.GetValueAsDouble(xmlConfig, "SampleMin");
            double sampleMax        = XmlUtils.GetValueAsDouble(xmlConfig, "SampleMax");

            _paramSamplingInfo = new ParamSamplingInfo(sampleMin, sampleMax, sampleResolution);
        }
// ---- Example #2 (snippet separator) ----
        /// <summary>
        /// Approximate the gradient of the sampled function at each sample point, writing the
        /// results into <paramref name="gradientArr"/>. SIMD-accelerated via System.Numerics.Vector.
        /// </summary>
        /// <param name="paramSamplingInfo">Supplies the x-coordinates of the sample points (_xArr).</param>
        /// <param name="yArr">Function values at each sample point; must be at least as long as _xArr.</param>
        /// <param name="gradientArr">Output buffer for the per-point gradients; same length as _xArr.</param>
        public static void CalcGradients(
            ParamSamplingInfo paramSamplingInfo,
            double[] yArr,
            double[] gradientArr)
        {
            // Notes.
            // The gradient at a sample point is approximated by taking the gradient of the line between the two
            // sample points either side of that point. For the first and last sample points we take the gradient
            // of the line between the sample point and its single adjacent sample point (as an alternative we could
            // sample an additional point at each end that doesn't get used for the function regression evaluation).
            //
            // This approach is rather crude, but fast. A better approach might be to do a polynomial regression on
            // the sample point and its nearest two adjacent samples, and then take the gradient of the polynomial
            // regression at the required point; obviously that would require more computational work, so it may
            // not be beneficial in the overall context of an evolutionary algorithm.
            //
            // Furthermore, the difference between this gradient approximation and the true gradient decreases with
            // increased sample density, therefore this is a reasonable approach *if* the sample density is
            // sufficiently high.

            // Handle the end points as special cases.
            // First point: one-sided gradient using the single right-hand neighbour.
            double[] xArr = paramSamplingInfo._xArr;
            gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

            // Intermediate points.
            // Process 'width' points per iteration using SIMD vectors. The loop bound is conservative:
            // each iteration reads elements [i-1 .. i+width] of xArr/yArr, so i + width must stay below
            // xArr.Length; any leftover interior points are handled by the scalar loop below.
            int width = Vector <double> .Count;
            int i     = 1;

            for (; i < xArr.Length - width - 1; i += width)
            {
                // Calc a block of x deltas (x[i+1] - x[i-1] for each lane).
                var vecLeft   = new Vector <double>(xArr, i - 1);
                var vecRight  = new Vector <double>(xArr, i + 1);
                var xVecDelta = vecRight - vecLeft;

                // Calc a block of y deltas (y[i+1] - y[i-1] for each lane).
                vecLeft  = new Vector <double>(yArr, i - 1);
                vecRight = new Vector <double>(yArr, i + 1);
                var yVecDelta = vecRight - vecLeft;

                // Divide the y's by x's to obtain the gradients (element-wise).
                var gradientVec = yVecDelta / xVecDelta;

                gradientVec.CopyTo(gradientArr, i);
            }

            // Calc gradients for remaining intermediate points (if any) with scalar code.
            for (; i < xArr.Length - 1; i++)
            {
                gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i + 1], yArr[i + 1]);
            }

            // Last point: one-sided gradient using the single left-hand neighbour.
            gradientArr[i] = CalcGradient(xArr[i - 1], yArr[i - 1], xArr[i], yArr[i]);
        }
// ---- Example #3 (snippet separator) ----
        /// <summary>
        /// Create a black box probe configured for the given target function and sampling scheme.
        /// </summary>
        /// <param name="fn">The function being regressed.</param>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <returns>A new <see cref="BlackBoxProbe"/>.</returns>
        private static BlackBoxProbe CreateBlackBoxProbe(IFunction fn, ParamSamplingInfo paramSamplingInfo)
        {
            // Determine the function's mid output value over the specified sample points, and a scaling
            // factor to apply to the neural network response so that it can recreate the function (the
            // network's output range is [0,1] when using the logistic neuron activation function).
            double mid, scale;
            FnRegressionUtils.CalcFunctionMidAndScale(fn, paramSamplingInfo, out mid, out scale);

            return new BlackBoxProbe(paramSamplingInfo, mid, scale);
        }
// ---- Example #4 (snippet separator) ----
        /// <summary>
        /// Approximate the gradient of the sampled function at each sample point, writing the
        /// results into <paramref name="gradientArr"/> (scalar implementation).
        /// </summary>
        /// <param name="paramSamplingInfo">Supplies the x-coordinates of the sample points (_xArr).</param>
        /// <param name="yArr">Function values at each sample point.</param>
        /// <param name="gradientArr">Output buffer for the per-point gradients.</param>
        public static void CalcGradients(ParamSamplingInfo paramSamplingInfo, double[] yArr, double[] gradientArr)
        {
            double[] xArr = paramSamplingInfo._xArr;
            int lastIdx = xArr.Length - 1;

            // First point: one-sided gradient using the single right-hand neighbour.
            gradientArr[0] = CalcGradient(xArr[0], yArr[0], xArr[1], yArr[1]);

            // Interior points: gradient of the line through the two neighbouring sample points.
            for (int idx = 1; idx < lastIdx; idx++)
            {
                gradientArr[idx] = CalcGradient(xArr[idx - 1], yArr[idx - 1], xArr[idx + 1], yArr[idx + 1]);
            }

            // Last point: one-sided gradient using the single left-hand neighbour.
            gradientArr[lastIdx] = CalcGradient(xArr[lastIdx - 1], yArr[lastIdx - 1], xArr[lastIdx], yArr[lastIdx]);
        }
// ---- Example #5 (snippet separator) ----
        /// <summary>
        /// Construct a function regression evaluator with the provided parameter sampling info,
        /// function to regress, gradient weighting, and black box probe.
        /// </summary>
        /// <param name="fn">The function being regressed.</param>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <param name="gradientMseWeight">Weight given to the gradient MSE term; the y MSE term gets (1 - this).</param>
        /// <param name="blackBoxProbe">Probe used to obtain network responses at the sample points.</param>
        public FnRegressionEvaluator(IFunction fn, ParamSamplingInfo paramSamplingInfo, double gradientMseWeight, IBlackBoxProbe blackBoxProbe)
        {
            _paramSamplingInfo = paramSamplingInfo;
            _gradientMseWeight = gradientMseWeight;
            _yMseWeight        = 1.0 - gradientMseWeight;
            _blackBoxProbe     = blackBoxProbe;

            // Pre-compute the target function's values and gradients at every sample point, so each
            // genome evaluation only has to compare against these cached arrays.
            int sampleCount = _paramSamplingInfo._sampleCount;
            _yArrTarget        = new double[sampleCount];
            _gradientArrTarget = new double[sampleCount];

            var fnProbe = new FunctionProbe(paramSamplingInfo);
            fnProbe.Probe(fn, _yArrTarget);
            FnRegressionUtils.CalcGradients(paramSamplingInfo, _yArrTarget, _gradientArrTarget);
        }
// ---- Example #6 (snippet separator) ----
        /// <summary>
        /// Determine the mid-point and scale of the function's output range over the given sample points.
        /// </summary>
        /// <param name="fn">The function to evaluate.</param>
        /// <param name="paramSamplingInfo">Supplies the x-coordinates of the sample points (_xArr).</param>
        /// <param name="mid">The mid-point of the function's observed output range, i.e. (min + max) / 2.</param>
        /// <param name="scale">The observed output range divided by 0.8.</param>
        public static void CalcFunctionMidAndScale(IFunction fn, ParamSamplingInfo paramSamplingInfo, out double mid, out double scale)
        {
            double[] xArr = paramSamplingInfo._xArr;

            // Seed min/max with the first sample, then scan the remainder. (The original loop started
            // at index 0 and therefore re-evaluated fn at xArr[0] redundantly.)
            double min = fn.GetValue(xArr[0]);
            double max = min;

            for (int i = 1; i < xArr.Length; i++)
            {
                double y = fn.GetValue(xArr[i]);
                min = Math.Min(y, min);
                max = Math.Max(y, max);
            }

            // 0.8 is used here as the effective output range of the logistic activation function
            // (with 0.5 being its output for a zero input), so scale maps the function's observed
            // range onto the network's usable output range — NOTE(review): presumed rationale from
            // the original TODO; confirm against the activation function actually configured.
            double range = max - min;

            scale = range / 0.8;
            mid   = ((min + max) / 2.0);
        }
// ---- Example #7 (snippet separator) ----
        /// <summary>
        /// Constructs with the details of the function regression problem to be visualized.
        /// </summary>
        /// <param name="fn">The function being regressed.</param>
        /// <param name="paramSamplingInfo">Parameter sampling info.</param>
        /// <param name="generativeMode">Indicates that blackbox has no inputs; it will generate a waveform as a function of time.</param>
        /// <param name="genomeDecoder">Genome decoder.</param>
        public FnRegressionView2D(IFunction fn, ParamSamplingInfo paramSamplingInfo, bool generativeMode, IGenomeDecoder <NeatGenome, IBlackBox> genomeDecoder)
        {
            InitializeComponent();
            InitGraph(string.Empty, string.Empty, string.Empty);

            _fn                = fn;
            _paramSamplingInfo = paramSamplingInfo;
            _generativeMode    = generativeMode;
            _genomeDecoder     = genomeDecoder;

            // Determine the mid output value of the function (over the specified sample points) and a
            // scaling factor to apply to the neural network response so that it can recreate the function
            // (the network's output range is [0,1] when using the logistic neuron activation function).
            FnRegressionUtils.CalcFunctionMidAndScale(fn, paramSamplingInfo, out double mid, out double scale);

            // Generative mode uses a probe that drives the network with time rather than an input value.
            if (generativeMode)
            {
                _blackBoxProbe = new GenerativeBlackBoxProbe(paramSamplingInfo, mid, scale);
            }
            else
            {
                _blackBoxProbe = new BlackBoxProbe(paramSamplingInfo, mid, scale);
            }

            _yArrTarget = new double[paramSamplingInfo._sampleCount];

            // Pre-build plot point objects: the target curve gets the true function values; the
            // response curve starts at zero and is updated as genomes are evaluated.
            _plotPointListTarget   = new PointPairList();
            _plotPointListResponse = new PointPairList();

            foreach (double x in paramSamplingInfo._xArr)
            {
                _plotPointListTarget.Add(x, _fn.GetValue(x));
                _plotPointListResponse.Add(x, 0.0);
            }

            // Bind plot points to graph.
            zed.GraphPane.AddCurve("Target", _plotPointListTarget, Color.Black, SymbolType.None);
            zed.GraphPane.AddCurve("Network Response", _plotPointListResponse, Color.Red, SymbolType.None);
        }
// ---- Example #8 (snippet separator) ----
 /// <summary>
 /// Construct a generative black box probe with the given sampling scheme and response
 /// post-processing coefficients.
 /// </summary>
 /// <param name="paramSamplingInfo">Parameter sampling info.</param>
 /// <param name="offset">Offset to apply to each neural network output response.</param>
 /// <param name="scale">Scaling factor to apply to each neural network output response.</param>
 public GenerativeBlackBoxProbe(ParamSamplingInfo paramSamplingInfo, double offset, double scale)
 {
     // Simple field capture; the probe itself is stateless beyond these settings.
     _scale = scale;
     _offset = offset;
     _paramSamplingInfo = paramSamplingInfo;
 }
// ---- Example #9 (snippet separator) ----
 /// <summary>
 /// Construct a function regression evaluator with the provided parameter sampling info and function to regress.
 /// A default black box probe is created internally via CreateBlackBoxProbe for the given function
 /// and sampling scheme, then construction is delegated to the four-argument constructor.
 /// </summary>
 /// <param name="fn">The function being regressed.</param>
 /// <param name="paramSamplingInfo">Parameter sampling info.</param>
 /// <param name="gradientMseWeight">Weight given to the gradient MSE term of the fitness score.</param>
 public FnRegressionEvaluator(IFunction fn, ParamSamplingInfo paramSamplingInfo, double gradientMseWeight)
     : this(fn, paramSamplingInfo, gradientMseWeight, CreateBlackBoxProbe(fn, paramSamplingInfo))
 {
 }
// ---- Example #10 (snippet separator) ----
 /// <summary>
 /// Construct a function probe for the given parameter sampling scheme.
 /// </summary>
 /// <param name="paramSamplingInfo">Parameter sampling info.</param>
 public FunctionProbe(ParamSamplingInfo paramSamplingInfo)
     => _paramSamplingInfo = paramSamplingInfo;