Example 1
        /// <summary>
        /// Calculate a frequency distribution for the provided array of values.
        /// 1) The minimum and maximum values are found.
        /// 2) The resulting value range is divided into equal sized sub-ranges (categoryCount).
        /// 3) The number of values that fall into each category is determined.
        /// </summary>
        public static HistogramData BuildHistogramData(double[] valArr, int categoryCount)
        {
            // Determine min/max.
            MathArrayUtils.MinMax(valArr, out double min, out double max);

            // Note. Each bucket's range has the interval [low,high), i.e. samples exactly equal to 'high'
            // fall into the next bucket up. Therefore, to prevent the maximum sample value falling just
            // outside the last bucket (an out-of-range index), we inflate the range by a small proportion
            // so that the max value falls just below the upper bound covered by the distribution.
            double range = (max - min) * 1.01;

            // Handle special case where the data series contains a single value.
            if (0.0 == range)
            {
                return new HistogramData(min, max, 0.0, new int[] { valArr.Length });
            }

            // Loop values and for each one increment the relevant category's frequency count.
            double incr = range / categoryCount;

            int[] frequencyArr = new int[categoryCount];
            for (int i = 0; i < valArr.Length; i++)
            {
                frequencyArr[(int)((valArr[i] - min) / incr)]++;
            }
            return new HistogramData(min, max, incr, frequencyArr);
        }
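As a quick check of the bucketing arithmetic above, here is a standalone fragment (illustrative only, with assumed sample values, not library code) that traces how the 1.01 range inflation keeps the maximum value inside the last bucket instead of producing an out-of-range index:
        // Illustrative fragment (assumed sample values): trace the bucket index calculation.
        double[] samples = { 1.0, 2.5, 4.0, 9.0 };
        int categoryCount = 4;
        double min = 1.0, max = 9.0;

        double range = (max - min) * 1.01;      // 8.08; without the inflation the max value would map to index 4.
        double incr = range / categoryCount;    // 2.02

        foreach (double v in samples)
        {
            int idx = (int)((v - min) / incr);  // 9.0 maps to index 3, the last valid bucket.
            Console.WriteLine($"{v} -> bucket {idx}");
        }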
Example 2
        /// <summary>
        /// Evaluate the provided black box against the function regression task,
        /// and return its fitness score.
        /// </summary>
        /// <param name="box">The black box to evaluate.</param>
        public FitnessInfo Evaluate(IBlackBox<double> box)
        {
            // Probe the black box over the full range of the input parameter.
            _blackBoxProbe.Probe(box, _yArr);

            // Calc gradients.
            FuncRegressionUtils.CalcGradients(_paramSamplingInfo, _yArr, _gradientArr);

            // Calc y position mean squared error (MSE), and apply weighting.
            double yMse = MathArrayUtils.MeanSquaredDelta(_yArr, _yArrTarget);

            yMse *= _yMseWeight;

            // Calc gradient mean squared error.
            double gradientMse = MathArrayUtils.MeanSquaredDelta(_gradientArr, _gradientArrTarget);

            gradientMse *= _gradientMseWeight;

            // Calc fitness as the inverse of MSE (higher value is fitter).
            // Add a constant to avoid divide by zero, and to constrain the fitness range between bad and good solutions;
            // this allows the selection strategy to select solutions that are mediocre and therefore helps preserve diversity.
            double fitness = 20.0 / (yMse + gradientMse + 0.02);

            return new FitnessInfo(fitness);
        }
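As a rough illustration of the constant in the fitness formula above, this fragment (illustrative only, not library code) evaluates 20.0 / (mse + 0.02) for a few combined MSE values; the constant caps fitness at 1000 for a perfect response while still awarding small non-zero fitness to poor ones:
        // Illustrative fragment: how the +0.02 constant shapes the fitness curve defined above.
        foreach (double mse in new[] { 0.0, 0.01, 0.1, 1.0, 10.0 })
        {
            double fitness = 20.0 / (mse + 0.02);
            Console.WriteLine($"combined MSE {mse} -> fitness {fitness:F1}");
        }
        // Output: 1000.0 (upper bound), 666.7, 166.7, 19.6, 2.0.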
Example 3
        private static void MinMax(UniformDistributionSampler sampler, int len)
        {
            // Alloc arrays and fill with uniform random noise.
            double[] a = new double[len];
            sampler.Sample(a);

            // Calc results and compare.
            MinMax(a, out double expectedMin, out double expectedMax);
            MathArrayUtils.MinMax(a, out double actualMin, out double actualMax);

            Assert.AreEqual(expectedMin, actualMin, 1e-10);
            Assert.AreEqual(expectedMax, actualMax, 1e-10);
        }
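The local MinMax reference that supplies the expected values is not shown in this example; a minimal naive version (assumed here for illustration, not the actual test helper) could look like this:
        // Naive reference implementation (assumed, for illustration): a simple linear scan.
        private static void MinMax(double[] a, out double min, out double max)
        {
            min = a[0];
            max = a[0];
            for (int i = 1; i < a.Length; i++)
            {
                if (a[i] < min) min = a[i];
                else if (a[i] > max) max = a[i];
            }
        }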
Example 4
        private static void SumSquaredDelta(UniformDistributionSampler sampler, int len)
        {
            // Alloc arrays and fill with uniform random noise.
            double[] a = new double[len];
            double[] b = new double[len];
            sampler.Sample(a);
            sampler.Sample(b);

            // Calc results and compare.
            double expected = SumSquaredDelta(a, b);
            double actual   = MathArrayUtils.SumSquaredDelta(a, b);

            Assert.AreEqual(expected, actual, 1e-10);
        }
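Likewise, the local SumSquaredDelta reference is not shown; a naive sketch (assumed, for illustration) would simply sum the squared element-wise differences:
        // Naive reference implementation (assumed, for illustration): sum of squared element-wise differences.
        private static double SumSquaredDelta(double[] a, double[] b)
        {
            double total = 0.0;
            for (int i = 0; i < a.Length; i++)
            {
                double delta = a[i] - b[i];
                total += delta * delta;
            }
            return total;
        }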
Example 5
        private static void Clip(UniformDistributionSampler sampler, int len)
        {
            // Alloc array and fill with uniform random noise.
            double[] x = new double[len];
            sampler.Sample(x);

            // Clip the elements of the array with the safe routine.
            double[] expected = (double[])x.Clone();
            Clip(expected, -1.1, 18.8);

            // Clip the elements of the array.
            double[] actual = (double[])x.Clone();
            MathArrayUtils.Clip(actual, -1.1, 18.8);

            // Compare expected with actual array.
            Assert.IsTrue(ArrayUtils.Equals(expected, actual));
        }
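The local Clip reference used for the expected values is also not shown; a naive sketch (assumed, for illustration) clamps each element into the [min, max] range in place:
        // Naive reference implementation (assumed, for illustration): clamp each element into [min, max].
        private static void Clip(double[] x, double min, double max)
        {
            for (int i = 0; i < x.Length; i++)
            {
                if (x[i] < min) x[i] = min;
                else if (x[i] > max) x[i] = max;
            }
        }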
Example 6
        /// <summary>
        /// Evaluate the provided IBlackBox against the function regression task and return its fitness score.
        /// </summary>
        public FitnessInfo Evaluate(IBlackBox box)
        {
            int sampleCount = _paramSamplingInfo._sampleCount;

            // TODO: We can avoid a memory allocation here by allocating at construction time, but this requires modification of
            // ParallelGenomeListEvaluator to utilise multiple evaluators (one per thread).
            double[] yArr        = new double[sampleCount];
            double[] gradientArr = new double[sampleCount];

            // Probe the black box over the full range of the input parameter.
            _blackBoxProbe.Probe(box, yArr);

            // Calc gradients.
            FnRegressionUtils.CalcGradients(_paramSamplingInfo, yArr, gradientArr);

            // Calc y position mean squared error (MSE), and apply weighting.
            double yMse = MathArrayUtils.MeanSquaredDelta(yArr, _yArrTarget);

            yMse *= _yMseWeight;

            // Calc gradient mean squared error.
            double gradientMse = MathArrayUtils.MeanSquaredDelta(gradientArr, _gradientArrTarget);

            gradientMse *= _gradientMseWeight;

            // Calc fitness as the inverse of MSE (higher value is fitter).
            // Add a constant to avoid divide by zero, and to constrain the fitness range between bad and good solutions;
            // this allows the selection strategy to select solutions that are mediocre and therefore helps preserve diversity.
            double fitness = 20.0 / (yMse + gradientMse + 0.02);

            // Test for stopping condition (near perfect response).
            if (fitness >= 100000.0)
            {
                _stopConditionSatisfied = true;
            }
            _evalCount++;
            return new FitnessInfo(fitness, fitness);
        }
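FnRegressionUtils.CalcGradients (FuncRegressionUtils.CalcGradients in Example 2) is not shown in these examples. A plausible central-difference sketch is given below; the method name CalcGradientsSketch and the xIncr parameter are assumptions for illustration, and the library routine may compute gradients differently.
        // Central-difference gradient sketch (assumption, not the library's implementation).
        // Assumes uniformly spaced samples with spacing xIncr: interior points use
        // (y[i+1] - y[i-1]) / (2 * xIncr); the end points use one-sided differences.
        private static void CalcGradientsSketch(double[] yArr, double xIncr, double[] gradientArr)
        {
            int len = yArr.Length;
            gradientArr[0] = (yArr[1] - yArr[0]) / xIncr;
            for (int i = 1; i < len - 1; i++)
            {
                gradientArr[i] = (yArr[i + 1] - yArr[i - 1]) / (2.0 * xIncr);
            }
            gradientArr[len - 1] = (yArr[len - 1] - yArr[len - 2]) / xIncr;
        }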