Example #1
        private void startAsyncButton_Click(object sender, EventArgs e)
        {
            int    numOfCashier      = 0;
            int    numOfSelfCheckout = 0;
            int    numOfMachines     = 0;
            double arrPro            = 0;

            if (int.TryParse(this.numOfCashierTextBox.Text, out numOfCashier) &&
                int.TryParse(this.numOfSelfCheckoutTextBox.Text, out numOfSelfCheckout) &&
                int.TryParse(this.numOfSelfCheckMachineTextBox.Text, out numOfMachines) &&
                double.TryParse(this.arrivalRateTextBox.Text, out arrPro))
            {
                int maxIteration;
                if (int.TryParse(this.maxIterationTextBox.Text, out maxIteration) && maxIteration > 0)
                {
                    _sim = new SimulationDemo.Simulation(numCashier: numOfCashier, numSelfChechout: numOfSelfCheckout, numMachine: numOfMachines, maxIteration: maxIteration);
                }
                else
                {
                    _sim = new SimulationDemo.Simulation(numCashier: numOfCashier, numSelfChechout: numOfSelfCheckout, numMachine: numOfMachines);
                }

                IDistribution dist = new Poison(arrPro);
                DistributionHelper.UpdateDistribution(EventEnum.Arrival, dist);

                // Start the asynchronous operation.
                backgroundWorker1.RunWorkerAsync();
                this.startbutton.Enabled                  = false;
                this.numOfCashierTextBox.Enabled          = false;
                this.numOfSelfCheckoutTextBox.Enabled     = false;
                this.numOfSelfCheckMachineTextBox.Enabled = false;

                this.startbutton.Text = "Started";
            }
        }
Example #2
        public static Result TestSpearmanMethod(List <double> excerpt)
        {
            double[,] h = new double[excerpt.Count - 1, excerpt.Count - 1];
            for (int i = 0; i < excerpt.Count - 1; i++)
            {
                for (int j = i + 1; j < excerpt.Count; j++)
                {
                    if (excerpt[i] == excerpt[j])
                    {
                        h[i, j - 1] = 0.5;
                    }
                    else if (excerpt[i] < excerpt[j])
                    {
                        h[i, j - 1] = 1;
                    }
                }
            }
            double v = 0;

            for (int i = 0; i < excerpt.Count - 1; i++)
            {
                for (int j = i + 1; j < excerpt.Count; j++)
                {
                    v += (j - 1) * h[i, j - 1];
                }
            }
            double rc = 1 - 12 * v / excerpt.Count / (excerpt.Count * excerpt.Count - 1);
            double d  = 1.0 / (excerpt.Count - 1);
            double s  = rc / Math.Sqrt(d);
            double q  = DistributionHelper.GetNormalDistributionQuantile(TimeSeriesEnvironment.Current.Alpha);

            return(new Result(q, s));
        }
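A minimal usage sketch for the Spearman test above. The property names Quantile and Statistic on Result are placeholders (the snippet only shows the Result(q, s) constructor), and treating |s| > q as evidence of a trend is an assumption, not part of the source:

        // Hypothetical caller; Quantile/Statistic are assumed property names on Result.
        List<double> series = new List<double> { 1.2, 1.5, 1.4, 1.9, 2.1, 2.0, 2.4 };
        Result spearman = TestSpearmanMethod(series);
        bool trendSuspected = Math.Abs(spearman.Statistic) > spearman.Quantile;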
        /// <summary>
        /// Transforms the generic <paramref name="preconsolidationStress"/> into
        /// a <see cref="MacroStabilityInwardsPreconsolidationStress"/>.
        /// </summary>
        /// <param name="preconsolidationStress">The preconsolidation stress to use
        /// in the transformation.</param>
        /// <returns>A <see cref="MacroStabilityInwardsPreconsolidationStress"/>
        /// based on the given data.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="preconsolidationStress"/>
        /// is <c>null</c>.</exception>
        /// <exception cref="ImportedDataTransformException">Thrown when the
        /// <paramref name="preconsolidationStress"/> could not be transformed into
        /// a <see cref="MacroStabilityInwardsPreconsolidationStress"/>.</exception>
        public static MacroStabilityInwardsPreconsolidationStress Transform(PreconsolidationStress preconsolidationStress)
        {
            if (preconsolidationStress == null)
            {
                throw new ArgumentNullException(nameof(preconsolidationStress));
            }

            var location = new Point2D(preconsolidationStress.XCoordinate,
                                       preconsolidationStress.ZCoordinate);

            try
            {
                DistributionHelper.ValidateLogNormalDistribution(preconsolidationStress.StressDistributionType,
                                                                 preconsolidationStress.StressShift);

                var distribution = new VariationCoefficientLogNormalDistribution
                {
                    Mean = (RoundedDouble)preconsolidationStress.StressMean,
                    CoefficientOfVariation = (RoundedDouble)preconsolidationStress.StressCoefficientOfVariation
                };

                return(new MacroStabilityInwardsPreconsolidationStress(location, distribution));
            }
            catch (Exception e) when(e is DistributionValidationException || e is ArgumentOutOfRangeException)
            {
                string errorMessage = CreateErrorMessage(location, e.Message);

                throw new ImportedDataTransformException(errorMessage, e);
            }
            catch (ArgumentException e)
            {
                throw new ImportedDataTransformException(e.Message, e);
            }
        }
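A short, hypothetical caller for Transform, only to show the documented exception contract in use; how the PreconsolidationStress instance named stress is obtained (importer, test factory, ...) is left open:

        // `stress` stands for any PreconsolidationStress read from imported data.
        try
        {
            MacroStabilityInwardsPreconsolidationStress transformed = Transform(stress);
            // use `transformed` ...
        }
        catch (ImportedDataTransformException exception)
        {
            // An invalid distribution type, a non-zero shift or out-of-range values end up here;
            // for validation and range errors the offending location is part of the message.
            Console.WriteLine(exception.Message);
        }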
        public void getOutcomeMinMax_Test()
        {
            double[][] samples = new double[10][];
            for (int i = 0; i < samples.Length; i++)
            {
                samples[i] = new double[] { i * 0.1, -0.2, 0.2, 0.3, 10 };
            }

            samples[3]    = null;
            samples[4][1] = -0.4; //MinMin
            samples[5][2] = 0.41; //MaxMax

            samples[6][3] = 0.42; //MaxActual
            samples[5][3] = -0.2; //MinActual

            double maxMax, minMin, maxActual, minActual;
            double maxDistance, maxDistanceCount;
            double maxMaxCount, minMinCount, maxActualCount, minActualCount;

            DistributionHelper.getSampleOutcomesMinMax(samples, 0, out maxMax, out maxMaxCount, out minMin, out minMinCount, out maxDistance, out maxDistanceCount, out maxActual, out maxActualCount, out minActual, out minActualCount);

            //Todo: Write another for minValuesPercent != 0

            Assert.AreEqual(0.41, maxMax);
            Assert.AreEqual(-0.4, minMin);
            Assert.AreEqual(0.42, maxActual);
            Assert.AreEqual(-0.2, minActual);

            Assert.AreEqual(10, maxActualCount);
            Assert.AreEqual(10, maxMaxCount);
            Assert.AreEqual(10, minMinCount);
            Assert.AreEqual(10, maxDistanceCount);
            Assert.AreEqual(10, minActualCount);
        }
        public void getMinMax_NAN_Test()
        {
            double[] inputs = new double[200];
            int      v      = 0;

            for (int i = 0; i < inputs.Length; i += 2)
            {
                inputs[i] = v++;
            }

            for (int i = 1; i < inputs.Length; i += 2)
            {
                inputs[i] = double.NaN;
            }

            Assert.AreEqual(inputs[0], 0);
            Assert.AreEqual(inputs[2], 1);
            Assert.AreEqual(inputs[4], 2);
            Assert.AreEqual(inputs[6], 3);

            Assert.AreEqual(inputs[1], double.NaN);
            Assert.AreEqual(inputs[3], double.NaN);
            Assert.AreEqual(inputs[5], double.NaN);
            Assert.AreEqual(inputs[7], double.NaN);

            double min, max;

            DistributionHelper.getMinMax(inputs, 10, out min, out max);

            Assert.AreEqual(5, min, 1);
            Assert.AreEqual(95, max, 1);
        }
        public static PieceConstIntensityResult PieceConstIntensity(Queue q, int m)
        {
            int    n   = q.Count;
            double min = q.Min();
            double dt  = (q.Max() - min) / m;

            int Ns = 0;
            List <PieceConstIntensityClassResult> results = new List <PieceConstIntensityClassResult>();

            for (int i = 0; i < m - 1; i++)
            {
                int ns = q.Count(x => x >= min + dt * i && x < min + dt * (i + 1));
                Ns += ns;
                double val = ns / dt / (n - Ns);

                ConfidenceInterval cnf = new ConfidenceInterval(val,
                                                                x => x * DistributionHelper.ChiSquareDistributionQuantile(Globals.Alpha / 2, 2 * ns) / ns / 2,
                                                                x => x * DistributionHelper.ChiSquareDistributionQuantile(1 - Globals.Alpha / 2, 2 * ns) / ns / 2,
                                                                min + dt * i,
                                                                min + dt * (i + 1));

                PieceConstIntensityClassResult res = new PieceConstIntensityClassResult(i + 1, cnf, ns);
                results.Add(res);
            }

            return(new PieceConstIntensityResult(results));
        }
Example #7
        public static Result TestCorrelation(double r, int n, int k)
        {
            double q = DistributionHelper.GetStudentDistributionQuantile(
                TimeSeriesEnvironment.Current.Alpha,
                n - k - 2);
            double t = r * Math.Sqrt(n - k - 2) / Math.Sqrt(1 - r * r);

            return(new Result(q, t));
        }
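For intuition, a small worked call with made-up inputs (r = 0.6, n = 30, k = 1 are arbitrary; the quantile q depends on the configured Alpha):

        // t = 0.6 * sqrt(30 - 1 - 2) / sqrt(1 - 0.6 * 0.6) ≈ 3.12 / 0.80 ≈ 3.90
        Result res = TestCorrelation(0.6, 30, 1);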
Example #8
        private void UpdateArrivalRate_Click(object sender, EventArgs e)
        {
            double arrPro = 0;

            if (double.TryParse(this.arrivalRateTextBox.Text, out arrPro))
            {
                IDistribution dist = new Poison(arrPro);
                DistributionHelper.UpdateDistribution(EventEnum.Arrival, dist);
            }
        }
Example #9
        public void ValidateShiftedLogNormalDistribution_ValidDistribution_DoesNotThrowException()
        {
            // Setup
            const long distributionType = SoilLayerConstants.LogNormalDistributionValue;

            // Call
            TestDelegate call = () => DistributionHelper.ValidateShiftedLogNormalDistribution(distributionType);

            // Assert
            Assert.DoesNotThrow(call);
        }
Example #10
        public void ValidateLogNormalDistribution_DistributionTypeNull_DoesNotThrowException()
        {
            // Setup
            var    random = new Random(21);
            double shift  = random.NextDouble();

            // Call
            TestDelegate call = () => DistributionHelper.ValidateLogNormalDistribution(null, shift);

            // Assert
            Assert.DoesNotThrow(call);
        }
Example #11
        public void ValidateShiftedLogNormalDistribution_InvalidDistributionType_ThrowsDistributionValidationException()
        {
            // Setup
            const long invalidDistributionType = -1;

            // Call
            TestDelegate call = () => DistributionHelper.ValidateShiftedLogNormalDistribution(invalidDistributionType);

            // Assert
            var exception = Assert.Throws <DistributionValidationException>(call);

            Assert.AreEqual("Parameter moet verschoven lognormaal verdeeld zijn.", exception.Message);
        }
        public static Image visualizeArray(double[] input, int width, int height, int lineSize = 3)
        {
            Bitmap   bmp = new Bitmap(width, height);
            Graphics g   = Graphics.FromImage(bmp);

            g.Clear(Color.LightGray);

            double stepSize = Convert.ToDouble(input.Length) / Convert.ToDouble(width);

            double min, max;

            DistributionHelper.getMinMax(input, out min, out max);

            int oldX = -1, oldY = -1;

            Color cUp   = Color.Green;
            Color cDown = Color.Blue;

            for (int x = 0; x < width; x++)
            {
                int index = Convert.ToInt32(stepSize * x);
                if (index < input.Length && double.IsNaN(input[index]) == false)
                {
                    int y = height - Convert.ToInt32((input[index] - min) / (max - min) * height);

                    if (oldX != -1)
                    {
                        for (int yOffset = -(lineSize / 2); yOffset < (lineSize / 2); yOffset++)
                        {
                            if (y + yOffset > 0 && y + yOffset < height)
                            {
                                bmp.SetPixel(x, y + yOffset, y > oldY ? cDown : cUp);
                            }
                        }
                    }

                    oldX = x;
                    oldY = y;
                }
                else
                {
                    for (int y = 0; y < height; y++)
                    {
                        bmp.SetPixel(x, y, Color.Yellow);
                    }
                }
            }

            return(bmp);
        }
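A possible way to exercise visualizeArray, assuming a double[] series named values is at hand; the width, height and file name are arbitrary:

        // Renders `values` into an 800x200 bitmap and writes it to disk.
        Image img = visualizeArray(values, 800, 200);
        img.Save("series.png");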
        public void getMinMax_Test_SmallValues()
        {
            double[] inputs = new double[100];
            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = i * 0.001;
            }

            double min, max;

            DistributionHelper.getMinMax(inputs, 10, out min, out max);

            Assert.AreEqual(5 * 0.001, min, 1 * 0.001);
            Assert.AreEqual(95 * 0.001, max, 1 * 0.001);
        }
        /// <summary>
        /// Validates the distribution properties of a parameter which is defined as a
        /// shifted log normal distribution.
        /// </summary>
        /// <param name="soilLayerName">The name of the soil layer.</param>
        /// <param name="distributionType">The distribution type of the parameter.</param>
        /// <param name="parameterName">The name of the parameter.</param>
        /// <exception cref="ImportedDataTransformException">Thrown when the distribution properties are invalid.</exception>
        private static void ValidateStochasticShiftedLogNormalDistributionParameter(string soilLayerName,
                                                                                    long? distributionType,
                                                                                    string parameterName)
        {
            try
            {
                DistributionHelper.ValidateShiftedLogNormalDistribution(
                    distributionType);
            }
            catch (DistributionValidationException e)
            {
                string errorMessage = CreateErrorMessageForParameter(soilLayerName, parameterName, e.Message);
                throw new ImportedDataTransformException(errorMessage, e);
            }
        }
Example #15
        public void ValidateLogNormalDistribution_ShiftNonZero_ThrowsDistributionValidationException()
        {
            // Setup
            const long distributionType = SoilLayerConstants.LogNormalDistributionValue;

            var    random = new Random(21);
            double shift  = random.NextDouble();

            // Call
            TestDelegate call = () => DistributionHelper.ValidateLogNormalDistribution(distributionType,
                                                                                       shift);

            // Assert
            var exception = Assert.Throws <DistributionValidationException>(call);

            Assert.AreEqual("Parameter moet lognormaal verdeeld zijn met een verschuiving gelijk aan 0.", exception.Message);
        }
        public static double optimizeOutcomeCodePercentage(int rounds, out double outcomeCodePercent, double[][] priceData, double[][] outcomes, out double buyDist, out double sellDist)
        {
            outcomeCodePercent = 0.5;

            buyDist  = double.NaN;
            sellDist = double.NaN;

            double lastScore = double.MinValue;
            double direction = -0.01;

            int round;

            for (round = 1; round < rounds; round++)
            {
                double   successRatioCode;
                bool[][] outcomeCodes = OutcomeGenerator.getOutcomeCode(priceData, outcomes, outcomeCodePercent, out successRatioCode);

                if (successRatioCode < 0.9)
                {
                    throw new TooLittleValidDataException("Too low success ratio: " + successRatioCode);
                }

                DistributionHelper.getOutcomeCodeDistribution(outcomeCodes, out buyDist, out sellDist);

                double score = ((buyDist + sellDist) / 2) * outcomeCodePercent;
                if (score < lastScore) //Wrong direction
                {
                    direction = direction * (-1);
                    Logger.log("New opt. direction: " + direction);
                }

                if (outcomeCodePercent <= 0 && direction <= 0)
                {
                    direction = Math.Abs(direction);
                }

                outcomeCodePercent += (direction / (1 + (round / 20)));

                Logger.log("PercDist OPT. Round " + round + " -> " + outcomeCodePercent + "% = |s" + Math.Round(score, 4) + "| b" + Math.Round(buyDist, 4) + " s" + Math.Round(sellDist, 4));

                lastScore = score;
                round++;
            }

            return(outcomeCodePercent);
        }
Example #17
        public void ValidateLogNormalDistribution_InvalidDistributionType_ThrowsDistributionValidationException()
        {
            // Setup
            const long distributionType = -1;

            var    random = new Random(21);
            double shift  = random.NextDouble();

            // Call
            TestDelegate call = () => DistributionHelper.ValidateLogNormalDistribution(distributionType,
                                                                                       shift);

            // Assert
            var exception = Assert.Throws <DistributionValidationException>(call);

            Assert.AreEqual("Parameter moet lognormaal verdeeld zijn.", exception.Message);
        }
        public void getMinMax_Test_MoreComplex()
        {
            double[] inputs = new double[200];
            for (int i = 0; i < 100; i++)
            {
                inputs[i] = i;
            }

            for (int i = 100; i < 200; i++)
            {
                inputs[i] = 100;
            }

            double min, max;

            DistributionHelper.getMinMax(inputs, 10, out min, out max);
            Assert.AreEqual(10, min, 1);
            Assert.AreEqual(100, max, 1);
        }
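Taken together, the getMinMax tests on this page (the NaN, small-value and more complex variants) are consistent with a trimmed min/max: NaN entries are ignored, the remaining values are sorted, and half of the given percentage is cut from each end. A minimal reference sketch under that assumption, not the actual DistributionHelper implementation:

        // Assumption: `percent` is the total share (in %) of extreme values to drop,
        // split evenly over both tails; NaN entries are skipped. Requires System.Linq.
        public static void getMinMaxSketch(double[] inputs, double percent, out double min, out double max)
        {
            double[] valid = inputs.Where(x => !double.IsNaN(x)).OrderBy(x => x).ToArray();
            int trim = (int)(valid.Length * percent / 100.0 / 2.0);

            min = valid[trim];
            max = valid[valid.Length - 1 - trim];
        }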
        public static StatEstimationResult StatEstimation(Queue q, int m)
        {
            int           n     = q.Count;
            List <double> times = q.ToTime;
            double        dt    = times.Last() / m;

            List <StatEstimationClassResult> intervals = new List <StatEstimationClassResult>();
            double ua           = DistributionHelper.GetNormalDistributionQuantile(Globals.Alpha / 2);
            double constantPart = ua * ua / n / dt / 2;

            for (int i = 0; i < m; i++)
            {
                int                ns      = times.Count(x => x >= dt * i && (i == m - 1 || x < dt * (i + 1)));
                double             us      = ns / dt / n;
                double             difPart = ua * Math.Sqrt(ns + ua * ua / 4) / n / dt;
                ConfidenceInterval cnf     = new ConfidenceInterval(us, x => x + constantPart - difPart, x => x + constantPart + difPart, dt * i, dt * (i + 1));
                intervals.Add(new StatEstimationClassResult(i + 1, cnf));
            }

            return(new StatEstimationResult(intervals));
        }
Example #20
        public static Result TestSignsMethod(List <double> excerpt)
        {
            double l = 0,                              // number of descents (ties count as 0.5)
                   m = (excerpt.Count - 1) / 2.0,      // expected value of l under randomness
                   d = (excerpt.Count + 1) / 12.0;     // variance of l under randomness

            for (int i = 0; i < excerpt.Count - 1; i++)
            {
                if (excerpt[i] == excerpt[i + 1])
                {
                    l += 0.5;
                }
                else if (excerpt[i] > excerpt[i + 1])
                {
                    l += 1;
                }
            }
            double u = (l - m) / Math.Sqrt(d);
            double q = DistributionHelper.GetNormalDistributionQuantile(TimeSeriesEnvironment.Current.Alpha);

            return(new Result(q, u));
        }
        public static double findOutcomeCodeForDesiredDistribution(double desiredDistribution, double tolerance, double[][] priceData, double[][] outcomeMatrix, out double buyDist, out double sellDist)
        {
            double outcomeCodePercent = 0.5;
            int    round = 0;

            while (true)
            {
                double   successRatioCode;
                bool[][] outcomeCodes = OutcomeGenerator.getOutcomeCode(priceData, outcomeMatrix, outcomeCodePercent, out successRatioCode);

                if (successRatioCode < 0.9)
                {
                    throw new TooLittleValidDataException("Too few outcome codes: " + successRatioCode);
                }

                DistributionHelper.getOutcomeCodeDistribution(outcomeCodes, out buyDist, out sellDist);

                double score = (buyDist + sellDist) / 2;
                if (score > desiredDistribution - tolerance && score < desiredDistribution + tolerance)
                {
                    break;
                }
                else if (score > desiredDistribution + tolerance)
                {
                    outcomeCodePercent += (outcomeCodePercent / (10 + round));
                }
                else if (score < desiredDistribution - tolerance)
                {
                    outcomeCodePercent -= (outcomeCodePercent / (10 + round));
                }

                Logger.log("SetDist OPT. Round " + round + " -> " + outcomeCodePercent + "% = b" + Math.Round(buyDist, 4) + " s" + Math.Round(sellDist, 4) + " =" + Math.Round(score, 4));

                round++;
            }

            return(outcomeCodePercent);
        }
        public void getOutcomeCodeDistribution_Test()
        {
            bool[][] inputs = new bool[100][];
            for (int i = 0; i < 50; i++)
            {
                inputs[i] = new bool[] { true, true };
            }

            for (int i = 50; i < 100; i++)
            {
                inputs[i] = new bool[] { false, true };
            }

            for (int i = 50; i < 75; i++)
            {
                inputs[i][(int)OutcomeCodeMatrixIndices.Sell] = false;
            }

            double buyR, sellR;

            DistributionHelper.getOutcomeCodeDistribution(inputs, out buyR, out sellR);
            Assert.AreEqual(0.5, buyR);
            Assert.AreEqual(0.75, sellR);
        }
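The expected values 0.5 and 0.75 in the test above suggest that buyR and sellR are simply the share of rows whose Buy and Sell flags are set. A reference sketch under that assumption (OutcomeCodeMatrixIndices.Buy is assumed to exist alongside the Sell member used above; this is not the actual DistributionHelper code):

        // Assumption: buyR/sellR = fraction of non-null rows with the respective flag set.
        public static void getOutcomeCodeDistributionSketch(bool[][] outcomeCodes, out double buyR, out double sellR)
        {
            int rows = 0, buys = 0, sells = 0;
            foreach (bool[] row in outcomeCodes)
            {
                if (row == null)
                {
                    continue; // null rows are skipped, as in the sampling tests on this page
                }

                rows++;
                if (row[(int)OutcomeCodeMatrixIndices.Buy])
                {
                    buys++;
                }
                if (row[(int)OutcomeCodeMatrixIndices.Sell])
                {
                    sells++;
                }
            }

            buyR  = (double)buys / rows;
            sellR = (double)sells / rows;
        }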
        public void getSampleCodesBuyMaxSellMax_Test()
        {
            double[][] samples = new double[10][];
            for (int i = 0; i < samples.Length; i++)
            {
                samples[i] = new double[] { i * 0.1, 0.2, 0.2, 10 };
            }

            samples[3]    = null;
            samples[4][1] = 0.4;  //Buy
            samples[5][2] = 0.41; //Sell

            double buy, sell;
            double buyCount, sellCount;

            DistributionHelper.getSampleOutcomeCodesBuyMaxSellMax(samples, 0, out buy, out buyCount, out sell, out sellCount);

            //Todo: Write another for minValuesPercent != 0

            Assert.AreEqual(0.4, buy);
            Assert.AreEqual(0.41, sell);
            Assert.AreEqual(10, buyCount);
            Assert.AreEqual(10, sellCount);
        }
        public LearningIndicator(WalkerIndicator indicator, double[][] prices, bool[][] outcomeCodes, double[][] outcomes, long timeframe, double targetPercent, double minPercentThreshold, int steps, bool createStatistics)
        {
            this.targetPercent = targetPercent;
            this.timeframe     = timeframe;

            double validRatio;

            double[] values = IndicatorRunner.getIndicatorValues(prices, indicator.Clone(), out validRatio);
            if (validRatio < 0.5)
            {
                throw new TooLittleValidDataException("Not enough valid values: " + validRatio);
            }

            // Maybe this does not work properly... TODO:
            double min, max, usedValuesRatio;

            //DistributionHelper.getMinMax(values, 4, out min, out max);
            DistributionHelper.getMinMax(values, out min, out max);

            outcomeCodeSamplingTable = IndicatorSampler.sampleValuesOutcomeCode(values, outcomeCodes, min, max, steps, out usedValuesRatio);
            if (usedValuesRatio < 0.5)
            {
                throw new TooLittleValidDataException("Not enough sampling for outcomeCode: " + usedValuesRatio);
            }

            outcomeSamplingTable = IndicatorSampler.sampleValuesOutcome(values, prices, outcomes, min, max, out usedValuesRatio, 40);
            if (usedValuesRatio < 0.5)
            {
                throw new TooLittleValidDataException("Not enough sampling for outcome: " + usedValuesRatio);
            }

            this.usedValues = usedValuesRatio;

            if (createStatistics)
            {
                //Predictive power calculation
                predictivePower = new double[33];
                IndicatorSampler.getStatisticsOutcomeCodes(values, outcomeCodes, out predictivePower[0], out predictivePower[1], out predictivePower[2], out predictivePower[3]);
                IndicatorSampler.getStatisticsOutcomes(values, prices, outcomes, out predictivePower[4], out predictivePower[5], out predictivePower[6], out predictivePower[7], out predictivePower[8], out predictivePower[9]);

                DistributionHelper.getSampleOutcomeCodesBuyMaxSellMax(outcomeCodeSamplingTable, minPercentThreshold, out predictivePower[10], out predictivePower[11], out predictivePower[12], out predictivePower[13]);
                DistributionHelper.getSampleOutcomesMinMax(outcomeSamplingTable, minPercentThreshold, out predictivePower[14], out predictivePower[15], out predictivePower[16], out predictivePower[17], out predictivePower[18], out predictivePower[19], out predictivePower[20], out predictivePower[21], out predictivePower[22], out predictivePower[23]);

                //Outcome Code

                List <double> buyCodesDist        = new List <double>(),
                              sellCodesDist       = new List <double>(),
                              buySellDistanceDist = new List <double>(),
                              minMaxDistanceDist  = new List <double>(),
                              minDist             = new List <double>(),
                              maxDist             = new List <double>(),
                              actualDist          = new List <double>();

                double totalCodeSamples = 0;
                foreach (double[] row in outcomeCodeSamplingTable)
                {
                    totalCodeSamples += row[(int)SampleValuesOutcomeCodesIndices.SamplesCount];
                }

                int regardedStates = 0;
                foreach (double[] row in outcomeCodeSamplingTable)
                {
                    if ((row[(int)SampleValuesOutcomeCodesIndices.SamplesCount] / totalCodeSamples) * 100 >= minPercentThreshold) //minPercentThreshold
                    {
                        buyCodesDist.Add(row[(int)SampleValuesOutcomeCodesIndices.BuyRatio]);
                        sellCodesDist.Add(row[(int)SampleValuesOutcomeCodesIndices.SellRatio]);
                        buySellDistanceDist.Add(Math.Abs(row[(int)SampleValuesOutcomeCodesIndices.BuyRatio] - row[(int)SampleValuesOutcomeCodesIndices.SellRatio]));
                        regardedStates++;
                    }
                }

                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.valuesOverMinPercentRatioCode] = Convert.ToDouble(regardedStates) / outcomeCodeSamplingTable.Length;

                if (regardedStates <= 2)
                {
                    throw new TooLittleStatesException("Too few states: " + regardedStates);
                }

                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.buyCodeStD]             = buyCodesDist.StandardDeviation();
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.sellCodeStD]            = sellCodesDist.StandardDeviation();
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.buySellCodeDistanceStD] = buySellDistanceDist.StandardDeviation();

                //Outcome

                double totalSamples = 0;
                foreach (double[] row in outcomeSamplingTable)
                {
                    totalSamples += row[(int)SampleValuesOutcomeIndices.SamplesCount];
                }

                //Avgs
                regardedStates = 0;
                foreach (double[] row in outcomeSamplingTable)
                {
                    if ((row[(int)SampleValuesOutcomeIndices.SamplesCount] / totalSamples) * 100 > minPercentThreshold) //minPercentThreshold
                    {
                        maxDist.Add(row[(int)SampleValuesOutcomeIndices.MaxAvg]);
                        minDist.Add(row[(int)SampleValuesOutcomeIndices.MinAvg]);
                        minMaxDistanceDist.Add(Math.Abs(row[(int)SampleValuesOutcomeIndices.MaxAvg]) + row[(int)SampleValuesOutcomeIndices.MinAvg]);
                        actualDist.Add(row[(int)SampleValuesOutcomeIndices.ActualAvg]);
                        regardedStates++;
                    }
                }

                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.valuesOverMinPercentRatioOutcome] += Convert.ToDouble(regardedStates) / outcomeSamplingTable.Length;

                //avg distances
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.maxStD]            = maxDist.StandardDeviation();
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.minStD]            = minDist.StandardDeviation();
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.minMaxDistanceStd] = minMaxDistanceDist.StandardDeviation();
                predictivePower[(int)LearningIndicatorPredictivePowerIndecies.actualStD]         = actualDist.StandardDeviation();

                if (double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.buyCodeStD]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.sellCodeStD]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.buySellCodeDistanceStD]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.maxStD]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.minStD]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.minMaxDistanceStd]) ||
                    double.IsNaN(predictivePower[(int)LearningIndicatorPredictivePowerIndecies.actualStD]))
                {
                    throw new Exception("Not a valid predictive power!");
                }

                //End predictive power calculation
            }

            this.indicator = indicator;
        }