Code Example #1
        public void MedianTest()
        {
            double[] samples             = { 1, 5, 2, 5, 1, 7, 1, 9 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples);

            Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5));
        }
Code Example #2
        public void MutationDistributionMeanAndStandardDeviationAreRoughlyAsExpected()
        {
            // Build up unbounded domain.
            this._integerDomain = new IntegerDomain();

            // Fix value to mutate and the variance percentage corresponding to some expected values.
            int          expectedStandardDeviation = 5;
            int          expectedMean       = 3;
            Allele<int>  valueToMutate      = new Allele<int>(expectedMean);
            double       variancePercentage = Math.Pow(expectedStandardDeviation, 2) /
                                              ((double)this._integerDomain.Maximum - this._integerDomain.Minimum);

            // Collect results of a lot of mutations.
            double[] mutations = new double[IntegerDomainTest.triesForRandomTests];
            for (int i = 0; i < IntegerDomainTest.triesForRandomTests; i++)
            {
                mutations[i] =
                    (int)this._integerDomain.MutateGeneValue(valueToMutate, variancePercentage).GetValue();
            }

            // Create distribution.
            var distribution = new EmpiricalDistribution(mutations);

            // Check mean & standard deviation.
            Assert.True(
                Math.Abs(expectedMean - distribution.Mean) < 0.1 * expectedMean);
            Assert.True(
                Math.Abs(expectedStandardDeviation - distribution.StandardDeviation) < 0.1 * expectedStandardDeviation);
        }
Code Example #3
        public void EmpiricalDistributionConstructorTest5()
        {
            double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            EmpiricalDistribution distribution = new EmpiricalDistribution(samples, FaultySmoothingRule(samples));

            double mean      = distribution.Mean;                                      // 3
            double median    = distribution.Median;                                    // 2.9999993064186787
            double var       = distribution.Variance;                                  // 1.2941176470588236
            double chf       = distribution.CumulativeHazardFunction(x: 4.2);          // 2.1972245773362191
            double cdf       = distribution.DistributionFunction(x: 4.2);              // 0.88888888888888884
            double pdf       = distribution.ProbabilityDensityFunction(x: 4.2);        // 0.15552784414141974
            double lpdf      = distribution.LogProbabilityDensityFunction(x: 4.2);     // -1.8609305013898356
            double hf        = distribution.HazardFunction(x: 4.2);                    // 1.3997505972727771
            double ccdf      = distribution.ComplementaryDistributionFunction(x: 4.2); // 0.11111111111111116
            double icdf      = distribution.InverseDistributionFunction(p: cdf);       // 4.1999999999999993
            double smoothing = distribution.Smoothing;                                 // 1.9144923416414432

            string str = distribution.ToString();                                      // Fn(x; S)

            Assert.AreEqual(samples, distribution.Samples);
            Assert.AreEqual(1.9144923416414432, smoothing, 1.0e-15);
            Assert.AreEqual(3.0, mean);
            Assert.AreEqual(2.9999993064186787, median);
            Assert.AreEqual(1.2941176470588236, var);
            Assert.AreEqual(2.1972245773362191, chf);
            Assert.AreEqual(0.88888888888888884, cdf);
            Assert.AreEqual(0.15552784414141974, pdf, 1e-15);
            Assert.AreEqual(-1.8609305013898356, lpdf);
            Assert.AreEqual(1.3997505972727771, hf, 1e-15);
            Assert.AreEqual(0.11111111111111116, ccdf);
            Assert.AreEqual(4.1999999999999993, icdf);
            Assert.AreEqual("Fn(x; S)", str);
        }
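
(Note: the FaultySmoothingRule helper used here, and again in Code Examples #11 and #20, is not shown on this page. Below is a minimal sketch consistent with the values asserted above: it must return 1.9144923416414432 for this 18-element sample, while the library's default rule returns 0.67595864392399474, which points to Silverman's rule of thumb with the sign of the exponent flipped. The exact signature and the Measures.StandardDeviation call are assumptions.)

        // Hypothetical reconstruction, not the original source: Silverman's rule of thumb
        // with the exponent negated. For the sample above (sigma = 1.1375929179890421,
        // n = 18) this yields the asserted smoothing value 1.9144923416414432.
        private static double FaultySmoothingRule(double[] observations)
        {
            double sigma = Accord.Statistics.Measures.StandardDeviation(observations);
            return sigma * Math.Pow(4.0 / (3.0 * observations.Length), -1.0 / 5.0);
        }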
Code Example #4
        public static DatetimeDistribution GenerateDistribution(LinearTimeBuckets.Result timeBuckets)
        {
            var (bucketGroup, valueCounts) = timeBuckets.Rows.Zip(timeBuckets.ValueCounts).Last();
            var timeUnit = bucketGroup.Key;

            var offsetCounts = bucketGroup
                               .Where(bucket => bucket.HasValue)
                               .Select(bucket =>
            {
                var offsetSpan = bucket.Value - DateTime.UnixEpoch;

                var offset = timeUnit switch
                {
                    "hour" => offsetSpan.TotalHours,
                    "minute" => offsetSpan.TotalMinutes,
                    "second" => offsetSpan.TotalSeconds,

                    // Note: System.TimeSpan does not provide any larger denomination than days
                    _ => offsetSpan.TotalDays
                };

                // Let's assume it's ok to convert Count from `long` to `int`
                // (i.e. each group contains fewer than 2147483647 values)
                return (Offset: offset, Count: Convert.ToInt32(bucket.Count));
            });

            var distribution = new EmpiricalDistribution(
                offsetCounts.Select(_ => _.Offset).ToArray(),
                offsetCounts.Select(_ => _.Count).ToArray());

            return new DatetimeDistribution(timeUnit, distribution);
        }
Code Example #5
        public void ProbabilityDensityFunctionTest()
        {
            double[] samples             = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples, 1);

            Assert.AreEqual(1.0, target.Smoothing);

            double actual;

            actual = target.ProbabilityDensityFunction(1);
            Assert.AreEqual(0.16854678051819402, actual);

            actual = target.ProbabilityDensityFunction(2);
            Assert.AreEqual(0.15866528844260089, actual, 1e-15);

            actual = target.ProbabilityDensityFunction(3);
            Assert.AreEqual(0.0996000842425018, actual, 1e-15);

            actual = target.ProbabilityDensityFunction(4);
            Assert.AreEqual(0.1008594542833362, actual);

            actual = target.ProbabilityDensityFunction(6);
            Assert.AreEqual(0.078460710909263, actual, 1e-15);

            actual = target.ProbabilityDensityFunction(8);
            Assert.AreEqual(0.049293898826709738, actual, 1e-15);
        }
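
(The density values asserted above are consistent with a textbook Gaussian kernel density estimate using bandwidth h = 1, matching the Smoothing value set in the constructor. A standalone cross-check sketch, independent of Accord's implementation:)

        // Textbook Gaussian KDE: f(x) = 1/(n*h) * sum_i phi((x - x_i) / h),
        // where phi is the standard normal density. With the samples above and
        // h = 1 this reproduces, e.g., f(1) = 0.16854678051819402.
        static double GaussianKde(double[] samples, double x, double h)
        {
            double sum = 0;
            foreach (double xi in samples)
            {
                double u = (x - xi) / h;
                sum += Math.Exp(-0.5 * u * u) / Math.Sqrt(2 * Math.PI);
            }
            return sum / (samples.Length * h);
        }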
Code Example #6
        public void LogProbabilityDensityFunctionTest()
        {
            double[] samples             = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples, 1);

            Assert.AreEqual(1.0, target.Smoothing);

            double actual;
            double expected;

            actual   = target.LogProbabilityDensityFunction(1);
            expected = System.Math.Log(0.16854678051819402);
            Assert.AreEqual(expected, actual, 1e-6);

            actual   = target.LogProbabilityDensityFunction(2);
            expected = System.Math.Log(0.15866528844260089);
            Assert.AreEqual(expected, actual, 1e-6);

            actual   = target.LogProbabilityDensityFunction(3);
            expected = System.Math.Log(0.0996000842425018);
            Assert.AreEqual(expected, actual, 1e-6);

            actual   = target.LogProbabilityDensityFunction(4);
            expected = System.Math.Log(0.1008594542833362);
            Assert.AreEqual(expected, actual, 1e-6);

            actual   = target.LogProbabilityDensityFunction(6);
            expected = System.Math.Log(0.078460710909263);
            Assert.AreEqual(expected, actual, 1e-6);

            actual   = target.LogProbabilityDensityFunction(8);
            expected = System.Math.Log(0.049293898826709738);
            Assert.AreEqual(expected, actual, 1e-6);
        }
Code Example #7
        public static NumericDistribution? GenerateDistribution(HistogramWithCounts histogramResult)
        {
            if (histogramResult.ValueCounts.NonSuppressedNonNullCount == 0)
            {
                return null;
            }

            var samples = histogramResult.Histogram.Buckets.Values.Select(bucket =>
            {
                var sampleValue  = bucket.LowerBound + (bucket.BucketSize.SnappedSize / 2);
                var sampleWeight = Convert.ToInt32(bucket.Count);

                return new
                {
                    SampleValue = sampleValue,
                    SampleWeight = sampleWeight,
                };
            });

            var dist = new EmpiricalDistribution(
                samples.Select(_ => Convert.ToDouble(_.SampleValue)).ToArray(),
                samples.Select(_ => _.SampleWeight).ToArray());

            return new NumericDistribution(dist);
        }
Code Example #8
        public void EmpiricalDistributionConstructorTest3()
        {
            double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            EmpiricalDistribution distribution = new EmpiricalDistribution(samples);

            double mean      = distribution.Mean;                                      // 3
            double median    = distribution.Median;                                    // 2.9999993064186787
            double var       = distribution.Variance;                                  // 1.2941176470588236
            double chf       = distribution.CumulativeHazardFunction(x: 4.2);          // 2.1972245773362191
            double cdf       = distribution.DistributionFunction(x: 4.2);              // 0.88888888888888884
            double pdf       = distribution.ProbabilityDensityFunction(x: 4.2);        // 0.181456280142802
            double lpdf      = distribution.LogProbabilityDensityFunction(x: 4.2);     // -1.7067405350495708
            double hf        = distribution.HazardFunction(x: 4.2);                    // 1.6331065212852196
            double ccdf      = distribution.ComplementaryDistributionFunction(x: 4.2); // 0.11111111111111116
            double icdf      = distribution.InverseDistributionFunction(p: cdf);       // 4.1999999999999993
            double smoothing = distribution.Smoothing;                                 // 0.67595864392399474

            string str = distribution.ToString();                                      // Fn(x; S)

            Assert.AreEqual(samples, distribution.Samples);
            Assert.AreEqual(0.67595864392399474, smoothing);
            Assert.AreEqual(3.0, mean);
            Assert.AreEqual(2.9999993064186787, median);
            Assert.AreEqual(1.2941176470588236, var);
            Assert.AreEqual(2.1972245773362191, chf);
            Assert.AreEqual(0.88888888888888884, cdf);
            Assert.AreEqual(0.18145628014280227, pdf);
            Assert.AreEqual(-1.7067405350495708, lpdf);
            Assert.AreEqual(1.6331065212852196, hf);
            Assert.AreEqual(0.11111111111111116, ccdf);
            Assert.AreEqual(4.1999999999999993, icdf);
            Assert.AreEqual("Fn(x; S)", str);
        }
Code Example #9
        public void WeightedEmpiricalDistributionConstructorTest()
        {
            double[] original     = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            var      distribution = new EmpiricalDistribution(original);

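            // These integer weights replicate each sample so that, taken together, the
            // weighted pairs reproduce exactly the 18 values in `original` (5 appears
            // twice, 3 appears eight times, and so on); every statistic asserted below
            // should therefore match the unweighted distribution.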
            int[]    weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };
            double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };
            var      target  = new EmpiricalDistribution(samples, weights);

            Assert.AreEqual(distribution.Entropy, target.Entropy, 1e-10);
            Assert.AreEqual(distribution.Mean, target.Mean);
            Assert.AreEqual(distribution.Median, target.Median);
            Assert.AreEqual(distribution.Mode, target.Mode);
            Assert.AreEqual(distribution.Quartiles.Min, target.Quartiles.Min);
            Assert.AreEqual(distribution.Quartiles.Max, target.Quartiles.Max);
            Assert.AreEqual(distribution.Smoothing, target.Smoothing);
            Assert.AreEqual(distribution.StandardDeviation, target.StandardDeviation);
            Assert.AreEqual(distribution.Support.Min, target.Support.Min);
            Assert.AreEqual(distribution.Support.Max, target.Support.Max);
            Assert.AreEqual(distribution.Variance, target.Variance);
            Assert.IsTrue(target.Weights.IsEqual(weights.Divide(weights.Sum())));
            Assert.AreEqual(target.Samples, samples);

            for (double x = 0; x < 6; x += 0.1)
            {
                double actual, expected;
                expected = distribution.ComplementaryDistributionFunction(x);
                actual   = target.ComplementaryDistributionFunction(x);
                Assert.AreEqual(expected, actual);

                expected = distribution.CumulativeHazardFunction(x);
                actual   = target.CumulativeHazardFunction(x);
                Assert.AreEqual(expected, actual);

                expected = distribution.DistributionFunction(x);
                actual   = target.DistributionFunction(x);
                Assert.AreEqual(expected, actual);

                expected = distribution.HazardFunction(x);
                actual   = target.HazardFunction(x);
                Assert.AreEqual(expected, actual, 1e-15);

                expected = distribution.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
                actual   = target.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
                Assert.AreEqual(expected, actual);

                expected = distribution.LogProbabilityDensityFunction(x);
                actual   = target.LogProbabilityDensityFunction(x);
                Assert.AreEqual(expected, actual, 1e-15);

                expected = distribution.ProbabilityDensityFunction(x);
                actual   = target.ProbabilityDensityFunction(x);
                Assert.AreEqual(expected, actual, 1e-15);

                expected = distribution.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
                actual   = target.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
                Assert.AreEqual(expected, actual, 1e-10);
            }
        }
Code Example #10
        public void EmpiricalDistributionConstructorTest2()
        {
            double[] samples             = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples);

            Assert.AreEqual(samples, target.Samples);
            Assert.AreEqual(0.67595864392399474, target.Smoothing);
        }
Code Example #11
        public void EmpiricalDistributionConstructorTest4()
        {
            double[] samples             = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples, FaultySmoothingRule(samples));

            Assert.AreEqual(samples, target.Samples);
            Assert.AreEqual(1.9144923416414432, target.Smoothing);
        }
Code Example #12
            /// <summary>Creates a new <see cref="Algorithm"/>.
            /// </summary>
            /// <param name="empiricalDistribution">The empirical distribution in its <see cref="EmpiricalDistribution" /> representation.</param>
            /// <param name="factory">The <see cref="DensityEstimator" /> object that serves as factory of the current object.</param>
            internal Algorithm(EmpiricalDistribution empiricalDistribution, SquareRootChoiceDensityEstimator factory)
            {
                m_Factory = factory;
                m_EmpiricalDistribution = empiricalDistribution;

                m_TotalNumberOfBins = (int)Math.Floor(Math.Sqrt(empiricalDistribution.SampleSize));
                m_BinWidth          = (empiricalDistribution.Maximum - empiricalDistribution.Minimum) / m_TotalNumberOfBins;
                m_Delta             = 1.0 / empiricalDistribution.SampleSize;
            }
Code Example #13
        public static IArtaProcess CreateArtaProcess(double[] data) // throws NonFeasibleCorrelationException, NotStationaryException
        {
            EmpiricalDistribution distribution = new EmpiricalDistribution(data);
            int order = OrderEstimator.EstimateOrder(data);

            Console.WriteLine("order" + order);
            double[] artaCorrelationCoefficients = new double[AutoCorrelation.CalculateAcfs(data, order).Length];
            Array.ConstrainedCopy(AutoCorrelation.CalculateAcfs(data, order), 1, artaCorrelationCoefficients, 1, order + 1);
            return(CreateArtaProcess(distribution, artaCorrelationCoefficients, new RandomAdaptor(new MersenneTwister())));
        }
Code Example #14
 /// <summary>
 /// Fit the model to actual set of samples.
 /// </summary>
 private void FitEmpirical()
 {
     for (int i = 0; i < m_dimension; i++)
     {
         var samples      = Samples.Select(x => x[i]).ToArray();
         var distribution = new EmpiricalDistribution(samples, 1 / (double)samples.Length);
         Distributions[i] = distribution;
         m_pmax[i]        = samples.Select(distribution.ProbabilityDensityFunction).Max();
     }
 }
Code Example #15
        public void WeightedEmpiricalDistributionConstructorTest3()
        {
            double[] weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };
            double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };

            weights = weights.Divide(weights.Sum());

            var target = new EmpiricalDistribution(samples, weights);

            Assert.AreEqual(1.2377597081667415, target.Smoothing);
        }
Code Example #16
        private bool TestDistribution(EmpiricalDistribution<int> testDistribution, Dictionary<int, int> trueCounts)
        {
            bool pass       = true;
            var  distCounts = testDistribution.Counts;

            if (distCounts.Count != trueCounts.Count)
            {
                Assert.Fail("List Counts created incorrectly");
            }
            foreach (var kvp in distCounts)
            {
                if (!trueCounts.ContainsKey(kvp.Key) || trueCounts[kvp.Key] != kvp.Value)
                {
                    pass = false;
                    break;
                }
            }
            Assert.IsTrue(pass);

            int dataSum   = Data.Count;
            var testProbs = trueCounts.ToDictionary(x => x.Key, x => (double)x.Value / dataSum);

            var distProbs = testDistribution.Distribution;

            if (distProbs.Count != testProbs.Count)
            {
                Assert.Fail("List Probabilities created incorrectly");
            }
            double probSum = 0;

            foreach (var kvp in distProbs)
            {
                // The distribution stores (cumulative probability, value) pairs, while
                // testProbs maps value -> probability.
                int    value    = kvp.Item2;        // the actual value
                double prob     = kvp.Item1;        // the cumulative probability
                double testProb = testProbs[value]; // the expected probability

                // Since the stored probability is cumulative, subtract the running sum
                // to recover the probability of this value alone.
                double currProb = Math.Round(prob - probSum, 5);
                if (currProb != testProb)
                {
                    pass = false;
                    break;
                }
                probSum += testProb; // advance the running sum for the next iteration
            }

            Assert.IsTrue(pass);

            if (Math.Round(probSum, 6) != 1.0)
            {
                Assert.Fail("didn't equal 100%");
            }
            return true;
        }
Code Example #17
        public void DistributionFunctionTest()
        {
            double[] samples             = { 1, 5, 2, 5, 1, 7, 1, 9 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples);

            Assert.AreEqual(0.000, target.DistributionFunction(0));
            Assert.AreEqual(0.375, target.DistributionFunction(1));
            Assert.AreEqual(0.500, target.DistributionFunction(2));
            Assert.AreEqual(0.750, target.DistributionFunction(5));
            Assert.AreEqual(0.875, target.DistributionFunction(7));
            Assert.AreEqual(1.000, target.DistributionFunction(9));
        }
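
(The values asserted above follow directly from the definition of the empirical CDF, F(x) = (number of samples <= x) / n: with eight samples, F(1) = 3/8 = 0.375, F(2) = 4/8 = 0.500, and so on. A plain-LINQ sketch for checking them by hand, independent of Accord; assumes System.Linq:)

        // Empirical CDF by definition: F(x) = (# samples <= x) / n.
        static double Ecdf(double[] samples, double x)
            => samples.Count(s => s <= x) / (double)samples.Length;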
Code Example #18
        public void FitTest1()
        {
            EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

            double[]        observations = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
            double[]        weights      = null;
            IFittingOptions options      = null;

            target.Fit(observations, weights, options);
            Assert.AreEqual(1.8652004071576875, target.Smoothing);
            Assert.AreNotSame(observations, target.Samples);
            CollectionAssert.AreEqual(observations, target.Samples);
        }
Code Example #19
        public void EmpiricalDistributionConstructorTest1()
        {
            double[] samples   = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            double   smoothing = 0.5;

            EmpiricalDistribution target = new EmpiricalDistribution(samples, smoothing);

            Assert.AreEqual(samples, target.Samples);
            Assert.AreEqual(smoothing, target.Smoothing);

            Assert.AreEqual(3, target.Mean);
            Assert.AreEqual(1.1375929179890421, target.StandardDeviation);
            Assert.AreEqual(1.2941176470588236, target.Variance); // StandardDeviation squared
        }
Code Example #20
        public void FitTest2()
        {
            EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

            double[]        observations = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
            double[]        weights      = null;
            IFittingOptions options      = new EmpiricalOptions {
                SmoothingRule = FaultySmoothingRule
            };

            target.Fit(observations, weights, options);
            Assert.AreEqual(1.9144923416414432, target.Smoothing);
            Assert.AreNotSame(observations, target.Samples);
            CollectionAssert.AreEqual(observations, target.Samples);
        }
Code Example #21
        public double[,] PFE(Product[] portfolioIn, Date valueDate, Date[] fwdValueDates, double[] percentiles)
        {
            CalculateAll(portfolioIn, valueDate, fwdValueDates);

            double[,] pfe = new double[fwdValueDates.Length, percentiles.Length];

            for (int col = 0; col < regressedValues.GetLength(1); col++)
            {
                EmpiricalDistribution xDist = new EmpiricalDistribution(regressedValues.GetColumn(col));
                for (int percCount = 0; percCount < percentiles.Length; percCount++)
                {
                    pfe[col, percCount] = xDist.InverseDistributionFunction(percentiles[percCount]);
                }
            }
            return pfe;
        }
Code Example #22
        public void FitTest()
        {
            EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

            double[]        observations = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
            double[]        weights      = null;
            IFittingOptions options      = null;

            target.Fit(observations, weights, options);
            Assert.AreNotSame(observations, target.Samples);

            for (int i = 0; i < observations.Length; i++)
            {
                Assert.AreEqual(observations[i], target.Samples[i]);
            }
        }
Code Example #23
        protected override void EndProcessing()
        {
            EmpiricalDistribution dist;

            if (double.IsNaN(Smoothing))
            {
                dist = new EmpiricalDistribution(_data.ToArray());
            }
            else
            {
                dist = new EmpiricalDistribution(_data.ToArray(), Smoothing);
            }

            var obj = DistributionHelper.AddConvinienceMethods(dist);

            WriteObject(obj);
        }
Code Example #24
        public void EmpiricalDistributionTest()
        {
            double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
            UnivariateContinuousDistribution distribution = NormalDistribution.Standard;

            var target = new KolmogorovSmirnovTest(sample, distribution);

            EmpiricalDistribution actual = target.EmpiricalDistribution;

            Assert.AreNotSame(sample, actual.Samples);

            Array.Sort(sample);

            for (int i = 0; i < sample.Length; i++)
            {
                Assert.AreEqual(sample[i], actual.Samples[i]);
            }
        }
Code Example #25
File: TestDistributions.cs Project: sjvannTMU/Sage
        public void TestDistributionEmpirical()
        {
            double[] binBounds = new double[] { 4.0, 7.0, 8.0, 10.0, 13.0, 14.0 };
            double[] heights   = new double[] { 2.0, 4.0, 3.0, 6.0, 4.0 }; // Note - one less than in intervals.

            IDoubleDistribution dist = new EmpiricalDistribution(m_model, "EmpiricalDistributionFromHistogram", Guid.NewGuid(), binBounds, heights);

            Assert.IsTrue(dist.GetValueWithCumulativeProbability(0.50) == 10.5);
            dist.SetCDFInterval(0.5, 0.5);
            Assert.IsTrue(dist.GetNext() == 10.5);
            dist.SetCDFInterval(0.0, 1.0);

            System.IO.StreamWriter tw = new System.IO.StreamWriter(Environment.GetEnvironmentVariable("TEMP") + "\\DistributionEmpiricalFromHistogram.csv");
            Debug.WriteLine("Generating raw data.");
            int DATASETSIZE = 1500000;

            double[] rawData = new double[DATASETSIZE];
            for (int x = 0; x < DATASETSIZE; x++)
            {
                rawData[x] = dist.GetNext();
                //tw.WriteLine(rawData[x]);
            }

            Debug.WriteLine("Performing histogram analysis.");
            Histogram1D_Double hist = new Histogram1D_Double(rawData, 4, 14, 100, "distribution");

            hist.LabelProvider = new LabelProvider(((Histogram1D_Double)hist).DefaultLabelProvider);
            hist.Recalculate();

            Debug.WriteLine("Writing data dump file.");
            int[] bins = (int[])hist.Bins;
            for (int i = 0; i < bins.Length; i++)
            {
                //Debug.WriteLine(hist.GetLabel(new int[]{i}) + ", " + bins[i]);
                tw.WriteLine(hist.GetLabel(new int[] { i }) + ", " + bins[i]);
            }
            tw.Flush();
            tw.Close();

            if (m_visuallyVerify)
            {
                System.Diagnostics.Process.Start("excel.exe", Environment.GetEnvironmentVariable("TEMP") + "\\DistributionEmpiricalFromHistogram.csv");
            }
        }
Code Example #26
        private List<RecordMongo> typicalDay(List<List<RecordMongo>> possDayValues, string vcode)
        {
            List<double> longTermValues = new List<double>();
            // CDFs of all candidate days
            List<EmpiricalDistribution> dayCDFS = new List<EmpiricalDistribution>();

            foreach (List<RecordMongo> day in possDayValues)
            {
                List<double> dayValues = new List<double>();
                foreach (RecordMongo rm in day)
                {
                    if (rm.value != -999.9)
                    {
                        // Only include actual (non-missing) values in the CDFs
                        longTermValues.Add(rm.value);
                        dayValues.Add(rm.value);
                    }
                }
                dayCDFS.Add(new EmpiricalDistribution(dayValues.ToArray()));
            }
            // Long-term CDF over all days found
            EmpiricalDistribution longterm  = new EmpiricalDistribution(longTermValues.ToArray());
            List<double>          finkelSch = new List<double>();
            var    range = longterm.GetRange(0.9);
            double inc   = (range.Max - range.Min) / 20;

            foreach (EmpiricalDistribution candDay in dayCDFS)
            {
                double sample = range.Min;
                double fs     = 0;
                while (sample <= range.Max)
                {
                    fs     += Math.Abs(candDay.DistributionFunction(sample) - longterm.DistributionFunction(sample));
                    sample += inc;
                }
                // 24 is the number of values per day
                finkelSch.Add(fs / 24);
            }
            int minindex = finkelSch.IndexOf(finkelSch.Min());
            List<RecordMongo> selectedday = possDayValues[minindex];

            return selectedday;
        }
Code Example #27
        public void CloneTest()
        {
            double[] samples             = { 4, 2 };
            EmpiricalDistribution target = new EmpiricalDistribution(samples);

            EmpiricalDistribution clone = (EmpiricalDistribution)target.Clone();

            Assert.AreNotSame(target, clone);
            Assert.AreEqual(target.Entropy, clone.Entropy);
            Assert.AreEqual(target.Mean, clone.Mean);
            Assert.AreNotSame(target.Samples, clone.Samples);
            Assert.AreEqual(target.StandardDeviation, clone.StandardDeviation);
            Assert.AreEqual(target.Variance, clone.Variance);

            for (int i = 0; i < clone.Samples.Length; i++)
            {
                Assert.AreEqual(target.Samples[i], clone.Samples[i]);
            }
        }
Code Example #28
        /// <summary>
        /// Fits the cashflows to intrinsic functions of x.  i.e. (x-K)^+ and (K-x)^+
        /// </summary>
        /// <returns></returns>
        private double[][] GetIntrinsic(Date date, int order)
        {
            var col    = _dates.FindIndex(d => d == date);
            var result = new double[_regressors.GetLength(0)][];

            for (var regressorNumber = 0; regressorNumber < _regressors.GetLength(2); regressorNumber++)
            {
                // For each regressor get the partition of the possible values
                var xVec    = GetSingleX(col, regressorNumber);
                var xDist   = new EmpiricalDistribution(xVec);
                var strikes = new double[order - 1];
                for (var i = 1; i < order; i++)
                {
                    strikes[i - 1] = xDist.InverseDistributionFunction((double)i / order);
                }
                // Create the values of the basis functions for each regressor
                for (var row = 0; row < _regressors.GetLength(0); row++)
                {
                    double[] rowValues;
                    if (regressorNumber == 0) // On the first pass for the first regressor, create the rows of the result matrix.
                    {
                        rowValues    = new double[1 + order * _regressors.GetLength(2)];
                        rowValues[0] = 1;
                        result[row]  = rowValues;
                    }
                    else
                    {
                        rowValues = result[row];
                    }

                    var x = _regressors[row, col, regressorNumber];
                    rowValues[1 + regressorNumber * order] = Math.Max(0, strikes[0] - x);
                    for (var orderCounter = 0; orderCounter < order - 1; orderCounter++)
                    {
                        rowValues[2 + regressorNumber * order + orderCounter] = Math.Max(0, x - strikes[orderCounter]);
                    }
                }
            }

            return result;
        }
Code Example #29
        public void EmpiricalDistributionTest_with_reestimation()
        {
            Accord.Math.Random.Generator.Seed = 1;

            double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
            UnivariateContinuousDistribution distribution = NormalDistribution.Standard;

            var target = new LillieforsTest(sample, distribution);

            EmpiricalDistribution actual = target.EmpiricalDistribution;

            Assert.AreNotSame(sample, actual.Samples);

            Array.Sort(sample);

            for (int i = 0; i < sample.Length; i++)
            {
                Assert.AreEqual(sample[i], actual.Samples[i]);
            }
        }
Code Example #30
        public void Test1()
        {
            //Random rng = new Random();

            EmpiricalDistribution<int> listDist = new EmpiricalDistribution<int>();

            listDist.CreateDistribution(Data);
            Dictionary<int, int> testCounts = new Dictionary<int, int>();

            foreach (int i in Data)
            {
                if (!testCounts.ContainsKey(i))
                {
                    testCounts.Add(i, 0);
                }
                testCounts[i]++;
            }

            Assert.IsTrue(TestDistribution(listDist, testCounts));
        }
Code Example #31
File: MainViewModel.cs Project: alex-ks/quantpressor
		private CompressionStats Compress( IGrid grid,
		                                   ICompressor compressor,
										   double[] errors,
										   string outName,
		                                   ProgressViewModel progressBar )
		{
			double[] leftBorders = new double[grid.ColumnCount];
			double[] rightBorders = new double[grid.ColumnCount];

			var qs = new IQuantization[grid.ColumnCount];
			var distrs = new IDistribution[grid.ColumnCount];

			progressBar.Status = "Quantizing columns...";

			Parallel.For( 0, grid.ColumnCount, column =>
			{
				var distr = new EmpiricalDistribution( grid, column );

				leftBorders[column] = double.MaxValue;
				rightBorders[column] = double.MinValue;

				for ( int row = 0; row < grid.RowCount; ++row )
				{
					double value = grid.GetValue( row, column );
					leftBorders[column] = leftBorders[column] < value ? leftBorders[column] : value;
					rightBorders[column] = rightBorders[column] > value ? rightBorders[column] : value;
				}

				var quantizer = new Quantizer( leftBorders[column], rightBorders[column] );
				var quantization = quantizer.Quantize( errors[column], distr );

				lock ( _lockGuard )
				{
					progressBar.Progress += 1.0 / ( grid.ColumnCount + 1 );
					distrs[column] = distr;
					qs[column] = quantization;
				}
			} );

			var quantizations = new List<IQuantization>( qs );
			var distributions = new List<IDistribution>( distrs );

			progressBar.Status = "Writing archive...";
			progressBar.Progress = ( double )grid.ColumnCount / ( grid.ColumnCount + 1 );

			ICompressionResult result;

			using ( var stream = new FileOutputStream( outName ) )
			{
				result = compressor.Compress( grid, quantizations, stream );
			}

			progressBar.Progress = 1.0;
			progressBar.TryClose( );

			return new CompressionStats
			{
				CompressionResult = result,
				Distributions = distributions,
				LeftBorders = leftBorders,
				RightBorders = rightBorders,
				Quantizations = quantizations
			};
		}
Code Example #32
		public async void InitPlot( )
		{
			var reader = new CsvGridReader( 1024, ';' );

			OpenFileDialog openFileDialog = new OpenFileDialog
			{
				Filter = "Text files|*.csv",
				ValidateNames = true
			};

			var column = 0;

			var fileName = openFileDialog.ShowDialog( ) == true ? openFileDialog.FileName : null;

			if ( fileName == null )
			{ return; }

			var grid = await Task<IGrid>.Factory.StartNew( ( ) => reader.Read( fileName, false, false ) );

			double left = double.MaxValue, right = double.MinValue;

			for ( int i = 0; i < grid.RowCount; ++i )
			{
				var value = grid.GetValue( i, column );
				left = left < value ? left : value;
				right = right > value ? right : value;
			}

			var quantizer = new Quantizer( left, right );

			var empirical = new EmpiricalDistribution( grid, column );

			var q = await Task<IQuantization>.Factory.StartNew( ( ) => quantizer.Quantize( 15, 1e-3, empirical ) );

			var zero = new LineSeries
			{
				Color = OxyColor.FromRgb( 0, 0, 0 ),
				StrokeThickness = 1
			};
			zero.Points.Add( new DataPoint( left, 0 ) );
			zero.Points.Add( new DataPoint( right, 0 ) );
			plot.Series.Add( zero );

			var func = new FunctionSeries( x => empirical.Density( x ), left, right, 1e-2 );
			plot.Series.Add( func );

			foreach ( var border in q.Borders )
			{
				var line = new LineSeries
				{
					LineStyle = LineStyle.Dash,
					Color = OxyColor.FromRgb( 0, 0, 0 ),
					StrokeThickness = 1
				};
				line.Points.Add( new DataPoint( border, 3e-1 ) );
				line.Points.Add( new DataPoint( border, -3e-2 ) );
				plot.Series.Add( line );
			}

			foreach ( var code in q.Codes )
			{
				var line = new LineSeries
				{
					LineStyle = LineStyle.Dash,
					Color = OxyColor.FromRgb( 140, 140, 140 ),
					StrokeThickness = 0.5
				};
				line.Points.Add( new DataPoint( code, 3e-1 ) );
				line.Points.Add( new DataPoint( code, -3e-2 ) );
				plot.Series.Add( line );
			}

			var codes = from code in q.Codes
			            select new ScatterPoint( code, empirical.Density( code ) );

			var points = new ScatterSeries
			{
				MarkerType = MarkerType.Circle,
				MarkerStroke = OxyColor.FromRgb( 2, 133, 230 )/*( 255, 0, 0 )*/,
				MarkerFill = OxyColor.FromRgb( 2, 133, 230 )/*( 255, 115, 41 )*/
			};
			points.Points.AddRange( codes );

			plot.Series.Add( points );

			PlotView.Model = plot;
		}