/** Method: Constructor
*  maxIt - maximum number of Monte Carlo iterations to run */
 internal MontecarloConv(int maxIt)
 {
     rg       = new RndGenerator();
     convData = new List <double>();
     hist     = new Histogram(100);
     de       = new KernelDensity(1, 100, 100);

     // Bug fix: the maxIt argument was previously ignored and the field
     // hard-coded to 2000, so callers could not control the iteration count.
     // Honor the argument; callers that want the old behavior pass 2000.
     this.maxIt = maxIt;
 }
        /// <summary>
        /// Diversity score for the Disabilities dimension, returned as a rounded
        /// percentage derived from the spread of per-category head counts.
        /// NOTE(review): unlike DiversityCivilState/DiversityAge this method uses
        /// (1 - kernel) — confirm whether the inversion is intentional.
        /// </summary>
        public double DiversityDisabilities()
        {
            var countsPerCategory = new List <double>();

            foreach (Disabilities category in Enum.GetValues(typeof(Disabilities)))
            {
                double headCount = this.employees.Count(x => x.disabilities == category);
                countsPerCategory.Add(headCount);
            }

            double spread = Statistics.StandardDeviation(countsPerCategory);
            double score  = (1 - KernelDensity.GaussianKernel(1 / spread)) * 100;

            return Math.Round(score);
        }
        /// <summary>
        /// Diversity score for the CivilState dimension, returned as a rounded
        /// percentage: a Gaussian kernel evaluated at 1 / stddev of the
        /// per-category employee head counts.
        /// </summary>
        public double DiversityCivilState()
        {
            var countsPerCategory = new List <double>();

            foreach (CivilState category in Enum.GetValues(typeof(CivilState)))
            {
                double headCount = this.employees.Count(x => x.civil_State == category);
                countsPerCategory.Add(headCount);
            }

            double spread = Statistics.StandardDeviation(countsPerCategory);

            return Math.Round(KernelDensity.GaussianKernel(1 / spread) * 100);
        }
        // ---- Example #4 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Gaussian KDE with bandwidth 2.0 reproduces the reference densities
        /// at three evaluation points.
        /// </summary>
        public void KDETestGaussianKernelBandwidth2()
        {
            var cases = new[]
            {
                (X: -3.5d, Expected: 0.046875864115900),
                (X: 0.0d,  Expected: 0.186580447512078),
                (X: 2.0d,  Expected: 0.123339405007761),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateGaussian(x, 2.0d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        // ---- Example #5 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Gaussian KDE with bandwidth 0.5 reproduces the reference densities
        /// at three evaluation points.
        /// </summary>
        public void KDETestGaussianKernelBandwidth0p5()
        {
            var cases = new[]
            {
                (X: -3.5d, Expected: 5.311490430807364e-007),
                (X: 0.0d,  Expected: 0.369994803886827),
                (X: 2.0d,  Expected: 0.032447347007482),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateGaussian(x, 0.5d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        // ---- Example #6 (code-search listing separator; score: 0) ----
        /** Method: Constructor
        *  maxIt   - maximum number of Monte Carlo iterations
        *  normMin - threshold below which the normal approximation is not used */
        internal MontecarloGenericConv(int maxIt, int normMin)
        {
            this.maxIt   = maxIt;
            this.normMin = normMin;

            rg            = new RndGenerator();
            nd            = new NormalDistrib();
            de            = new KernelDensity(1, 100, 100);
            distributions = new List <Distribution>();

            // -1 marks "not yet computed" for both summary statistics.
            this.mean  = -1;
            this.stDev = -1;
        }
        // ---- Example #7 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Uniform-kernel KDE with bandwidth 1.0: kernel height at 0 is 1/2,
        /// and the estimates match the reference densities at three points.
        /// </summary>
        public void KDETestUniformKernelBandwidth1()
        {
            Assert.AreEqual(0.5d, KernelDensity.UniformKernel(0));

            var cases = new[]
            {
                (X: -3.5d, Expected: 0.0),
                (X: 0.0d,  Expected: 0.35),
                (X: 2.0d,  Expected: 0.1),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateUniform(x, 1.0d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        // ---- Example #8 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Triangular-kernel KDE with bandwidth 1.0: kernel height at 0 is 1,
        /// and the estimates match the reference densities at three points.
        /// </summary>
        public void KDETestTriangularKernelBandwidth1()
        {
            Assert.AreEqual(1.0d, KernelDensity.TriangularKernel(0));

            var cases = new[]
            {
                (X: -3.5d, Expected: 0.0),
                (X: 0.0d,  Expected: 0.347688490533868),
                (X: 2.0d,  Expected: 0.004216757636608),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateTriangular(x, 1.0d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        // ---- Example #9 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Epanechnikov-kernel KDE with bandwidth 1.0: kernel height at 0 is 3/4,
        /// and the estimates match the reference densities at three points.
        /// </summary>
        public void KDETestEpanechnikovKernelBandwidth1()
        {
            Assert.AreEqual(0.75d, KernelDensity.EpanechnikovKernel(0));

            var cases = new[]
            {
                (X: -3.5d, Expected: 0.0),
                (X: 0.0d,  Expected: 0.353803214812608),
                (X: 2.0d,  Expected: 0.006248168996717),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateEpanechnikov(x, 1.0d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        /// <summary>
        /// Diversity score for the Age dimension, returned as a rounded
        /// percentage: a Gaussian kernel evaluated at 1 / stddev of the
        /// per-category employee head counts.
        /// NOTE(review): when every category has the same count the stddev is 0
        /// and 1/0 yields +Infinity — presumably the kernel then evaluates to 0;
        /// confirm this degenerate case is acceptable.
        /// </summary>
        public double DiversityAge()
        {
            var countsPerCategory = new List <double>();

            foreach (Age category in Enum.GetValues(typeof(Age)))
            {
                double headCount = this.employees.Count(x => x.age == category);
                countsPerCategory.Add(headCount);
            }

            double spread = Statistics.StandardDeviation(countsPerCategory);

            return Math.Round(KernelDensity.GaussianKernel(1 / spread) * 100);
        }
        // ---- Example #11 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Gaussian KDE with bandwidth 1.0: the kernel at 0 equals the standard
        /// normal density there, and the estimates match the reference densities
        /// at three evaluation points.
        /// </summary>
        public void KDETestGaussianKernelBandwidth1()
        {
            // Density of the standard normal distribution at 0 (1/sqrt(2*pi)).
            AssertHelpers.AlmostEqualRelative(0.398942280401433, KernelDensity.GaussianKernel(0), 10);

            var cases = new[]
            {
                (X: -3.5d, Expected: 0.004115405028907),
                (X: 0.0d,  Expected: 0.310485907659139),
                (X: 2.0d,  Expected: 0.099698581377801),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.EstimateGaussian(x, 1.0d, _testData);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        // ---- Example #12 (code-search listing separator; score: 0) ----
        /// <summary>
        /// KDE with a caller-supplied Laplace (double-exponential) kernel,
        /// bandwidth 1.0: kernel height at 0 is 1/2, and the estimates match
        /// the reference densities at three evaluation points.
        /// </summary>
        public void KDETestCustomKernelBandwidth1()
        {
            // Laplace kernel: (1/2) * e^{-|x|}.
            Func<double, double> laplaceKernel = x => 0.5d * Math.Exp(-Math.Abs(x));

            Assert.AreEqual(0.5d, laplaceKernel(0));

            var cases = new[]
            {
                (X: -3.5d, Expected: 0.018396636706009),
                (X: 0.0d,  Expected: 0.272675897096678),
                (X: 2.0d,  Expected: 0.092580285110347),
            };

            foreach (var (x, expected) in cases)
            {
                var estimate = KernelDensity.Estimate(x, 1.0d, _testData, laplaceKernel);
                AssertHelpers.AlmostEqualRelative(expected, estimate, 10);
            }
        }
        /** Method: Load data for calculation
        *  data - raw samples from the time series (must be non-empty)
        *  n    - number (possibly fractional) of convolutions; the fractional
        *         part is blended in as a weighted extra sample
        *
        *  Uses antithetic sampling: each iteration draws one set of random
        *  indexes and records both the forward sum and the sum over the
        *  mirrored indexes (data.Count - 1 - index), so maxIt/2 iterations
        *  produce maxIt convolution samples. */
        void IConvolution.LoadData(List <double> data, double n)
        {
            if (data.Count == 0)
            {
                // ArgumentException identifies the offending parameter; it still
                // derives from Exception, so existing catch blocks keep working.
                throw new ArgumentException("Error. No data", nameof(data));
            }

            int    nInt = (int)n;   // whole convolutions (was an int-vs-double loop bound)
            double res  = n - nInt; // fractional remainder, weighted into each sum

            this.n = n;
            convData.Clear();
            hist.Clear();
            de = new KernelDensity(1, 100, 100);

            int [] indexes  = new int[nInt + 1];
            int    indexRes = -1;
            double sum;

            for (int i = 0; i < maxIt / 2; i++)
            {
                // Forward pass: sum nInt random samples (+ weighted fractional sample).
                sum = 0;
                for (int j = 0; j < nInt; j++)
                {
                    indexes[j] = rg.NextInt(0, data.Count - 1);
                    sum       += data[indexes[j]];
                }
                if (res > 0)
                {
                    indexRes = rg.NextInt(0, data.Count - 1);
                    sum     += data[indexRes] * res;
                }
                convData.Add(sum);

                // Antithetic pass: reuse the same indexes mirrored around the
                // data midpoint — no additional random draws.
                sum = 0;
                for (int j = 0; j < nInt; j++)
                {
                    sum += data[data.Count - 1 - indexes[j]];
                }
                if (res > 0)
                {
                    sum += data[data.Count - 1 - indexRes] * res;
                }
                convData.Add(sum);
            }

            hist.LoadData(convData);
            de.LoadHist(hist);
            de.SetMaxInt();
        }
        // ---- Example #14 (code-search listing separator; score: 0) ----
        /// <summary>
        /// Monte Carlo convolution over all loaded distributions: for each of
        /// maxIt iterations, bootstrap-samples every distribution's histogram
        /// raw data, records factor-weighted sums into convData, then rebuilds
        /// the kernel-density estimate (de) from the resulting histogram.
        /// </summary>
        private void LoadMontConvolution()
        {
            List <double> convData = new List <double>();
            Histogram     convHist = new Histogram();

            double sum;

            for (int i = 0; i < maxIt; i++)
            {
                sum = 0;
                // Refresh each distribution's raw data from its histogram.
                // NOTE(review): this runs on every iteration of i; if GetRawData()
                // is deterministic it is loop-invariant and could be hoisted —
                // confirm whether per-iteration re-sampling is intended.
                foreach (Distribution dist in distributions)
                {
                    dist.RawData = dist.Histogram.GetRawData();
                }
                foreach (Distribution dist in distributions)
                {
                    // Split lead times into whole draws plus a fractional remainder
                    // that is blended in as a weighted extra sample.
                    int    nLeadTimesInt = (int)dist.NLeadTimes;
                    double nLeadTimesRes = dist.NLeadTimes - nLeadTimesInt;
                    int[]  indexes       = new int[(int)dist.NLeadTimes + 1];
                    int    indexRes      = -1;

                    // NOTE(review): indexes[] is written but never read back here
                    // (unlike the antithetic variant in LoadData) — looks vestigial.
                    for (int j = 1; j <= nLeadTimesInt; j++)
                    {
                        indexes[j - 1] = rg.NextInt(0, dist.RawData.Count - 1);
                        sum           += dist.RawData[indexes[j - 1]];
                    }
                    if (nLeadTimesRes > 0)
                    {
                        indexRes = rg.NextInt(0, dist.RawData.Count - 1);
                        sum      = sum + dist.RawData[indexRes] * nLeadTimesRes;
                    }
                    // NOTE(review): sum is NOT reset between distributions, so each
                    // Add records the running total across all distributions so far,
                    // scaled by the current dist.Factor — confirm this is intentional.
                    convData.Add(sum * dist.Factor);
                }
            }

            convHist.LoadData(convData);
            de = new KernelDensity(1, 100, 100);
            de.LoadHist(convHist);
            de.SetMaxInt();
        }