Example #1
        public void TestLogLogisticFeaturedLL()
        {
            DataGen dg = new DataGen();

            dg.GenTrivFeaturesData();
            double shapeMax = 5.0;
            double scaleMax = 900.0;

            LogLogistic modelLogLogisticFeatured = new LogLogistic(dg.organicRecoveryDurations,
                                                                   dg.inorganicRecoverydurations,
                                                                   dg.fSamples, dg.fCensored);

            modelLogLogisticFeatured.ShapeUpperBound = shapeMax;
            modelLogLogisticFeatured.ScaleUpperBound = scaleMax;

            double[,] warr = new double[2, 2] {
                { 1, 1 },
                { 1, 1 }
            };
            Matrix <double> w = Matrix <double> .Build.DenseOfArray(warr);

            var loglik = modelLogLogisticFeatured.LogLikelihood(w, dg.fSamples, dg.fCensored);

            System.Console.WriteLine("LogLikelihood is: " + loglik.ToString());
            Assert.IsTrue(Math.Abs(loglik + 55.83229) < 1e-3);
        }
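For reference, the quantity checked above is the standard censored log-likelihood of the log-logistic distribution: observed recovery durations contribute log-density terms, and censored ones contribute log-survival terms. Below is a minimal sketch of that computation for a fixed shape kappa and scale lambda; it is an assumption about what LogLikelihood evaluates (the featured model presumably derives per-row kappa and lambda from the feature matrices through the sigmoid bounds set above), and the helper name is hypothetical.

        // Sketch only (hypothetical helper, not the library code):
        // log L = sum over observed t of ln f(t; kappa, lambda) + sum over censored x of ln S(x; kappa, lambda),
        // with f(t) = (kappa/lambda) * (t/lambda)^(kappa-1) / (1 + (t/lambda)^kappa)^2
        // and  S(x) = 1 / (1 + (x/lambda)^kappa).
        public static double CensoredLogLogisticLogLik(IEnumerable<double> observed,
                                                       IEnumerable<double> censored,
                                                       double kappa, double lambda)
        {
            double ll = 0.0;
            foreach (var t in observed)
            {
                double logRatio = Math.Log(t / lambda);
                ll += Math.Log(kappa / lambda) + (kappa - 1) * logRatio
                      - 2 * Math.Log(1 + Math.Exp(kappa * logRatio));
            }
            foreach (var x in censored)
            {
                ll -= Math.Log(1 + Math.Pow(x / lambda, kappa));
            }
            return ll;
        }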
Example #2
        public void TestLogLogisticFeaturedGrad()
        {
            DataGen dg = new DataGen();

            dg.GenTrivFeaturesData();
            double shapeMax = 5.0;
            double scaleMax = 900.0;

            LogLogistic modelLogLogisticFeatured = new LogLogistic(dg.organicRecoveryDurations,
                                                                   dg.inorganicRecoverydurations,
                                                                   dg.fSamples, dg.fCensored);

            modelLogLogisticFeatured.ShapeUpperBound = shapeMax;
            modelLogLogisticFeatured.ScaleUpperBound = scaleMax;

            double[,] warr = new double[2, 2] {
                { 1, 1 },
                { 1, 1 }
            };
            Matrix <double> w = Matrix <double> .Build.DenseOfArray(warr);

            var grd      = modelLogLogisticFeatured.GradLL2(w, dg.fSamples, dg.fCensored);
            var grd_numr = modelLogLogisticFeatured.NumericalGradLL(w);

            System.Console.WriteLine("Gradient first component:" + grd[0, 0].ToString());
            Assert.IsTrue(Math.Abs(grd[0, 0] - grd_numr[0, 0]) < 1e-3);
        }
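NumericalGradLL is presumably a finite-difference approximation used to cross-check the analytical gradient returned by GradLL2. A hypothetical central-difference version for a MathNet.Numerics weight matrix could look like the sketch below; the name and signature are illustrative, not the library's API.

        // Hypothetical gradient check: central differences of any scalar objective of the weights.
        public static Matrix<double> NumericalGradient(Func<Matrix<double>, double> logLik,
                                                       Matrix<double> w, double eps = 1e-5)
        {
            var grad = Matrix<double>.Build.Dense(w.RowCount, w.ColumnCount);
            for (int i = 0; i < w.RowCount; i++)
            {
                for (int j = 0; j < w.ColumnCount; j++)
                {
                    var wPlus  = w.Clone();
                    var wMinus = w.Clone();
                    wPlus[i, j]  += eps;
                    wMinus[i, j] -= eps;
                    grad[i, j] = (logLik(wPlus) - logLik(wMinus)) / (2 * eps);
                }
            }
            return grad;
        }

Invoked as, say, NumericalGradient(wm => modelLogLogisticFeatured.LogLikelihood(wm, dg.fSamples, dg.fCensored), w), it should agree with the analytical gradient to within the 1e-3 tolerance asserted above.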
Example #3
        public void GenLogLogisticWFeatures()
        {
            LogLogistic   ll        = new LogLogistic(1.2, 300.0);
            List <double> llSamples = ll.GenerateSample(2000);

            var censorLvl = llSamples.Sum() / llSamples.Count();
            var t         = CensorList(llSamples, censorLvl);

            int n_x = llSamples.Count(ti => ti > censorLvl);

            var x = NPOnes(n_x, censorLvl);

            List <double[]> fSamplesData  = new List <double[]>();
            List <double[]> fCensoredData = new List <double[]>();

            for (int i = 0; i < t.Count(); i++)
            {
                fSamplesData.Add(new double[] { 1.0, 2.0, 3.0 });
            }

            for (int i = 0; i < x.Count(); i++)
            {
                fCensoredData.Add(new double[] { 1.0, 2.0, 3.0 });
            }

            ll        = new LogLogistic(0.7, 80.0);
            llSamples = ll.GenerateSample(2000);

            censorLvl = llSamples.Sum() / llSamples.Count();
            var t1 = CensorList(llSamples, censorLvl);

            AppendToLst(t, t1);

            n_x = llSamples.Count(ti => ti > censorLvl);
            var x1 = NPOnes(n_x, censorLvl);

            AppendToLst(x, x1);

            for (int i = 0; i < t1.Count(); i++)
            {
                fSamplesData.Add(new double[] { 1.0, 4.0, 2.0 });
            }

            for (int i = 0; i < x1.Count(); i++)
            {
                fCensoredData.Add(new double[] { 1.0, 4.0, 2.0 });
            }

            this.organicRecoveryDurations   = t;
            this.inorganicRecoverydurations = x;

            Matrix <double> fSamples = Matrix <double> .Build.DenseOfArray(CreateRectangularArray(fSamplesData));

            Matrix <double> fCensored = Matrix <double> .Build.DenseOfArray(CreateRectangularArray(fCensoredData));

            this.fSamples  = fSamples;
            this.fCensored = fCensored;
        }
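CensorList, NPOnes, AppendToLst, and CreateRectangularArray are DataGen helpers whose bodies are not shown in these examples. Judging by how they are called, CensorList keeps the durations observed before the censoring level, NPOnes builds n copies of a value (the censored observations recorded at the censoring level), AppendToLst concatenates two lists in place, and CreateRectangularArray turns a list of equally sized rows into a rectangular array. The sketches below are inferences from that usage, not the actual implementations.

        // Hypothetical helpers inferred from usage above; the real DataGen versions may differ.
        public static List<double> CensorList(List<double> samples, double censorLvl)
        {
            // Keep only the durations that finished before the censoring level.
            return samples.Where(s => s <= censorLvl).ToList();
        }

        public static List<double> NPOnes(int n, double value)
        {
            // n copies of the same value, in the spirit of numpy.ones(n) * value.
            return Enumerable.Repeat(value, n).ToList();
        }

        public static void AppendToLst(List<double> target, List<double> source)
        {
            target.AddRange(source);
        }

        public static double[,] CreateRectangularArray(List<double[]> rows)
        {
            var result = new double[rows.Count, rows[0].Length];
            for (int i = 0; i < rows.Count; i++)
            {
                for (int j = 0; j < rows[i].Length; j++)
                {
                    result[i, j] = rows[i][j];
                }
            }
            return result;
        }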
Example #4
        public void TestLogLogisticWFeatures()
        {
            DataGen dg = new DataGen();

            dg.GenLogLogisticWFeatures();

            double shapeMax = 5.0;
            double scaleMax = 500.0;

            double[]        arr  = new double[] { 1.0, 150.0 };
            Vector <double> init = Vector <double> .Build.DenseOfArray(arr);

            LogLogistic modelLogLogistic = new LogLogistic(dg.organicRecoveryDurations,
                                                           dg.inorganicRecoverydurations);

            modelLogLogistic.GradientDescent(init);

            Console.WriteLine("LL without features is " +
                              modelLogLogistic.LogLikelihood(modelLogLogistic.Kappa, modelLogLogistic.Lambda) +
                              " with Kappa " + modelLogLogistic.Kappa + " and Lambda " + modelLogLogistic.Lambda);

            double[,] warr = new double[2, dg.fCensored.ColumnCount];
            warr[0, 0]     = Sigmoid.InverseSigmoid(modelLogLogistic.Kappa, shapeMax);
            warr[1, 0]     = Sigmoid.InverseSigmoid(modelLogLogistic.Lambda, scaleMax);
            Matrix <double> w = Matrix <double> .Build.DenseOfArray(warr);

            LogLogistic modelLogLogisticFeatured = new LogLogistic(dg.organicRecoveryDurations,
                                                                   dg.inorganicRecoverydurations,
                                                                   dg.fSamples, dg.fCensored);

            modelLogLogisticFeatured.ShapeUpperBound = shapeMax;
            modelLogLogisticFeatured.ScaleUpperBound = scaleMax;
            Matrix <double> logLogisticParameters = modelLogLogisticFeatured.GradientDescent(w, 2001);
            Vector <double> frstSample            = Vector <double> .Build.DenseOfArray(
                new double[] { 1.0, 2.0, 3.0 });

            Vector <double> scndSample = Vector <double> .Build.DenseOfArray(
                new double[] { 1.0, 4.0, 2.0 });

            // Recover the (shape, scale) implied by the first group's feature vector { 1, 2, 3 }.
            Vector <double> res         = logLogisticParameters.Multiply(frstSample);
            var             alpha_shape = res[0];
            var             shape       = Sigmoid.Transform(alpha_shape, shapeMax);
            var             alpha_scale = res[1];
            var             scale       = Sigmoid.Transform(alpha_scale, scaleMax);

            // Recover the (shape, scale) implied by the second group's feature vector { 1, 4, 2 };
            // that group was generated with scale 80.0, which the assertion below checks.
            res         = logLogisticParameters.Multiply(scndSample);
            alpha_shape = res[0];
            shape       = Sigmoid.Transform(alpha_shape, shapeMax);
            alpha_scale = res[1];
            scale       = Sigmoid.Transform(alpha_scale, scaleMax);
            Assert.IsTrue(Math.Abs(scale - 80.0) < 2.0);
        }
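Sigmoid.Transform and Sigmoid.InverseSigmoid keep the recovered shape and scale inside (0, upperBound): the featured model's linear predictor is squashed through a scaled sigmoid, and the warm start above inverts that squashing for the parameters fitted without features. The pair below is the assumed form of that re-parameterization; the repo's actual Sigmoid class may be parameterized differently.

        // Assumed bounded re-parameterization (illustrative, not necessarily the repo's Sigmoid class):
        // Transform maps an unconstrained alpha into (0, upperBound);
        // InverseSigmoid recovers alpha from a value already inside that interval.
        public static double Transform(double alpha, double upperBound)
        {
            return upperBound / (1.0 + Math.Exp(-alpha));
        }

        public static double InverseSigmoid(double value, double upperBound)
        {
            return Math.Log(value / (upperBound - value));
        }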
Example #5
        public void TestMixedLogLogisticFitting()
        {
            LogLogistic   lg         = new LogLogistic(1.2, 300.0);
            List <double> llSamples  = lg.GenerateSample(5000);
            LogLogistic   lg1        = new LogLogistic(0.7, 80.0);
            List <double> llSamples1 = lg1.GenerateSample(5000);

            DataGen.AppendToLst(llSamples, llSamples1);
            var           ti = llSamples;
            List <double> xi = new List <double> {
                0.1
            };
            LogLogistic llModel = new LogLogistic(ti, xi);

            double[]        arr  = new double[] { 0.8, 150.0 };
            Vector <double> init = Vector <double> .Build.DenseOfArray(arr);

            llModel.GradientDescent(init);
            Debug.WriteLine("Model completed!");
            Assert.IsTrue(Math.Abs(llModel.Kappa - 0.83) < 1e-1);
        }
Example #6
        public void TestLogLogisticFitting()
        {
            LogLogistic lg         = new LogLogistic(1.2, 300);
            var         lgSamples  = lg.GenerateSample(3000);
            var         lgCensored = new List <double> {
                0.1, 0.1
            };

            double[]        arr  = new double[] { 0.5, 12.0 };
            Vector <double> init = Vector <double> .Build.DenseOfArray(arr);

            LogLogistic lg2 = new LogLogistic(lgSamples, lgCensored);

            lg2.GradientDescent(init);

            Console.WriteLine(lg2.Kappa);

            // The unbiased estimator of the log-logistic shape parameter has a high
            // variance, so we have to set the bar low here.
            Assert.IsTrue(Math.Abs(lg2.Kappa - 1.2) < 1e-1);
        }
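The unfeatured GradientDescent(init) calls in these tests presumably climb the censored log-likelihood from the supplied (kappa, lambda) starting point and store the optimum in Kappa and Lambda. The sketch below is a deliberately naive, assumption-laden version of such a fit; the library's optimizer will differ in update rule, step sizes, and stopping criterion.

        // Sketch only: naive gradient ascent on a (kappa, lambda) log-likelihood via central differences.
        // In practice the two parameters need separate step sizes, since lambda can be orders of
        // magnitude larger than kappa.
        public static (double Kappa, double Lambda) FitByGradientAscent(
            Func<double, double, double> logLik, double kappa0, double lambda0,
            double stepKappa = 1e-4, double stepLambda = 1e-1,
            int iterations = 2000, double eps = 1e-6)
        {
            double kappa = kappa0, lambda = lambda0;
            for (int i = 0; i < iterations; i++)
            {
                double gradKappa  = (logLik(kappa + eps, lambda) - logLik(kappa - eps, lambda)) / (2 * eps);
                double gradLambda = (logLik(kappa, lambda + eps) - logLik(kappa, lambda - eps)) / (2 * eps);
                kappa  += stepKappa * gradKappa;
                lambda += stepLambda * gradLambda;
            }
            return (kappa, lambda);
        }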