} // end of smoothed.area.estimate

//random.pow < -function(a, range)
/// <summary>
/// Samples one value from the density f(x) proportional to 1/x^a on the
/// specified range, by inverse-CDF sampling (port of the R function random.pow).
/// </summary>
/// <param name="a">Power-law exponent; a == 1 is the log-uniform special case.</param>
/// <param name="range">Two-element array {lower, upper} delimiting the support.
/// An infinite upper bound is clamped to 1E8 when a == 1.</param>
/// <returns>One draw from the distribution.</returns>
public static double RandomPow(int a, double[] range)
{
    //# sample a value from f(x) = 1/x^a on the range specified
    double z;
    double U = UniformDistribution.RUnif();
    // Work on local copies of the bounds. The previous version wrote the
    // clamped upper bound back into the caller's array (range[1] = 1E8) — a
    // porting bug, since the R original cannot mutate its argument.
    double lower = range[0];
    double upper = range[1];
    if (a == 1)
    {
        // f(x) = 1/x: invert the CDF in log space;
        // z = lower * (upper/lower)^U.
        if (double.IsInfinity(upper))
        {
            upper = 1E8; // finite stand-in for an unbounded support
        }
        double[] bounds = Tools.Combine(lower, upper);
        z = Diff(Tools.Combine(U - 1, U).Multiply(bounds.Log())).Exp()[0];
    }
    else
    {
        // General case: the CDF is proportional to x^(1-a); interpolate the
        // cumulative values at the two bounds and invert.
        double[] fCum = Tools.Combine(Math.Pow(lower, (1 - a)), Math.Pow(upper, (1 - a)));
        double tmp = fCum.Multiply(Tools.Combine(1.0 - U, U)).Sum();
        z = Math.Pow(tmp, (1.0 / (1.0 - a)));
    }
    return (z);
} //# end of random.pow
} //# end of rnorm.right.censored

/// <summary>
/// Draws one value per entry of <paramref name="upper"/> from a Normal(mu, sd)
/// distribution truncated on the right at the corresponding upper bound.
/// The inverse-CDF sampling is carried out on the log-probability scale for
/// numerical stability in the far tail.
/// Note: this function was incorrectly named rnorm.left.censored in previous versions.
/// </summary>
/// <param name="mu">Mean of the underlying normal (scalar, applied to every element).</param>
/// <param name="sd">Standard deviation of the underlying normal (scalar).</param>
/// <param name="upper">Right-censoring limits; one draw is produced per limit.</param>
/// <returns>Array of draws, each bounded above by its censoring limit.</returns>
private static double[] RNormRightCensored(double mu, double sd, double[] upper)
{
    // U ~ Uniform(0,1); log(U * Phi(upper)) = log(U) + log(Phi(upper)).
    double[] logU = UniformDistribution.RUnif(upper.Length).Log();
    double[] logCdfAtUpper = NormalDistribution.PNorm(upper, mu: mu, sigma: sd, log_p: true);
    double[] logP = logU.Add(logCdfAtUpper);
    // Map the scaled probabilities back through the normal quantile function.
    double[] draws = NormalDistribution.QNorm(logP, mu: mu, sigma: sd, log_p: true);
    return (draws);
} //# end of rnorm.right.censored
/* R file: random.c
 * R function name: ProbSampleNoReplace
 * Draws, from 0..weights.Length-1, a sequence of 'sampleSize' distinct
 * integers, with selection probability proportional to the normalized weights
 * (weighted sampling without replacement).
 */
public int[] GetSample()
{
    int total = weights.Length;

    // Normalize a private copy of the weights; the field itself is untouched.
    double[] probs = new double[total];
    Array.Copy(weights, probs, total);
    FixUpProb(probs, sampleSize, false);

    int[] indices = new int[total];
    for (int idx = 0; idx < total; idx++)
    {
        indices[idx] = idx;
    }

    // Sort probabilities in descending order, carrying the indices along so
    // the cumulative scan below hits the heavy items first.
    Array.Sort(probs, indices, Zygotine.Util.DescendingComparer.Desc);

    int[] selected = new int[sampleSize];
    double remainingMass = 1.0;
    int active = total - 1; // index of the last still-selectable slot
    for (int drawn = 0; drawn < selected.Length; drawn++, active--)
    {
        double target = remainingMass * UniformDistribution.RUnif();

        // Walk the cumulative mass until it passes the target; if the scan
        // falls off the end (rounding), the last active entry is taken.
        double cumulative = 0.0;
        int hit = 0;
        while (hit < active)
        {
            cumulative += probs[hit];
            if (target <= cumulative)
            {
                break;
            }
            hit++;
        }

        selected[drawn] = indices[hit];
        remainingMass -= probs[hit];

        // Compact both arrays over the chosen entry so it cannot be redrawn.
        for (int shift = hit; shift < active; shift++)
        {
            probs[shift] = probs[shift + 1];
            indices[shift] = indices[shift + 1];
        }
    }
    return (selected);
}
} //# end of rnorm.censored

/// <summary>
/// Draws one value, on the log scale, uniformly between exp(logPLim[0]) and
/// exp(logPLim[1]). Assumes logPLim has length 2 with logPLim[1] > logPLim[0].
/// The computation routes through the exponential CDF/quantile pair so the
/// subtraction stays accurate for very small probabilities.
/// </summary>
/// <param name="logPLim">Log-probability limits (length 2).</param>
/// <param name="lowerTail">When false, the interval width is forced positive
/// via Math.Abs (presumably for upper-tail limits given in reverse order —
/// TODO confirm against the R original).</param>
/// <param name="u">Optional pre-drawn Uniform(0,1) value; drawn internally when NaN.</param>
/// <returns>The log of the sampled probability.</returns>
internal static double RUnifLogP1(double[] logPLim, bool lowerTail = true, double u = double.NaN)
{
    if (double.IsNaN(u))
    {
        u = UniformDistribution.RUnif();
    }

    //# To sample 'size' values uniformly in c(exp(logp.lim[1]), exp(logp.lim[2]))
    double width = logPLim[1] - logPLim[0];
    if (!lowerTail)
    {
        width = Math.Abs(width);
    }

    // log(u) + log(1 - exp(-width)), then invert through the exponential
    // quantile function to get the offset below the upper limit.
    double logP = Math.Log(u) + ExponentialDistribution.PExp(width, logP: true);
    double offset = ExponentialDistribution.QExp(logP, logP: true);
    return (logPLim.Max() - offset);
}
}// end constructor

/*
 * Although the method is internal, it can be invoked from a program external
 * to the library (via InternalsVisibleTo — TODO confirm). The only method
 * that should invoke Run is Model.Compute, which will only do so if the
 * model has been judged valid.
 *
 * Gibbs-style MCMC loop: alternately samples true values / censored outcomes,
 * sigma (via inverse-CDF on its conditional), mu (truncated-normal or plain
 * normal conditional), and the measurement-error parameter, recording burn-in
 * and thinned posterior chains.
 */
internal override void Run()
{
    SGNFnAParam localA = null;
    GenObject oTV = null;     // generator for true values (measurement-error models)
    GenObject oMu = null;     // generator for mu (truncated-data case only)
    GenObject oSigma = null;  // generator for sigma's conditional
    GenObject oME = null;     // generator for the measurement-error parameter
    YGen genY = YGen.EmptyInstance;
    TrueValuesGen genTV = null;
    double[] burninMu;
    double[] burninSigma;
    double[] burninCV = null;
    double[] sampleMu;
    double[] sampleSigma;
    double[] sampleCV = null;
    double mu;
    double sigma;
    int iter = -1, savedIter; // iter is read by the catch block for error reporting
    double muCondMean;
    double yBar;
    double muCondSD;
    double[] pLim = new double[2];
    double p;
    double[] muLim = new double[] { this.MuLower, this.MuUpper };
    double logSigmaSD;
    try
    {
        logSigmaSD = 1 / Math.Sqrt(this.LogSigmaPrec);
        if (ME.Any)
        {
            if (ME.ThroughCV)
            {
                if (OutcomeIsLogNormallyDistributed)
                {
                    oTV = new TrueValue_CV_LogN_GenObject();
                }
                else
                {
                    oTV = new TrueValue_CV_Norm_GenObject();
                }
            }
            else
            {
                // SD-based measurement error: generator intentionally disabled here.
                //oTV = new TrueValue_SD_GenObject();
            }
        }

        //# modif_0.12
        // Past (informative-prior) data contributes to the effective sample size.
        int combinedN = this.Data.N + (this.PastData.Defined ? PastData.N : 0);
        if (ME.ThroughCV && !OutcomeIsLogNormallyDistributed)
        {
            oMu = new MuTruncatedData_GenObject(combinedN); //# modif_0.12
            oSigma = GenObject.GetSigmaTruncatedDataLNormGenObject(combinedN, this.LogSigmaMu, logSigmaSD); //# modif_0.12
        }
        else
        {
            oSigma = GenObject.GetSigmaGenObject(combinedN, this.LogSigmaMu, logSigmaSD); //# modif_0.12
        }
        localA = oSigma.A.Clone();
        if (ME.Any && !ME.Known)
        {
            oME = GenObject.GetMeGenObject(this.ME, this.OutcomeIsLogNormallyDistributed, this.Data.N);
        }

        int nIterations = NBurnin + NIter * NThin;

        // Arrays backing the MCMC chains.
        sampleMu = Result.Chains.GetChain("muSample");
        sampleSigma = Result.Chains.GetChain("sdSample");
        burninMu = Result.Chains.GetChain("muBurnin");
        burninSigma = Result.Chains.GetChain("sdBurnin");
        if (ME.ThroughCV)
        {
            sampleCV = Result.Chains.GetChain("cvSample");
            burninCV = Result.Chains.GetChain("cvBurnin");
        }
        // NOTE(review): burninCV/sampleCV are only allocated when ME.ThroughCV;
        // the stores below run whenever ME.Any && !ME.Known. If that can hold
        // without ThroughCV this would throw NullReferenceException — confirm
        // the ME invariants upstream.

        bool inestimableLowerLimit = false;

        // Initial values for mu and sigma
        mu = InitMu;
        sigma = InitSigma;
        savedIter = 0; // index into the thinned posterior sample arrays

        if (this.Data.AnyCensored)
        {
            genY = YGen.Inits(this.Data, mu, sigma, meThroughCV: this.ME.ThroughCV, logNormalDistrn: OutcomeIsLogNormallyDistributed);
        }
        if (ME.Any)
        {
            ME.Parm = ME.InitialValue;
        }

        // Main MCMC loop
        for (iter = 0; iter < nIterations; iter++)
        {
            // 1) Sample latent true values (measurement-error models only).
            if (ME.Any)
            {
                genTV = TrueValuesGen.GetInstance(genY, this.Data, mu, sigma, this.ME, logNormalDistrn: OutcomeIsLogNormallyDistributed, o: oTV);
            }

            // 2) Impute censored outcomes.
            if (this.Data.AnyCensored)
            {
                //y.gen(true.values, data, sigma, me, outcome.is.logNormally.distributed, mu=mu)
                // We take no account of true.values, nor of me ...
                genY = YGen.GetInstance(this.ME, genTV, this.Data, mu, sigma, OutcomeIsLogNormallyDistributed);
            }

            // 3) Sufficient statistics for the conditionals of mu and sigma.
            OutLogoutMoments moments = OutLogoutMoments.Get(this.ME.Any, this.OutcomeIsLogNormallyDistributed, this.Data, genY, genTV);

            // sigmaBeta = sum((y - mu)^2) / 2, augmented with past-data terms.
            double sigmaBeta = (moments.Sum2 - 2 * mu * moments.Sum + this.Data.N * mu * mu) / 2.0;
            if (PastData.Defined)
            {
                sigmaBeta = sigmaBeta + PastData.N / 2.0 * Math.Pow(PastData.Mean - mu, 2) + PastData.NS2 / 2.0;
            }

            // 4) Sample sigma from its conditional via inverse-CDF (Icdf).
            double[] start = new double[0];
            if (this.ME.ThroughCV && !OutcomeIsLogNormallyDistributed)
            {
                //ici
                // A <- c(o.sigma$A, list(b=sigma.beta, mu=mu))
                localA = oSigma.A.Clone();
                localA.B = sigmaBeta;
                localA.Mu = mu;
                start = Tools.Combine(sigma);
                inestimableLowerLimit = false;
            }
            else
            {
                localA.B = sigmaBeta;
                start = oSigma.Start(localA);
                inestimableLowerLimit = true;
            }
            Icdf icdf = new Icdf(oSigma, localA, Tools.Combine(0, double.PositiveInfinity));
            sigma = icdf.Bidon(start, inestimableLowerLimit);

            // 5) Sample mu from its conditional (truncated or plain normal).
            yBar = moments.Sum / this.Data.N;
            muCondMean = this.PastData.Defined ? (moments.Sum + PastData.N * PastData.Mean) / combinedN : yBar; // # new_0.12
            if (this.ME.ThroughCV && !this.OutcomeIsLogNormallyDistributed)
            {
                mu = MuTruncatedGen.GetInstance(oMu, muLim, muCondMean, sigma).Mu;
            }
            else
            {
                // Normal conditional, truncated to [MuLower, MuUpper] by
                // inverse-CDF sampling between the two CDF limits.
                muCondSD = sigma / Math.Sqrt(combinedN);
                pLim = NormalDistribution.PNorm(muLim.Substract(muCondMean).Divide(muCondSD));
                p = UniformDistribution.RUnif(1, pLim[0], pLim[1])[0];
                mu = NormalDistribution.QNorm(p, mu: muCondMean, sigma: muCondSD);
            }

            //# Sample Measurement Error from its posterior density
            if (this.ME.Any && !this.ME.Known)
            {
                this.ME.Parm = MEParmGen.GetInstance(oME, this.ME, this.Data, genY, genTV).Parm;
            }

            // 6) Record the current state: burn-in chains (optional) or the
            // thinned posterior sample.
            if (iter < NBurnin)
            {
                if (MonitorBurnin)
                {
                    burninMu[iter] = mu;
                    burninSigma[iter] = sigma;
                    if (this.ME.Any && !this.ME.Known)
                    {
                        burninCV[iter] = ME.Parm;
                    }
                }
            }
            else if ((iter - NBurnin) % NThin == 0)
            {
                sampleMu[savedIter] = mu;
                sampleSigma[savedIter] = sigma;
                if (this.ME.Any && !this.ME.Known)
                {
                    sampleCV[savedIter] = ME.Parm;
                }
                savedIter++;
            }
        }// for( int iter = 1 ...
    }
    catch (Exception ex)
    {
        // Record the failing iteration and PRNG seed so the run can be reproduced.
        this.Result.Messages.AddError(WEException.GetStandardMessage(ex, iter, Result.PRNGSeed), this.ClassName);
        return;
    }
} //end Run