internal static MEParmGen GetInstance(GenObject o, MeasurementError me, DataSummary data, YGen genY, TrueValuesGen genTV)
{
    MEParmGen instance = new MEParmGen();
    double b;
    double[] tmpY, tmpT;

    if (me.ThroughCV)
    {
        if (o.LogNormalDistrn)
        {
            // b = sum((exp(logY - logT) - 1)^2) / 2
            tmpY = Tools.Combine(data.LogY, genY.LogGT, genY.LogLT, genY.LogI);
            tmpT = Tools.Combine(genTV.LogY, genTV.LogGT, genTV.LogLT, genTV.LogI);
            b = tmpY.Substract(tmpT).Exp().Substract(1.0).Sqr().Sum() / 2.0;
        }
        else
        {
            // b = sum((y/t - 1)^2) / 2
            tmpY = Tools.Combine(data.Y, genY.GT, genY.LT, genY.I);
            tmpT = Tools.Combine(genTV.Y, genTV.GT, genTV.LT, genTV.I);
            b = tmpY.Divide(tmpT).Substract(1.0).Sqr().Sum() / 2.0;
        }

        SGNFnAParam localA = o.A.Clone();
        localA.B = b;
        localA.Range = me.GetRange();
        double[] range = me.GetRange();
        Icdf icdf = new Icdf(o, localA, range);
        instance.Parm = icdf.Bidon(o.Start(localA), inestLowerLim: range[0] == 0.0);
    }
    else
    {
        // b = sum((y - t)^2) / 2
        tmpY = Tools.Combine(data.Y, genY.GT, genY.LT, genY.I);
        tmpT = Tools.Combine(genTV.Y, genTV.GT, genTV.LT, genTV.I);
        b = tmpY.Substract(tmpT).Sqr().Sum() / 2.0;

        if (o.LogNormalDistrn)
        {
            SGNFnAParam localA = o.A.Clone();
            localA.B = b;
            localA.Range = me.GetRange();
            localA.Truevalues = Tools.Copy(tmpT);
            //me.parm <- dens.gen.icdf(o, A, range=me$range, inestimable.lower.limit=me$range[1]==0)
            double[] range = me.GetRange();
            Icdf icdf = new Icdf(o, localA, range);
            instance.Parm = icdf.Bidon(inestLowerLim: range[0] == 0.0);
        }
        else
        {
            instance.Parm = WebExpoFunctions3.SqrtInvertedGammaGen(data.N, b, me.GetRange(), o);
        }
    }

    return instance;
}
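// The b statistic assembled above feeds the full conditional of the
// measurement-error parameter (sampled through the generic inverse-CDF routine,
// or via SqrtInvertedGammaGen in the SD/normal case). A standalone sketch of the
// log-normal/CV branch using plain LINQ instead of the Tools extension methods
// (class and method names here are illustrative; assumes System and System.Linq
// are imported):
internal static class BStatisticSketch
{
    // b = sum((exp(logY - logT) - 1)^2) / 2
    internal static double LogNormalCV(double[] logY, double[] logT)
    {
        return logY.Zip(logT, (y, t) => Math.Pow(Math.Exp(y - t) - 1.0, 2)).Sum() / 2.0;
    }
}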
internal static YGen Inits(DataSummary data, double mu, double sigma, bool meThroughCV, bool logNormalDistrn)
{
    YGen newYGen = new YGen();

    if (logNormalDistrn)
    {
        if (data.AnyGT)
        {
            newYGen.LogGT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.LogGT);
            newYGen.GT = newYGen.LogGT.Exp();
        }

        if (data.AnyLT)
        {
            newYGen.LogLT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, upper: data.LogLT, negativeDisallowed: meThroughCV && !logNormalDistrn);
            newYGen.LT = newYGen.LogLT.Exp();
        }

        if (data.AnyIntervalCensored)
        {
            newYGen.LogI = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.LogIntervalGT, upper: data.LogIntervalLT);
            newYGen.I = newYGen.LogI.Exp();
        }
    }
    else
    {
        if (data.AnyGT)
        {
            newYGen.GT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.GT);
        }

        if (data.AnyLT)
        {
            newYGen.LT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, upper: data.LT, negativeDisallowed: meThroughCV && !logNormalDistrn);
        }

        if (data.AnyIntervalCensored)
        {
            newYGen.I = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.IntervalGT, upper: data.IntervalLT);
        }
    }

    return newYGen;
}
public static OutLogoutMoments Get(bool anyME, bool logNormalDistrn, DataSummary data, YGen genY, TrueValuesGen genTV)
{
    OutLogoutMoments olm = new OutLogoutMoments();

    if (anyME)
    {
        // With measurement error, the moments are taken over the sampled true values.
        double[] y = logNormalDistrn
            ? Tools.Combine(genTV.LogY, genTV.LogGT, genTV.LogLT, genTV.LogI)
            : Tools.Combine(genTV.Y, genTV.GT, genTV.LT, genTV.I);
        olm.Sum = y.Sum();
        olm.Sum2 = y.Sqr().Sum();
    }
    else if (logNormalDistrn)
    {
        // Precomputed sums over the uncensored data, plus the generated censored values.
        olm.Sum = data.LogUncensoredSum + genY.LogGT.Sum() + genY.LogLT.Sum() + genY.LogI.Sum();
        olm.Sum2 = data.LogUncensoredSum2 + genY.LogGT.Sqr().Sum() + genY.LogLT.Sqr().Sum() + genY.LogI.Sqr().Sum();
    }
    else
    {
        olm.Sum = data.UncensoredSum + genY.GT.Sum() + genY.LT.Sum() + genY.I.Sum();
        olm.Sum2 = data.UncensoredSum2 + genY.GT.Sqr().Sum() + genY.LT.Sqr().Sum() + genY.I.Sqr().Sum();
    }

    return olm;
}
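// Downstream, Sum and Sum2 are presumably converted to a sample mean and
// variance for the mu/sigma updates via the usual identity
// var = (Sum2 - n*mean^2) / (n - 1). A standalone sketch (illustrative names):
internal static class MomentsSketch
{
    internal static (double Mean, double Var) FromSums(double sum, double sum2, int n)
    {
        double mean = sum / n;
        return (mean, (sum2 - n * mean * mean) / (n - 1));
    }
}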
private YGen(MeasurementError me, TrueValuesGen genTV, DataSummary data, double mu, double sigma, bool logNormalDistrn = true)
{
    if (me.Any)
    {
        // Sample y (censored) values | true values
        if (data.AnyGT)
        {
            double[] tmpMean = genTV.GT;
            double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
            this.GT = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, lower: data.GT);
            if (logNormalDistrn)
            {
                this.LogGT = this.GT.Log();
            }
        }

        if (data.AnyLT)
        {
            double[] tmpMean = genTV.LT;
            double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
            this.LT = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, upper: data.LT, negativeDisallowed: logNormalDistrn || me.ThroughCV);
            if (logNormalDistrn)
            {
                this.LogLT = this.LT.Log();
            }
        }

        if (data.AnyIntervalCensored)
        {
            double[] tmpMean = genTV.I;
            double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
            this.I = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, lower: data.IntervalGT, upper: data.IntervalLT);
            if (logNormalDistrn)
            {
                this.LogI = this.I.Log();
            }
        }
    }
    else
    {
        if (logNormalDistrn)
        {
            if (data.AnyGT)
            {
                this.LogGT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.LogGT);
                this.GT = this.LogGT.Exp();
            }

            if (data.AnyLT)
            {
                this.LogLT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, upper: data.LogLT);
                this.LT = this.LogLT.Exp();
            }

            if (data.AnyIntervalCensored)
            {
                this.LogI = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.LogIntervalGT, upper: data.LogIntervalLT);
                this.I = this.LogI.Exp();
            }
        }
        else
        {
            if (data.AnyGT)
            {
                this.GT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.GT);
            }

            if (data.AnyLT)
            {
                this.LT = RNorm4CensoredMeasures.RNormCensored(mu, sigma, upper: data.LT);
            }

            if (data.AnyIntervalCensored)
            {
                this.I = RNorm4CensoredMeasures.RNormCensored(mu, sigma, lower: data.IntervalGT, upper: data.IntervalLT);
            }
        }
    }
} // internal YGen (constructor)
public static YGen GetInstance(MeasurementError me, TrueValuesGen genTV, DataSummary data, double mu, double sigma, bool logNormalDistrn = true)
{
    if (!me.Any)
    {
        return YGen.Inits(data, mu, sigma, meThroughCV: data.METhroughCV, logNormalDistrn: logNormalDistrn);
    }

    YGen newYGen = new YGen();

    // Sample y (censored) values | true values
    if (data.AnyGT)
    {
        double[] tmpMean = genTV.GT;
        double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
        newYGen.GT = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, lower: data.GT);
        if (logNormalDistrn)
        {
            newYGen.LogGT = newYGen.GT.Log();
        }
    }

    if (data.AnyLT)
    {
        double[] tmpMean = genTV.LT;
        double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
        newYGen.LT = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, upper: data.LT, negativeDisallowed: logNormalDistrn || me.ThroughCV);
        if (logNormalDistrn)
        {
            newYGen.LogLT = newYGen.LT.Log();
        }
    }

    if (data.AnyIntervalCensored)
    {
        double[] tmpMean = genTV.I;
        double[] tmpSD = me.ThroughCV ? tmpMean.Multiply(me.Parm) : Tools.Rep(me.Parm, tmpMean.Length);
        newYGen.I = RNorm4CensoredMeasures.RNormCensored(tmpMean, tmpSD, lower: data.IntervalGT, upper: data.IntervalLT);
        if (logNormalDistrn)
        {
            newYGen.LogI = newYGen.I.Log();
        }
    }

    return newYGen;
}
private TrueValuesGen(YGen genY, DataSummary data, double mu, double sigma, MeasurementError me, bool logNormalDistrn = true, GenObject o = null)
{
    if (me.ThroughSD && !logNormalDistrn)
    {
        // Conjugate normal update: t | y ~ N(tmpMean, sdStar^2), tauStar = 1/sigma^2 + 1/meSD^2.
        double meSD = me.Parm;
        double tauStar = 1 / Math.Pow(sigma, 2) + 1 / Math.Pow(meSD, 2);
        double sdStar = 1 / Math.Sqrt(tauStar);

        if (data.YLength > 0)
        {
            double[] tmpMean = data.Y.Divide(Math.Pow(meSD, 2)).Add(mu / Math.Pow(sigma, 2)).Divide(tauStar);
            this.Y = NormalDistribution.RNorm(data.YLength, tmpMean, Tools.Rep(sdStar, tmpMean.Length));
        }

        if (data.GTLength > 0)
        {
            double[] tmpMean = genY.GT.Divide(Math.Pow(meSD, 2)).Add(mu / Math.Pow(sigma, 2)).Divide(tauStar);
            this.GT = NormalDistribution.RNorm(data.GTLength, tmpMean, Tools.Rep(sdStar, tmpMean.Length));
        }

        if (data.LTLength > 0)
        {
            double[] tmpMean = genY.LT.Divide(Math.Pow(meSD, 2)).Add(mu / Math.Pow(sigma, 2)).Divide(tauStar);
            this.LT = NormalDistribution.RNorm(data.LTLength, tmpMean, Tools.Rep(sdStar, tmpMean.Length));
        }

        if (data.IntervalLength > 0)
        {
            double[] tmpMean = genY.I.Divide(Math.Pow(meSD, 2)).Add(mu / Math.Pow(sigma, 2)).Divide(tauStar);
            this.I = NormalDistribution.RNorm(data.IntervalLength, tmpMean, Tools.Rep(sdStar, tmpMean.Length));
        }
    }
    else
    {
        o.A.Sigma2 = sigma * sigma;
        this.Y = new double[data.YLength];
        this.GT = new double[data.GTLength];
        this.LT = new double[data.LTLength];
        this.I = new double[data.IntervalLength];

        for (int j = 0; j < data.YLength; j++)
        {
            this.Y[j] = TrueValueGen(o, me, data.Y[j], mu, logY: logNormalDistrn ? data.LogY[j] : Tools.ND);
        }

        for (int j = 0; j < data.GTLength; j++)
        {
            this.GT[j] = TrueValueGen(o, me, genY.GT[j], mu, logY: logNormalDistrn ? genY.LogGT[j] : Tools.ND);
        }

        for (int j = 0; j < data.LTLength; j++)
        {
            this.LT[j] = TrueValueGen(o, me, genY.LT[j], mu, logY: logNormalDistrn ? genY.LogLT[j] : Tools.ND);
        }

        for (int j = 0; j < data.IntervalLength; j++)
        {
            this.I[j] = TrueValueGen(o, me, genY.I[j], mu, logY: logNormalDistrn ? genY.LogI[j] : Tools.ND);
        }

        if (logNormalDistrn)
        {
            this.LogY = this.Y.Log();
            this.LogGT = this.GT.Log();
            this.LogLT = this.LT.Log();
            this.LogI = this.I.Log();
        }
    }
} //# end of truevalues.gen
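// Conjugate-normal background for the ThroughSD branch above: with prior
// t ~ N(mu, sigma^2) and likelihood y | t ~ N(t, meSD^2), the full conditional is
//
//   t | y ~ N( (y/meSD^2 + mu/sigma^2) / tauStar , 1/tauStar ),   tauStar = 1/sigma^2 + 1/meSD^2
//
// which is exactly the tmpMean / sdStar pair computed in each branch.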
internal static TrueValuesGen GetInstance(YGen genY, DataSummary data, double mu, double sigma, MeasurementError me, bool logNormalDistrn = true, GenObject o = null)
{
    return new TrueValuesGen(genY, data, mu, sigma, me, logNormalDistrn, o);
}
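// A sketch of how the generators above might be wired inside one MCMC
// iteration (illustrative only: this is not the repo's actual sampler loop,
// and the update order and the assumption that me.Parm is assignable are
// assumptions):
internal static class GibbsIterationSketch
{
    internal static OutLogoutMoments Step(GenObject o, MeasurementError me, DataSummary data,
                                          ref YGen genY, double mu, double sigma, bool logNormalDistrn)
    {
        // 1) true values | current censored-value draws
        TrueValuesGen genTV = TrueValuesGen.GetInstance(genY, data, mu, sigma, me, logNormalDistrn, o);
        // 2) censored y's | true values
        genY = YGen.GetInstance(me, genTV, data, mu, sigma, logNormalDistrn);
        // 3) measurement-error parameter | (y, true values)
        me.Parm = MEParmGen.GetInstance(o, me, data, genY, genTV).Parm;
        // 4) sufficient statistics feeding the mu/sigma updates
        return OutLogoutMoments.Get(me.Any, logNormalDistrn, data, genY, genTV);
    }
}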
//Tools.Combine(-0.693147180559945, 0.797884560802865, -0.318309886183791, 0.0363356023574984); // logPhi.quadratic.approx.coeff

internal static InitialValues DefaultInits(DataSummary data, bool logNormalDistrn, double[] muRange, double[] sigmaRange, bool includeCensoredData)
{
    //# modif_0.13: added argument include.censored.data to this fct
    double mu;
    double sigma;
    double sigmaLower = sigmaRange.Min();
    double sigmaUpper = sigmaRange.Max();

    List<double> normalizedY = new List<double>();
    if (data.YLength > 0)
    {
        normalizedY.AddRange(logNormalDistrn ? data.LogY : data.Y);
    }

    // new_0.12
    if (includeCensoredData)
    {
        if (data.GTLength > 0)
        {
            normalizedY.AddRange(logNormalDistrn ? data.LogGT : data.GT);
        }

        if (data.LTLength > 0)
        {
            normalizedY.AddRange(logNormalDistrn ? data.LogLT : data.LT);
        }

        if (data.IntervalLength > 0)
        {
            // Interval-censored observations enter through their midpoints.
            double[] midPoints = new double[data.IntervalLength];
            double[] lt = logNormalDistrn ? data.LogIntervalLT : data.IntervalLT;
            double[] gt = logNormalDistrn ? data.LogIntervalGT : data.IntervalGT;
            for (int i = 0; i < data.IntervalLength; i++)
            {
                midPoints[i] = gt[i] + (lt[i] - gt[i]) / 2.0;
            }

            normalizedY.AddRange(midPoints);
        }
    }

    if (data.YLength > 0)
    {
        // Clamp the empirical mean to the allowed range for mu.
        mu = normalizedY.Mean();
        double muLower = muRange.Min();
        double muUpper = muRange.Max();
        if (mu < muLower)
        {
            mu = muLower;
        }
        else if (mu > muUpper)
        {
            mu = muUpper;
        }

        if (normalizedY.Count > 1)
        {
            sigma = Math.Sqrt(normalizedY.Variance());
        }
        else if (!includeCensoredData)
        {
            // Not enough data points: call this fct again, including censored data.
            InitialValues tmp = DefaultInits(data, logNormalDistrn, muRange, sigmaRange, includeCensoredData: true);
            sigma = tmp.SigmaWithin;
        }
        else
        {
            //# new_0.13
            sigma = 0; //# will be corrected below
        }

        if (sigma < sigmaLower)
        {
            sigma = sigmaLower;
        }

        // At this point sigma >= 0 and sigma >= sigmaLower.
        if (sigma == 0.0)
        {
            // Only possible when sigmaLower <= 0.
            sigma = sigmaUpper / 10.0;
        }
    }
    else
    {
        sigma = sigmaLower > 0 ? sigmaLower : sigmaUpper / 10.0;
        mu = Tools.Mean(muRange);
    }

    return new InitialValues(mu, sigma);
} // Default.inits
internal InitialValues DefaultInits(DataSummary data, bool includeCensoredData)
{
    double muOverall = Tools.ND;
    double sigmaWithin = Tools.ND;
    bool logNormalDstrn = this.LogNormalDstrn;

    List<double> workerMeans = new List<double>();
    List<double> workerSds = new List<double>();
    List<double> normalizedData = new List<double>();

    // One list per worker. wo == workerOrdinal
    Dictionary<int, List<double>> measureValueByWorker = new Dictionary<int, List<double>>();
    for (int wo = 0; wo < data.NWorkers; wo++)
    {
        measureValueByWorker[wo] = new List<double>();
    }

    // Add each worker's uncensored measures.
    if (data.YLength > 0)
    {
        foreach (Measure m in data.UncensoredMeasures)
        {
            double value = logNormalDstrn ? Math.Log(m.A) : m.A;
            measureValueByWorker[m.Worker.Ordinal].Add(value);
            normalizedData.Add(value);
        }
    }

    if (includeCensoredData)
    {
        foreach (Measure m in data.GTMeasures)
        {
            double value = logNormalDstrn ? Math.Log(m.A) : m.A;
            measureValueByWorker[m.Worker.Ordinal].Add(value);
            normalizedData.Add(value);
        }

        foreach (Measure m in data.LTMeasures)
        {
            double value = logNormalDstrn ? Math.Log(m.A) : m.A;
            measureValueByWorker[m.Worker.Ordinal].Add(value);
            normalizedData.Add(value);
        }

        foreach (Measure m in data.IntervalMeasures)
        {
            // Interval-censored measures enter through their midpoints.
            double midP = m.A + (m.B - m.A) / 2;
            double value = logNormalDstrn ? Math.Log(midP) : midP;
            measureValueByWorker[m.Worker.Ordinal].Add(value);
            normalizedData.Add(value);
        }
    }

    if (normalizedData.Count > 0)
    {
        for (int o = 0; o < data.NWorkers; o++)
        {
            List<double> src = measureValueByWorker[o];
            if (src.Count == 0)
            {
                continue;
            }

            if (src.Count == 1)
            {
                workerMeans.Add(src[0]); // no sd from a single measure
                continue;
            }

            // Welford's online algorithm for the mean and unbiased variance.
            int n = 0;
            double mean = 0;
            double M2 = 0;
            foreach (double x in src)
            {
                n++;
                double delta = x - mean;
                mean += delta / n;
                M2 += delta * (x - mean);
            }

            workerMeans.Add(mean);
            workerSds.Add(Math.Sqrt(M2 / (n - 1)));
        }

        // workerMeans is non-empty here.
        muOverall = workerMeans.Average();

        //# modif_0.13
        if (workerSds.Count == 0)
        {
            if (workerMeans.Count > 1)
            {
                sigmaWithin = Math.Sqrt(workerMeans.Variance());
            }
        }
        else
        {
            sigmaWithin = workerSds.Mean();
        }
    }
    else
    {
        muOverall = 0.0;
    }

    //# new_0.13
    if (Tools.IsND(sigmaWithin) && !includeCensoredData)
    {
        //# call this fct again, but this time including censored data
        InitialValues tmp = DefaultInits(data, includeCensoredData: true);
        sigmaWithin = tmp.SigmaWithin;
    }

    //# new_0.13
    if (Tools.IsND(sigmaWithin))
    {
        sigmaWithin = GetSigmaWithin();
    }

    return new InitialValues(muOverall, sigmaWithin, normalizedData);
} //# end of Default.inits.local
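// The per-worker loop above uses Welford's online algorithm, which matches the
// two-pass sample variance exactly (up to floating point) while reading each
// value once. A standalone check (illustrative class name; assumes System and
// System.Linq are imported):
internal static class WelfordSketch
{
    internal static double Variance(double[] xs)
    {
        int n = 0;
        double mean = 0, m2 = 0;
        foreach (double x in xs)
        {
            n++;
            double delta = x - mean;
            mean += delta / n;
            m2 += delta * (x - mean);
        }

        // Two-pass equivalent: xs.Sum(v => (v - xs.Average()) * (v - xs.Average())) / (xs.Length - 1)
        return m2 / (n - 1); // unbiased sample variance
    }
}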
internal WorkerData(BetweenWorkerModel parent)
{
    this.parentModel = parent;
    this.logNormalDistrn = this.parentModel.OutcomeIsLogNormallyDistributed;
    Data = parent.Data;

    workerDigests = new WorkerDigest[Data.NWorkers];
    this.MeasureCountByWorker = new int[Data.NWorkers];
    int i = 0;
    foreach (Worker w in Data.WorkersByOrdinal)
    {
        MeasureCountByWorker[i] = Data.MeasuresByWorker[w].Count;
        workerDigests[i] = new WorkerDigest(ordinal: i, mean: 0, measureCount: Data.MeasuresByWorker[w].Count);
        i++;
    }

    DFRecordByMeasureType.Add(Measure.MeasureType.Uncensored, new List<DfRecord>());
    DFRecordByMeasureType.Add(Measure.MeasureType.GreaterThan, new List<DfRecord>());
    DFRecordByMeasureType.Add(Measure.MeasureType.LessThan, new List<DfRecord>());
    DFRecordByMeasureType.Add(Measure.MeasureType.Interval, new List<DfRecord>());

    // df holds one record per measure, laid out in contiguous blocks:
    // uncensored first, then greater-than, less-than and interval-censored.
    df = new DfRecord[Data.N];
    int j = 0;

    Positions[(int)Measure.MeasureType.Uncensored] = new Position(j, Data.UncensoredMeasures.Length);
    for (i = 0; i < Data.UncensoredMeasures.Length; i++)
    {
        Measure m = Data.UncensoredMeasures[i];
        int workerOrdinal = m.Worker.Ordinal;
        m.WorkerDataOrdinal = j;
        df[j] = new DfRecord(
            measureOrdinal: j,
            genValue: this.logNormalDistrn ? Math.Log(m.A) : m.A,
            workerOrdinal: workerOrdinal,
            workerDigest: workerDigests[workerOrdinal]);
        DFRecordByMeasureType[Measure.MeasureType.Uncensored].Add(df[j]);
        j++;
    }

    Positions[(int)Measure.MeasureType.GreaterThan] = new Position(j, Data.GT.Length);
    for (i = 0; i < Data.GTMeasures.Length; i++)
    {
        Measure m = Data.GTMeasures[i];
        int workerOrdinal = m.Worker.Ordinal;
        m.WorkerDataOrdinal = j;
        df[j] = new DfRecord(
            measureOrdinal: j,
            genValue: Tools.ND, // censored: value is generated later
            workerOrdinal: workerOrdinal,
            workerDigest: workerDigests[workerOrdinal]);
        DFRecordByMeasureType[Measure.MeasureType.GreaterThan].Add(df[j]);
        j++;
    }

    Positions[(int)Measure.MeasureType.LessThan] = new Position(j, Data.LT.Length);
    for (i = 0; i < Data.LTMeasures.Length; i++)
    {
        Measure m = Data.LTMeasures[i];
        int workerOrdinal = m.Worker.Ordinal;
        m.WorkerDataOrdinal = j;
        df[j] = new DfRecord(
            measureOrdinal: j,
            genValue: Tools.ND, // censored: value is generated later
            workerOrdinal: workerOrdinal,
            workerDigest: workerDigests[workerOrdinal]);
        DFRecordByMeasureType[Measure.MeasureType.LessThan].Add(df[j]);
        j++;
    }

    Positions[(int)Measure.MeasureType.Interval] = new Position(j, Data.IntervalGT.Length);
    for (i = 0; i < Data.IntervalMeasures.Length; i++)
    {
        Measure m = Data.IntervalMeasures[i];
        int workerOrdinal = m.Worker.Ordinal;
        m.WorkerDataOrdinal = j;
        df[j] = new DfRecord(
            measureOrdinal: j,
            genValue: Tools.ND, // censored: value is generated later
            workerOrdinal: workerOrdinal,
            workerDigest: workerDigests[workerOrdinal]);
        DFRecordByMeasureType[Measure.MeasureType.Interval].Add(df[j]);
        j++;
    }

    this.WorkerIds = df.Select(rec => rec.WorkerOrdinal).ToArray();
    UpdatePublicProperties();
}
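// Layout sketch for the df array built above (counts are illustrative): with
// 3 uncensored, 2 greater-than, 1 less-than and 2 interval-censored measures,
// each Position records its block's start index and length:
//
//   df index:  0  1  2 | 3  4 | 5 | 6  7
//   type:      U  U  U | GT GT| LT| I  I
//   Positions: U=(0,3)  GT=(3,2)  LT=(5,1)  I=(6,2)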