/*
 * Main FDR computation function.
 * Takes as input a list of contingency tables, already sorted by Fisher scores.
 * Pools the permutation null distribution of every table into the FDR map,
 * estimates pi_0 according to m_pmEvaluatePi, and outputs a map from
 * Fisher-score key to its FDRData (pooled p-value, later summed into FDR).
 */
private Map<double, FDRData> computeFDR(List<ContingencyTable> actTables)
{
    int cTables = actTables.Count;
    DateTime dtBefore = DateTime.Now;
    TimeSpan tsTotal = TimeSpan.Zero;

    // Accumulators for the pi_0 estimators.
    double dSumObservedPValues = 0.0;   // sum over tables of the observed Fisher p-value
    double dSumNullPValues = 0.0;       // total probability mass of the pooled null
    double dExpectedNullPValue = 0.0;   // sum of p * Pr(p) under the null
    double dEPhiNull = 0.0;             // sum of sqrt(p) * Pr(p) under the null
    double dEPhiObserved = 0.0;         // sum of sqrt(p) over observed tables
    int cObservedPValues = 0;

    if (m_bReportProgress)
    {
        m_bContinue = m_prReport.reportPhase("Computing pooled p-values.");
        m_bContinue = m_prReport.reportMessage("Started computing pooled p-values values.", true);
    }

    Map<double, FDRData> slFDR = initFDRMap(actTables);

    // BUGFIX: Count / 10 is 0 for fewer than 10 tables, which made the
    // "iTable % cReprotInterval" progress check below throw
    // DivideByZeroException. Clamp the interval to at least 1.
    int cReprotInterval = Math.Max(1, Math.Min(actTables.Count / 10, MAX_REPROT_POINT));

    for (int iTable = 0; iTable < cTables && m_bContinue; iTable++)
    {
        ContingencyTable ctCurrent = (ContingencyTable)actTables[iTable];
        double dCurrentTableFisherTestPValue = ctCurrent.getFisher2TailPermutationTest();
        dSumObservedPValues += dCurrentTableFisherTestPValue;
        // phi(p) = sqrt(p) transform fed into the pi estimation.
        // (A -log(1 - 0.99999999 * p) variant was used previously.)
        dEPhiObserved += Math.Sqrt(dCurrentTableFisherTestPValue);
        cObservedPValues++;

        // Rows of [hypergeometric probability, Fisher score] for every
        // permutation of the current table's margins.
        double[,] adScores = ctCurrent.computeAllPermutationsScores();

        // In "huge" mode identical tables are collapsed elsewhere; weight
        // this table's pooled contribution by its cached multiplicity.
        int cTableCount = m_bHuge ? (int)m_ctcTableCounts.getCachedValue(ctCurrent) : 1;

        for (int iCurrent = 0; iCurrent < adScores.GetLength(0); iCurrent++)
        {
            double dHyperProbability = adScores[iCurrent, 0];
            double dFisherScore = adScores[iCurrent, 1];
            dSumNullPValues += dHyperProbability;
            dExpectedNullPValue += dFisherScore * dHyperProbability;
            dEPhiNull += Math.Sqrt(dFisherScore) * dHyperProbability;
            // Pool this permutation's probability mass into the FDR bin of
            // the smallest key >= its Fisher score.
            double dFirstLargerKey = getNextKey(slFDR.KeyList, dFisherScore);
            slFDR[dFirstLargerKey].PooledPValue += (dHyperProbability * cTableCount);
        }

        if ((iTable > 0) && (iTable % cReprotInterval == 0) && m_bReportProgress)
        {
            // NOTE(review): dtBefore is never reset, so each sample added to
            // tsTotal spans the whole run so far and the reported average
            // grows quadratically — looks unintended, confirm before changing.
            DateTime dtAfter = DateTime.Now;
            TimeSpan tsCurrent = dtAfter.Subtract(dtBefore);
            tsTotal += tsCurrent;
            m_bContinue = m_prReport.reportProcessedTables(iTable, cTables);
            m_bContinue = m_prReport.reportMessage("Done " + iTable + " tables, avg time (ms) " +
                Math.Round(tsTotal.TotalMilliseconds / (iTable + 1)) +
                ", total time " + tsTotal, true);
        }
    }

    // Estimate pi_0 (fraction of true null hypotheses). Defaults to 1.0,
    // which is also the value used in Filtering mode (per-key pi instead).
    double dPi = 1.0;
    if ((m_pmEvaluatePi == PiMethod.WeightedSum) || (m_pmEvaluatePi == PiMethod.DoubleAverage))
    {
        if (m_pmEvaluatePi == PiMethod.WeightedSum)
        {
            // \pi_0 = (\sum_T p(T))/(\sum_T p(T)pr(T|H=0))
            dPi = (dSumObservedPValues / cObservedPValues) / (dExpectedNullPValue / dSumNullPValues);
        }
        else
        {
            // \pi_0 = 2 * avg(p)
            dPi = 2.0 * (dSumObservedPValues / cObservedPValues);
        }
        m_bContinue = m_prReport.reportMessage("Estimating PI = " + dPi, true);
    }
    else if (m_pmEvaluatePi == PiMethod.Filtering)
    {
        // Filtering mode: each FDR key gets its own pi value.
        Map<double, double> slPi = computeFilteringPi(actTables, slFDR.KeyList);
        List<double> lKeys = new List<double>(slFDR.Keys);
        foreach (double dKey in lKeys)
        {
            slFDR[dKey].FilteringPi = slPi[dKey];
        }
    }
    m_dPi = dPi;

    sumFDRs(actTables, slFDR, dPi);

    return slFDR;
}