/*
         * Computing pi in the filtering case.
         * Here a separate pi is computed for each p-value.
         * A table is considered relevant for the pi computation of a p-value p only if its marginals support a p-value more extreme than p.
         * */
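        //Illustrative example (hypothetical numbers): a table whose minimal achievable p-value
        //is 0.003 is bucketed under the smallest observed p-value >= 0.003, and from that point
        //on it contributes to the pi of every less extreme (larger) p-value.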
        private SortedList<double, double> computeFilteringPi(List<ContingencyTable> actTables, List<double> lPValues)
        {
            SortedList<double, List<ContingencyTable>> slRelevantTables = new SortedList<double, List<ContingencyTable>>();
            double dSumObservedPValuesInRange = 0.0, dCurrentTableFisherTestPValue = 0.0;
            int    cObservedTablesInRange = 0;
            double dFisherScore = 0.0, dHyperProbability = 0.0, dMinimalPossiblePValue = 0.0, dFirstLargerKey = 0.0;
            double dSumExpectedNullsInRange = 0.0;
            double dSumNullProbsInRange = 0.0;
            int    cNullsInRange = 0;
            int    iTable = 0;
            SortedList<double, double> slPi = new SortedList<double, double>();
            ContingencyTable     ctCurrent = null;

            if (m_bReportProgress)
            {
                m_bContinue = m_prReport.reportPhase("Computing relevant tables.");
                m_bContinue = m_prReport.reportMessage("Started computing relevant tables for PI computation.", true);
            }

            //We first compute the list of relevant tables.
            //For each table we compute its minimal achievable p-value and bucket the table under the next p-value on the list.
            //The tables relevant to a given p-value are then all tables bucketed under a p-value that is more extreme than it.
            for (iTable = 0; iTable < actTables.Count && m_bContinue; iTable++)
            {
                ctCurrent = actTables[iTable];
                dMinimalPossiblePValue = ctCurrent.getMinimalAchievablePValue();
                dFirstLargerKey        = getNextKey(lPValues, dMinimalPossiblePValue);
                if (!slRelevantTables.ContainsKey(dFirstLargerKey))
                {
                    slRelevantTables.Add(dFirstLargerKey, new List <ContingencyTable>());
                }
                slRelevantTables[dFirstLargerKey].Add(ctCurrent);
                if (m_bReportProgress && (iTable > 0) && (iTable % 1000 == 0))
                {
                    m_bContinue = m_prReport.reportProcessedTables(iTable, actTables.Count);
                }
            }

            //We iterate from the smallest p-value to the largest (actTables is assumed sorted in ascending p-value order).
            //The order matters because the set of relevant tables must only ever grow.
            for (iTable = 0; iTable < actTables.Count && m_bContinue; iTable++)
            {
                ctCurrent = actTables[iTable];

                //Round the Fisher p-value so it can be looked up as a key in slRelevantTables.
                dCurrentTableFisherTestPValue = round(ctCurrent.getFisher2TailPermutationTest());

                if (slRelevantTables.ContainsKey(dCurrentTableFisherTestPValue))
                {
                    //Now we iterate over the list of relevant tables.
                    //Note - a table never becomes irrelevant, so we only ever accumulate more observations and never remove any;
                    //the bucket is removed below only so its tables are not counted twice.
                    foreach (ContingencyTable ctRelevant in slRelevantTables[dCurrentTableFisherTestPValue])
                    {
                        dFisherScore = ctRelevant.getFisher2TailPermutationTest();

                        dSumObservedPValuesInRange += dFisherScore;
                        cObservedTablesInRange++;
                        //TODO - calling computeAllPermutationsScores twice - inefficient
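                        //Each row of adScores describes one possible table with the same marginals:
                        //adScores[i,0] holds its hypergeometric probability, adScores[i,1] its Fisher score.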
                        double[,] adScores = ctRelevant.computeAllPermutationsScores();

                        for (int iCurrent = 0; iCurrent < adScores.GetLength(0); iCurrent++)
                        {
                            dHyperProbability = adScores[iCurrent, 0];
                            dFisherScore      = adScores[iCurrent, 1];

                            dSumNullProbsInRange     += dHyperProbability;
                            dSumExpectedNullsInRange += dFisherScore * dHyperProbability;
                            cNullsInRange++;
                        }
                    }
                    slRelevantTables.Remove(dCurrentTableFisherTestPValue);
                }
                //After processing all tables relevant to this p-value we compute its pi as the
                //average observed p-value divided by the expected null p-value (a weighted average
                //over all permutations, using the null probabilities as weights).
                slPi[dCurrentTableFisherTestPValue] = (dSumObservedPValuesInRange / cObservedTablesInRange) /
                                                      (dSumExpectedNullsInRange / dSumNullProbsInRange);
                if (m_bReportProgress && (iTable > 0) && (iTable % 1000 == 0))
                {
                    m_bContinue = m_prReport.reportProcessedTables(iTable, actTables.Count);
                }
            }
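            //10.0 is larger than any attainable p-value, so this entry serves as an out-of-range default (pi = 1.0).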
            slPi[10.0] = 1.0;
            return slPi;
        }