コード例 #1
0
        /// <summary>
        /// Builds the per-scan meta data collection (fill times, duty cycles and
        /// intensity metrics) for a DIA acquisition.
        /// </summary>
        /// <param name="centroidStreams">Centroid spectra, used for the intensity metrics.</param>
        /// <param name="segmentScans">Segmented spectra, used for the intensity metrics.</param>
        /// <param name="methodData">Instrument method information.</param>
        /// <param name="trailerExtras">Trailer-extra values (source of fill times).</param>
        /// <param name="retentionTimes">Retention times (source of duty cycles).</param>
        /// <param name="index">Scan index for the raw file.</param>
        /// <param name="maxProcesses">Maximum degree of parallelism for the intensity calculations.</param>
        /// <returns>The populated <c>ScanMetaDataCollectionDIA</c>.</returns>
        public static ScanMetaDataCollectionDIA AggregateMetaDataDIA(CentroidStreamCollection centroidStreams, SegmentScanCollection segmentScans, MethodDataContainer methodData,
                                                                     TrailerExtraCollection trailerExtras, RetentionTimeCollection retentionTimes, ScanIndex index, int maxProcesses)
        {
            var metaData = new ScanMetaDataCollectionDIA();

            // NOTE(review): the two values below are computed but never read in this
            // method; they are kept for behavioral parity with the original code.
            int[] scans = index.ScanEnumerators[MSOrderType.Any];
            double isoWindow = MetaDataCalculations.Ms1IsoWindow(methodData);

            Console.WriteLine("Aggregating meta data");

            metaData.FillTime = MetaDataCalculations.FillTimes(trailerExtras, index);
            metaData.DutyCycle = MetaDataCalculations.DutyCycle(retentionTimes, index);
            metaData.IntensityDistribution = MetaDataCalculations.IntensityDistributions(centroidStreams, segmentScans, index, maxProcesses);
            metaData.SummedIntensity = MetaDataCalculations.SummedIntensities(centroidStreams, segmentScans, index, maxProcesses);
            metaData.FractionConsumingTop80PercentTotalIntensity = MetaDataCalculations.Top80Frac(centroidStreams, segmentScans, index, maxProcesses);

            return metaData;
        }
コード例 #2
0
        /// <summary>
        /// Calculates MS1 isolation interference for every scan in the analysis order,
        /// processing the scans in parallel batches.
        /// </summary>
        /// <returns>Mapping of scan number to its interference value.</returns>
        public static Dictionary <int, double> Ms1Interference(CentroidStreamCollection centroidStreams, PrecursorMassCollection precursorMasses,
                                                               TrailerExtraCollection trailerExtras, PrecursorScanCollection precursorScans, ScanEventReactionCollection reactions, ScanIndex index)
        {
            var interference = new ConcurrentDictionary<int, double>();

            var analysisScans = index.ScanEnumerators[index.AnalysisOrder];
            var batches = analysisScans.Chunk(Constants.MultiThreading.ChunkSize(analysisScans.Count()));

            Parallel.ForEach(batches, Constants.MultiThreading.Options(), batch =>
            {
                foreach (int scan in batch)
                {
                    // Interference is measured in the precursor (master) MS1 scan.
                    int preScan = precursorScans[scan].MasterScan;

                    double value = Algorithms.Ms1Interference.CalculateForOneScan(centroidStreams[preScan], reactions[scan],
                                                                                  precursorMasses[scan].MonoisotopicMZ, trailerExtras[scan].ChargeState);
                    interference.AddOrUpdate(scan, value, (a, b) => b);
                }
            });

            // Hand the caller a plain dictionary rather than the concurrent one.
            return new Dictionary<int, double>(interference);
        }
コード例 #3
0
        /// <summary>
        /// Runs the DIA quality-control workflow on a raw file: extracts scan data,
        /// aggregates meta data, computes QC metrics, and updates the on-disk QC collection.
        /// </summary>
        /// <param name="rawFile">Open raw file; the MS device is selected here.</param>
        /// <param name="parameters">Workflow parameters (QC data directory, etc.).</param>
        public static void QcDIA(IRawDataPlus rawFile, WorkflowParameters parameters)
        {
            rawFile.SelectInstrument(Device.MS, 1);

            rawFile.CheckIfBoxcar();

            ScanIndex Index = Extract.ScanIndices(rawFile);

            MethodDataContainer methodData = Extract.MethodData(rawFile, Index);

            // Extract.MsData returns both collections, so deconstruct directly instead of
            // pre-allocating throwaway instances that would be immediately overwritten.
            (CentroidStreamCollection centroidStreams, SegmentScanCollection segmentScans) = Extract.MsData(rawFile: rawFile, index: Index);

            TrailerExtraCollection trailerExtras = Extract.TrailerExtras(rawFile, Index);

            RetentionTimeCollection retentionTimes = Extract.RetentionTimes(rawFile, Index);

            ScanMetaDataCollectionDIA metaData = MetaDataProcessingDIA.AggregateMetaDataDIA(centroidStreams, segmentScans, methodData,
                                                                                            trailerExtras, retentionTimes, Index);

            RawMetricsDataDIA metrics = MetaDataProcessingDIA.GetMetricsDataDIA(metaData, methodData, rawFile.FileName, retentionTimes, Index);

            QcDataContainer qcData = new QcDataContainer();

            qcData.DIA = metrics;

            QcDataCollection qcDataCollection = QC.QcWorkflow.LoadOrCreateQcCollection(parameters);

            QC.QcWorkflow.UpdateQcCollection(qcDataCollection, qcData, methodData, rawFile.FileName);
        }
コード例 #4
0
        /// <summary>
        /// Sums the ion intensity of every scan. FTMS scans are read from the centroid
        /// streams; all other scans from the segmented-scan data.
        /// </summary>
        /// <returns>Mapping of scan number to total intensity.</returns>
        public static Dictionary <int, double> SummedIntensities(CentroidStreamCollection centroidStreams,
                                                                 SegmentScanCollection segmentScans, ScanIndex index)
        {
            var summed = new ConcurrentDictionary<int, double>();

            var allScanNumbers = index.allScans.Keys;
            var batches = allScanNumbers.Chunk(Constants.MultiThreading.ChunkSize(allScanNumbers.Count()));

            Parallel.ForEach(batches, Constants.MultiThreading.Options(), batch =>
            {
                foreach (int scan in batch)
                {
                    // FTMS data lives in centroid streams; everything else in segmented scans.
                    double total = index.allScans[scan].MassAnalyzer == MassAnalyzerType.MassAnalyzerFTMS
                        ? centroidStreams[scan].Intensities.Sum()
                        : segmentScans[scan].Intensities.Sum();

                    summed.AddOrUpdate(scan, total, (a, b) => b);
                }
            });

            // Return a plain dictionary copy of the concurrent accumulator.
            return new Dictionary<int, double>(summed);
        }
コード例 #5
0
        /// <summary>
        /// For each scan, computes the fraction of peaks that account for 80% of the
        /// scan's total intensity. FTMS scans use centroid data, others segmented data.
        /// </summary>
        /// <returns>Mapping of scan number to that fraction.</returns>
        public static Dictionary <int, double> Top80Frac(CentroidStreamCollection centroidStreams,
                                                         SegmentScanCollection segmentScans, ScanIndex index)
        {
            var fractions = new ConcurrentDictionary<int, double>();

            var allScanNumbers = index.allScans.Keys;
            var batches = allScanNumbers.Chunk(Constants.MultiThreading.ChunkSize(allScanNumbers.Count()));

            Parallel.ForEach(batches, Constants.MultiThreading.Options(), batch =>
            {
                foreach (int scan in batch)
                {
                    // Pick the intensity source matching the scan's mass analyzer.
                    double frac = index.allScans[scan].MassAnalyzer == MassAnalyzerType.MassAnalyzerFTMS
                        ? centroidStreams[scan].Intensities.FractionOfScansConsumingTotalIntensity(percent: 80)
                        : segmentScans[scan].Intensities.FractionOfScansConsumingTotalIntensity(percent: 80);

                    fractions.AddOrUpdate(scan, frac, (a, b) => b);
                }
            });

            // Return a plain dictionary copy of the concurrent accumulator.
            return new Dictionary<int, double>(fractions);
        }
コード例 #6
0
        /// <summary>
        /// Builds an intensity <c>Distribution</c> for every scan, in parallel batches.
        /// FTMS scans use centroid intensities, all others segmented-scan intensities.
        /// </summary>
        /// <returns>Mapping of scan number to its intensity distribution.</returns>
        public static Dictionary <int, Distribution> IntensityDistributions(CentroidStreamCollection centroidStreams,
                                                                            SegmentScanCollection segmentScans, ScanIndex index)
        {
            var distributions = new ConcurrentDictionary<int, Distribution>();

            var allScanNumbers = index.allScans.Keys;
            var batches = allScanNumbers.Chunk(Constants.MultiThreading.ChunkSize(allScanNumbers.Count()));

            Parallel.ForEach(batches, Constants.MultiThreading.Options(), batch =>
            {
                foreach (int scan in batch)
                {
                    // Pick the intensity source matching the scan's mass analyzer.
                    Distribution dist = index.allScans[scan].MassAnalyzer == MassAnalyzerType.MassAnalyzerFTMS
                        ? new Distribution(centroidStreams[scan].Intensities)
                        : new Distribution(segmentScans[scan].Intensities);

                    distributions.AddOrUpdate(scan, dist, (a, b) => b);
                }
            });

            // Return a plain dictionary copy of the concurrent accumulator.
            return new Dictionary<int, Distribution>(distributions);
        }
コード例 #7
0
        /// <summary>
        /// Re-derives the charge state and monoisotopic m/z of every precursor peak and
        /// writes the refined values back into the precursor-mass and trailer-extra collections.
        /// </summary>
        public static void RefineMonoIsoMassChargeValues(CentroidStreamCollection centroids, PrecursorMassCollection precursorMasses, TrailerExtraCollection trailerExtras, PrecursorPeakCollection precursorPeaks, PrecursorScanCollection precursorScans)
        {
            ProgressIndicator progress = new ProgressIndicator(precursorPeaks.Count(), "Refining precursor charge and monoisotopic mass");

            progress.Start();

            foreach (var peak in precursorPeaks)
            {
                int ms2Scan = peak.Value.Ms2Scan;

                // Prefer the apex MS1 scan of the detected elution peak; fall back to the
                // master (precursor) scan when no peak was found.
                int ms1Scan = peak.Value.PeakFound ? peak.Value.MaxScan : precursorScans[ms2Scan].MasterScan;

                (int refinedCharge, double refinedMass) = GetMonoIsotopicMassCharge(centroids[ms1Scan], precursorMasses[ms2Scan].ParentMZ, trailerExtras[ms2Scan].ChargeState);

                // Write the refined values back into both collections.
                precursorMasses[ms2Scan].MonoisotopicMZ = refinedMass;
                trailerExtras[ms2Scan].MonoisotopicMZ = refinedMass;
                trailerExtras[ms2Scan].ChargeState = refinedCharge;

                progress.Update();
            }

            progress.Done();
        }
コード例 #8
0
ファイル: QuantifyAllScans.cs プロジェクト: stoeckli/RawTools
        /// <summary>
        /// Quantifies reporter ions in every scan of the analysis order, using the
        /// labeling reagents specified in the parse parameters.
        /// </summary>
        /// <returns>Collection of per-scan reporter-ion quantification results.</returns>
        public static QuantDataCollection Quantify(CentroidStreamCollection centroidScans, SegmentScanCollection segmentScans, WorkflowParameters parameters, MethodDataContainer methodData, ScanIndex index)
        {
            int[] scansToQuantify = index.ScanEnumerators[index.AnalysisOrder];

            string labelingReagents = parameters.ParseParams.LabelingReagents;

            QuantDataCollection quantData = new QuantDataCollection { LabelingReagents = labelingReagents };

            ProgressIndicator progress = new ProgressIndicator(scansToQuantify.Length, "Quantifying reporter ions");

            foreach (int scan in scansToQuantify)
            {
                // FTMS quant data comes from centroid spectra, other analyzers from segmented scans.
                if (methodData.QuantAnalyzer == MassAnalyzerType.MassAnalyzerFTMS)
                {
                    quantData.Add(scan, QuantifyReporters.QuantifyOneScan(centroidScans[scan], labelingReagents));
                }
                else
                {
                    quantData.Add(scan, QuantifyReporters.QuantifyOneScan(segmentScans[scan], labelingReagents));
                }

                progress.Update();
            }

            progress.Done();

            return quantData;
        }
コード例 #9
0
ファイル: RunSearch.cs プロジェクト: marcos914/RawTools
        /// <summary>
        /// Writes a random subset of MS2 scans to an MGF file for the QC database search.
        /// </summary>
        /// <param name="parameters">Workflow parameters (number of spectra, QC search directory, fixed-scan flag).</param>
        /// <param name="rawFileName">Raw file name; also used to derive the MGF path.</param>
        /// <param name="fixedScans">NOTE(review): unused — scan selection reads
        /// <c>parameters.QcParams.FixedScans</c> instead; kept for interface compatibility.</param>
        public static void WriteSearchMGF(WorkflowParameters parameters, CentroidStreamCollection centroids, SegmentScanCollection segments, RetentionTimeCollection retentionTimes,
                                          PrecursorMassCollection precursorMasses, PrecursorScanCollection precursorScans, TrailerExtraCollection trailerExtras, MethodDataContainer methodData,
                                          ScanIndex index, string rawFileName, bool fixedScans = false)
        {
            // Removed the unused local `pars` (dead read of QcParams.SearchParameters).
            int[] scans = AdditionalMath.SelectRandomScans(scans: index.ScanEnumerators[MSOrderType.Ms2],
                                                           num: parameters.QcParams.NumberSpectra, fixedScans: parameters.QcParams.FixedScans);

            string mgfFile = ReadWrite.GetPathToFile(parameters.QcParams.QcSearchDataDirectory, rawFileName, ".mgf");

            MgfWriter.WriteMGF(rawFileName, centroids, segments, parameters, retentionTimes, precursorMasses, precursorScans,
                               trailerExtras, methodData, index, outputFile: mgfFile, scans: scans);
        }
コード例 #10
0
        /// <summary>
        /// Extracts the mass spectrum of every indexed scan from the raw file.
        /// FTMS scans are stored as centroid streams; all other scans as segmented scans.
        /// </summary>
        /// <returns>The centroid and segmented scan collections, as a tuple.</returns>
        public static (CentroidStreamCollection centroids, SegmentScanCollection segments) MsData(IRawDataPlus rawFile, ScanIndex index)
        {
            rawFile.SelectInstrument(Device.MS, 1);

            var centroids = new CentroidStreamCollection();
            var segments = new SegmentScanCollection();

            var scans = index.allScans;

            ProgressIndicator progress = new ProgressIndicator(scans.Count(), "Extracting scan data");

            progress.Start();

            foreach (int scan in scans.Keys)
            {
                // Route each scan by its mass analyzer.
                if (index.allScans[scan].MassAnalyzer == MassAnalyzerType.MassAnalyzerFTMS)
                {
                    CentroidStream centroid = rawFile.GetCentroidStream(scan, false);
                    centroids[scan] = new CentroidStreamData(centroid);
                }
                else
                {
                    SegmentedScan segment = rawFile.GetSegmentedScanFromScanNumber(scan, null);
                    segments[scan] = new SegmentedScanData(segment);
                }

                progress.Update();
            }

            progress.Done();

            return (centroids, segments);
        }
コード例 #11
0
        /// <summary>
        /// Builds the per-scan meta data collection for a DDA acquisition: isolation
        /// interference, MS2 cycle density, fill times, duty cycles, and intensity metrics.
        /// </summary>
        /// <returns>The populated <c>ScanMetaDataCollectionDDA</c>.</returns>
        public static ScanMetaDataCollectionDDA AggregateMetaDataDDA(CentroidStreamCollection centroidStreams, SegmentScanCollection segmentScans, MethodDataContainer methodData,
                                                                     PrecursorScanCollection precursorScans, TrailerExtraCollection trailerExtras, PrecursorMassCollection precursorMasses,
                                                                     RetentionTimeCollection retentionTimes, ScanDependentsCollections scanDependents, ScanEventReactionCollection reactions, ScanIndex index)
        {
            var metaData = new ScanMetaDataCollectionDDA();

            // NOTE(review): the two values below are computed but never read in this
            // method; they are kept for behavioral parity with the original code.
            int[] scans = index.ScanEnumerators[MSOrderType.Any];
            double isoWindow = MetaDataCalculations.Ms1IsoWindow(methodData);

            Console.WriteLine("Calculating meta data");

            Console.WriteLine("  MS1 isolation interference");
            metaData.Ms1IsolationInterference = MetaDataCalculations.Ms1Interference(centroidStreams, precursorMasses, trailerExtras,
                                                                                     precursorScans, reactions, index);

            Console.WriteLine("  MS2 scan cycle density");
            metaData.MS2ScansPerCycle = MetaDataCalculations.MS2ScansPerCycle(scanDependents, index);

            Console.WriteLine("  Ion injection time");
            metaData.FillTime = MetaDataCalculations.FillTimes(trailerExtras, index);

            Console.WriteLine("  Duty cycle");
            metaData.DutyCycle = MetaDataCalculations.DutyCycle(retentionTimes, index);

            Console.WriteLine("  Intensity distribution");
            metaData.IntensityDistribution = MetaDataCalculations.IntensityDistributions(centroidStreams, segmentScans, index);

            Console.WriteLine("  Summed intensities");
            metaData.SummedIntensity = MetaDataCalculations.SummedIntensities(centroidStreams, segmentScans, index);

            metaData.FractionConsumingTop80PercentTotalIntensity = MetaDataCalculations.Top80Frac(centroidStreams, segmentScans, index);

            return metaData;
        }
コード例 #12
0
        /// <summary>
        /// Traces the elution profile of one precursor across neighboring MS1 scans and
        /// packages the result as a <c>PrecursorPeakData</c>. Starting from
        /// <paramref name="parentScan"/>, the target mass is followed backwards and then
        /// forwards through the MS1 scan list — matching within 10 ppm — until it is no
        /// longer found, which defines the peak boundaries.
        /// </summary>
        /// <param name="centroids">Per-scan centroid data; masses and intensities are read for each MS1 scan visited.</param>
        /// <param name="retentionTimes">Retention time lookup by scan number.</param>
        /// <param name="targetMass">Precursor m/z to follow across the MS1 scans.</param>
        /// <param name="parentScan">MS1 scan in which the precursor was selected.</param>
        /// <param name="ddScan">Data-dependent MS2 scan this peak belongs to (stored on the result).</param>
        /// <param name="index">Scan index used to enumerate the MS1 scans.</param>
        /// <returns>
        /// The peak data; <c>PeakFound</c> is false (and the rest left unpopulated) when the
        /// target mass is not present within 10 ppm in the parent scan.
        /// </returns>
        private static PrecursorPeakData OnePeak(CentroidStreamCollection centroids, RetentionTimeCollection retentionTimes, double targetMass, int parentScan, int ddScan, ScanIndex index)
        {
            PrecursorPeakData peak = new PrecursorPeakData();

            // The peak initially spans only the parent scan; the walks below widen it.
            int firstScan = parentScan,
                lastScan = parentScan,
                maxScan = parentScan,
                currentScan = parentScan,
                previousMS1scan, nextMS1scan;

            // Flags for peaks that run into either end of the acquisition; those cannot be
            // bracketed with zero-intensity scans and are treated as edge cases by callers.
            bool containsFirstMS1Scan = false,
                 containsLastMS1Scan  = false;

            int[] MS1Scans = index.ScanEnumerators[MSOrderType.Ms];

            double minMassDiff, maxIntensity, parentIntensity;

            List <int>    scans              = new List <int>();
            List <double> profileTimes       = new List <double>();
            List <double> profileIntensities = new List <double>();

            double[] masses, intensities, massDiff;

            // scan number -> intensity of the centroid matched to targetMass in that scan
            Dictionary <int, double> indexedIntensities = new Dictionary <int, double>();

            peak.Ms2Scan = ddScan;

            // first take care of the parent scan data. In QE data sometimes the parent mass is missing from the parent spectrum, so we need to deal with that.

            masses = centroids[currentScan].Masses;//.Where(i => (i > parentMass - 1 & i < parentMass + 1)).ToArray();
            //masses = (from mass in rawData.centroidStreams[currentScan].Masses where mass > parentMass - 1 & mass < parentMass + 1 select mass).ToArray();
            //masses = masses.Where(i => (i > parentMass - 1 & i < parentMass + 1)).ToArray();

            // Empty parent spectrum: nothing to follow, bail out with PeakFound = false.
            if (masses.Length == 0)
            {
                peak.PeakFound = false;
                return(peak);
            }

            massDiff = new double[masses.Length];

            for (int i = 0; i < masses.Length; i++)
            {
                massDiff[i] = Math.Abs(masses[i] - targetMass);
            }

            minMassDiff = massDiff.Min();

            // The precursor counts as present when the closest centroid is within 10 ppm.
            if (minMassDiff / targetMass * 1e6 < 10)
            {
                peak.PeakFound = true;
            }
            else
            {
                peak.PeakFound = false;
                return(peak);
            }

            int scanIndex = Array.IndexOf(MS1Scans, parentScan);

            // now find the first ms1 scan of the peak, just follow the mass (within tolerance) accross scans until it goes to baseline
            while (true)
            {
                currentScan = MS1Scans[scanIndex];
                masses      = centroids[currentScan].Masses;
                intensities = centroids[currentScan].Intensities;

                massDiff = new double[masses.Length];

                for (int i = 0; i < masses.Length; i++)
                {
                    massDiff[i] = Math.Abs(masses[i] - targetMass);
                }

                minMassDiff = massDiff.Min();

                if (minMassDiff / targetMass * 1e6 < 10)
                {
                    // Mass still present: record the scan and its matched intensity,
                    // then keep walking backwards.
                    scans.Add(currentScan);
                    scanIndex -= 1;
                    indexedIntensities.Add(currentScan, intensities[Array.IndexOf(massDiff, minMassDiff)]);
                    if (scanIndex < 0)
                    {
                        // Ran off the front of the MS1 list — the peak touches the first scan.
                        previousMS1scan = currentScan;
                        break;
                    }
                }
                else
                {
                    // Mass gone: the scan before the current one bounds the peak on the left.
                    if (scanIndex == 0)
                    {
                        previousMS1scan = currentScan;
                    }
                    else
                    {
                        previousMS1scan = MS1Scans[scanIndex - 1];
                    }
                    break;
                }
            }

            // now find the last ms1 scan of the peak
            scanIndex = Array.IndexOf(MS1Scans, parentScan) + 1; // reset the ms1 scan indexer, add 1 so we don't replicate the parent scan

            while (true)
            {
                // Check to make sure the ms1 scan isn't the last one....
                if (scanIndex >= MS1Scans.Length)
                {
                    currentScan = MS1Scans.Last();
                    nextMS1scan = currentScan;
                    break;
                }

                currentScan = MS1Scans[scanIndex];
                masses      = centroids[currentScan].Masses;
                intensities = centroids[currentScan].Intensities;

                massDiff = new double[masses.Length];

                for (int i = 0; i < masses.Length; i++)
                {
                    massDiff[i] = Math.Abs(masses[i] - targetMass);
                }

                minMassDiff = massDiff.Min();

                if (minMassDiff / targetMass * 1e6 < 10)
                {
                    // Mass still present: record and keep walking forwards.
                    scans.Add(currentScan);
                    scanIndex += 1;
                    indexedIntensities.Add(currentScan, intensities[Array.IndexOf(massDiff, minMassDiff)]);
                    if (scanIndex >= MS1Scans.Length)
                    {
                        // Ran off the end of the MS1 list — the peak touches the last scan.
                        nextMS1scan = currentScan;
                        break;
                    }
                }
                else
                {
                    // Mass gone: the scan after the current one bounds the peak on the right.
                    if (scanIndex == MS1Scans.Length - 1)
                    {
                        nextMS1scan = currentScan;
                    }
                    else
                    {
                        nextMS1scan = MS1Scans[scanIndex + 1];
                    }
                    break;
                }
            }
            // We need to add an index and intensity for the scans before and after the peak. Otherwise fitting and other calculations later will be a huge pain.
            // We make note of the peaks which contain the first or last MS1 scans. This edge cases will probably need special treatment.

            if (previousMS1scan != scans.Min())
            {
                scans.Add(previousMS1scan);
                indexedIntensities.Add(previousMS1scan, 0);
            }
            else
            {
                containsFirstMS1Scan = true;
            }
            if (nextMS1scan != scans.Max())
            {
                scans.Add(nextMS1scan);
                indexedIntensities.Add(nextMS1scan, 0);
            }
            else
            {
                containsLastMS1Scan = true;
            }

            scans.Sort();
            firstScan = scans.First();
            lastScan  = scans.Last();


            // add the retention times and intensities

            foreach (int scan in scans)
            {
                profileTimes.Add(retentionTimes[scan]);
                profileIntensities.Add(indexedIntensities[scan]);
            }

            // NOTE(review): empty block — appears to be leftover debugging scaffold;
            // confirm intent before removing.
            if (profileIntensities[0] == 0 & profileIntensities[1] == 0)
            {
            }

            maxIntensity    = profileIntensities.Max();
            parentIntensity = indexedIntensities[parentScan];

            // Apex scan = the scan holding the maximum profile intensity.
            maxScan = scans[profileIntensities.IndexOf(maxIntensity)];

            peak.FirstScan = firstScan;
            peak.LastScan  = lastScan;
            peak.MaxScan   = maxScan;
            //peak.PreviousScan = previousMS1scan;
            //peak.NextScan = nextMS1scan;
            peak.ParentScan           = parentScan;
            peak.NScans               = scans.Count();
            peak.Scans                = scans.ToArray();
            peak.ContainsFirstMS1Scan = containsFirstMS1Scan;
            peak.ContainsLastMS1Scan  = containsLastMS1Scan;

            peak.ParentIntensity  = parentIntensity;
            peak.MaximumIntensity = maxIntensity;

            peak.MaximumRetTime = retentionTimes[maxScan];
            peak.ParentRetTime  = retentionTimes[parentScan];

            // Baseline width = retention-time span of the (bracketed) profile.
            peak.BaselineWidth = profileTimes.Last() - profileTimes.First();

            peak.Intensities = profileIntensities.ToArray();
            peak.RetTimes    = profileTimes.ToArray();

            return(peak);
        }
コード例 #13
0
        /// <summary>
        /// Characterizes the elution peak of every MS2 precursor in parallel and collects
        /// run-wide medians of peak width and asymmetry on the returned collection.
        /// </summary>
        /// <param name="MaxProcesses">Maximum degree of parallelism for the batch loop.</param>
        /// <returns>All precursor peaks, with <c>PeakShapeMedians</c> populated.</returns>
        public static PrecursorPeakCollection AnalyzeAllPeaks(CentroidStreamCollection centroids, RetentionTimeCollection retentionTimes,
                                                              PrecursorMassCollection precursorMasses, PrecursorScanCollection precursorScans, ScanIndex index, int MaxProcesses)
        {
            var peaks = new ConcurrentDictionary<int, PrecursorPeakData>();

            var asymmetryDist = new DistributionMultiple();
            var widthDist = new DistributionMultiple();
            var progressLock = new object(); // serializes progress updates from the parallel loop

            int[] ms2Scans = index.ScanEnumerators[MSOrderType.Ms2];

            var batches = ms2Scans.Chunk(Constants.MultiThreading.ChunkSize(ms2Scans.Count()));

            ProgressIndicator progress = new ProgressIndicator(total: ms2Scans.Length, message: "Analyzing precursor peaks");

            progress.Start();

            Parallel.ForEach(batches, Constants.MultiThreading.Options(MaxProcesses), batch =>
            {
                foreach (int scan in batch)
                {
                    // [2018-12-04] changing to use picked mass and not monoisomass. The monoisomass might be low in intensity and would not represent the whole elution profile
                    PrecursorPeakData peak = OnePeak(centroids, retentionTimes, precursorMasses[scan].ParentMZ, precursorScans[scan].MasterScan, ddScan: scan, index: index);

                    // Shape fitting only makes sense for complete, well-sampled peaks.
                    if (peak.NScans < 5 | peak.PeakFound == false |
                        peak.ContainsFirstMS1Scan | peak.ContainsLastMS1Scan)
                    {
                        peak.PeakShape = null;
                    }
                    else
                    {
                        var shape = GetPeakShape(peak);
                        peak.PeakShape = shape;
                        asymmetryDist.Add(shape.Asymmetry);
                        widthDist.Add(shape.Width);
                    }

                    peak.Area = CalculatePeakArea(peak);

                    peaks.AddOrUpdate(scan, peak, (a, b) => b);

                    lock (progressLock)
                    {
                        progress.Update();
                    }
                }
            });

            progress.Done();

            var peaksOut = new PrecursorPeakCollection();

            foreach (var item in peaks)
            {
                peaksOut.Add(item.Key, item.Value);
            }

            // With no usable peaks the medians are undefined; fall back to empty shape containers.
            peaksOut.PeakShapeMedians = widthDist.P50.Count() == 0
                ? new Data.Containers.PeakShape(width: new Width(), asymmetry: new Asymmetry(), peakMax: 0)
                : new Data.Containers.PeakShape(width: widthDist.GetMedians(), asymmetry: asymmetryDist.GetMedians(), peakMax: 0);

            return peaksOut;
        }