/// <summary>
/// Runs the DIA QC workflow for a single raw file: extracts scan data, aggregates
/// per-scan meta data, computes DIA metrics, and updates the persisted QC collection.
/// </summary>
/// <param name="rawFile">Open raw-file handle; the MS device is selected here.</param>
/// <param name="parameters">Workflow parameters (QC output location, etc.).</param>
public static void QcDIA(IRawDataPlus rawFile, WorkflowParameters parameters)
{
    rawFile.SelectInstrument(Device.MS, 1);
    rawFile.CheckIfBoxcar();

    ScanIndex Index = Extract.ScanIndices(rawFile);
    MethodDataContainer methodData = Extract.MethodData(rawFile, Index);

    // Declare the collections directly in the deconstruction; the original code
    // allocated two throwaway instances with `new` and immediately overwrote them.
    (CentroidStreamCollection centroidStreams, SegmentScanCollection segmentScans) =
        Extract.MsData(rawFile: rawFile, index: Index);

    TrailerExtraCollection trailerExtras = Extract.TrailerExtras(rawFile, Index);
    RetentionTimeCollection retentionTimes = Extract.RetentionTimes(rawFile, Index);

    ScanMetaDataCollectionDIA metaData = MetaDataProcessingDIA.AggregateMetaDataDIA(centroidStreams,
        segmentScans, methodData, trailerExtras, retentionTimes, Index);

    RawMetricsDataDIA metrics = MetaDataProcessingDIA.GetMetricsDataDIA(metaData, methodData,
        rawFile.FileName, retentionTimes, Index);

    QcDataContainer qcData = new QcDataContainer();
    qcData.DIA = metrics;

    // Load (or create) the on-disk QC collection and fold this file's results into it.
    QcDataCollection qcDataCollection = QC.QcWorkflow.LoadOrCreateQcCollection(parameters);
    QC.QcWorkflow.UpdateQcCollection(qcDataCollection, qcData, methodData, rawFile.FileName);
}
/// <summary>
/// Computes file-level DIA QC metrics (scan counts, scan rates, fill times,
/// summed-intensity medians, chromatographic coverage) from aggregated scan meta data.
/// </summary>
/// <param name="metaData">Per-scan aggregated meta data (fill times, summed intensities, duty cycles).</param>
/// <param name="methodData">Instrument/method information (analyzers, acquisition order, creation date).</param>
/// <param name="rawFileName">Name of the raw file, recorded in the output.</param>
/// <param name="retentionTimes">Retention time per scan number.</param>
/// <param name="index">Scan index providing scan enumerators by MS order.</param>
/// <returns>A populated <see cref="RawMetricsDataDIA"/>.</returns>
public static RawMetricsDataDIA GetMetricsDataDIA(ScanMetaDataCollectionDIA metaData, MethodDataContainer methodData,
    string rawFileName, RetentionTimeCollection retentionTimes, ScanIndex index)
{
    RawMetricsDataDIA metricsData = new RawMetricsDataDIA();

    Console.WriteLine("Calculating metrics");

    metricsData.DateAcquired = methodData.CreationDate;
    metricsData.RawFileName = rawFileName;
    // Instrument was assigned twice in the original; a single assignment is sufficient.
    metricsData.Instrument = methodData.Instrument;
    metricsData.MS1Analyzer = methodData.MassAnalyzers[MSOrderType.Ms];
    metricsData.MS2Analyzer = methodData.MassAnalyzers[MSOrderType.Ms2];

    // Total analysis time = RT of last scan minus RT of first scan (any MS order).
    metricsData.TotalAnalysisTime = retentionTimes[index.ScanEnumerators[MSOrderType.Any].Last()] -
        retentionTimes[index.ScanEnumerators[MSOrderType.Any].First()];

    metricsData.NumberOfEsiFlags = MetricsCalculations.NumberOfEsiFlags(metaData, index);
    metricsData.TotalScans = index.TotalScans;
    metricsData.MS1Scans = index.ScanEnumerators[MSOrderType.Ms].Length;
    metricsData.MS2Scans = index.ScanEnumerators[MSOrderType.Ms2].Length;
    metricsData.MSOrder = methodData.AnalysisOrder;

    metricsData.MedianSummedMS1Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity,
        index, MSOrderType.Ms);
    metricsData.MedianSummedMS2Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity,
        index, MSOrderType.Ms2);
    metricsData.MedianMS1FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms);
    metricsData.MedianMS2FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms2);

    // NOTE(review): the property is called MeanDutyCycle but it is populated from
    // GetMedianDutyCycle — confirm which statistic is intended before renaming either side.
    metricsData.MeanDutyCycle = MetricsCalculations.GetMedianDutyCycle(metaData.DutyCycle, index);

    metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity =
        MetricsCalculations.GetMedianMs2FractionConsumingTop80PercentTotalIntensity(
            metaData.FractionConsumingTop80PercentTotalIntensity, index);

    // Scan rates in scans per unit of TotalAnalysisTime.
    metricsData.MS1ScanRate = metricsData.MS1Scans / metricsData.TotalAnalysisTime;
    metricsData.MS2ScanRate = metricsData.MS2Scans / metricsData.TotalAnalysisTime;

    (double timeBefore, double timeAfter, double fracAbove) =
        MetricsCalculations.ChromIntensityMetrics(metaData, retentionTimes, index);
    metricsData.TimeBeforeFirstScanToExceedPoint1MaxIntensity = timeBefore;
    metricsData.TimeAfterLastScanToExceedPoint1MaxIntensity = timeAfter;
    metricsData.FractionOfRunAbovePoint1MaxIntensity = fracAbove;

    metricsData.Ms1FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms]
                                                            select metaData.FillTime[x]).ToArray());
    metricsData.Ms2FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms2]
                                                            select metaData.FillTime[x]).ToArray());

    return metricsData;
}
/// <summary>
/// Runs the DIA parse workflow: extracts scan data and meta data from the raw file,
/// optionally writes a metrics matrix, and (parse/MGF output are currently disabled —
/// see the commented-out sections below).
/// </summary>
/// <param name="rawFile">Thread manager for the raw file; accessors are created per extraction call.</param>
/// <param name="parameters">Workflow parameters controlling which outputs are produced.</param>
public static void ParseDIA(IRawFileThreadManager rawFile, WorkflowParameters parameters)
{
    var staticRawFile = rawFile.CreateThreadAccessor();
    staticRawFile.SelectInstrument(Device.MS, 1);
    staticRawFile.CheckIfBoxcar();

    // NOTE(review): a fresh thread accessor is created for each extraction call below
    // rather than reusing staticRawFile — presumably for thread-safety of the underlying
    // reader; confirm before consolidating these calls.
    ScanIndex Index = Extract.ScanIndices(rawFile.CreateThreadAccessor());
    TrailerExtraCollection trailerExtras = Extract.TrailerExtras(rawFile.CreateThreadAccessor(), Index);
    MethodDataContainer methodData = Extract.MethodData(rawFile.CreateThreadAccessor(), Index);
    // precursorScans is only consumed by the (currently commented-out) parse writer below.
    PrecursorScanCollection precursorScans = Extract.PrecursorScansByMasterScanMs2Only(rawFile.CreateThreadAccessor(), trailerExtras, Index);
    (CentroidStreamCollection centroidStreams, SegmentScanCollection segmentScans) =
        Extract.MsData(rawFile: rawFile.CreateThreadAccessor(), index: Index);
    RetentionTimeCollection retentionTimes = Extract.RetentionTimes(rawFile.CreateThreadAccessor(), Index);

    ScanMetaDataCollectionDIA metaData = MetaDataProcessingDIA.AggregateMetaDataDIA(centroidStreams, segmentScans,
        methodData, trailerExtras, retentionTimes, Index);

    RawMetricsDataDIA metrics = null;
    if (parameters.ParseParams.Metrics)
    {
        metrics = MetaDataProcessingDIA.GetMetricsDataDIA(metaData, methodData, staticRawFile.FileName, retentionTimes, Index);
        MetricsWriter.WriteMatrix(metrics, staticRawFile.FileName, parameters.ParseParams.OutputDirectory);
    }

    if (parameters.ParseParams.Parse)
    {
        string matrixFileName = ReadWrite.GetPathToFile(parameters.ParseParams.OutputDirectory, staticRawFile.FileName, "._parse.txt");
        //MatrixWriter.ParseQuantDIA()
        //ParseWriter writerDIA = new ParseWriter(matrixFileName, centroidStreams, segmentScans, metaData,
        //    retentionTimes, trailerExtras, precursorScans, Index);
        //writerDIA.WriteMatrixDIA();
    }

    // I'm not sure what goes into a DIA mgf file, so we aren't making one yet
    //if (parameters.ParseParams.WriteMgf)
    //{
    //    ParseWriter writerMGF = new ParseWriter(centroidStreams, segmentScans, parameters, retentionTimes, precursorMasses, precursorScans, trailerExtras, methodData,
    //        Index);
    //    writerMGF.WriteMGF(staticRawFile.FileName);
    //}
}
/// <summary>
/// Extracts the retention time for every scan in the file (all MS orders).
/// </summary>
/// <param name="rawFile">Open raw-file handle; the MS device is selected here.</param>
/// <param name="index">Scan index providing the scan numbers to read.</param>
/// <returns>A collection mapping scan number to retention time.</returns>
public static RetentionTimeCollection RetentionTimes(IRawDataPlus rawFile, ScanIndex index)
{
    rawFile.SelectInstrument(Device.MS, 1);

    Log.Information("Extracting retention times");

    RetentionTimeCollection retentionTimes = new RetentionTimeCollection();
    var scans = index.ScanEnumerators[MSOrderType.Any];

    // Use Length instead of the LINQ Count() extension: scans is an array.
    ProgressIndicator progress = new ProgressIndicator(scans.Length, "Extracting retention times");

    foreach (int scan in scans)
    {
        retentionTimes[scan] = rawFile.RetentionTimeFromScanNumber(scan);
        progress.Update();
    }
    progress.Done();

    return retentionTimes;
}
/// <summary>
/// Computes the duty cycle for each MS1 scan: the elapsed time (in seconds)
/// between that scan and the next MS1 scan. The final MS1 scan has no
/// successor, so it is assigned NaN.
/// </summary>
/// <param name="retentionTimes">Retention time per scan number (in minutes; x60 converts to seconds).</param>
/// <param name="index">Scan index providing the MS1 scan numbers.</param>
/// <returns>Dictionary mapping MS1 scan number to duty cycle in seconds.</returns>
public static Dictionary <int, double> DutyCycle(RetentionTimeCollection retentionTimes, ScanIndex index)
{
    Dictionary <int, double> cycles = new Dictionary <int, double>();
    int[] ms1Scans = index.ScanEnumerators[MSOrderType.Ms];

    for (int position = 0; position < ms1Scans.Length; position++)
    {
        int currentScan = ms1Scans[position];
        bool hasSuccessor = position < ms1Scans.Length - 1;

        cycles.Add(currentScan, hasSuccessor
            ? (retentionTimes[ms1Scans[position + 1]] - retentionTimes[currentScan]) * 60
            : Double.NaN);
    }

    return cycles;
}
/// <summary>
/// Aggregates per-scan DDA meta data (isolation interference, MS2 cycle density,
/// fill times, duty cycles, intensity distributions, summed intensities, and the
/// top-80%-intensity fraction) into a single collection.
/// </summary>
/// <returns>A populated <see cref="ScanMetaDataCollectionDDA"/>.</returns>
public static ScanMetaDataCollectionDDA AggregateMetaDataDDA(CentroidStreamCollection centroidStreams,
    SegmentScanCollection segmentScans, MethodDataContainer methodData, PrecursorScanCollection precursorScans,
    TrailerExtraCollection trailerExtras, PrecursorMassCollection precursorMasses,
    RetentionTimeCollection retentionTimes, ScanDependentsCollections scanDependents,
    ScanEventReactionCollection reactions, ScanIndex index)
{
    // Unused locals from the original removed: a scans array and an MS1 isolation
    // window that were computed but never read.
    ScanMetaDataCollectionDDA metaData = new ScanMetaDataCollectionDDA();

    Console.WriteLine("Calculating meta data");

    Console.WriteLine("  MS1 isolation interference");
    metaData.Ms1IsolationInterference = MetaDataCalculations.Ms1Interference(centroidStreams, precursorMasses,
        trailerExtras, precursorScans, reactions, index);

    Console.WriteLine("  MS2 scan cycle density");
    metaData.MS2ScansPerCycle = MetaDataCalculations.MS2ScansPerCycle(scanDependents, index);

    Console.WriteLine("  Ion injection time");
    metaData.FillTime = MetaDataCalculations.FillTimes(trailerExtras, index);

    Console.WriteLine("  Duty cycle");
    metaData.DutyCycle = MetaDataCalculations.DutyCycle(retentionTimes, index);

    Console.WriteLine("  Intensity distribution");
    metaData.IntensityDistribution = MetaDataCalculations.IntensityDistributions(centroidStreams, segmentScans, index);

    Console.WriteLine("  Summed intensities");
    metaData.SummedIntensity = MetaDataCalculations.SummedIntensities(centroidStreams, segmentScans, index);

    metaData.FractionConsumingTop80PercentTotalIntensity =
        MetaDataCalculations.Top80Frac(centroidStreams, segmentScans, index);

    return metaData;
}
/// <summary>
/// Writes an MGF file containing a random (or fixed) subset of MS2 spectra for the QC search.
/// </summary>
/// <param name="parameters">Workflow parameters (number of spectra, fixed-scan flag, output directory).</param>
/// <param name="fixedScans">Overload default retained for callers; the scan selection itself
/// uses parameters.QcParams.FixedScans.</param>
public static void WriteSearchMGF(WorkflowParameters parameters, CentroidStreamCollection centroids,
    SegmentScanCollection segments, RetentionTimeCollection retentionTimes, PrecursorMassCollection precursorMasses,
    PrecursorScanCollection precursorScans, TrailerExtraCollection trailerExtras, MethodDataContainer methodData,
    ScanIndex index, string rawFileName, bool fixedScans = false)
{
    // Unused local removed: the search parameters were fetched into `pars` but never read.

    // Choose which MS2 scans go into the MGF (random subset unless fixed scans requested).
    int[] scans = AdditionalMath.SelectRandomScans(scans: index.ScanEnumerators[MSOrderType.Ms2],
        num: parameters.QcParams.NumberSpectra, fixedScans: parameters.QcParams.FixedScans);

    string mgfFile = ReadWrite.GetPathToFile(parameters.QcParams.QcSearchDataDirectory, rawFileName, ".mgf");

    MgfWriter.WriteMGF(rawFileName, centroids, segments, parameters, retentionTimes, precursorMasses,
        precursorScans, trailerExtras, methodData, index, outputFile: mgfFile, scans: scans);
}
/// <summary>
/// Computes file-level DDA QC metrics: scan counts and rates, fill times, intensity
/// medians, precursor peak shape statistics, chromatographic coverage, and (optionally)
/// reporter-ion quantification summaries.
/// </summary>
/// <param name="metaData">Per-scan aggregated DDA meta data.</param>
/// <param name="methodData">Instrument/method information (analyzers, analysis order).</param>
/// <param name="rawFileName">Name of the raw file, recorded in the output.</param>
/// <param name="retentionTimes">Retention time per scan number.</param>
/// <param name="index">Scan index providing scan enumerators by MS order.</param>
/// <param name="peakData">Precursor peak fit results (widths, asymmetries, intensities).</param>
/// <param name="precursorScans">Precursor scan relationships (master scan per dependent scan).</param>
/// <param name="quantData">Optional reporter-ion quant data; when null the quant section is skipped.</param>
/// <returns>A populated <see cref="RawMetricsDataDDA"/>.</returns>
public static RawMetricsDataDDA GetMetricsDataDDA(ScanMetaDataCollectionDDA metaData, MethodDataContainer methodData,
    string rawFileName, RetentionTimeCollection retentionTimes, ScanIndex index, PrecursorPeakCollection peakData,
    PrecursorScanCollection precursorScans, QuantDataCollection quantData = null)
{
    RawMetricsDataDDA metricsData = new RawMetricsDataDDA();

    Console.WriteLine("Calculating metrics");

    metricsData.DateAcquired = methodData.CreationDate;
    metricsData.RawFileName = rawFileName;
    // Instrument was assigned twice in the original; once is sufficient.
    metricsData.Instrument = methodData.Instrument;
    metricsData.MS1Analyzer = methodData.MassAnalyzers[MSOrderType.Ms];
    metricsData.MS2Analyzer = methodData.MassAnalyzers[MSOrderType.Ms2];

    metricsData.TotalAnalysisTime = retentionTimes[index.ScanEnumerators[MSOrderType.Any].Last()] -
        retentionTimes[index.ScanEnumerators[MSOrderType.Any].First()];

    metricsData.NumberOfEsiFlags = MetricsCalculations.NumberOfEsiFlags(metaData, index);
    metricsData.TotalScans = index.TotalScans;
    metricsData.MS1Scans = index.ScanEnumerators[MSOrderType.Ms].Length;
    metricsData.MS2Scans = index.ScanEnumerators[MSOrderType.Ms2].Length;

    // MS3 metrics only apply when the method acquires MS3 scans.
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3Analyzer = methodData.MassAnalyzers[MSOrderType.Ms3];
        metricsData.MS3Scans = index.ScanEnumerators[MSOrderType.Ms3].Length;
    }
    else
    {
        metricsData.MS3Analyzer = MassAnalyzerType.Any;
        metricsData.MS3Scans = 0;
    }

    // Unused local removed: the original built a HashSet of picked MS1 master scans
    // (pickedMs1) and never read it.

    metricsData.MSOrder = methodData.AnalysisOrder;

    metricsData.MedianSummedMS1Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity,
        index, MSOrderType.Ms);
    metricsData.MedianSummedMS2Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity,
        index, MSOrderType.Ms2);

    metricsData.MedianPrecursorIntensity =
        (from x in peakData.Keys select peakData[x].ParentIntensity).ToArray().Percentile(50);

    metricsData.MedianMS1FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms);
    metricsData.MedianMS2FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms2);
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MedianMS3FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms3);
    }

    metricsData.MeanTopN = MetricsCalculations.GetMeanMs2ScansPerCycle(metaData.MS2ScansPerCycle);

    // NOTE(review): MeanDutyCycle is populated from GetMedianDutyCycle — the property
    // name and statistic disagree; confirm which is intended.
    metricsData.MeanDutyCycle = MetricsCalculations.GetMedianDutyCycle(metaData.DutyCycle, index);

    metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity =
        MetricsCalculations.GetMedianMs2FractionConsumingTop80PercentTotalIntensity(
            metaData.FractionConsumingTop80PercentTotalIntensity, index);

    metricsData.MS1ScanRate = metricsData.MS1Scans / metricsData.TotalAnalysisTime;
    metricsData.MS2ScanRate = metricsData.MS2Scans / metricsData.TotalAnalysisTime;
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3ScanRate = metricsData.MS3Scans / metricsData.TotalAnalysisTime;
    }

    // NOTE(review): P10/P50 here appear to be peak widths measured at 10% and 50% of
    // peak height (baseline vs. half-height widths), not percentiles — confirm against
    // the PeakShape container before changing.
    metricsData.MedianBaselinePeakWidth = peakData.PeakShapeMedians.Width.P10;
    metricsData.MedianHalfHeightPeakWidth = peakData.PeakShapeMedians.Width.P50;

    // We can't access the instrument method on Linux, so assume the gradient length
    // equals the length of the MS acquisition.
    metricsData.Gradient = retentionTimes[index.allScans.Keys.Max()];
    metricsData.PeakCapacity = metricsData.Gradient / metricsData.MedianHalfHeightPeakWidth;

    metricsData.MedianAsymmetryFactor = peakData.PeakShapeMedians.Asymmetry.P10;

    // Median MS1 isolation interference over all scans at the analysis order.
    metricsData.MedianMs1IsolationInterference = (from scan in index.ScanEnumerators[methodData.AnalysisOrder]
                                                  select metaData.Ms1IsolationInterference[scan]).ToArray().Percentile(50);

    (double timeBefore, double timeAfter, double fracAbove) =
        MetricsCalculations.ChromIntensityMetrics(metaData, retentionTimes, index);
    metricsData.TimeBeforeFirstScanToExceedPoint1MaxIntensity = timeBefore;
    metricsData.TimeAfterLastScanToExceedPoint1MaxIntensity = timeAfter;
    metricsData.FractionOfRunAbovePoint1MaxIntensity = fracAbove;

    metricsData.Ms1FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms]
                                                            select metaData.FillTime[x]).ToArray());
    metricsData.Ms2FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms2]
                                                            select metaData.FillTime[x]).ToArray());
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.Ms3FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms3]
                                                                select metaData.FillTime[x]).ToArray());
    }

    metricsData.PeakShape.Asymmetry.P10 = peakData.PeakShapeMedians.Asymmetry.P10;
    metricsData.PeakShape.Asymmetry.P50 = peakData.PeakShapeMedians.Asymmetry.P50;
    metricsData.PeakShape.Width.P10 = peakData.PeakShapeMedians.Width.P10;
    metricsData.PeakShape.Width.P50 = peakData.PeakShapeMedians.Width.P50;

    // Quant meta data, only when reporter-ion quant was performed.
    double medianReporterIntensity = 0;
    QuantMetaData quantMetaData = new QuantMetaData();
    SerializableDictionary <string, double> medianReporterIntensityByChannel = new SerializableDictionary <string, double>();
    if (quantData != null)
    {
        string reagent = quantData.LabelingReagents;
        string[] allTags = new LabelingReagents().Reagents[reagent].Labels;
        List <double> allChannels = new List <double>();
        Dictionary <string, List <double> > byChannel = new Dictionary <string, List <double> >();
        foreach (string tag in allTags)
        {
            byChannel.Add(tag, new List <double>());
        }

        // Collect reporter intensities per channel and overall.
        foreach (int scan in index.ScanEnumerators[methodData.AnalysisOrder])
        {
            foreach (string tag in allTags)
            {
                byChannel[tag].Add(quantData[scan][tag].Intensity);
                allChannels.Add(quantData[scan][tag].Intensity);
            }
        }

        medianReporterIntensity = allChannels.ToArray().Percentile(50);
        foreach (string tag in allTags)
        {
            medianReporterIntensityByChannel[tag] = byChannel[tag].ToArray().Percentile(50);
        }

        quantMetaData.medianReporterIntensity = medianReporterIntensity;
        quantMetaData.medianReporterIntensityByChannel = medianReporterIntensityByChannel;
        quantMetaData.quantTags = allTags;
        metricsData.QuantMeta = quantMetaData;
        metricsData.IncludesQuant = true;
    }

    return metricsData;
}
/// <summary>
/// Aggregates per-scan DIA meta data (fill times, duty cycles, intensity distributions,
/// summed intensities, and the top-80%-intensity fraction) into a single collection.
/// </summary>
/// <param name="maxProcesses">Degree of parallelism forwarded to the multithreaded calculations.</param>
/// <returns>A populated <see cref="ScanMetaDataCollectionDIA"/>.</returns>
public static ScanMetaDataCollectionDIA AggregateMetaDataDIA(CentroidStreamCollection centroidStreams,
    SegmentScanCollection segmentScans, MethodDataContainer methodData, TrailerExtraCollection trailerExtras,
    RetentionTimeCollection retentionTimes, ScanIndex index, int maxProcesses)
{
    // Unused locals from the original removed: a scans array and an MS1 isolation
    // window that were computed but never read.
    ScanMetaDataCollectionDIA metaData = new ScanMetaDataCollectionDIA();

    Console.WriteLine("Aggregating meta data");

    metaData.FillTime = MetaDataCalculations.FillTimes(trailerExtras, index);
    metaData.DutyCycle = MetaDataCalculations.DutyCycle(retentionTimes, index);
    metaData.IntensityDistribution = MetaDataCalculations.IntensityDistributions(centroidStreams, segmentScans,
        index, maxProcesses);
    metaData.SummedIntensity = MetaDataCalculations.SummedIntensities(centroidStreams, segmentScans,
        index, maxProcesses);
    metaData.FractionConsumingTop80PercentTotalIntensity = MetaDataCalculations.Top80Frac(centroidStreams,
        segmentScans, index, maxProcesses);

    return metaData;
}
/// <summary>
/// Convenience overload: computes chromatographic intensity metrics from DDA meta data
/// by forwarding its summed-intensity table to the dictionary-based overload.
/// </summary>
public static (double TimeBefore, double TimeAfter, double FractionAbove) ChromIntensityMetrics(
    ScanMetaDataCollectionDDA metaData, RetentionTimeCollection retentionTimes, ScanIndex index) =>
    ChromIntensityMetrics(metaData.SummedIntensity, retentionTimes, index);
/// <summary>
/// Computes chromatographic coverage metrics from summed MS1 intensities:
/// the retention time before the smoothed signal first exceeds 10% of its maximum,
/// the time remaining after it last exceeds that threshold, and the fraction of
/// the run spent between those two points.
/// </summary>
/// <param name="SummedIntensity">Summed intensity per scan number.</param>
/// <param name="retentionTimes">Retention time per scan number.</param>
/// <param name="index">Scan index providing the MS1 scan numbers.</param>
/// <returns>(time before first crossing, time after last crossing, fraction of run above threshold).</returns>
public static (double TimeBefore, double TimeAfter, double FractionAbove) ChromIntensityMetrics(
    Dictionary <int, double> SummedIntensity, RetentionTimeCollection retentionTimes, ScanIndex index)
{
    double firstRtToExceed10 = 0;
    double lastRtToExceed10 = 0;
    double proportionCovered;

    var scans = index.ScanEnumerators[MSOrderType.Ms];
    // Unused local removed: the original also built a reversed enumerable that was never used.
    var totalIntList = (from x in scans select SummedIntensity[x]).ToArray();

    // Threshold is 10% of the maximum summed intensity. (An earlier comment said "Q1",
    // but the code has always used max/10.)
    double threshold = totalIntList.Max() / 10;

    // First RT whose 20-point moving average exceeds the threshold.
    for (int i = 0; i < scans.Length; i++)
    {
        int scan = scans[i];
        if (totalIntList.MovingAverage(i, 20) > threshold)
        {
            firstRtToExceed10 = retentionTimes[scan];
            break;
        }
    }

    // Last RT whose 20-point moving average exceeds the threshold (scan from the end).
    for (int i = scans.Length - 1; i >= 0; i--)
    {
        int scan = scans[i];
        if (totalIntList.MovingAverage(i, 20) > threshold)
        {
            lastRtToExceed10 = retentionTimes[scan];
            break;
        }
    }

    // Fraction of the run (relative to the final MS1 retention time) between the crossings.
    proportionCovered = (lastRtToExceed10 - firstRtToExceed10) / retentionTimes[scans.Last()];

    return (firstRtToExceed10, retentionTimes[scans.Last()] - lastRtToExceed10, proportionCovered);
}
/// <summary>
/// Reconstructs the elution profile of one precursor: starting at the parent MS1 scan,
/// follows the target mass (within 10 ppm) backwards and forwards across MS1 scans until
/// the signal disappears, then records the profile (scans, retention times, intensities)
/// and its bounding/maximum scans.
/// </summary>
/// <param name="centroids">Centroid streams keyed by scan number.</param>
/// <param name="retentionTimes">Retention time per scan number.</param>
/// <param name="targetMass">Picked precursor m/z to follow.</param>
/// <param name="parentScan">MS1 scan on which the precursor was picked.</param>
/// <param name="ddScan">Data-dependent (MS2) scan number, recorded on the result.</param>
/// <param name="index">Scan index providing the MS1 scan numbers.</param>
/// <returns>A <see cref="PrecursorPeakData"/>; PeakFound is false when the mass is absent
/// from the parent spectrum or outside 10 ppm.</returns>
private static PrecursorPeakData OnePeak(CentroidStreamCollection centroids, RetentionTimeCollection retentionTimes,
    double targetMass, int parentScan, int ddScan, ScanIndex index)
{
    PrecursorPeakData peak = new PrecursorPeakData();

    int firstScan = parentScan, lastScan = parentScan, maxScan = parentScan,
        currentScan = parentScan, previousMS1scan, nextMS1scan;

    bool containsFirstMS1Scan = false, containsLastMS1Scan = false;

    int[] MS1Scans = index.ScanEnumerators[MSOrderType.Ms];

    double minMassDiff, maxIntensity, parentIntensity;

    List <int> scans = new List <int>();
    List <double> profileTimes = new List <double>();
    List <double> profileIntensities = new List <double>();

    double[] masses, intensities, massDiff;

    Dictionary <int, double> indexedIntensities = new Dictionary <int, double>();

    peak.Ms2Scan = ddScan;

    // First take care of the parent scan data. In QE data sometimes the parent mass is
    // missing from the parent spectrum, so we need to deal with that.
    masses = centroids[currentScan].Masses;

    if (masses.Length == 0)
    {
        peak.PeakFound = false;
        return peak;
    }

    massDiff = new double[masses.Length];
    for (int i = 0; i < masses.Length; i++)
    {
        massDiff[i] = Math.Abs(masses[i] - targetMass);
    }
    minMassDiff = massDiff.Min();

    // Require the closest centroid to be within 10 ppm of the target mass.
    if (minMassDiff / targetMass * 1e6 < 10)
    {
        peak.PeakFound = true;
    }
    else
    {
        peak.PeakFound = false;
        return peak;
    }

    int scanIndex = Array.IndexOf(MS1Scans, parentScan);

    // Walk backwards from the parent scan, following the mass (within tolerance)
    // across scans until it goes to baseline; this finds the first MS1 scan of the peak.
    while (true)
    {
        currentScan = MS1Scans[scanIndex];
        masses = centroids[currentScan].Masses;
        intensities = centroids[currentScan].Intensities;

        massDiff = new double[masses.Length];
        for (int i = 0; i < masses.Length; i++)
        {
            massDiff[i] = Math.Abs(masses[i] - targetMass);
        }
        minMassDiff = massDiff.Min();

        if (minMassDiff / targetMass * 1e6 < 10)
        {
            scans.Add(currentScan);
            scanIndex -= 1;
            indexedIntensities.Add(currentScan, intensities[Array.IndexOf(massDiff, minMassDiff)]);
            if (scanIndex < 0)
            {
                previousMS1scan = currentScan;
                break;
            }
        }
        else
        {
            if (scanIndex == 0)
            {
                previousMS1scan = currentScan;
            }
            else
            {
                previousMS1scan = MS1Scans[scanIndex - 1];
            }
            break;
        }
    }

    // Now find the last MS1 scan of the peak. Reset the MS1 scan indexer, adding 1
    // so we don't replicate the parent scan.
    scanIndex = Array.IndexOf(MS1Scans, parentScan) + 1;

    while (true)
    {
        // Check to make sure the MS1 scan isn't the last one.
        if (scanIndex >= MS1Scans.Length)
        {
            currentScan = MS1Scans.Last();
            nextMS1scan = currentScan;
            break;
        }

        currentScan = MS1Scans[scanIndex];
        masses = centroids[currentScan].Masses;
        intensities = centroids[currentScan].Intensities;

        massDiff = new double[masses.Length];
        for (int i = 0; i < masses.Length; i++)
        {
            massDiff[i] = Math.Abs(masses[i] - targetMass);
        }
        minMassDiff = massDiff.Min();

        if (minMassDiff / targetMass * 1e6 < 10)
        {
            scans.Add(currentScan);
            scanIndex += 1;
            indexedIntensities.Add(currentScan, intensities[Array.IndexOf(massDiff, minMassDiff)]);
            if (scanIndex >= MS1Scans.Length)
            {
                nextMS1scan = currentScan;
                break;
            }
        }
        else
        {
            if (scanIndex == MS1Scans.Length - 1)
            {
                nextMS1scan = currentScan;
            }
            else
            {
                nextMS1scan = MS1Scans[scanIndex + 1];
            }
            break;
        }
    }

    // Add an index and zero intensity for the scans immediately before and after the peak;
    // fitting and later calculations depend on these boundary points. Peaks that contain the
    // first or last MS1 scan of the file are flagged — those edge cases need special treatment.
    if (previousMS1scan != scans.Min())
    {
        scans.Add(previousMS1scan);
        indexedIntensities.Add(previousMS1scan, 0);
    }
    else
    {
        containsFirstMS1Scan = true;
    }
    if (nextMS1scan != scans.Max())
    {
        scans.Add(nextMS1scan);
        indexedIntensities.Add(nextMS1scan, 0);
    }
    else
    {
        containsLastMS1Scan = true;
    }

    scans.Sort();
    firstScan = scans.First();
    lastScan = scans.Last();

    // Collect the retention times and intensities in scan order.
    foreach (int scan in scans)
    {
        profileTimes.Add(retentionTimes[scan]);
        profileIntensities.Add(indexedIntensities[scan]);
    }

    // (A dead empty `if` checking the first two profile intensities was removed here;
    // it had no body and could throw on a single-point profile.)

    maxIntensity = profileIntensities.Max();
    parentIntensity = indexedIntensities[parentScan];
    maxScan = scans[profileIntensities.IndexOf(maxIntensity)];

    peak.FirstScan = firstScan;
    peak.LastScan = lastScan;
    peak.MaxScan = maxScan;
    peak.ParentScan = parentScan;
    peak.NScans = scans.Count;
    peak.Scans = scans.ToArray();
    peak.ContainsFirstMS1Scan = containsFirstMS1Scan;
    peak.ContainsLastMS1Scan = containsLastMS1Scan;
    peak.ParentIntensity = parentIntensity;
    peak.MaximumIntensity = maxIntensity;
    peak.MaximumRetTime = retentionTimes[maxScan];
    peak.ParentRetTime = retentionTimes[parentScan];
    peak.BaselineWidth = profileTimes.Last() - profileTimes.First();
    peak.Intensities = profileIntensities.ToArray();
    peak.RetTimes = profileTimes.ToArray();

    return peak;
}
/// <summary>
/// Analyzes the precursor elution peak for every MS2 scan in parallel, computing peak
/// shapes (width/asymmetry) and areas, and returns the collection together with the
/// median peak-shape statistics across all usable peaks.
/// </summary>
/// <param name="centroids">Centroid streams keyed by scan number.</param>
/// <param name="retentionTimes">Retention time per scan number.</param>
/// <param name="precursorMasses">Picked precursor m/z per MS2 scan.</param>
/// <param name="precursorScans">Master (parent MS1) scan per MS2 scan.</param>
/// <param name="index">Scan index providing the MS2 scan numbers.</param>
/// <param name="MaxProcesses">Maximum degree of parallelism.</param>
/// <returns>Peak data per MS2 scan, with PeakShapeMedians populated (zeros when no peak was usable).</returns>
public static PrecursorPeakCollection AnalyzeAllPeaks(CentroidStreamCollection centroids,
    RetentionTimeCollection retentionTimes, PrecursorMassCollection precursorMasses,
    PrecursorScanCollection precursorScans, ScanIndex index, int MaxProcesses)
{
    ConcurrentDictionary <int, PrecursorPeakData> peaks = new ConcurrentDictionary <int, PrecursorPeakData>();
    // NOTE(review): these accumulators are mutated from multiple parallel workers without
    // a lock — confirm DistributionMultiple.Add is thread-safe.
    DistributionMultiple allPeaksAsymmetry = new DistributionMultiple();
    DistributionMultiple allPeaksWidths = new DistributionMultiple();

    // Used only to serialize progress updates inside the parallel loop.
    var lockTarget = new object();

    int[] ms2Scans = index.ScanEnumerators[MSOrderType.Ms2];
    int chunkSize = Constants.MultiThreading.ChunkSize(ms2Scans.Length);
    var batches = ms2Scans.Chunk(chunkSize);

    ProgressIndicator P = new ProgressIndicator(total: ms2Scans.Length, message: "Analyzing precursor peaks");
    P.Start();

    Parallel.ForEach(batches, Constants.MultiThreading.Options(MaxProcesses), batch =>
    {
        PrecursorPeakData peak;
        foreach (int scan in batch)
        {
            // [2018-12-04] Use the picked mass, not the monoisotopic mass: the monoisotopic
            // peak might be low in intensity and would not represent the whole elution profile.
            // (This comment was split across lines in the original and left a stray sentence
            // outside the comment marker.)
            peak = OnePeak(centroids, retentionTimes, precursorMasses[scan].ParentMZ,
                precursorScans[scan].MasterScan, ddScan: scan, index: index);

            // Peaks that are too short, not found, or clipped at either end of the file
            // cannot be fitted reliably.
            if (peak.NScans < 5 | peak.PeakFound == false | peak.ContainsFirstMS1Scan | peak.ContainsLastMS1Scan)
            {
                peak.PeakShape = null;
            }
            else
            {
                var newShape = GetPeakShape(peak);
                peak.PeakShape = newShape;
                allPeaksAsymmetry.Add(newShape.Asymmetry);
                allPeaksWidths.Add(newShape.Width);
            }

            peak.Area = CalculatePeakArea(peak);

            // NOTE(review): the update factory keeps the existing value on a key collision;
            // scan numbers should be unique so this path is not expected to run.
            peaks.AddOrUpdate(scan, peak, (a, b) => b);

            lock (lockTarget)
            {
                P.Update();
            }
        }
    });
    P.Done();

    var peaksOut = new PrecursorPeakCollection();
    foreach (var item in peaks)
    {
        peaksOut.Add(item.Key, item.Value);
    }

    // If no peak produced a usable shape, fall back to empty/zero medians.
    if (allPeaksWidths.P50.Count() == 0)
    {
        peaksOut.PeakShapeMedians = new Data.Containers.PeakShape(width: new Width(),
            asymmetry: new Asymmetry(), peakMax: 0);
    }
    else
    {
        peaksOut.PeakShapeMedians = new Data.Containers.PeakShape(width: allPeaksWidths.GetMedians(),
            asymmetry: allPeaksAsymmetry.GetMedians(), peakMax: 0);
    }

    return peaksOut;
}