/// <summary>
/// Quantifies reporter ions for every scan of the analysis MS order and
/// collects the results into a new <see cref="QuantDataCollection"/>.
/// FTMS quant scans are read from the centroid streams; any other analyzer
/// falls back to the segmented scan data.
/// </summary>
/// <param name="centroidScans">Centroid stream data keyed by scan number.</param>
/// <param name="segmentScans">Segmented scan data keyed by scan number.</param>
/// <param name="parameters">Workflow parameters; supplies the labeling reagent name.</param>
/// <param name="methodData">Method information; supplies the quant mass-analyzer type.</param>
/// <param name="index">Scan index; supplies the scan numbers for the analysis order.</param>
/// <returns>A populated <see cref="QuantDataCollection"/> with LabelingReagents set.</returns>
public static QuantDataCollection Quantify(CentroidStreamCollection centroidScans, SegmentScanCollection segmentScans, WorkflowParameters parameters, MethodDataContainer methodData, ScanIndex index)
{
    int[] quantScanNumbers = index.ScanEnumerators[index.AnalysisOrder];

    QuantDataCollection results = new QuantDataCollection();
    ProgressIndicator indicator = new ProgressIndicator(quantScanNumbers.Length, "Quantifying reporter ions");

    string reagents = parameters.ParseParams.LabelingReagents;
    results.LabelingReagents = reagents;

    foreach (int scanNumber in quantScanNumbers)
    {
        // FTMS data carries centroid streams; other analyzers only provide segmented scans.
        results.Add(scanNumber,
                    methodData.QuantAnalyzer == MassAnalyzerType.MassAnalyzerFTMS
                        ? QuantifyReporters.QuantifyOneScan(centroidScans[scanNumber], reagents)
                        : QuantifyReporters.QuantifyOneScan(segmentScans[scanNumber], reagents));
        indicator.Update();
    }

    indicator.Done();
    return results;
}
/// <summary>
/// Extension-method variant of reporter-ion quantification: first extracts the
/// appropriate scan data from the raw file (centroid streams for FTMS, segmented
/// scans otherwise), then quantifies every scan of the analysis order into
/// <paramref name="quantData"/> and records the operation on the raw data collection.
/// </summary>
/// <param name="quantData">Collection to receive per-scan quant results.</param>
/// <param name="rawData">Raw data container; supplies method data, scan index, and scan data.</param>
/// <param name="rawFile">Open raw file handle used for extraction.</param>
/// <param name="labelingReagent">Name of the labeling reagent (e.g. TMT10).</param>
public static void Quantify(this QuantDataCollection quantData, RawDataCollection rawData, IRawDataPlus rawFile, string labelingReagent)
{
    MassAnalyzerType analyzer = rawData.methodData.QuantAnalyzer;
    bool useCentroids = analyzer == MassAnalyzerType.MassAnalyzerFTMS;

    // Make sure the scan data we are about to read has been pulled from the raw file.
    if (useCentroids)
    {
        rawData.ExtractCentroidStreams(rawFile, rawData.methodData.AnalysisOrder);
    }
    else
    {
        rawData.ExtractSegmentScans(rawFile, rawData.methodData.AnalysisOrder);
    }

    ScanIndex scanIndex = rawData.scanIndex;
    Dictionary<int, CentroidStreamData> centroids = rawData.centroidStreams;
    Dictionary<int, SegmentedScanData> segments = rawData.segmentedScans;
    int[] scanNumbers = scanIndex.ScanEnumerators[scanIndex.AnalysisOrder];

    ProgressIndicator indicator = new ProgressIndicator(scanNumbers.Length, "Quantifying reporter ions");
    quantData.LabelingReagents = labelingReagent;

    foreach (int scanNumber in scanNumbers)
    {
        QuantifyReporters quantifier = useCentroids
            ? new QuantifyReporters(centroids[scanNumber], labelingReagent)
            : new QuantifyReporters(segments[scanNumber], labelingReagent);
        quantData.Add(scanNumber, quantifier.quantData);
        indicator.Update();
    }

    indicator.Done();

    // Record that quantification has been performed so downstream consumers can check.
    rawData.Performed.Add(Operations.Quantification);
}
/// <summary>
/// Assembles run-level DDA metrics (scan counts, fill times, scan rates, peak shape,
/// chromatographic metrics, and optional reporter-ion quant summaries) into a
/// <see cref="RawMetricsDataDDA"/> container.
/// </summary>
/// <param name="metaData">Aggregated per-scan meta data (fill times, intensities, duty cycles, ...).</param>
/// <param name="methodData">Instrument method information (analyzers, analysis order, creation date).</param>
/// <param name="rawFileName">Name of the raw file the metrics describe.</param>
/// <param name="retentionTimes">Retention time lookup by scan number.</param>
/// <param name="index">Scan index (scan enumerators per MS order, total scan count).</param>
/// <param name="peakData">Precursor peak data (peak shape medians, parent intensities).</param>
/// <param name="precursorScans">Precursor scan mapping (currently unused; kept for interface compatibility).</param>
/// <param name="quantData">Optional reporter-ion quant data; when present, quant summary metrics are added.</param>
/// <returns>The populated metrics container.</returns>
public static RawMetricsDataDDA GetMetricsDataDDA(ScanMetaDataCollectionDDA metaData, MethodDataContainer methodData, string rawFileName, RetentionTimeCollection retentionTimes, ScanIndex index, PrecursorPeakCollection peakData, PrecursorScanCollection precursorScans, QuantDataCollection quantData = null)
{
    RawMetricsDataDDA metricsData = new RawMetricsDataDDA();
    metricsData.DateAcquired = methodData.CreationDate;
    // FIX: Instrument was previously assigned twice with the same value; assign once.
    metricsData.Instrument = methodData.Instrument;
    Console.WriteLine("Calculating metrics");
    metricsData.RawFileName = rawFileName;
    metricsData.MS1Analyzer = methodData.MassAnalyzers[MSOrderType.Ms];
    metricsData.MS2Analyzer = methodData.MassAnalyzers[MSOrderType.Ms2];

    // Total analysis time = span between the first and last scan of any MS order.
    metricsData.TotalAnalysisTime = retentionTimes[index.ScanEnumerators[MSOrderType.Any].Last()] -
                                    retentionTimes[index.ScanEnumerators[MSOrderType.Any].First()];

    metricsData.NumberOfEsiFlags = MetricsCalculations.NumberOfEsiFlags(metaData, index);

    metricsData.TotalScans = index.TotalScans;
    metricsData.MS1Scans = index.ScanEnumerators[MSOrderType.Ms].Length;
    metricsData.MS2Scans = index.ScanEnumerators[MSOrderType.Ms2].Length;

    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3Analyzer = methodData.MassAnalyzers[MSOrderType.Ms3];
        metricsData.MS3Scans = index.ScanEnumerators[MSOrderType.Ms3].Length;
    }
    else
    {
        // No MS3 in this run; use sentinel values.
        metricsData.MS3Analyzer = MassAnalyzerType.Any;
        metricsData.MS3Scans = 0;
    }

    // FIX: removed dead local `pickedMs1` (a HashSet of master scans materialized to a
    // list) that was computed but never read anywhere in the method.

    metricsData.MSOrder = methodData.AnalysisOrder;

    metricsData.MedianSummedMS1Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity, index, MSOrderType.Ms);
    metricsData.MedianSummedMS2Intensity = MetricsCalculations.GetMedianSummedMSIntensity(metaData.SummedIntensity, index, MSOrderType.Ms2);
    metricsData.MedianPrecursorIntensity = (from x in peakData.Keys
                                            select peakData[x].ParentIntensity).ToArray().Percentile(50);

    metricsData.MedianMS1FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms);
    metricsData.MedianMS2FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms2);
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MedianMS3FillTime = MetricsCalculations.GetMedianMSFillTime(metaData.FillTime, index, MSOrderType.Ms3);
    }

    metricsData.MeanTopN = MetricsCalculations.GetMeanMs2ScansPerCycle(metaData.MS2ScansPerCycle);
    // NOTE(review): property is named MeanDutyCycle but is populated from a *median* calculation.
    metricsData.MeanDutyCycle = MetricsCalculations.GetMedianDutyCycle(metaData.DutyCycle, index);
    metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity =
        MetricsCalculations.GetMedianMs2FractionConsumingTop80PercentTotalIntensity(
            metaData.FractionConsumingTop80PercentTotalIntensity, index);

    // Scan rates in scans per unit analysis time.
    metricsData.MS1ScanRate = metricsData.MS1Scans / metricsData.TotalAnalysisTime;
    metricsData.MS2ScanRate = metricsData.MS2Scans / metricsData.TotalAnalysisTime;
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3ScanRate = metricsData.MS3Scans / metricsData.TotalAnalysisTime;
    }

    // P10/P50 here are widths at 10%/50% of peak height (baseline vs half-height width).
    metricsData.MedianBaselinePeakWidth = peakData.PeakShapeMedians.Width.P10;
    metricsData.MedianHalfHeightPeakWidth = peakData.PeakShapeMedians.Width.P50;

    // we can't access the instrument method in Linux, so we will assume the gradient
    // length is the length of the MS acquisition
    metricsData.Gradient = retentionTimes[index.allScans.Keys.Max()];
    metricsData.PeakCapacity = metricsData.Gradient / metricsData.MedianHalfHeightPeakWidth;
    metricsData.MedianAsymmetryFactor = peakData.PeakShapeMedians.Asymmetry.P10;

    // add isolation interference
    metricsData.MedianMs1IsolationInterference = (from scan in index.ScanEnumerators[methodData.AnalysisOrder]
                                                  select metaData.Ms1IsolationInterference[scan]).ToArray().Percentile(50);

    // Chromatographic intensity metrics: dead time before/after the run and fraction
    // of run above 10% of max intensity.
    (double timeBefore, double timeAfter, double fracAbove) = MetricsCalculations.ChromIntensityMetrics(metaData, retentionTimes, index);
    metricsData.TimeBeforeFirstScanToExceedPoint1MaxIntensity = timeBefore;
    metricsData.TimeAfterLastScanToExceedPoint1MaxIntensity = timeAfter;
    metricsData.FractionOfRunAbovePoint1MaxIntensity = fracAbove;

    metricsData.Ms1FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms] select metaData.FillTime[x]).ToArray());
    metricsData.Ms2FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms2] select metaData.FillTime[x]).ToArray());
    if (methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.Ms3FillTimeDistribution = new Distribution((from x in index.ScanEnumerators[MSOrderType.Ms3] select metaData.FillTime[x]).ToArray());
    }

    metricsData.PeakShape.Asymmetry.P10 = peakData.PeakShapeMedians.Asymmetry.P10;
    metricsData.PeakShape.Asymmetry.P50 = peakData.PeakShapeMedians.Asymmetry.P50;
    metricsData.PeakShape.Width.P10 = peakData.PeakShapeMedians.Width.P10;
    metricsData.PeakShape.Width.P50 = peakData.PeakShapeMedians.Width.P50;

    // now add the quant meta data, if quant was performed
    double medianReporterIntensity = 0;
    QuantMetaData quantMetaData = new QuantMetaData();
    SerializableDictionary<string, double> medianReporterIntensityByChannel = new SerializableDictionary<string, double>();
    if (quantData != null)
    {
        string reagent = quantData.LabelingReagents;
        string[] allTags = new LabelingReagents().Reagents[reagent].Labels;
        List<double> allChannels = new List<double>();
        Dictionary<string, List<double>> byChannel = new Dictionary<string, List<double>>();
        foreach (string tag in allTags)
        {
            byChannel.Add(tag, new List<double>());
        }
        // Collect every channel intensity across all quant scans.
        foreach (int scan in index.ScanEnumerators[methodData.AnalysisOrder])
        {
            foreach (string tag in allTags)
            {
                byChannel[tag].Add(quantData[scan][tag].Intensity);
                allChannels.Add(quantData[scan][tag].Intensity);
            }
        }
        medianReporterIntensity = allChannels.ToArray().Percentile(50);
        foreach (string tag in allTags)
        {
            medianReporterIntensityByChannel[tag] = byChannel[tag].ToArray().Percentile(50);
        }

        quantMetaData.medianReporterIntensity = medianReporterIntensity;
        quantMetaData.medianReporterIntensityByChannel = medianReporterIntensityByChannel;
        quantMetaData.quantTags = allTags;
        metricsData.QuantMeta = quantMetaData;
        metricsData.IncludesQuant = true;
    }

    return metricsData;
}
/// <summary>
/// End-to-end DDA processing pipeline for a single raw file: extracts scan data,
/// optionally performs peak analysis, mass/charge refinement, reporter-ion quant,
/// metrics calculation, matrix/MGF/chromatogram output, and QC collection updates,
/// all driven by the workflow parameters.
/// </summary>
/// <param name="rawFileThreadManager">Factory for thread-local raw file accessors.</param>
/// <param name="parameters">Workflow parameters controlling which steps run.</param>
/// <param name="qcDataCollection">QC collection; reassigned locally from disk when QC is enabled.</param>
public static void UniversalDDA(IRawFileThreadManager rawFileThreadManager, WorkflowParameters parameters, QcDataCollection qcDataCollection)
{
    MethodDataContainer methodData;
    CentroidStreamCollection centroidStreams;
    SegmentScanCollection segmentScans;
    TrailerExtraCollection trailerExtras;
    PrecursorMassCollection precursorMasses;
    RetentionTimeCollection retentionTimes;
    ScanEventReactionCollection reactions;
    ScanMetaDataCollectionDDA metaData = null;
    PrecursorPeakCollection peakData = null;
    int nScans;

    var staticRawFile = rawFileThreadManager.CreateThreadAccessor();
    staticRawFile.SelectInstrument(Device.MS, 1);

    // Bail out early if the raw file itself reports an error.
    var err = staticRawFile.FileError;
    if (err.HasError)
    {
        Console.WriteLine("ERROR: {0} reports error code: {1}. The associated message is: {2}",
            Path.GetFileName(staticRawFile.FileName), err.ErrorCode, err.ErrorMessage);
        Console.WriteLine("Skipping this file");
        Log.Error("{FILE} reports error code: {ERRORCODE}. The associated message is: {ERRORMESSAGE}",
            Path.GetFileName(staticRawFile.FileName), err.ErrorCode, err.ErrorMessage);
        return;
    }

    //staticRawFile.CheckIfBoxcar();

    // Build the scan index, precursor scan mapping, and scan-dependents in parallel.
    (ScanIndex Index, PrecursorScanCollection precursorScans, ScanDependentsCollections scanDependents) =
        Extract.ScanIndicesPrecursorsDependents(rawFileThreadManager, MaxProcesses: parameters.MaxProcesses);

    nScans = Index.ScanEnumerators[MSOrderType.Ms2].Length;

    // Extract everything that requires the raw file handle inside one using scope.
    using (var rawFile = rawFileThreadManager.CreateThreadAccessor())
    {
        reactions = Extract.ScanEvents(rawFile, Index);
        methodData = Extract.MethodData(rawFile, Index);
        (centroidStreams, segmentScans) = Extract.MsData(rawFile: rawFile, index: Index);
        trailerExtras = Extract.TrailerExtras(rawFile, Index);
        precursorMasses = Extract.PrecursorMasses(rawFile, precursorScans, trailerExtras, Index);
        retentionTimes = Extract.RetentionTimes(rawFile, Index);
    }

    // NOTE(review): `|` is the non-short-circuit boolean OR; all operands are evaluated.
    // Harmless here (pure property reads), but `||` would be the conventional choice.
    if (parameters.ParseParams.Parse | parameters.ParseParams.Quant |
        parameters.ParseParams.Metrics | parameters.RefineMassCharge | parameters.QcParams.QcDirectory != null)
    {
        // Peak analysis and aggregated per-scan meta data are needed by all of the above.
        peakData = AnalyzePeaks.AnalyzeAllPeaks(centroidStreams, retentionTimes, precursorMasses, precursorScans, Index, parameters.MaxProcesses);

        if (parameters.RefineMassCharge)
        {
            // Re-predict monoisotopic mass and charge using the observed peak data.
            MonoIsoPredictor.RefineMonoIsoMassChargeValues(parameters, centroidStreams, precursorMasses, trailerExtras, peakData, precursorScans);
        }

        metaData = MetaDataProcessingDDA.AggregateMetaDataDDA(centroidStreams, segmentScans, methodData, precursorScans,
            trailerExtras, precursorMasses, retentionTimes, scanDependents, reactions, Index, parameters.MaxProcesses);
    }

    QuantDataCollection quantData = null;
    if (parameters.ParseParams.Quant)
    {
        quantData = Quantification.Quantify(centroidStreams, segmentScans, parameters, methodData, Index);
    }

    RawMetricsDataDDA rawMetrics = null;
    if (parameters.ParseParams.Metrics | parameters.QcParams.QcDirectory != null)
    {
        // Metrics are needed both for the metrics matrix and for the QC data container.
        rawMetrics = MetaDataProcessingDDA.GetMetricsDataDDA(metaData, methodData, staticRawFile.FileName,
            retentionTimes, Index, peakData, precursorScans, quantData);
    }

    if (parameters.ParseParams.Metrics)
    {
        MetricsWriter.WriteMatrix(rawMetrics, null, staticRawFile.FileName, parameters.ParseParams.OutputDirectory);
    }

    if (parameters.ParseParams.Parse | parameters.ParseParams.Quant)
    {
        string matrixFileName = ReadWrite.GetPathToFile(parameters.ParseParams.OutputDirectory, staticRawFile.FileName, "_Matrix.txt");

        /*
         * ParseWriter writerDDA = new ParseWriter(matrixFileName, centroidStreams, segmentScans, metaData, retentionTimes,
         * precursorMasses, precursorScans, peakData, trailerExtras, Index, quantData);
         * writerDDA.WriteMatrixDDA(methodData.AnalysisOrder);
         */

        MatrixWriter.ParseQuantDDA(matrixFileName, centroidStreams, segmentScans, metaData, retentionTimes,
            precursorMasses, precursorScans, peakData, trailerExtras, Index, quantData);
    }

    if (parameters.ParseParams.WriteMgf)
    {
        //ParseWriter writerMGF = new ParseWriter(centroidStreams, segmentScans, parameters, retentionTimes, precursorMasses, precursorScans, trailerExtras, methodData, Index);
        //writerMGF.WriteMGF(staticRawFile.FileName);
        MgfWriter.WriteMGF(staticRawFile.FileName, centroidStreams, segmentScans, parameters, retentionTimes,
            precursorMasses, precursorScans, trailerExtras, methodData, Index);
    }

    if (parameters.ParseParams.Chromatogram != null)
    {
        ChromatogramWriter.WriteChromatogram(centroidStreams, segmentScans, retentionTimes, methodData, Index, parameters, staticRawFile.FileName);
    }

    if (parameters.QcParams.QcDirectory != null)
    {
        // NOTE(review): the parameter value of qcDataCollection is discarded here and the
        // collection is (re)loaded from disk; the caller's reference is not updated.
        qcDataCollection = QC.QcWorkflow.LoadOrCreateQcCollection(parameters);

        SearchMetricsContainer searchMetrics = new SearchMetricsContainer(staticRawFile.FileName, staticRawFile.CreationDate, methodData);

        // check if the raw file already exists in the QC data with a different name
        if (QcWorkflow.CheckIfFilePresentInQcCollection(staticRawFile.FileName, qcDataCollection))
        {
            Log.Information("A file with the same creation date and time as {File} already exists in the QC data", staticRawFile.FileName);
            Console.WriteLine("A file with the same creation date and time as {File} already exists in the QC data. Skipping to next file.", staticRawFile.FileName);
        }
        else
        {
            if (parameters.QcParams.PerformSearch)
            {
                // Run the external search engine and fold its results into the search metrics.
                Search.WriteSearchMGF(parameters, centroidStreams, segmentScans, retentionTimes, precursorMasses,
                    precursorScans, trailerExtras, methodData, Index, staticRawFile.FileName, parameters.QcParams.FixedScans);
                Search.RunSearch(parameters, methodData, staticRawFile.FileName);
                searchMetrics = SearchQC.ParseSearchResults(searchMetrics, parameters, staticRawFile.FileName, nScans);
            }

            QcDataContainer qcData = new QcDataContainer();
            qcData.DDA = rawMetrics;
            qcData.SearchMetrics = searchMetrics;

            QC.QcWorkflow.UpdateQcCollection(qcDataCollection, qcData, methodData, staticRawFile.FileName);
        }
    }
}
/// <summary>
/// Populates a <see cref="MetricsData"/> container with run-level metrics computed
/// from the aggregated scan meta data: scan counts, median intensities and fill
/// times, duty cycle, scan rates, peak shape (skipped for boxcar runs), isolation
/// interference, and optional reporter-ion quant summaries.
/// </summary>
/// <param name="metricsData">Container to populate (extension target).</param>
/// <param name="metaData">Aggregated per-scan meta data.</param>
/// <param name="rawData">Raw data container (scan index, retention times, peak data, ...).</param>
/// <param name="rawFile">Open raw file handle; used to (re)run prerequisite operations.</param>
/// <param name="quantData">Optional quant data; quant summaries are added only when
/// quant was actually performed on <paramref name="rawData"/>.</param>
public static void GetMetricsData(this MetricsData metricsData, ScanMetaDataCollection metaData, RawDataCollection rawData, IRawDataPlus rawFile, QuantDataCollection quantData = null)
{
    // Ensure all prerequisite operations have been run on the raw data.
    List<Operations> operations = new List<Operations>
    {
        Operations.ScanIndex,
        Operations.RetentionTimes,
        Operations.MethodData,
        Operations.MetaData
    };
    if (!rawData.isBoxCar)
    {
        // Peak analysis is only meaningful for non-boxcar experiments.
        operations.Add(Operations.PeakRetAndInt);
        operations.Add(Operations.PeakShape);
    }
    rawData.Check(rawFile, operations);

    metricsData.RawFileName = rawData.rawFileName;
    metricsData.Instrument = rawData.instrument;
    metricsData.MS1Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms];
    metricsData.MS2Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms2];

    // Total analysis time = span between the first and last scan of any MS order.
    metricsData.TotalAnalysisTime = rawData.retentionTimes[rawData.scanIndex.ScanEnumerators[MSOrderType.Any].Last()] -
                                    rawData.retentionTimes[rawData.scanIndex.ScanEnumerators[MSOrderType.Any].First()];

    metricsData.TotalScans = rawData.scanIndex.allScans.Count();
    metricsData.MS1Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms].Length;
    metricsData.MS2Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms2].Length;

    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms3];
        metricsData.MS3Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms3].Length;
    }
    else
    {
        // No MS3 in this run; use sentinel values.
        metricsData.MS3Analyzer = MassAnalyzerType.Any;
        metricsData.MS3Scans = 0;
    }

    metricsData.MSOrder = rawData.methodData.AnalysisOrder;

    // Accumulate per-scan values so we can take medians/means below.
    List<double> ms2intensities = new List<double>();
    List<double> precursorIntensities = new List<double>();
    List<double> ms1fillTimes = new List<double>();
    List<double> ms2fillTimes = new List<double>();
    List<double> ms3fillTimes = new List<double>();
    List<double> ms2scansPerCycle = new List<double>();
    List<double> dutyCycles = new List<double>();
    List<double> fractionConsuming80 = new List<double>();

    foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms])
    {
        ms1fillTimes.Add(metaData[scan].FillTime);
        ms2scansPerCycle.Add(metaData[scan].MS2ScansPerCycle);
        dutyCycles.Add(metaData[scan].DutyCycle);
    }

    foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms2])
    {
        precursorIntensities.Add(rawData.peakData[scan].ParentIntensity);
        ms2intensities.Add(metaData[scan].SummedIntensity);
        ms2fillTimes.Add(metaData[scan].FillTime);
        fractionConsuming80.Add(metaData[scan].FractionConsumingTop80PercentTotalIntensity);
    }

    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms3])
        {
            ms3fillTimes.Add(metaData[scan].FillTime);
        }
    }

    metricsData.MedianPrecursorIntensity = precursorIntensities.ToArray().Percentile(50);
    metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity = fractionConsuming80.ToArray().Percentile(50);
    metricsData.MedianSummedMS2Intensity = ms2intensities.ToArray().Percentile(50);
    metricsData.MedianMS1FillTime = ms1fillTimes.ToArray().Percentile(50);
    metricsData.MedianMS2FillTime = ms2fillTimes.ToArray().Percentile(50);
    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MedianMS3FillTime = ms3fillTimes.ToArray().Percentile(50);
    }
    else
    {
        // Sentinel for "no MS3 fill time available".
        metricsData.MedianMS3FillTime = -1;
    }

    metricsData.MeanTopN = ms2scansPerCycle.Average();
    metricsData.MeanDutyCycle = dutyCycles.Average();

    // Scan rates in scans per unit analysis time (MS3Scans is 0 when there is no MS3).
    metricsData.MS1ScanRate = metricsData.MS1Scans / metricsData.TotalAnalysisTime;
    metricsData.MS2ScanRate = metricsData.MS2Scans / metricsData.TotalAnalysisTime;
    metricsData.MS3ScanRate = metricsData.MS3Scans / metricsData.TotalAnalysisTime;

    // only do the following if it isn't a boxcar experiment
    if (!rawData.isBoxCar)
    {
        // P10/P50 here are widths at 10%/50% of peak height (baseline vs half-height width).
        metricsData.MedianBaselinePeakWidth = rawData.peakData.PeakShapeMedians.Width.P10;
        metricsData.MedianHalfHeightPeakWidth = rawData.peakData.PeakShapeMedians.Width.P50;

        // we can't access the instrument method in Linux, so we will assume the gradient
        // length is the length of the MS acquisition
        metricsData.Gradient = rawData.retentionTimes[rawData.scanIndex.allScans.Keys.Max()];
        metricsData.PeakCapacity = metricsData.Gradient / metricsData.MedianHalfHeightPeakWidth;
        metricsData.MedianAsymmetryFactor = rawData.peakData.PeakShapeMedians.Asymmetry.P10;
    }

    // add isolation interference
    metricsData.MedianMs1IsolationInterference = (from scan in rawData.scanIndex.ScanEnumerators[rawData.methodData.AnalysisOrder]
                                                  select rawData.metaData[scan].Ms1IsolationInterference).ToArray().Percentile(50);

    // now add the quant meta data, if quant was performed
    double medianReporterIntensity = 0;
    QuantMetaData quantMetaData = new QuantMetaData();
    SerializableDictionary<string, double> medianReporterIntensityByChannel = new SerializableDictionary<string, double>();
    // FIX: use short-circuit && instead of non-short-circuit & so the Performed lookup
    // is skipped entirely when no quant data was supplied.
    if (quantData != null && rawData.Performed.Contains(Operations.Quantification))
    {
        string reagent = quantData.LabelingReagents;
        string[] allTags = new LabelingReagents().Reagents[reagent].Labels;
        List<double> allChannels = new List<double>();
        Dictionary<string, List<double>> byChannel = new Dictionary<string, List<double>>();
        foreach (string tag in allTags)
        {
            byChannel.Add(tag, new List<double>());
        }
        // Collect every channel intensity across all quant scans.
        foreach (int scan in rawData.scanIndex.ScanEnumerators[rawData.methodData.AnalysisOrder])
        {
            foreach (string tag in allTags)
            {
                byChannel[tag].Add(quantData[scan][tag].Intensity);
                allChannels.Add(quantData[scan][tag].Intensity);
            }
        }
        medianReporterIntensity = allChannels.ToArray().Percentile(50);
        foreach (string tag in allTags)
        {
            medianReporterIntensityByChannel[tag] = byChannel[tag].ToArray().Percentile(50);
        }

        quantMetaData.medianReporterIntensity = medianReporterIntensity;
        quantMetaData.medianReporterIntensityByChannel = medianReporterIntensityByChannel;
        quantMetaData.quantTags = allTags;
        metricsData.QuantMeta = quantMetaData;
        metricsData.IncludesQuant = true;
    }
}
/// <summary>
/// Sequential DDA parse pipeline for a single raw file: extracts scan data, then
/// runs the requested peak analysis, mass/charge refinement, quant, metrics,
/// matrix/MGF/chromatogram output steps. Similar to UniversalDDA but without the
/// QC workflow, file-error check, or MaxProcesses plumbing.
/// </summary>
/// <param name="rawFileThreadManager">Factory for thread-local raw file accessors.</param>
/// <param name="parameters">Workflow parameters controlling which steps run.</param>
public static void ParseDDA(IRawFileThreadManager rawFileThreadManager, WorkflowParameters parameters)
{
    MethodDataContainer methodData;
    CentroidStreamCollection centroidStreams;
    SegmentScanCollection segmentScans;
    TrailerExtraCollection trailerExtras;
    PrecursorMassCollection precursorMasses;
    RetentionTimeCollection retentionTimes;
    ScanEventReactionCollection reactions;
    ScanMetaDataCollectionDDA metaData = null;
    PrecursorPeakCollection peakData = null;
    RawMetricsDataDDA rawMetrics = null;
    QuantDataCollection quantData = null;

    var staticRawFile = rawFileThreadManager.CreateThreadAccessor();
    staticRawFile.SelectInstrument(Device.MS, 1);

    //staticRawFile.CheckIfBoxcar();

    // Build the scan index, precursor scan mapping, and scan-dependents.
    (ScanIndex Index, PrecursorScanCollection precursorScans, ScanDependentsCollections scanDependents) =
        Extract.ScanIndicesPrecursorsDependents(rawFileThreadManager);

    // Extract everything that requires the raw file handle inside one using scope.
    using (var rawFile = rawFileThreadManager.CreateThreadAccessor())
    {
        reactions = Extract.ScanEvents(rawFile, Index);
        methodData = Extract.MethodData(rawFile, Index);
        (centroidStreams, segmentScans) = Extract.MsData(rawFile: rawFile, index: Index);
        trailerExtras = Extract.TrailerExtras(rawFile, Index);
        precursorMasses = Extract.PrecursorMasses(rawFile, precursorScans, trailerExtras, Index);
        retentionTimes = Extract.RetentionTimes(rawFile, Index);
    }

    // NOTE(review): `|` is the non-short-circuit boolean OR; all operands are evaluated.
    // Harmless here (pure property reads), but `||` would be the conventional choice.
    if (parameters.ParseParams.Parse | parameters.ParseParams.Quant |
        parameters.ParseParams.Metrics | parameters.RefineMassCharge)
    {
        // Peak analysis and aggregated per-scan meta data are needed by all of the above.
        peakData = AnalyzePeaks.AnalyzeAllPeaks(centroidStreams, retentionTimes, precursorMasses, precursorScans, Index);

        if (parameters.RefineMassCharge)
        {
            // Re-predict monoisotopic mass and charge using the observed peak data.
            MonoIsoPredictor.RefineMonoIsoMassChargeValues(centroidStreams, precursorMasses, trailerExtras, peakData, precursorScans);
        }

        metaData = MetaDataProcessingDDA.AggregateMetaDataDDA(centroidStreams, segmentScans, methodData, precursorScans,
            trailerExtras, precursorMasses, retentionTimes, scanDependents, reactions, Index);
    }

    if (parameters.ParseParams.Quant)
    {
        quantData = Quantification.Quantify(centroidStreams, segmentScans, parameters, methodData, Index);
    }

    if (parameters.ParseParams.Metrics)
    {
        rawMetrics = MetaDataProcessingDDA.GetMetricsDataDDA(metaData, methodData, staticRawFile.FileName,
            retentionTimes, Index, peakData, precursorScans, quantData);
        MetricsWriter.WriteMatrix(rawMetrics, null, staticRawFile.FileName, parameters.ParseParams.OutputDirectory);
    }

    if (parameters.ParseParams.Parse | parameters.ParseParams.Quant)
    {
        string matrixFileName = ReadWrite.GetPathToFile(parameters.ParseParams.OutputDirectory, staticRawFile.FileName, "_Matrix.txt");

        /*
         * ParseWriter writerDDA = new ParseWriter(matrixFileName, centroidStreams, segmentScans, metaData, retentionTimes,
         * precursorMasses, precursorScans, peakData, trailerExtras, Index, quantData);
         * writerDDA.WriteMatrixDDA(methodData.AnalysisOrder);
         */

        MatrixWriter.ParseQuantDDA(matrixFileName, centroidStreams, segmentScans, metaData, retentionTimes,
            precursorMasses, precursorScans, peakData, trailerExtras, Index, quantData);
    }

    if (parameters.ParseParams.WriteMgf)
    {
        //ParseWriter writerMGF = new ParseWriter(centroidStreams, segmentScans, parameters, retentionTimes, precursorMasses, precursorScans, trailerExtras, methodData, Index);
        //writerMGF.WriteMGF(staticRawFile.FileName);
        MgfWriter.WriteMGF(staticRawFile.FileName, centroidStreams, segmentScans, parameters, retentionTimes,
            precursorMasses, precursorScans, trailerExtras, methodData, Index);
    }

    if (parameters.ParseParams.Chromatogram != null)
    {
        ChromatogramWriter.WriteChromatogram(centroidStreams, segmentScans, retentionTimes, methodData, Index, parameters, staticRawFile.FileName);
    }
}
/// <summary>
/// Writes the per-scan parse matrix (tab-separated) to disk: one row per quant scan
/// with scan numbers, retention times, duty cycle, precursor masses/charge, isolation
/// interference, optional peak data, injection times, intensities, and — when quant
/// data is supplied — one column block per reporter channel for intensity, mass,
/// noise, resolution, and baseline. Column order in the body MUST match the header
/// blocks written at the top, including the same isBoxCar/PeakArea conditions.
/// </summary>
/// <param name="rawData">Raw data container (scan index, precursor scans, trailer extras, peak data, ...).</param>
/// <param name="metaData">Aggregated per-scan meta data used for duty cycle and intensity columns.</param>
/// <param name="rawFile">Open raw file handle; used to (re)run prerequisite operations.</param>
/// <param name="quantData">Optional reporter-ion quant data; adds the channel columns when present.</param>
/// <param name="outputDirectory">Output directory; null writes next to the raw file (per ReadWrite.GetPathToFile).</param>
public static void WriteMatrix(RawDataCollection rawData, ScanMetaDataCollection metaData, IRawDataPlus rawFile, QuantDataCollection quantData = null, string outputDirectory = null)
{
    string fileName = ReadWrite.GetPathToFile(outputDirectory, rawData.rawFileName, "_Matrix.txt");

    // Ensure all prerequisite extraction/aggregation operations have been run.
    CheckIfDone.Check(rawData, rawFile, new List<Operations>
    {
        Operations.ScanIndex, Operations.MethodData, Operations.PrecursorScans,
        Operations.RetentionTimes, Operations.PrecursorMasses, Operations.TrailerExtras,
        Operations.MetaData
    });

    using (StreamWriter f = new StreamWriter(fileName)) //Open a new file
    {
        // Boxcar runs have no per-precursor analysis order; fall back to the sorted
        // precursor scan numbers instead of the analysis-order enumerator.
        List<int> scans;
        if (!rawData.isBoxCar)
        {
            scans = rawData.scanIndex.ScanEnumerators[rawData.scanIndex.AnalysisOrder].ToList();
        }
        else
        {
            scans = rawData.precursorScans.Keys.ToList();
            scans.Sort();
        }

        ProgressIndicator progress = new ProgressIndicator(scans.Count(), "Writing matrix to disk");

        // ---- Header row (must stay in sync with the body writes below) ----
        f.Write("MS3ScanNumber\tMS2ScanNumber\tMS1ScanNumber\tQuantScanRetTime\tParentScanRetTime\tDutyCycle" +
            "\tMS2ScansPerCycle\tParentIonMass\tMonoisotopicMass\tPrecursorCharge\tMS1IsolationInterference");

        if (!rawData.isBoxCar)
        {
            f.Write("\tParentPeakFound");
        }

        if (rawData.Performed.Contains(Operations.PeakArea) & !rawData.isBoxCar)
        {
            f.Write("\tParentPeakArea");
        }

        if (!rawData.isBoxCar)
        {
            f.Write("\tPeakFirstScan\tPeakMaxScan\tPeakLastScan\tBaseLinePeakWidth(s)\tPeakParentScanIntensity\tPeakMaxIntensity");
        }

        f.Write("\tMS1IonInjectionTime\tMS2IonInjectionTime" +
            "\tMS3IonInjectionTime\tHCDEnergy\tMS1MedianIntensity\tMS2MedianIntensity\t");

        if (quantData != null)
        {
            // One header column per reporter channel for each quant attribute.
            string reagents = quantData.LabelingReagents;

            foreach (string label in new LabelingReagents().Reagents[reagents].Labels)
            {
                f.Write(label + "Intensity\t");
            }
            foreach (string label in new LabelingReagents().Reagents[reagents].Labels)
            {
                f.Write(label + "Mass\t");
            }
            foreach (string label in new LabelingReagents().Reagents[reagents].Labels)
            {
                f.Write(label + "Noise\t");
            }
            foreach (string label in new LabelingReagents().Reagents[reagents].Labels)
            {
                f.Write(label + "Resolution\t");
            }
            foreach (string label in new LabelingReagents().Reagents[reagents].Labels)
            {
                f.Write(label + "Baseline\t");
            }
        }

        f.Write("\n");

        // ---- One data row per quant scan ----
        foreach (int scan in scans)
        {
            // Resolve the MS3/MS2/MS1 scan numbers for this row; -1 marks "no MS3 scan".
            int ms3scan, ms2scan, masterScan;
            if (rawData.scanIndex.AnalysisOrder == MSOrderType.Ms3)
            {
                ms3scan = rawData.precursorScans[scan].MS3Scan;
                ms2scan = rawData.precursorScans[scan].MS2Scan;
                masterScan = rawData.precursorScans[scan].MasterScan;
            }
            else
            {
                ms3scan = -1;
                ms2scan = rawData.precursorScans[scan].MS2Scan;
                masterScan = rawData.precursorScans[scan].MasterScan;
            }

            f.Write(ms3scan.ToString() + "\t" + ms2scan.ToString() + "\t" + masterScan.ToString() + "\t");
            f.Write(rawData.retentionTimes[scan].ToString() + "\t" + rawData.retentionTimes[masterScan].ToString() + "\t");
            f.Write(metaData[masterScan].DutyCycle.ToString() + "\t" + metaData[masterScan].MS2ScansPerCycle.ToString() + "\t");
            f.Write(rawData.precursorMasses[ms2scan].ParentMZ.ToString() + "\t");
            f.Write(rawData.precursorMasses[ms2scan].MonoisotopicMZ.ToString() + "\t");
            f.Write(rawData.trailerExtras[ms2scan].ChargeState.ToString() + "\t");
            // NOTE(review): this reads rawData.metaData rather than the metaData parameter
            // used elsewhere in this method — presumably the same collection; confirm.
            f.Write(rawData.metaData[scan].Ms1IsolationInterference.ToString() + "\t");

            if (!rawData.isBoxCar)
            {
                f.Write(rawData.peakData[ms2scan].PeakFound.ToString() + "\t");
            }

            if (rawData.Performed.Contains(Operations.PeakArea) & !rawData.isBoxCar)
            {
                f.Write(rawData.peakData[ms2scan].Area.ToString() + "\t");
            }

            if (!rawData.isBoxCar)
            {
                f.Write(rawData.peakData[ms2scan].FirstScan.ToString() + "\t");
                f.Write(rawData.peakData[ms2scan].MaxScan.ToString() + "\t");
                f.Write(rawData.peakData[ms2scan].LastScan.ToString() + "\t");
                // BaselineWidth is stored in minutes; * 60 converts to the header's seconds.
                f.Write((rawData.peakData[ms2scan].BaselineWidth * 60).ToString() + "\t");
                f.Write(rawData.peakData[ms2scan].ParentIntensity.ToString() + "\t");
                f.Write(rawData.peakData[ms2scan].MaximumIntensity.ToString() + "\t");
            }

            // Injection times: MS1 (master scan), MS2, and MS3 (-1 when no MS3).
            f.Write(rawData.trailerExtras[masterScan].InjectionTime.ToString() + "\t");

            if (rawData.scanIndex.AnalysisOrder == MSOrderType.Ms3)
            {
                f.Write(rawData.trailerExtras[ms2scan].InjectionTime.ToString() + "\t");
                f.Write(rawData.trailerExtras[ms3scan].InjectionTime.ToString() + "\t");
            }
            else
            {
                f.Write(rawData.trailerExtras[ms2scan].InjectionTime.ToString() + "\t");
                f.Write("-1\t");
            }

            // NOTE(review): HCDEnergy is taken from the quant scan (`scan`), not ms2scan —
            // looks intentional for MS3 workflows; confirm.
            f.Write(rawData.trailerExtras[scan].HCDEnergy + "\t");
            f.Write(metaData[masterScan].IntensityDistribution.P50 + "\t");
            f.Write(metaData[ms2scan].IntensityDistribution.P50 + "\t");

            if (quantData != null)
            {
                // Channel blocks in the same order as the header: Intensity, Mass,
                // Noise, Resolution, Baseline.
                foreach (string label in quantData[scan].Keys)
                {
                    f.Write(quantData[scan][label].Intensity + "\t");
                }
                foreach (string label in quantData[scan].Keys)
                {
                    f.Write(quantData[scan][label].Mass + "\t");
                }
                foreach (string label in quantData[scan].Keys)
                {
                    f.Write(quantData[scan][label].Noise + "\t");
                }
                foreach (string label in quantData[scan].Keys)
                {
                    f.Write(quantData[scan][label].Resolution + "\t");
                }
                foreach (string label in quantData[scan].Keys)
                {
                    f.Write(quantData[scan][label].Baseline + "\t");
                }
            }

            f.Write("\n");
            progress.Update();
        }

        progress.Done();
    }
}
/// <summary>
/// Entry point for the parse verb: validates the command-line options, collects the
/// list of .raw files to process (explicit list or directory scan), then runs the
/// requested extraction/quant/metrics/MGF/chromatogram steps on each file in turn.
/// </summary>
/// <param name="opts">Parsed command-line options.</param>
/// <returns>0 on success, 1 on invalid input.</returns>
static int DoStuff(ArgumentParser.ParseOptions opts)
{
    List<string> files = new List<string>();

    if (opts.InputFiles.Count() > 0) // did the user give us a list of files?
    {
        List<string> problems = new List<string>();

        files = opts.InputFiles.ToList();

        // check if the list provided contains only .raw files
        foreach (string file in files)
        {
            if (!file.EndsWith(".raw", StringComparison.OrdinalIgnoreCase))
            {
                problems.Add(file);
            }
        }

        if (problems.Count == 1)
        {
            Console.WriteLine("\nERROR: {0} does not appear to be a .raw file. Invoke '>RawTools --help' if you need help.", problems.ElementAt(0));
            Log.Error("Invalid file provided: {0}", problems.ElementAt(0));
            return 1;
        }

        if (problems.Count > 1)
        {
            Console.WriteLine("\nERROR: The following {0} files do not appear to be .raw files. Invoke '>RawTools --help' if you need help." +
                "\n\n{1}", problems.Count, String.Join("\n", problems));
            Log.Error("Invalid files provided: {0}", String.Join(" ", problems));
            return 1;
        }

        Log.Information("Files to be processed, provided as list: {0}", String.Join(" ", files));
    }
    else // did the user give us a directory?
    {
        if (Directory.Exists(opts.InputDirectory))
        {
            files = Directory.GetFiles(opts.InputDirectory, "*.*", SearchOption.TopDirectoryOnly)
                .Where(s => s.EndsWith(".raw", StringComparison.OrdinalIgnoreCase)).ToList();
        }
        else
        {
            Console.WriteLine("ERROR: The provided directory does not appear to be valid.");
            Log.Error("Invalid directory provided: {0}", opts.InputDirectory);
            return 1;
        }

        Log.Information("Files to be processed, provided as directory: {0}", String.Join(" ", files));
    }

    if (opts.Quant)
    {
        List<string> possible = new List<string>() { "TMT0", "TMT2", "TMT6", "TMT10", "TMT11", "iTRAQ4", "iTRAQ8" };
        if (!possible.Contains(opts.LabelingReagents))
        {
            Console.WriteLine("ERROR: For quantification, the labeling reagent must be one of {TMT0, TMT2, TMT6, TMT10, TMT11, iTRAQ4, iTRAQ8}");
            Log.Error("Invalid labeling reagent provided: {0}", opts.LabelingReagents);
            return 1;
        }
    }

    if (opts.Chromatogram != null)
    {
        // FIX: the original list contained "1TB", "2TB", "3TB" twice; duplicates removed
        // (membership semantics are unchanged).
        List<string> possible = new List<string>() { "1T", "2T", "3T", "1B", "2B", "3B", "1TB", "2TB", "3TB" };
        if (!possible.Contains(opts.Chromatogram))
        {
            Console.WriteLine("ERROR: Incorrect format for --chro. See help.");
            Log.Error("Invalid chromatogram argument provided: {Chro}", opts.Chromatogram);
            return 1;
        }
    }

    System.Diagnostics.Stopwatch singleFileTime = new System.Diagnostics.Stopwatch();
    System.Diagnostics.Stopwatch totalTime = new System.Diagnostics.Stopwatch();
    totalTime.Start();

    foreach (string file in files)
    {
        singleFileTime.Start();
        Console.WriteLine("\nProcessing: {0}\n", file);

        using (IRawDataPlus rawFile = RawFileReaderFactory.ReadFile(fileName: file))
        {
            rawFile.SelectInstrument(Device.MS, 1);

            Log.Information("Now processing: {File} --- Instrument: {Instrument}", Path.GetFileName(file), rawFile.GetInstrumentData().Name);

            RawDataCollection rawData = new RawDataCollection(rawFile: rawFile);
            // FIX: removed an unused local `QuantDataCollection quantData` that was
            // instantiated but never referenced (rawData.quantData is used instead).
            bool isBoxCar = rawData.isBoxCar;

            if (rawData.isBoxCar)
            {
                Console.WriteLine("\nRaw file appears to be a boxcar-type experiment. Precursor peak analysis won't be performed!\n");
            }

            if (opts.ParseData || opts.Metrics || opts.Quant)
            {
                rawData.ExtractAll(rawFile);
                if (!isBoxCar)
                {
                    rawData.CalcPeakRetTimesAndInts(rawFile: rawFile);
                }
            }

            if (opts.Quant)
            {
                rawData.quantData.Quantify(rawData: rawData, rawFile: rawFile, labelingReagent: opts.LabelingReagents);
            }

            if (opts.UnlabeledQuant && !isBoxCar)
            {
                rawData.QuantifyPrecursorPeaks(rawFile);
            }

            if (opts.Metrics)
            {
                rawData.metaData.AggregateMetaData(rawData, rawFile);
            }

            if (opts.ParseData || opts.Quant)
            {
                if (opts.Quant)
                {
                    Parse.WriteMatrix(rawData: rawData, rawFile: rawFile, metaData: rawData.metaData, quantData: rawData.quantData, outputDirectory: opts.OutputDirectory);
                }
                else
                {
                    Parse.WriteMatrix(rawData: rawData, rawFile: rawFile, metaData: rawData.metaData, outputDirectory: opts.OutputDirectory);
                }
            }

            if (opts.WriteMGF)
            {
                MGF.WriteMGF(rawData: rawData, rawFile: rawFile, outputDirectory: opts.OutputDirectory, cutoff: opts.MassCutOff, intensityCutoff: opts.IntensityCutoff);
            }

            if (opts.Metrics)
            {
                MetricsData metricsData = new MetricsData();
                if (opts.Quant)
                {
                    metricsData.GetMetricsData(metaData: rawData.metaData, rawData: rawData, rawFile: rawFile, quantData: rawData.quantData);
                }
                else
                {
                    metricsData.GetMetricsData(metaData: rawData.metaData, rawData: rawData, rawFile: rawFile);
                }
                // BUGFIX: removed a third, unconditional GetMetricsData call that redundantly
                // recomputed all metrics (without quant data) immediately after the branch above.
                Metrics.WriteMatrix(rawData, metricsData, opts.OutputDirectory);
            }

            if (opts.Chromatogram != null)
            {
                // First character of the --chro argument is the MS order to plot.
                int order = Convert.ToInt32((opts.Chromatogram.ElementAt(0).ToString()));
                if (order > (int)rawData.methodData.AnalysisOrder)
                {
                    Log.Error("Specified MS order ({Order}) for chromatogram is higher than experiment order ({ExpOrder})",
                        (MSOrderType)order, rawData.methodData.AnalysisOrder);
                    Console.WriteLine("Specified MS order ({0}) for chromatogram is higher than experiment order ({1}). Chromatogram(s) won't be written.",
                        (MSOrderType)order, rawData.methodData.AnalysisOrder);
                }
                else
                {
                    rawData.WriteChromatogram(rawFile, (MSOrderType)order, opts.Chromatogram.Contains("T"), opts.Chromatogram.Contains("B"), opts.OutputDirectory);
                }
            }
        }

        singleFileTime.Stop();
        Console.WriteLine("\nElapsed time: {0} s", Math.Round(Convert.ToDouble(singleFileTime.ElapsedMilliseconds) / 1000.0, 2));
        singleFileTime.Reset();
    }

    totalTime.Stop();
    Console.WriteLine("\nTime to process all {0} files: {1}", files.Count, totalTime.Elapsed);

    return 0;
}