/// <summary>
/// Parses one delimited line of UIMF peak text into an MSPeakResult
/// (peakID, frame, scan, m/z, intensity, fwhm, s/n, optional MSFeatureID).
/// </summary>
/// <param name="line">One row of delimited text from the peaks file</param>
/// <returns>Populated MSPeakResult</returns>
/// <exception cref="IOException">Thrown when fewer than 7 columns are present</exception>
private MSPeakResult ConvertTextToPeakUIMFResult(string line)
{
    var columns = ProcessLine(line);

    if (columns.Count < 7)
    {
        throw new IOException("Trying to import peak data into UIMF data object, but not enough columns are present in the source text file");
    }

    var result = new MSPeakResult
    {
        PeakID = Convert.ToInt32(columns[0]),
        FrameNum = Convert.ToInt32(columns[1]),
        Scan_num = Convert.ToInt32(columns[2]),
        MSPeak = new MSPeak(
            Convert.ToDouble(columns[3]),
            Convert.ToSingle(columns[4]),
            Convert.ToSingle(columns[5]),
            Convert.ToSingle(columns[6]))
    };

    // Optional trailing column: MS feature ID
    if (columns.Count > 7)
    {
        result.MSPeak.MSFeatureID = Convert.ToInt32(columns[7]);
    }

    return result;
}
/// <summary>
/// Parses one delimited line of UIMF peak text into an MSPeakResult
/// (peakID, frame, scan, m/z, intensity, fwhm, s/n, optional MSFeatureID).
/// </summary>
/// <param name="line">One row of delimited text from the peaks file</param>
/// <returns>Populated MSPeakResult</returns>
/// <exception cref="System.IO.IOException">Thrown when fewer than 7 columns are present</exception>
private MSPeakResult convertTextToPeakUIMFResult(string line)
{
    var cells = processLine(line);

    if (cells.Count < 7)
    {
        throw new System.IO.IOException("Trying to import peak data into UIMF data object, but not enough columns are present in the source text file");
    }

    var result = new MSPeakResult
    {
        PeakID = Convert.ToInt32(cells[0]),
        FrameNum = Convert.ToInt32(cells[1]),
        Scan_num = Convert.ToInt32(cells[2]),
        MSPeak = new DeconTools.Backend.Core.MSPeak
        {
            XValue = Convert.ToDouble(cells[3]),
            Height = Convert.ToSingle(cells[4]),
            Width = Convert.ToSingle(cells[5]),
            SignalToNoise = Convert.ToSingle(cells[6])
        }
    };

    // Optional trailing column: MS feature ID
    if (cells.Count > 7)
    {
        result.MSPeak.MSFeatureID = Convert.ToInt32(cells[7]);
    }

    return result;
}
/// <summary>
/// Converts a Decon2LS LC-MS peak into an MSPeakResult, assigning the next
/// sequential peak index from this importer.
/// </summary>
/// <param name="p">Source Decon2LS peak</param>
/// <returns>MSPeakResult carrying the peak's m/z, (clamped) intensity and scan</returns>
private MSPeakResult ConvertDecon2LSPeakToPeakResult(Engine.Results.LcmsPeak p)
{
    // Clamp the source intensity to the float range before narrowing.
    var intensity = p.Intensity > float.MaxValue
        ? float.MaxValue
        : (float)p.Intensity;

    var msPeak = new MSPeak
    {
        XValue = p.Mz,
        Height = intensity
    };

    return new MSPeakResult
    {
        MSPeak = msPeak,
        Scan_num = p.ScanNum,
        PeakID = this.m_peakIndex
    };
}
//TODO: make this so that it works with UIMF data
//TODO: use column header lookup instead of hard coded values
/// <summary>
/// Parses one delimited line of peak text into an MSPeakResult. Column layout:
/// peakID, scan (or frame for UIMF), [extra UIMF column], m/z, intensity, fwhm, s/n,
/// [MSFeatureID].
/// </summary>
/// <param name="line">One row of delimited text from the peaks file</param>
/// <returns>Populated MSPeakResult</returns>
/// <exception cref="IOException">Thrown when the row has fewer columns than the current settings require</exception>
private MSPeakResult ConvertTextToPeakResult(string line)
{
    var processedLine = ProcessLine(line);

    // Bug fix: previously a short row produced a raw ArgumentOutOfRangeException.
    // Validate up front (as ConvertTextToPeakUIMFResult does): 6 base columns, plus
    // one for the UIMF extra column and one for the optional MSFeatureID column.
    var requiredColumns = 6 + (_peaksAreFromUIMF ? 1 : 0) + (_containsMSFeatureIDColumn ? 1 : 0);
    if (processedLine.Count < requiredColumns)
    {
        throw new IOException("Trying to import peak data, but not enough columns are present in the source text file");
    }

    var peakResult = new MSPeakResult();
    var columnCounter = 0;
    peakResult.PeakID = Convert.ToInt32(processedLine[columnCounter]);

    //NOTE - for UIMF data the frame column is loaded into the 'Scan_num' property. This is kind of ugly since there is
    //already a FrameNum property. I'm doing this so that we can process UIMF files in IQ. We need to fix this later.
    peakResult.Scan_num = Convert.ToInt32(processedLine[++columnCounter]);

    //UIMF peak data contains an extra column
    if (_peaksAreFromUIMF)
    {
        ++columnCounter;
    }

    var mz = Convert.ToDouble(processedLine[++columnCounter]);
    var intensity = Convert.ToSingle(processedLine[++columnCounter]);
    var fwhm = Convert.ToSingle(processedLine[++columnCounter]);
    var sn = Convert.ToSingle(processedLine[++columnCounter]);

    peakResult.MSPeak = new MSPeak(mz, intensity, fwhm, sn);

    if (_containsMSFeatureIDColumn)
    {
        peakResult.MSPeak.MSFeatureID = Convert.ToInt32(processedLine[++columnCounter]);
    }

    return peakResult;
}
/// <summary>
/// Returns true when the given peak matches any of the supplied MS features:
/// the feature's scan is within <paramref name="scanTolerance"/> of the peak's scan
/// and the feature's isotopic profile has a peak within 5 ppm of the peak's m/z.
/// </summary>
/// <param name="msFeatureList">Previously found MS features to search</param>
/// <param name="peakResult">Target peak</param>
/// <param name="scanTolerance">Allowed scan distance between peak and feature</param>
private bool findPeakWithinMSFeatureResults(List<IsosResult> msFeatureList, MSPeakResult peakResult, double scanTolerance)
{
    const double toleranceInPPM = 5;
    var toleranceInMZ = toleranceInPPM / 1e6 * peakResult.MSPeak.XValue;

    foreach (var feature in msFeatureList)
    {
        var profile = feature.IsotopicProfile;
        if (profile == null || profile.Peaklist == null || profile.Peaklist.Count == 0)
        {
            continue;
        }

        // Skip features whose scan is outside the allowed tolerance of the target peak.
        if (Math.Abs(feature.ScanSet.PrimaryScanNumber - peakResult.Scan_num) > scanTolerance)
        {
            continue;
        }

        var matches = PeakUtilities.GetMSPeaksWithinTolerance(profile.Peaklist, peakResult.MSPeak.XValue, toleranceInMZ);
        if (matches.Count > 0)
        {
            return true;
        }
    }

    return false;
}
/// <summary>
/// Checks whether this peak (or a near-equivalent one) has already been processed,
/// using a per-scan BitArray plus a tree of processed peaks. As a side effect, a
/// previously unseen peak is recorded in both structures.
/// NOTE(review): the scan bounds 2..598 and frame bounds 1..2399 are hard-coded —
/// presumably instrument/run specific; the TODO below says to replace them with
/// constants derived from the UIMF file. Confirm they match the current data.
/// </summary>
/// <param name="peak">Candidate peak</param>
/// <returns>true when the peak should be treated as already present/ignorable</returns>
public bool peakPresent(MSPeakResult peak)
{
    bool present = false;
    // Peaks are compared by m/z when searched for in the processed-peaks tree below.
    peak.SortOnKey = IPeak.SortKey.MZ;
    //first check if that scan was already processed
    //we'll have to make sure that we're not throwing an array out of bounds exception here
    //(the +-1 indexing below requires Scan_num to stay inside 2..598)
    if (peak.Scan_num <= 598 && peak.Scan_num >= 2)
    {
        //this is done so that we're assuming that a peak present in the same frame and +- 1 scan in IMS apart doesn't
        //need to be processed again. Based on the real tolerance we could increase this value. For now lets be conservative
        if (scanMapBitArray[peak.Scan_num] || scanMapBitArray[peak.Scan_num - 1] || scanMapBitArray[peak.Scan_num + 1])
        {
            //then check if the peak is present in the tree based on mass tolerance
            if (peaksProcessed.Find(peak) != null)
            {
                present = true;
            }
            else
            {
                peaksProcessed.Add(peak);
            }
        }
        else
        {
            //set the scan to be processed as true
            scanMapBitArray[peak.Scan_num] = true;
            //add the peak to the list of processed peaks, so that we can search for another one that's similar
            peaksProcessed.Add(peak);
        }
    }
    else
    {
        //we should ignore it as it's too close to the junk part of the run
        //TODO change these values to constants based on the UIMF file
        //NOTE(review): out-of-range scans are only flagged "present" when the frame is
        //also at the edges (<= 1 or >= 2399); otherwise they fall through as false —
        //confirm that is intended rather than an oversight.
        if (peak.Frame_num <= 1 || peak.Frame_num >= 2399)
        {
            present = true;
        }
    }
    return (present);
}
/// <summary>
/// Writes a tab-delimited diagnostic line for the peak (ID, scan, m/z, height,
/// the supplied fate string, chromatogram ID) to the console.
/// </summary>
/// <param name="peak">Peak to report</param>
/// <param name="peakFate">Short description of what happened to the peak</param>
private void displayPeakInfoAndFate(MSPeakResult peak, string peakFate)
{
    var info = string.Join("\t",
        peak.PeakID,
        peak.Scan_num,
        peak.MSPeak.XValue,
        peak.MSPeak.Height,
        peakFate,
        peak.ChromID);

    Console.WriteLine(info);
}
/// <summary>
/// Finds the MS feature in the run's current IsosResultBin that most likely contains
/// the given source peak. A feature qualifies when its isotopic profile has a peak
/// within half the source peak's width; with multiple candidates the feature with the
/// highest aggregate intensity wins.
/// </summary>
/// <param name="peakResult">Source peak being located</param>
/// <param name="run">Run whose ResultCollection.IsosResultBin supplies candidate features</param>
/// <returns>Best-matching feature, or null when the bin is empty or nothing matches</returns>
private IsosResult getMSFeatureForCurrentSourcePeak(MSPeakResult peakResult, Run run)
{
    if (run.ResultCollection.IsosResultBin == null || run.ResultCollection.IsosResultBin.Count == 0)
    {
        return null;
    }

    //store each candidate feature along with its m/z difference from the source peak
    var candidates = new Dictionary<IsosResult, double>();

    foreach (var msfeature in run.ResultCollection.IsosResultBin)
    {
        double toleranceInMZ = peakResult.MSPeak.Width / 2;
        var peaksWithinTolerance = PeakUtilities.GetMSPeaksWithinTolerance(msfeature.IsotopicProfile.Peaklist, peakResult.MSPeak.XValue, toleranceInMZ);

        // Bug fix (idiom): the original had an empty `if {} else {...}`; the condition
        // is inverted here so the match case is handled directly.
        if (peaksWithinTolerance != null && peaksWithinTolerance.Count > 0)
        {
            var diff = Math.Abs(peaksWithinTolerance[0].XValue - peakResult.MSPeak.XValue);
            candidates.Add(msfeature, diff);
        }
    }

    if (candidates.Count == 0)
    {
        return null;
    }

    if (candidates.Count == 1)
    {
        return candidates.First().Key;
    }

    // Multiple candidates: prefer the most intense feature.
    return candidates.Keys.OrderByDescending(p => p.IntensityAggregate).First();
}
//TODO: make this so that it works with UIMF data
//TODO: use column header lookup instead of hard coded values
/// <summary>
/// Parses one delimited line of peak text into an MSPeakResult. Column layout:
/// peakID, scan (or frame for UIMF), [extra UIMF column], m/z, intensity, fwhm, s/n,
/// [MSFeatureID].
/// </summary>
/// <param name="line">One row of delimited text from the peaks file</param>
/// <returns>Populated MSPeakResult</returns>
private MSPeakResult convertTextToPeakResult(string line)
{
    var cells = processLine(line);
    var col = 0;

    var result = new MSPeakResult();
    result.PeakID = Convert.ToInt32(cells[col]);

    //NOTE - for UIMF data the frame column is loaded into the 'Scan_num' property. This is kind of ugly since there is
    //already a FrameNum property. I'm doing this so that we can process UIMF files in IQ. We need to fix this later.
    result.Scan_num = Convert.ToInt32(cells[++col]);

    result.MSPeak = new DeconTools.Backend.Core.MSPeak();

    //UIMF peak data contains an extra column; skip it
    if (_peaksAreFromUIMF)
    {
        ++col;
    }

    result.MSPeak.XValue = Convert.ToDouble(cells[++col]);
    result.MSPeak.Height = Convert.ToSingle(cells[++col]);
    result.MSPeak.Width = Convert.ToSingle(cells[++col]);
    result.MSPeak.SignalToNoise = Convert.ToSingle(cells[++col]);

    // Optional trailing column: MS feature ID
    if (_containsMSFeatureIDColumn)
    {
        result.MSPeak.MSFeatureID = Convert.ToInt32(cells[++col]);
    }

    return result;
}
/// <summary>
/// Starting from (startFrameInMap, startScanInMap) in the intensity map, walks frames
/// upward (decreasing index) and then downward (increasing index) for as long as the
/// center-scan intensity stays above the threshold. Within each frame it walks scans
/// left and right of the center until intensity drops to the threshold, emitting one
/// MSPeakResult per above-threshold cell and recording the scan numbers per frame.
/// NOTE(review): the up- and down-frame loops are near-duplicates and could share a
/// helper; also, the threshold parameter is rescaled in place (threshold *= maxIntensity),
/// so callers are expected to pass a fraction of the maximum — confirm.
/// </summary>
/// <param name="intensityMap">Frame-major 2-D intensity grid (frames x scans)</param>
/// <param name="maxIntensity">Maximum intensity in the map; threshold is scaled by this</param>
/// <param name="threshold">Fractional threshold (0..1); multiplied by maxIntensity internally</param>
/// <param name="startFrameInMap">Frame index of the starting cell within the map</param>
/// <param name="startScanInMap">Scan index of the starting cell within the map</param>
/// <param name="startFrame">Actual frame number corresponding to startFrameInMap</param>
/// <param name="startScan">Actual scan number corresponding to startScanInMap</param>
/// <param name="frameAndScanNumbers">Receives, per qualifying frame number, its sorted above-threshold scan numbers</param>
/// <param name="minimumScanNumber">Smallest scan number seen above threshold (ushort.MaxValue if none)</param>
/// <param name="maximumScanNumber">Largest scan number seen above threshold (ushort.MinValue if none)</param>
/// <param name="totalSummed">Total count of scan numbers recorded across qualifying frames</param>
/// <returns>All above-threshold cells as MSPeakResults, for curve fitting</returns>
public List<MSPeakResult> getFrameAndScanNumberListFromIntensityMap(int[][] intensityMap, int maxIntensity, float threshold, ushort startFrameInMap, ushort startScanInMap, ushort startFrame, ushort startScan, Dictionary<ushort, List<ushort>> frameAndScanNumbers, out ushort minimumScanNumber, out ushort maximumScanNumber, out ushort totalSummed)
{
    var peaksForCurveFitting = new List<MSPeakResult>(3000);
    var peakId = 0;
    var frameIndex = startFrameInMap;
    var scanIndex = startScanInMap;
    var scanNumber = startScan;
    var frameNumber = startFrame;
    totalSummed = 0;

    //multiply the threshold by max intensity for now
    threshold *= maxIntensity;

    //start at the center of the map
    minimumScanNumber = ushort.MaxValue;
    maximumScanNumber = ushort.MinValue;

    //go up from the start frame value (decreasing frame index) while the center scan stays above threshold
    while (frameIndex > 0 && intensityMap[frameIndex][startScanInMap] >= threshold)
    {
        var scanNumberList = new List<ushort>(200);
        var end = intensityMap[frameIndex].Length;
        scanIndex = startScanInMap;
        scanNumber = startScan;

        //go left to determine the first value that's below the threshold
        while (scanIndex > 0 && intensityMap[frameIndex][scanIndex] > threshold)
        {
            var peak = new MSPeak();
            peak.Height = intensityMap[frameIndex][scanIndex];
            var msPeak = new MSPeakResult(peakId++, frameNumber, scanNumber, peak);
            peaksForCurveFitting.Add(msPeak);
            // track the overall scan-number extent of the above-threshold region
            if (scanNumber < minimumScanNumber)
            {
                minimumScanNumber = scanNumber;
            }
            else if (scanNumber > maximumScanNumber)
            {
                maximumScanNumber = scanNumber;
            }
            scanNumberList.Add(scanNumber--);
            scanIndex--;
        }

        //start the search for the right value from the next scan
        scanIndex = (ushort)(startScanInMap + 1);
        scanNumber = (ushort)(startScan + 1);

        //go right to determine the next value that's below the threshold
        while (scanIndex < end && intensityMap[frameIndex][scanIndex] > threshold)
        {
            var peak = new MSPeak();
            peak.Height = intensityMap[frameIndex][scanIndex];
            var msPeak = new MSPeakResult(peakId++, frameNumber, scanNumber, peak);
            peaksForCurveFitting.Add(msPeak);
            if (scanNumber < minimumScanNumber)
            {
                minimumScanNumber = scanNumber;
            }
            else if (scanNumber > maximumScanNumber)
            {
                maximumScanNumber = scanNumber;
            }
            scanNumberList.Add(scanNumber++);
            scanIndex++;
        }
        frameIndex--;

        //this means we've finished adding scan numbers to the list for current frame
        //now check if that is more than 3
        //NOTE(review): on a frame with fewer than 3 scans the loop breaks, but the peaks
        //for that frame were already appended to peaksForCurveFitting — confirm intended.
        if (scanNumberList.Count < 3)
        {
            break;
        }
        else
        {
            scanNumberList.Sort();
            frameAndScanNumbers.Add(frameNumber, scanNumberList);
            totalSummed += (ushort)scanNumberList.Count;
        }
        frameNumber--;
    }

    //go down (increasing frame index) from the frame just after the start frame
    frameIndex = (ushort)(startFrameInMap + 1);
    scanIndex = startScanInMap;
    scanNumber = startScan;
    frameNumber = (ushort)(startFrame + 1);
    //NOTE(review): the bound `Length - 1` means the last frame row is never examined,
    //while the upward loop reaches index 1 — possible off-by-one asymmetry; confirm.
    while (frameIndex < intensityMap.Length - 1 && intensityMap[frameIndex][startScanInMap] >= threshold)
    {
        //processing frame
        // Console.WriteLine("processing frame " + frameIndex);
        var scanNumberList = new List<ushort>(200);
        var end = intensityMap[frameIndex].Length;
        scanIndex = startScanInMap;
        scanNumber = startScan;

        //go left to determine the first value that's below the threshold
        while (scanIndex > 0 && intensityMap[frameIndex][scanIndex] > threshold)
        {
            var peak = new MSPeak();
            peak.Height = intensityMap[frameIndex][scanIndex];
            var msPeak = new MSPeakResult(peakId++, frameNumber, scanNumber, peak);
            peaksForCurveFitting.Add(msPeak);
            if (scanNumber < minimumScanNumber)
            {
                minimumScanNumber = scanNumber;
            }
            else if (scanNumber > maximumScanNumber)
            {
                maximumScanNumber = scanNumber;
            }
            scanNumberList.Add(scanNumber--);
            scanIndex--;
        }

        //start the search for the right value from the next scan
        scanIndex = (ushort)(startScanInMap + 1);
        scanNumber = (ushort)(startScan + 1);

        //go right to determine the next value that's below the threshold
        while (scanIndex < end && intensityMap[frameIndex][scanIndex] > threshold)
        {
            var peak = new MSPeak();
            peak.Height = intensityMap[frameIndex][scanIndex];
            var msPeak = new MSPeakResult(peakId++, frameNumber, scanNumber, peak);
            peaksForCurveFitting.Add(msPeak);
            if (scanNumber < minimumScanNumber)
            {
                minimumScanNumber = scanNumber;
            }
            else if (scanNumber > maximumScanNumber)
            {
                maximumScanNumber = scanNumber;
            }
            scanNumberList.Add(scanNumber++);
            scanIndex++;
        }
        frameIndex++;

        //this means we've finished adding scan numbers to the list for current frame
        //now check if that is more than 3
        if (scanNumberList.Count < 3)
        {
            break;
        }
        else
        {
            scanNumberList.Sort();
            frameAndScanNumbers.Add(frameNumber, scanNumberList);
            totalSummed += (ushort)scanNumberList.Count;
        }
        frameNumber++;
    }
    return (peaksForCurveFitting);
}
/// <summary>
/// Imports all rows of T_Peaks from the SQLite peaks file into the supplied list,
/// reporting progress periodically and honoring background-worker cancellation.
/// </summary>
/// <param name="peakList">List that receives one MSPeakResult per T_Peaks row</param>
/// <exception cref="Exception">Thrown when the provider returns a null connection or the database cannot be opened</exception>
public override void ImportPeaks(List<MSPeakResult> peakList)
{
    var fact = DbProviderFactories.GetFactory("System.Data.SQLite");
    var queryString = "SELECT peak_id, scan_num, mz, intensity, fwhm FROM T_Peaks;";

    using (var cnn = fact.CreateConnection())
    {
        if (cnn == null)
        {
            throw new Exception("Factory.CreateConnection returned a null DbConnection instance in ImportPeaks");
        }

        cnn.ConnectionString = "Data Source=" + _sqLiteFilename;
        try
        {
            cnn.Open();
        }
        catch (Exception ex)
        {
            throw new Exception("Peak import failed. Couldn't connect to SQLite database. \n\nDetails: " + ex.Message);
        }

        // Record the total row count first so progress reporting can be meaningful.
        using (var command = cnn.CreateCommand())
        {
            command.CommandText = "SELECT COUNT(*) FROM T_Peaks;";
            numRecords = Convert.ToInt32(command.ExecuteScalar());
        }

        using (var command = cnn.CreateCommand())
        {
            command.CommandText = queryString;

            // Bug fix: the reader is now disposed via `using`; previously an early
            // return on cancellation (or an exception) leaked it.
            using (var reader = command.ExecuteReader())
            {
                var progressCounter = 0;
                var lastReportProgress = DateTime.UtcNow;
                var lastReportProgressConsole = DateTime.UtcNow;

                while (reader.Read())
                {
                    // SQLite integer columns come back as Int64; narrow explicitly.
                    var peakResult = new MSPeakResult
                    {
                        PeakID = (int)(long)reader["peak_id"],
                        Scan_num = (int)(long)reader["scan_num"]
                    };

                    var mz = (double)reader["mz"];
                    var intensity = (float)(double)reader["intensity"];
                    var fwhm = (float)(double)reader["fwhm"];
                    peakResult.MSPeak = new MSPeak(mz, intensity, fwhm);

                    peakList.Add(peakResult);

                    if (backgroundWorker != null && backgroundWorker.CancellationPending)
                    {
                        return;
                    }

                    progressCounter++;
                    reportProgress(progressCounter, ref lastReportProgress, ref lastReportProgressConsole);
                }
            }
        }
    }
}
/// <summary>
/// Imports all rows of T_Peaks from the SQLite peaks file into the supplied list,
/// reporting progress per row and honoring background-worker cancellation.
/// </summary>
/// <param name="peakList">List that receives one MSPeakResult per T_Peaks row</param>
/// <exception cref="Exception">Thrown when the provider returns a null connection or the database cannot be opened</exception>
public override void ImportPeaks(List<MSPeakResult> peakList)
{
    var fact = DbProviderFactories.GetFactory("System.Data.SQLite");
    var queryString = "SELECT peak_id, scan_num, mz, intensity, fwhm FROM T_Peaks;";

    using (var cnn = fact.CreateConnection())
    {
        // Bug fix: guard against a null connection (the provider factory can return null);
        // previously this would have thrown a NullReferenceException.
        if (cnn == null)
        {
            throw new Exception("Factory.CreateConnection returned a null DbConnection instance in ImportPeaks");
        }

        cnn.ConnectionString = "Data Source=" + this.sqliteFilename;
        try
        {
            cnn.Open();
        }
        catch (Exception ex)
        {
            throw new Exception("Peak import failed. Couldn't connect to SQLite database. \n\nDetails: " + ex.Message);
        }

        // Record the total row count first so progress reporting can be meaningful.
        using (var command = cnn.CreateCommand())
        {
            command.CommandText = "SELECT COUNT(*) FROM T_Peaks;";
            numRecords = Convert.ToInt32(command.ExecuteScalar());
        }

        using (var command = cnn.CreateCommand())
        {
            command.CommandText = queryString;

            // Bug fixes: reader disposed via `using` (was leaked on the cancellation
            // early-return), and the unused local `test` has been removed.
            using (var reader = command.ExecuteReader())
            {
                var progressCounter = 0;
                while (reader.Read())
                {
                    // SQLite integer columns come back as Int64; narrow explicitly.
                    var peakresult = new MSPeakResult();
                    peakresult.PeakID = (int)(long)reader["peak_id"];
                    peakresult.Scan_num = (int)(long)reader["scan_num"];
                    peakresult.MSPeak = new MSPeak();
                    peakresult.MSPeak.XValue = (double)reader["mz"];
                    peakresult.MSPeak.Height = (float)(double)reader["intensity"];
                    peakresult.MSPeak.Width = (float)(double)reader["fwhm"];
                    peakList.Add(peakresult);

                    if (this.backgroundWorker != null && backgroundWorker.CancellationPending)
                    {
                        return;
                    }

                    progressCounter++;
                    reportProgress(progressCounter);
                }
            }
        }
    }
}
/// <summary>
/// Marks the peak with its own ID as chromatogram ID and records it in the
/// processed-peaks list.
/// </summary>
/// <param name="peak">Peak that has just been processed</param>
private void addPeakToProcessedPeakList(MSPeakResult peak)
{
    peak.ChromID = peak.PeakID;
    processedMSPeaks.Add(peak);
}
//TODO: make a ChromatogramObject that will help handle my MSPeakResults, etc.
/// <summary>
/// Builds a chromatogram from scan-grouped peak lists: for every scan in
/// [minScan - 5, maxScan + 5) and every target m/z, collects the peaks whose m/z lies
/// within the tolerance window (PPM or absolute), then fills in zeros and assigns the
/// given chromatogram ID. Per-scan lists must be sorted by m/z (the binary search
/// below depends on it).
/// </summary>
/// <param name="groupedMsPeakList">Peaks grouped by scan number, each list sorted by m/z</param>
/// <param name="minScan">First scan of the range of interest</param>
/// <param name="maxScan">Last scan of the range of interest</param>
/// <param name="targetMZList">Target m/z values to extract</param>
/// <param name="tolerance">Tolerance around each target, in PPM or m/z units</param>
/// <param name="chromIDToAssign">Chromatogram ID stamped on the output</param>
/// <param name="toleranceUnit">Unit of <paramref name="tolerance"/> (PPM or MZ)</param>
/// <returns>Chromatogram XY data, or null when no peaks fall in any window</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unsupported tolerance unit</exception>
public XYData GenerateChromatogram(Dictionary<int, List<MSPeakResult>> groupedMsPeakList, int minScan, int maxScan, List<double> targetMZList, double tolerance, int chromIDToAssign, Globals.ToleranceUnit toleranceUnit = Globals.ToleranceUnit.PPM)
{
    Check.Require(groupedMsPeakList != null && groupedMsPeakList.Count > 0, "Cannot generate chromatogram. Source msPeakList is empty or hasn't been defined.");

    var scanTolerance = 5; // TODO: keep an eye on this

    var comparer = new MSPeakResultComparer();
    var tempPeakList = new List<MSPeakResult>();

    for (var i = minScan - scanTolerance; i < maxScan + scanTolerance; i++)
    {
        if (groupedMsPeakList == null || !groupedMsPeakList.TryGetValue(i, out var msPeakResultList))
        {
            continue;
        }

        foreach (var targetMZ in targetMZList)
        {
            double lowerMZ;
            double upperMZ;
            if (toleranceUnit == Globals.ToleranceUnit.PPM)
            {
                lowerMZ = targetMZ - tolerance * targetMZ / 1e6;
                upperMZ = targetMZ + tolerance * targetMZ / 1e6;
            }
            else if (toleranceUnit == Globals.ToleranceUnit.MZ)
            {
                lowerMZ = targetMZ - tolerance;
                upperMZ = targetMZ + tolerance;
            }
            else
            {
                // Bug fix: the message was previously passed as the paramName argument,
                // so it showed up as a bogus parameter name instead of a message.
                throw new ArgumentOutOfRangeException(nameof(toleranceUnit), "Trying to create chromatogram, but the " + toleranceUnit + " unit isn't supported");
            }

            // Locate the first peak at or above lowerMZ via binary search.
            var lowMsPeak = new MSPeak(lowerMZ);
            var lowMsPeakResult = new MSPeakResult { MSPeak = lowMsPeak };
            var binarySearchResult = msPeakResultList.BinarySearch(lowMsPeakResult, comparer);
            var firstCandidateIndex = binarySearchResult >= 0 ? binarySearchResult : ~binarySearchResult;

            for (var j = firstCandidateIndex; j < msPeakResultList.Count; j++)
            {
                var msPeakResult = msPeakResultList[j];
                if (msPeakResult.MSPeak.XValue > upperMZ)
                {
                    // Bug fix (performance): the list is sorted by m/z, so once a peak
                    // exceeds upperMZ no later peak can match; the original kept
                    // scanning to the end of the list on every lookup.
                    break;
                }
                tempPeakList.Add(msPeakResult);
            }
        }
    }

    XYData chromData = null;
    if (!tempPeakList.Any())
    {
        //TODO: we want to return 0 intensity values. But need to make sure there are no downstream problems with this change.
    }
    else
    {
        chromData = GetChromDataAndFillInZerosAndAssignChromID(tempPeakList, chromIDToAssign);
    }
    return chromData;
}
/// <summary>
/// Searches the run's current MS-feature bin for a feature containing the target peak
/// (within 5 ppm). On a unique match the feature is added to the run's result list —
/// unless an equivalent feature was already found — and to the temporary
/// chromatogram-based list; the search then stops. Ambiguous (multi-peak) matches are
/// only logged.
/// </summary>
/// <param name="chromPeakBasedMSFeatures">Small fast-search list of features found so far</param>
/// <param name="peakResult">Target peak being matched</param>
/// <param name="run">Run whose ResultCollection is searched and updated</param>
/// <param name="scanTolerance">Scan tolerance used when checking for duplicates</param>
private void findTargetPeakAddResultsToCollectionAndMarkAssociatedPeaks(List<IsosResult> chromPeakBasedMSFeatures, MSPeakResult peakResult, Run run, double scanTolerance)
{
    double toleranceInPPM = 5;
    var toleranceInMZ = toleranceInPPM / 1e6 * peakResult.MSPeak.XValue;

    //this is the small list if features found within a small m/z range, based on the targeted peak.
    foreach (var msfeature in run.ResultCollection.IsosResultBin)
    {
        var profile = msfeature.IsotopicProfile;
        if (profile == null || profile.Peaklist == null || profile.Peaklist.Count == 0)
        {
            continue;
        }

        var peaksWithinTol = DeconTools.Backend.Utilities.PeakUtilities.GetMSPeaksWithinTolerance(profile.Peaklist, peakResult.MSPeak.XValue, toleranceInMZ);

        if (peaksWithinTol.Count == 0)
        {
            continue;
        }

        if (peaksWithinTol.Count == 1)
        {
            var alreadyFound = findPeakWithinMSFeatureResults(chromPeakBasedMSFeatures, peakResult, scanTolerance);
            if (!alreadyFound)
            {
                run.ResultCollection.ResultList.Add(msfeature); // add it to the big long list of MSFeatures found
                m_msFeatureCounter++;
            }
            chromPeakBasedMSFeatures.Add(msfeature); //also add it to a temporary list. This list is much smaller and can be searched more quickly.
            break; // unique match found — stop searching
        }

        // More than one profile peak fell in the window; unresolved case.
        Console.WriteLine("Not sure what to do with this case!");
    }
}