/// <summary>
/// Widens a found peak on both sides by searching outward from its current
/// start and end indices for better boundary positions.
/// </summary>
/// <param name="crawdadPeak">The peak whose boundaries will be reset in place.</param>
private void Extend(IFoundPeak crawdadPeak)
{
    // The search distance scales with the peak's FWHM, since the sampling
    // interval width may vary, but never drops below the configured minimum.
    int toleranceLen = Math.Max(MinToleranceLen,
        (int) Math.Round(crawdadPeak.Fwhm * FractionFwhmLen));
    int extendedStart = ExtendBoundary(crawdadPeak, crawdadPeak.StartIndex, -1, toleranceLen);
    int extendedEnd = ExtendBoundary(crawdadPeak, crawdadPeak.EndIndex, 1, toleranceLen);
    crawdadPeak.ResetBoundaries(extendedStart, extendedEnd);
}
/// <summary>
/// Formats the salient properties of a found peak into a single line,
/// intended for debugging and log output.
/// </summary>
/// <param name="foundPeak">The peak to describe; must not be null.</param>
/// <returns>A space-separated "Name: value" summary of the peak.</returns>
public static string PeakToString(IFoundPeak foundPeak)
{
    // ReSharper disable LocalizableElement
    return $"StartIndex: {foundPeak.StartIndex} EndIndex: {foundPeak.EndIndex} " +
           $"TimeIndex: {foundPeak.TimeIndex} Area: {foundPeak.Area} " +
           $"BackgroundArea: {foundPeak.BackgroundArea} Height: {foundPeak.Height} " +
           $"Fwhm: {foundPeak.Fwhm} FwhmDegenerate: {foundPeak.FwhmDegenerate} " +
           $"Identified: {foundPeak.Identified}";
    // ReSharper restore LocalizableElement
}
/// <summary>
/// Initializes the peak finder for this chromatogram and seeds RawPeaks from
/// explicitly supplied peak boundaries, bypassing automatic peak detection.
/// </summary>
/// <param name="peakBounds">Explicit start/end times; if empty, no peaks are recorded.</param>
public void SetExplicitPeakBounds(ExplicitPeakBounds peakBounds)
{
    Finder = Crawdads.NewCrawdadPeakFinder();
    Finder.SetChromatogram(Times, Intensities);
    if (!peakBounds.IsEmpty)
    {
        var explicitPeak = Finder.GetPeak(TimeToIndex(peakBounds.StartTime),
            TimeToIndex(peakBounds.EndTime));
        RawPeaks = new[] { explicitPeak };
    }
    else
    {
        RawPeaks = new IFoundPeak[0];
    }
}
/// <summary>
/// Reintegrates this chromatogram over the boundaries of the given max peak and
/// produces the corresponding ChromPeak. All peaks are reintegrated to the max
/// peak's boundaries — even the max peak itself, since its boundaries may have
/// been extended from the Crawdad originals.
/// </summary>
/// <param name="peakMax">The reference peak supplying the boundaries, or null.</param>
/// <param name="flags">Flag values to record on the resulting ChromPeak.</param>
/// <param name="peak">Receives the reintegrated peak, or null when peakMax is null.</param>
/// <returns>The integrated ChromPeak, or ChromPeak.EMPTY when peakMax is null.</returns>
public ChromPeak CalcChromPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags, out IFoundPeak peak)
{
    if (peakMax == null)
    {
        peak = null;
        return ChromPeak.EMPTY;
    }

    int startIndex = peakMax.StartIndex;
    int endIndex = peakMax.EndIndex;
    peak = CalcPeak(startIndex, endIndex);
    return new ChromPeak(Finder, peak, flags, Times, Intensities, MassErrors10X);
}
/// <summary>
/// Decides whether a forced-integration peak coelutes strongly enough with the
/// max peak to be "rescued" (treated as a genuine coeluting peak). Requires
/// identical boundaries, a minimum number of points, a non-identical trace
/// (to avoid self-rescue), and a baseline-subtracted correlation at or above
/// MIN_COELUTION_RESCUE_CORRELATION.
/// </summary>
/// <param name="peakMax">The dominant peak providing the reference boundaries.</param>
/// <param name="peak">The candidate peak being tested for coelution.</param>
/// <returns>True when the candidate qualifies for rescue.</returns>
private bool AreCoeluting(IFoundPeak peakMax, IFoundPeak peak)
{
    // A zero-area candidate can never qualify.
    if (peak.Area == 0)
        return false;

    int start = peakMax.StartIndex;
    int end = peakMax.EndIndex;
    // Boundaries must match the max peak exactly.
    if (peak.StartIndex != start || peak.EndIndex != end)
        return false;

    // Require enough points for a meaningful correlation.
    int len = peakMax.Length;
    if (len < MIN_COELUTION_RESCUE_POINTS)
        return false;

    // Subtract a per-peak baseline: the lower of the two boundary intensities.
    double maxBaseline = Math.Min(peakMax.SafeGetIntensity(start), peakMax.SafeGetIntensity(end));
    double peakBaseline = Math.Min(peak.SafeGetIntensity(start), peak.SafeGetIntensity(end));

    var maxIntens = new double[len];
    var peakIntens = new double[len];
    bool seenUnequal = false;
    for (int i = 0; i < len; i++)
    {
        maxIntens[i] = peakMax.SafeGetIntensity(start + i) - maxBaseline;
        peakIntens[i] = peak.SafeGetIntensity(start + i) - peakBaseline;
        if (maxIntens[i] != peakIntens[i])
            seenUnequal = true;
    }

    // Identical traces mean the candidate is effectively the max peak itself;
    // avoid self-rescue.
    if (!seenUnequal)
        return false;

    double r = new Statistics(maxIntens).R(new Statistics(peakIntens));
    // Written as a rejection test so that a NaN correlation (which fails the
    // comparison) falls through to true, matching the original behavior.
    if (r < MIN_COELUTION_RESCUE_CORRELATION)
        return false;
    return true;
}
/// <summary>
/// Extends a single peak boundary outward in the given direction, but only when
/// the peak is wide enough (by FWHM) for the smoothed-data search to be meaningful.
/// </summary>
/// <param name="peakPrimary">The peak whose boundary is being extended.</param>
/// <param name="indexBoundary">The current boundary index.</param>
/// <param name="increment">-1 to extend the start, +1 to extend the end.</param>
/// <param name="toleranceLen">Maximum number of points to search.</param>
/// <returns>The (possibly moved) boundary index.</returns>
private int ExtendBoundary(IFoundPeak peakPrimary, int indexBoundary, int increment, int toleranceLen)
{
    if (peakPrimary.Fwhm >= MinToleranceSmoothFwhm)
        indexBoundary = ExtendBoundary(peakPrimary, false, indexBoundary, increment, toleranceLen);

    // TODO:
    // Because smoothed data can have a tendency to reach baseline one
    // interval sooner than the raw data, do a final check to choose the
    // boundary correctly for the raw data.
    //indexBoundary = RetractBoundary(peakPrimary, true, indexBoundary, -increment);
    //indexBoundary = ExtendBoundary(peakPrimary, true, indexBoundary, increment, toleranceLen);

    return indexBoundary;
}
/// <summary>
/// Runs Crawdad peak detection on this chromatogram, seeding the finder with
/// known retention times, and precomputes a Savitzky-Golay smoothed intensity
/// trace for later boundary extension. When requireDocNode is set and no
/// DocNode is present, detection is skipped and RawPeaks is left empty.
/// </summary>
/// <param name="retentionTimes">Known retention times used to seed peak finding.</param>
/// <param name="requireDocNode">When true, skip detection unless DocNode is non-null.</param>
public void FindPeaks(double[] retentionTimes, bool requireDocNode)
{
    Finder = Crawdads.NewCrawdadPeakFinder();
    Finder.SetChromatogram(Times, Intensities);

    if (requireDocNode && DocNode == null)
    {
        RawPeaks = new IFoundPeak[0];
        return;
    }

    RawPeaks = Finder.CalcPeaks(MAX_PEAKS, TimesToIndices(retentionTimes));
    // Calculate smoothing for later use in extending the Crawdad peaks
    IntensitiesSmooth = ChromatogramInfo.SavitzkyGolaySmooth(Intensities.ToArray());
}
/// <summary>
/// Reintegrates this chromatogram over the boundaries of the given max peak.
/// All peaks are reintegrated to the max peak's boundaries — even the max peak
/// itself, since its boundaries may have been extended from the Crawdad
/// originals. A forced-integration peak that is found to coelute sufficiently
/// with the max peak has its forced flag cleared.
/// </summary>
/// <param name="peakMax">The reference peak supplying the boundaries, or null.</param>
/// <param name="flags">Flag values to record on the resulting ChromPeak.</param>
/// <param name="peak">Receives the reintegrated peak, or null when peakMax is null.</param>
/// <returns>The integrated ChromPeak, or ChromPeak.EMPTY when peakMax is null.</returns>
public ChromPeak CalcChromPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags, out IFoundPeak peak)
{
    if (peakMax == null)
    {
        peak = null;
        return ChromPeak.EMPTY;
    }

    peak = CalcPeak(peakMax.StartIndex, peakMax.EndIndex);

    bool forced = (flags & ChromPeak.FlagValues.forced_integration) != 0;
    if (forced && AreCoeluting(peakMax, peak))
    {
        // Sufficient coelution with the max peak: drop the forced flag.
        flags &= ~ChromPeak.FlagValues.forced_integration;
    }

    return new ChromPeak(Finder, peak, flags, TimeIntensities, RawTimes);
}
/// <summary>
/// Core boundary-extension search: walks outward from indexBoundary in the
/// direction of increment, moving the boundary only when intensity descends by
/// more than a tolerance proportional to the peak height. The search stops at
/// the chromatogram edges, after toleranceLen points, or if intensity rises
/// above an ascent limit.
/// </summary>
/// <param name="peakPrimary">Peak whose Height scales the descent/ascent tolerances.</param>
/// <param name="useRaw">NOTE(review): not consulted in this body — _intensities is
/// used regardless; presumably related to the raw-vs-smoothed TODO in the caller.</param>
/// <param name="indexBoundary">Starting boundary index.</param>
/// <param name="increment">-1 to search toward the start, +1 toward the end.</param>
/// <param name="toleranceLen">Maximum number of points to search.</param>
/// <returns>The (possibly moved) boundary index.</returns>
private int ExtendBoundary(IFoundPeak peakPrimary, bool useRaw, int indexBoundary, int increment, int toleranceLen)
{
    var intensities = _intensities;
    int lenIntensities = intensities.Count;
    var boundaryIntensity = intensities[indexBoundary];
    var maxIntensity = boundaryIntensity;
    // Look for a descent proportional to the height of the peak. Because, SRM data is
    // so low noise, just looking for any descent can lead to boundaries very far away from
    // the peak.
    float height = peakPrimary.Height;
    double minDescent = height * DescentTol;
    // Put a limit on how high intensity can go before the search is terminated
    double maxHeight = ((height - boundaryIntensity) * AscentTol) + boundaryIntensity;
    // Extend the index in the direction of the increment.
    // Note the loop never visits index 0 or the last index (i > 0 && i < lenIntensities - 1),
    // and stops once the probe is toleranceLen points from the current boundary.
    for (int i = indexBoundary + increment; i > 0 && i < lenIntensities - 1 && Math.Abs(indexBoundary - i) < toleranceLen; i += increment)
    {
        double maxIntensityCurrent = intensities[i];
        // If intensity goes above the maximum, stop looking
        if (maxIntensityCurrent > maxHeight)
        {
            break;
        }
        // If descent greater than tolerance, step until it no longer is.
        // The boundary is advanced one point at a time; when it catches up to
        // the probe index i, the reference maxIntensity is reset to the probe's
        // intensity (ending the inner loop), otherwise the descent is re-tested
        // against the intensity at the advanced boundary.
        while (maxIntensity - maxIntensityCurrent > minDescent)
        {
            indexBoundary += increment;
            if (indexBoundary == i)
            {
                maxIntensity = maxIntensityCurrent;
            }
            else
            {
                maxIntensityCurrent = intensities[indexBoundary];
            }
        }
    }
    return(indexBoundary);
}
/// <summary>
/// Returns a ChromPeak and IFoundPeak that match the start and end times of a
/// particular other IFoundPeak that was found by Crawdad.
/// </summary>
/// <param name="peakMax">The peak supplying the start and end indices to integrate.</param>
/// <param name="flags">Flag values to record on the resulting ChromPeak; the
/// forced-integration flag is cleared when the interpolated peak sufficiently
/// coelutes with peakMax.</param>
/// <returns>The integrated ChromPeak paired with the interpolated IFoundPeak.</returns>
public Tuple<ChromPeak, IFoundPeak> IntegrateFoundPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags)
{
    Assume.IsNotNull(PeakFinder);
    var interpolatedPeak = PeakFinder.GetPeak(peakMax.StartIndex, peakMax.EndIndex);
    if ((flags & ChromPeak.FlagValues.forced_integration) != 0 && ChromData.AreCoeluting(peakMax, interpolatedPeak))
    {
        flags &= ~ChromPeak.FlagValues.forced_integration;
    }

    ChromPeak chromPeak;
    if (TimeIntervals != null)
    {
        // With explicit time intervals, integrate without background subtraction.
        // (Previously a ChromPeak was constructed unconditionally and then
        // discarded on this path; now it is only built when actually used.)
        chromPeak = IntegratePeakWithoutBackground(
            InterpolatedTimeIntensities.Times[peakMax.StartIndex],
            InterpolatedTimeIntensities.Times[peakMax.EndIndex],
            flags);
    }
    else
    {
        chromPeak = new ChromPeak(PeakFinder, interpolatedPeak, flags,
            InterpolatedTimeIntensities, RawTimeIntensities?.Times);
    }
    return Tuple.Create(chromPeak, interpolatedPeak);
}
/// <summary>
/// Reintegrates this chromatogram over the boundaries of the given max peak
/// using a PeakIntegrator. All peaks are reintegrated to the max peak's
/// boundaries — even the max peak itself, since its boundaries may have been
/// extended from the Crawdad originals.
/// </summary>
/// <param name="peakMax">The reference peak supplying the boundaries, or null.</param>
/// <param name="flags">Flag values to record on the resulting ChromPeak.</param>
/// <param name="timeIntervals">Optional time intervals passed through to the integrator.</param>
/// <param name="peak">Receives the reintegrated peak, or null when peakMax is null.</param>
/// <returns>The integrated ChromPeak, or ChromPeak.EMPTY when peakMax is null.</returns>
public ChromPeak CalcChromPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags, TimeIntervals timeIntervals, out IFoundPeak peak)
{
    if (peakMax == null)
    {
        peak = null;
        return ChromPeak.EMPTY;
    }

    var integrator = new PeakIntegrator(TimeIntensities, Finder)
    {
        RawTimeIntensities = RawTimeIntensities,
        TimeIntervals = timeIntervals
    };
    var integrated = integrator.IntegrateFoundPeak(peakMax, flags);
    peak = integrated.Item2;
    return integrated.Item1;
}
/// <summary>
/// Delegates peak calculation to the underlying ChromData, caching both the
/// resulting ChromPeak and the reintegrated IFoundPeak on this object.
/// </summary>
/// <param name="peakMax">The reference peak supplying the boundaries, or null.</param>
/// <param name="flags">Flag values to record on the resulting ChromPeak.</param>
/// <returns>The cached ChromPeak produced by the underlying ChromData.</returns>
public ChromPeak CalcChromPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags)
{
    IFoundPeak crawPeak;
    var chromPeak = Data.CalcChromPeak(peakMax, flags, out crawPeak);
    _crawPeak = crawPeak;
    _chromPeak = chromPeak;
    return _chromPeak;
}
/// <summary>
/// Pairs a chromatogram's data with one peak found within it.
/// </summary>
/// <param name="data">The owning chromatogram data.</param>
/// <param name="peak">The found peak within that data (may be null until calculated).</param>
public ChromDataPeak(ChromData data, IFoundPeak peak)
{
    _crawPeak = peak;
    Data = data;
}
/// <summary>
/// Initializes the peak finder for this chromatogram without performing any
/// peak detection, leaving RawPeaks empty.
/// </summary>
/// <param name="retentionTimes">Unused here; the parameter is kept for
/// signature parity with FindPeaks.</param>
public void SkipFindingPeaks(double[] retentionTimes)
{
    Finder = Crawdads.NewCrawdadPeakFinder();
    Finder.SetChromatogram(Times, Intensities);
    RawPeaks = new IFoundPeak[0];
}