private void Extend(CrawdadPeak crawdadPeak)
        {
            // The number of points to search beyond each boundary scales with the
            // peak's width (FWHM), with a fixed floor, because the sampling
            // interval between points may vary.
            int fwhmSteps = (int)Math.Round(crawdadPeak.Fwhm * FractionFwhmLen);
            int searchLen = Math.Max(MinToleranceLen, fwhmSteps);

            int newStart = ExtendBoundary(crawdadPeak, crawdadPeak.StartIndex, -1, searchLen);
            int newEnd = ExtendBoundary(crawdadPeak, crawdadPeak.EndIndex, 1, searchLen);
            crawdadPeak.ResetBoundaries(newStart, newEnd);
        }
// ---- Example 2 (pasted snippet separator) ----
        private void Extend(CrawdadPeak crawdadPeak)
        {
            // Search distance beyond each boundary is proportional to the peak
            // width, with a minimum floor, since point spacing is not constant.
            int searchSteps = Math.Max(
                MinToleranceLen,
                (int)Math.Round(crawdadPeak.Fwhm * FractionFwhmLen));

            crawdadPeak.ResetBoundaries(
                ExtendBoundary(crawdadPeak, crawdadPeak.StartIndex, -1, searchSteps),
                ExtendBoundary(crawdadPeak, crawdadPeak.EndIndex, +1, searchSteps));
        }
// ---- Example 3 (pasted snippet separator) ----
        public ChromPeak CalcChromPeak(CrawdadPeak peakMax, ChromPeak.FlagValues flags, out CrawdadPeak peak)
        {
            // No maximum peak means there is nothing to integrate.
            if (peakMax == null)
            {
                peak = null;
                return ChromPeak.EMPTY;
            }

            // Reintegrate every peak against the max peak's boundaries — even the
            // max peak itself, since its edges may have been extended beyond the
            // original Crawdad boundaries.
            peak = CalcPeak(peakMax.StartIndex, peakMax.EndIndex);
            return new ChromPeak(Finder, peak, flags, Times, Intensities, MassErrors10X);
        }
// ---- Example 4 (pasted snippet separator) ----
 private int ExtendBoundary(CrawdadPeak peakPrimary, int indexBoundary, int increment, int toleranceLen)
 {
     // Only peaks wide enough relative to the smoothing window are extended on
     // the smoothed trace; narrower peaks keep their original boundary.
     bool wideEnough = peakPrimary.Fwhm >= MinToleranceSmoothFwhm;
     if (wideEnough)
         indexBoundary = ExtendBoundary(peakPrimary, false, indexBoundary, increment, toleranceLen);

     // TODO:
     // Smoothed data tends to reach baseline one interval sooner than the raw
     // data, so a final raw-data pass should place the boundary more accurately.
     // Left disabled pending validation:
     //indexBoundary = RetractBoundary(peakPrimary, true, indexBoundary, -increment);
     //indexBoundary = ExtendBoundary(peakPrimary, true, indexBoundary, increment, toleranceLen);
     return indexBoundary;
 }
// ---- Example 5 (pasted snippet separator) ----
 public void FindPeaks(double[] retentionTimes, bool requireDocNode)
 {
     Finder = new CrawdadPeakFinder();
     Finder.SetChromatogram(Times, Intensities);

     // When a document node is required but absent, report no peaks at all.
     if (requireDocNode && DocNode == null)
     {
         RawPeaks = new CrawdadPeak[0];
         return;
     }

     RawPeaks = Finder.CalcPeaks(MAX_PEAKS, TimesToIndices(retentionTimes));
     // Precompute the smoothed trace used later when extending Crawdad peaks.
     IntensitiesSmooth = ChromatogramInfo.SavitzkyGolaySmooth(Intensities);
 }
// ---- Example 6 (pasted snippet separator) ----
        // Walks a peak boundary outward (in the direction of increment) while the
        // trace keeps descending by more than a height-proportional tolerance,
        // stopping when the intensity rises above a ceiling or the tolerance
        // distance is exhausted.  Returns the new boundary index.
        // NOTE(review): the useRaw parameter is never read in this body —
        // _intensities is used unconditionally; confirm whether a raw/smoothed
        // trace selection was intended here.
        private int ExtendBoundary(CrawdadPeak peakPrimary, bool useRaw, int indexBoundary, int increment, int toleranceLen)
        {
            var intensities       = _intensities;
            int lenIntensities    = intensities.Count;
            var boundaryIntensity = intensities[indexBoundary];
            var maxIntensity      = boundaryIntensity;
            // Look for a descent proportional to the height of the peak.  Because, SRM data is
            // so low noise, just looking for any descent can lead to boundaries very far away from
            // the peak.
            float  height     = peakPrimary.Height;
            double minDescent = height * DescentTol;
            // Put a limit on how high intensity can go before the search is terminated
            double maxHeight = ((height - boundaryIntensity) * AscentTol) + boundaryIntensity;

            // Extend the index in the direction of the increment
            for (int i = indexBoundary + increment;
                 i > 0 && i < lenIntensities - 1 && Math.Abs(indexBoundary - i) < toleranceLen;
                 i += increment)
            {
                double maxIntensityCurrent = intensities[i];

                // If intensity goes above the maximum, stop looking
                if (maxIntensityCurrent > maxHeight)
                {
                    break;
                }

                // If descent greater than tolerance, step until it no longer is.
                // The boundary is advanced toward i one step at a time; once it
                // catches up to i, the running maximum is lowered to the current
                // intensity so subsequent descents are measured from there.
                while (maxIntensity - maxIntensityCurrent > minDescent)
                {
                    indexBoundary += increment;
                    if (indexBoundary == i)
                    {
                        maxIntensity = maxIntensityCurrent;
                    }
                    else
                    {
                        maxIntensityCurrent = intensities[indexBoundary];
                    }
                }
            }

            return(indexBoundary);
        }
// ---- Example 7 (pasted snippet separator) ----
 public ChromPeak CalcChromPeak(CrawdadPeak peakMax, ChromPeak.FlagValues flags)
 {
     // Delegate to the underlying data, caching both results on this instance.
     return _chromPeak = Data.CalcChromPeak(peakMax, flags, out _crawPeak);
 }
// ---- Example 8 (pasted snippet separator) ----
 public ChromDataPeak(ChromData data, CrawdadPeak peak)
 {
     // Pair a chromatogram's data with one detected Crawdad peak.
     _crawPeak = peak;
     Data = data;
 }
 private int ExtendBoundary(CrawdadPeak peakPrimary, int indexBoundary, int increment, int toleranceLen)
 {
     // TODO:
     // Because smoothed data can reach baseline one interval sooner than the
     // raw data, a final check against the raw data should choose the boundary
     // more precisely.  Disabled until validated:
     //indexBoundary = RetractBoundary(peakPrimary, true, indexBoundary, -increment);
     //indexBoundary = ExtendBoundary(peakPrimary, true, indexBoundary, increment, toleranceLen);

     // Peaks narrower than the smoothing FWHM threshold keep their boundary.
     if (peakPrimary.Fwhm < MinToleranceSmoothFwhm)
         return indexBoundary;

     return ExtendBoundary(peakPrimary, false, indexBoundary, increment, toleranceLen);
 }
// ---- Example 10 (pasted snippet separator) ----
        // Moves a peak boundary outward step by step while intensity descends by
        // more than a tolerance proportional to the peak height; gives up when
        // intensity climbs above a ceiling, the array edge is reached, or the
        // search distance exceeds toleranceLen.  Returns the new boundary index.
        // NOTE(review): useRaw is unused here — the body always reads
        // _intensities; verify whether this was meant to switch between raw and
        // smoothed traces.
        private int ExtendBoundary(CrawdadPeak peakPrimary, bool useRaw, int indexBoundary, int increment, int toleranceLen)
        {
            var intensities = _intensities;
            int lenIntensities = intensities.Count;
            var boundaryIntensity = intensities[indexBoundary];
            var maxIntensity = boundaryIntensity;
            // Look for a descent proportional to the height of the peak.  Because, SRM data is
            // so low noise, just looking for any descent can lead to boundaries very far away from
            // the peak.
            float height = peakPrimary.Height;
            double minDescent = height * DescentTol;
            // Put a limit on how high intensity can go before the search is terminated
            double maxHeight = ((height - boundaryIntensity) * AscentTol) + boundaryIntensity;

            // Extend the index in the direction of the increment
            for (int i = indexBoundary + increment;
                 i > 0 && i < lenIntensities - 1 && Math.Abs(indexBoundary - i) < toleranceLen;
                 i += increment)
            {
                double maxIntensityCurrent = intensities[i];

                // If intensity goes above the maximum, stop looking
                if (maxIntensityCurrent > maxHeight)
                    break;

                // If descent greater than tolerance, step until it no longer is.
                // The boundary advances one step per iteration; when it reaches i,
                // the running maximum resets to the intensity at i.
                while (maxIntensity - maxIntensityCurrent > minDescent)
                {
                    indexBoundary += increment;
                    if (indexBoundary == i)
                        maxIntensity = maxIntensityCurrent;
                    else
                        maxIntensityCurrent = intensities[indexBoundary];
                }
            }

            return indexBoundary;
        }