public void FindPeakElement_OnTheRight_VeryLongList()
{
    // A 1000-element list produced by GetListOfElement should peak at the last index.
    var values = GetListOfElement(1000);

    var peakIndex = PeakFinder.FindPeakElement(values, 0, values.Count - 1);

    Assert.AreEqual(999, peakIndex);
}
public void FindPeakElement_OnTheRight()
{
    // Each case pairs an input list with the expected peak index;
    // peaks sit at or near the right end of the list.
    var cases = new List<Tuple<List<int>, int>>
    {
        Tuple.Create(new List<int> { 1, 2, 3, 4, 1 }, 3),          // peak 4 at [3]
        Tuple.Create(new List<int> { 1, 2, 3, 4, 5, 1 }, 4),       // peak 5 at [4]
        Tuple.Create(new List<int> { 1, 2, 3, 4, 5, 6, 1 }, 5),    // peak 6 at [5]
        Tuple.Create(new List<int> { 1, 2, 3, 4, 5, 6, 7 }, 6),    // peak 7 at [6]
        Tuple.Create(new List<int> { 1, 2, 6, 5, 3, 7, 4 }, 2)     // peak 6 at [2] (original comment had value/index swapped)
    };

    foreach (var testCase in cases)
    {
        var values = testCase.Item1;
        Assert.AreEqual(testCase.Item2, PeakFinder.FindPeakElement(values, 0, values.Count - 1));
    }
}
public void FindPeakElement_OnTheLeft_VeryLongList()
{
    // Reversing the generated ascending list puts the maximum at index 0.
    var values = GetListOfElement(1000);
    values.Reverse();

    var peakIndex = PeakFinder.FindPeakElement(values, 0, values.Count - 1);

    Assert.AreEqual(0, peakIndex);
}
public void getExcelColumnHeaderTest()
{
    // (column position, expected Excel header) pairs covering the single-,
    // double-, triple- and quadruple-letter boundaries of Excel's bijective
    // base-26 column naming (A..Z = 1..26, AA = 27, ZZ = 702, AAA = 703,
    // ZZZ = 18278, AAAA = 18279).
    //
    // Fixes over the original test:
    //  - positions 36, 41, 702, 703, 18278 and 18279 computed an expected
    //    value but never asserted it;
    //  - expected for 18278 was "ZZ", but the last triple-letter column is "ZZZ";
    //  - Assert.AreEqual had expected/actual reversed (expected goes first).
    var cases = new List<Tuple<int, string>>
    {
        Tuple.Create(26, "Z"),        // last single-letter column
        Tuple.Create(27, "AA"),       // first double-letter column
        Tuple.Create(28, "AB"),
        Tuple.Create(36, "AJ"),
        Tuple.Create(41, "AO"),
        Tuple.Create(702, "ZZ"),      // last double-letter column
        Tuple.Create(703, "AAA"),     // first triple-letter column
        Tuple.Create(18278, "ZZZ"),   // last triple-letter column
        Tuple.Create(18279, "AAAA")   // first quadruple-letter column
    };

    foreach (var testCase in cases)
    {
        var position = testCase.Item1;
        var expected = testCase.Item2;

        var result = PeakFinder.getExcelColumnHeader(position);

        Assert.AreEqual(expected, result);
        Console.WriteLine(result);
    }
}
public void IsPeakElement_AtBoundaryLimitOnLeft()
{
    // Index 0 only has a right-hand neighbour; these lists start with their maximum.
    var peakAtZero = new[]
    {
        new List<int> { 3, 2 },
        new List<int> { 3, 2, 1, 0 }
    };
    foreach (var values in peakAtZero)
    {
        Assert.IsTrue(PeakFinder.IsPeakElement(values, 0));
    }

    // These lists rise after index 0, so the first element is not a peak.
    var noPeakAtZero = new[]
    {
        new List<int> { 1, 2 },
        new List<int> { 1, 2, 1, 0 }
    };
    foreach (var values in noPeakAtZero)
    {
        Assert.IsFalse(PeakFinder.IsPeakElement(values, 0));
    }
}
public void IsPeakElement_AtBoundaryLimitOnRight()
{
    // Each case probes the last index of its list; the expected outcome is the bool.
    var cases = new List<Tuple<List<int>, bool>>
    {
        Tuple.Create(new List<int> { 3, 2, 3 }, true),
        Tuple.Create(new List<int> { 2, 3 }, true),
        Tuple.Create(new List<int> { 3, 2, 1 }, false),
        Tuple.Create(new List<int> { 2, 1 }, false)
    };

    foreach (var testCase in cases)
    {
        var values = testCase.Item1;
        var lastIndex = values.Count - 1;
        Assert.AreEqual(testCase.Item2, PeakFinder.IsPeakElement(values, lastIndex));
    }
}
/// <summary>
/// The Main Method. Handles initial UI: shows a menu in a loop and dispatches
/// to the chosen peak-finding demo until the user selects Exit.
/// </summary>
/// <param name="args">Command-line arguments (not used).</param>
static void Main(string[] args)
{
    var exitRequested = false;
    while (!exitRequested)
    {
        // Menu options (note: the menu skips option 4).
        Console.WriteLine("Select the number for peak finding method do you want to choose...");
        Console.WriteLine("1-> Finding a peak for a 1 Dimensional array in a straight forward way");
        Console.WriteLine("2-> Finding a peak for a 1 Dimensional array using Divide and Conquer");
        Console.WriteLine("3-> Finding a peak for a 2 Dimensional array using Divide and Conquer");
        Console.WriteLine("5-> Exit");

        PeakFinder pf = new PeakFinder();

        // Non-numeric input leaves chosenOption at 0, which falls through to default.
        int.TryParse(Console.ReadLine(), out int chosenOption);

        switch (chosenOption)
        {
            case 1:
                pf.StraightForward_1D(pf.Get1DArrayInput());
                break;

            case 2:
                pf.DivideAndConquer_1D(pf.Get1DArrayInput());
                break;

            case 3:
                int[,] matrix = pf.Get2DArrayInput();
                pf.Print2DMatrix(matrix);
                pf.DivideAndConquer_2D(matrix, 0, matrix.GetLength(1) - 1);
                break;

            case 5:
                exitRequested = true;
                break;

            default:
                Console.WriteLine("\n\n");
                Console.WriteLine("Please enter an appropriate input");
                break;
        }
    }
}
public void FindPeakElement_IsPeakElementAtIndex()
{
    var values = new List<int> { 1, 3, 2 }; // peak 3 at [1]
    Assert.AreEqual(1, PeakFinder.FindPeakElement(values, 0, values.Count - 1));

    values = new List<int> { 3, 2 }; // peak 3 at [0]
    Assert.AreEqual(0, PeakFinder.FindPeakElement(values, 0, values.Count - 1));

    // Search window restricted to start at index 1; peak 3 is at [1]
    // (the original comment said "[0]", which contradicted the assertion).
    values = new List<int> { 2, 3 };
    Assert.AreEqual(1, PeakFinder.FindPeakElement(values, 1, values.Count - 1));
}
protected override IEnumerable<PeakSet> BulkPeakFind(DataReader uimf, FileInfo originFile, int frameNumber)
{
    // Fetch the full m/z profile for this frame and convert each entry into
    // a generic (double, double) pair for the peak finder.
    var mzData = GetFullMzInfo(uimf, frameNumber);
    var doublePairs = new List<KeyValuePair<double, double>>();
    foreach (var entry in mzData)
    {
        doublePairs.Add(new KeyValuePair<double, double>(entry.Key, entry.Value));
    }

    var mzPeaks = PeakFinder.FindPeaks(doublePairs);

    // Persist the detected peaks as XML alongside the source file.
    var outputLocation = DataExporter.GetOutputLocation(originFile, "Mz_Peaks", frameNumber, "xml");
    DataExporter.OutputPeaks(mzPeaks, outputLocation);

    return new List<PeakSet> { mzPeaks };
}
/// <summary>
/// Returns a ChromPeak and IFoundPeak that match the start and end times a particular other IFoundPeak
/// that was found by Crawdad.
/// </summary>
public Tuple<ChromPeak, IFoundPeak> IntegrateFoundPeak(IFoundPeak peakMax, ChromPeak.FlagValues flags)
{
    Assume.IsNotNull(PeakFinder);
    var interpolatedPeak = PeakFinder.GetPeak(peakMax.StartIndex, peakMax.EndIndex);

    // Drop the forced-integration flag when the interpolated peak co-elutes
    // with the peak Crawdad found.
    var forcedIntegration = (flags & ChromPeak.FlagValues.forced_integration) != 0;
    if (forcedIntegration && ChromData.AreCoeluting(peakMax, interpolatedPeak))
    {
        flags &= ~ChromPeak.FlagValues.forced_integration;
    }

    var chromPeak = new ChromPeak(PeakFinder, interpolatedPeak, flags, InterpolatedTimeIntensities, RawTimeIntensities?.Times);
    if (TimeIntervals != null)
    {
        // With explicit time intervals, integrate over the found peak's time
        // range without background subtraction instead.
        var startTime = InterpolatedTimeIntensities.Times[peakMax.StartIndex];
        var endTime = InterpolatedTimeIntensities.Times[peakMax.EndIndex];
        chromPeak = IntegratePeakWithoutBackground(startTime, endTime, flags);
    }

    return Tuple.Create(chromPeak, interpolatedPeak);
}
protected override IEnumerable<PeakSet> BulkPeakFind(DataReader uimf, FileInfo originFile, int frameNumber)
{
    var results = new List<PeakSet>();

    // One XIC extraction + peak-find + export pass per configured target.
    foreach (var target in Options.XicTargetList)
    {
        var xicData = GetXicInfo(uimf, frameNumber, target.TargetMz, target.Tolerance, this.Options.Getmsms);
        var peaks = PeakFinder.FindPeaks(xicData);

        var outputLocation = DataExporter.GetOutputLocation(
            originFile,
            "XiC_Peaks_mz_" + target.TargetMz + "_tolerance_" + target.Tolerance + "_Frame",
            frameNumber,
            "xml");
        DataExporter.OutputPeaks(peaks, outputLocation);

        results.Add(peaks);
    }

    return results;
}
protected override IEnumerable<PeakSet> BulkPeakFind(DataReader uimf, FileInfo originFile, int frameNumber)
{
    // Build (drift time, TIC) pairs for every scan in the frame.
    var scanData = GetFullScanInfo(uimf, frameNumber);
    var driftTimePairs = new List<KeyValuePair<double, double>>();
    foreach (var scanInfo in scanData)
    {
        driftTimePairs.Add(new KeyValuePair<double, double>(scanInfo.DriftTime, scanInfo.TIC));
    }

    var ticPeaks = PeakFinder.FindPeaks(driftTimePairs);

    // Export detected TIC peaks as XML next to the source file.
    // (Original named this local "mzPeakOutputLocation", but it holds the TIC output path.)
    var ticPeakOutputLocation = DataExporter.GetOutputLocation(originFile, "TiC_Peaks", frameNumber, "xml");
    DataExporter.OutputPeaks(ticPeaks, ticPeakOutputLocation);

    return new List<PeakSet> { ticPeaks };
}
public void FindPeak1DVersion()
{
    // Input { 10, 20, 15, 2, 23, 90, 67 }: both 20 and 90 are acceptable peaks,
    // so the test only checks membership, not a specific value.
    var input = new List<int> { 10, 20, 15, 2, 23, 90, 67 };
    var acceptablePeaks = new List<int> { 20, 90 };

    var foundPeak = PeakFinder.PeakFinder1D(input);

    Assert.IsTrue(acceptablePeaks.Contains(foundPeak));
}
public void FindPeak2DVersion1()
{
    var matrix = new int[,]
    {
        { 1, 2, 3, 4, 5 },
        { 5, 6, 9, 8, 6 },
        { 9, 0, 1, 2, 9 },
        { 3, 4, 5, 6, 8 }
    };

    // Matrix dimensions are passed explicitly to the finder.
    var rowCount = matrix.GetLength(0);
    var columnCount = matrix.GetLength(1);

    // 9 is the only peak value this test accepts.
    var acceptablePeaks = new List<int> { 9 };

    var foundPeak = PeakFinder.PeakFinder2D(matrix, rowCount, columnCount);

    Assert.IsTrue(acceptablePeaks.Contains(foundPeak));
}
public void FindPeakElement_OnTheLeft()
{
    // Each case pairs an input list with the expected peak index;
    // peaks sit at index 0 or 1, i.e. the left end of the list.
    var cases = new List<Tuple<List<int>, int>>
    {
        Tuple.Create(new List<int> { 1, 4, 3, 2, 1 }, 1),             // peak 4 at [1]
        Tuple.Create(new List<int> { 1, 5, 4, 3, 2, 1 }, 1),          // peak 5 at [1]
        Tuple.Create(new List<int> { 5, 4, 3, 2, 1 }, 0),             // peak 5 at [0]
        Tuple.Create(new List<int> { 1, 6, 5, 4, 3, 2, 1 }, 1),       // peak 6 at [1]
        Tuple.Create(new List<int> { 6, 5, 4, 3, 2, 1 }, 0),          // peak 6 at [0]
        Tuple.Create(new List<int> { 1, 7, 6, 5, 4, 3, 2, 1 }, 1),    // peak 7 at [1]
        Tuple.Create(new List<int> { 7, 6, 5, 4, 3, 2, 1 }, 0)        // peak 7 at [0]
    };

    foreach (var testCase in cases)
    {
        var values = testCase.Item1;
        Assert.AreEqual(testCase.Item2, PeakFinder.FindPeakElement(values, 0, values.Count - 1));
    }
}
public void IsPeakElement_WithinBoundary()
{
    // Interior indices: (list, probe index, expected result).
    // Plateau cases ({3,3,3} etc.) are expected to count as peaks.
    var cases = new List<Tuple<List<int>, int, bool>>
    {
        Tuple.Create(new List<int> { 1, 3, 2 }, 1, true),
        Tuple.Create(new List<int> { 1, 1, 3, 2, 2 }, 2, true),
        Tuple.Create(new List<int> { 3, 3, 3 }, 1, true),
        Tuple.Create(new List<int> { 1, 3, 3, 3, 2 }, 2, true),
        Tuple.Create(new List<int> { 1, 0, 2 }, 1, false),
        Tuple.Create(new List<int> { 1, 1, 0, 2, 2 }, 2, false)
    };

    foreach (var testCase in cases)
    {
        Assert.AreEqual(testCase.Item3, PeakFinder.IsPeakElement(testCase.Item1, testCase.Item2));
    }
}
/// <summary>
/// Return the ChromPeak with the specified start and end times chosen by a user.
/// </summary>
public ChromPeak IntegratePeak(float startTime, float endTime, ChromPeak.FlagValues flags)
{
    // For a triggered acquisition, we just use the start and end time supplied
    // by the user and Crawdad is not involved with the peak integration.
    if (TimeIntervals != null)
    {
        return IntegratePeakWithoutBackground(startTime, endTime, flags);
    }

    // Lazily create the peak finder on first use.
    if (PeakFinder == null)
    {
        PeakFinder = CreatePeakFinder(InterpolatedTimeIntensities);
    }

    var indexOfStart = InterpolatedTimeIntensities.IndexOfNearestTime(startTime);
    var indexOfEnd = InterpolatedTimeIntensities.IndexOfNearestTime(endTime);

    // Degenerate range: both times snap to the same point, nothing to integrate.
    if (indexOfStart == indexOfEnd)
    {
        return ChromPeak.EMPTY;
    }

    var foundPeak = PeakFinder.GetPeak(indexOfStart, indexOfEnd);
    return new ChromPeak(PeakFinder, foundPeak, flags, InterpolatedTimeIntensities, RawTimeIntensities?.Times);
}
/// <summary>
/// Create Experimental Run.
/// Refactor Idea: This class should not hold mca/motor/thermometer at all. It should be pass along from DAQThread as arguments.
/// </summary>
/// <param name="mca">MCA/digitizer hardware handle held by this run.</param>
/// <param name="motor">Linear motor handle held by this run.</param>
/// <param name="thermometer">Digital thermometer handle held by this run.</param>
/// <param name="runmode">Run mode; selects which start/stop/update callbacks are used. Defaults to Experiment.</param>
public ExperimentalRun(DT5780 mca, LinearMotor motor, DigitalThermometer thermometer, RunMode runmode = RunMode.Experiment)
{
    // Hardware handles (see refactor note in the summary).
    this.mca = mca;
    this.thermometer = thermometer;
    this.motor = motor;

    // Default limits; units of Timeout and the meaning of MaxSample == 0 are
    // not visible here — TODO confirm (presumably seconds / "no limit").
    Timeout = 60;
    MaxSample = 0;

    Log = new ExperimentLog();
    ClearData();
    RunMode = runmode;

    // One peak finder per MCA channel.
    Channel0PeakFinder = new PeakFinder();
    Channel1PeakFinder = new PeakFinder();
    ExperimentStatus = ExperimentStatusEnum.Prepared;

    // Per-mode callback tables: DAQThread invokes these on start, stop and progress.
    StartActionMap = new Dictionary<RunMode, DAQThread.OnSuccessfulStartDelegate>{
        {RunMode.Experiment, OnExperimentStart},
        {RunMode.Calibrate, OnCalibrationStart},
        {RunMode.Simulated, OnSimulationStart}
    };
    StopActionMap = new Dictionary<RunMode, DAQThread.OnSuccessfulStopDelegate>{
        {RunMode.Experiment, OnExperimentEnd},
        {RunMode.Calibrate, OnCalibrationEnd},
        {RunMode.Simulated, OnSimulationEnd}
    };
    UpdateActionMap = new Dictionary<RunMode, DAQThread.UpdateProgressDelegate>{
        {RunMode.Experiment, OnExperimentUpdate},
        {RunMode.Calibrate, OnCalibrationUpdate},
        {RunMode.Simulated, OnSimulationUpdate}
    };

    InitCommand();

    // Start with empty fitted-curve buffers for both channels.
    Channel0FittedDataPoints = new DataPoint[0];
    Channel1FittedDataPoints = new DataPoint[0];
}
/// <summary>
/// Core peak-detection pass: smooths the Y data, detects candidate peaks,
/// adjusts each peak's left/right edges, flags valid peaks, and records the
/// largest valid peak in peakData.BestPeak.
/// </summary>
/// <param name="peakDetector">Peak detection engine whose DetectPeaks method is used.</param>
/// <param name="scanNumbers">Scan number for each data point; used for the distance-to-original-location check.</param>
/// <param name="peakData">In/out container: supplies X/Y data, receives smoothed data, detected peaks, and BestPeak.</param>
/// <param name="peakFinderOptions">Peak finder tuning options.</param>
/// <returns>True when a valid peak was found (peakData.BestPeak is set). Detected peaks will be in the peakData object.</returns>
private bool FindPeaksWork(
    PeakFinder peakDetector,
    IList<int> scanNumbers,
    PeakDataContainer peakData,
    udtSICPeakFinderOptionsType peakFinderOptions)
{
    // Passed through to ExpandPeakLeftEdge/ExpandPeakRightEdge; its semantics
    // are defined there — TODO confirm why it is always 0 here.
    const float sngPeakMaximum = 0;

    bool validPeakFound;

    // Smooth the Y data, and store in peakData.SmoothedYData
    // Note that if using a Butterworth filter, then we increase peakData.PeakWidthPointsMinimum
    // if too small, compared to 1/SamplingFrequency
    var peakWidthPointsMinimum = peakData.PeakWidthPointsMinimum;
    var dataIsSmoothed = SmoothData(peakData.YData, peakData.DataCount, peakFinderOptions,
                                    ref peakWidthPointsMinimum, out var smoothedYData, out var errorMessage);

    // peakWidthPointsMinimum may have been auto-updated
    peakData.PeakWidthPointsMinimum = peakWidthPointsMinimum;

    // Store the smoothed data in the data container
    peakData.SetSmoothedData(smoothedYData);

    var peakDetectIntensityThresholdPercentageOfMaximum = (int)Math.Round(peakFinderOptions.IntensityThresholdFractionMax * 100);
    const int peakWidthInSigma = 2;
    const bool useValleysForPeakWidth = true;
    const bool movePeakLocationToMaxIntensity = true;

    if (peakFinderOptions.FindPeaksOnSmoothedData && dataIsSmoothed)
    {
        // Detect peaks on the smoothed trace
        peakData.Peaks = peakDetector.DetectPeaks(
            peakData.XData,
            peakData.SmoothedYData,
            peakFinderOptions.IntensityThresholdAbsoluteMinimum,
            peakData.PeakWidthPointsMinimum,
            peakDetectIntensityThresholdPercentageOfMaximum,
            peakWidthInSigma,
            useValleysForPeakWidth,
            movePeakLocationToMaxIntensity);
    }
    else
    {
        // Look for the peaks, using peakData.PeakWidthPointsMinimum as the minimum peak width
        peakData.Peaks = peakDetector.DetectPeaks(
            peakData.XData,
            peakData.YData,
            peakFinderOptions.IntensityThresholdAbsoluteMinimum,
            peakData.PeakWidthPointsMinimum,
            peakDetectIntensityThresholdPercentageOfMaximum,
            peakWidthInSigma,
            useValleysForPeakWidth,
            movePeakLocationToMaxIntensity);
    }

    if (peakData.Peaks == null)
    {
        // Fatal error occurred while finding peaks
        return(false);
    }

    if (peakData.PeakWidthPointsMinimum == MINIMUM_PEAK_WIDTH)
    {
        // Testing the minimum peak width; run some checks
        ExamineNarrowPeaks(peakData, peakFinderOptions);
    }

    if (peakData.Peaks.Count <= 0)
    {
        // No peaks were found
        return(false);
    }

    foreach (var peak in peakData.Peaks)
    {
        peak.IsValid = false;

        // Find the center and boundaries of this peak

        // Make sure peak.LocationIndex is between peak.LeftEdge and peak.RightEdge
        if (peak.LeftEdge > peak.LocationIndex)
        {
            Console.WriteLine("peak.LeftEdge is > peak.LocationIndex; this is probably a programming error");
            peak.LeftEdge = peak.LocationIndex;
        }

        if (peak.RightEdge < peak.LocationIndex)
        {
            Console.WriteLine("peak.RightEdge is < peak.LocationIndex; this is probably a programming error");
            peak.RightEdge = peak.LocationIndex;
        }

        // See if the peak boundaries (left and right edges) need to be narrowed or expanded.
        // Do this by stepping left or right while the intensity is decreasing. If an increase is found,
        // but the next point after the increasing point is less than the current point, then possibly
        // keep stepping; the test for whether to keep stepping is that the next point away from the
        // increasing point must be less than the current point. If this is the case, replace the
        // increasing point with the average of the current point and the point two points away.
        //
        // Use smoothed data for this step.
        // Determine the smoothing window based on peakData.PeakWidthPointsMinimum.
        // If peakData.PeakWidthPointsMinimum <= 4 then do not filter.

        if (!dataIsSmoothed)
        {
            // Need to smooth the data now
            peakWidthPointsMinimum = peakData.PeakWidthPointsMinimum;
            dataIsSmoothed = SmoothData(
                peakData.YData,
                peakData.DataCount,
                peakFinderOptions,
                ref peakWidthPointsMinimum,
                out smoothedYData,
                out errorMessage);

            // peakWidthPointsMinimum may have been auto-updated
            peakData.PeakWidthPointsMinimum = peakWidthPointsMinimum;

            // Store the smoothed data in the data container
            peakData.SetSmoothedData(smoothedYData);
        }

        // First see if we need to narrow the peak by looking for decreasing intensities
        // moving toward the peak center. We'll use the unsmoothed data for this.
        while (peak.LeftEdge < peak.LocationIndex - 1)
        {
            if (peakData.YData[peak.LeftEdge] > peakData.YData[peak.LeftEdge + 1])
            {
                // OrElse (usedSmoothedDataForPeakDetection AndAlso peakData.SmoothedYData[peak.LeftEdge) < 0) Then
                peak.LeftEdge += 1;
            }
            else
            {
                break;
            }
        }

        while (peak.RightEdge > peak.LocationIndex + 1)
        {
            if (peakData.YData[peak.RightEdge - 1] < peakData.YData[peak.RightEdge])
            {
                // OrElse (usedSmoothedDataForPeakDetection AndAlso peakData.SmoothedYData[peak.RightEdge) < 0) Then
                peak.RightEdge -= 1;
            }
            else
            {
                break;
            }
        }

        // Now see if we need to expand the peak by looking for decreasing intensities moving
        // away from the peak center, but allowing for small increases.
        // We'll use the smoothed data for this; if we encounter negative values in the smoothed
        // data, we'll keep going until we reach the low point since huge peaks can cause some odd
        // behavior with the Butterworth filter.
        // Keep track of the number of times we step over an increased value.
        ExpandPeakLeftEdge(peakData, peakFinderOptions, peak, sngPeakMaximum, dataIsSmoothed);
        ExpandPeakRightEdge(peakData, peakFinderOptions, peak, sngPeakMaximum, dataIsSmoothed);

        peak.IsValid = true;
        if (!peakFinderOptions.ReturnClosestPeak)
        {
            continue;
        }

        // If peakData.OriginalPeakLocationIndex is not between peak.LeftEdge and peak.RightEdge, then check
        // if the scan number for peakData.OriginalPeakLocationIndex is within .MaxDistanceScansNoOverlap scans of
        // either of the peak edges; if not, then mark the peak as invalid since it does not contain the
        // scan for the parent ion
        if (peakData.OriginalPeakLocationIndex < peak.LeftEdge)
        {
            if (
                Math.Abs(scanNumbers[peakData.OriginalPeakLocationIndex] - scanNumbers[peak.LeftEdge]) >
                peakFinderOptions.MaxDistanceScansNoOverlap)
            {
                peak.IsValid = false;
            }
        }
        else if (peakData.OriginalPeakLocationIndex > peak.RightEdge)
        {
            if (
                Math.Abs(scanNumbers[peakData.OriginalPeakLocationIndex] - scanNumbers[peak.RightEdge]) >
                peakFinderOptions.MaxDistanceScansNoOverlap)
            {
                peak.IsValid = false;
            }
        }
    }

    // Find the peak with the largest area that has peakData.PeakIsValid = True
    peakData.BestPeak = null;
    var bestPeakArea = double.MinValue;
    foreach (var peak in peakData.Peaks)
    {
        if (peak.IsValid)
        {
            if (peak.Area > bestPeakArea)
            {
                peakData.BestPeak = peak;
                bestPeakArea = peak.Area;
            }
        }
    }

    if (peakData.BestPeak != null)
    {
        validPeakFound = true;
    }
    else
    {
        validPeakFound = false;
    }

    return(validPeakFound);
}
/// <summary>
/// Find peaks in the given SIC data: estimates noise and potential peak area,
/// derives an initial minimum peak width from the area-based S/N, then runs
/// FindPeaksWork to detect and validate peaks.
/// </summary>
/// <param name="peakFinderOptions">Peak finder tuning options (ButterworthSamplingFrequency may be defaulted in-place).</param>
/// <param name="xyData">Scan number / intensity pairs.</param>
/// <param name="originalPeakLocationIndex">
/// Data point index in the x values that should be a part of the peak.
/// Used for determining the best peak.</param>
/// <param name="smoothedYData">Smoothed Y values.</param>
/// <returns>The detected peaks (empty list when xyData is empty).</returns>
public List<clsPeak> FindPeaks(
    udtSICPeakFinderOptionsType peakFinderOptions,
    List<KeyValuePair<int, double>> xyData,
    int originalPeakLocationIndex,
    out List<double> smoothedYData)
{
    if (xyData.Count == 0)
    {
        smoothedYData = new List<double>();
        return(new List<clsPeak>());
    }

    // Compute the potential peak area for this SIC
    var udtSICPotentialAreaStatsForPeak = FindMinimumPotentialPeakArea(xyData, peakFinderOptions);

    // Estimate the noise level
    var noiseAnalyzer = new NoiseLevelAnalyzer();
    const bool ignoreNonPositiveData = false;

    // Split the pairs into parallel scan-number / intensity arrays
    var intensityData = new double[xyData.Count];
    var scanNumbers = new int[xyData.Count];
    for (var index = 0; index < xyData.Count; index++)
    {
        scanNumbers[index] = xyData[index].Key;
        intensityData[index] = xyData[index].Value;
    }

    noiseAnalyzer.ComputeTrimmedNoiseLevel(intensityData, 0, intensityData.Length - 1,
                                           peakFinderOptions.SICBaselineNoiseOptions, ignoreNonPositiveData,
                                           out var udtBaselineNoiseStats);

    // Find maximumPotentialPeakArea and dataPointCountAboveThreshold
    var maximumPotentialPeakArea = FindMaximumPotentialPeakArea(intensityData, peakFinderOptions, udtBaselineNoiseStats, out var dataPointCountAboveThreshold);

    // Clamp both area values to at least 1 so the S/N ratio below is well-defined
    if (maximumPotentialPeakArea < 1)
    {
        maximumPotentialPeakArea = 1;
    }

    var areaBasedSignalToNoise = maximumPotentialPeakArea / udtSICPotentialAreaStatsForPeak.MinimumPotentialPeakArea;
    if (areaBasedSignalToNoise < 1)
    {
        areaBasedSignalToNoise = 1;
    }

    var peakDetector = new PeakFinder();
    var peakData = new PeakDataContainer();
    peakData.SetData(xyData);

    // Default the Butterworth sampling frequency when it is effectively zero
    if (Math.Abs(peakFinderOptions.ButterworthSamplingFrequency) < float.Epsilon)
    {
        peakFinderOptions.ButterworthSamplingFrequency = 0.25f;
    }

    // Initial minimum peak width scales with log10 of the area-based S/N
    peakData.PeakWidthPointsMinimum = (int)Math.Round(peakFinderOptions.InitialPeakWidthScansScaler * Math.Log10(Math.Floor(areaBasedSignalToNoise)) * 10);

    // Assure that PeakWidthPointsMinimum is no greater than .InitialPeakWidthScansMaximum
    // and no greater than dataPointCountAboveThreshold/2 (rounded up)
    peakData.PeakWidthPointsMinimum = Math.Min(peakData.PeakWidthPointsMinimum, peakFinderOptions.InitialPeakWidthScansMaximum);
    peakData.PeakWidthPointsMinimum = Math.Min(peakData.PeakWidthPointsMinimum, (int)Math.Ceiling(dataPointCountAboveThreshold / 2.0));

    // Also cap at 80% of the data count, and floor at the global minimum width
    if (peakData.PeakWidthPointsMinimum > peakData.DataCount * 0.8)
    {
        peakData.PeakWidthPointsMinimum = (int)Math.Floor(peakData.DataCount * 0.8);
    }

    if (peakData.PeakWidthPointsMinimum < MINIMUM_PEAK_WIDTH)
    {
        peakData.PeakWidthPointsMinimum = MINIMUM_PEAK_WIDTH;
    }

    peakData.OriginalPeakLocationIndex = originalPeakLocationIndex;

    var peakFoundContainingOriginalPeakLocation = FindPeaksWork(
        peakDetector, scanNumbers, peakData, peakFinderOptions);

    smoothedYData = peakData.SmoothedYData.ToList();

    return(peakData.Peaks);
}
/// <summary>
/// Read Histogram from Log and find peak
/// </summary>
/// <param name="setting">Peak finder configuration; Channel selects which MCA histogram is analyzed.</param>
public void FillInitialPeakGuess(PeakFinderSetting setting)
{
    Debug.Assert(setting.Channel == 0 || setting.Channel == 1);

    // Pick the histogram for the requested channel.
    var histogram = setting.Channel == 0
        ? Log.MCAData.Channel0Hist.ToDoubleArray()
        : Log.MCAData.Channel1Hist.ToDoubleArray();

    var finder = new PeakFinder(histogram, setting.TriggerLevel, setting.ScalingPower, setting.SmoothWindow);

    // Store the finder on the matching channel slot. The explicit "else if"
    // mirrors the original: any other channel value assigns nothing.
    if (setting.Channel == 0)
    {
        Channel0PeakFinder = finder;
    }
    else if (setting.Channel == 1)
    {
        Channel1PeakFinder = finder;
    }
}