internal static void SetPeaks(ref PeakData peakData, ref clsPeak[] peaks)
{
    foreach (var pk in peaks)
    {
        peakData.AddPeak(new clsPeak(pk));
    }

    peakData.InitializeUnprocessedPeakData();
}
/*[gord] the following is currently unused. The idea was to give weighting to the algorithm so that
 * the user could favor certain fitting parameters (i.e. space between isotopomers) over others
 * public double FindIsotopicDist(PeakProcessing.PeakData peakData, int cs, PeakProcessing.Peak peak,
 *     IsotopeFitRecord isoRecord, double deleteIntensityThreshold, double spacingWeight, double spacingVar,
 *     double signalToNoiseWeight, double signalToNoiseThresh, double ratioWeight, double ratioThreshold,
 *     double fitWeight, double fitThreshold, bool debug = false)
 * {
 *     if (cs <= 0)
 *     {
 *         Environment.Exit(1);
 *     }
 *
 *     // Get theoretical distribution using Mercury algorithm
 *     double peakMass = (peak.mdbl_mz - ChargeCarrierMass) * cs;
 *     double resolution = peak.mdbl_mz / peak.mdbl_FWHM;
 *     GetIsotopeDistribution(peakMass, cs, resolution, out TheoreticalDistMzs,
 *         out TheoreticalDistIntensities,
 *         deleteIntensityThreshold, debug);
 *
 *     double theorMostAbundantPeakMz = IsotopeDistribution.mdbl_max_peak_mz;
 *     double delta = peak.mdbl_mz - theorMostAbundantPeakMz;
 *     double spacingScore = 0;
 *     double signalToNoiseScore = 0;
 *     double ratioScore = 0;
 *     double totalScore = 0;
 *     double maximumScore = spacingWeight + signalToNoiseWeight + ratioWeight + fitWeight;
 *
 *     // this will select peaks to the left until
 *     for (double dd = 1.003 / cs; dd <= 10.03 / cs; dd += 1.003 / cs)
 *     {
 *         double theorLeftPeakMz = 0;
 *         double theorLeftPeakIntensity = 0;
 *         PeakProcessing.Peak leftPeak;
 *         peakData.FindPeak(peak.mdbl_mz - dd - peak.mdbl_FWHM, peak.mdbl_mz - dd + peak.mdbl_FWHM, out leftPeak);
 *         // PeakProcessing.FindPeak
 *         IsotopeDistribution.FindPeak(theorMostAbundantPeakMz - dd - 0.2 / cs,
 *             theorMostAbundantPeakMz - dd + 0.2 / cs, out theorLeftPeakMz, out theorLeftPeakIntensity);
 *
 *         if (leftPeak.mdbl_mz > 0) // if there is an experimental peak...
 *         {
 *             // get spacing score
 *             spacingScore = spacingWeight * 1;
 *
 *             // get S/N score
 *             if (leftPeak.mdbl_SN > signalToNoiseThresh)
 *             {
 *                 signalToNoiseScore = signalToNoiseWeight * 1;
 *             }
 *
 *             // get Ratio score
 *             double leftPeakRatio = leftPeak.mdbl_intensity / peak.mdbl_intensity;
 *             double theorLeftPeakRatio = theorLeftPeakIntensity / 1;
 *             // TODO: need to check if this most abundant theor peak's intensity is 1
 *         }
 *
 *         // get Ratio score
 *     }
 *     // get S/N score
 *     // get Fit score
 *     // calculate maximum score
 *     // get overall score
 *     return 0;
 * }*/

public PeakData GetTheoreticalIsotopicDistributionPeakList(List<double> xvals, List<double> yvals)
{
    var peakList = new PeakData();
    var processor = new PeakProcessor();

    processor.SetOptions(0.5, 1, false, PeakFitType.Apex);
    processor.DiscoverPeaks(xvals, yvals, 0, 10000);

    var numPeaks = processor.PeakData.GetNumPeaks();

    for (var i = 0; i < numPeaks; i++)
    {
        processor.PeakData.GetPeak(i, out var peak);
        peakList.AddPeak(peak);
    }

    return peakList;
}
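// Hypothetical usage sketch (not part of the original code): one way the theoretical profile from an
// isotope-distribution calculation might be turned into a peak list via the method above. The instance
// name (fitter) and the sample m/z / intensity values are assumptions for illustration only.
//
//     var theorXVals = new List<double> { 1000.00, 1000.50, 1001.00 };   // theoretical m/z values
//     var theorYVals = new List<double> { 1.0, 0.55, 0.20 };             // corresponding relative intensities
//     var theorPeakList = fitter.GetTheoreticalIsotopicDistributionPeakList(theorXVals, theorYVals);
//     Console.WriteLine("Theoretical peaks found: " + theorPeakList.GetNumPeaks());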
/// <summary>
/// Calculates the delta m/z (referenced to the theoretical distribution) based on several of the observed peaks
/// </summary>
/// <param name="startingDelta"></param>
/// <param name="peakWidth"></param>
/// <param name="obsPeakData"></param>
/// <param name="theorPeakData"></param>
/// <param name="theorIntensityCutOff"></param>
/// <returns></returns>
public double CalculateDeltaFromSeveralObservedPeaks(double startingDelta, double peakWidth,
    PeakData obsPeakData, PeakData theorPeakData, double theorIntensityCutOff)
{
    // The idea is to use a selected number of theoretical peaks and, for each theoretical peak,
    // use the delta (m/z offset) info to find the observed peak and determine the delta value for that peak.
    // Accumulate the delta values in an array and then calculate a weighted average.
    var numTheorPeaks = theorPeakData.GetNumPeaks();
    var filteredTheorPeakData = new PeakData();

    // Filter the theoretical peak list by intensity
    var numFilteredTheorPeaks = 0;
    for (var i = 0; i < numTheorPeaks; i++)
    {
        theorPeakData.GetPeak(i, out var peak);

        if (peak.Intensity >= theorIntensityCutOff)
        {
            filteredTheorPeakData.AddPeak(peak);
            numFilteredTheorPeaks++;
        }
    }

    if (numFilteredTheorPeaks == 0)
    {
        return startingDelta;
    }

    var deltaArray = new double[numFilteredTheorPeaks];
    var intensityArray = new double[numFilteredTheorPeaks];
    double intensitySum = 0;

    for (var i = 0; i < numFilteredTheorPeaks; i++)
    {
        filteredTheorPeakData.GetPeak(i, out var theorPeak);

        var targetMzLower = theorPeak.Mz + startingDelta - peakWidth;
        var targetMzUpper = theorPeak.Mz + startingDelta + peakWidth;

        obsPeakData.FindPeak(targetMzLower, targetMzUpper, out var foundPeak);

        if (foundPeak.Mz > 0)
        {
            deltaArray[i] = foundPeak.Mz - theorPeak.Mz;
            intensityArray[i] = foundPeak.Intensity;
            intensitySum += foundPeak.Intensity;
        }
        else
        {
            deltaArray[i] = startingDelta;
            // Observed peak was not found; assign 0 intensity so it has no effect on the delta calculation
            intensityArray[i] = 0;
        }
    }

    if (intensitySum.Equals(0))
    {
        return startingDelta; // No observed peaks found at all; return the default
    }

    // Now perform an intensity-weighted average of the deltas
    double weightedDelta = 0;
    for (var i = 0; i < numFilteredTheorPeaks; i++)
    {
        weightedDelta += intensityArray[i] / intensitySum * deltaArray[i];
    }

    return weightedDelta;
}
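// Illustrative note (added here, not from the original source): the weighted delta computed above is
//     weightedDelta = sum_i (intensityArray[i] / intensitySum) * deltaArray[i]
// For example, with per-peak deltas of { 0.010, 0.014 } m/z and observed intensities of { 300, 100 },
// intensitySum = 400 and weightedDelta = (300/400)*0.010 + (100/400)*0.014 = 0.011 m/z,
// so stronger observed peaks pull the final offset toward their individual delta values.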