//add peak to an existing region
region addPeakToRegion(peak nextpeak, region openReg)
{
    openReg.name += "," + nextpeak.name;
    openReg.score++;
    openReg.peakList.Add(nextpeak);
    return(openReg);
}
/// <summary>
/// returns the distance between two consecutive peaks according to the parameters given by the user
/// true && true: summit of narrowed peak to summit of narrowed peak
/// true && false: summit of non-narrowed peak to summit of non-narrowed peak
/// false && true: start of narrowed peak to end of narrowed peak
/// false && false: start of non-narrowed peak to end of non-narrowed peak
/// </summary>
/// <param name="firstpeak">examined peak</param>
/// <param name="nextpeak">next peak</param>
/// <returns>the distance between the peaks based on the given parameters</returns>
int distanceOfConsecutivePeaks(peak firstpeak, peak nextpeak)
{
    if (distanceOption)
    {
        return((nextpeak.startIndex + nextpeak.summit) - (firstpeak.startIndex + firstpeak.summit));
    }
    else
    {
        return((nextpeak.startIndex + nextpeak.middle) - (firstpeak.startIndex + firstpeak.middle));
    }
}
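
// Illustrative sketch (not part of the original tool): shows the arithmetic that
// distanceOfConsecutivePeaks performs when distanceOption is true (summit to summit).
// The peak coordinates below are hypothetical.
int exampleSummitDistance()
{
    peak first = new peak() { startIndex = 100, summit = 25 };   // absolute summit at 125
    peak next  = new peak() { startIndex = 300, summit = 10 };   // absolute summit at 310
    return distanceOfConsecutivePeaks(first, next);              // 310 - 125 = 185 when distanceOption is true
}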
/// <summary>
/// check if the combined peak file is sorted properly
/// in general: distanceOption is true when we calculate the distance from summit to summit
/// and false when we calculate the distance from midpoint to midpoint
/// </summary>
/// <param name="examinedPeak">the peak whose position is examined</param>
/// <returns>an integer that denotes what to compare</returns>
int checkSorting(peak examinedPeak)
{
    if (distanceOption)
    {
        return(examinedPeak.startIndex + examinedPeak.summit);
    }
    else
    {
        return(examinedPeak.startIndex + examinedPeak.middle);
    }
}
//just open a new region
region openNewRegion(peak firstpeak, peak nextpeak, int cnt, char strd)
{
    return(new region()
    {
        chromosome = firstpeak.chromosome,
        regionName = "reg" + cnt,
        name = "reg" + cnt + "-" + firstpeak.name + "," + nextpeak.name,
        score = 2,
        strand = strd,
        peakList = new List<peak>() { firstpeak, nextpeak }
    });
}
// updates the pairwise TF co-occurrence counts (in both directions) when a peak of a TF
// that is not yet present in the open region arrives; the recorded value is the absolute
// distance between the new peak's summit position and the stored position of each TF already in the region
public void prepareNetwork(List<Tuple<string, int>> tfsInReg, peak newPeak)
{
    if (!tfsInReg.Any(x => x.Item1 == newPeak.TFname))
    {
        foreach (Tuple<string, int> s in tfsInReg)
        {
            if (_tfOccsperChr.ContainsKey(s.Item1))
            {
                if (_tfOccsperChr[s.Item1].ContainsKey(newPeak.TFname))
                {
                    _tfOccsperChr[s.Item1][newPeak.TFname].increaseCount(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2));
                }
                else
                {
                    _tfOccsperChr[s.Item1].Add(newPeak.TFname, new tfOccurrences(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2)));
                }
            }
            else
            {
                _tfOccsperChr.Add(s.Item1, new Dictionary<string, tfOccurrences>() { { newPeak.TFname, new tfOccurrences(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2)) } });
            }
            if (_tfOccsperChr.ContainsKey(newPeak.TFname))
            {
                if (_tfOccsperChr[newPeak.TFname].ContainsKey(s.Item1))
                {
                    _tfOccsperChr[newPeak.TFname][s.Item1].increaseCount(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2));
                }
                else
                {
                    _tfOccsperChr[newPeak.TFname].Add(s.Item1, new tfOccurrences(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2)));
                }
            }
            else
            {
                _tfOccsperChr.Add(newPeak.TFname, new Dictionary<string, tfOccurrences>() { { s.Item1, new tfOccurrences(Math.Abs((newPeak.startIndex + newPeak.summit) - s.Item2)) } });
            }
        }
    }
}
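
// Illustrative sketch (not part of the original tool): prepareNetwork receives the TFs already
// placed in the open region, each paired with its stored position, plus the incoming peak, and
// it updates the pairwise counts in both directions. All names and values below are hypothetical.
void exampleNetworkUpdate()
{
    List<Tuple<string, int>> tfsInRegion = new List<Tuple<string, int>>()
    {
        new Tuple<string, int>("CTCF", 1125),
        new Tuple<string, int>("STAT1", 1210)
    };
    peak incoming = new peak() { TFname = "FOXA1", startIndex = 1300, summit = 40 };   // summit position 1340
    prepareNetwork(tfsInRegion, incoming);   // records CTCF<->FOXA1 (distance 215) and STAT1<->FOXA1 (distance 130)
}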
//just to avoid some more lines of code
//called only from regionFinder and is used only when we detect a region that contains only one peak in it
region singleRegion(peak peakToAdd, int cnt)
{
    return(new region()
    {
        chromosome = peakToAdd.chromosome,
        regionName = "reg" + cnt,
        name = "reg" + cnt + "-" + peakToAdd.name,
        score = 1,
        peakList = new List<peak>() { peakToAdd },
        strand = strandSpecificity ? peakToAdd.strand : '.',
        startIndex = peakToAdd.startIndex,
        endIndex = peakToAdd.endIndex,
        pValue = peakToAdd.pValue
    });
}
/// <summary>
/// Prints the given peak.
/// </summary>
/// <param name="prntPk">the peak to print.</param>
/// <param name="output">the output stream where to print the peak.</param>
public void printPeak(peak prntPk, StreamWriter output)
{
    switch (numOfCols)
    {
        case 3:
            output.WriteLine(string.Format("{0}\t{1}\t{2}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex));
            break;
        case 4:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name));
            break;
        case 5:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score));
            break;
        case 6:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score, prntPk.strand));
            break;
        case 7:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score, prntPk.strand, prntPk.signalValue));
            break;
        case 8:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score, prntPk.strand, prntPk.signalValue, prntPk.pValue));
            break;
        case 9:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score, prntPk.strand, prntPk.signalValue, prntPk.pValue, prntPk.qValue));
            break;
        case 10:
            output.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}", prntPk.chromosome, prntPk.startIndex, prntPk.endIndex, prntPk.name, prntPk.score, prntPk.strand, prntPk.signalValue, prntPk.pValue, prntPk.qValue, prntPk.summit));
            break;
        default:
            exit("something went wrong while printing");
            break;
    }
}
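
// Illustrative sketch (not part of the original tool): printing a single peak as a 6-column
// BED line. The output path and peak values are hypothetical, and numOfCols is assumed to
// have been set to 6 beforehand.
void examplePrintPeak()
{
    peak p = new peak()
    {
        chromosome = "chr1", startIndex = 1000, endIndex = 1300,
        name = "CTCF_peak3", score = 850, strand = '+'
    };
    using (StreamWriter output = new StreamWriter("peaks_out.bed"))
    {
        printPeak(p, output);   // writes the six fields tab-separated on one line
    }
}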
/// <summary>
/// converts a loaded peak to a region. checks if the region name is in the correct format and adds all the peaks to the peak list
/// </summary>
/// <param name="newPeak">the loaded peak (that is actually a region)</param>
/// <param name="lineCounter">the line counter</param>
/// <returns>the loaded region</returns>
public region peakToRegion(peak newPeak, int lineCounter)
{
    List<Tuple<string, int>> tfsInReg;

    #region convert peak to region
    region newRegion = new region()
    {
        chromosome = newPeak.chromosome,
        startIndex = newPeak.startIndex,
        endIndex = newPeak.endIndex,
        name = newPeak.name,
        score = newPeak.score,
        strand = newPeak.strand,
        signalValue = newPeak.signalValue,
        pValue = newPeak.pValue,
        qValue = newPeak.qValue,
        summit = newPeak.summit,
        peakList = new List<peak>()
    };
    #endregion

    #region check if region name correct
    if (newRegion.name.Split('-').Length != 2 || !newRegion.name.Split('-').First().StartsWith("reg"))
    {
        exit("the name field of the input file in line " + lineCounter + " is in a wrong format");
    }
    else
    {
        newRegion.regionName = newRegion.name.Split('-').First();
    }
    #endregion

    #region add peaks to peak list
    tfsInReg = new List<Tuple<string, int>>();
    foreach (string sp in newRegion.name.Split('-').Last().Split(','))
    {
        if (sp.Split('_').Length < 2)
        {
            exit("the name field of the input file in line " + lineCounter + " is in a wrong format");
        }
        else
        {
            newRegion.peakList.Add(new peak()
            {
                chromosome = newRegion.chromosome,
                startIndex = 1,
                endIndex = 1,
                TFname = sp.Split('_').First(),
                peakName = sp.Split('_').Last(),
                name = sp,
                score = 0,
                strand = '.',
                signalValue = 0,
                pValue = -1,
                qValue = -1,
                summit = 1
            });

            #region add peak statistical data
            statistics.addToTfStatsPre(sp.Split('_').First(), new List<int>() { 0 }, 1);
            #endregion

            #region keep some stats for network
            if (!tfsInReg.Any(x => x.Item1 == sp.Split('_').First()))
            {
                foreach (Tuple<string, int> s in tfsInReg)
                {
                    if (tfOccs.ContainsKey(s.Item1))
                    {
                        if (tfOccs[s.Item1].ContainsKey(sp.Split('_').First())) { tfOccs[s.Item1][sp.Split('_').First()].increaseCount(0); }
                        else { tfOccs[s.Item1].Add(sp.Split('_').First(), new tfOccurrences(0)); }
                    }
                    else
                    {
                        tfOccs.Add(s.Item1, new Dictionary<string, tfOccurrences>() { { sp.Split('_').First(), new tfOccurrences(0) } });
                    }
                    if (tfOccs.ContainsKey(sp.Split('_').First()))
                    {
                        if (tfOccs[sp.Split('_').First()].ContainsKey(s.Item1)) { tfOccs[sp.Split('_').First()][s.Item1].increaseCount(0); }
                        else { tfOccs[sp.Split('_').First()].Add(s.Item1, new tfOccurrences(0)); }
                    }
                    else
                    {
                        tfOccs.Add(sp.Split('_').First(), new Dictionary<string, tfOccurrences>() { { s.Item1, new tfOccurrences(0) } });
                    }
                }
            }
            tfsInReg.Add(new Tuple<string, int>(sp.Split('_').First(), 0));
            #endregion
        }
    }
    #endregion

    return(newRegion);
}
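
// Illustrative sketch (not part of the original tool): the name field that peakToRegion expects,
// i.e. "<regionName>-<TF>_<peak>,<TF>_<peak>,...". The loaded peak below is hypothetical.
region exampleRegionFromLoadedPeak()
{
    peak loaded = new peak()
    {
        chromosome = "chr1", startIndex = 1000, endIndex = 1400,
        name = "reg12-CTCF_peak3,STAT1_peak8", score = 2, strand = '.',
        signalValue = 0, pValue = -1, qValue = -1, summit = 200
    };
    return peakToRegion(loaded, 1);   // yields a region with regionName "reg12" and two peaks in its peakList
}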
/// <summary>
/// construct a peak from an xml file element
/// </summary>
/// <param name="peakElement">xml peak element</param>
/// <param name="inputFile">the name of the input xml file</param>
/// <param name="elementCounter">region element counter</param>
/// <param name="attributeCounter">peak within region attribute counter</param>
/// <param name="lineCounter">the xml file line counter</param>
/// <returns>returns the constructed peak</returns>
public peak peakFromXML(XElement peakElement, string inputFile, int elementCounter, int attributeCounter, int lineCounter)
{
    int tmpInt;
    double tmpDbl;
    peak newPeak = new peak();

    #region test chromosome 0
    try
    {
        if (!chromosomeNamesAndLength.ContainsKey(peakElement.Attribute("chr").Value) && !ignoreChromosomeLength) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid chromosome entry in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
        else { newPeak.chromosome = peakElement.Attribute("chr").Value; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain chromosome information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test start position 1
    try
    {
        if (!int.TryParse(peakElement.Attribute("s").Value, out tmpInt)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid start index entry in element " + elementCounter + " attribute " + attributeCounter + ". Integer expected" + " (line " + lineCounter + ")"); }
        if (tmpInt < 0) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid start index entry in element " + elementCounter + " attribute " + attributeCounter + ". Positive expected" + " (line " + lineCounter + ")"); }
        newPeak.startIndex = tmpInt;
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain start index information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test end position 2
    try
    {
        if (!int.TryParse(peakElement.Attribute("e").Value, out tmpInt)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid end index entry in element " + elementCounter + " attribute " + attributeCounter + ". Integer expected" + " (line " + lineCounter + ")"); }
        if (tmpInt < 0) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid end index entry in element " + elementCounter + " attribute " + attributeCounter + ". Positive expected" + " (line " + lineCounter + ")"); }
        if (!ignoreChromosomeLength)
        {
            if (tmpInt > chromosomeNamesAndLength[newPeak.chromosome]) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid end index entry in element " + elementCounter + " attribute " + attributeCounter + ". Exceeding chromosome's limits" + " (line " + lineCounter + ")"); }
        }
        newPeak.endIndex = tmpInt;
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain end index information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test name 3
    try
    {
        newPeak.name = peakElement.Attribute("n").Value;
        if (newPeak.name.Split('_').Length > 1)
        {
            newPeak.TFname = newPeak.name.Split('_').First();
            newPeak.peakName = newPeak.name.Split('_').Last();
        }
        else
        {
            newPeak.TFname = "TF";
            newPeak.peakName = "peak" + Convert.ToString(lineCounter - ((2 * elementCounter) - 2));
        }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain name information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test score 4
    try
    {
        if (numOfCols > 4)
        {
            if (!double.TryParse(peakElement.Attribute("scr").Value, out tmpDbl)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid score entry in element " + elementCounter + " attribute " + attributeCounter + ". Integer expected" + " (line " + lineCounter + ")"); }
            newPeak.score = tmpDbl;
        }
        else { newPeak.score = 0; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain score information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test strand 5
    try
    {
        if (numOfCols > 5)
        {
            if (!strandSymbols.Exists(x => x == peakElement.Attribute("strd").Value[0])) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid strand entry in element " + elementCounter + " attribute " + attributeCounter + ". +/-/. expected" + " (line " + lineCounter + ")"); }
            newPeak.strand = peakElement.Attribute("strd").Value[0];
        }
        else { newPeak.strand = '.'; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain strand information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test signalValue 6
    try
    {
        if (numOfCols > 6)
        {
            if (!double.TryParse(peakElement.Attribute("sv").Value, out tmpDbl)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid signalValue entry in element " + elementCounter + " attribute " + attributeCounter + ". Numeric expected" + " (line " + lineCounter + ")"); }
            newPeak.signalValue = tmpDbl;
        }
        else { newPeak.signalValue = 0; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain signalValue information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test pValue 7
    try
    {
        if (numOfCols > 7)
        {
            if (!double.TryParse(peakElement.Attribute("pv").Value, out tmpDbl)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid pValue entry in element " + elementCounter + " attribute " + attributeCounter + ". Numeric expected" + " (line " + lineCounter + ")"); }
            newPeak.pValue = tmpDbl;
        }
        else { newPeak.pValue = -1; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain pValue information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test qValue 8
    try
    {
        if (numOfCols > 8)
        {
            if (!double.TryParse(peakElement.Attribute("qv").Value, out tmpDbl)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid qValue entry in element " + elementCounter + " attribute " + attributeCounter + ". Numeric expected" + " (line " + lineCounter + ")"); }
            newPeak.qValue = tmpDbl;
        }
        else { newPeak.qValue = -1; }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain qValue information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    #region test summit 9
    try
    {
        if (numOfCols > 9)
        {
            if (!int.TryParse(peakElement.Attribute("sm").Value, out tmpInt)) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid summit entry in element " + elementCounter + " attribute " + attributeCounter + ". Integer expected" + " (line " + lineCounter + ")"); }
            if (tmpInt < 0 && tmpInt != -1) { exit("the file " + inputFile.Split(OSseparator).Last() + " has an invalid summit entry in element " + elementCounter + " attribute " + attributeCounter + ". Positive or -1 expected" + " (line " + lineCounter + ")"); }
            newPeak.summit = tmpInt;
        }
        else { newPeak.summit = Convert.ToInt32((newPeak.endIndex - newPeak.startIndex) / 2); }
    }
    catch (Exception) { exit("the file " + inputFile.Split(OSseparator).Last() + " does not contain summit information in element " + elementCounter + " attribute " + attributeCounter + " (line " + lineCounter + ")"); }
    #endregion

    return(newPeak);
}
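
// Illustrative sketch (not part of the original tool): the attribute names peakFromXML reads
// (chr, s, e, n, scr, strd, sv, pv, qv, sm). The element content and counters are hypothetical,
// and numOfCols is assumed to have been set to 10 so that all attributes are parsed.
peak examplePeakFromXmlElement()
{
    XElement el = new XElement("peak",
        new XAttribute("chr", "chr1"), new XAttribute("s", 1000), new XAttribute("e", 1300),
        new XAttribute("n", "CTCF_peak3"), new XAttribute("scr", 850), new XAttribute("strd", "+"),
        new XAttribute("sv", 12.5), new XAttribute("pv", 0.0001), new XAttribute("qv", 0.001),
        new XAttribute("sm", 150));
    return peakFromXML(el, "example.xml", 1, 1, 1);
}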
// absolute summit position of the peak (start index plus summit offset)
int peakStartPlusSummit(peak newPeak)
{
    return(newPeak.startIndex + newPeak.summit);
}

// overload with the same result; narrowThePeak and summitWindow are accepted but not used,
// presumably so the signature matches callers that also pass the narrowing options
int peakStartPlusSummit(peak newPeak, bool narrowThePeak, int summitWindow)
{
    return(newPeak.startIndex + newPeak.summit);
}
/// <summary>
/// Creates a peak instance from a bed file line.
/// </summary>
/// <returns>The generated peak.</returns>
/// <param name="line">bed format line.</param>
/// <param name="numOfCols">number of columns.</param>
/// <param name="peakFile">the name of the input peak file. Used for error checking purposes.</param>
/// <param name="lineCounter">line counter of the peak file. Used for error checking purposes.</param>
/// <param name="TFname">transcription factor name.</param>
public peak peakFromLine(string line, int numOfCols, string peakFile, int lineCounter, string TFname)
{
    int tmpInt;
    double tmpDbl;
    peak newPeak = new peak();
    List<string> breakAtTab = line.Split('\t').ToList();

    #region check num of cols
    if (breakAtTab.Count != numOfCols) { exit("the file " + peakFile.Split(OSseparator).Last() + " has a different number of columns in line " + lineCounter); }
    #endregion

    #region test chromosome 0
    if (!chromosomeNamesAndLength.ContainsKey(breakAtTab[0]) && !ignoreChromosomeLength) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 1 line " + lineCounter); }
    else { newPeak.chromosome = breakAtTab[0]; }
    #endregion

    #region test start position 1
    if (!int.TryParse(breakAtTab[1], out tmpInt)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 2 line " + lineCounter + ". Integer expected"); }
    if (tmpInt < 0) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 2 line " + lineCounter + ". Positive expected"); }
    newPeak.startIndex = tmpInt;
    #endregion

    #region test end position 2
    if (!int.TryParse(breakAtTab[2], out tmpInt)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 3 line " + lineCounter + ". Integer expected"); }
    if (tmpInt < 0) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 3 line " + lineCounter + ". Positive expected"); }
    if (!ignoreChromosomeLength)
    {
        if (tmpInt > chromosomeNamesAndLength[breakAtTab[0]]) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 3 line " + lineCounter + ". Exceeding chromosome's limits"); }
    }
    newPeak.endIndex = tmpInt;
    #endregion

    #region check for equality between start and end
    if (newPeak.startIndex == newPeak.endIndex) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in line " + lineCounter + ". Start is equal to end index!"); }
    #endregion

    #region test name 3
    if (mode == "regions" || mode == "tfNet" || mode == "peaks")
    {
        newPeak.peakName = "peak" + lineCounter;
        newPeak.TFname = TFname;
        if (numOfCols > 3) { newPeak.name = newPeak.TFname + "_" + newPeak.peakName + "_" + breakAtTab[3]; }
        else { newPeak.name = newPeak.TFname + "_" + newPeak.peakName; }
        if (mode == "regions")
        {
            if (breakAtTab[3].Split('_').Length > 1)
            {
                newPeak.TFname = breakAtTab[3].Split('_').First();
                newPeak.peakName = breakAtTab[3].Split('_').ElementAt(1);
                newPeak.name = newPeak.TFname + "_" + newPeak.peakName;
            }
        }
    }
    else { newPeak.name = breakAtTab[3]; }
    #endregion

    #region test score 4
    if (numOfCols > 4)
    {
        if (!double.TryParse(breakAtTab[4], out tmpDbl)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 5 line " + lineCounter + ". Integer expected"); }
        if (filterByScore != 0 && tmpDbl < filterByScore) { return(null); }
        newPeak.score = tmpDbl;
    }
    else { newPeak.score = 0; }
    #endregion

    #region test strand 5
    if (numOfCols > 5)
    {
        if (breakAtTab[5].Length == 0) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an empty element in column 6 line " + lineCounter + ". +/-/. expected"); }
        if (!strandSymbols.Exists(x => x == breakAtTab[5][0])) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 6 line " + lineCounter + ". +/-/. expected"); }
        newPeak.strand = breakAtTab[5][0];
    }
    else { newPeak.strand = '.'; }
    #endregion

    #region test signalValue 6
    if (numOfCols > 6)
    {
        if (!double.TryParse(breakAtTab[6], out tmpDbl)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 7 line " + lineCounter + ". Numeric expected"); }
        if (!(mode == "filter" || mode == "network"))
        {
            if (filterBySV != 0 && tmpDbl < filterBySV) { return(null); }
        }
        newPeak.signalValue = tmpDbl;
    }
    else { newPeak.signalValue = 0; }
    #endregion

    #region test pValue 7
    if (numOfCols > 7)
    {
        if (!double.TryParse(breakAtTab[7], out tmpDbl)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 8 line " + lineCounter + ". Numeric expected"); }
        if (!(mode == "filter" || mode == "network"))
        {
            if (filterByPvalue != 0 && tmpDbl > filterByPvalue) { return(null); }
            if (tmpDbl == -1.0 && !noValueAssigned) { return(null); }
        }
        newPeak.pValue = tmpDbl;
        //if (tmpDbl != -1 && regionPvalue)
        //{
        //    if (minusLog10)
        //    {
        //        newPeak.pValue = Math.Pow(10, -tmpDbl);
        //    }
        //    else
        //    {
        //        newPeak.pValue = tmpDbl;
        //    }
        //}
        //else
        //{
        //    newPeak.pValue = tmpDbl;
        //}
    }
    else { newPeak.pValue = -1; }
    #endregion

    #region test qValue 8
    if (numOfCols > 8)
    {
        if (!double.TryParse(breakAtTab[8], out tmpDbl)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 9 line " + lineCounter + ". Numeric expected"); }
        if (!(mode == "filter" || mode == "network"))
        {
            if (filterByQvalue != 0 && tmpDbl > filterByQvalue) { return(null); }
            if (tmpDbl == -1.0 && !noValueAssigned) { return(null); }
        }
        newPeak.qValue = tmpDbl;
        //if (tmpDbl != -1 && regionPvalue)
        //{
        //    if (minusLog10)
        //    {
        //        newPeak.qValue = Math.Pow(10, -tmpDbl);
        //    }
        //    else
        //    {
        //        newPeak.qValue = tmpDbl;
        //    }
        //}
        //else
        //{
        //    newPeak.qValue = tmpDbl;
        //}
    }
    else { newPeak.qValue = -1; }
    #endregion

    #region test summit 9
    if (numOfCols > 9)
    {
        if (!int.TryParse(breakAtTab[9], out tmpInt)) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 10 line " + lineCounter + ". Integer expected"); }
        if (tmpInt < 0 && tmpInt != -1) { exit("the file " + peakFile.Split(OSseparator).Last() + " has an invalid element in column 10 line " + lineCounter + ". Positive expected"); }
        if (!(mode == "filter" || mode == "network"))
        {
            if (!unknownSummit && tmpInt < 1) { return(null); }
        }
        if (tmpInt == -1) { newPeak.summit = narrowThePeak ? summitWindow : middlePoint(newPeak.startIndex, newPeak.endIndex); }
        else { newPeak.summit = tmpInt; }
    }
    else if (!(mode == "filter" || mode == "network"))
    {
        if (!unknownSummit) { return(null); }
    }
    else { newPeak.summit = narrowThePeak ? summitWindow : middlePoint(newPeak.startIndex, newPeak.endIndex); }
    #endregion

    newPeak.middle = narrowThePeak ? summitWindow : middlePoint(newPeak.startIndex, newPeak.endIndex);
    newPeak.startIndex = narrowThePeak ? newStartIndex(newPeak.startIndex, newPeak.summit, summitWindow) : newPeak.startIndex;
    newPeak.endIndex = narrowThePeak ? newEndIndex(newPeak.startIndex, newPeak.summit, summitWindow, newPeak.endIndex, newPeak.chromosome) : newPeak.endIndex;
    return(newPeak);
}
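
// Illustrative sketch (not part of the original tool): feeding one tab-separated, 10-column
// BED-style line to peakFromLine. The line content, file name and TF name are hypothetical.
peak examplePeakFromBedLine()
{
    string line = "chr1\t1000\t1300\tpeak3\t850\t+\t12.5\t0.0001\t0.001\t150";
    return peakFromLine(line, 10, "CTCF_peaks.bed", 1, "CTCF");
}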