/// <summary>
/// Computes the difference spectrum between the reference peak list (Peaks) and the
/// sample peak list (SamplePeaks), publishes it via ResultSpectrum and PlotPoints,
/// then scans the difference for local minima that follow a sufficiently large
/// maximum and appends those to Result. Failures are reported via a message box.
/// </summary>
public void AnalyzeSample() {
    try {
        ResultSpectrum.Clear();
        PlotPoints.Clear();
        // Pair reference and sample peaks positionally; assumes both lists are
        // aligned pixel-by-pixel -- TODO confirm with the acquisition code.
        foreach (var peaks in Peaks.Zip(SamplePeaks, Tuple.Create)) {
            var newspec = new AnalyzedSpectrum();
            // Absolute intensity difference at this pixel.
            newspec.PeakIntensity = Math.Abs(peaks.Item2.PeakIntensity - peaks.Item1.PeakIntensity);
            newspec.PeakPixel = peaks.Item1.PeakPixel;
            ResultSpectrum.Add(newspec);
            var _point = new DataPoint(newspec.PeakPixel, newspec.PeakIntensity);
            PlotPoints.Add(_point);
        }
        firstAnalyze = true; //reset flag allows to make next measure
        //select and add extrema to result list
        // x0 tracks the running maximum; y0 tracks the lowest value seen since
        // that maximum (the candidate minimum).
        double x0 = 0;
        double y0 = 0;
        foreach (var resspec in ResultSpectrum) {
            //1. find maximum
            if (resspec.PeakIntensity > x0) {
                // Still ascending: advance the maximum and reset the minimum tracker.
                x0 = resspec.PeakIntensity;
                y0 = x0;
            } else {
                //2. find minimum
                if (resspec.PeakIntensity > y0) {
                    // The curve turned upward again, so the previous sample was a
                    // local minimum.
                    //3. Add it to collection
                    var diffirence = x0 - y0;
                    // 30 appears to be an empirical noise threshold on the
                    // peak-to-valley drop -- TODO confirm.
                    if (diffirence > 30) {
                        var s = new AnalyzedSpectrum();
                        s.PeakIntensity = y0;
                        // PeakPixel - 1: assumes consecutive pixel numbering, so the
                        // minimum sits one pixel before the current sample -- verify.
                        s.PeakPixel = resspec.PeakPixel - 1;
                        Result.Add(s);
                    }
                    //reset variables
                    x0 = resspec.PeakIntensity;
                    y0 = x0;
                } else {
                    // Still descending: keep lowering the candidate minimum.
                    y0 = resspec.PeakIntensity;
                }
            }
        }
    } catch (Exception e) {
        // NOTE(review): raw exception text is shown to the user; consider logging instead.
        MessageBox.Show(e.ToString());
    }
}
/// <summary>
/// Verifies that Peaks.Count reports the expected number of blocks for the given array.
/// </summary>
public void ShouldCountBlocksOfA(int[] A, int expected)
{
    // Arrange
    var sut = new Peaks();

    // Act
    var blockCount = sut.Count(A);

    // Assert
    Assert.AreEqual(expected, blockCount);
}
/// <summary>
/// Builds the tracer-count distribution for the given peaks. Returns null (with
/// null out-parameters) when the peaks have no children. When more than two
/// children were produced, also derives the precursor enrichment and turnover.
/// </summary>
public PeptideDistribution ComputeTracerAmounts(Peaks peaks, out IList <double> observedIntensities, out IDictionary <TracerFormula, IList <double> > predictedIntensities)
{
    if (peaks.GetChildCount() == 0)
    {
        observedIntensities = null;
        predictedIntensities = null;
        return null;
    }

    observedIntensities = peaks.GetAverageIntensities();
    var distribution = new PeptideDistribution(this, PeptideQuantity.tracer_count) { Parent = this };
    PeptideFileAnalysis.TurnoverCalculator.GetTracerAmounts(
        distribution, peaks.GetAverageIntensitiesExcludedAsNaN(), out predictedIntensities);

    if (distribution.ChildCount > 2)
    {
        double turnover;
        IDictionary <TracerFormula, double> bestMatch;
        distribution.PrecursorEnrichmentFormula =
            PeptideFileAnalysis.TurnoverCalculator.ComputePrecursorEnrichmentAndTurnover(
                distribution.ToDictionary(), out turnover, out bestMatch);
        if (distribution.PrecursorEnrichmentFormula != null)
        {
            // Formula values are percentages; store the total as a fraction.
            distribution.PrecursorEnrichment = distribution.PrecursorEnrichmentFormula.Values.Sum() / 100.0;
        }
        distribution.Turnover = turnover;
    }
    return distribution;
}
/// <summary>
/// Checks Peaks.solution against the expected result for the supplied input.
/// </summary>
public void test_methodName_withCertainState_shouldDoSomething(int[] given, int expected)
{
    // Arrange
    var sut = new Peaks();

    // Act
    var result = sut.solution(given);

    // Assert
    Assert.AreEqual(expected, result);
}
/// <summary>
/// Formats this deconvoluted envelope as one tab-separated output row.
/// Columns: file, scan, retention time, monoisotopic mass (written twice),
/// charge, peak m/z list, first-peak mass, S/N, delta score, log summed
/// intensity, total scan intensity, and a per-peak "log2 intensity error;
/// baseline-normalized intensity" list.
/// </summary>
public string ToOutputString()
{
    var sb = new StringBuilder();

    sb.Append(SpectraFileName).Append('\t');
    sb.Append(OneBasedScan).Append('\t');
    sb.Append(RetentionTime).Append('\t');
    sb.Append(MonoisotopicMass).Append('\t');
    // NOTE(review): MonoisotopicMass is intentionally written a second time to
    // preserve the original column layout -- confirm whether this column was
    // meant to carry a different value.
    sb.Append(MonoisotopicMass).Append('\t');
    sb.Append(Charge).Append('\t');
    sb.Append(string.Join(",", Peaks.Select(p => p.ExperimentalMz.ToString("F4")))).Append('\t');
    sb.Append(Peaks.First().ExperimentalMz.ToMass(Charge)).Append('\t');
    sb.Append(SignalToNoise).Append('\t');
    sb.Append(DeltaScore).Append('\t');
    sb.Append(Math.Log(Peaks.Sum(p => p.ExperimentalIntensity))).Append('\t');
    sb.Append(TotalScanDeconvolutedIntensity).Append('\t');
    sb.Append(string.Join(",", Peaks.Select(p =>
        (Math.Log(p.ExperimentalIntensity, 2) - Math.Log(p.TheoreticalIntensity, 2))
        + ";"
        + (p.ExperimentalIntensity - this.Baseline) / this.NoiseFwhm)));

    return sb.ToString();
}
/// <summary>Sample array from the task statement: expect 3 blocks.</summary>
public void GetBlockNumber_Sample_3()
{
    // Arrange
    var data = new[] { 1, 2, 3, 4, 3, 4, 1, 2, 3, 4, 6, 2 };
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(3);
}
/// <summary>
/// An 11-element array: a prime length cannot be split evenly, so the only
/// possible block count is 1.
/// </summary>
public void GetBlockNumber_PrimeLength_1()
{
    // Arrange
    var data = new[] { 1, 2, 3, 4, 3, 4, 1, 2, 3, 4, 6 };
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(1);
}
/// <summary>An empty array contains no peaks, so no blocks are possible.</summary>
public void GetBlockNumber_Empty_0()
{
    // Arrange
    var data = Array.Empty<int>();
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(0);
}
/// <summary>A single element can never be a peak, so expect 0 blocks.</summary>
public void GetBlockNumber_OneItem_0()
{
    // Arrange
    var data = new[] { 1 };
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(0);
}
/// <summary>
/// A constant array of any length has no strict peaks, so expect 0 blocks.
/// </summary>
public void GetBlockNumber_AllItemsTheSame_0(int arrayLength)
{
    // Arrange: arrayLength copies of the same value.
    var data = Enumerable.Repeat(1, arrayLength).ToArray();
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(0);
}
/// <summary>
/// Assigns each peak a theoretical intensity by distributing the total observed
/// experimental intensity according to that peak's share of the total
/// theoretical normalized abundance.
/// </summary>
private void CalculateIntensityErrors()
{
    double totalAbundance = Peaks.Sum(p => p.TheoreticalNormalizedAbundance);
    double totalExperimental = Peaks.Sum(p => p.ExperimentalIntensity);

    foreach (var peak in Peaks)
    {
        // This peak's fraction of the whole envelope, scaled to the observed total.
        double share = peak.TheoreticalNormalizedAbundance / totalAbundance;
        peak.TheoreticalIntensity = share * totalExperimental;
    }
}
/// <summary>Sample array from the task statement: expect 3 blocks.</summary>
public void Basic()
{
    // Arrange
    int[] data = { 1, 2, 3, 4, 3, 4, 1, 2, 3, 4, 6, 2 };
    var sut = new Peaks();

    // Act
    var actual = sut.Solution(data);

    // Assert
    Assert.AreEqual(3, actual);
}
/// <summary>A flat all-zero array has no peaks, so expect 0 blocks.</summary>
public void WithZeros()
{
    // Arrange
    int[] data = { 0, 0, 0, 0, 0, 0, 0 };
    var sut = new Peaks();

    // Act
    var actual = sut.Solution(data);

    // Assert
    Assert.AreEqual(0, actual);
}
/// <summary>A strictly increasing array has no peaks, so expect 0 blocks.</summary>
public void WithoutPeek()
{
    // Arrange
    int[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    var sut = new Peaks();

    // Act
    var actual = sut.Solution(data);

    // Assert
    Assert.AreEqual(0, actual);
}
/// <summary>
/// A 13-element array: a prime length cannot be split evenly, so the only
/// possible block count is 1.
/// </summary>
public void WithPrimeNumber()
{
    // Arrange
    int[] data = { 1, 2, 3, 4, 3, 4, 1, 2, 3, 4, 6, 2, 4 };
    var sut = new Peaks();

    // Act
    var actual = sut.Solution(data);

    // Assert
    Assert.AreEqual(1, actual);
}
/// <summary>A single element can never be a peak, so expect 0 blocks.</summary>
public void WithSingleElement()
{
    // Arrange
    int[] data = { 1 };
    var sut = new Peaks();

    // Act
    var actual = sut.Solution(data);

    // Assert
    Assert.AreEqual(0, actual);
}
/// <summary>
/// Large V-shaped input ([size..1] then [1..size]) with the first and last
/// pairs swapped to create exactly one peak near each end: expect 2 blocks.
/// </summary>
public void GetBlockNumber_BigDecreaseIncrease_2()
{
    // Arrange
    const int size = 55_000;
    var data = Enumerable.Range(1, size).Reverse()
        .Concat(Enumerable.Range(1, size))
        .ToArray();
    // Perturb both ends so each half contains a local maximum.
    Swap(data, 0, 1);
    Swap(data, data.Length - 1, data.Length - 2);
    var sut = new Peaks();

    // Act
    var result = sut.GetBlockNumber(data);

    // Assert
    result.Should().Be(2);
}
/// <summary>
/// Value equality over all persisted fields of the analysis data.
/// </summary>
protected bool Equals(PeptideFileAnalysisData other)
{
    // Cheap scalar comparisons first, then the reference-typed members via
    // the null-tolerant static Equals.
    return MsDataFileId == other.MsDataFileId
           && ChromatogramStartTime.Equals(other.ChromatogramStartTime)
           && ChromatogramEndTime.Equals(other.ChromatogramEndTime)
           && AutoFindPeak.Equals(other.AutoFindPeak)
           && Peaks.Equals(other.Peaks)
           && ChromatogramSetId == other.ChromatogramSetId
           && Equals(PsmTimes, other.PsmTimes)
           && Equals(ChromatogramSet, other.ChromatogramSet)
           && Equals(ValidationStatus, other.ValidationStatus)
           && Equals(Note, other.Note);
}
/// <summary>
/// Combines the persisted fields into a hash code using the conventional
/// multiply-by-397 scheme. ValidationStatus and Note are intentionally left
/// out, matching the original implementation.
/// </summary>
public override int GetHashCode()
{
    unchecked
    {
        var hash = MsDataFileId.GetHashCode();
        hash = (hash * 397) ^ ChromatogramStartTime.GetHashCode();
        hash = (hash * 397) ^ ChromatogramEndTime.GetHashCode();
        hash = (hash * 397) ^ AutoFindPeak.GetHashCode();
        hash = (hash * 397) ^ Peaks.GetHashCode();
        hash = (hash * 397) ^ ChromatogramSetId.GetHashCode();
        // Null members contribute 0, as in the original ternary form.
        hash = (hash * 397) ^ (PsmTimes?.GetHashCode() ?? 0);
        hash = (hash * 397) ^ (ChromatogramSet?.GetHashCode() ?? 0);
        return hash;
    }
}
/// <summary>
/// Runs both quantitation passes over the given peaks and attaches whichever
/// distributions could be computed as children.
/// </summary>
public void Calculate(Peaks peaks)
{
    IList <double> observedIntensities;

    // Pass 1: precursor enrichment.
    IDictionary <TracerPercentFormula, IList <double> > tracerPercentPredictedIntensities;
    var precursorEnrichment = ComputePrecursorEnrichments(
        peaks, out observedIntensities, out tracerPercentPredictedIntensities);
    if (precursorEnrichment != null)
    {
        AddChild(PeptideQuantity.precursor_enrichment, precursorEnrichment);
    }

    // Pass 2: tracer count.
    IDictionary <TracerFormula, IList <double> > tracerPredictedIntensities;
    var tracerCount = ComputeTracerAmounts(
        peaks, out observedIntensities, out tracerPredictedIntensities);
    if (tracerCount != null)
    {
        AddChild(PeptideQuantity.tracer_count, tracerCount);
    }
}
/// <summary>
/// Builds the precursor-enrichment distribution for the given peaks, or returns
/// null (with null out-parameters) when the peaks have no children.
/// </summary>
public PeptideDistribution ComputePrecursorEnrichments(
    Peaks peaks, out IList <double> observedIntensities,
    out IDictionary <TracerPercentFormula, IList <double> > predictedIntensities)
{
    // NOTE(review): the tracer-count counterpart tests peaks.GetChildCount();
    // confirm that ChildCount and GetChildCount() agree.
    if (peaks.ChildCount == 0)
    {
        observedIntensities = null;
        predictedIntensities = null;
        return null;
    }

    observedIntensities = peaks.GetAverageIntensities();
    var distribution = new PeptideDistribution(this, PeptideQuantity.precursor_enrichment) { Parent = this };
    PeptideFileAnalysis.TurnoverCalculator.GetEnrichmentAmounts(
        distribution,
        peaks.GetAverageIntensitiesExcludedAsNaN(),
        PeptideFileAnalysis.PeptideAnalysis.IntermediateLevels,
        out predictedIntensities);
    return distribution;
}
/// <summary>
/// Command-line entry for peak detection: reads "&lt;name&gt;.txt" (lines of
/// "&lt;x&gt; &lt;y&gt;" pairs), extracts the second column as the signal, and writes
/// detected peaks to "&lt;name&gt;_peaks".
/// </summary>
/// <param name="args">
/// args[1]: input file base name (default "data/xResult");
/// args[2]: epsilon (default 50); args[3]: threshold (default 300).
/// </param>
public static void GetPeaks(string[] args)
{
    var fileName = args.Length < 2 ? "data/xResult" : args[1];
    var epsilon = args.Length < 3 ? 50 : Int32.Parse(args[2]);
    var threshold = args.Length < 4 ? 300 : Int32.Parse(args[3]);

    // Each input line is "<x> <y>"; keep only the second column.
    // (Was an Aggregate-into-List; a straight Select is the idiomatic map.)
    var data = File.ReadAllLines(String.Format("{0}.txt", fileName))
        .Select(line => Int32.Parse(line.Split(' ')[1]))
        .ToArray();

    var peaksFile = String.Format("{0}_peaks", fileName);
    Peaks.GetPeaks(peaksFile, data, epsilon, threshold);
}
/// <summary>
/// Generates a random set of peaks from the ranges configured in the game
/// manager. The RNG is seeded from config.seed, so the same configuration
/// always yields the same layout.
/// </summary>
public void RandomlyCreatePeaks()
{
    var cfg = GM_.Instance.config;

    // Seed the RNG for reproducible peak placement.
    Random.InitState(cfg.seed);

    // Number of peaks to place, drawn from the configured range.
    int peakCount = Random.Range((int)cfg.random_peaks_values.peaks_amount.x,
                                 (int)cfg.random_peaks_values.peaks_amount.y);

    // Discard any previously generated peaks.
    peaks.Clear();

    for (int i = 0; i < peakCount; i++)
    {
        // Random radius and position within the configured city limits.
        Peaks peak = new Peaks();
        peak.radius = Random.Range(cfg.random_peaks_values.radius_size_range.x,
                                   cfg.random_peaks_values.radius_size_range.y);
        float xPos = Random.Range(-cfg.city_limits_x, cfg.city_limits_x);
        float yPos = Random.Range(-cfg.city_limits_z, cfg.city_limits_z);
        peak.centre_position = new Vector2(xPos, yPos);
        peaks.Add(peak);
    }
}
/// <summary>
/// Demo entry point: echoes command-line arguments in upper case, then
/// exercises the Customer, Article, OrderHeader, Customers, Articles and Cart
/// classes milestone by milestone.
/// </summary>
static void Main(string[] args)
{
    // Echo every command-line argument in upper case.
    for (int i = 0; i < args.Length; i++)
    {
        Console.WriteLine(args[i].ToUpper());
    }

    // Customer demo: instance logins plus a static member call.
    Customer firstCustomer = new Customer("Francesco", "Bacci", "*****@*****.**");
    firstCustomer.Login();
    Customer secondCustomer = new Customer("Mario", "Rossi", "*****@*****.**");
    secondCustomer.Login();
    Customer.SaySomething();

    // Article CRUD demo plus property get/set.
    Article article = new Article("Face mask", 12.44);
    article.List();
    article.Retrieve(0);
    article.Destroy(0);
    Console.WriteLine(article.Description); // GET
    article.Description = "Notebook";       // SET
    Console.WriteLine($"{article.Description} price is : {article.Price}");

    // OrderHeader CRUD demo.
    OrderHeader orderHeader = new OrderHeader(20, DateTime.Now);
    orderHeader.List();
    orderHeader.Retrieve(0);
    orderHeader.Destroy(0);
    Console.WriteLine($"Date: {orderHeader.Date}, UserID: {orderHeader.UserId}");

    // Mountain exercise #8: print the first column of each ';'-separated record.
    Peaks.FirstColumn(";", "Monte Falco, 1658, Parco Foreste Casentinesi ; Monte Falterona, 1654, Parco Foreste Casentinesi; Monte Fumaiolo, 1407, Appennino Tosco Emiliano");

    // Milestone 1: create customers and collect them in a list.
    Customer User1 = new Customer("user1", "surname1", "*****@*****.**");
    Customer User2 = new Customer("user2", "surname2", "*****@*****.**");
    Customers insert = new Customers();
    insert.AddCustomer(User1);
    insert.AddCustomer(User2);

    // Build a list of articles and display them all.
    Article scarpe = new Article("scarpe blu", 55.2);
    Article felpa = new Article("felpa bianca", 88.1);
    Article maglia = new Article("maglia nera", 24.9);
    Articles CreateList = new Articles();
    CreateList.addToListOfArticles(scarpe);
    CreateList.addToListOfArticles(felpa);
    CreateList.addToListOfArticles(maglia);
    CreateList.List();

    // Milestone 2: search an article by description and print its price.
    // BUGFIX: the description was misspelled "felpa binca", so the lookup could
    // never match the "felpa bianca" article added above.
    Console.WriteLine(CreateList.search("felpa bianca").Price);

    // Add the searched article to a cart.
    var myarticle = CreateList.search("felpa bianca");
    Cart Cart = new Cart(myarticle, User1, 20);

    // Milestone 3: multiple articles in a cart, then compute the total.
    Cart newCart1 = new Cart(scarpe, User1, 10);
    Cart newCart2 = new Cart(maglia, User2, 20);
    newCart1.addMultiplyArticle(scarpe);
    newCart1.addMultiplyArticle(maglia);
    newCart1.Total();
}
/// <summary> /// run clustering, Rescue PSMs, update idpDB /// </summary> private void RescuePSMsByClustering() { DateTime startTime = DateTime.Now; reportProgressDelegate reportProgress = new reportProgressDelegate(setProgress); reportStatusDelegate reportStatus = new reportStatusDelegate(setStatus); string database = session.Connection.GetDataSource(); logFile = Path.ChangeExtension(database, ".log.txt"); string config = string.Format("Parameters:\r\n" + "PrecursorMZTol: {0} \r\n" + "FragmentMZTol: {1} \r\n" + "Similarity Threshold >= {2} \r\n" + "Rank <= {3} \r\n" + "Cluster Size >= {4} \r\n" + "Search Scores: {5}{6}{7};{8}{9}{10};{11}{12}{13} \r\n\r\n", precursorMzTolerance, fragmentMzTolerance, similarityThreshold, maxRank, minClusterSize, searchScore1Name, searchScore1Order, searchScore1Threshold, searchScore2Name, searchScore2Order, searchScore2Threshold, searchScore3Name, searchScore3Order, searchScore3Threshold); reportStatus(config); //if (writeLog) // File.WriteAllText(logFile, config); /* * back up original idpDB */ if (backupDB) { string dbBackupFile = Path.ChangeExtension(database, ".backup.idpDB"); reportStatus(string.Format("Backing up idpDB to {0} ... ", dbBackupFile)); reportProgress(-1, "Backing up idpDB"); File.Copy(database, dbBackupFile, true); reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); } //reportStatus("Dropping filters... 
\r\n"); // basicDataFilter.DropFilters(session); //// this will drop all filtered tables and rename unfiltered tables //basicDataFilter.ApplyBasicFilters(session); reportStatus("Querying spectra..."); reportProgress(-1, "Querying spectra..."); IList<object[]> queryRows; lock (session) //// SQL query to retrieve spectrum info for unfiltered psm, filter query results by rank1 search score // queryRows = session.CreateSQLQuery(@"SELECT s.Id, source.Name, NativeID, PrecursorMZ // FROM Spectrum s // JOIN SpectrumSource source ON s.Source = source.Id // JOIN UnfilteredPeptideSpectrumMatch psm ON s.Id = psm.Spectrum AND psm.Rank = 1 // JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId // JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id // WHERE (scoreName.Name = " + "'" + searchScore1Name + "'" + " AND psmScore.Value " + searchScore1Order + searchScore1Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore2Name + "'" + " AND psmScore.Value " + searchScore2Order + searchScore2Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore3Name + "'" + " AND psmScore.Value " + searchScore3Order + searchScore3Threshold.ToString() + ")" + // " GROUP BY s.Id" // ).List<object[]>(); //// SQL query to retrieve spectrum info for unfiltered psm that map to identified peptide, filter by search score queryRows = session.CreateSQLQuery(@"SELECT s.Id, source.Name, NativeID, PrecursorMZ FROM UnfilteredSpectrum s JOIN SpectrumSource source ON s.Source = source.Id JOIN UnfilteredPeptideSpectrumMatch psm ON s.Id = psm.Spectrum JOIN Peptide p ON p.Id = psm.Peptide JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id WHERE (scoreName.Name = " + "'" + searchScore1Name + "'" + " AND psmScore.Value " + searchScore1Order + searchScore1Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore2Name + "'" + " AND 
psmScore.Value " + searchScore2Order + searchScore2Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore3Name + "'" + " AND psmScore.Value " + searchScore3Order + searchScore3Threshold.ToString() + ")" + " GROUP BY s.Id" ).List<object[]>(); var foundSpectraList = session.CreateSQLQuery(@"SELECT distinct spectrum FROM PeptideSpectrumMatch").List<object>(); var foundSpectra = new HashSet<long>(); { long tempLong; foreach (var item in foundSpectraList) if (long.TryParse(item.ToString(), out tempLong)) foundSpectra.Add(tempLong); } var spectrumRows = queryRows.Select(o => new SpectrumRow(o)).OrderBy(o => o.SourceName).ToList(); ////converted IOrderedEnumerable to List, the former one may end up with multiple enumeration, each invokes constructor, resulting a fresh set of object /* * extract peaks for each spectrum, spectrumRows was sorted by SourceName */ string currentSourceName = null; string currentSourcePath = null; msdata.MSData msd = null; int spectrumRowsCount = spectrumRows.Count(); //Set<long> processedSpectrumIDs = new Set<long>(); reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); reportStatus(string.Format("Extracting peaks for {0} spectra ... 
", spectrumRowsCount)); lock (owner) for (int i = 0; i < spectrumRowsCount; ++i) { if (_bgWorkerClustering.CancellationPending) { _bgWorkerCancelled = true; return; } var row = spectrumRows.ElementAt(i); reportProgress((int)(((double)(i + 1) / (double)spectrumRowsCount) * 100), string.Format("Extracting peaks ({0}/{1}) from {2}", i + 1, spectrumRowsCount, row.SourceName)); //if (processedSpectrumIDs.Contains(row.SpectrumId)) // break; if (row.SourceName != currentSourceName) { currentSourceName = row.SourceName; currentSourcePath = IDPickerForm.LocateSpectrumSource(currentSourceName, session.Connection.GetDataSource()); if (msd != null) msd.Dispose(); msd = new pwiz.CLI.msdata.MSDataFile(currentSourcePath); SpectrumListFactory.wrap(msd, "threshold count 100 most-intense"); //only keep the top 100 peaks //SpectrumListFactory.wrap(msd, "threshold bpi-relative .5 most-intense"); //keep all peaks that are at least 50% of the intensity of the base peak //SpectrumListFactory.wrap(msd, "threshold tic-cutoff .95 most-intense"); //keep all peaks that count for 95% TIC //threshold <count|count-after-ties|absolute|bpi-relative|tic-relative|tic-cutoff> <threshold> <most-intense|least-intense> [int_set(MS levels)] } var spectrumList = msd.run.spectrumList; var pwizSpectrum = spectrumList.spectrum(spectrumList.find(row.SpectrumNativeID), true); //may create indexoutofrange error if no spectrum nativeID row.OriginalMZs = pwizSpectrum.getMZArray().data; //getMZArray().data returns IList<double> row.OriginalIntensities = pwizSpectrum.getIntensityArray().data; //processedSpectrumIDs.Add(row.SpectrumId); } /* * re-sort spectrumRows by precursorMZ * walk through each spectrum. compare similarity to all other spectra within the precursorMZTolerance * (e.g. 
compare 1 to 2,3,4, then 2 to 3,4,5, then 3 to 4,5 etc), * if above similarityThreshold, add link edge to BOTH spectra * merge all connected spectra to a cluster */ reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); reportStatus("Computing similarities... "); var spectrumRowsOrderByPrecursorMZ = (from randomVar in spectrumRows orderby randomVar.PrecursorMZ select randomVar).ToList(); LinkMap linkMap = new LinkMap(); //// spectrum Id as key, directly linked spectra as value double similarityScore = 0; lock (owner) for (int i = 0; i < spectrumRowsCount; ++i) { if (_bgWorkerClustering.CancellationPending) { _bgWorkerCancelled = true; return; } var row = spectrumRowsOrderByPrecursorMZ.ElementAt(i); reportProgress((int)(((double)(i + 1) / (double)spectrumRowsCount) * 100), "Computing similarities"); for (int j = i + 1; j < spectrumRowsCount; ++j) { var nextRow = spectrumRowsOrderByPrecursorMZ.ElementAt(j); if (Math.Abs(row.PrecursorMZ - nextRow.PrecursorMZ) > precursorMzTolerance) { break; } else { ////compare pairwise similarity, link spectra passing threshold to both spectrum Peaks rowPeakList = new Peaks(row.OriginalMZs, row.OriginalIntensities); Peaks nextRowPeakList = new Peaks(nextRow.OriginalMZs, nextRow.OriginalIntensities); //// converting peak intensities to sqrt here is 5-fold slower than doing this in DotProductCompareTo function //Peaks rowPeakList = new Peaks(row.OriginalMZs, row.OriginalIntensities.Select(o => Math.Sqrt(o)).ToList()); //Peaks nextRowPeakList = new Peaks(nextRow.OriginalMZs, nextRow.OriginalIntensities.Select(o => Math.Sqrt(o)).ToList()); similarityScore = ClusteringAnalysis.DotProductCompareTo(rowPeakList, nextRowPeakList, fragmentMzTolerance); //reportStatus("similarity between " + row.SpectrumNativeID + " and " + nextRow.SpectrumNativeID + " is " + similarityScore.ToString() + "\r\n"); if (similarityScore >= similarityThreshold) { linkMap[(long)row.SpectrumId].Add((long)nextRow.SpectrumId); 
linkMap[(long)nextRow.SpectrumId].Add((long)row.SpectrumId); //// if a -> b, then b -> a } } } } reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); reportStatus("Clustering spectra... "); reportProgress(-1, "Clustering spectra"); linkMap.GetMergedLinkList(); reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); //// print clustered spectra //foreach (var cluster in linkMap.MergedLinkList) //{ // reportStatus("Number of spectra in cluster: " + cluster.Count().ToString() + "\r\n"); // foreach (var sID in cluster) // { // var nativeID = (from o in spectrumRows where o.SpectrumId == sID select o.SpectrumNativeID).First(); // reportStatus(nativeID.ToString() + "\t"); // } // reportStatus("\r\n"); //} ////free some memory queryRows.Clear(); queryRows = null; msd.Dispose(); msd = null; spectrumRows.Clear(); spectrumRows = null; spectrumRowsOrderByPrecursorMZ.Clear(); spectrumRowsOrderByPrecursorMZ = null; /* * Go through each cluster, rescue PSMs if spectra in the same cluster were identified as the same peptide (id) */ List<Set<long>> clusterSetList = (from o in linkMap.MergedLinkList where o.Count >= minClusterSize select o).ToList(); //// each element in the list is a set of clustered spectrum Ids, select sets with at least minClusterSize element int clusterSetListCount = clusterSetList.Count(); var allSpectrumIDs = (from o in clusterSetList from j in o select j).ToList(); reportStatus(string.Format("Number of clusters: {0} \r\n", clusterSetListCount)); reportStatus(string.Format("Number of spectra clustered: {0}/{1} ({2:0.0%}) \r\n", allSpectrumIDs.Count, spectrumRowsCount, (double)allSpectrumIDs.Count / spectrumRowsCount)); IList<object> identPSMQueryRows; lock (session) identPSMQueryRows = session.CreateSQLQuery(@"SELECT psm.Id FROM PeptideSpectrumMatch psm").List<object>(); var identPSMIdSet = new Set<long>(identPSMQueryRows.Select(o => (long)o)); reportStatus(string.Format("Number of PSMs identified: {0} \r\n", 
identPSMIdSet.Count)); //// create a temp table to store clustered spectrum IDs session.CreateSQLQuery(@"DROP TABLE IF EXISTS TempSpecIds; CREATE TEMP TABLE TempSpecIds (Id INTEGER PRIMARY KEY) ").ExecuteUpdate(); var insertTempSpecIdscmd = session.Connection.CreateCommand(); insertTempSpecIdscmd.CommandText = "INSERT INTO TempSpecIds VALUES (?)"; var insertTempSpecIdsParameters = new List<System.Data.IDbDataParameter>(); for (int i = 0; i < 1; ++i) { insertTempSpecIdsParameters.Add(insertTempSpecIdscmd.CreateParameter()); insertTempSpecIdscmd.Parameters.Add(insertTempSpecIdsParameters[i]); } insertTempSpecIdscmd.Prepare(); foreach (var id in allSpectrumIDs) { insertTempSpecIdsParameters[0].Value = id; insertTempSpecIdscmd.ExecuteNonQuery(); } IList<object> allPsmIdQueryRows; lock (session) //// SQL query to retrieve all psm id for clustered spectra with score above a threshold allPsmIdQueryRows = session.CreateSQLQuery(@"SELECT GROUP_CONCAT(psm.Id) FROM TempSpecIds JOIN UnfilteredPeptideSpectrumMatch psm ON TempSpecIds.Id = psm.Spectrum JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id WHERE psm.Rank <= " + maxRank.ToString() + " AND ((scoreName.Name = " + "'" + searchScore1Name + "'" + " AND psmScore.Value " + searchScore1Order + searchScore1Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore2Name + "'" + " AND psmScore.Value " + searchScore2Order + searchScore2Threshold.ToString() + ") OR (scoreName.Name = " + "'" + searchScore3Name + "'" + " AND psmScore.Value " + searchScore3Order + searchScore3Threshold.ToString() + "))" + " GROUP BY TempSpecIds.Id, psm.Charge" ).List<object>(); var allPsmIdsRows = allPsmIdQueryRows.Select(o => new PsmIdRow(o)).ToList(); Set<long> allPsmIds = new Set<long>(); foreach (var row in allPsmIdsRows) { allPsmIds.Union(row.PsmIds); } session.CreateSQLQuery(@"DROP TABLE IF EXISTS TempSpecIds").ExecuteUpdate(); 
reportStatus("Querying PSMs..."); reportProgress(-1, "Querying PSMs"); IList<object[]> allClusterQueryRows; //// create a temp table to store psm IDs session.CreateSQLQuery(@"DROP TABLE IF EXISTS TempPsmIds; CREATE TEMP TABLE TempPsmIds (Id INTEGER PRIMARY KEY) ").ExecuteUpdate(); var cmd = session.Connection.CreateCommand(); cmd.CommandText = "INSERT INTO TempPsmIds VALUES (?)"; var parameters = new List<System.Data.IDbDataParameter>(); for (int i = 0; i < 1; ++i) { parameters.Add(cmd.CreateParameter()); cmd.Parameters.Add(parameters[i]); } cmd.Prepare(); foreach (var id in allPsmIds) { parameters[0].Value = id; cmd.ExecuteNonQuery(); } //// qurey string for revison 286, no DecoySequence in Peptide table // string queryCmd = @"SELECT psm.Id as psmId, s.Id, source.Name, s.NativeID, psm.Rank, psm.Charge, psmScore.Value, IFNULL(GROUP_CONCAT(DISTINCT pm.Offset || ':' || mod.MonoMassDelta),''), // (SELECT SUBSTR(pro.Sequence, pi.Offset+1, pi.Length) // FROM PeptideInstance pi // JOIN ProteinData pro ON pi.Protein=pro.Id // WHERE pi.Protein=pro.Id AND // pi.Id=(SELECT MIN(pi2.Id) // FROM PeptideInstance pi2 // WHERE psm.Peptide=pi2.Peptide)) // FROM TempIDs tempIDs // JOIN Spectrum s ON s.Id = tempIDs.Id // JOIN SpectrumSource source ON s.Source = source.Id // JOIN PeptideSpectrumMatch psm ON s.Id = psm.Spectrum // LEFT JOIN PeptideModification pm ON psm.Id = pm.PeptideSpectrumMatch // LEFT JOIN Modification mod ON pm.Modification = mod.Id // JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId // JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id // WHERE scoreName.Name = " + "'" + searchScoreName + "'" + " AND psm.Rank <= 5" + // " GROUP BY psm.Id"; //AND s.Id IN ( " + String.Join(",", allSpectrumIDs.Select(o => o.ToString()).ToArray()) + " ) " + //// query string for revison 288, added DecoySequence in Peptide table // string queryCmd = @"SELECT psm.Id as psmId, s.Id, source.Name, s.NativeID, psm.Rank, psm.Charge, 
psmScore.Value, IFNULL(GROUP_CONCAT(DISTINCT pm.Offset || ':' || mod.MonoMassDelta),''), // (SELECT IFNULL(SUBSTR(pro.Sequence, pi.Offset+1, pi.Length), (SELECT DecoySequence FROM Peptide p WHERE p.Id = pi.Peptide)) // FROM PeptideInstance pi // LEFT JOIN ProteinData pro ON pi.Protein=pro.Id // WHERE pi.Id=(SELECT pi2.Id FROM PeptideInstance pi2 WHERE pi2.Peptide=psm.Peptide LIMIT 1)) // FROM TempIDs tempIDs // JOIN Spectrum s ON s.Id = tempIDs.Id // JOIN SpectrumSource source ON s.Source = source.Id // JOIN PeptideSpectrumMatch psm ON s.Id = psm.Spectrum // LEFT JOIN PeptideModification pm ON psm.Id = pm.PeptideSpectrumMatch // LEFT JOIN Modification mod ON pm.Modification = mod.Id // JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId // JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id // WHERE scoreName.Name = " + "'" + searchScoreName + "'" + " AND psm.Rank <= 5" + // " GROUP BY psm.Id"; ////query string for revision 291, retrive by PSM Ids // string queryCmd = @"SELECT psm.Id as psmId, psm.Peptide,s.Id, source.Name, s.NativeID, psm.Charge, IFNULL(GROUP_CONCAT(DISTINCT pm.Offset || ':' || mod.MonoMassDelta),''), // (SELECT IFNULL(SUBSTR(pd.Sequence, pi.Offset+1, pi.Length), (SELECT DecoySequence FROM UnfilteredPeptide p WHERE p.Id = pi.Peptide))), // GROUP_CONCAT(pro.Accession),psm.QValue, psm.Rank, psmScore.Value, analysis.Id // FROM TempPsmIds tempPsmIds // JOIN UnfilteredPeptideSpectrumMatch psm ON psm.Id = tempPsmIds.Id // JOIN Analysis analysis ON psm.Analysis = analysis.Id // JOIN Spectrum s ON s.Id = psm.Spectrum // JOIN SpectrumSource source ON s.Source = source.Id // JOIN UnfilteredPeptideInstance pi ON psm.Peptide = pi.Peptide // JOIN UnfilteredProtein pro ON pi.Protein = pro.Id // LEFT JOIN ProteinData pd ON pi.Protein=pd.Id // LEFT JOIN PeptideModification pm ON psm.Id = pm.PeptideSpectrumMatch // LEFT JOIN Modification mod ON pm.Modification = mod.Id // LEFT JOIN PeptideSpectrumMatchScore psmScore ON 
psm.Id = psmScore.PsmId // LEFT JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id // WHERE scoreName.Name = " + "'" + searchScore1Name + "'" + // " GROUP BY psm.Id"; // query for r291, fix no seq for some peptides shared by target and decoy proteins, query seq for target and decoy proteins separately then union string queryCmd = @"SELECT psm.Id as psmId, psm.Peptide,s.Id, source.Name, s.NativeID, psm.Charge, IFNULL(GROUP_CONCAT(DISTINCT pm.Offset || ':' || mod.MonoMassDelta),''), IFNULL(IFNULL(SUBSTR(pd.Sequence, pi.Offset+1, pi.Length),(SELECT DecoySequence FROM UnfilteredPeptide p WHERE p.Id = pi.Peptide)), (SELECT SUBSTR(pd.Sequence, pi.Offset+1, pi.Length) FROM UnfilteredPeptideInstance pi JOIN UnfilteredProtein pro ON pi.Protein = pro.Id AND pro.IsDecoy = 0 LEFT JOIN ProteinData pd ON pi.Protein=pd.Id WHERE psm.Peptide = pi.Peptide UNION SELECT p.DecoySequence FROM UnfilteredPeptide p JOIN UnfilteredPeptideInstance pi ON p.Id = pi.Peptide JOIN UnfilteredProtein pro ON pi.Protein = pro.Id AND pro.IsDecoy = 1 WHERE psm.Peptide = pi.Peptide AND p.DecoySequence is not null)), GROUP_CONCAT(pro.Accession), psm.QValue, psm.Rank, psmScore.Value, psm.Analysis FROM TempPsmIds tempPsmIds JOIN UnfilteredPeptideSpectrumMatch psm ON psm.Id = tempPsmIds.Id JOIN UnfilteredSpectrum s ON s.Id = psm.Spectrum JOIN SpectrumSource source ON s.Source = source.Id JOIN UnfilteredPeptideInstance pi ON psm.Peptide = pi.Peptide JOIN UnfilteredProtein pro ON pi.Protein = pro.Id LEFT JOIN ProteinData pd ON pi.Protein=pd.Id LEFT JOIN PeptideModification pm ON psm.Id = pm.PeptideSpectrumMatch LEFT JOIN Modification mod ON pm.Modification = mod.Id LEFT JOIN PeptideSpectrumMatchScore psmScore ON psm.Id = psmScore.PsmId LEFT JOIN PeptideSpectrumMatchScoreName scoreName ON psmScore.ScoreNameId=scoreName.Id WHERE scoreName.Name in ( " + "'" + searchScore1Name + "','" + searchScore2Name + "','" + searchScore3Name + "')" + " GROUP BY psm.Id"; lock (session) 
allClusterQueryRows = session.CreateSQLQuery(queryCmd).List<object[]>(); var allClusterSpectrumRows = allClusterQueryRows.Select(o => new ClusterSpectrumRow(o)).ToList(); session.CreateSQLQuery(@"DROP TABLE IF EXISTS TempPsmIds").ExecuteUpdate(); reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); reportStatus(string.Format("Number of PSMs retrieved: {0} \r\n", allClusterSpectrumRows.Count)); reportStatus("Rescuing PSMs... "); if (writeLog) { string logHeader = string.Join("\t", new string[] { "SourceName", "NativeID", "Charge", "RescuedSequence", "Protein", "ScoreName", "SearchScore", "BAScore", "QValue", "Rank", "Rank1Sequence", "Rank1Protein", "Rank1SearchScore", "Rank1BAScore", "Rank1Qvalue", "\r\n" }); File.WriteAllText(logFile, logHeader); } Dictionary<long, UpdateValues> updateDict = new Dictionary<long, UpdateValues>(); ////key: Id in unfiltered psm table, value: reassigned Qvalue and reassinged Rank Set<long> rescuedDistinctSpectraIds = new Set<long>(); //// SQL query to retrieve anlaysis Id and search score order in QonvertSettings table IList<object[]> qonvertSettingsQueryRows; lock (session) qonvertSettingsQueryRows = session.CreateSQLQuery("SELECT Id, ScoreInfoByName FROM QonverterSettings").List<object[]>(); var qonvertSettingRows = qonvertSettingsQueryRows.Select(o => new qonvertSettingRows(o)).ToList(); Dictionary<long, string> analysisScoreOrder = new Dictionary<long, string>(); Dictionary<long, string> analysisScoreName = new Dictionary<long, string>(); foreach (var qonvertSettingRow in qonvertSettingRows) { analysisScoreOrder.Add(qonvertSettingRow.Id, qonvertSettingRow.ScoreOrder); analysisScoreName.Add(qonvertSettingRow.Id, qonvertSettingRow.ScoreName); } ////walk through each cluster to rescue PSMs for (int i = 0; i < clusterSetListCount; ++i) { var clusterSet = clusterSetList.ElementAt(i); if (_bgWorkerClustering.CancellationPending) { _bgWorkerCancelled = true; return; } //reportStatus("Clustering set: " + 
String.Join(",",clusterSet.Select(j => j.ToString()).ToArray()) + "\r\n"); reportProgress((int)(((double)(i + 1) / (double)clusterSetListCount) * 100), "Rescuing PSMs"); var clusterSpectrumRows = (from o in allClusterSpectrumRows where clusterSet.Contains(o.SpectrumId) select o).ToList(); //Map<long, Set<long>> peptideIdDict = new Map<long, Set<long>>(); //key: peptide id, value: psm ids //Set<long> unprocessedPSMIds = new Set<long>(); Set<string> unprocessedSpecChargeAnalysisSet = new Set<string>(); //spectrumId.charge.analysis var pepSeqDict = new PepDictionary(); //key: modified peptide sequence, value: spectrumId.charge.analysis, score //var peptideIdDict = new PepDictionary(); //key: peptide ID, value: PSM Ids and scores foreach (var row in clusterSpectrumRows) { //peptideIdDict.Add(row.PeptideId,row.PSMId, row.SearchScore); //peptideIdDict[row.PeptideId].Add(row.PSMId); pepSeqDict.Add(row.ModifiedSequence, row.SpectrumId, row.Charge, row.Analysis, row.SearchScore, row.PSMId); //unprocessedPSMIds.Add(row.PSMId); //unprocessedSpectrumCharge.Add(row.SpectrumId.ToString() + "." + row.Charge.ToString()); unprocessedSpecChargeAnalysisSet.Add(row.SpectrumId.ToString() + "." + row.Charge.ToString() + "." 
+ row.Analysis.ToString()); } pepSeqDict.ComputeBayesianAverage(analysisScoreOrder); //replace score from sum of search scores to Bayesian Average var sortedPepSeqDictKeys = from k in pepSeqDict.Keys orderby pepSeqDict[k].FinalScore descending, pepSeqDict[k].PsmIdSpecDict.Count() descending select k; // sort by score, if tied, second sort by # of linked psms foreach (var pepSeq in sortedPepSeqDictKeys) { if (unprocessedSpecChargeAnalysisSet.Count == 0) break; if (pepSeqDict[pepSeq].PsmIdSpecDict.Keys.Any(pId => identPSMIdSet.Contains(pId))) ////at least one psm identified as this peptide in this cluster { foreach (var psmId in pepSeqDict[pepSeq].PsmIdSpecDict.Keys) { var row = (from o in clusterSpectrumRows where o.PSMId == psmId select o).First(); string spec = row.SpectrumId.ToString() + "." + row.Charge.ToString() + "." + row.Analysis.ToString(); if (unprocessedSpecChargeAnalysisSet.Contains(spec)) { if (identPSMIdSet.Contains(psmId) || foundSpectra.Contains(row.SpectrumId)) { //// not process ident PSMs unprocessedSpecChargeAnalysisSet.Remove(spec); } else { updateDict.Add(psmId, new UpdateValues(-1, 1)); //// update Qvalue = -1, Rank =1 ++rescuedPSMsCount; rescuedDistinctSpectraIds.Add(row.SpectrumId); unprocessedSpecChargeAnalysisSet.Remove(spec); if (writeLog) { string originalRank1Seq = ""; string originalRank1Protein = ""; string originalRank1Score = ""; string originalRank1BAScore = ""; string originalRank1Qvalue = ""; if (row.Rank != 1) { var originalRank1Rows = (from o in clusterSpectrumRows where o.SpectrumId == row.SpectrumId && o.Rank == 1 && o.Charge == row.Charge && o.Analysis == row.Analysis select new { o.ModifiedSequence, o.Protein, o.SearchScore, o.QValue }).ToList(); ////may exist more than one rank1 hits foreach (var originalRank1Row in originalRank1Rows) { originalRank1Seq += originalRank1Row.ModifiedSequence + ";"; originalRank1Protein += originalRank1Row.Protein + ";"; originalRank1Score += originalRank1Row.SearchScore.ToString("0.0000") + 
";"; originalRank1BAScore += pepSeqDict.ContainsKey(originalRank1Row.ModifiedSequence) ? pepSeqDict[originalRank1Row.ModifiedSequence].FinalScore.ToString("0.0000") + ";" : ""; originalRank1Qvalue += originalRank1Row.QValue.ToString("0.0000") + ";"; } } string logLine = string.Join("\t", new string[] { row.SourceName, row.SpectrumNativeID, row.Charge.ToString(), row.ModifiedSequence, row.Protein, analysisScoreName[row.Analysis], row.SearchScore.ToString("0.0000"), pepSeqDict[pepSeq].FinalScore.ToString("0.0000"), row.QValue.ToString("0.0000"), row.Rank.ToString(), originalRank1Seq, originalRank1Protein, originalRank1Score, originalRank1BAScore, originalRank1Qvalue }); using (StreamWriter sw = File.AppendText(logFile)) { sw.WriteLine(logLine); } } } } } } } //// end of foreach (var pepSeq in sortedPepSeqDictKeys) } //// end of for (int i = 0; i < clusterSetListCount; ++i) reportStatus(string.Format("{0} seconds elapsed\r\n", (DateTime.Now - startTime).TotalSeconds)); /* *update unfiltered psm table in idpDB */ if (rescuedPSMsCount == 0) return; reportStatus("Updating idpDB... "); session.Transaction.Begin(); //basicDataFilter.DropFilters(session); // tables were dropped before querying var updateCmd = session.Connection.CreateCommand(); updateCmd.CommandText = "UPDATE UnfilteredPeptideSpectrumMatch SET QValue = ?, Rank = ? 
WHERE Id = ?"; var updateParameters = new List<System.Data.IDbDataParameter>(); for (int i = 0; i < 3; ++i) { updateParameters.Add(updateCmd.CreateParameter()); updateCmd.Parameters.Add(updateParameters[i]); } updateCmd.Prepare(); int updateCount = 0; int allUpdateCount = updateDict.Count; foreach (KeyValuePair<long, UpdateValues> pair in updateDict) { updateParameters[0].Value = pair.Value.ReassignedQvalue; //// Qvalue updateParameters[1].Value = pair.Value.ReassignedRank; //// Rank updateParameters[2].Value = pair.Key; //// psm id updateCmd.ExecuteNonQuery(); reportProgress((int)(((double)(updateCount + 1) / (double)allUpdateCount) * 100), "Updating idpDB"); ++updateCount; } session.Transaction.Commit(); //basicDataFilter.ApplyBasicFilters(session); reportStatus(reportSecondsElapsed((DateTime.Now - startTime).TotalSeconds)); reportStatus(string.Format("Rescued {0} PSMs for {1} distinct spectra\r\n", rescuedPSMsCount, rescuedDistinctSpectraIds.Count)); reportProgress(0, "Ready"); /* * not recompute q values, reload idpDB, implemented in _bgWorkerClustering_RunWorkerCompleted */ } //// end of RescuePSMsByClustering
/// <summary>
/// Analyzes the spectrum photo referenced by <c>PicPath</c>: averages each
/// pixel column of the image, converts the averaged RGB of every column to a
/// brightness value, and fills <c>Peaks</c> and <c>PlotPoints</c> with the
/// resulting spectrum. On the first run the peaks are also copied into
/// <c>SamplePeaks</c> as the reference spectrum used later by AnalyzeSample.
/// Shows a message box if the photo cannot be loaded.
/// </summary>
public void AnalyzeSpectrum()
{
    try
    {
        Peaks.Clear();
        PlotPoints.Clear();
        if (firstAnalyze)
        {
            SamplePeaks.Clear();
        }

        var uri = new Uri(PicPath);
        var bitmap = new BitmapImage(uri);

        // NOTE(review): stride math assumes a 32-bit (4 bytes per pixel,
        // BGRA-like) pixel format — confirm the camera/photo output format.
        int stride = bitmap.PixelWidth * 4;
        int size = bitmap.PixelHeight * stride;
        byte[] pixels = new byte[size];
        bitmap.CopyPixels(pixels, stride, 0);

        // 1. Sum each color component down every pixel column.
        int[] avgpixels = new int[stride];
        for (int y = 0; y < bitmap.PixelHeight; y++)
        {
            for (int x = 0; x < bitmap.PixelWidth; x++)
            {
                int index = y * stride + 4 * x;
                avgpixels[4 * x] += pixels[index];
                avgpixels[4 * x + 1] += pixels[index + 1];
                avgpixels[4 * x + 2] += pixels[index + 2];
            }
        }

        // 2. Divide by the column height to get per-column averages (smoothing).
        // (Bug fix: removed a dead `index` local here that was never used and
        // would have pointed past the pixel buffer.)
        for (int x = 0; x < bitmap.PixelWidth; x++)
        {
            avgpixels[4 * x] /= bitmap.PixelHeight;
            avgpixels[4 * x + 1] /= bitmap.PixelHeight;
            avgpixels[4 * x + 2] /= bitmap.PixelHeight;
        }

        // 3. Convert each averaged column to an intensity and add a plot point.
        for (int x = 0; x < bitmap.PixelWidth; x++)
        {
            var spec = new AnalyzedSpectrum();
            int index = 4 * x;
            int red = avgpixels[index];
            int green = avgpixels[index + 1];
            int blue = avgpixels[index + 2];

            // Shift the pixel index so the X axis reads as wavelength, starting at 380.
            spec.PeakPixel = x + 380;
            // Perceived brightness per the HSP color model.
            spec.PeakIntensity = Math.Round(
                Math.Sqrt(0.299 * red * red + 0.587 * green * green + 0.114 * blue * blue), 2);

            Peaks.Add(spec);
            if (firstAnalyze)
            {
                SamplePeaks.Add(spec);
            }
            var _point = new DataPoint(spec.PeakPixel, spec.PeakIntensity);
            PlotPoints.Add(_point);
        }

        firstAnalyze = false;
    }
    catch (Exception) // unused exception variable removed (CS0168)
    {
        MessageBox.Show("You must make a photo of spectrum first!");
    }
}
/// <summary>
/// Demo entry point: extracts the first column from a semicolon-separated
/// list of mountains via <c>Peaks.FirstColumn</c>.
/// </summary>
static void Main(string[] args)
{
    const string separator = ";";
    const string mountainList =
        "Monte Falco, 1658, Parco Foreste Casentinesi ; Monte Falterona, 1654, Parco Foreste Casentinesi; Monte Fumaiolo, 1407, Appennino Tosco Emiliano";

    Peaks.FirstColumn(separator, mountainList);
}
/// <summary>
/// Clips polygon <paramref name="P"/> against this clipper polygon using the
/// Sutherland–Hodgman approach: P is clipped against every edge of the
/// clipper in turn. P is modified in place and also returned.
/// </summary>
/// <param name="P">Polygon to clip; mutated to hold the clipped result.</param>
/// <returns>The clipped polygon (same instance as <paramref name="P"/>);
/// empty if P lies entirely outside some clip edge.</returns>
public Polygon CutPolygon(Polygon P)
{
    PointF I, Tmp1, Tmp2;
    Vector Nv, D, W;
    List<PointF> Q = new List<PointF>();
    // Temporarily close the clipper by repeating its first vertex.
    Peaks.Add(Peaks[0]);
    double Dsk, Wsk, t;
    try
    {
        // Walk along every edge of the clipper.
        for (int i = 1; i < Peaks.Count; i++)
        {
            Nv = GetNormal(Peaks[i - 1], Peaks[i]);
            if (VisiblePeak(P.GetPeak(0), Peaks[i], Nv))
            {
                Q.Add(P.GetPeak(0));
            }
            // Walk along every edge of the polygon being clipped.
            for (int j = 1; j < P.Count(); j++)
            {
                Tmp1 = P.GetPeak(j - 1);
                Tmp2 = P.GetPeak(j);
                // Direction vector of the segment.
                D = new Vector(Tmp2.X - Tmp1.X, Tmp2.Y - Tmp1.Y);
                // Its projection onto the edge normal: zero means the segment
                // is degenerate or parallel to the clip edge.
                Dsk = ScalarMultVector(D, Nv);
                if (Dsk != 0)
                {
                    // Vector from the clipper vertex to the segment start.
                    W = new Vector(Tmp1.X - Peaks[i - 1].X, Tmp1.Y - Peaks[i - 1].Y);
                    Wsk = ScalarMultVector(W, Nv);
                    t = -Wsk / Dsk;
                    if (t >= 0 && t <= 1)
                    {
                        // Intersection point of the segment with the clip edge.
                        I = new PointF((float)(P.GetPeak(j - 1).X + t * D.X),
                                       (float)(P.GetPeak(j - 1).Y + t * D.Y));
                        Q.Add(I);
                    }
                }
                if (VisiblePeak(P.GetPeak(j), Peaks[i], Nv))
                {
                    Q.Add(P.GetPeak(j));
                }
            }
            // Bug fix: an empty Q means P lies entirely outside this edge —
            // the clipped polygon is empty (the original threw on Q[0] here).
            if (Q.Count == 0)
            {
                P.Clear();
                return P;
            }
            Q.Add(Q[0]); // close the intermediate result polygon
            P.Clear();
            P.AddRange(Q);
            Q.Clear();
        }
        return P;
    }
    finally
    {
        // Bug fix: remove the temporary closing vertex so repeated calls do
        // not keep growing (and corrupting) the clipper polygon.
        Peaks.RemoveAt(Peaks.Count - 1);
    }
}
/// <summary>
/// Accumulates this object's peak envelope into <paramref name="targetEnvelope"/>
/// by delegating to <c>Peaks.SumEnvelopeTo</c> (the target array is mutated in place).
/// </summary>
/// <param name="targetEnvelope">Envelope buffer the peaks are summed into.</param>
public void SumEnvelopeTo(double[] targetEnvelope) { Peaks.SumEnvelopeTo(targetEnvelope); }
/// <summary>
/// Resets this instance: zeroes the direction and empties the peak collection.
/// </summary>
public override void Clear()
{
    // The two resets are independent of each other.
    Direction = 0;
    Peaks.Clear();
}
/// <summary>
/// Runs the full protein-identification pipeline for every peak-list file in
/// <c>parameters.peakListFile</c>. The nine modules per file are: mass tuning,
/// molecular-weight DB filtering, PST generation, PST filtering, PTM handling,
/// in-silico fragmentation, in-silico filtering, scoring, and result storage.
/// Progress is published through <c>CurrentJobs.current[location].progress</c>
/// and results are persisted via <c>DBConnect.store_results</c>.
/// </summary>
public void DoWork()
{
    Stopwatch entire = new Stopwatch();
    entire.Start();

    for (int fileIndex = 0; fileIndex < parameters.peakListFile.Length; fileIndex++)
    {
        Stopwatch total = new Stopwatch();
        total.Start();
        DBConnect db = new DBConnect();
        Peaks peakData = new Peaks();
        double mol_weight = PeakReader.peakListReader(
            peakData.mass, peakData.intens, parameters.peakListFile[fileIndex], parameters.fileType);

        // ---- Module 1: mass tuner -------------------------------------------------
        CurrentJobs.current[location].progress = "Module 1 of 9: Running Mass tuner.";
        Stopwatch timer = new Stopwatch();
        execution_time time = new execution_time();
        // A user-supplied mass overrides the one read from the peak list.
        if (parameters.GUI_mass > 0)
        {
            mol_weight = parameters.GUI_mass;
        }
        timer.Start();
        if (parameters.autotune == 1)
        {
            WholeProteinMassTunerGPU.GPU_generator(
                peakData.mass, peakData.intens, ref mol_weight, parameters.MW_tolerance);
        }
        timer.Stop();
        time.tuner_time = timer.Elapsed.ToString();
        timer.Reset();

        // ---- Module 2: molecular-weight DB filter ---------------------------------
        CurrentJobs.current[location].progress = "Module 2 of 9: Filtering DB on molecular weight.";
        timer.Start();
        List<proteins> filter_prot = new List<proteins>();
        try
        {
            filter_prot = MW_Module.Fasta_Reader(
                mol_weight, parameters.MW_tolerance, parameters.protDB, parameters.filterDB);
        }
        catch (Exception eez)
        {
            // Best-effort: continue with an empty candidate list.
            // Bug fix: the original read eez.InnerException.ToString(), which
            // throws NullReferenceException when there is no inner exception.
            string filterError = eez.ToString();
        }
        timer.Stop();
        time.mw_filter_time = timer.Elapsed.ToString();
        timer.Reset();

        // ---- Module 3: PST generation ---------------------------------------------
        CurrentJobs.current[location].progress = "Module 3 of 9: Generating PST's";
        timer.Start();
        string input = "";
        StringBuilder sb = new StringBuilder();
        sb.Clear();
        // Concatenate all candidate sequences; pro_ind records each length so
        // matches can be mapped back to their protein.
        List<int> pro_ind = new List<int>();
        for (int i = 0; i < filter_prot.Count; i++)
        {
            sb.Append(filter_prot[i].sequence);
            pro_ind.Add(filter_prot[i].sequence.Length);
        }
        input = sb.ToString();
        sb.Clear();
        int I_len = input.Length;

        if (parameters.denovo_allow == 1)
        {
            List<tags> estTags = EST_module.findEST(
                peakData.mass, peakData.intens, parameters.hopThreshhold / 17,
                parameters.minimum_est_length, parameters.maximum_est_length);

            // ---- Module 4: PST filter ---------------------------------------------
            CurrentJobs.current[location].progress = "Module 4 of 9: Filtering on the basis of PST's.";
            String[] keys = new String[estTags.Count];
            for (int est = 0; est < estTags.Count; est++)
            {
                keys[est] = estTags[est].AA.ToUpper();
            }
            if (filter_prot.Count > 0)
            {
                char[] matched_result = Device_search.Execute(keys, input, I_len);
                EST_module.GPU_Filter(matched_result, estTags, pro_ind, filter_prot);
                matched_result = null;
            }
            // Release the large intermediate buffers.
            input = "";
            keys = null;
            pro_ind.Clear();
            estTags.Clear();
        }
        timer.Stop();
        time.pst_time = timer.Elapsed.ToString();
        timer.Reset();

        // ---- Module 5: PTM handling -----------------------------------------------
        CurrentJobs.current[location].progress = "Module 5 of 9: Executing PTM module.";
        timer.Start();
        List<proteins> modified_proteins = new List<proteins>(); // populated by PTM_Control_2
        List<proteins> shortlisted_proteins = new List<proteins>();
        if (parameters.ptm_allow == 1)
        {
            PTM_2.PTM_Control_2(
                filter_prot, parameters.ptm_tolerance, modified_proteins,
                parameters.ptm_code_var, parameters.ptm_code_fix, shortlisted_proteins,
                parameters.insilico_frag_type, parameters.HandleIons, peakData.mass,
                parameters.hopThreshhold, parameters.MW_sweight, parameters.PST_sweight,
                parameters.Insilico_sweight);
        }
        else
        {
            shortlisted_proteins = filter_prot;
            foreach (proteins kkk in shortlisted_proteins)
            {
                kkk.ptm_particulars = new List<Sites>();
            }
        }

        // Re-filter on molecular weight when both fixed and variable PTMs are set.
        if (parameters.ptm_code_fix.Count != 0 && parameters.ptm_code_var.Count != 0)
        {
            if (shortlisted_proteins.Count > 1)
            {
                double buffer = 0;
                // NOTE(review): removing from the list while advancing the index
                // skips the element after each removal; kept as-is to preserve
                // the original behavior — confirm whether the skip is intended.
                for (int p = 1; p < shortlisted_proteins.Count; p++)
                {
                    buffer = MW_Module.MW_filter(
                        mol_weight, parameters.MW_tolerance, shortlisted_proteins.ElementAt(p).MW, false);
                    if (buffer == -7) // sentinel: protein falls outside the tolerance window
                    {
                        shortlisted_proteins.Remove(shortlisted_proteins.ElementAt(p));
                    }
                    else
                    {
                        shortlisted_proteins.ElementAt(p).MW_score = buffer;
                    }
                }
            }
        }
        timer.Stop();
        time.ptm_time = timer.Elapsed.ToString();
        timer.Reset();

        // ---- Module 6: in-silico fragmentation (only when PTMs are off; otherwise
        // PTM_Control_2 already fragmented) ------------------------------------------
        CurrentJobs.current[location].progress = "Module 6 of 9: Generating Insilico Fragments.";
        timer.Start();
        if (parameters.ptm_allow == 0)
        {
            Insilico_Module.insilico_fragmentation(
                shortlisted_proteins, parameters.insilico_frag_type, parameters.HandleIons);
        }

        // ---- Module 7: in-silico filtering ------------------------------------------
        CurrentJobs.current[location].progress = "Module 7 of 9: Insilico Filteration.";
        if (parameters.ptm_allow == 0)
        {
            Insilico_Module.Insilico_filter(
                shortlisted_proteins, peakData.mass, parameters.hopThreshhold);
        }
        timer.Stop();
        time.insilico_time = timer.Elapsed.ToString();
        timer.Reset();

        // ---- Module 8: final scoring -------------------------------------------------
        CurrentJobs.current[location].progress = "Module 8 of 9: Evaluating Final Scores.";
        for (int i = 0; i < shortlisted_proteins.Count; i++)
        {
            shortlisted_proteins[i].set_score(
                parameters.MW_sweight, parameters.PST_sweight, parameters.Insilico_sweight);
        }
        // Bug fix: removed `shortlisted_proteins.AsParallel().OrderBy(x => x.score);`
        // — its result was discarded, so it was a pure no-op; the actual sort
        // happens below via List<T>.Sort.

        // ---- Module 9: store results ---------------------------------------------------
        CurrentJobs.current[location].progress = "Module 9 of 9: Storing Results.";
        shortlisted_proteins.Sort((x, y) => y.score.CompareTo(x.score)); // descending by score
        // Keep only the top 30 candidates.
        List<proteins> store_prot = new List<proteins>();
        for (int store_i = 0; store_i < 30 && store_i < shortlisted_proteins.Count; store_i++)
        {
            store_prot.Add(shortlisted_proteins[store_i]);
        }
        Results final = new Results(parameters.queryid, store_prot, time);
        total.Stop();
        final.times.total_time = total.Elapsed.ToString();
        db.store_results(final, parameters.peakListFile[fileIndex], fileIndex);
        CurrentJobs.current[location].progress = "Done";
    }

    Send_Results_Link(parameters);
    entire.Stop();
    // (Removed the dead `timeForLoop` local — it was assigned and never used.)
}