public static SrmDocument RecalculateAlignments(SrmDocument document, IProgressMonitor progressMonitor) { var newSources = ListAvailableRetentionTimeSources(document.Settings); var newResultsSources = ListSourcesForResults(document.Settings.MeasuredResults, newSources); var allLibraryRetentionTimes = ReadAllRetentionTimes(document, newSources); var newFileAlignments = new List <FileRetentionTimeAlignments>(); IProgressStatus progressStatus = new ProgressStatus(@"Aligning retention times"); // CONSIDER: localize? Will users see this? foreach (var retentionTimeSource in newResultsSources.Values) { progressStatus = progressStatus.ChangePercentComplete(100 * newFileAlignments.Count / newResultsSources.Count); progressMonitor.UpdateProgress(progressStatus); try { var fileAlignments = CalculateFileRetentionTimeAlignments(retentionTimeSource.Name, allLibraryRetentionTimes, progressMonitor); newFileAlignments.Add(fileAlignments); } catch (OperationCanceledException) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } } var newDocRt = new DocumentRetentionTimes(newSources.Values, newFileAlignments); var newDocument = document.ChangeSettings(document.Settings.ChangeDocumentRetentionTimes(newDocRt)); Debug.Assert(IsLoaded(newDocument)); progressMonitor.UpdateProgress(progressStatus.Complete()); return(newDocument); }
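// The method above shows the recurring progress pattern in this section: derive an integer
// percent from loop position, push it through UpdateProgress, and turn cancellation into a
// null return after marking the status canceled. Below is a minimal self-contained sketch of
// that pattern using toy stand-ins, not the actual IProgressMonitor/IProgressStatus types,
// whose immutable Change* style it imitates.
using System;
using System.Collections.Generic;

sealed class ToyStatus
{
    public ToyStatus(string message, int percentComplete, bool isCanceled)
    {
        Message = message;
        PercentComplete = percentComplete;
        IsCanceled = isCanceled;
    }
    public string Message { get; }
    public int PercentComplete { get; }
    public bool IsCanceled { get; }
    public ToyStatus ChangePercentComplete(int percent) => new ToyStatus(Message, percent, IsCanceled);
    public ToyStatus Cancel() => new ToyStatus(Message, PercentComplete, true);
    public ToyStatus Complete() => ChangePercentComplete(100);
}

static class ProgressLoopSketch
{
    // Process items, reporting each whole-percent change and honoring cancellation,
    // mirroring the RecalculateAlignments loop above.
    public static bool Run(IReadOnlyList<string> items, Func<bool> isCanceled, Action<ToyStatus> update)
    {
        var status = new ToyStatus(@"Aligning retention times", 0, false);
        for (int i = 0; i < items.Count; i++)
        {
            int percent = 100 * i / items.Count;
            if (percent != status.PercentComplete)
                update(status = status.ChangePercentComplete(percent));
            if (isCanceled())
            {
                update(status.Cancel());
                return false; // the caller above responds by returning null
            }
            // ... do the per-file alignment work for items[i] ...
        }
        update(status.Complete());
        return true;
    }
}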
public override bool Run(CancellationTokenSource cancelToken) { IProgressStatus status = new ProgressStatus(); //UpdateProgress(status); bool success = true; foreach (var spectrumFilename in SpectrumFileNames) { try { var pr = new ProcessRunner(); var psi = new ProcessStartInfo(@"java", @"-Xmx8G -jar MSGFPlus.jar " + $@"-s {spectrumFilename} -d {FastaFileNames[0]} -tda 1 " + $@"-t {precursorMzTolerance} -ti {isotopeErrorRange.Item1},{isotopeErrorRange.Item2} " + $@"-m {fragmentationMethod} -inst {instrumentType} -e {enzyme} -ntt {ntt} -maxMissedCleavages {maxMissedCleavages}"); pr.Run(psi, string.Empty, this, ref status); status = status.Complete().ChangeMessage(Resources.DDASearchControl_SearchProgress_Search_done); } catch (OperationCanceledException) { status = status.Cancel().ChangeMessage(Resources.DDASearchControl_SearchProgress_Search_canceled); success = false; } catch (Exception ex) { status = status.ChangeErrorException(ex).ChangeMessage(string.Format(Resources.DdaSearch_Search_failed__0, ex.Message)); UpdateProgress(status); success = false; } if (cancelToken.IsCancellationRequested) { success = false; } } var deleteHelper = new DeleteTempHelper(modsFile); deleteHelper.DeletePath(); return(success); }
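// The search arguments above were built by concatenating interpolated fragments, which is
// how a separator can silently go missing (the space after "-tda 1" was restored above).
// A sketch of a less fragile approach: build the command line from discrete tokens and join
// once. Flag names are copied from the snippet, not checked against MSGF+ documentation.
using System.Collections.Generic;

static class MsgfArgsSketch
{
    public static string Build(string spectrumFile, string fastaFile, string tolerance,
        int isotopeMin, int isotopeMax, int fragMethod, int instrument, int enzyme,
        int ntt, int maxMissedCleavages)
    {
        var args = new List<string>
        {
            "-Xmx8G", "-jar", "MSGFPlus.jar",
            "-s", spectrumFile,
            "-d", fastaFile,
            "-tda", "1",
            "-t", tolerance,
            "-ti", $"{isotopeMin},{isotopeMax}",
            "-m", fragMethod.ToString(),
            "-inst", instrument.ToString(),
            "-e", enzyme.ToString(),
            "-ntt", ntt.ToString(),
            "-maxMissedCleavages", maxMissedCleavages.ToString(),
        };
        // Every token is separated exactly once; no fragment can swallow its neighbor.
        return string.Join(" ", args);
    }
}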
/// <summary> /// Returns a string representation of the report based on the document. /// </summary> private string ReportToCsvString(SrmDocument doc, char separator, IProgressMonitor progressMonitor) { IProgressStatus status = new ProgressStatus(string.Format(Resources.ReportSpec_ReportToCsvString_Exporting__0__report, Name)); progressMonitor.UpdateProgress(status); Report report = Report.Load(this); StringWriter writer = new StringWriter(); using (Database database = new Database(doc.Settings) { ProgressMonitor = progressMonitor, Status = status, PercentOfWait = 80 }) { database.AddSrmDocument(doc); status = database.Status; ResultSet resultSet; try { resultSet = report.Execute(database); } catch (Exception) { progressMonitor.UpdateProgress(status.Cancel()); throw; } progressMonitor.UpdateProgress(status = status.ChangePercentComplete(95)); ResultSet.WriteReportHelper(resultSet, separator, writer, LocalizationHelper.CurrentCulture); } writer.Flush(); string csv = writer.ToString(); writer.Close(); progressMonitor.UpdateProgress(status.Complete()); return(csv); }
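// WriteReportHelper (not shown in this excerpt) is responsible for separator handling when
// the result set is written out. For reference, here is a minimal RFC 4180-style field
// escape of the kind a CSV writer like this needs; a generic sketch, not the actual
// Skyline implementation.
static class CsvFieldSketch
{
    // Quote a field when it contains the separator, a quote, or a line break,
    // doubling any embedded quotes.
    public static string Escape(string field, char separator)
    {
        if (field.IndexOfAny(new[] { separator, '"', '\r', '\n' }) < 0)
            return field;
        return "\"" + field.Replace("\"", "\"\"") + "\"";
    }
}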
// ReSharper restore UnusedMember.Local private bool Load(ILoadMonitor loader) { ProgressStatus status = new ProgressStatus(string.Format(Resources.BiblioSpecLibrary_Load_Loading__0__library, Path.GetFileName(FilePath))); loader.UpdateProgress(status); long lenRead = 0; // AdlerChecksum checksum = new AdlerChecksum(); try { // Use a buffered stream for initial read BufferedStream stream = new BufferedStream(CreateStream(loader), 32 * 1024); int countHeader = (int)LibHeaders.count * 4; byte[] libHeader = new byte[countHeader]; if (stream.Read(libHeader, 0, countHeader) != countHeader) { throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted); } lenRead += countHeader; // Check the first byte of the primary version number to determine // whether the format is little- or big-endian. Little-endian will // have the version number in this byte, while big-endian will have zero. if (libHeader[(int)LibHeaders.version1 * 4] == 0) { _bigEndian = true; } int numSpectra = GetInt32(libHeader, (int)LibHeaders.num_spectra); var dictLibrary = new Dictionary <LibKey, BiblioSpectrumInfo>(numSpectra); var setSequences = new HashSet <LibSeqKey>(); string revStr = string.Format("{0}.{1}", // Not L10N GetInt32(libHeader, (int)LibHeaders.version1), GetInt32(libHeader, (int)LibHeaders.version2)); Revision = float.Parse(revStr, CultureInfo.InvariantCulture); // checksum.MakeForBuff(libHeader, AdlerChecksum.ADLER_START); countHeader = (int)SpectrumHeaders.count * 4; byte[] specHeader = new byte[1024]; byte[] specSequence = new byte[1024]; for (int i = 0; i < numSpectra; i++) { int percent = i * 100 / numSpectra; if (status.PercentComplete != percent) { // Check for cancellation after each integer change in percent loaded. if (loader.IsCanceled) { loader.UpdateProgress(status.Cancel()); return(false); } // If not cancelled, update progress. loader.UpdateProgress(status = status.ChangePercentComplete(percent)); } // Read spectrum header int bytesRead = stream.Read(specHeader, 0, countHeader); if (bytesRead != countHeader) { throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted); } // If this is the first header, and the sequence length is zero, // then this is a Linux format library. Switch to Linux format, // and start over. if (i == 0 && GetInt32(specHeader, (int)SpectrumHeaders.seq_len) == 0) { _linuxFormat = true; stream.Seek(lenRead, SeekOrigin.Begin); // Re-read spectrum header countHeader = (int)SpectrumHeadersLinux.count * 4; bytesRead = stream.Read(specHeader, 0, countHeader); if (bytesRead != countHeader) { throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted); } } lenRead += bytesRead; // checksum.MakeForBuff(specHeader, checksum.ChecksumValue); int charge = GetInt32(specHeader, (int)SpectrumHeaders.charge); if (charge > TransitionGroup.MAX_PRECURSOR_CHARGE) { throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted); } int numPeaks = GetInt32(specHeader, (int)SpectrumHeaders.num_peaks); int seqLength = GetInt32(specHeader, (_linuxFormat ? (int)SpectrumHeadersLinux.seq_len : (int)SpectrumHeaders.seq_len)); int copies = GetInt32(specHeader, (_linuxFormat ? 
(int)SpectrumHeadersLinux.copies : (int)SpectrumHeaders.copies)); // Read sequence information int countSeq = (seqLength + 1) * 2; if (stream.Read(specSequence, 0, countSeq) != countSeq) { throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted); } lenRead += countSeq; // checksum.MakeForBuff(specSequence, checksum.ChecksumValue); // Store in dictionary if (IsUnmodified(specSequence, seqLength + 1, seqLength)) { // These libraries should not have duplicates, but just in case. // CONSIDER: Emit error about redundancy? // These legacy libraries assume [+57.0] modified Cysteine LibKey key = new LibKey(GetCModified(specSequence, ref seqLength), 0, seqLength, charge); if (!dictLibrary.ContainsKey(key)) { dictLibrary.Add(key, new BiblioSpectrumInfo((short)copies, (short)numPeaks, lenRead)); } setSequences.Add(new LibSeqKey(key)); } // Read over peaks int countPeaks = 2 * sizeof(Single) * numPeaks; stream.Seek(countPeaks, SeekOrigin.Current); // Skip spectrum lenRead += countPeaks; // checksum.MakeForBuff(specPeaks, checksum.ChecksumValue); } // Checksum = checksum.ChecksumValue; _dictLibrary = dictLibrary; _setSequences = setSequences; loader.UpdateProgress(status.Complete()); return(true); } catch (InvalidDataException x) { loader.UpdateProgress(status.ChangeErrorException(x)); return(false); } catch (IOException x) { loader.UpdateProgress(status.ChangeErrorException(x)); return(false); } catch (Exception x) { x = new Exception(string.Format(Resources.BiblioSpecLibrary_Load_Failed_loading_library__0__, FilePath), x); loader.UpdateProgress(status.ChangeErrorException(x)); return(false); } finally { if (ReadStream != null) { // Close the read stream to ensure we never leak it. // This only costs one extra open, the first time the // active document tries to read. try { ReadStream.CloseStream(); } catch (IOException) {} } } }
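// The loader above decides between little- and big-endian layouts by checking the first
// byte of the version field, then routes all header reads through GetInt32. That helper is
// not included in the excerpt; a self-contained sketch consistent with its call sites
// (field index scaled by 4 bytes) might look like this.
static class EndianSketch
{
    // Read the 32-bit little- or big-endian integer stored at field index 'index'.
    public static int GetInt32(byte[] bytes, int index, bool bigEndian)
    {
        int i = index * 4;
        return bigEndian
            ? (bytes[i] << 24) | (bytes[i + 1] << 16) | (bytes[i + 2] << 8) | bytes[i + 3]
            : bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24);
    }
}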
public static void AddSpectra(MidasLibSpec libSpec, MsDataFilePath[] resultsFiles, SrmDocument doc, ILoadMonitor monitor, out List <MsDataFilePath> failedFiles) { // Get spectra from results files var newSpectra = new List <DbSpectrum>(); var progress = new ProgressStatus(string.Empty).ChangeMessage(Resources.MidasLibrary_AddSpectra_Reading_MIDAS_spectra); const int percentResultsFiles = 80; failedFiles = new List <MsDataFilePath>(); for (var i = 0; i < resultsFiles.Length; i++) { var resultsFile = resultsFiles[i]; try { monitor.UpdateProgress(progress.ChangePercentComplete(i * percentResultsFiles / resultsFiles.Length)); var filePath = resultsFile.GetFilePath(); if (File.Exists(filePath)) { var sampleIndex = resultsFile.GetSampleIndex(); using (var msd = new MsDataFileImpl(filePath, sampleIndex == -1 ? 0 : sampleIndex, resultsFile.GetLockMassParameters(), requireVendorCentroidedMS2: true)) { if (ChromatogramDataProvider.HasChromatogramData(msd) && SpectraChromDataProvider.HasSpectrumData(msd)) { var chromPrecursors = ReadChromPrecursorsFromMsd(msd, monitor).ToList(); newSpectra.AddRange(ReadDbSpectraFromMsd(msd, monitor)); MatchSpectraToChrom(newSpectra, chromPrecursors, monitor); } } MatchSpectraToPeptides(newSpectra, doc, monitor); } else { failedFiles.Add(resultsFile); } } catch (Exception x) { monitor.UpdateProgress(progress.ChangeErrorException(x)); failedFiles.Add(resultsFile); } if (monitor.IsCanceled) { monitor.UpdateProgress(progress.Cancel()); return; } } if (!newSpectra.Any()) { monitor.UpdateProgress(progress.Complete()); return; } progress = progress.ChangePercentComplete(percentResultsFiles); monitor.UpdateProgress(progress); // Add spectra to library var midasLib = !File.Exists(libSpec.FilePath) ? Create(libSpec) : Load(libSpec, monitor); if (midasLib == null) { monitor.UpdateProgress(progress.ChangeErrorException(new Exception(Resources.MidasLibrary_AddSpectra_Error_loading_MIDAS_library_for_adding_spectra_))); return; } progress = progress.ChangeMessage(Resources.MidasLibrary_AddSpectra_Adding_spectra_to_MIDAS_library); monitor.UpdateProgress(progress); var results = new Dictionary <string, DbResultsFile>(); if (midasLib._spectra != null) { foreach (var kvp in midasLib._spectra) { results[kvp.Key.FilePath] = kvp.Key; } } using (var sessionFactory = SessionFactoryFactory.CreateSessionFactory(libSpec.FilePath, typeof(MidasLibrary), false)) using (var session = new SessionWithLock(sessionFactory.OpenSession(), new ReaderWriterLock(), true)) using (var transaction = session.BeginTransaction()) { for (var i = 0; i < newSpectra.Count; i++) { if (monitor.IsCanceled) { transaction.Rollback(); monitor.UpdateProgress(progress.Cancel()); return; } var spectrum = newSpectra[i]; monitor.UpdateProgress(progress.ChangePercentComplete(percentResultsFiles + (int)((100.0 - percentResultsFiles) * i / newSpectra.Count))); DbResultsFile resultsFile; if (!results.TryGetValue(spectrum.ResultsFile.FilePath, out resultsFile)) { resultsFile = new DbResultsFile(spectrum.ResultsFile) { Id = null }; results[spectrum.ResultsFile.FilePath] = resultsFile; session.SaveOrUpdate(resultsFile); } else if (midasLib._spectra != null) { List <DbSpectrum> existingSpectra; if (midasLib._spectra.TryGetValue(resultsFile, out existingSpectra) && existingSpectra.Any(x => Equals(x.ResultsFile.FilePath, spectrum.ResultsFile.FilePath) && Equals(x.PrecursorMz, spectrum.PrecursorMz) && Equals(x.RetentionTime, spectrum.RetentionTime))) { // This spectrum already exists in the library continue; } } var spectrumNewDisconnected = new 
DbSpectrum(spectrum) { Id = null, ResultsFile = resultsFile }; session.SaveOrUpdate(spectrumNewDisconnected); } transaction.Commit(); monitor.UpdateProgress(progress.Complete()); } }
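// AddSpectra splits its progress bar between two phases: the first 80 percent for reading
// results files, the remainder for writing spectra into the library (the write-phase percent
// was clamped to that scheme above, where the original could exceed 100). The mapping
// arithmetic generalizes to a one-line helper; a hypothetical sketch, not part of MidasLibrary.
static class PhasePercentSketch
{
    // Map item i of count within a phase spanning [phaseStart, phaseEnd] percent
    // onto the overall 0-100 scale.
    public static int Overall(int i, int count, int phaseStart, int phaseEnd)
    {
        return phaseStart + (phaseEnd - phaseStart) * i / count;
    }
    // Usage in the method above: Overall(i, resultsFiles.Length, 0, 80) while reading,
    // then Overall(i, newSpectra.Count, 80, 100) while writing.
}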
private bool Load(IProgressMonitor monitor) { _spectra = null; if (FilePath == null) { return(false); } var info = new FileInfo(FilePath); if (!info.Exists || info.Length == 0) { return(false); } var progress = new ProgressStatus(string.Empty).ChangeMessage(Resources.MidasLibrary_Load_Loading_MIDAS_library); monitor.UpdateProgress(progress); var spectra = new Dictionary <DbResultsFile, List <DbSpectrum> >(); try { using (var sessionFactory = SessionFactoryFactory.CreateSessionFactory(FilePath, typeof(MidasLibrary), false)) using (var session = new SessionWithLock(sessionFactory.OpenSession(), new ReaderWriterLock(), false)) { var libInfo = session.CreateCriteria(typeof(DbLibInfo)).List <DbLibInfo>(); if (libInfo.Count != 1) { throw new Exception(Resources.MidasLibrary_Load_Error_reading_LibInfo_from_MIDAS_library); } SchemaVersion = libInfo[0].SchemaVersion; LibraryGuid = libInfo[0].Guid; var readSpectra = session.CreateCriteria(typeof(DbSpectrum)).List <DbSpectrum>(); progress = progress.ChangeSegments(0, readSpectra.Count); foreach (var spectrum in readSpectra) { if (monitor.IsCanceled) { monitor.UpdateProgress(progress.Cancel()); return(false); } progress = progress.NextSegment(); monitor.UpdateProgress(progress); List <DbSpectrum> list; if (!spectra.TryGetValue(spectrum.ResultsFile, out list)) { list = new List <DbSpectrum>(); spectra[spectrum.ResultsFile] = list; } list.Add(spectrum); } } } catch { monitor.UpdateProgress(progress.Cancel()); return(false); } _spectra = spectra; monitor.UpdateProgress(progress.Complete()); return(true); }
private SrmDocument LookupProteinMetadata(SrmDocument docOrig, IProgressMonitor progressMonitor) { lock (_processedNodes) { // Check to make sure this operation was not canceled while this thread was // waiting to acquire the lock. This also cleans up pending work. if (progressMonitor.IsCanceled) { return(null); } IProgressStatus progressStatus = new ProgressStatus(Resources.ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details); int nResolved = 0; int nUnresolved = docOrig.PeptideGroups.Count(pg => pg.ProteinMetadata.NeedsSearch()); if ((nUnresolved > 0) && !docOrig.Settings.PeptideSettings.BackgroundProteome.IsNone) { // Do a quick check to see if background proteome already has the info if (!docOrig.Settings.PeptideSettings.BackgroundProteome.NeedsProteinMetadataSearch) { try { using (var proteomeDb = docOrig.Settings.PeptideSettings.BackgroundProteome.OpenProteomeDb()) { foreach (PeptideGroupDocNode nodePepGroup in docOrig.PeptideGroups) { if (_processedNodes.ContainsKey(nodePepGroup.Id.GlobalIndex)) { // We did this before we were interrupted progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved)); } else if (nodePepGroup.ProteinMetadata.NeedsSearch()) { var proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.Name); if ((proteinMetadata == null) && !Equals(nodePepGroup.Name, nodePepGroup.OriginalName)) { proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.OriginalName); // Original name might hit } if ((proteinMetadata == null) && !String.IsNullOrEmpty(nodePepGroup.ProteinMetadata.Accession)) { proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.ProteinMetadata.Accession); // Parsed accession might hit } if ((proteinMetadata != null) && !proteinMetadata.NeedsSearch()) { // Background proteome has already resolved this _processedNodes.Add(nodePepGroup.Id.GlobalIndex, proteinMetadata); progressMonitor.UpdateProgress( progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved)); } } if (progressMonitor.IsCanceled) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } } } } // ReSharper disable once EmptyGeneralCatchClause catch { // The protDB file is busy, or some other issue - just go directly to web } } } if (nResolved != nUnresolved) { try { // Now go to the web for more protein metadata (or pretend to, depending on WebEnabledFastaImporter.DefaultWebAccessMode) var docNodesWithUnresolvedProteinMetadata = new Dictionary <ProteinSearchInfo, PeptideGroupDocNode>(); var proteinsToSearch = new List <ProteinSearchInfo>(); foreach (PeptideGroupDocNode node in docOrig.PeptideGroups) { if (node.ProteinMetadata.NeedsSearch() && !_processedNodes.ContainsKey(node.Id.GlobalIndex)) // Did we already process this? 
{ var proteinMetadata = node.ProteinMetadata; if (proteinMetadata.WebSearchInfo.IsEmpty()) // Never even been hit with regex { // Use Regexes to get some metadata, and a search term var parsedProteinMetaData = FastaImporter.ParseProteinMetaData(proteinMetadata); if ((parsedProteinMetaData == null) || Equals(parsedProteinMetaData.Merge(proteinMetadata), proteinMetadata.SetWebSearchCompleted())) { // That didn't parse well enough to make a search term, or didn't add any new info - just set it as searched so we don't keep trying _processedNodes.Add(node.Id.GlobalIndex, proteinMetadata.SetWebSearchCompleted()); if (progressMonitor.IsCanceled) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved)); proteinMetadata = null; // No search to be done } else { proteinMetadata = proteinMetadata.Merge(parsedProteinMetaData); // Fill in any gaps with parsed info } } if (proteinMetadata != null) { // We note the sequence length because it's useful in disambiguating search results proteinsToSearch.Add(new ProteinSearchInfo(new DbProteinName(null, proteinMetadata), node.PeptideGroup.Sequence == null ? 0 : node.PeptideGroup.Sequence.Length)); docNodesWithUnresolvedProteinMetadata.Add(proteinsToSearch.Last(), node); } } } if (progressMonitor.IsCanceled) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved / nUnresolved)); // Now we actually hit the internet if (proteinsToSearch.Any()) { foreach (var result in FastaImporter.DoWebserviceLookup(proteinsToSearch, progressMonitor, false)) // Resolve them all, now { Debug.Assert(!result.GetProteinMetadata().NeedsSearch()); _processedNodes.Add(docNodesWithUnresolvedProteinMetadata[result].Id.GlobalIndex, result.GetProteinMetadata()); if (progressMonitor.IsCanceled) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved)); } } } catch (OperationCanceledException) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } } // And finally write back to the document var listProteins = new List <PeptideGroupDocNode>(); foreach (PeptideGroupDocNode node in docOrig.MoleculeGroups) { if (_processedNodes.ContainsKey(node.Id.GlobalIndex)) { listProteins.Add(node.ChangeProteinMetadata(_processedNodes[node.Id.GlobalIndex])); } else { listProteins.Add(node); } } var docNew = docOrig.ChangeChildrenChecked(listProteins.Cast <DocNode>().ToArray()); progressMonitor.UpdateProgress(progressStatus.Complete()); return((SrmDocument)docNew); } }
private BackgroundProteome Load(IDocumentContainer container, PeptideSettings settings, SrmDocument docCurrent, bool isBackgroundLoad) { // Only allow one background proteome to load at a time. This can // get tricky, if the user performs an undo and then a redo across // a change in background proteome. // Our only priority is accessing web services to add missing protein metadata. // There may also be a load initiated by the Peptide Settings dialog as a foreground task; // it takes priority over the background task. lock (_lockLoadBackgroundProteome) { BackgroundProteome originalBackgroundProteome = settings.BackgroundProteome; BackgroundProteome validatedBackgroundProtome = originalBackgroundProteome.DatabaseValidated ? originalBackgroundProteome : new BackgroundProteome(originalBackgroundProteome.BackgroundProteomeSpec); if (IsNotLoadedExplained(settings, validatedBackgroundProtome, true) == null) { // protein metadata is resolved CompleteProcessing(container, validatedBackgroundProtome); Helpers.AssignIfEquals(ref validatedBackgroundProtome, originalBackgroundProteome); return(validatedBackgroundProtome); // No change needed } // we are here to resolve the protein metadata string name = originalBackgroundProteome.Name; IProgressStatus progressStatus = new ProgressStatus(string.Format(Resources.BackgroundProteomeManager_LoadBackground_Resolving_protein_details_for__0__proteome, name)); try { // The transaction commit for writing the digestion info can be very lengthy; avoid lock timeouts // by doing that work in a tempfile that no other thread knows about using (FileSaver fs = new FileSaver(originalBackgroundProteome.DatabasePath, StreamManager)) { File.Copy(originalBackgroundProteome.DatabasePath, fs.SafeName, true); var digestHelper = new DigestHelper(this, container, docCurrent, name, fs.SafeName, true); bool success = digestHelper.LookupProteinMetadata(ref progressStatus); if (digestHelper.IsCanceled || !success) { // Processing was canceled if (docCurrent != null) { EndProcessing(docCurrent); } UpdateProgress(progressStatus.Cancel()); return(null); } using (var proteomeDb = ProteomeDb.OpenProteomeDb(originalBackgroundProteome.DatabasePath)) { proteomeDb.DatabaseLock.AcquireWriterLock(int.MaxValue); // Wait for any existing readers to complete, prevent any new ones try { if (File.GetLastWriteTime(fs.RealName) <= File.GetLastWriteTime(fs.SafeName)) // Don't overwrite if foreground task has already updated { proteomeDb.CloseDbConnection(); // Get rid of any file handles if (!fs.Commit()) { if (docCurrent != null) { EndProcessing(docCurrent); } throw new IOException(string.Format(Resources.BackgroundProteomeManager_LoadBackground_Unable_to_rename_temporary_file_to__0__, fs.RealName)); } } } finally { proteomeDb.DatabaseLock.ReleaseWriterLock(); } } var updatedProteome = new BackgroundProteome(originalBackgroundProteome); using (var proteomeDb = originalBackgroundProteome.OpenProteomeDb()) { proteomeDb.AnalyzeDb(); // Now it's safe to start this potentially lengthy indexing operation } CompleteProcessing(container, updatedProteome); UpdateProgress(progressStatus.Complete()); return(updatedProteome); } } catch (Exception x) { var message = new StringBuilder(); message.AppendLine( string.Format(Resources.BackgroundProteomeManager_LoadBackground_Failed_updating_background_proteome__0__, name)); message.Append(x.Message); UpdateProgress(progressStatus.ChangeErrorException(new IOException(message.ToString(), x))); return(null); } } }
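// The FileSaver sequence above (copy the database to SafeName, run the lengthy transaction
// against the copy, then Commit to move it over RealName under a writer lock) is a classic
// write-to-temp-then-swap. A dependency-free sketch of the same idea using only System.IO;
// the real FileSaver does more (cleanup on dispose, stream manager integration).
using System;
using System.IO;

static class SafeSwapSketch
{
    public static void UpdateInPlace(string path, Action<string> lengthyWork)
    {
        string temp = path + ".tmp"; // hypothetical temp naming
        File.Copy(path, temp, true);
        try
        {
            lengthyWork(temp);              // e.g. the metadata lookup transaction
            File.Replace(temp, path, null); // swap into place on the same volume
        }
        finally
        {
            if (File.Exists(temp))
                File.Delete(temp);          // clean up if the work or the swap failed
        }
    }
}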
private SrmDocument LookupProteinMetadata(SrmDocument docOrig, IProgressMonitor progressMonitor) { lock (_processedNodes) { // Check to make sure this operation was not canceled while this thread was // waiting to acquire the lock. This also cleans up pending work. if (progressMonitor.IsCanceled) { return(null); } IProgressStatus progressStatus = new ProgressStatus(Resources.ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details); int nResolved = 0; int nUnresolved = docOrig.PeptideGroups.Count(pg => pg.ProteinMetadata.NeedsSearch()); if ((nUnresolved > 0) && !docOrig.Settings.PeptideSettings.BackgroundProteome.IsNone) { // Do a quick check to see if background proteome already has the info if (!docOrig.Settings.PeptideSettings.BackgroundProteome.NeedsProteinMetadataSearch) { try { using (var proteomeDb = docOrig.Settings.PeptideSettings.BackgroundProteome.OpenProteomeDb()) { foreach (PeptideGroupDocNode nodePepGroup in docOrig.PeptideGroups) { if (_processedNodes.ContainsKey(nodePepGroup.Id.GlobalIndex)) { // We did this before we were interrupted nResolved++; } else if (nodePepGroup.ProteinMetadata.NeedsSearch()) { var proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.Name); if ((proteinMetadata == null) && !Equals(nodePepGroup.Name, nodePepGroup.OriginalName)) { proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.OriginalName); // Original name might hit } if ((proteinMetadata == null) && !String.IsNullOrEmpty(nodePepGroup.ProteinMetadata.Accession)) { proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.ProteinMetadata.Accession); // Parsed accession might hit } if ((proteinMetadata != null) && !proteinMetadata.NeedsSearch()) { // Background proteome has already resolved this _processedNodes.Add(nodePepGroup.Id.GlobalIndex, proteinMetadata); nResolved++; } } if (!UpdatePrecentComplete(progressMonitor, 100 * nResolved / nUnresolved, ref progressStatus)) { return(null); } } } } // ReSharper disable once EmptyGeneralCatchClause catch { // The protDB file is busy, or some other issue - just go directly to web } } } if (nResolved != nUnresolved) { try { // Now go to the web for more protein metadata (or pretend to, depending on WebEnabledFastaImporter.DefaultWebAccessMode) var docNodesWithUnresolvedProteinMetadata = new Dictionary <ProteinSearchInfo, PeptideGroupDocNode>(); var proteinsToSearch = new List <ProteinSearchInfo>(); foreach (PeptideGroupDocNode node in docOrig.PeptideGroups) { if (node.ProteinMetadata.NeedsSearch() && !_processedNodes.ContainsKey(node.Id.GlobalIndex)) // Did we already process this? 
{ var proteinMetadata = node.ProteinMetadata; if (proteinMetadata.WebSearchInfo.IsEmpty()) // Never even been hit with regex { // Use Regexes to get some metadata, and a search term var parsedProteinMetaData = FastaImporter.ParseProteinMetaData(proteinMetadata); if ((parsedProteinMetaData == null) || Equals(parsedProteinMetaData.Merge(proteinMetadata), proteinMetadata.SetWebSearchCompleted())) { // That didn't parse well enough to make a search term, or didn't add any new info - just set it as searched so we don't keep trying _processedNodes.Add(node.Id.GlobalIndex, proteinMetadata.SetWebSearchCompleted()); if (!UpdatePrecentComplete(progressMonitor, 100 * nResolved++ / nUnresolved, ref progressStatus)) { return(null); } proteinMetadata = null; // No search to be done } else { proteinMetadata = proteinMetadata.Merge(parsedProteinMetaData); // Fill in any gaps with parsed info } } if (proteinMetadata != null) { // We note the sequence length because it's useful in disambiguating search results proteinsToSearch.Add(new ProteinSearchInfo(new DbProteinName(null, proteinMetadata), node.PeptideGroup.Sequence == null ? 0 : node.PeptideGroup.Sequence.Length)); docNodesWithUnresolvedProteinMetadata.Add(proteinsToSearch.Last(), node); } } } if (!UpdatePrecentComplete(progressMonitor, 100 * nResolved / nUnresolved, ref progressStatus)) { return(null); } // Now we actually hit the internet if (proteinsToSearch.Any()) { foreach (var result in FastaImporter.DoWebserviceLookup(proteinsToSearch, progressMonitor, false)) // Resolve them all, now { Assume.IsTrue(!result.GetProteinMetadata().NeedsSearch()); _processedNodes.Add(docNodesWithUnresolvedProteinMetadata[result].Id.GlobalIndex, result.GetProteinMetadata()); if (!UpdatePrecentComplete(progressMonitor, 100 * nResolved++ / nUnresolved, ref progressStatus)) { return(null); } } } } catch (OperationCanceledException) { progressMonitor.UpdateProgress(progressStatus.Cancel()); return(null); } } // And finally write back to the document var listProteins = new List <PeptideGroupDocNode>(); foreach (PeptideGroupDocNode node in docOrig.MoleculeGroups) { if (_processedNodes.TryGetValue(node.Id.GlobalIndex, out var proteinMetadata)) { // Compare existing and proposed metadata, ignoring name difference in case user changed // the name manually in the Targets tree while a background metadata lookup was going on, and // ignoring web search details since the existing node probably hasn't any yet. // // This fixes issue https://skyline.ms/announcements/home/support/thread.view?rowId=49107 in which: // the user pasted a protein sequence into the Targets tree // then tried to type in a name to replace the default assigned name "sequence1" // after a few seconds the displayed name reverted to "sequence1" upon background protein metadata search completion // N.B. as this is timing dependent, and our automated tests are mandated to not require internet // access, writing a test for this fix (i.e. adding timings to the fake web lookup system) proved to // be tricky and finally deemed not worth the effort for this fairly obscure problem. if (!Equals(node.ProteinMetadata.Name, proteinMetadata.Name) && // Different name Equals(node.ProteinMetadata.ChangeName(proteinMetadata.Name).ClearWebSearchInfo(), // But otherwise identical proteinMetadata.ClearWebSearchInfo())) { // Leave (apparently user-renamed) node alone, and note the web search that was actually used. 
listProteins.Add(node.ChangeProteinMetadata(node.ProteinMetadata.ChangeWebSearchInfo(proteinMetadata.WebSearchInfo))); } else { // Update the protein metadata for this node, if any listProteins.Add(node.ChangeProteinMetadata(proteinMetadata)); } } else { // Not yet processed listProteins.Add(node); } } var docNew = docOrig.ChangeChildrenChecked(listProteins.Cast <DocNode>().ToArray()); progressMonitor.UpdateProgress(progressStatus.Complete()); return((SrmDocument)docNew); } }
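// Both versions of LookupProteinMetadata in this section funnel progress through an
// UpdatePrecentComplete helper (so spelled at its call sites) that the excerpt never
// defines. Judging from those call sites, which treat a false return as cancellation and
// pass the status by ref, a reconstruction would be roughly the following; this is a
// hypothetical sketch using only members seen above, not the actual helper.
private static bool UpdatePrecentComplete(IProgressMonitor progressMonitor, int percentComplete,
    ref IProgressStatus progressStatus)
{
    if (progressMonitor.IsCanceled)
    {
        progressMonitor.UpdateProgress(progressStatus.Cancel());
        return false; // callers respond by returning null
    }
    progressStatus = progressStatus.ChangePercentComplete(percentComplete);
    progressMonitor.UpdateProgress(progressStatus);
    return true;
}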
// ReSharper restore NonLocalizedString public static List <string> ConvertPilotFiles(IList <string> inputFiles, IProgressMonitor progress, ProgressStatus status) { string groupConverterExePath = null; var inputFilesPilotConverted = new List <string>(); for (int index = 0; index < inputFiles.Count; index++) { string inputFile = inputFiles[index]; if (!inputFile.EndsWith(BiblioSpecLiteBuilder.EXT_PILOT)) { inputFilesPilotConverted.Add(inputFile); continue; } string outputFile = Path.ChangeExtension(inputFile, BiblioSpecLiteBuilder.EXT_PILOT_XML); // Avoid re-converting files that have already been converted if (File.Exists(outputFile)) { // Avoid duplication, in case the user accidentally adds both .group and .group.xml files // for the same results if (!inputFiles.Contains(outputFile)) { inputFilesPilotConverted.Add(outputFile); } continue; } string message = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Converting__0__to_xml, Path.GetFileName(inputFile)); int percent = index * 100 / inputFiles.Count; progress.UpdateProgress(status = status.ChangeMessage(message).ChangePercentComplete(percent)); if (groupConverterExePath == null) { var key = Registry.LocalMachine.OpenSubKey(KEY_PROTEIN_PILOT, false); if (key != null) { string proteinPilotCommandWithArgs = (string)key.GetValue(string.Empty); var proteinPilotCommandWithArgsSplit = proteinPilotCommandWithArgs.Split(new[] { "\" \"" }, StringSplitOptions.RemoveEmptyEntries); // Remove " "%1" // Not L10N string path = Path.GetDirectoryName(proteinPilotCommandWithArgsSplit[0].Trim(new[] { '\\', '\"' })); // Remove preceding " if (path != null) { var groupFileExtractorPath = Path.Combine(path, EXE_GROUP_FILE_EXTRACTOR); if (File.Exists(groupFileExtractorPath)) { groupConverterExePath = groupFileExtractorPath; } else { var group2XmlPath = Path.Combine(path, EXE_GROUP2_XML); if (File.Exists(group2XmlPath)) { groupConverterExePath = group2XmlPath; } else { string errorMessage = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Unable_to_find__0__or__1__in_directory__2____Please_reinstall_ProteinPilot_software_to_be_able_to_handle__group_files_, EXE_GROUP_FILE_EXTRACTOR, EXE_GROUP2_XML, path); throw new IOException(errorMessage); } } } } if (groupConverterExePath == null) { throw new IOException(Resources.VendorIssueHelper_ConvertPilotFiles_ProteinPilot_software__trial_or_full_version__must_be_installed_to_convert___group__files_to_compatible___group_xml__files_); } } // run group2xml // ReSharper disable NonLocalizedString var argv = new[] { "XML", "\"" + inputFile + "\"", "\"" + outputFile + "\"" }; // ReSharper restore NonLocalizedString var psi = new ProcessStartInfo(groupConverterExePath) { CreateNoWindow = true, UseShellExecute = false, // Common directory includes the directory separator WorkingDirectory = Path.GetDirectoryName(groupConverterExePath) ?? 
string.Empty, Arguments = string.Join(" ", argv.ToArray()), // Not L10N RedirectStandardError = true, RedirectStandardOutput = true, }; var sbOut = new StringBuilder(); var proc = new Process { StartInfo = psi }; proc.Start(); var reader = new ProcessStreamReader(proc); string line; while ((line = reader.ReadLine()) != null) { if (progress.IsCanceled) { proc.Kill(); throw new LoadCanceledException(status.Cancel()); } sbOut.AppendLine(line); } while (!proc.WaitForExit(200)) { if (progress.IsCanceled) { proc.Kill(); return(inputFilesPilotConverted); } } if (proc.ExitCode != 0) { throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Failure_attempting_to_convert_file__0__to__group_xml_, inputFile), string.Empty, sbOut.ToString())); } inputFilesPilotConverted.Add(outputFile); } progress.UpdateProgress(status.ChangePercentComplete(100)); return(inputFilesPilotConverted); }
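// ConvertPilotFiles checks for cancellation twice: while draining converter output line by
// line, and again while polling WaitForExit. A compact stand-alone version of that loop on
// top of System.Diagnostics.Process (ProcessStreamReader is project-specific and not shown):
using System;
using System.Diagnostics;
using System.Text;

static class ProcessRunSketch
{
    public static string RunConverter(ProcessStartInfo psi, Func<bool> isCanceled)
    {
        psi.UseShellExecute = false;
        psi.RedirectStandardOutput = true;
        var sbOut = new StringBuilder();
        using (var proc = Process.Start(psi) ?? throw new InvalidOperationException(@"Failed to start process"))
        {
            string line;
            // NB: if stderr is also redirected, it must be drained concurrently (as
            // ProcessStreamReader does by interleaving both streams) or a full stderr
            // pipe can deadlock the child process.
            while ((line = proc.StandardOutput.ReadLine()) != null)
            {
                if (isCanceled()) { proc.Kill(); throw new OperationCanceledException(); }
                sbOut.AppendLine(line);
            }
            while (!proc.WaitForExit(200))
            {
                if (isCanceled()) { proc.Kill(); throw new OperationCanceledException(); }
            }
            return sbOut.ToString();
        }
    }
}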
private static void ConvertBrukerToMzml(string filePathBruker, string outputPath, IProgressMonitor monitor, ProgressStatus status) { // We use CompassXport, if it is installed, to convert a Bruker raw file to mzML. This solves two // issues: the Bruker reader can't be called on any thread other than the main thread, and there // is no 64-bit version of the reader. So we start CompassXport in its own 32-bit process, // and use it to convert the raw data to mzML in a temporary file, which we read back afterwards. var key = Registry.LocalMachine.OpenSubKey(KEY_COMPASSXPORT, false); string compassXportExe = (key != null) ? (string)key.GetValue(string.Empty) : null; if (compassXportExe == null) { throw new IOException(Resources.VendorIssueHelper_ConvertBrukerToMzml_CompassXport_software_must_be_installed_to_import_Bruker_raw_data_files_); } // CompassXport arguments // ReSharper disable NonLocalizedString var argv = new[] { "-a \"" + filePathBruker + "\"", // input file (directory) "-o \"" + outputPath + "\"", // output file (directory) "-mode 2", // mode 2 (mzML) "-raw 0" // export line spectra (profile data is HUGE and SLOW!) }; // ReSharper restore NonLocalizedString // Start CompassXport in its own process. var psi = new ProcessStartInfo(compassXportExe) { CreateNoWindow = true, UseShellExecute = false, // Common directory includes the directory separator WorkingDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty, Arguments = string.Join(" ", argv), // Not L10N RedirectStandardError = true, RedirectStandardOutput = true, }; var proc = new Process { StartInfo = psi }; proc.Start(); // CompassXport starts by calculating a hash of the input file. This takes a long time, and there is // no intermediate output during this time. So we set the progress bar some fraction of the way and // let it sit there and animate while we wait for the start of spectra processing. const int hashPercent = 25; // percentage of import time allocated to calculating the input file hash int spectrumCount = 0; var sbOut = new StringBuilder(); var reader = new ProcessStreamReader(proc); string line; while ((line = reader.ReadLine()) != null) { if (monitor.IsCanceled) { proc.Kill(); throw new LoadCanceledException(status.Cancel()); } sbOut.AppendLine(line); line = line.Trim(); // The main part of conversion starts with the hash calculation. if (line.StartsWith("Calculating hash")) // Not L10N { status = status.ChangeMessage(Resources.VendorIssueHelper_ConvertBrukerToMzml_Calculating_hash_of_input_file) .ChangePercentComplete(hashPercent); monitor.UpdateProgress(status); continue; } // Determine how many spectra will be converted so we can track progress. var match = Regex.Match(line, @"Converting (\d+) spectra"); // Not L10N if (match.Success) { spectrumCount = int.Parse(match.Groups[1].Value); continue; } // Update progress as each batch of spectra is converted. 
match = Regex.Match(line, @"Spectrum \d+ - (\d+)"); // Not L10N if (match.Success) { var spectrumEnd = int.Parse(match.Groups[1].Value); var percentComplete = hashPercent + (100 - hashPercent) * spectrumEnd / spectrumCount; status = status.ChangeMessage(line).ChangePercentComplete(percentComplete); monitor.UpdateProgress(status); } } while (!proc.WaitForExit(200)) { if (monitor.IsCanceled) { proc.Kill(); throw new LoadCanceledException(status.Cancel()); } } if (proc.ExitCode != 0) { throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertBrukerToMzml_Failure_attempting_to_convert__0__to_mzML_using_CompassXport_, filePathBruker), string.Empty, sbOut.ToString())); } }
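// The Bruker conversion above scrapes progress from CompassXport's console output with two
// regular expressions. A trimmed, self-contained version of that parsing (the patterns are
// copied from the code above) that returns an overall percent, or -1 for lines carrying no
// progress information:
using System.Text.RegularExpressions;

static class CompassXportProgressSketch
{
    private static readonly Regex ConvertingCount = new Regex(@"Converting (\d+) spectra");
    private static readonly Regex SpectrumBatch = new Regex(@"Spectrum \d+ - (\d+)");
    private const int HashPercent = 25; // share of the bar reserved for the input-file hash

    public static int ParseLine(string line, ref int spectrumCount)
    {
        var match = ConvertingCount.Match(line);
        if (match.Success)
        {
            spectrumCount = int.Parse(match.Groups[1].Value);
            return HashPercent;
        }
        match = SpectrumBatch.Match(line);
        if (match.Success && spectrumCount > 0)
        {
            int spectrumEnd = int.Parse(match.Groups[1].Value);
            return HashPercent + (100 - HashPercent) * spectrumEnd / spectrumCount;
        }
        return -1;
    }
}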
protected override bool LoadBackground(IDocumentContainer container, SrmDocument document, SrmDocument docCurrent) { // Only allow one background proteome to load at a time. This can // get tricky, if the user performs an undo and then a redo across // a change in background proteome. // Our first priority is doing the digestions, the second is accessing web // services to add missing protein metadata. lock (_lockLoadBackgroundProteome) { BackgroundProteome originalBackgroundProteome = GetBackgroundProteome(docCurrent); // Check to see whether the Digestion already exists but has not been queried yet. BackgroundProteome backgroundProteomeWithDigestions = new BackgroundProteome(originalBackgroundProteome, true); if (IsNotLoadedExplained(docCurrent, backgroundProteomeWithDigestions, true) == null) { // digest is ready, and protein metadata is resolved CompleteProcessing(container, backgroundProteomeWithDigestions); return(true); } // are we here to do the digest, or to resolve the protein metadata? bool getMetadata = (IsNotLoadedExplained(docCurrent, backgroundProteomeWithDigestions, false) == null) && backgroundProteomeWithDigestions.NeedsProteinMetadataSearch; string name = originalBackgroundProteome.Name; ProgressStatus progressStatus = new ProgressStatus(string.Format(getMetadata?Resources.BackgroundProteomeManager_LoadBackground_Resolving_protein_details_for__0__proteome:Resources.BackgroundProteomeManager_LoadBackground_Digesting__0__proteome, name)); try { using (FileSaver fs = new FileSaver(originalBackgroundProteome.DatabasePath, StreamManager)) { File.Copy(originalBackgroundProteome.DatabasePath, fs.SafeName, true); var digestHelper = new DigestHelper(this, container, docCurrent, name, fs.SafeName, true); bool success; if (getMetadata) { success = digestHelper.LookupProteinMetadata(ref progressStatus); } else { success = (digestHelper.Digest(ref progressStatus) != null); } if (!success) { // Processing was canceled EndProcessing(docCurrent); UpdateProgress(progressStatus.Cancel()); return(false); } using (var proteomeDb = ProteomeDb.OpenProteomeDb(originalBackgroundProteome.DatabasePath)) { proteomeDb.DatabaseLock.AcquireWriterLock(int.MaxValue); try { if (!fs.Commit()) { EndProcessing(docCurrent); throw new IOException( string.Format( Resources .BackgroundProteomeManager_LoadBackground_Unable_to_rename_temporary_file_to__0__, fs.RealName)); } } finally { proteomeDb.DatabaseLock.ReleaseWriterLock(); } } CompleteProcessing(container, new BackgroundProteome(originalBackgroundProteome, true)); UpdateProgress(progressStatus.Complete()); return(true); } } catch (Exception x) { var message = new StringBuilder(); message.AppendLine( string.Format(Resources.BackgroundProteomeManager_LoadBackground_Failed_updating_background_proteome__0__, name)); message.Append(x.Message); UpdateProgress(progressStatus.ChangeErrorException(new IOException(message.ToString(), x))); return(false); } } }
private bool Load(ILoadMonitor loader, ProgressStatus status, bool cached) { try { int loadPercent = 100; if (!cached) { // Building the cache will take 95% of the load time. loadPercent = 5; status = status.ChangeMessage(string.Format(Resources.XHunterLibrary_Load_Building_binary_cache_for__0__library, Path.GetFileName(FilePath))); status = status.ChangePercentComplete(0); loader.UpdateProgress(status); if (!CreateCache(loader, status, 100 - loadPercent)) { return(false); } } status = status.ChangeMessage(string.Format(Resources.XHunterLibrary_Load_Loading__0__library, Path.GetFileName(FilePath))); loader.UpdateProgress(status); var sm = loader.StreamManager; using (Stream stream = sm.CreateStream(CachePath, FileMode.Open, true)) { // Read library header from the end of the cache int countHeader = (int)LibHeaders.count * sizeof(int); stream.Seek(-countHeader, SeekOrigin.End); byte[] libHeader = new byte[countHeader]; ReadComplete(stream, libHeader, countHeader); int version = GetInt32(libHeader, (int)LibHeaders.format_version); if (version != FORMAT_VERSION_CACHE) { return(false); } int countRevisionBytes = GetInt32(libHeader, (int)LibHeaders.revision_byte_count); int countIdBytes = GetInt32(libHeader, (int)LibHeaders.id_byte_count); stream.Seek(-countHeader - countRevisionBytes - countIdBytes, SeekOrigin.End); Revision = ReadString(stream, countRevisionBytes); Id = ReadString(stream, countIdBytes); int numSpectra = GetInt32(libHeader, (int)LibHeaders.num_spectra); var setSequences = new Dictionary <LibSeqKey, bool>(numSpectra); var libraryEntries = new XHunterSpectrumInfo[numSpectra]; // Seek to beginning of spectrum headers long locationHeaders = BitConverter.ToInt64(libHeader, ((int)LibHeaders.location_headers_lo) * sizeof(int)); stream.Seek(locationHeaders, SeekOrigin.Begin); byte[] specSequence = new byte[1024]; byte[] specHeader = new byte[1024]; countHeader = (int)SpectrumCacheHeader.count * 4; for (int i = 0; i < numSpectra; i++) { int percent = (100 - loadPercent) + (i * loadPercent / numSpectra); if (status.PercentComplete != percent) { // Check for cancellation after each integer change in percent loaded. if (loader.IsCanceled) { loader.UpdateProgress(status.Cancel()); return(false); } // If not cancelled, update progress. 
loader.UpdateProgress(status = status.ChangePercentComplete(percent)); } // Read spectrum header ReadComplete(stream, specHeader, countHeader); int seqKeyHash = GetInt32(specHeader, ((int)SpectrumCacheHeader.seq_key_hash)); int seqKeyLength = GetInt32(specHeader, ((int)SpectrumCacheHeader.seq_key_length)); int charge = GetInt32(specHeader, ((int)SpectrumCacheHeader.charge)); if (charge == 0 || charge > TransitionGroup.MAX_PRECURSOR_CHARGE) { throw new InvalidDataException(Resources.XHunterLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted); } float i2 = BitConverter.ToSingle(specHeader, ((int)SpectrumCacheHeader.i2) * 4); long location = BitConverter.ToInt64(specHeader, ((int)SpectrumCacheHeader.location_lo) * 4); int numPeaks = GetInt32(specHeader, ((int)SpectrumCacheHeader.num_peaks)); float expect = BitConverter.ToSingle(specHeader, ((int)SpectrumCacheHeader.expect) * 4); int seqLength = GetInt32(specHeader, (int)SpectrumCacheHeader.seq_len); // Read sequence information ReadComplete(stream, specSequence, seqLength); LibKey key = new LibKey(specSequence, 0, seqLength, charge); libraryEntries[i] = new XHunterSpectrumInfo(key, i2, expect, (short)numPeaks, location); if (seqKeyLength > 0) { LibSeqKey seqKey = new LibSeqKey(key, seqKeyHash, seqKeyLength); setSequences.Add(seqKey, true); } } // Checksum = checksum.ChecksumValue; _libraryEntries = libraryEntries; _setSequences = setSequences; loader.UpdateProgress(status.Complete()); // Create the stream from which the spectra will be read CreateStream(loader); } return(true); } catch (InvalidDataException x) { if (!cached) { loader.UpdateProgress(status.ChangeErrorException(x)); } return(false); } catch (IOException x) { if (!cached) { loader.UpdateProgress(status.ChangeErrorException(x)); } return(false); } catch (Exception x) { if (!cached) { x = new Exception(string.Format(Resources.XHunterLibrary_Load_Failed_loading_library__0__, FilePath), x); loader.UpdateProgress(status.ChangeErrorException(x)); } return(false); } finally { if (ReadStream != null) { // Close the read stream to ensure we never leak it. // This only costs one extra open, the first time the // active document tries to read. try { ReadStream.CloseStream(); } catch (IOException) { } } } }
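// The XHunter cache stores its header at the end of the file, so Load seeks backwards from
// SeekOrigin.End before reading anything else. A small stand-alone illustration of reading
// such an end-anchored trailer of little-endian ints (toy layout, not the real LibHeaders):
using System;
using System.IO;

static class TrailerSketch
{
    public static int[] ReadTrailer(Stream stream, int fieldCount)
    {
        int size = fieldCount * sizeof(int);
        stream.Seek(-size, SeekOrigin.End);
        var buffer = new byte[size];
        int read = 0;
        while (read < size) // Stream.Read may return short counts
        {
            int n = stream.Read(buffer, read, size - read);
            if (n == 0)
                throw new EndOfStreamException(@"Trailer truncated");
            read += n;
        }
        var fields = new int[fieldCount];
        for (int i = 0; i < fieldCount; i++)
            fields[i] = BitConverter.ToInt32(buffer, i * sizeof(int));
        return fields;
    }
}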
// ReSharper restore UnusedMember.Local private bool CreateCache(ILoadMonitor loader, ProgressStatus status, int percent) { var sm = loader.StreamManager; BufferedStream stream = new BufferedStream(CreateStream(loader), 32 * 1024); int version = 1; string id = string.Empty, revision = string.Empty; int size = ReadSize(stream); int i; if (size == 0) { version = 2; size = ReadSize(stream); const int countLibHeader = 256 - 8; byte[] libHeader = new byte[countLibHeader]; if (stream.Read(libHeader, 0, libHeader.Length) != libHeader.Length) { throw new InvalidDataException(Resources.XHunterLibrary_CreateCache_Data_truncation_in_library_header_File_may_be_corrupted); } for (i = 0; i < libHeader.Length; i++) { if (libHeader[i] == 0) { break; } } string header = Encoding.UTF8.GetString(libHeader, 0, i); Match match = REGEX_HEADER.Match(header); if (match.Success) { version = int.Parse(match.Groups[1].Value); id = match.Groups[2].Value; revision = match.Groups[3].Value; } } var setLibKeys = new Dictionary <LibKey, bool>(size); var setSequences = new Dictionary <LibSeqKey, bool>(size); var libraryEntries = new List <XHunterSpectrumInfo>(size); const int countHeader = ((int)SpectrumHeaders2.count) * sizeof(int); byte[] specHeader = new byte[1024]; byte[] specSequence = new byte[1024]; i = 0; while (stream.Read(specHeader, 0, countHeader) == countHeader) { int percentComplete = (i++ *percent / size); if (status.PercentComplete != percentComplete) { // Check for cancellation after each integer change in percent loaded. if (loader.IsCanceled) { loader.UpdateProgress(status.Cancel()); return(false); } // If not cancelled, update progress. loader.UpdateProgress(status = status.ChangePercentComplete(percentComplete)); } int charge = (version == 1 ? GetInt32(specHeader, ((int)SpectrumHeaders1.charge)) : GetInt32(specHeader, ((int)SpectrumHeaders2.charge))); float i2 = (version == 1 ? GetSingle(specHeader, ((int)SpectrumHeaders1.i2)) : GetSingle(specHeader, ((int)SpectrumHeaders2.i2))); int seqLength = (version == 1 ? GetInt32(specHeader, ((int)SpectrumHeaders1.seq_len)) : GetInt32(specHeader, ((int)SpectrumHeaders2.seq_len))); float expect = (version == 1 ? 0.001f : GetSingle(specHeader, (int)SpectrumHeaders2.expect)); // Read sequence information ReadComplete(stream, specSequence, seqLength); specSequence[seqLength] = 0; short numPeaks = (short)ReadSize(stream); // Save spectrum location long location = stream.Position; // Read over spectrum int countPeaks = (sizeof(byte) + sizeof(float)) * numPeaks; stream.Seek(countPeaks, SeekOrigin.Current); // Skip spectrum // Read modifications int numMods = ReadSize(stream); byte[] sequence = specSequence; if (numMods > 0) { StringBuilder sb = new StringBuilder(); ReadComplete(stream, specHeader, (4 + sizeof(double)) * numMods); int iLast = 0; double modTotal = 0; for (int j = 0; j < numMods; j++) { int iPos = GetInt32(specHeader, j * 3); double mod = BitConverter.ToDouble(specHeader, (j * 3 + 1) * 4); // X! Hunter allows multiple modifications on the same // residue. So, they need to be totaled, and assigned to a // single residue to allow them to match Skyline modification // settings. 
if (iPos > iLast) { if (modTotal != 0) { sb.Append(SequenceMassCalc.GetModDiffDescription(modTotal)); } sb.Append(Encoding.UTF8.GetString(specSequence, iLast, iPos - iLast)); modTotal = 0; } modTotal += mod; iLast = iPos; } if (modTotal != 0) { sb.Append(SequenceMassCalc.GetModDiffDescription(modTotal)); } sb.Append(Encoding.UTF8.GetString(specSequence, iLast, seqLength - iLast)); sequence = Encoding.UTF8.GetBytes(sb.ToString()); seqLength = sb.Length; } // Skip over homologs (list of protein IDs and start positions from a FASTA // file used to generate the library) int numHomologs = ReadSize(stream); for (int j = 0; j < numHomologs; j++) { stream.Seek(ReadSize(stream) + 4, SeekOrigin.Current); } // These libraries should not have duplicates, but just in case. // Apparently, GPM libraries do contain redundancies, as we found // when a revision lost this test. var key = new LibKey(sequence, 0, seqLength, charge); if (!setLibKeys.ContainsKey(key)) { setLibKeys.Add(key, true); libraryEntries.Add(new XHunterSpectrumInfo(key, i2, expect, numPeaks, location)); } } libraryEntries.Sort(CompareSpectrumInfo); using (FileSaver fs = new FileSaver(CachePath, sm)) using (Stream outStream = sm.CreateStream(fs.SafeName, FileMode.Create, true)) { foreach (var info in libraryEntries) { LibSeqKey seqKey = new LibSeqKey(info.Key); if (setSequences.ContainsKey(seqKey)) { outStream.Write(BitConverter.GetBytes(0), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(-1), 0, sizeof(int)); } else { // If it is unique, it will need to be added at cache load time. setSequences.Add(seqKey, true); outStream.Write(BitConverter.GetBytes(seqKey.GetHashCode()), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(seqKey.Length), 0, sizeof(int)); } outStream.Write(BitConverter.GetBytes(info.Key.Charge), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(info.ProcessedIntensity), 0, sizeof(float)); outStream.Write(BitConverter.GetBytes(info.Location), 0, sizeof(long)); outStream.Write(BitConverter.GetBytes(info.NumPeaks), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(info.Expect), 0, sizeof(float)); info.Key.WriteSequence(outStream); } byte[] revisionBytes = Encoding.UTF8.GetBytes(revision); outStream.Write(revisionBytes, 0, revisionBytes.Length); byte[] idBytes = Encoding.UTF8.GetBytes(id); outStream.Write(idBytes, 0, idBytes.Length); outStream.Write(BitConverter.GetBytes(revisionBytes.Length), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(idBytes.Length), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(FORMAT_VERSION_CACHE), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes(libraryEntries.Count), 0, sizeof(int)); outStream.Write(BitConverter.GetBytes((long)0), 0, sizeof(long)); sm.Finish(outStream); fs.Commit(); sm.SetCache(FilePath, CachePath); } loader.UpdateProgress(status.Complete()); return(true); }
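// CreateCache finishes by writing the trailer that Load (shown earlier) reads back from the
// end of the file, so the two orderings must stay in lockstep. The layout, as written above
// (field names chosen to mirror the reads in Load; the excerpt does not show the LibHeaders
// enum itself):
//
//   revision bytes         UTF-8, variable length
//   id bytes               UTF-8, variable length
//   revision_byte_count    int32
//   id_byte_count          int32
//   format_version         int32  (FORMAT_VERSION_CACHE)
//   num_spectra            int32  (libraryEntries.Count)
//   location_headers       int64  (0: spectrum headers start at the file's beginning)
//
// Load seeks -(LibHeaders.count * sizeof(int)) from SeekOrigin.End for the fixed block,
// then a further -(revision_byte_count + id_byte_count) to recover the two strings.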