Example #1
        private void ExportLiveReport(string reportName, string reportFile, char reportColSeparator, bool reportInvariant)
        {
            var viewContext = DocumentGridViewContext.CreateDocumentGridViewContext(_doc, reportInvariant
                ? DataSchemaLocalizer.INVARIANT
                : SkylineDataSchema.GetLocalizedSchemaLocalizer());
            var viewInfo = viewContext.GetViewInfo(PersistedViews.MainGroup.Id.ViewName(reportName));
            if (null == viewInfo)
            {
                _out.WriteLine(Resources.CommandLine_ExportLiveReport_Error__The_report__0__does_not_exist__If_it_has_spaces_in_its_name__use__double_quotes__around_the_entire_list_of_command_parameters_, reportName);
                return;
            }
            _out.WriteLine(Resources.CommandLine_ExportLiveReport_Exporting_report__0____, reportName);

            try
            {
                using (var saver = new FileSaver(reportFile))
                {
                    if (!saver.CanSave())
                    {
                        _out.WriteLine(Resources.CommandLine_ExportLiveReport_Error__The_report__0__could_not_be_saved_to__1__, reportName, reportFile);
                        _out.WriteLine(Resources.CommandLine_ExportLiveReport_Check_to_make_sure_it_is_not_read_only_);
                        return;
                    }

                    var status = new ProgressStatus(string.Empty);
                    IProgressMonitor broker = new CommandProgressMonitor(_out, status);

                    using (var writer = new StreamWriter(saver.SafeName))
                    {
                        viewContext.Export(broker, ref status, viewInfo, writer,
                            new DsvWriter(reportInvariant ? CultureInfo.InvariantCulture : LocalizationHelper.CurrentCulture, reportColSeparator));
                    }

                    broker.UpdateProgress(status.Complete());
                    saver.Commit();
                    _out.WriteLine(Resources.CommandLine_ExportLiveReport_Report__0__exported_successfully_to__1__, reportName, reportFile);
                }
            }
            catch (Exception x)
            {
                _out.WriteLine(Resources.CommandLine_ExportLiveReport_Error__Failure_attempting_to_save__0__report_to__1__, reportName, reportFile);
                _out.WriteLine(x.Message);
            }
        }
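Examples 1 and 2 share the same save pattern: write into the FileSaver's temporary SafeName, report progress through a CommandProgressMonitor, and call Commit() only after the write succeeds. The sketch below distills that pattern. It uses only the members visible in these examples (FileSaver.CanSave/SafeName/Commit, ProgressStatus, CommandProgressMonitor); the WriteWithProgress helper, its TextWriter parameter, and the assumed CommandProgressMonitor constructor shape are illustrative, not part of the documented Skyline API.

        // Hypothetical helper distilling the save pattern of Examples 1 and 2.
        // Skyline using directives (plus System and System.IO) are assumed; the
        // CommandProgressMonitor constructor signature is assumed from its use above
        // (output writer + initial status).
        private static void WriteWithProgress(TextWriter output, string path, Action<StreamWriter> write)
        {
            using (var saver = new FileSaver(path))
            {
                if (!saver.CanSave())
                    return; // destination is locked or read-only

                var status = new ProgressStatus(string.Empty);
                IProgressMonitor broker = new CommandProgressMonitor(output, status);

                using (var writer = new StreamWriter(saver.SafeName))
                {
                    write(writer); // caller performs the actual export into the temp file
                }

                broker.UpdateProgress(status.Complete());
                saver.Commit(); // replace the destination with the temp file only on success
            }
        }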
Example #2
        public void ExportChromatograms(string chromatogramsFile, bool precursors, bool products, bool basePeaks, bool tics)
        {
            _out.WriteLine(Resources.CommandLine_ExportChromatograms_Exporting_chromatograms_file__0____, chromatogramsFile);

            var chromExtractors = new List<ChromExtractor>();
            if (tics)
                chromExtractors.Add(ChromExtractor.summed);
            if (basePeaks)
                chromExtractors.Add(ChromExtractor.base_peak);

            var chromSources = new List<ChromSource>();
            if (precursors)
                chromSources.Add(ChromSource.ms1);
            if (products)
                chromSources.Add(ChromSource.fragment);

            if (chromExtractors.Count == 0 && chromSources.Count == 0)
            {
                _out.WriteLine(Resources.CommandLine_ExportChromatograms_Error__At_least_one_chromatogram_type_must_be_selected);
                return;
            }

            var filesToExport = Document.Settings.HasResults
                ? Document.Settings.MeasuredResults.MSDataFilePaths.Select(f => f.GetFileName()).ToList()
                : new List<string>();
            if (filesToExport.Count == 0)
            {
                _out.WriteLine(Resources.CommandLine_ExportChromatograms_Error__The_document_must_have_imported_results);
                return;
            }

            try
            {
                var chromExporter = new ChromatogramExporter(Document);
                using (var saver = new FileSaver(chromatogramsFile))
                using (var writer = new StreamWriter(saver.SafeName))
                {
                    var status = new ProgressStatus(string.Empty);
                    IProgressMonitor broker = new CommandProgressMonitor(_out, status);
                    chromExporter.Export(writer, broker, filesToExport, LocalizationHelper.CurrentCulture, chromExtractors, chromSources);
                    writer.Close();
                    broker.UpdateProgress(status.Complete());
                    saver.Commit();
                    _out.WriteLine(Resources.CommandLine_ExportChromatograms_Chromatograms_file__0__exported_successfully_, chromatogramsFile);
                }
            }
            catch (Exception x)
            {
                _out.WriteLine(Resources.CommandLine_ExportChromatograms_Error__Failure_attempting_to_save_chromatograms_file__0_, chromatogramsFile);
                _out.WriteLine(x.Message);
            }
        }
Example #3
        protected override bool LoadBackground(IDocumentContainer container, SrmDocument document, SrmDocument docCurrent)
        {
            // Only allow one background proteome to load at a time.  This can
            // get tricky, if the user performs an undo and then a redo across
            // a change in background proteome.
            // Our first priority is doing the digestions, the second is accessing web
            // services to add missing protein metadata.
            lock (_lockLoadBackgroundProteome)
            {
                BackgroundProteome originalBackgroundProteome = GetBackgroundProteome(docCurrent);
                // Check to see whether the Digestion already exists but has not been queried yet.
                BackgroundProteome backgroundProteomeWithDigestions = new BackgroundProteome(originalBackgroundProteome, true);
                if (IsNotLoadedExplained(docCurrent, backgroundProteomeWithDigestions, true) == null)
                {
                    // digest is ready, and protein metadata is resolved
                    CompleteProcessing(container, backgroundProteomeWithDigestions);
                    return true;
                }
                // are we here to do the digest, or to resolve the protein metadata?
                bool getMetadata = (IsNotLoadedExplained(docCurrent, backgroundProteomeWithDigestions, false) == null) &&
                    backgroundProteomeWithDigestions.NeedsProteinMetadataSearch;

                string name = originalBackgroundProteome.Name;
                ProgressStatus progressStatus = new ProgressStatus(string.Format(
                    getMetadata
                        ? Resources.BackgroundProteomeManager_LoadBackground_Resolving_protein_details_for__0__proteome
                        : Resources.BackgroundProteomeManager_LoadBackground_Digesting__0__proteome,
                    name));
                try
                {
                    using (FileSaver fs = new FileSaver(originalBackgroundProteome.DatabasePath, StreamManager))
                    {
                        File.Copy(originalBackgroundProteome.DatabasePath, fs.SafeName, true);
                        var digestHelper = new DigestHelper(this, container, docCurrent, name, fs.SafeName, true);
                        bool success;
                        if (getMetadata)
                            success = digestHelper.LookupProteinMetadata(ref progressStatus);
                        else
                            success = (digestHelper.Digest(ref progressStatus) != null);

                        if (!success)
                        {
                            // Processing was canceled
                            EndProcessing(docCurrent);
                            UpdateProgress(progressStatus.Cancel());
                            return false;
                        }
                        using (var proteomeDb = ProteomeDb.OpenProteomeDb(originalBackgroundProteome.DatabasePath))
                        {
                            proteomeDb.DatabaseLock.AcquireWriterLock(int.MaxValue);
                            try
                            {
                                if (!fs.Commit())
                                {
                                    EndProcessing(docCurrent);
                                    throw new IOException(
                                        string.Format(
                                            Resources
                                                .BackgroundProteomeManager_LoadBackground_Unable_to_rename_temporary_file_to__0__,
                                            fs.RealName));
                                }
                            }
                            finally
                            {
                                proteomeDb.DatabaseLock.ReleaseWriterLock();
                            }
                        }

                        CompleteProcessing(container, new BackgroundProteome(originalBackgroundProteome, true));
                        UpdateProgress(progressStatus.Complete());
                        return true;
                    }
                }
                catch (Exception x)
                {
                    var message = new StringBuilder();
                    message.AppendLine(
                        string.Format(Resources.BackgroundProteomeManager_LoadBackground_Failed_updating_background_proteome__0__,
                                      name));
                    message.Append(x.Message);
                    UpdateProgress(progressStatus.ChangeErrorException(new IOException(message.ToString(), x)));
                    return false;
                }
            }
        }
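Across these examples a long-running task ends in exactly one of three terminal progress states: Complete() on success, Cancel() when the monitor reports cancellation, or ChangeErrorException() on failure. The sketch below shows that contract in isolation; the RunWithProgress name, the loop body, and the message text are illustrative placeholders, while the ProgressStatus and IProgressMonitor members are exactly the ones used above.

        // Illustrative sketch of the terminal progress states used throughout these
        // examples. The work loop is a placeholder; only the ProgressStatus and
        // IProgressMonitor members already shown above are used.
        private static bool RunWithProgress(IProgressMonitor monitor, int workItems)
        {
            var status = new ProgressStatus("Working"); // message text is illustrative
            try
            {
                for (int i = 0; i < workItems; i++)
                {
                    if (monitor.IsCanceled)
                    {
                        monitor.UpdateProgress(status.Cancel()); // canceled: report and stop
                        return false;
                    }
                    monitor.UpdateProgress(status = status.ChangePercentComplete(i * 100 / workItems));
                    // ... do one unit of work ...
                }
                monitor.UpdateProgress(status.Complete()); // success
                return true;
            }
            catch (Exception x)
            {
                monitor.UpdateProgress(status.ChangeErrorException(x)); // failure: attach the exception
                return false;
            }
        }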
Example #4
        private SrmDocument LookupProteinMetadata(SrmDocument docOrig, IProgressMonitor progressMonitor)
        {
            lock (_processedNodes)
            {
                // Check to make sure this operation was not canceled while this thread was
                // waiting to acquire the lock.  This also cleans up pending work.
                if (progressMonitor.IsCanceled)
                    return null;

                var progressStatus = new ProgressStatus(Resources.ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details);
                int nResolved = 0;
                int nUnresolved = docOrig.PeptideGroups.Count(pg => pg.ProteinMetadata.NeedsSearch()); // count only the groups that still need a metadata search

                if ((nUnresolved > 0) && !docOrig.Settings.PeptideSettings.BackgroundProteome.IsNone)
                {
                    // Do a quick check to see if background proteome already has the info
                    if (!docOrig.Settings.PeptideSettings.BackgroundProteome.NeedsProteinMetadataSearch)
                    {
                        try
                        {
                            using (var proteomeDb = docOrig.Settings.PeptideSettings.BackgroundProteome.OpenProteomeDb())
                            {
                                foreach (PeptideGroupDocNode nodePepGroup in docOrig.PeptideGroups)
                                {
                                    if (_processedNodes.ContainsKey(nodePepGroup.Id.GlobalIndex))
                                    {
                                        // We did this before we were interrupted
                                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                                    }
                                    else if (nodePepGroup.ProteinMetadata.NeedsSearch())
                                    {
                                        var proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.Name);
                                        if ((proteinMetadata == null) && !Equals(nodePepGroup.Name, nodePepGroup.OriginalName))
                                            proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.OriginalName); // Original name might hit
                                        if ((proteinMetadata == null) && !String.IsNullOrEmpty(nodePepGroup.ProteinMetadata.Accession))
                                            proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.ProteinMetadata.Accession); // Parsed accession might hit
                                        if ((proteinMetadata != null) && !proteinMetadata.NeedsSearch())
                                        {
                                            // Background proteome has already resolved this
                                            _processedNodes.Add(nodePepGroup.Id.GlobalIndex, proteinMetadata);
                                            progressMonitor.UpdateProgress(
                                                progressStatus =
                                                    progressStatus.ChangePercentComplete(100*nResolved++/nUnresolved));
                                        }
                                    }
                                    if (progressMonitor.IsCanceled)
                                    {
                                        progressMonitor.UpdateProgress(progressStatus.Cancel());
                                        return null;
                                    }
                                }
                            }
                        }
                        // ReSharper disable once EmptyGeneralCatchClause
                        catch
                        {
                            // The protDB file is busy, or some other issue - just go directly to web
                        }
                    }
                }
                if (nResolved != nUnresolved)
                {
                    try
                    {
                        // Now go to the web for more protein metadata (or pretend to, depending on WebEnabledFastaImporter.DefaultWebAccessMode)
                        var docNodesWithUnresolvedProteinMetadata = new Dictionary<ProteinSearchInfo,PeptideGroupDocNode>();
                        var proteinsToSearch = new List<ProteinSearchInfo>();
                        foreach (PeptideGroupDocNode node in docOrig.PeptideGroups)
                        {
                            if (node.ProteinMetadata.NeedsSearch() && !_processedNodes.ContainsKey(node.Id.GlobalIndex)) // Did we already process this?
                            {
                                var proteinMetadata = node.ProteinMetadata;
                                if (proteinMetadata.WebSearchInfo.IsEmpty()) // Never even been hit with regex
                                {
                                    // Use Regexes to get some metadata, and a search term
                                    var parsedProteinMetaData = FastaImporter.ParseProteinMetaData(proteinMetadata);
                                    if ((parsedProteinMetaData == null) || Equals(parsedProteinMetaData.Merge(proteinMetadata),proteinMetadata.SetWebSearchCompleted()))
                                    {
                                        // That didn't parse well enough to make a search term, or didn't add any new info - just set it as searched so we don't keep trying
                                        _processedNodes.Add(node.Id.GlobalIndex, proteinMetadata.SetWebSearchCompleted());
                                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                                        proteinMetadata = null;  // No search to be done
                                    }
                                    else
                                    {
                                        proteinMetadata = proteinMetadata.Merge(parsedProteinMetaData);  // Fill in any gaps with parsed info
                                    }
                                }
                                if (proteinMetadata != null)
                                {
                                    // We note the sequence length because it's useful in disambiguating search results
                                    proteinsToSearch.Add(new ProteinSearchInfo(new DbProteinName(null, proteinMetadata),
                                        node.PeptideGroup.Sequence == null ? 0 : node.PeptideGroup.Sequence.Length));
                                    docNodesWithUnresolvedProteinMetadata.Add(proteinsToSearch.Last(), node);
                                }
                            }
                        }
                        if (progressMonitor.IsCanceled)
                        {
                            progressMonitor.UpdateProgress(progressStatus.Cancel());
                            return null;
                        }
                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved / nUnresolved));

                        // Now we actually hit the internet
                        if (proteinsToSearch.Any())
                        {
                            foreach (var result in FastaImporter.DoWebserviceLookup(proteinsToSearch, progressMonitor, false)) // Resolve them all, now
                            {
                                Debug.Assert(!result.GetProteinMetadata().NeedsSearch());
                                _processedNodes.Add(docNodesWithUnresolvedProteinMetadata[result].Id.GlobalIndex, result.GetProteinMetadata());
                                progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                            }
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        progressMonitor.UpdateProgress(progressStatus.Cancel());
                        return null;
                    }

                }

                // And finally write back to the document
                var listProteins = new List<PeptideGroupDocNode>();
                foreach (PeptideGroupDocNode node in docOrig.MoleculeGroups)
                {
                    if (_processedNodes.ContainsKey(node.Id.GlobalIndex))
                    {
                        listProteins.Add(node.ChangeProteinMetadata(_processedNodes[node.Id.GlobalIndex]));
                    }
                    else
                    {
                        listProteins.Add(node);
                    }
                }
                var docNew = docOrig.ChangeChildrenChecked(listProteins.Cast<DocNode>().ToArray());
                progressMonitor.UpdateProgress(progressStatus.Complete());
                return (SrmDocument)docNew;
            }
        }
Example #5
        private bool ExportReport(string fileName, char separator)
        {
            try
            {
                using (var saver = new FileSaver(fileName))
                {
                    if (!saver.CanSave(this))
                        return false;

                    using (var writer = new StreamWriter(saver.SafeName))
                    {
                        Report report = GetReport();
                        bool success = false;

                        using (var longWait = new LongWaitDlg { Text = Resources.ExportReportDlg_ExportReport_Generating_Report })
                        {
                            longWait.PerformWork(this, 1500, broker =>
                            {
                                var status = new ProgressStatus(Resources.ExportReportDlg_GetDatabase_Analyzing_document);
                                broker.UpdateProgress(status);
                                Database database = EnsureDatabase(broker, 80, ref status);
                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status = status.ChangeMessage(Resources.ExportReportDlg_ExportReport_Building_report));
                                ResultSet resultSet = report.Execute(database);
                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status = status.ChangePercentComplete(95)
                                    .ChangeMessage(Resources.ExportReportDlg_ExportReport_Writing_report));

                                ResultSet.WriteReportHelper(resultSet, separator, writer, CultureInfo);

                                writer.Flush();
                                writer.Close();

                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status.Complete());

                                saver.Commit();
                                success = true;
                            });
                        }

                        return success;
                    }
                }
            }
            catch (Exception x)
            {
                MessageDlg.ShowWithException(this, string.Format(Resources.ExportReportDlg_ExportReport_Failed_exporting_to, fileName, GetExceptionDisplayMessage(x)), x);
                return false;
            }
        }
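Example 5 moves the same export work onto a background thread with LongWaitDlg.PerformWork, polling broker.IsCanceled between steps and committing only at the end. A condensed sketch of that shape is below; it assumes the LongWaitDlg members exactly as used in Example 5 (Text, PerformWork, and the broker callback), and the dialog text, the work steps, and the surrounding form (the `this` argument) are placeholders rather than Skyline code.

        // Condensed sketch of the LongWaitDlg pattern in Example 5, assuming the members
        // shown there (Text, PerformWork, broker.IsCanceled/UpdateProgress). This must run
        // inside a form, as Example 5 does; the strings and work steps are placeholders.
        private bool RunExportWithWaitDialog(FileSaver saver, Action doExport)
        {
            bool success = false;
            using (var longWait = new LongWaitDlg { Text = "Exporting" })
            {
                longWait.PerformWork(this, 1500, broker =>
                {
                    var status = new ProgressStatus("Preparing");
                    broker.UpdateProgress(status);
                    doExport(); // long-running step supplied by the caller
                    if (broker.IsCanceled)
                        return; // user pressed Cancel; success stays false
                    broker.UpdateProgress(status.Complete());
                    saver.Commit(); // keep the output only on success
                    success = true;
                });
            }
            return success;
        }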
Example #6
 public static SrmDocument RecalculateAlignments(SrmDocument document, IProgressMonitor progressMonitor)
 {
     var newSources = ListAvailableRetentionTimeSources(document.Settings);
     var newResultsSources = ListSourcesForResults(document.Settings.MeasuredResults, newSources);
     var allLibraryRetentionTimes = ReadAllRetentionTimes(document, newSources);
     var newFileAlignments = new List<FileRetentionTimeAlignments>();
     var progressStatus = new ProgressStatus("Aligning retention times"); // Not L10N?  Will users see this?
     foreach (var retentionTimeSource in newResultsSources.Values)
     {
         progressStatus = progressStatus.ChangePercentComplete(100*newFileAlignments.Count/newResultsSources.Count);
         progressMonitor.UpdateProgress(progressStatus);
         try
         {
             var fileAlignments = CalculateFileRetentionTimeAlignments(retentionTimeSource.Name, allLibraryRetentionTimes, progressMonitor);
             newFileAlignments.Add(fileAlignments);
         }
         catch (OperationCanceledException)
         {
             progressMonitor.UpdateProgress(progressStatus.Cancel());
             return null;
         }
     }
     var newDocRt = new DocumentRetentionTimes(newSources.Values, newFileAlignments);
     var newDocument = document.ChangeSettings(document.Settings.ChangeDocumentRetentionTimes(newDocRt));
     Debug.Assert(IsLoaded(newDocument));
     progressMonitor.UpdateProgress(progressStatus.Complete());
     return newDocument;
 }
Example #7
        // ReSharper restore UnusedMember.Local
        private bool Load(ILoadMonitor loader)
        {
            ProgressStatus status =
                new ProgressStatus(string.Format(Resources.BiblioSpecLibrary_Load_Loading__0__library,
                                                 Path.GetFileName(FilePath)));
            loader.UpdateProgress(status);

            long lenRead = 0;
            // AdlerChecksum checksum = new AdlerChecksum();

            try
            {
                // Use a buffered stream for initial read
                BufferedStream stream = new BufferedStream(CreateStream(loader), 32 * 1024);

                int countHeader = (int) LibHeaders.count*4;
                byte[] libHeader = new byte[countHeader];
                if (stream.Read(libHeader, 0, countHeader) != countHeader)
                    throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted);
                lenRead += countHeader;
                // Check the first byte of the primary version number to determine
                // whether the format is little- or big-endian.  Little-endian will
                // have the version number in this byte, while big-endian will have zero.
                if (libHeader[(int) LibHeaders.version1 * 4] == 0)
                    _bigEndian = true;

                int numSpectra = GetInt32(libHeader, (int) LibHeaders.num_spectra);
                var dictLibrary = new Dictionary<LibKey, BiblioSpectrumInfo>(numSpectra);
                var setSequences = new HashSet<LibSeqKey>();

                string revStr = string.Format("{0}.{1}", // Not L10N
                                              GetInt32(libHeader, (int) LibHeaders.version1),
                                              GetInt32(libHeader, (int) LibHeaders.version2));
                Revision = float.Parse(revStr, CultureInfo.InvariantCulture);

                // checksum.MakeForBuff(libHeader, AdlerChecksum.ADLER_START);

                countHeader = (int) SpectrumHeaders.count*4;
                byte[] specHeader = new byte[1024];
                byte[] specSequence = new byte[1024];
                for (int i = 0; i < numSpectra; i++)
                {
                    int percent = i * 100 / numSpectra;
                    if (status.PercentComplete != percent)
                    {
                        // Check for cancellation after each integer change in percent loaded.
                        if (loader.IsCanceled)
                        {
                            loader.UpdateProgress(status.Cancel());
                            return false;
                        }

                        // If not cancelled, update progress.
                        loader.UpdateProgress(status = status.ChangePercentComplete(percent));
                    }

                    // Read spectrum header
                    int bytesRead = stream.Read(specHeader, 0, countHeader);
                    if (bytesRead != countHeader)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);

                    // If this is the first header, and the sequence length is zero,
                    // then this is a Linux format library.  Switch to linux format,
                    // and start over.
                    if (i == 0 && GetInt32(specHeader, (int)SpectrumHeaders.seq_len) == 0)
                    {
                        _linuxFormat = true;
                        stream.Seek(lenRead, SeekOrigin.Begin);

                        // Re-read spectrum header
                        countHeader = (int)SpectrumHeadersLinux.count * 4;
                        bytesRead = stream.Read(specHeader, 0, countHeader);
                        if (bytesRead != countHeader)
                            throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);
                    }

                    lenRead += bytesRead;

                    // checksum.MakeForBuff(specHeader, checksum.ChecksumValue);

                    int charge = GetInt32(specHeader, (int)SpectrumHeaders.charge);
                    if (charge > TransitionGroup.MAX_PRECURSOR_CHARGE)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted);

                    int numPeaks = GetInt32(specHeader, (int)SpectrumHeaders.num_peaks);
                    int seqLength = GetInt32(specHeader, (_linuxFormat ?
                        (int)SpectrumHeadersLinux.seq_len : (int)SpectrumHeaders.seq_len));
                    int copies = GetInt32(specHeader, (_linuxFormat ?
                        (int)SpectrumHeadersLinux.copies : (int)SpectrumHeaders.copies));

                    // Read sequence information
                    int countSeq = (seqLength + 1)*2;
                    if (stream.Read(specSequence, 0, countSeq) != countSeq)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted);

                    lenRead += countSeq;

                    // checksum.MakeForBuff(specSequence, checksum.ChecksumValue);

                    // Store in dictionary
                    if (IsUnmodified(specSequence, seqLength + 1, seqLength))
                    {
                        // These libraries should not have duplicates, but just in case.
                        // CONSIDER: Emit error about redundancy?
                        // These legacy libraries assume [+57.0] modified Cysteine
                        LibKey key = new LibKey(GetCModified(specSequence, ref seqLength), 0, seqLength, charge);
                        if (!dictLibrary.ContainsKey(key))
                            dictLibrary.Add(key, new BiblioSpectrumInfo((short)copies, (short)numPeaks, lenRead));
                        setSequences.Add(new LibSeqKey(key));
                    }

                    // Read over peaks
                    int countPeaks = 2*sizeof(Single)*numPeaks;
                    stream.Seek(countPeaks, SeekOrigin.Current);    // Skip spectrum
                    lenRead += countPeaks;

                    // checksum.MakeForBuff(specPeaks, checksum.ChecksumValue);
                }

                // Checksum = checksum.ChecksumValue;
                _dictLibrary = dictLibrary;
                _setSequences = setSequences;
                loader.UpdateProgress(status.Complete());
                return true;
            }
            catch (InvalidDataException x)
            {
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            catch (IOException x)
            {
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            catch (Exception x)
            {
                x = new Exception(string.Format(Resources.BiblioSpecLibrary_Load_Failed_loading_library__0__, FilePath), x);
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            finally
            {
                if (ReadStream != null)
                {
                    // Close the read stream to ensure we never leak it.
                    // This only costs one extra open, the first time the
                    // active document tries to read.
                    try { ReadStream.CloseStream(); }
                    catch(IOException) {}
                }
            }
        }
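Example 7 reads fixed-width binary headers, detects the byte order from the version field, and then pulls 4-byte integers out of the header buffer through a GetInt32 helper. The sketch below shows, with standard .NET only, the kind of endian-aware read such a helper performs; it is illustrative and not the actual BiblioSpecLibrary implementation.

        // Generic, standard-.NET sketch of an endian-aware header read like the GetInt32
        // helper called in Example 7: fields are stored as consecutive 4-byte slots, and
        // big-endian files have their bytes reversed relative to the (little-endian) host.
        private static int ReadHeaderInt32(byte[] buffer, int fieldIndex, bool bigEndian)
        {
            int offset = fieldIndex * 4;
            if (bigEndian)
            {
                return (buffer[offset] << 24) | (buffer[offset + 1] << 16)
                     | (buffer[offset + 2] << 8) | buffer[offset + 3];
            }
            return BitConverter.ToInt32(buffer, offset); // assumes a little-endian host
        }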