Code Example #1
            public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status, TextWriter writer)
            {
                if (shouldCancel)
                {
                    // ProgressStatus is immutable, so capture the cancelled instance returned by Cancel()
                    progress.UpdateProgress(status = status.Cancel());
                    return;
                }

                if (!string.IsNullOrEmpty(stringToWriteToWriter))
                {
                    writer.WriteLine(stringToWriteToWriter);
                }
                status.ChangePercentComplete(100);
                progress.UpdateProgress(status);
            }
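
A note on the pattern above: ProgressStatus in this codebase is immutable, so Cancel() and ChangePercentComplete() return new instances that must be captured and handed back to the monitor, which is why the code writes status = status.Cancel() rather than calling Cancel() on its own. The minimal sketch below illustrates that contract with stand-in types; SketchStatus and ISketchMonitor are hypothetical names, not the Skyline API.

// Hypothetical stand-ins for ProgressStatus / IProgressMonitor; the real Skyline types differ.
public enum SketchState { Running, Cancelled, Complete }

public sealed class SketchStatus
{
    public SketchState State { get; private set; }
    public int PercentComplete { get; private set; }

    // Immutable "with" methods: each returns a modified copy and leaves the receiver unchanged.
    public SketchStatus ChangePercentComplete(int percent)
    {
        return new SketchStatus
        {
            State = percent >= 100 ? SketchState.Complete : State,
            PercentComplete = percent
        };
    }

    public SketchStatus Cancel()
    {
        return new SketchStatus { State = SketchState.Cancelled, PercentComplete = PercentComplete };
    }
}

public interface ISketchMonitor
{
    bool IsCanceled { get; }
    void UpdateProgress(SketchStatus status);
}

public static class ProgressPatternSketch
{
    public static void Run(ISketchMonitor monitor, ref SketchStatus status)
    {
        if (monitor.IsCanceled)
        {
            // Calling status.Cancel() by itself would change nothing; the returned instance must be captured.
            monitor.UpdateProgress(status = status.Cancel());
            return;
        }
        monitor.UpdateProgress(status = status.ChangePercentComplete(100));
    }
}
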
Code Example #2
        public SrmDocument Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount, bool isMinutes, bool removeMissing = false, bool changePeaks = true)
        {
            var status = new ProgressStatus(Resources.PeakBoundaryImporter_Import_Importing_Peak_Boundaries);
            double timeConversionFactor = isMinutes ? 1.0 : 60.0;
            int linesRead = 0;
            int progressPercent = 0;
            var docNew = (SrmDocument) Document.ChangeIgnoreChangingChildren(true);
            var docReference = docNew;
            var sequenceToNode = new Dictionary<Tuple<string, bool>, IList<IdentityPath>>();
            var fileNameToFileMatch = new Dictionary<string, ChromSetFileMatch>();
            var trackAdjustedResults = new HashSet<ResultsKey>();
            var modMatcher = new ModificationMatcher();
            // Make the dictionary of modified peptide strings to doc nodes and paths
            for (int i = 0; i < Document.MoleculeCount; ++i)
            {
                IdentityPath peptidePath = Document.GetPathTo((int) SrmDocument.Level.Molecules, i);
                PeptideDocNode peptideNode = (PeptideDocNode) Document.FindNode(peptidePath);
                var peptidePair = new Tuple<string, bool>(peptideNode.RawTextId, peptideNode.IsDecoy);
                IList<IdentityPath> idPathList;
                // Each (sequence, isDecoy) pair can be associated with more than one peptide,
                // to handle the case of duplicate peptides in the document.
                if (sequenceToNode.TryGetValue(peptidePair, out idPathList))
                {
                    idPathList.Add(peptidePath);
                    sequenceToNode[peptidePair] = idPathList;
                }
                else
                {
                    idPathList = new List<IdentityPath> { peptidePath };
                    sequenceToNode.Add(peptidePair, idPathList);
                }
            }

            // Add annotations as possible columns
            var allFieldNames = new List<string[]>(FIELD_NAMES);
            allFieldNames.AddRange(from def in Document.Settings.DataSettings.AnnotationDefs
                                   where def.AnnotationTargets.Contains(AnnotationDef.AnnotationTarget.precursor_result)
                                   select new[] { def.Name });

            string line = reader.ReadLine();
            linesRead++;
            int[] fieldIndices;
            int fieldsTotal;
            // If we aren't changing peaks, allow start and end time to be missing
            var requiredFields = changePeaks ? REQUIRED_FIELDS : REQUIRED_NO_CHROM;
            char correctSeparator = ReadFirstLine(line, allFieldNames, requiredFields, out fieldIndices, out fieldsTotal);

            while ((line = reader.ReadLine()) != null)
            {
                linesRead++;
                if (progressMonitor != null)
                {
                    if (progressMonitor.IsCanceled)
                        return Document;
                    int progressNew = (int) (linesRead*100/lineCount);
                    if (progressPercent != progressNew)
                    {
                        progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                        progressPercent = progressNew;
                    }
                }
                var dataFields = new DataFields(fieldIndices, line.ParseDsvFields(correctSeparator), allFieldNames);
                if (dataFields.Length != fieldsTotal)
                {
                    throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_,
                        linesRead, dataFields.Length, fieldsTotal));
                }
                string modifiedPeptideString = dataFields.GetField(Field.modified_peptide);
                modMatcher.CreateMatches(Document.Settings,
                                        new List<string> {modifiedPeptideString},
                                        Settings.Default.StaticModList,
                                        Settings.Default.HeavyModList);
                // Convert the modified peptide string into a standardized form that
                // converts Unimod IDs, names, etc. into masses, e.g. [+57.0]
                var nodeForModPep = modMatcher.GetModifiedNode(modifiedPeptideString);
                if (nodeForModPep == null)
                {
                    throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_, modifiedPeptideString, linesRead));
                }
                nodeForModPep = nodeForModPep.ChangeSettings(Document.Settings, SrmSettingsDiff.ALL);
                modifiedPeptideString = nodeForModPep.RawTextId; // Modified sequence, or custom ion name
                string fileName = dataFields.GetField(Field.filename);
                bool isDecoy = dataFields.IsDecoy(linesRead);
                var peptideIdentifier = new Tuple<string, bool>(modifiedPeptideString, isDecoy);
                int charge;
                bool chargeSpecified = dataFields.TryGetCharge(linesRead, out charge);
                string sampleName = dataFields.GetField(Field.sample_name);

                double? startTime = null;
                double? endTime = null;
                if (changePeaks)
                {
                    startTime = dataFields.GetTime(Field.start_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_, linesRead);
                    if (startTime.HasValue)
                        startTime = startTime / timeConversionFactor;
                    endTime = dataFields.GetTime(Field.end_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_, linesRead);
                    if (endTime.HasValue)
                        endTime = endTime / timeConversionFactor;
                }

                // Error if only one of startTime and endTime is null
                if (startTime == null && endTime != null)
                    throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_start_time_on_line__0_, linesRead));
                if (startTime != null && endTime == null)
                    throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_end_time_on_line__0_, linesRead));
                // Add filename to second dictionary if not yet encountered
                ChromSetFileMatch fileMatch;
                if (!fileNameToFileMatch.TryGetValue(fileName, out fileMatch))
                {
                    fileMatch = Document.Settings.MeasuredResults.FindMatchingMSDataFile(MsDataFileUri.Parse(fileName));
                    fileNameToFileMatch.Add(fileName, fileMatch);
                }
                if (fileMatch == null)
                {
                    UnrecognizedFiles.Add(fileName);
                    continue;
                }
                var chromSet = fileMatch.Chromatograms;
                string nameSet = chromSet.Name;
                ChromFileInfoId[] fileIds;
                if (sampleName == null)
                {
                    fileIds = chromSet.MSDataFileInfos.Select(x => x.FileId).ToArray();
                }
                else
                {
                    var sampleFile = chromSet.MSDataFileInfos.FirstOrDefault(info => Equals(sampleName, info.FilePath.GetSampleName()));
                    if (sampleFile == null)
                    {
                        throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__, sampleName, linesRead, fileName));
                    }
                    fileIds = new[] {sampleFile.FileId};
                }
                // Look up the IdentityPath of peptide in first dictionary
                IList<IdentityPath> pepPaths;
                if (!sequenceToNode.TryGetValue(peptideIdentifier, out pepPaths))
                {
                    UnrecognizedPeptides.Add(modifiedPeptideString);
                    continue;
                }

                // Define the annotations to be added
                var annotations = dataFields.GetAnnotations();
                AnnotationsAdded = annotations.Keys.ToList();

                // Loop over all the transition groups in that peptide to find matching charge,
                // or use all transition groups if charge not specified
                bool foundSample = false;
                foreach (var pepPath in pepPaths)
                {
                    var nodePep = (PeptideDocNode)docNew.FindNode(pepPath);
                    for(int i = 0; i < nodePep.Children.Count; ++i)
                    {
                        var groupRelPath = nodePep.GetPathTo(i);
                        var groupNode = (TransitionGroupDocNode) nodePep.FindNode(groupRelPath);
                        if (!chargeSpecified || charge == groupNode.TransitionGroup.PrecursorCharge)
                        {
                            var groupFileIndices =
                                new HashSet<int>(groupNode.ChromInfos.Select(x => x.FileId.GlobalIndex));
                            // Loop over the files in this groupNode to find the correct sample
                            // Change peak boundaries for the transition group
                            foreach (var fileId in fileIds)
                            {
                                if (groupFileIndices.Contains(fileId.GlobalIndex))
                                {
                                    var groupPath = new IdentityPath(pepPath, groupNode.Id);
                                    // Attach annotations
                                    docNew = docNew.AddPrecursorResultsAnnotations(groupPath, fileId, annotations);
                                    // Change peak
                                    var filePath = chromSet.GetFileInfo(fileId).FilePath;
                                    if (changePeaks)
                                    {
                                        docNew = docNew.ChangePeak(groupPath, nameSet, filePath,
                                            null, startTime, endTime, UserSet.IMPORTED, null, false);
                                    }
                                    // For removing peaks that are not in the file, if removeMissing = true
                                    trackAdjustedResults.Add(new ResultsKey(fileId.GlobalIndex, groupNode.Id));
                                    foundSample = true;
                                }
                            }
                        }
                    }
                }
                if (!foundSample)
                {
                    UnrecognizedChargeStates.Add(new UnrecognizedChargeState(charge, fileName, modifiedPeptideString));
                }
            }
            // Remove peaks from the document that weren't in the file.
            if (removeMissing)
                docNew = RemoveMissing(docNew, trackAdjustedResults, changePeaks);
            // If nothing changed, keep the original Document; otherwise rebuild it with ChangeIgnoreChangingChildren turned back off
            if (!ReferenceEquals(docNew, docReference))
                Document = (SrmDocument) Document.ChangeIgnoreChangingChildren(false).ChangeChildrenChecked(docNew.Children);
            return Document;
        }
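
The sequenceToNode dictionary built at the top of Example #2 is the usual TryGetValue-or-add multimap idiom: one (sequence, isDecoy) key maps to every IdentityPath that shares it, so duplicate peptides in the document are all found later. A self-contained sketch of that idiom, assuming only standard collections (the keys and paths below are made up):

using System;
using System.Collections.Generic;

static class MultimapSketch
{
    static void Main()
    {
        // Key: (modified sequence, isDecoy); value: every path that shares that key.
        var sequenceToPaths = new Dictionary<Tuple<string, bool>, IList<string>>();

        var input = new[]
        {
            Tuple.Create("PEPTIDER", false, "/proteinA/0"),
            Tuple.Create("PEPTIDER", false, "/proteinB/3"), // duplicate peptide in a second protein
            Tuple.Create("PEPTIDEK", true,  "/decoys/7"),
        };

        foreach (var row in input)
        {
            var key = new Tuple<string, bool>(row.Item1, row.Item2);
            IList<string> paths;
            if (!sequenceToPaths.TryGetValue(key, out paths))
            {
                paths = new List<string>();
                sequenceToPaths.Add(key, paths);
            }
            // The list is a reference already stored in the dictionary,
            // so there is no need to write it back after adding to it.
            paths.Add(row.Item3);
        }

        foreach (var pair in sequenceToPaths)
            Console.WriteLine("{0}: {1}", pair.Key, string.Join(", ", pair.Value));
    }
}
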
Code Example #3
File: OptimizationDb.cs  Project: lgatto/proteowizard
        public static OptimizationDb ConvertFromOldFormat(string path, IProgressMonitor loadMonitor, ProgressStatus status, SrmDocument document)
        {
            // Try to open assuming old format (Id, PeptideModSeq, Charge, Mz, Value, Type)
            var precursors = new Dictionary<string, HashSet<int>>(); // PeptideModSeq -> charges
            var optimizations = new List<Tuple<DbOptimization, double>>(); // DbOptimization, product m/z
            int maxCharge = 1;
            using (SQLiteConnection connection = new SQLiteConnection("Data Source = " + path)) // Not L10N
            using (SQLiteCommand command = new SQLiteCommand(connection))
            {
                connection.Open();
                command.CommandText = "SELECT PeptideModSeq, Charge, Mz, Value, Type FROM OptimizationLibrary"; // Not L10N
                using (SQLiteDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        var type = (OptimizationType)reader["Type"]; // Not L10N
                        var modifiedSequence = reader["PeptideModSeq"].ToString(); // Not L10N
                        var charge = (int)reader["Charge"]; // Not L10N
                        var productMz = (double)reader["Mz"]; // Not L10N
                        var value = (double)reader["Value"]; // Not L10N
                        optimizations.Add(new Tuple<DbOptimization, double>(new DbOptimization(type, modifiedSequence, charge, string.Empty, -1, value), productMz));

                        if (!precursors.ContainsKey(modifiedSequence))
                        {
                            precursors[modifiedSequence] = new HashSet<int>();
                        }
                        precursors[modifiedSequence].Add(charge);
                        if (charge > maxCharge)
                        {
                            maxCharge = charge;
                        }
                    }
                }
            }

            var peptideList = (from precursor in precursors
                               from charge in precursor.Value
                               select string.Format("{0}{1}", precursor.Key, Transition.GetChargeIndicator(charge)) // Not L10N
                               ).ToList();

            var newDoc = new SrmDocument(document != null ? document.Settings : SrmSettingsList.GetDefault());
            newDoc = newDoc.ChangeSettings(newDoc.Settings
                .ChangePeptideLibraries(libs => libs.ChangePick(PeptidePick.filter))
                .ChangeTransitionFilter(filter =>
                    filter.ChangeFragmentRangeFirstName("ion 1") // Not L10N
                          .ChangeFragmentRangeLastName("last ion") // Not L10N
                          .ChangeProductCharges(Enumerable.Range(1, maxCharge).ToList())
                          .ChangeIonTypes(new []{ IonType.y, IonType.b }))
                .ChangeTransitionLibraries(libs => libs.ChangePick(TransitionLibraryPick.none))
                );
            var matcher = new ModificationMatcher { FormatProvider = NumberFormatInfo.InvariantInfo };
            matcher.CreateMatches(newDoc.Settings, peptideList, Settings.Default.StaticModList, Settings.Default.HeavyModList);
            FastaImporter importer = new FastaImporter(newDoc, matcher);
            string text = string.Format(">>{0}\r\n{1}", newDoc.GetPeptideGroupId(true), TextUtil.LineSeparate(peptideList)); // Not L10N
            PeptideGroupDocNode imported = importer.Import(new StringReader(text), null, Helpers.CountLinesInString(text)).First();

            int optimizationsUpdated = 0;
            foreach (PeptideDocNode nodePep in imported.Children)
            {
                string sequence = newDoc.Settings.GetSourceTextId(nodePep);
                foreach (var nodeGroup in nodePep.TransitionGroups)
                {
                    int charge = nodeGroup.PrecursorCharge;
                    foreach (var nodeTran in nodeGroup.Transitions)
                    {
                        double productMz = nodeTran.Mz;
                        foreach (var optimization in optimizations.Where(opt =>
                            string.IsNullOrEmpty(opt.Item1.FragmentIon) &&
                            opt.Item1.ProductCharge == -1 &&
                            opt.Item1.PeptideModSeq == sequence &&
                            opt.Item1.Charge == charge &&
                            Math.Abs(opt.Item2 - productMz) < 0.00001))
                        {
                            optimization.Item1.FragmentIon = nodeTran.FragmentIonName;
                            optimization.Item1.ProductCharge = nodeTran.Transition.Charge;
                            ++optimizationsUpdated;
                        }
                    }
                }
            }

            if (optimizations.Count > optimizationsUpdated)
            {
                throw new OptimizationsOpeningException(string.Format(Resources.OptimizationDb_ConvertFromOldFormat_Failed_to_convert__0__optimizations_to_new_format_,
                                                                      optimizations.Count - optimizationsUpdated));
            }

            using (var fs = new FileSaver(path))
            {
                OptimizationDb db = CreateOptimizationDb(fs.SafeName);
                db.UpdateOptimizations(optimizations.Select(opt => opt.Item1).ToArray(), new DbOptimization[0]);
                fs.Commit();

                if (loadMonitor != null)
                    loadMonitor.UpdateProgress(status.ChangePercentComplete(100));
                return GetOptimizationDb(fs.RealName, null, null);
            }
        }
Code Example #4
        private SrmDocument LookupProteinMetadata(SrmDocument docOrig, IProgressMonitor progressMonitor)
        {
            lock (_processedNodes)
            {
                // Check to make sure this operation was not canceled while this thread was
                // waiting to acquire the lock.  This also cleans up pending work.
                if (progressMonitor.IsCanceled)
                    return null;

                var progressStatus = new ProgressStatus(Resources.ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details);
                int nResolved = 0;
                int nUnresolved = docOrig.PeptideGroups.Count(pg => pg.ProteinMetadata.NeedsSearch());

                if ((nUnresolved > 0) && !docOrig.Settings.PeptideSettings.BackgroundProteome.IsNone)
                {
                    // Do a quick check to see if background proteome already has the info
                    if (!docOrig.Settings.PeptideSettings.BackgroundProteome.NeedsProteinMetadataSearch)
                    {
                        try
                        {
                            using (var proteomeDb = docOrig.Settings.PeptideSettings.BackgroundProteome.OpenProteomeDb())
                            {
                                foreach (PeptideGroupDocNode nodePepGroup in docOrig.PeptideGroups)
                                {
                                    if (_processedNodes.ContainsKey(nodePepGroup.Id.GlobalIndex))
                                    {
                                        // We did this before we were interrupted
                                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                                    }
                                    else if (nodePepGroup.ProteinMetadata.NeedsSearch())
                                    {
                                        var proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.Name);
                                        if ((proteinMetadata == null) && !Equals(nodePepGroup.Name, nodePepGroup.OriginalName))
                                            proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.OriginalName); // Original name might hit
                                        if ((proteinMetadata == null) && !String.IsNullOrEmpty(nodePepGroup.ProteinMetadata.Accession))
                                            proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.ProteinMetadata.Accession); // Parsed accession might hit
                                        if ((proteinMetadata != null) && !proteinMetadata.NeedsSearch())
                                        {
                                            // Background proteome has already resolved this
                                            _processedNodes.Add(nodePepGroup.Id.GlobalIndex, proteinMetadata);
                                            progressMonitor.UpdateProgress(
                                                progressStatus =
                                                    progressStatus.ChangePercentComplete(100*nResolved++/nUnresolved));
                                        }
                                    }
                                    if (progressMonitor.IsCanceled)
                                    {
                                        progressMonitor.UpdateProgress(progressStatus.Cancel());
                                        return null;
                                    }
                                }
                            }
                        }
                        // ReSharper disable once EmptyGeneralCatchClause
                        catch
                        {
                            // The protDB file is busy, or some other issue - just go directly to web
                        }
                    }
                }
                if (nResolved != nUnresolved)
                {
                    try
                    {
                        // Now go to the web for more protein metadata (or pretend to, depending on WebEnabledFastaImporter.DefaultWebAccessMode)
                        var docNodesWithUnresolvedProteinMetadata = new Dictionary<ProteinSearchInfo,PeptideGroupDocNode>();
                        var proteinsToSearch = new List<ProteinSearchInfo>();
                        foreach (PeptideGroupDocNode node in docOrig.PeptideGroups)
                        {
                            if (node.ProteinMetadata.NeedsSearch() && !_processedNodes.ContainsKey(node.Id.GlobalIndex)) // Did we already process this?
                            {
                                var proteinMetadata = node.ProteinMetadata;
                                if (proteinMetadata.WebSearchInfo.IsEmpty()) // Never even been hit with regex
                                {
                                    // Use Regexes to get some metadata, and a search term
                                    var parsedProteinMetaData = FastaImporter.ParseProteinMetaData(proteinMetadata);
                                    if ((parsedProteinMetaData == null) || Equals(parsedProteinMetaData.Merge(proteinMetadata),proteinMetadata.SetWebSearchCompleted()))
                                    {
                                        // That didn't parse well enough to make a search term, or didn't add any new info - just set it as searched so we don't keep trying
                                        _processedNodes.Add(node.Id.GlobalIndex, proteinMetadata.SetWebSearchCompleted());
                                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                                        proteinMetadata = null;  // No search to be done
                                    }
                                    else
                                    {
                                        proteinMetadata = proteinMetadata.Merge(parsedProteinMetaData);  // Fill in any gaps with parsed info
                                    }
                                }
                                if (proteinMetadata != null)
                                {
                                    // We note the sequence length because it's useful in disambiguating search results
                                    proteinsToSearch.Add(new ProteinSearchInfo(new DbProteinName(null, proteinMetadata),
                                        node.PeptideGroup.Sequence == null ? 0 : node.PeptideGroup.Sequence.Length));
                                    docNodesWithUnresolvedProteinMetadata.Add(proteinsToSearch.Last(), node);
                                }
                            }
                        }
                        if (progressMonitor.IsCanceled)
                        {
                            progressMonitor.UpdateProgress(progressStatus.Cancel());
                            return null;
                        }
                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved / nUnresolved));

                        // Now we actually hit the internet
                        if (proteinsToSearch.Any())
                        {
                            foreach (var result in FastaImporter.DoWebserviceLookup(proteinsToSearch, progressMonitor, false)) // Resolve them all, now
                            {
                                Debug.Assert(!result.GetProteinMetadata().NeedsSearch());
                                _processedNodes.Add(docNodesWithUnresolvedProteinMetadata[result].Id.GlobalIndex, result.GetProteinMetadata());
                                progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                            }
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        progressMonitor.UpdateProgress(progressStatus.Cancel());
                        return null;
                    }

                }

                // And finally write back to the document
                var listProteins = new List<PeptideGroupDocNode>();
                foreach (PeptideGroupDocNode node in docOrig.MoleculeGroups)
                {
                    if (_processedNodes.ContainsKey(node.Id.GlobalIndex))
                    {
                        listProteins.Add(node.ChangeProteinMetadata(_processedNodes[node.Id.GlobalIndex]));
                    }
                    else
                    {
                        listProteins.Add(node);
                    }
                }
                var docNew = docOrig.ChangeChildrenChecked(listProteins.Cast<DocNode>().ToArray());
                progressMonitor.UpdateProgress(progressStatus.Complete());
                return (SrmDocument)docNew;
            }
        }
Code Example #5
File: BlibDb.cs  Project: lgatto/proteowizard
        /// <summary>
        /// Make a BiblioSpec SQLite library from a list of spectra and their intensities.
        /// </summary>
        /// <param name="librarySpec">Library spec for which the new library is created</param>
        /// <param name="listSpectra">List of existing spectra, by LibKey</param>
        /// <param name="libraryName">Name of the library to be created</param>
        /// <param name="progressMonitor">Progress monitor to display progress in creating library</param>
        /// <returns>A library of type <see cref="BiblioSpecLiteLibrary"/></returns>
        public BiblioSpecLiteLibrary CreateLibraryFromSpectra(BiblioSpecLiteSpec librarySpec,
            List<SpectrumMzInfo> listSpectra,
            string libraryName,
            IProgressMonitor progressMonitor)
        {
            const string libAuthority = BiblioSpecLiteLibrary.DEFAULT_AUTHORITY;
            const int majorVer = 1;
            const int minorVer = 0;
            string libId = libraryName;
            // Use a very specific LSID, since it really only matches this document.
            string libLsid = string.Format("urn:lsid:{0}:spectral_libary:bibliospec:nr:minimal:{1}:{2}:{3}.{4}", // Not L10N
                libAuthority, libId, Guid.NewGuid(), majorVer, minorVer);

            var dictLibrary = new Dictionary<LibKey, BiblioLiteSpectrumInfo>();

            using (ISession session = OpenWriteSession())
            using (ITransaction transaction = session.BeginTransaction())
            {
                int progressPercent = 0;
                int i = 0;
                var status = new ProgressStatus(Resources.BlibDb_CreateLibraryFromSpectra_Creating_spectral_library_for_imported_transition_list);
                foreach (var spectrum in listSpectra)
                {
                    ++i;
                    var dbRefSpectrum = RefSpectrumFromPeaks(spectrum);
                    session.Save(dbRefSpectrum);
                    dictLibrary.Add(spectrum.Key,
                                    new BiblioLiteSpectrumInfo(spectrum.Key, dbRefSpectrum.Copies,
                                                                dbRefSpectrum.NumPeaks,
                                                                (int)(dbRefSpectrum.Id ?? 0),
                                                                default(IndexedRetentionTimes),
                                                                default(IndexedIonMobilities)));
                    if (progressMonitor != null)
                    {
                        if (progressMonitor.IsCanceled)
                            return null;
                        int progressNew = (i * 100 / listSpectra.Count);
                        if (progressPercent != progressNew)
                        {
                            progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                            progressPercent = progressNew;
                        }
                    }
                }

                session.Flush();
                session.Clear();
                // Simulate ctime(d), which is what BlibBuild uses.
                string createTime = string.Format("{0:ddd MMM dd HH:mm:ss yyyy}", DateTime.Now); // Not L10N? different date/time format in different countries
                DbLibInfo libInfo = new DbLibInfo
                {
                    LibLSID = libLsid,
                    CreateTime = createTime,
                    NumSpecs = dictLibrary.Count,
                    MajorVersion = majorVer,
                    MinorVersion = minorVer
                };

                session.Save(libInfo);
                session.Flush();
                session.Clear();
                transaction.Commit();
            }

            var libraryEntries = dictLibrary.Values.ToArray();
            return new BiblioSpecLiteLibrary(librarySpec, libLsid, majorVer, minorVer,
                libraryEntries, FileStreamManager.Default);
        }
Code Example #6
        private bool ExportReport(string fileName, char separator)
        {
            try
            {
                using (var saver = new FileSaver(fileName))
                {
                    if (!saver.CanSave(this))
                        return false;

                    using (var writer = new StreamWriter(saver.SafeName))
                    {
                        Report report = GetReport();
                        bool success = false;

                        using (var longWait = new LongWaitDlg { Text = Resources.ExportReportDlg_ExportReport_Generating_Report })
                        {
                            longWait.PerformWork(this, 1500, broker =>
                            {
                                var status = new ProgressStatus(Resources.ExportReportDlg_GetDatabase_Analyzing_document);
                                broker.UpdateProgress(status);
                                Database database = EnsureDatabase(broker, 80, ref status);
                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status = status.ChangeMessage(Resources.ExportReportDlg_ExportReport_Building_report));
                                ResultSet resultSet = report.Execute(database);
                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status = status.ChangePercentComplete(95)
                                    .ChangeMessage(Resources.ExportReportDlg_ExportReport_Writing_report));

                                ResultSet.WriteReportHelper(resultSet, separator, writer, CultureInfo);

                                writer.Flush();
                                writer.Close();

                                if (broker.IsCanceled)
                                    return;
                                broker.UpdateProgress(status.Complete());

                                saver.Commit();
                                success = true;
                            });
                        }

                        return success;
                    }
                }
            }
            catch (Exception x)
            {
                MessageDlg.ShowWithException(this, string.Format(Resources.ExportReportDlg_ExportReport_Failed_exporting_to, fileName, GetExceptionDisplayMessage(x)), x);
                return false;
            }
        }
Code Example #7
File: Import.cs  Project: lgatto/proteowizard
        public IEnumerable<PeptideGroupDocNode> Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount)
        {
            // Set starting values for limit counters
            _countPeptides = Document.PeptideCount;
            _countIons = Document.PeptideTransitionCount;

            // Store set of existing FASTA sequences to keep from duplicating
            HashSet<FastaSequence> set = new HashSet<FastaSequence>();
            foreach (PeptideGroupDocNode nodeGroup in Document.Children)
            {
                FastaSequence fastaSeq = nodeGroup.Id as FastaSequence;
                if (fastaSeq != null)
                    set.Add(fastaSeq);
            }

            var peptideGroupsNew = new List<PeptideGroupDocNode>();
            PeptideGroupBuilder seqBuilder = null;

            long linesRead = 0;
            int progressPercent = -1;

            string line;
            var status = new ProgressStatus(string.Empty);
            while ((line = reader.ReadLine()) != null)
            {
                linesRead++;
                if (progressMonitor != null)
                {
                    // TODO when changing from ILongWaitBroker to IProgressMonitor, the old code was:
                    // if (progressMonitor.IsCanceled || progressMonitor.IsDocumentChanged(Document))
                    // IProgressMonitor does not have IsDocumentChanged.
                    if (progressMonitor.IsCanceled)
                        return new PeptideGroupDocNode[0];
                    int progressNew = (int) (linesRead*100/lineCount);
                    if (progressPercent != progressNew)
                        progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressPercent = progressNew));
                }

                if (line.StartsWith(">")) // Not L10N
                {
                    if (_countIons > SrmDocument.MAX_TRANSITION_COUNT ||
                            _countPeptides > SrmDocument.MAX_PEPTIDE_COUNT)
                        throw new InvalidDataException(Resources.FastaImporter_Import_Document_size_limit_exceeded);

                    if (seqBuilder != null)
                        AddPeptideGroup(peptideGroupsNew, set, seqBuilder);

                    seqBuilder = _modMatcher == null
                        ? new PeptideGroupBuilder(line, PeptideList, Document.Settings)
                        : new PeptideGroupBuilder(line, _modMatcher, Document.Settings);
                    if (progressMonitor != null)
                        progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.FastaImporter_Import_Adding_protein__0__, seqBuilder.Name)));
                }
                else if (seqBuilder == null)
                {
                    if (line.Trim().Length == 0)
                        continue;
                    break;
                }
                else
                {
                    seqBuilder.AppendSequence(line);
                }
            }
            // Add last sequence.
            if (seqBuilder != null)
                AddPeptideGroup(peptideGroupsNew, set, seqBuilder);
            return peptideGroupsNew;
        }
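
Examples #2, #5, and #7 all throttle progress the same way: compute the whole-percent value from a counter and call UpdateProgress only when it changes, so even a very large input produces at most about one hundred callbacks. A standalone sketch of that throttle follows; the console output stands in for the progress monitor, and the names are illustrative.

using System;
using System.IO;
using System.Linq;

static class ProgressThrottleSketch
{
    static void Main()
    {
        const int lineCount = 1000;
        TextReader reader = new StringReader(
            string.Join("\n", Enumerable.Range(1, lineCount).Select(i => "line " + i)));

        int linesRead = 0;
        int progressPercent = -1;
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            linesRead++;
            // Integer math: the value only changes about 100 times no matter how many lines there are.
            int progressNew = (int)(linesRead * 100L / lineCount);
            if (progressNew != progressPercent)
            {
                progressPercent = progressNew;
                Console.WriteLine("progress: {0}% ({1})", progressPercent, line);
            }
        }
    }
}
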
Code Example #8
        public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status, TextWriter writer)
        {
            // Make sure required streams are redirected.
            psi.RedirectStandardOutput = true;
            psi.RedirectStandardError = true;

            var proc = Process.Start(psi);
            if (proc == null)
                throw new IOException(string.Format("Failure starting {0} command.", psi.FileName)); // Not L10N
            if (stdin != null)
            {
                try
                {
                    proc.StandardInput.Write(stdin);
                }
                finally
                {
                    proc.StandardInput.Close();
                }
            }

            var reader = new ProcessStreamReader(proc);
            StringBuilder sbError = new StringBuilder();
            int percentLast = 0;
            string line;
            while ((line = reader.ReadLine(progress)) != null)
            {
                if (writer != null && !line.StartsWith(HideLinePrefix))
                    writer.WriteLine(line);

                if (progress == null || line.ToLowerInvariant().StartsWith("error")) // Not L10N
                {
                    sbError.AppendLine(line);
                }
                else // if (progress != null)
                {
                    if (progress.IsCanceled)
                    {
                        proc.Kill();
                        progress.UpdateProgress(status = status.Cancel());
                        return;
                    }

                    if (line.EndsWith("%")) // Not L10N
                    {
                        double percent;
                        string[] parts = line.Split(' ');
                        string percentPart = parts[parts.Length - 1];
                        if (double.TryParse(percentPart.Substring(0, percentPart.Length - 1), out percent))
                        {
                            percentLast = (int) percent;
                            status = status.ChangePercentComplete(percentLast);
                            if (percent >= 100 && status.SegmentCount > 0)
                                status = status.NextSegment();
                            progress.UpdateProgress(status);
                        }
                    }
                    else if (MessagePrefix == null || line.StartsWith(MessagePrefix))
                    {
                        // Remove prefix, if there is one.
                        if (MessagePrefix != null)
                            line = line.Substring(MessagePrefix.Length);

                        status = status.ChangeMessage(line);
                        progress.UpdateProgress(status);
                    }
                }
            }
            proc.WaitForExit();
            int exit = proc.ExitCode;
            if (exit != 0)
            {
                line = proc.StandardError.ReadLine();
                if (line != null)
                    sbError.AppendLine(line);
                if (sbError.Length == 0)
                    throw new IOException("Error occurred running process."); // Not L10N
                throw new IOException(sbError.ToString());
            }
            // Make sure to complete the status if the process succeeded but never
            // printed 100% to the console
            if (percentLast < 100)
            {
                status = status.ChangePercentComplete(100);
                if (status.SegmentCount > 0)
                    status = status.NextSegment();
                if (progress != null)
                    progress.UpdateProgress(status);
            }
        }
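
The percent-handling branch in Example #8 assumes the external tool prints lines that end in a number followed by "%": it takes the last space-separated token and parses the digits in front of the percent sign. A small self-contained sketch of that parsing step is below; unlike the original, it pins the parse to the invariant culture, which is an assumption rather than the code's actual behavior.

using System;
using System.Globalization;

static class PercentLineSketch
{
    // Extract a trailing "NN%" token from a console line, e.g. "Scoring peptides 42%".
    static bool TryParseTrailingPercent(string line, out int percent)
    {
        percent = 0;
        if (string.IsNullOrEmpty(line) || !line.EndsWith("%"))
            return false;

        string[] parts = line.Split(' ');
        string token = parts[parts.Length - 1];            // e.g. "42%"
        string digits = token.Substring(0, token.Length - 1);

        double value;
        if (!double.TryParse(digits, NumberStyles.Float, CultureInfo.InvariantCulture, out value))
            return false;

        percent = (int)value;
        return true;
    }

    static void Main()
    {
        int pct;
        Console.WriteLine(TryParseTrailingPercent("Scoring peptides 42%", out pct) ? pct.ToString() : "no match");
        Console.WriteLine(TryParseTrailingPercent("error: no percent here", out pct) ? pct.ToString() : "no match");
    }
}
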
Code Example #9
 public static SrmDocument RecalculateAlignments(SrmDocument document, IProgressMonitor progressMonitor)
 {
     var newSources = ListAvailableRetentionTimeSources(document.Settings);
     var newResultsSources = ListSourcesForResults(document.Settings.MeasuredResults, newSources);
     var allLibraryRetentionTimes = ReadAllRetentionTimes(document, newSources);
     var newFileAlignments = new List<FileRetentionTimeAlignments>();
     var progressStatus = new ProgressStatus("Aligning retention times"); // Not L10N?  Will users see this?
     foreach (var retentionTimeSource in newResultsSources.Values)
     {
         progressStatus = progressStatus.ChangePercentComplete(100*newFileAlignments.Count/newResultsSources.Count);
         progressMonitor.UpdateProgress(progressStatus);
         try
         {
             var fileAlignments = CalculateFileRetentionTimeAlignments(retentionTimeSource.Name, allLibraryRetentionTimes, progressMonitor);
             newFileAlignments.Add(fileAlignments);
         }
         catch (OperationCanceledException)
         {
             progressMonitor.UpdateProgress(progressStatus.Cancel());
             return null;
         }
     }
     var newDocRt = new DocumentRetentionTimes(newSources.Values, newFileAlignments);
     var newDocument = document.ChangeSettings(document.Settings.ChangeDocumentRetentionTimes(newDocRt));
     Debug.Assert(IsLoaded(newDocument));
     progressMonitor.UpdateProgress(progressStatus.Complete());
     return newDocument;
 }
Code Example #10
File: SkylineFiles.cs  Project: lgatto/proteowizard
        private void ImportMassList(MassListInputs inputs, string description)
        {
            SrmTreeNode nodePaste = SequenceTree.SelectedNode as SrmTreeNode;
            IdentityPath insertPath = nodePaste != null ? nodePaste.Path : null;
            IdentityPath selectPath = null;
            List<MeasuredRetentionTime> irtPeptides = null;
            List<SpectrumMzInfo> librarySpectra = null;
            List<TransitionImportErrorInfo> errorList = null;
            List<PeptideGroupDocNode> peptideGroups = null;
            RetentionTimeRegression retentionTimeRegressionStore = null;
            var docCurrent = DocumentUI;
            SrmDocument docNew = null;
            var retentionTimeRegression = docCurrent.Settings.PeptideSettings.Prediction.RetentionTime;
            RCalcIrt calcIrt = retentionTimeRegression != null ? (retentionTimeRegression.Calculator as RCalcIrt) : null;
            using (var longWaitDlg = new LongWaitDlg(this) {Text = description})
            {
                longWaitDlg.PerformWork(this, 1000, longWaitBroker =>
                {
                    docNew = docCurrent.ImportMassList(inputs, longWaitBroker,
                        insertPath, out selectPath, out irtPeptides, out librarySpectra, out errorList, out peptideGroups);
                });
            }
            bool isDocumentSame = ReferenceEquals(docNew, docCurrent);
            // If nothing was imported (e.g. operation was canceled or zero error-free transitions) and also no errors, just return
            if (isDocumentSame && !errorList.Any())
                return;
            // Show the errors, giving the option to accept the transitions without errors,
            // if there are any
            if (errorList.Any())
            {
                using (var errorDlg = new ImportTransitionListErrorDlg(errorList, isDocumentSame))
                {
                    if (errorDlg.ShowDialog(this) == DialogResult.Cancel || isDocumentSame)
                    {
                        return;
                    }
                }
            }

            var dbIrtPeptides = irtPeptides.Select(rt => new DbIrtPeptide(rt.PeptideSequence, rt.RetentionTime, false, TimeSource.scan)).ToList();
            var dbIrtPeptidesFilter = ImportAssayLibraryHelper.GetUnscoredIrtPeptides(dbIrtPeptides, calcIrt);
            bool overwriteExisting = false;
            MassListInputs irtInputs = null;
            // If there are no iRT peptides or none with different values than the database, don't import any iRT's
            if (dbIrtPeptidesFilter.Any())
            {
                // Ask whether or not to include iRT peptides in the paste
                string useIrtMessage = calcIrt == null
                    ? Resources.SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_values__but_the_document_does_not_have_an_iRT_calculator___Create_a_new_calculator_and_add_these_iRT_values_
                    : Resources.SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_iRT_library_values___Add_these_iRT_values_to_the_iRT_calculator_;
                string yesButton = calcIrt == null
                    ? Resources.SkylineWindow_ImportMassList__Create___
                    : Resources.SkylineWindow_ImportMassList_Add;
                var useIrtResult = MultiButtonMsgDlg.Show(this, useIrtMessage,
                    yesButton, Resources.SkylineWindow_ImportMassList__Skip, true);
                if (useIrtResult == DialogResult.Cancel)
                {
                    return;
                }
                if (useIrtResult == DialogResult.Yes)
                {
                    if (calcIrt == null)
                    {
                        // If there is no iRT calculator, ask the user to create one
                        using (var dlg = new CreateIrtCalculatorDlg(docNew, DocumentFilePath, Settings.Default.RTScoreCalculatorList, peptideGroups))
                        {
                            if (dlg.ShowDialog(this) != DialogResult.OK)
                            {
                                return;
                            }

                            docNew = dlg.Document;
                            calcIrt = (RCalcIrt)docNew.Settings.PeptideSettings.Prediction.RetentionTime.Calculator;
                            dlg.UpdateLists(librarySpectra, dbIrtPeptidesFilter);
                            if (!string.IsNullOrEmpty(dlg.IrtFile))
                                irtInputs = new MassListInputs(dlg.IrtFile);
                        }
                    }
                    string dbPath = calcIrt.DatabasePath;
                    IrtDb db = File.Exists(dbPath) ? IrtDb.GetIrtDb(dbPath, null) : IrtDb.CreateIrtDb(dbPath);
                    var oldPeptides = db.GetPeptides().ToList();
                    IList<DbIrtPeptide.Conflict> conflicts;
                    dbIrtPeptidesFilter = DbIrtPeptide.MakeUnique(dbIrtPeptidesFilter);
                    DbIrtPeptide.FindNonConflicts(oldPeptides, dbIrtPeptidesFilter, null, out conflicts);
                    // Ask whether to keep or overwrite peptides that are present in the import and already in the database
                    if (conflicts.Any())
                    {
                        string messageOverwrite = string.Format(Resources.SkylineWindow_ImportMassList_The_iRT_calculator_already_contains__0__of_the_imported_peptides_,
                                                                conflicts.Count);
                        var overwriteResult = MultiButtonMsgDlg.Show(this,
                            TextUtil.LineSeparate(messageOverwrite, conflicts.Count == 1
                                ? Resources.SkylineWindow_ImportMassList_Keep_the_existing_iRT_value_or_overwrite_with_the_imported_value_
                                : Resources.SkylineWindow_ImportMassList_Keep_the_existing_iRT_values_or_overwrite_with_imported_values_),
                            Resources.SkylineWindow_ImportMassList__Keep, Resources.SkylineWindow_ImportMassList__Overwrite, true);
                        if (overwriteResult == DialogResult.Cancel)
                        {
                            return;
                        }
                        overwriteExisting = overwriteResult == DialogResult.No;
                    }
                    using (var longWaitDlg = new LongWaitDlg(this) { Text = Resources.SkylineWindow_ImportMassList_Adding_iRT_values_})
                    {
                        longWaitDlg.PerformWork(this, 100, progressMonitor =>
                            docNew = docNew.AddIrtPeptides(dbIrtPeptidesFilter, overwriteExisting, progressMonitor));
                    }
                    if (docNew == null)
                        return;
                    retentionTimeRegressionStore = docNew.Settings.PeptideSettings.Prediction.RetentionTime;
                }
            }
            BiblioSpecLiteSpec docLibrarySpec = null;
            BiblioSpecLiteLibrary docLibrary = null;
            int indexOldLibrary = -1;
            if (librarySpectra.Any())
            {
                string addLibraryMessage = Resources.SkylineWindow_ImportMassList_The_transition_list_appears_to_contain_spectral_library_intensities___Create_a_document_library_from_these_intensities_;
                var addLibraryResult = MultiButtonMsgDlg.Show(this, addLibraryMessage,
                    Resources.SkylineWindow_ImportMassList__Create___, Resources.SkylineWindow_ImportMassList__Skip,
                    true);
                if (addLibraryResult == DialogResult.Cancel)
                {
                    return;
                }
                if (addLibraryResult == DialogResult.Yes)
                {
                    // Can't name a library after the document if the document is unsaved
                    // In this case, prompt to save
                    if (DocumentFilePath == null)
                    {
                        string saveDocumentMessage = Resources.SkylineWindow_ImportMassList_You_must_save_the_Skyline_document_in_order_to_create_a_spectral_library_from_a_transition_list_;
                        var saveDocumentResult = MultiButtonMsgDlg.Show(this, saveDocumentMessage, MultiButtonMsgDlg.BUTTON_OK);
                        if (saveDocumentResult == DialogResult.Cancel)
                        {
                            return;
                        }
                        else if (!SaveDocumentAs())
                        {
                            return;
                        }
                    }

                    librarySpectra = SpectrumMzInfo.RemoveDuplicateSpectra(librarySpectra);

                    string documentLibrary = BiblioSpecLiteSpec.GetLibraryFileName(DocumentFilePath);
                    // ReSharper disable once AssignNullToNotNullAttribute
                    string outputPath = Path.Combine(Path.GetDirectoryName(documentLibrary),
                                                     Path.GetFileNameWithoutExtension(documentLibrary) + BiblioSpecLiteSpec.ASSAY_NAME + BiblioSpecLiteSpec.EXT);
                    bool libraryExists = File.Exists(outputPath);
                    string name = Path.GetFileNameWithoutExtension(DocumentFilePath) + BiblioSpecLiteSpec.ASSAY_NAME;
                    indexOldLibrary = docNew.Settings.PeptideSettings.Libraries.LibrarySpecs.IndexOf(spec => spec != null && spec.FilePath == outputPath);
                    bool libraryLinkedToDoc = indexOldLibrary != -1;
                    if (libraryLinkedToDoc)
                    {
                        string oldName = docNew.Settings.PeptideSettings.Libraries.LibrarySpecs[indexOldLibrary].Name;
                        var libraryOld = docNew.Settings.PeptideSettings.Libraries.GetLibrary(oldName);
                        var additionalSpectra = SpectrumMzInfo.GetInfoFromLibrary(libraryOld);
                        additionalSpectra = SpectrumMzInfo.RemoveDuplicateSpectra(additionalSpectra);
                        librarySpectra = SpectrumMzInfo.MergeWithOverwrite(librarySpectra, additionalSpectra);
                        foreach (var stream in libraryOld.ReadStreams)
                            stream.CloseStream();
                    }
                    if (libraryExists && !libraryLinkedToDoc)
                    {
                        string replaceLibraryMessage = string.Format(Resources.SkylineWindow_ImportMassList_There_is_an_existing_library_with_the_same_name__0__as_the_document_library_to_be_created___Overwrite_this_library_or_skip_import_of_library_intensities_,
                                          name);
                        // If the document does not have an assay library linked to it, then ask if user wants to delete the one that we have found
                        var replaceLibraryResult = MultiButtonMsgDlg.Show(this, replaceLibraryMessage,
                            Resources.SkylineWindow_ImportMassList__Overwrite, Resources.SkylineWindow_ImportMassList__Skip, true);
                        if (replaceLibraryResult == DialogResult.Cancel)
                            return;
                        if (replaceLibraryResult == DialogResult.No)
                            librarySpectra.Clear();
                    }
                    if (librarySpectra.Any())
                    {
                        // Delete the existing library; either it's not tied to the document or we've already extracted the spectra
                        if (libraryExists)
                        {
                            FileEx.SafeDelete(outputPath);
                            FileEx.SafeDelete(Path.ChangeExtension(outputPath, BiblioSpecLiteSpec.EXT_REDUNDANT));
                        }
                        using (var blibDb = BlibDb.CreateBlibDb(outputPath))
                        {
                            docLibrarySpec = new BiblioSpecLiteSpec(name, outputPath);
                            using (var longWaitDlg = new LongWaitDlg(this) { Text = Resources.SkylineWindow_ImportMassList_Creating_Spectral_Library })
                            {
                                longWaitDlg.PerformWork(this, 1000, progressMonitor =>
                                {
                                    docLibrary = blibDb.CreateLibraryFromSpectra(docLibrarySpec, librarySpectra, name, progressMonitor);
                                    if (docLibrary == null)
                                        return;
                                    var newSettings = docNew.Settings.ChangePeptideLibraries(libs => libs.ChangeLibrary(docLibrary, docLibrarySpec, indexOldLibrary));
                                    var status = new ProgressStatus(Resources.SkylineWindow_ImportMassList_Finishing_up_import);
                                    progressMonitor.UpdateProgress(status);
                                    progressMonitor.UpdateProgress(status.ChangePercentComplete(100));
                                    docNew = docNew.ChangeSettings(newSettings);
                                });
                                if (docLibrary == null)
                                    return;
                            }
                        }
                    }
                }
            }

            ModifyDocument(description, doc =>
            {
                if (ReferenceEquals(doc, docCurrent))
                    return docNew;
                try
                {
                    // If the document was changed during the operation, try all the changes again
                    // using the information given by the user.
                    docCurrent = DocumentUI;
                    doc = doc.ImportMassList(inputs, insertPath, out selectPath);
                    if (irtInputs != null)
                    {
                        doc = doc.ImportMassList(irtInputs, null, out selectPath);
                    }
                    var newSettings = doc.Settings;
                    if (retentionTimeRegressionStore != null)
                    {
                        newSettings = newSettings.ChangePeptidePrediction(prediction =>
                            prediction.ChangeRetentionTime(retentionTimeRegressionStore));
                    }
                    if (docLibrarySpec != null)
                    {
                        newSettings = newSettings.ChangePeptideLibraries(libs =>
                            libs.ChangeLibrary(docLibrary, docLibrarySpec, indexOldLibrary));
                    }
                    if (!ReferenceEquals(doc.Settings, newSettings))
                        doc = doc.ChangeSettings(newSettings);
                }
                catch (Exception x)
                {
                    throw new InvalidDataException(string.Format(Resources.SkylineWindow_ImportMassList_Unexpected_document_change_during_operation___0_, x.Message, x));
                }
                return doc;
            });

            if (selectPath != null)
                SequenceTree.SelectedPath = selectPath;
            if (retentionTimeRegressionStore != null)
            {
                Settings.Default.RetentionTimeList.Add(retentionTimeRegressionStore);
                Settings.Default.RTScoreCalculatorList.Add(retentionTimeRegressionStore.Calculator);
            }
            if (docLibrarySpec != null)
            {
                Settings.Default.SpectralLibraryList.Insert(0, docLibrarySpec);
            }
        }
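The ModifyDocument callback above follows an optimistic pattern: the expensive import work is done up front against docCurrent, and the precomputed docNew is swapped in only if no other change landed in the meantime; otherwise the whole import is replayed on top of the newer document. A minimal sketch of that idea, using a hypothetical helper rather than the actual SkylineWindow code:

// Sketch only: hypothetical helper showing the swap-or-replay pattern used by
// the ModifyDocument callback above.
static SrmDocument ApplyPrecomputedChange(SrmDocument doc, SrmDocument docCurrent,
    SrmDocument docNew, Func<SrmDocument, SrmDocument> replayChange)
{
    // Fast path: nothing changed since docNew was prepared, so return it directly.
    if (ReferenceEquals(doc, docCurrent))
        return docNew;
    // Slow path: another modification won the race; redo the change against
    // the document that is actually current (may throw, as handled above).
    return replayChange(doc);
}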
Code Example #11
 public Digestion Digest(Enzyme enzyme, DigestSettings digestSettings, ILoadMonitor loader)
 {
     ProgressStatus progressStatus = new ProgressStatus(string.Format(Resources.BackgroundProteomeSpec_Digest_Digesting__0__, enzyme.Name));
     using (var proteomeDb = OpenProteomeDb())
     {
         return proteomeDb.Digest(new ProteaseImpl(enzyme),
                                     (s, i) =>
                                     {
                                         loader.UpdateProgress(progressStatus.ChangePercentComplete(i));
                                         return !loader.IsCanceled;
                                     });
     }
 }
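Note the pattern these examples share: ProgressStatus is immutable, so Cancel, Complete and ChangePercentComplete all return a new status object that must be handed to UpdateProgress, and captured back into the local variable when later updates build on it. A minimal sketch of that reporting loop, assuming only the IsCanceled and UpdateProgress members the examples above actually use:

// Sketch only: the immutable-status reporting pattern used throughout these examples.
var status = new ProgressStatus("Working");           // message text is illustrative
for (int percent = 0; percent <= 100; percent += 10)
{
    if (loader.IsCanceled)
    {
        loader.UpdateProgress(status = status.Cancel());
        return false;
    }
    // Capture the returned status so the next update builds on this one.
    loader.UpdateProgress(status = status.ChangePercentComplete(percent));
}
loader.UpdateProgress(status.Complete());
return true;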
Code Example #12
File: BiblioSpec.cs Project: lgatto/proteowizard
        // ReSharper restore UnusedMember.Local
        private bool Load(ILoadMonitor loader)
        {
            ProgressStatus status =
                new ProgressStatus(string.Format(Resources.BiblioSpecLibrary_Load_Loading__0__library,
                                                 Path.GetFileName(FilePath)));
            loader.UpdateProgress(status);

            long lenRead = 0;
            // AdlerChecksum checksum = new AdlerChecksum();

            try
            {
                // Use a buffered stream for initial read
                BufferedStream stream = new BufferedStream(CreateStream(loader), 32 * 1024);

                int countHeader = (int) LibHeaders.count*4;
                byte[] libHeader = new byte[countHeader];
                if (stream.Read(libHeader, 0, countHeader) != countHeader)
                    throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted);
                lenRead += countHeader;
                // Check the first byte of the primary version number to determine
                // whether the format is little- or big-endian.  Little-endian will
                // have the version number in this byte, while big-endian will have zero.
                if (libHeader[(int) LibHeaders.version1 * 4] == 0)
                    _bigEndian = true;

                int numSpectra = GetInt32(libHeader, (int) LibHeaders.num_spectra);
                var dictLibrary = new Dictionary<LibKey, BiblioSpectrumInfo>(numSpectra);
                var setSequences = new HashSet<LibSeqKey>();

                string revStr = string.Format("{0}.{1}", // Not L10N
                                              GetInt32(libHeader, (int) LibHeaders.version1),
                                              GetInt32(libHeader, (int) LibHeaders.version2));
                Revision = float.Parse(revStr, CultureInfo.InvariantCulture);

                // checksum.MakeForBuff(libHeader, AdlerChecksum.ADLER_START);

                countHeader = (int) SpectrumHeaders.count*4;
                byte[] specHeader = new byte[1024];
                byte[] specSequence = new byte[1024];
                for (int i = 0; i < numSpectra; i++)
                {
                    int percent = i * 100 / numSpectra;
                    if (status.PercentComplete != percent)
                    {
                        // Check for cancellation after each integer change in percent loaded.
                        if (loader.IsCanceled)
                        {
                            loader.UpdateProgress(status.Cancel());
                            return false;
                        }

                        // If not cancelled, update progress.
                        loader.UpdateProgress(status = status.ChangePercentComplete(percent));
                    }

                    // Read spectrum header
                    int bytesRead = stream.Read(specHeader, 0, countHeader);
                    if (bytesRead != countHeader)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);

                    // If this is the first header, and the sequence length is zero,
                    // then this is a Linux format library.  Switch to linux format,
                    // and start over.
                    if (i == 0 && GetInt32(specHeader, (int)SpectrumHeaders.seq_len) == 0)
                    {
                        _linuxFormat = true;
                        stream.Seek(lenRead, SeekOrigin.Begin);

                        // Re-read spectrum header
                        countHeader = (int)SpectrumHeadersLinux.count * 4;
                        bytesRead = stream.Read(specHeader, 0, countHeader);
                        if (bytesRead != countHeader)
                            throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);
                    }

                    lenRead += bytesRead;

                    // checksum.MakeForBuff(specHeader, checksum.ChecksumValue);

                    int charge = GetInt32(specHeader, (int)SpectrumHeaders.charge);
                    if (charge > TransitionGroup.MAX_PRECURSOR_CHARGE)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted);

                    int numPeaks = GetInt32(specHeader, (int)SpectrumHeaders.num_peaks);
                    int seqLength = GetInt32(specHeader, (_linuxFormat ?
                        (int)SpectrumHeadersLinux.seq_len : (int)SpectrumHeaders.seq_len));
                    int copies = GetInt32(specHeader, (_linuxFormat ?
                        (int)SpectrumHeadersLinux.copies : (int)SpectrumHeaders.copies));

                    // Read sequence information
                    int countSeq = (seqLength + 1)*2;
                    if (stream.Read(specSequence, 0, countSeq) != countSeq)
                        throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted);

                    lenRead += countSeq;

                    // checksum.MakeForBuff(specSequence, checksum.ChecksumValue);

                    // Store in dictionary
                    if (IsUnmodified(specSequence, seqLength + 1, seqLength))
                    {
                        // These libraries should not have duplicates, but just in case.
                        // CONSIDER: Emit error about redundancy?
                        // These legacy libraries assume [+57.0] modified Cysteine
                        LibKey key = new LibKey(GetCModified(specSequence, ref seqLength), 0, seqLength, charge);
                        if (!dictLibrary.ContainsKey(key))
                            dictLibrary.Add(key, new BiblioSpectrumInfo((short)copies, (short)numPeaks, lenRead));
                        setSequences.Add(new LibSeqKey(key));
                    }

                    // Read over peaks
                    int countPeaks = 2*sizeof(Single)*numPeaks;
                    stream.Seek(countPeaks, SeekOrigin.Current);    // Skip spectrum
                    lenRead += countPeaks;

                    // checksum.MakeForBuff(specPeaks, checksum.ChecksumValue);
                }

                // Checksum = checksum.ChecksumValue;
                _dictLibrary = dictLibrary;
                _setSequences = setSequences;
                loader.UpdateProgress(status.Complete());
                return true;
            }
            catch (InvalidDataException x)
            {
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            catch (IOException x)
            {
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            catch (Exception x)
            {
                x = new Exception(string.Format(Resources.BiblioSpecLibrary_Load_Failed_loading_library__0__, FilePath), x);
                loader.UpdateProgress(status.ChangeErrorException(x));
                return false;
            }
            finally
            {
                if (ReadStream != null)
                {
                    // Close the read stream to ensure we never leak it.
                    // This only costs one extra open, the first time the
                    // active document tries to read.
                    try { ReadStream.CloseStream(); }
                    catch(IOException) {}
                }
            }
        }
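Load reads every integer field through a GetInt32 helper that honors the _bigEndian flag detected from the header; that helper is not shown in this excerpt. A plausible sketch, treating the second argument as an index into an array of 4-byte values (the actual implementation may differ):

// Sketch only: read the index-th 32-bit value from the buffer, honoring the
// byte order detected in the library header.
private int GetInt32(byte[] bytes, int index)
{
    int offset = index * 4;
    if (_bigEndian)
        return (bytes[offset] << 24) | (bytes[offset + 1] << 16) |
               (bytes[offset + 2] << 8) | bytes[offset + 3];
    return bytes[offset] | (bytes[offset + 1] << 8) |
           (bytes[offset + 2] << 16) | (bytes[offset + 3] << 24);
}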
Code Example #13
File: Import.cs Project: lgatto/proteowizard
        public IEnumerable<PeptideGroupDocNode> Import(IProgressMonitor progressMonitor,
            ColumnIndices indices,
            IDictionary<string, FastaSequence> dictNameSeq,
            out List<MeasuredRetentionTime> irtPeptides,
            out List<SpectrumMzInfo> librarySpectra,
            out List<TransitionImportErrorInfo> errorList)
        {
            irtPeptides = new List<MeasuredRetentionTime>();
            librarySpectra = new List<SpectrumMzInfo>();
            errorList = new List<TransitionImportErrorInfo>();
            var status = new ProgressStatus(string.Empty);
            // Get the lines used to guess the necessary columns and create the row reader
            if (progressMonitor != null)
            {
                if (progressMonitor.IsCanceled)
                    return new PeptideGroupDocNode[0];
                status = status.ChangeMessage(Resources.MassListImporter_Import_Reading_transition_list);
            }

            var lines = new List<string>(Inputs.ReadLines());

            if (progressMonitor != null)
            {
                if (progressMonitor.IsCanceled)
                    return new PeptideGroupDocNode[0];
                status = status.ChangeMessage(Resources.MassListImporter_Import_Inspecting_peptide_sequence_information);
            }
            if (indices != null)
            {
                _rowReader = new GeneralRowReader(FormatProvider, Separator, indices, Settings, lines);
            }
            else
            {
                // Check first line for validity
                var line = lines.FirstOrDefault();
                string[] fields = line.ParseDsvFields(Separator);
                string[] headers = fields.All(field => GetColumnType(field.Trim(), FormatProvider) != typeof (double))
                                       ? fields
                                       : null;
                int decoyColumn = -1;
                int irtColumn = -1;
                int libraryColumn = -1;
                var irtNames = new[] { "tr_recalibrated", "irt" }; // Not L10N
                var libraryNames = new[] { "libraryintensity", "relativeintensity", "relative_intensity", "relativefragmentintensity", "library_intensity" }; // Not L10N
                var decoyNames = new[] { "decoy" }; // Not L10N
                if (headers != null)
                {
                    lines.RemoveAt(0);
                    decoyColumn = headers.IndexOf(col => decoyNames.Contains(col.ToLowerInvariant()));
                    irtColumn = headers.IndexOf(col => irtNames.Contains(col.ToLowerInvariant()));
                    libraryColumn = headers.IndexOf(col => libraryNames.Contains(col.ToLowerInvariant()));
                    line = lines.FirstOrDefault();
                    fields = line != null ? line.ParseDsvFields(Separator) : new string[0];
                }
                if (fields.Length < 3)
                    throw new InvalidDataException(Resources.MassListImporter_Import_Invalid_transition_list_Transition_lists_must_contain_at_least_precursor_m_z_product_m_z_and_peptide_sequence);

                // If no numeric columns in the first row
                _rowReader = ExPeptideRowReader.Create(lines, decoyColumn, FormatProvider, Separator, Settings, irtColumn, libraryColumn);
                if (_rowReader == null)
                {
                    _rowReader = GeneralRowReader.Create(lines, headers, decoyColumn, FormatProvider, Separator, Settings, irtColumn, libraryColumn);
                    if (_rowReader == null && headers == null)
                    {
                        // Check for a possible header row
                        headers = lines[0].Split(Separator);
                        lines.RemoveAt(0);
                        _rowReader = GeneralRowReader.Create(lines, headers, decoyColumn, FormatProvider, Separator, Settings, irtColumn, libraryColumn);
                    }
                    if (_rowReader == null)
                        throw new LineColNumberedIoException(Resources.MassListImporter_Import_Failed_to_find_peptide_column, 1, -1);
                }
            }

            // Set starting values for limit counters
            _countPeptides = Document.PeptideCount;
            _countIons = Document.PeptideTransitionCount;

            List<PeptideGroupDocNode> peptideGroupsNew = new List<PeptideGroupDocNode>();
            PeptideGroupBuilder seqBuilder = null;

            // Process lines
            long lineIndex = 0;
            foreach (string row in lines)
            {
                lineIndex++;
                var errorInfo = _rowReader.NextRow(row, lineIndex);
                if (errorInfo != null)
                {
                    errorList.Add(errorInfo);
                    continue;
                }

                if (progressMonitor != null)
                {
                    if (progressMonitor.IsCanceled)
                    {
                        irtPeptides.Clear();
                        librarySpectra.Clear();
                        errorList.Clear();
                        return new PeptideGroupDocNode[0];
                    }

                    int percentComplete = (int)(lineIndex * 100 / lines.Count);

                    if (status.PercentComplete != percentComplete)
                    {
                        string message = string.Format(Resources.MassListImporter_Import_Importing__0__,
                            _rowReader.TransitionInfo.ProteinName ?? _rowReader.TransitionInfo.PeptideSequence);
                        status = status.ChangePercentComplete(percentComplete).ChangeMessage(message);
                    }
                }

                seqBuilder = AddRow(seqBuilder, _rowReader, dictNameSeq, peptideGroupsNew, lineIndex, irtPeptides, librarySpectra, errorList);
            }

            // Add last sequence.
            if (seqBuilder != null)
                AddPeptideGroup(peptideGroupsNew, seqBuilder, irtPeptides, librarySpectra, errorList);

            return MergeEqualGroups(peptideGroupsNew);
        }
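The header-row test above treats the first line as headers only when none of its fields parses as a number under the current format provider. GetColumnType itself is not shown; a rough sketch of the check it implies (an assumption, not the actual Skyline helper):

// Sketch only: classify a field as numeric or text for header detection.
private static Type GetColumnType(string field, IFormatProvider provider)
{
    double value;
    return double.TryParse(field, NumberStyles.Float | NumberStyles.AllowThousands,
                           provider, out value)
        ? typeof(double)
        : typeof(string);
}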
Code Example #14
File: IrtDb.cs Project: lgatto/proteowizard
 private IrtDb Load(IProgressMonitor loadMonitor, ProgressStatus status)
 {
     var result = ChangeProp(ImClone(this), im => im.LoadPeptides(im.GetPeptides()));
     // Not really possible to show progress, unless we switch to raw reading
     if (loadMonitor != null)
         loadMonitor.UpdateProgress(status.ChangePercentComplete(100));
     return result;
 }
Code Example #15
        /// <summary>
        /// Train the model by iteratively calculating weights to separate target and decoy transition groups.
        /// </summary>
        /// <param name="targets">Target transition groups.</param>
        /// <param name="decoys">Decoy transition groups.</param>
        /// <param name="initParameters">Initial model parameters (weights and bias)</param>
        /// <param name="includeSecondBest"> Include the second best peaks in the targets as decoys?</param>
        /// <param name="preTrain">Use a pre-trained model to bootstrap the learning.</param>
        /// <param name="progressMonitor"></param>
        /// <returns>Immutable model with new weights.</returns>
        public override IPeakScoringModel Train(IList<IList<float[]>> targets, IList<IList<float[]>> decoys, LinearModelParams initParameters,
            bool includeSecondBest = false, bool preTrain = true, IProgressMonitor progressMonitor = null)
        {
            if(initParameters == null)
                initParameters = new LinearModelParams(_peakFeatureCalculators.Count);
            return ChangeProp(ImClone(this), im =>
                {
                    targets = targets.Where(list => list.Count > 0).ToList();
                    decoys = decoys.Where(list => list.Count > 0).ToList();
                    var targetTransitionGroups = new ScoredGroupPeaksSet(targets);
                    var decoyTransitionGroups = new ScoredGroupPeaksSet(decoys);
                    // Bootstrap from the pre-trained legacy model
                    if (preTrain)
                    {
                        var preTrainedWeights = new double[initParameters.Weights.Count];
                        for (int i = 0; i < preTrainedWeights.Length; ++i)
                        {
                            if (double.IsNaN(initParameters.Weights[i]))
                            {
                                preTrainedWeights[i] = double.NaN;
                            }
                        }
                        int standardEnabledCount = GetEnabledCount(LegacyScoringModel.StandardFeatureCalculators, initParameters.Weights);
                        int analyteEnabledCount = GetEnabledCount(LegacyScoringModel.AnalyteFeatureCalculators, initParameters.Weights);
                        bool hasStandards = standardEnabledCount >= analyteEnabledCount;
                        var calculators = hasStandards ? LegacyScoringModel.StandardFeatureCalculators : LegacyScoringModel.AnalyteFeatureCalculators;
                        for (int i = 0; i < calculators.Length; ++i)
                        {
                            if (calculators[i].GetType() == typeof (MQuestRetentionTimePredictionCalc))
                                continue;
                            SetCalculatorValue(calculators[i].GetType(), LegacyScoringModel.DEFAULT_WEIGHTS[i], preTrainedWeights);
                        }
                        targetTransitionGroups.ScorePeaks(preTrainedWeights);
                        decoyTransitionGroups.ScorePeaks(preTrainedWeights);
                    }

                    // Iteratively refine the weights through multiple iterations.
                    var calcWeights = new double[initParameters.Weights.Count];
                    Array.Copy(initParameters.Weights.ToArray(), calcWeights, initParameters.Weights.Count);
                    double decoyMean = 0;
                    double decoyStdev = 0;
                    bool colinearWarning = false;
                    // This may take a long time between progress updates, but just measure progress by cycles through the training
                    var status = new ProgressStatus(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model);
                    if (progressMonitor != null)
                        progressMonitor.UpdateProgress(status);
                    for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++)
                    {
                        if (progressMonitor != null)
                        {
                            if (progressMonitor.IsCanceled)
                                throw new OperationCanceledException();

                            progressMonitor.UpdateProgress(status =
                                status.ChangeMessage(string.Format(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model__iteration__0__of__1__, iteration + 1, MAX_ITERATIONS))
                                      .ChangePercentComplete((iteration + 1) * 100 / (MAX_ITERATIONS + 1)));
                        }

                        im.CalculateWeights(iteration, targetTransitionGroups, decoyTransitionGroups,
                                            includeSecondBest, calcWeights, out decoyMean, out decoyStdev, ref colinearWarning);

                        GC.Collect();   // Each loop generates a number of large objects. GC helps to keep private bytes under control
                    }
                    if (progressMonitor != null)
                        progressMonitor.UpdateProgress(status.ChangePercentComplete(100));

                    var parameters = new LinearModelParams(calcWeights);
                    parameters = parameters.RescaleParameters(decoyMean, decoyStdev);
                    im.Parameters = parameters;
                    im.ColinearWarning = colinearWarning;
                    im.UsesSecondBest = includeSecondBest;
                    im.UsesDecoys = decoys.Count > 0;
                });
        }
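A call to Train might look like the sketch below. The collection names on the caller side are hypothetical; passing null for initParameters simply lets the method allocate default weights sized to the feature calculators, as its first lines show.

// Illustrative call only; targetGroups/decoyGroups stand for per-transition-group
// lists of candidate-peak feature vectors (one float[] per candidate peak).
IPeakScoringModel trained = scoringModel.Train(
    targetGroups,                 // IList<IList<float[]>>
    decoyGroups,                  // IList<IList<float[]>>
    null,                         // initParameters: use default weights
    includeSecondBest: false,
    preTrain: true,
    progressMonitor: progressMonitor);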
Code Example #17
        public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status)
        {
            // Make sure required streams are redirected.
            psi.RedirectStandardOutput = true;
            psi.RedirectStandardError  = true;

            var proc = Process.Start(psi);

            if (proc == null)
            {
                throw new IOException(string.Format("Failure starting {0} command.", psi.FileName));
            }
            if (stdin != null)
            {
                try
                {
                    proc.StandardInput.Write(stdin);
                }
                finally
                {
                    proc.StandardInput.Close();
                }
            }

            var           reader      = new ProcessStreamReader(proc);
            StringBuilder sbError     = new StringBuilder();
            int           percentLast = 0;
            string        line;

            while ((line = reader.ReadLine()) != null)
            {
                if (progress == null || line.ToLower().StartsWith("error"))
                {
                    sbError.AppendLine(line);
                }
                else // if (progress != null)
                {
                    if (progress.IsCanceled)
                    {
                        proc.Kill();
                        progress.UpdateProgress(status = status.Cancel());
                        return;
                    }

                    if (line.EndsWith("%"))
                    {
                        double   percent;
                        string[] parts       = line.Split(' ');
                        string   percentPart = parts[parts.Length - 1];
                        if (double.TryParse(percentPart.Substring(0, percentPart.Length - 1), out percent))
                        {
                            percentLast = (int)percent;
                            status      = status.ChangePercentComplete(percentLast);
                            if (percent >= 100 && status.SegmentCount > 0)
                            {
                                status = status.NextSegment();
                            }
                            progress.UpdateProgress(status);
                        }
                    }
                    else if (MessagePrefix == null || line.StartsWith(MessagePrefix))
                    {
                        // Remove prefix, if there is one.
                        if (MessagePrefix != null)
                        {
                            line = line.Substring(MessagePrefix.Length);
                        }

                        status = status.ChangeMessage(line);
                        progress.UpdateProgress(status);
                    }
                }
            }
            proc.WaitForExit();
            int exit = proc.ExitCode;

            if (exit != 0)
            {
                line = proc.StandardError.ReadLine();
                if (line != null)
                {
                    sbError.AppendLine(line);
                }
                if (sbError.Length == 0)
                {
                    throw new IOException("Error occurred running process.");
                }
                throw new IOException(sbError.ToString());
            }
            // Make sure to complete the status if the process succeeded but never
            // printed 100% to the console
            else if (percentLast < 100)
            {
                status = status.ChangePercentComplete(100);
                if (status.SegmentCount > 0)
                {
                    status = status.NextSegment();
                }
                if (progress != null)
                {
                    progress.UpdateProgress(status);
                }
            }
        }
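Typical use of this Run helper is to prepare a ProcessStartInfo for an external tool, create a starting ProgressStatus, and let the method stream the tool's console output back as progress updates. The command line and the runner variable below are purely illustrative.

// Illustrative only: tool name, arguments and the runner instance are assumptions.
var psi = new ProcessStartInfo("SomeTool.exe", "--input data.raw --output out.csv")
{
    UseShellExecute = false,
    CreateNoWindow = true
};
var status = new ProgressStatus("Running SomeTool");
runner.Run(psi, null, progressMonitor, ref status);   // no stdin needed here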
Code Example #18
        // ReSharper restore NonLocalizedString
        public static List<string> ConvertPilotFiles(IList<string> inputFiles, IProgressMonitor progress, ProgressStatus status)
        {
            string groupConverterExePath = null;
            var inputFilesPilotConverted = new List<string>();

            for (int index = 0; index < inputFiles.Count; index++)
            {
                string inputFile = inputFiles[index];
                if (!inputFile.EndsWith(BiblioSpecLiteBuilder.EXT_PILOT))
                {
                    inputFilesPilotConverted.Add(inputFile);
                    continue;
                }
                string outputFile = Path.ChangeExtension(inputFile, BiblioSpecLiteBuilder.EXT_PILOT_XML);
                // Avoid re-converting files that have already been converted
                if (File.Exists(outputFile))
                {
                    // Avoid duplication, in case the user accidentally adds both .group and .group.xml files
                    // for the same results
                    if (!inputFiles.Contains(outputFile))
                        inputFilesPilotConverted.Add(outputFile);
                    continue;
                }

                string message = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Converting__0__to_xml, Path.GetFileName(inputFile));
                int percent = index * 100 / inputFiles.Count;
                progress.UpdateProgress(status = status.ChangeMessage(message).ChangePercentComplete(percent));

                if (groupConverterExePath == null)
                {
                    var key = Registry.LocalMachine.OpenSubKey(KEY_PROTEIN_PILOT, false);
                    if (key != null)
                    {
                        string proteinPilotCommandWithArgs = (string)key.GetValue(string.Empty);

                        var proteinPilotCommandWithArgsSplit =
                            proteinPilotCommandWithArgs.Split(new[] { "\" \"" }, StringSplitOptions.RemoveEmptyEntries);     // Remove " "%1" // Not L10N
                        string path = Path.GetDirectoryName(proteinPilotCommandWithArgsSplit[0].Trim(new[] { '\\', '\"' })); // Remove preceding "
                        if (path != null)
                        {
                            var groupFileExtractorPath = Path.Combine(path, EXE_GROUP_FILE_EXTRACTOR);
                            if (File.Exists(groupFileExtractorPath))
                            {
                                groupConverterExePath = groupFileExtractorPath;
                            }
                            else
                            {
                                var group2XmlPath = Path.Combine(path, EXE_GROUP2_XML);
                                if (File.Exists(group2XmlPath))
                                {
                                    groupConverterExePath = group2XmlPath;
                                }
                                else
                                {
                                    string errorMessage = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Unable_to_find__0__or__1__in_directory__2____Please_reinstall_ProteinPilot_software_to_be_able_to_handle__group_files_,
                                        EXE_GROUP_FILE_EXTRACTOR, EXE_GROUP2_XML, path);
                                    throw new IOException(errorMessage);
                                }
                            }
                        }
                    }

                    if (groupConverterExePath == null)
                    {
                        throw new IOException(Resources.VendorIssueHelper_ConvertPilotFiles_ProteinPilot_software__trial_or_full_version__must_be_installed_to_convert___group__files_to_compatible___group_xml__files_);
                    }
                }

                // run group2xml
                // ReSharper disable NonLocalizedString
                var argv = new[]
                               {
                                   "XML",
                                   "\"" + inputFile + "\"",
                                   "\"" + outputFile + "\""
                               };
                // ReSharper restore NonLocalizedString

                var psi = new ProcessStartInfo(groupConverterExePath)
                              {
                                  CreateNoWindow = true,
                                  UseShellExecute = false,
                                  // Common directory includes the directory separator
                                  WorkingDirectory = Path.GetDirectoryName(groupConverterExePath) ?? string.Empty,
                                  Arguments = string.Join(" ", argv.ToArray()), // Not L10N
                                  RedirectStandardError = true,
                                  RedirectStandardOutput = true,
                              };

                var sbOut = new StringBuilder();
                var proc = new Process {StartInfo = psi};
                proc.Start();

                var reader = new ProcessStreamReader(proc);
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    if (progress.IsCanceled)
                    {
                        proc.Kill();
                        throw new LoadCanceledException(status.Cancel());
                    }

                    sbOut.AppendLine(line);
                }

                while (!proc.WaitForExit(200))
                {
                    if (progress.IsCanceled)
                    {
                        proc.Kill();
                        return inputFilesPilotConverted;
                    }
                }

                if (proc.ExitCode != 0)
                {
                    throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Failure_attempting_to_convert_file__0__to__group_xml_,
                                                        inputFile), string.Empty, sbOut.ToString()));
                }

                inputFilesPilotConverted.Add(outputFile);
            }
            progress.UpdateProgress(status.ChangePercentComplete(100));
            return inputFilesPilotConverted;
        }
Code Example #19
        /// <summary>
        /// Generate an isolation list containing multiplexed windows, attempting to minimize the number
        /// and frequency of repeated window pairings within each scan.
        /// </summary>
        /// <param name="writer">writer to write results</param>
        /// <param name="windowsPerScan">how many windows are contained in each scan</param>
        /// <param name="progressMonitor">progress monitor</param>
        private void WriteMultiplexedWindows(TextWriter writer, int windowsPerScan, IProgressMonitor progressMonitor)
        {
            int maxInstrumentWindows = Assume.Value(_maxInstrumentWindows);
            int windowCount = IsolationScheme.PrespecifiedIsolationWindows.Count;
            int cycleCount = maxInstrumentWindows / windowCount;
            double totalScore = 0.0;

            // Prepare to generate the best isolation list possible within the given time limit.
            var startTime = DateTime.Now;
            var cycle = new Cycle(windowCount, windowsPerScan);
            int cyclesGenerated = 0;
            ProgressStatus status = new ProgressStatus(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List);
            progressMonitor.UpdateProgress(status);

            // Generate each cycle.
            for (int cycleNumber = 1; cycleNumber <= cycleCount; cycleNumber++)
            {
                // Update status.
                if (progressMonitor.IsCanceled)
                    return;
                progressMonitor.UpdateProgress(status.ChangePercentComplete(
                    (int) (DateTime.Now - startTime).TotalSeconds*100/CalculationTime).ChangeMessage(
                        string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__1__,
                            cycleNumber - 1, cycleCount)));

                double secondsRemaining = CalculationTime - (DateTime.Now - startTime).TotalSeconds;
                double secondsPerCycle = secondsRemaining / (cycleCount - cycleNumber + 1);
                var endTime = DateTime.Now.AddSeconds(secondsPerCycle);

                Cycle bestCycle = null;
                do
                {
                    // Generate a bunch of cycles, looking for one with the lowest score.
                    const int attemptCount = 50;
                    for (int i = 0; i < attemptCount; i++)
                    {
                        cycle.Generate(cycleNumber);
                        if (bestCycle == null || bestCycle.CycleScore > cycle.CycleScore)
                        {
                            bestCycle = new Cycle(cycle);
                            if (bestCycle.CycleScore == 0.0)
                            {
                                cyclesGenerated += i + 1 - attemptCount;
                                endTime = DateTime.Now; // Break outer loop.
                                break;
                            }
                        }
                    }
                    cyclesGenerated += attemptCount;
                } while (DateTime.Now < endTime);

                // ReSharper disable PossibleNullReferenceException
                totalScore += bestCycle.CycleScore;
                WriteCycle(writer, bestCycle, cycleNumber);
                WriteCycleInfo(bestCycle, cycleNumber, cyclesGenerated, startTime);
                // ReSharper restore PossibleNullReferenceException

            }

            WriteTotalScore(totalScore);

            // Show 100% in the wait dialog.
            progressMonitor.UpdateProgress(status.ChangePercentComplete(100).ChangeMessage(
                string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__0__,
                              cycleCount)));
        }
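As a worked example of the sizing arithmetic at the top of this method (numbers are illustrative): with maxInstrumentWindows = 4000 and 40 prespecified isolation windows, cycleCount = 4000 / 40 = 100 cycles; with windowsPerScan = 4, each cycle presumably covers the 40 windows in 10 multiplexed scans. Note that progress here is reported by elapsed time against the CalculationTime budget rather than by cycles completed.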