// Collects the retention-time providers of a library, the best-matching iRT
// standards when AUTO is requested, and the CiRT peptides present in the library
// when CIRT_SHORT is requested (or AUTO found no match).
public static void GetLibIrtProviders(Library lib, IrtStandard standard, IProgressMonitor monitor,
    out IRetentionTimeProvider[] irtProviders, out List<IrtStandard> autoStandards, out DbIrtPeptide[] cirtPeptides)
{
    // Indeterminate progress while the library is queried.
    monitor?.UpdateProgress(new ProgressStatus().ChangePercentComplete(-1));

    irtProviders = lib.RetentionTimeProvidersIrt.ToArray();
    if (irtProviders.Length == 0)
    {
        // Fall back to plain retention-time providers when the library has no iRT times.
        irtProviders = lib.RetentionTimeProviders.ToArray();
    }

    var autoRequested = ReferenceEquals(standard, IrtStandard.AUTO);
    if (autoRequested)
    {
        var librarySequences = irtProviders
            .SelectMany(provider => provider.PeptideRetentionTimes)
            .Select(rt => rt.PeptideSequence);
        autoStandards = IrtStandard.BestMatch(librarySequences);
    }
    else
    {
        autoStandards = null;
    }

    var useCirt = ReferenceEquals(standard, IrtStandard.CIRT_SHORT) ||
                  (autoRequested && autoStandards.Count == 0);
    if (useCirt)
    {
        // Index every target in the library so the CiRT peptides can be intersected with it.
        var libPeptides = new TargetMap<bool>(irtProviders
            .SelectMany(provider => provider.PeptideRetentionTimes)
            .Select(rt => new KeyValuePair<Target, bool>(rt.PeptideSequence, true)));
        cirtPeptides = IrtStandard.CIRT.Peptides
            .Where(pep => libPeptides.ContainsKey(pep.ModifiedTarget))
            .ToArray();
    }
    else
    {
        cirtPeptides = new DbIrtPeptide[0];
    }
}
/// <summary>
/// Creates bitmaps for the texture: one 256x256 marble face per cube-map side,
/// reporting progress after each face and dumping each face to a PNG for debugging.
/// </summary>
/// <param name="progressMonitor">Receives fractional progress in [0,1]</param>
/// <param name="planet">Planet whose renderer supplies the terrain face generator</param>
/// <returns>Array of 6 face bitmaps, indexed by (int)CubeMapFace</returns>
/// <exception cref="InvalidOperationException">No ISpherePlanetTerrainRenderer is available</exception>
private static Bitmap[] CreateTextureBitmaps( IProgressMonitor progressMonitor, ISpherePlanet planet )
{
    const int width = 256;
    const int height = 256;
    Bitmap[] faceBitmaps = new Bitmap[ 6 ];
    progressMonitor.UpdateProgress( 0 );

    ISpherePlanetTerrainRenderer renderer = planet.Renderer.GetRenderer<ISpherePlanetTerrainRenderer>( );
    if ( renderer == null )
    {
        throw new InvalidOperationException( "Expected a valid ISpherePlanetTerrainRenderer to be available" );
    }

    // Generate each face in a fixed order (same order and progress values as the
    // previous hand-unrolled version), reporting progress after each one.
    CubeMapFace[] faces =
    {
        CubeMapFace.PositiveX, CubeMapFace.NegativeX,
        CubeMapFace.PositiveY, CubeMapFace.NegativeY,
        CubeMapFace.PositiveZ, CubeMapFace.NegativeZ
    };
    for ( int faceIndex = 0; faceIndex < faces.Length; ++faceIndex )
    {
        CubeMapFace face = faces[ faceIndex ];
        faceBitmaps[ ( int )face ] = renderer.CreateMarbleTextureFace( face, width, height );
        progressMonitor.UpdateProgress( ( faceIndex + 1 ) / 6.0f );
    }

    // Debug output: dump each generated face to disk.
    foreach ( object cubeMapFace in Enum.GetValues( typeof( CubeMapFace ) ) )
    {
        faceBitmaps[ ( int )cubeMapFace ].Save( "PlanetCubeMap" + cubeMapFace + ".png" );
    }
    return faceBitmaps;
}
// Loads the optimizations into a cloned instance and reports completion.
private OptimizationDb Load(IProgressMonitor loadMonitor, ProgressStatus status)
{
    var loaded = ChangeProp(ImClone(this), im => im.LoadOptimizations(im.GetOptimizations()));

    // Not really possible to show progress, unless we switch to raw reading
    loadMonitor?.UpdateProgress(status.ChangePercentComplete(100));

    return loaded;
}
// Writes the report for the given view to the writer. Returns false when the
// operation was canceled, true on success.
public bool Export(CancellationToken cancellationToken, IProgressMonitor progressMonitor, ref IProgressStatus status,
    ViewInfo viewInfo, TextWriter writer, DsvWriter dsvWriter)
{
    // Fall back to a no-op monitor so the rest of the method can report unconditionally.
    if (progressMonitor == null)
        progressMonitor = new SilentProgressMonitor();

    using (var bindingListSource = new BindingListSource(cancellationToken))
    {
        bindingListSource.SetViewContext(this, viewInfo);
        status = status.ChangePercentComplete(5)
            .ChangeMessage(Resources.ExportReportDlg_ExportReport_Writing_report);
        progressMonitor.UpdateProgress(status);

        WriteDataWithStatus(progressMonitor, ref status, writer, bindingListSource, dsvWriter);
        if (progressMonitor.IsCanceled)
            return false;

        writer.Flush();
        progressMonitor.UpdateProgress(status = status.Complete());
    }
    return true;
}
// Loads the database peptides into a cloned instance, also returning the raw
// peptide list through the out parameter.
private IrtDb Load(IProgressMonitor loadMonitor, ProgressStatus status, out IList<DbIrtPeptide> dbPeptides)
{
    var peptides = GetPeptides();
    dbPeptides = peptides;
    var loaded = ChangeProp(ImClone(this), im => im.LoadPeptides(peptides));

    // Not really possible to show progress, unless we switch to raw reading
    loadMonitor?.UpdateProgress(status.ChangePercentComplete(100));

    return loaded;
}
// Reports a message/percentage pair and returns false when the user has canceled.
private bool UpdateProgressAndCheckForCancellation(IProgressMonitor progressMonitor, ref IProgressStatus status, string message, int pctComplete)
{
    if (progressMonitor.IsCanceled)
        return false;

    // Skip redundant UI updates when the percentage has not moved.
    if (status.PercentComplete != pctComplete)
    {
        status = status.ChangeMessage(message).ChangePercentComplete(pctComplete);
        progressMonitor.UpdateProgress(status);
    }
    return true;
}
// Extracts every entry of the shared archive at SharedPath into a new directory
// created next to it, tracking progress and honoring user cancellation.
public void Extract(IProgressMonitor progressMonitor)
{
    ProgressMonitor = progressMonitor;
    ProgressMonitor.UpdateProgress(_progressStatus = new ProgressStatus(DefaultMessage));
    var extractDir = ExtractDir(SharedPath);
    using (ZipFile zip = ZipFile.Read(SharedPath))
    {
        // Totals used by the ExtractProgress handler to compute percent complete.
        CountEntries = zip.Entries.Count;
        ExpectedSize = zip.Entries.Select(entry => entry.UncompressedSize).Sum();
        zip.ExtractProgress += SrmDocumentSharing_ExtractProgress;
        string documentName = FindSharedSkylineFile(zip);
        string parentDir = Path.GetDirectoryName(SharedPath);
        if (!string.IsNullOrEmpty(parentDir))
        {
            extractDir = Path.Combine(parentDir, extractDir);
        }
        // Never overwrite an existing directory; pick an unused name instead.
        extractDir = GetNonExistentDir(extractDir);
        DocumentPath = Path.Combine(extractDir, documentName);
        foreach (var entry in zip.Entries)
        {
            if (ProgressMonitor.IsCanceled)
            {
                break;
            }
            try
            {
                entry.Extract(extractDir);
                ExtractedSize += entry.UncompressedSize;
            }
            catch (Exception)
            {
                // Extraction failures are expected after cancellation; anything else is fatal.
                if (!ProgressMonitor.IsCanceled)
                {
                    throw;
                }
            }
        }
    }
    // Clean up the partial extraction if the user canceled.
    if (ProgressMonitor.IsCanceled)
    {
        DirectoryEx.SafeDelete(extractDir);
    }
}
// Relays upload progress from the WebClient to the progress monitor and
// propagates a user cancellation to the in-flight upload.
public void webClient_UploadProgressChanged(object sender, UploadProgressChangedEventArgs e)
{
    var message = string.Format(FileSize.FormatProvider,
        Resources.WebPanoramaPublishClient_webClient_UploadProgressChanged_Uploaded__0_fs__of__1_fs_,
        e.BytesSent, e.TotalBytesToSend);
    _progressStatus = _progressStatus
        .ChangeMessage(message)
        .ChangePercentComplete(e.ProgressPercentage);
    _progressMonitor.UpdateProgress(_progressStatus);

    if (_progressMonitor.IsCanceled)
        _webClient.CancelAsync();
}
// Downloads the Skyline document referenced by the .skyp file, reporting progress
// and translating common HTTP failures (401/403) into actionable messages.
// NOTE(review): parentWindow is unused in this body — confirm whether it can be removed.
private void Download(SkypFile skyp, IProgressMonitor progressMonitor, FormEx parentWindow = null)
{
    var progressStatus = new ProgressStatus(string.Format(Resources.SkypSupport_Download_Downloading__0_, skyp.SkylineDocUri));
    progressMonitor.UpdateProgress(progressStatus);
    if (DownloadClient == null)
    {
        DownloadClient = new WebDownloadClient(progressMonitor, progressStatus);
    }
    DownloadClient.Download(skyp.SkylineDocUri, skyp.DownloadPath, skyp.Server?.Username, skyp.Server?.Password);
    // Remove a partial download on cancellation or failure.
    if (progressMonitor.IsCanceled || DownloadClient.IsError)
    {
        FileEx.SafeDelete(skyp.DownloadPath, true);
    }
    if (DownloadClient.IsError)
    {
        var message = string.Format(
            Resources
                .SkypSupport_Download_There_was_an_error_downloading_the_Skyline_document_specified_in_the_skyp_file___0__,
            skyp.SkylineDocUri);
        if (DownloadClient.Error != null)
        {
            var exceptionMsg = DownloadClient.Error.Message;
            message = TextUtil.LineSeparate(message, exceptionMsg);
            if (exceptionMsg.Contains(ERROR401))
            {
                // Unauthorized: suggest registering the host as a Panorama server.
                message = TextUtil.LineSeparate(message, string.Format(
                    Resources
                        .SkypSupport_Download_You_may_have_to_add__0__as_a_Panorama_server_from_the_Tools___Options_menu_in_Skyline_,
                    skyp.SkylineDocUri.Host));
            }
            else if (exceptionMsg.Contains(ERROR403))
            {
                // Forbidden: the user lacks permission on the server.
                message = TextUtil.LineSeparate(message, string.Format(
                    Resources.SkypSupport_Download_You_do_not_have_permissions_to_download_this_file_from__0__,
                    skyp.SkylineDocUri.Host));
            }
        }
        throw new Exception(message, DownloadClient.Error);
    }
}
// Runs the tool executable on a background thread; any failure is surfaced to the
// user through the progress monitor rather than crashing the app.
private void RunExecutable(SrmDocument document, IToolMacroProvider toolMacroProvider, TextWriter textWriter, IProgressMonitor progressMonitor, Control parent)
{
    Action background = () =>
    {
        try
        {
            RunExecutableBackground(document, toolMacroProvider, textWriter, progressMonitor, parent);
        }
        catch (Exception ex)
        {
            progressMonitor.UpdateProgress(new ProgressStatus(string.Empty).ChangeErrorException(ex));
        }
    };
    ActionUtil.RunAsync(background, @"Run Executable");
}
// Posts the document to the given URL on a background thread; any failure is
// surfaced to the user through the progress monitor.
private void PostToLink(string url, SrmDocument doc, IProgressMonitor progressMonitor, IWebHelpers webHelpers)
{
    Action background = () =>
    {
        try
        {
            PostToLinkBackground(url, doc, progressMonitor, webHelpers);
        }
        catch (Exception ex)
        {
            progressMonitor.UpdateProgress(new ProgressStatus(string.Empty).ChangeErrorException(ex));
        }
    };
    ActionUtil.RunAsync(background, @"Post To Link");
}
// Converts each provider's retention times onto the iRT scale defined by the
// standard peptides, accumulating per-peptide averages. Returns null on cancellation.
public static ProcessedIrtAverages ProcessRetentionTimes(IProgressMonitor monitor, IEnumerable<IRetentionTimeProvider> providers, int countProviders, DbIrtPeptide[] standardPeptideList, DbIrtPeptide[] items)
{
    IProgressStatus status = new ProgressStatus(Resources.LibraryGridViewDriver_ProcessRetentionTimes_Adding_retention_times);
    var dictProviderData = new List<KeyValuePair<string, RetentionTimeProviderData>>();
    var dictPeptideAverages = new Dictionary<Target, IrtPeptideAverages>();
    var processedCount = 0;
    // Deferred query; re-enumerated for each provider, exactly as before.
    var standardsByIrt = standardPeptideList.OrderBy(peptide => peptide.Irt);

    foreach (var provider in providers)
    {
        if (monitor.IsCanceled)
            return null;

        var message = string.Format(Resources.LibraryGridViewDriver_ProcessRetentionTimes_Converting_retention_times_from__0__, provider.Name);
        monitor.UpdateProgress(status = status.ChangeMessage(message));
        processedCount++;

        // Try the initial regression first; fall back to a recalculated regression.
        var data = new RetentionTimeProviderData(provider, standardsByIrt);
        var regressionOk = data.RegressionSuccess || data.CalcRegressionWith(provider, standardPeptideList, items);
        if (regressionOk)
        {
            AddRetentionTimesToDict(provider, data.RegressionRefined, dictPeptideAverages, standardPeptideList);
        }
        dictProviderData.Add(new KeyValuePair<string, RetentionTimeProviderData>(provider.Name, data));

        monitor.UpdateProgress(status = status.ChangePercentComplete(processedCount * 100 / countProviders));
    }

    monitor.UpdateProgress(status.Complete());
    return new ProcessedIrtAverages(dictPeptideAverages, dictProviderData);
}
// Recomputes calculated (expression-based) annotations on the document.
// Returns the document unchanged when no annotation uses an expression.
public static SrmDocument UpdateAnnotations(SrmDocument document, IProgressMonitor progressMonitor, IProgressStatus status)
{
    bool hasCalculatedAnnotations = document.Settings.DataSettings.AnnotationDefs.Any(def => def.Expression != null);
    if (!hasCalculatedAnnotations)
        return document;

    progressMonitor?.UpdateProgress(status.ChangeMessage(Resources.DocumentAnnotationUpdater_UpdateAnnotations_Updating_calculated_annotations));
    var updater = new DocumentAnnotationUpdater(document, progressMonitor);
    return updater.UpdateDocument(document);
}
// Creates an iRT database at the given path from processed retention-time averages,
// optionally recalibrating the standards first (which adds a second progress segment
// and re-processes the provider retention times against the new standards).
public static void CreateIrtDb(string path, ProcessedIrtAverages processed, DbIrtPeptide[] standardPeptides, bool recalibrate, IrtRegressionType regressionType, IProgressMonitor monitor)
{
    DbIrtPeptide[] newStandards = null;
    if (recalibrate)
    {
        // Two segments: recalibrate standards, then re-process retention times.
        monitor.UpdateProgress(new ProgressStatus().ChangeSegments(0, 2));
        newStandards = processed.RecalibrateStandards(standardPeptides).ToArray();
        // newStandards is already an array; the previous code copied it again with ToArray().
        processed = RCalcIrt.ProcessRetentionTimes(monitor,
            processed.ProviderData.Select(data => data.RetentionTimeProvider).ToArray(),
            newStandards, new DbIrtPeptide[0], regressionType);
    }
    var irtDb = IrtDb.CreateIrtDb(path);
    // Persist the (possibly recalibrated) standards followed by the measured peptides.
    irtDb.AddPeptides(monitor, (newStandards ?? standardPeptides).Concat(processed.DbIrtPeptides).ToList());
}
// Posts the document to the given URL on a dedicated background thread; any
// failure is surfaced to the user through the progress monitor.
private void PostToLink(string url, SrmDocument doc, IProgressMonitor progressMonitor, IWebHelpers webHelpers)
{
    ThreadStart work = () =>
    {
        try
        {
            PostToLinkBackground(url, doc, progressMonitor, webHelpers);
        }
        catch (Exception ex)
        {
            progressMonitor.UpdateProgress(new ProgressStatus(string.Empty).ChangeErrorException(ex));
        }
    };
    new Thread(work).Start();
}
// Runs the tool executable on a dedicated, culture-initialized background thread;
// any failure is surfaced to the user through the progress monitor.
private void RunExecutable(SrmDocument document, IToolMacroProvider toolMacroProvider, TextWriter textWriter, IProgressMonitor progressMonitor, Control parent)
{
    ThreadStart work = () =>
    {
        try
        {
            RunExecutableBackground(document, toolMacroProvider, textWriter, progressMonitor, parent);
        }
        catch (Exception ex)
        {
            progressMonitor.UpdateProgress(new ProgressStatus(string.Empty).ChangeErrorException(ex));
        }
    };
    var thread = new Thread(work);
    // Make sure the worker thread uses the application's culture settings.
    LocalizationHelper.InitThread(thread);
    thread.Start();
}
// Shares the document, choosing between a complete or minimal archive based on
// the configured ShareType.
public void Share(IProgressMonitor progressMonitor)
{
    ProgressMonitor = progressMonitor;
    ProgressMonitor.UpdateProgress(_progressStatus = new ProgressStatus(DefaultMessage));
    using (var zip = new ZipFileShare())
    {
        if (!ShareType.Complete)
        {
            ShareMinimal(zip);
        }
        else
        {
            ShareComplete(zip);
        }
    }
}
// Reports progress as currentCount/totalCount percent, skipping the update when
// the displayed percentage would not change. Throws when the user has canceled.
public ProgressStatus UpdatePercentCompleteProgress(IProgressMonitor progressMonitor, long currentCount, long totalCount)
{
    if (progressMonitor.IsCanceled)
    {
        throw new OperationCanceledException();
    }
    var percentComplete = (int)(currentCount * 100 / totalCount);
    if (PercentComplete == percentComplete)
    {
        return this;
    }
    var statusNew = ChangePercentComplete(percentComplete);
    progressMonitor.UpdateProgress(statusNew);
    return statusNew;
}
/// <summary>
/// Initialize isolation scheme export. Builds the transition list map in memory;
/// returns false when canceled or when there is no file to write.
/// </summary>
protected bool InitExport(string fileName, IProgressMonitor progressMonitor)
{
    if (progressMonitor.IsCanceled)
        return false;

    // First export transition lists to map in memory
    Export(null, progressMonitor);

    if (fileName != null)
        return true;

    // No file name: nothing more to do, mark the operation complete.
    progressMonitor.UpdateProgress(new ProgressStatus(string.Empty).Complete());
    return false;
}
// Shares the document, choosing between a complete or minimal zip archive based
// on the CompleteSharing flag.
public void Share(IProgressMonitor progressMonitor)
{
    ProgressMonitor = progressMonitor;
    ProgressMonitor.UpdateProgress(_progressStatus = new ProgressStatus(DefaultMessage));
    using (var zip = new ZipFile())
    {
        // Make sure large files don't cause this to fail.
        zip.UseZip64WhenSaving = Zip64Option.AsNecessary;
        if (!CompleteSharing)
        {
            ShareMinimal(zip);
        }
        else
        {
            ShareComplete(zip);
        }
    }
}
// Exports the document's measured spectra (and, when the document predicts RT with
// an iRT calculator, its iRT values) into a new BiblioSpec library at the given path.
public void ExportSpectralLibrary(string path, IProgressMonitor progressMonitor)
{
    const string name = "exported";
    var spectra = new Dictionary<LibKey, SpectrumMzInfo>();
    // Gather spectrum info for every transition group across every replicate.
    foreach (var nodePepGroup in Document.MoleculeGroups)
    {
        foreach (var nodePep in nodePepGroup.Molecules)
        {
            foreach (var nodeTranGroup in nodePep.TransitionGroups)
            {
                for (var i = 0; i < Document.Settings.MeasuredResults.Chromatograms.Count; i++)
                {
                    ProcessTransitionGroup(spectra, nodePepGroup, nodePep, nodeTranGroup, i);
                }
            }
        }
    }
    var rCalcIrt = Document.Settings.HasRTPrediction ? Document.Settings.PeptideSettings.Prediction.RetentionTime.Calculator as RCalcIrt : null;
    IProgressStatus status = new ProgressStatus();
    // When iRTs will also be exported, split progress into two segments.
    if (rCalcIrt != null && progressMonitor != null)
    {
        progressMonitor.UpdateProgress(status = status.ChangeSegments(0, 2));
    }
    using (var blibDb = BlibDb.CreateBlibDb(path))
    {
        var libSpec = new BiblioSpecLiteSpec(name, path);
        blibDb.CreateLibraryFromSpectra(libSpec, spectra.Values.ToList(), name, progressMonitor, ref status);
    }
    // Second segment: write the iRT values alongside the spectra.
    if (rCalcIrt != null)
    {
        IrtDb.CreateIrtDb(path).AddPeptides(progressMonitor, rCalcIrt.GetDbIrtPeptides().ToList(), ref status);
    }
}
// Computes iRT averages for the given providers against the standard peptide list.
// When the standards match a known IrtStandard, heavy-labeled modified sequences
// from the standard's reference document are substituted so lookups match the
// library. Returns null on cancellation.
public static ProcessedIrtAverages ProcessRetentionTimes(IProgressMonitor monitor, IRetentionTimeProvider[] providers, DbIrtPeptide[] standardPeptideList, DbIrtPeptide[] items)
{
    var matchedStandard = IrtStandard.WhichStandard(standardPeptideList.Select(pep => pep.ModifiedTarget));
    if (matchedStandard != null)
    {
        // Import the standard's reference document into a throwaway document to
        // recover explicit (heavy) modifications for the standard peptides.
        var dummyDoc = new SrmDocument(SrmSettingsList.GetDefault());
        using (var reader = matchedStandard.GetDocumentReader())
        {
            if (reader != null)
            {
                dummyDoc = dummyDoc.ImportDocumentXml(reader, string.Empty, MeasuredResults.MergeAction.remove, false, null, Settings.Default.StaticModList, Settings.Default.HeavyModList, null, out _, out _, false);
                // Copy the list before mutating entries below.
                standardPeptideList = standardPeptideList.Select(pep => new DbIrtPeptide(pep)).ToArray();
                foreach (var dummyPep in dummyDoc.Molecules.Where(pep => pep.HasExplicitMods))
                {
                    var standardPepIdx = standardPeptideList.IndexOf(pep => dummyPep.ModifiedTarget.Equals(pep.ModifiedTarget));
                    standardPeptideList[standardPepIdx] = new DbIrtPeptide(standardPeptideList[standardPepIdx])
                    {
                        ModifiedTarget = dummyDoc.Settings.GetModifiedSequence(dummyPep.ModifiedTarget, IsotopeLabelType.heavy, dummyPep.ExplicitMods)
                    };
                }
            }
        }
    }
    IProgressStatus status = new ProgressStatus(Resources.LibraryGridViewDriver_ProcessRetentionTimes_Adding_retention_times);
    var dictPeptideAverages = new Dictionary<Target, IrtPeptideAverages>();
    var providerData = new List<RetentionTimeProviderData>();
    var runCount = 0;
    foreach (var retentionTimeProvider in providers)
    {
        if (monitor.IsCanceled)
        {
            return null;
        }
        monitor.UpdateProgress(status = status.ChangeMessage(string.Format(
            Resources.LibraryGridViewDriver_ProcessRetentionTimes_Converting_retention_times_from__0__,
            retentionTimeProvider.Name)));
        runCount++;
        // Try the initial regression; fall back to a recalculated regression if needed.
        var data = new RetentionTimeProviderData(retentionTimeProvider, standardPeptideList);
        if (data.RegressionSuccess || data.CalcRegressionWith(retentionTimeProvider, standardPeptideList, items))
        {
            AddRetentionTimesToDict(retentionTimeProvider, data.RegressionRefined, dictPeptideAverages, standardPeptideList);
        }
        providerData.Add(data);
        monitor.UpdateProgress(status = status.ChangePercentComplete(runCount * 100 / providers.Length));
    }
    monitor.UpdateProgress(status.Complete());
    return new ProcessedIrtAverages(dictPeptideAverages, providerData);
}
//Throws DatabaseOpeningException
// Opens the iRT database at the given path, mapping low-level I/O and SQLite
// failures to DatabaseOpeningException with user-readable messages. When a load
// monitor is supplied, failures are reported through it and null is returned
// instead of throwing.
public static IrtDb GetIrtDb(string path, IProgressMonitor loadMonitor)
{
    var status = new ProgressStatus(string.Format(Resources.IrtDb_GetIrtDb_Loading_iRT_database__0_, path));
    if (loadMonitor != null)
        loadMonitor.UpdateProgress(status);

    try
    {
        if (path == null)
            throw new DatabaseOpeningException(Resources.IrtDb_GetIrtDb_Database_path_cannot_be_null);

        if (!File.Exists(path))
            throw new DatabaseOpeningException(String.Format(Resources.IrtDb_GetIrtDb_The_file__0__does_not_exist_, path));

        string message;
        Exception xInner = null;
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new IrtDb(path, sessionFactory).Load(loadMonitor, status);
                }
            }
        }
        catch (UnauthorizedAccessException x)
        {
            message = string.Format(Resources.IrtDb_GetIrtDb_You_do_not_have_privileges_to_access_the_file__0_, path);
            xInner = x;
        }
        catch (DirectoryNotFoundException x)
        {
            message = string.Format(Resources.IrtDb_GetIrtDb_The_path_containing__0__does_not_exist, path);
            xInner = x;
        }
        catch (FileNotFoundException x)
        {
            message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_created_Perhaps_you_do_not_have_sufficient_privileges, path);
            xInner = x;
        }
        catch (SQLiteException x)
        {
            message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__is_not_a_valid_iRT_database_file, path);
            xInner = x;
        }
        catch (Exception x)
        {
            message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_opened, path);
            xInner = x;
        }

        // One of the catch blocks above set message/xInner; wrap and rethrow.
        throw new DatabaseOpeningException(message, xInner);
    }
    catch (DatabaseOpeningException x)
    {
        // Report through the monitor when available; otherwise let the caller handle it.
        if (loadMonitor == null)
            throw;
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return null;
    }
}
// Loads the database peptides into a cloned instance and reports completion.
private IrtDb Load(IProgressMonitor loadMonitor, ProgressStatus status)
{
    var loaded = ChangeProp(ImClone(this), im => im.LoadPeptides(im.GetPeptides()));

    // Not really possible to show progress, unless we switch to raw reading
    loadMonitor?.UpdateProgress(status.ChangePercentComplete(100));

    return loaded;
}
// Builds a BiblioSpec library from InputFiles: first a redundant library, then a
// filtered non-redundant library committed to OutputPath. Returns false on failure
// or cancellation; errors are reported through the progress monitor.
public bool BuildLibrary(IProgressMonitor progress)
{
    _ambiguousMatches = null;
    ProgressStatus status = new ProgressStatus(Resources.BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library);
    progress.UpdateProgress(status);
    // .pilot inputs must be converted before BlibBuild can consume them.
    if (InputFiles.Any(f => f.EndsWith(EXT_PILOT)))
    {
        try
        {
            InputFiles = VendorIssueHelper.ConvertPilotFiles(InputFiles, progress, status);
            if (progress.IsCanceled)
                return false;
        }
        catch (Exception x)
        {
            progress.UpdateProgress(status.ChangeErrorException(x));
            return false;
        }
    }
    string message = string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Building__0__library,
        Path.GetFileName(OutputPath));
    progress.UpdateProgress(status = status.ChangeMessage(message));
    // Stage 1: build the redundant library.
    string redundantLibrary = BiblioSpecLiteSpec.GetRedundantName(OutputPath);
    var blibBuilder = new BlibBuild(redundantLibrary, InputFiles, TargetSequences)
    {
        Authority = Authority,
        IncludeAmbiguousMatches = IncludeAmbiguousMatches,
        CutOffScore = CutOffScore,
        Id = Id,
    };
    try
    {
        if (!blibBuilder.BuildLibrary(Action, progress, ref status, out _ambiguousMatches))
        {
            return false;
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch (Exception x)
    {
        // NOTE(review): Console.WriteLine looks like leftover debug output — confirm
        // whether it should be removed or routed to a logger.
        Console.WriteLine(x.Message);
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__0__, redundantLibrary))));
        return false;
    }
    // Stage 2: filter the redundant library down to the non-redundant output.
    var blibFilter = new BlibFilter();
    status = new ProgressStatus(message);
    progress.UpdateProgress(status);
    // Write the non-redundant library to a temporary file first
    try
    {
        using (var saver = new FileSaver(OutputPath))
        {
            if (!blibFilter.Filter(redundantLibrary, saver.SafeName, progress, ref status))
            {
                return false;
            }
            saver.Commit();
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch
    {
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__, OutputPath))));
        return false;
    }
    finally
    {
        // Remove the redundant intermediate unless the caller asked to keep it.
        if (!KeepRedundant)
            FileEx.SafeDelete(redundantLibrary, true);
    }
    return true;
}
// Extracts the shared archive at SharedPath into a fresh directory created next to
// it (the directory name is the archive name with its .sky.zip/.zip extension
// stripped), honoring user cancellation.
public void Extract(IProgressMonitor progressMonitor)
{
    ProgressMonitor = progressMonitor;
    ProgressMonitor.UpdateProgress(_progressStatus = new ProgressStatus(DefaultMessage));

    // Derive the extraction directory name from the archive file name.
    string extractDir = Path.GetFileName(SharedPath) ?? string.Empty;
    if (PathEx.HasExtension(extractDir, EXT_SKY_ZIP))
        extractDir = extractDir.Substring(0, extractDir.Length - EXT_SKY_ZIP.Length);
    else if (PathEx.HasExtension(extractDir, EXT))
        extractDir = extractDir.Substring(0, extractDir.Length - EXT.Length);

    using (ZipFile zip = ZipFile.Read(SharedPath))
    {
        // Totals used by the ExtractProgress handler to compute percent complete.
        CountEntries = zip.Entries.Count;
        ExpectedSize = zip.Entries.Select(entry => entry.UncompressedSize).Sum();
        zip.ExtractProgress += SrmDocumentSharing_ExtractProgress;

        string documentName = FindSharedSkylineFile(zip);

        string parentDir = Path.GetDirectoryName(SharedPath);
        if (!string.IsNullOrEmpty(parentDir))
            extractDir = Path.Combine(parentDir, extractDir);
        // Never overwrite an existing directory; pick an unused name instead.
        extractDir = GetNonExistantDir(extractDir);
        DocumentPath = Path.Combine(extractDir, documentName);

        foreach (var entry in zip.Entries)
        {
            if (ProgressMonitor.IsCanceled)
                break;

            try
            {
                entry.Extract(extractDir);
                ExtractedSize += entry.UncompressedSize;
            }
            catch (Exception)
            {
                // Extraction failures are expected after cancellation; anything else is fatal.
                if (!ProgressMonitor.IsCanceled)
                    throw;
            }
        }
    }

    // Clean up the partial extraction if the user canceled.
    if (ProgressMonitor.IsCanceled)
    {
        DirectoryEx.SafeDelete(extractDir);
    }
}
// Resolves protein metadata (accession, description, etc.) for every protein node
// in the document: first from the background proteome database, then via web lookup
// for anything still unresolved. Results are cached in _processedNodes so work
// interrupted by cancellation can resume. Returns null on cancellation.
private SrmDocument LookupProteinMetadata(SrmDocument docOrig, IProgressMonitor progressMonitor)
{
    lock (_processedNodes)
    {
        // Check to make sure this operation was not canceled while this thread was
        // waiting to acquire the lock. This also cleans up pending work.
        if (progressMonitor.IsCanceled)
            return null;

        var progressStatus = new ProgressStatus(Resources.ProteinMetadataManager_LookupProteinMetadata_resolving_protein_details);
        int nResolved = 0;
        // NOTE(review): Select(...).Count() counts ALL peptide groups, not just those
        // whose metadata needs a search — confirm whether Count(pg => pg.ProteinMetadata.NeedsSearch())
        // was intended; as written the percentages are computed against the total group count.
        int nUnresolved = docOrig.PeptideGroups.Select(pg => pg.ProteinMetadata.NeedsSearch()).Count();

        if ((nUnresolved > 0) && !docOrig.Settings.PeptideSettings.BackgroundProteome.IsNone)
        {
            // Do a quick check to see if background proteome already has the info
            if (!docOrig.Settings.PeptideSettings.BackgroundProteome.NeedsProteinMetadataSearch)
            {
                try
                {
                    using (var proteomeDb = docOrig.Settings.PeptideSettings.BackgroundProteome.OpenProteomeDb())
                    {
                        foreach (PeptideGroupDocNode nodePepGroup in docOrig.PeptideGroups)
                        {
                            if (_processedNodes.ContainsKey(nodePepGroup.Id.GlobalIndex))
                            {
                                // We did this before we were interrupted
                                progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                            }
                            else if (nodePepGroup.ProteinMetadata.NeedsSearch())
                            {
                                // Try the displayed name, the original name, then the parsed accession.
                                var proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.Name);
                                if ((proteinMetadata == null) && !Equals(nodePepGroup.Name, nodePepGroup.OriginalName))
                                    proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.OriginalName); // Original name might hit
                                if ((proteinMetadata == null) && !String.IsNullOrEmpty(nodePepGroup.ProteinMetadata.Accession))
                                    proteinMetadata = proteomeDb.GetProteinMetadataByName(nodePepGroup.ProteinMetadata.Accession); // Parsed accession might hit
                                if ((proteinMetadata != null) && !proteinMetadata.NeedsSearch())
                                {
                                    // Background proteome has already resolved this
                                    _processedNodes.Add(nodePepGroup.Id.GlobalIndex, proteinMetadata);
                                    progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100*nResolved++/nUnresolved));
                                }
                            }
                            if (progressMonitor.IsCanceled)
                            {
                                progressMonitor.UpdateProgress(progressStatus.Cancel());
                                return null;
                            }
                        }
                    }
                }
                // ReSharper disable once EmptyGeneralCatchClause
                catch
                {
                    // The protDB file is busy, or some other issue - just go directly to web
                }
            }
        }

        if (nResolved != nUnresolved)
        {
            try
            {
                // Now go to the web for more protein metadata (or pretend to, depending on WebEnabledFastaImporter.DefaultWebAccessMode)
                var docNodesWithUnresolvedProteinMetadata = new Dictionary<ProteinSearchInfo,PeptideGroupDocNode>();
                var proteinsToSearch = new List<ProteinSearchInfo>();
                foreach (PeptideGroupDocNode node in docOrig.PeptideGroups)
                {
                    if (node.ProteinMetadata.NeedsSearch() && !_processedNodes.ContainsKey(node.Id.GlobalIndex)) // Did we already process this?
                    {
                        var proteinMetadata = node.ProteinMetadata;
                        if (proteinMetadata.WebSearchInfo.IsEmpty()) // Never even been hit with regex
                        {
                            // Use Regexes to get some metadata, and a search term
                            var parsedProteinMetaData = FastaImporter.ParseProteinMetaData(proteinMetadata);
                            if ((parsedProteinMetaData == null) || Equals(parsedProteinMetaData.Merge(proteinMetadata),proteinMetadata.SetWebSearchCompleted()))
                            {
                                // That didn't parse well enough to make a search term, or didn't add any new info - just set it as searched so we don't keep trying
                                _processedNodes.Add(node.Id.GlobalIndex, proteinMetadata.SetWebSearchCompleted());
                                progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                                proteinMetadata = null; // No search to be done
                            }
                            else
                            {
                                proteinMetadata = proteinMetadata.Merge(parsedProteinMetaData); // Fill in any gaps with parsed info
                            }
                        }
                        if (proteinMetadata != null)
                        {
                            // We note the sequence length because it's useful in disambiguating search results
                            proteinsToSearch.Add(new ProteinSearchInfo(new DbProteinName(null, proteinMetadata), node.PeptideGroup.Sequence == null ? 0 : node.PeptideGroup.Sequence.Length));
                            docNodesWithUnresolvedProteinMetadata.Add(proteinsToSearch.Last(), node);
                        }
                    }
                }
                if (progressMonitor.IsCanceled)
                {
                    progressMonitor.UpdateProgress(progressStatus.Cancel());
                    return null;
                }
                progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved / nUnresolved));

                // Now we actually hit the internet
                if (proteinsToSearch.Any())
                {
                    foreach (var result in FastaImporter.DoWebserviceLookup(proteinsToSearch, progressMonitor, false)) // Resolve them all, now
                    {
                        Debug.Assert(!result.GetProteinMetadata().NeedsSearch());
                        _processedNodes.Add(docNodesWithUnresolvedProteinMetadata[result].Id.GlobalIndex, result.GetProteinMetadata());
                        progressMonitor.UpdateProgress(progressStatus = progressStatus.ChangePercentComplete(100 * nResolved++ / nUnresolved));
                    }
                }
            }
            catch (OperationCanceledException)
            {
                progressMonitor.UpdateProgress(progressStatus.Cancel());
                return null;
            }
        }

        // And finally write back to the document
        var listProteins = new List<PeptideGroupDocNode>();
        foreach (PeptideGroupDocNode node in docOrig.MoleculeGroups)
        {
            if (_processedNodes.ContainsKey(node.Id.GlobalIndex))
            {
                listProteins.Add(node.ChangeProteinMetadata(_processedNodes[node.Id.GlobalIndex]));
            }
            else
            {
                listProteins.Add(node);
            }
        }
        var docNew = docOrig.ChangeChildrenChecked(listProteins.Cast<DocNode>().ToArray());
        progressMonitor.UpdateProgress(progressStatus.Complete());
        return (SrmDocument)docNew;
    }
}
// Imports FASTA sequences from the reader into new peptide group nodes, skipping
// FASTA sequences already present in the document. Progress is reported against
// the caller-supplied total line count. Returns an empty array on cancellation.
public IEnumerable<PeptideGroupDocNode> Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount)
{
    // Set starting values for limit counters
    _countPeptides = Document.PeptideCount;
    _countIons = Document.PeptideTransitionCount;

    // Store set of existing FASTA sequences to keep from duplicating
    HashSet<FastaSequence> set = new HashSet<FastaSequence>();
    foreach (PeptideGroupDocNode nodeGroup in Document.Children)
    {
        FastaSequence fastaSeq = nodeGroup.Id as FastaSequence;
        if (fastaSeq != null)
            set.Add(fastaSeq);
    }

    var peptideGroupsNew = new List<PeptideGroupDocNode>();
    PeptideGroupBuilder seqBuilder = null;

    long linesRead = 0;
    int progressPercent = -1;

    string line;
    var status = new ProgressStatus(string.Empty);
    while ((line = reader.ReadLine()) != null)
    {
        linesRead++;
        if (progressMonitor != null)
        {
            // TODO when changing from ILongWaitBroker to IProgressMonitor, the old code was:
            // if (progressMonitor.IsCanceled || progressMonitor.IsDocumentChanged(Document))
            // IProgressMonitor does not have IsDocumentChangesd.
            if (progressMonitor.IsCanceled)
                return new PeptideGroupDocNode[0];
            int progressNew = (int) (linesRead*100/lineCount);
            if (progressPercent != progressNew)
                progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressPercent = progressNew));
        }
        if (line.StartsWith(">")) // Not L10N
        {
            // Enforce document size limits before starting another protein.
            if (_countIons > SrmDocument.MAX_TRANSITION_COUNT ||
                    _countPeptides > SrmDocument.MAX_PEPTIDE_COUNT)
                throw new InvalidDataException(Resources.FastaImporter_Import_Document_size_limit_exceeded);
            // Flush the protein built so far before starting the next one.
            if (seqBuilder != null)
                AddPeptideGroup(peptideGroupsNew, set, seqBuilder);
            seqBuilder = _modMatcher == null
                ? new PeptideGroupBuilder(line, PeptideList, Document.Settings)
                : new PeptideGroupBuilder(line, _modMatcher, Document.Settings);
            if (progressMonitor != null)
                progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.FastaImporter_Import_Adding_protein__0__, seqBuilder.Name)));
        }
        else if (seqBuilder == null)
        {
            // Tolerate blank lines before the first header; any other
            // non-header content ends the import.
            if (line.Trim().Length == 0)
                continue;
            break;
        }
        else
        {
            seqBuilder.AppendSequence(line);
        }
    }
    // Add last sequence.
    if (seqBuilder != null)
        AddPeptideGroup(peptideGroupsNew, set, seqBuilder);
    return peptideGroupsNew;
}
// Test stub for a process runner: optionally writes a canned line to the writer
// and reports either cancellation or 100% completion through the monitor.
public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status, TextWriter writer)
{
    if (shouldCancel)
    {
        // ProgressStatus is immutable: Cancel() returns a new instance, so the result
        // must be assigned back. (The previous bare "status.Cancel();" call was a no-op.)
        progress.UpdateProgress(status = status.Cancel());
        return;
    }
    if (!string.IsNullOrEmpty(stringToWriteToWriter))
        writer.WriteLine(stringToWriteToWriter);
    // Assign the changed status back before reporting; the previous code discarded
    // the ChangePercentComplete(100) result and reported a stale percentage.
    progress.UpdateProgress(status = status.ChangePercentComplete(100));
}
/// <summary>
/// Called to generate cloud bitmaps: advances the animation offsets, then rebuilds
/// each of the 6 cube-map face bitmaps with progress reported after each face.
/// </summary>
private void GenerateBitmaps( IProgressMonitor progress )
{
    GameProfiles.Game.CloudGeneration.Begin( );

    // Advance the animation offsets, wrapping at 2*pi.
    m_XOffset = Utils.Wrap( m_XOffset + 0.002f, 0, Constants.TwoPi );
    m_ZOffset = Utils.Wrap( m_ZOffset + 0.0025f, 0, Constants.TwoPi );

    // Simple cloud coverage cycle
    m_CloudCoverage = Utils.Wrap( m_CloudCoverage + 0.01f, 0, Constants.TwoPi );

    progress.UpdateProgress( 0 );

    // Build each face in a fixed order (same order and progress values as the
    // previous hand-unrolled version), reporting progress after each one.
    CubeMapFace[] faces =
    {
        CubeMapFace.PositiveX, CubeMapFace.NegativeX,
        CubeMapFace.PositiveY, CubeMapFace.NegativeY,
        CubeMapFace.PositiveZ, CubeMapFace.NegativeZ
    };
    for ( int faceIndex = 0; faceIndex < faces.Length; ++faceIndex )
    {
        BuildFaceBitmap( faces[ faceIndex ] );
        progress.UpdateProgress( ( faceIndex + 1 ) / 6.0f );
    }

    GameProfiles.Game.CloudGeneration.End( );
    GameProfiles.Game.CloudGeneration.Reset( );
}
// Generates a normal-map bitmap for the wave animation at time t: runs an inverse
// FFT over the frequency maps and packs the X/Y slopes into the R/B channels.
// Progress is reported per scan line within [curProgress, curProgress + progressPerFrame].
private Bitmap GenerateNormalMap( WaveAnimationParameters parameters, float t, float maxT, IProgressMonitor progress, float curProgress, float progressPerFrame )
{
    int width = parameters.Width;
    int height = parameters.Height;

    ComplexF[] frequencyMap = GetFrequencyMap( parameters );
    ComplexF[] invFrequencyMap = GetInvFrequencyMap( parameters );

    Bitmap bmp = new Bitmap( width, height, PixelFormat.Format24bppRgb );
    ComplexF[] resMap = Generate( frequencyMap, invFrequencyMap, width, height, t, maxT, true );
    Fourier.FFT2( resMap, width, height, FourierDirection.Backward );

    for ( int y = 0; y < height; ++y )
    {
        for ( int x = 0; x < width; ++x )
        {
            // Clamp to the valid byte range [0,255]. The previous upper bound of 256
            // overflowed the byte cast ((byte)256 wraps to 0), turning maximal
            // positive slopes black instead of saturated.
            byte nX = ( byte )( Math.Max( 0, Math.Min( 255, 128 + resMap[ x + y * width ].Re * 8 ) ) );
            byte nY = ( byte )( Math.Max( 0, Math.Min( 255, 128 + resMap[ x + y * width ].Im * 8 ) ) );
            bmp.SetPixel( x, y, Color.FromArgb( nX, 0, nY ) );
        }
        progress.UpdateProgress( curProgress + progressPerFrame * ( y / ( float )( height - 1 ) ) );
    }

    return bmp;
}
/// <summary>
/// Generate an isolation list containing multiplexed windows, attempting to minimize the number
/// and frequency of repeated window pairings within each scan.
/// </summary>
/// <param name="writer">writer to write results</param>
/// <param name="windowsPerScan">how many windows are contained in each scan</param>
/// <param name="progressMonitor">progress monitor</param>
private void WriteMultiplexedWindows(TextWriter writer, int windowsPerScan, IProgressMonitor progressMonitor)
{
    int maxInstrumentWindows = Assume.Value(_maxInstrumentWindows);
    int windowCount = IsolationScheme.PrespecifiedIsolationWindows.Count;
    // Integer division: number of complete cycles that fit in the instrument's window budget.
    int cycleCount = maxInstrumentWindows / windowCount;
    double totalScore = 0.0;
    // Prepare to generate the best isolation list possible within the given time limit.
    var startTime = DateTime.Now;
    var cycle = new Cycle(windowCount, windowsPerScan);
    int cyclesGenerated = 0;
    ProgressStatus status = new ProgressStatus(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List);
    progressMonitor.UpdateProgress(status);
    // Generate each cycle. Each cycle gets an equal share of the remaining time budget
    // (CalculationTime total), so earlier overruns shrink the budget for later cycles.
    for (int cycleNumber = 1; cycleNumber <= cycleCount; cycleNumber++)
    {
        // Update status.
        if (progressMonitor.IsCanceled)
            return;
        // Percent complete is measured against the total time budget, not cycle count.
        progressMonitor.UpdateProgress(status.ChangePercentComplete(
            (int) (DateTime.Now - startTime).TotalSeconds*100/CalculationTime).ChangeMessage(
            string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__1__,
                cycleNumber - 1, cycleCount)));
        double secondsRemaining = CalculationTime - (DateTime.Now - startTime).TotalSeconds;
        double secondsPerCycle = secondsRemaining / (cycleCount - cycleNumber + 1);
        var endTime = DateTime.Now.AddSeconds(secondsPerCycle);
        Cycle bestCycle = null;
        do
        {
            // Generate a bunch of cycles, looking for one with the lowest score.
            const int attemptCount = 50;
            for (int i = 0; i < attemptCount; i++)
            {
                cycle.Generate(cycleNumber);
                if (bestCycle == null || bestCycle.CycleScore > cycle.CycleScore)
                {
                    bestCycle = new Cycle(cycle);
                    if (bestCycle.CycleScore == 0.0)
                    {
                        // A score of 0 cannot be improved. Pre-subtract the unconditional
                        // "+= attemptCount" below so the net count is the i + 1 attempts
                        // actually made, and pull endTime back so the do/while exits too.
                        cyclesGenerated += i + 1 - attemptCount;
                        endTime = DateTime.Now; // Break outer loop.
                        break;
                    }
                }
            }
            cyclesGenerated += attemptCount;
        } while (DateTime.Now < endTime);
        // bestCycle cannot be null here: the inner for loop always runs at least once and
        // assigns it on the first iteration (bestCycle == null).
        // ReSharper disable PossibleNullReferenceException
        totalScore += bestCycle.CycleScore;
        WriteCycle(writer, bestCycle, cycleNumber);
        WriteCycleInfo(bestCycle, cycleNumber, cyclesGenerated, startTime);
        // ReSharper restore PossibleNullReferenceException
    }
    WriteTotalScore(totalScore);
    // Show 100% in the wait dialog.
    progressMonitor.UpdateProgress(status.ChangePercentComplete(100).ChangeMessage(
        string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__0__,
            cycleCount)));
}
/// <summary>
/// Train the model by iterative calculating weights to separate target and decoy transition groups.
/// </summary>
/// <param name="targets">Target transition groups.</param>
/// <param name="decoys">Decoy transition groups.</param>
/// <param name="initParameters">Initial model parameters (weights and bias)</param>
/// <param name="includeSecondBest"> Include the second best peaks in the targets as decoys?</param>
/// <param name="preTrain">Use a pre-trained model to bootstrap the learning.</param>
/// <param name="progressMonitor">Optional progress monitor; cancellation throws OperationCanceledException.</param>
/// <returns>Immutable model with new weights.</returns>
public override IPeakScoringModel Train(IList<IList<float[]>> targets, IList<IList<float[]>> decoys, LinearModelParams initParameters, bool includeSecondBest = false, bool preTrain = true, IProgressMonitor progressMonitor = null)
{
    if (initParameters == null)
    {
        initParameters = new LinearModelParams(_peakFeatureCalculators.Count);
    }
    // ChangeProp/ImClone: mutate a clone and return it, leaving this instance unchanged.
    return (ChangeProp(ImClone(this), im =>
    {
        // Drop empty transition groups before scoring.
        targets = targets.Where(list => list.Count > 0).ToList();
        decoys = decoys.Where(list => list.Count > 0).ToList();
        var targetTransitionGroups = new ScoredGroupPeaksSet(targets);
        var decoyTransitionGroups = new ScoredGroupPeaksSet(decoys);
        // Bootstrap from the pre-trained legacy model
        if (preTrain)
        {
            var preTrainedWeights = new double[initParameters.Weights.Count];
            // NaN weights mark disabled calculators; preserve that marking.
            for (int i = 0; i < preTrainedWeights.Length; ++i)
            {
                if (double.IsNaN(initParameters.Weights[i]))
                {
                    preTrainedWeights[i] = double.NaN;
                }
            }
            // Choose the calculator set (standard vs analyte) with more enabled features.
            int standardEnabledCount = GetEnabledCount(LegacyScoringModel.StandardFeatureCalculators, initParameters.Weights);
            int analyteEnabledCount = GetEnabledCount(LegacyScoringModel.AnalyteFeatureCalculators, initParameters.Weights);
            bool hasStandards = standardEnabledCount >= analyteEnabledCount;
            var calculators = hasStandards ? LegacyScoringModel.StandardFeatureCalculators : LegacyScoringModel.AnalyteFeatureCalculators;
            for (int i = 0; i < calculators.Length; ++i)
            {
                // Retention-time prediction is excluded from the bootstrap weights.
                if (calculators[i].GetType() == typeof(MQuestRetentionTimePredictionCalc))
                {
                    continue;
                }
                SetCalculatorValue(calculators[i].GetType(), LegacyScoringModel.DEFAULT_WEIGHTS[i], preTrainedWeights);
            }
            targetTransitionGroups.ScorePeaks(preTrainedWeights);
            decoyTransitionGroups.ScorePeaks(preTrainedWeights);
        }
        // Iteratively refine the weights through multiple iterations.
        var calcWeights = new double[initParameters.Weights.Count];
        Array.Copy(initParameters.Weights.ToArray(), calcWeights, initParameters.Weights.Count);
        double decoyMean = 0;
        double decoyStdev = 0;
        bool colinearWarning = false;
        // This may take a long time between progress updates, but just measure progress by cycles through the training
        IProgressStatus status = new ProgressStatus(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model);
        if (progressMonitor != null)
        {
            progressMonitor.UpdateProgress(status);
        }
        for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++)
        {
            if (progressMonitor != null)
            {
                if (progressMonitor.IsCanceled)
                {
                    throw new OperationCanceledException();
                }
                progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model__iteration__0__of__1__, iteration + 1, MAX_ITERATIONS))
                    .ChangePercentComplete((iteration + 1) * 100 / (MAX_ITERATIONS + 1)));
            }
            // calcWeights is refined in place; decoy statistics from the final iteration
            // are used below to rescale the parameters.
            im.CalculateWeights(iteration, targetTransitionGroups, decoyTransitionGroups, includeSecondBest, calcWeights, out decoyMean, out decoyStdev, ref colinearWarning);
            GC.Collect(); // Each loop generates a number of large objects. GC helps to keep private bytes under control
        }
        if (progressMonitor != null)
        {
            progressMonitor.UpdateProgress(status.ChangePercentComplete(100));
        }
        // Normalize so decoy scores have mean 0 / stdev 1, then commit to the clone.
        var parameters = new LinearModelParams(calcWeights);
        parameters = parameters.RescaleParameters(decoyMean, decoyStdev);
        im.Parameters = parameters;
        im.ColinearWarning = colinearWarning;
        im.UsesSecondBest = includeSecondBest;
        im.UsesDecoys = decoys.Count > 0;
    }));
}
/// <summary>
/// Converts a Bruker raw data directory to mzML by running the externally installed
/// CompassXport tool in its own process, tracking conversion progress by parsing its
/// console output. Throws IOException if CompassXport is missing or exits non-zero,
/// and LoadCanceledException if the monitor is canceled mid-conversion.
/// </summary>
/// <param name="filePathBruker">Path to the Bruker raw data (directory)</param>
/// <param name="outputPath">Path where the mzML output is written</param>
/// <param name="monitor">Monitor polled for cancellation and given progress updates</param>
/// <param name="status">Progress status carried through the conversion</param>
private static void ConvertBrukerToMzml(string filePathBruker, string outputPath, IProgressMonitor monitor, ProgressStatus status)
{
    // We use CompassXport, if it is installed, to convert a Bruker raw file to mzML. This solves two
    // issues: the Bruker reader can't be called on any thread other than the main thread, and there
    // is no 64-bit version of the reader. So we start CompassXport in its own 32-bit process,
    // and use it to convert the raw data to mzML in a temporary file, which we read back afterwards.
    var key = Registry.LocalMachine.OpenSubKey(KEY_COMPASSXPORT, false);
    string compassXportExe = (key != null) ? (string)key.GetValue(string.Empty) : null;
    if (compassXportExe == null)
        throw new IOException(Resources.VendorIssueHelper_ConvertBrukerToMzml_CompassXport_software_must_be_installed_to_import_Bruker_raw_data_files_);
    // CompassXport arguments
    // ReSharper disable NonLocalizedString
    var argv = new[]
    {
        "-a \"" + filePathBruker + "\"", // input file (directory)
        "-o \"" + outputPath + "\"", // output file (directory)
        "-mode 2", // mode 2 (mzML)
        "-raw 0" // export line spectra (profile data is HUGE and SLOW!)
    };
    // ReSharper restore NonLocalizedString
    // Start CompassXport in its own process.
    var psi = new ProcessStartInfo(compassXportExe)
    {
        CreateNoWindow = true,
        UseShellExecute = false,
        // Common directory includes the directory separator
        WorkingDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty,
        Arguments = string.Join(" ", argv), // Not L10N
        RedirectStandardError = true,
        RedirectStandardOutput = true,
    };
    var proc = new Process { StartInfo = psi };
    proc.Start();
    // CompassXport starts by calculating a hash of the input file. This takes a long time, and there is
    // no intermediate output during this time. So we set the progress bar some fraction of the way and
    // let it sit there and animate while we wait for the start of spectra processing.
    const int hashPercent = 25; // percentage of import time allocated to calculating the input file hash
    int spectrumCount = 0;
    var sbOut = new StringBuilder();
    var reader = new ProcessStreamReader(proc);
    string line;
    while ((line = reader.ReadLine()) != null)
    {
        if (monitor.IsCanceled)
        {
            proc.Kill();
            throw new LoadCanceledException(status.Cancel());
        }
        // Keep the raw (untrimmed) output for the error report on failure.
        sbOut.AppendLine(line);
        line = line.Trim();
        // The main part of conversion starts with the hash calculation.
        if (line.StartsWith("Calculating hash")) // Not L10N
        {
            status = status.ChangeMessage(Resources.VendorIssueHelper_ConvertBrukerToMzml_Calculating_hash_of_input_file)
                .ChangePercentComplete(hashPercent);
            monitor.UpdateProgress(status);
            continue;
        }
        // Determine how many spectra will be converted so we can track progress.
        var match = Regex.Match(line, @"Converting (\d+) spectra"); // Not L10N
        if (match.Success)
        {
            spectrumCount = int.Parse(match.Groups[1].Value);
            continue;
        }
        // Update progress as each spectra batch is converted.
        // NOTE(review): assumes the "Converting N spectra" line always precedes the first
        // "Spectrum" line; if it did not, spectrumCount would still be 0 and the division
        // below would throw. TODO confirm against CompassXport output format.
        match = Regex.Match(line, @"Spectrum \d+ - (\d+)"); // Not L10N
        if (match.Success)
        {
            var spectrumEnd = int.Parse(match.Groups[1].Value);
            var percentComplete = hashPercent + (100-hashPercent)*spectrumEnd/spectrumCount;
            status = status.ChangeMessage(line).ChangePercentComplete(percentComplete);
            monitor.UpdateProgress(status);
        }
    }
    // Poll for exit so cancellation stays responsive while the process finishes up.
    while (!proc.WaitForExit(200))
    {
        if (monitor.IsCanceled)
        {
            proc.Kill();
            throw new LoadCanceledException(status.Cancel());
        }
    }
    if (proc.ExitCode != 0)
    {
        throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertBrukerToMzml_Failure_attempting_to_convert__0__to_mzML_using_CompassXport_, filePathBruker),
            string.Empty, sbOut.ToString()));
    }
}
/// <summary>
/// Make a BiblioSpec SQLite library from a list of spectra and their intensities.
/// </summary>
/// <param name="librarySpec">Library spec for which the new library is created</param>
/// <param name="listSpectra">List of existing spectra, by LibKey</param>
/// <param name="libraryName">Name of the library to be created</param>
/// <param name="progressMonitor">Progress monitor to display progress in creating library</param>
/// <returns>A library of type <see cref="BiblioSpecLiteLibrary"/>, or null if the monitor is canceled</returns>
public BiblioSpecLiteLibrary CreateLibraryFromSpectra(BiblioSpecLiteSpec librarySpec, List<SpectrumMzInfo> listSpectra, string libraryName, IProgressMonitor progressMonitor)
{
    const string libAuthority = BiblioSpecLiteLibrary.DEFAULT_AUTHORITY;
    const int majorVer = 1;
    const int minorVer = 0;
    string libId = libraryName;
    // Use a very specific LSID, since it really only matches this document.
    // NOTE(review): "spectral_libary" (sic) is part of the persisted LSID format; do not
    // "fix" the spelling without confirming nothing parses or compares these strings.
    string libLsid = string.Format("urn:lsid:{0}:spectral_libary:bibliospec:nr:minimal:{1}:{2}:{3}.{4}", // Not L10N
        libAuthority, libId, Guid.NewGuid(), majorVer, minorVer);
    var dictLibrary = new Dictionary<LibKey, BiblioLiteSpectrumInfo>();
    using (ISession session = OpenWriteSession())
    using (ITransaction transaction = session.BeginTransaction())
    {
        int progressPercent = 0;
        int i = 0;
        var status = new ProgressStatus(Resources.BlibDb_CreateLibraryFromSpectra_Creating_spectral_library_for_imported_transition_list);
        foreach (var spectrum in listSpectra)
        {
            ++i;
            var dbRefSpectrum = RefSpectrumFromPeaks(spectrum);
            session.Save(dbRefSpectrum);
            dictLibrary.Add(spectrum.Key,
                new BiblioLiteSpectrumInfo(spectrum.Key, dbRefSpectrum.Copies,
                    dbRefSpectrum.NumPeaks,
                    (int)(dbRefSpectrum.Id ?? 0),
                    default(IndexedRetentionTimes),
                    default(IndexedIonMobilities)));
            if (progressMonitor != null)
            {
                // Cancellation abandons the transaction (disposed uncommitted) and returns null.
                if (progressMonitor.IsCanceled)
                    return null;
                // Only report when the integer percentage actually changes.
                int progressNew = (i * 100 / listSpectra.Count);
                if (progressPercent != progressNew)
                {
                    progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                    progressPercent = progressNew;
                }
            }
        }
        session.Flush();
        session.Clear();
        // Simulate ctime(d), which is what BlibBuild uses.
        string createTime = string.Format("{0:ddd MMM dd HH:mm:ss yyyy}", DateTime.Now); // Not L10N? different date/time format in different countries
        DbLibInfo libInfo = new DbLibInfo
        {
            LibLSID = libLsid,
            CreateTime = createTime,
            NumSpecs = dictLibrary.Count,
            MajorVersion = majorVer,
            MinorVersion = minorVer
        };
        session.Save(libInfo);
        session.Flush();
        session.Clear();
        transaction.Commit();
    }
    var libraryEntries = dictLibrary.Values.ToArray();
    return new BiblioSpecLiteLibrary(librarySpec, libLsid, majorVer, minorVer,
        libraryEntries, FileStreamManager.Default);
}
/// <summary>
/// Forwards the status update to the wrapped progress monitor and returns its response.
/// </summary>
public UpdateProgressResponse UpdateProgress(IProgressStatus status) => _monitor.UpdateProgress(status);
/// <summary>
/// Train the model by iterative calculating weights to separate target and decoy transition groups.
/// </summary>
/// <param name="targets">Target transition groups.</param>
/// <param name="decoys">Decoy transition groups.</param>
/// <param name="initParameters">Initial model parameters (weights and bias)</param>
/// <param name="includeSecondBest"> Include the second best peaks in the targets as decoys?</param>
/// <param name="preTrain">Use a pre-trained model to bootstrap the learning.</param>
/// <param name="progressMonitor">Optional progress monitor; cancellation throws OperationCanceledException.</param>
/// <returns>Immutable model with new weights.</returns>
public override IPeakScoringModel Train(IList<IList<float[]>> targets, IList<IList<float[]>> decoys, LinearModelParams initParameters,
    bool includeSecondBest = false, bool preTrain = true, IProgressMonitor progressMonitor = null)
{
    if(initParameters == null)
        initParameters = new LinearModelParams(_peakFeatureCalculators.Count);
    // ChangeProp/ImClone: mutate a clone and return it, leaving this instance unchanged.
    return ChangeProp(ImClone(this), im =>
        {
            // Drop empty transition groups before scoring.
            targets = targets.Where(list => list.Count > 0).ToList();
            decoys = decoys.Where(list => list.Count > 0).ToList();
            var targetTransitionGroups = new ScoredGroupPeaksSet(targets);
            var decoyTransitionGroups = new ScoredGroupPeaksSet(decoys);
            // Bootstrap from the pre-trained legacy model
            if (preTrain)
            {
                var preTrainedWeights = new double[initParameters.Weights.Count];
                // NaN weights mark disabled calculators; preserve that marking.
                for (int i = 0; i < preTrainedWeights.Length; ++i)
                {
                    if (double.IsNaN(initParameters.Weights[i]))
                    {
                        preTrainedWeights[i] = double.NaN;
                    }
                }
                // Choose the calculator set (standard vs analyte) with more enabled features.
                int standardEnabledCount = GetEnabledCount(LegacyScoringModel.StandardFeatureCalculators, initParameters.Weights);
                int analyteEnabledCount = GetEnabledCount(LegacyScoringModel.AnalyteFeatureCalculators, initParameters.Weights);
                bool hasStandards = standardEnabledCount >= analyteEnabledCount;
                var calculators = hasStandards ? LegacyScoringModel.StandardFeatureCalculators : LegacyScoringModel.AnalyteFeatureCalculators;
                for (int i = 0; i < calculators.Length; ++i)
                {
                    // Retention-time prediction is excluded from the bootstrap weights.
                    if (calculators[i].GetType() == typeof (MQuestRetentionTimePredictionCalc))
                        continue;
                    SetCalculatorValue(calculators[i].GetType(), LegacyScoringModel.DEFAULT_WEIGHTS[i], preTrainedWeights);
                }
                targetTransitionGroups.ScorePeaks(preTrainedWeights);
                decoyTransitionGroups.ScorePeaks(preTrainedWeights);
            }
            // Iteratively refine the weights through multiple iterations.
            var calcWeights = new double[initParameters.Weights.Count];
            Array.Copy(initParameters.Weights.ToArray(), calcWeights, initParameters.Weights.Count);
            double decoyMean = 0;
            double decoyStdev = 0;
            bool colinearWarning = false;
            // This may take a long time between progress updates, but just measure progress by cycles through the training
            var status = new ProgressStatus(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model);
            if (progressMonitor != null)
                progressMonitor.UpdateProgress(status);
            for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++)
            {
                if (progressMonitor != null)
                {
                    if (progressMonitor.IsCanceled)
                        throw new OperationCanceledException();
                    progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model__iteration__0__of__1__, iteration + 1, MAX_ITERATIONS))
                        .ChangePercentComplete((iteration + 1) * 100 / (MAX_ITERATIONS + 1)));
                }
                // calcWeights is refined in place; decoy statistics from the final iteration
                // are used below to rescale the parameters.
                im.CalculateWeights(iteration, targetTransitionGroups, decoyTransitionGroups, includeSecondBest, calcWeights, out decoyMean, out decoyStdev, ref colinearWarning);
                GC.Collect(); // Each loop generates a number of large objects. GC helps to keep private bytes under control
            }
            if (progressMonitor != null)
                progressMonitor.UpdateProgress(status.ChangePercentComplete(100));
            // Normalize so decoy scores have mean 0 / stdev 1, then commit to the clone.
            var parameters = new LinearModelParams(calcWeights);
            parameters = parameters.RescaleParameters(decoyMean, decoyStdev);
            im.Parameters = parameters;
            im.ColinearWarning = colinearWarning;
            im.UsesSecondBest = includeSecondBest;
            im.UsesDecoys = decoys.Count > 0;
        });
}
// ReSharper restore NonLocalizedString
/// <summary>
/// Executes an export for all chromatograms in the document
/// with file names matching one of the files in filesToExport
/// writer = location to write the chromatogram data to
/// longWaitBroker = progress bar (can be null)
/// filesToExport = file names for which to write chromatograms
/// cultureInfo = local culture
/// chromExtractors = list of special chromatogram types to include (base peak, etc)
/// chromSources = type of ions to include (precursor, product)
/// </summary>
public void Export(TextWriter writer, IProgressMonitor longWaitBroker, IList<string> filesToExport, CultureInfo cultureInfo, IList<ChromExtractor> chromExtractors, IList<ChromSource> chromSources)
{
    int currentReplicates = 0;
    int totalReplicates = _chromatogramSets.Count;
    var status = new ProgressStatus(string.Empty);
    FormatHeader(writer, FIELD_NAMES);
    foreach (var chromatograms in _chromatogramSets)
    {
        if (longWaitBroker != null)
        {
            // Report per-replicate progress; 100 is reserved for completion.
            int percentComplete = currentReplicates++ * 100 / totalReplicates;
            if (percentComplete < 100)
            {
                longWaitBroker.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.ChromatogramExporter_Export_Exporting_Chromatograms_for__0_, chromatograms.Name))
                    .ChangePercentComplete(percentComplete));
            }
        }
        // First write the special (whole-run) chromatograms: base peak, TIC, etc.
        foreach (var extractor in chromExtractors)
        {
            ChromatogramGroupInfo[] arrayChromSpecial;
            if (!_measuredResults.TryLoadAllIonsChromatogram(chromatograms, extractor, true, out arrayChromSpecial))
            {
                // TODO: need error determination here
                continue;
            }
            foreach (var chromInfo in arrayChromSpecial)
            {
                string fileName = chromInfo.FilePath.GetFileName();
                // Skip the files that have not been selected for export
                if (!filesToExport.Contains(fileName))
                    continue;
                IList<float> times = chromInfo.Times;
                IList<float> intensities = chromInfo.IntensityArray[0];
                float tic = CalculateTic(times, intensities);
                string extractorName = GetExtractorName(extractor);
                // Peptide/precursor columns are N/A for whole-run chromatograms.
                string[] fieldArray =
                {
                    fileName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    extractorName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    System.Convert.ToString(tic, cultureInfo)
                };
                FormatChromLine(writer, fieldArray, times, intensities, cultureInfo);
            }
        }
        // Then write the per-molecule transition-group chromatograms.
        foreach (var peptideNode in Document.Molecules)
        {
            foreach (TransitionGroupDocNode groupNode in peptideNode.Children)
                ExportGroupNode(peptideNode, groupNode, chromatograms, filesToExport, chromSources, writer, cultureInfo);
        }
    }
}
// Throws DatabaseOpeningException
/// <summary>
/// Opens and loads an ion mobility library from the SQLite file at <paramref name="path"/>.
/// Failures are wrapped in a DatabaseOpeningException; if a load monitor is supplied the
/// exception is reported to it and null is returned instead of throwing.
/// </summary>
/// <param name="path">Path to an existing ion mobility library file</param>
/// <param name="loadMonitor">Optional monitor for progress and error reporting</param>
/// <returns>The loaded library, or null when an error was reported to the monitor</returns>
public static IonMobilityDb GetIonMobilityDb(string path, IProgressMonitor loadMonitor)
{
    var status = new ProgressStatus(string.Format(Resources.IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_, path));
    if (loadMonitor != null)
        loadMonitor.UpdateProgress(status);
    try
    {
        if (String.IsNullOrEmpty(path))
            throw new DatabaseOpeningException(Resources.IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_library_);
        if (!File.Exists(path))
            throw new DatabaseOpeningException(
                string.Format(
                    Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_,
                    path));
        string message;
        Exception xInner = null; // keep the root cause as the inner exception
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new IonMobilityDb(path, sessionFactory).Load(loadMonitor, status);
                }
            }
        }
        catch (UnauthorizedAccessException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobility_library_file__0_, path);
            xInner = x;
        }
        catch (DirectoryNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_not_exist_, path);
            xInner = x;
        }
        catch (FileNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_, path);
            xInner = x;
        }
        catch (Exception x) // SQLiteException is already something of a catch-all, just lump it with the others here
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_file_, path);
            xInner = x;
        }
        // Fix: previously the caught exception was discarded here, losing the root cause.
        // Pass it as the inner exception, matching the sibling implementation of this method.
        throw new DatabaseOpeningException(message, xInner);
    }
    catch (DatabaseOpeningException x)
    {
        if (loadMonitor == null)
            throw;
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return null;
    }
}
/// <summary>
/// Starts the process described by <paramref name="psi"/> with redirected output, feeds it
/// optional stdin, and streams its console output: lines are echoed to
/// <paramref name="writer"/>, "NN%" lines drive progress, prefixed lines become status
/// messages, and "error" lines are collected for the failure report. Throws IOException
/// on start failure or non-zero exit.
/// </summary>
/// <param name="psi">Process start info; stdout/stderr redirection is forced on</param>
/// <param name="stdin">Text to write to the process's standard input, or null</param>
/// <param name="progress">Monitor for progress/cancellation; may be null (output only collected)</param>
/// <param name="status">Progress status, replaced as the process reports progress</param>
/// <param name="writer">Writer echoing process output, or null</param>
public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status, TextWriter writer)
{
    // Make sure required streams are redirected.
    psi.RedirectStandardOutput = true;
    psi.RedirectStandardError = true;
    var proc = Process.Start(psi);
    if (proc == null)
        throw new IOException(string.Format("Failure starting {0} command.", psi.FileName)); // Not L10N
    if (stdin != null)
    {
        try
        {
            proc.StandardInput.Write(stdin);
        }
        finally
        {
            // Close stdin even if the write fails, so the child does not block waiting for EOF.
            proc.StandardInput.Close();
        }
    }
    var reader = new ProcessStreamReader(proc);
    StringBuilder sbError = new StringBuilder();
    int percentLast = 0;
    string line;
    while ((line = reader.ReadLine(progress)) != null)
    {
        if (writer != null && !line.StartsWith(HideLinePrefix))
            writer.WriteLine(line);
        // With no monitor, everything is treated as potential error output; with one,
        // only lines starting with "error" are collected.
        if (progress == null || line.ToLowerInvariant().StartsWith("error")) // Not L10N
        {
            sbError.AppendLine(line);
        }
        else // if (progress != null)
        {
            if (progress.IsCanceled)
            {
                proc.Kill();
                progress.UpdateProgress(status = status.Cancel());
                return;
            }
            // Lines ending in "%" report progress; the last space-separated token holds the number.
            if (line.EndsWith("%")) // Not L10N
            {
                double percent;
                string[] parts = line.Split(' ');
                string percentPart = parts[parts.Length - 1];
                if (double.TryParse(percentPart.Substring(0, percentPart.Length - 1), out percent))
                {
                    percentLast = (int) percent;
                    status = status.ChangePercentComplete(percentLast);
                    // In segmented progress, 100% advances to the next segment.
                    if (percent >= 100 && status.SegmentCount > 0)
                        status = status.NextSegment();
                    progress.UpdateProgress(status);
                }
            }
            else if (MessagePrefix == null || line.StartsWith(MessagePrefix))
            {
                // Remove prefix, if there is one.
                if (MessagePrefix != null)
                    line = line.Substring(MessagePrefix.Length);
                status = status.ChangeMessage(line);
                progress.UpdateProgress(status);
            }
        }
    }
    proc.WaitForExit();
    int exit = proc.ExitCode;
    if (exit != 0)
    {
        // Add anything remaining on stderr to the report before failing.
        line = proc.StandardError.ReadLine();
        if (line != null)
            sbError.AppendLine(line);
        if (sbError.Length == 0)
            throw new IOException("Error occurred running process."); // Not L10N
        throw new IOException(sbError.ToString());
    }
    // Make to complete the status, if the process succeeded, but never
    // printed 100% to the console
    if (percentLast < 100)
    {
        status = status.ChangePercentComplete(100);
        if (status.SegmentCount > 0)
            status = status.NextSegment();
        if (progress != null)
            progress.UpdateProgress(status);
    }
}
/// <summary>
/// Builds a BiblioSpec library: converts any AB SCIEX .group (Pilot) inputs, runs BlibBuild
/// to create a redundant library, then filters it with BlibFilter into the non-redundant
/// library at OutputPath. Errors are reported through the progress monitor.
/// </summary>
/// <param name="progress">Monitor for progress, cancellation and error reporting</param>
/// <returns>True on success; false on cancellation or any reported failure</returns>
public bool BuildLibrary(IProgressMonitor progress)
{
    _ambiguousMatches = null;
    IProgressStatus status = new ProgressStatus(Resources.BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library);
    progress.UpdateProgress(status);
    // Convert Pilot (.group) inputs to a readable form before building.
    if (InputFiles.Any(f => f.EndsWith(EXT_PILOT)))
    {
        try
        {
            InputFiles = VendorIssueHelper.ConvertPilotFiles(InputFiles, progress, status);
            if (progress.IsCanceled)
            {
                return(false);
            }
        }
        catch (Exception x)
        {
            progress.UpdateProgress(status.ChangeErrorException(x));
            return(false);
        }
    }
    string message = string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Building__0__library, Path.GetFileName(OutputPath));
    progress.UpdateProgress(status = status.ChangeMessage(message));
    // Stage 1: build the redundant library.
    string redundantLibrary = BiblioSpecLiteSpec.GetRedundantName(OutputPath);
    var blibBuilder = new BlibBuild(redundantLibrary, InputFiles, TargetSequences)
    {
        IncludeAmbiguousMatches = IncludeAmbiguousMatches,
        CutOffScore = CutOffScore,
        Id = Id,
    };
    try
    {
        if (!blibBuilder.BuildLibrary(Action, progress, ref status, out _ambiguousMatches))
        {
            return(false);
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return(false);
    }
    catch (Exception x)
    {
        // NOTE(review): Console.WriteLine looks like a debugging leftover; the real
        // reporting is the UpdateProgress call below. Consider removing or logging properly.
        Console.WriteLine(x.Message);
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__0__, redundantLibrary))));
        return(false);
    }
    // Stage 2: filter the redundant library into the final non-redundant library.
    var blibFilter = new BlibFilter();
    status = new ProgressStatus(message);
    progress.UpdateProgress(status);
    // Write the non-redundant library to a temporary file first
    try
    {
        using (var saver = new FileSaver(OutputPath))
        {
            if (!blibFilter.Filter(redundantLibrary, saver.SafeName, progress, ref status))
            {
                return(false);
            }
            saver.Commit();
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return(false);
    }
    catch
    {
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__, OutputPath))));
        return(false);
    }
    finally
    {
        // Clean up the intermediate redundant library unless the caller asked to keep it.
        if (!KeepRedundant)
        {
            FileEx.SafeDelete(redundantLibrary, true);
        }
    }
    return(true);
}
// Throws DatabaseOpeningException
/// <summary>
/// Opens and loads an ion mobility library from the SQLite file at <paramref name="path"/>.
/// All failures are wrapped in a DatabaseOpeningException preserving the root cause; when a
/// load monitor is supplied, the exception is reported to it and null is returned instead
/// of throwing.
/// </summary>
/// <param name="path">Path to an existing ion mobility library file</param>
/// <param name="loadMonitor">Optional monitor for progress and error reporting</param>
/// <returns>The loaded library, or null when an error was reported to the monitor</returns>
public static IonMobilityDb GetIonMobilityDb(string path, IProgressMonitor loadMonitor)
{
    var status = new ProgressStatus(string.Format(Resources.IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_, path));
    if (loadMonitor != null)
    {
        loadMonitor.UpdateProgress(status);
    }
    try
    {
        if (String.IsNullOrEmpty(path))
        {
            throw new DatabaseOpeningException(Resources.IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_library_);
        }
        if (!File.Exists(path))
        {
            throw new DatabaseOpeningException(
                string.Format(
                    Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_,
                    path));
        }
        string message;
        Exception xInner = null; // root-cause exception, preserved as the inner exception below
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return(new IonMobilityDb(path, sessionFactory).Load(loadMonitor, status));
                }
            }
        }
        catch (UnauthorizedAccessException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobility_library_file__0_, path);
            xInner = x;
        }
        catch (DirectoryNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_not_exist_, path);
            xInner = x;
        }
        catch (FileNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_, path);
            xInner = x;
        }
        catch (Exception x) // SQLiteException is already something of a catch-all, just lump it with the others here
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_file_, path);
            xInner = x;
        }
        throw new DatabaseOpeningException(message, xInner);
    }
    catch (DatabaseOpeningException x)
    {
        // With no monitor to report through, let the caller handle the exception.
        if (loadMonitor == null)
        {
            throw;
        }
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return(null);
    }
}
/// <summary>
/// Updates the monitor with the percent-complete implied by currentCount/totalCount,
/// returning the new immutable status. Throws OperationCanceledException if the monitor
/// has been canceled. No update is sent when the integer percentage has not changed.
/// </summary>
/// <param name="progressMonitor">Monitor checked for cancellation and sent the update</param>
/// <param name="currentCount">Number of items processed so far</param>
/// <param name="totalCount">Total number of items (0 means nothing to measure)</param>
/// <returns>The updated status, or this instance when no update was needed</returns>
public ProgressStatus UpdatePercentCompleteProgress(IProgressMonitor progressMonitor, long currentCount, long totalCount)
{
    if (progressMonitor.IsCanceled)
        throw new OperationCanceledException();
    // Guard against DivideByZeroException when there is no work to measure.
    if (totalCount == 0)
        return this;
    int percentComplete = (int) (100 * currentCount / totalCount);
    // Skip redundant updates when the displayed percentage would not change.
    if (percentComplete == PercentComplete)
        return this;
    var statusNew = ChangePercentComplete(percentComplete);
    progressMonitor.UpdateProgress(statusNew);
    return statusNew;
}
/// <summary>
/// Recomputes retention time alignments between all available retention time sources in the
/// document and returns a new document with the updated alignments in its settings.
/// Returns null if alignment is canceled through the progress monitor.
/// </summary>
/// <param name="document">Document whose retention time alignments are recalculated</param>
/// <param name="progressMonitor">Monitor for progress updates and cancellation</param>
/// <returns>A new document with recalculated alignments, or null on cancellation</returns>
public static SrmDocument RecalculateAlignments(SrmDocument document, IProgressMonitor progressMonitor)
{
    var newSources = ListAvailableRetentionTimeSources(document.Settings);
    var newResultsSources = ListSourcesForResults(document.Settings.MeasuredResults, newSources);
    var allLibraryRetentionTimes = ReadAllRetentionTimes(document, newSources);
    var newFileAlignments = new List<FileRetentionTimeAlignments>();
    var progressStatus = new ProgressStatus("Aligning retention times"); // Not L10N? Will users see this?
    foreach (var retentionTimeSource in newResultsSources.Values)
    {
        // Progress is measured by how many sources have been aligned so far.
        progressStatus = progressStatus.ChangePercentComplete(100*newFileAlignments.Count/newResultsSources.Count);
        progressMonitor.UpdateProgress(progressStatus);
        try
        {
            var fileAlignments = CalculateFileRetentionTimeAlignments(retentionTimeSource.Name, allLibraryRetentionTimes, progressMonitor);
            newFileAlignments.Add(fileAlignments);
        }
        catch (OperationCanceledException)
        {
            // Cancellation during alignment: report it and abandon the recalculation.
            progressMonitor.UpdateProgress(progressStatus.Cancel());
            return null;
        }
    }
    var newDocRt = new DocumentRetentionTimes(newSources.Values, newFileAlignments);
    var newDocument = document.ChangeSettings(document.Settings.ChangeDocumentRetentionTimes(newDocRt));
    Debug.Assert(IsLoaded(newDocument));
    progressMonitor.UpdateProgress(progressStatus.Complete());
    return newDocument;
}
/// <summary>
/// Imports peak boundaries from delimited text, matching each row's (modified peptide,
/// decoy) pair and file name to nodes in the document, then applying the start/end
/// times (and annotations) via ChangePeak. Unmatched files, peptides, and charge
/// states are collected into the Unrecognized* lists rather than failing the import.
/// </summary>
/// <param name="reader">Source of the delimited peak-boundary text.</param>
/// <param name="progressMonitor">Optional monitor; may be null.</param>
/// <param name="lineCount">Expected line count used only for progress percentage.</param>
/// <param name="isMinutes">True if times are in minutes; otherwise seconds (divided by 60).</param>
/// <param name="removeMissing">When true, peaks not present in the file are removed.</param>
/// <param name="changePeaks">When false, start/end times may be omitted and peaks are not changed.</param>
public SrmDocument Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount, bool isMinutes, bool removeMissing = false, bool changePeaks = true)
{
    var status = new ProgressStatus(Resources.PeakBoundaryImporter_Import_Importing_Peak_Boundaries);
    double timeConversionFactor = isMinutes ? 1.0 : 60.0;
    int linesRead = 0;
    int progressPercent = 0;
    var docNew = (SrmDocument) Document.ChangeIgnoreChangingChildren(true);
    var docReference = docNew;
    var sequenceToNode = new Dictionary<Tuple<string, bool>, IList<IdentityPath>>();
    var fileNameToFileMatch = new Dictionary<string, ChromSetFileMatch>();
    var trackAdjustedResults = new HashSet<ResultsKey>();
    var modMatcher = new ModificationMatcher();
    // Make the dictionary of modified peptide strings to doc nodes and paths
    for (int i = 0; i < Document.MoleculeCount; ++i)
    {
        IdentityPath peptidePath = Document.GetPathTo((int) SrmDocument.Level.Molecules, i);
        PeptideDocNode peptideNode = (PeptideDocNode) Document.FindNode(peptidePath);
        var peptidePair = new Tuple<string, bool>(peptideNode.RawTextId, peptideNode.IsDecoy);
        IList<IdentityPath> idPathList;
        // Each (sequence, isDecoy) pair can be associated with more than one peptide,
        // to handle the case of duplicate peptides in the doucment.
        if (sequenceToNode.TryGetValue(peptidePair, out idPathList))
        {
            idPathList.Add(peptidePath);
            sequenceToNode[peptidePair] = idPathList;
        }
        else
        {
            idPathList = new List<IdentityPath> { peptidePath };
            sequenceToNode.Add(peptidePair, idPathList);
        }
    }
    // Add annotations as possible columns
    var allFieldNames = new List<string[]>(FIELD_NAMES);
    allFieldNames.AddRange(from def in Document.Settings.DataSettings.AnnotationDefs
                           where def.AnnotationTargets.Contains(AnnotationDef.AnnotationTarget.precursor_result)
                           select new[] { def.Name });
    string line = reader.ReadLine();
    linesRead++;
    int[] fieldIndices;
    int fieldsTotal;
    // If we aren't changing peaks, allow start and end time to be missing
    var requiredFields = changePeaks ? REQUIRED_FIELDS : REQUIRED_NO_CHROM;
    char correctSeparator = ReadFirstLine(line, allFieldNames, requiredFields, out fieldIndices, out fieldsTotal);
    while ((line = reader.ReadLine()) != null)
    {
        linesRead++;
        if (progressMonitor != null)
        {
            if (progressMonitor.IsCanceled)
                return Document;
            int progressNew = (int) (linesRead*100/lineCount);
            if (progressPercent != progressNew)
            {
                progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                progressPercent = progressNew;
            }
        }
        var dataFields = new DataFields(fieldIndices, line.ParseDsvFields(correctSeparator), allFieldNames);
        if (dataFields.Length != fieldsTotal)
        {
            throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_,
                linesRead, dataFields.Length, fieldsTotal));
        }
        string modifiedPeptideString = dataFields.GetField(Field.modified_peptide);
        modMatcher.CreateMatches(Document.Settings,
            new List<string> {modifiedPeptideString},
            Settings.Default.StaticModList,
            Settings.Default.HeavyModList);
        // Convert the modified peptide string into a standardized form that
        // converts unimod, names, etc, into masses, eg [+57.0]
        var nodeForModPep = modMatcher.GetModifiedNode(modifiedPeptideString);
        if (nodeForModPep == null)
        {
            throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_, modifiedPeptideString, linesRead));
        }
        nodeForModPep = nodeForModPep.ChangeSettings(Document.Settings, SrmSettingsDiff.ALL);
        modifiedPeptideString = nodeForModPep.RawTextId; // Modified sequence, or custom ion name
        string fileName = dataFields.GetField(Field.filename);
        bool isDecoy = dataFields.IsDecoy(linesRead);
        var peptideIdentifier = new Tuple<string, bool>(modifiedPeptideString, isDecoy);
        int charge;
        bool chargeSpecified = dataFields.TryGetCharge(linesRead, out charge);
        string sampleName = dataFields.GetField(Field.sample_name);
        double? startTime = null;
        double? endTime = null;
        if (changePeaks)
        {
            startTime = dataFields.GetTime(Field.start_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_, linesRead);
            if (startTime.HasValue)
                startTime = startTime / timeConversionFactor;
            endTime = dataFields.GetTime(Field.end_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_, linesRead);
            if (endTime.HasValue)
                endTime = endTime / timeConversionFactor;
        }
        // Error if only one of startTime and endTime is null
        if (startTime == null && endTime != null)
            throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_start_time_on_line__0_, linesRead));
        if (startTime != null && endTime == null)
            throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_end_time_on_line__0_, linesRead));
        // Add filename to second dictionary if not yet encountered
        ChromSetFileMatch fileMatch;
        if (!fileNameToFileMatch.TryGetValue(fileName, out fileMatch))
        {
            fileMatch = Document.Settings.MeasuredResults.FindMatchingMSDataFile(MsDataFileUri.Parse(fileName));
            fileNameToFileMatch.Add(fileName, fileMatch);
        }
        if (fileMatch == null)
        {
            UnrecognizedFiles.Add(fileName);
            continue;
        }
        var chromSet = fileMatch.Chromatograms;
        string nameSet = chromSet.Name;
        ChromFileInfoId[] fileIds;
        if (sampleName == null)
        {
            // No sample specified: apply to every file in the replicate
            fileIds = chromSet.MSDataFileInfos.Select(x => x.FileId).ToArray();
        }
        else
        {
            var sampleFile = chromSet.MSDataFileInfos.FirstOrDefault(info => Equals(sampleName, info.FilePath.GetSampleName()));
            if (sampleFile == null)
            {
                throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__, sampleName, linesRead, fileName));
            }
            fileIds = new[] {sampleFile.FileId};
        }
        // Look up the IdentityPath of peptide in first dictionary
        IList<IdentityPath> pepPaths;
        if (!sequenceToNode.TryGetValue(peptideIdentifier, out pepPaths))
        {
            UnrecognizedPeptides.Add(modifiedPeptideString);
            continue;
        }
        // Define the annotations to be added
        var annotations = dataFields.GetAnnotations();
        AnnotationsAdded = annotations.Keys.ToList();
        // Loop over all the transition groups in that peptide to find matching charge,
        // or use all transition groups if charge not specified
        bool foundSample = false;
        foreach (var pepPath in pepPaths)
        {
            var nodePep = (PeptideDocNode)docNew.FindNode(pepPath);
            for(int i = 0; i < nodePep.Children.Count; ++i)
            {
                var groupRelPath = nodePep.GetPathTo(i);
                var groupNode = (TransitionGroupDocNode) nodePep.FindNode(groupRelPath);
                if (!chargeSpecified || charge == groupNode.TransitionGroup.PrecursorCharge)
                {
                    var groupFileIndices = new HashSet<int>(groupNode.ChromInfos.Select(x => x.FileId.GlobalIndex));
                    // Loop over the files in this groupNode to find the correct sample
                    // Change peak boundaries for the transition group
                    foreach (var fileId in fileIds)
                    {
                        if (groupFileIndices.Contains(fileId.GlobalIndex))
                        {
                            var groupPath = new IdentityPath(pepPath, groupNode.Id);
                            // Attach annotations
                            docNew = docNew.AddPrecursorResultsAnnotations(groupPath, fileId, annotations);
                            // Change peak
                            var filePath = chromSet.GetFileInfo(fileId).FilePath;
                            if (changePeaks)
                            {
                                docNew = docNew.ChangePeak(groupPath, nameSet, filePath, null, startTime, endTime, UserSet.IMPORTED, null, false);
                            }
                            // For removing peaks that are not in the file, if removeMissing = true
                            trackAdjustedResults.Add(new ResultsKey(fileId.GlobalIndex, groupNode.Id));
                            foundSample = true;
                        }
                    }
                }
            }
        }
        if (!foundSample)
        {
            UnrecognizedChargeStates.Add(new UnrecognizedChargeState(charge, fileName, modifiedPeptideString));
        }
    }
    // Remove peaks from the document that weren't in the file.
    if (removeMissing)
        docNew = RemoveMissing(docNew, trackAdjustedResults, changePeaks);
    // If nothing has changed, return the old Document before ChangeIgnoreChangingChildren was turned off
    if (!ReferenceEquals(docNew, docReference))
        Document = (SrmDocument) Document.ChangeIgnoreChangingChildren(false).ChangeChildrenChecked(docNew.Children);
    return Document;
}
/// <summary>
/// Imports peak boundaries from delimited text into the document. Peptide lookup first
/// tries the raw modified-sequence text, then falls back to a cached canonical form
/// produced by the modification matcher. When changePeaks is false, apex/start/end
/// times are attached as annotations instead of modifying peaks.
/// </summary>
/// <param name="reader">Source of the delimited peak-boundary text.</param>
/// <param name="progressMonitor">Optional monitor; may be null.</param>
/// <param name="lineCount">Expected line count used only for progress percentage.</param>
/// <param name="isMinutes">True if times are in minutes; otherwise seconds.</param>
/// <param name="removeMissing">When true, peaks not present in the file are removed.</param>
/// <param name="changePeaks">When false, boundaries are recorded as annotations only.</param>
public SrmDocument Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount, bool isMinutes, bool removeMissing = false, bool changePeaks = true)
{
    IProgressStatus status = new ProgressStatus(Resources.PeakBoundaryImporter_Import_Importing_Peak_Boundaries);
    double timeConversionFactor = isMinutes ? 1.0 : 60.0;
    int linesRead = 0;
    int progressPercent = 0;
    var docNew = (SrmDocument)Document.ChangeIgnoreChangingChildren(true);
    var docReference = docNew;
    var sequenceToNode = MakeSequenceDictionary(Document);
    var fileNameToFileMatch = new Dictionary<string, ChromSetFileMatch>();
    var trackAdjustedResults = new HashSet<ResultsKey>();
    var modMatcher = new ModificationMatcher();
    // Cache of raw modified-sequence text -> canonicalized sequence, so the
    // (expensive) modification matching runs at most once per distinct string
    var canonicalSequenceDict = new Dictionary<string, string>();
    // Add annotations as possible columns
    var allFieldNames = new List<string[]>(FIELD_NAMES);
    allFieldNames.AddRange(from def in Document.Settings.DataSettings.AnnotationDefs
                           where def.AnnotationTargets.Contains(AnnotationDef.AnnotationTarget.precursor_result)
                           select new[] { def.Name });
    string line = reader.ReadLine();
    linesRead++;
    int[] fieldIndices;
    int fieldsTotal;
    // If we aren't changing peaks, allow start and end time to be missing
    var requiredFields = changePeaks ? REQUIRED_FIELDS : REQUIRED_NO_CHROM;
    char correctSeparator = ReadFirstLine(line, allFieldNames, requiredFields, out fieldIndices, out fieldsTotal);
    while ((line = reader.ReadLine()) != null)
    {
        linesRead++;
        if (progressMonitor != null)
        {
            if (progressMonitor.IsCanceled)
            {
                return (Document);
            }
            int progressNew = (int)(linesRead * 100 / lineCount);
            if (progressPercent != progressNew)
            {
                progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                progressPercent = progressNew;
            }
        }
        var dataFields = new DataFields(fieldIndices, line.ParseDsvFields(correctSeparator), allFieldNames);
        if (dataFields.Length != fieldsTotal)
        {
            throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_, linesRead, dataFields.Length, fieldsTotal));
        }
        string modifiedPeptideString = dataFields.GetField(Field.modified_peptide);
        string fileName = dataFields.GetField(Field.filename);
        bool isDecoy = dataFields.IsDecoy(linesRead);
        IList<IdentityPath> pepPaths;
        if (!sequenceToNode.TryGetValue(Tuple.Create(modifiedPeptideString, isDecoy), out pepPaths))
        {
            // Raw text did not match; try the canonicalized sequence instead
            string canonicalSequence;
            if (!canonicalSequenceDict.TryGetValue(modifiedPeptideString, out canonicalSequence))
            {
                // Only attempt modification matching if the string contains
                // non-uppercase characters (i.e. has modification notation)
                if (modifiedPeptideString.Any(c => c < 'A' || c > 'Z'))
                {
                    modMatcher.CreateMatches(Document.Settings, new List<string> { modifiedPeptideString }, Settings.Default.StaticModList, Settings.Default.HeavyModList);
                    var nodeForModPep = modMatcher.GetModifiedNode(modifiedPeptideString);
                    if (nodeForModPep == null)
                    {
                        throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_, modifiedPeptideString, linesRead));
                    }
                    nodeForModPep = nodeForModPep.ChangeSettings(Document.Settings, SrmSettingsDiff.ALL);
                    // Convert the modified peptide string into a standardized form that
                    // converts unimod, names, etc, into masses, eg [+57.0]
                    canonicalSequence = nodeForModPep.ModifiedTarget.Sequence;
                    canonicalSequenceDict.Add(modifiedPeptideString, canonicalSequence);
                }
            }
            if (null != canonicalSequence)
            {
                sequenceToNode.TryGetValue(Tuple.Create(canonicalSequence, isDecoy), out pepPaths);
            }
        }
        if (null == pepPaths)
        {
            UnrecognizedPeptides.Add(modifiedPeptideString);
            continue;
        }
        Adduct charge;
        bool chargeSpecified = dataFields.TryGetCharge(linesRead, out charge);
        string sampleName = dataFields.GetField(Field.sample_name);
        double? apexTime = dataFields.GetTime(Field.apex_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_time_, linesRead);
        double? startTime = dataFields.GetTime(Field.start_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_, linesRead);
        double? endTime = dataFields.GetTime(Field.end_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_, linesRead);
        // Error if only one of startTime and endTime is null
        if (startTime == null && endTime != null)
        {
            if (changePeaks)
            {
                throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_start_time_on_line__0_, linesRead));
            }
            endTime = null;
        }
        if (startTime != null && endTime == null)
        {
            if (changePeaks)
            {
                throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_end_time_on_line__0_, linesRead));
            }
            startTime = null;
        }
        // Add filename to second dictionary if not yet encountered
        ChromSetFileMatch fileMatch;
        if (!fileNameToFileMatch.TryGetValue(fileName, out fileMatch))
        {
            fileMatch = Document.Settings.MeasuredResults.FindMatchingMSDataFile(MsDataFileUri.Parse(fileName));
            fileNameToFileMatch.Add(fileName, fileMatch);
        }
        if (fileMatch == null)
        {
            UnrecognizedFiles.Add(fileName);
            continue;
        }
        var chromSet = fileMatch.Chromatograms;
        string nameSet = chromSet.Name;
        ChromFileInfoId[] fileIds;
        if (sampleName == null)
        {
            // No sample specified: apply to every file in the replicate
            fileIds = chromSet.MSDataFileInfos.Select(x => x.FileId).ToArray();
        }
        else
        {
            var sampleFile = chromSet.MSDataFileInfos.FirstOrDefault(info => Equals(sampleName, info.FilePath.GetSampleName()));
            if (sampleFile == null)
            {
                throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__, sampleName, linesRead, fileName));
            }
            fileIds = new[] { sampleFile.FileId };
        }
        // Define the annotations to be added
        var annotations = dataFields.GetAnnotations();
        if (!changePeaks)
        {
            // Record boundary times as annotations rather than changing peaks
            if (apexTime.HasValue)
            {
                annotations.Add(ComparePeakBoundaries.APEX_ANNOTATION, dataFields.GetField(Field.apex_time));
            }
            if (startTime.HasValue && endTime.HasValue)
            {
                annotations.Add(ComparePeakBoundaries.START_TIME_ANNOTATION, dataFields.GetField(Field.start_time));
                annotations.Add(ComparePeakBoundaries.END_TIME_ANNOTATION, dataFields.GetField(Field.end_time));
            }
        }
        AnnotationsAdded = annotations.Keys.ToList();
        // Loop over all the transition groups in that peptide to find matching charge,
        // or use all transition groups if charge not specified
        bool foundSample = false;
        foreach (var pepPath in pepPaths)
        {
            var nodePep = (PeptideDocNode)docNew.FindNode(pepPath);
            foreach (TransitionGroupDocNode groupNode in nodePep.Children)
            {
                if (chargeSpecified && charge != groupNode.TransitionGroup.PrecursorAdduct)
                {
                    continue;
                }
                // Loop over the files in this groupNode to find the correct sample
                // Change peak boundaries for the transition group
                foreach (var fileId in GetApplicableFiles(fileIds, groupNode))
                {
                    var groupPath = new IdentityPath(pepPath, groupNode.Id);
                    // Attach annotations
                    if (annotations.Any())
                    {
                        docNew = docNew.AddPrecursorResultsAnnotations(groupPath, fileId, annotations);
                    }
                    // Change peak
                    var filePath = chromSet.GetFileInfo(fileId).FilePath;
                    if (changePeaks)
                    {
                        docNew = docNew.ChangePeak(groupPath, nameSet, filePath, null, startTime, endTime, UserSet.IMPORTED, null, false);
                    }
                    // For removing peaks that are not in the file, if removeMissing = true
                    trackAdjustedResults.Add(new ResultsKey(fileId.GlobalIndex, groupNode.Id));
                    foundSample = true;
                }
            }
        }
        if (!foundSample)
        {
            UnrecognizedChargeStates.Add(new UnrecognizedChargeState(charge, fileName, modifiedPeptideString));
        }
    }
    // Remove peaks from the document that weren't in the file.
    if (removeMissing)
    {
        docNew = RemoveMissing(docNew, trackAdjustedResults, changePeaks);
    }
    // If nothing has changed, return the old Document before ChangeIgnoreChangingChildren was turned off
    if (!ReferenceEquals(docNew, docReference))
    {
        Document = (SrmDocument)Document.ChangeIgnoreChangingChildren(false).ChangeChildrenChecked(docNew.Children);
    }
    return (Document);
}
/// <summary>
/// Starts the process described by <paramref name="psi"/>, optionally writes
/// <paramref name="stdin"/> to its standard input, and parses its console output
/// line-by-line for errors, messages, progress percentages, and status text.
/// Throws IOException if the process cannot start or exits with a non-zero code.
/// </summary>
/// <param name="psi">Process to start; output/error redirection is forced on.</param>
/// <param name="stdin">Optional text to feed to standard input; may be null.</param>
/// <param name="progress">Optional progress monitor; when null all output is treated as error text.</param>
/// <param name="status">Progress status, updated in place as output is parsed.</param>
/// <param name="writer">Optional sink that receives all non-hidden output lines; may be null.</param>
public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref IProgressStatus status, TextWriter writer)
{
    // Make sure required streams are redirected.
    psi.RedirectStandardOutput = true;
    psi.RedirectStandardError = true;
    _messageLog.Clear();
    var proc = Process.Start(psi);
    if (proc == null)
    {
        throw new IOException(string.Format(@"Failure starting {0} command.", psi.FileName));
    }
    if (stdin != null)
    {
        try
        {
            proc.StandardInput.Write(stdin);
        }
        finally
        {
            // Always close stdin so the child process does not block waiting for more input
            proc.StandardInput.Close();
        }
    }
    var reader = new ProcessStreamReader(proc);
    StringBuilder sbError = new StringBuilder();
    int percentLast = 0;
    string line;
    while ((line = reader.ReadLine(progress)) != null)
    {
        if (writer != null && !line.StartsWith(HideLinePrefix))
        {
            writer.WriteLine(line);
        }
        if (progress == null || line.ToLowerInvariant().StartsWith(@"error"))
        {
            sbError.AppendLine(line);
        }
        else // if (progress != null)
        {
            if (progress.IsCanceled)
            {
                proc.Kill();
                progress.UpdateProgress(status = status.Cancel());
                return;
            }
            if (!string.IsNullOrEmpty(MessagePrefix) && line.StartsWith(MessagePrefix))
            {
                // Prefixed lines are collected into the message log, prefix stripped
                _messageLog.Add(line.Substring(MessagePrefix.Length));
            }
            else if (line.EndsWith(@"%"))
            {
                // Lines ending in "%" report progress; the percentage is the last token
                double percent;
                string[] parts = line.Split(' ');
                string percentPart = parts[parts.Length - 1];
                if (double.TryParse(percentPart.Substring(0, percentPart.Length - 1), out percent))
                {
                    percentLast = (int)percent;
                    status = status.ChangePercentComplete(percentLast);
                    if (percent >= 100 && status.SegmentCount > 0)
                    {
                        status = status.NextSegment();
                    }
                    progress.UpdateProgress(status);
                }
            }
            else if (StatusPrefix == null || line.StartsWith(StatusPrefix))
            {
                // Remove prefix, if there is one.
                if (StatusPrefix != null)
                {
                    line = line.Substring(StatusPrefix.Length);
                }
                status = status.ChangeMessage(line);
                progress.UpdateProgress(status);
            }
        }
    }
    proc.WaitForExit();
    int exit = proc.ExitCode;
    if (exit != 0)
    {
        line = proc.StandardError.ReadLine();
        if (line != null)
        {
            sbError.AppendLine(line);
        }
        if (sbError.Length == 0)
        {
            sbError.AppendLine(@"Error occurred running process.");
        }
        string processPath = Path.GetDirectoryName(psi.FileName)?.Length == 0
            ? Path.Combine(Environment.CurrentDirectory, psi.FileName)
            : psi.FileName;
        // ReSharper disable LocalizableElement
        sbError.AppendFormat("\r\nCommand-line: {0} {1}\r\nWorking directory: {2}{3}", processPath,
            // ReSharper restore LocalizableElement
            string.Join(" ", proc.StartInfo.Arguments), psi.WorkingDirectory,
            stdin != null ? "\r\nStandard input:\r\n" + stdin : "");
        throw new IOException(sbError.ToString());
    }
    // Make to complete the status, if the process succeeded, but never
    // printed 100% to the console
    if (percentLast < 100)
    {
        status = status.ChangePercentComplete(100);
        if (status.SegmentCount > 0)
        {
            status = status.NextSegment();
        }
        if (progress != null)
        {
            progress.UpdateProgress(status);
        }
    }
}
/// <summary>
/// Reads ion mobility values from each provider, averaging multiple measurements per
/// peptide, and converts measured drift times to collisional cross sections using the
/// supplied per-charge regression lines (x = (y - intercept)/slope).
/// </summary>
/// <param name="monitor">Optional progress monitor; may be null. Returns null if canceled.</param>
/// <param name="providers">Sources of ion mobility information.</param>
/// <param name="countProviders">Total provider count, used only for progress percentage.</param>
/// <param name="regressions">Per-charge regressions for drift-time conversion; may be null.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when a measured drift time has no regression for its charge state.
/// </exception>
public static IEnumerable<ValidatingIonMobilityPeptide> ConvertDriftTimesToCollisionalCrossSections(IProgressMonitor monitor,
    IEnumerable<IIonMobilityInfoProvider> providers,
    int countProviders,
    IDictionary<int, RegressionLine> regressions)
{
    IProgressStatus status = new ProgressStatus(Resources.CollisionalCrossSectionGridViewDriver_ProcessIonMobilityValues_Reading_ion_mobility_information);
    var peptideIonMobilities = new List<ValidatingIonMobilityPeptide>();
    int runCount = 0;
    foreach (var ionMobilityInfoProvider in providers)
    {
        if ((monitor != null) && monitor.IsCanceled)
        {
            return null;
        }
        runCount++;
        string message = string.Format(Resources.CollisionalCrossSectionGridViewDriver_ProcessDriftTimes_Reading_ion_mobility_data_from__0__, ionMobilityInfoProvider.Name);
        if (monitor != null)
        {
            monitor.UpdateProgress(status = status.ChangeMessage(message));
        }
        foreach (var ionMobilityList in ionMobilityInfoProvider.GetIonMobilityDict())
        {
            // If there is more than one value, just average them
            double totalDrift = 0;
            double totalHighEnergyOffset = 0;
            int count = 0;
            foreach (var ionMobilityInfo in ionMobilityList.Value)
            {
                totalHighEnergyOffset += ionMobilityInfo.HighEnergyDriftTimeOffsetMsec;
                if (ionMobilityInfo.IsCollisionalCrossSection)
                {
                    totalDrift += ionMobilityInfo.Value;
                }
                else
                {
                    // Convert from a measured drift time
                    RegressionLine regression;
                    if ((regressions != null) && regressions.TryGetValue(ionMobilityList.Key.Charge, out regression))
                    {
                        totalDrift += regression.GetX(ionMobilityInfo.Value); // x = (y-intercept)/slope
                    }
                    else
                    {
                        // Was "throw new Exception(...)": prefer a specific exception type
                        // over the base Exception class (callers catching Exception still work)
                        throw new InvalidOperationException(String.Format(Resources.CollisionalCrossSectionGridViewDriver_ProcessIonMobilityValues_Cannot_import_measured_drift_time_for_sequence__0___no_collisional_cross_section_conversion_parameters_were_provided_for_charge_state__1__,
                            ionMobilityList.Key.Sequence,
                            ionMobilityList.Key.Charge));
                    }
                }
                count++;
            }
            if (count > 0)
            {
                peptideIonMobilities.Add(new ValidatingIonMobilityPeptide(ionMobilityList.Key.Sequence, totalDrift / count, totalHighEnergyOffset / count));
            }
        }
        if (monitor != null)
        {
            // Guard against divide-by-zero if the caller passes an inaccurate count
            monitor.UpdateProgress(status = status.ChangePercentComplete(
                countProviders > 0 ? runCount * 100 / countProviders : 100));
        }
    }
    if (monitor != null)
    {
        monitor.UpdateProgress(status.Complete());
    }
    return peptideIonMobilities;
}
public void Share(IProgressMonitor progressMonitor) { ProgressMonitor = progressMonitor; ProgressMonitor.UpdateProgress(_progressStatus = new ProgressStatus(DefaultMessage)); using (var zip = new ZipFile()) { // Make sure large files don't cause this to fail. zip.UseZip64WhenSaving = Zip64Option.AsNecessary; if (CompleteSharing) ShareComplete(zip); else ShareMinimal(zip); } }
// ReSharper restore NonLocalizedString
/// <summary>
/// Converts any ProteinPilot .group files in <paramref name="inputFiles"/> to .group.xml
/// using the converter executable found via the ProteinPilot registry key, passing other
/// files through unchanged. Already-converted files are reused rather than re-converted.
/// Throws IOException if the converter is missing or a conversion fails; throws
/// LoadCanceledException if canceled while reading converter output.
/// </summary>
public static List<string> ConvertPilotFiles(IList<string> inputFiles, IProgressMonitor progress, ProgressStatus status)
{
    string groupConverterExePath = null;
    var inputFilesPilotConverted = new List<string>();
    for (int index = 0; index < inputFiles.Count; index++)
    {
        string inputFile = inputFiles[index];
        // Non-.group files pass through untouched
        if (!inputFile.EndsWith(BiblioSpecLiteBuilder.EXT_PILOT))
        {
            inputFilesPilotConverted.Add(inputFile);
            continue;
        }
        string outputFile = Path.ChangeExtension(inputFile, BiblioSpecLiteBuilder.EXT_PILOT_XML);
        // Avoid re-converting files that have already been converted
        if (File.Exists(outputFile))
        {
            // Avoid duplication, in case the user accidentally adds both .group and .group.xml files
            // for the same results
            if (!inputFiles.Contains(outputFile))
                inputFilesPilotConverted.Add(outputFile);
            continue;
        }
        string message = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Converting__0__to_xml, Path.GetFileName(inputFile));
        int percent = index * 100 / inputFiles.Count;
        progress.UpdateProgress(status = status.ChangeMessage(message).ChangePercentComplete(percent));
        // Locate the converter executable once, via the ProteinPilot registry entry
        if (groupConverterExePath == null)
        {
            var key = Registry.LocalMachine.OpenSubKey(KEY_PROTEIN_PILOT, false);
            if (key != null)
            {
                string proteinPilotCommandWithArgs = (string)key.GetValue(string.Empty);
                var proteinPilotCommandWithArgsSplit =
                    proteinPilotCommandWithArgs.Split(new[] { "\" \"" }, StringSplitOptions.RemoveEmptyEntries); // Remove " "%1" // Not L10N
                string path = Path.GetDirectoryName(proteinPilotCommandWithArgsSplit[0].Trim(new[] { '\\', '\"' })); // Remove preceding "
                if (path != null)
                {
                    // Prefer the group file extractor; fall back to group2xml
                    var groupFileExtractorPath = Path.Combine(path, EXE_GROUP_FILE_EXTRACTOR);
                    if (File.Exists(groupFileExtractorPath))
                    {
                        groupConverterExePath = groupFileExtractorPath;
                    }
                    else
                    {
                        var group2XmlPath = Path.Combine(path, EXE_GROUP2_XML);
                        if (File.Exists(group2XmlPath))
                        {
                            groupConverterExePath = group2XmlPath;
                        }
                        else
                        {
                            string errorMessage = string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Unable_to_find__0__or__1__in_directory__2____Please_reinstall_ProteinPilot_software_to_be_able_to_handle__group_files_,
                                EXE_GROUP_FILE_EXTRACTOR, EXE_GROUP2_XML, path);
                            throw new IOException(errorMessage);
                        }
                    }
                }
            }
            if (groupConverterExePath == null)
            {
                throw new IOException(Resources.VendorIssueHelper_ConvertPilotFiles_ProteinPilot_software__trial_or_full_version__must_be_installed_to_convert___group__files_to_compatible___group_xml__files_);
            }
        }
        // run group2xml
        // ReSharper disable NonLocalizedString
        var argv = new[]
                       {
                           "XML",
                           "\"" + inputFile + "\"",
                           "\"" + outputFile + "\""
                       };
        // ReSharper restore NonLocalizedString
        var psi = new ProcessStartInfo(groupConverterExePath)
                      {
                          CreateNoWindow = true,
                          UseShellExecute = false,
                          // Common directory includes the directory separator
                          WorkingDirectory = Path.GetDirectoryName(groupConverterExePath) ?? string.Empty,
                          Arguments = string.Join(" ", argv.ToArray()), // Not L10N
                          RedirectStandardError = true,
                          RedirectStandardOutput = true,
                      };
        var sbOut = new StringBuilder();
        var proc = new Process {StartInfo = psi};
        proc.Start();
        var reader = new ProcessStreamReader(proc);
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            if (progress.IsCanceled)
            {
                proc.Kill();
                throw new LoadCanceledException(status.Cancel());
            }
            sbOut.AppendLine(line);
        }
        // Poll for exit so cancellation remains responsive
        while (!proc.WaitForExit(200))
        {
            if (progress.IsCanceled)
            {
                proc.Kill();
                return inputFilesPilotConverted;
            }
        }
        if (proc.ExitCode != 0)
        {
            throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertPilotFiles_Failure_attempting_to_convert_file__0__to__group_xml_, inputFile),
                string.Empty, sbOut.ToString()));
        }
        inputFilesPilotConverted.Add(outputFile);
    }
    progress.UpdateProgress(status.ChangePercentComplete(100));
    return inputFilesPilotConverted;
}
//Throws DatabaseOpeningException
/// <summary>
/// Opens an optimization library database at <paramref name="path"/>. If the file uses
/// the old schema (GenericADOException on load), attempts conversion via
/// ConvertFromOldFormat. Throws OptimizationsOpeningException on failure when
/// <paramref name="loadMonitor"/> is null; otherwise reports the error and returns null.
/// </summary>
public static OptimizationDb GetOptimizationDb(string path, IProgressMonitor loadMonitor, SrmDocument document)
{
    var status = new ProgressStatus(string.Format(Resources.OptimizationDb_GetOptimizationDb_Loading_optimization_library__0_, path));
    if (loadMonitor != null)
        loadMonitor.UpdateProgress(status);
    try
    {
        // Validate path before attempting to open
        if (path == null)
            throw new OptimizationsOpeningException(Resources.OptimizationDb_GetOptimizationDb_Library_path_cannot_be_null_);
        if (!File.Exists(path))
            throw new OptimizationsOpeningException(String.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__does_not_exist_, path));
        string message;
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new OptimizationDb(path, sessionFactory).Load(loadMonitor, status);
                }
            }
        }
        catch (UnauthorizedAccessException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_You_do_not_have_privilieges_to_access_the_file__0__, path);
        }
        catch (DirectoryNotFoundException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_path_containing__0__does_not_exist_, path);
        }
        catch (FileNotFoundException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_created__Perhaps_you_do_not_have_sufficient_privileges_, path);
        }
        catch (SQLiteException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__is_not_a_valid_optimization_library_file_, path);
        }
        catch (GenericADOException)
        {
            // Schema mismatch most likely means an old-format library: try converting it
            try
            {
                return ConvertFromOldFormat(path, loadMonitor, status, document);
            }
            catch (Exception e)
            {
                message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened__conversion_from_old_format_failed____1_, path, e.Message);
            }
        }
        catch (Exception e)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened___1_, path, e.Message);
        }
        throw new OptimizationsOpeningException(message);
    }
    catch (OptimizationsOpeningException x)
    {
        // With no monitor, the caller handles the exception directly
        if (loadMonitor == null)
            throw;
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return null;
    }
}
// ReSharper restore NonLocalizedString
/// <summary>
/// Executes an export for all chromatograms in the document
/// with file names matching one of the files in filesToExport.
/// </summary>
/// <param name="writer">Location to write the chromatogram data to.</param>
/// <param name="longWaitBroker">Progress bar (can be null).</param>
/// <param name="filesToExport">File names for which to write chromatograms.</param>
/// <param name="cultureInfo">Local culture for number formatting.</param>
/// <param name="chromExtractors">List of special chromatogram types to include (base peak, etc).</param>
/// <param name="chromSources">Type of ions to include (precursor, product).</param>
public void Export(TextWriter writer, IProgressMonitor longWaitBroker, IList<string> filesToExport, CultureInfo cultureInfo, IList<ChromExtractor> chromExtractors, IList<ChromSource> chromSources)
{
    int currentReplicates = 0;
    int totalReplicates = _chromatogramSets.Count;
    IProgressStatus status = new ProgressStatus(string.Empty);
    FormatHeader(writer, FIELD_NAMES);
    foreach (var chromatograms in _chromatogramSets)
    {
        if (longWaitBroker != null)
        {
            int percentComplete = currentReplicates++ *100 / totalReplicates;
            if (percentComplete < 100)
            {
                longWaitBroker.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.ChromatogramExporter_Export_Exporting_Chromatograms_for__0_, chromatograms.Name)).ChangePercentComplete(percentComplete));
            }
        }
        // First write the special (all-ions) chromatograms: base peak, TIC, etc.
        foreach (var extractor in chromExtractors)
        {
            ChromatogramGroupInfo[] arrayChromSpecial;
            if (!_measuredResults.TryLoadAllIonsChromatogram(chromatograms, extractor, true, out arrayChromSpecial))
            {
                // TODO: need error determination here
                continue;
            }
            foreach (var chromInfo in arrayChromSpecial)
            {
                string fileName = chromInfo.FilePath.GetFileName();
                // Skip the files that have not been selected for export
                if (!filesToExport.Contains(fileName))
                {
                    continue;
                }
                var firstChromatogram = chromInfo.TransitionPointSets.First();
                IList<float> times = firstChromatogram.Times;
                IList<float> intensities = firstChromatogram.Intensities;
                float tic = CalculateTic(times, intensities);
                string extractorName = GetExtractorName(extractor);
                // Non-applicable columns are filled with the Excel N/A marker
                string[] fieldArray =
                {
                    fileName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    extractorName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    System.Convert.ToString(tic, cultureInfo)
                };
                FormatChromLine(writer, fieldArray, times, intensities, cultureInfo);
            }
        }
        // Then write per-molecule chromatograms
        var molecules = Document.Molecules.ToArray();
        for (int iMolecule = 0; iMolecule < molecules.Length; iMolecule++)
        {
            if (longWaitBroker != null)
            {
                if (longWaitBroker.IsCanceled)
                {
                    return;
                }
                longWaitBroker.UpdateProgress(status = status.ChangePercentComplete(iMolecule * 100 / molecules.Length));
            }
            var peptideNode = molecules[iMolecule];
            foreach (TransitionGroupDocNode groupNode in peptideNode.Children)
            {
                if (longWaitBroker != null && longWaitBroker.IsCanceled)
                {
                    return;
                }
                ExportGroupNode(peptideNode, groupNode, chromatograms, filesToExport, chromSources, writer, cultureInfo);
            }
        }
    }
}
/// <summary>
/// Converts an old-format optimization library (schema: Id, PeptideModSeq, Charge, Mz,
/// Value, Type) to the new format by rebuilding fragment ion names and product charges
/// from a temporary document, then writes a new database over <paramref name="path"/>.
/// Throws OptimizationsOpeningException if any optimization cannot be matched.
/// </summary>
public static OptimizationDb ConvertFromOldFormat(string path, IProgressMonitor loadMonitor, ProgressStatus status, SrmDocument document)
{
    // Try to open assuming old format (Id, PeptideModSeq, Charge, Mz, Value, Type)
    var precursors = new Dictionary<string, HashSet<int>>(); // PeptideModSeq -> charges
    var optimizations = new List<Tuple<DbOptimization, double>>(); // DbOptimization, product m/z
    int maxCharge = 1;
    using (SQLiteConnection connection = new SQLiteConnection("Data Source = " + path)) // Not L10N
    using (SQLiteCommand command = new SQLiteCommand(connection))
    {
        connection.Open();
        command.CommandText = "SELECT PeptideModSeq, Charge, Mz, Value, Type FROM OptimizationLibrary"; // Not L10N
        using (SQLiteDataReader reader = command.ExecuteReader())
        {
            while (reader.Read())
            {
                var type = (OptimizationType)reader["Type"]; // Not L10N
                var modifiedSequence = reader["PeptideModSeq"].ToString(); // Not L10N
                var charge = (int)reader["Charge"]; // Not L10N
                var productMz = (double)reader["Mz"]; // Not L10N
                var value = (double)reader["Value"]; // Not L10N
                // FragmentIon and ProductCharge are unknown in the old format;
                // placeholders (string.Empty, -1) are filled in by matching below
                optimizations.Add(new Tuple<DbOptimization, double>(new DbOptimization(type, modifiedSequence, charge, string.Empty, -1, value), productMz));
                if (!precursors.ContainsKey(modifiedSequence))
                {
                    precursors[modifiedSequence] = new HashSet<int>();
                }
                precursors[modifiedSequence].Add(charge);
                if (charge > maxCharge)
                {
                    maxCharge = charge;
                }
            }
        }
    }
    // Build a peptide list with charge indicators for import into a scratch document
    var peptideList = (from precursor in precursors
                       from charge in precursor.Value
                       select string.Format("{0}{1}", precursor.Key, Transition.GetChargeIndicator(charge)) // Not L10N
                      ).ToList();
    // Scratch document configured to generate all y/b ions across the observed charge range
    var newDoc = new SrmDocument(document != null ? document.Settings : SrmSettingsList.GetDefault());
    newDoc = newDoc.ChangeSettings(newDoc.Settings
        .ChangePeptideLibraries(libs => libs.ChangePick(PeptidePick.filter))
        .ChangeTransitionFilter(filter => filter.ChangeFragmentRangeFirstName("ion 1") // Not L10N
            .ChangeFragmentRangeLastName("last ion") // Not L10N
            .ChangeProductCharges(Enumerable.Range(1, maxCharge).ToList())
            .ChangeIonTypes(new []{ IonType.y, IonType.b }))
        .ChangeTransitionLibraries(libs => libs.ChangePick(TransitionLibraryPick.none))
        );
    var matcher = new ModificationMatcher { FormatProvider = NumberFormatInfo.InvariantInfo };
    matcher.CreateMatches(newDoc.Settings, peptideList, Settings.Default.StaticModList, Settings.Default.HeavyModList);
    FastaImporter importer = new FastaImporter(newDoc, matcher);
    string text = string.Format(">>{0}\r\n{1}", newDoc.GetPeptideGroupId(true), TextUtil.LineSeparate(peptideList)); // Not L10N
    PeptideGroupDocNode imported = importer.Import(new StringReader(text), null, Helpers.CountLinesInString(text)).First();
    int optimizationsUpdated = 0;
    // Match each generated transition back to old optimizations by sequence, charge,
    // and product m/z (within a small tolerance) to recover fragment ion identity
    foreach (PeptideDocNode nodePep in imported.Children)
    {
        string sequence = newDoc.Settings.GetSourceTextId(nodePep);
        foreach (var nodeGroup in nodePep.TransitionGroups)
        {
            int charge = nodeGroup.PrecursorCharge;
            foreach (var nodeTran in nodeGroup.Transitions)
            {
                double productMz = nodeTran.Mz;
                foreach (var optimization in optimizations.Where(opt =>
                    string.IsNullOrEmpty(opt.Item1.FragmentIon) &&
                    opt.Item1.ProductCharge == -1 &&
                    opt.Item1.PeptideModSeq == sequence &&
                    opt.Item1.Charge == charge &&
                    Math.Abs(opt.Item2 - productMz) < 0.00001))
                {
                    optimization.Item1.FragmentIon = nodeTran.FragmentIonName;
                    optimization.Item1.ProductCharge = nodeTran.Transition.Charge;
                    ++optimizationsUpdated;
                }
            }
        }
    }
    if (optimizations.Count > optimizationsUpdated)
    {
        throw new OptimizationsOpeningException(string.Format(Resources.OptimizationDb_ConvertFromOldFormat_Failed_to_convert__0__optimizations_to_new_format_,
            optimizations.Count - optimizationsUpdated));
    }
    // Write the converted library atomically via FileSaver, then reopen it
    using (var fs = new FileSaver(path))
    {
        OptimizationDb db = CreateOptimizationDb(fs.SafeName);
        db.UpdateOptimizations(optimizations.Select(opt => opt.Item1).ToArray(), new DbOptimization[0]);
        fs.Commit();
        if (loadMonitor != null)
            loadMonitor.UpdateProgress(status.ChangePercentComplete(100));
        return GetOptimizationDb(fs.RealName, null, null);
    }
}
/// <summary>
/// Opens the iRT database at <paramref name="path"/> and loads its peptides.
/// On failure, either throws a <see cref="DatabaseOpeningException"/> (when no
/// monitor was supplied) or reports the error through the monitor and returns
/// null with an empty peptide list.
/// </summary>
/// <param name="path">Path to the iRT database file.</param>
/// <param name="loadMonitor">Optional progress monitor; may be null.</param>
/// <param name="dbPeptides">Receives the loaded peptides, or an empty array on monitored failure.</param>
/// <returns>The opened <see cref="IrtDb"/>, or null when a failure was reported to the monitor.</returns>
public static IrtDb GetIrtDb(string path, IProgressMonitor loadMonitor, out IList<DbIrtPeptide> dbPeptides)
{
    var progressStatus = new ProgressStatus(string.Format(Resources.IrtDb_GetIrtDb_Loading_iRT_database__0_, path));
    loadMonitor?.UpdateProgress(progressStatus);

    try
    {
        if (null == path)
            throw new DatabaseOpeningException(Resources.IrtDb_GetIrtDb_Database_path_cannot_be_null);
        if (!File.Exists(path))
            throw new DatabaseOpeningException(string.Format(Resources.IrtDb_GetIrtDb_The_file__0__does_not_exist_, path));

        Exception innerException = null;
        string errorMessage;
        try
        {
            // Check that this is a valid SQLite file with our schema; only one
            // thread at a time may read from the same path, so lock the shared
            // session factory while loading.
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new IrtDb(path, sessionFactory).Load(loadMonitor, progressStatus, out dbPeptides);
                }
            }
        }
        catch (UnauthorizedAccessException ex)
        {
            errorMessage = string.Format(Resources.IrtDb_GetIrtDb_You_do_not_have_privileges_to_access_the_file__0_, path);
            innerException = ex;
        }
        catch (DirectoryNotFoundException ex)
        {
            errorMessage = string.Format(Resources.IrtDb_GetIrtDb_The_path_containing__0__does_not_exist, path);
            innerException = ex;
        }
        catch (FileNotFoundException ex)
        {
            errorMessage = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_created_Perhaps_you_do_not_have_sufficient_privileges, path);
            innerException = ex;
        }
        catch (SQLiteException ex)
        {
            errorMessage = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__is_not_a_valid_iRT_database_file, path);
            innerException = ex;
        }
        catch (Exception ex)
        {
            errorMessage = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_opened, path);
            innerException = ex;
        }

        // Wrap whichever failure occurred in a database-opening exception.
        throw new DatabaseOpeningException(errorMessage, innerException);
    }
    catch (DatabaseOpeningException ex)
    {
        if (loadMonitor == null)
            throw;
        // With a monitor available, report the error instead of propagating.
        loadMonitor.UpdateProgress(progressStatus.ChangeErrorException(ex));
        dbPeptides = new DbIrtPeptide[0];
        return null;
    }
}
/// <summary>
/// Initialize isolation scheme export: runs the in-memory transition list
/// export and decides whether a file export should follow.
/// </summary>
/// <param name="fileName">Destination file path; null when only the in-memory export is needed.</param>
/// <param name="progressMonitor">Optional progress monitor; may be null, matching the null-tolerant monitor handling elsewhere in this file.</param>
/// <returns>True if export to <paramref name="fileName"/> should proceed; false when canceled or no file was requested.</returns>
protected bool InitExport(string fileName, IProgressMonitor progressMonitor)
{
    // Guard the monitor dereference: other methods in this file treat a null
    // monitor as "no progress reporting"; previously a null monitor threw
    // NullReferenceException here.
    if (progressMonitor != null && progressMonitor.IsCanceled)
        return false;

    // First export transition lists to map in memory
    Export(null, progressMonitor);

    // If filename is null, then no more work needs to be done.
    if (fileName == null)
    {
        progressMonitor?.UpdateProgress(new ProgressStatus(string.Empty).Complete());
        return false;
    }

    return true;
}