/// <summary>
/// Imports SIM mzML results into the test document and verifies the extracted
/// m/z ranges, optionally running the small-molecule variant of the document.
/// </summary>
/// <param name="asSmallMolecules">When true, the peptide document is first
/// converted to small molecules (skipped unless small-molecule test versions
/// are enabled)</param>
private void DoTestImportSim(bool asSmallMolecules)
{
    if (asSmallMolecules && !RunSmallMoleculeTestVersions)
    {
        System.Console.Write(MSG_SKIPPING_SMALLMOLECULE_TEST_VERSION);
        return;
    }
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath(DOCUMENT_NAME);
    string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
    // Delete any existing chromatogram cache so results are re-imported fresh
    FileEx.SafeDelete(cachePath);
    SrmDocument doc = ResultsUtil.DeserializeDocument(docPath);
    var pepdoc = doc;
    if (asSmallMolecules)
    {
        var refine = new RefinementSettings();
        doc = refine.ConvertToSmallMolecules(pepdoc, TestContext.ResultsDirectory);
    }
    // using ensures cache file handles are released when the test ends
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the mzML file and verify Mz range
        Import(docContainer, testFilesDir.GetTestPath(RESULTS_NAME), 510, 512);
        Import(docContainer, testFilesDir.GetTestPath(RESULTS_NAME2), 555, 557);
    }
}
/// <summary>
/// Creates a new background proteome database at <paramref name="fileName"/>,
/// deleting any existing file the user chose to overwrite, and updates the
/// dialog's name/path text fields.
/// </summary>
public void CreateDb(string fileName)
{
    // If the file exists, then the user chose to overwrite,
    // so delete the existing file.
    try
    {
        FileEx.SafeDelete(fileName);
    }
    catch (IOException x)
    {
        MessageDlg.ShowException(this, x);
        return; // Cannot proceed if the old file cannot be deleted
    }

    // Remember the chosen directory for the next save dialog
    Settings.Default.ProteomeDbDirectory = Path.GetDirectoryName(fileName);

    try
    {
        ProteomeDb.CreateProteomeDb(fileName);
    }
    catch (Exception x)
    {
        var message = TextUtil.LineSeparate(string.Format(
            Resources
                .BuildBackgroundProteomeDlg_btnCreate_Click_An_error_occurred_attempting_to_create_the_proteome_file__0__,
            fileName), x.Message);
        MessageDlg.ShowWithException(this, message, x);
        // NOTE(review): execution continues after a failed create, so the
        // name/path fields below are still populated - confirm intentional
    }

    if (textName.Text.Length == 0)
    {
        textName.Text = Path.GetFileNameWithoutExtension(fileName);
    }
    textPath.Text = fileName; // This will cause RefreshStatus()
}
/// <summary>
/// Creates a new ion mobility library database at <paramref name="path"/> and
/// records the path in the dialog's database text box on success.
/// </summary>
public void CreateDatabase(string path)
{
    //The file that was just created does not have a schema, so SQLite won't touch it.
    //The file must have a schema or not exist for use with SQLite, so we'll delete
    //it and install a schema
    try
    {
        FileEx.SafeDelete(path);
    }
    catch (IOException x)
    {
        MessageDlg.ShowException(this, x);
        return; // Cannot create over a file that cannot be deleted
    }

    //Create file, initialize db
    try
    {
        IrtDb.CreateIonMobilityDb(path); // see note below
        textDatabase.Text = path;
    }
    catch (DatabaseOpeningException x)
    {
        // Known database-open failures get shown as-is
        MessageDlg.ShowException(this, x);
    }
    catch (Exception x)
    {
        // Anything else is wrapped with a user-facing "could not be created" message
        var message = TextUtil.LineSeparate(string.Format(Resources.EditIonMobilityLibraryDlg_CreateDatabase_The_ion_mobility_library_file__0__could_not_be_created, path), x.Message);
        MessageDlg.ShowWithException(this, message, x);
    }
}
/// <summary>
/// Deletes each of the named files from the given test files directory.
/// </summary>
/// <param name="testFilesDir">Directory that resolves test-relative paths</param>
/// <param name="fileNames">Names of files to remove</param>
private static void DeleteFiles(TestFilesDir testFilesDir, IEnumerable<string> fileNames)
{
    foreach (var name in fileNames)
    {
        var fullPath = testFilesDir.GetTestPath(name);
        FileEx.SafeDelete(fullPath);
    }
}
/// <summary>
/// Exercises Waters lockmass correction from the command line: imports the
/// same raw file with and without a negative lockmass correction, then
/// compares the resulting peaks.
/// </summary>
public void WatersLockmassCmdlinePerfTest()
{
    if (IsPerfTest && !RunPerfTests)
    {
        return; // Don't want to run this lengthy test right now
    }
    TestFilesZip = "https://skyline.gs.washington.edu/perftests/PerfTestLockmass.zip";
    TestFilesPersistent = new[] { "ID19638_01_UCA195_2533_082715.raw" }; // List of files that we'd like to unzip alongside parent zipFile, and (re)use in place
    TestFilesDir = new TestFilesDir(TestContext, TestFilesZip, "CmdlineTest", TestFilesPersistent);
    var skyfile = GetTestPath("2533_FattyAcids.sky");
    var rawPath = GetTestPath(TestFilesPersistent[0]);
    const double lockmassNegative = 554.2615; // presumably the leucine enkephalin [M-H]- reference - TODO confirm

    // Exercise the commandline
    var outPathUncorrected = TestFilesDir.GetTestPath("cmdlineTestUncorrected.sky");
    var outPathCorrected = TestFilesDir.GetTestPath("cmdlineTestCorrected.sky");
    RunCommand("--in=" + skyfile, "--import-file=" + rawPath, "--out=" + outPathUncorrected);
    // Remove the cache from the first run so the corrected import re-reads the raw data
    FileEx.SafeDelete(Path.ChangeExtension(skyfile, ChromatogramCache.EXT));
    var cmdDocUncorrected = ResultsUtil.DeserializeDocument(outPathUncorrected);
    RunCommand("--in=" + skyfile, "--import-file=" + rawPath, "--import-lockmass-negative=" + lockmassNegative, "--out=" + outPathCorrected);
    var cmdDocCorrected = ResultsUtil.DeserializeDocument(outPathCorrected);
    ComparePeaks(cmdDocCorrected, cmdDocUncorrected);
}
/// <summary>
/// Runs a library build that is expected to fail, verifies the error dialog's
/// message, and confirms no library files are left behind.
/// </summary>
/// <param name="inputFile">Search results file to build from</param>
/// <param name="libraryPath">Destination library path passed to BuildLibrary</param>
/// <param name="messageParts">Expected fragments of the error message; when
/// empty, the message must contain the input file name and the word "line"</param>
private void BuildLibraryError(string inputFile, string libraryPath, params string[] messageParts)
{
    // Remove any library files left over from a previous attempt
    string redundantBuildPath = TestFilesDir.GetTestPath(_libraryName + BiblioSpecLiteSpec.EXT_REDUNDANT);
    FileEx.SafeDelete(redundantBuildPath);
    string nonredundantBuildPath = TestFilesDir.GetTestPath(_libraryName + BiblioSpecLiteSpec.EXT);
    FileEx.SafeDelete(nonredundantBuildPath);

    ReportLibraryBuildFailures = false; // The failure is expected here
    BuildLibrary(TestFilesDir.GetTestPath("library_errors"), new[] { inputFile }, libraryPath, false, false, false, false, null);

    var messageDlg = WaitForOpenForm<MessageDlg>();
    Assert.IsNotNull(messageDlg, "No message box shown");
    AssertEx.Contains(messageDlg.Message, "ERROR");
    if (messageParts.Length == 0)
    {
        AssertEx.Contains(messageDlg.Message, inputFile, "line");
    }
    else
    {
        AssertEx.Contains(messageDlg.Message, messageParts);
    }
    OkDialog(messageDlg, messageDlg.OkDialog);

    // Neither library file should exist after the failed build
    CheckLibraryExistence(redundantBuildPath, false);
    CheckLibraryExistence(nonredundantBuildPath, false);
    WaitForConditionUI(() => !PeptideSettingsUI.IsBuildingLibrary);
}
/// <summary>
/// Builds a BiblioSpecLiteBuilder for the given document. If a document
/// library already exists it is appended to; otherwise any stale library
/// files at the output location are removed and a new library is created.
/// </summary>
/// <param name="doc">Document whose settings decide append vs. create</param>
/// <param name="docFilePath">Path of the document; determines library name and location</param>
/// <param name="includeAmbiguousMatches">Whether ambiguous matches are kept in the build</param>
public BiblioSpecLiteBuilder GetLibBuilder(SrmDocument doc, string docFilePath, bool includeAmbiguousMatches)
{
    string outputPath = BiblioSpecLiteSpec.GetLibraryFileName(docFilePath);

    // Check to see if the library is already there, and if it is,
    // "Append" instead of "Create"
    var libraryBuildAction = LibraryBuildAction.Create;
    if (File.Exists(outputPath))
    {
        if (doc.Settings.HasDocumentLibrary)
        {
            libraryBuildAction = LibraryBuildAction.Append;
        }
        else
        {
            // If the document does not have a document library, then delete the one that we have found
            // CONSIDER: it may be that user is trying to re-import, in which case this file is probably in use
            FileEx.SafeDelete(outputPath);
            FileEx.SafeDelete(Path.ChangeExtension(outputPath, BiblioSpecLiteSpec.EXT_REDUNDANT));
        }
    }

    string name = Path.GetFileNameWithoutExtension(docFilePath);
    return new BiblioSpecLiteBuilder(name, outputPath, SearchFilenames)
    {
        Action = libraryBuildAction,
        KeepRedundant = true,
        CutOffScore = CutoffScore,
        Id = Helpers.MakeId(name),
        IncludeAmbiguousMatches = includeAmbiguousMatches
    };
}
/// <summary>
/// Waits for the chromatogram manager to go quiet, then deletes the partial
/// (.skyd part) cache file corresponding to each given data file.
/// </summary>
private void RemovePartialCacheFiles(string[] files)
{
    WaitForChromatogramManagerQuiet();
    foreach (var dataFile in files)
    {
        var partPath = ChromatogramCache.PartPathForName(SkylineWindow.DocumentFilePath, new MsDataFilePath(dataFile));
        FileEx.SafeDelete(partPath, true);
    }
}
/// <summary>
/// Handles the Create button: prompts for a proteome file location, deletes
/// any existing file the user chose to overwrite, creates a new proteome db
/// there, and refreshes the dialog status.
/// </summary>
private void btnCreate_Click(object sender, EventArgs e)
{
    string filterProtDb = TextUtil.FileDialogFiltersAll(FILTER_PROTDB);
    string fileName;
    using (var saveFileDialog = new SaveFileDialog
    {
        Filter = filterProtDb,
        InitialDirectory = Settings.Default.ProteomeDbDirectory,
        Title = Resources.BuildBackgroundProteomeDlg_btnCreate_Click_Create_Background_Proteome,
        OverwritePrompt = true,
    })
    {
        if (saveFileDialog.ShowDialog(this) == DialogResult.Cancel)
        {
            return;
        }
        fileName = saveFileDialog.FileName;
    }

    // If the file exists, then the user chose to overwrite,
    // so delete the existing file.
    try
    {
        FileEx.SafeDelete(fileName);
    }
    catch (IOException x)
    {
        MessageDlg.ShowException(this, x);
        return; // Cannot create over a file that cannot be deleted
    }

    // Remember the directory for the next time the dialog is shown
    Settings.Default.ProteomeDbDirectory = Path.GetDirectoryName(fileName);

    textPath.Text = fileName;
    if (textName.Text.Length == 0)
    {
        textName.Text = Path.GetFileNameWithoutExtension(fileName);
    }

    try
    {
        ProteomeDb.CreateProteomeDb(fileName);
    }
    catch (Exception x)
    {
        var message = TextUtil.LineSeparate(string.Format(Resources.BuildBackgroundProteomeDlg_btnCreate_Click_An_error_occurred_attempting_to_create_the_proteome_file__0__, fileName), x.Message);
        MessageDlg.ShowWithException(this, message, x);
        // NOTE(review): status is still refreshed after a failed create - confirm intended
    }
    RefreshStatus();
}
/// <summary>
/// Exercises FileEx.SafeDelete: in its default mode it is expected to throw
/// IOException for bad paths and undeletable files; with its second argument
/// true it is expected to swallow those errors.
/// </summary>
public void SafeDeleteTest()
{
    // Test ArgumentException. (argument problems surface as IOException here -
    // NOTE(review): confirm SafeDelete wraps them; implementation not visible)
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(null));
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete("")); // Not L10N
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(" ")); // Not L10N
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete("<path with illegal chars>")); // Not L10N
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(null, true));
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete("", true)); // Not L10N
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(" ", true)); // Not L10N
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete("<path with illegal chars>", true)); // Not L10N

    // Test DirectoryNotFoundException.
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(@"c:\blah-blah-blah\blah.txt")); // Not L10N
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(@"c:\blah-blah-blah\blah.txt", true)); // Not L10N

    // Test PathTooLongException.
    var pathTooLong = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx".Replace("x", "xxxxxxxxxx"); // Not L10N
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(pathTooLong));
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(pathTooLong, true));

    // Test IOException.
    const string busyFile = "TestBusyDelete.txt"; // Not L10N
    using (File.CreateText(busyFile))
    {
        // The file is held open here, so delete must fail
        AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(busyFile));
        AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(busyFile, true));
    }
    // Once the handle is closed, the delete succeeds
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(busyFile));

    // Test UnauthorizedAccessException.
    const string readOnlyFile = "TestReadOnlyFile.txt"; // Not L10N
    // ReSharper disable LocalizableElement
    File.WriteAllText(readOnlyFile, "Testing read only file delete.\n"); // Not L10N
    // ReSharper restore LocalizableElement
    var fileInfo = new FileInfo(readOnlyFile) { IsReadOnly = true };
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(readOnlyFile));
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(readOnlyFile, true));
    fileInfo.IsReadOnly = false;
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(readOnlyFile));

    // Deleting a directory path should also fail unless errors are ignored
    var directory = Environment.CurrentDirectory;
    AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(directory));
    AssertEx.NoExceptionThrown<IOException>(() => FileEx.SafeDelete(directory, true));
}
/// <summary>
/// Downloads the Skyline document referenced by a .skyp file, reporting
/// progress and translating common HTTP failures (401/403) into friendlier
/// guidance. Deletes any partial download on cancel or error.
/// </summary>
/// <exception cref="Exception">Thrown when the download client reports an error</exception>
private void Download(SkypFile skyp, IProgressMonitor progressMonitor, FormEx parentWindow = null)
{
    var progressStatus = new ProgressStatus(string.Format(Resources.SkypSupport_Download_Downloading__0_, skyp.SkylineDocUri));
    progressMonitor.UpdateProgress(progressStatus);

    if (DownloadClient == null)
    {
        DownloadClient = new WebDownloadClient(progressMonitor, progressStatus);
    }

    DownloadClient.Download(skyp.SkylineDocUri, skyp.DownloadPath, skyp.Server?.Username, skyp.Server?.Password);

    // Don't leave a partial file behind on cancel or error
    if (progressMonitor.IsCanceled || DownloadClient.IsError)
    {
        FileEx.SafeDelete(skyp.DownloadPath, true);
    }
    if (DownloadClient.IsError)
    {
        var message = string.Format(
            Resources
                .SkypSupport_Download_There_was_an_error_downloading_the_Skyline_document_specified_in_the_skyp_file___0__,
            skyp.SkylineDocUri);
        if (DownloadClient.Error != null)
        {
            var exceptionMsg = DownloadClient.Error.Message;
            message = TextUtil.LineSeparate(message, exceptionMsg);

            if (exceptionMsg.Contains(ERROR401))
            {
                // Unauthorized - server may need to be registered in Skyline
                message = TextUtil.LineSeparate(message, string.Format(
                    Resources
                        .SkypSupport_Download_You_may_have_to_add__0__as_a_Panorama_server_from_the_Tools___Options_menu_in_Skyline_,
                    skyp.SkylineDocUri.Host));
            }
            else if (exceptionMsg.Contains(ERROR403))
            {
                // Forbidden - authenticated but lacking permission
                message = TextUtil.LineSeparate(message, string.Format(
                    Resources.SkypSupport_Download_You_do_not_have_permissions_to_download_this_file_from__0__,
                    skyp.SkylineDocUri.Host));
            }
        }
        throw new Exception(message, DownloadClient.Error);
    }
}
/// <summary>
/// Verifies Agilent CE optimization by loading the document with a fresh
/// chromatogram cache and round-tripping export/import of the results.
/// </summary>
private void DoTestAgilentCEOpt()
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath(DOCUMENT_NAME);
    string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
    FileEx.SafeDelete(cachePath); // Force results to be re-imported from raw data
    SrmDocument doc = ResultsUtil.DeserializeDocument(docPath);
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the .wiff file
        ExportImport(docContainer, testFilesDir.GetTestPath(RESULTS_NAME));
    }
}
/// <summary>
/// Re-imports the given chromatograms with every heavy modification switched
/// to the specified RelativeRT and asserts the load completes. Deletes the
/// cache first so chromatograms are re-extracted under the new settings.
/// </summary>
private static void ValidateRelativeRT(RelativeRT relativeRT, SrmDocument doc, string docPath, List<ChromatogramSet> listChromatograms)
{
    FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));

    // Apply the RelativeRT to every heavy modification
    SrmSettings settings = doc.Settings.ChangePeptideModifications(mods =>
        mods.ChangeHeavyModifications(mods.HeavyModifications.Select(m => m.ChangeRelativeRT(relativeRT)).ToArray()));
    var docMods = doc.ChangeSettings(settings);
    var docResults = docMods.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
    var docContainer = new ResultsTestDocumentContainer(docMods, docPath);
    Assert.IsTrue(docContainer.SetDocument(docResults, docMods, true));
    docContainer.AssertComplete();
    docContainer.Release();
}
/// <summary>
/// Loads the same Thermo results into a "mixed" document and an "unmixed"
/// document, then verifies that the mixed document with its children replaced
/// by the unmixed peptide groups clones to the same result as the unmixed
/// document loaded directly.
/// </summary>
public void ThermoMixedPeptidesTest()
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath;
    SrmDocument docMixed = InitMixedDocument(testFilesDir, out docPath);
    FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));
    SrmDocument docUnmixed = InitUnmixedDocument(testFilesDir, out docPath);
    FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));

    string extRaw = ExtensionTestContext.ExtThermoRaw;
    var listChromatograms = new List<ChromatogramSet>
    {
        new ChromatogramSet("rep03", new[]
        {
            MsDataFileUri.Parse(testFilesDir.GetTestPath("Site20_STUDY9P_PHASEII_QC_03" + extRaw))
        }),
        new ChromatogramSet("rep05", new[]
        {
            MsDataFileUri.Parse(testFilesDir.GetTestPath("Site20_STUDY9P_PHASEII_QC_05" + extRaw))
        })
    };

    // Load results into the mixed document
    var docResults = docMixed.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
    var docContainerMixed = new ResultsTestDocumentContainer(docMixed, docPath);
    Assert.IsTrue(docContainerMixed.SetDocument(docResults, docMixed, true));
    docContainerMixed.AssertComplete();
    docMixed = docContainerMixed.Document;

    // Replace the mixed document's children with the unmixed peptide groups
    SrmDocument docMixedUnmixed = (SrmDocument)docMixed.ChangeChildren(new DocNode[0]);
    IdentityPath tempPath;
    docMixedUnmixed = docMixedUnmixed.AddPeptideGroups(docUnmixed.PeptideGroups, true, IdentityPath.ROOT, out tempPath, out tempPath);

    // Load the same results into the unmixed document
    docResults = docUnmixed.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
    var docContainerUnmixed = new ResultsTestDocumentContainer(docUnmixed, docPath);
    Assert.IsTrue(docContainerUnmixed.SetDocument(docResults, docUnmixed, true));
    docContainerUnmixed.AssertComplete();
    docUnmixed = docContainerUnmixed.Document;

    // Both routes should yield equivalent documents
    AssertEx.DocumentCloned(docMixedUnmixed, docUnmixed);
    docContainerMixed.Release();
    docContainerUnmixed.Release();
}
/// <summary>
/// Verifies ratio calculation under a symmetric isolation window, which is
/// expected to produce a poor ratio (far from the ideal 1.0) for this data.
/// </summary>
/// <param name="asSmallMolecules">Conversion mode; non-none variants are
/// skipped unless small-molecule test versions are enabled</param>
public void DoAsymmetricIsolationTest(RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules)
{
    if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none && !RunSmallMoleculeTestVersions)
    {
        Console.Write(MSG_SKIPPING_SMALLMOLECULE_TEST_VERSION);
        return;
    }
    TestSmallMolecules = false; // We test small molecules explicitly in this test
    LocalizationHelper.InitThread(); // TODO: All unit tests should be correctly initialized

    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath("TROUBLED_File.sky");
    string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
    FileEx.SafeDelete(cachePath); // Force re-import of results
    SrmDocument doc = ResultsUtil.DeserializeDocument(docPath);
    var refine = new RefinementSettings();
    doc = refine.ConvertToSmallMolecules(doc, testFilesDir.FullPath, asSmallMolecules);
    const int expectedMoleculeCount = 1; // At first small molecules did not support multiple label types
    AssertEx.IsDocumentState(doc, null, 1, expectedMoleculeCount, 2, 6);
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the first RAW file (or mzML for international)
        string rawPath = testFilesDir.GetTestPath("Rush_p3_96_21May16_Smeagol.mzML");
        var measuredResults = new MeasuredResults(new[] { new ChromatogramSet("Single", new[] { rawPath }) });
        {
            // Import with symmetric isolation window
            var docResults = docContainer.ChangeMeasuredResults(measuredResults, expectedMoleculeCount, 1, 1, 3, 3);
            var nodeGroup = docResults.MoleculeTransitionGroups.First();
            double ratio = nodeGroup.Results[0][0].Ratio ?? 0;
            // The expected ratio is 1.0, but the symmetric isolation window should produce poor results
            if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.masses_only) // Can't use labels without a formula
            {
                Assert.AreEqual(0.008, ratio, 0.001);
            }
        }
    }
    testFilesDir.Dispose();
}
/// <summary>
/// Verifies Agilent CE optimization by loading the document with a fresh
/// chromatogram cache and round-tripping export/import of the results.
/// </summary>
private void DoTestAgilentCEOpt()
{
    // The special mode for exercising non-proteomic molecules just doesn't make sense with this test
    TestSmallMolecules = false;

    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath(DOCUMENT_NAME);
    string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
    FileEx.SafeDelete(cachePath); // Force results to be re-imported from raw data
    SrmDocument doc = ResultsUtil.DeserializeDocument(docPath);
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the .wiff file
        ExportImport(docContainer, testFilesDir.GetTestPath(RESULTS_NAME));
    }
}
/// <summary>
/// Exports a Thermo Quantiva transition list with compensation voltages,
/// clicking through the two expected warning dialogs, then compares the
/// exported .csv against the expected file line by line.
/// </summary>
private void TestExportTransitionList()
{
    var filePathActual = GetTestPath("actual.csv");
    FileEx.SafeDelete(filePathActual); // Start with no output file present
    var exportDialog = ShowDialog<ExportMethodDlg>(() => SkylineWindow.ShowExportMethodDialog(ExportFileType.List));
    // Export CE optimization transition list
    RunUI(() =>
    {
        exportDialog.InstrumentType = ExportInstrumentType.THERMO_QUANTIVA;
        exportDialog.ExportStrategy = ExportStrategy.Single;
        exportDialog.MethodType = ExportMethodType.Standard;
        exportDialog.OptimizeType = ExportOptimize.NONE;
        exportDialog.WriteCompensationVoltages = true;
    });
    MultiButtonMsgDlg errDlg1 = null;
    RunDlg<MultiButtonMsgDlg>(() => exportDialog.OkDialog(filePathActual),
        // Expect The_settings_for_this_document_do_not_match_the_instrument_type...
        errDlg =>
        {
            errDlg1 = errDlg;
            RunUI(errDlg.ClickNo);
        });
    // Expect You_are_missing_compensation_voltages_for_the_following...
    var errDlg2 = FindOpenForms<MultiButtonMsgDlg>().FirstOrDefault(f => f != errDlg1);
    // ReSharper disable once PossibleNullReferenceException
    RunUI(errDlg2.ClickOk);
    WaitForCondition(() => File.Exists(filePathActual));
    var actual = File.ReadAllLines(filePathActual);
    var expected = File.ReadAllLines(GetTestPath("expected.csv"));
    // Compare line by line first for a more specific failure message
    for (var i = 0; i < Math.Min(expected.Length, actual.Length); i++)
    {
        AssertEx.AreEqual(expected[i], actual[i], $@"transitions differ at line {i}");
    }
    AssertEx.AreEqual(expected.Length, actual.Length, @"different transition count");
}
/// <summary>
/// Deletes any file at <paramref name="path"/> and creates a fresh iRT
/// database there. A DatabaseOpeningException propagates unchanged; any other
/// failure is wrapped in an IOException carrying a user-facing message.
/// </summary>
/// <exception cref="IOException">Thrown when creation fails for a reason
/// other than a database-opening error</exception>
public static void CreateIrtDatabase(string path)
{
    FileEx.SafeDelete(path);

    //Create file, initialize db
    try
    {
        IrtDb.CreateIrtDb(path);
    }
    catch (Exception x) when (!(x is DatabaseOpeningException))
    {
        var message = TextUtil.LineSeparate(string.Format(Resources.EditIrtCalcDlg_CreateDatabase_The_file__0__could_not_be_created, path), x.Message);
        throw new IOException(message, x);
    }
}
/// <summary>
/// Given a list of packages it determines which need to be installed and which are already installed.
/// </summary>
/// <param name="packages">Collection of package names to check for</param>
/// <param name="pathToR">Path to R</param>
/// <returns>Collection of packages that need to be installed</returns>
public static ICollection<ToolPackage> WhichPackagesToInstall(ICollection<ToolPackage> packages, string pathToR)
{
    List<ToolPackage> packagesToInstall = new List<ToolPackage>();
    string pathToScript = WriteCheckForPackagesFile(packages);
    try
    {
        string response = RunRscript(pathToR, pathToScript);
        string[] lines = response.Split('\n');
        foreach (var line in lines.Where(l => !string.IsNullOrEmpty(l)))
        {
            // Each line is expected as "<name>-<TRUE|FALSE>"; FALSE means not installed
            // NOTE(review): a '-' inside a package name would break this split - confirm names
            string[] split = line.Split('-');
            if (split.Length > 1 && split[1].Contains(@"FALSE"))
            {
                string packageName = split[0].Trim();
                var toInstall = packages.First(packageContainer => Equals(packageContainer.Name, packageName));
                packagesToInstall.Add(toInstall);
            }
        }
    }
    finally
    {
        // Always clean up the temporary check script, even when Rscript fails
        FileEx.SafeDelete(pathToScript);
    }
    return packagesToInstall;
}
/// <summary>
/// Tests demultiplexing for both the MSX and overlap isolation schemes,
/// optionally converting each document to small molecules first. Verifies
/// MS/MS full-scan is enabled in both documents before running the checks.
/// </summary>
public void DoTestDemux(bool asSmallMolecules)
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

    // MSX document
    string docPathMsx = testFilesDir.GetTestPath("MsxTest.sky");
    string dataPathMsx = testFilesDir.GetTestPath("MsxTest.mzML");
    string cachePathMsx = ChromatogramCache.FinalPathForName(docPathMsx, null);
    FileEx.SafeDelete(cachePathMsx); // Force re-import from the mzML
    SrmDocument docMsx = ResultsUtil.DeserializeDocument(docPathMsx);
    if (asSmallMolecules)
    {
        var refine = new RefinementSettings();
        docMsx = refine.ConvertToSmallMolecules(docMsx);
    }
    var fullScanInitialMsx = docMsx.Settings.TransitionSettings.FullScan;
    Assert.IsTrue(fullScanInitialMsx.IsEnabledMsMs);
    TestMsx(docMsx, dataPathMsx);

    // Overlap document
    string docPathOverlap = testFilesDir.GetTestPath("OverlapTest.sky");
    string dataPathOverlap = testFilesDir.GetTestPath("OverlapTest.mzML");
    string cachePathOverlap = ChromatogramCache.FinalPathForName(docPathOverlap, null);
    FileEx.SafeDelete(cachePathOverlap);
    SrmDocument docOverlap = ResultsUtil.DeserializeDocument(docPathOverlap);
    if (asSmallMolecules)
    {
        var refine = new RefinementSettings();
        docOverlap = refine.ConvertToSmallMolecules(docOverlap);
    }
    // Fixed: previously read docMsx settings here (copy-paste error), so the
    // overlap document's full-scan settings were never actually checked
    var fullScanInitialOverlap = docOverlap.Settings.TransitionSettings.FullScan;
    Assert.IsTrue(fullScanInitialOverlap.IsEnabledMsMs);
    TestOverlap(docOverlap, dataPathOverlap);
}
/// <summary>
/// Imports SIM mzML results into the test document and verifies the extracted
/// m/z ranges, optionally converting the document to small molecules first.
/// </summary>
private void DoTestImportSim(bool asSmallMolecules)
{
    TestSmallMolecules = false; // Don't need that magic extra node
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath(DOCUMENT_NAME);
    string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
    FileEx.SafeDelete(cachePath); // Force results to be re-imported fresh
    SrmDocument doc = ResultsUtil.DeserializeDocument(docPath);
    var pepdoc = doc;
    if (asSmallMolecules)
    {
        var refine = new RefinementSettings();
        doc = refine.ConvertToSmallMolecules(pepdoc);
    }
    // Fixed: the container was previously never disposed, leaving cache file
    // handles open after the test; using ensures they are released
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the mzML file and verify Mz range
        Import(docContainer, testFilesDir.GetTestPath(RESULTS_NAME), 510, 512);
        Import(docContainer, testFilesDir.GetTestPath(RESULTS_NAME2), 555, 557);
    }
}
/// <summary>
/// Tests chromatogram cache management for Waters data: migration from
/// per-replicate/per-file caches to a single document cache, cache growth on
/// new files, no-op behavior for repeated files, and shrinking on optimize.
/// </summary>
public void WatersCacheTest()
{
    // First test transition from per-replicate caching strategy to
    // single cache per document strategy.
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

    // Open the replicate document, and let it reload the data from mzML
    // showing the document can find data files by name in its own directory,
    // since the document paths will not match those on disk.
    string docPath;
    var doc = InitWatersDocument(testFilesDir, out docPath);
    var docReload = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_rep.sky", out docPath);
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        var streamManager = docContainer.ChromatogramManager.StreamManager;
        Assert.IsTrue(docContainer.SetDocument(docReload, doc, true));
        docContainer.AssertComplete();
        docReload = docContainer.Document;
        // Release file handles to cache files created during load
        Assert.IsTrue(docContainer.SetDocument(doc, docReload));
        // Delete the cache
        string cachePath = Path.ChangeExtension(docPath, ".skyd");
        FileEx.SafeDelete(cachePath);

        // Then try using cached replicate files
        // Move per-replicate cache files into place
        var replicateCacheNames = new[]
        {
            "160109_Mix1_calcurve_rep_calcurve_070.skyd",
            "160109_Mix1_calcurve_rep_calcurve_073.skyd"
        };
        GetCacheFiles(testFilesDir, replicateCacheNames);
        // Delete the files these cache
        DeleteFiles(testFilesDir, new[]
        {
            "160109_Mix1_calcurve_070.mzML",
            "160109_Mix1_calcurve_073.mzML",
        });
        var docCached = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_rep.sky", out docPath);
        Assert.IsTrue(docContainer.SetDocument(docCached, doc, true));
        docContainer.AssertComplete();
        docCached = docContainer.Document;

        // The document with data from the .mzML files should be the same as
        // the one loaded from the .skyd files.
        // Unfortunately, this is to hard to maintain when cache changes are made.
        // AssertEx.Cloned(docCached, docReload);

        // The one cache should be present
        Assert.IsTrue(File.Exists(cachePath));
        // And the replicate cache files should have been removed
        foreach (var cacheName in replicateCacheNames)
        {
            var path = testFilesDir.GetTestPath(cacheName);
            Assert.IsFalse(File.Exists(path));
        }

        // Save the cache file time stamp
        // ReSharper disable once AssignNullToNotNullAttribute
        var cacheInfo = new FileInfo(cachePath);
        long cacheSize = cacheInfo.Length;

        // Adding files already in the document should have no impact on the cache.
        string extRaw = ExtensionTestContext.ExtWatersRaw;
        var listChromatograms = new List<ChromatogramSet>(docCached.Settings.MeasuredResults.Chromatograms)
        {
            new ChromatogramSet("extra1", new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw)) }),
            new ChromatogramSet("extra2", new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")) })
        };

        // Adding a new file should cause the cache to grow.
        var settings = docCached.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
        var docGrow = docCached.ChangeMeasuredResults(settings);
        Assert.IsTrue(docContainer.SetDocument(docGrow, docCached, true));
        docContainer.AssertComplete();
        docGrow = docContainer.Document;

        cacheInfo = new FileInfo(cachePath);
        Assert.IsTrue(cacheSize < cacheInfo.Length);

        cacheSize = cacheInfo.Length;
        var writeTime = cacheInfo.LastWriteTime;

        // Re-adding the same two files in a new replicate should not touch the cache
        listChromatograms.Add(
            new ChromatogramSet("double", new[]
            {
                testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw),
                testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")
            }));
        settings = docGrow.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
        var docNoCacheChange1 = docGrow.ChangeMeasuredResults(settings);
        Assert.IsTrue(docContainer.SetDocument(docNoCacheChange1, docGrow, true));
        docContainer.AssertComplete();
        docNoCacheChange1 = docContainer.Document;

        Assert.AreEqual(writeTime, File.GetLastWriteTime(cachePath));

        // Removing files should have no impact, until optimized
        listChromatograms.RemoveRange(listChromatograms.Count - 2, 2);
        listChromatograms.RemoveAt(1);
        settings = docNoCacheChange1.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
        var docNoCacheChange2 = docNoCacheChange1.ChangeMeasuredResults(settings);
        Assert.IsTrue(docContainer.SetDocument(docNoCacheChange2, docNoCacheChange1, true));
        docContainer.AssertComplete();
        docNoCacheChange2 = docContainer.Document;

        Assert.AreEqual(writeTime, File.GetLastWriteTime(cachePath));

        // Optimizing should shrink the cache
        var results = docNoCacheChange2.Settings.MeasuredResults.OptimizeCache(docPath, streamManager);
        var docOptimized = docNoCacheChange2.ChangeSettings(docNoCacheChange2.Settings.ChangeMeasuredResults(results));
        // This should not cause a reload
        Assert.IsTrue(docContainer.SetDocument(docOptimized, docNoCacheChange2, false));

        cacheInfo = new FileInfo(cachePath);
        Assert.IsTrue(cacheSize > cacheInfo.Length);

        // Test file caches
        // First reload the files from .mzML
        docReload = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_file.sky", out docPath);
        // Change the path to use the right .skyd file
        docContainer.DocumentFilePath = docPath;
        Assert.IsTrue(docContainer.SetDocument(docReload, docOptimized, true));
        docContainer.AssertComplete();
        docReload = docContainer.Document;
        // Release file handles to cache files created during load
        Assert.IsTrue(docContainer.SetDocument(doc, docReload));
        // Delete the cache
        cachePath = Path.ChangeExtension(docPath, ".skyd");
        FileEx.SafeDelete(cachePath);

        // Then try using cached files
        // Move per-file cache files into place
        var fileCacheNames = new[]
        {
            "160109_Mix1_calcurve_075.mzML.skyd",
            "160109_Mix1_calcurve_078.mzML.skyd"
        };
        GetCacheFiles(testFilesDir, fileCacheNames);
        // Swap the mzML files, so the test will fail, if not reading from the cache
        // CONSIDER: Should this really work, since they have different time stamps?
        string file075 = testFilesDir.GetTestPath("160109_Mix1_calcurve_075.mzML");
        string file078 = testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML");
        string fileTemp = file075 + ".tmp";
        File.Move(file075, fileTemp);
        File.Move(file078, file075);
        File.Move(fileTemp, file078);
        docCached = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_file.sky", out docPath);
        // Make sure cache files exactly match the names the loader will look for
        var listResultsFiles = new List<MsDataFileUri>();
        foreach (var chromatogram in docCached.Settings.MeasuredResults.Chromatograms)
        {
            listResultsFiles.AddRange(chromatogram.MSDataFilePaths);
        }
        for (int i = 0; i < fileCacheNames.Length; i++)
        {
            string partPath = ChromatogramCache.PartPathForName(docPath, listResultsFiles[i]);
            File.Move(testFilesDir.GetTestPath(fileCacheNames[i]), partPath);
        }

        Assert.IsTrue(docContainer.SetDocument(docCached, doc, true));
        docContainer.AssertComplete();
        // docCached = docContainer.Document;

        // The document with data from the .mzML files should be the same as
        // the one loaded from the .skyd files.
        // Unfortunately, this is to hard to maintain when cache changes are made.
        // AssertEx.Cloned(docCached, docReload);

        // The one cache should be present
        Assert.IsTrue(File.Exists(Path.ChangeExtension(docPath, ".skyd")));
        // And the replicate cache files should have been removed
        foreach (var cacheName in fileCacheNames)
        {
            Assert.IsFalse(File.Exists(testFilesDir.GetTestPath(cacheName)));
        }
    }
    testFilesDir.Dispose();
}
/// <summary>
/// Starts a Thermo results import, then cancels it by reverting to the
/// original document, and verifies that the partially written cache file is
/// cleaned up afterward.
/// </summary>
public void ThermoCancelImportTest()
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath;
    SrmDocument doc = InitThermoDocument(testFilesDir, out docPath);
    var docContainer = new ResultsTestDocumentContainer(doc, docPath);
    string resultsPath = testFilesDir.GetTestPath("Site20_STUDY9P_PHASEII_QC_03" + ExtensionTestContext.ExtThermoRaw);
    string dirPath = Path.GetDirectoryName(resultsPath) ?? "";

    // Remove any existing temp and cache files
    foreach (var path in Directory.GetFiles(dirPath))
    {
        if (IsCacheOrTempFile(path))
        {
            FileEx.SafeDelete(path);
        }
    }

    string name = Path.GetFileNameWithoutExtension(resultsPath);
    var listChromatograms = new List<ChromatogramSet>
    {
        new ChromatogramSet(name, new[] { MsDataFileUri.Parse(resultsPath) })
    };
    var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));

    // Start cache load, but don't wait for completion
    Assert.IsTrue(docContainer.SetDocument(docResults, doc));

    // Wait up to 1 second for the cache to start being written
    for (int i = 0; i < 100; i++)
    {
        if (Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) != -1)
        {
            break;
        }
        Thread.Sleep(10);
    }
    Assert.IsTrue(Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) != -1, "Failed to create cache file");

    // Cancel by reverting to the original document
    Assert.IsTrue(docContainer.SetDocument(doc, docResults));

    // Wait up to 5 seconds for cancel to occur
    for (int i = 0; i < 50; i++)
    {
        if (docContainer.LastProgress.IsCanceled)
        {
            break;
        }
        Thread.Sleep(100);
    }
    if (!docContainer.LastProgress.IsCanceled)
    {
        Assert.Fail("Attempt to cancel results load failed. {0}",
            docContainer.LastProgress.ErrorException != null ? docContainer.LastProgress.ErrorException.Message : string.Empty);
    }

    // Wait up to 20 seconds for the cache to be removed
    for (int i = 0; i < 200; i++)
    {
        if (Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) == -1)
        {
            break;
        }
        Thread.Sleep(100);
    }
    // Cache file has been removed
    Assert.IsTrue(Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) == -1, "Failed to remove cache file");
    testFilesDir.Dispose();
}
/// <summary>
/// Imports four Waters calibration-curve replicates one at a time, verifying
/// after each import that previously loaded peaks are unchanged, then
/// round-trips the document through XML and reloads the results from the
/// final .skyd cache file.
/// </summary>
public void WatersMultiReplicateTest()
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath;
    SrmDocument docOriginal = InitWatersDocument(testFilesDir, out docPath);
    SrmDocument doc = docOriginal;
    var listCachePaths = new List<string>();
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        string extRaw = ExtensionTestContext.ExtWatersRaw;
        // Three mzML files plus one native Waters raw file (extension varies by platform).
        string[] replicatePaths =
        {
            testFilesDir.GetTestPath("160109_Mix1_calcurve_070.mzML"),
            testFilesDir.GetTestPath("160109_Mix1_calcurve_073.mzML"),
            testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw),
            testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")
        };
        // Count peaks where higher concentration replicates show less area
        int outOfOrder = 0;
        foreach (string path in replicatePaths)
        {
            // Append this replicate to any chromatogram sets already imported.
            var listChromatograms = new List<ChromatogramSet>();
            if (doc.Settings.HasResults)
            {
                listChromatograms.AddRange(doc.Settings.MeasuredResults.Chromatograms);
            }
            string name = Path.GetFileNameWithoutExtension(path);
            if (name != null)
            {
                // Use the last 12 characters of the file name as the replicate name.
                name = name.Substring(name.Length - 12);
            }
            listChromatograms.Add(new ChromatogramSet(name, new[] { MsDataFileUri.Parse(path) }));
            int len = listChromatograms.Count;
            var docResults = doc.ChangeMeasuredResults(doc.Settings.MeasuredResults != null
                ? doc.Settings.MeasuredResults.ChangeChromatograms(listChromatograms)
                : new MeasuredResults(listChromatograms));
            // Adding unloaded results should add a new null result.
            foreach (var nodeTran in docResults.PeptideTransitions)
            {
                Assert.IsTrue(nodeTran.HasResults);
                Assert.AreEqual(listChromatograms.Count, nodeTran.Results.Count);
                Assert.IsTrue(nodeTran.Results[len - 1].IsEmpty);
            }
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true),
                string.Format("Failed adding results for {0}.", path));
            docContainer.AssertComplete();
            docResults = docContainer.Document;
            Assert.IsTrue(docResults.Settings.MeasuredResults.IsLoaded);
            var transOld = doc.PeptideTransitions.ToArray();
            var transNew = docResults.PeptideTransitions.ToArray();
            Assert.AreEqual(transOld.Length, transNew.Length);
            int countPeaks = 0;
            for (int i = 0; i < transNew.Length; i++)
            {
                // Make sure new peak was added to each transition
                var nodeTranNew = transNew[i];
                Assert.IsTrue(nodeTranNew.HasResults);
                Assert.AreEqual(len, nodeTranNew.Results.Count);
                var chromInfo = nodeTranNew.Results[len - 1][0];
                Assert.IsNotNull(chromInfo);
                if (!chromInfo.IsEmpty)
                {
                    countPeaks++;
                }
                // Make sure previously loaded peaks did not change
                for (int j = 0; j < len - 1; j++)
                {
                    var chromInfoPrevious = transOld[i].Results[j][0];
                    Assert.AreSame(chromInfoPrevious, nodeTranNew.Results[j][0]);
                    // Track peaks whose area does not increase with concentration.
                    if ((chromInfo.IsEmpty && !chromInfoPrevious.IsEmpty) ||
                        (!chromInfo.IsEmpty && chromInfoPrevious.Area >= chromInfo.Area))
                    {
                        outOfOrder++;
                    }
                }
            }
            // Allow 2 missed peaks
            Assert.IsTrue(countPeaks >= transNew.Length - (TestSmallMolecules ? 1 : 0) - 2);
            // Check results calculations for peptides and groups
            foreach (var nodePep in docResults.Peptides)
            {
                Assert.AreEqual(len, nodePep.Results.Count);
                Assert.IsTrue(nodePep.HasResults);
                var chromInfo = nodePep.Results[len - 1][0];
                Assert.AreEqual(1, nodePep.Children.Count);
                var nodeGroup = (TransitionGroupDocNode)nodePep.Children[0];
                Assert.IsTrue(nodeGroup.HasResults);
                Assert.AreEqual(len, nodeGroup.Results.Count);
                var chromInfoGroup = nodeGroup.Results[len - 1][0];
                Assert.IsTrue(chromInfoGroup.PeakCountRatio >= 0.5);
                Assert.IsTrue(chromInfoGroup.RetentionTime.HasValue);
                Assert.IsTrue(chromInfoGroup.Area.HasValue && chromInfoGroup.Area > 290);
                // Peptide-level values should mirror its single precursor group.
                Assert.AreEqual(chromInfo.RetentionTime, chromInfoGroup.RetentionTime);
                Assert.AreEqual(chromInfo.PeakCountRatio, chromInfoGroup.PeakCountRatio);
            }
            doc = docResults;
        }
        Assert.AreEqual(13, outOfOrder, 1);
        // Remove the original data
        foreach (string path in replicatePaths)
        {
            if (File.Exists(path))
            {
                FileEx.SafeDelete(path);
            }
            else
            {
                DirectoryEx.SafeDelete(path);
            }
        }
        FileEx.SafeDelete(docPath);
        // Save the document
        string xmlSaved = null;
        var docPersisted = AssertEx.RoundTrip(doc, ref xmlSaved);
        Assert.IsTrue(!docPersisted.Settings.MeasuredResults.IsLoaded);
        // Make sure the persisted document round-trips.
        // The original doesn't because of changing precision in the results info.
        AssertEx.Serializable(docPersisted, AssertEx.DocumentCloned);
        // Make sure the loaded document has reasonable results info
        // before the cache files are loaded
        for (int i = 0; i < doc.Children.Count; i++)
        {
            PeptideGroupDocNode nodePepGroup1 = (PeptideGroupDocNode)doc.Children[i];
            if (TestSmallMolecules &&
                nodePepGroup1.Name.Equals(SrmDocument.TestingNonProteomicMoleculeGroupName))
            {
                continue;
            }
            PeptideGroupDocNode nodePepGroup2 = (PeptideGroupDocNode)docPersisted.Children[i];
            Assert.AreNotSame(nodePepGroup1, nodePepGroup2);
            for (int j = 0; j < nodePepGroup1.Children.Count; j++)
            {
                PeptideDocNode nodePep1 = (PeptideDocNode)nodePepGroup1.Children[j];
                PeptideDocNode nodePep2 = (PeptideDocNode)nodePepGroup2.Children[j];
                Assert.AreNotSame(nodePep1, nodePep2);
                Assert.AreEqual(nodePep1.Results.Count, nodePep2.Results.Count);
                for (int k = 0; k < nodePep1.Results.Count; k++)
                {
                    Assert.AreEqual(nodePep1.Results[k][0].PeakCountRatio,
                        nodePep2.Results[k][0].PeakCountRatio);
                }
                for (int k = 0; k < nodePep1.Children.Count; k++)
                {
                    TransitionGroupDocNode nodeGroup1 = (TransitionGroupDocNode)nodePep1.Children[k];
                    TransitionGroupDocNode nodeGroup2 = (TransitionGroupDocNode)nodePep2.Children[k];
                    Assert.AreNotSame(nodeGroup1, nodeGroup2);
                    Assert.AreEqual(nodeGroup1.Results.Count, nodeGroup2.Results.Count);
                    for (int l = 0; l < nodeGroup1.Results.Count; l++)
                    {
                        Assert.AreEqual(nodeGroup1.Results[l][0].PeakCountRatio,
                            nodeGroup2.Results[l][0].PeakCountRatio);
                    }
                    for (int l = 0; l < nodeGroup1.Children.Count; l++)
                    {
                        TransitionDocNode nodeTran1 = (TransitionDocNode)nodeGroup1.Children[l];
                        TransitionDocNode nodeTran2 = (TransitionDocNode)nodeGroup2.Children[l];
                        Assert.AreNotSame(nodeTran1, nodeTran2);
                        Assert.AreEqual(nodeTran1.Results.Count, nodeTran2.Results.Count);
                        for (int m = 0; m < nodeTran1.Results.Count; m++)
                        {
                            if (!nodeTran1.Results[m].IsEmpty && !nodeTran2.Results[m].IsEmpty)
                            {
                                Assert.AreEqual(nodeTran1.Results[m][0].IsEmpty,
                                    nodeTran2.Results[m][0].IsEmpty);
                            }
                            else
                            {
                                Assert.AreEqual(nodeTran1.Results[m], nodeTran2.Results[m]); // both null
                            }
                        }
                    }
                }
            }
        }
        // Reload data from .skyd files
        Assert.IsTrue(docContainer.SetDocument(docPersisted, doc, true));
        docContainer.AssertComplete();
        doc = docContainer.Document;
        var results = doc.Settings.MeasuredResults;
        const float tolerance = (float)TransitionInstrument.DEFAULT_MZ_MATCH_TOLERANCE;
        foreach (var pair in doc.PeptidePrecursorPairs)
        {
            foreach (var chromSet in results.Chromatograms)
            {
                ChromatogramGroupInfo[] chromGroupInfo;
                Assert.IsTrue(results.TryLoadChromatogram(chromSet, pair.NodePep, pair.NodeGroup,
                    tolerance, true, out chromGroupInfo));
            }
        }
        // The single final cache path should be open now
        listCachePaths.AddRange(doc.Settings.MeasuredResults.CachePaths);
        // Should only have one cache file at this point
        Assert.AreEqual(1, listCachePaths.Count);
        foreach (var cachePath in listCachePaths)
        {
            // Attempting to delete should throw
            string path = cachePath;
            AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(path));
        }
    }
    foreach (var cachePath in listCachePaths)
    {
        // Cache files should be closed now, and delete successfully.
        FileEx.SafeDelete(cachePath);
    }
    testFilesDir.Dispose();
}
/// <summary>
/// Hardened version of the cancel-import test: starts a Thermo results import,
/// cancels it by reverting to the original document, and verifies the partial
/// cache file is removed. Retries once because the import can occasionally
/// finish before the cancel takes effect.
/// </summary>
public void ThermoCancelImportTest()
{
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string resultsPath = testFilesDir.GetTestPath("Site20_STUDY9P_PHASEII_QC_03" +
        ExtensionTestContext.ExtThermoRaw);
    string dirPath = Path.GetDirectoryName(resultsPath) ?? "";
    string docPath;
    SrmDocument doc = InitThermoDocument(testFilesDir, out docPath);
    // Give this two chances to succeed. It can succeed tens of thousands of times
    // in a row, but it still occasionally fails on nightly tests. Hopefully two
    // tries will make this extremely unlikely.
    for (int tries = 0; tries < 2; tries++)
    {
        using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
        {
            // Remove any existing temp and cache files
            foreach (var path in Directory.GetFiles(dirPath))
            {
                if (IsCacheOrTempFile(path))
                {
                    FileEx.SafeDelete(path);
                }
            }
            string name = Path.GetFileNameWithoutExtension(resultsPath);
            var listChromatograms = new List<ChromatogramSet>
            {
                new ChromatogramSet(name, new[] { MsDataFileUri.Parse(resultsPath) })
            };
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            // Start cache load, but don't wait for completion
            Assert.IsTrue(docContainer.SetDocument(docResults, doc));
            // Wait up to 10 second for the cache to start being written
            bool cacheFound = false;
            for (int i = 0; i < 1000; i++)
            {
                if (Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) != -1)
                {
                    cacheFound = true;
                    break;
                }
                Thread.Sleep(10);
            }
            if (!cacheFound)
            {
                Assert.Fail(TextUtil.LineSeparate("Failed to create cache file. Found files:",
                    TextUtil.LineSeparate(Directory.GetFiles(dirPath))));
            }
            // Cancel by reverting to the original document
            Assert.IsTrue(docContainer.SetDocument(doc, docResults));
            // Wait up to 10 seconds for cancel to occur
            bool cancelOccurred = false;
            for (int i = 0; i < 1000; i++)
            {
                // LastProgress can be null before any progress is reported.
                if (docContainer.LastProgress != null && docContainer.LastProgress.IsCanceled)
                {
                    cancelOccurred = true;
                    break;
                }
                Thread.Sleep(10);
            }
            // Wait up to 20 seconds for the cache to be removed
            bool cacheRemoved = false;
            for (int i = 0; i < 200; i++)
            {
                if (Directory.GetFiles(dirPath).IndexOf(IsCacheOrTempFile) == -1)
                {
                    cacheRemoved = true;
                    break;
                }
                Thread.Sleep(100);
            }
            if (!cacheRemoved)
            {
                if (tries == 0 && File.Exists(Path.ChangeExtension(docPath, ChromatogramCache.EXT)))
                {
                    // Allow a single failure where we end up with the final cache instead of a cancelation
                    // NOTE(review): this deletes the document path, not the cache path built on the
                    // line above — confirm this is intentional before changing.
                    FileEx.SafeDelete(docPath);
                    continue; // Try again
                }
                if (!cancelOccurred)
                {
                    Assert.Fail("Attempt to cancel results load failed on try {0}. {1}", tries + 1,
                        docContainer.LastProgress != null && docContainer.LastProgress.ErrorException != null
                            ? docContainer.LastProgress.ErrorException.Message
                            : string.Empty);
                }
                Assert.Fail(TextUtil.LineSeparate("Failed to remove cache file. Found files:",
                    TextUtil.LineSeparate(Directory.GetFiles(dirPath))));
            }
            break; // If we make it here then, successful
        }
    }
    // Cache file has been removed
    testFilesDir.Dispose();
}
/// <summary>
/// Builds a BiblioSpec spectral library: converts any ProteinPilot inputs,
/// builds a redundant library with BlibBuild, then filters it down to the
/// non-redundant output at <c>OutputPath</c>.
/// </summary>
/// <param name="progress">Monitor used for status updates, cancellation and error reporting.</param>
/// <returns>True on success; false on failure or cancellation (the error is
/// reported through <paramref name="progress"/>, never thrown).</returns>
public bool BuildLibrary(IProgressMonitor progress)
{
    _ambiguousMatches = null;
    IProgressStatus status = new ProgressStatus(Resources.BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library);
    progress.UpdateProgress(status);
    // ProteinPilot files must be converted before BlibBuild can consume them.
    if (InputFiles.Any(f => f.EndsWith(EXT_PILOT)))
    {
        try
        {
            InputFiles = VendorIssueHelper.ConvertPilotFiles(InputFiles, progress, status);
            if (progress.IsCanceled)
                return false;
        }
        catch (Exception x)
        {
            progress.UpdateProgress(status.ChangeErrorException(x));
            return false;
        }
    }

    string message = string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Building__0__library,
        Path.GetFileName(OutputPath));
    progress.UpdateProgress(status = status.ChangeMessage(message));

    // Build the redundant (all matches) library first; it is filtered to the
    // non-redundant output below.
    string redundantLibrary = BiblioSpecLiteSpec.GetRedundantName(OutputPath);
    var blibBuilder = new BlibBuild(redundantLibrary, InputFiles, TargetSequences)
    {
        IncludeAmbiguousMatches = IncludeAmbiguousMatches,
        CutOffScore = CutOffScore,
        Id = Id,
    };
    try
    {
        if (!blibBuilder.BuildLibrary(Action, progress, ref status, out _ambiguousMatches))
            return false;
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch (Exception x)
    {
        // Keep the root cause as InnerException rather than discarding it.
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__0__,
                redundantLibrary), x)));
        return false;
    }

    var blibFilter = new BlibFilter();
    status = new ProgressStatus(message);
    progress.UpdateProgress(status);
    // Write the non-redundant library to a temporary file first
    try
    {
        using (var saver = new FileSaver(OutputPath))
        {
            if (!blibFilter.Filter(redundantLibrary, saver.SafeName, progress, ref status))
                return false;
            saver.Commit();
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch (Exception x)
    {
        // Was a bare catch that lost the failure entirely; preserve it as InnerException.
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__,
                OutputPath), x)));
        return false;
    }
    finally
    {
        // The redundant library is an intermediate artifact unless the caller
        // asked to keep it.
        if (!KeepRedundant)
            FileEx.SafeDelete(redundantLibrary, true);
    }
    return true;
}
/// <summary>
/// Builds a spectral library from Prosit predictions: optionally predicts and
/// aligns iRT standard peptides, predicts fragment intensities and iRTs for
/// the document peptides, then writes the merged spectra to a .blib file.
/// </summary>
/// <param name="progress">Monitor passed through to the Prosit batch prediction calls.</param>
/// <returns>True on success (including the no-spectra case); false if the user
/// declines to replace an existing library or the .blib creation fails.</returns>
public bool BuildLibrary(IProgressMonitor progress)
{
    RetentionTimeRegression regr = null;
    var standardSpectra = new List<SpectrumMzInfo>();
    if (IrtStandard != null && !ReferenceEquals(IrtStandard, IrtStandard.EMPTY))
    {
        // Align Prosit iRTs with iRT standard
        var standardPeptidesToAdd = SkylineWindow.ReadStandardPeptides(IrtStandard);
        if (standardPeptidesToAdd != null && standardPeptidesToAdd.Count > 0)
        {
            // Get iRTs
            var standardIRTMap = _rtModel.Predict(_prositClient, _document.Settings,
                standardPeptidesToAdd.Select(p => (PrositRetentionTimeModel.PeptideDocNodeWrapper)p.NodePep).ToArray(),
                CancellationToken.None);
            // Map predicted values by modified target so they can be regressed
            // against the standard's known iRT values.
            var original = standardIRTMap.ToDictionary(p => p.Key.ModifiedTarget, p => p.Value);
            var target = IrtStandard.Peptides.ToDictionary(p => p.ModifiedTarget, p => p.Irt);
            var aligned = AlignedRetentionTimes.AlignLibraryRetentionTimes(target, original, 0.0,
                RegressionMethodRT.linear, CancellationToken.None);
            regr = aligned.Regression;
            // Get spectra
            var standardMS = _intensityModel.PredictBatches(_prositClient, progress, _document.Settings,
                standardPeptidesToAdd.Select(p => p.WithNCE(_nce)).ToArray(), CancellationToken.None);
            // Merge iRT and MS2 into SpecMzInfos
            standardSpectra = standardMS.Spectra.Select(m => m.SpecMzInfo).ToList();
            for (var i = 0; i < standardSpectra.Count; ++i)
            {
                if (standardIRTMap.TryGetValue(standardMS.Spectra[i].PeptidePrecursorNCE.NodePep, out var iRT))
                {
                    standardSpectra[i].RetentionTime = iRT;
                }
            }
        }
    }
    // Predict fragment intensities
    PrositMS2Spectra ms = _intensityModel.PredictBatches(_prositClient, progress, _document.Settings,
        _peptides.Zip(_precursors,
            (pep, prec) => new PrositIntensityModel.PeptidePrecursorNCE(pep, prec, _nce)).ToArray(),
        CancellationToken.None);
    var specMzInfo = ms.Spectra.Select(m => m.SpecMzInfo).ToList();
    // Predict iRTs for peptides — deduplicated by modified sequence so each
    // distinct peptide is only predicted once.
    var distinctPeps = _peptides.Select(p => (PrositRetentionTimeModel.PeptideDocNodeWrapper)p).Distinct(
        new SystemLinqExtensionMethods.FuncEqualityComparer<PrositRetentionTimeModel.PeptideDocNodeWrapper>(
            (p1, p2) => p1.Node.ModifiedSequence == p2.Node.ModifiedSequence)).ToArray();
    var iRTMap = _rtModel.PredictBatches(_prositClient, progress, _document.Settings, distinctPeps,
        CancellationToken.None);
    for (var i = 0; i < specMzInfo.Count; ++i)
    {
        if (iRTMap.TryGetValue(ms.Spectra[i].PeptidePrecursorNCE.NodePep, out var iRT))
        {
            // Convert through the standard alignment when available; otherwise
            // use the raw predicted iRT.
            specMzInfo[i].RetentionTime = regr?.Conversion?.GetY(iRT) ?? iRT;
        }
    }
    // Build library
    var librarySpectra = SpectrumMzInfo.RemoveDuplicateSpectra(standardSpectra.Concat(specMzInfo).ToList());
    // Delete if already exists, no merging with Prosit
    var libraryExists = File.Exists(LibrarySpec.FilePath);
    if (libraryExists)
    {
        var replace = _replaceLibrary();
        if (!replace)
        {
            return(false);
        }
        FileEx.SafeDelete(LibrarySpec.FilePath);
    }
    if (!librarySpectra.Any())
    {
        return(true);
    }
    // Build the library
    using (var blibDb = BlibDb.CreateBlibDb(LibrarySpec.FilePath))
    {
        var docLibrarySpec = new BiblioSpecLiteSpec(LibrarySpec.Name, LibrarySpec.FilePath);
        BiblioSpecLiteLibrary docLibraryNew = null;
        var docLibrarySpec2 = docLibrarySpec;
        docLibraryNew = blibDb.CreateLibraryFromSpectra(docLibrarySpec2, librarySpectra,
            LibrarySpec.Name, progress);
        if (docLibraryNew == null)
        {
            return(false);
        }
    }
    return(true);
}
/// <summary>
/// Exercises full-scan filtering across several document/instrument
/// configurations: Orbitrap SIM, DIA refiltering, Orbi-Velos, LTQ MS1+MS/MS
/// (single and multi-file replicates), centroided-data error handling, filter
/// width relationships, and MS1 isotope chromatogram extraction. Each major
/// document state is appended to <paramref name="docCheckpoints"/>.
/// </summary>
/// <param name="asSmallMolecules">Mode used to convert the proteomic documents to small molecules.</param>
/// <param name="docCheckpoints">Receives the sequence of document states produced by the test.</param>
/// <param name="centroided">When true (and Thermo raw import is available), start with centroided precursor resolution.</param>
private void DoFullScanFilterTest(RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules,
    out List<SrmDocument> docCheckpoints, bool centroided = false)
{
    docCheckpoints = new List<SrmDocument>();
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    string docPath = testFilesDir.GetTestPath("BSA_Protea_label_free_20100323_meth3_multi.sky");
    var expectedPepCount = 7;
    var expectedTransGroupCount = 7;
    var expectedTransCount = 49;
    var doc = InitFullScanDocument(ref docPath, 2, ref expectedPepCount, ref expectedTransGroupCount,
        ref expectedTransCount, asSmallMolecules);
    if (centroided && ExtensionTestContext.CanImportThermoRaw)
    {
        const double ppm20 = 20.0;
        doc = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorResolution(FullScanMassAnalyzerType.centroided, ppm20, 0)));
    }
    using (var docContainer = new ResultsTestDocumentContainer(doc, docPath))
    {
        // Import the first RAW file (or mzML for international)
        string rawPath = testFilesDir.GetTestPath("ah_20101011y_BSA_MS-MS_only_5-2" +
            ExtensionTestContext.ExtThermoRaw);
        var measuredResults = new MeasuredResults(new[]
            { new ChromatogramSet("Single", new[] { new MsDataFilePath(rawPath) }) });
        SrmDocument docResults = docContainer.ChangeMeasuredResults(measuredResults, 3, 3, 21);
        docCheckpoints.Add(docResults);
        // Refilter allowing multiple precursors per spectrum
        SrmDocument docMulti = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(
            fs => fs.ChangeAcquisitionMethod(FullScanAcquisitionMethod.DIA, new IsolationScheme("Test", 2))));
        AssertEx.Serializable(docMulti, AssertEx.DocumentCloned);
        // Release data cache file
        Assume.IsTrue(docContainer.SetDocument(docMulti, docResults));
        // And remove it
        FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults, 6, 6, 38));
        // Import full scan Orbi-Velos data
        docPath = testFilesDir.GetTestPath("BSA_Protea_label_free_20100323_meth3_long_acc_template.sky");
        expectedPepCount = 3;
        expectedTransGroupCount = 3;
        expectedTransCount = 21;
        doc = InitFullScanDocument(ref docPath, 1, ref expectedPepCount, ref expectedTransGroupCount,
            ref expectedTransCount, asSmallMolecules);
        docCheckpoints.Add(doc);
        Assume.AreEqual(FullScanMassAnalyzerType.orbitrap,
            doc.Settings.TransitionSettings.FullScan.ProductMassAnalyzer);
        // Make sure saving this type of document works
        AssertEx.Serializable(doc, AssertEx.DocumentCloned);
        Assume.IsTrue(docContainer.SetDocument(doc, docContainer.Document));
        rawPath = testFilesDir.GetTestPath("ah_20101029r_BSA_CID_FT_centroid_3uscan_3" +
            ExtensionTestContext.ExtThermoRaw);
        measuredResults = new MeasuredResults(new[] { new ChromatogramSet("Accurate", new[] { rawPath }) });
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults, 3, 3, 21));
        // Import LTQ data with MS1 and MS/MS
        docPath = testFilesDir.GetTestPath("BSA_Protea_label_free_20100323_meth3_test4.sky");
        expectedPepCount = 3;
        expectedTransGroupCount = 4;
        expectedTransCount = 32;
        doc = InitFullScanDocument(ref docPath, 3, ref expectedPepCount, ref expectedTransGroupCount,
            ref expectedTransCount, asSmallMolecules);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.ProductMassAnalyzer);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.PrecursorMassAnalyzer);
        docCheckpoints.Add(doc);
        // Targeted acquisition with QIT resolution for both MS1 and MS/MS.
        var docBoth = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangeAcquisitionMethod(FullScanAcquisitionMethod.Targeted, null)
              .ChangePrecursorResolution(FullScanMassAnalyzerType.qit, TransitionFullScan.DEFAULT_RES_QIT, null)));
        docCheckpoints.Add(docBoth);
        AssertEx.Serializable(docBoth, AssertEx.DocumentCloned);
        Assume.IsTrue(docContainer.SetDocument(docBoth, docContainer.Document));
        string dataPath = testFilesDir.GetTestPath("klc_20100329v_Protea_Peptide_Curve_200fmol_uL_tech1.mzML");
        var listResults = new List<ChromatogramSet>
        {
            new ChromatogramSet("MS1 and MS/MS", new[] { dataPath }),
        };
        measuredResults = new MeasuredResults(listResults.ToArray());
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults,
            expectedPepCount, expectedTransGroupCount, expectedTransCount - 6));
        // The mzML was filtered for the m/z range 410 to 910.
        foreach (var nodeTran in docContainer.Document.MoleculeTransitions)
        {
            Assume.IsTrue(nodeTran.HasResults);
            Assume.IsNotNull(nodeTran.Results[0]);
            if (410 > nodeTran.Mz || nodeTran.Mz > 910)
            {
                Assume.IsTrue(nodeTran.Results[0][0].IsForcedIntegration);
            }
            else
            {
                Assume.IsFalse(nodeTran.Results[0][0].IsForcedIntegration);
            }
        }
        // Import LTQ data with MS1 and MS/MS using multiple files for a single replicate
        listResults.Add(new ChromatogramSet("Multi-file", new[]
        {
            testFilesDir.GetTestPath("both_DRV.mzML"),
            testFilesDir.GetTestPath("both_KVP.mzML"),
        }));
        measuredResults = new MeasuredResults(listResults.ToArray());
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults,
            expectedPepCount - 1, expectedTransGroupCount - 1, expectedTransCount - 6));
        if (asSmallMolecules == RefinementSettings.ConvertToSmallMoleculesMode.masses_only)
        {
            return; // Can't work with isotope distributions when we don't have ion formulas
        }
        int indexResults = listResults.Count - 1;
        var matchIdentifierDRV = "DRV";
        if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)
        {
            matchIdentifierDRV = RefinementSettings.TestingConvertedFromProteomicPeptideNameDecorator +
                matchIdentifierDRV;
        }
        int index = 0;
        foreach (var nodeTran in docContainer.Document.MoleculeTransitions)
        {
            Assume.IsTrue(nodeTran.HasResults);
            Assume.AreEqual(listResults.Count, nodeTran.Results.Count);
            var peptide = nodeTran.Transition.Group.Peptide;
            if (peptide.IsCustomMolecule && index == 24)
            {
                // Conversion to small molecule loses some of the nuance of "Sequence"
                // vs "FastaSequence", comparisons are inexact
                Assume.AreEqual("pep_DRVY[+80.0]IHPF", nodeTran.PrimaryCustomIonEquivalenceKey);
                break;
            }
            // DRV without FASTA sequence should not have data for non-precursor transitions
            if (!peptide.TextId.StartsWith(matchIdentifierDRV) ||
                (!peptide.IsCustomMolecule && !peptide.Begin.HasValue))
            {
                Assume.IsNotNull(nodeTran.Results[indexResults]);
                Assume.IsFalse(nodeTran.Results[indexResults][0].IsEmpty);
            }
            else if (nodeTran.Transition.IonType != IonType.precursor)
            {
                Assert.IsTrue(nodeTran.Results[indexResults].IsEmpty);
            }
            else
            {
                // Random, bogus peaks chosen in both files
                Assume.IsNotNull(nodeTran.Results[indexResults]);
                Assume.AreEqual(2, nodeTran.Results[indexResults].Count);
                Assume.IsFalse(nodeTran.Results[indexResults][0].IsEmpty);
                Assume.IsFalse(nodeTran.Results[indexResults][1].IsEmpty);
            }
            index++;
        }
        // Verify handling of bad request for vendor centroided data - out-of-range PPM
        docPath = testFilesDir.GetTestPath("Yeast_HI3 Peptides_test.sky");
        expectedPepCount = 2;
        expectedTransGroupCount = 2;
        expectedTransCount = 2;
        doc = InitFullScanDocument(ref docPath, 2, ref expectedPepCount, ref expectedTransGroupCount,
            ref expectedTransCount, asSmallMolecules);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.ProductMassAnalyzer);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.PrecursorMassAnalyzer);
        var docBad = doc;
        AssertEx.ThrowsException<InvalidDataException>(() =>
            docBad.ChangeSettings(docBad.Settings.ChangeTransitionFullScan(fs =>
                fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Count, 1, IsotopeEnrichmentsList.DEFAULT)
                  .ChangePrecursorResolution(FullScanMassAnalyzerType.centroided, 50 * 1000, 400))),
            string.Format(Resources.TransitionFullScan_ValidateRes_Mass_accuracy_must_be_between__0__and__1__for_centroided_data_,
                TransitionFullScan.MIN_CENTROID_PPM, TransitionFullScan.MAX_CENTROID_PPM));
        // Verify relationship between PPM and resolving power
        const double ppm = 20.0; // Should yield same filter width as resolving power 50,000 in TOF
        var docNoCentroid = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Count, 1, IsotopeEnrichmentsList.DEFAULT)
              .ChangePrecursorResolution(FullScanMassAnalyzerType.centroided, ppm, 0)));
        AssertEx.Serializable(docNoCentroid, AssertEx.DocumentCloned);
        Assume.IsTrue(docContainer.SetDocument(docNoCentroid, docContainer.Document));
        const double mzTest = 400.0;
        var filterWidth = docNoCentroid.Settings.TransitionSettings.FullScan.GetPrecursorFilterWindow(mzTest);
        Assume.AreEqual(mzTest * 2.0 * ppm * 1E-6, filterWidth);
        // Verify relationship between normal and high-selectivity extraction
        var docTofNormal = docNoCentroid.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorResolution(FullScanMassAnalyzerType.tof, 50 * 1000, null)));
        AssertEx.Serializable(docTofNormal, AssertEx.DocumentCloned);
        // Half the resolving power with selective extraction should give the same width.
        var docTofSelective = docTofNormal.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorResolution(FullScanMassAnalyzerType.tof, 25 * 1000, null)
              .ChangeUseSelectiveExtraction(true)));
        AssertEx.Serializable(docTofSelective, AssertEx.DocumentCloned);
        var filterWidthTof = docTofNormal.Settings.TransitionSettings.FullScan.GetPrecursorFilterWindow(mzTest);
        var filterWidthSelective = docTofSelective.Settings.TransitionSettings.FullScan.GetPrecursorFilterWindow(mzTest);
        Assume.AreEqual(filterWidth, filterWidthTof);
        Assume.AreEqual(filterWidth, filterWidthSelective);
        // Verify handling of bad request for vendor centroided data - ask for centroiding in mzML
        const string fileName = "S_2_LVN.mzML";
        var filePath = testFilesDir.GetTestPath(fileName);
        AssertEx.ThrowsException<AssertFailedException>(() =>
        {
            listResults = new List<ChromatogramSet>
            {
                new ChromatogramSet("rep1", new[] { new MsDataFilePath(filePath) }),
            };
            docContainer.ChangeMeasuredResults(new MeasuredResults(listResults.ToArray()), 1, 1, 1);
        }, string.Format(Resources.NoCentroidedDataException_NoCentroidedDataException_No_centroided_data_available_for_file___0_____Adjust_your_Full_Scan_settings_,
            filePath));
        // Import FT data with only MS1
        docPath = testFilesDir.GetTestPath("Yeast_HI3 Peptides_test.sky");
        expectedPepCount = 2;
        expectedTransGroupCount = 2;
        expectedTransCount = 2;
        doc = InitFullScanDocument(ref docPath, 2, ref expectedPepCount, ref expectedTransGroupCount,
            ref expectedTransCount, asSmallMolecules);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.ProductMassAnalyzer);
        Assume.AreEqual(FullScanMassAnalyzerType.none,
            doc.Settings.TransitionSettings.FullScan.PrecursorMassAnalyzer);
        var docMs1 = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Count, 1, IsotopeEnrichmentsList.DEFAULT)
              .ChangePrecursorResolution(FullScanMassAnalyzerType.tof, 50 * 1000, null)));
        Assume.AreEqual(filterWidth, docMs1.Settings.TransitionSettings.FullScan.GetPrecursorFilterWindow(mzTest));
        docMs1 = doc.ChangeSettings(doc.Settings.ChangeTransitionFullScan(fs =>
            fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Count, 1, IsotopeEnrichmentsList.DEFAULT)
              .ChangePrecursorResolution(FullScanMassAnalyzerType.ft_icr, 50 * 1000, mzTest)));
        AssertEx.Serializable(docMs1, AssertEx.DocumentCloned);
        Assume.IsTrue(docContainer.SetDocument(docMs1, docContainer.Document));
        const string rep1 = "rep1";
        listResults = new List<ChromatogramSet>
        {
            new ChromatogramSet(rep1, new[] { filePath }),
        };
        measuredResults = new MeasuredResults(listResults.ToArray());
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults, 1, 1, 1));
        // Because of the way the mzML files were filtered, all of the LVN peaks should be present
        // in the first replicate, and all of the NVN peaks should be present in the other.
        var matchIdentifierLVN = "LVN";
        if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)
        {
            matchIdentifierLVN = RefinementSettings.TestingConvertedFromProteomicPeptideNameDecorator +
                matchIdentifierLVN;
        }
        foreach (var nodeTranGroup in docContainer.Document.MoleculeTransitionGroups)
        {
            foreach (var docNode in nodeTranGroup.Children)
            {
                var nodeTran = (TransitionDocNode)docNode;
                Assume.IsTrue(nodeTran.HasResults);
                Assume.AreEqual(1, nodeTran.Results.Count);
                if (nodeTran.Transition.Group.Peptide.Target.ToString().StartsWith(matchIdentifierLVN))
                {
                    Assume.IsFalse(nodeTran.Results[0][0].IsEmpty);
                }
                else
                {
                    Assume.IsTrue(nodeTran.Results[0][0].IsEmpty);
                }
            }
        }
        const string rep2 = "rep2";
        listResults.Add(new ChromatogramSet(rep2, new[] { testFilesDir.GetTestPath("S_2_NVN.mzML") }));
        measuredResults = new MeasuredResults(listResults.ToArray());
        docCheckpoints.Add(docContainer.ChangeMeasuredResults(measuredResults, 1, 1, 1));
        // Because of the way the mzML files were filtered, all of the LVN peaks should be present
        // in the first replicate, and all of the NVN peaks should be present in the other.
        foreach (var nodeTranGroup in docContainer.Document.MoleculeTransitionGroups)
        {
            foreach (var docNode in nodeTranGroup.Children)
            {
                var nodeTran = (TransitionDocNode)docNode;
                Assume.IsTrue(nodeTran.HasResults);
                Assume.AreEqual(2, nodeTran.Results.Count);
                if (nodeTran.Transition.Group.Peptide.Target.ToString().StartsWith(matchIdentifierLVN))
                {
                    Assume.IsTrue(nodeTran.Results[1][0].IsEmpty);
                }
                else
                {
                    Assume.IsFalse(nodeTran.Results[1][0].IsEmpty);
                }
            }
        }
        // Chromatograms should be present in the cache for a number of isotopes.
        var docMs1Isotopes = docContainer.Document.ChangeSettings(doc.Settings
            .ChangeTransitionFullScan(fs =>
                fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Count, 3, IsotopeEnrichmentsList.DEFAULT))
            .ChangeTransitionFilter(filter =>
                filter.ChangePeptideIonTypes(new[] { IonType.precursor })
                      .ChangeSmallMoleculeIonTypes(new[] { IonType.precursor })));
        docCheckpoints.Add(docMs1Isotopes);
        AssertEx.IsDocumentState(docMs1Isotopes, null, 2, 2, 2);
        // Need to reset auto-manage for transitions
        var refineAutoSelect = new RefinementSettings { AutoPickChildrenAll = PickLevel.transitions };
        docMs1Isotopes = refineAutoSelect.Refine(docMs1Isotopes);
        AssertEx.IsDocumentState(docMs1Isotopes, null, 2, 2, 6);
        AssertResult.IsDocumentResultsState(docMs1Isotopes, rep1, 1, 1, 0, 3, 0);
        AssertResult.IsDocumentResultsState(docMs1Isotopes, rep2, 1, 1, 0, 3, 0);
        docCheckpoints.Add(docMs1Isotopes);
        // Add M-1 transitions, and verify that they have chromatogram data also, but
        // empty peaks in all cases
        var docMs1All = docMs1Isotopes.ChangeSettings(docMs1Isotopes.Settings
            .ChangeTransitionFullScan(fs =>
                fs.ChangePrecursorIsotopes(FullScanPrecursorIsotopes.Percent, 0, IsotopeEnrichmentsList.DEFAULT))
            .ChangeTransitionIntegration(i => i.ChangeIntegrateAll(false))); // For compatibility with v2.5 and earlier
        docCheckpoints.Add(docMs1All);
        AssertEx.IsDocumentState(docMs1All, null, 2, 2, 10);
        AssertResult.IsDocumentResultsState(docMs1All, rep1, 1, 1, 0, 4, 0);
        AssertResult.IsDocumentResultsState(docMs1All, rep2, 1, 1, 0, 4, 0);
        var ms1AllTranstions = docMs1All.MoleculeTransitions.ToArray();
        // First M-1 transition: chromatogram present in both replicates, peak empty/forced.
        var tranM1 = ms1AllTranstions[0];
        Assert.AreEqual(-1, tranM1.Transition.MassIndex);
        Assert.IsTrue(!tranM1.Results[0].IsEmpty && !tranM1.Results[1].IsEmpty);
        Assert.IsTrue(tranM1.Results[0][0].IsEmpty && tranM1.Results[1][0].IsForcedIntegration);
        tranM1 = ms1AllTranstions[5];
        Assert.AreEqual(-1, tranM1.Transition.MassIndex);
        Assert.IsTrue(!tranM1.Results[0].IsEmpty && !tranM1.Results[1].IsEmpty);
        Assert.IsTrue(tranM1.Results[0][0].IsForcedIntegration && tranM1.Results[1][0].IsEmpty);
    }
}
/// <summary>
/// Imports AB SCIEX .wiff results (or pre-converted mzML files when the wiff
/// reader is unavailable), verifies the resulting document/chromatogram state,
/// then checks replicate removal and that an mzXML conversion of the same
/// sample produces matching chromatograms.
/// </summary>
public void WiffResultsTest()
{
    TestFilesDir testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
    SrmDocument doc = InitWiffDocument(testFilesDir);
    using (var docContainer = new ResultsTestDocumentContainer(doc,
        testFilesDir.GetTestPath("SimpleWiffTest.sky")))
    {
        // Start from a clean slate: remove any existing chromatogram cache.
        FileEx.SafeDelete(ChromatogramCache.FinalPathForName(docContainer.DocumentFilePath, null));

        var chromatogramSets = new List<ChromatogramSet>();
        if (ExtensionTestContext.CanImportAbWiff)
        {
            string wiffPath = testFilesDir.GetTestPath("051309_digestion.wiff");
            string[] sampleIds = MsDataFileImpl.ReadIds(wiffPath);
            for (int sampleIndex = 0; sampleIndex < sampleIds.Length; sampleIndex++)
            {
                string sampleName = sampleIds[sampleIndex];
                // Begin importing at the "test" sample; once anything has been
                // added, every subsequent sample is taken as well.
                if (Equals(sampleName, "test") || chromatogramSets.Count > 0)
                {
                    string encodedPath = SampleHelp.EncodePath(wiffPath, sampleName, sampleIndex,
                        LockMassParameters.EMPTY, false, false);
                    chromatogramSets.Add(new ChromatogramSet(sampleName,
                        new[] { MsDataFileUri.Parse(encodedPath) }));
                }
            }
        }
        else
        {
            chromatogramSets.Add(new ChromatogramSet("test",
                new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-test.mzML")) }));
            chromatogramSets.Add(new ChromatogramSet("rfp9,before,h,1",
                new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-rfp9,before,h,1.mzML")) }));
        }

        // Should have added test and one after
        Assert.AreEqual(2, chromatogramSets.Count);

        var docResults = doc.ChangeMeasuredResults(new MeasuredResults(chromatogramSets));
        Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
        docContainer.AssertComplete();
        docResults = docContainer.Document;

        AssertEx.IsDocumentState(docResults, 6, 9, 9, 18, 54);
        Assert.IsTrue(docResults.Settings.MeasuredResults.IsLoaded);
        foreach (var nodeTran in docResults.PeptideTransitions)
        {
            Assert.IsTrue(nodeTran.HasResults);
            Assert.AreEqual(2, nodeTran.Results.Count);
        }

        // Remove the last chromatogram
        chromatogramSets.RemoveAt(1);
        var docResultsSingle = docResults.ChangeMeasuredResults(new MeasuredResults(chromatogramSets));
        AssertResult.IsDocumentResultsState(docResultsSingle, "test", 9, 2, 9, 8, 27);

        // Add mzXML version of test sample
        chromatogramSets.Add(new ChromatogramSet("test-mzXML",
            new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-s3.mzXML")) }));
        var docMzxml = docResults.ChangeMeasuredResults(new MeasuredResults(chromatogramSets));
        Assert.IsTrue(docContainer.SetDocument(docMzxml, docResults, true));
        docContainer.AssertComplete();
        docMzxml = docContainer.Document;
        // Verify mzXML and native contained same results
        // Unfortunately mzWiff produces chromatograms with no zeros, which
        // need to be interpolated into place. This means a .wiff file and
        // its mzWiff mzXML file will never be the same.
        AssertResult.MatchChromatograms(docMzxml, 0, 1, -1, 0);
    }
    // TODO: Switch to a using clause when PWiz is fixed, and this assertion fails
    // AssertEx.ThrowsException<IOException>(() => testFilesDir.Dispose());
}
/// <summary>
/// Verifies Waters lockmass correction end-to-end: a lockmass-corrected import
/// must differ from uncorrected imports (which must agree with each other),
/// survive document save/reopen both with and without the .skyd cache, and be
/// reused from the cached chromatogram values on reimport.
/// </summary>
protected override void DoTest()
{
    var skyfile = TestFilesDir.GetTestPath("2533_FattyAcids.sky");
    const double lockmassNegative = 554.2615;
    const double lockmassToler = 0.25; // Per Hans Vissers @ Waters

    SrmDocument corrected = null, uncorrected2 = null, uncorrected1 = null;
    // Passes 2 and 1 import without lockmass correction; pass 0 imports with it.
    for (var pass = 2; pass >= 0; pass--)
    {
        RunUI(() => SkylineWindow.OpenFile(skyfile));
        var docInitial = WaitForDocumentLoaded();
        AssertEx.IsDocumentState(docInitial, null, 1, 11, 11, 22);

        var loadTimer = new Stopwatch();
        loadTimer.Start();
        ImportResults(GetTestPath(TestFilesPersistent[0]),
            (pass == 0)
                ? new LockMassParameters(0, lockmassNegative, lockmassToler) // ESI- data
                : LockMassParameters.EMPTY);
        var document = WaitForDocumentLoaded(400000);
        loadTimer.Stop();
        // Skip timing output for the first (warm-up) pass.
        if (pass < 2)
        {
            DebugLog.Info("lockmass {0} load time = {1}",
                (pass == 0) ? "corrected" : "uncorrected", loadTimer.ElapsedMilliseconds);
        }
        switch (pass)
        {
            case 0:
                corrected = document;
                break;
            case 1:
                uncorrected1 = document;
                break;
            default:
                uncorrected2 = document;
                break;
        }
        if (pass > 0)
        {
            RunUI(() => SkylineWindow.NewDocument(true));
        }
    }
    Assert.AreNotEqual(corrected, uncorrected2); // Corrected pass should differ
    Assert.AreEqual(uncorrected2, uncorrected1); // Both uncorrected passes should agree
    Assert.IsNotNull(corrected);
    Assert.IsNotNull(uncorrected2);
    ComparePeaks(corrected, uncorrected2);
    var correctedPeaks = Peaks(corrected);
    var uncorrectedPeaks = Peaks(uncorrected1);

    // Verify roundtrip with and without .skyd
    for (var loop = 0; loop < 2; loop++)
    {
        var outfile = TestFilesDirs[0].GetTestPath("test" + loop + ".sky");
        var withoutCache = (loop == 0);
        RunUI(() =>
        {
            SkylineWindow.SaveDocument(outfile, !withoutCache);
            SkylineWindow.NewDocument(true);
            if (withoutCache)
            {
                FileEx.SafeDelete(Path.ChangeExtension(outfile, ChromatogramCache.EXT)); // kill the .skyd file
            }
            SkylineWindow.OpenFile(outfile);
        });
        var reopened = WaitForDocumentLoaded();
        var reopenedPeaks = Peaks(reopened);
        Assert.AreEqual(correctedPeaks.Count, reopenedPeaks.Count);
        for (var i = 0; i < correctedPeaks.Count; i++)
        {
            Assert.AreNotEqual(uncorrectedPeaks[i], reopenedPeaks[i], "reopened peaks should have lockmass correction");
            Assert.AreEqual(correctedPeaks[i], reopenedPeaks[i], "reopened peaks should agree");
        }
    }

    // And finally, verify that reimport successfully uses the lockmass values cached in the chromatograms
    Settings.Default.LockmassParameters = LockMassParameters.EMPTY; // Make sure we aren't pulling from settings
    var manageResults = ShowDialog<ManageResultsDlg>(SkylineWindow.ManageResults);
    RunUI(() =>
    {
        manageResults.SelectedChromatograms = new[]
        {
            SkylineWindow.Document.Settings.MeasuredResults.Chromatograms[0]
        };
        manageResults.ReimportResults();
        manageResults.OkDialog();
    });
    WaitForDocumentChange(corrected);
    WaitForCondition(10 * 60 * 1000, () => SkylineWindow.Document.Settings.MeasuredResults.IsLoaded); // 10 minutes
    var reimportedPeaks = Peaks(SkylineWindow.Document);
    Assert.AreEqual(correctedPeaks.Count, reimportedPeaks.Count);
    for (var i = 0; i < correctedPeaks.Count; i++)
    {
        Assert.AreNotEqual(uncorrectedPeaks[i], reimportedPeaks[i], "reimported peaks should have lockmass correction");
        Assert.AreEqual(correctedPeaks[i], reimportedPeaks[i], "reimported peaks should agree");
    }
}