Example #1
        public void AgilentFormatsTest()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            string docPath;
            SrmDocument doc = InitAgilentDocument(testFilesDir, out docPath);

            var docContainer = new ResultsTestDocumentContainer(doc, docPath);
            const string replicateName = "AgilentTest";
            string extRaw = ExtensionTestContext.ExtAgilentRaw;
            var chromSets = new[]
                                {
                                    new ChromatogramSet(replicateName, new[]
                                        { new MsDataFilePath(testFilesDir.GetTestPath("081809_100fmol-MichromMix-05" + extRaw)),  }),
                                };
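            // Import the raw file as a new replicate and wait for chromatogram extraction to complete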
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(chromSets));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            docResults = docContainer.Document;
            AssertResult.IsDocumentResultsState(docResults, replicateName,
                doc.PeptideCount, doc.PeptideTransitionGroupCount, 0, doc.PeptideTransitionCount, 0);

            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }
Example #2
        public void TestMeasuredDriftValues()
        {
            var testFilesDir = new TestFilesDir(TestContext, @"Test\Results\BlibDriftTimeTest.zip"); // Re-used from BlibDriftTimeTest
            // Open document with some peptides but no results
            var docPath = testFilesDir.GetTestPath("BlibDriftTimeTest.sky");
            SrmDocument docOriginal = ResultsUtil.DeserializeDocument(docPath);
            var docContainer = new ResultsTestDocumentContainer(docOriginal, docPath);
            var doc = docContainer.Document;

            // Import an mz5 file that contains drift info
            const string replicateName = "ID12692_01_UCA168_3727_040714";
            var chromSets = new[]
                                {
                                    new ChromatogramSet(replicateName, new[]
                                        { new MsDataFilePath(testFilesDir.GetTestPath("ID12692_01_UCA168_3727_040714.mz5")),  }),
                                };
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(chromSets));
            Assert.IsTrue(docContainer.SetDocument(docResults, docOriginal, true));
            docContainer.AssertComplete();
            var document = docContainer.Document;
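            // Start from an empty drift time predictor so measured drift times are derived solely from the imported results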
            document = document.ChangeSettings(document.Settings.ChangePeptidePrediction(prediction => new PeptidePrediction(null, DriftTimePredictor.EMPTY)));

            // Verify ability to extract predictions from raw data
            var newPred = document.Settings.PeptideSettings.Prediction.DriftTimePredictor.ChangeMeasuredDriftTimesFromResults(
                    document, docContainer.DocumentFilePath);
            var result = newPred.MeasuredDriftTimePeptides;
            Assert.AreEqual(TestSmallMolecules ? 2 : 1, result.Count);
            const double expectedDT = 4.0019;
            var expectedOffset = .4829;
            Assert.AreEqual(expectedDT, result.Values.First().DriftTimeMsec(false).Value, .001);
            Assert.AreEqual(expectedOffset, result.Values.First().HighEnergyDriftTimeOffsetMsec, .001);

            // Check ability to update, and to preserve unchanged
            var revised = new Dictionary<LibKey, DriftTimeInfo>();
            var libKey = result.Keys.First();
            revised.Add(libKey, new DriftTimeInfo(4, 0.234));
            var libKey2 = new LibKey("DEADEELS", 2);
            revised.Add(libKey2, new DriftTimeInfo(5, 0.123));
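            // Install the edited predictor and re-derive from results: the measured peptide's values are
            // refreshed from the data, while the unmatched "DEADEELS" entry is preserved unchanged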
            document =
                document.ChangeSettings(
                    document.Settings.ChangePeptidePrediction(prediction => new PeptidePrediction(null, new DriftTimePredictor("test", revised, null, null, 40))));
            newPred = document.Settings.PeptideSettings.Prediction.ChangeDriftTimePredictor(
                document.Settings.PeptideSettings.Prediction.DriftTimePredictor.ChangeMeasuredDriftTimesFromResults(
                    document, docContainer.DocumentFilePath)).DriftTimePredictor;
            result = newPred.MeasuredDriftTimePeptides;
            Assert.AreEqual(TestSmallMolecules ? 3 : 2, result.Count);
            Assert.AreEqual(expectedDT, result[libKey].DriftTimeMsec(false).Value, .001);
            Assert.AreEqual(expectedOffset, result[libKey].HighEnergyDriftTimeOffsetMsec, .001);
            Assert.AreEqual(5, result[libKey2].DriftTimeMsec(false).Value, .001);
            Assert.AreEqual(0.123, result[libKey2].HighEnergyDriftTimeOffsetMsec, .001);

            docContainer.Release();
        }
Example #3
        public void TestBlibDriftTimes()
        {
            var testFilesDir = new TestFilesDir(TestContext, @"Test\Results\BlibDriftTimeTest.zip");
            // Open document with some peptides but no results
            var docPath = testFilesDir.GetTestPath("BlibDriftTimeTest.sky");
            SrmDocument docOriginal = ResultsUtil.DeserializeDocument(docPath);
            var docContainer = new ResultsTestDocumentContainer(docOriginal, docPath);
            var doc = docContainer.Document;

            // Use the bare drift times in the spectral library
            var librarySpec = new BiblioSpecLiteSpec("drift test",
                                                testFilesDir.GetTestPath("BlibDriftTimeTest.blib"));
            doc = doc.ChangeSettings(
                doc.Settings.ChangePeptideLibraries(lib => lib.ChangeLibrarySpecs(new[] { librarySpec })).
                ChangePeptidePrediction(p => p.ChangeLibraryDriftTimesResolvingPower(20)).
                ChangePeptidePrediction(p => p.ChangeUseLibraryDriftTimes(true))
                );

            // Import an mz5 file that needs drift info from the original data set,
            // which is preserved in the .blib file associated with a different raw source.
            // Without the bugfix this won't get any drift time filtering.
            const string replicateName = "ID12692_01_UCA168_3727_040714";
            var chromSets = new[]
                                {
                                    new ChromatogramSet(replicateName, new[]
                                        { new MsDataFilePath(testFilesDir.GetTestPath("ID12692_01_UCA168_3727_040714.mz5")),  }),
                                };
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(chromSets));
            Assert.IsTrue(docContainer.SetDocument(docResults, docOriginal, true));
            docContainer.AssertComplete();
            var document = docContainer.Document;

            float tolerance = (float)document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
            double maxHeight = 0;
            var results = document.Settings.MeasuredResults;
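            // With the library drift times applied, the chromatograms read back below should show
            // fewer peaks and a lower max intensity than an unfiltered import would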
            Assert.AreEqual(2, document.PeptidePrecursorPairs.Count());
            var pair = document.PeptidePrecursorPairs.ToArray()[1];
            ChromatogramGroupInfo[] chromGroupInfo;
            Assert.IsTrue(results.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup,
                tolerance, true, out chromGroupInfo));
            Assert.AreEqual(1, chromGroupInfo.Length);
            var chromGroup = chromGroupInfo[0];
            Assert.AreEqual(2, chromGroup.NumPeaks); // This will be higher if we don't filter on DT
            foreach (var tranInfo in chromGroup.TransitionPointSets)
            {
                maxHeight = Math.Max(maxHeight, tranInfo.MaxIntensity);
            }
            Assert.AreEqual(278, maxHeight, 1);  // Without DT filtering, this will be much greater - about 996

            docContainer.Release();
        }
Example #4
        private static void doTest(TestFilesDir testFilesDir, string skyFile, double expectedRT, string[] filenames, double? expectedRatio)
        {
            string docPath;
            var document = InitExplicitRTDocument(testFilesDir, skyFile, out docPath);
            var docContainer = new ResultsTestDocumentContainer(document, docPath);

            var doc = docContainer.Document;
            var listChromatograms = new List<ChromatogramSet>();
            foreach (var filename in filenames)
            {
                var path = MsDataFileUri.Parse(filename + ExtensionTestContext.ExtWatersRaw);
                listChromatograms.Add(AssertResult.FindChromatogramSet(doc, path) ??
                                      new ChromatogramSet(path.GetFileName().Replace('.', '_'), new[] { path }));
            }
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            document = docContainer.Document;

            float tolerance = (float) document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
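            // Every precursor should have an extracted chromatogram in both replicates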
            foreach (var pair in document.MoleculePrecursorPairs)
            {
                ChromatogramGroupInfo[] chromGroupInfo;
                Assert.IsTrue(document.Settings.MeasuredResults.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup, tolerance,
                    true, out chromGroupInfo));
                Assert.IsTrue(document.Settings.MeasuredResults.TryLoadChromatogram(1, pair.NodePep, pair.NodeGroup, tolerance,
                    true, out chromGroupInfo));
            }
            var nResults = 0;
            foreach (var nodePep in document.Molecules)
            {
                foreach (var results in nodePep.Results)
                {
                    foreach (var result in results)
                    {
                        Assert.AreEqual(expectedRT, result.RetentionTime ?? 0, .1); // We should pick peaks based on explicit RT
                        if (expectedRatio.HasValue) // If we didn't, ratios won't be right
                        {
                            Assert.IsNotNull(result.LabelRatios[0].Ratio);
                            Assert.AreEqual(expectedRatio.Value, result.LabelRatios[0].Ratio.Ratio, .1);
                        }
                        nResults++;
                    }
                }
            }
            Assert.AreEqual(filenames.Length*document.MoleculeGroupCount, nResults);

            // Release file handles
            docContainer.Release();
        }
Example #5
 public static void MatchChromatograms(ResultsTestDocumentContainer docContainer,
     MsDataFileUri path1, MsDataFileUri path2, int delta, int missing,
     LockMassParameters lockMassParameters = null)
 {
     var doc = docContainer.Document;
     var listChromatograms = new List<ChromatogramSet>();
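      // Reuse an existing ChromatogramSet for each path if the document already has one,
      // otherwise create a new set named after the file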
     foreach (var path in new[] { path1, path2 })
     {
         listChromatograms.Add(FindChromatogramSet(doc, path) ??
             new ChromatogramSet((path.GetFileName() ?? "").Replace('.', '_'), new[] { path }));
     }
     var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
     Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
     docContainer.AssertComplete();
     docResults = docContainer.Document;
     MatchChromatograms(docResults, 0, 1, delta, missing);
 }
Example #6
        public void WiffResultsTest()
        {
            TestFilesDir testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            SrmDocument doc = InitWiffDocument(testFilesDir);
            var docContainer = new ResultsTestDocumentContainer(doc,
                testFilesDir.GetTestPath("SimpleWiffTest.sky"));
            FileEx.SafeDelete(ChromatogramCache.FinalPathForName(docContainer.DocumentFilePath, null));

            var listChromatograms = new List<ChromatogramSet>();

            if (ExtensionTestContext.CanImportAbWiff)
            {
                string pathWiff = testFilesDir.GetTestPath("051309_digestion.wiff");
                string[] dataIds = MsDataFileImpl.ReadIds(pathWiff);
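                // The .wiff contains multiple samples; skip them until "test" is reached,
                // then import it and every sample that follows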

                for (int i = 0; i < dataIds.Length; i++)
                {
                    string nameSample = dataIds[i];
                    if (!Equals(nameSample, "test") && listChromatograms.Count == 0)
                        continue;
                    string pathSample = SampleHelp.EncodePath(pathWiff, nameSample, i, LockMassParameters.EMPTY, false, false);
                    listChromatograms.Add(new ChromatogramSet(nameSample, new[] { MsDataFileUri.Parse(pathSample) }));
                }
            }
            else
            {
                listChromatograms.Add(new ChromatogramSet("test",
                    new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-test.mzML")) }));
                listChromatograms.Add(new ChromatogramSet("rfp9,before,h,1",
                    new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-rfp9,before,h,1.mzML")) }));
            }

            // Should have added test and one after
            Assert.AreEqual(2, listChromatograms.Count);

            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();

            docResults = docContainer.Document;

            AssertEx.IsDocumentState(docResults, 6, 9, 9, 18, 54);
            Assert.IsTrue(docResults.Settings.MeasuredResults.IsLoaded);

            foreach (var nodeTran in docResults.PeptideTransitions)
            {
                Assert.IsTrue(nodeTran.HasResults);
                Assert.AreEqual(2, nodeTran.Results.Count);
            }

            // Remove the last chromatogram
            listChromatograms.RemoveAt(1);

            var docResultsSingle = docResults.ChangeMeasuredResults(new MeasuredResults(listChromatograms));

            AssertResult.IsDocumentResultsState(docResultsSingle, "test", 9, 2, 9, 8, 27);

            // Add mzXML version of test sample
            listChromatograms.Add(new ChromatogramSet("test-mzXML", new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("051309_digestion-s3.mzXML")) }));

            var docMzxml = docResults.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docMzxml, docResults, true));
            docContainer.AssertComplete();
            docMzxml = docContainer.Document;
            // Verify mzXML and native formats contained the same results
            // Unfortunately mzWiff produces chromatograms with no zeros, which
            // need to be interpolated into place.  This means a .wiff file and
            // its mzWiff mzXML file will never be the same.
            AssertResult.MatchChromatograms(docMzxml, 0, 1, -1, 0);
            // Release all file handles
            Assert.IsTrue(docContainer.SetDocument(doc, docContainer.Document));

            // TODO: Switch to a using clause when PWiz is fixed, and this assertion fails
            //            AssertEx.ThrowsException<IOException>(() => testFilesDir.Dispose());
        }
Example #7
        /* TODO bspratt drift time libs for small molecules

        [TestMethod]
        public void WatersImsMsePredictedDriftTimesChromatogramTestAsSmallMolecules()
        {
            WatersImsMseChromatogramTest(DriftFilterType.predictor, true);
        }

        [TestMethod]
        public void WatersImsMseLibraryDriftTimesChromatogramTestAsSmallMolecules()
        {
            WatersImsMseChromatogramTest(DriftFilterType.library, true);
        }

         */
        private void WatersImsMseChromatogramTest(DriftFilterType mode,
            RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules = RefinementSettings.ConvertToSmallMoleculesMode.none)
        {
            string subdir = (asSmallMolecules == RefinementSettings.ConvertToSmallMoleculesMode.none) ? null : asSmallMolecules.ToString();
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE, subdir);
            TestSmallMolecules = false; // Don't need that extra magic node

            bool withDriftTimePredictor = (mode == DriftFilterType.predictor); // Load the doc that has a drift time predictor?
            bool withDriftTimeFilter = (mode != DriftFilterType.none); // Perform drift time filtering?  (either with predictor, or with bare times in blib file)
            string docPath;
            SrmDocument document = InitWatersImsMseDocument(testFilesDir, withDriftTimePredictor ? "single_with_driftinfo.sky" : "single_no_driftinfo.sky", asSmallMolecules, out docPath);
            AssertEx.IsDocumentState(document, (withDriftTimePredictor || (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)) ? 1 : 0, 1, 1, 1, 8); // Drift time lib load bumps the doc version
            var docContainer = new ResultsTestDocumentContainer(document, docPath);
            var doc = docContainer.Document;
            var docOriginal = doc;

            string testModeStr = withDriftTimePredictor ? "with drift time predictor" : "without drift time info";

            if (withDriftTimeFilter && !withDriftTimePredictor)
            {
                // Use the bare drift times in the spectral library
                var librarySpec = new BiblioSpecLiteSpec("drift test",
                                                    testFilesDir.GetTestPath("mse-mobility.filtered-scaled.blib"));
                doc = doc.ChangeSettings(
                    doc.Settings.ChangePeptideLibraries(lib => lib.ChangeLibrarySpecs(new[] { librarySpec })).
                    ChangePeptidePrediction(p => p.ChangeLibraryDriftTimesResolvingPower(100)).
                    ChangePeptidePrediction(p => p.ChangeUseLibraryDriftTimes(true))
                    );
                testModeStr = "with drift times from spectral library";
            }

            var listChromatograms = new List<ChromatogramSet>();
            // A small subset of the QC_HDMSE_02_UCA168_3495_082213 data set (RT 21.5-22.5) from Will Thompson
            const string path = @"waters-mobility.mz5";
            listChromatograms.Add(AssertResult.FindChromatogramSet(doc, new MsDataFilePath(path)) ??
                                    new ChromatogramSet(Path.GetFileName(path).Replace('.', '_'), new[] { path }));
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, docOriginal, true));
            docContainer.AssertComplete();
            document = docContainer.Document;

            float tolerance = (float)document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
            double maxHeight = 0;
            var results = document.Settings.MeasuredResults;
            Assert.AreEqual(1, document.MoleculePrecursorPairs.Count());
            foreach (var pair in document.MoleculePrecursorPairs)
            {
                ChromatogramGroupInfo[] chromGroupInfo;
                Assert.IsTrue(results.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup,
                    tolerance, true, out chromGroupInfo));
                Assert.AreEqual(1, chromGroupInfo.Length, testModeStr);
                var chromGroup = chromGroupInfo[0];
                var expectedPeaks = ((asSmallMolecules == RefinementSettings.ConvertToSmallMoleculesMode.masses_only) ? 6 : 5);
                Assert.AreEqual(withDriftTimeFilter ? 3 : expectedPeaks, chromGroup.NumPeaks, testModeStr); // This will be higher if we don't filter on DT
                foreach (var tranInfo in chromGroup.TransitionPointSets)
                {
                    maxHeight = Math.Max(maxHeight, tranInfo.MaxIntensity);
                }
            }
            Assert.AreEqual(withDriftTimeFilter ? 5226 : 20075, maxHeight, 1, testModeStr);  // Without DT filtering, this will be much greater

            // now drill down for specific values
            int nPeptides = 0;
            foreach (var nodePep in document.Molecules.Where(nodePep => nodePep.Results[0] != null))
            {
                // expecting just one peptide result in this small data set
                if (nodePep.Results[0].Sum(chromInfo => chromInfo.PeakCountRatio > 0 ? 1 : 0) > 0)
                {
                    Assert.AreEqual(21.94865, (double)nodePep.GetMeasuredRetentionTime(0), .0001, testModeStr);
                    Assert.AreEqual(1.0, (double)nodePep.GetPeakCountRatio(0), 0.0001, testModeStr);
                    nPeptides++;
                }
            }
            Assert.AreEqual(1, nPeptides);

            if (withDriftTimePredictor || withDriftTimeFilter)
            {
                // Verify that the .imdb or .blib file goes out in the share zipfile
                for (int complete = 0; complete <= 1; complete++)
                {
                    var sharePath = testFilesDir.GetTestPath(complete == 1 ? "share_complete.zip" : "share_minimized.zip");
                    var share = new SrmDocumentSharing(document, docPath, sharePath, complete == 1);
                    using (var longWaitDlg = new LongWaitDlg
                    {
                        // ReSharper disable once LocalizableElement
                        Text = "unit test WatersImsTest -- sharing document",
                    })
                    {
                        longWaitDlg.PerformWork(null, 1000, share.Share);
                        Assert.IsFalse(longWaitDlg.IsCanceled);
                    }

                    var files = share.ListEntries().ToArray();
                    Assert.IsTrue(files.Contains(withDriftTimePredictor ? "scaled.imdb" : "mse-mobility.filtered-scaled.blib"));
                    // And round trip it to make sure we haven't left out any new features in minimized imdb or blib files
                    using (var longWaitDlg = new LongWaitDlg
                    {
                        // ReSharper disable once LocalizableElement
                        Text = "unit test WatersImsTest",
                    })
                    {
                        longWaitDlg.PerformWork(null, 1000, share.Extract);
                        Assert.IsFalse(longWaitDlg.IsCanceled);
                    }
                    using (TextReader reader = new StreamReader(share.DocumentPath))
                    {
                        XmlSerializer documentSerializer = new XmlSerializer(typeof(SrmDocument));
                        var document2 = (SrmDocument) documentSerializer.Deserialize(reader);
                        Assert.IsNotNull(document2);
                        var im = document.Settings.GetIonMobilities(new MsDataFilePath(path));
                        var pep = document2.Molecules.First();
                        foreach (TransitionGroupDocNode nodeGroup in pep.Children)
                        {
                            double windowDT;
                            var centerDriftTime = document.Settings.PeptideSettings.Prediction.GetDriftTime(
                                                       pep, nodeGroup, im, out windowDT);
                            Assert.AreEqual(3.86124, centerDriftTime.DriftTimeMsec(false) ?? 0, .0001, testModeStr);
                            Assert.AreEqual(0.077224865797235934, windowDT, .0001, testModeStr);
                        }
                    }
                }
            }

            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
            string cachePath = ChromatogramCache.FinalPathForName(docPath, null);
            FileEx.SafeDelete(cachePath);
        }
Example #8
        public void TestImportPeakBoundary()
        {
            // Load the SRM document and relevant files
            var testFilesDir = new TestFilesDir(TestContext, TEST_ZIP_PATH);
            bool isIntl = (TextUtil.CsvSeparator != TextUtil.SEPARATOR_CSV);
            var precursorMzs = isIntl ? _precursorMzsIntl : _precursorMzsUs;
            var peakBoundaryFileTsv = testFilesDir.GetTestPath(isIntl
                                                                   ? "PeakBoundaryTsvIntl.tsv"
                                                                   : "PeakBoundaryTsv.tsv");
            var peakBoundaryFileCsv = testFilesDir.GetTestPath(isIntl
                                                                   ? "PeakBoundaryIntl.csv"
                                                                   : "PeakBoundaryUS.csv");
            var peakBoundaryDoc = testFilesDir.GetTestPath("Chrom05.sky");
            SrmDocument doc = ResultsUtil.DeserializeDocument(peakBoundaryDoc);

            // Load an empty doc, so that we can make a change and
            // cause the .skyd to be loaded
            var docContainer = new ResultsTestDocumentContainer(null, peakBoundaryDoc);
            docContainer.SetDocument(doc, null, true);
            docContainer.AssertComplete();
            SrmDocument docResults = docContainer.Document;
            // Test Tsv import, looking at first .raw file
            DoFileImportTests(docResults, peakBoundaryFileTsv, _precursorCharge,
                _tsvMinTime1, _tsvMaxTime1, _tsvIdentified1, _tsvAreas1, _peptides, 0, precursorMzs, annote);
            // Test Tsv import, looking at second .raw file
            DoFileImportTests(docResults, peakBoundaryFileTsv, _precursorCharge,
                _tsvMinTime2, _tsvMaxTime2, _tsvIdentified2, _tsvAreas2, _peptides, 1, precursorMzs, annote);

            // Test Csv import for local format
            DoFileImportTests(docResults, peakBoundaryFileCsv, _precursorCharge,
                _csvMinTime1, _csvMaxTime1, _csvIdentified1, _csvAreas1, _peptides, 0, precursorMzs, annote);
            DoFileImportTests(docResults, peakBoundaryFileCsv, _precursorCharge,
                _csvMinTime2, _csvMaxTime2, _csvIdentified2, _csvAreas2, _peptides, 1, precursorMzs, annote);

            // Test that importing the same file twice leads to no change in the document the second time
            var docNew = ImportFileToDoc(docResults, peakBoundaryFileTsv);
            var docNewSame = ImportFileToDoc(docNew, peakBoundaryFileTsv);
            Assert.AreSame(docNew, docNewSame);
            Assert.AreNotSame(docNew, docResults);

            // Test that exporting peak boundaries and then importing them leads to no change
            string peakBoundaryExport = testFilesDir.GetTestPath("TestRoundTrip.csv");
            ReportSpec reportSpec = MakeReportSpec();
            ReportToCsv(reportSpec, docNew, peakBoundaryExport);
            var docRoundTrip = ImportFileToDoc(docNew, peakBoundaryExport);
            Assert.AreSame(docNew, docRoundTrip);

            var cult = LocalizationHelper.CurrentCulture;
            var cultI = CultureInfo.InvariantCulture;
            // 1. Empty file -
            ImportThrowsException(docResults, string.Empty,
                Resources.PeakBoundaryImporter_Import_Failed_to_read_the_first_line_of_the_file);

            // 2. No separator in first line
            ImportThrowsException(docResults, "No-valid-separators",
                TextUtil.CsvSeparator == TextUtil.SEPARATOR_CSV
                    ? Resources.PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_any_of_the_possible_separators_comma__tab_or_space_
                    : Resources.PeakBoundaryImporter_DetermineCorrectSeparator_The_first_line_does_not_contain_any_of_the_possible_separators_semicolon__tab_or_space_);

            // 3. Missing field names
            string csvSep = TextUtil.CsvSeparator.ToString(cultI);
            string spaceSep = TextUtil.SEPARATOR_SPACE.ToString(cultI);
            ImportThrowsException(docResults, string.Join(csvSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES.Take(3).ToArray()),
                Resources.PeakBoundaryImporter_Import_Failed_to_find_the_necessary_headers__0__in_the_first_line);

            string headerRow = string.Join(csvSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES.Take(6));
            string headerRowSpaced = string.Join(spaceSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES.Take(6));
            string[] values =
            {
                "TPEVDDEALEK", "Q_2012_0918_RJ_13.raw", (3.5).ToString(cult), (4.5).ToString(cult), 2.ToString(cult), 0.ToString(cult)
            };

            // 4. Mismatched field count
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRow, string.Join(spaceSep, values)),
                Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_);

            // 5. Invalid charge state
            string[] valuesBadCharge = new List<string>(values).ToArray();
            valuesBadCharge[(int) PeakBoundaryImporter.Field.charge] = (3.5).ToString(cult);
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadCharge)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_charge_state_);
            valuesBadCharge[(int) PeakBoundaryImporter.Field.charge] = TextUtil.EXCEL_NA;
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadCharge)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_charge_state_);

            // 6. Invalid start time
            string[] valuesBadTime = new List<string>(values).ToArray();
            valuesBadTime[(int) PeakBoundaryImporter.Field.start_time] = "bad";
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_);
            valuesBadTime[(int)PeakBoundaryImporter.Field.end_time] = "bad";
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_);

            // But ok if not adjusting peaks
            ImportNoException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)), true, false, false);

            // 7. Invalid end time
            valuesBadTime[(int) PeakBoundaryImporter.Field.start_time] =
                values[(int) PeakBoundaryImporter.Field.start_time];
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_);

            // But ok if not adjusting peaks
            ImportNoException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)), true, false, false);

            // #N/A in times ok
            valuesBadTime[(int)PeakBoundaryImporter.Field.start_time] =
                valuesBadTime[(int)PeakBoundaryImporter.Field.end_time] = TextUtil.EXCEL_NA;
            ImportNoException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)));
            // If only start time #N/A throws exception
            valuesBadTime[(int)PeakBoundaryImporter.Field.start_time] = (3.5).ToString(cult);
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_Missing_end_time_on_line__0_);
            // If only end time #N/A throws exception
            valuesBadTime[(int)PeakBoundaryImporter.Field.start_time] = TextUtil.EXCEL_NA;
            valuesBadTime[(int)PeakBoundaryImporter.Field.end_time] = (3.5).ToString(cult);
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_Missing_start_time_on_line__0_);
            // Empty times throws exception
            valuesBadTime[(int)PeakBoundaryImporter.Field.start_time] =
                valuesBadTime[(int)PeakBoundaryImporter.Field.end_time] = string.Empty;
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadTime)),
                Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_);

            // 8. Not imported file gets skipped
            string[] valuesBadFile = new List<string>(values).ToArray();
            valuesBadFile[(int) PeakBoundaryImporter.Field.filename] = "Q_2012_0918_RJ_15.raw";
            ImportNoException(docResults, TextUtil.LineSeparate(headerRowSpaced, string.Join(spaceSep, valuesBadFile)));

            // 9. Unknown modification state gets skipped
            string[] valuesBadSequence = new List<string>(values).ToArray();
            valuesBadSequence[(int)PeakBoundaryImporter.Field.modified_peptide] = "T[+80]PEVDDEALEK";
            ImportNoException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadSequence)));

            // 10. Unknown peptide sequence gets skipped
            valuesBadSequence[(int)PeakBoundaryImporter.Field.modified_peptide] = "PEPTIDER";
            ImportNoException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadSequence)));

            // 11. Bad value in decoy field
            string[] valuesBadDecoys = new List<string>(values).ToArray();
            valuesBadDecoys[(int)PeakBoundaryImporter.Field.is_decoy] = 3.ToString(cult);
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadDecoys)),
                Resources.PeakBoundaryImporter_Import_The_decoy_value__0__on_line__1__is_invalid__must_be_0_or_1_);

            // 12. Import with bad sample throws exception
            string[] valuesSample =
            {
                "TPEVDDEALEK", "Q_2012_0918_RJ_13.raw", (3.5).ToString(cult), (4.5).ToString(cult), 2.ToString(cult), 0.ToString(cult), "badSample"
            };
            string headerRowSample = string.Join(csvSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES);
            ImportThrowsException(docResults, TextUtil.LineSeparate(headerRowSample, string.Join(csvSep, valuesSample)),
                Resources.PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__);

            // 13. Decoys, charge state, and sample missing ok
            var valuesFourFields = valuesSample.Take(4);
            string headerFourFields = string.Join(csvSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES.Take(4));
            ImportNoException(docResults, TextUtil.LineSeparate(headerFourFields, string.Join(csvSep, valuesFourFields)));

            // 14. Valid (charge state, fileName, peptide) combo that is not in document gets skipped
            string[] valuesBadCombo = new List<string>(values).ToArray();
            valuesBadCombo[(int) PeakBoundaryImporter.Field.charge] = (5).ToString(cult);
            ImportNoException(docResults, TextUtil.LineSeparate(headerRow, string.Join(csvSep, valuesBadCombo)));

            // Note: Importing with all 7 columns is tested as part of MProphetResultsHandlerTest

            // Release open streams
            docContainer.Release();

            // Now check a file that has peptide ID's, and see that they're properly ported
            var peptideIdPath = testFilesDir.GetTestPath("Template_MS1Filtering_1118_2011_3-2min.sky");
            SrmDocument docId = ResultsUtil.DeserializeDocument(peptideIdPath);
            docId = docId.ChangeSettings(docId.Settings.ChangePeptideLibraries(libraries =>
                {
                    var lib = libraries.Libraries[0];
                    return libraries.ChangeLibrarySpecs(new LibrarySpec[]
                        {
                            new BiblioSpecLiteSpec(lib.Name, testFilesDir.GetTestPath(lib.FileNameHint))
                        });
                }));

            var docContainerId = new ResultsTestDocumentContainer(null, peptideIdPath);
            docContainerId.SetDocument(docId, null, true);
            docContainerId.AssertComplete();
            SrmDocument docResultsId = docContainerId.Document;
            var peakBoundaryFileId = testFilesDir.GetTestPath(isIntl
                                                                  ? "Template_MS1Filtering_1118_2011_3-2min_new_intl.tsv"
                                                                  : "Template_MS1Filtering_1118_2011_3-2min_new.tsv");
            DoFileImportTests(docResultsId, peakBoundaryFileId, _precursorChargeId,
                _idMinTime1, _idMaxTime1, _idIdentified1, _idAreas1, _peptidesId, 0);

            // 15. Decimal import format ok
            var headerUnimod = string.Join(csvSep, PeakBoundaryImporter.STANDARD_FIELD_NAMES.Take(4));
            var valuesUnimod = new[]
            {
                "LGGLRPES[+" + string.Format("{0:F01}", 80.0) + "]PESLTSVSR", "100803_0005b_MCF7_TiTip3.wiff", (80.5).ToString(cult), (82.0).ToString(cult)
            };
            ImportNoException(docResultsId, TextUtil.LineSeparate(headerUnimod, string.Join(csvSep, valuesUnimod)));

            // 16. Integer import format ok
            valuesUnimod[0] = "LGGLRPES[+80]PESLTSVSR";
            ImportNoException(docResultsId, TextUtil.LineSeparate(headerUnimod, string.Join(csvSep, valuesUnimod)));

            // 17. Unimod import format ok
            valuesUnimod[0] = "LGGLRPES(UniMod:21)PESLTSVSR";
            ImportNoException(docResultsId, TextUtil.LineSeparate(headerUnimod, string.Join(csvSep, valuesUnimod)));

            // 18. Strange capitalizations OK
            valuesUnimod[0] = "LGGLRPES(uniMoD:21)PESLTSVSR";
            ImportNoException(docResultsId, TextUtil.LineSeparate(headerUnimod, string.Join(csvSep, valuesUnimod)));

            // 19. Unimod with brackets OK
            valuesUnimod[0] = "LGGLRPES[uniMoD:21]PESLTSVSR";
            ImportNoException(docResultsId, TextUtil.LineSeparate(headerUnimod, string.Join(csvSep, valuesUnimod)));

            // Release open streams
            docContainerId.Release();
        }
Example #9
        public void DocLoadLibrary()
        {
            // Load the document
            var testFilesDir = new TestFilesDir(TestContext, TEST_ZIP_PATH);
            string loadPath = testFilesDir.GetTestPath("DocWithLibrary.sky");
            string libraryPath = testFilesDir.GetTestPath("Yeast_MRMer_min.blib");
            var doc = ResultsUtil.DeserializeDocument(loadPath);
            doc = doc.ChangeSettings(doc.Settings.ChangePeptideLibraries(
                lib => lib.ChangeLibrarySpecs(new[] {new BiblioSpecLiteSpec(lib.Libraries[0].Name, libraryPath),})));

            // Cause library load and subsequent document update
            var docContainer = new ResultsTestDocumentContainer(null, loadPath);
            docContainer.SetDocument(doc, null, true);
            docContainer.AssertComplete();

            // Check that library info on peptides and transitions were not recalculated
            // during document load
            var docLoaded = docContainer.Document;
            Assert.AreEqual(6, docLoaded.PeptideCount);
            Assert.AreEqual(36, docLoaded.PeptideTransitionCount);
            var transitions = docLoaded.PeptideTransitions.ToArray();
            Assert.AreEqual("y12", transitions[0].FragmentIonName);
            Assert.AreEqual(1, transitions[0].LibInfo.Rank);
            Assert.AreEqual("y12", transitions[3].FragmentIonName);
            Assert.AreEqual(1, transitions[3].LibInfo.Rank);
            Assert.AreEqual("b3", transitions[14].FragmentIonName);
            Assert.AreEqual(2, transitions[14].LibInfo.Rank);
            Assert.AreEqual("b3", transitions[17].FragmentIonName);
            Assert.AreEqual(2, transitions[17].LibInfo.Rank);

            var docLibraryChanged = docLoaded.ChangeSettings(docLoaded.Settings.ChangePeptideLibraries(
                lib => lib.ChangeLibraries(new LibrarySpec[0], new Library[0])
                          .ChangeLibrarySpecs(new[] {new BiblioSpecLiteSpec("Test reload", libraryPath),})));
            docContainer.SetDocument(docLibraryChanged, docLoaded, true);
            var docChangedLoaded = docContainer.Document;

            // Check that document changed to be in synch with the library
            Assert.AreEqual(3, docChangedLoaded.PeptideCount);
            Assert.AreEqual(18, docChangedLoaded.PeptideTransitionCount);
            var transitionsNew = docChangedLoaded.PeptideTransitions.ToArray();
            Assert.AreEqual("y7", transitionsNew[0].FragmentIonName);
            Assert.AreEqual(1, transitionsNew[0].LibInfo.Rank);
            Assert.AreEqual("y7", transitionsNew[3].FragmentIonName);
            Assert.AreEqual(1, transitionsNew[3].LibInfo.Rank);
            Assert.AreEqual("y6", transitionsNew[8].FragmentIonName);
            Assert.AreEqual(2, transitionsNew[8].LibInfo.Rank);
            Assert.AreEqual("y6", transitionsNew[11].FragmentIonName);
            Assert.AreEqual(2, transitionsNew[11].LibInfo.Rank);
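            // Transitions that survived the library change should be the same objects as before,
            // just shifted to new indices in the smaller document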
            for (int i = 1; i < 3; i++)
            {
                Assert.AreSame(transitions[i], transitionsNew[i]);
                Assert.AreSame(transitions[i+3], transitionsNew[i+3]);
            }
            for (int i = 12; i < 14; i++)
            {
                Assert.AreSame(transitions[i], transitionsNew[i-6]);
                Assert.AreSame(transitions[i+3], transitionsNew[i-3]);
            }
            for (int i = 24; i < 27; i++)
            {
                Assert.AreSame(transitions[i], transitionsNew[i-12]);
                Assert.AreSame(transitions[i+3], transitionsNew[i-9]);
            }

            // Release open streams
            docContainer.Release();
        }
Example #10
        public void WatersMultiReplicateTest()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            string docPath;
            SrmDocument docOriginal = InitWatersDocument(testFilesDir, out docPath);
            SrmDocument doc = docOriginal;
            var docContainer = new ResultsTestDocumentContainer(doc, docPath);
            string extRaw = ExtensionTestContext.ExtWatersRaw;

            string[] replicatePaths =
            {
                testFilesDir.GetTestPath("160109_Mix1_calcurve_070.mzML"),
                testFilesDir.GetTestPath("160109_Mix1_calcurve_073.mzML"),
                testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw),
                testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")
            };
            // Count peaks where higher concentration replicates show less area
            int outOfOrder = 0;
            foreach (string path in replicatePaths)
            {
                var listChromatograms = new List<ChromatogramSet>();
                if (doc.Settings.HasResults)
                    listChromatograms.AddRange(doc.Settings.MeasuredResults.Chromatograms);

                string name = Path.GetFileNameWithoutExtension(path);
                if (name != null)
                    name = name.Substring(name.Length - 12);
                listChromatograms.Add(new ChromatogramSet(name, new[] {MsDataFileUri.Parse(path)}));
                int len = listChromatograms.Count;

                var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
                // Adding unloaded results should add a new null result.
                foreach (var nodeTran in docResults.PeptideTransitions)
                {
                    Assert.IsTrue(nodeTran.HasResults);
                    Assert.AreEqual(listChromatograms.Count, nodeTran.Results.Count);
                    Assert.IsNull(nodeTran.Results[len - 1]);
                }

                Assert.IsTrue(docContainer.SetDocument(docResults, doc, true),
                    string.Format("Failed adding results for {0}.", path));
                docContainer.AssertComplete();
                docResults = docContainer.Document;

                Assert.IsTrue(docResults.Settings.MeasuredResults.IsLoaded);

                var transOld = doc.PeptideTransitions.ToArray();
                var transNew = docResults.PeptideTransitions.ToArray();
                Assert.AreEqual(transOld.Length, transNew.Length);
                int countPeaks = 0;
                for (int i = 0; i < transNew.Length; i++)
                {
                    // Make sure new peak was added to each transition
                    var nodeTranNew = transNew[i];
                    Assert.IsTrue(nodeTranNew.HasResults);
                    Assert.AreEqual(len, nodeTranNew.Results.Count);
                    var chromInfo = nodeTranNew.Results[len - 1][0];
                    Assert.IsNotNull(chromInfo);

                    if (!chromInfo.IsEmpty)
                        countPeaks++;

                    // Make sure previously loaded peaks did not change
                    for (int j = 0; j < len - 1; j++)
                    {
                        var chromInfoPrevious = transOld[i].Results[j][0];
                        Assert.AreSame(chromInfoPrevious, nodeTranNew.Results[j][0]);
                        if ((chromInfo.IsEmpty && !chromInfoPrevious.IsEmpty) ||
                                (!chromInfo.IsEmpty && chromInfoPrevious.Area >= chromInfo.Area))
                            outOfOrder++;
                    }
                }
                // Allow 2 missed peaks
                Assert.IsTrue(countPeaks >= transNew.Length - (TestSmallMolecules ? 1 : 0) - 2);

                // Check results calculations for peptides and groups
                foreach (var nodePep in docResults.Peptides)
                {
                    Assert.AreEqual(len, nodePep.Results.Count);
                    Assert.IsTrue(nodePep.HasResults);
                    var chromInfo = nodePep.Results[len - 1][0];
                    Assert.AreEqual(1, nodePep.Children.Count);
                    var nodeGroup = (TransitionGroupDocNode) nodePep.Children[0];
                    Assert.IsTrue(nodeGroup.HasResults);
                    Assert.AreEqual(len, nodeGroup.Results.Count);
                    var chromInfoGroup = nodeGroup.Results[len - 1][0];
                    Assert.IsTrue(chromInfoGroup.PeakCountRatio >= 0.5);
                    Assert.IsTrue(chromInfoGroup.RetentionTime.HasValue);
                    Assert.IsTrue(chromInfoGroup.Area.HasValue && chromInfoGroup.Area > 290);

                    Assert.AreEqual(chromInfo.RetentionTime, chromInfoGroup.RetentionTime);
                    Assert.AreEqual(chromInfo.PeakCountRatio, chromInfoGroup.PeakCountRatio);
                }

                doc = docResults;
            }

            Assert.AreEqual(13, outOfOrder, 1);

            // Remove the original data
            foreach (string path in replicatePaths)
            {
                if (File.Exists(path))
                    FileEx.SafeDelete(path);
                else
                    DirectoryEx.SafeDelete(path);
            }
            FileEx.SafeDelete(docPath);

            // Save the document
            string xmlSaved = null;
            var docPersisted = AssertEx.RoundTrip(doc, ref xmlSaved);
            Assert.IsTrue(!docPersisted.Settings.MeasuredResults.IsLoaded);
            // Make sure the persisted document round-trips.
            // The original doesn't because of changing precision in the results info.
            AssertEx.Serializable(docPersisted, AssertEx.DocumentCloned);

            // Make sure the loaded document has reasonable results info
            // before the cache files are loaded
            for (int i = 0; i < doc.Children.Count; i++)
            {
                PeptideGroupDocNode nodePepGroup1 = (PeptideGroupDocNode) doc.Children[i];
                if (TestSmallMolecules && nodePepGroup1.Name.Equals(SrmDocument.TestingNonProteomicMoleculeGroupName))
                    continue;
                PeptideGroupDocNode nodePepGroup2 = (PeptideGroupDocNode) docPersisted.Children[i];
                Assert.AreNotSame(nodePepGroup1, nodePepGroup2);
                for (int j = 0; j < nodePepGroup1.Children.Count; j++)
                {
                    PeptideDocNode nodePep1 = (PeptideDocNode) nodePepGroup1.Children[j];
                    PeptideDocNode nodePep2 = (PeptideDocNode) nodePepGroup2.Children[j];
                    Assert.AreNotSame(nodePep1, nodePep2);
                    Assert.AreEqual(nodePep1.Results.Count, nodePep2.Results.Count);
                    for (int k = 0; k < nodePep1.Results.Count; k++)
                        Assert.AreEqual(nodePep1.Results[k][0].PeakCountRatio, nodePep2.Results[k][0].PeakCountRatio);
                    for (int k = 0; k < nodePep1.Children.Count; k++)
                    {
                        TransitionGroupDocNode nodeGroup1 = (TransitionGroupDocNode) nodePep1.Children[k];
                        TransitionGroupDocNode nodeGroup2 = (TransitionGroupDocNode) nodePep2.Children[k];
                        Assert.AreNotSame(nodeGroup1, nodeGroup2);
                        Assert.AreEqual(nodeGroup1.Results.Count, nodeGroup2.Results.Count);
                        for (int l = 0; l < nodeGroup1.Results.Count; l++)
                            Assert.AreEqual(nodeGroup1.Results[l][0].PeakCountRatio,
                                            nodeGroup2.Results[l][0].PeakCountRatio);
                        for (int l = 0; l < nodeGroup1.Children.Count; l++)
                        {
                            TransitionDocNode nodeTran1 = (TransitionDocNode) nodeGroup1.Children[l];
                            TransitionDocNode nodeTran2 = (TransitionDocNode) nodeGroup2.Children[l];
                            Assert.AreNotSame(nodeTran1, nodeTran2);
                            Assert.AreEqual(nodeTran1.Results.Count, nodeTran2.Results.Count);
                            for (int m = 0; m < nodeTran1.Results.Count; m++)
                            {
                                if (nodeTran1.Results[m] != null && nodeTran2.Results[m] != null)
                                    Assert.AreEqual(nodeTran1.Results[m][0].IsEmpty, nodeTran2.Results[m][0].IsEmpty);
                                else
                                    Assert.AreEqual(nodeTran1.Results[m], nodeTran2.Results[m]); // both null
                            }
                        }
                    }
                }
            }

            // Reload data from .skyd files
            Assert.IsTrue(docContainer.SetDocument(docPersisted, doc, true));
            docContainer.AssertComplete();
            doc = docContainer.Document;

            var results = doc.Settings.MeasuredResults;
            const float tolerance = (float) TransitionInstrument.DEFAULT_MZ_MATCH_TOLERANCE;
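            // Every precursor's chromatograms should load from the reloaded cache in every replicate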
            foreach (var pair in doc.PeptidePrecursorPairs)
            {
                foreach (var chromSet in results.Chromatograms)
                {
                    ChromatogramGroupInfo[] chromGroupInfo;
                    Assert.IsTrue(results.TryLoadChromatogram(chromSet, pair.NodePep, pair.NodeGroup,
                                                              tolerance, true, out chromGroupInfo));
                }
            }

            // The single final cache path should be open now
            var listCachePaths = new List<string>(doc.Settings.MeasuredResults.CachePaths);
            // Should only have one cache file at this point
            Assert.AreEqual(1, listCachePaths.Count);
            foreach (var cachePath in listCachePaths)
            {
                // Attempting to delete should throw
                string path = cachePath;
                AssertEx.ThrowsException<IOException>(() => FileEx.SafeDelete(path));
            }

            // Release the .skyd file
            docContainer.Release();
            foreach (var cachePath in listCachePaths)
            {
                // Cache files should be closed now, and delete successfully.
                FileEx.SafeDelete(cachePath);
            }
            testFilesDir.Dispose();
        }
Example #11
        public void TestMProphetResultsHandler()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            var documentFile = testFilesDir.GetTestPath("MProphetGold-trained.sky");
            SrmDocument doc = ResultsUtil.DeserializeDocument(documentFile);
            // Load libraries
            doc = doc.ChangeSettings(doc.Settings.ChangePeptideLibraries(libraries =>
                {
                    var lib = libraries.Libraries[0];
                    return libraries.ChangeLibrarySpecs(new LibrarySpec[]
                        {
                            new BiblioSpecLiteSpec(lib.Name, testFilesDir.GetTestPath(lib.FileNameHint))
                        });
                }));
            // Load an empty doc, so that we can make a change and
            // cause the .skyd to be loaded
            var docContainer = new ResultsTestDocumentContainer(null, documentFile);
            docContainer.SetDocument(doc, null, true);
            docContainer.AssertComplete();
            SrmDocument docOriginal = docContainer.Document;
            var peakScoringModel = docOriginal.Settings.PeptideSettings.Integration.PeakScoringModel;
            var resultsHandler = new MProphetResultsHandler(docOriginal, peakScoringModel) { QValueCutoff = Q_CUTOFF };

            // 1. Reintegrate and export report produces expected file
            resultsHandler.ScoreFeatures();
            var docNew = resultsHandler.ChangePeaks();
            var reportSpec = MakeReportSpec();
            if (IsSaveAll)
            {
                // For regenerating expected files if things change
                ReportToCsv(reportSpec, docNew, testFilesDir.GetTestPath(REPORT_EXPECTED), CultureInfo.GetCultureInfo("en-US"));
                ReportToCsv(reportSpec, docNew, testFilesDir.GetTestPathIntl(REPORT_EXPECTED), CultureInfo.GetCultureInfo("fr-FR"));
            }
            string docNewActual = testFilesDir.GetTestPath(REPORT_ACTUAL);
            string docNewExpected = testFilesDir.GetTestPathLocale(REPORT_EXPECTED);
            ReportToCsv(reportSpec, docNew, docNewActual, CultureInfo.CurrentCulture);
            AssertEx.FileEquals(docNewExpected, docNewActual);

            // 2. Reintegrating again gives no change in document
            var resultsHandlerRepeat = new MProphetResultsHandler(docNew, peakScoringModel) { QValueCutoff = Q_CUTOFF };
            resultsHandlerRepeat.ScoreFeatures();
            var docRepeat = resultsHandlerRepeat.ChangePeaks();
            Assert.AreSame(docRepeat, docNew);
            Assert.AreNotSame(docOriginal, docNew);

            // 3. Export mProphet results gives expected file
            var calcs = peakScoringModel.PeakFeatureCalculators;
            var mProphetActual = testFilesDir.GetTestPath(MPROPHET_ACTUAL);
            var mProphetExpected = testFilesDir.GetTestPathLocale(MPROPHET_EXPECTED);
            if (IsSaveAll)
            {
                // For regenerating files
                SaveMProphetFeatures(resultsHandler, testFilesDir.GetTestPath(MPROPHET_EXPECTED), CultureInfo.GetCultureInfo("en-US"), calcs);
                SaveMProphetFeatures(resultsHandler, testFilesDir.GetTestPathIntl(MPROPHET_EXPECTED), CultureInfo.GetCultureInfo("fr-FR"), calcs);
            }
            SaveMProphetFeatures(resultsHandler, mProphetActual, CultureInfo.CurrentCulture, calcs);
            AssertEx.FileEquals(mProphetExpected, mProphetActual);

            // 4. Export mProphet -> Import Peak Boundaries leads to same result as reintegrate
            var resultsHandlerQAll = new MProphetResultsHandler(docOriginal, peakScoringModel) {QValueCutoff = 1.0};
            resultsHandlerQAll.ScoreFeatures();
            var docNewQAll = resultsHandlerQAll.ChangePeaks();
            var peakBoundaryImporter = new PeakBoundaryImporter(docNewQAll);
            long lineCount = Helpers.CountLinesInFile(mProphetActual);
            peakBoundaryImporter.Import(mProphetActual, null, lineCount);
            var docImport = peakBoundaryImporter.Document;
            // Serialized documents are easier to debug when something is different
            string strDocNew = SerializeDoc(docNewQAll);
            string strDocImport = SerializeDoc(docImport);
            AssertEx.NoDiff(strDocNew, strDocImport);
            Assert.AreSame(docNewQAll, docImport);

            // 5. Reintegration with a q value cutoff below 0 sets all peaks to null
            var handlerAllNull = new MProphetResultsHandler(docOriginal, peakScoringModel) {QValueCutoff = -0.001};
            handlerAllNull.ScoreFeatures();
            var docNull = handlerAllNull.ChangePeaks();
            foreach (var transitionNode in docNull.PeptideTransitions)
                foreach(var chromInfo in transitionNode.ChromInfos)
                    Assert.IsTrue(chromInfo.IsEmpty || transitionNode.IsDecoy);

            // 6. Reintegration sets the example peak to null at the q=0.005 cutoff, but restores it to a non-null peak at q=0.20
            const int groupNum = 11;
            var midQNode = resultsHandler.Document.PeptideTransitionGroups.ToList()[groupNum];
            foreach (var chromInfo in midQNode.Transitions.SelectMany(transition => transition.ChromInfos))
                Assert.IsTrue(chromInfo.IsEmpty);
            resultsHandler.QValueCutoff = Q_CUTOFF_HIGH;
            resultsHandler.ChangePeaks();
            var midQNodeNew = resultsHandler.Document.PeptideTransitionGroups.ToList()[groupNum];
            foreach (var chromInfo in midQNodeNew.Transitions.SelectMany(transition => transition.ChromInfos))
                Assert.IsFalse(chromInfo.IsEmpty);

            // 7. Labeled peptide pairs still have matching peaks
            foreach (var peptideNode in resultsHandler.Document.Peptides)
            {
                Assert.AreEqual(2, peptideNode.TransitionGroupCount);
                var groupList = peptideNode.TransitionGroups.ToList();
                var lightGroup = groupList[0];
                var heavyGroup = groupList[1];
                var lightChromInfo = lightGroup.ChromInfos.ToList()[0];
                var heavyChromInfo = heavyGroup.ChromInfos.ToList()[0];
                Assert.AreEqual(lightChromInfo.StartRetentionTime, heavyChromInfo.StartRetentionTime);
                Assert.AreEqual(lightChromInfo.EndRetentionTime, heavyChromInfo.EndRetentionTime);
                Assert.AreEqual(lightChromInfo.RetentionTime, heavyChromInfo.RetentionTime);
            }

            // 8. Verify that chosen peaks and q values are the same as those in mProphet paper:
            // http://www.nature.com/nmeth/journal/v8/n5/full/nmeth.1584.html#/supplementary-information
            // TODO: Grab this data from the mProphet paper

            // Release open streams
            docContainer.Release();
        }
Example #12
0
        public void DoThermoRatioTest(RefinementSettings.ConvertToSmallMoleculesMode smallMoleculesTestMode)
        {
            TestSmallMolecules = false;  // We do this explicitly

            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            string docPath;
            SrmDocument doc = InitThermoDocument(testFilesDir, out docPath);
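            // Make the light label type the internal standard, so ratios are computed for heavy precursors relative to light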
            SrmSettings settings = doc.Settings.ChangePeptideModifications(mods =>
                mods.ChangeInternalStandardTypes(new[]{IsotopeLabelType.light}));
            doc = doc.ChangeSettings(settings);
            if (smallMoleculesTestMode != RefinementSettings.ConvertToSmallMoleculesMode.none)
            {
                var docOrig = doc;
                var refine = new RefinementSettings();
                doc = refine.ConvertToSmallMolecules(doc, smallMoleculesTestMode);
                // This is our first example of a converted label doc - check roundtripping
                AssertEx.ConvertedSmallMoleculeDocumentIsSimilar(docOrig, doc);
                AssertEx.Serializable(doc);
            }
            var docContainer = new ResultsTestDocumentContainer(doc, docPath);
            string extRaw = ExtensionTestContext.ExtThermoRaw;
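            // Queue two Thermo raw replicates (rep03 and rep05) for import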
            var listChromatograms = new List<ChromatogramSet>
                                        {
                                            new ChromatogramSet("rep03", new[]
                                                                             {
                                                                                 MsDataFileUri.Parse(testFilesDir.GetTestPath(
                                                                                     "Site20_STUDY9P_PHASEII_QC_03" + extRaw))
                                                                             }),
                                            new ChromatogramSet("rep05", new[]
                                                                             {
                                                                                 MsDataFileUri.Parse(testFilesDir.GetTestPath(
                                                                                     "Site20_STUDY9P_PHASEII_QC_05" + extRaw))
                                                                             })
                                        };
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            docResults = docContainer.Document;
            // Make sure all groups have at least 5 transitions (of 6) with ratios
            int ratioGroupMissingCount = 0;
            foreach (var nodeGroup in docResults.MoleculeTransitionGroups)
            {
                if (nodeGroup.TransitionGroup.LabelType.IsLight)
                {
                    foreach (var result in nodeGroup.Results)
                        Assert.IsFalse(result[0].Ratio.HasValue, "Light group found with a ratio");
                    foreach (TransitionDocNode nodeTran in nodeGroup.Children)
                    {
                        foreach (var resultTran in nodeTran.Results)
                            Assert.IsFalse(resultTran[0].Ratio.HasValue, "Light transition found with a ratio");
                    }
                }
                else
                {
                    bool missingRatio = false;
                    foreach (ChromInfoList<TransitionGroupChromInfo> chromInfoList in nodeGroup.Results)
                    {
                        var ratioHeavy = chromInfoList[0].Ratio;
                        if (!ratioHeavy.HasValue)
                            missingRatio = true;
                    }
                    int ratioCount1 = 0;
                    int ratioCount2 = 0;
                    foreach (TransitionDocNode nodeTranHeavy in nodeGroup.Children)
                    {
                        float? ratioHeavy = nodeTranHeavy.Results[0][0].Ratio;
                        if (ratioHeavy.HasValue)
                        {
                            Assert.IsFalse(float.IsNaN(ratioHeavy.Value) || float.IsInfinity(ratioHeavy.Value));
                            ratioCount1++;
                        }
                        ratioHeavy = nodeTranHeavy.Results[1][0].Ratio;
                        if (ratioHeavy.HasValue)
                        {
                            Assert.IsFalse(float.IsNaN(ratioHeavy.Value) || float.IsInfinity(ratioHeavy.Value));
                            ratioCount2++;
                        }
                    }
                    Assert.AreEqual(3, ratioCount1);
                    if (ratioCount2 < 2)
                        ratioGroupMissingCount++;
                    else
                        Assert.IsFalse(missingRatio, "Precursor missing ratio when transitions have ratios");
                }
            }
            // 3 groups with fewer than 2 transition ratios
            Assert.AreEqual(3, ratioGroupMissingCount);

            // Remove the first light transition, checking that this removes the ratio
            // from the corresponding heavy transition, but not the entire group, until
            // after all light transitions have been removed.
            IdentityPath pathFirstPep = docResults.GetPathTo((int) SrmDocument.Level.Molecules, 0);
            var nodePep = (PeptideDocNode) docResults.FindNode(pathFirstPep);
            Assert.AreEqual(2, nodePep.Children.Count);
            var nodeGroupLight = (TransitionGroupDocNode) nodePep.Children[0];
            IdentityPath pathGroupLight = new IdentityPath(pathFirstPep, nodeGroupLight.TransitionGroup);
            Assert.IsNull(nodeGroupLight.Results[0][0].Ratio, "Light group has ratio");
            var nodeGroupHeavy = (TransitionGroupDocNode) nodePep.Children[1];
            IdentityPath pathGroupHeavy = new IdentityPath(pathFirstPep, nodeGroupHeavy.TransitionGroup);
            float? ratioStart = nodeGroupHeavy.Results[0][0].Ratio;
            Assert.IsTrue(ratioStart.HasValue, "No starting heavy group ratio");
            var expectedValues = new[] { 1.403414, 1.38697791, 1.34598482 };
            for (int i = 0; i < 3; i++)
            {
                var pathLight = docResults.GetPathTo((int) SrmDocument.Level.Transitions, 0);
                var pathHeavy = docResults.GetPathTo((int) SrmDocument.Level.Transitions, 3);
                TransitionDocNode nodeTran = (TransitionDocNode) docResults.FindNode(pathHeavy);
                float? ratioTran = nodeTran.Results[0][0].Ratio;
                Assert.IsTrue(ratioTran.HasValue, "Expected transition ratio not found");
                Assert.AreEqual(ratioTran.Value, expectedValues[i], 1.0e-5);
                docResults = (SrmDocument) docResults.RemoveChild(pathLight.Parent, docResults.FindNode(pathLight));
                nodeTran = (TransitionDocNode) docResults.FindNode(pathHeavy);
                Assert.IsFalse(nodeTran.Results[0][0].Ratio.HasValue, "Unexpected transition ratio found");
                Assert.AreEqual(pathGroupHeavy, pathHeavy.Parent, "Transition found outside expected group");
                // nodePep = (PeptideDocNode) docResults.FindNode(pathFirstPep);
                nodeGroupHeavy = (TransitionGroupDocNode) docResults.FindNode(pathGroupHeavy);
                // Assert.AreEqual(nodePep.Results[0][0].RatioToStandard, nodeGroupHeavy.Results[0][0].Ratio,
                //                 "Peptide and group ratios not equal");
                if (i < 2)
                {
                    float? ratioGroup = nodeGroupHeavy.Results[0][0].Ratio;
                    Assert.IsTrue(ratioGroup.HasValue, "Group ratio removed with transition ratios");
                    Assert.AreEqual(ratioStart.Value, ratioGroup.Value, 0.1,
                                    "Unexpected group ratio change by more than 0.1");
                }
                else
                {
                    Assert.IsFalse(nodeGroupHeavy.Results[0][0].Ratio.HasValue,
                                   "Group ratio still present with no transition ratios");
                }
            }
            bool asSmallMolecules = (smallMoleculesTestMode != RefinementSettings.ConvertToSmallMoleculesMode.none);
            if (!asSmallMolecules) // GetTransitions() doesn't work the same way for small molecules - it only lists existing ones
            {
                bool firstAdd = true;
                var nodeGroupLightOrig = (TransitionGroupDocNode) doc.FindNode(pathGroupLight);
                DocNode[] lightChildrenOrig = nodeGroupLightOrig.Children.ToArray();
                foreach (var nodeTran in nodeGroupLightOrig.GetTransitions(docResults.Settings,
                    null, nodeGroupLightOrig.PrecursorMz, null, null, null, false))
                {
                    var transition = nodeTran.Transition;
                    if (!firstAdd && lightChildrenOrig.IndexOf(node => Equals(node.Id, transition)) == -1)
                        continue;
                    // Add the first transition, and then the original transitions
                    docResults = (SrmDocument) docResults.Add(pathGroupLight, nodeTran);
                    nodeGroupHeavy = (TransitionGroupDocNode) docResults.FindNode(pathGroupHeavy);
                    if (firstAdd)
                        Assert.IsNull(nodeGroupHeavy.Results[0][0].Ratio, "Unexpected heavy ratio found");
                    else
                        Assert.IsNotNull(nodeGroupHeavy.Results[0][0].Ratio,
                            "Heavy ratio null after adding light children");
                    firstAdd = false;
                }
                Assert.AreEqual(ratioStart, nodeGroupHeavy.Results[0][0].Ratio);
            }
            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }
Example #13
0
        private static void ValidateRelativeRT(RelativeRT relativeRT, SrmDocument doc, string docPath, List<ChromatogramSet> listChromatograms)
        {
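            // Delete any existing chromatogram cache so the data is re-extracted with the new RelativeRT applied to the heavy modifications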
            FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));

            SrmSettings settings = doc.Settings.ChangePeptideModifications(mods =>
                mods.ChangeHeavyModifications(
                    mods.HeavyModifications.Select(m => m.ChangeRelativeRT(relativeRT)).ToArray()));
            var docMods = doc.ChangeSettings(settings);
            var docResults = docMods.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            var docContainer = new ResultsTestDocumentContainer(docMods, docPath);
            Assert.IsTrue(docContainer.SetDocument(docResults, docMods, true));
            docContainer.AssertComplete();
            docContainer.Release();
        }
Example #14
0
        public void ThermoMixedPeptidesTest()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            string docPath;
            SrmDocument docMixed = InitMixedDocument(testFilesDir, out docPath);
            FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));
            SrmDocument docUnmixed = InitUnmixedDocument(testFilesDir, out docPath);
            FileEx.SafeDelete(Path.ChangeExtension(docPath, ChromatogramCache.EXT));
            string extRaw = ExtensionTestContext.ExtThermoRaw;
            var listChromatograms = new List<ChromatogramSet>
                                        {
                                            new ChromatogramSet("rep03", new[]
                                                                             {
                                                                                 MsDataFileUri.Parse(testFilesDir.GetTestPath(
                                                                                     "Site20_STUDY9P_PHASEII_QC_03" + extRaw))
                                                                             }),
                                            new ChromatogramSet("rep05", new[]
                                                                             {
                                                                                 MsDataFileUri.Parse(testFilesDir.GetTestPath(
                                                                                     "Site20_STUDY9P_PHASEII_QC_05" + extRaw))
                                                                             })
                                        };
            var docResults = docMixed.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            var docContainerMixed = new ResultsTestDocumentContainer(docMixed, docPath);
            Assert.IsTrue(docContainerMixed.SetDocument(docResults, docMixed, true));
            docContainerMixed.AssertComplete();
            docMixed = docContainerMixed.Document;
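            // Copy the unmixed peptides into an emptied copy of the mixed document, then verify it matches a direct import of the unmixed document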
            SrmDocument docMixedUnmixed = (SrmDocument) docMixed.ChangeChildren(new DocNode[0]);
            IdentityPath tempPath;
            docMixedUnmixed = docMixedUnmixed.AddPeptideGroups(docUnmixed.PeptideGroups, true, IdentityPath.ROOT,
                out tempPath, out tempPath);

            docResults = docUnmixed.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            var docContainerUnmixed = new ResultsTestDocumentContainer(docUnmixed, docPath);
            Assert.IsTrue(docContainerUnmixed.SetDocument(docResults, docUnmixed, true));
            docContainerUnmixed.AssertComplete();
            docUnmixed = docContainerUnmixed.Document;
            AssertEx.DocumentCloned(docMixedUnmixed, docUnmixed);

            docContainerMixed.Release();
            docContainerUnmixed.Release();
        }
Example #15
0
        public void DoAgilentMseChromatogramTest(RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules)
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            TestSmallMolecules = false; // We have an explicit test for that here

            string docPath;
            SrmDocument document = InitAgilentMseDocument(testFilesDir, out docPath);
            if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)
            {
                var refine = new RefinementSettings();
                document = refine.ConvertToSmallMolecules(document, asSmallMolecules);
            }
            var docContainer = new ResultsTestDocumentContainer(document, docPath);
            var doc = docContainer.Document;
            var listChromatograms = new List<ChromatogramSet>();
            var path = MsDataFileUri.Parse(@"AgilentMse\BSA-AI-0-10-25-41_first_100_scans.mzML");
            listChromatograms.Add(AssertResult.FindChromatogramSet(doc, path) ??
                    new ChromatogramSet(path.GetFileName().Replace('.', '_'), new[] { path }));
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            document = docContainer.Document;

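            // Every molecule precursor should produce exactly one chromatogram group from the MSe data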
            float tolerance = (float)document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
            var results = document.Settings.MeasuredResults;
            foreach (var pair in document.MoleculePrecursorPairs)
            {
                ChromatogramGroupInfo[] chromGroupInfo;
                Assert.IsTrue(results.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup,
                    tolerance, true, out chromGroupInfo));
                Assert.AreEqual(1, chromGroupInfo.Length);
            }

            // now drill down for specific values
            int nPeptides = 0;
            foreach (var nodePep in document.Molecules.Where(nodePep => nodePep.Results[0] != null))
            {
                // expecting just one peptide result in this small data set
                if (nodePep.Results[0].Sum(chromInfo => chromInfo.PeakCountRatio > 0 ? 1 : 0) > 0)
                {
                    Assert.AreEqual(0.2462, (double)nodePep.GetMeasuredRetentionTime(0), .0001, "averaged retention time differs in node "+nodePep.RawTextId);
                    Assert.AreEqual(0.3333, (double)nodePep.GetPeakCountRatio(0), 0.0001);
                    nPeptides++;
                }
            }
            Assert.AreEqual(1, nPeptides);
            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }
Example #16
0
        public void WatersCacheTest()
        {
            // First test transition from per-replicate caching strategy to
            // single cache per document strategy.
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            // Open the replicate document, and let it reload the data from mzML
            // showing the document can find data files by name in its own directory,
            // since the document paths will not match those on disk.
            string docPath;
            var doc = InitWatersDocument(testFilesDir, out docPath);
            var docReload = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_rep.sky", out docPath);
            var docContainer = new ResultsTestDocumentContainer(doc, docPath);
            var streamManager = docContainer.ChromatogramManager.StreamManager;
            Assert.IsTrue(docContainer.SetDocument(docReload, doc, true));
            docContainer.AssertComplete();
            docReload = docContainer.Document;
            // Release file handles to cache files created during load
            Assert.IsTrue(docContainer.SetDocument(doc, docReload));
            // Delete the cache
            string cachePath = Path.ChangeExtension(docPath, ".skyd");
            FileEx.SafeDelete(cachePath);

            // Then try using cached replicate files
            // Move per-replicate cache files into place
            var replicateCacheNames = new[]
                {
                    "160109_Mix1_calcurve_rep_calcurve_070.skyd",
                    "160109_Mix1_calcurve_rep_calcurve_073.skyd"
                };
            GetCacheFiles(testFilesDir, replicateCacheNames);
            // Delete the files these cache
            DeleteFiles(testFilesDir,
                new[]
                {
                    "160109_Mix1_calcurve_070.mzML",
                    "160109_Mix1_calcurve_073.mzML",
                });
            var docCached = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_rep.sky", out docPath);
            Assert.IsTrue(docContainer.SetDocument(docCached, doc, true));
            docContainer.AssertComplete();
            docCached = docContainer.Document;

            // The document with data from the .mzML files should be the same as
            // the one loaded from the .skyd files.
            // Unfortunately, this is too hard to maintain when cache changes are made.
            // AssertEx.Cloned(docCached, docReload);

            // The one cache should be present
            Assert.IsTrue(File.Exists(cachePath));
            // And the replicate cache files should have been removed
            foreach (var cacheName in replicateCacheNames)
                Assert.IsFalse(File.Exists(testFilesDir.GetTestPath(cacheName)));

            // Save the cache file time stamp
            var cacheInfo = new FileInfo(cachePath);
            long cacheSize = cacheInfo.Length;

            // Adding files already in the document should have no impact on the cache.
            string extRaw = ExtensionTestContext.ExtWatersRaw;
            var listChromatograms = new List<ChromatogramSet>(docCached.Settings.MeasuredResults.Chromatograms)
                {
                    new ChromatogramSet("extra1",
                                        new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw)) }),
                    new ChromatogramSet("extra2",
                                        new[] { MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")) })
                };

            // Adding a new file should cause the cache to grow.
            var settings = docCached.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
            var docGrow = docCached.ChangeMeasuredResults(settings);
            Assert.IsTrue(docContainer.SetDocument(docGrow, docCached, true));
            docContainer.AssertComplete();
            docGrow = docContainer.Document;

            cacheInfo = new FileInfo(cachePath);
            Assert.IsTrue(cacheSize < cacheInfo.Length);

            cacheSize = cacheInfo.Length;
            var writeTime = cacheInfo.LastWriteTime;

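            // A replicate that only reuses files already in the cache should leave the cache file unchanged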
            listChromatograms.Add(
                    new ChromatogramSet("double",
                        new[]
                            {
                                testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw),
                                testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML")
                            }));

            settings = docGrow.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
            var docNoCacheChange1 = docGrow.ChangeMeasuredResults(settings);
            Assert.IsTrue(docContainer.SetDocument(docNoCacheChange1, docGrow, true));
            docContainer.AssertComplete();
            docNoCacheChange1 = docContainer.Document;

            Assert.AreEqual(writeTime, File.GetLastWriteTime(cachePath));

            // Removing files should have no impact, until optimized
            listChromatograms.RemoveRange(listChromatograms.Count - 2, 2);
            listChromatograms.RemoveAt(1);

            settings = docNoCacheChange1.Settings.MeasuredResults.ChangeChromatograms(listChromatograms);
            var docNoCacheChange2 = docNoCacheChange1.ChangeMeasuredResults(settings);
            Assert.IsTrue(docContainer.SetDocument(docNoCacheChange2, docNoCacheChange1, true));
            docContainer.AssertComplete();
            docNoCacheChange2 = docContainer.Document;

            Assert.AreEqual(writeTime, File.GetLastWriteTime(cachePath));

            // Optimizing should shrink the cache
            var results = docNoCacheChange2.Settings.MeasuredResults.OptimizeCache(docPath, streamManager);
            var docOptimized = new SrmDocument(docNoCacheChange2,
                                               docNoCacheChange2.Settings.ChangeMeasuredResults(results),
                                               docNoCacheChange2.Children);
            // This should not cause a reload
            Assert.IsTrue(docContainer.SetDocument(docOptimized, docNoCacheChange2, false));

            cacheInfo = new FileInfo(cachePath);
            Assert.IsTrue(cacheSize > cacheInfo.Length);

            // Test file caches
            // First reload the files from .mzML
            docReload = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_file.sky", out docPath);
            // Change the path to use the right .skyd file
            docContainer.DocumentFilePath = docPath;
            Assert.IsTrue(docContainer.SetDocument(docReload, docOptimized, true));
            docContainer.AssertComplete();
            docReload = docContainer.Document;
            // Release file handles to cache files created during load
            Assert.IsTrue(docContainer.SetDocument(doc, docReload));
            // Delete the cache
            cachePath = Path.ChangeExtension(docPath, ".skyd");
            FileEx.SafeDelete(cachePath);

            // Then try using cached files
            // Move per-file cache files into place
            var fileCacheNames = new[]
                {
                    "160109_Mix1_calcurve_075.mzML.skyd",
                    "160109_Mix1_calcurve_078.mzML.skyd"
                };
            GetCacheFiles(testFilesDir, fileCacheNames);
            // Swap the mzML files, so the test will fail if it is not reading from the cache
            // CONSIDER: Should this really work, since they have different time stamps?
            string file075 = testFilesDir.GetTestPath("160109_Mix1_calcurve_075.mzML");
            string file078 = testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML");
            string fileTemp = file075 + ".tmp";
            File.Move(file075, fileTemp);
            File.Move(file078, file075);
            File.Move(fileTemp, file078);

            docCached = InitWatersDocument(testFilesDir, "160109_Mix1_calcurve_file.sky", out docPath);
            // Make sure cache files exactly match the names the loader will look for
            var listResultsFiles = new List<MsDataFileUri>();
            foreach (var chromatogram in docCached.Settings.MeasuredResults.Chromatograms)
                listResultsFiles.AddRange(chromatogram.MSDataFilePaths);
            for (int i = 0; i < fileCacheNames.Length; i++)
            {
                string partPath = ChromatogramCache.PartPathForName(docPath, listResultsFiles[i]);
                File.Move(testFilesDir.GetTestPath(fileCacheNames[i]), partPath);
            }

            Assert.IsTrue(docContainer.SetDocument(docCached, doc, true));
            docContainer.AssertComplete();
            // docCached = docContainer.Document;

            // The document with data from the .mzML files should be the same as
            // the one loaded from the .skyd files.
            // Unfortunately, this is too hard to maintain when cache changes are made.
            // AssertEx.Cloned(docCached, docReload);

            // The one cache should be present
            Assert.IsTrue(File.Exists(Path.ChangeExtension(docPath, ".skyd")));
            // And the replicate cache files should have been removed
            foreach (var cacheName in fileCacheNames)
                Assert.IsFalse(File.Exists(testFilesDir.GetTestPath(cacheName)));

            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }
Example #17
0
        private void RunMultiplePeptidesSameMz(RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules)
        {
            if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)
                TestDirectoryName = asSmallMolecules.ToString();

            TestSmallMolecules = false;  // Don't need the magic test node, we have an explicit test

            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            string docPath;
            SrmDocument document = InitMultiplePeptidesSameMzDocument(testFilesDir, out docPath);
            document = (new RefinementSettings()).ConvertToSmallMolecules(document, asSmallMolecules);
            var docContainer = new ResultsTestDocumentContainer(document, docPath);

            var doc = docContainer.Document;
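            // Import a single mzML replicate; the document contains multiple peptides that share the same precursor m/z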
            var listChromatograms = new List<ChromatogramSet>();
            var path = MsDataFileUri.Parse(@"AMultiplePeptidesSameMz\ljz_20131201k_Newvariant_standards_braf.mzML");
            listChromatograms.Add(AssertResult.FindChromatogramSet(doc, path) ??
                    new ChromatogramSet(path.GetFileName().Replace('.', '_'), new[] { path }));
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            document = docContainer.Document;

            float tolerance = (float)document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
            var results = document.Settings.MeasuredResults;
            foreach (var pair in document.MoleculePrecursorPairs)
            {
                ChromatogramGroupInfo[] chromGroupInfo;
                Assert.IsTrue(results.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup,
                    tolerance, true, out chromGroupInfo));
                Assert.AreEqual(1, chromGroupInfo.Length);  // without the fix, only the first pair will have a chromatogram
            }
            // now drill down for specific values
            int nPeptides = 0;
            foreach (var nodePep in document.Molecules.Where(nodePep => nodePep.Results[0] != null))
            {
                // expecting three peptide results in this small data set
                if (nodePep.Results[0].Sum(chromInfo => chromInfo.PeakCountRatio > 0 ? 1 : 0) > 0)
                {
                    Assert.AreEqual(34.2441024780273,(double)nodePep.GetMeasuredRetentionTime(0), .0001);
                    nPeptides++;
                }
            }
            Assert.AreEqual(3, nPeptides); // without the fix this will give just one result
            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }
Example #18
0
        public void ChromatogramExportTest()
        {
            var testFilesDir = new TestFilesDir(TestContext, TEST_ZIP_PATH);
            string chromExportDoc = testFilesDir.GetTestPath("ChromToExport.sky");
            string fileExpected1 = testFilesDir.GetTestPathLocale(EXPORT_1);
            string fileActual1 = GetActualName(fileExpected1);
            string fileExpected2 = testFilesDir.GetTestPathLocale(EXPORT_2);
            string fileActual2 = GetActualName(fileExpected2);
            string fileExpectedAll = testFilesDir.GetTestPathLocale(EXPORT_ALL);
            string fileActualAll = GetActualName(fileExpectedAll);

            SrmDocument doc = ResultsUtil.DeserializeDocument(chromExportDoc);
            // Load an empty doc, so that we can make a change and
            // cause the .skyd to be loaded
            var docContainer = new ResultsTestDocumentContainer(null, chromExportDoc);
            docContainer.SetDocument(doc, null, true);
            docContainer.AssertComplete();
            SrmDocument docResults = docContainer.Document;
            if (IsSaveAll)
            {
                // For regenerating all of the required expected files, if things change
                SaveChrom(docResults, testFilesDir.GetTestPath(EXPORT_1), FILE_NAMES_1.ToList(), CultureInfo.GetCultureInfo("en-US"), EXTRACTOR_1, SOURCES_1);
                SaveChrom(docResults, testFilesDir.GetTestPath(EXPORT_2), FILE_NAMES_2.ToList(), CultureInfo.GetCultureInfo("en-US"), EXTRACTOR_2, SOURCES_2);
                SaveChrom(docResults, testFilesDir.GetTestPath(EXPORT_ALL), FILE_NAMES_ALL.ToList(), CultureInfo.GetCultureInfo("en-US"), EXTRACTOR_ALL, SOURCES_ALL);
                SaveChrom(docResults, testFilesDir.GetTestPathIntl(EXPORT_1), FILE_NAMES_1.ToList(), CultureInfo.GetCultureInfo("fr-FR"), EXTRACTOR_1, SOURCES_1);
                SaveChrom(docResults, testFilesDir.GetTestPathIntl(EXPORT_2), FILE_NAMES_2.ToList(), CultureInfo.GetCultureInfo("fr-FR"), EXTRACTOR_2, SOURCES_2);
                SaveChrom(docResults, testFilesDir.GetTestPathIntl(EXPORT_ALL), FILE_NAMES_ALL.ToList(), CultureInfo.GetCultureInfo("fr-FR"), EXTRACTOR_ALL, SOURCES_ALL);
            }

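            // Export chromatograms in the current culture and compare with the locale-specific expected files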
            SaveChrom(docResults, fileActual1, FILE_NAMES_1.ToList(), LocalizationHelper.CurrentCulture, EXTRACTOR_1, SOURCES_1);
            SaveChrom(docResults, fileActual2, FILE_NAMES_2.ToList(), LocalizationHelper.CurrentCulture, EXTRACTOR_2, SOURCES_2);
            SaveChrom(docResults, fileActualAll, FILE_NAMES_ALL.ToList(), LocalizationHelper.CurrentCulture, EXTRACTOR_ALL, SOURCES_ALL);

            AssertEx.FileEquals(fileExpected1, fileActual1);
            AssertEx.FileEquals(fileExpected2, fileActual2);
            AssertEx.FileEquals(fileExpectedAll, fileActualAll);

            // Close the .skyd file
            docContainer.Release();
        }
Example #19
0
        public SrmDocument ConvertToSmallMolecules(SrmDocument doc, ref string docPath, IEnumerable<string> dataPaths,
                                                   RefinementSettings.ConvertToSmallMoleculesMode mode)
        {
            if (doc == null)
            {
                using (var cmd = new CommandLine())
                {
                    Assert.IsTrue(cmd.OpenSkyFile(docPath)); // Handles any path shifts in database files, like our .imsdb file
                    var docLoad = cmd.Document;
                    using (var docContainer = new ResultsTestDocumentContainer(null, docPath))
                    {
                        docContainer.SetDocument(docLoad, null, true);
                        docContainer.AssertComplete();
                        doc = docContainer.Document;
                    }
                }
            }
            if (mode == RefinementSettings.ConvertToSmallMoleculesMode.none)
            {
                return doc;
            }

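            // Convert the peptide document to small molecules and rebuild its measured results from the same data files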
            var docOriginal = doc;
            var refine = new RefinementSettings();

            docPath = docPath.Replace(".sky", "_converted_to_small_molecules.sky");
            var docSmallMol =
                refine.ConvertToSmallMolecules(doc, Path.GetDirectoryName(docPath), mode);
            var listChromatograms = new List<ChromatogramSet>();

            if (dataPaths != null)
            {
                foreach (var dataPath in dataPaths)
                {
                    if (!string.IsNullOrEmpty(dataPath))
                    {
                        listChromatograms.Add(AssertResult.FindChromatogramSet(docSmallMol, new MsDataFilePath(dataPath)) ??
                                              new ChromatogramSet(Path.GetFileName(dataPath).Replace('.', '_'),
                                                                  new[] { dataPath }));
                    }
                }
            }
            var docResults = docSmallMol.ChangeMeasuredResults(listChromatograms.Any() ? new MeasuredResults(listChromatograms) : null);

            // Since refine isn't in a document container, have to close the streams manually to avoid file locking trouble (thanks, Nick!)
            foreach (var library in docResults.Settings.PeptideSettings.Libraries.Libraries)
            {
                foreach (var stream in library.ReadStreams)
                {
                    stream.CloseStream();
                }
            }

            // Save and restore to ensure library caches
            var cmdline = new CommandLine();

            cmdline.SaveDocument(docResults, docPath, TextWriter.Null);
            Assert.IsTrue(cmdline.OpenSkyFile(docPath)); // Handles any path shifts in database files, like our .imsdb file
            docResults = cmdline.Document;
            using (var docContainer = new ResultsTestDocumentContainer(null, docPath))
            {
                docContainer.SetDocument(docResults, null, true);
                docContainer.AssertComplete();
                doc = docContainer.Document;
            }
            AssertEx.ConvertedSmallMoleculeDocumentIsSimilar(docOriginal, doc, Path.GetDirectoryName(docPath), mode);
            return doc;
        }
Example #20
0
        public void WatersMultiFileTest()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            string docPath;
            SrmDocument docOriginal = InitWatersDocument(testFilesDir, out docPath);
            SrmDocument doc = docOriginal;
            var docContainer = new ResultsTestDocumentContainer(doc, docPath);
            string extRaw = ExtensionTestContext.ExtWatersRaw;

            var listChromatograms = new List<ChromatogramSet>
            {
                new ChromatogramSet("double", new[]
                    {
                        MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_070.mzML")),
                        MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_073.mzML"))
                    }),
                new ChromatogramSet("trouble", new[]
                    {
                        MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_075" + extRaw)),
                        MsDataFileUri.Parse(testFilesDir.GetTestPath("160109_Mix1_calcurve_078.mzML"))
                    })
            };
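            // Each of the two replicates is built from two data files, so every precursor should yield two chromatogram groups per replicate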
            var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
            Assert.IsTrue(docContainer.SetDocument(docResults, doc, true));
            docContainer.AssertComplete();
            docResults = docContainer.Document;
            Assert.IsTrue(docResults.Settings.HasResults);
            var measuredResults = docResults.Settings.MeasuredResults;
            var chromatograms = measuredResults.Chromatograms;
            Assert.AreEqual(2, chromatograms.Count);

            const float tolerance = (float)TransitionInstrument.DEFAULT_MZ_MATCH_TOLERANCE;

            foreach (var pair in docResults.PeptidePrecursorPairs)
            {
                var nodePep = pair.NodePep;
                var nodeGroup = pair.NodeGroup;
                Assert.IsTrue(nodeGroup.HasResults);
                Assert.AreEqual(2, nodeGroup.Results.Count);
                foreach (var result in nodeGroup.Results)
                    Assert.AreEqual(2, result.Count);
                for (int i = 0; i < 2; i++)
                {
                    ChromatogramGroupInfo[] chromInfos;
                    Assert.IsTrue(measuredResults.TryLoadChromatogram(i, nodePep, nodeGroup, tolerance, true, out chromInfos));
                    Assert.AreEqual(2, chromInfos.Length);
                    double[] peakAreas = new double[2];
                    for (int j = 0; j < 2; j++)
                    {
                        var chromInfo = chromInfos[j];
                        Assert.IsTrue(chromInfo.BestPeakIndex != -1);
                        foreach (var tranInfo in chromInfo.TransitionPointSets)
                        {
                            var peakInfo = tranInfo.GetPeak(chromInfo.BestPeakIndex);
                            if (peakInfo.IsEmpty || peakInfo.IsForcedIntegration)
                                continue;

                            // Check times
                            var times = tranInfo.Times;
                            int iStart = Array.BinarySearch(times, peakInfo.StartTime);
                            Assert.IsTrue(iStart >= 0);
                            int iEnd = Array.BinarySearch(times, peakInfo.EndTime);
                            Assert.IsTrue(iEnd >= 0);
                            int iPeak = Array.BinarySearch(times, iStart, iEnd - iStart, peakInfo.RetentionTime);
                            // Check intensities at times
                            var intensities = tranInfo.Intensities;
                            Assert.IsTrue(intensities[iStart] < intensities[iPeak]);
                            Assert.IsTrue(intensities[iEnd] < intensities[iPeak]);
                            // Sum peak area
                            peakAreas[j] += peakInfo.Area;
                        }
                    }
                    Assert.IsTrue(peakAreas[0] < peakAreas[1]);
                }
            }

            // Release file handles
            docContainer.Release();
            testFilesDir.Dispose();
        }