Beispiel #1
0
        public void ConsoleMProphetModelTest()
        {
            // Verifies that creating an mProphet scoring model from the command line
            // reports each applicable default feature calculator and saves the document.
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            const string docName   = "mProphetModel.sky";
            const string modelName = "testModel";
            var docPath = testFilesDir.GetTestPath(docName);

            // with mods and invalid cutoff score
            var commandOutput = RunCommand("--in=" + docPath,
                                           "--reintegrate-model-name=" + modelName,
                                           "--reintegrate-create-model",
                                           "--reintegrate-overwrite-peaks",
                                           "--save");

            AssertEx.Contains(commandOutput, string.Format(Resources.CommandLine_CreateScoringModel_Creating_scoring_model__0_, modelName));

            var document = ResultsUtil.DeserializeDocument(docPath);
            foreach (var calc in MProphetPeakScoringModel.GetDefaultCalculators(document))
            {
                // Not all of the default calculators for the document are valid;
                // skip the known-inapplicable ones and require the rest in the output.
                bool inapplicable = calc is MQuestRetentionTimePredictionCalc ||
                                    calc is MQuestRetentionTimeSquaredPredictionCalc ||
                                    calc is MQuestIntensityCorrelationCalc ||
                                    calc is NextGenProductMassErrorCalc ||
                                    calc is NextGenCrossWeightedShapeCalc ||
                                    calc is LegacyIdentifiedCountCalc;
                if (!inapplicable)
                    AssertEx.Contains(commandOutput, calc.Name);
            }
            AssertEx.Contains(commandOutput, string.Format(Resources.CommandLine_SaveFile_File__0__saved_, docName));
        }
Beispiel #2
0
        /// <summary>
        /// Tests the MProphet scoring model implementation against known good results:
        /// for each data file, trains a model starting from the expected weights and
        /// verifies the trained weights match the expected values within tolerance.
        /// </summary>
        public void TestMProphetScoringModel()
        {
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);

            // Test our MProphet implementation against known good results.
            foreach (var fileWeights in _fileWeights)
            {
                // Load transition groups from data file.
                var filePath = testFilesDir.GetTestPath(fileWeights._fileName);
                ScoredGroupPeaksSet targetTransitionGroups;
                ScoredGroupPeaksSet decoyTransitionGroups;
                LoadData(filePath, out targetTransitionGroups, out decoyTransitionGroups);

                // Discard half the transition groups that are used for testing.
                targetTransitionGroups.DiscardHalf();
                decoyTransitionGroups.DiscardHalf();

                // Calculate weights for peak features.
                var scoringModel = new MProphetPeakScoringModel("mProphet", fileWeights._weights);    // Not L10N
                scoringModel = (MProphetPeakScoringModel)scoringModel.Train(targetTransitionGroups.ToList(), decoyTransitionGroups.ToList(),
                                                                            new LinearModelParams(fileWeights._weights), 10, false, false);
                // Assert.AreEqual takes (expected, actual) - the expected values come
                // from the test data, the actual values from the trained model.
                Assert.AreEqual(fileWeights._weights.Length, scoringModel.Parameters.Weights.Count);
                for (int i = 0; i < scoringModel.Parameters.Weights.Count; i++)
                {
                    Assert.AreEqual(fileWeights._weights[i], scoringModel.Parameters.Weights[i], 1e-6);
                }
            }
        }
Beispiel #3
0
        /// <summary>
        /// Trains an mProphet peak scoring model in the background, installs it as the
        /// document's peak scoring model (turning auto-train off), and reintegrates
        /// peaks with it. Retries the document swap until no concurrent change races it.
        /// </summary>
        /// <returns>True on successful completion of processing.</returns>
        /// <exception cref="InvalidDataException">
        /// Thrown when the document has no decoy transition groups, or when the trained
        /// model is missing scores for one or more peptides.
        /// </exception>
        protected override bool LoadBackground(IDocumentContainer container, SrmDocument document, SrmDocument docCurrent)
        {
            var loadMonitor = new LoadMonitor(this, container, container.Document);

            // Start from an untrained model (null parameters) built on the default
            // feature calculators for the current document.
            IPeakScoringModel scoringModel = new MProphetPeakScoringModel(
                Path.GetFileNameWithoutExtension(container.DocumentFilePath), null as LinearModelParams,
                MProphetPeakScoringModel.GetDefaultCalculators(docCurrent), true);

            var targetDecoyGenerator = new TargetDecoyGenerator(docCurrent, scoringModel, this, loadMonitor);

            // Get scores for target and decoy groups.
            List <IList <float[]> > targetTransitionGroups, decoyTransitionGroups;

            targetDecoyGenerator.GetTransitionGroups(out targetTransitionGroups, out decoyTransitionGroups);
            // Training requires decoys; bail out if the document has none.
            if (!decoyTransitionGroups.Any())
            {
                throw new InvalidDataException();
            }

            // Initial weights start at 0 (array default).
            // NOTE(review): a prior comment said these were "based on previous model",
            // but no previous weights are copied here - confirm intended behavior.
            var initialWeights = new double[scoringModel.PeakFeatureCalculators.Count];

            // But then set to NaN the weights that have unknown values for this dataset
            for (var i = 0; i < initialWeights.Length; ++i)
            {
                if (!targetDecoyGenerator.EligibleScores[i])
                {
                    initialWeights[i] = double.NaN;
                }
            }
            var initialParams = new LinearModelParams(initialWeights);

            // Train the model.
            scoringModel = scoringModel.Train(targetTransitionGroups, decoyTransitionGroups, targetDecoyGenerator, initialParams, null, null, scoringModel.UsesSecondBest, true, loadMonitor);

            SrmDocument docNew;

            do
            {
                // Re-read the container document on each attempt: if another thread
                // changed it, CompleteProcessing fails and the loop retries on the
                // fresh document.
                docCurrent = container.Document;
                docNew     = docCurrent.ChangeSettings(docCurrent.Settings.ChangePeptideIntegration(i =>
                                                                                                    i.ChangeAutoTrain(false).ChangePeakScoringModel((PeakScoringModelSpec)scoringModel)));

                // Reintegrate peaks
                var resultsHandler = new MProphetResultsHandler(docNew, (PeakScoringModelSpec)scoringModel, _cachedFeatureScores);
                resultsHandler.ScoreFeatures(loadMonitor);
                if (resultsHandler.IsMissingScores())
                {
                    throw new InvalidDataException(Resources.ImportPeptideSearchManager_LoadBackground_The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document_);
                }
                docNew = resultsHandler.ChangePeaks(loadMonitor);
            }while (!CompleteProcessing(container, docNew, docCurrent));

            return(true);
        }
Beispiel #4
0
        // Test that the dialog behaves correctly when opening a model
        // that is incompatible with the dataset (some or all composite scores are NaN's)
        protected void TestIncompatibleDataSet()
        {
            // Build a model whose weight vector contains NaN entries, making it
            // incompatible with the current dataset.
            var badWeights = new[] { 0.5322, -1.0352, double.NaN, double.NaN, 1.4744, 0.0430, 0.0477, -0.2740, double.NaN,
                                     2.0096, 7.7726, -0.0566, 0.4751, 0.5, 0.5, double.NaN, double.NaN,
                                     double.NaN, double.NaN, double.NaN, double.NaN, double.NaN, double.NaN };
            var badParams = new LinearModelParams(badWeights, -2.5);
            var badModel = new MProphetPeakScoringModel("incompatible", badParams, null, true);

            Settings.Default.PeakScoringModelList.Add(badModel);

            // Select the incompatible model in peptide settings.
            RunDlg<PeptideSettingsUI>(SkylineWindow.ShowPeptideSettingsUI, settingsDlg =>
            {
                settingsDlg.ComboPeakScoringModelSelected = "incompatible";
                settingsDlg.OkDialog();
            });

            var reintegrateDlg = ShowDialog<ReintegrateDlg>(SkylineWindow.ShowReintegrateDialog);

            var listDlg = ShowDialog<EditListDlg<SettingsListBase<PeakScoringModelSpec>, PeakScoringModelSpec>>(
                reintegrateDlg.EditPeakScoringModel);

            RunUI(() => listDlg.SelectItem("incompatible")); // Not L10N

            RunDlg<EditPeakScoringModelDlg>(listDlg.EditItem, modelDlg =>
            {
                // All of the percentage fields should be null
                VerifyCellValues(modelDlg, SCORES_AND_WEIGHTS[10], 0.0);
                modelDlg.TrainModelClick();
                // Cell values go back to the standard trained model after we train and enable calculators,
                // despite having been loaded with weird values
                modelDlg.SetChecked(3, true);
                modelDlg.TrainModelClick();
                VerifyCellValues(modelDlg, SCORES_AND_WEIGHTS[1], 1.0, false);
                modelDlg.CancelDialog();
            });
            OkDialog(listDlg, listDlg.OkDialog);

            // Trying to reintegrate gives an error because the model is incompatible
            RunDlg<MessageDlg>(reintegrateDlg.OkDialog, msgDlg =>
            {
                Assert.AreEqual(TextUtil.LineSeparate(string.Format(Resources.ReintegrateDlg_OkDialog_Failed_attempting_to_reintegrate_peaks_),
                                                      Resources.ReintegrateDlg_OkDialog_The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document___Please_train_a_new_model_),
                                msgDlg.Message);
                msgDlg.OkDialog();
            });
            OkDialog(reintegrateDlg, reintegrateDlg.CancelDialog);
        }
        public void ConsoleMProphetModelTest()
        {
            // Run reintegration from the command line twice: first without a model
            // name or save flag, then with both, checking warnings and output text.
            var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE);
            const string docName   = "mProphetModel.sky";
            const string modelName = "testModel";
            var docPath = testFilesDir.GetTestPath(docName);

            var arguments = new List<string>
            {
                "--in=" + docPath,
                "--reintegrate-create-model",
                "--reintegrate-overwrite-peaks",
            };
            var cmdOutput = RunCommand(arguments.ToArray());

            // Extra warning about spectral library
            Assert.AreEqual(3, CountInstances(Resources.CommandLineTest_ConsoleAddFastaTest_Warning, cmdOutput));

            arguments.Add("--reintegrate-model-name=" + modelName);
            arguments.Add("--save");
            cmdOutput = RunCommand(arguments.ToArray());

            Assert.AreEqual(1, CountInstances(Resources.CommandLineTest_ConsoleAddFastaTest_Warning, cmdOutput));
            AssertEx.Contains(cmdOutput, string.Format(Resources.CommandLine_CreateScoringModel_Creating_scoring_model__0_, modelName));

            var resultDoc = ResultsUtil.DeserializeDocument(docPath);
            foreach (var calc in MProphetPeakScoringModel.GetDefaultCalculators(resultDoc))
            {
                // Not all of the default calculators for the document are valid
                if (calc is MQuestRetentionTimePredictionCalc ||
                    calc is MQuestRetentionTimeSquaredPredictionCalc ||
                    calc is MQuestIntensityCorrelationCalc ||
                    calc is NextGenProductMassErrorCalc ||
                    calc is NextGenCrossWeightedShapeCalc ||
                    calc is LegacyIdentifiedCountCalc)
                {
                    continue;
                }
                AssertEx.Contains(cmdOutput, calc.Name);
            }
            AssertEx.Contains(cmdOutput, string.Format(Resources.CommandLine_SaveFile_File__0__saved_, docName));
        }
Beispiel #6
0
        /// <summary>
        /// Trains a legacy model, checks that it saves correctly in the list and the document,
        /// modifies it without changing its name and checks that the list and document models update correctly
        /// </summary>
        protected void TestModelChangesAndSave()
        {
            LegacyScoringModel peakScoringModelBase = null;

            // Test legacy model
            var reintegrateDlg = ShowDialog <ReintegrateDlg>(SkylineWindow.ShowReintegrateDialog);
            var editDlg        = ShowDialog <EditPeakScoringModelDlg>(reintegrateDlg.AddPeakScoringModel);

            RunUI(() =>
            {
                // Name the model and select the legacy model type, then train with
                // different decoy/second-best combinations, verifying bias and
                // cell values against the expected SCORES_AND_WEIGHTS tables.
                Assert.AreEqual(editDlg.PeakScoringModelName, "");
                editDlg.PeakScoringModelName = "legacy1"; // Not L10N
                editDlg.SelectedModelItem    = LegacyScoringModel.DEFAULT_NAME;
                Assert.AreEqual(editDlg.PeakScoringModelName, "legacy1");
                editDlg.TrainModelClick();
                VerifyBias(editDlg, SCORES_AND_WEIGHTS[4]);
                VerifyCellValues(editDlg, SCORES_AND_WEIGHTS[5]);
                editDlg.UsesSecondBest = true;
                editDlg.UsesDecoys     = false;
                editDlg.TrainModelClick();
                VerifyBias(editDlg, SCORES_AND_WEIGHTS[6]);
                VerifyCellValues(editDlg, SCORES_AND_WEIGHTS[7]);
                editDlg.UsesSecondBest = false;
                peakScoringModelBase   = editDlg.PeakScoringModel as LegacyScoringModel;
            });

            //  Unchecking decoys and second best leads to error on training
            RunDlg <MessageDlg>(editDlg.TrainModelClick, messageDlg =>
            {
                Assert.AreEqual(string.Format(Resources.EditPeakScoringModelDlg_btnTrainModel_Click_Cannot_train_model_without_either_decoys_or_second_best_peaks_included_),
                                messageDlg.Message);
                messageDlg.OkDialog();
            });
            OkDialog(editDlg, editDlg.OkDialog);
            RunUI(() => reintegrateDlg.ComboPeakScoringModelSelected = "legacy1");
            OkDialog(reintegrateDlg, reintegrateDlg.OkDialog);
            RunUI(() =>
            {
                // Test modification of legacy scoring model
                SkylineWindow.SaveDocument();
                var peakScoringModel = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as LegacyScoringModel;
                var listModels       = Settings.Default.PeakScoringModelList;
                // NOTE(review): assumes exactly 4 models in the list with "legacy1"
                // last (index 3) - depends on earlier test-fixture state.
                Assert.AreEqual(listModels.Count, 4);
                var peakScoringModelList = listModels[3] as LegacyScoringModel;
                // Check that model in EditList, document model, and model from the dialog are all the same
                AssertEqualNotNull(new List <LegacyScoringModel> {
                    peakScoringModel, peakScoringModelList, peakScoringModelBase
                });
                // Check document model is the last model we trained
                // ReSharper disable PossibleNullReferenceException
                Assert.AreEqual(peakScoringModel.Name, "legacy1");
                // ReSharper restore PossibleNullReferenceException
            });
            LegacyScoringModel peakScoringModelBaseNew = null;

            // Modify the model (re-enable decoys) without renaming it; the list
            // and document copies must both pick up the change.
            RunEditPeakScoringDlg("legacy1", editDlgTemp =>
            {
                editDlgTemp.UsesDecoys = true;
                editDlgTemp.TrainModelClick();
                peakScoringModelBaseNew = editDlgTemp.PeakScoringModel as LegacyScoringModel;
                editDlgTemp.OkDialog();
            });
            RunUI(() =>
            {
                SkylineWindow.SaveDocument();
                var peakScoringModelNew     = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as LegacyScoringModel;
                var listModels              = Settings.Default.PeakScoringModelList;
                var peakScoringModelListNew = listModels[3] as LegacyScoringModel;
                // Check that model in EditList, document model, and model from the dialog are all the same
                AssertEqualNotNull(new List <LegacyScoringModel> {
                    peakScoringModelNew, peakScoringModelListNew, peakScoringModelBaseNew
                });
                // Check document model has changed
                // ReSharper disable PossibleNullReferenceException
                Assert.IsTrue(peakScoringModelNew.UsesDecoys);
                // ReSharper restore PossibleNullReferenceException
            });

            // Test changing legacy to mProphet model without changing name
            MProphetPeakScoringModel peakScoringModelMProphetBase = null;

            RunEditPeakScoringDlg("legacy1", editDlgTemp =>
            {
                // Switch to mProphet model
                editDlgTemp.SelectedModelItem = "mProphet";
                editDlgTemp.TrainModelClick();
                peakScoringModelMProphetBase = editDlgTemp.PeakScoringModel as MProphetPeakScoringModel;
                editDlgTemp.OkDialog();
            }
                                  );
            RunUI(() =>
            {
                SkylineWindow.SaveDocument();
                var peakScoringModelMProphet = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as MProphetPeakScoringModel;
                var listModels = Settings.Default.PeakScoringModelList;
                var peakScoringModelMProphetList = listModels[3] as MProphetPeakScoringModel;
                // Check that model in EditList, document model, and model from the dialog are all the same
                AssertEqualNotNull(new List <MProphetPeakScoringModel> {
                    peakScoringModelMProphet,
                    peakScoringModelMProphetList,
                    peakScoringModelMProphetBase
                });
                // Check document model has changed
                Assert.AreNotEqual(peakScoringModelBaseNew, peakScoringModelMProphet);
                // ReSharper disable PossibleNullReferenceException
                Assert.IsTrue(peakScoringModelMProphet.UsesDecoys);
                // ReSharper restore PossibleNullReferenceException
                // NOTE(review): 23 matches the mProphet default calculator count
                // expected for this document - confirm if calculators change.
                Assert.AreEqual(peakScoringModelMProphet.PeakFeatureCalculators.Count, 23);
            });
        }