/// <summary>
/// Verifies that the command line can train an mProphet scoring model, overwrite
/// peaks, and save the document, and that the console output reports the model
/// name, every valid default feature calculator, and the saved file.
/// </summary>
public void ConsoleMProphetModelTest()
{
    var dir = new TestFilesDir(TestContext, ZIP_FILE);
    const string documentName = "mProphetModel.sky";
    const string scoringModelName = "testModel";
    var documentPath = dir.GetTestPath(documentName);

    // with mods and invalid cutoff score
    var consoleOutput = RunCommand(
        "--in=" + documentPath,
        "--reintegrate-model-name=" + scoringModelName,
        "--reintegrate-create-model",
        "--reintegrate-overwrite-peaks",
        "--save");

    AssertEx.Contains(consoleOutput,
        string.Format(Resources.CommandLine_CreateScoringModel_Creating_scoring_model__0_, scoringModelName));

    // Every default calculator that is valid for this document should appear in the output.
    var document = ResultsUtil.DeserializeDocument(documentPath);
    foreach (var calc in MProphetPeakScoringModel.GetDefaultCalculators(document))
    {
        // Not all of the default calculators for the document are valid
        var invalidForDocument =
            calc is MQuestRetentionTimePredictionCalc ||
            calc is MQuestRetentionTimeSquaredPredictionCalc ||
            calc is MQuestIntensityCorrelationCalc ||
            calc is NextGenProductMassErrorCalc ||
            calc is NextGenCrossWeightedShapeCalc ||
            calc is LegacyIdentifiedCountCalc;
        if (!invalidForDocument)
        {
            AssertEx.Contains(consoleOutput, calc.Name);
        }
    }

    AssertEx.Contains(consoleOutput,
        string.Format(Resources.CommandLine_SaveFile_File__0__saved_, documentName));
}
/// <summary>
/// Trains an mProphet peak scoring model from the document's target and decoy
/// transition groups in the background, then reintegrates peaks with the trained
/// model, retrying until the updated document can be swapped into the container.
/// </summary>
/// <param name="container">Owner of the document; re-read on each retry of the swap loop.</param>
/// <param name="document">Document at the time background loading was requested (not read by this body).</param>
/// <param name="docCurrent">Snapshot of the document used for calculator selection and training.</param>
/// <returns>Always true here; failures surface as exceptions.</returns>
/// <exception cref="InvalidDataException">
/// Thrown (without a message) when the document has no decoy transition groups,
/// or (with a message) when the trained model cannot score one or more peptides.
/// </exception>
protected override bool LoadBackground(IDocumentContainer container, SrmDocument document, SrmDocument docCurrent)
{
    var loadMonitor = new LoadMonitor(this, container, container.Document);
    // Build a fresh mProphet model (no trained parameters yet) over all default
    // calculators for this document.
    IPeakScoringModel scoringModel = new MProphetPeakScoringModel(
        Path.GetFileNameWithoutExtension(container.DocumentFilePath), null as LinearModelParams,
        MProphetPeakScoringModel.GetDefaultCalculators(docCurrent), true);
    var targetDecoyGenerator = new TargetDecoyGenerator(docCurrent, scoringModel, this, loadMonitor);
    // Get scores for target and decoy groups.
    List<IList<float[]>> targetTransitionGroups, decoyTransitionGroups;
    targetDecoyGenerator.GetTransitionGroups(out targetTransitionGroups, out decoyTransitionGroups);
    // Decoys are required for training; fail fast when the document has none.
    if (!decoyTransitionGroups.Any())
    {
        throw new InvalidDataException();
    }
    // Initial weights start at zero for every calculator...
    var initialWeights = new double[scoringModel.PeakFeatureCalculators.Count];
    // ...but weights whose scores have unknown values for this dataset are set
    // to NaN so they are excluded from training.
    for (var i = 0; i < initialWeights.Length; ++i)
    {
        if (!targetDecoyGenerator.EligibleScores[i])
        {
            initialWeights[i] = double.NaN;
        }
    }
    var initialParams = new LinearModelParams(initialWeights);
    // Train the model.
    scoringModel = scoringModel.Train(targetTransitionGroups, decoyTransitionGroups, targetDecoyGenerator,
        initialParams, null, null, scoringModel.UsesSecondBest, true, loadMonitor);
    SrmDocument docNew;
    do
    {
        // Re-read the live document each attempt: it may have changed while the
        // (potentially long) scoring/reintegration below was running.
        docCurrent = container.Document;
        docNew = docCurrent.ChangeSettings(docCurrent.Settings.ChangePeptideIntegration(
            i => i.ChangeAutoTrain(false).ChangePeakScoringModel((PeakScoringModelSpec)scoringModel)));
        // Reintegrate peaks
        var resultsHandler = new MProphetResultsHandler(docNew, (PeakScoringModelSpec)scoringModel, _cachedFeatureScores);
        resultsHandler.ScoreFeatures(loadMonitor);
        if (resultsHandler.IsMissingScores())
        {
            throw new InvalidDataException(Resources.ImportPeptideSearchManager_LoadBackground_The_current_peak_scoring_model_is_incompatible_with_one_or_more_peptides_in_the_document_);
        }
        docNew = resultsHandler.ChangePeaks(loadMonitor);
    }
    // CompleteProcessing fails (returning false) when docCurrent is stale,
    // which loops back to retry against the newer document.
    while (!CompleteProcessing(container, docNew, docCurrent));
    return(true);
}
/// <summary>
/// Verifies command-line mProphet model creation in two passes: first without a
/// model name or save (expecting extra spectral-library warnings), then with
/// both, checking the reported model name, the valid default feature
/// calculators, and the saved file.
/// </summary>
public void ConsoleMProphetModelTest()
{
    var dir = new TestFilesDir(TestContext, ZIP_FILE);
    const string documentName = "mProphetModel.sky";
    const string scoringModelName = "testModel";
    var documentPath = dir.GetTestPath(documentName);

    var arguments = new List<string>
    {
        "--in=" + documentPath,
        "--reintegrate-create-model",
        "--reintegrate-overwrite-peaks",
    };
    var consoleOutput = RunCommand(arguments.ToArray());
    // Extra warning about spectral library
    Assert.AreEqual(3, CountInstances(Resources.CommandLineTest_ConsoleAddFastaTest_Warning, consoleOutput));

    // Second pass: name the model and save the document.
    arguments.Add("--reintegrate-model-name=" + scoringModelName);
    arguments.Add("--save");
    consoleOutput = RunCommand(arguments.ToArray());
    Assert.AreEqual(1, CountInstances(Resources.CommandLineTest_ConsoleAddFastaTest_Warning, consoleOutput));
    AssertEx.Contains(consoleOutput,
        string.Format(Resources.CommandLine_CreateScoringModel_Creating_scoring_model__0_, scoringModelName));

    // Every default calculator that is valid for this document should appear in the output.
    var document = ResultsUtil.DeserializeDocument(documentPath);
    foreach (var calc in MProphetPeakScoringModel.GetDefaultCalculators(document))
    {
        // Not all of the default calculators for the document are valid
        var invalidForDocument =
            calc is MQuestRetentionTimePredictionCalc ||
            calc is MQuestRetentionTimeSquaredPredictionCalc ||
            calc is MQuestIntensityCorrelationCalc ||
            calc is NextGenProductMassErrorCalc ||
            calc is NextGenCrossWeightedShapeCalc ||
            calc is LegacyIdentifiedCountCalc;
        if (!invalidForDocument)
        {
            AssertEx.Contains(consoleOutput, calc.Name);
        }
    }

    AssertEx.Contains(consoleOutput,
        string.Format(Resources.CommandLine_SaveFile_File__0__saved_, documentName));
}
/// <summary>
/// Trains a legacy model, checks that it saves correctly in the list and the document,
/// modifies it without changing its name and checks that the list and document models update correctly,
/// then switches it to an mProphet model under the same name and re-verifies.
/// </summary>
protected void TestModelChangesAndSave()
{
    LegacyScoringModel peakScoringModelBase = null;

    // Test legacy model
    var reintegrateDlg = ShowDialog<ReintegrateDlg>(SkylineWindow.ShowReintegrateDialog);
    var editDlg = ShowDialog<EditPeakScoringModelDlg>(reintegrateDlg.AddPeakScoringModel);
    RunUI(() =>
    {
        Assert.AreEqual(editDlg.PeakScoringModelName, "");
        editDlg.PeakScoringModelName = "legacy1"; // Not L10N
        editDlg.SelectedModelItem = LegacyScoringModel.DEFAULT_NAME;
        Assert.AreEqual(editDlg.PeakScoringModelName, "legacy1");
        // Train with the default settings and verify against expected weights.
        editDlg.TrainModelClick();
        VerifyBias(editDlg, SCORES_AND_WEIGHTS[4]);
        VerifyCellValues(editDlg, SCORES_AND_WEIGHTS[5]);
        // Retrain using second-best peaks instead of decoys; expected weights differ.
        editDlg.UsesSecondBest = true;
        editDlg.UsesDecoys = false;
        editDlg.TrainModelClick();
        VerifyBias(editDlg, SCORES_AND_WEIGHTS[6]);
        VerifyCellValues(editDlg, SCORES_AND_WEIGHTS[7]);
        editDlg.UsesSecondBest = false;
        peakScoringModelBase = editDlg.PeakScoringModel as LegacyScoringModel;
    });
    // Unchecking decoys and second best leads to error on training
    RunDlg<MessageDlg>(editDlg.TrainModelClick, messageDlg =>
    {
        Assert.AreEqual(string.Format(Resources.EditPeakScoringModelDlg_btnTrainModel_Click_Cannot_train_model_without_either_decoys_or_second_best_peaks_included_), messageDlg.Message);
        messageDlg.OkDialog();
    });
    OkDialog(editDlg, editDlg.OkDialog);
    RunUI(() => reintegrateDlg.ComboPeakScoringModelSelected = "legacy1");
    OkDialog(reintegrateDlg, reintegrateDlg.OkDialog);
    RunUI(() =>
    {
        // Test modification of legacy scoring model
        SkylineWindow.SaveDocument();
        var peakScoringModel = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as LegacyScoringModel;
        var listModels = Settings.Default.PeakScoringModelList;
        // NOTE(review): assumes 3 models already existed before this test added "legacy1" — confirm against test setup.
        Assert.AreEqual(listModels.Count, 4);
        var peakScoringModelList = listModels[3] as LegacyScoringModel;
        // Check that model in EditList, document model, and model from the dialog are all the same
        AssertEqualNotNull(new List<LegacyScoringModel> { peakScoringModel, peakScoringModelList, peakScoringModelBase });
        // Check document model is the last model we trained
        // ReSharper disable PossibleNullReferenceException
        Assert.AreEqual(peakScoringModel.Name, "legacy1");
        // ReSharper restore PossibleNullReferenceException
    });

    // Re-train the existing "legacy1" model (same name) with decoys enabled.
    LegacyScoringModel peakScoringModelBaseNew = null;
    RunEditPeakScoringDlg("legacy1", editDlgTemp =>
    {
        editDlgTemp.UsesDecoys = true;
        editDlgTemp.TrainModelClick();
        peakScoringModelBaseNew = editDlgTemp.PeakScoringModel as LegacyScoringModel;
        editDlgTemp.OkDialog();
    });
    RunUI(() =>
    {
        SkylineWindow.SaveDocument();
        var peakScoringModelNew = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as LegacyScoringModel;
        var listModels = Settings.Default.PeakScoringModelList;
        var peakScoringModelListNew = listModels[3] as LegacyScoringModel;
        // Check that model in EditList, document model, and model from the dialog are all the same
        AssertEqualNotNull(new List<LegacyScoringModel> { peakScoringModelNew, peakScoringModelListNew, peakScoringModelBaseNew });
        // Check document model has changed
        // ReSharper disable PossibleNullReferenceException
        Assert.IsTrue(peakScoringModelNew.UsesDecoys);
        // ReSharper restore PossibleNullReferenceException
    });

    // Test changing legacy to mProphet model without changing name
    MProphetPeakScoringModel peakScoringModelMProphetBase = null;
    RunEditPeakScoringDlg("legacy1", editDlgTemp =>
    {
        // Switch to mProphet model
        editDlgTemp.SelectedModelItem = "mProphet";
        editDlgTemp.TrainModelClick();
        peakScoringModelMProphetBase = editDlgTemp.PeakScoringModel as MProphetPeakScoringModel;
        editDlgTemp.OkDialog();
    });
    RunUI(() =>
    {
        SkylineWindow.SaveDocument();
        var peakScoringModelMProphet = SkylineWindow.DocumentUI.Settings.PeptideSettings.Integration.PeakScoringModel as MProphetPeakScoringModel;
        var listModels = Settings.Default.PeakScoringModelList;
        var peakScoringModelMProphetList = listModels[3] as MProphetPeakScoringModel;
        // Check that model in EditList, document model, and model from the dialog are all the same
        AssertEqualNotNull(new List<MProphetPeakScoringModel> { peakScoringModelMProphet, peakScoringModelMProphetList, peakScoringModelMProphetBase });
        // Check document model has changed
        Assert.AreNotEqual(peakScoringModelBaseNew, peakScoringModelMProphet);
        // ReSharper disable PossibleNullReferenceException
        Assert.IsTrue(peakScoringModelMProphet.UsesDecoys);
        // ReSharper restore PossibleNullReferenceException
        // The mProphet model should include every default calculator for the document.
        Assert.AreEqual(MProphetPeakScoringModel.GetDefaultCalculators(SkylineWindow.Document).Length, peakScoringModelMProphet.PeakFeatureCalculators.Count);
    });
}