/// <summary>
/// Creates an analysis naming view model.
/// </summary>
/// <param name="configuration">Analysis configuration whose name and output path are edited.</param>
public AnalysisNamingViewModel(AnalysisConfig configuration)
{
    m_configuration = configuration;
    BrowseCommand = new BrowseFolderCommand(command_FolderSelected);
}
/// <summary>
/// Determines whether all of the data required for the given analysis setup step is valid.
/// </summary>
/// <param name="config">Analysis configuration to validate.</param>
/// <param name="step">Setup step to validate.</param>
/// <param name="errorMessage">Receives a description of the problem when validation fails.</param>
/// <returns>True if the step is valid; otherwise false.</returns>
public static bool IsStepValid(AnalysisConfig config, AnalysisSetupStep step, ref string errorMessage)
{
    var analysis = config.Analysis;
    var isStepValid = true;

    switch (step)
    {
        case AnalysisSetupStep.DatasetSelection:
            if (analysis.MetaData.Datasets.Count < 1)
            {
                errorMessage = "Select datasets before continuing.";
                isStepValid = false;
            }
            break;
        case AnalysisSetupStep.BaselineSelection:
            isStepValid = ValidateBaseline(analysis, ref errorMessage);
            break;
        case AnalysisSetupStep.OptionsSelection:
            break;
        case AnalysisSetupStep.Naming:
            isStepValid = ValidateNames(config, ref errorMessage);
            break;
        case AnalysisSetupStep.Started:
            if (analysis.AnalysisType != AnalysisType.Full)
            {
                isStepValid = false;
            }
            break;
    }

    return isStepValid;
}
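// Hedged usage sketch (not from the source): how a wizard command might gate advancing past
// the current step. It assumes this snippet lives in a class that exposes m_config, CurrentStep
// and Status and that IsStepValid is reachable from it; none of that is stated above.
private void TryAdvanceStep(AnalysisSetupStep nextStep)
{
    var errorMessage = string.Empty;
    if (!IsStepValid(m_config, CurrentStep, ref errorMessage))
    {
        // Surface the validation failure instead of moving on.
        Status = errorMessage;
        return;
    }
    CurrentStep = nextStep;
}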
/// <summary>
/// Initializes a new instance of the <see cref="MainViewModel"/> class.
/// </summary>
public MainViewModel()
{
    m_config = new AnalysisConfig();
    Analysis = new MultiAlignAnalysis();
    m_config.AnalysisName = "Analysis.db3";
    m_config.Analysis = Analysis;
    this.WindowTitle = "MultiAlign Rogue";

    DataSelectionViewModel = new AnalysisDatasetSelectionViewModel(Analysis);

    SelectFilesCommand = new RelayCommand(SelectFiles, () => !string.IsNullOrWhiteSpace(this.ProjectPath));
    SelectDirectoryCommand = new RelayCommand(SelectDirectory, () => !string.IsNullOrWhiteSpace(this.ProjectPath));
    AddFolderCommand = new RelayCommand(AddFolderDelegate, () => !string.IsNullOrWhiteSpace(this.InputFilePath) &&
                                                                 Directory.Exists(this.InputFilePath) &&
                                                                 !string.IsNullOrWhiteSpace(this.ProjectPath));
    SearchDmsCommand = new RelayCommand(SearchDms, () => this.ShowOpenFromDms && !string.IsNullOrWhiteSpace(this.ProjectPath));
    CreateNewProjectCommand = new RelayCommand(this.CreateNewProject);
    SaveProjectCommand = new RelayCommand(SaveProject, () => !string.IsNullOrWhiteSpace(this.ProjectPath));
    LoadProjectCommand = new RelayCommand(LoadProject);
    SaveAsProjectCommand = new RelayCommand(this.SaveProjectAs, () => !string.IsNullOrWhiteSpace(this.ProjectPath));

    featureCache = new FeatureLoader { Providers = Analysis.DataProviders };
    this.SelectedDatasets = new List<DatasetInformationViewModel>();
    Datasets = new ObservableCollection<DatasetInformationViewModel>();

    this.FeatureFindingSettingsViewModel = new FeatureFindingSettingsViewModel(Analysis, featureCache);
    this.AlignmentSettingsViewModel = new AlignmentSettingsViewModel(Analysis, featureCache);
    this.ClusterSettingsViewModel = new ClusterSettingsViewModel(Analysis);
}
/// <summary>
/// Loads the data from the dataset information objects into the cache at the analysis path,
/// aligning each dataset to the baseline as it is loaded.
/// </summary>
private void PerformDataLoadAndAlignment(AnalysisConfig config)
{
    UmcLoaderFactory.Status += UMCLoaderFactory_Status;

    UpdateStatus("Loading data.");
    var analysisOptions = config.Analysis.Options;
    var datasets = config.Analysis.MetaData.Datasets.ToList();
    var lcmsFilterOptions = analysisOptions.LcmsFilteringOptions;
    var msFilterOptions = analysisOptions.MsFilteringOptions;
    var baselineDataset = config.Analysis.MetaData.BaselineDataset;
    var baselineFeatures = LoadBaselineData(baselineDataset,
                                            msFilterOptions,
                                            analysisOptions.LcmsFindingOptions,
                                            lcmsFilterOptions,
                                            config.Analysis.DataProviders,
                                            config.Analysis.MassTagDatabase,
                                            config.Analysis.Options.AlignmentOptions.IsAlignmentBaselineAMasstagDB);

    // Clear any alignment data left over from a previous run.
    var alignmentData = new AlignmentDAOHibernate();
    alignmentData.ClearAll();

    var providers = config.Analysis.DataProviders;
    var featureCache = new FeatureLoader { Providers = providers };
    RegisterProgressNotifier(featureCache);

    MassTagDatabase database = null;
    if (config.Analysis.MassTagDatabase != null)
    {
        database = new MassTagDatabase(config.Analysis.MassTagDatabase,
                                       config.Analysis.Options.AlignmentOptions.MassTagObservationCount);
    }

    SingletonDataProviders.Providers = config.Analysis.DataProviders;

    foreach (var dataset in datasets)
    {
        if (dataset.IsBaseline)
            continue;

        var features = featureCache.LoadDataset(dataset,
                                                analysisOptions.MsFilteringOptions,
                                                analysisOptions.LcmsFindingOptions,
                                                analysisOptions.LcmsFilteringOptions);
        features = AlignDataset(features, baselineFeatures, database, dataset, baselineDataset);
        featureCache.CacheFeatures(features);
    }

    UmcLoaderFactory.Status -= UMCLoaderFactory_Status;
}
private void LoadMtdb(AnalysisConfig config)
{
    UpdateStatus("Loading the database from the SQLite result database.");
    var provider = new MassTagDatabaseLoaderCache { Provider = m_config.Analysis.DataProviders.MassTags };
    RegisterProgressNotifier(provider);

    var database = provider.LoadDatabase();
    config.Analysis.MassTagDatabase = database;

    if (MassTagsLoaded != null)
    {
        MassTagsLoaded(this, new MassTagsLoadedEventArgs(database.MassTags, database));
    }

    DeRegisterProgressNotifier(provider);
}
/// <summary>
/// Loads the mass tag database and persists it to the analysis database when one is configured.
/// </summary>
/// <param name="config">Analysis configuration that receives the loaded mass tag database.</param>
private void CreateMtdb(AnalysisConfig config)
{
    MassTagDatabase database;

    // Load the mass tag database if we are aligning to it, or if we are
    // peak matching (but aligning to a reference dataset).
    if (m_config.Analysis.Options.AlignmentOptions.IsAlignmentBaselineAMasstagDB)
    {
        UpdateStatus("Loading Mass Tag database from database: " + m_config.Analysis.MetaData.Database.DatabaseName);
        database = MtdbLoaderFactory.LoadMassTagDatabase(m_config.Analysis.MetaData.Database,
                                                         m_config.Analysis.Options.MassTagDatabaseOptions);
    }
    else if (m_config.Analysis.MetaData.Database != null &&
             m_config.Analysis.MetaData.Database.DatabaseFormat != MassTagDatabaseFormat.None)
    {
        UpdateStatus("Loading Mass Tag database from database: " + m_config.Analysis.MetaData.Database.DatabaseName);
        database = MtdbLoaderFactory.LoadMassTagDatabase(m_config.Analysis.MetaData.Database,
                                                         m_config.Analysis.Options.MassTagDatabaseOptions);
    }
    else
    {
        config.Analysis.MassTagDatabase = null;
        config.ShouldPeakMatch = false;
        return;
    }

    if (database != null)
    {
        config.ShouldPeakMatch = true;
        var totalMassTags = database.MassTags.Count;
        UpdateStatus("Loaded " + totalMassTags + " mass tags.");
    }

    config.Analysis.MassTagDatabase = database;
    if (database == null)
        return;

    // Persist the mass tags, their proteins, and the tag-to-protein map.
    config.Analysis.DataProviders.MassTags.AddAll(database.MassTags);

    var proteinCache = new ProteinDAO();
    proteinCache.AddAll(database.AllProteins);

    var map = (from massTagId in database.Proteins.Keys
               from p in database.Proteins[massTagId]
               select new MassTagToProteinMap
               {
                   ProteinId = p.ProteinId,
                   MassTagId = massTagId,
                   RefId = p.RefId
               }).ToList();

    var tempCache = new GenericDAOHibernate<MassTagToProteinMap>();
    tempCache.AddAll(map);

    if (MassTagsLoaded != null)
        MassTagsLoaded(this, new MassTagsLoadedEventArgs(database.MassTags, database));
}
/// <summary>
/// Processes the MultiAlign analysis data.
/// </summary>
public int StartMultiAlign(AnalysisConfig config, IAnalysisReportGenerator reporter)
{
    m_reportCreator = reporter;
    m_config = config;

    // Builds the list of algorithm providers.
    var builder = new AlgorithmBuilder();

    // Use these events to signal when the analysis is done or has failed.
    config.triggerEvent = new ManualResetEvent(false);
    config.errorEvent = new ManualResetEvent(false);
    m_config.stopEvent = new ManualResetEvent(false);
    config.errorException = null;

    // See if the user wants help.
    if (config.showHelp)
    {
        PrintHelp();
        config.errorEvent.Dispose();
        config.triggerEvent.Dispose();
        return 0;
    }

    // Validate the command line.
    var validated = AnalysisValidator.ValidateSetup(m_config);
    if (validated == AnalysisType.InvalidParameters)
    {
        PrintHelp();
        return 0;
    }

    // Set up the log path and analysis path, and print the version to the log file.
    SetupAnalysisEssentials();

    // Determine whether a valid database was specified to extract data from
    // or to restart an analysis.
    var databasePath = Path.Combine(config.AnalysisPath, config.AnalysisName);
    var databaseExists = File.Exists(databasePath);
    var createDatabase = ShouldCreateDatabase(validated, databaseExists);

    // Make sure that we were not told to skip to a later part of the analysis.
    if (config.InitialStep >= AnalysisStep.Alignment)
    {
        createDatabase = false;
    }

    var result = 0;
    switch (validated)
    {
        case AnalysisType.Full:
            result = PerformAnalysis(config, builder, validated, createDatabase);
            break;
    }
    return result;
}
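// Hedged usage sketch (not from the source): how the console entry point above might be driven.
// It assumes StartMultiAlign lives on AnalysisController and that ProcessCommandLineArguments
// (shown later in this section) is reachable from here; the Main signature itself is illustrative.
public static int Main(string[] args)
{
    var config = new AnalysisConfig();
    ProcessCommandLineArguments(args, config);

    var reporter = new AnalysisReportGenerator { Config = config };
    var controller = new AnalysisController();
    return controller.StartMultiAlign(config, reporter);
}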
private static bool ValidateNames(AnalysisConfig config, ref string errorMessage)
{
    var isStepValid = true;
    if (config.AnalysisPath == null)
    {
        errorMessage = "An output folder location needs to be supplied.";
        isStepValid = false;
    }
    else
    {
        var chars = Path.GetInvalidPathChars();
        var name = config.AnalysisPath;
        foreach (var c in chars)
        {
            if (name.IndexOf(c) >= 0)
            {
                errorMessage = "The path you provided has invalid characters.";
                return false;
            }
        }

        //if (!System.IO.Directory.Exists(config.AnalysisPath))
        //{
        //    errorMessage = "The root folder you specified does not exist or is invalid.";
        //    isStepValid = false;
        //}

        if (config.AnalysisName == null)
        {
            errorMessage = "An analysis name needs to be supplied.";
            isStepValid = false;
        }
    }

    return isStepValid;
}
public AnalysisController()
{
    m_config = null;
    m_reportCreator = null;
    m_chargeMap = new Dictionary<int, int>();
}
public AnalysisStatusArgs(AnalysisConfig configuration)
{
    Configuration = configuration;
}
/// <summary>
/// Shows the new analysis setup.
/// </summary>
private void ShowNewAnalysisSetup()
{
    string message;
    var canStart = StateModerator.CanPerformNewAnalysis(out message);
    Status = message;
    if (!canStart)
    {
        return;
    }

    ApplicationStatusMediator.SetStatus("Creating new analysis.");

    StateModerator.CurrentViewState = ViewState.SetupAnalysisView;
    StateModerator.CurrentAnalysisState = AnalysisState.Setup;

    var config = new AnalysisConfig
    {
        Analysis = new MultiAlignAnalysis(),
        AnalysisPath = MainDataDirectory,
        AnalysisName = MainDataName
    };
    config.Analysis.AnalysisType = AnalysisType.Full;
    config.Analysis.Options.AlignmentOptions.IsAlignmentBaselineAMasstagDB = false;

    AnalysisSetupViewModel = new AnalysisSetupViewModel(config);
    AnalysisSetupViewModel.AnalysisQuit += AnalysisSetupViewModel_AnalysisQuit;
    AnalysisSetupViewModel.AnalysisStart += AnalysisSetupViewModel_AnalysisStart;
    AnalysisSetupViewModel.CurrentStep = AnalysisSetupStep.DatasetSelection;
}
public void Start(AnalysisConfig config)
{
    // Clear messages and gallery images from any previous run.
    Messages.Clear();
    GalleryImages.Clear();

    IsAnalysisRunning = true;
    Reporter.Config = config;
    m_configuration = config;

    Controller = new AnalysisController();
    Controller.AnalysisComplete += Controller_AnalysisComplete;
    Controller.AnalysisError += Controller_AnalysisError;
    Controller.AnalysisCancelled += Controller_AnalysisCancelled;
    Controller.AnalysisStarted += ControllerOnAnalysisStarted;

    // Start the analysis.
    Controller.StartMultiAlignGui(config, this);
}
/// <summary>
/// Validates the input options to make sure everything required is set.
/// </summary>
/// <returns>The type of analysis that the supplied options describe.</returns>
public static AnalysisType ValidateSetup(AnalysisConfig config)
{
    var analysisType = AnalysisType.Full;

    var isExporting = (config.ExporterNames.CrossTabPath != null);
    isExporting = (isExporting || config.ExporterNames.ClusterScanPath != null);
    isExporting = (isExporting || config.ExporterNames.ClusterMSMSPath != null);
    isExporting = (isExporting || config.ExporterNames.CrossTabAbundance != null);

    // --------------------------------------------------------------------------------
    // Make sure that the analysis name and path are provided first.
    // --------------------------------------------------------------------------------
    if (config.AnalysisName == null)
    {
        Logger.PrintMessage("No analysis database name provided.");
        analysisType = AnalysisType.InvalidParameters;
        return analysisType;
    }

    if (config.AnalysisPath == null)
    {
        Logger.PrintMessage("No analysis path provided.");
        analysisType = AnalysisType.InvalidParameters;
        return analysisType;
    }

    // --------------------------------------------------------------------------------
    // If no input is provided, then they are using the database.
    // --------------------------------------------------------------------------------
    if (config.InputPaths == null)
    {
        Logger.PrintMessage("No input file provided.");
        analysisType = AnalysisType.ExportDataOnly;
    }

    if (config.ParameterFile == null)
    {
        Logger.PrintMessage("No parameter file specified.");
        analysisType = AnalysisType.ExportDataOnly;
    }

    // --------------------------------------------------------------------------------
    // Determine whether they are only exporting data.
    // --------------------------------------------------------------------------------
    if (analysisType == AnalysisType.ExportDataOnly)
    {
        // Import the factors into the database if requested.
        if (config.ShouldUseFactors)
        {
            Logger.PrintMessage("Importing factors only");
            analysisType = AnalysisType.FactorImporting;
        }
        else if (!isExporting)
        {
            Logger.PrintMessage("No export file names provided.");
            analysisType = AnalysisType.InvalidParameters;
            return analysisType;
        }
    }

    return analysisType;
}
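// Quick reference for the outcomes produced by the branches above (derived directly from the
// checks in ValidateSetup; added here as documentation, not part of the original source):
//   AnalysisName == null or AnalysisPath == null                        -> AnalysisType.InvalidParameters
//   InputPaths or ParameterFile missing, ShouldUseFactors == true       -> AnalysisType.FactorImporting
//   InputPaths or ParameterFile missing, no exporter paths set          -> AnalysisType.InvalidParameters
//   InputPaths or ParameterFile missing, at least one exporter path set -> AnalysisType.ExportDataOnly
//   everything supplied                                                 -> AnalysisType.Full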
/// <summary>
/// Performs the analysis for the GUI, reporting progress through the worker object.
/// </summary>
private void PerformAnalysisGui(AnalysisConfig config, AlgorithmBuilder builder, AnalysisType validated,
                                bool createDatabase, WorkerObject worker)
{
    Logger.PrintMessage("Performing analysis.");

    MultiAlignAnalysisProcessor processor = null;

    try
    {
        // Create or connect to the underlying analysis database.
        var providers = SetupDataProviders(createDatabase);

        // Create the clustering, analysis, and plotting paths.
        builder.BuildClusterer(config.Analysis.Options.LcmsClusteringOptions.LcmsFeatureClusteringAlgorithm);

        config.Analysis.DataProviders = providers;
        config.Analysis.AnalysisType = validated;

        ConstructPlotPath();
        ExportParameterFile();

        Logger.PrintSpacer();
        PrintParameters(config.Analysis, createDatabase);
        Logger.PrintSpacer();

        // Set up the processor and tell it whether to load data or not.
        processor = ConstructAnalysisProcessor(builder, providers);
        processor.ShouldLoadData = createDatabase;

        // Construct the dataset information for export.
        Logger.PrintMessage("Storing dataset information into the database.");
        var information = config.Analysis.MetaData.Datasets.ToList();
        m_config.Analysis.DataProviders.DatasetCache.AddAll(information);

        Logger.PrintMessage("Creating exporter options.");
        if (config.ExporterNames.CrossTabPath == null)
        {
            config.ExporterNames.CrossTabPath = config.AnalysisName.Replace(".db3", "");
        }
        if (config.ExporterNames.CrossTabAbundance == null)
        {
            config.ExporterNames.CrossTabAbundance = config.AnalysisName.Replace(".db3", "");
        }
        ConstructExporting();

        Logger.PrintMessage("Cleaning up old analysis branches.");
        CleanupOldAnalysisBranches(config);

        Logger.PrintMessage("Analysis Started.");
        processor.StartAnalysis(config);

        var handleId = WaitHandle.WaitAny(new WaitHandle[] { config.triggerEvent, config.errorEvent, config.stopEvent });

        if (handleId == 1)
        {
            Logger.PrintMessageWorker("There was an error during processing.", 1, false);
            config.triggerEvent.Dispose();
            config.errorEvent.Dispose();
            processor.Dispose();

            if (AnalysisError != null)
            {
                AnalysisError(this, null);
            }
            return;
        }

        if (handleId == 2)
        {
            Logger.PrintMessageWorker("Stopping the analysis.", 1, false);
            processor.StopAnalysis();
            worker.SynchEvent.Set();
            Thread.Sleep(50);

            config.triggerEvent.Dispose();
            config.errorEvent.Dispose();
            processor.Dispose();

            if (AnalysisCancelled != null)
            {
                AnalysisCancelled(this, null);
            }
            return;
        }
    }
    catch (Exception ex)
    {
        Logger.PrintMessage("There was an error performing the analysis.");
        Logger.PrintMessage(ex.Message);
        Logger.PrintMessage(ex.StackTrace);
        return;
    }

    try
    {
        m_reportCreator.CreatePlotReport();
    }
    catch (Exception ex)
    {
        Logger.PrintMessage(
            "There was an error when trying to create the final analysis plots; however, the data analysis is complete.");
        Logger.PrintMessage(ex.Message);
        Logger.PrintMessage(ex.StackTrace);
    }

    config.triggerEvent.Dispose();
    config.errorEvent.Dispose();
    processor.Dispose();
    CleanupDataProviders();

    Logger.PrintMessage("Indexing Database Clusters for Faster Retrieval");
    var databasePath = Path.Combine(m_config.AnalysisPath, m_config.AnalysisName);
    DatabaseIndexer.IndexClusters(databasePath);
    Logger.PrintMessage("Indexing Database Features");
    DatabaseIndexer.IndexFeatures(databasePath);
    Logger.PrintMessage("Analysis Complete");

    if (AnalysisComplete != null)
    {
        AnalysisComplete(this, null);
    }
}
/// <summary>
/// Performs the analysis for the console workflow.
/// </summary>
private int PerformAnalysis(AnalysisConfig config, AlgorithmBuilder builder, AnalysisType validated, bool createDatabase)
{
    InputAnalysisInfo analysisSetupInformation;

    Logger.PrintMessage("Performing analysis.");

    // Read the input files.
    bool useMtdb;
    var isInputFileOk = ReadInputDefinitionFile(out analysisSetupInformation, out useMtdb);
    if (!isInputFileOk)
        return 1;

    // Figure out if the factors are defined.
    if (config.options.ContainsKey("-factors"))
    {
        Logger.PrintMessage("Factor file specified.");
        var factorFile = config.options["-factors"][0];
        analysisSetupInformation.FactorFile = factorFile;
    }

    // Create or connect to the underlying analysis database.
    var providers = SetupDataProviders(createDatabase);

    // Create the clustering, analysis, and plotting paths.
    ConstructClustering(builder);

    config.Analysis = ConstructAnalysisObject(analysisSetupInformation);
    config.Analysis.DataProviders = providers;
    config.Analysis.AnalysisType = validated;

    ConstructPlotPath();

    // Read the parameter files.
    ReadParameterFile();

    // Construct the dataset information for export.
    ConstructDatasetInformation(analysisSetupInformation, config.Analysis, createDatabase);

    if (config.ShouldUseFactors)
    {
        ConstructFactorInformation(analysisSetupInformation, config.Analysis.MetaData.Datasets, config.Analysis.DataProviders);
    }

    var isBaselineSpecified = ConstructBaselines(analysisSetupInformation, config.Analysis.MetaData, useMtdb);
    if (!isBaselineSpecified)
    {
        return 1;
    }

    ExportParameterFile();
    Logger.PrintSpacer();
    PrintParameters(config.Analysis, createDatabase);
    Logger.PrintSpacer();

    // Set up the processor and tell it whether to load data or not.
    var processor = ConstructAnalysisProcessor(builder, providers);
    processor.ShouldLoadData = createDatabase;

    Logger.PrintMessage("Creating exporter options.");
    if (config.ExporterNames.CrossTabPath == null)
    {
        config.ExporterNames.CrossTabPath = config.AnalysisName.Replace(".db3", "");
    }
    if (config.ExporterNames.CrossTabAbundance == null)
    {
        config.ExporterNames.CrossTabAbundance = config.AnalysisName.Replace(".db3", "");
    }
    ConstructExporting();

    Logger.PrintMessage("Cleaning up old analysis branches.");
    CleanupOldAnalysisBranches(config);

    // Start the analysis.
    Logger.PrintMessage("Analysis Started.");
    processor.StartAnalysis(config);
    var handleId = WaitHandle.WaitAny(new WaitHandle[] { config.triggerEvent, config.errorEvent });

    var wasError = false;
    if (handleId != 1)
    {
        // Finalize the analysis: plots, cleanup, and database indexing.
        try
        {
            m_reportCreator.CreatePlotReport();
        }
        catch (Exception ex)
        {
            wasError = true;
            Logger.PrintMessage(
                "There was an error when trying to create the final analysis plots; however, the data analysis is complete.");
            Logger.PrintMessage(ex.Message);
            Logger.PrintMessage(ex.StackTrace);
        }

        config.Analysis.Dispose();
        config.triggerEvent.Dispose();
        config.errorEvent.Dispose();
        processor.Dispose();
        CleanupDataProviders();

        if (!wasError)
        {
            Logger.PrintMessage("Indexing Database Features");
            DatabaseIndexer.IndexFeatures(config.AnalysisPath);
            DatabaseIndexer.IndexClusters(config.AnalysisPath);
        }

        Logger.PrintMessage("Analysis Complete.");
        return 0;
    }

    Logger.PrintMessage("There was an error during processing.");
    return 1;
}
private void ImportFactors(AnalysisConfig config, bool databaseExists)
{
    Logger.PrintMessage("Updating factors");
    if (!databaseExists)
    {
        Logger.PrintMessage("The database you specified to extract data from does not exist.");
        return;
    }

    // Create access to data.
    var providers = SetupDataProviders(false);
    if (providers == null)
    {
        Logger.PrintMessage("Could not create connection to database.");
        return;
    }

    // Find all the datasets.
    var datasetsFactors = providers.DatasetCache.FindAll();
    if (datasetsFactors == null || datasetsFactors.Count == 0)
    {
        Logger.PrintMessage("There are no datasets present in the current database.");
        CleanupDataProviders();
        return;
    }

    var info = new InputAnalysisInfo();
    if (config.options.ContainsKey("-factors"))
    {
        Logger.PrintMessage("Factor file specified.");
        var factorFile = config.options["-factors"][0];
        info.FactorFile = factorFile;
    }

    ConstructFactorInformation(info, datasetsFactors.ToObservableCollection(), providers);
    CleanupDataProviders();
}
/// <summary>
/// Performs peak matching with the loaded clusters.
/// </summary>
private void PerformPeakMatching(AnalysisConfig config)
{
    if (!config.ShouldPeakMatch)
        return;

    if (m_config.Analysis.MassTagDatabase == null)
    {
        UpdateStatus("Could not peak match. The database was not set.");
    }
    else
    {
        var clusters = m_config.Analysis.DataProviders.ClusterCache.FindAll();
        var peakMatcher = m_algorithms.PeakMatcher;

        UpdateStatus("Performing Peak Matching");

        var adapter = peakMatcher as STACAdapter<UMCClusterLight>;
        if (adapter != null)
        {
            UpdateStatus(adapter.Options.UseDriftTime ? "Using drift time." : "Ignoring drift time.");
        }

        var matchResults = new PeakMatchingResults<UMCClusterLight, MassTagLight>();
        clusters.ForEach(x => x.Net = x.Net);
        matchResults.Matches = peakMatcher.PerformPeakMatching(clusters, m_config.Analysis.MassTagDatabase);

        if (adapter != null)
        {
            matchResults.FdrTable = adapter.Matcher.StacFdrTable;
        }
        m_config.Analysis.MatchResults = matchResults;

        if (FeaturesPeakMatched != null)
        {
            FeaturesPeakMatched(this, new FeaturesPeakMatchedEventArgs(clusters, matchResults.Matches));
        }

        UpdateStatus("Updating database with peak matched results.");
        var writer = new PeakMatchResultsWriter();
        int matchedMassTags;
        int matchedProteins;
        writer.WritePeakMatchResults(matchResults, m_config.Analysis.MassTagDatabase, out matchedMassTags, out matchedProteins);

        UpdateStatus(string.Format("Found {0} mass tag matches. Matching to {1} potential proteins.",
                                   matchedMassTags, matchedProteins));
    }
}
/// <summary>
/// Static constructor: creates the analysis configuration and report generator.
/// </summary>
static Program()
{
    m_config = new AnalysisConfig();
    m_reportCreator = new AnalysisReportGenerator();
    m_reportCreator.Config = m_config;
}
public AnalysisSetupViewModel(AnalysisConfig configuration)
{
    SetupViewModels(configuration);
}
public void BuildAnalysisGraph(AnalysisConfig config)
{
    var graph = new AnalysisGraph();

    // Only create a feature database.
    if (config.ShouldCreateFeatureDatabaseOnly)
    {
        graph.AddNode(CreateNode(AnalysisStep.FindFeatures,
                                 "Feature Creation",
                                 "Creates or loads features from deisotoped data."));
    }
    else
    {
        if (config.ShouldLoadMTDB)
        {
            var node = new AnalysisGraphNode
            {
                Name = "Load MTDB",
                Description = "Loads an AMT Tag Database for alignment or peptide identification",
                CurrentStep = AnalysisStep.LoadMtdb,
                Method = config.InitialStep == AnalysisStep.FindFeatures
                             ? CreateMtdb
                             : new DelegateAnalysisMethod(LoadMtdb)
            };
            graph.AddNode(node);
        }

        switch (config.InitialStep)
        {
            case AnalysisStep.LoadMtdb:
            case AnalysisStep.FindFeatures:
                graph.AddNode(CreateNode(AnalysisStep.FindFeatures,
                                         "Feature Creation And Alignment",
                                         "Creates or loads features from deisotoped data and aligns them to a baseline."));
                graph.AddNode(CreateNode(AnalysisStep.Clustering,
                                         "Cluster Features",
                                         "Clusters features across datasets."));
                graph.AddNode(CreateNode(AnalysisStep.PeakMatching,
                                         "Feature Identification",
                                         "Matches features to an AMT Tag database."));
                break;
            case AnalysisStep.Alignment:
                graph.AddNode(CreateNode(AnalysisStep.Alignment,
                                         "Feature Alignment",
                                         "Aligns features to a reference to correct for systematic errors."));
                graph.AddNode(CreateNode(AnalysisStep.Clustering,
                                         "Cluster Features",
                                         "Clusters features across datasets."));
                graph.AddNode(CreateNode(AnalysisStep.PeakMatching,
                                         "Feature Identification",
                                         "Matches features to an AMT Tag database."));
                break;
            case AnalysisStep.Clustering:
                graph.AddNode(CreateNode(AnalysisStep.Clustering,
                                         "Cluster Features",
                                         "Clusters features across datasets."));
                graph.AddNode(CreateNode(AnalysisStep.PeakMatching,
                                         "Feature Identification",
                                         "Matches features to an AMT Tag database."));
                break;
            case AnalysisStep.PeakMatching:
                graph.AddNode(CreateNode(AnalysisStep.PeakMatching,
                                         "Feature Identification",
                                         "Matches features to an AMT Tag database."));
                break;
        }
    }
    config.AnalysisGraph = graph;
}
/// <summary>
/// Starts a MultiAlign analysis from the GUI on a background worker.
/// Note: this duplicates functionality in the console path; the common feature/functionality
/// should be factored into shared code.
/// </summary>
/// <param name="config">Analysis configuration to run.</param>
/// <param name="reporter">Report generator used for plots and status.</param>
public void StartMultiAlignGui(AnalysisConfig config, IAnalysisReportGenerator reporter)
{
    m_worker = new BackgroundWorker();
    m_worker.DoWork += m_worker_DoWork;

    m_workerManager = new WorkerObject(m_worker);
    m_reportCreator = reporter;
    m_config = config;

    m_worker.WorkerSupportsCancellation = true;
    m_worker.RunWorkerAsync();
}
private void SetupViewModels(AnalysisConfig configuration)
{
    m_config = configuration;
    CurrentStep = AnalysisSetupStep.DatasetSelection;

    CancelCommand = new AnalysisCancelCommand(this);
    BackCommand = new AnalysisBackCommand(this);
    NextCommand = new AnalysisNextCommand(this);

    DatasetSelectionViewModel = new AnalysisDatasetSelectionViewModel(configuration.Analysis);
    BaselineSelectionViewModel = new AnalysisBaselineSelectionViewModel(configuration.Analysis);
    AnalysisNamingViewModel = new AnalysisNamingViewModel(configuration);
    AnalysisOptionsViewModel = new AnalysisOptionsViewModel(configuration.Analysis.Options);
}
/// <summary>
/// Aligns all of the datasets to the baseline.
/// </summary>
public void PerformAlignment(AnalysisConfig config)
{
    UpdateStatus("Performing Alignment");

    // Connect to the database of features.
    var featureCache = config.Analysis.DataProviders.FeatureCache;
    var options = config.Analysis.Options;

    // Load the baseline data.
    var baselineInfo = config.Analysis.MetaData.BaselineDataset;
    var baselineFeatures = LoadBaselineData(baselineInfo,
                                            options.MsFilteringOptions,
                                            options.LcmsFindingOptions,
                                            options.LcmsFilteringOptions,
                                            config.Analysis.DataProviders,
                                            config.Analysis.MassTagDatabase,
                                            config.Analysis.Options.AlignmentOptions.IsAlignmentBaselineAMasstagDB);

    // Create the alignment cache and clear it.
    var alignmentCache = new AlignmentDAOHibernate();
    alignmentCache.ClearAll();

    foreach (var datasetInfo in config.Analysis.MetaData.Datasets)
    {
        if (!datasetInfo.IsBaseline)
        {
            UpdateStatus("Retrieving data from " + datasetInfo.DatasetName + " for alignment.");
            var features = featureCache.FindByDatasetId(datasetInfo.DatasetId) as IList<UMCLight>;
            features = AlignDataset(features, baselineFeatures, config.Analysis.MassTagDatabase, datasetInfo, baselineInfo);
            featureCache.UpdateAll(features);

            // This dataset is done.
            if (FeaturesLoaded != null)
                FeaturesLoaded(this, new FeaturesLoadedEventArgs(datasetInfo, features));
        }
        else
        {
            config.Analysis.AlignmentData.Add(null);
        }
    }
}
/// <summary>
/// Processes the command line arguments.
/// </summary>
/// <param name="args">Raw command line arguments.</param>
/// <param name="config">Analysis configuration to populate from the arguments.</param>
public static void ProcessCommandLineArguments(string[] args, AnalysisConfig config)
{
    var jobID = -1;
    var worked = false;

    config.options = CommandLineParser.ProcessArgs(args, 0);
    foreach (var option in config.options.Keys)
    {
        try
        {
            var values = config.options[option];
            switch (option)
            {
                case "-job":
                    worked = int.TryParse(values[0], out jobID);
                    if (worked)
                    {
                        config.JobID = jobID;
                    }
                    else
                    {
                        Logger.PrintMessage(string.Format("The job Id {0} was not understood", values[0]));
                        return;
                    }
                    break;
                case "-charge":
                    config.ChargeState = Convert.ToInt32(values[0]);
                    config.ShouldClusterOnlyCharge = true;
                    break;
                //--------------------------------------------------------------------
                // Path and name
                //--------------------------------------------------------------------
                case "-path":
                    config.AnalysisPath = values[0];
                    break;
                case "-name":
                    config.AnalysisName = values[0];
                    break;
                //--------------------------------------------------------------------
                // Files and parameters
                //--------------------------------------------------------------------
                case "-files":
                    config.InputPaths = values[0];
                    break;
                case "-params":
                    config.ParameterFile = values[0];
                    break;
                case "-usefactors":
                    config.ShouldUseFactors = true;
                    break;
                //--------------------------------------------------------------------
                // Log and HTML names
                //--------------------------------------------------------------------
                case "-log":
                    config.logPath = values[0];
                    break;
                case "-html":
                    config.HtmlPathName = values[0];
                    break;
                //--------------------------------------------------------------------
                // Print help
                //--------------------------------------------------------------------
                case "-h":
                case "-help":
                    config.showHelp = true;
                    break;
                //--------------------------------------------------------------------
                // Feature databases
                //--------------------------------------------------------------------
                case "-useexistingdatabase":
                    config.ShouldUseExistingDatabase = true;
                    break;
                case "-buildfeaturedatabase":
                    config.ShouldCreateFeatureDatabaseOnly = true;
                    break;
                //--------------------------------------------------------------------
                // Data exporting
                //--------------------------------------------------------------------
                case "-exportmsms":
                    config.ExporterNames.ClusterMSMSPath = values[0];
                    config.ShouldExportMSMS = true;
                    break;
                case "-exportsics":
                    config.ShouldExportSICs = true;
                    break;
                case "-exportcrosstab":
                    config.ExporterNames.CrossTabPath = values[0];
                    break;
                case "-exportabundances":
                    config.ExporterNames.CrossTabAbundance = values[0];
                    break;
                case "-export":
                    config.ExporterNames.ClusterScanPath = values[0];
                    break;
                case "-noplots":
                    config.ShouldCreateChargeStatePlots = false;
                    config.ShouldCreatePlots = false;
                    break;
                default:
                    Logger.PrintMessage("One option was not understood: " + option);
                    break;
            }
        }
        catch (ArgumentOutOfRangeException)
        {
            Logger.PrintMessage(string.Format("You did not provide enough information for the option {0}", option));
            return;
        }
    }
}
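// Hedged usage sketch (not from the source): builds an argument list equivalent to a command
// line such as "-path C:\analysis -name Results.db3 -files datasets.txt -params params.xml" and
// feeds it through the parser above. The flag spellings come from the switch cases; the paths
// and file names are placeholders, and this helper method itself is hypothetical.
private static AnalysisConfig BuildExampleConfig()
{
    var config = new AnalysisConfig();
    var args = new[]
    {
        "-path", @"C:\analysis",
        "-name", "Results.db3",
        "-files", "datasets.txt",
        "-params", "params.xml",
        "-log", "run.log"
    };
    ProcessCommandLineArguments(args, config);
    return config;
}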
/// <summary>
/// Performs clustering of LC-MS features across datasets.
/// </summary>
public void PerformLcmsFeatureClustering(AnalysisConfig config)
{
    var analysis = config.Analysis;
    var clusterer = m_algorithms.Clusterer;

    RegisterProgressNotifier(clusterer);
    UpdateStatus("Using Cluster Algorithm: " + clusterer);

    clusterer.Parameters = LcmsClusteringOptions.ConvertToOmics(analysis.Options.LcmsClusteringOptions);

    // This just tells us whether we are using mammoth memory partitions or not.
    var featureCache = config.Analysis.DataProviders.FeatureCache;
    var clusterCount = 0;
    var providers = config.Analysis.DataProviders;

    // Determine whether charge states need to be clustered separately.
    // IMS data is said to require charge separation.
    if (!analysis.Options.LcmsClusteringOptions.ShouldSeparateCharge)
    {
        UpdateStatus("Clustering features from all charge states.");
        UpdateStatus("Retrieving features for clustering from cache.");
        var features = featureCache.FindAll();
        UpdateStatus(string.Format("Clustering {0} features. ", features.Count));

        var clusters = new List<UMCClusterLight>();
        clusters = clusterer.Cluster(features, clusters);
        foreach (var cluster in clusters)
        {
            cluster.Id = clusterCount++;
            cluster.UmcList.ForEach(x => x.ClusterId = cluster.Id);

            // Update the cluster with MS/MS and identification statistics.
            foreach (var feature in cluster.UmcList)
            {
                cluster.MsMsCount += feature.MsMsCount;
                cluster.IdentifiedSpectraCount += feature.IdentifiedSpectraCount;
            }
        }

        providers.ClusterCache.AddAll(clusters);
        providers.FeatureCache.UpdateAll(features);
        config.Analysis.Clusters = clusters;

        UpdateStatus(string.Format("Found {0} clusters.", clusters.Count));

        if (FeaturesClustered != null)
        {
            FeaturesClustered(this, new FeaturesClusteredEventArgs(clusters));
        }
    }
    else
    {
        // Cluster each charge state separately (probably IMS data).
        var maxChargeState = featureCache.FindMaxCharge();

        UpdateStatus("Clustering charge states individually.");
        for (var chargeState = 1; chargeState <= maxChargeState; chargeState++)
        {
            var features = featureCache.FindByCharge(chargeState);
            if (features.Count < 1)
            {
                UpdateStatus(string.Format("No features found for charge state {0}. Stopping clustering", chargeState));
                break;
            }

            UpdateStatus(string.Format("Retrieved and is clustering {0} features from charge state {1}.",
                                       features.Count, chargeState));

            var clusters = clusterer.Cluster(features);
            foreach (var cluster in clusters)
            {
                cluster.Id = clusterCount++;
                cluster.UmcList.ForEach(x => x.ClusterId = cluster.Id);

                // Update the cluster with MS/MS and identification statistics.
                foreach (var feature in cluster.Features)
                {
                    cluster.MsMsCount += feature.MsMsCount;
                    cluster.IdentifiedSpectraCount += feature.IdentifiedSpectraCount;
                }
            }

            config.Analysis.DataProviders.ClusterCache.AddAll(clusters);
            config.Analysis.DataProviders.FeatureCache.UpdateAll(features);

            UpdateStatus(string.Format("Found {0} clusters.", clusters.Count));

            if (FeaturesClustered != null)
            {
                FeaturesClustered(this, new FeaturesClusteredEventArgs(clusters, chargeState));
            }
        }

        config.Analysis.Clusters = config.Analysis.DataProviders.ClusterCache.FindAll();
    }

    DeRegisterProgressNotifier(clusterer);
    UpdateStatus(string.Format("Finished clustering. Found {0} total clusters.", clusterCount));
}
/// <summary>
/// Starts a MultiAlign analysis job on a separate thread.
/// </summary>
public void StartAnalysis(AnalysisConfig config)
{
    m_config = config;

    if (m_algorithms == null)
    {
        throw new NullReferenceException("The algorithms have not been set for this analysis.");
    }

    if (config.Analysis == null)
    {
        throw new NullReferenceException("The analysis data storage cannot be null.");
    }

    if (config.Analysis.DataProviders == null)
    {
        throw new NullReferenceException("The data cache providers have not been set for this analysis.");
    }

    Logger.LogPath = AnalysisPathUtils.BuildLogPath(config.AnalysisPath, config.AnalysisName);

    // Make sure we start with a fresh analysis.
    AbortAnalysisThread(m_analysisThread);

    var threadStart = new ThreadStart(PerformAnalysis);
    m_analysisThread = new Thread(threadStart);
    m_analysisThread.Start();
}
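// Hedged usage sketch (not from the source): wiring a processor before starting an analysis.
// The processor type name is taken from PerformAnalysisGui above, and the event names are the
// ones raised elsewhere in this section; whether they are exposed on the processor is an
// assumption here, and the handler bodies and surrounding method are placeholders.
private static void RunProcessor(MultiAlignAnalysisProcessor processor, AnalysisConfig config)
{
    processor.FeaturesClustered += (sender, e) => Logger.PrintMessage("Clustering finished.");
    processor.FeaturesPeakMatched += (sender, e) => Logger.PrintMessage("Peak matching finished.");

    processor.ShouldLoadData = true;    // load raw feature data on this run
    processor.StartAnalysis(config);    // runs on a separate thread, as shown above
}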
public void CleanupOldAnalysisBranches(AnalysisConfig config)
{
    switch (config.InitialStep)
    {
        case AnalysisStep.None:
        case AnalysisStep.LoadMtdb:
        case AnalysisStep.FindFeatures:
            // Nothing to clear when starting from the beginning.
            break;
        case AnalysisStep.Alignment:
            config.Analysis.DataProviders.FeatureCache.ClearAlignmentData();
            config.Analysis.DataProviders.ClusterCache.ClearAllClusters();
            config.Analysis.DataProviders.MassTagMatches.ClearAllMatches();
            break;
        case AnalysisStep.Clustering:
            config.Analysis.DataProviders.ClusterCache.ClearAllClusters();
            config.Analysis.DataProviders.MassTagMatches.ClearAllMatches();
            break;
        case AnalysisStep.PeakMatching:
            config.Analysis.DataProviders.MassTagMatches.ClearAllMatches();
            break;
    }
}