/// <summary>
/// Initializes a new instance of the <see cref="AlignmentSettingsViewModel" /> class.
/// </summary>
/// <param name="analysis">The analysis whose alignment options are edited.</param>
/// <param name="featureCache">Cache used to load features for alignment.</param>
/// <param name="alignmentWindowFactory">Factory for alignment display windows; a default is created when null.</param>
/// <param name="progressReporter">Progress sink; a no-op default is created when null.</param>
public AlignmentSettingsViewModel(MultiAlignAnalysis analysis, FeatureLoader featureCache, IAlignmentWindowFactory alignmentWindowFactory = null, IProgress<int> progressReporter = null)
{
    this.analysis = analysis;
    this.featureCache = featureCache;
    this.alignmentWindowFactory = alignmentWindowFactory ?? new AlignmentViewFactory();
    this.progress = progressReporter ?? new Progress<int>();
    this.aligner = new LCMSFeatureAligner();
    this.builder = new AlgorithmBuilder();

    // Enum-backed pick lists for the calibration and alignment algorithm combo boxes.
    this.CalibrationOptions = new ObservableCollection<AlignmentType>(Enum.GetValues(typeof(AlignmentType)).Cast<AlignmentType>());
    this.AlignmentAlgorithms = new ObservableCollection<FeatureAlignmentType>(Enum.GetValues(typeof(FeatureAlignmentType)).Cast<FeatureAlignmentType>());

    this.selectedDatasets = new ReadOnlyCollection<DatasetInformationViewModel>(new List<DatasetInformationViewModel>());
    this.alignmentInformation = new List<classAlignmentData>();

    // Track the current dataset selection so command availability stays in sync.
    this.MessengerInstance.Register<PropertyChangedMessage<IReadOnlyCollection<DatasetInformationViewModel>>>(
        this,
        sds =>
        {
            this.selectedDatasets = sds.NewValue;
            ThreadSafeDispatcher.Invoke(() => this.AlignToBaselineCommand.RaiseCanExecuteChanged());
            ThreadSafeDispatcher.Invoke(() => this.DisplayAlignmentCommand.RaiseCanExecuteChanged());
        });

    this.AlignToBaselineCommand = new RelayCommand(
        this.AsyncAlignToBaseline,
        () => this.SelectedBaseline != null &&
              this.selectedDatasets != null &&
              this.selectedDatasets.Count > 0 &&
              this.selectedDatasets.Any(file => !file.DoingWork));

    // BUG FIX: guard against a null selection (the messenger callback assigns
    // sds.NewValue, which may be null), matching the null check already used
    // by AlignToBaselineCommand above.
    this.DisplayAlignmentCommand = new RelayCommand(
        this.DisplayAlignment,
        () => this.selectedDatasets != null && this.selectedDatasets.Any(file => file.IsAligned));
}
/// <summary>
/// Initializes a new instance of the <see cref="ClusterSettingsViewModel" /> class.
/// </summary>
/// <param name="analysis">The analysis whose clustering options are edited.</param>
/// <param name="progressReporter">Progress sink. NOTE(review): currently unused by
/// this constructor; kept for caller compatibility — confirm intended wiring.</param>
public ClusterSettingsViewModel(MultiAlignAnalysis analysis, IProgress<int> progressReporter = null)
{
    this.analysis = analysis;
    this.options = analysis.Options;
    this.builder = new AlgorithmBuilder();

    this.ClusterFeaturesCommand = new RelayCommand(this.AsyncClusterFeatures);
    this.DisplayClustersCommand = new RelayCommand(this.DisplayFeatures);

    // Populate the enum-backed pick lists directly through the collection
    // constructors (consistent with AlignmentSettingsViewModel) instead of the
    // ToList().ForEach(Add) round-trip.
    this.DistanceMetrics = new ObservableCollection<DistanceMetric>(Enum.GetValues(typeof(DistanceMetric)).Cast<DistanceMetric>());
    this.CentroidRepresentations = new ObservableCollection<ClusterCentroidRepresentation>(Enum.GetValues(typeof(ClusterCentroidRepresentation)).Cast<ClusterCentroidRepresentation>());
    this.ClusteringMethods = new ObservableCollection<LcmsFeatureClusteringAlgorithmType>(Enum.GetValues(typeof(LcmsFeatureClusteringAlgorithmType)).Cast<LcmsFeatureClusteringAlgorithmType>());
}
/// <summary>
/// Performs the full analysis for the GUI pipeline: builds the clusterer,
/// stores dataset information, starts the processor, then waits for it to
/// complete (triggerEvent), fail (errorEvent), or be stopped (stopEvent).
/// On success the result database is indexed and AnalysisComplete is raised.
/// </summary>
private void PerformAnalysisGui(AnalysisConfig config, AlgorithmBuilder builder, AnalysisType validated, bool createDatabase, WorkerObject worker)
{
    Logger.PrintMessage("Performing analysis.");

    MultiAlignAnalysisProcessor processor = null;
    try
    {
        // Creates or connects to the underlying analysis database.
        var providers = SetupDataProviders(createDatabase);

        // Create the clustering, analysis, and plotting paths.
        builder.BuildClusterer(config.Analysis.Options.LcmsClusteringOptions.LcmsFeatureClusteringAlgorithm);
        config.Analysis.DataProviders = providers;
        config.Analysis.AnalysisType = validated;
        ConstructPlotPath();

        ExportParameterFile();
        Logger.PrintSpacer();
        PrintParameters(config.Analysis, createDatabase);
        Logger.PrintSpacer();

        // Setup the processor and tell it whether to load data or not.
        processor = ConstructAnalysisProcessor(builder, providers);
        processor.ShouldLoadData = createDatabase;

        // Construct the dataset information for export.
        Logger.PrintMessage("Storing dataset information into the database.");
        // NOTE: previously read through m_config; config is the same run's
        // configuration, so use the parameter consistently.
        var information = config.Analysis.MetaData.Datasets.ToList();
        config.Analysis.DataProviders.DatasetCache.AddAll(information);

        Logger.PrintMessage("Creating exporter options.");
        if (config.ExporterNames.CrossTabPath == null)
        {
            config.ExporterNames.CrossTabPath = config.AnalysisName.Replace(".db3", "");
        }
        if (config.ExporterNames.CrossTabAbundance == null)
        {
            config.ExporterNames.CrossTabAbundance = config.AnalysisName.Replace(".db3", "");
        }
        ConstructExporting();

        Logger.PrintMessage("Cleaning up old analysis branches.");
        CleanupOldAnalysisBranches(config);

        Logger.PrintMessage("Analysis Started.");
        processor.StartAnalysis(config);

        // Wait for the first signal: 0 = completed, 1 = error, 2 = stop requested.
        var handleId = WaitHandle.WaitAny(new WaitHandle[] { config.triggerEvent, config.errorEvent, config.stopEvent });

        if (handleId == 1)
        {
            Logger.PrintMessageWorker("There was an error during processing.", 1, false);
            DisposeAnalysisResources(config, processor);
            if (AnalysisError != null)
            {
                AnalysisError(this, null);
            }
            return;
        }

        if (handleId == 2)
        {
            Logger.PrintMessageWorker("Stopping the analysis.", 1, false);
            processor.StopAnalysis();
            worker.SynchEvent.Set();
            // Brief pause so the processor can observe the stop request before teardown.
            Thread.Sleep(50);
            DisposeAnalysisResources(config, processor);
            if (AnalysisCancelled != null)
            {
                AnalysisCancelled(this, null);
            }
            return;
        }
    }
    catch (Exception ex)
    {
        Logger.PrintMessage("There was an error Performing analysis");
        Logger.PrintMessage(ex.Message);
        Logger.PrintMessage(ex.StackTrace);

        // BUG FIX: this early return previously leaked the wait handles and
        // the processor; every other exit path disposes them.
        DisposeAnalysisResources(config, processor);
        return;
    }

    // Finalize the analysis plots; a failure here does not invalidate the data.
    try
    {
        m_reportCreator.CreatePlotReport();
    }
    catch (Exception ex)
    {
        Logger.PrintMessage("There was an error when trying to create the final analysis plots, however, the data analysis is complete.");
        Logger.PrintMessage(ex.Message);
        Logger.PrintMessage(ex.StackTrace);
    }

    DisposeAnalysisResources(config, processor);
    CleanupDataProviders();

    // Index the result database for faster cluster/feature retrieval.
    Logger.PrintMessage("Indexing Database Clusters for Faster Retrieval");
    var databasePath = Path.Combine(config.AnalysisPath, config.AnalysisName);
    DatabaseIndexer.IndexClusters(databasePath);
    Logger.PrintMessage("Indexing Database Features");
    DatabaseIndexer.IndexFeatures(databasePath);
    Logger.PrintMessage("Analysis Complete");

    if (AnalysisComplete != null)
    {
        AnalysisComplete(this, null);
    }
}

/// <summary>
/// Disposes the per-run wait handles and, when created, the analysis processor.
/// </summary>
/// <param name="config">Configuration owning the trigger and error events.</param>
/// <param name="processor">Processor to dispose; may be null if construction failed.</param>
private static void DisposeAnalysisResources(AnalysisConfig config, MultiAlignAnalysisProcessor processor)
{
    config.triggerEvent.Dispose();
    config.errorEvent.Dispose();
    if (processor != null)
    {
        processor.Dispose();
    }
}
/// <summary>
/// Performs the full analysis for the console pipeline: reads the input
/// definition and parameter files, constructs datasets/factors/baselines,
/// runs the processor, and indexes the result database on success.
/// </summary>
/// <returns>0 on success; 1 on bad input, missing baseline, or a processing error.</returns>
private int PerformAnalysis(AnalysisConfig config, AlgorithmBuilder builder, AnalysisType validated, bool createDatabase)
{
    InputAnalysisInfo analysisSetupInformation;

    Logger.PrintMessage("Performing analysis.");

    // Read the input files.
    bool useMtdb;
    var isInputFileOk = ReadInputDefinitionFile(out analysisSetupInformation, out useMtdb);
    if (!isInputFileOk)
    {
        return 1;
    }

    // Figure out if the factors are defined.
    if (config.options.ContainsKey("-factors"))
    {
        Logger.PrintMessage("Factor file specified.");
        var factorFile = config.options["-factors"][0];
        analysisSetupInformation.FactorFile = factorFile;
    }

    // Creates or connects to the underlying analysis database.
    var providers = SetupDataProviders(createDatabase);

    // Create the clustering, analysis, and plotting paths.
    ConstructClustering(builder);
    config.Analysis = ConstructAnalysisObject(analysisSetupInformation);
    config.Analysis.DataProviders = providers;
    config.Analysis.AnalysisType = validated;
    ConstructPlotPath();

    // Read the parameter files.
    ReadParameterFile();

    // Construct the dataset information for export.
    ConstructDatasetInformation(analysisSetupInformation, config.Analysis, createDatabase);

    if (config.ShouldUseFactors)
    {
        ConstructFactorInformation(analysisSetupInformation, config.Analysis.MetaData.Datasets, config.Analysis.DataProviders);
    }

    var isBaselineSpecified = ConstructBaselines(analysisSetupInformation, config.Analysis.MetaData, useMtdb);
    if (!isBaselineSpecified)
    {
        return 1;
    }

    ExportParameterFile();
    Logger.PrintSpacer();
    PrintParameters(config.Analysis, createDatabase);
    Logger.PrintSpacer();

    // Setup the processor and tell it whether to load data or not.
    var processor = ConstructAnalysisProcessor(builder, providers);
    processor.ShouldLoadData = createDatabase;

    Logger.PrintMessage("Creating exporter options.");
    if (config.ExporterNames.CrossTabPath == null)
    {
        config.ExporterNames.CrossTabPath = config.AnalysisName.Replace(".db3", "");
    }
    if (config.ExporterNames.CrossTabAbundance == null)
    {
        config.ExporterNames.CrossTabAbundance = config.AnalysisName.Replace(".db3", "");
    }
    ConstructExporting();

    Logger.PrintMessage("Cleaning up old analysis branches.");
    CleanupOldAnalysisBranches(config);

    // Start the analysis and wait: index 0 = completed, 1 = error.
    Logger.PrintMessage("Analysis Started.");
    processor.StartAnalysis(config);
    var handleId = WaitHandle.WaitAny(new WaitHandle[] { config.triggerEvent, config.errorEvent });

    var wasError = false;
    if (handleId != 1)
    {
        // Finalize the analysis plots; a failure here does not invalidate the data.
        try
        {
            m_reportCreator.CreatePlotReport();
        }
        catch (Exception ex)
        {
            wasError = true;
            Logger.PrintMessage("There was an error when trying to create the final analysis plots, however, the data analysis is complete.");
            Logger.PrintMessage(ex.Message);
            Logger.PrintMessage(ex.StackTrace);
        }

        config.Analysis.Dispose();
        config.triggerEvent.Dispose();
        config.errorEvent.Dispose();
        processor.Dispose();
        CleanupDataProviders();

        if (!wasError)
        {
            // BUG FIX: index the database file itself rather than the analysis
            // directory — the GUI path and ShouldCreateDatabase both use
            // Path.Combine(AnalysisPath, AnalysisName) as the database path.
            var databasePath = Path.Combine(config.AnalysisPath, config.AnalysisName);
            Logger.PrintMessage("Indexing Database Features");
            DatabaseIndexer.IndexFeatures(databasePath);
            DatabaseIndexer.IndexClusters(databasePath);
        }

        Logger.PrintMessage("Analysis Complete.");
        return 0;
    }

    // The error event fired before the trigger event.
    Logger.PrintMessage("There was an error during processing.");
    return 1;
}
/// <summary>
/// Background-worker entry point for the GUI analysis: creates the run's
/// synchronization events, decides whether a new database must be created,
/// and dispatches to the handler for the requested analysis type.
/// </summary>
/// <param name="sender">The background worker raising the event.</param>
/// <param name="e">Worker event arguments (unused).</param>
private void m_worker_DoWork(object sender, DoWorkEventArgs e)
{
    // Builds the list of algorithm providers.
    var algorithmBuilder = new AlgorithmBuilder();

    // Fresh events for this run: completion, error, and stop signalling.
    m_config.triggerEvent = new ManualResetEvent(false);
    m_config.errorEvent = new ManualResetEvent(false);
    m_config.stopEvent = new ManualResetEvent(false);
    m_config.errorException = null;
    m_config.Analysis.MetaData.AnalysisPath = m_config.AnalysisPath;
    m_config.Analysis.MetaData.AnalysisName = m_config.AnalysisName;

    // Setup log path, analysis path, and print version to log file.
    SetupAnalysisEssentials();

    // A database is created unless one already exists for this analysis,
    // the analysis type says otherwise, or the caller asked to resume at
    // (or after) the alignment step.
    var fullDatabasePath = Path.Combine(m_config.AnalysisPath, m_config.AnalysisName);
    var hasExistingDatabase = File.Exists(fullDatabasePath);
    var shouldCreateDatabase = ShouldCreateDatabase(m_config.Analysis.AnalysisType, hasExistingDatabase);
    if (m_config.InitialStep >= AnalysisStep.Alignment)
    {
        shouldCreateDatabase = false;
    }

    var validated = m_config.Analysis.AnalysisType;
    switch (m_config.Analysis.AnalysisType)
    {
        case AnalysisType.FactorImporting:
            ImportFactors(m_config, hasExistingDatabase);
            break;
        case AnalysisType.Full:
            PerformAnalysisGui(m_config, algorithmBuilder, validated, shouldCreateDatabase, m_workerManager);
            break;
        case AnalysisType.ExportDataOnly:
            ExportData(hasExistingDatabase);
            break;
    }
}
/// <summary>
/// Selects the clustering algorithm from the command-line options and builds
/// it into the supplied builder.
/// </summary>
/// <param name="builder">Builder that receives the chosen clusterer.</param>
private void ConstructClustering(AlgorithmBuilder builder)
{
    // Command-line switches take priority over the default clusterer.
    var commandLineOptions = m_config.options;

    if (commandLineOptions.ContainsKey("-centroid"))
    {
        Logger.PrintMessage("Building centroid clusterer");
        builder.BuildClusterer(LcmsFeatureClusteringAlgorithmType.Centroid);
        return;
    }

    if (commandLineOptions.ContainsKey("-singlelinkage"))
    {
        Logger.PrintMessage("Building single linkage clusterer");
        builder.BuildClusterer(LcmsFeatureClusteringAlgorithmType.SingleLinkage);
        return;
    }

    // No explicit switch: nothing is built here — presumably average linkage
    // is the builder's default; confirm against AlgorithmBuilder.
    Logger.PrintMessage("Built average linkage clusterer.");
}
/// <summary>
/// Creates the analysis processor, synchronizes its events to this object's
/// handlers, and attaches the algorithm providers from the builder.
/// </summary>
/// <param name="builder">Builder supplying the algorithm providers.</param>
/// <param name="providers">Data-access providers stored on the analysis.</param>
/// <returns>The fully wired analysis processor.</returns>
private MultiAlignAnalysisProcessor ConstructAnalysisProcessor(AlgorithmBuilder builder, FeatureDataAccessProviders providers)
{
    var analysisProcessor = new MultiAlignAnalysisProcessor();

    // Lifecycle notifications.
    analysisProcessor.AnalysisStarted += processor_AnalysisStarted;
    analysisProcessor.AnalysisError += processor_AnalysisError;
    analysisProcessor.AnalysisComplete += processor_AnalysisComplete;
    analysisProcessor.Progress += processor_Progress;

    // Per-stage notifications.
    analysisProcessor.BaselineFeaturesLoaded += processor_BaselineFeaturesLoaded;
    analysisProcessor.FeaturesLoaded += processor_FeaturesLoaded;
    analysisProcessor.MassTagsLoaded += processor_MassTagsLoaded;
    analysisProcessor.FeaturesAligned += processor_FeaturesAligned;
    analysisProcessor.FeaturesClustered += processor_FeaturesClustered;
    analysisProcessor.FeaturesPeakMatched += processor_FeaturesPeakMatched;

    m_config.Analysis.DataProviders = providers;
    analysisProcessor.AlgorithmProviders = builder.GetAlgorithmProvider(m_config.Analysis.Options);

    return analysisProcessor;
}
/// <summary>
/// Processes the MultiAlign analysis data from the console entry point.
/// </summary>
/// <param name="config">Run configuration; also stored in m_config for helpers.</param>
/// <param name="reporter">Generator used to produce the final analysis report.</param>
/// <returns>0 on success (or when only help was shown); the analysis result code otherwise.</returns>
public int StartMultiAlign(AnalysisConfig config, IAnalysisReportGenerator reporter)
{
    m_reportCreator = reporter;
    m_config = config;

    // Builds the list of algorithm providers.
    var builder = new AlgorithmBuilder();

    // Fresh events for this run: completion, error, and stop signalling.
    // (Use the config parameter consistently; it is the same object as m_config.)
    config.triggerEvent = new ManualResetEvent(false);
    config.errorEvent = new ManualResetEvent(false);
    config.stopEvent = new ManualResetEvent(false);
    config.errorException = null;

    // See if the user wants help.
    if (config.showHelp)
    {
        PrintHelp();
        config.errorEvent.Dispose();
        config.triggerEvent.Dispose();
        config.stopEvent.Dispose();   // BUG FIX: stopEvent was never disposed here
        return 0;
    }

    // Validate the command line.
    var validated = AnalysisValidator.ValidateSetup(config);
    if (validated == AnalysisType.InvalidParameters)
    {
        PrintHelp();
        // BUG FIX: dispose the wait handles on this early-exit path too,
        // matching the help path above.
        config.errorEvent.Dispose();
        config.triggerEvent.Dispose();
        config.stopEvent.Dispose();
        return 0;
    }

    // Setup log path, analysis path, and print version to log file.
    SetupAnalysisEssentials();

    // Determine if we have specified a valid database to extract
    // data from or to re-start an analysis.
    var databasePath = Path.Combine(config.AnalysisPath, config.AnalysisName);
    var databaseExists = File.Exists(databasePath);
    var createDatabase = ShouldCreateDatabase(validated, databaseExists);

    // Make sure that we were not told to skip to a new part of the analysis.
    if (config.InitialStep >= AnalysisStep.Alignment)
    {
        createDatabase = false;
    }

    var result = 0;
    switch (validated)
    {
        case AnalysisType.Full:
            result = PerformAnalysis(config, builder, validated, createDatabase);
            break;
    }

    return result;
}