/// <summary>
/// Right-tailed Fisher exact test p-value for the 2x2 contingency table
///    a  b
///    c  d
/// Sums the point probability of the observed table and of every more extreme
/// table reached by shifting counts from (b,c) onto (a,d).
/// Returns -1 if a+b+c+d exceeds the precomputed factorial table size.
/// </summary>
public double Get_rightTailed_p_value(int a, int b, int c, int d)
{
    if (Report)
    {
        Report_class.WriteLine("{0}: Get right tailed p-value", typeof(Fisher_exact_test_class).Name);
    }
    double p;
    if (Check_if_n_not_larger_than_max_size(a, b, c, d))
    {
        p = Get_specific_p_value(a, b, c, d);
        int min = (c < b) ? c : b;
        for (int i = 0; i < min; i++)
        {
            p += Get_specific_p_value(++a, --b, --c, ++d);
        }
    }
    else
    {
        p = -1;
    }
    if (Report)
    {
        for (int i = 0; i < typeof(Fisher_exact_test_class).Name.Length + 2; i++) { Report_class.Write(" "); }
        Report_class.WriteLine("p_value: {0}", p);
    }
    // FIX: clamp floating-point accumulation that can slightly exceed 1,
    // consistent with Get_leftTailed_p_value.
    if (p > 1) { p = 1; }
    return (p);
}
/// <summary>
/// Left-tailed Fisher exact test p-value for the 2x2 contingency table
///    a  b
///    c  d
/// Starts from the observed table and walks toward more extreme tables by
/// shifting counts from (a,d) onto (b,c), summing each table's point probability.
/// Returns -1 if a+b+c+d exceeds the precomputed factorial table size.
/// </summary>
public double Get_leftTailed_p_value(int a, int b, int c, int d)
{
    if (Report)
    {
        Report_class.WriteLine("{0}: Get left tailed p-value", typeof(Fisher_exact_test_class).Name);
    }
    double p_value;
    if (!Check_if_n_not_larger_than_max_size(a, b, c, d))
    {
        p_value = -1;
    }
    else
    {
        p_value = Get_specific_p_value(a, b, c, d);
        // The walk ends once a or d reaches zero.
        int steps = Math.Min(a, d);
        for (int step = 0; step < steps; step++)
        {
            a--; b++; c++; d--;
            p_value += Get_specific_p_value(a, b, c, d);
        }
    }
    if (Report)
    {
        Report_class.Write(new string(' ', typeof(Fisher_exact_test_class).Name.Length + 2));
        Report_class.WriteLine("p_value: {0}", p_value);
    }
    // Guard against floating-point accumulation slightly exceeding 1.
    if (p_value > 1) { p_value = 1; }
    return (p_value);
}
/// <summary>
/// Writes Data to Options.File (overwriting), delegating headline/body writing
/// and stream closing to the writer-taking overload.
/// </summary>
public static void WriteData <T>(List <T> Data, ReadWriteOptions_base Options) where T : class
{
    // FIX: guard the report line like the other WriteData overloads do,
    // instead of writing it unconditionally.
    if (Options.Report != ReadWrite_report_enum.Report_nothing)
    {
        Report_class.WriteLine("{0}: Write file {1}", typeof(T).Name, Options.File);
    }
    StreamWriter writer = new StreamWriter(Options.File, false);
    WriteData(Data, Options, writer);
}
/// <summary>
/// Deletes every file in directory whose file name starts with startString.
/// Prompts via Report_class and waits for console confirmation before deleting.
/// </summary>
public static void Delete_files_in_directory_that_start_with_startString(string directory, string startString)
{
    string[] complete_file_names = Directory.GetFiles(directory);
    if (complete_file_names.Length > 0)
    {
        Report_class.Write("Delete files in directory {0}:", directory);
        Console.ReadLine();
        foreach (string complete_file_name in complete_file_names)
        {
            // FIX: the original rebuilt the path as directory + file_name without a
            // separator, which targets a wrong path when directory lacks a trailing
            // slash. Directory.GetFiles already returned the full path - use it.
            string file_name = Path.GetFileName(complete_file_name);
            if (file_name.StartsWith(startString, StringComparison.Ordinal))
            {
                FileInfo delete_file = new FileInfo(complete_file_name);
                delete_file.Delete();
            }
        }
    }
}
// Writes one yED parent-child network file per sample of the standard MBCO
// enrichment results. Enrichment lines are ordered by complete sample / p-value,
// then grouped by Complete_sample_name via the adjacent-line comparison pattern:
// the list is cleared when a new sample starts (indexE==0 or name differs from
// the previous line) and flushed when the sample ends (last index or name differs
// from the next line). For each completed group a deep copy of the parent-child
// obo network is filtered to the significant nodes (ancestors kept) and written
// to "<results_subdirectory><sample>_parentChild_nw".
// NOTE(review): relies on Order_by_complete_sample_pvalue keeping equal sample
// names adjacent - confirm against that method.
private void Write_parent_child_network_for_results_of_standard_enrichment_analysis(Ontology_enrichment_class onto_enrich_mbc_for_nw_visualization) { if (Options.Report) { Report_class.WriteLine("{0}: Visualize parent child relationships of standard enrichment results", typeof(Mbc_enrichment_pipeline_class).Name); } string mbc_results_subdirectory = Get_results_subdirectory_for_indicated_ontology(Ontology_type_enum.Molecular_biology_cell); int enrich_length = onto_enrich_mbc_for_nw_visualization.Enrich.Length; Ontology_enrichment_line_class enrich_line; List <Ontology_enrichment_line_class> sameLevel_enrich_list = new List <Ontology_enrichment_line_class>(); onto_enrich_mbc_for_nw_visualization.Order_by_complete_sample_pvalue(); MBCO_obo_network_class current_obo_mbc; string complete_sampleName; for (int indexE = 0; indexE < enrich_length; indexE++) { enrich_line = onto_enrich_mbc_for_nw_visualization.Enrich[indexE]; if ((indexE == 0) || (!enrich_line.Complete_sample_name.Equals(onto_enrich_mbc_for_nw_visualization.Enrich[indexE - 1].Complete_sample_name))) { sameLevel_enrich_list.Clear(); } sameLevel_enrich_list.Add(enrich_line); if ((indexE == enrich_length - 1) || (!enrich_line.Complete_sample_name.Equals(onto_enrich_mbc_for_nw_visualization.Enrich[indexE + 1].Complete_sample_name))) { complete_sampleName = enrich_line.Complete_sample_name; current_obo_mbc = this.Mbco_parentChild_nw.Deep_copy_mbco_obo_nw(); current_obo_mbc.Add_significance_and_remove_unsignificant_nodes_but_keep_all_ancestors(sameLevel_enrich_list.ToArray()); current_obo_mbc.Write_yED_nw_in_results_directory_with_nodes_colored_by_minusLog10Pvalue_without_sameLevel_processes_grouped(mbc_results_subdirectory + complete_sampleName + "_parentChild_nw"); } } }
/// <summary>
/// Maps each name in key_propertyNames to its index within propInfo.
/// Reports an error and throws if any requested property name is unknown.
/// </summary>
public static int[] Get_propertyIndexes <T>(PropertyInfo[] propInfo, string[] key_propertyNames)
{
    // Snapshot the property names once so each lookup is a simple array search.
    string[] propInfo_names = new string[propInfo.Length];
    for (int indexP = 0; indexP < propInfo.Length; indexP++)
    {
        propInfo_names[indexP] = propInfo[indexP].Name;
    }
    int key_length = key_propertyNames.Length;
    int[] propertyIndexes = new int[key_length];
    for (int indexKey = 0; indexKey < key_length; indexKey++)
    {
        int position = Array.IndexOf(propInfo_names, key_propertyNames[indexKey]);
        if (position < 0)
        {
            Report_class.Write_error_line("{0}: propertyName \"{1}\" does not exist", typeof(T).Name, key_propertyNames[indexKey]);
            throw new Exception();
        }
        propertyIndexes[indexKey] = position;
    }
    return (propertyIndexes);
}
/// <summary>
/// Resolves search_given_columnNames to positions within given_columnNames,
/// translates those positions to property names via propertyNames (parallel to
/// given_columnNames), and returns the matching property indexes in propInfo.
/// Reports an error and throws if the search list is empty or a name is missing.
/// </summary>
public static int[] Get_propertyIndexes_of_corresponding_given_columnNames <T>(PropertyInfo[] propInfo, string[] propertyNames, string[] given_columnNames, string[] search_given_columnNames)
{
    int search_length = search_given_columnNames.Length;
    if (search_length == 0)
    {
        Report_class.Write_error_line("{0}: no search columnNames to search for", typeof(T).Name);
        throw new Exception();
    }
    int[] columnNames_indexes = new int[search_length];
    for (int i = 0; i < search_length; i++)
    {
        int index = Array.IndexOf(given_columnNames, search_given_columnNames[i]);
        if (index >= 0)
        {
            columnNames_indexes[i] = index;
        }
        else
        {
            // FIX: report the column name that was actually missing
            // (search_given_columnNames[i]), not an entry of the candidate list.
            Report_class.Write_error_line("{0}: given_columnName \"{1}\" does not exist", typeof(T).Name, search_given_columnNames[i]);
            throw new Exception();
        }
    }
    string[] corresponding_propertyNames = new string[search_length];
    for (int indexS = 0; indexS < search_length; indexS++)
    {
        corresponding_propertyNames[indexS] = propertyNames[columnNames_indexes[indexS]];
    }
    int[] propertyIndexes = Get_propertyIndexes <T>(propInfo, corresponding_propertyNames);
    return (propertyIndexes);
}
/// <summary>
/// Creates one Ontology_fisher_exact_class instance per configured ontology,
/// all sharing the de-duplicated, sorted background gene list.
/// (Method name typos - "excat_intances" - kept: callers depend on it.)
/// </summary>
private void Generate_fisher_excat_intances(params string[] experimental_background_genes)
{
    if (Options.Report)
    {
        Report_class.WriteLine("{0}: Generate fisher exact instances", typeof(Ontology_process_analysis_class).Name);
    }
    // Normalize the background: unique gene symbols in ascending order.
    experimental_background_genes = experimental_background_genes.Distinct().OrderBy(gene => gene).ToArray();
    Ontology_type_enum[] ontologies = this.Options.Ontologies;
    this.Ontology_fishers = new Ontology_fisher_exact_class[ontologies.Length];
    for (int indexOntology = 0; indexOntology < ontologies.Length; indexOntology++)
    {
        Ontology_fisher_exact_class fisher = new Ontology_fisher_exact_class(ontologies[indexOntology], experimental_background_genes);
        fisher.Generate_new_instance();
        this.Ontology_fishers[indexOntology] = fisher;
    }
    if (Options.Report)
    {
        Report_class.WriteLine();
    }
}
// Builds the hierarchical MBCO parent-child SCP network by reading the locally
// stored obo file ("safed", sic - project spelling). Must run before anything
// that queries Mbco_parentChild_nw.
private void Generate_parent_child_network() { if (Options.Report) { Report_class.WriteLine("{0}: Generate hierarchical parent child MBCO network", typeof(Mbc_enrichment_pipeline_class).Name); } Mbco_parentChild_nw.Generate_by_reading_safed_obo_file(); }
/// <summary>
/// Recursively deletes the given directory after a console confirmation prompt,
/// if it exists. No-op otherwise.
/// </summary>
public static void Delete_directory_if_it_exists(string directory)
{
    if (Directory.Exists(directory))
    {
        Report_class.Write("Delete {0}?", directory);
        // FIX: use ReadLine like the other delete prompts; Console.Read consumes
        // only one character and leaves the rest of the line in the input buffer,
        // which would silently auto-confirm the next prompt.
        Console.ReadLine();
        Directory.Delete(directory, true);
    }
}
/// <summary>
/// Creates sub_directory_name inside actDirectory unless it already exists.
/// </summary>
public static void Create_subdirectory_if_it_does_not_exist(string actDirectory, string sub_directory_name)
{
    // FIX: the existence check concatenated actDirectory + sub_directory_name
    // without a separator, which misses the directory (and recreates it) when
    // actDirectory has no trailing slash. Path.Combine handles both cases.
    if (!Directory.Exists(Path.Combine(actDirectory, sub_directory_name)))
    {
        Report_class.WriteLine("{0}: Create subdirectory {1} in directory {2}", typeof(ReadWriteClass).Name, sub_directory_name, actDirectory);
        DirectoryInfo dir = new DirectoryInfo(actDirectory);
        dir.CreateSubdirectory(sub_directory_name);
    }
}
// Writes one yED SCP-SCP network file per sample of the dynamic MBCO enrichment
// results ("analsyis", sic - name kept for callers). Both enrichment sets are
// ordered so equal samples are adjacent, then lines are grouped per sample with
// the clear-on-new-sample / flush-on-last-sample adjacent-comparison pattern.
// For each completed sample: the SCP names of all its enrichment lines (split on
// Global_class.Scp_delimiter, since dynamic results can hold SCP unions) are
// collected; a deep copy of the visualization SCP network is restricted to those
// nodes and missing-level ancestors are added; SCPs that were also found by the
// STANDARD enrichment (standard_onto_enrichment_filtered) are drawn as rectangles
// via nodeLable_shape_dict, all others default to diamonds. Output file:
// "<subdirectory><sample>_dynamicEnrichment_nw".
// NOTE(review): leave_out is generated but not visibly used afterwards in this
// method - confirm whether it has required side effects or is dead code.
private void Write_scp_network_for_results_of_dynamic_enrichment_analsyis(Ontology_enrichment_class dynamic_onto_enrichment_filtered, Ontology_enrichment_class standard_onto_enrichment_filtered) { if (Options.Report) { Report_class.WriteLine("{0}: Visualize SCP relationships of dynamic enrichment results", typeof(Mbc_enrichment_pipeline_class).Name); } string subdirectory = Get_results_subdirectory_for_indicated_ontology(Ontology_type_enum.Molecular_biology_cell); dynamic_onto_enrichment_filtered.Order_by_complete_sample_pvalue(); standard_onto_enrichment_filtered.Order_by_complete_sample_pvalue(); Leave_out_class leave_out = new Leave_out_class(); leave_out.Generate_by_reading_safed_file(); List <Ontology_enrichment_line_class> sameSample_ontology_enrichment = new List <Ontology_enrichment_line_class>(); int dynamic_onto_enrich_length = dynamic_onto_enrichment_filtered.Enrich.Length; Ontology_enrichment_line_class enrichment_line; Leave_out_scp_scp_network_class current_scp_network; List <string> current_scpNames = new List <string>(); string complete_sampleName; string[] standard_enriched_scps; Dictionary <string, Shape_enum> nodeLable_shape_dict = new Dictionary <string, Shape_enum>(); for (int indexE = 0; indexE < dynamic_onto_enrich_length; indexE++) { enrichment_line = dynamic_onto_enrichment_filtered.Enrich[indexE]; if ((indexE == 0) || (!enrichment_line.Equal_complete_sample(dynamic_onto_enrichment_filtered.Enrich[indexE - 1]))) { sameSample_ontology_enrichment.Clear(); } sameSample_ontology_enrichment.Add(enrichment_line); if ((indexE == dynamic_onto_enrich_length - 1) || (!enrichment_line.Equal_complete_sample(dynamic_onto_enrichment_filtered.Enrich[indexE + 1]))) { current_scpNames.Clear(); foreach (Ontology_enrichment_line_class sameSample_enrichment_line in sameSample_ontology_enrichment) { current_scpNames.AddRange(sameSample_enrichment_line.Scp_name.Split(Global_class.Scp_delimiter)); } complete_sampleName = enrichment_line.Complete_sample_name; 
current_scp_network = Leave_out_scp_network_for_dynamicEnrichment_visualization.Deep_copy_scp_network(); current_scp_network.Scp_nw.Keep_only_input_nodeNames(current_scpNames.ToArray()); current_scp_network.Add_ancestors_of_missing_levels(this.Mbco_parentChild_nw); standard_enriched_scps = standard_onto_enrichment_filtered.Get_all_scps_of_completeSample(complete_sampleName); nodeLable_shape_dict.Clear(); foreach (string standard_enriched_scp in standard_enriched_scps) { nodeLable_shape_dict.Add(standard_enriched_scp, Shape_enum.Rectangle); } current_scp_network.Scp_nw.Write_yED_nw_in_results_directory_with_nodes_colored_by_level_and_sameLevel_processes_grouped(subdirectory + complete_sampleName + "_dynamicEnrichment_nw", Shape_enum.Diamond, nodeLable_shape_dict); } } }
// Prepares this Fisher-exact wrapper for its ontology: restricts the
// process-gene associations to the configured background genes, then sizes the
// Fisher exact test's factorial table to the background gene count.
// Must run before p-values are calculated.
public void Generate_new_instance() { if (Options.Report) { Report_class.WriteLine("-------------------------------------------------------------------------------"); Report_class.WriteLine("{0}: Generate new instance based on {1}", typeof(Ontology_fisher_exact_class).Name, this.Ontology_association.Ontology); } this.Ontology_association.Keep_only_input_genes_in_process_gene_association(this.Background_genes); this.Fisher = new Fisher_exact_test_class(this.Background_genes.Length, false); }
/// <summary>
/// Writes headline and body of Data to the supplied writer, closes the stream,
/// and optionally reports completion.
/// </summary>
public static void WriteData <T>(List <T> Data, ReadWriteOptions_base Options, StreamWriter writer) where T : class
{
    WriteData_headline <T>(Data, Options, writer);
    WriteData_body <T>(Data, Options, writer);
    writer.Close();
    bool report_completion = Options.Report != ReadWrite_report_enum.Report_nothing;
    if (report_completion)
    {
        Report_class.WriteLine();
    }
}
/// <summary>
/// Writes the Data array to Options.File (overwriting), delegating the actual
/// writing and stream closing to the writer-taking overload.
/// </summary>
public static void WriteData <T>(T[] Data, ReadWriteOptions_base Options) where T : class
{
    bool report_file = Options.Report != ReadWrite_report_enum.Report_nothing;
    if (report_file)
    {
        Report_class.WriteLine("{0}: Write file {1}", typeof(T).Name, Options.File);
    }
    WriteData(Data, Options, new StreamWriter(Options.File, false));
}
/// <summary>
/// Calculates enrichment p-values with multiple-hypothesis correction for each
/// condition group in deg_input. Groups are defined by
/// (Sequencing_run, Cell, Condition1, Condition2); the input is deep-copied,
/// restricted to the background genes, sorted so equal groups are adjacent, and
/// each completed group's genes are passed to the per-group calculation.
/// Throws if the same gene occurs twice within one condition group.
/// </summary>
public Ontology_enrichment_class Calculate_p_values_and_do_multiple_hypothesis_correction(Deg_class deg_input)
{
    if (Options.Report)
    {
        Report_class.Write_major_separation_line();
        Report_class.WriteLine("{0}: Calculate p values and perform multiple hypothesis correction for {1}", typeof(Ontology_fisher_exact_class).Name, this.Ontology_association.Ontology);
    }
    Deg_class deg = deg_input.Deep_copy();
    deg.Keep_only_input_genes(this.Background_genes);
    this.Ontology_association.Process_gene_associations = this.Ontology_association.Process_gene_associations.OrderBy(l => l.ProcessName).ThenBy(l => l.GeneSymbol).ToArray();
    deg.Degs = deg.Degs.OrderBy(l => l.Sequencing_run).ThenBy(l => l.Cell).ThenBy(l => l.Condition1).ThenBy(l => l.Condition2).ToArray();
    int degs_length = deg.Degs.Length;
    Deg_line_class deg_line;
    List <string> inputGenes = new List <string>();
    Ontology_enrichment_line_class[] add_enrichment_results;
    List <Ontology_enrichment_line_class> enrichment_results = new List <Ontology_enrichment_line_class>();
    for (int indexDeg = 0; indexDeg < degs_length; indexDeg++)
    {
        deg_line = deg.Degs[indexDeg];
        if ((indexDeg == 0)
            || (!deg_line.Sequencing_run.Equals(deg.Degs[indexDeg - 1].Sequencing_run))
            || (!deg_line.Cell.Equals(deg.Degs[indexDeg - 1].Cell))
            || (!deg_line.Condition1.Equals(deg.Degs[indexDeg - 1].Condition1))
            || (!deg_line.Condition2.Equals(deg.Degs[indexDeg - 1].Condition2)))
        {
            inputGenes.Clear();
        }
        // FIX: the original duplicate check compared deg_line.Gene with itself
        // (always true) and required all four condition fields to DIFFER from the
        // previous line, so it could throw spuriously at a group boundary and
        // never caught real duplicates. A duplicate is the same gene appearing
        // again within the CURRENT condition group, i.e. already in inputGenes.
        if (inputGenes.Contains(deg_line.Gene))
        {
            throw new Exception(); //duplicated gene in same condition
        }
        inputGenes.Add(deg_line.Gene);
        if ((indexDeg == degs_length - 1)
            || (!deg_line.Sequencing_run.Equals(deg.Degs[indexDeg + 1].Sequencing_run))
            || (!deg_line.Cell.Equals(deg.Degs[indexDeg + 1].Cell))
            || (!deg_line.Condition1.Equals(deg.Degs[indexDeg + 1].Condition1))
            || (!deg_line.Condition2.Equals(deg.Degs[indexDeg + 1].Condition2)))
        {
            add_enrichment_results = Calculate_p_values_and_do_mutliple_hypothesis_correcion_for_input_genes(inputGenes.ToArray(), deg_line);
            enrichment_results.AddRange(add_enrichment_results);
        }
    }
    Ontology_enrichment_class onto_enrich = new Ontology_enrichment_class();
    onto_enrich.Add_to_array(enrichment_results.ToArray());
    return (onto_enrich);
}
/// <summary>
/// Deletes the given file after a console confirmation prompt, if it exists.
/// No output and no-op when the file does not exist.
/// </summary>
public static void Delete_file_if_it_exists(string complete_file_name)
{
    if (!File.Exists(complete_file_name)) { return; }
    Report_class.Write("Delete file: {0}?", complete_file_name);
    Console.ReadLine();
    FileInfo delete_file = new FileInfo(complete_file_name);
    delete_file.Delete();
    Report_class.WriteLine(" deleted");
}
/// <summary>
/// Returns Nodes[indexNode] after verifying that its NW_index matches the
/// requested index; reports the mismatch and throws otherwise.
/// </summary>
public NetworkNode_line_class Get_indexed_node_line_if_index_is_correct(int indexNode)
{
    NetworkNode_line_class node_line = Nodes[indexNode];
    if (node_line.NW_index != indexNode)
    {
        // FIX: the original replaced node_line with a fresh instance BEFORE
        // reporting, so the message printed the new object's default NW_index
        // instead of the actual mismatching value (and the return after the
        // throw was unreachable). Report the real mismatch, then throw.
        Report_class.Write_error_line("{0}: Get indexed node line, Indexes do not match ({1} <-> {2})", typeof(NetworkNode_line_class).Name, indexNode, node_line.NW_index);
        throw new Exception();
    }
    return (node_line);
}
/// <summary>
/// Writes Data as a delimited text table: an optional headline built from
/// Options.Key_columnNames, then one line per element with the property values
/// selected by Options.Key_propertyNames. Closes the writer when done.
/// </summary>
public static void WriteData <T>(List <T> Data, ReadWriteOptions_base Options, StreamWriter writer) where T : class
{
    PropertyInfo[] propInfo = typeof(T).GetProperties();
    int[] propertyIndexes = Get_propertyIndexes <T>(propInfo, Options.Key_propertyNames);
    int column_count = propertyIndexes.Length;
    // Optional headline assembled from the configured column names.
    if (Options.File_has_headline == true)
    {
        char headline_delimiter = Options.HeadlineDelimiters[0];
        string[] headline_fields = new string[column_count];
        for (int indexColumn = 0; indexColumn < column_count; indexColumn++)
        {
            headline_fields[indexColumn] = Options.Key_columnNames[indexColumn];
        }
        writer.WriteLine(string.Join(headline_delimiter.ToString(), headline_fields));
    }
    // One delimited line per data element, properties in configured order.
    char line_delimiter = Options.LineDelimiters[0];
    string[] line_fields = new string[column_count];
    foreach (T data_element in Data)
    {
        for (int indexColumn = 0; indexColumn < column_count; indexColumn++)
        {
            object value = propInfo[propertyIndexes[indexColumn]].GetValue(data_element, null);
            line_fields[indexColumn] = string.Format("{0}", value);
        }
        writer.WriteLine(string.Join(line_delimiter.ToString(), line_fields));
    }
    writer.Close();
    Report_class.WriteLine();
}
/// <summary>
/// Returns true when the table total n = a+b+c+d fits the precomputed factorial
/// table; reports an error and returns false otherwise.
/// </summary>
private bool Check_if_n_not_larger_than_max_size(int a, int b, int c, int d)
{
    bool smaller = true;
    int n = a + b + c + d;
    // FIX: the original condition (n > max_size + 1) still accepted
    // n == max_size + 1, but log_factorials only holds indices 0..max_size
    // (see constructor), so a factorial lookup for that n would index out of
    // range. The error text already says "larger than max_size".
    if (n > max_size)
    {
        Report_class.Write_error_line("{0}: n ({1}) is larger than max_size ({2}): initialize new fisher exact test instance", typeof(Fisher_exact_test_class).Name, n, max_size);
        smaller = false;
    }
    return (smaller);
}
/// <summary>
/// Returns the input data unchanged when all values share one sign; when both
/// positive and negative values occur, returns a new instance holding the
/// combined / upregulated-only / downregulated-only views requested by
/// Options.Data_value_signs_of_interest.
/// </summary>
private Data_class Generate_data_instance_with_separated_or_combined_entries(Data_class data)
{
    if (Options.Report)
    {
        Report_class.WriteLine("{0}: Prepare data for enrichment analysis", typeof(Mbc_enrichment_pipeline_class).Name);
    }
    // Scan every value once to learn whether the data set mixes signs.
    bool has_positive = false;
    bool has_negative = false;
    int column_count = data.ColChar.Columns_length;
    for (int indexData = 0; indexData < data.Data_length; indexData++)
    {
        Data_line_class current_line = data.Data[indexData];
        for (int indexCol = 0; indexCol < column_count; indexCol++)
        {
            if (current_line.Columns[indexCol] > 0)
            {
                has_positive = true;
            }
            else if (current_line.Columns[indexCol] < 0)
            {
                has_negative = true;
            }
        }
    }
    if (!(has_positive && has_negative))
    {
        // Single-sign data needs no separation.
        return (data);
    }
    // Mixed signs: assemble the requested sign-specific views.
    Data_class separated_data = new Data_class();
    if (Options.Data_value_signs_of_interest.Contains(Data_value_signs_of_interest_enum.Combined))
    {
        separated_data.Add_other_data_instance(data);
    }
    if (Options.Data_value_signs_of_interest.Contains(Data_value_signs_of_interest_enum.Upregulated))
    {
        separated_data.Add_other_data_instance(data.Get_data_instance_with_only_upregulated_entries());
    }
    if (Options.Data_value_signs_of_interest.Contains(Data_value_signs_of_interest_enum.Downregulated))
    {
        separated_data.Add_other_data_instance(data.Get_data_instance_with_only_downregulated_entries());
    }
    return (separated_data);
}
/// <summary>
/// Deletes every file in the given directory after a console confirmation
/// prompt. No-op (and no prompt) when the directory is empty.
/// </summary>
public static void Delete_files_in_directory(string directory)
{
    string[] complete_file_names = Directory.GetFiles(directory);
    if (complete_file_names.Length == 0) { return; }
    Report_class.Write("Delete files in directory {0}:", directory);
    Console.ReadLine();
    foreach (string complete_file_name in complete_file_names)
    {
        new FileInfo(complete_file_name).Delete();
    }
}
/// <summary>
/// Deletes the given file if it exists, always reporting the file name and
/// whether it was deleted or did not exist. (No confirmation prompt, unlike
/// the prompting variant of this method.)
/// </summary>
public static void Delete_file_if_it_exists(string complete_file_name)
{
    Report_class.Write("Delete file {0}:", complete_file_name);
    if (!File.Exists(complete_file_name))
    {
        Report_class.WriteLine(" File does not exist");
        return;
    }
    new FileInfo(complete_file_name).Delete();
    Report_class.WriteLine(" deleted");
}
/// <summary>
/// Reports the configured ontologies, indented to line up under the
/// "ClassName: " prefix of the first report line.
/// </summary>
private void Print_options()
{
    string class_name = typeof(Ontology_process_analysis_options_class).Name;
    Report_class.WriteLine("{0}: Print options", class_name);
    // Padding of class_name.Length + 2 spaces aligns with "ClassName: ".
    Report_class.Write(new string(' ', class_name.Length + 2));
    Report_class.Write("Used Ontologies:");
    for (int indexO = 0; indexO < Ontologies.Length; indexO++)
    {
        Report_class.Write(" {0}", Ontologies[indexO]);
    }
    Report_class.WriteLine();
}
// Contingency table layout:
//    a  b
//    c  d
/// <summary>
/// Sizes the test to tables whose total a+b+c+d is at most input_max_size and
/// precomputes log(i!) for i = 0..max_size, so point probabilities can be
/// evaluated as sums and differences of logs without overflow.
/// </summary>
public Fisher_exact_test_class(int input_max_size, bool report)
{
    max_size = input_max_size;
    Report = report;
    if (Report)
    {
        Report_class.WriteLine("{0}: Initialize array of factorials with max_size = {1}", typeof(Fisher_exact_test_class).Name, max_size);
    }
    log_factorials = new double[max_size + 1];
    log_factorials[0] = 0;  // log(0!) = log(1) = 0
    for (int i = 1; i <= max_size; i++)
    {
        log_factorials[i] = log_factorials[i - 1] + Math.Log(i);
    }
}
/// <summary>
/// Writes each element of Array as its own line to Options.File (overwriting),
/// using the element's default ToString formatting.
/// </summary>
public static void Write_one_type_array_as_single_column <T>(T[] Array, ReadWriteOptions_base Options)
{
    Report_class.WriteLine("{0}: Write file {1}", typeof(T).Name, Options.File);
    StreamWriter writer = new StreamWriter(Options.File, false);
    // FIX: removed unused locals (line_delimiter, StringBuilder line) that the
    // original declared but never used.
    int array_length = Array.Length;
    for (int index = 0; index < array_length; index++)
    {
        writer.WriteLine(Array[index]);
    }
    writer.Close();
    Report_class.WriteLine();
}
/// <summary>
/// Writes each element of array as its own line to file_name (overwriting),
/// using the element's default ToString formatting.
/// </summary>
public static void WriteArray <T>(T[] array, string file_name)
{
    Report_class.WriteLine("{0}: Write array {1}", typeof(T).Name, file_name);
    StreamWriter writer = new StreamWriter(file_name, false);
    // FIX: removed unused locals (PropertyInfo[] propInfo, StringBuilder line)
    // that the original declared but never used.
    int array_length = array.Length;
    for (int indexA = 0; indexA < array_length; indexA++)
    {
        writer.WriteLine(array[indexA]);
    }
    writer.Close();
    Report_class.WriteLine();
}
// Runs the standard MBCO enrichment analysis on data and writes both result
// files into the MBCO results subdirectory:
// - "Standard_enrichment_results.txt": all results (also handed back via the
//   out parameter onto_enrich),
// - "Standard_enrichment_results_filtered.txt": a deep copy kept below the
//   configured p-value cutoff and trimmed to the configured top predictions per
//   level and sample; this filtered copy is the return value.
private Ontology_enrichment_class Do_standard_enrichment_analysis_and_write_results(Data_class data, out Ontology_enrichment_class onto_enrich) { if (Options.Report) { Report_class.WriteLine("{0}: Do standard enrichment analysis and write results", typeof(Mbc_enrichment_pipeline_class).Name); } onto_enrich = this.MBCO_fisher_standard.Analyse_data_instance(data); Ontology_type_enum ontology = Ontology_type_enum.Molecular_biology_cell; string subdirectory = Get_results_subdirectory_for_indicated_ontology(ontology); onto_enrich.Write(subdirectory, "Standard_enrichment_results.txt"); Ontology_enrichment_class onto_enrich_filtered = onto_enrich.Deep_copy(); onto_enrich_filtered.Keep_enrichment_lines_below_pvalue_cutoff(Options.Maximum_pvalue_for_standardDynamicEnrichment); onto_enrich_filtered.Keep_top_x_predictions_per_level_for_each_sample(Options.Kept_top_predictions_standardEnrichment_per_level); onto_enrich_filtered.Write(subdirectory, "Standard_enrichment_results_filtered.txt"); return(onto_enrich_filtered); }
/// <summary>
/// Returns an element-wise copy of array. Returns null for null input.
/// T == string is rejected: an error is reported and an empty array returned
/// (no exception is thrown).
/// </summary>
public static T[] Deep_copy_array <T>(T[] array)
{
    if (typeof(T) == typeof(string))
    {
        Report_class.Write_error_line("{0}: Deep copy array, typeof(T)=={1} is not allowed", typeof(Array_class).Name, typeof(T));
        return (new T[0]);
    }
    if (array == null)
    {
        return (array);
    }
    int array_length = array.Length;
    T[] copy = new T[array_length];
    Array.Copy(array, copy, array_length);
    return (copy);
}
// Builds the two leave-out SCP-SCP networks from the stored leave-out file:
// one configured for the dynamic enrichment ANALYSIS and one for its
// VISUALIZATION (the latter additionally made undirected with uniform edge
// widths). Both receive the signaling-SCP-interaction flag and their respective
// top-quantile cutoffs from Options, and both get node process levels assigned
// from the parent-child network's processName->level dictionary.
// NOTE(review): order matters - the visualization network's levels are set
// AFTER the undirected transformation; confirm before reordering.
private void Generate_leave_out_scp_network() { if (Options.Report) { Report_class.WriteLine("{0}: Generate SCP networks", typeof(Mbc_enrichment_pipeline_class).Name); } Dictionary <string, int> processName_processLevel_dict = Mbco_parentChild_nw.Get_processName_level_dictionary_after_setting_process_level(); Leave_out_class leave_out = new Leave_out_class(); leave_out.Generate_by_reading_safed_file(); Leave_out_scp_network_for_dynamicEnrichment_analysis.Options.Consider_scp_interactions_between_signaling_processes = this.Options.Consider_interactions_between_signalingSCPs_for_dyanmicEnrichment; Leave_out_scp_network_for_dynamicEnrichment_analysis.Options.Top_quantile_of_considered_SCP_interactions_per_level = this.Options.Top_quantile_of_scp_interactions_for_dynamicEnrichment_per_level; Leave_out_scp_network_for_dynamicEnrichment_analysis.Generate_scp_scp_network_from_leave_out(leave_out); Leave_out_scp_network_for_dynamicEnrichment_analysis.Scp_nw.Nodes.Set_processLevel_for_all_nodes_based_on_dictionary(processName_processLevel_dict); Leave_out_scp_network_for_dynamicEnrichment_visualization.Options.Consider_scp_interactions_between_signaling_processes = this.Options.Consider_interactions_between_signalingSCPs_for_dyanmicEnrichment; Leave_out_scp_network_for_dynamicEnrichment_visualization.Options.Top_quantile_of_considered_SCP_interactions_per_level = this.Options.Top_quantile_of_scp_interactions_for_visualization_of_dynamicEnrichment_per_level; Leave_out_scp_network_for_dynamicEnrichment_visualization.Generate_scp_scp_network_from_leave_out(leave_out); Leave_out_scp_network_for_dynamicEnrichment_visualization.Scp_nw.Transform_into_undirected_single_network_and_set_all_widths_to_one(); Leave_out_scp_network_for_dynamicEnrichment_visualization.Scp_nw.Nodes.Set_processLevel_for_all_nodes_based_on_dictionary(processName_processLevel_dict); }