// * Process all schemas in TNM file
// * @param staging Staging object
// * @param fileName name of file
// * @param inputStream InputStream for the file
// * @param singleLineNumber if non-negative, only process this line number
// * @param bJSONFormat true if the file holds "input=" / "expectedOutput=" key-value records rather than plain CSV
// * @return IntegrationResult
public static IntegrationUtils.IntegrationResult processTNMSchema(TNMStagingCSharp.Src.Staging.Staging staging,
    String fileName, Stream inputStream, int singleLineNumber, bool bJSONFormat)
//throws IOException, InterruptedException
{
    Dictionary<TnmOutput, String> output_values = null;
    Dictionary<TnmInput, String> input_values = null;

    // initialize the threads pool
    int n = Math.Min(9, Environment.ProcessorCount + 1);
    ThreadPool.SetMaxThreads(n, n);

    // go over each file
    int processedCases = 0;
    int iLineNumber = 0;
    //int iLineCounter = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    int iThreads = thisMultiTasksExecutor.GetNumThreads();

    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;

    thisMultiTasksExecutor.StartTasks();

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    if (singleLineNumber >= 0)
    {
        System.Diagnostics.Trace.WriteLine("Starting " + fileName + ", line # " + singleLineNumber + " [" + n + " threads]");
    }
    else
    {
        System.Diagnostics.Trace.WriteLine("Starting " + fileName + " [" + (iThreads + 1) + " threads]");
    }

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line = reader.ReadLine();
    String input_line = "";
    String output_line = "";
    String[] input_strs;
    String[] output_strs;
    String[] entries;
    String sExpectedResult = "";
    bool bStageThis = true;

    while (line != null)
    {
        iLineNumber++;
        if (iLineNumber >= 0)
        {
            bStageThis = true;
            if (bJSONFormat)
            {
                // key/value format: collect the "input=" and "expectedOutput=" lines, then stage once the expected output has been seen
                bStageThis = false;
                if (line.IndexOf("input=") >= 0)
                {
                    input_line = line.Trim();
                }
                if (line.IndexOf("expectedOutput=") >= 0)
                {
                    output_line = line.Trim();
                }
                if (output_line.Length > 0)
                {
                    bStageThis = true;
                    input_values = new Dictionary<TnmInput, String>();
                    output_values = new Dictionary<TnmOutput, String>();

                    // strip the leading "input={" / "expectedOutput={" and the trailing "}"
                    input_line = input_line.Substring(7, input_line.Length - 8).Trim();
                    output_line = output_line.Substring(16, output_line.Length - 17).Trim();

                    input_strs = input_line.Split(",".ToCharArray());
                    output_strs = output_line.Split(",".ToCharArray());

                    // map each "key=value" entry onto its TnmInput key
                    foreach (String s in input_strs)
                    {
                        entries = s.Split("=".ToCharArray());
                        if (entries.Length == 2)
                        {
                            entries[0] = entries[0].Trim();
                            entries[1] = entries[1].Trim();
                            foreach (TnmInput inp in TnmInput.Values)
                            {
                                if (inp.toString() == entries[0])
                                {
                                    input_values.Add(inp, entries[1]);
                                }
                            }
                        }
                        else
                        {
                            System.Diagnostics.Trace.WriteLine("Line " + iLineNumber + " has " + entries.Length + " cells; it should be 2. (" + input_line + ")");
                        }
                    }

                    // map each "key=value" entry onto its TnmOutput key
                    foreach (String s in output_strs)
                    {
                        entries = s.Split("=".ToCharArray());
                        if (entries.Length == 2)
                        {
                            entries[0] = entries[0].Trim();
                            entries[1] = entries[1].Trim();
                            foreach (TnmOutput outp in TnmOutput.Values)
                            {
                                if (outp.toString() == entries[0])
                                {
                                    output_values.Add(outp, entries[1]);
                                }
                            }
                        }
                        else
                        {
                            System.Diagnostics.Trace.WriteLine("Line " + iLineNumber + " has " + entries.Length + " cells; it should be 2. (" + output_line + ")");
                        }
                    }
                }
            }
            else
            {
                input_values = new Dictionary<TnmInput, String>();
                output_values = new Dictionary<TnmOutput, String>();

                // Each line is a comma delimited string.
                input_strs = line.Split(",".ToCharArray());
                if (input_strs.Length == 54)
                {
                    String sVal = "";
                    for (int i = 0; i < 54; i++)
                    {
                        sVal = input_strs[i].Trim();
                        if (sVal.Length > 0)
                        {
                            TnmInput in_key = TnmInput.PRIMARY_SITE;
                            TnmOutput out_key = TnmOutput.DERIVED_VERSION;
                            switch (i)
                            {
                                case 0: in_key = TnmInput.PRIMARY_SITE; break;
                                case 1: in_key = TnmInput.HISTOLOGY; break;
                                case 2: in_key = TnmInput.DX_YEAR; break;
                                case 3: in_key = TnmInput.BEHAVIOR; break;
                                case 4: in_key = TnmInput.GRADE; break;
                                case 5: in_key = TnmInput.SEX; break;
                                case 6: in_key = TnmInput.AGE_AT_DX; break;
                                case 7: in_key = TnmInput.RX_SUMM_SURGERY; break;
                                case 8: in_key = TnmInput.RX_SUMM_RADIATION; break;
                                case 9: in_key = TnmInput.REGIONAL_NODES_POSITIVE; break;
                                case 10: in_key = TnmInput.CLIN_T; break;
                                case 11: in_key = TnmInput.CLIN_N; break;
                                case 12: in_key = TnmInput.CLIN_M; break;
                                case 13: in_key = TnmInput.CLIN_STAGE_GROUP_DIRECT; break;
                                case 14: in_key = TnmInput.PATH_T; break;
                                case 15: in_key = TnmInput.PATH_N; break;
                                case 16: in_key = TnmInput.PATH_M; break;
                                case 17: in_key = TnmInput.PATH_STAGE_GROUP_DIRECT; break;
                                case 18: in_key = TnmInput.SSF1; break;
                                case 19: in_key = TnmInput.SSF2; break;
                                case 20: in_key = TnmInput.SSF3; break;
                                case 21: in_key = TnmInput.SSF4; break;
                                case 22: in_key = TnmInput.SSF5; break;
                                case 23: in_key = TnmInput.SSF6; break;
                                case 24: in_key = TnmInput.SSF7; break;
                                case 25: in_key = TnmInput.SSF8; break;
                                case 26: in_key = TnmInput.SSF9; break;
                                case 27: in_key = TnmInput.SSF10; break;
                                case 28: in_key = TnmInput.SSF11; break;
                                case 29: in_key = TnmInput.SSF12; break;
                                case 30: in_key = TnmInput.SSF13; break;
                                case 31: in_key = TnmInput.SSF14; break;
                                case 32: in_key = TnmInput.SSF15; break;
                                case 33: in_key = TnmInput.SSF16; break;
                                case 34: in_key = TnmInput.SSF17; break;
                                case 35: in_key = TnmInput.SSF18; break;
                                case 36: in_key = TnmInput.SSF19; break;
                                case 37: in_key = TnmInput.SSF20; break;
                                case 38: in_key = TnmInput.SSF21; break;
                                case 39: in_key = TnmInput.SSF22; break;
                                case 40: in_key = TnmInput.SSF23; break;
                                case 41: in_key = TnmInput.SSF24; break;
                                case 42: in_key = TnmInput.SSF25; break;
                                case 43: out_key = TnmOutput.DERIVED_VERSION; break;
                                case 44: out_key = TnmOutput.CLIN_STAGE_GROUP; break;
                                case 45: out_key = TnmOutput.PATH_STAGE_GROUP; break;
                                case 46: out_key = TnmOutput.COMBINED_STAGE_GROUP; break;
                                case 47: out_key = TnmOutput.COMBINED_T; break;
                                case 48: out_key = TnmOutput.COMBINED_N; break;
                                case 49: out_key = TnmOutput.COMBINED_M; break;
                                case 50: out_key = TnmOutput.SOURCE_T; break;
                                case 51: out_key = TnmOutput.SOURCE_N; break;
                                case 52: out_key = TnmOutput.SOURCE_M; break;
                            }
                            if (i <= 42)
                            {
                                input_values.Add(in_key, sVal);
                            }
                            else if (i <= 52)
                            {
                                output_values.Add(out_key, sVal);
                            }
                        }
                    }
                    sExpectedResult = input_strs[53];
                }
                else
                {
                    System.Diagnostics.Trace.WriteLine("Error: Line " + iLineNumber + " has " + input_strs.Length + " entries.");
                }
            }

            if (bStageThis)
            {
                processedCases++;

                MultiTask_DataObj obj = new MultiTask_DataObj();
                obj.mInputValues = input_values;
                obj.mOutputValues = output_values;
                obj.mbJSONFormat = bJSONFormat;
                obj.msExpectedResult = sExpectedResult;
                obj.msFileName = fileName;
                obj.miLineNum = iLineNumber;
                thisMultiTasksExecutor.AddDataItem(obj);

                // DEBUG
                /*
                 * iLineCounter++;
                 * if (iLineCounter >= 50000)
                 * {
                 *     IntegrationUtils.WritelineToLog("Line: " + iLineNumber + " Time: " + stopwatch.Elapsed.TotalMilliseconds + " ms.");
                 *     iLineCounter = 0;
                 * }
                 */

                input_line = "";
                output_line = "";
            }
        }
        line = reader.ReadLine();
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();
    String perMs = String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim();
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " +
        TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");

    if (miMultiTask_FailedCases > 0)
    {
        System.Diagnostics.Trace.WriteLine("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        System.Diagnostics.Trace.WriteLine("");
    }
    System.Diagnostics.Trace.WriteLine("-----------------------------------------------");

    inputStream.Close();

    return new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases);
}
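
// Illustrative call for processTNMSchema (a sketch only; the file name and how the Staging instance
// is obtained are assumptions for the example, not something defined in this file):
//
//   TNMStagingCSharp.Src.Staging.Staging staging = ...;      // TNM Staging instance from the staging library
//   using (Stream stream = File.OpenRead("tnm_cases.csv"))   // hypothetical CSV test-data file
//   {
//       IntegrationUtils.IntegrationResult result =
//           processTNMSchema(staging, "tnm_cases.csv", stream, -1, false);  // -1 = process every line; false = CSV format
//   }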

// * Process all schemas in file
// * @param staging Staging object
// * @param fileName name of file
// * @param inputStream InputStream for the file
// * @param singleLineNumber if non-negative, only process this line number
// * @return IntegrationResult
public static IntegrationUtils.IntegrationResult processSchema_MultiTask(TNMStagingCSharp.Src.Staging.Staging staging,
    String fileName, Stream inputStream, int singleLineNumber)
//throws IOException, InterruptedException
{
    // set up a mapping of output field positions in the CSV file
    Dictionary<CsOutput, int> mappings = new Dictionary<CsOutput, int>(100);
    mappings[CsOutput.AJCC6_T] = 42;
    mappings[CsOutput.AJCC6_TDESCRIPTOR] = 43;
    mappings[CsOutput.AJCC6_N] = 44;
    mappings[CsOutput.AJCC6_NDESCRIPTOR] = 45;
    mappings[CsOutput.AJCC6_M] = 46;
    mappings[CsOutput.AJCC6_MDESCRIPTOR] = 47;
    mappings[CsOutput.AJCC6_STAGE] = 48;
    mappings[CsOutput.AJCC7_T] = 49;
    mappings[CsOutput.AJCC7_TDESCRIPTOR] = 50;
    mappings[CsOutput.AJCC7_N] = 51;
    mappings[CsOutput.AJCC7_NDESCRIPTOR] = 52;
    mappings[CsOutput.AJCC7_M] = 53;
    mappings[CsOutput.AJCC7_MDESCRIPTOR] = 54;
    mappings[CsOutput.AJCC7_STAGE] = 55;
    mappings[CsOutput.SS1977_T] = 56;
    mappings[CsOutput.SS1977_N] = 57;
    mappings[CsOutput.SS1977_M] = 58;
    mappings[CsOutput.SS1977_STAGE] = 59;
    mappings[CsOutput.SS2000_T] = 60;
    mappings[CsOutput.SS2000_N] = 61;
    mappings[CsOutput.SS2000_M] = 62;
    mappings[CsOutput.SS2000_STAGE] = 63;
    mappings[CsOutput.STOR_AJCC6_T] = 64;
    mappings[CsOutput.STOR_AJCC6_TDESCRIPTOR] = 65;
    mappings[CsOutput.STOR_AJCC6_N] = 66;
    mappings[CsOutput.STOR_AJCC6_NDESCRIPTOR] = 67;
    mappings[CsOutput.STOR_AJCC6_M] = 68;
    mappings[CsOutput.STOR_AJCC6_MDESCRIPTOR] = 69;
    mappings[CsOutput.STOR_AJCC6_STAGE] = 70;
    mappings[CsOutput.STOR_AJCC7_T] = 71;
    mappings[CsOutput.STOR_AJCC7_TDESCRIPTOR] = 72;
    mappings[CsOutput.STOR_AJCC7_N] = 73;
    mappings[CsOutput.STOR_AJCC7_NDESCRIPTOR] = 74;
    mappings[CsOutput.STOR_AJCC7_M] = 75;
    mappings[CsOutput.STOR_AJCC7_MDESCRIPTOR] = 76;
    mappings[CsOutput.STOR_AJCC7_STAGE] = 77;
    mappings[CsOutput.STOR_SS1977_STAGE] = 78;
    mappings[CsOutput.STOR_SS2000_STAGE] = 79;

    // go over each file
    int processedCases = 0;
    int iLineNumber = 0;
    //int iLineCounter = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    int iThreads = thisMultiTasksExecutor.GetNumThreads();

    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;

    thisMultiTasksExecutor.StartTasks();

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    if (singleLineNumber >= 0)
    {
        IntegrationUtils.WritelineToLog("Starting " + fileName + ", line # " + singleLineNumber + "...");
    }
    else
    {
        IntegrationUtils.WritelineToLog("Starting " + fileName + " [" + (iThreads + 1) + " threads]");
    }

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line = reader.ReadLine();
    while (line != null)
    {
        iLineNumber++;
        if (iLineNumber >= 0)
        {
            processedCases++;

            // split the CSV record
            String[] values = line.Split(",".ToCharArray());

            // if a single line was requested, skip all other lines
            // (read the next line before continuing so the loop still advances)
            if (singleLineNumber >= 0 && singleLineNumber != iLineNumber)
            {
                line = reader.ReadLine();
                continue;
            }

            if (values.Length != 80)
            {
                IntegrationUtils.WritelineToLog("Line " + iLineNumber + " has " + values.Length + " cells; it should be 80.");
            }
            else
            {
                MultiTask_DataObj obj = new MultiTask_DataObj();
                obj.mMappings = mappings;
                obj.mValues = values;
                obj.msFileName = fileName;
                obj.miLineNum = iLineNumber;
                thisMultiTasksExecutor.AddDataItem(obj);

                /*
                 * iLineCounter++;
                 * if (iLineCounter > 50000)
                 * {
                 *     IntegrationUtils.WritelineToLog("Time: " + stopwatch.Elapsed.TotalMilliseconds + " ms.");
                 *     iLineCounter = 0;
                 * }
                 */
            }
        }
        line = reader.ReadLine();
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();

    String perMs = String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim();
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " +
        TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");

    if (miMultiTask_FailedCases > 0)
    {
        IntegrationUtils.WritelineToLog("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        IntegrationUtils.WritelineToLog("");
    }
    IntegrationUtils.WritelineToLog("-----------------------------------------------");

    inputStream.Close();

    return new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases);
}

// * Process all schema selections in file
// * @param staging Staging object
// * @param fileName name of file
// * @param inputStream InputStream for the file
// * @param testLog TestContext used for test logging
// * @return IntegrationResult
public static IntegrationUtils.IntegrationResult processSchemaSelection_MultiTask(TNMStagingCSharp.Src.Staging.Staging staging,
    String fileName, Stream inputStream, TestContext testLog)
//throws IOException, InterruptedException
{
    IntegrationUtils.TestReportLog = testLog;

    // go over each file
    int processedCases = 0;
    int iLineNumber = 0;
    //int iLineCounter = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    thisMultiTasksExecutor.StartTasks();

    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line = reader.ReadLine();
    while (line != null)
    {
        iLineNumber++;
        if (iLineNumber >= 0)
        {
            processedCases++;

            // split the string; important to keep empty trailing values in the resulting array
            String[] parts = line.Split(",".ToCharArray(), 1000);
            if (parts.Length != 4)
            {
                throw new System.InvalidOperationException("Bad record in schema_selection.txt on line number: " + iLineNumber);
            }
            else
            {
                MultiTask_DataObj obj = new MultiTask_DataObj();
                obj.mParts = parts;
                obj.msFullLine = line;
                obj.miLineNum = iLineNumber;
                thisMultiTasksExecutor.AddDataItem(obj);

                // DEBUG
                /*
                 * iLineCounter++;
                 * if (iLineCounter >= 500000)
                 * {
                 *     IntegrationUtils.WritelineToLog("Processed Cases: " + processedCases);
                 *     iLineCounter = 0;
                 * }
                 */
            }
        }
        line = reader.ReadLine();
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();

    String perMs = String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim();
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " +
        TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");

    if (miMultiTask_FailedCases > 0)
    {
        IntegrationUtils.WritelineToLog("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        IntegrationUtils.WritelineToLog("");
    }
    IntegrationUtils.WritelineToLog("-----------------------------------------------");

    inputStream.Close();

    return new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases);
}