// Runs the full "bad cases" TNM integration suite: schema selection followed by staging of
// every .gz case file found in the error-case data directory. Gated behind
// DebugSettings.RUN_HUGE_BAD_TNM_TESTS because the data set is huge.
public void testTNMBigIntegrationTestBad()
{
    if (DebugSettings.RUN_HUGE_BAD_TNM_TESTS)
    {
        List<String> _SCHEMA_FILES = new List<String>();
        IntegrationUtils.WritelineToLog("Starting TNMBigIntegrationTest::testBigIntegrationTestBad...");

        TNMStagingCSharp.Src.Staging.Staging staging =
            TNMStagingCSharp.Src.Staging.Staging.getInstance(TnmDataProvider.getInstance(TnmVersion.LATEST));

        // hard-code data directory based on Windows vs Linux
        String dataDirectory;
        if (Environment.OSVersion.Platform != PlatformID.Unix)
            dataDirectory = "\\\\omni\\btp\\csb\\Staging\\TNM_ErrorCases\\";
        else
            dataDirectory = "/prj/csb/Staging/TNM";

        long totalFailures = 0;

        // FIX: build the file path with Path.Combine so the Unix branch (which has no trailing
        // separator) still yields a valid path; the old concatenation hard-coded a backslash.
        String sFilePath = Path.Combine(dataDirectory, "schema_selection", "tnm_schema_identification.txt.gz");

        // FIX: accumulate failures across both phases; the second assignment previously
        // overwrote (and silently discarded) the schema-selection failure count.
        totalFailures = PerformTNMSchemaSelection(staging, sFilePath);
        totalFailures += PerformTNMStaging(staging, dataDirectory, _SCHEMA_FILES);
    }
}
// Stages every ".gz" case file found in dataDirectory, optionally filtered to the file names
// listed in _SCHEMA_FILES (an empty list means "process everything"), and logs per-case timing.
// Returns the total number of failing cases across all processed files.
public long PerformTNMStaging(TNMStagingCSharp.Src.Staging.Staging staging, String dataDirectory, List<String> _SCHEMA_FILES)
{
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    long totalFiles = 0;
    long totalCases = 0;
    long totalFailures = 0;

    // get the complete list of files
    string[] arrfiles = Directory.GetFiles(dataDirectory, "*.*");
    if (arrfiles.Length > 0)
    {
        // sort the files by name so runs are deterministic across platforms
        Array.Sort(arrfiles);
        foreach (String f in arrfiles)
        {
            String sFilename = Path.GetFileName(f);
            if (File.Exists(f) && sFilename.EndsWith(".gz"))
            {
                // empty filter list means "process every file"
                if (_SCHEMA_FILES.Count == 0 || _SCHEMA_FILES.Contains(sFilename))
                {
                    totalFiles += 1;
                    IntegrationUtils.WritelineToLog("Staging File: " + f);

                    // FIX: dispose the streams; the original leaked the FileStream and
                    // GZipStream handles for every file processed.
                    using (FileStream fstream = File.Open(f, FileMode.Open, FileAccess.Read, FileShare.Read))
                    using (GZipStream decompressionStream = new GZipStream(fstream, CompressionMode.Decompress))
                    {
                        IntegrationUtils.IntegrationResult result =
                            TnmIntegrationSchemaStage.processTNMSchema(staging, sFilename, decompressionStream, false);
                        totalCases += result.getNumCases();
                        totalFailures += result.getNumFailures();
                    }
                }
            }
        }
    }

    stopwatch.Stop();

    // FIX: guard the ms/case computation; dividing by zero cases printed "NaN".
    String perMs = totalCases > 0
        ? String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / totalCases)).Trim()
        : "0";
    IntegrationUtils.WritelineToLog("");
    IntegrationUtils.WritelineToLog("Completed " + totalCases + " cases (" + totalFiles + " files) in " +
        IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    if (totalFailures > 0)
    {
        IntegrationUtils.WritelineToLog("There were " + totalFailures + " failing cases.");
    }
    return totalFailures;
}
// Worker-thread task body for one schema-selection test case. task_data carries the CSV
// parts of a single input line: [0]=site, [1]=histology, [2]=discriminator, [3]=expected
// schema id (empty when no schema should match). Failures are logged and tallied via the
// shared miMultiTask_FailedCases counter (Interlocked, since many workers run concurrently).
public static void MultiTask_TaskCompute(int id, Object task_data)
{
    MultiTask_DataObj thisDataObj = (MultiTask_DataObj)task_data;
    try
    {
        SchemaLookup lookup = new SchemaLookup(thisDataObj.mParts[0], thisDataObj.mParts[1]);
        // NOTE(review): the same value (mParts[2]) feeds both the SSF25 and SEX discriminator
        // keys -- confirm this is intentional and not a copy/paste slip.
        lookup.setInput(TnmStagingData.SSF25_KEY, thisDataObj.mParts[2]);
        lookup.setInput(TnmStagingData.SEX_KEY, thisDataObj.mParts[2]);
        List<StagingSchema> lookups = mMultiTask_Staging.lookupSchema(lookup);
        if (lookups == null)
        {
            IntegrationUtils.WritelineToLog("Line #" + thisDataObj.miLineNum + " [" + thisDataObj.msFullLine + "] --> mStaging.lookupSchema returned NULL.");
            Interlocked.Increment(ref miMultiTask_FailedCases);
        }
        else if (thisDataObj.mParts[3].Length == 0)
        {
            // no schema expected: exactly one match is a failure
            if (lookups.Count == 1)
            {
                IntegrationUtils.WritelineToLog("Line #" + thisDataObj.miLineNum + " [" + thisDataObj.msFullLine + "] --> The schema selection should not have found any schema but did: " + lookups[0].getId());
                Interlocked.Increment(ref miMultiTask_FailedCases);
            }
        }
        else
        {
            // a specific schema is expected: require exactly one match with the expected id
            if (lookups.Count != 1)
            {
                IntegrationUtils.WritelineToLog("Line #" + thisDataObj.miLineNum + " [" + thisDataObj.msFullLine + "] --> The schema selection should have found a schema, " + thisDataObj.mParts[3] + ", but did not.");
                Interlocked.Increment(ref miMultiTask_FailedCases);
            }
            else if (lookups[0].getId() != thisDataObj.mParts[3])
            {
                IntegrationUtils.WritelineToLog("Line #" + thisDataObj.miLineNum + " [" + thisDataObj.msFullLine + "] --> The schema selection found schema " + lookups[0].getId() + " but it should have been " + thisDataObj.mParts[3] + ".");
                Interlocked.Increment(ref miMultiTask_FailedCases);
            }
        }
    }
    catch (Exception e)
    {
        // only the first failure gets the full stack trace, to keep the log readable
        if (miMultiTask_FailedCases == 0)
        {
            IntegrationUtils.WritelineToLog("Line #" + thisDataObj.miLineNum + " --> Exception processing schema selection: " + e.Message);
            IntegrationUtils.WritelineToLog(" StackTrace: " + e.StackTrace);
        }
        Interlocked.Increment(ref miMultiTask_FailedCases);
    }
    // always counted, pass or fail, so the caller can verify every queued item ran
    Interlocked.Increment(ref miMultiTask_ThreadProcessedCases);
}
// Runs the schema-selection integration pass over the gzipped identification file at
// sFilePath and returns the number of failing cases.
public long PerformTNMSchemaSelection(TNMStagingCSharp.Src.Staging.Staging staging, String sFilePath)
{
    // FIX: dispose the streams; the original leaked both the FileStream and GZipStream
    // handles. (Disposing after the callee has already closed the stream is harmless.)
    using (FileStream fstream = File.Open(sFilePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (GZipStream decompressionStream = new GZipStream(fstream, CompressionMode.Decompress))
    {
        IntegrationUtils.IntegrationResult result = TnmIntegrationSchemaSelection.processSchemaSelection(
            staging, "tnm_schema_identification.txt.gz", decompressionStream, null);
        IntegrationUtils.WritelineToLog("-----------------------------------------------");
        return result.getNumFailures();
    }
}
// Runs the full "good cases" TNM integration suite: schema selection followed by staging of
// every .gz case file in the TNM data directory. Gated behind
// DebugSettings.RUN_HUGE_GOOD_TNM_TESTS because the data set is huge.
public void testTNMBigIntegrationTestGood()
{
    if (DebugSettings.RUN_HUGE_GOOD_TNM_TESTS)
    {
        List<String> _SCHEMA_FILES = new List<String>();
        IntegrationUtils.WritelineToLog("Starting TNMBigIntegrationTest::testBigIntegrationTestGood...");

        TNMStagingCSharp.Src.Staging.Staging staging = null;
        if (DebugSettings.USE_LOCAL_ZIP_FILE_FOR_TNM_TESTS)
        {
            // NOTE(review): SourceStream is deliberately left un-disposed here -- the provider
            // appears to read from the zip after construction; confirm before adding a using.
            String filename = "TNM_15.zip";
            FileStream SourceStream = File.Open(filename, FileMode.Open);
            ExternalStagingFileDataProvider provider = new ExternalStagingFileDataProvider(SourceStream);
            staging = TNMStagingCSharp.Src.Staging.Staging.getInstance(provider);
        }
        else
        {
            staging = TNMStagingCSharp.Src.Staging.Staging.getInstance(TnmDataProvider.getInstance(TnmVersion.LATEST));
        }

        // hard-code data directory based on Windows vs Linux
        String dataDirectory;
        if (Environment.OSVersion.Platform != PlatformID.Unix)
            dataDirectory = "\\\\omni\\btp\\csb\\Staging\\TNM\\";
        else
            dataDirectory = "/prj/csb/Staging/TNM";

        long totalFailures = 0;

        // FIX: build the file path with Path.Combine so the Unix branch (which has no trailing
        // separator) still yields a valid path; the old concatenation hard-coded a backslash.
        String sFilePath = Path.Combine(dataDirectory, "schema_selection", "tnm_schema_identification.txt.gz");

        // FIX: accumulate failures across both phases; the second assignment previously
        // overwrote (and silently discarded) the schema-selection failure count.
        totalFailures = PerformTNMSchemaSelection(staging, sFilePath);
        totalFailures += PerformTNMStaging(staging, dataDirectory, _SCHEMA_FILES);
    }
}
// * Process all schemas in file, fanning each CSV line out to the worker-thread pool.
// * @param staging Staging object (published to workers via mMultiTask_Staging)
// * @param fileName name of file (used for log messages only)
// * @param inputStream stream of CSV case records (80 comma-separated cells per line)
// * @param singleLineNumber if >= 0, only process this 1-based line number
// * @return IntegrationResult (cases processed, cases failed)
public static IntegrationUtils.IntegrationResult processSchema_MultiTask(TNMStagingCSharp.Src.Staging.Staging staging,
    String fileName, Stream inputStream, int singleLineNumber)
//throws IOException, InterruptedException
{
    // set up a mapping of output field positions in the CSV file
    Dictionary<CsOutput, int> mappings = new Dictionary<CsOutput, int>(100);
    mappings[CsOutput.AJCC6_T] = 42;
    mappings[CsOutput.AJCC6_TDESCRIPTOR] = 43;
    mappings[CsOutput.AJCC6_N] = 44;
    mappings[CsOutput.AJCC6_NDESCRIPTOR] = 45;
    mappings[CsOutput.AJCC6_M] = 46;
    mappings[CsOutput.AJCC6_MDESCRIPTOR] = 47;
    mappings[CsOutput.AJCC6_STAGE] = 48;
    mappings[CsOutput.AJCC7_T] = 49;
    mappings[CsOutput.AJCC7_TDESCRIPTOR] = 50;
    mappings[CsOutput.AJCC7_N] = 51;
    mappings[CsOutput.AJCC7_NDESCRIPTOR] = 52;
    mappings[CsOutput.AJCC7_M] = 53;
    mappings[CsOutput.AJCC7_MDESCRIPTOR] = 54;
    mappings[CsOutput.AJCC7_STAGE] = 55;
    mappings[CsOutput.SS1977_T] = 56;
    mappings[CsOutput.SS1977_N] = 57;
    mappings[CsOutput.SS1977_M] = 58;
    mappings[CsOutput.SS1977_STAGE] = 59;
    mappings[CsOutput.SS2000_T] = 60;
    mappings[CsOutput.SS2000_N] = 61;
    mappings[CsOutput.SS2000_M] = 62;
    mappings[CsOutput.SS2000_STAGE] = 63;
    mappings[CsOutput.STOR_AJCC6_T] = 64;
    mappings[CsOutput.STOR_AJCC6_TDESCRIPTOR] = 65;
    mappings[CsOutput.STOR_AJCC6_N] = 66;
    mappings[CsOutput.STOR_AJCC6_NDESCRIPTOR] = 67;
    mappings[CsOutput.STOR_AJCC6_M] = 68;
    mappings[CsOutput.STOR_AJCC6_MDESCRIPTOR] = 69;
    mappings[CsOutput.STOR_AJCC6_STAGE] = 70;
    mappings[CsOutput.STOR_AJCC7_T] = 71;
    mappings[CsOutput.STOR_AJCC7_TDESCRIPTOR] = 72;
    mappings[CsOutput.STOR_AJCC7_N] = 73;
    mappings[CsOutput.STOR_AJCC7_NDESCRIPTOR] = 74;
    mappings[CsOutput.STOR_AJCC7_M] = 75;
    mappings[CsOutput.STOR_AJCC7_MDESCRIPTOR] = 76;
    mappings[CsOutput.STOR_AJCC7_STAGE] = 77;
    mappings[CsOutput.STOR_SS1977_STAGE] = 78;
    mappings[CsOutput.STOR_SS2000_STAGE] = 79;

    int processedCases = 0;
    int iLineNumber = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    int iThreads = thisMultiTasksExecutor.GetNumThreads();

    // publish shared state for the worker threads before any data items are queued
    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;
    thisMultiTasksExecutor.StartTasks();

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    if (singleLineNumber >= 0)
    {
        IntegrationUtils.WritelineToLog("Starting " + fileName + ", line # " + singleLineNumber + "...");
    }
    else
    {
        IntegrationUtils.WritelineToLog("Starting " + fileName + " [" + (iThreads + 1) + " threads]");
    }

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line;
    // FIX: read the next line at the loop head. The original read at the loop BOTTOM and
    // used 'continue' to skip non-matching lines in single-line mode, which jumped over
    // the ReadLine() call and re-processed the same line forever (infinite loop whenever
    // singleLineNumber >= 0). Reading in the while-condition makes 'continue' safe.
    while ((line = reader.ReadLine()) != null)
    {
        iLineNumber++;

        // if a single line was requested, skip all other lines
        if (singleLineNumber >= 0 && singleLineNumber != iLineNumber)
            continue;

        processedCases++;

        // split the CSV record
        String[] values = line.Split(",".ToCharArray());

        if (values.Length != 80)
        {
            IntegrationUtils.WritelineToLog("Line " + iLineNumber + " has " + values.Length + " cells; it should be 80.");
        }
        else
        {
            MultiTask_DataObj obj = new MultiTask_DataObj();
            obj.mMappings = mappings;
            obj.mValues = values;
            obj.msFileName = fileName;
            obj.miLineNum = iLineNumber;
            thisMultiTasksExecutor.AddDataItem(obj);
        }
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();

    // guard the ms/case computation against divide-by-zero (NaN) when nothing matched
    String perMs = processedCases > 0
        ? String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim()
        : "0";
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " +
        TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");
    if (miMultiTask_FailedCases > 0)
    {
        IntegrationUtils.WritelineToLog("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        IntegrationUtils.WritelineToLog("");
    }
    IntegrationUtils.WritelineToLog("-----------------------------------------------");
    inputStream.Close();

    return new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases);
}
// Worker-thread task body for one Collaborative Stage (CS) test case. task_data carries the
// 80-cell CSV record (mValues) plus the column->CsOutput mapping (mMappings). The case is
// staged and every expected output cell is compared to the actual output; any mismatch is
// logged and tallied via the shared miMultiTask_FailedCases counter.
public static void MultiTask_TaskCompute(int id, Object task_data)
{
    MultiTask_DataObj thisDataObj = (MultiTask_DataObj)task_data;

    // load up inputs (CSV columns 0-41, in fixed order)
    CsStagingData data = new CsStagingData();
    data.setInput(CsInput.PRIMARY_SITE, thisDataObj.mValues[0]);
    data.setInput(CsInput.HISTOLOGY, thisDataObj.mValues[1]);
    data.setInput(CsInput.DX_YEAR, thisDataObj.mValues[2]);
    data.setInput(CsInput.CS_VERSION_ORIGINAL, thisDataObj.mValues[3]);
    data.setInput(CsInput.BEHAVIOR, thisDataObj.mValues[4]);
    data.setInput(CsInput.GRADE, thisDataObj.mValues[5]);
    data.setInput(CsInput.AGE_AT_DX, thisDataObj.mValues[6]);
    data.setInput(CsInput.LVI, thisDataObj.mValues[7]);
    data.setInput(CsInput.TUMOR_SIZE, thisDataObj.mValues[8]);
    data.setInput(CsInput.EXTENSION, thisDataObj.mValues[9]);
    data.setInput(CsInput.EXTENSION_EVAL, thisDataObj.mValues[10]);
    data.setInput(CsInput.LYMPH_NODES, thisDataObj.mValues[11]);
    data.setInput(CsInput.LYMPH_NODES_EVAL, thisDataObj.mValues[12]);
    data.setInput(CsInput.REGIONAL_NODES_POSITIVE, thisDataObj.mValues[13]);
    data.setInput(CsInput.REGIONAL_NODES_EXAMINED, thisDataObj.mValues[14]);
    data.setInput(CsInput.METS_AT_DX, thisDataObj.mValues[15]);
    data.setInput(CsInput.METS_EVAL, thisDataObj.mValues[16]);
    data.setInput(CsInput.SSF1, thisDataObj.mValues[17]);
    data.setInput(CsInput.SSF2, thisDataObj.mValues[18]);
    data.setInput(CsInput.SSF3, thisDataObj.mValues[19]);
    data.setInput(CsInput.SSF4, thisDataObj.mValues[20]);
    data.setInput(CsInput.SSF5, thisDataObj.mValues[21]);
    data.setInput(CsInput.SSF6, thisDataObj.mValues[22]);
    data.setInput(CsInput.SSF7, thisDataObj.mValues[23]);
    data.setInput(CsInput.SSF8, thisDataObj.mValues[24]);
    data.setInput(CsInput.SSF9, thisDataObj.mValues[25]);
    data.setInput(CsInput.SSF10, thisDataObj.mValues[26]);
    data.setInput(CsInput.SSF11, thisDataObj.mValues[27]);
    data.setInput(CsInput.SSF12, thisDataObj.mValues[28]);
    data.setInput(CsInput.SSF13, thisDataObj.mValues[29]);
    data.setInput(CsInput.SSF14, thisDataObj.mValues[30]);
    data.setInput(CsInput.SSF15, thisDataObj.mValues[31]);
    data.setInput(CsInput.SSF16, thisDataObj.mValues[32]);
    data.setInput(CsInput.SSF17, thisDataObj.mValues[33]);
    data.setInput(CsInput.SSF18, thisDataObj.mValues[34]);
    data.setInput(CsInput.SSF19, thisDataObj.mValues[35]);
    data.setInput(CsInput.SSF20, thisDataObj.mValues[36]);
    data.setInput(CsInput.SSF21, thisDataObj.mValues[37]);
    data.setInput(CsInput.SSF22, thisDataObj.mValues[38]);
    data.setInput(CsInput.SSF23, thisDataObj.mValues[39]);
    data.setInput(CsInput.SSF24, thisDataObj.mValues[40]);
    data.setInput(CsInput.SSF25, thisDataObj.mValues[41]);
    try
    {
        // save the expected outputs, keyed by output-field name
        Dictionary<String, String> output = new Dictionary<String, String>(100, StringComparer.Ordinal);
        String sKeyValue = "";
        foreach (KeyValuePair<CsOutput, int> entry in thisDataObj.mMappings)
        {
            sKeyValue = entry.Key.toString();
            output[sKeyValue] = thisDataObj.mValues[entry.Value];
        }
        // run collaborative stage; if no schema found, set the output to empty
        SchemaLookup lookup = new SchemaLookup(data.getInput(CsInput.PRIMARY_SITE), data.getInput(CsInput.HISTOLOGY));
        lookup.setInput(CsStagingData.SSF25_KEY, data.getInput(CsInput.SSF25));
        // NOTE(review): no null-check on the lookup result here (the schema-selection worker
        // checks for null) -- confirm lookupSchema cannot return null in this path.
        List<StagingSchema> schemas = mMultiTask_Staging.lookupSchema(lookup);
        if (schemas.Count == 1)
        {
            mMultiTask_Staging.stage(data);
        }
        else
        {
            // ambiguous/no schema: mark the output invalid so every comparison below mismatches
            Dictionary<String, String> outResult = new Dictionary<String, String>(2, StringComparer.Ordinal);
            outResult["schema_id"] = "<invalid>";
            data.setOutput(outResult);
        }
        List<String> mismatches = new List<String>();
        // compare results: expected (from CSV) vs actual (from staging), both trimmed,
        // with a missing value treated as empty string
        foreach (KeyValuePair<String, String> entry in output)
        {
            String expected = "";
            output.TryGetValue(entry.Key, out expected);
            if (expected == null) { expected = ""; }
            expected = expected.Trim();
            String actual = "";
            data.getOutput().TryGetValue(entry.Key, out actual);
            if (actual == null) { actual = ""; }
            actual = actual.Trim();
            if (expected != actual)
            {
                mismatches.Add(" " + thisDataObj.miLineNum + " --> " + entry.Key + ": EXPECTED '" + expected + "' ACTUAL: '" + actual + "'");
            }
        }
        if (mismatches.Count != 0)
        {
            // dump the full context (schema, result, inputs, engine errors) for diagnosis
            String sSchemaIDValue = data.getSchemaId();
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> [" + sSchemaIDValue + "] Mismatches in " + thisDataObj.msFileName);
            foreach (String mismatch in mismatches)
            {
                IntegrationUtils.WritelineToLog(mismatch);
            }
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " *** RESULT: " + data.getResult());
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> " + IntegrationUtils.convertInputMap(data.getInput()));
            if (data.getErrors().Count > 0)
            {
                IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> ERRORS: ");
                foreach (Error e in data.getErrors())
                {
                    IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> (" + e.getTable() + ": " + e.getMessage() + ") ");
                }
            }
            Interlocked.Increment(ref miMultiTask_FailedCases);
        }
    }
    catch (Exception e)
    {
        IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> Exception processing " + thisDataObj.msFileName + " : " + e.Message);
        Interlocked.Increment(ref miMultiTask_FailedCases);
    }
    // always counted, pass or fail, so the caller can verify every queued item ran
    Interlocked.Increment(ref miMultiTask_ThreadProcessedCases);
}
// * Process all schemas in TNM file, fanning each parsed case out to the worker-thread pool.
// * @param staging Staging object (published to workers via mMultiTask_Staging)
// * @param fileName name of file (used for log messages only)
// * @param inputStream stream of case records
// * @param singleLineNumber if >= 0, only process this line number (note: typed int, not
// *        nullable, despite the original Java-style comment)
// * @param bJSONFormat true when the file holds buffered "input=..."/"expectedOutput=..."
// *        line pairs; false for 54-column CSV records
// * @return IntegrationResult (cases processed, cases failed)
public static IntegrationUtils.IntegrationResult processTNMSchema(TNMStagingCSharp.Src.Staging.Staging staging, String fileName, Stream inputStream, int singleLineNumber, bool bJSONFormat)
//throws IOException, InterruptedException
{
    Dictionary<TnmOutput, String> output_values = null;
    Dictionary<TnmInput, String> input_values = null;

    // initialize the threads pool
    int n = Math.Min(9, Environment.ProcessorCount + 1);
    ThreadPool.SetMaxThreads(n, n);

    // go over each file
    int processedCases = 0;
    int iLineNumber = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    int iThreads = thisMultiTasksExecutor.GetNumThreads();

    // publish shared state for the worker threads before starting them
    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;
    thisMultiTasksExecutor.StartTasks();

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    if (singleLineNumber >= 0)
    {
        System.Diagnostics.Trace.WriteLine("Starting " + fileName + ", line # " + singleLineNumber + " [" + n + " threads]");
    }
    else
    {
        System.Diagnostics.Trace.WriteLine("Starting " + fileName + " [" + (iThreads + 1) + " threads]");
    }

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line = reader.ReadLine();
    String input_line = "";
    String output_line = "";
    String[] input_strs;
    String[] output_strs;
    String[] entries;
    String sExpectedResult = "";
    bool bStageThis = true;
    while (line != null)
    {
        iLineNumber++;
        if (iLineNumber >= 0) // NOTE(review): always true -- looks like a leftover debug filter
        {
            bStageThis = true;
            if (bJSONFormat)
            {
                // JSON-ish mode: an "input=..." line is buffered until the matching
                // "expectedOutput=..." line arrives; only then is the pair queued for staging.
                bStageThis = false;
                if (line.IndexOf("input=") >= 0)
                {
                    input_line = line.Trim();
                }
                if (line.IndexOf("expectedOutput=") >= 0)
                {
                    output_line = line.Trim();
                }
                if (output_line.Length > 0)
                {
                    bStageThis = true;
                    input_values = new Dictionary<TnmInput, String>();
                    output_values = new Dictionary<TnmOutput, String>();
                    // strip the "input=..."/"expectedOutput=..." wrappers via fixed offsets;
                    // presumably drops a one-character delimiter on each end -- TODO confirm
                    // the exact on-disk format these offsets assume
                    input_line = input_line.Substring(7, input_line.Length - 8).Trim();
                    output_line = output_line.Substring(16, output_line.Length - 17).Trim();
                    input_strs = input_line.Split(",".ToCharArray());
                    output_strs = output_line.Split(",".ToCharArray());
                    // each piece is "key=value"; match keys onto the TnmInput enum by name
                    foreach (String s in input_strs)
                    {
                        entries = s.Split("=".ToCharArray());
                        if (entries.Length == 2)
                        {
                            entries[0] = entries[0].Trim();
                            entries[1] = entries[1].Trim();
                            foreach (TnmInput inp in TnmInput.Values)
                            {
                                if (inp.toString() == entries[0])
                                {
                                    input_values.Add(inp, entries[1]);
                                }
                            }
                        }
                        else
                        {
                            System.Diagnostics.Trace.WriteLine("Line " + iLineNumber + " has " + entries.Length + " cells; it should be 2. (" + input_line + ")");
                        }
                    }
                    // same matching for the expected outputs, onto the TnmOutput enum
                    foreach (String s in output_strs)
                    {
                        entries = s.Split("=".ToCharArray());
                        if (entries.Length == 2)
                        {
                            entries[0] = entries[0].Trim();
                            entries[1] = entries[1].Trim();
                            foreach (TnmOutput outp in TnmOutput.Values)
                            {
                                if (outp.toString() == entries[0])
                                {
                                    output_values.Add(outp, entries[1]);
                                }
                            }
                        }
                        else
                        {
                            System.Diagnostics.Trace.WriteLine("Line " + iLineNumber + " has " + entries.Length + " cells; it should be 2. (" + output_line + ")");
                        }
                    }
                }
            }
            else
            {
                input_values = new Dictionary<TnmInput, String>();
                output_values = new Dictionary<TnmOutput, String>();
                // Each line is a comma delimited string:
                // columns 0-42 are staging inputs, 43-52 expected outputs,
                // column 53 the expected staging result name.
                input_strs = line.Split(",".ToCharArray());
                if (input_strs.Length == 54)
                {
                    String sVal = "";
                    for (int i = 0; i < 54; i++)
                    {
                        sVal = input_strs[i].Trim();
                        if (sVal.Length > 0) // empty cells are simply not recorded
                        {
                            TnmInput in_key = TnmInput.PRIMARY_SITE;
                            TnmOutput out_key = TnmOutput.DERIVED_VERSION;
                            switch (i)
                            {
                                case 0: in_key = TnmInput.PRIMARY_SITE; break;
                                case 1: in_key = TnmInput.HISTOLOGY; break;
                                case 2: in_key = TnmInput.DX_YEAR; break;
                                case 3: in_key = TnmInput.BEHAVIOR; break;
                                case 4: in_key = TnmInput.GRADE; break;
                                case 5: in_key = TnmInput.SEX; break;
                                case 6: in_key = TnmInput.AGE_AT_DX; break;
                                case 7: in_key = TnmInput.RX_SUMM_SURGERY; break;
                                case 8: in_key = TnmInput.RX_SUMM_RADIATION; break;
                                case 9: in_key = TnmInput.REGIONAL_NODES_POSITIVE; break;
                                case 10: in_key = TnmInput.CLIN_T; break;
                                case 11: in_key = TnmInput.CLIN_N; break;
                                case 12: in_key = TnmInput.CLIN_M; break;
                                case 13: in_key = TnmInput.CLIN_STAGE_GROUP_DIRECT; break;
                                case 14: in_key = TnmInput.PATH_T; break;
                                case 15: in_key = TnmInput.PATH_N; break;
                                case 16: in_key = TnmInput.PATH_M; break;
                                case 17: in_key = TnmInput.PATH_STAGE_GROUP_DIRECT; break;
                                case 18: in_key = TnmInput.SSF1; break;
                                case 19: in_key = TnmInput.SSF2; break;
                                case 20: in_key = TnmInput.SSF3; break;
                                case 21: in_key = TnmInput.SSF4; break;
                                case 22: in_key = TnmInput.SSF5; break;
                                case 23: in_key = TnmInput.SSF6; break;
                                case 24: in_key = TnmInput.SSF7; break;
                                case 25: in_key = TnmInput.SSF8; break;
                                case 26: in_key = TnmInput.SSF9; break;
                                case 27: in_key = TnmInput.SSF10; break;
                                case 28: in_key = TnmInput.SSF11; break;
                                case 29: in_key = TnmInput.SSF12; break;
                                case 30: in_key = TnmInput.SSF13; break;
                                case 31: in_key = TnmInput.SSF14; break;
                                case 32: in_key = TnmInput.SSF15; break;
                                case 33: in_key = TnmInput.SSF16; break;
                                case 34: in_key = TnmInput.SSF17; break;
                                case 35: in_key = TnmInput.SSF18; break;
                                case 36: in_key = TnmInput.SSF19; break;
                                case 37: in_key = TnmInput.SSF20; break;
                                case 38: in_key = TnmInput.SSF21; break;
                                case 39: in_key = TnmInput.SSF22; break;
                                case 40: in_key = TnmInput.SSF23; break;
                                case 41: in_key = TnmInput.SSF24; break;
                                case 42: in_key = TnmInput.SSF25; break;
                                case 43: out_key = TnmOutput.DERIVED_VERSION; break;
                                case 44: out_key = TnmOutput.CLIN_STAGE_GROUP; break;
                                case 45: out_key = TnmOutput.PATH_STAGE_GROUP; break;
                                case 46: out_key = TnmOutput.COMBINED_STAGE_GROUP; break;
                                case 47: out_key = TnmOutput.COMBINED_T; break;
                                case 48: out_key = TnmOutput.COMBINED_N; break;
                                case 49: out_key = TnmOutput.COMBINED_M; break;
                                case 50: out_key = TnmOutput.SOURCE_T; break;
                                case 51: out_key = TnmOutput.SOURCE_N; break;
                                case 52: out_key = TnmOutput.SOURCE_M; break;
                            }
                            if (i <= 42)
                            {
                                input_values.Add(in_key, sVal);
                            }
                            else if (i <= 52)
                            {
                                output_values.Add(out_key, sVal);
                            }
                        }
                    }
                    sExpectedResult = input_strs[53];
                }
                else
                {
                    System.Diagnostics.Trace.WriteLine("Error: Line " + iLineNumber + " has " + input_strs.Length + " entries.");
                }
            }
            if (bStageThis)
            {
                // hand the parsed case to the worker pool
                processedCases++;
                MultiTask_DataObj obj = new MultiTask_DataObj();
                obj.mInputValues = input_values;
                obj.mOutputValues = output_values;
                obj.mbJSONFormat = bJSONFormat;
                obj.msExpectedResult = sExpectedResult;
                obj.msFileName = fileName;
                obj.miLineNum = iLineNumber;
                thisMultiTasksExecutor.AddDataItem(obj);
                // reset the JSON-mode line buffers for the next input/output pair
                input_line = "";
                output_line = "";
            }
        }
        line = reader.ReadLine();
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();

    String perMs = String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim();
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " + TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");
    if (miMultiTask_FailedCases > 0)
    {
        System.Diagnostics.Trace.WriteLine("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        System.Diagnostics.Trace.WriteLine("");
    }
    System.Diagnostics.Trace.WriteLine("-----------------------------------------------");
    inputStream.Close();

    return (new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases));
}
// Worker-thread task body for one TNM staging test case. task_data carries the parsed
// input/expected-output maps (built by processTNMSchema). The case is staged, the result
// status (CSV mode only) and every expected output are compared to actuals, and any
// mismatch is logged and tallied via the shared miMultiTask_FailedCases counter.
public static void MultiTask_TaskCompute(int id, Object task_data)
{
    MultiTask_DataObj thisDataObj = (MultiTask_DataObj)task_data;
    try
    {
        // load up inputs
        TnmStagingData data = new TnmStagingData();
        foreach (KeyValuePair<TnmInput, String> kp in thisDataObj.mInputValues)
        {
            data.setInput(kp.Key, kp.Value);
        }
        // save the expected outputs, keyed by output-field name
        Dictionary<String, String> output = new Dictionary<String, String>(100, StringComparer.Ordinal);
        String sKeyValue = "";
        foreach (KeyValuePair<TnmOutput, String> entry in thisDataObj.mOutputValues)
        {
            sKeyValue = entry.Key.toString();
            output[sKeyValue] = entry.Value;
        }
        // run collaborative stage; if no schema found, set the output to empty
        SchemaLookup lookup = new SchemaLookup(data.getInput(TnmInput.PRIMARY_SITE), data.getInput(TnmInput.HISTOLOGY));
        lookup.setInput(TnmStagingData.SEX_KEY, data.getInput(TnmInput.SEX));
        lookup.setInput(TnmStagingData.SSF25_KEY, data.getInput(TnmInput.SSF25));
        // NOTE(review): no null-check on the lookup result here (the schema-selection worker
        // checks for null) -- confirm lookupSchema cannot return null in this path.
        List<StagingSchema> schemas = mMultiTask_Staging.lookupSchema(lookup);
        if (schemas.Count == 1)
        {
            mMultiTask_Staging.stage(data);
        }
        else
        {
            // ambiguous/no schema: mark the output invalid so comparisons below mismatch
            Dictionary<String, String> outResult = new Dictionary<String, String>(2, StringComparer.Ordinal);
            outResult["schema_id"] = "<invalid>";
            data.setOutput(outResult);
        }
        List<String> mismatches = new List<String>();
        // compare the staging result status against the expected string (CSV mode only;
        // the JSON-format files carry no expected-result column)
        if (!thisDataObj.mbJSONFormat)
        {
            String sNewResultStr = "";
            String sOldResultStr = thisDataObj.msExpectedResult.Trim();
            if (data.getResult() == StagingData.Result.STAGED)
            {
                sNewResultStr = "STAGED";
            }
            else if (data.getResult() == StagingData.Result.FAILED_MISSING_SITE_OR_HISTOLOGY)
            {
                sNewResultStr = "FAILED_MISSING_SITE_OR_HISTOLOGY";
            }
            else if (data.getResult() == StagingData.Result.FAILED_NO_MATCHING_SCHEMA)
            {
                sNewResultStr = "FAILED_NO_MATCHING_SCHEMA";
            }
            else if (data.getResult() == StagingData.Result.FAILED_MULITPLE_MATCHING_SCHEMAS)
            {
                // (spelling matches the library's enum member and the data files)
                sNewResultStr = "FAILED_MULITPLE_MATCHING_SCHEMAS";
            }
            else if (data.getResult() == StagingData.Result.FAILED_INVALID_YEAR_DX)
            {
                sNewResultStr = "FAILED_INVALID_YEAR_DX";
            }
            else if (data.getResult() == StagingData.Result.FAILED_INVALID_INPUT)
            {
                sNewResultStr = "FAILED_INVALID_INPUT";
            }
            if (sNewResultStr != sOldResultStr)
            {
                mismatches.Add(" " + thisDataObj.miLineNum + " --> Result: EXPECTED '" + sOldResultStr + "' ACTUAL: '" + sNewResultStr + "'");
            }
        }
        // compare output: expected vs actual, both trimmed, missing values treated as empty
        foreach (KeyValuePair<String, String> entry in output)
        {
            String expected = "";
            output.TryGetValue(entry.Key, out expected);
            if (expected == null) { expected = ""; }
            expected = expected.Trim();
            String actual = "";
            data.getOutput().TryGetValue(entry.Key, out actual);
            if (actual == null) { actual = ""; }
            actual = actual.Trim();
            if (expected != actual)
            {
                mismatches.Add(" " + thisDataObj.miLineNum + " --> " + entry.Key + ": EXPECTED '" + expected + "' ACTUAL: '" + actual + "'");
            }
        }
        if (mismatches.Count != 0)
        {
            // dump the full context (schema, result, inputs, outputs, engine errors)
            String sSchemaIDValue = data.getSchemaId();
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> [" + sSchemaIDValue + "] Mismatches in " + thisDataObj.msFileName);
            foreach (String mismatch in mismatches)
            {
                IntegrationUtils.WritelineToLog(mismatch);
            }
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " *** RESULT: " + data.getResult());
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> Input: " + IntegrationUtils.convertInputMap(data.getInput()));
            IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> Output: " + IntegrationUtils.convertInputMap(data.getOutput()));
            if (data.getErrors().Count > 0)
            {
                IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> ERRORS: ");
                foreach (Error e in data.getErrors())
                {
                    IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> (" + e.getTable() + ": " + e.getMessage() + ") ");
                }
            }
            Interlocked.Increment(ref miMultiTask_FailedCases);
        }
    }
    catch (Exception e)
    {
        IntegrationUtils.WritelineToLog(" " + thisDataObj.miLineNum + " --> Exception processing " + thisDataObj.msFileName + " : " + e.Message);
        Interlocked.Increment(ref miMultiTask_FailedCases);
    }
    // always counted, pass or fail, so the caller can verify every queued item ran
    Interlocked.Increment(ref miMultiTask_ThreadProcessedCases);
}
// * Process all schema-selection records in the file, fanning each line out to the
// * worker-thread pool. Each line must have exactly 4 comma-separated parts.
// * @param staging Staging object (published to workers via mMultiTask_Staging)
// * @param fileName name of file (used for log messages only)
// * @param inputStream stream of 4-column CSV records
// * @param testLog test-framework log sink, stored into IntegrationUtils.TestReportLog
// * @return IntegrationResult (cases processed, cases failed)
public static IntegrationUtils.IntegrationResult processSchemaSelection_MultiTask(TNMStagingCSharp.Src.Staging.Staging staging, String fileName, Stream inputStream, TestContext testLog)
//throws IOException, InterruptedException
{
    IntegrationUtils.TestReportLog = testLog;

    // go over each file
    int processedCases = 0;
    int iLineNumber = 0;

    MultiTasksExecutor thisMultiTasksExecutor = new MultiTasksExecutor();
    thisMultiTasksExecutor.AddAction(new MultiTasksExecutor.ActionCallBack(MultiTask_TaskCompute));
    // NOTE(review): tasks are started BEFORE the shared statics below are reset; this is
    // presumably safe because workers stay idle until AddDataItem is called -- confirm.
    thisMultiTasksExecutor.StartTasks();

    mMultiTask_Staging = staging;
    miMultiTask_FailedCases = 0;
    miMultiTask_ThreadProcessedCases = 0;

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // loop over each line in the file
    TextReader reader = new StreamReader(inputStream);
    String line = reader.ReadLine();
    while (line != null)
    {
        iLineNumber++;
        if (iLineNumber >= 0) // NOTE(review): always true -- looks like a leftover debug filter
        {
            processedCases++;
            // split the string; important to keep empty trailing values in the resulting array
            String[] parts = line.Split(",".ToCharArray(), 1000);
            if (parts.Length != 4)
            {
                // a malformed record aborts the whole run immediately
                throw new System.InvalidOperationException("Bad record in schema_selection.txt on line number: " + iLineNumber);
            }
            else
            {
                // hand the raw parts (plus the full line, for error messages) to the pool
                MultiTask_DataObj obj = new MultiTask_DataObj();
                obj.mParts = parts;
                obj.msFullLine = line;
                obj.miLineNum = iLineNumber;
                thisMultiTasksExecutor.AddDataItem(obj);
            }
        }
        line = reader.ReadLine();
    }

    thisMultiTasksExecutor.WaitForCompletion();
    stopwatch.Stop();

    String perMs = String.Format("{0,12:F4}", ((float)stopwatch.Elapsed.TotalMilliseconds / processedCases)).Trim();
    IntegrationUtils.WritelineToLog("Completed " + processedCases + " cases for " + fileName + " in " + TNMStaging_UnitTestApp.Src.Staging.IntegrationUtils.GenerateTotalTimeString(stopwatch) + " (" + perMs + " ms/case).");
    IntegrationUtils.WritelineToLog("Threads Completed " + miMultiTask_ThreadProcessedCases + " cases.");
    if (miMultiTask_FailedCases > 0)
    {
        IntegrationUtils.WritelineToLog("There were " + miMultiTask_FailedCases + " failures.");
    }
    else
    {
        IntegrationUtils.WritelineToLog("");
    }
    IntegrationUtils.WritelineToLog("-----------------------------------------------");
    inputStream.Close();

    return (new IntegrationUtils.IntegrationResult(processedCases, miMultiTask_FailedCases));
}