public void testLookupOutputs()
{
    EodSchemaLookup lookup = new EodSchemaLookup("C680", "8590");
    List<StagingSchema> lookups = _STAGING.lookupSchema(lookup);
    Assert.AreEqual(2, lookups.Count);

    StagingSchema schema = _STAGING.getSchema(lookups[0].getId());
    Assert.AreEqual("urethra", schema.getId());

    // build list of output keys
    List<StagingSchemaOutput> outputs = schema.getOutputs();
    HashSet<String> definedOutputs = new HashSet<String>();
    foreach (StagingSchemaOutput o in outputs)
        definedOutputs.Add(o.getKey());

    // test without context
    Assert.IsTrue(definedOutputs.SetEquals(_STAGING.getOutputs(schema)));

    // test with context
    Assert.IsTrue(definedOutputs.SetEquals(_STAGING.getOutputs(schema, lookup.getInputs())));
}
/**
 * Add a schema
 */
public void addSchema(StagingSchema schema)
{
    initSchema(schema);
    _schemas[schema.getId()] = schema;

    // rebuild the cached key list; appending every existing key on each call
    // would accumulate duplicates when a schema is added or replaced
    _SchemaKeys.Clear();
    foreach (KeyValuePair<string, StagingSchema> entry in _schemas)
        _SchemaKeys.Add(entry.Key);
}
public void verifyInputs()
{
    HashSet<String> errors = new HashSet<String>();

    foreach (String id in _STAGING.getSchemaIds())
    {
        StagingSchema schema = _STAGING.getSchema(id);

        // loop over all the inputs returned by processing the schema and make sure
        // they are all part of the main list of inputs on the schema
        foreach (String input in _STAGING.getInputs(schema))
        {
            if (!schema.getInputMap().ContainsKey(input))
                errors.Add("Error processing schema " + schema.getId() + ": Table input '" + input + "' not in master list of inputs");
        }
    }

    assertNoErrors(errors, "input values");
}
// Constructor loads all schemas and sets up the table cache
// @param algorithm algorithm
// @param version version
protected StagingFileDataProvider(String algorithm, String version) : base()
{
    _algorithm = algorithm;
    _version = version;

    // resolve the base directory; fall back to the project Resources folder when
    // running from a build output directory
    String basedir = System.IO.Directory.GetCurrentDirectory() + "\\";
    if (!Directory.Exists(basedir + "Algorithms\\"))
    {
        basedir = System.IO.Directory.GetCurrentDirectory() + "\\..\\..\\..\\";
        if (System.IO.Directory.GetCurrentDirectory().IndexOf("x64") >= 0)
            basedir += "\\..\\";
        basedir += "Resources\\";
    }

    String directory = "";

    // loop over all tables and load them into the table map
    try
    {
        directory = basedir + "Algorithms\\" + algorithm.ToLower() + "\\" + version + "\\tables";
        foreach (String file in readLines(directory + "\\ids.txt"))
        {
            if (file.Length != 0)
            {
                TextReader reader = getStagingInputStream(directory + "\\" + file + ".json");
                StagingTable table;
                using (reader)
                {
                    Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer();
                    table = (StagingTable)serializer.Deserialize(reader, typeof(StagingTable));
                }
                initTable(table);
                _tables[table.getId()] = table;
            }
        }
    }
    catch (IOException e)
    {
        throw new System.InvalidOperationException("IOException reading tables: " + e.Message);
    }

    // loop over all schemas and load them into the schema map
    try
    {
        directory = basedir + "Algorithms\\" + algorithm.ToLower() + "\\" + version + "\\schemas";
        foreach (String file in readLines(directory + "\\ids.txt"))
        {
            if (file.Length != 0)
            {
                TextReader reader = getStagingInputStream(directory + "\\" + file + ".json");
                StagingSchema schema;
                using (reader)
                {
                    Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer();
                    schema = (StagingSchema)serializer.Deserialize(reader, typeof(StagingSchema));
                }
                initSchema(schema);
                _schemas[schema.getId()] = schema;
            }
        }
    }
    catch (IOException e)
    {
        throw new System.InvalidOperationException("IOException reading schemas: " + e.Message);
    }

    GenerateSchemaIds();
    GenerateTableIds();

    // finally, initialize any caches now that everything else has been set up
    invalidateCache();
}
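// A hedged sketch of how the protected constructor above might be exposed by a
// concrete provider; the subclass name and the "cs"/"02.05.50" algorithm/version
// values are illustrative assumptions, not part of this file.
public class CsFileDataProvider : StagingFileDataProvider
{
    // loads tables and schemas from Algorithms\cs\02.05.50\ under the resolved base directory
    public CsFileDataProvider() : base("cs", "02.05.50")
    {
    }
}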
// Initialize data provider
private void init(Stream inStream)
{
    HashSet<String> algorithms = new HashSet<String>();
    HashSet<String> versions = new HashSet<String>();

    using (ZipArchive archive = new ZipArchive(inStream, ZipArchiveMode.Read))
    {
        foreach (ZipArchiveEntry entry in archive.Entries)
        {
            if ((entry.Name.Length == 0) || (!entry.Name.EndsWith(".json")))
                continue;

            if (entry.FullName.StartsWith("tables"))
            {
                String s = extractEntry(entry);
                StagingTable table = Newtonsoft.Json.JsonConvert.DeserializeObject<StagingTable>(s);

                if (DebugSettings.DEBUG_LOADED_TABLES)
                {
                    Debug.WriteLine("Table: ");
                    Debug.WriteLine(table.GetDebugString(" "));
                }

                initTable(table);
                algorithms.Add(table.getAlgorithm());
                versions.Add(table.getVersion());
                _tables[table.getId()] = table;
            }
            else if (entry.FullName.StartsWith("schemas"))
            {
                String s = extractEntry(entry);
                StagingSchema schema = Newtonsoft.Json.JsonConvert.DeserializeObject<StagingSchema>(s);

                if (DebugSettings.DEBUG_LOADED_SCHEMAS)
                {
                    Debug.WriteLine("Schema: ");
                    Debug.WriteLine(schema.GetDebugString(" "));
                }

                initSchema(schema);
                algorithms.Add(schema.getAlgorithm());
                versions.Add(schema.getVersion());
                _schemas[schema.getId()] = schema;
            }
        }
    }

    // verify that all the algorithm names and versions are consistent
    if (algorithms.Count != 1)
        throw new System.InvalidOperationException("Error initializing provider; only a single algorithm should be included in file");
    if (versions.Count != 1)
        throw new System.InvalidOperationException("Error initializing provider; only a single version should be included in file");

    HashSet<String>.Enumerator enumAlg = algorithms.GetEnumerator();
    HashSet<String>.Enumerator enumVer = versions.GetEnumerator();
    enumAlg.MoveNext();
    enumVer.MoveNext();
    _algorithm = enumAlg.Current;
    _version = enumVer.Current;

    GenerateSchemaIds();
    GenerateTableIds();

    // finally, initialize any caches now that everything else has been set up
    invalidateCache();
}
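// A minimal sketch of feeding a packaged algorithm to init(); the file name and the
// surrounding method are illustrative only. Per the checks above, the archive must
// contain exactly one algorithm/version, with JSON entries under "tables" and "schemas".
private void loadFromZipExample()
{
    using (FileStream fs = File.OpenRead("cs-02.05.50.zip"))   // hypothetical file name
    {
        init(fs);
    }
}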
// Stage the passed case.
// @param data all input values are passed in through this object
// @return the same StagingData with output values filled in
public StagingData stage(StagingData data)
{
    // first clear out schema/output/errors/path
    data.setSchemaId(null);
    data.setOutput(new Dictionary<String, String>(100, StringComparer.Ordinal));
    data.setErrors(new List<Error>(100));
    data.setPath(new List<String>(100));

    // make sure site and histology are supplied
    if (data.getInput(StagingData.PRIMARY_SITE_KEY) == null || data.getInput(StagingData.HISTOLOGY_KEY) == null)
    {
        data.setResult(StagingData.Result.FAILED_MISSING_SITE_OR_HISTOLOGY);
        return data;
    }

    // get the schema; if a single schema is not found, return right away with an error
    List<StagingSchema> schemas = lookupSchema(new SchemaLookup(data.getInput()));
    if (schemas.Count != 1)
    {
        data.setResult(schemas.Count == 0
            ? StagingData.Result.FAILED_NO_MATCHING_SCHEMA
            : StagingData.Result.FAILED_MULITPLE_MATCHING_SCHEMAS);
        return data;
    }

    // exactly one schema matched at this point
    StagingSchema schema = schemas[0];

    // add schema id to result
    data.setSchemaId(schema.getId());

    // copy the input into a new context
    Dictionary<String, String> context = new Dictionary<String, String>(data.getInput(), StringComparer.Ordinal);

    // make sure all supplied inputs are defined in the schema definition
    foreach (KeyValuePair<String, String> entry in context)
    {
        if (!schema.getInputMap().ContainsKey(entry.Key))
            data.addError(new Error.ErrorBuilder(Error.Type.UNKNOWN_INPUT).message("Unknown input key supplied: " + entry.Key).key(entry.Key).build());
    }

    if (data.getErrors().Count > 0)
    {
        data.setResult(StagingData.Result.FAILED_INVALID_INPUT);
        return data;
    }

    // add context variables
    addContextKeys(context);

    // check that year of DX is valid
    if (!isContextValid(schema.getId(), StagingData.YEAR_DX_KEY, context))
    {
        data.setResult(StagingData.Result.FAILED_INVALID_YEAR_DX);
        return data;
    }

    // perform the staging
    Result result = _engine.process(schema.getId(), context);

    // remove the context variables
    removeContextKeys(context);

    // set the staging data result based on the Result returned from the DecisionEngine
    if (Result.Type.FAILED_INPUT == result.getType())
        data.setResult(StagingData.Result.FAILED_INVALID_INPUT);
    else
        data.setResult(StagingData.Result.STAGED);

    // remove the original input keys from the resulting context; in addition, remove any
    // input keys that were set with a default value; to accomplish this, remove all keys
    // that are defined as inputs in the selected schema
    foreach (KeyValuePair<String, String> entry in data.getInput())
        context.Remove(entry.Key);
    foreach (StagingSchemaInput input in schema.getInputs())
        context.Remove(input.getKey());

    // add the results to the output
    data.setOutput(result.getContext());
    data.setErrors(result.getErrors());
    data.setPath(result.getPath());

    return data;
}
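// A minimal usage sketch for stage(). The setInput()/getResult()/getSchemaId()
// accessors are assumptions inferred from the setters and getters used above, and
// the input values are illustrative only.
public void stageExample()
{
    StagingData data = new StagingData();
    data.setInput(StagingData.PRIMARY_SITE_KEY, "C680");   // required input (assumed setter)
    data.setInput(StagingData.HISTOLOGY_KEY, "8590");      // required input (assumed setter)

    stage(data);

    if (data.getResult() == StagingData.Result.STAGED)
        Debug.WriteLine("Staged with schema: " + data.getSchemaId());   // getSchemaId() assumed
}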