/// <summary>
/// Verifies that AddRowToDataTable copies a transformed row into the DataTable,
/// converting each string value to the destination column type declared in the
/// FeedFilePlan (int, string by default, bool, float).
/// </summary>
public void AddToDataTableTest()
{
    // Arrange: one TransformMapping per supported destination type; omitting
    // Type (strColumn) exercises the default-to-string path.
    var feedFilePlan = new FeedFilePlan {
        TransformMaps = new [] {
            new TransformMapping { DestCol = "intColumn", SrcColumns = "0", Type = "int", },
            new TransformMapping { DestCol = "strColumn", SrcColumns = "1", },
            new TransformMapping { DestCol = "boolColumn", SrcColumns = "2", Type = "bool", },
            new TransformMapping { DestCol = "floatColumn", SrcColumns = "3", Type = "float", },
        },
    };

    var dataTable = new DataTable ();
    dataTable.Columns.Add (FeedProcessor.LoadNumberString, typeof(Int32));
    dataTable.Columns.Add (FeedProcessor.DataSourceCodeString, typeof(String));
    dataTable.Columns.Add ("intColumn", typeof(int));
    dataTable.Columns.Add ("strColumn", typeof(string));
    dataTable.Columns.Add ("boolColumn", typeof(bool));
    dataTable.Columns.Add ("floatColumn", typeof(float));

    var row = new Dictionary<string, string> {
        { "intColumn", "42" },
        { "strColumn", "foo" },
        { "boolColumn", "true" },
        { "floatColumn", "10.0" },
    };

    // Act
    var rowProcessor = new RowProcessor ();
    rowProcessor.AddRowToDataTable (feedFilePlan, 1, "TST", row, dataTable);

    // Assert: exactly one row, each cell converted to its declared CLR type.
    Assert.AreEqual (1, dataTable.Rows.Count);
    var dataRow = dataTable.Rows [0];
    Assert.IsNotNull (dataRow);
    Assert.AreEqual (42, (int)dataRow ["intColumn"]);
    Assert.AreEqual ("foo", (string)dataRow ["strColumn"]);
    // Idiomatic boolean assertion instead of AreEqual(true, ...).
    Assert.IsTrue ((bool)dataRow ["boolColumn"]);
    Assert.AreEqual (10.0f, (float)dataRow ["floatColumn"]);
}
/// <summary>
/// Creates and sets up the DataTable with column information.
/// </summary>
/// <returns>A new DataTable, with Columns set up, ready to accept new rows.</returns>
/// <param name="feedFilePlan">The Plan for this Feed File; it contains all the
/// TransformMaps and other information necessary to set up the column headers in this
/// DataTable.
/// </param>
private DataTable SetUpDataTable(FeedFilePlan feedFilePlan)
{
    // Supported destination column types, keyed by the plan's Type string.
    var typeByName = new Dictionary<string, Type> {
        { "int", typeof(int) },
        { "long", typeof(long) },
        { "bool", typeof(bool) },
        { "float", typeof(float) },
        { "double", typeof(double) },
        { "DateTime", typeof(DateTime) },
    };

    var table = new DataTable ();

    // Bookkeeping columns common to every feed table.
    table.Columns.Add (IdString, typeof(Int32));
    table.Columns.Add (LoadNumberString, typeof(Int32));
    table.Columns.Add (DataSourceCodeString, typeof(string));

    foreach (var map in feedFilePlan.TransformMaps) {
        Type columnType;
        // Missing or unrecognized Type strings fall back to string,
        // exactly as the original switch's default did.
        if (map.Type == null || !typeByName.TryGetValue (map.Type, out columnType)) {
            columnType = typeof(String);
        }
        table.Columns.Add (map.DestCol, columnType);
    }

    return table;
}
/// <summary>
/// Runs every warning test against the raw row. Warning tests are litmus tests
/// whose failures are non-fatal: the row is copied to the WarnRows table but is
/// still processed.
/// </summary>
/// <returns>The Reason string of each failed warning test; empty if none failed
/// or the plan defines no warning tests.</returns>
/// <param name="row">The raw row.</param>
/// <param name="plan">A FeedFilePlan with Warning Test Descriptors.</param>
List<string> ProcessWarningTests(IList<string> row, FeedFilePlan plan)
{
    var warnings = new List<String>();

    // A plan with no warning tests yields no warnings.
    if (plan.WarningTestDescriptors == null) {
        return warnings;
    }

    foreach (var descriptor in plan.WarningTestDescriptors) {
        if (PerformLitmusTest(row, descriptor)) {
            continue;
        }
        warnings.Add(descriptor.Reason);
    }

    return warnings;
}
/// <summary>
/// Runs the post-row processor methods: optional hooks invoked after a row has
/// processed successfully, each producing some desired side-effect.
/// </summary>
/// <param name="loadNumber">The load number.</param>
/// <param name="parsedInputRow">The raw row.</param>
/// <param name="transformedRow">The complete output row.</param>
/// <param name="plan">A FeedFilePlan with PostRowProcessorDescriptors.</param>
void ProcessPostRowProcessors(int loadNumber, IList<string> parsedInputRow, IDictionary<string, string> transformedRow, FeedFilePlan plan)
{
    var descriptors = plan.PostRowProcessorDescriptors;

    // Post-row processors are optional; nothing to do when none are configured.
    if (descriptors != null) {
        foreach (var descriptor in descriptors) {
            InvokePostRowProcessor(loadNumber, parsedInputRow, transformedRow, descriptor);
        }
    }
}
/// <summary>
/// Processes the litmus tests.
/// </summary>
/// <returns>A reason for failing, on the first test that fails; String.Empty when
/// all tests pass or the plan defines no litmus tests.</returns>
/// <param name="row">The raw row.</param>
/// <param name="plan">A FeedFilePlan with Litmus Test Descriptors.</param>
string ProcessLitmusTests(IList<string> row, FeedFilePlan plan)
{
    // FIX: guard against a plan with no litmus tests. Previously this threw a
    // NullReferenceException, while the parallel ProcessWarningTests already
    // treats a null descriptor list as "nothing to check".
    if (plan.LitmusTestDescriptors == null) {
        return String.Empty;
    }

    foreach (var litmusTestDescriptor in plan.LitmusTestDescriptors) {
        // First failure wins; its Reason is reported for the row.
        if (!PerformLitmusTest(row, litmusTestDescriptor)) {
            return litmusTestDescriptor.Reason;
        }
    }
    return String.Empty;
}
/// <summary>
/// Helper which acquires the parser either from the description in the FeedFilePlan
/// (it favors this one), or if that's missing, from the ReadyToUseSubPlan.
/// </summary>
/// <param name="plan">FeedFilePlan for this file.</param>
/// <returns>An IParser instance, configured with the descriptor's Properties.</returns>
/// <exception cref="InvalidOperationException">Thrown when neither the plan nor the
/// ReadyToUseSubPlan supplies a parser descriptor.</exception>
private IParser AcquireParser(FeedFilePlan plan)
{
    // The plan's own parser wins; the ready-to-use sub-plan is the fallback.
    var parserDescriptor = plan.Parser ?? ReadyToUseSubPlan.ParserDescriptor;
    if (parserDescriptor == null) {
        // FIX: throw the specific InvalidOperationException rather than the bare
        // Exception base class; callers catching Exception are unaffected.
        throw new InvalidOperationException(String.Format("AcquireParser - no parser defined for" +
            " file {0}, either in feed file plan or in readyToUsePlan", plan.FileName));
    }

    // Instantiate the parser by its fully-qualified assembly name via reflection.
    var parser = (IParser)Activator.CreateInstance(
        Utilities.GetTypeFromFqName(parserDescriptor.Assembly));
    parser.Properties = parserDescriptor.Properties;
    return parser;
}
/// <summary>
/// Shared fixture setup: deserializes the TestFeedFilePlan.xml resource into the
/// feedFilePlan field and builds the method-resolver map used by the tests.
/// </summary>
public void SetupForAllTests()
{
    var util = new TestUtils ();
    var xmlStr = util.GetResourceTextFile ("TestFeedFilePlan.xml");

    var serializer = new XmlSerializer (typeof(FeedFilePlan));
    using (var reader = new StringReader (xmlStr)) {
        feedFilePlan = (FeedFilePlan)serializer.Deserialize (reader);
    }

    methodResolver = new Dictionary<string, IMethodResolver> {
        { "zz", new TestMethodResolver () },
    };
}
/// <summary>
/// This function is called once for each row. It attempts to obtain a designated
/// foreign Id for the row, i.e., the piece of data that will be most recognizable to
/// the purveyor of the feed that will help them identify a bad row. It repurposes
/// the same codepath that does general destination value calculation, so that it can
/// use transform functions and lookups.
/// </summary>
/// <param name="loadNumber">The LoadNumber.</param>
/// <param name="dataSourceCode">The DataSourceCode for the feed.</param>
/// <param name="plan">The FeedFilePlan drives the FeedFile integration.</param>
/// <param name="rowNumber">The numeric index of the row we're processing.</param>
/// <param name="methodResolvers">Method Resolver collection.</param>
/// <param name="existenceObjects">Existence Objects collection.</param>
/// <param name="lookups">Lookup objects collection.</param>
/// <param name="row">The raw data for the row.</param>
/// <returns>ForeignId string, truncated to maxForeignKeyLen; on computation failure,
/// the exception message is returned in its place.</returns>
public static string GetForeignId(int loadNumber, string dataSourceCode, FeedFilePlan plan,
    int rowNumber, IDictionary<string, IMethodResolver> methodResolvers,
    IDictionary<string, IExistence> existenceObjects, IDictionary<string, ILookup> lookups,
    IList<string> row)
{
    if (plan.ForeignIdDescriptor == null) {
        throw new Exception (String.Format (
            "GetForeignId - no foreignId found for DataSourceCode {0}, this is fatal",
            dataSourceCode));
    }

    // We pass null for the 'row-so-far' argument, since it's meaningless at this
    // point. This is all before the row calculation has really begun; it's
    // preparatory in order to generate a foreign id for the bad rows table.
    try {
        var foreignKey = ComputeDestinationValue(loadNumber, rowNumber, row,
            methodResolvers, existenceObjects, lookups, plan.ForeignIdDescriptor, null);

        // Truncate over-long keys, reserving room for the truncation marker.
        if (foreignKey.Length > maxForeignKeyLen) {
            foreignKey = foreignKey.Substring(0, maxForeignKeyLen - truncatedMessage.Length)
                + truncatedMessage;
        }
        return foreignKey;
    } catch (Exception ex) {
        // FIX: was String.Format(ex.Message), which treats the message as a format
        // string — any '{' or '}' in it would throw FormatException inside this
        // catch. Return the message verbatim instead.
        return ex.Message;
    }
}