public ApiResponse<DataItemModel> Sample([FromRoute] Guid packageId, [FromRoute] Guid id, [FromBody] DataItemDefinitionModel request)
{
    // Create the response object
    ApiResponse<DataItemModel> result = new ApiResponse<DataItemModel>() { Success = false };

    // Did we find a package?
    Package package = SessionHandler.PackageRepository.Get(packageId);
    if (package != null)
    {
        // Get the connection from the current package by the id given
        DataConnection connection = package.DataConnection(id);

        // Get the definition from the model provided
        if (request?.IsValid ?? false)
        {
            DataItemDefinition dataDefinition = mapper.Map<DataItemDefinition>(request);

            // Get the sample result
            result = SampleConnection(package, connection, dataDefinition, 10);
        }
        else
        {
            result.Success = false;
        }
    }

    return result;
}
public void Merge_Files_2_Part_Primary_Key()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataConnection connection = testHelper.TestConnection(); // Get a test connection

    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(connection);
    propertyBagHelper.Set<Int32>(PropertyBagItemTypeEnum.RowsToSkip, 1);
    propertyBagHelper.Set<Boolean>(PropertyBagItemTypeEnum.HasHeaderRecord, true);

    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_GenericFixedWidth); // Get the test definition of what to merge from (but also to)
    DataTable baseData = testHelper.PopulateDataTable(TestHelper.TestFile_GenericFixedWidth, connection); // Get the data
    DataTable mergeData = testHelper.PopulateDataTable(TestHelper.TestFile_MergeData, connection); // Get the data
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new FixedWidthFileProvider(); // A flat file provider to use to write the data

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(baseData, ""); // Write the data to the empty stream
    provider.Write(mergeData, ""); // Write some more records with some updates and some adds
    DataTable mergedData = provider.Read(""); // Get the new data set back

    // Assert
    Assert.True(mergedData.Rows.Count == (baseData.Rows.Count + mergeData.Rows.Count) - 1); // One merged row shares a primary key with the base data, so expect one less than the sum
}
private static DataItemDefinition Create(string deviceId, long agentInstanceId, string parentId, string parentType, MTConnectDevices.DataItem dataItem)
{
    var obj = new DataItemDefinition();

    // TrakHound Properties
    obj.DeviceId = deviceId;
    obj.ParentId = parentId;
    obj.ParentType = parentType;

    // MTConnect Properties
    obj.AgentInstanceId = agentInstanceId;
    obj.Id = dataItem.Id;
    obj.Name = dataItem.Name;
    obj.Category = dataItem.Category.ToString();
    obj.Type = dataItem.Type;
    obj.SubType = dataItem.SubType;
    obj.Statistic = dataItem.Statistic;
    obj.Units = dataItem.Units;
    obj.NativeUnits = dataItem.NativeUnits;
    obj.NativeScale = dataItem.NativeScale;
    obj.CoordinateSystem = dataItem.CoordinateSystem;
    obj.SampleRate = dataItem.SampleRate;
    obj.Representation = dataItem.Representation;
    obj.SignificantDigits = dataItem.SignificantDigits;

    return obj;
}
public void Date_Read()
{
    // Arrange
    DataItemProperty property = new DataItemProperty()
    {
        DataType = typeof(DateTime),
        Name = "DateTimeTest",
        OrdinalPosition = -1,
        Size = 0,
        Pattern = "dd MMM yyyy"
    };
    DataItemDefinition definition = new DataItemDefinition() { };

    // Act
    Object rawValue = DataFormatHelper.ReadData("23 Oct 1984", property, definition);
    DateTime value = (rawValue != DBNull.Value) ? (DateTime)rawValue : DateTime.MinValue;

    // Assert
    Assert.True(value != DateTime.MinValue && value.Day == 23 && value.Month == 10 && value.Year == 1984);
}
/// <summary>
/// Convert the JObject to a data row
/// </summary>
/// <param name="json">The JObject to convert</param>
/// <param name="apiDefinition">The API definition holding any property aliases</param>
/// <param name="definition">The data item definition describing the columns</param>
/// <param name="table">The table that the new row should belong to</param>
public static DataRow ToDataRow(JObject json, ApiDefinition apiDefinition, DataItemDefinition definition, DataTable table)
{
    DataRow result = table.NewRow(); // Create a new row to populate

    // Loop the properties in the data definition object
    definition.ItemProperties.ForEach(property =>
    {
        // Check and see if there is an alias
        String alias = apiDefinition.Aliases.Where(a => a.Key == property.Name).FirstOrDefault().Value;

        // Check and see if the property exists in the json object
        // with the alias if one is given, but the name if one is not
        JProperty found = json.Children<JProperty>()
            .FirstOrDefault(prop => prop.Name == property.Name || prop.Name == (alias ?? String.Empty));

        // Found the property?
        if (found != null)
        {
            Object parsedValue = DataFormatHelper.ReadData(found.Value.ToString(), property, definition);
            result[property.Name] = parsedValue;
        }
    });

    // Send back the row
    return result;
}
public ApiResponse<DataItemDefinitionModel> Post([FromRoute] Guid packageId, [FromBody] DataItemDefinitionModel request)
{
    // Create the response object
    ApiResponse<DataItemDefinitionModel> response = new ApiResponse<DataItemDefinitionModel>();

    // Map the model to a domain object type
    DataItemDefinition savedDataItemDefinition = mapper.Map<DataItemDefinition>(request);

    // Did the mapping work ok?
    if (savedDataItemDefinition != null)
    {
        // Did we find a package?
        Package package = SessionHandler.PackageRepository.Get(packageId);
        if (package != null)
        {
            // Get the repository to save the package for us
            savedDataItemDefinition = package.Save<DataItemDefinition>(savedDataItemDefinition);
        }

        // Saved ok?
        if (savedDataItemDefinition != null)
        {
            // Map the data definition back to a model type and send it back to the user
            response.Data = mapper.Map<DataItemDefinitionModel>(savedDataItemDefinition);
        }

        // Nothing failed, so mark the response as a success
        response.Success = true;
    }

    // Send the response back
    return response;
}
/// <summary>
/// Convert a data table to a delimited flat-file string using the definition and connection settings
/// </summary>
/// <param name="definition">The data item definition describing the columns</param>
/// <param name="connection">The connection holding the property bag settings</param>
/// <param name="dataTable">The data table to convert</param>
/// <returns>The formatted flat-file content</returns>
public static String DataTableToString(DataItemDefinition definition, DataConnection connection, DataTable dataTable)
{
    String result = ""; // New empty string to populate

    // Create a helper to read the property bag items
    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(connection);

    // Get the stream from the file
    using (MemoryStream textStream = new MemoryStream())
    {
        // Set up the writer
        StreamWriter streamWriter = new StreamWriter(textStream);
        using (CsvWriter writer = SetupWriter(definition, streamWriter))
        {
            // Do we need to write a header?
            if (propertyBagHelper.Get<Boolean>(PropertyBagItemTypeEnum.HasHeaderRecord, false))
            {
                // Loop the header records and output the header record line manually
                definition.ItemProperties
                    .Where(prop => prop.PropertyType == DataItemPropertyType.Property)
                    .ToList()
                    .ForEach(header =>
                    {
                        writer.WriteField(header.Name);
                    });

                // Move to the next line and flush the data
                writer.NextRecord();
                streamWriter.Flush();
            }

            // Loop through the actual records and add them to the csv
            foreach (DataRow row in dataTable.Rows)
            {
                // Loop the columns and output each field for this row
                definition.ItemProperties
                    .Where(prop => prop.PropertyType == DataItemPropertyType.Property)
                    .ToList()
                    .ForEach(property =>
                    {
                        writer.WriteField(DataFormatHelper.WriteData(row[property.Name], property, definition), property.Quoted);
                    });

                // Move to the next line and flush the data
                writer.NextRecord();
                streamWriter.Flush();
            }

            // Put the data back in the buffer
            textStream.Position = 0;
            result = (new StreamReader(textStream)).ReadToEnd() ?? "";
        }
    }

    return result; // Send the formatted flat file data back
}
/// <summary>
/// Convert a data table to a flat-file string using the definition and connection settings
/// </summary>
/// <param name="definition">The data item definition describing the columns</param>
/// <param name="connection">The connection holding the property bag settings</param>
/// <param name="dataTable">The data table to convert</param>
/// <returns>The formatted flat-file content</returns>
public static String DataTableToString(DataItemDefinition definition, DataConnection connection, DataTable dataTable)
{
    String result = ""; // New empty string to populate

    // Get the stream from the file
    using (MemoryStream textStream = new MemoryStream())
    {
    }

    return result; // Send the formatted flat file data back
}
/// <summary>
/// Take some raw Json text and convert it to a datatable for the provider
/// to handle e.g. updates or insertions
/// </summary>
/// <param name="contentType">The MIME content type of the raw data</param>
/// <param name="json">The raw data to parse</param>
/// <param name="apiDefinition">The API definition holding any property aliases</param>
/// <param name="dataItemDefinition">The data item definition describing the columns</param>
/// <returns>A datatable with the given format</returns>
public static DataTable ToDataTable(String contentType, String json, ApiDefinition apiDefinition, DataItemDefinition dataItemDefinition)
{
    // Translate the content type
    switch (contentType.Trim().ToLower())
    {
        case "application/json":

            // Try to parse the raw data as an object first, then fall back to an array
            JObject jObjectParsed = null;
            JArray jArrayParsed = null;
            try
            {
                jObjectParsed = JObject.Parse(json);
            }
            catch
            {
                try
                {
                    jArrayParsed = JArray.Parse(json);
                }
                catch
                {
                }
            }

            // Was it an object?
            if (jObjectParsed != null)
            {
                return ToDataTable(jObjectParsed, apiDefinition, dataItemDefinition);
            }
            else if (jArrayParsed != null)
            {
                return ToDataTable(jArrayParsed, apiDefinition, dataItemDefinition);
            }
            else
            {
                return null; // Failed
            }

        case "application/xml":
        case "text/xml":
            break;
    }

    // Catch all (including errors)
    return null;
}
private static DataItem ConvertDataItem(DataItemDefinition definition)
{
    var dataItem = new DataItem();

    // Copy each property declared on DataItem across from the definition
    foreach (var property in typeof(DataItem).GetProperties())
    {
        var value = property.GetValue(definition, null);
        property.SetValue(dataItem, value, null);
    }

    return dataItem;
}
/// <summary>
/// Set up a new csv writer based on the definition given
/// </summary>
/// <param name="definition">The data item definition</param>
/// <param name="textWriter">The text writer to inject in to the CSV writer</param>
/// <returns>The newly configured CSV writer</returns>
private static CsvWriter SetupWriter(DataItemDefinition definition, TextWriter textWriter)
{
    // Create the new writer
    CsvWriter writer = new CsvWriter(textWriter);

    // Create a helper to read the property bag items
    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(definition);

    // Force all fields to be quoted or not
    writer.Configuration.QuoteAllFields =
        propertyBagHelper.Get<Boolean>(PropertyBagItemTypeEnum.QuoteAllFields, false);

    return writer;
}
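A rough usage sketch of the writer setup above, shown only for illustration: it assumes PropertyBagHelper.Set<Boolean> also accepts the QuoteAllFields key (the Set calls elsewhere in these samples only show RowsToSkip and HasHeaderRecord), and the StringWriter and sample field text are not from the source.

public void QuoteAllFields_Sketch()
{
    // Build a definition whose property bag forces quoting (assumed Set overload)
    DataItemDefinition definition = new DataItemDefinition() { };
    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(definition);
    propertyBagHelper.Set<Boolean>(PropertyBagItemTypeEnum.QuoteAllFields, true);

    using (StringWriter textWriter = new StringWriter())
    using (CsvWriter writer = SetupWriter(definition, textWriter))
    {
        writer.WriteField("Hello, World"); // Should be written quoted because of the flag above
        writer.NextRecord();

        // textWriter.ToString() now holds the quoted record
    }
}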
/// <summary>
/// Analyse a connection object to give back the result as a model
/// for the UI to consume
/// </summary>
/// <param name="packageId">The package that the connection belongs to</param>
/// <param name="connection">The connection to analyse</param>
/// <returns>The data item model for the UI to consume</returns>
private ApiResponse<DataItemModel> AnalyseConnection([FromRoute] Guid packageId, DataConnection connection)
{
    ApiResponse<DataItemModel> result = new ApiResponse<DataItemModel>()
    {
        Success = false,
        Data = new DataItemModel()
    }; // The result to return

    // Valid connection?
    if (connection != null)
    {
        try
        {
            // Did we find a package?
            Package package = SessionHandler.PackageRepository.Get(packageId);
            if (package != null)
            {
                // Get the appropriate provider object to analyse
                IDataProvider provider = (new DataProviderFactory()).Get(package, connection, false);

                // Get the results of the analysis of this connection
                DataItemDefinition definition = provider.Analyse(new AnalyseRequest<object>()
                {
                    Data = null,
                    Connection = connection
                });

                // Did we get a result back?
                if (definition.ItemProperties.Count != 0)
                {
                    // Assign the definition to the result
                    result.Data.Definition = mapper.Map<DataItemDefinitionModel>(definition);
                    result.Success = true;
                }
            }
        }
        catch (Exception ex)
        {
            result.Messages = new List<String>() { ex.Message };
            result.Success = false;
        }
    }

    return result; // Send the result back
}
/// <summary>
/// Take some raw Json text and convert it to a datatable for the provider
/// to handle e.g. updates or insertions
/// </summary>
/// <param name="json">A JObject (Queryable) representation of the json data</param>
/// <param name="apiDefinition">The API definition holding any property aliases</param>
/// <param name="dataItemDefinition">The data item definition describing the columns</param>
/// <returns>A datatable with the given format</returns>
public static DataTable ToDataTable(JObject json, ApiDefinition apiDefinition, DataItemDefinition dataItemDefinition)
{
    // Create the table from the definition
    DataTable result = dataItemDefinition.ToDataTable();

    // Call the data conversion for the root object
    DataRow row = ToDataRow(json, apiDefinition, dataItemDefinition, result);
    if (row != null)
    {
        result.Rows.Add(row); // Add the row to the results table
    }

    return result;
}
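A minimal sketch of how the JObject overload above might be exercised. The column name "Name", the JSON payload, and the assumption that a default ApiDefinition starts with an initialised (empty) Aliases collection are all illustrative and not taken from the source.

public void ToDataTable_JObject_Sketch()
{
    // Define a single string column for the incoming data
    DataItemDefinition definition = new DataItemDefinition() { };
    definition.ItemProperties.Add(new DataItemProperty()
    {
        Name = "Name",
        DataType = typeof(String),
        PropertyType = DataItemPropertyType.Property
    });

    // Parse a small JSON object and convert it using the definition (no aliases assumed)
    JObject json = JObject.Parse(@"{ ""Name"": ""Widget"" }");
    DataTable table = ToDataTable(json, new ApiDefinition() { }, definition);

    // The table should now contain one row with the "Name" column populated
}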
public void Analyse_Column_Numbers_From_String()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    String testData = testHelper.GetResourceString(TestHelper.TestFile_Headers);

    // Act
    DataItemDefinition definition = DelimitedFileHelper.AnalyseText(
        new AnalyseRequest<String>()
        {
            Data = testData,
            Connection = testHelper.TestConnection()
        });

    // Assert
    Assert.Equal(3, definition.ItemProperties.Count);
}
/// <summary>
/// Take some raw Json text and convert it to a datatable for the provider
/// to handle e.g. updates or insertions
/// </summary>
/// <param name="json">A JArray (Queryable) representation of the json data</param>
/// <param name="apiDefinition">The API definition holding any property aliases</param>
/// <param name="dataItemDefinition">The data item definition describing the columns</param>
/// <returns>A datatable with the given format</returns>
public static DataTable ToDataTable(JArray json, ApiDefinition apiDefinition, DataItemDefinition dataItemDefinition)
{
    // Create the table from the definition
    DataTable result = dataItemDefinition.ToDataTable();

    // For each item in the array, call the data conversion
    foreach (JObject item in json.Children())
    {
        DataRow row = ToDataRow(item, apiDefinition, dataItemDefinition, result);
        if (row != null)
        {
            result.Rows.Add(row); // Add the row to the results table
        }
    }

    return result;
}
/// <summary>
/// Read the raw data file and populate the in-memory data table with it
/// </summary>
/// <param name="definition">The data item definition describing the columns</param>
/// <param name="connection">The connection holding the property bag settings</param>
/// <param name="rawData">The raw flat file data from wherever it came from</param>
/// <returns>The populated data table</returns>
public static DataTable TextToDataTable(DataItemDefinition definition, DataConnection connection, String rawData)
{
    // Create a list of data items to return
    DataTable dataItems = definition.ToDataTable();

    // Create a helper to read the property bag items
    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(connection);

    // Raw data has something to convert?
    if ((rawData ?? "") != "")
    {
        // Open up a text reader to stream the data to the CSV Reader
        using (TextReader textReader = new StringReader(rawData))
        {
            // Get properties needed to process the file (total lines to skip etc.)
            Int32 lineNo = 0;
            Int32 linesToSkip =
                propertyBagHelper.Get<Int32>(PropertyBagItemTypeEnum.RowsToSkip, 0) +
                (propertyBagHelper.Get<Boolean>(PropertyBagItemTypeEnum.HasHeaderRecord, false) ? 1 : 0);

            // Loop each line of the file (ignoring lines that do not need to be processed)
            String line = "";
            while ((line = textReader.ReadLine()) != null)
            {
                // Is this a line we should be processing?
                if (lineNo >= linesToSkip)
                {
                    DataRow row = dataItems.NewRow(); // The new row to populate based on the definition

                    // Process the row
                    if (ProcessRow(line, row, definition))
                    {
                        dataItems.Rows.Add(row); // Add the row if processing was successful
                    }
                }

                lineNo++; // Increment the line number counter
            }
        }
    }

    return dataItems; // Send the datatable back
}
public ApiResponse<Boolean> Delete([FromRoute] Guid packageId, [FromRoute] Guid id)
{
    // Create the response object
    ApiResponse<Boolean> response = new ApiResponse<Boolean>();

    // Did we find a package?
    Package package = SessionHandler.PackageRepository.Get(packageId);
    if (package != null)
    {
        // Get the item from the repository to make sure that it is
        // not attached to other things
        DataItemDefinition dataDefinition = package.DataDefinition(id);
        response.Success = response.Data = package.Delete<DataItemDefinition>(id);
    }

    // Return the response
    return response;
}
public void Write_DataTypes()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_GenericFixedWidth); // Get the test definition of what to write
    DataTable dataToWrite = testHelper.PopulateDataTable(TestHelper.TestFile_GenericFixedWidth); // Get the data
    DataTable dataToRead = null; // Table to read the data back in to (to verify it was created)
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new FixedWidthFileProvider(); // A flat file provider to use to write the data

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(dataToWrite, ""); // Write the data to the empty stream
    dataToRead = provider.Read(""); // Get the data back

    // Assert
    Assert.True(dataToRead.Rows.Count != 0);
}
public void Analyse_BigData_ColumnCount()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    String file = testHelper.GetResourceString(TestHelper.TestFile_BigFileSalesRecords);
    DataConnection connection = testHelper.TestConnection();
    DelimitedFileProvider provider = new DelimitedFileProvider();

    // Act
    DataItemDefinition definition = provider.Analyse(
        new AnalyseRequest<object>
        {
            Data = file,
            Connection = connection
        });

    // Assert
    Assert.Equal(14, definition.ItemProperties.Count);
}
public DataTable PopulateDataTable(String testDefinition, DataConnection connection)
{
    // Get the test data from the resource in the manifest
    Stream resourceStream = GetResourceStream(testDefinition);

    // Get the test definition (The columns, data types etc. for this file)
    DataItemDefinition definition = TestDefinition(testDefinition);

    // Create a new flat file provider
    IDataProvider provider = new FixedWidthFileProvider()
    {
        TestMode = true // The provider should be marked as being in test mode
    };

    provider.Connect(definition, connection, resourceStream); // Connect to the location of the data

    // Read the data from the provider
    DataTable data = provider.Read(""); // Get the data

    // Return the data table
    return data;
}
public void String_Read()
{
    // Arrange
    DataItemProperty property = new DataItemProperty()
    {
        DataType = typeof(String),
        Name = "StringTest",
        OrdinalPosition = -1,
        Size = 0
    };
    DataItemDefinition definition = new DataItemDefinition() { };

    // Act
    Object value = DataFormatHelper.ReadData("This Is A String", property, definition);

    // Assert
    Assert.True(value != DBNull.Value && (String)value == "This Is A String");
}
public void Boolean_Read()
{
    // Arrange
    DataItemProperty property = new DataItemProperty()
    {
        DataType = typeof(Boolean),
        Name = "BooleanTest",
        OrdinalPosition = -1,
        Size = 0
    };
    DataItemDefinition definition = new DataItemDefinition() { };

    // Act
    Object rawValue = DataFormatHelper.ReadData("true", property, definition);
    Boolean value = (rawValue != DBNull.Value) ? (Boolean)rawValue : false;

    // Assert
    Assert.True(value);
}
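A companion sketch for numeric parsing, following the same Arrange/Act/Assert shape as the String and Boolean tests above. The test name, the literal "1984", and the assumption that DataFormatHelper.ReadData returns a boxed numeric for typeof(int) properties (as the type-detection test below suggests) are illustrative rather than taken from the source.

public void Integer_Read()
{
    // Arrange
    DataItemProperty property = new DataItemProperty()
    {
        DataType = typeof(int),
        Name = "IntegerTest",
        OrdinalPosition = -1,
        Size = 0
    };
    DataItemDefinition definition = new DataItemDefinition() { };

    // Act
    Object rawValue = DataFormatHelper.ReadData("1984", property, definition);
    Int32 value = (rawValue != DBNull.Value) ? Convert.ToInt32(rawValue) : 0; // Convert.ToInt32 avoids assuming the exact boxed numeric type

    // Assert
    Assert.True(value == 1984);
}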
public void Analyse_Column_DataType_From_String()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    String testData = testHelper.GetResourceString(TestHelper.TestFile_DataTypes);

    // Act
    DataItemDefinition definition = DelimitedFileHelper.AnalyseText(
        new AnalyseRequest<string>()
        {
            Data = testData,
            Connection = testHelper.TestConnection()
        });

    // Assert
    Assert.Equal(5, definition.ItemProperties.Count);
    Assert.Equal(typeof(String), definition.ItemProperties[0].DataType);
    Assert.Equal(typeof(DateTime), definition.ItemProperties[1].DataType);
    Assert.Equal(typeof(int), definition.ItemProperties[2].DataType);
    Assert.Equal(typeof(String), definition.ItemProperties[3].DataType);
    Assert.Equal(typeof(int), definition.ItemProperties[4].DataType);
}
public void Filter_Records_With_Command()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_GenericFixedWidth); // Get the test definition of what data to filter
    DataTable unfilteredData = testHelper.PopulateDataTable(TestHelper.TestFile_GenericFixedWidth); // Get the data
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new FixedWidthFileProvider(); // A flat file provider to use to write the data
    String command = "[GL Account] = '3930621977'"; // The command to do the filter

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(unfilteredData, ""); // Write the unfiltered data to the empty stream
    DataTable filteredData = provider.Read(command); // Get the new data set back that has been filtered

    // Assert
    Assert.True(filteredData.Rows.Count == 1); // Expect 1 row in the filtered set
}
public void Analyse_BigData_RowCount()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    Stream file = testHelper.GetResourceStream(TestHelper.TestFile_BigFileSalesRecords);
    DataConnection connection = testHelper.TestConnection();
    DelimitedFileProvider provider = new DelimitedFileProvider();

    // Act
    AnalyseRequest<Object> analysisRequest = new AnalyseRequest<Object>()
    {
        Data = file,
        Connection = connection
    };
    DataItemDefinition definition = provider.Analyse(analysisRequest);
    provider.Connect(definition, analysisRequest.Connection, file);
    DataTable data = provider.Read("");

    // Assert
    Assert.Equal(5000, data.Rows.Count);
}
public void Merge_Files_2_Part_Primary_Key()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_PKMergeFrom); // Get the test definition of what to merge from (but also to)
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataTable baseData = testHelper.PopulateDataTable(TestHelper.TestFile_PKMergeTo); // Get the data
    DataTable mergeData = testHelper.PopulateDataTable(TestHelper.TestFile_PKMergeFrom); // Get the data
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new DelimitedFileProvider(); // A flat file provider to use to write the data

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(baseData, ""); // Write the data to the empty stream
    provider.Write(mergeData, ""); // Write some more records with some updates and some adds
    DataTable mergedData = provider.Read(""); // Get the new data set back

    // Assert
    Assert.True(mergedData.Rows.Count == 6); // Of the 8 rows written in total, 2 should merge, leaving 6
}
/// <summary>
/// Process a line using a definition to get a data row
/// </summary>
/// <param name="line">The raw line of text to process</param>
/// <param name="row">The data row to populate</param>
/// <param name="definition">The data item definition describing the columns</param>
/// <returns>If the row was processed successfully</returns>
public static Boolean ProcessRow(String line, DataRow row, DataItemDefinition definition)
{
    Boolean result = true; // Define the default result as a success

    // Loop the properties that are not calculated items etc.
    definition.ItemProperties
        .Where(item => item.PropertyType == DataItemPropertyType.Property)
        .ToList()
        .ForEach(item =>
        {
            // Get the raw value from the line by position and length
            String rawValue = CutString(line, item.OrdinalPosition, item.Size);
            if (rawValue != null)
            {
                // Cast the data to the appropriate type using the common rule set
                Object value = DataFormatHelper.ReadData(rawValue, item, definition);
                row[item.Name] = value;
            }
        });

    // Return the result
    return result;
}
/// <summary>
/// Analyse some raw data and work out how it should be defined
/// (the columns, their data types and the culture of the data)
/// </summary>
/// <param name="request">The analysis request containing the raw data and connection</param>
/// <returns>The derived data item definition</returns>
public static DataItemDefinition AnalyseText(AnalyseRequest<String> request)
{
    // Start with a blank definition
    DataItemDefinition result = new DataItemDefinition()
    {
        Culture = CultureInfo.InvariantCulture
    };
    Boolean ambiguousCulture = true;

    // Raw data has something to convert?
    if ((request.Data ?? "") != "")
    {
        // Open up a text reader to stream the data to the CSV Reader
        using (TextReader textReader = new StringReader(request.Data))
        {
            // Create an instance of the CSV Reader
#warning "This no longer works as we need a definition by default, might be worth getting a default reader somehow"
            using (CsvReader csvReader = SetupReader(textReader, request.Connection, null))
            {
                // Can we read from the stream?
                Int32 headerId = 0;
                if (csvReader.Read())
                {
                    // Read in the "headers", this may not actually be the headers
                    // but is a quick way for us to separate the amount of columns
                    csvReader.ReadHeader();

                    // Parse the header records so that they do not include enclosing quotes
                    headerId = 0;
                    while (headerId < csvReader.Context.HeaderRecord.Length)
                    {
                        // Clean the header
                        csvReader.Context.HeaderRecord[headerId] =
                            DataFormatHelper.CleanString(
                                csvReader.Context.HeaderRecord[headerId],
                                csvReader.Configuration.Quote);

                        // Add a new property to the definition
                        result.ItemProperties.Add(new DataItemProperty()
                        {
                            Calculation = "",
                            DataType = typeof(String),
                            Description = $"Column {headerId}",
                            Name = csvReader.Context.HeaderRecord[headerId],
                            Key = false,
                            OrdinalPosition = headerId,
                            Path = csvReader.Context.HeaderRecord[headerId],
                            Pattern = "",
                            Size = 0,
                            PropertyType = DataItemPropertyType.Property
                        });

                        headerId++; // Move to the next header
                    }
                }

                // Now we have the base columns, let's find out what their actual data type
                // is and what culture the data is in (if we have dates etc.)
                Int32 sampleCount = 0;
                while (csvReader.Read() && sampleCount < request.SampleSize)
                {
                    // For each of the properties that we found
                    result.ItemProperties.ForEach(property =>
                    {
                        // Try and get the raw value for this column
                        if (GetField<String>(csvReader, property, out String rawValue))
                        {
                            // Derive the data type
                            Type gatheredType = DataFormatHelper.CalculateType(
                                DataFormatHelper.CleanString(rawValue, csvReader.Configuration.Quote));

                            // If the type that we just gathered from the data source is
                            // different to the one that we have currently found
                            // (headers will default to text anyway)
                            if (gatheredType != property.DataType && gatheredType != typeof(String))
                            {
                                // Set the new property type
                                property.DataType = gatheredType;

                                // Is the culture still ambiguous? Keep checking specific data points
                                // to determine the culture
                                if (ambiguousCulture)
                                {
                                    switch (property.DataType.ToShortName())
                                    {
                                        case "datetime":

                                            // Attempt to get the culture of the date field
                                            CultureCheck gatheredCulture = DataFormatHelper.FieldCulture<DateTime>(rawValue);
                                            if (gatheredCulture.Culture != CultureInfo.InvariantCulture &&
                                                !gatheredCulture.AmbigiousResult &&
                                                gatheredCulture.Culture != result.Culture)
                                            {
                                                result.Culture = gatheredCulture.Culture;
                                                ambiguousCulture = false;
                                            }

                                            break;
                                    }
                                }
                            }
                        }
                    });

                    // Move to the next sample item (if possible)
                    sampleCount++;
                }
            }
        }
    }

    return result; // Send the definition back
}
/// <summary>
/// Create and set up a csv reader based on the data item definition given
/// </summary>
/// <param name="textReader">The text reader to inject in to the CSV reader</param>
/// <param name="connection">The connection holding the property bag settings</param>
/// <param name="definition">The definition of the file</param>
/// <returns>The newly configured CSV Reader</returns>
private static CsvReader SetupReader(TextReader textReader, DataConnection connection, DataItemDefinition definition)
{
    // Produce a new CSV Reader
    CsvReader result = new CsvReader(textReader);

    // Create a helper to read the property bag items
    PropertyBagHelper propertyBagHelper = new PropertyBagHelper(connection);

    // Configure the CSV Reader
    result.Configuration.HasHeaderRecord =
        (definition == null) ? true :
        propertyBagHelper.Get<Boolean>(PropertyBagItemTypeEnum.HasHeaderRecord, true);
    result.Configuration.BadDataFound = null; // Don't pipe bad data
    result.Configuration.CultureInfo =
        (definition == null) ? CultureInfo.CurrentCulture : definition.Culture;
    result.Configuration.TrimOptions = TrimOptions.Trim;
    result.Configuration.Delimiter =
        (definition == null) ? "," :
        propertyBagHelper.Get<String>(PropertyBagItemTypeEnum.DelimiterCharacter, ",");
    result.Configuration.Quote =
        (definition == null) ? '"' :
        propertyBagHelper.Get<Char>(PropertyBagItemTypeEnum.QuoteCharacter, '"');
    result.Configuration.IgnoreQuotes =
        (definition == null) ? true :
        propertyBagHelper.Get<Boolean>(PropertyBagItemTypeEnum.IgnoreQuotes, true);
    result.Configuration.MissingFieldFound = null;
    result.Configuration.ReadingExceptionOccurred = null;

    // Send the reader back
    return result;
}
/// <summary>
/// Get the data from a field
/// </summary>
/// <param name="csvReader">The reader to handle the property get</param>
/// <param name="property">The property data</param>
/// <param name="definition">The data item definition describing the columns</param>
/// <param name="value">The parsed value (passed by reference)</param>
/// <returns>If it was successful</returns>
private static Boolean GetPropertyValue(CsvReader csvReader, DataItemProperty property, DataItemDefinition definition, ref Object value)
{
    // Get the property type as some types of data need handling differently straight away
    String propertyType = property.DataType.ToShortName();

    // Get the raw data
    Boolean fieldFound = GetField<String>(csvReader, property, typeof(String), out String rawValue);
    if (fieldFound)
    {
        value = DataFormatHelper.ReadData(
            DataFormatHelper.CleanString(rawValue, csvReader.Configuration.Quote),
            property,
            definition);
    }

    // Return the data
    return fieldFound;
}