/// <summary>
/// Get the property bag from a given instance
/// </summary>
/// <param name="objectType">The category of object that <paramref name="value"/> identifies</param>
/// <param name="value">The enumeration value (as an integer) identifying the concrete type</param>
/// <returns>The property bag item types (empty when the type is not recognised)</returns>
public List<PropertyBagItemType> Get(ObjectTypes objectType, Int32 value)
{
    // The result (defaults to an empty list so unknown types never return null)
    List<PropertyBagItemType> result = new List<PropertyBagItemType>();

    // Check the enum type
    switch (objectType)
    {
        // We are checking data provider types
        case ObjectTypes.Connections:
        {
            // Braces scope these locals to this case only
            IDataProvider provider = null;
            DataProviderType dataProviderType = (DataProviderType)value;

            // Map the provider type to a concrete provider instance
            switch (dataProviderType)
            {
                case DataProviderType.DelimitedFileProvider:
                    provider = new DelimitedFileProvider();
                    break;
                case DataProviderType.FixedWidthFileProvider:
                    provider = new FixedWidthFileProvider();
                    break;
                case DataProviderType.SQLProvider:
                    provider = new SQLProvider();
                    break;
            }

            // Did we get a provider? Unknown provider types fall through and
            // return the empty list rather than throwing.
            // NOTE: the original assigned "provider = null" afterwards to
            // "destroy" it — nulling a local does not free anything in .NET;
            // the garbage collector handles it once the local goes out of scope.
            if (provider != null)
                result = provider.PropertyBagTypes(); // Get the property bag types

            break;
        }
    }

    // Return the result
    return result;
}
/// <summary>
/// Verify that data written to a blank stream through the delimited file
/// provider can be read back again (i.e. the write round-trips).
/// </summary>
public void Write_DataTypes()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_WriteTests); // Get the test definition of what to write
    DataTable dataToWrite = testHelper.PopulateDataTable(TestHelper.TestFile_WriteTests); // Get the data
    DataTable dataToRead = null; // Table to read the data back in to (to verify it was created)
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new DelimitedFileProvider(); // A flat file provider to use to write the data

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(dataToWrite, ""); // Write the data to the empty stream
    dataToRead = provider.Read(""); // Get the data back

    // Assert
    // Assert.NotEqual rather than Assert.True(x != 0) so a failure reports
    // the actual row count instead of just "expected True" (xUnit2004)
    Assert.NotEqual(0, dataToRead.Rows.Count);
}
/// <summary>
/// Verify that analysing the big sales-records test file discovers the
/// expected number of columns (14).
/// </summary>
public void Analyse_BigData_ColumnCount()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    String fileContent = testHelper.GetResourceString(TestHelper.TestFile_BigFileSalesRecords); // Raw file content to analyse
    DataConnection connection = testHelper.TestConnection(); // A test connection for the analysis
    DelimitedFileProvider provider = new DelimitedFileProvider(); // Provider under test

    // Build the analysis request up front rather than inline in the call
    AnalyseRequest<object> analyseRequest = new AnalyseRequest<object>()
    {
        Data = fileContent,
        Connection = connection
    };

    // Act
    DataItemDefinition definition = provider.Analyse(analyseRequest); // Derive the definition from the content

    // Assert
    Assert.Equal(14, definition.ItemProperties.Count); // 14 columns expected
}
/// <summary>
/// Verify that reading with a filter command returns only the rows that
/// match the filter expression.
/// </summary>
public void Filter_Records_With_Command()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_Headers); // Get the test definition of what data to filter
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataTable unfilteredData = testHelper.PopulateDataTable(TestHelper.TestFile_Headers); // Get the data
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new DelimitedFileProvider(); // A flat file provider to use to write the data
    String command = "Title = 'Title 1'"; // The command to do the filter

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(unfilteredData, ""); // Write the unfiltered data to the empty stream
    DataTable filteredData = provider.Read(command); // Get the new data set back that has been filtered

    // Assert
    // Assert.Equal rather than Assert.True(x == 1) so a failure reports the
    // actual row count instead of just "expected True" (xUnit2003/2004)
    Assert.Equal(1, filteredData.Rows.Count); // Expect 1 row in the filtered set
}
/// <summary>
/// Populate a data table from an embedded test resource using a delimited
/// file provider in test mode.
/// </summary>
/// <param name="testDefinition">The resource name of the test file (also used to look up its definition)</param>
/// <param name="connection">The connection to hand to the provider</param>
/// <returns>The data read from the resource</returns>
public DataTable PopulateDataTable(String testDefinition, DataConnection connection)
{
    // Get the test definition (the columns, data types etc. for this file)
    DataItemDefinition definition = TestDefinition(testDefinition);

    // Get the test data from the resource in the manifest.
    // FIX: the stream was previously never disposed; wrap it in a using block
    // so it is released once the data has been read. This is safe because
    // DataTable is an in-memory representation, so the returned table no
    // longer depends on the stream.
    using (Stream resourceStream = GetResourceStream(testDefinition))
    {
        // Create a new flat file provider
        IDataProvider provider = new DelimitedFileProvider()
        {
            TestMode = true // The provider should be marked as being in test mode
        };
        provider.Connect(definition, connection, resourceStream); // Connect to the location of the data

        // Read the data from the provider and return it
        return provider.Read("");
    }
}
/// <summary>
/// Verify that writing two data sets sharing a 2-part primary key merges
/// overlapping rows rather than appending duplicates.
/// </summary>
public void Merge_Files_2_Part_Primary_Key()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    DataItemDefinition definition = testHelper.TestDefinition(TestHelper.TestFile_PKMergeFrom); // Get the test definition of what to merge from (but also to)
    DataConnection connection = testHelper.TestConnection(); // Get a test connection
    DataTable baseData = testHelper.PopulateDataTable(TestHelper.TestFile_PKMergeTo); // Get the data
    DataTable mergeData = testHelper.PopulateDataTable(TestHelper.TestFile_PKMergeFrom); // Get the data
    Stream testStream = new MemoryStream(); // A blank stream to write data to
    IDataProvider provider = new DelimitedFileProvider(); // A flat file provider to use to write the data

    // Act
    provider.Connect(definition, connection, testStream); // Connect to the blank stream
    provider.Write(baseData, ""); // Write the data to the empty stream
    provider.Write(mergeData, ""); // Write some more records with some updates and some adds
    DataTable mergedData = provider.Read(""); // Get the new data set back

    // Assert
    // Assert.Equal rather than Assert.True(x == 6) so a failure reports the
    // actual row count instead of just "expected True" (xUnit2003/2004)
    Assert.Equal(6, mergedData.Rows.Count); // Expect of the total of 8 rows, 2 should merge
}
/// <summary>
/// Verify that analysing and then reading the big sales-records test file
/// yields the expected number of rows (5000).
/// </summary>
public void Analyse_BigData_RowCount()
{
    // Arrange
    TestHelper testHelper = new TestHelper();
    Stream sourceStream = testHelper.GetResourceStream(TestHelper.TestFile_BigFileSalesRecords); // The raw test file stream
    DataConnection connection = testHelper.TestConnection(); // A test connection for the analysis
    DelimitedFileProvider provider = new DelimitedFileProvider(); // Provider under test

    // Act
    // Analyse the stream (request built inline) to derive the definition,
    // then connect and read the whole data set back
    DataItemDefinition definition = provider.Analyse(
        new AnalyseRequest<Object>()
        {
            Data = sourceStream,
            Connection = connection
        });
    provider.Connect(definition, connection, sourceStream);
    DataTable rows = provider.Read("");

    // Assert
    Assert.Equal(5000, rows.Rows.Count); // 5000 data rows expected
}