public async Task ReadEntitiesWithFilter_First100RecordsRead()
{
    const string IntegerPropertyName = "IntegerProperty";

    // Source configuration: apply an OData "less than 100" filter on the
    // integer property and suppress internal table fields.
    var configuration = Mocks
        .Of<IAzureTableSourceAdapterConfiguration>(c =>
            c.ConnectionString == Settings.AzureStorageConnectionString &&
            c.Table == tableName &&
            c.Filter == IntegerPropertyName + " lt 100" &&
            c.InternalFields == AzureTableInternalFields.None)
        .First();

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    // Only entities whose integer property is below 100 should come back.
    DataItemCollectionAssert.AreEquivalent(
        sampleData.Where(e => (int)e[IntegerPropertyName] < 100),
        itemsRead,
        TestResources.InvalidDocumentsRead);
}
public static async Task<List<IDataItem>> ReadCsv(ICsvFileSourceAdapterConfiguration configuration)
{
    // Drains the CSV source adapter until end-of-data, skipping any record
    // that fails with a non-fatal read error.
    var records = new List<IDataItem>();
    using (var source = await new CsvFileSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        while (true)
        {
            IDataItem record;
            try
            {
                record = await source.ReadNextAsync(readOutput, CancellationToken.None);
            }
            catch (NonFatalReadException)
            {
                // Malformed record: skip it and keep reading.
                continue;
            }

            if (record == null)
            {
                break;
            }

            records.Add(record);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return records;
}
public async Task ReadSimpleDocument_AllFieldsRead()
{
    // Seed the collection, recording each generated id back into the
    // expected document so the equivalence check below can match on it.
    var documents = SampleData.GetSimpleDocuments(5);
    foreach (var document in documents)
    {
        var mongoDocument = new MongoDocument(document);
        Collection.Insert(mongoDocument);
        document["_id"] = mongoDocument.Id.ToString();
    }

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new MongoDbSourceAdapterFactory()
        .CreateAsync(Configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    DataItemCollectionAssert.AreEquivalent(documents, itemsRead, TestResources.InvalidDocumentsRead);
}
public async Task ReadSampleData_AllDataRead()
{
    // Source configuration: read the whole collection without internal fields.
    var configuration = Mocks
        .Of<IDocumentDbSourceAdapterConfiguration>(c =>
            c.ConnectionString == ConnectionString &&
            c.Collection == CollectionName &&
            c.InternalFields == false)
        .First();

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new DocumentDbSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    // Everything that was inserted must have been read back.
    DataItemCollectionAssert.AreEquivalent(sampleData, itemsRead, TestResources.InvalidDocumentsRead);
}
protected async Task<List<IDataItem>> ReadDataAsync(IDataSourceAdapter sourceAdapter)
{
    // Reads every item the adapter can produce, tolerating non-fatal read
    // failures by skipping the offending item and continuing.
    var items = new List<IDataItem>();
    var readOutput = new ReadOutputByRef();

    while (true)
    {
        IDataItem item;
        try
        {
            item = await sourceAdapter.ReadNextAsync(readOutput, CancellationToken.None);
        }
        catch (NonFatalReadException)
        {
            continue;
        }

        if (item == null)
        {
            break;
        }

        items.Add(item);
        Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
        readOutput.Wipe();
    }

    return items;
}
public async Task ReadData_ApplyBelow40Query_MatchingDocumentsRead()
{
    // Query the AllDocs/ByAge index for ages up to 40; bracketed Lucene
    // ranges are inclusive, hence the <= 40 expectation below.
    var configuration = Mocks
        .Of<IRavenDbSourceAdapterConfiguration>(c =>
            c.ConnectionString == connectionString &&
            c.Query == "Age: [* TO 40]" &&
            c.Index == "AllDocs/ByAge" &&
            c.ExcludeId == true)
        .First();

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new RavenDbSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    DataItemCollectionAssert.AreEquivalent(
        sampleData.Where(d => (int)d["Age"] <= 40),
        itemsRead,
        TestResources.InvalidDocumentsRead);
}
private static async Task<List<IDataItem>> ReadData(string fileName)
{
    // Build a mongoexport source over the single requested file.
    var configuration = Mocks
        .Of<IMongoexportFileSourceAdapterConfiguration>(c => c.Files == new[] { fileName })
        .First();

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new MongoexportFileSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return itemsRead;
}
// Reads every record from a CSV source adapter built from the given
// configuration. Returns the records in read order; asserts that each
// successful read also produced a data item id.
internal static async Task<List<IDataItem>> ReadCsv(ICsvFileSourceAdapterConfiguration configuration)
{
    var records = new List<IDataItem>();
    using (var source = await new CsvFileSourceAdapterFactory().CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        IDataItem record = null;
        var readOutput = new ReadOutputByRef();
        // FIX: await the read instead of blocking on .Result. Sync-over-async
        // inside an async method can deadlock under a synchronization context
        // and needlessly blocks a thread-pool thread; it also wraps failures
        // in AggregateException instead of surfacing the original exception.
        // This matches the awaiting pattern used by the sibling ReadCsv helper.
        while ((record = await source.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            records.Add(record);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }
    return records;
}
private static async Task<List<IDataItem>> ReadData(string fileName)
{
    // Reads every item the mongoexport adapter can produce from the file.
    var items = new List<IDataItem>();
    using (var adapter = new MongoexportFileSourceAdapter(fileName))
    {
        var readOutput = new ReadOutputByRef();
        for (var item = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             item != null;
             item = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            items.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }
    return items;
}
private static async Task ReadAndVerifyFields(IAzureTableSourceAdapterConfiguration configuration, string[] expectedInternalProperties)
{
    // Verifies every entity read from the table exposes each expected
    // internal property with a non-null value.
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            var fieldNames = item.GetFieldNames().ToArray();
            foreach (var propertyName in expectedInternalProperties)
            {
                CollectionAssert.Contains(fieldNames, propertyName, TestResources.MissingDataItemFieldFormat, propertyName);
                Assert.IsNotNull(item.GetValue(propertyName), TestResources.EmptyDataItemFieldValueFormat, propertyName);
            }

            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }
}
public async Task ReadEntitiesWithProjection_OnlyStringPropertyRead()
{
    const string StringPropertyName = "StringProperty";

    // Project only the string property and suppress internal table fields.
    var configuration = Mocks
        .Of<IAzureTableSourceAdapterConfiguration>(c =>
            c.ConnectionString == Settings.AzureStorageConnectionString &&
            c.Table == tableName &&
            c.Projection == new[] { StringPropertyName } &&
            c.InternalFields == AzureTableInternalFields.None)
        .First();

    var itemsRead = new List<IDataItem>();
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        IDataItem item;
        while ((item = await adapter.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            itemsRead.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    // Expected shape: each sample entity reduced to just its string property.
    var expected = sampleData
        .Select(i => new Dictionary<string, object> { { StringPropertyName, i[StringPropertyName] } })
        .ToArray();
    DataItemCollectionAssert.AreEquivalent(expected, itemsRead, TestResources.InvalidDocumentsRead);
}
// Round-trips SQL Server geography values (point, line string, polygon)
// through the SQL data source adapter and verifies each row is converted
// to the corresponding geospatial representation.
// NOTE(review): the expected GeographyPosition values swap the coordinate
// order relative to the WKT literals (latitude first) — presumably the
// adapter normalizes WKT "lon lat" into (lat, lon); confirm against
// SampleData.Geospatial.
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration) {
    var columnKeys = new[] { "IntColumn", "GeographyColumn" };
    var columnMappings = new Dictionary <string, string> { { columnKeys[0], "int primary key" }, { columnKeys[1], "geography" } };
    // Raw rows inserted into SQL Server: the geography column holds T-SQL
    // expressions (see AddRows(..., true) below, which presumably inserts
    // them unquoted so the server evaluates them).
    var rows = new[] {
        new Dictionary <string, object> { { columnKeys[0], 1 }, { columnKeys[1], "geography::STGeomFromText('POINT(-122.16 43.656)', 4326)" } },
        new Dictionary <string, object> { { columnKeys[0], 2 }, { columnKeys[1], "geography::STGeomFromText('LINESTRING(-122.360 47.656, -122.343 47.656 )', 4326)" } },
        new Dictionary <string, object> { { columnKeys[0], 3 }, { columnKeys[1], "geography::STGeomFromText('POLYGON((-122.358 47.653 , -122.348 47.649, -122.348 47.658, -122.358 47.658, -122.358 47.653))', 4326)" } },
    };
    // What the adapter is expected to hand back for each inserted row.
    var expectedRows = new[] {
        new Dictionary <string, object> { { columnKeys[0], 1 }, { columnKeys[1], SampleData.Geospatial.AsPoint(new GeographyPosition(43.656, -122.16)) } },
        new Dictionary <string, object> { { columnKeys[0], 2 }, { columnKeys[1], SampleData.Geospatial.AsLineString(new[] { new GeographyPosition(47.656, -122.360), new GeographyPosition(47.656, -122.343) }) } },
        new Dictionary <string, object> { { columnKeys[0], 3 }, { columnKeys[1], SampleData.Geospatial.AsPolygon(new[] { new GeographyPosition(47.653, -122.358), new GeographyPosition(47.649, -122.348), new GeographyPosition(47.658, -122.348), new GeographyPosition(47.658, -122.358), new GeographyPosition(47.653, -122.358) }) } }
    };
    using (var connection = new SqlConnection(ConnectionString)) {
        connection.Open();
        try {
            CreateTable(connection, tableName, columnMappings);
            AddRows(connection, tableName, rows, true);
            using (var adapter = await new SqlDataSourceAdapterFactory()
                .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None)) {
                var readOutput = new ReadOutputByRef();
                // One data item per inserted row, read back in insertion order.
                for (var rowIndex = 0; rowIndex < rows.Length; ++rowIndex) {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();
                    VerifyDataItem(expectedRows[rowIndex], dataItem);
                }
            }
        } finally {
            // Always drop the scratch table, even when assertions fail.
            DropTable(connection, tableName);
        }
    }
}
// Pumps data items from the source adapter into the sink, allowing up to
// sink.MaxDegreeOfParallelism writes in flight at once. Non-fatal read
// errors are recorded in the statistics and skipped; fatal read or
// completion errors are collected and rethrown as a single
// AggregateException after in-flight writes have drained.
private static async Task TransferData(IDataSourceAdapter source, IDataSinkAdapter sink, ITransferStatistics statistics, CancellationToken cancellation) {
    // One slot per allowed concurrent write, pre-filled with completed
    // tasks so Task.WhenAny below hands out a free slot immediately.
    var writeTasks = Enumerable
        .Range(0, sink.MaxDegreeOfParallelism)
        .Select(i => (Task)Task.FromResult <object>(null))
        .ToArray();
    var fatalExceptions = new List <Exception>();
    var readOutput = new ReadOutputByRef();
    IDataItem dataItem;
    while (!cancellation.IsCancellationRequested) {
        // Clear the previous item's id before each read so error reporting
        // below never attributes a failure to a stale id.
        readOutput.Wipe();
        try {
            dataItem = await source.ReadNextAsync(readOutput, cancellation);
        } catch (NonFatalReadException nonFatalException) {
            // Bad item: record it against the id the source reported, skip it.
            statistics.AddError(readOutput.DataItemId, nonFatalException);
            continue;
        } catch (Exception exception) {
            // Any other read failure aborts the transfer loop.
            fatalExceptions.Add(exception);
            break;
        }
        if (dataItem == null || cancellation.IsCancellationRequested) {
            break;
        }
        // Wait for any write slot to free up, then reuse it for this item.
        var completed = await Task.WhenAny(writeTasks);
        writeTasks[Array.IndexOf(writeTasks, completed)] =
            TransferDataItem(sink, readOutput.DataItemId, dataItem, statistics, cancellation);
    }
    // Report completion to the sink
    try {
        await sink.CompleteAsync(cancellation);
    } catch (Exception exception) {
        fatalExceptions.Add(exception);
    }
    // Wait for all on-going writes to complete
    for (var index = 0; index < writeTasks.Length; ++index) {
        await writeTasks[index];
    }
    // Throw fatal exceptions, if any
    if (fatalExceptions.Any()) {
        throw new AggregateException(fatalExceptions);
    }
}
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration)
{
    // Two sample rows covering a representative mix of SQL column types.
    var columnKeys = new[] { "IntColumn", "BitColumn", "NVarCharMaxColumn", "FloatColumn", "DateTimeColumn", };
    var columnMappings = new Dictionary<string, string>
    {
        { columnKeys[0], "int primary key" },
        { columnKeys[1], "bit" },
        { columnKeys[2], "nvarchar(max)" },
        { columnKeys[3], "float" },
        { columnKeys[4], "datetime" },
    };
    var rows = new[]
    {
        new Dictionary<string, object> { { columnKeys[0], 1 }, { columnKeys[1], false }, { columnKeys[2], "String1" }, { columnKeys[3], 2.3 }, { columnKeys[4], GetSampleDateTime() } },
        new Dictionary<string, object> { { columnKeys[0], 2 }, { columnKeys[1], true }, { columnKeys[2], "String2" }, { columnKeys[3], 4.5 }, { columnKeys[4], GetSampleDateTime() } },
    };

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        try
        {
            CreateTable(connection, tableName, columnMappings);
            AddRows(connection, tableName, rows);

            using (var adapter = await new SqlDataSourceAdapterFactory().CreateAsync(configuration, DataTransferContextMock.Instance))
            {
                var readOutput = new ReadOutputByRef();
                // Expect exactly one data item per inserted row, in insertion order.
                for (var index = 0; index < rows.Length; ++index)
                {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();
                    VerifyDataItem(rows[index], dataItem);
                }
            }
        }
        finally
        {
            // Always drop the scratch table, even when assertions fail.
            DropTable(connection, tableName);
        }
    }
}