/// <summary>
/// Reads all data items from a mongoexport file through the adapter factory.
/// </summary>
/// <param name="fileName">Path of the mongoexport file to read.</param>
/// <returns>All data items read from the file.</returns>
private static async Task<List<IDataItem>> ReadData(string fileName)
{
    var configuration = Mocks
        .Of<IMongoexportFileSourceAdapterConfiguration>(c => c.Files == new[] { fileName })
        .First();

    var results = new List<IDataItem>();
    using (var adapter = await (new MongoexportFileSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance)))
    {
        var readOutput = new ReadOutputByRef();
        for (var item = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             item != null;
             item = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            results.Add(item);
            // Every successful read must report a data item identifier.
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return results;
}
/// <summary>
/// Verifies that every field of simple sample documents survives a round trip
/// through the MongoDB source adapter.
/// </summary>
public async Task ReadSimpleDocument_AllFieldsRead()
{
    var documents = SampleData.GetSimpleDocuments(5);
    foreach (var document in documents)
    {
        var mongoDocument = new MongoDocument(document);
        Collection.Insert(mongoDocument);
        // Remember the generated identifier so the comparison below includes it.
        document["_id"] = mongoDocument.Id.ToString();
    }

    var readResults = new List<IDataItem>();
    using (var adapter = await new MongoDbSourceAdapterFactory().CreateAsync(Configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            readResults.Add(dataItem);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    DataItemCollectionAssert.AreEquivalent(documents, readResults, TestResources.InvalidDocumentsRead);
}
/// <summary>
/// Reads the next document from the RavenDB cursor.
/// </summary>
/// <param name="readOutput">Receives the identifier of the document that was read, when available.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when the cursor is exhausted.</returns>
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (documentsCursor == null)
    {
        // Lazily create the cursor on the first read.
        await InitializeDocumentsCursor(cancellation);
    }

    if (!await documentsCursor.MoveNextAsync())
    {
        return null;
    }

    var metadata = documentsCursor.Current.Metadata;
    RavenJToken idToken = null;
    if (metadata != null && metadata.TryGetValue(MetadataIdField, out idToken))
    {
        readOutput.DataItemId = idToken.Value<string>();
    }

    var documentObject = documentsCursor.Current.Document as RavenJObject;
    if (documentObject == null)
    {
        // Only JSON object documents can be transferred.
        throw NonFatalReadException.Convert(Errors.NonJsonDocumentRead());
    }

    // Re-attach the identifier unless the configuration asks for it to be stripped.
    if (idToken != null && !configuration.ExcludeIdField)
    {
        documentObject.Add(DocumentIdField, idToken);
    }

    return new RavenJObjectDataItem(documentObject);
}
/// <summary>
/// Reads the next JSON document line from the mongoexport file.
/// </summary>
/// <param name="readOutput">Receives an identifier built from the stream id and a document counter.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null at end of stream.</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (file == null)
    {
        // Lazily open the source stream on the first read.
        file = new StreamReader(await sourceStreamProvider.CreateStream(cancellation));
    }

    // NOTE(review): lineNumber advances once per document, not once per physical line;
    // after blank lines are skipped below, the reported id can lag the true file line — confirm intended.
    readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat,
        sourceStreamProvider.Id, ++lineNumber);

    // Skip empty lines until a non-empty line or end of stream is reached.
    string jsonData = null;
    while (!file.EndOfStream && String.IsNullOrEmpty(jsonData = await file.ReadLineAsync()))
    {
        ;
    }

    if (file.EndOfStream && String.IsNullOrEmpty(jsonData))
    {
        return (null);
    }

    try
    {
        return (new BsonDocumentDataItem(BsonDocument.Parse(jsonData)));
    }
    catch (Exception parseError)
    {
        // NOTE(review): wraps parseError.InnerException (often null) rather than parseError itself,
        // which drops the parser's stack trace — confirm this matches the project's convention.
        throw new NonFatalReadException(parseError.Message, parseError.InnerException);
    }
}
/// <summary>
/// Reads all records from a CSV source, skipping rows that fail with a non-fatal error.
/// </summary>
/// <param name="configuration">CSV source adapter configuration.</param>
/// <returns>All successfully read data items.</returns>
public static async Task<List<IDataItem>> ReadCsv(ICsvFileSourceAdapterConfiguration configuration)
{
    var records = new List<IDataItem>();
    using (var source = await new CsvFileSourceAdapterFactory().CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        while (true)
        {
            IDataItem record;
            try
            {
                record = await source.ReadNextAsync(readOutput, CancellationToken.None);
            }
            catch (NonFatalReadException)
            {
                // Malformed rows are reported as non-fatal; skip them and keep reading.
                continue;
            }

            if (record == null)
            {
                break;
            }

            records.Add(record);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return records;
}
/// <summary>
/// Reads the next entity from the Azure table, downloading result segments on demand.
/// </summary>
/// <param name="readOutput">Receives the row key of the entity that was read.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when no more segments remain.</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (segmentDownloadTask == null)
    {
        // First call: kick off the download of the initial segment.
        MoveToNextSegment(null, cancellation);
    }

    var currentSegment = await segmentDownloadTask;

    // Make sure current segment has data to read
    while (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken != null)
    {
        MoveToNextSegment(currentSegment.ContinuationToken, cancellation);
        currentSegment = await segmentDownloadTask;
    }

    if (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken == null)
    {
        // Exhausted the final segment: nothing more to read.
        return (null);
    }

    var entity = currentSegment.Results[currentEntityIndex++];
    readOutput.DataItemId = entity.RowKey;

    if (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken != null)
    {
        // Start downloading next segment while current record is being processed
        MoveToNextSegment(currentSegment.ContinuationToken, cancellation);
    }

    return (new DynamicTableEntityDataItem(AppendInternalProperties(entity)));
}
/// <summary>
/// Verifies that a server-side filter limits the read to entities whose
/// integer property is below 100.
/// </summary>
public async Task ReadEntitiesWithFilter_First100RecordsRead()
{
    const string IntegerPropertyName = "IntegerProperty";

    var configuration = Mocks
        .Of<IAzureTableSourceAdapterConfiguration>(c =>
            c.ConnectionString == Settings.AzureStorageConnectionString &&
            c.Table == tableName &&
            c.Filter == IntegerPropertyName + " lt 100" &&
            c.InternalFields == AzureTableInternalFields.None)
        .First();

    var readResults = new List<IDataItem>();
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            readResults.Add(dataItem);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    DataItemCollectionAssert.AreEquivalent(
        sampleData.Where(e => (int)e[IntegerPropertyName] < 100),
        readResults,
        TestResources.InvalidDocumentsRead);
}
/// <summary>
/// Drains the given source adapter, skipping non-fatal read failures,
/// and checks that every successful read reports a data item id.
/// </summary>
/// <param name="sourceAdapter">Source adapter to read from.</param>
/// <returns>All successfully read data items.</returns>
protected async Task<List<IDataItem>> ReadDataAsync(IDataSourceAdapter sourceAdapter)
{
    var results = new List<IDataItem>();
    var readOutput = new ReadOutputByRef();
    while (true)
    {
        IDataItem dataItem;
        try
        {
            dataItem = await sourceAdapter.ReadNextAsync(readOutput, CancellationToken.None);
        }
        catch (NonFatalReadException)
        {
            // Non-fatal failures affect a single record; continue with the next one.
            continue;
        }

        if (dataItem == null)
        {
            break;
        }

        results.Add(dataItem);
        Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
        readOutput.Wipe();
    }

    return results;
}
/// <summary>
/// Verifies that all sample documents can be read from the DocumentDB collection.
/// </summary>
public async Task ReadSampleData_AllDataRead()
{
    var configuration = Mocks
        .Of<IDocumentDbSourceAdapterConfiguration>(c =>
            c.ConnectionString == ConnectionString &&
            c.Collection == CollectionName &&
            c.InternalFields == false)
        .First();

    var readResults = new List<IDataItem>();
    using (var adapter = await new DocumentDbSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            readResults.Add(dataItem);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    DataItemCollectionAssert.AreEquivalent(sampleData, readResults, TestResources.InvalidDocumentsRead);
}
/// <summary>
/// Verifies that a RavenDB index query ("Age: [* TO 40]") returns exactly the
/// sample documents whose Age is at most 40.
/// </summary>
public async Task ReadData_ApplyBelow40Query_MatchingDocumentsRead()
{
    var configuration = Mocks
        .Of<IRavenDbSourceAdapterConfiguration>(c =>
            c.ConnectionString == connectionString &&
            c.Query == "Age: [* TO 40]" &&
            c.Index == "AllDocs/ByAge" &&
            c.ExcludeId == true)
        .First();

    var readResults = new List<IDataItem>();
    using (var adapter = await new RavenDbSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            readResults.Add(dataItem);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    // Lucene range queries are inclusive, hence <= 40 here.
    DataItemCollectionAssert.AreEquivalent(
        sampleData.Where(d => (int)d["Age"] <= 40),
        readResults,
        TestResources.InvalidDocumentsRead);
}
/// <summary>
/// Reads the next item from the DynamoDB source, downloading result pages on demand.
/// </summary>
/// <param name="readOutput">Receives a synthetic identifier based on a running item counter.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when no more pages remain.</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // TODO: Is there a way to get some sort of an entity identifier?
    readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, ++globalEntityIndex);

    if (pageDownloadTask == null)
    {
        // First call: kick off the download of the initial page.
        MoveToNextPage(null, cancellation);
    }

    var currentPage = await pageDownloadTask;

    // Make sure current page has data to read
    while (pageEntityIndex >= currentPage.Items.Count && IsValidContinuation(currentPage.ContinuationToken))
    {
        MoveToNextPage(currentPage.ContinuationToken, cancellation);
        currentPage = await pageDownloadTask;
    }

    if (pageEntityIndex >= currentPage.Items.Count && !IsValidContinuation(currentPage.ContinuationToken))
    {
        // Exhausted the final page: nothing more to read.
        return (null);
    }

    var item = currentPage.Items[pageEntityIndex++];

    if (pageEntityIndex >= currentPage.Items.Count && IsValidContinuation(currentPage.ContinuationToken))
    {
        // Start downloading next page while current item is being processed
        MoveToNextPage(currentPage.ContinuationToken, cancellation);
    }

    return (new DynamoDbDataItem(item));
}
/// <summary>
/// Reads the next CSV record, lazily creating the reader and consuming the header row first.
/// </summary>
/// <param name="readOutput">Receives an identifier built from the stream id and the reader's current row.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when there is no header row (empty input).</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    try
    {
        if (reader == null)
        {
            // First call: open the reader and consume the header row.
            reader = new CsvReader(
                await sourceStreamProvider.CreateReader(cancellation),
                new CsvReaderConfiguration
                {
                    TrimQuoted = configuration.TrimQuoted,
                    IgnoreUnquotedNulls = configuration.NoUnquotedNulls
                });
            header = ReadHeaderRow();
        }

        if (header == null)
        {
            // No header row means there is nothing to read at all.
            return (null);
        }

        // ReadNext is synchronous; run it on a pool thread.
        return (await Task.Factory.StartNew <IDataItem>(ReadNext));
    }
    finally
    {
        // Report the position even when reading failed, so errors can be located in the file.
        readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat,
            sourceStreamProvider.Id, reader == null ? 0 : reader.Row);
    }
}
/// <summary>
/// Reads the next item by dispatching the synchronous ReadNext to a pool thread.
/// </summary>
/// <param name="readOutput">Passed to ReadNext as the task state.</param>
/// <param name="cancellation">Cancellation token (not observed here).</param>
/// <returns>Task producing the next data item.</returns>
public Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (documentsCursor != null)
    {
        return Task.Factory.StartNew<IDataItem>(ReadNext, readOutput);
    }

    // Reading before initialization is a programming error.
    throw Errors.SourceIsNotInitialized();
}
/// <summary>
/// Reads one data artifact from the encapsulated data source adapters.
/// </summary>
/// <param name="readOutput">Object holding additional information about the data artifact.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>Task that represents asynchronous read operation.</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (!started)
    {
        // Position the enumerator on the first adapter exactly once.
        started = true;
        if (!adapters.MoveNext())
        {
            finished = true;
        }
    }

    while (!finished)
    {
        Exception readError = null;
        try
        {
            var dataItem = await adapters.Current.ReadNextAsync(readOutput, cancellation);
            if (dataItem != null)
            {
                return (dataItem);
            }
            // A null item means the current adapter is drained; fall through to advance.
        }
        catch (NonFatalReadException)
        {
            // Non-fatal errors affect a single record; surface them without switching adapters.
            throw;
        }
        catch (AggregateException aggregateException)
        {
            readError = aggregateException.Flatten().InnerException;
        }
        catch (Exception exception)
        {
            readError = exception;
        }

        // Either drained or failed: dispose the current adapter before moving on.
        if (adapters.Current != null)
        {
            try
            {
                adapters.Current.Dispose();
            }
            catch { } // Best-effort cleanup; a dispose failure must not mask the read result.
        }

        if (!adapters.MoveNext())
        {
            finished = true;
        }

        if (readError != null)
        {
            // Report the adapter failure as non-fatal so the caller can continue with the next adapter.
            throw new NonFatalReadException(readError.Message, readError.InnerException);
        }
    }

    return (null);
}
/// <summary>
/// Returns the next mock data item, or null when the sequence is exhausted,
/// notifying the optional callback of every result (including the final null).
/// </summary>
public Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    IDataItem result = null;
    if (dataItems.MoveNext())
    {
        result = dataItems.Current;
    }

    if (dataReadCallback != null)
    {
        dataReadCallback(result);
    }

    return Task.FromResult(result);
}
/// <summary>
/// Reads one data item per expected row and verifies each against the expectation, in order.
/// </summary>
/// <param name="adapter">Source adapter under test.</param>
/// <param name="expectedRows">Rows the adapter is expected to produce, in order.</param>
private async Task VerifyRows(IDataSourceAdapter adapter, IReadOnlyDictionary<string, object>[] expectedRows)
{
    var readOutput = new ReadOutputByRef();
    foreach (var expectedRow in expectedRows)
    {
        var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
        Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
        VerifyDataItem(expectedRow, dataItem);
    }
}
/// <summary>
/// Reads one data artifact from the encapsulated data source adapters.
/// </summary>
/// <param name="readOutput">Object holding additional information about the data artifact.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>Task that represents asynchronous read operation.</returns>
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (!started)
    {
        // Position the enumerator on the first adapter exactly once.
        started = true;
        if (!adapters.MoveNext())
            finished = true;
    }

    while (!finished)
    {
        Exception readError = null;
        try
        {
            var dataItem = await adapters.Current.ReadNextAsync(readOutput, cancellation);
            if (dataItem != null)
                return dataItem;
            // A null item means the current adapter is drained; fall through to advance.
        }
        catch (NonFatalReadException)
        {
            // Non-fatal errors affect a single record; surface them without switching adapters.
            throw;
        }
        catch (AggregateException aggregateException)
        {
            readError = aggregateException.Flatten().InnerException;
        }
        catch (Exception exception)
        {
            readError = exception;
        }

        // Either drained or failed: dispose the current adapter before moving on.
        if (adapters.Current != null)
            try
            {
                adapters.Current.Dispose();
            }
            catch { } // Best-effort cleanup; a dispose failure must not mask the read result.

        if (!adapters.MoveNext())
            finished = true;

        // Report the adapter failure as non-fatal so the caller can continue with the next adapter.
        if (readError != null)
            throw new NonFatalReadException(readError.Message, readError.InnerException);
    }

    return null;
}
/// <summary>
/// Reads the next BSON document from the MongoDB cursor.
/// </summary>
/// <param name="readOutput">Receives the document's _id value when present.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when the cursor is missing or exhausted.</returns>
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (documentsCursor == null)
    {
        return null;
    }

    if (!await documentsCursor.MoveNextAsync(cancellation))
    {
        return null;
    }

    var document = documentsCursor.Current;

    BsonValue idValue;
    if (document.TryGetValue(DocumentIdFieldName, out idValue))
    {
        readOutput.DataItemId = idValue.ToString();
    }

    return new BsonDocumentDataItem(document);
}
/// <summary>
/// Reads all records from a CSV source through the adapter factory.
/// </summary>
/// <param name="configuration">CSV source adapter configuration.</param>
/// <returns>All data items read from the source.</returns>
internal static async Task <List <IDataItem> > ReadCsv(ICsvFileSourceAdapterConfiguration configuration)
{
    var records = new List <IDataItem>();
    using (var source = await new CsvFileSourceAdapterFactory().CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        IDataItem record = null;
        var readOutput = new ReadOutputByRef();
        // Await the read instead of blocking on Task.Result: blocking inside an async
        // method risks deadlocks and wraps failures in AggregateException. This also
        // matches the awaiting pattern used by the other read helpers in this file.
        while ((record = await source.ReadNextAsync(readOutput, CancellationToken.None)) != null)
        {
            records.Add(record);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return (records);
}
/// <summary>
/// Reads the next JSON object from the stream, lazily creating the reader on first use.
/// </summary>
/// <param name="readOutput">Receives an identifier built from the stream id and the reader's line/position.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when no further object start token is found.</returns>
public async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    try
    {
        if (file == null)
        {
            file = await sourceStreamProvider.CreateReader(cancellation);
            // SupportMultipleContent allows a sequence of top-level JSON objects in one stream.
            jsonReader = new JsonTextReader(file)
            {
                SupportMultipleContent = true
            };
        }

        // Parsing is synchronous; run it on a pool thread.
        return (await Task.Factory.StartNew(() =>
        {
            // Advance to the start of the next JSON object, skipping any other tokens.
            while (jsonReader.Read() && jsonReader.TokenType != JsonToken.StartObject)
            {
                ;
            }

            if (jsonReader.TokenType != JsonToken.StartObject)
            {
                return null;
            }

            return serializer.Deserialize <IDataItem>(jsonReader);
        }));
    }
    finally
    {
        // Report the position even when reading failed, so errors can be located in the input.
        int lineNumber = 0, linePosition = 0;
        if (jsonReader != null)
        {
            lineNumber = jsonReader.LineNumber;
            linePosition = jsonReader.LinePosition;
        }

        readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat,
            sourceStreamProvider.Id, lineNumber, linePosition);
    }
}
/// <summary>
/// Reads all data items from a mongoexport file using the adapter directly.
/// </summary>
/// <param name="fileName">Path of the mongoexport file to read.</param>
/// <returns>All data items read from the file.</returns>
private static async Task<List<IDataItem>> ReadData(string fileName)
{
    var results = new List<IDataItem>();
    using (var adapter = new MongoexportFileSourceAdapter(fileName))
    {
        var readOutput = new ReadOutputByRef();
        for (var item = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             item != null;
             item = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            results.Add(item);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    return results;
}
/// <summary>
/// Reads every entity through the adapter and verifies each one carries the
/// expected internal properties with non-null values.
/// </summary>
/// <param name="configuration">Azure table source adapter configuration.</param>
/// <param name="expectedInternalProperties">Internal property names each entity must expose.</param>
private static async Task ReadAndVerifyFields(IAzureTableSourceAdapterConfiguration configuration, string[] expectedInternalProperties)
{
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            var fieldNames = dataItem.GetFieldNames().ToArray();
            foreach (var expectedInternalProperty in expectedInternalProperties)
            {
                CollectionAssert.Contains(fieldNames, expectedInternalProperty,
                    TestResources.MissingDataItemFieldFormat, expectedInternalProperty);
                Assert.IsNotNull(dataItem.GetValue(expectedInternalProperty),
                    TestResources.EmptyDataItemFieldValueFormat, expectedInternalProperty);
            }

            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }
}
/// <summary>
/// Reads the next row from the SQL data reader, advancing across result sets as needed.
/// </summary>
/// <param name="readOutput">Receives an identifier based on a running row counter.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The next data item, or null when all result sets are exhausted.</returns>
public override async Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellationToken)
{
    readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, ++rowNumber);

    while (!await dataReader.ReadAsync(cancellationToken))
    {
        // Propagate the caller's token so advancing to the next result set is
        // cancellable too (the parameterless overload could not be cancelled).
        if (!await dataReader.NextResultAsync(cancellationToken))
        {
            return (null);
        }
    }

    // Flatten the current row into a (possibly nested) data item, one property per column.
    var dataItem = NestedDataItem.Create(Configuration.NestingSeparator);
    for (var fieldIndex = 0; fieldIndex < dataReader.FieldCount; ++fieldIndex)
    {
        dataItem.AddProperty(
            dataReader.GetName(fieldIndex),
            dataReader.GetValue(fieldIndex));
    }

    return (dataItem);
}
/// <summary>
/// Reads the next document from the cursor and applies the configured transformation.
/// </summary>
/// <param name="readOutput">Receives the document's id value when present.</param>
/// <param name="cancellation">Cancellation token (not observed here).</param>
/// <returns>The transformed data item, or null when the cursor is exhausted.</returns>
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (documentsCursor == null)
    {
        // Reading before initialization is a programming error.
        throw Errors.SourceIsNotInitialized();
    }

    var hasNext = await documentsCursor.MoveNextAsync();
    if (!hasNext)
    {
        return null;
    }

    var document = documentsCursor.Current;

    object idValue;
    if (document.TryGetValue(DocumentIdFieldName, out idValue))
    {
        readOutput.DataItemId = idValue.ToString();
    }

    return Transformation.Transform(new DictionaryDataItem(document));
}
/// <summary>
/// Reads the next row from the HBase cursor, keeping only the newest cell per column.
/// </summary>
/// <param name="readOutput">Receives the row key of the row that was read.</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>The next data item, or null when the cursor is exhausted.</returns>
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    if (rowsCursor == null)
    {
        // Reading before initialization is a programming error.
        throw Errors.SourceIsNotInitialized();
    }

    if (!await rowsCursor.MoveNextAsync(cancellation))
    {
        return null;
    }

    var row = rowsCursor.Current;
    readOutput.DataItemId = row.Key;

    var cellsByColumn = new Dictionary<string, HBaseCell>(row.Cells.Count + 1);

    // Surface the row key as a regular field unless the configuration excludes it.
    if (!configuration.ExcludeId)
    {
        cellsByColumn[RowIdFieldName] = new HBaseCell
        {
            ColumnName = RowIdFieldName,
            Value = row.Key
        };
    }

    // When a column appears multiple times, the cell with the highest timestamp wins.
    foreach (var cell in row.Cells)
    {
        HBaseCell knownCell;
        var isNewest = !cellsByColumn.TryGetValue(cell.ColumnName, out knownCell) ||
                       cell.Timestamp > knownCell.Timestamp;
        if (isNewest)
        {
            cellsByColumn[cell.ColumnName] = cell;
        }
    }

    return new HBaseCellsDataItem(cellsByColumn);
}
/// <summary>
/// Verifies that a projection restricts read entities to the single string property.
/// </summary>
public async Task ReadEntitiesWithProjection_OnlyStringPropertyRead()
{
    const string StringPropertyName = "StringProperty";

    var configuration = Mocks
        .Of<IAzureTableSourceAdapterConfiguration>(c =>
            c.ConnectionString == Settings.AzureStorageConnectionString &&
            c.Table == tableName &&
            c.Projection == new[] { StringPropertyName } &&
            c.InternalFields == AzureTableInternalFields.None)
        .First();

    var readResults = new List<IDataItem>();
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance))
    {
        var readOutput = new ReadOutputByRef();
        for (var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             dataItem != null;
             dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            readResults.Add(dataItem);
            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }

    // Each expected entity is reduced to just the projected string property.
    var expected = sampleData
        .Select(i => new Dictionary<string, object> { { StringPropertyName, i[StringPropertyName] } })
        .ToArray();
    DataItemCollectionAssert.AreEquivalent(expected, readResults, TestResources.InvalidDocumentsRead);
}
/// <summary>
/// Verifies that common SQL column types (int, bit, nvarchar(max), float, datetime)
/// are read back with their original values.
/// </summary>
/// <param name="configuration">SQL data source adapter configuration under test.</param>
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration)
{
    var columnKeys = new[]
    {
        "IntColumn",
        "BitColumn",
        "NVarCharMaxColumn",
        "FloatColumn",
        "DateTimeColumn",
    };

    // Column name -> SQL column definition used to create the test table.
    var columnMappings = new Dictionary <string, string>
    {
        { columnKeys[0], "int primary key" },
        { columnKeys[1], "bit" },
        { columnKeys[2], "nvarchar(max)" },
        { columnKeys[3], "float" },
        { columnKeys[4], "datetime" },
    };

    var rows = new[]
    {
        new Dictionary <string, object> { { columnKeys[0], 1 }, { columnKeys[1], false }, { columnKeys[2], "String1" }, { columnKeys[3], 2.3 }, { columnKeys[4], GetSampleDateTime() } },
        new Dictionary <string, object> { { columnKeys[0], 2 }, { columnKeys[1], true }, { columnKeys[2], "String2" }, { columnKeys[3], 4.5 }, { columnKeys[4], GetSampleDateTime() } },
    };

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        try
        {
            CreateTable(connection, tableName, columnMappings);
            AddRows(connection, tableName, rows);

            using (var adapter = await new SqlDataSourceAdapterFactory().CreateAsync(configuration, DataTransferContextMock.Instance))
            {
                var readOutput = new ReadOutputByRef();
                for (var rowIndex = 0; rowIndex < rows.Length; ++rowIndex)
                {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();
                    VerifyDataItem(rows[rowIndex], dataItem);
                }
            }
        }
        finally
        {
            // Always drop the test table so reruns start from a clean slate.
            DropTable(connection, tableName);
        }
    }
}
/// <summary>
/// Pumps data items from the source to the sink until the source is drained,
/// the transfer is cancelled, or a fatal error occurs.
/// </summary>
/// <param name="source">Adapter to read data items from.</param>
/// <param name="sink">Adapter to write data items to.</param>
/// <param name="statistics">Collector for per-item errors during the transfer.</param>
/// <param name="cancellation">Cancellation token observed between reads and writes.</param>
private static async Task TransferData(IDataSourceAdapter source, IDataSinkAdapter sink, ITransferStatistics statistics, CancellationToken cancellation)
{
    // One slot per allowed parallel write; seeded with completed tasks so the
    // first MaxDegreeOfParallelism items can start writing immediately.
    var writeTasks = Enumerable
        .Range(0, sink.MaxDegreeOfParallelism)
        .Select(i => (Task)Task.FromResult <object>(null))
        .ToArray();

    var fatalExceptions = new List <Exception>();

    var readOutput = new ReadOutputByRef();
    IDataItem dataItem;
    while (!cancellation.IsCancellationRequested)
    {
        // Clear the previous item's id before each read so errors are attributed correctly.
        readOutput.Wipe();

        try
        {
            dataItem = await source.ReadNextAsync(readOutput, cancellation);
        }
        catch (NonFatalReadException nonFatalException)
        {
            // Per-item failure: record it against the item id and keep reading.
            statistics.AddError(readOutput.DataItemId, nonFatalException);
            continue;
        }
        catch (Exception exception)
        {
            // Any other read failure aborts the transfer loop.
            fatalExceptions.Add(exception);
            break;
        }

        if (dataItem == null || cancellation.IsCancellationRequested)
        {
            break;
        }

        // Wait for a free write slot, then reuse that slot for this item's write.
        var completed = await Task.WhenAny(writeTasks);
        writeTasks[Array.IndexOf(writeTasks, completed)] =
            TransferDataItem(sink, readOutput.DataItemId, dataItem, statistics, cancellation);
    }

    // Report completion to the sink
    try
    {
        await sink.CompleteAsync(cancellation);
    }
    catch (Exception exception)
    {
        fatalExceptions.Add(exception);
    }

    // Wait for all on-going writes to complete
    for (var index = 0; index < writeTasks.Length; ++index)
    {
        await writeTasks[index];
    }

    // Throw fatal exceptions, if any
    if (fatalExceptions.Any())
    {
        throw new AggregateException(fatalExceptions);
    }
}
/// <summary>
/// Reads one data item from the underlying source.
/// </summary>
/// <param name="readOutput">Object holding additional information about the data item (e.g. its identifier).</param>
/// <param name="cancellation">Cancellation token.</param>
/// <returns>Task producing the next data item; implementations in this file return null when the source is exhausted.</returns>
public abstract Task <IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation);
/// <summary>
/// Reads the next item by running the synchronous ReadNext on a pool thread.
/// </summary>
/// <param name="readOutput">Passed to ReadNext as the task state.</param>
/// <param name="cancellation">Cancellation token (not observed here).</param>
/// <returns>Task producing the next data item.</returns>
public Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    var readTask = Task.Factory.StartNew<IDataItem>(ReadNext, readOutput);
    return readTask;
}
/// <summary>
/// Verifies that SQL Server geography columns are read back as the expected
/// spatial values (point, line string, polygon).
/// </summary>
/// <param name="configuration">SQL data source adapter configuration under test.</param>
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration)
{
    var columnKeys = new[] { "IntColumn", "GeographyColumn" };

    // Column name -> SQL column definition used to create the test table.
    var columnMappings = new Dictionary <string, string>
    {
        { columnKeys[0], "int primary key" },
        { columnKeys[1], "geography" }
    };

    // Rows are inserted as WKT expressions evaluated by SQL Server.
    var rows = new[]
    {
        new Dictionary <string, object> { { columnKeys[0], 1 }, { columnKeys[1], "geography::STGeomFromText('POINT(-122.16 43.656)', 4326)" } },
        new Dictionary <string, object> { { columnKeys[0], 2 }, { columnKeys[1], "geography::STGeomFromText('LINESTRING(-122.360 47.656, -122.343 47.656 )', 4326)" } },
        new Dictionary <string, object> { { columnKeys[0], 3 }, { columnKeys[1], "geography::STGeomFromText('POLYGON((-122.358 47.653 , -122.348 47.649, -122.348 47.658, -122.358 47.658, -122.358 47.653))', 4326)" } },
    };

    // Expected read-back values. Note the coordinate order flip: WKT is written as
    // (longitude latitude) while GeographyPosition here takes (latitude, longitude).
    var expectedRows = new[]
    {
        new Dictionary <string, object> { { columnKeys[0], 1 }, { columnKeys[1], SampleData.Geospatial.AsPoint(new GeographyPosition(43.656, -122.16)) } },
        new Dictionary <string, object> { { columnKeys[0], 2 }, { columnKeys[1], SampleData.Geospatial.AsLineString(new[]
        {
            new GeographyPosition(47.656, -122.360),
            new GeographyPosition(47.656, -122.343)
        }) } },
        new Dictionary <string, object> { { columnKeys[0], 3 }, { columnKeys[1], SampleData.Geospatial.AsPolygon(new[]
        {
            new GeographyPosition(47.653, -122.358),
            new GeographyPosition(47.649, -122.348),
            new GeographyPosition(47.658, -122.348),
            new GeographyPosition(47.658, -122.358),
            new GeographyPosition(47.653, -122.358)
        }) } }
    };

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        try
        {
            CreateTable(connection, tableName, columnMappings);
            AddRows(connection, tableName, rows, true);

            using (var adapter = await new SqlDataSourceAdapterFactory()
                .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
            {
                var readOutput = new ReadOutputByRef();
                for (var rowIndex = 0; rowIndex < rows.Length; ++rowIndex)
                {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();
                    VerifyDataItem(expectedRows[rowIndex], dataItem);
                }
            }
        }
        finally
        {
            // Always drop the test table so reruns start from a clean slate.
            DropTable(connection, tableName);
        }
    }
}