public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads one item from the current DynamoDB result page, transparently paging
    // forward (and pre-fetching the next page) as the current page is exhausted.
    // Returns null once all pages have been consumed.
    //
    // readOutput receives a synthetic item id built from a running index, since
    // there is no single natural entity identifier available here.
    // cancellation is forwarded to the page-download task.

    // TODO: Is there a way to get some sort of an entity identifier?
    readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, ++globalEntityIndex);

    // First call: kick off the initial page download.
    if (pageDownloadTask == null)
    {
        MoveToNextPage(null, cancellation);
    }

    var currentPage = await pageDownloadTask;

    // Make sure current page has data to read: skip over exhausted/empty pages
    // while a valid continuation token says more data is available.
    // NOTE(review): assumes MoveToNextPage resets pageEntityIndex — confirm in its definition.
    while (pageEntityIndex >= currentPage.Items.Count && IsValidContinuation(currentPage.ContinuationToken))
    {
        MoveToNextPage(currentPage.ContinuationToken, cancellation);
        currentPage = await pageDownloadTask;
    }

    // Current page exhausted and no continuation remains: end of data.
    if (pageEntityIndex >= currentPage.Items.Count && !IsValidContinuation(currentPage.ContinuationToken))
    {
        return null;
    }

    var item = currentPage.Items[pageEntityIndex++];

    if (pageEntityIndex >= currentPage.Items.Count && IsValidContinuation(currentPage.ContinuationToken))
    {
        // Start downloading next page while current item is being processed
        MoveToNextPage(currentPage.ContinuationToken, cancellation);
    }

    return new DynamoDbDataItem(item);
}
protected async Task<List<IDataItem>> ReadDataAsync(IDataSourceAdapter sourceAdapter)
{
    // Drains the adapter until it signals end-of-data (null), collecting every
    // item successfully read and asserting each read reported an item id.
    var collected = new List<IDataItem>();
    var readOutput = new ReadOutputByRef();

    for (; ; )
    {
        IDataItem current;
        try
        {
            current = await sourceAdapter.ReadNextAsync(readOutput, CancellationToken.None);
        }
        catch (NonFatalReadException)
        {
            // Non-fatal read errors indicate a bad record; skip it and keep reading.
            continue;
        }

        if (current == null)
        {
            break;
        }

        collected.Add(current);

        // Every successful read must have reported an item identifier.
        Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
        readOutput.Wipe();
    }

    return collected;
}
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads the next non-empty line from the source stream and parses it as a
    // BSON/JSON document. Returns null at end of stream; parse failures are
    // surfaced as NonFatalReadException so the caller can skip the record.

    // Lazily open the source stream on first read.
    if (file == null)
    {
        file = await sourceStreamProvider.CreateReader(cancellation);
    }

    // Report the item id (source id + line counter) before reading, so a failed
    // read can still be correlated to a position.
    // NOTE(review): lineNumber advances once per call even when several blank
    // lines are skipped below, so it can drift from the physical line — confirm
    // whether that is acceptable for the id format.
    readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, sourceStreamProvider.Id, ++lineNumber);

    // Skip empty lines until data or end of stream.
    string jsonData = null;
    while (!file.EndOfStream && String.IsNullOrEmpty(jsonData = await file.ReadLineAsync()))
        ;

    if (file.EndOfStream && String.IsNullOrEmpty(jsonData))
        return null;

    try
    {
        return new BsonDocumentDataItem(BsonDocument.Parse(jsonData));
    }
    catch (Exception parseError)
    {
        // BUGFIX: wrap the actual parse exception as the inner exception.
        // Previously parseError.InnerException (typically null) was passed,
        // discarding the root cause and its stack trace.
        throw new NonFatalReadException(parseError.Message, parseError);
    }
}
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads one entity from the current Azure Table query segment, transparently
    // moving to the next segment (and pre-fetching it) as segments are exhausted.
    // Returns null once the last segment (no continuation token) is fully read.
    // readOutput receives the entity's RowKey as the item id.

    // First call: kick off the initial segment download.
    if (segmentDownloadTask == null)
    {
        MoveToNextSegment(null, cancellation);
    }

    var currentSegment = await segmentDownloadTask;

    // Make sure current segment has data to read: skip exhausted/empty segments
    // while a continuation token says more data exists.
    // NOTE(review): assumes MoveToNextSegment resets currentEntityIndex — confirm in its definition.
    while (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken != null)
    {
        MoveToNextSegment(currentSegment.ContinuationToken, cancellation);
        currentSegment = await segmentDownloadTask;
    }

    // Segment exhausted and no continuation: end of data.
    if (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken == null)
    {
        return null;
    }

    var entity = currentSegment.Results[currentEntityIndex++];
    readOutput.DataItemId = entity.RowKey;

    if (currentEntityIndex >= currentSegment.Results.Count && currentSegment.ContinuationToken != null)
    {
        // Start downloading next segment while current record is being processed
        MoveToNextSegment(currentSegment.ContinuationToken, cancellation);
    }

    return new DynamicTableEntityDataItem(AppendInternalProperties(entity));
}
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads one record from the CSV source. Returns null when the source has no
    // header row (i.e. no data). The finally block guarantees an item id (source
    // id + current reader row) is reported even when the read throws.
    try
    {
        // Lazily construct the CSV reader and consume the header row on first call.
        if (reader == null)
        {
            reader = new CsvReader(
                await sourceStreamProvider.CreateReader(cancellation),
                new CsvReaderConfiguration
                {
                    TrimQuoted = configuration.TrimQuoted,
                    IgnoreUnquotedNulls = configuration.NoUnquotedNulls
                });
            header = ReadHeaderRow();
        }

        // No header means there is nothing to map records against.
        if (header == null)
            return null;

        // ReadNext is synchronous; run it on the thread pool.
        return await Task.Factory.StartNew<IDataItem>(ReadNext);
    }
    finally
    {
        // Row 0 is reported if the reader was never created (stream open failed).
        readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, sourceStreamProvider.Id, reader == null ? 0 : reader.Row);
    }
}
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads the next top-level JSON object from a stream that may contain
    // multiple concatenated documents. Returns null at end of stream. The
    // finally block guarantees an item id (source id + line/position) is
    // reported even when the read throws.
    try
    {
        // Lazily open the stream and create a multi-document JSON reader on first call.
        if (file == null)
        {
            file = await sourceStreamProvider.CreateReader(cancellation);
            jsonReader = new JsonTextReader(file) { SupportMultipleContent = true };
        }

        return await Task.Factory.StartNew(() =>
        {
            // Advance until the start of the next object; stop at end of stream.
            while (jsonReader.Read() && jsonReader.TokenType != JsonToken.StartObject)
                ;
            if (jsonReader.TokenType != JsonToken.StartObject)
                return null;
            return serializer.Deserialize<IDataItem>(jsonReader);
        });
    }
    finally
    {
        // Zero line/position is reported if the reader was never created
        // (stream open failed before jsonReader was assigned).
        int lineNumber = 0, linePosition = 0;
        if (jsonReader != null)
        {
            lineNumber = jsonReader.LineNumber;
            linePosition = jsonReader.LinePosition;
        }
        readOutput.DataItemId = String.Format(CultureInfo.InvariantCulture, Resources.DataItemIdFormat, sourceStreamProvider.Id, lineNumber, linePosition);
    }
}
public async Task ExecuteAsync(IDataSourceAdapter source, IDataSinkAdapter sink, ITransferStatistics statistics, CancellationToken cancellation)
{
    // Transfer pump: reads items from the source one at a time and writes them
    // to the sink with up to sink.MaxDegreeOfParallelism writes in flight.
    // Non-fatal read errors are recorded in statistics and skipped; a fatal read
    // error or a sink completion error stops the transfer and is rethrown
    // (together with any others) as an AggregateException.
    Guard.NotNull("source", source);
    Guard.NotNull("sink", sink);
    Guard.NotNull("statistics", statistics);

    // Seed one already-completed placeholder task per write slot so the first
    // Task.WhenAny below returns immediately.
    var writeTasks = Enumerable
        .Range(0, sink.MaxDegreeOfParallelism)
        .Select(i => (Task)Task.FromResult<object>(null))
        .ToArray();

    var fatalExceptions = new List<Exception>();

    var readOutput = new ReadOutputByRef();
    IDataItem dataItem;
    while (!cancellation.IsCancellationRequested)
    {
        // Clear per-item state so a stale DataItemId is never reported.
        readOutput.Wipe();

        try
        {
            dataItem = await source.ReadNextAsync(readOutput, cancellation);
        }
        catch (NonFatalReadException nonFatalException)
        {
            // Bad record: log against the id the source reported, keep going.
            statistics.AddError(readOutput.DataItemId, nonFatalException);
            continue;
        }
        catch (Exception exception)
        {
            // Any other read failure is fatal: stop reading.
            fatalExceptions.Add(exception);
            break;
        }

        if (dataItem == null || cancellation.IsCancellationRequested)
            break;

        // Wait for any write slot to free up, then reuse that slot for this item.
        var completed = await Task.WhenAny(writeTasks);
        writeTasks[Array.IndexOf(writeTasks, completed)] = TransferDataItem(sink, readOutput.DataItemId, dataItem, statistics, cancellation);
    }

    // Report completion to the sink
    try
    {
        await sink.CompleteAsync(cancellation);
    }
    catch (Exception exception)
    {
        fatalExceptions.Add(exception);
    }

    // Wait for all on-going writes to complete
    // NOTE(review): if a write task faulted, this await rethrows it directly and
    // skips awaiting the remaining tasks — presumably TransferDataItem records
    // its own errors and never faults; confirm.
    for (var index = 0; index < writeTasks.Length; ++index)
        await writeTasks[index];

    // Throw fatal exceptions, if any
    if (fatalExceptions.Any())
        throw new AggregateException(fatalExceptions);
}
public Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Advance the in-memory enumerator; null signals end of data.
    IDataItem nextItem = null;
    if (dataItems.MoveNext())
    {
        nextItem = dataItems.Current;
    }

    // Notify the observer (if any) of every read, including the terminating null.
    if (dataReadCallback != null)
    {
        dataReadCallback(nextItem);
    }

    return Task.FromResult(nextItem);
}
public async Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Reads the next document from the RavenDB cursor. Returns null at end of
    // stream. The metadata id (when present) is reported as the item id and,
    // unless excluded by configuration, embedded into the emitted object.

    // Lazily open the documents cursor on first call.
    if (documentsCursor == null)
    {
        await InitializeDocumentsCursor(cancellation);
    }

    // End of stream.
    if (!await documentsCursor.MoveNextAsync())
        return null;

    // Use the metadata id field as the reported item id when available.
    RavenJToken idToken = null;
    if (documentsCursor.Current.Metadata != null && documentsCursor.Current.Metadata.TryGetValue(MetadataIdField, out idToken))
        readOutput.DataItemId = idToken.Value<string>();

    var jObject = documentsCursor.Current.Document as RavenJObject;
    // Non-JSON documents are surfaced as a non-fatal error so the caller can skip them.
    if (jObject == null)
        throw NonFatalReadException.Convert(Errors.NonJsonDocumentRead());

    // Optionally copy the document id into the emitted object itself.
    if (!configuration.ExcludeIdField && idToken != null)
        jObject.Add(DocumentIdField, idToken);

    return new RavenJObjectDataItem(jObject);
}
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration)
{
    // Round-trips a small table of common SQL types (int, bit, nvarchar(max),
    // float, datetime) through the SQL source adapter and verifies each row
    // comes back intact with a non-null item id.
    var keys = new[]
    {
        "IntColumn", "BitColumn", "NVarCharMaxColumn", "FloatColumn", "DateTimeColumn",
    };

    var schema = new Dictionary<string, string>
    {
        { keys[0], "int primary key" },
        { keys[1], "bit" },
        { keys[2], "nvarchar(max)" },
        { keys[3], "float" },
        { keys[4], "datetime" },
    };

    var sampleRows = new[]
    {
        new Dictionary<string, object>
        {
            { keys[0], 1 }, { keys[1], false }, { keys[2], "String1" }, { keys[3], 2.3 }, { keys[4], GetSampleDateTime() }
        },
        new Dictionary<string, object>
        {
            { keys[0], 2 }, { keys[1], true }, { keys[2], "String2" }, { keys[3], 4.5 }, { keys[4], GetSampleDateTime() }
        },
    };

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        try
        {
            CreateTable(connection, tableName, schema);
            AddRows(connection, tableName, sampleRows);

            using (var adapter = await new SqlDataSourceAdapterFactory()
                .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
            {
                var readOutput = new ReadOutputByRef();
                for (var i = 0; i < sampleRows.Length; ++i)
                {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);

                    // Each read must yield an item and report an item id.
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();

                    VerifyDataItem(sampleRows[i], dataItem);
                }
            }
        }
        finally
        {
            // Always clean up the test table, even when assertions fail.
            DropTable(connection, tableName);
        }
    }
}
private static async Task ReadAndVerifyFields(IAzureTableSourceAdapterConfiguration configuration, string[] expectedInternalProperties)
{
    // Reads every item the adapter produces and checks that each expected
    // internal property is present with a non-null value, and that every read
    // reported a non-null item id.
    using (var adapter = await new AzureTableSourceAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        var readOutput = new ReadOutputByRef();

        for (var item = await adapter.ReadNextAsync(readOutput, CancellationToken.None);
             item != null;
             item = await adapter.ReadNextAsync(readOutput, CancellationToken.None))
        {
            var fields = item.GetFieldNames().ToArray();

            foreach (var propertyName in expectedInternalProperties)
            {
                CollectionAssert.Contains(fields, propertyName, TestResources.MissingDataItemFieldFormat, propertyName);
                Assert.IsNotNull(item.GetValue(propertyName), TestResources.EmptyDataItemFieldValueFormat, propertyName);
            }

            Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
            readOutput.Wipe();
        }
    }
}
public Task<IDataItem> ReadNextAsync(ReadOutputByRef readOutput, CancellationToken cancellation)
{
    // Offload the synchronous ReadNext to the thread pool, passing readOutput
    // through as the task state so the worker can report the item identifier.
    Task<IDataItem> readTask = Task.Factory.StartNew<IDataItem>(ReadNext, readOutput);
    return readTask;
}
private async Task VerifyRows(IDataSourceAdapter adapter, IReadOnlyDictionary<string, object>[] expectedRows)
{
    // Reads one item per expected row, in order, and verifies each matches.
    var readOutput = new ReadOutputByRef();

    foreach (var expectedRow in expectedRows)
    {
        var actualItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);

        Assert.IsNotNull(actualItem, TestResources.MoreDataItemsExpected);
        VerifyDataItem(expectedRow, actualItem);
    }
}
private async Task TestSqlDataSourceAsync(ISqlDataSourceAdapterConfiguration configuration)
{
    // Verifies that SQL Server geography columns (point, line string, polygon)
    // are read back by the SQL source adapter as the corresponding geospatial
    // values.
    var columnKeys = new[] { "IntColumn", "GeographyColumn" };

    var columnMappings = new Dictionary<string, string>
    {
        { columnKeys[0], "int primary key" },
        { columnKeys[1], "geography" }
    };

    // Rows are inserted via raw geography::STGeomFromText(...) SQL expressions.
    // NOTE(review): presumably the trailing 'true' on AddRows below means
    // "values are SQL expressions" — confirm against AddRows.
    var rows = new[]
    {
        new Dictionary<string, object> { { columnKeys[0], 1 }, { columnKeys[1], "geography::STGeomFromText('POINT(-122.16 43.656)', 4326)" } },
        new Dictionary<string, object> { { columnKeys[0], 2 }, { columnKeys[1], "geography::STGeomFromText('LINESTRING(-122.360 47.656, -122.343 47.656 )', 4326)" } },
        new Dictionary<string, object> { { columnKeys[0], 3 }, { columnKeys[1], "geography::STGeomFromText('POLYGON((-122.358 47.653 , -122.348 47.649, -122.348 47.658, -122.358 47.658, -122.358 47.653))', 4326)" } },
    };

    // Expected read-back values. Note the coordinate order flips: the WKT
    // literals above are written (longitude latitude), while GeographyPosition
    // is constructed with the latitude first.
    var expectedRows = new[]
    {
        new Dictionary<string, object> { { columnKeys[0], 1 }, { columnKeys[1], SampleData.Geospatial.AsPoint(new GeographyPosition(43.656, -122.16)) } },
        new Dictionary<string, object> { { columnKeys[0], 2 }, { columnKeys[1], SampleData.Geospatial.AsLineString(new[] { new GeographyPosition(47.656, -122.360), new GeographyPosition(47.656, -122.343) }) } },
        new Dictionary<string, object> { { columnKeys[0], 3 }, { columnKeys[1], SampleData.Geospatial.AsPolygon(new[] { new GeographyPosition(47.653, -122.358), new GeographyPosition(47.649, -122.348), new GeographyPosition(47.658, -122.348), new GeographyPosition(47.658, -122.358), new GeographyPosition(47.653, -122.358) }) } }
    };

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        try
        {
            CreateTable(connection, tableName, columnMappings);
            AddRows(connection, tableName, rows, true);

            using (var adapter = await new SqlDataSourceAdapterFactory()
                .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
            {
                var readOutput = new ReadOutputByRef();
                for (var rowIndex = 0; rowIndex < rows.Length; ++rowIndex)
                {
                    var dataItem = await adapter.ReadNextAsync(readOutput, CancellationToken.None);

                    // Each read must yield an item and report an item id.
                    Assert.IsNotNull(dataItem, TestResources.MoreDataItemsExpected);
                    Assert.IsNotNull(readOutput.DataItemId, CommonTestResources.MissingDataItemId);
                    readOutput.Wipe();

                    VerifyDataItem(expectedRows[rowIndex], dataItem);
                }
            }
        }
        finally
        {
            // Always clean up the test table, even when assertions fail.
            DropTable(connection, tableName);
        }
    }
}