public async Task BulkWriteSampleData_HashPartitioningAcrossTwoCollections_AllDataStored()
{
    const int NumberOfItems = 100;

    var configuration = Mocks
        .Of<IDocumentDbBulkSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { "Data0", "Data1" } &&
            m.PartitionKey == "StringProperty" &&
            m.BatchSize == 10 &&
            m.MaxScriptSize == 1024)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbBulkSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    var firstCollection = DocumentDbHelper.ReadDocuments(ConnectionString, "Data0");
    Assert.IsTrue(firstCollection.Count() > 0, TestResources.DataIsNotPartitioned);

    var secondCollection = DocumentDbHelper.ReadDocuments(ConnectionString, "Data1");
    Assert.IsTrue(secondCollection.Count() > 0, TestResources.DataIsNotPartitioned);

    VerifyData(sampleData, firstCollection.Union(secondCollection));
}
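// A minimal sketch (assumption, not the bulk adapter's actual implementation)
// of how client-side hash partitioning can route each document to one of the
// configured collections: hash the configured partition key's value and take
// it modulo the collection count. A deterministic, non-negative hash is what
// makes the Data0/Data1 split in the test above stable for a given key.
private static string ResolveTargetCollectionSketch(object partitionKeyValue, string[] collections)
{
    // Mask the sign bit so the modulo result is always a valid array index.
    var hash = (partitionKeyValue?.GetHashCode() ?? 0) & int.MaxValue;
    return collections[hash % collections.Length];
}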
public async Task WriteSampleData_HashPartitioning_AllDataStored()
{
    const string CollectionName = "ElasticData";
    const int NumberOfItems = 100;

    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            // 20K RU/s of provisioned throughput should be equivalent to two physical partitions.
            m.CollectionThroughput == 20000 &&
            m.PartitionKey == "/StringProperty" &&
            m.ParallelRequests == 1 &&
            m.Retries == 100)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(sampleData, DocumentDbHelper.ReadDocuments(ConnectionString, CollectionName));
}
public async Task WriteSampleData_RangeIndexOnIntegerProperty_IntegerRangeFilterCanBeUsed()
{
    const string CollectionName = "Data";
    const int NumberOfItems = 42;

    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            m.IndexingPolicyFile == @"IndexingPolicies\IntegerPropertyRangeIndex.json" &&
            m.ParallelRequests == 1 &&
            m.Retries == 100)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(
        sampleData.Where(i => (int)i.GetValue("IntegerProperty") < 20).ToArray(),
        DocumentDbHelper.ReadDocuments(
            ConnectionString, CollectionName, "SELECT * FROM c WHERE c.IntegerProperty < 20"));
}
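// The IndexingPolicies\IntegerPropertyRangeIndex.json file referenced above is
// not shown in this file. A plausible shape (assumption), following the classic
// DocumentDB indexing-policy schema, enables a range index on numeric paths so
// the "c.IntegerProperty < 20" filter can be served by the index:
//
//   {
//     "indexingMode": "consistent",
//     "includedPaths": [
//       {
//         "path": "/*",
//         "indexes": [
//           { "kind": "Range", "dataType": "Number", "precision": -1 }
//         ]
//       }
//     ]
//   }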
public async Task WriteSampleData_RandomPartitioningAcrossTwoCollections_AllDataStored()
{
    const int NumberOfItems = 100;

    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { "Data[0-1]" } &&
            m.ParallelRequests == 1 &&
            m.Retries == 100)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    var firstCollection = DocumentDbHelper.ReadDocuments(ConnectionString, "Data0");
    Assert.IsTrue(firstCollection.Count() > 0, TestResources.DataIsNotPartitioned);

    var secondCollection = DocumentDbHelper.ReadDocuments(ConnectionString, "Data1");
    Assert.IsTrue(secondCollection.Count() > 0, TestResources.DataIsNotPartitioned);

    VerifyData(sampleData, firstCollection.Union(secondCollection));
}
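// The "Data[0-1]" collection name above is a range pattern that the sink
// expands into Data0 and Data1 before writing. A minimal expansion could look
// like this (hypothetical helper, an assumption rather than the adapter's
// actual code):
private static IEnumerable<string> ExpandCollectionRangeSketch(string pattern)
{
    var match = System.Text.RegularExpressions.Regex.Match(pattern, @"^(.*)\[(\d+)-(\d+)\]$");
    if (!match.Success)
    {
        // No range syntax: treat the pattern as a plain collection name.
        return new[] { pattern };
    }

    var prefix = match.Groups[1].Value;
    var from = int.Parse(match.Groups[2].Value);
    var to = int.Parse(match.Groups[3].Value);
    return Enumerable.Range(from, to - from + 1).Select(i => prefix + i);
}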
public async Task BulkWriteGeospatialData_AllDataStored()
{
    var configuration = Mocks
        .Of<IDocumentDbBulkSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { "Data" } &&
            m.BatchSize == 10 &&
            m.MaxScriptSize == 1024)
        .First();

    var sampleData = GetSampleGeospatialDataItems();

    using (var adapter = await new DocumentDbBulkSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(GetExpectedGeospatialDataItems(), DocumentDbHelper.ReadDocuments(ConnectionString, "Data"));
}
public async Task WriteSampleData_CreateDuplicates_AllDataStored()
{
    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { "Data" } &&
            m.ParallelRequests == 1 &&
            m.Retries == 100 &&
            m.UpdateExisting == true)
        .First();

    var sampleData = GetSampleDuplicateDataItems();

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(GetExpectedDuplicateDataItems(), DocumentDbHelper.ReadDocuments(ConnectionString, "Data"));
}
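// The duplicate-data helpers used above come from elsewhere in the test class.
// A plausible shape (assumption; hypothetical names, not the real helper
// bodies): the sample set writes the same id twice with different payloads,
// and with UpdateExisting == true only the last write per id should survive.
private static IEnumerable<Dictionary<string, object>> GetSampleDuplicateItemsSketch()
{
    yield return new Dictionary<string, object> { ["id"] = "1", ["Value"] = "original" };
    yield return new Dictionary<string, object> { ["id"] = "1", ["Value"] = "updated" };
}

// The matching expectation: one document per id, carrying the last value written.
private static IEnumerable<Dictionary<string, object>> GetExpectedDuplicateItemsSketch()
{
    yield return new Dictionary<string, object> { ["id"] = "1", ["Value"] = "updated" };
}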
public async Task WriteGeospatialData_AllDataStored()
{
    const string CollectionName = "Data";

    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            m.ParallelRequests == 1 &&
            m.Retries == 100)
        .First();

    var sampleData = GetSampleGeospatialDataItems();

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(GetExpectedGeospatialDataItems(), DocumentDbHelper.ReadDocuments(ConnectionString, CollectionName));
}
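// The geospatial helpers above are likewise defined elsewhere. One plausible
// shape (assumption): the sample items carry geometry as WKT strings and the
// expected items carry the GeoJSON-style objects stored by DocumentDB, e.g.
//
//   sample:   { "id": "1", "Location": "POINT (30.5 10.2)" }
//   expected: { "id": "1", "Location": { "type": "Point", "coordinates": [30.5, 10.2] } }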
public async Task BulkWriteSampleData_AllDataStored()
{
    const string CollectionName = "Data";
    const int NumberOfItems = 42;

    var configuration = Mocks
        .Of<IDocumentDbBulkSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            m.BatchSize == 10 &&
            m.MaxScriptSize == 1024)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbBulkSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(sampleData, DocumentDbHelper.ReadDocuments(ConnectionString, CollectionName));
}
public async Task WriteSampleData_AllDataStored()
{
    const string CollectionName = "Data";
    const int NumberOfItems = 42;

    var configuration = Mocks
        .Of<IDocumentDbParallelSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            m.ParallelRequests == 1 &&
            m.Retries == 100)
        .First();

    var sampleData = SampleData.GetSimpleDataItems(NumberOfItems);

    using (var adapter = await new DocumentDbParallelSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(sampleData, DocumentDbHelper.ReadDocuments(ConnectionString, CollectionName));
}
public async Task BulkWriteSampleData_UpsertDuplicates_AllDataStored()
{
    const string CollectionName = "DuplicatesData";

    var configuration = Mocks
        .Of<IDocumentDbBulkSinkAdapterConfiguration>(m =>
            m.ConnectionString == ConnectionString &&
            m.Collection == new[] { CollectionName } &&
            m.BatchSize == 10 &&
            m.MaxScriptSize == 1024 &&
            m.UpdateExisting == true)
        .First();

    var sampleData = GetSampleDuplicateDataItems();

    using (var adapter = await new DocumentDbBulkSinkAdapterFactory()
        .CreateAsync(configuration, DataTransferContextMock.Instance, CancellationToken.None))
    {
        await WriteDataAsync(adapter, sampleData);
    }

    VerifyData(GetExpectedDuplicateDataItems(), DocumentDbHelper.ReadDocuments(ConnectionString, CollectionName));
}
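// The WriteDataAsync and VerifyData helpers shared by all of the tests above
// live in the test base class and are not shown here. Minimal sketches of what
// they plausibly do (assumptions inferred from the call sites, not the actual
// implementations):
private static async Task WriteDataAsyncSketch(IDataSinkAdapter adapter, IEnumerable<IDataItem> items)
{
    // Push every sample item into the sink, then signal completion so the
    // adapter can flush any buffered writes.
    foreach (var item in items)
        await adapter.WriteAsync(item, CancellationToken.None);
    await adapter.CompleteAsync(CancellationToken.None);
}

private static void VerifyDataSketch<T>(IEnumerable<IDataItem> expected, IEnumerable<T> actual)
{
    // The real helper presumably compares items field by field; this sketch
    // only asserts that the stored document count matches the expectation.
    Assert.AreEqual(expected.Count(), actual.Count());
}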