private bool TryGetDocumentsFromQueue(Etag nextDocEtag, ref List<JsonDocument> items)
{
	// Drains consecutive documents from the prefetching queue into 'items',
	// starting at 'nextDocEtag', until the batch is full or the queue's head
	// is past the etag we expect next. Returns true if anything was taken.
	var expectedEtag = HandleEtagGapsIfNeeded(nextDocEtag);
	var foundAny = false;

	JsonDocument head;
	while (items.Count < autoTuner.NumberOfItemsToIndexInSingleBatch &&
	       prefetchingQueue.TryPeek(out head) &&
	       expectedEtag.CompareTo(head.Etag) >= 0)
	{
		// safe to do peek then dequeue because we are the only one doing the dequeues
		// and here we are single threaded
		prefetchingQueue.TryDequeue(out head);

		if (head.Etag != expectedEtag)
			continue; // entry below the expected etag - skip it

		items.Add(head);
		foundAny = true;

		expectedEtag = EtagUtil.Increment(expectedEtag, 1);
		expectedEtag = HandleEtagGapsIfNeeded(expectedEtag);
	}

	return foundAny;
}
public void GetLastEtag(string requestedStorage)
{
	using (var storage = NewTransactionalStorage(requestedStorage))
	{
		Etag lastEtag = null;

		// Pristine storage reports the empty etag.
		storage.Batch(accessor => lastEtag = accessor.GetLastEtag());
		Assert.Equal(Etag.Empty, lastEtag);

		// Every put bumps the last etag by exactly one, regardless of
		// file name ordering or whether a size is supplied.
		storage.Batch(accessor => accessor.PutFile("/file1", null, new RavenJObject()));
		storage.Batch(accessor => lastEtag = accessor.GetLastEtag());
		Assert.Equal(EtagUtil.Increment(Etag.Empty, 1), lastEtag);

		storage.Batch(accessor => accessor.PutFile("/file3", null, new RavenJObject()));
		storage.Batch(accessor => lastEtag = accessor.GetLastEtag());
		Assert.Equal(EtagUtil.Increment(Etag.Empty, 2), lastEtag);

		storage.Batch(accessor => accessor.PutFile("/file2", 10, new RavenJObject()));
		storage.Batch(accessor => lastEtag = accessor.GetLastEtag());
		Assert.Equal(EtagUtil.Increment(Etag.Empty, 3), lastEtag);

		storage.Batch(accessor => accessor.PutFile("/file9", 10, new RavenJObject()));
		storage.Batch(accessor => lastEtag = accessor.GetLastEtag());
		Assert.Equal(EtagUtil.Increment(Etag.Empty, 4), lastEtag);
	}
}
public void ShouldCleanUpDocsThatResideInQueueTooLong()
{
	// Enqueue five consecutive docs with a commit timestamp 15 minutes in the
	// past, then verify the cleanup pass evicts all of them from memory.
	var lastEtag = Etag.Empty;
	SystemTime.UtcDateTime = () => DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(15));

	for (var i = 0; i < 5; i++)
	{
		lastEtag = EtagUtil.Increment(lastEtag, 1);
		prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[]
		{
			new JsonDocument { Etag = lastEtag, Key = i.ToString(CultureInfo.InvariantCulture) },
		});
	}

	SystemTime.UtcDateTime = null; // restore the real clock

	prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty);
	prefetchingBehavior.CleanupDocuments(Etag.Empty);

	Assert.Equal(0, prefetchingBehavior.InMemoryIndexingQueueSize);
}
// Exports every attachment newer than 'lastEtag' to the JSON writer, paging by
// SmugglerOptions.BatchSize. An empty page means either (a) we hit an etag gap
// before the database's last attachment etag - skip ahead by one batch and retry -
// or (b) we are done. Returns the etag of the last attachment written, which the
// caller uses as the resume point for incremental exports.
protected override async Task <Etag> ExportAttachments(JsonTextWriter jsonWriter, Etag lastEtag) { var totalCount = 0; while (true) { var array = GetAttachments(totalCount, lastEtag); if (array.Length == 0) { var databaseStatistics = await GetStats(); if (lastEtag == null) { lastEtag = Etag.Empty; } if (lastEtag.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, SmugglerOptions.BatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += array.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", array.Length, totalCount); foreach (var item in array) { item.WriteTo(jsonWriter); } lastEtag = Etag.Parse(array.Last().Value <string>("Etag")); } }
public void DocumentStorage_GetBestNextDocumentEtag_NonExistingDocumentEtag_LargerThan_MaxDocumentEtag(string requestedStorage)
{
	using (var storage = NewTransactionalStorage(requestedStorage))
	{
		// Seed three documents so the storage has a well-defined max document etag.
		Etag secondEtag = null;
		Etag thirdEtag = null;
		storage.Batch(mutator => mutator.Documents.AddDocument("Foo", Etag.Empty, RavenJObject.FromObject(new { Name = "Bar" }), new RavenJObject()));
		storage.Batch(mutator => secondEtag = mutator.Documents.AddDocument("Foo2", Etag.Empty, RavenJObject.FromObject(new { Name = "Bar" }), new RavenJObject()).Etag);
		storage.Batch(mutator => thirdEtag = mutator.Documents.AddDocument("Foo3", Etag.Empty, RavenJObject.FromObject(new { Name = "Bar" }), new RavenJObject()).Etag);

		// Query with an etag that belongs to no stored document.
		var nonExistingDocumentEtag = new Etag(UuidType.Documents, 0, 0);
		Etag resultEtag = null;
		storage.Batch(viewer => resultEtag = viewer.Documents.GetBestNextDocumentEtag(nonExistingDocumentEtag));

		// Expected fallback: simply the requested etag incremented by one.
		var expectedEtag = EtagUtil.Increment(nonExistingDocumentEtag, 1);
		Assert.Equal(expectedEtag, resultEtag);
	}
}
public void CanProperlyHandleNonConsecutiveUpdates()
{
	var lastEtag = Etag.Empty;

	// First consecutive run: etags 1..5.
	for (var i = 0; i < 5; i++)
	{
		lastEtag = EtagUtil.Increment(lastEtag, 1);
		prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[]
		{
			new JsonDocument { Etag = lastEtag, Key = i.ToString(CultureInfo.InvariantCulture) },
		});
	}

	// Leave a gap of ten etags, then a second run: etags 16..20.
	lastEtag = EtagUtil.Increment(lastEtag, 10);
	for (var i = 0; i < 5; i++)
	{
		lastEtag = EtagUtil.Increment(lastEtag, 1);
		prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[]
		{
			new JsonDocument { Etag = lastEtag, Key = i.ToString(CultureInfo.InvariantCulture) },
		});
	}

	// Each batch request returns only the consecutive run it starts in.
	Assert.Equal(5, prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty).Count);
	Assert.Equal(5, prefetchingBehavior.GetDocumentsBatchFrom(EtagUtil.Increment(Etag.Empty, 15)).Count);
}
// Decides the etag from which synchronization should (re)start:
// - etag == null and lastProcessedEtag != null: nothing new arrived; under the lock,
//   persist lastProcessedEtag as the synchronization point (unless a concurrent
//   update set currentEtag, or it already equals synchronizationEtag) and return it.
// - etag == null and lastProcessedEtag == null: no baseline at all - start from empty.
// - lastProcessedEtag == null: start from empty.
// - etag behind lastProcessedEtag: back up to one before 'etag' so the item at
//   'etag' itself is reprocessed (Increment by -1).
// - otherwise: keep lastProcessedEtag.
// NOTE(review): the persist happens only on the etag==null path; presumably callers
// poll with null to mean "checkpoint now" - confirm against call sites.
public Etag CalculateSynchronizationEtag(Etag etag, Etag lastProcessedEtag) { if (etag == null) { if (lastProcessedEtag != null) { lock (locker) { if (currentEtag == null && lastProcessedEtag.CompareTo(synchronizationEtag) != 0) { synchronizationEtag = lastProcessedEtag; PersistSynchronizationState(); } } return(lastProcessedEtag); } return(Etag.Empty); } if (lastProcessedEtag == null) { return(Etag.Empty); } if (etag.CompareTo(lastProcessedEtag) < 0) { return(EtagUtil.Increment(etag, -1)); } return(lastProcessedEtag); }
// Exports attachments in batches up to SmugglerOptions.Limit, stopping early once an
// attachment's etag exceeds 'maxEtag' (when supplied). Empty pages trigger the same
// gap-skipping as the other ExportAttachments overloads. On any exception the progress
// so far is reported and a SmugglerExportException carrying the last good etag is thrown
// so the caller can resume.
// NOTE(review): 'tempLastEtag' is a string compared against an Etag via CompareTo, and
// later assigned to 'lastEtag' (an Etag) - this relies on implicit string/Etag
// conversions not visible in this file; confirm Etag defines them, otherwise the
// comparison may misbehave or throw.
protected override async Task <Etag> ExportAttachments(RavenConnectionStringOptions src, JsonTextWriter jsonWriter, Etag lastEtag, Etag maxEtag) { var totalCount = 0; var maxEtagReached = false; while (true) { try { if (SmugglerOptions.Limit - totalCount <= 0 || maxEtagReached) { ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } var maxRecords = Math.Min(SmugglerOptions.Limit - totalCount, SmugglerOptions.BatchSize); var array = GetAttachments(totalCount, lastEtag, maxRecords); if (array.Length == 0) { var databaseStatistics = await GetStats(); if (lastEtag == null) { lastEtag = Etag.Empty; } if (lastEtag.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, maxRecords); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += array.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", array.Length, totalCount); foreach (var item in array) { var tempLastEtag = item.Value <string>("Etag"); if (maxEtag != null && tempLastEtag.CompareTo(maxEtag) > 0) { maxEtagReached = true; break; } item.WriteTo(jsonWriter); lastEtag = tempLastEtag; } } catch (Exception e) { ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); ShowProgress("Done with reading attachments, total: {0}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } }
private Etag SkipUpdatedEtags(Etag nextEtag)
{
	// Advance past every etag that belongs to a document with a pending update.
	var candidate = nextEtag;
	while (updatedDocuments.Any(entry => entry.Value.Contains(candidate)))
	{
		candidate = EtagUtil.Increment(candidate, 1);
	}
	return candidate;
}
public void Calculation3()
{
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var indexerSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	// With no previously processed etag, the synchronization etag falls back to empty.
	var incomingEtag = EtagUtil.Increment(Etag.Empty, 1);
	Assert.Equal(Etag.Empty, indexerSynchronizer.CalculateSynchronizationEtag(incomingEtag, null));
}
private Etag SkipDeletedEtags(Etag nextEtag)
{
	// Advance past every etag that belongs to a document queued for removal.
	var candidate = nextEtag;
	while (documentsToRemove.Any(entry => entry.Value.Contains(candidate)))
	{
		candidate = EtagUtil.Increment(candidate, 1);
	}
	return candidate;
}
// Exports attachments over HTTP: pages /static/ attachment infos by BatchSize, then
// downloads each attachment body individually and writes { Data, Metadata, Key }
// objects to the JSON writer. Empty pages trigger the usual etag gap-skipping.
// maxEtag is explicitly unsupported in this implementation.
// NOTE(review): 'await request.ContinueWith(...)' to capture the result is an awkward
// pattern (a plain 'attachmentInfo = (RavenJArray)await ...' would do) but is behavior-
// equivalent here; left untouched.
protected override async Task <Etag> ExportAttachments(JsonTextWriter jsonWriter, Etag lastEtag, Etag maxEtag) { if (maxEtag != null) { throw new ArgumentException("We don't support maxEtag in SmugglerApi", "maxEtag"); } int totalCount = 0; while (true) { RavenJArray attachmentInfo = null; await commands.CreateRequest("/static/?pageSize=" + SmugglerOptions.BatchSize + "&etag=" + lastEtag, "GET") .ReadResponseJsonAsync() .ContinueWith(task => attachmentInfo = (RavenJArray)task.Result); if (attachmentInfo.Length == 0) { var databaseStatistics = await GetStats(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, SmugglerOptions.BatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachmentInfo.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachmentInfo.Length, totalCount); foreach (var item in attachmentInfo) { ShowProgress("Downloading attachment: {0}", item.Value <string>("Key")); byte[] attachmentData = null; await commands.CreateRequest("/static/" + item.Value <string>("Key"), "GET") .ReadResponseBytesAsync() .ContinueWith(task => attachmentData = task.Result); new RavenJObject { { "Data", attachmentData }, { "Metadata", item.Value <RavenJObject>("Metadata") }, { "Key", item.Value <string>("Key") } } .WriteTo(jsonWriter); } lastEtag = Etag.Parse(attachmentInfo.Last().Value <string>("Etag")); } }
public void Calculation5()
{
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var indexerSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	var lastProcessedEtag = EtagUtil.Increment(Etag.Empty, 1);
	var incomingEtag = EtagUtil.Increment(Etag.Empty, 2);

	// When the incoming etag is ahead of the last processed one,
	// the last processed etag wins.
	Assert.Equal(lastProcessedEtag, indexerSynchronizer.CalculateSynchronizationEtag(incomingEtag, lastProcessedEtag));
}
public void CalculationShouldPersist1()
{
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var indexerSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	var lastProcessedEtag = EtagUtil.Increment(Etag.Empty, 1);

	// A null incoming etag returns the last processed etag and persists the
	// synchronization state, hence the second recorded storage call.
	Assert.Equal(lastProcessedEtag, indexerSynchronizer.CalculateSynchronizationEtag(null, lastProcessedEtag));
	Assert.Equal(2, numberOfCalls);
}
private Etag GetNextDocEtag(Etag etag)
{
	// no need to go to disk to find the next etag if we already have it in memory
	var inMemoryCandidate = EtagUtil.Increment(etag, 1);
	if (prefetchingQueue.NextDocumentETag() == inMemoryCandidate)
		return inMemoryCandidate;

	return GetNextDocumentEtagFromDisk(etag);
}
// Scenario: two commit notifications arrive (5 docs each) separated by an etag gap of
// MaxNumberOfItemsToProcessInSingleBatch, all timestamped 15 minutes in the past. After
// fetching a batch and cleaning up past its last etag, the prefetcher should flip
// DisableCollectingDocumentsAfterCommit on, so that five further commit notifications
// are ignored and the in-memory queue stays empty.
// NOTE: statement order matters - SystemTime.UtcDateTime must be restored before
// GetDocumentsBatchFrom/CleanupDocuments run against the real clock.
public void ShouldDisableCollectingDocsAfterCommit() { Etag last = Etag.Empty; SystemTime.UtcDateTime = () => DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(15)); prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(Enumerable.Range(0, 5).Select(x => { last = EtagUtil.Increment(last, 1); return(new JsonDocument { Etag = last, Key = x.ToString(CultureInfo.InvariantCulture) }); }).ToArray()); last = EtagUtil.Increment(last, store.Configuration.MaxNumberOfItemsToProcessInSingleBatch); prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(Enumerable.Range(0, 5).Select(x => { last = EtagUtil.Increment(last, 1); return(new JsonDocument { Etag = last, Key = x.ToString(CultureInfo.InvariantCulture) }); }).ToArray()); SystemTime.UtcDateTime = null; var documentsBatchFrom = prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty); prefetchingBehavior.CleanupDocuments(documentsBatchFrom.Last().Etag); Assert.True(prefetchingBehavior.DisableCollectingDocumentsAfterCommit); for (int i = 0; i < 5; i++) { last = EtagUtil.Increment(last, 1); prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[] { new JsonDocument { Etag = last, Key = i.ToString(CultureInfo.InvariantCulture) }, }); } Assert.Equal(0, prefetchingBehavior.InMemoryIndexingQueueSize); }
public void Can_increment_last_etag()
{
	var client = NewAsyncClient(1);
	var sourceServerId = Guid.NewGuid();
	var expectedEtag = EtagUtil.Increment(Etag.Empty, 5);

	// Bump the last-synchronized etag for the source server, then read it back.
	client.Synchronization.IncrementLastETagAsync(sourceServerId, "http://localhost:12345", expectedEtag).Wait();
	var lastSyncInfo = client.Synchronization.GetLastSynchronizationFromAsync(sourceServerId).Result;

	Assert.Equal(expectedEtag, lastSyncInfo.LastSourceFileEtag);
}
public void ConcurrentJsonDocumentSortedListShouldSortByEtag()
{
	var sortedList = new ConcurrentJsonDocumentSortedList();

	// Four documents with strictly increasing etags (1..4).
	var docs = new JsonDocument[4];
	for (var i = 0; i < docs.Length; i++)
	{
		docs[i] = new JsonDocument { Etag = EtagUtil.Increment(Etag.Empty, i + 1) };
	}

	// Insert them out of order.
	using (sortedList.EnterWriteLock())
	{
		sortedList.Add(docs[3]);
		sortedList.Add(docs[1]);
		sortedList.Add(docs[0]);
		sortedList.Add(docs[2]);
	}

	// Dequeue must yield them sorted by etag, lowest first.
	foreach (var expected in docs)
	{
		JsonDocument dequeued;
		Assert.True(sortedList.TryDequeue(out dequeued));
		Assert.Equal(expected.Etag, dequeued.Etag);
	}
}
// Stores the index-replace marker document for a side-by-side index under the
// Constants.IndexReplacePrefix key, holding the database document lock for the
// whole operation.
// NOTE(review): the guard checks MinimumEtagBeforeReplace != null and then
// OVERWRITES it with LastDocEtag+1, discarding the caller-supplied value; the
// inline TODO suggests the original author was unsure too - confirm whether the
// intent was '== null' (fill in a default) before changing anything.
private void PutSideBySideIndexDocument(SideBySideReplicationInfo sideBySideReplicationInfo) { using (Database.DocumentLock.Lock()) { var id = Constants.IndexReplacePrefix + sideBySideReplicationInfo.SideBySideIndex.Name; var indexReplaceDocument = sideBySideReplicationInfo.IndexReplaceDocument; if (indexReplaceDocument.MinimumEtagBeforeReplace != null) //TODO : verify that this is OK -> not sure { indexReplaceDocument.MinimumEtagBeforeReplace = EtagUtil.Increment(Database.Statistics.LastDocEtag, 1); } Database.TransactionalStorage.Batch(accessor => accessor.Documents.AddDocument(id, null, RavenJObject.FromObject(indexReplaceDocument), new RavenJObject())); } }
public void SynchronizerShouldReturnNullIfNoNewEtagsArrivedFromLastGet()
{
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var indexerSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	var updatedEtag = EtagUtil.Increment(Etag.Empty, 1);
	indexerSynchronizer.UpdateSynchronizationState(updatedEtag);

	// The first read consumes the pending etag ...
	var firstRead = indexerSynchronizer.GetSynchronizationEtag();
	Assert.Equal(updatedEtag, firstRead);

	// ... so a second read with nothing new in between yields null.
	Assert.Null(indexerSynchronizer.GetSynchronizationEtag());
}
public void CalculationShouldNotPersist()
{
	// Calculating with an etag the synchronizer already knows about must not
	// trigger an extra persist call to storage.
	// (The original declared an unused 'lowerEtag' local; removed.)
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var indexerSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	var higherEtag = EtagUtil.Increment(Etag.Empty, 2);

	indexerSynchronizer.UpdateSynchronizationState(higherEtag);
	Assert.Equal(higherEtag, indexerSynchronizer.GetSynchronizationEtag());

	indexerSynchronizer.CalculateSynchronizationEtag(null, higherEtag);
	Assert.Equal(3, numberOfCalls);
}
// Verifies that StreamDocs(fromEtag:) deliberately bypasses the replication failover
// machinery: streaming works against each store while both are up, but once the first
// database is stopped, streaming from store1 fails with an HttpRequestException instead
// of transparently falling over to the secondary (unlike normal reads under
// FailoverBehavior.AllowReadsFromSecondaries).
// The start etags are (first document's etag - 1) so the stream is guaranteed non-empty.
public async Task StreamDocsFromEtagShouldNotHandleFailover() { using (var store1 = CreateStore(configureStore: store => store.Conventions.FailoverBehavior = FailoverBehavior.AllowReadsFromSecondaries)) using (var store2 = CreateStore()) { TellFirstInstanceToReplicateToSecondInstance(); var replicationInformerForDatabase = store1.GetReplicationInformerForDatabase(store1.DefaultDatabase); await replicationInformerForDatabase.UpdateReplicationInformationIfNeeded((AsyncServerClient)store1.AsyncDatabaseCommands); var people = InitializeData(store1); var lastPersonId = people.Last().Id; var firstPersonId = people.First().Id; WaitForIndexing(store1); WaitForReplication(store2, lastPersonId); WaitForIndexing(store2); var startEtag1 = EtagUtil.Increment(store1.DatabaseCommands.Get(firstPersonId).Etag, -1); var startEtag2 = EtagUtil.Increment(store2.DatabaseCommands.Get(firstPersonId).Etag, -1); var count = 0; var enumerator = store1.DatabaseCommands.StreamDocs(fromEtag: startEtag1); while (enumerator.MoveNext()) { count++; } Assert.True(count > 0); count = 0; enumerator = store2.DatabaseCommands.StreamDocs(fromEtag: startEtag2); while (enumerator.MoveNext()) { count++; } Assert.True(count > 0); StopDatabase(0); var e = Assert.Throws <AggregateException>(() => store1.DatabaseCommands.StreamDocs(fromEtag: startEtag1)); var requestException = e.InnerException as HttpRequestException; Assert.NotNull(requestException); } }
// Exports attachments via raw HTTP requests: pages /static/ attachment infos by
// BatchSize, downloads each attachment body, and writes { Data, Metadata, Key }
// objects to the JSON writer. An empty page either means an etag gap (skip ahead
// one batch and retry, while still below the database's last attachment etag) or
// the end of the export. Returns the last exported attachment's etag as the
// resume point.
protected override async Task <Etag> ExportAttachments(JsonTextWriter jsonWriter, Etag lastEtag) { int totalCount = 0; while (true) { RavenJArray attachmentInfo = null; var request = CreateRequest("/static/?pageSize=" + SmugglerOptions.BatchSize + "&etag=" + lastEtag); request.ExecuteRequest(reader => attachmentInfo = RavenJArray.Load(new JsonTextReader(reader))); if (attachmentInfo.Length == 0) { var databaseStatistics = await GetStats(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, SmugglerOptions.BatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachmentInfo.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachmentInfo.Length, totalCount); foreach (var item in attachmentInfo) { ShowProgress("Downloading attachment: {0}", item.Value <string>("Key")); byte[] attachmentData = null; var requestData = CreateRequest("/static/" + item.Value <string>("Key")); requestData.ExecuteRequest(reader => attachmentData = reader.ReadData()); new RavenJObject { { "Data", attachmentData }, { "Metadata", item.Value <RavenJObject>("Metadata") }, { "Key", item.Value <string>("Key") } } .WriteTo(jsonWriter); } lastEtag = Etag.Parse(attachmentInfo.Last().Value <string>("Etag")); } }
public void SynchronizerShouldReturnLowestEtagInEachCycle()
{
	var synchronizer = new DatabaseEtagSynchronizer(storage);
	var iSynchronizer = synchronizer.GetSynchronizer(EtagSynchronizerType.Indexer);

	// Three DISTINCT etags. The original created 'highestEtag' as
	// Increment(Etag.Empty, 2) - identical to 'higherEtag' - so the name lied
	// and the test only ever exercised two distinct values. Use 3 so the
	// highest etag really is the highest.
	var lowestEtag = EtagUtil.Increment(Etag.Empty, 1);
	var higherEtag = EtagUtil.Increment(Etag.Empty, 2);
	var highestEtag = EtagUtil.Increment(Etag.Empty, 3);

	// Report updates out of order; the synchronizer must remember the minimum.
	iSynchronizer.UpdateSynchronizationState(higherEtag);
	iSynchronizer.UpdateSynchronizationState(lowestEtag);
	iSynchronizer.UpdateSynchronizationState(highestEtag);

	var etag = iSynchronizer.GetSynchronizationEtag();

	Assert.Equal(lowestEtag, etag);
}
// Copies attachments directly from one document store to another (between-stores
// smuggling), paging by exportBatchSize and optionally stripping replication
// metadata. Empty pages trigger the usual etag gap-skipping until the export
// store's last attachment etag is reached. Returns the last copied etag.
// NOTE(review): Etag.Parse(attachments.Last().Etag) parses what appears to already
// be an Etag - this presumably relies on an implicit Etag->string conversion;
// confirm against the AttachmentInformation type.
private async static Task <Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; while (true) { var attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); if (attachments.Length == 0) { var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { if (databaseOptions.StripReplicationInformation) { attachmentInformation.Metadata = StripReplicationInformationFromMetadata(attachmentInformation.Metadata); } ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } lastEtag = Etag.Parse(attachments.Last().Etag); } }
// Scenario: one old (15-minutes-past) commit notification makes the prefetcher
// disable collecting documents after commit; a later batch request from a higher
// etag triggers the re-enable check, after which fresh commit notifications are
// collected into the in-memory queue again.
// NOTE: statement order matters - SystemTime.UtcDateTime is restored before the
// batch/cleanup calls so they observe the real clock.
public void ShouldEnableCollectingDocsAfterCommit() { Etag last = Etag.Empty; SystemTime.UtcDateTime = () => DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(15)); last = EtagUtil.Increment(last, 1); prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[] { new JsonDocument { Etag = last, Key = "items/1" } }); SystemTime.UtcDateTime = null; prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty); prefetchingBehavior.CleanupDocuments(Etag.Empty); Assert.True(prefetchingBehavior.DisableCollectingDocumentsAfterCommit); prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty.IncrementBy(5)); // will trigger check for enabling collecting docs again Assert.False(prefetchingBehavior.DisableCollectingDocumentsAfterCommit); for (int i = 0; i < 5; i++) { last = EtagUtil.Increment(last, 1); prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[] { new JsonDocument { Etag = last, Key = i.ToString(CultureInfo.InvariantCulture) }, }); } Assert.Equal(5, prefetchingBehavior.InMemoryIndexingQueueSize); }
// Advances nextEtag past the leading run of etags present in updatedDocuments,
// under a read lock.
// NOTE(review): correctness depends on updatedDocuments enumerating in sorted
// order (the inline comment below says as much) - each MoveNext is compared
// against the freshly incremented nextEtag, so only a contiguous sorted prefix
// is skipped. Confirm the collection type guarantees sorted enumeration.
private Etag SkipUpdatedEtags(Etag nextEtag) { updatedDocumentsLock.EnterReadLock(); try { var enumerator = updatedDocuments.GetEnumerator(); // here we relay on the fact that the updated docs collection is sorted while (enumerator.MoveNext() && enumerator.Current.CompareTo(nextEtag) == 0) { nextEtag = EtagUtil.Increment(nextEtag, 1); } } finally { updatedDocumentsLock.ExitReadLock(); } return(nextEtag); }
public void ReadFile(string requestedStorage)
{
	using (var storage = NewTransactionalStorage(requestedStorage))
	{
		// Unknown files read back as null.
		storage.Batch(accessor => Assert.Null(accessor.ReadFile("file1")));

		storage.Batch(accessor => accessor.PutFile("file1", null, new RavenJObject()));
		storage.Batch(accessor => accessor.PutFile("file2", 10, new RavenJObject()));

		storage.Batch(accessor =>
		{
			// file1: no declared size, nothing uploaded, carries the first etag.
			var file1 = accessor.ReadFile("file1");
			Assert.NotNull(file1);
			Assert.Equal("file1", file1.Name);
			Assert.Equal(null, file1.TotalSize);
			Assert.Equal(0, file1.UploadedSize);

			var file1Metadata = file1.Metadata;
			Assert.NotNull(file1Metadata);
			Assert.Equal(1, file1Metadata.Count);
			Assert.Equal(EtagUtil.Increment(Etag.Empty, 1), Etag.Parse(file1Metadata.Value<string>(Constants.MetadataEtagField)));

			// file2: declared size of 10, nothing uploaded, carries the second etag.
			var file2 = accessor.ReadFile("file2");
			Assert.NotNull(file2);
			Assert.Equal("file2", file2.Name);
			Assert.Equal(10, file2.TotalSize);
			Assert.Equal(0, file2.UploadedSize);

			var file2Metadata = file2.Metadata;
			Assert.NotNull(file2Metadata);
			Assert.Equal(1, file2Metadata.Count);
			Assert.Equal(EtagUtil.Increment(Etag.Empty, 2), Etag.Parse(file2Metadata.Value<string>(Constants.MetadataEtagField)));
		});
	}
}
public void ShouldReturnDocsOnlyIfTheFirstEtagInQueueMatches()
{
	// Queue five docs whose etags start at 6 (after a gap from Etag.Empty) ...
	var lastEtag = Etag.Empty.IncrementBy(5); // start from 5
	for (var i = 0; i < 5; i++)
	{
		lastEtag = EtagUtil.Increment(lastEtag, 1);
		prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[]
		{
			new JsonDocument { Etag = lastEtag, Key = i.ToString(CultureInfo.InvariantCulture) + "/1" },
		});
	}

	// ... so a batch starting at Etag.Empty cannot be served from the queue.
	Assert.Equal(0, prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty).Count);

	// now add missing docs from etag 0
	lastEtag = Etag.Empty;
	for (var i = 0; i < 5; i++)
	{
		lastEtag = EtagUtil.Increment(lastEtag, 1);
		prefetchingBehavior.AfterStorageCommitBeforeWorkNotifications(new[]
		{
			new JsonDocument { Etag = lastEtag, Key = i.ToString(CultureInfo.InvariantCulture) + "/2" },
		});
	}

	// With the gap filled, the full consecutive run of 10 docs is returned.
	Assert.Equal(10, prefetchingBehavior.GetDocumentsBatchFrom(Etag.Empty).Count);
}
// Exports attachments from one smuggler operations endpoint to another, respecting
// databaseOptions.Limit and BatchSize, optionally stripping replication metadata.
// A null PutAttachment call forces the importer to flush before finishing. Empty
// pages trigger etag gap-skipping; on exception, progress is reported and a
// SmugglerExportException carrying the last good etag is thrown for resumption.
// NOTE(review): the ShowProgress format string in the catch block is physically
// split across a line break in this view ("Got Exception during smuggler export.
// \n Exception: {0}. ") - this looks like formatting damage in the file rather
// than intent; left byte-identical here, but confirm against version control.
private async Task <Etag> ExportAttachments(ISmugglerDatabaseOperations exportOperations, ISmugglerDatabaseOperations importOperations, SmugglerDatabaseOptions databaseOptions) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; while (true) { try { if (databaseOptions.Limit - totalCount <= 0) { await importOperations.PutAttachment(null).ConfigureAwait(false); // force flush ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } var maxRecords = Math.Min(databaseOptions.Limit - totalCount, databaseOptions.BatchSize); var attachments = await exportOperations.GetAttachments(totalCount, lastEtag, maxRecords).ConfigureAwait(false); if (attachments.Count == 0) { var databaseStatistics = await exportOperations.GetStats().ConfigureAwait(false); if (lastEtag == null) { lastEtag = Etag.Empty; } if (lastEtag.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, maxRecords); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachments.Count; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Count, totalCount); foreach (var attachment in attachments) { var attachmentData = await exportOperations.GetAttachmentData(attachment).ConfigureAwait(false); if (attachmentData == null) { continue; } var attachmentToExport = new AttachmentExportInfo { Key = attachment.Key, Metadata = attachment.Metadata, Data = new MemoryStream(attachmentData) }; if (databaseOptions.StripReplicationInformation) { attachmentToExport.Metadata = StripReplicationInformationFromMetadata(attachmentToExport.Metadata); } await importOperations.PutAttachment(attachmentToExport).ConfigureAwait(false); lastEtag = attachment.Etag; } } catch (Exception e) { ShowProgress("Got Exception during smuggler export. 
 Exception: {0}. ", e.Message); ShowProgress("Done with reading attachments, total: {0}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } }