/// <summary>
/// Prepares state for a service-side copy: chooses a block size, restores the
/// restart transfer window from the checkpoint, initializes block ids, and
/// decides whether any blocks remain to copy or the transfer can go straight
/// to Commit.
/// </summary>
private void PrepareForCopy()
{
    // 50000 is the maximum number of blocks a blob may have (service limit).
    // Pick a block size large enough that the whole source fits in that many
    // blocks, rounded up to whole transfer chunks, but never smaller than the
    // globally configured block size.
    var calculatedBlockSize = ((long)Math.Ceiling((double)this.SourceHandler.TotalLength / 50000 / Constants.DefaultTransferChunkSize)) * Constants.DefaultTransferChunkSize;
    this.blockSize = Math.Max(TransferManager.Configurations.BlockSize, calculatedBlockSize);

    SingleObjectCheckpoint checkpoint = this.TransferJob.CheckPoint;

    // Restore in-flight blocks recorded by a previous (interrupted) run.
    if (null != checkpoint.TransferWindow)
    {
        this.lastTransferWindow = new Queue<long>(checkpoint.TransferWindow);
    }

    this.blockIds = new SortedDictionary<int, string>();
    this.BlockIdPrefix = GenerateBlockIdPrefix();
    this.InitializeBlockIds();

    // Blocks still to copy = blocks left in the restart window + blocks after
    // the checkpoint's entry offset.
    // BUGFIX: the conditional must be parenthesized. Previously the
    // remaining-blocks term was part of the false branch, so a null transfer
    // window produced blockCount == 0 and the transfer skipped straight to
    // Commit without copying anything. The sibling InitializeCopyStatus()
    // already uses the parenthesized form.
    int blockCount = (null == this.lastTransferWindow ? 0 : this.lastTransferWindow.Count)
        + (int)Math.Ceiling((double)(this.SourceHandler.TotalLength - checkpoint.EntryTransferOffset) / this.blockSize);

    if (0 == blockCount)
    {
        this.state = State.Commit;
    }
    else
    {
        this.countdownEvent = new CountdownEvent(blockCount);

        // Handle record overwrite here.
        this.state = State.Copy;
    }
}
/// <summary>
/// Initializes copy progress tracking from the transfer checkpoint: restores
/// the restart transfer window and sizes the countdown event by the number of
/// blocks still to be copied.
/// </summary>
private void InitializeCopyStatus()
{
    // Size of each copy chunk, taken from the global configuration.
    int chunkSize = TransferManager.Configurations.BlockSize;
    SingleObjectCheckpoint restartCheckpoint = this.TransferJob.CheckPoint;

    // When resuming, rebuild the queue of chunks that were in flight.
    if ((null != restartCheckpoint.TransferWindow) && (restartCheckpoint.TransferWindow.Count != 0))
    {
        this.lastTransferWindow = new Queue<long>(restartCheckpoint.TransferWindow);
    }

    int windowBlocks = (null == this.lastTransferWindow) ? 0 : this.lastTransferWindow.Count;
    int remainingBlocks = (int)Math.Ceiling((double)(totalLength - restartCheckpoint.EntryTransferOffset) / chunkSize);

    // One countdown signal per block still to transfer.
    this.countdownEvent = new CountdownEvent(windowBlocks + remainingBlocks);
}
/// <summary>
/// Processes the result of fetching destination append blob attributes:
/// confirms overwrite with the user when needed, validates the destination
/// blob type, and derives the next upload state from the checkpoint.
/// </summary>
/// <param name="existingBlob">Whether the destination blob was found to exist.</param>
private async Task HandleFetchAttributesResultAsync(bool existingBlob)
{
    this.destLocation.CheckedAccessCondition = true;

    if (!this.Controller.IsForceOverwrite)
    {
        // If destination file exists, query user whether to overwrite it.
        await this.Controller.CheckOverwriteAsync(
            existingBlob,
            this.SharedTransferData.TransferJob.Source.Instance,
            this.appendBlob).ConfigureAwait(false);
    }

    this.Controller.UpdateProgressAddBytesTransferred(0);

    if (existingBlob)
    {
        BlobType destBlobType = this.appendBlob.Properties.BlobType;

        if (BlobType.Unspecified == destBlobType)
        {
            throw new InvalidOperationException(Resources.FailedToGetBlobTypeException);
        }

        if (BlobType.AppendBlob != destBlobType)
        {
            throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch);
        }
    }

    // We do check point consistency validation in reader, so directly use it here.
    SingleObjectCheckpoint restartCheckpoint = this.SharedTransferData.TransferJob.CheckPoint;

    bool hasWindow = (null != restartCheckpoint.TransferWindow) && restartCheckpoint.TransferWindow.Any();
    if (hasWindow)
    {
        // Resume from the earliest in-flight offset.
        restartCheckpoint.TransferWindow.Sort();
        this.expectedOffset = restartCheckpoint.TransferWindow[0];
    }
    else
    {
        this.expectedOffset = restartCheckpoint.EntryTransferOffset;
    }

    if (0 == this.expectedOffset)
    {
        // Nothing transferred yet: (re)create the destination blob.
        this.state = State.Create;
    }
    else
    {
        // A resumed transfer requires the destination created by the previous
        // run to still be there (unless force-overwrite is set).
        if (!this.Controller.IsForceOverwrite && !existingBlob)
        {
            throw new TransferException(Resources.DestinationChangedException);
        }

        this.PreProcessed = true;

        this.state = (this.expectedOffset == this.SharedTransferData.TotalLength)
            ? State.Commit
            : State.UploadBlob;
    }

    this.workToken = 1;
}
/// <summary>
/// Fetches destination attributes to determine whether the destination exists,
/// confirms overwrite with the user, and selects the next state: resize an
/// untouched existing destination, continue an in-progress upload, or create
/// a new destination.
/// </summary>
private async Task FetchAttributesAsync()
{
    Debug.Assert(
        this.state == State.FetchAttributes,
        "FetchAttributesAsync called, but state isn't FetchAttributes",
        "Current state is {0}",
        this.state);

    this.hasWork = false;
    this.CheckInputStreamLength(this.SharedTransferData.TotalLength);

    bool exist = true;

    try
    {
        // ConfigureAwait(false): library code; no need to resume on the
        // captured context (consistent with the other awaits in this file).
        await this.DoFetchAttributesAsync().ConfigureAwait(false);
    }
    catch (StorageException se)
    {
        // Getting a storage exception is expected if the file doesn't
        // exist. In this case we won't error out, but set the
        // exist flag to false to indicate we're uploading
        // a new file instead of overwriting an existing one.
        if (null != se.RequestInformation
            && se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
        {
            exist = false;
        }
        else
        {
            this.HandleFetchAttributesResult(se);
            throw;
        }
    }
    catch (Exception e)
    {
        this.HandleFetchAttributesResult(e);
        throw;
    }

    if (this.TransferJob.Destination.Type == TransferLocationType.AzureBlob)
    {
        (this.TransferJob.Destination as AzureBlobLocation).CheckedAccessCondition = true;
    }
    else
    {
        (this.TransferJob.Destination as AzureFileLocation).CheckedAccessCondition = true;
    }

    // If destination exists, query user whether to overwrite it.
    this.Controller.CheckOverwrite(
        exist,
        this.SharedTransferData.SourceLocation,
        this.DestUri.ToString());

    this.Controller.UpdateProgressAddBytesTransferred(0);

    if (exist)
    {
        // If the destination has already existed,
        // and if we haven't uploaded anything to it, try to resize it to the expected length.
        // Or if we have uploaded something, the destination should be created by the last transferring,
        // don't do resize again.
        SingleObjectCheckpoint checkpoint = this.TransferJob.CheckPoint;

        // Guard against a null TransferWindow, consistent with the other
        // checkpoint consumers in this file.
        bool shouldResize = (checkpoint.EntryTransferOffset == 0)
            && ((null == checkpoint.TransferWindow) || !checkpoint.TransferWindow.Any());

        if (shouldResize)
        {
            this.state = State.Resize;
        }
        else
        {
            this.InitUpload();
        }
    }
    else
    {
        this.state = State.Create;
    }

    this.hasWork = true;
}
/// <summary>
/// Adds download ranges for the span [startOffset, endOffset] according to the transfer checkpoint.
/// Before the checkpoint's EntryTransferOffset has been reached, only the portions of the span that
/// overlap chunks still listed in the checkpoint's TransferWindow need downloading; adjacent window
/// chunks are merged into one range before being split into DefaultBlockSize pieces. Once the entry
/// offset is reached, whole spans are added directly.
/// </summary>
/// <param name="startOffset">Inclusive start offset of the span being processed.</param>
/// <param name="endOffset">Inclusive end offset of the span being processed.</param>
/// <param name="hasData">Whether the span contains data (propagated onto every created Range).</param>
/// <param name="reachLastTransferOffset">
/// Tracks whether processing has passed the checkpoint's EntryTransferOffset; set to true by this
/// method when the span reaches it, and short-circuits window matching on later calls.
/// </param>
/// <param name="lastTransferWindowIndex">
/// Index of the next TransferWindow entry to examine; advanced as window chunks are fully consumed.
/// </param>
private void AddRangesByCheckPoint(long startOffset, long endOffset, bool hasData, ref bool reachLastTransferOffset, ref int lastTransferWindowIndex)
{
    SingleObjectCheckpoint checkpoint = this.transferJob.CheckPoint;

    if (reachLastTransferOffset)
    {
        // Past the entry offset: everything in this span still needs downloading.
        this.rangeList.AddRange(
            new Range
            {
                StartOffset = startOffset,
                EndOffset = endOffset,
                HasData = hasData,
            }.SplitRanges(Constants.DefaultBlockSize));
    }
    else
    {
        // StartOffset == -1 marks "no range accumulated yet".
        Range range = new Range()
        {
            StartOffset = -1,
            HasData = hasData
        };

        while (lastTransferWindowIndex < checkpoint.TransferWindow.Count)
        {
            long lastTransferWindowStart = checkpoint.TransferWindow[lastTransferWindowIndex];
            // NOTE(review): end is clamped to TotalLength rather than TotalLength - 1;
            // the subsequent Math.Min with endOffset keeps it in bounds for this span,
            // but confirm the inclusive/exclusive convention for the final chunk.
            long lastTransferWindowEnd = Math.Min(checkpoint.TransferWindow[lastTransferWindowIndex] + this.SharedTransferData.BlockSize - 1, this.SharedTransferData.TotalLength);

            if (lastTransferWindowStart <= endOffset)
            {
                if (-1 == range.StartOffset)
                {
                    // New range
                    range.StartOffset = Math.Max(lastTransferWindowStart, startOffset);
                    range.EndOffset = Math.Min(lastTransferWindowEnd, endOffset);
                }
                else
                {
                    if (range.EndOffset != lastTransferWindowStart - 1)
                    {
                        // Store the previous range and create a new one
                        this.rangeList.AddRange(range.SplitRanges(Constants.DefaultBlockSize));
                        range = new Range()
                        {
                            StartOffset = Math.Max(lastTransferWindowStart, startOffset),
                            HasData = hasData
                        };
                    }

                    // Contiguous with the accumulated range: just extend it.
                    range.EndOffset = Math.Min(lastTransferWindowEnd, endOffset);
                }

                if (range.EndOffset == lastTransferWindowEnd)
                {
                    // Reach the end of transfer window, move to next
                    ++lastTransferWindowIndex;
                    continue;
                }
            }

            // Window chunk starts beyond this span (or only partially fits): stop here
            // and let the next call pick up from lastTransferWindowIndex.
            break;
        }

        // Flush whatever was accumulated from the transfer window.
        if (-1 != range.StartOffset)
        {
            this.rangeList.AddRange(range.SplitRanges(Constants.DefaultBlockSize));
        }

        // The span touches (or passes) the entry offset: everything from the entry
        // offset to the end of the span is untransferred and must be added.
        if (checkpoint.EntryTransferOffset <= endOffset + 1)
        {
            reachLastTransferOffset = true;

            if (checkpoint.EntryTransferOffset <= endOffset)
            {
                this.rangeList.AddRange(new Range()
                {
                    StartOffset = checkpoint.EntryTransferOffset,
                    EndOffset = endOffset,
                    HasData = hasData,
                }.SplitRanges(Constants.DefaultBlockSize));
            }
        }
    }
}
/// <summary>
/// It might fail to get a large ranges list from storage, so this method splits the whole file into
/// spans of PageRangesSpanSize (148MB) for the get-ranges calls.
/// When restarting, we only need to get ranges for chunks in the TransferWindow and after the
/// EntryTransferOffset in the checkpoint. There might be chunks in the TransferWindow adjacent to
/// the EntryTransferOffset, so this method first merges those chunks back into EntryTransferOffset;
/// for the remaining chunks in the TransferWindow, ranges for several chunks can often be fetched
/// in a single span. To avoid sending too many get-ranges requests, the chunks are merged into
/// spans of up to PageRangesSpanSize.
/// </summary>
private void PrepareToGetRanges()
{
    this.getRangesSpanIndex = -1;
    this.rangesSpanList = new List<RangesSpan>();
    this.rangeList = new List<Range>();
    this.nextDownloadIndex = 0;

    SingleObjectCheckpoint checkpoint = this.transferJob.CheckPoint;
    int blockSize = this.SharedTransferData.BlockSize;

    RangesSpan rangesSpan = null;

    if ((null != checkpoint.TransferWindow) && (checkpoint.TransferWindow.Any()))
    {
        checkpoint.TransferWindow.Sort();

        // Compute the start offset of the chunk immediately before EntryTransferOffset,
        // to detect TransferWindow chunks that are adjacent to it.
        long lastOffset = 0;
        if (checkpoint.EntryTransferOffset == this.SharedTransferData.TotalLength)
        {
            // Entry offset is at EOF: the last chunk may be shorter than blockSize.
            long lengthBeforeLastChunk = checkpoint.EntryTransferOffset % blockSize;
            lastOffset = 0 == lengthBeforeLastChunk ?
                checkpoint.EntryTransferOffset - blockSize :
                checkpoint.EntryTransferOffset - lengthBeforeLastChunk;
        }
        else
        {
            lastOffset = checkpoint.EntryTransferOffset - blockSize;
        }

        // Walk the window backwards, folding chunks that sit directly before the
        // entry offset back into EntryTransferOffset. A window chunk at or past
        // the expected position indicates corrupted restart info.
        for (int i = checkpoint.TransferWindow.Count - 1; i >= 0; i--)
        {
            if (lastOffset == checkpoint.TransferWindow[i])
            {
                checkpoint.TransferWindow.RemoveAt(i);
                checkpoint.EntryTransferOffset = lastOffset;
            }
            else if (lastOffset < checkpoint.TransferWindow[i])
            {
                throw new FormatException(Resources.RestartableInfoCorruptedException);
            }
            else
            {
                break;
            }

            lastOffset = checkpoint.EntryTransferOffset - blockSize;
        }

        // Build spans covering the chunks that remain in the window, merging as
        // many chunks as fit into one PageRangesSpanSize span.
        if (this.transferJob.CheckPoint.TransferWindow.Any())
        {
            rangesSpan = new RangesSpan();
            rangesSpan.StartOffset = checkpoint.TransferWindow[0];
            rangesSpan.EndOffset = Math.Min(rangesSpan.StartOffset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1;

            for (int i = 1; i < checkpoint.TransferWindow.Count; ++i)
            {
                // Chunk doesn't fit in the current span: close it and start a new one.
                if (checkpoint.TransferWindow[i] + blockSize > rangesSpan.EndOffset)
                {
                    long lastEndOffset = rangesSpan.EndOffset;
                    this.rangesSpanList.Add(rangesSpan);
                    rangesSpan = new RangesSpan();
                    rangesSpan.StartOffset = checkpoint.TransferWindow[i] > lastEndOffset ?
                        checkpoint.TransferWindow[i] :
                        lastEndOffset + 1;
                    rangesSpan.EndOffset = Math.Min(rangesSpan.StartOffset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1;
                }
            }

            this.rangesSpanList.Add(rangesSpan);
        }
    }

    // Continue from the end of the last window span or from the entry offset,
    // whichever is further along.
    long offset = null != rangesSpan ?
        rangesSpan.EndOffset > checkpoint.EntryTransferOffset ?
            rangesSpan.EndOffset + 1 :
            checkpoint.EntryTransferOffset :
        checkpoint.EntryTransferOffset;

    // Cover the rest of the file with fixed-size spans.
    while (offset < this.SharedTransferData.TotalLength)
    {
        rangesSpan = new RangesSpan()
        {
            StartOffset = offset,
            EndOffset = Math.Min(offset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1
        };

        this.rangesSpanList.Add(rangesSpan);
        offset = rangesSpan.EndOffset + 1;
    }

    // One countdown signal per get-ranges request to issue.
    if (this.rangesSpanList.Any())
    {
        this.getRangesCountDownEvent = new CountdownEvent(this.rangesSpanList.Count);
    }
}
/// <summary>
/// Fetches the valid page ranges for one span of the source page blob and converts them into
/// chunk-aligned page offsets to copy. When the last span's result arrives (countdown reaches
/// zero), aggregates all offsets, restores the restart transfer window, and transitions to the
/// Copy (or Commit) state.
/// </summary>
protected override async Task DoPreCopyAsync()
{
    this.hasWork = false;

    // Claim the next span; re-enable work immediately if more spans remain so
    // other workers can fetch them concurrently.
    long rangeSpanOffset = this.nextRangesSpanOffset;
    long rangeSpanLength = Math.Min(Constants.PageRangesSpanSize, this.totalLength - rangeSpanOffset);

    this.nextRangesSpanOffset += Constants.PageRangesSpanSize;
    this.hasWork = (this.nextRangesSpanOffset < this.totalLength);

    // NOTE(review): no ConfigureAwait(false) here, unlike other awaits in this file — confirm intended.
    var pageRanges = await this.sourcePageBlob.GetPageRangesAsync(
        rangeSpanOffset,
        rangeSpanLength,
        Utils.GenerateConditionWithCustomerCondition(this.destLocation.AccessCondition, this.destLocation.CheckedAccessCondition),
        Utils.GenerateBlobRequestOptions(this.destLocation.BlobRequestOptions),
        Utils.GenerateOperationContext(this.TransferContext),
        this.CancellationToken);

    // Convert the returned ranges into DefaultTransferChunkSize-aligned offsets
    // (relative to rangeSpanOffset) that cover every returned page range.
    long pageOffset = rangeSpanOffset;
    List<long> pageList = new List<long>();

    foreach (var pageRange in pageRanges)
    {
        if (pageOffset <= pageRange.StartOffset)
        {
            // Skip whole chunks that end before this range starts.
            while (pageOffset + Constants.DefaultTransferChunkSize < pageRange.StartOffset)
            {
                pageOffset += Constants.DefaultTransferChunkSize;
            }

            pageList.Add(pageOffset);
            pageOffset += Constants.DefaultTransferChunkSize;
        }

        // pageOffset > pageRange.StartOffset
        while (pageOffset < pageRange.EndOffset)
        {
            pageList.Add(pageOffset);
            pageOffset += Constants.DefaultTransferChunkSize;
        }
    }

    this.pageListBag.Add(pageList);

    // Signal() returns true only for the call that brings the count to zero,
    // so exactly one worker performs the aggregation below.
    if (this.getRangesCountdownEvent.Signal())
    {
        foreach (var pageListInARange in this.pageListBag)
        {
            this.pagesToCopy.AddRange(pageListInARange);
        }

        this.pagesToCopy.Sort();
        this.nextPageToCopy = this.pagesToCopy.GetEnumerator();
        this.hasNextPage = this.nextPageToCopy.MoveNext();

        // NOTE(review): pageLength is never used below — candidate for removal.
        int pageLength = TransferManager.Configurations.BlockSize;

        SingleObjectCheckpoint checkpoint = this.TransferJob.CheckPoint;
        if ((null != checkpoint.TransferWindow) && (0 != checkpoint.TransferWindow.Count))
        {
            this.lastTransferWindow = new Queue<long>(checkpoint.TransferWindow);
        }

        // Blocks to copy = restart-window entries + freshly discovered pages.
        int blockCount = null == this.lastTransferWindow ?
            0 : this.lastTransferWindow.Count;
        blockCount += this.pagesToCopy.Count;

        if (0 == blockCount)
        {
            this.state = State.Commit;
        }
        else
        {
            this.countdownEvent = new CountdownEvent(blockCount);
            this.state = State.Copy;
        }

        this.hasWork = true;
    }
}
/// <summary>
/// Fetches destination attributes (unless force-overwrite is set), determines whether the
/// destination exists, confirms overwrite with the user, and picks the next state: continue
/// an in-progress upload from the checkpoint, or create the destination.
/// Uses workToken as a compare-exchange guard so only one caller runs the body at a time.
/// </summary>
private async Task FetchAttributesAsync()
{
    Debug.Assert(
        this.state == State.FetchAttributes,
        "FetchAttributesAsync called, but state isn't FetchAttributes",
        "Current state is {0}",
        this.state);

    // Atomically take the work token; bail out if another worker already has it.
    if (Interlocked.CompareExchange(ref workToken, 0, 1) == 0)
    {
        return;
    }

    this.CheckInputStreamLength(this.SharedTransferData.TotalLength);

    // With force-overwrite we never probe the destination, so "exist" stays false
    // and no overwrite prompt is issued.
    bool exist = !this.Controller.IsForceOverwrite;

    if (!this.Controller.IsForceOverwrite)
    {
        try
        {
            await Utils.ExecuteXsclApiCallAsync(
                async () => await this.DoFetchAttributesAsync().ConfigureAwait(false),
                this.CancellationToken).ConfigureAwait(false);
        }
#if EXPECT_INTERNAL_WRAPPEDSTORAGEEXCEPTION
        // On this build the storage exception may arrive wrapped in an AggregateException.
        catch (Exception e) when (e is StorageException || (e is AggregateException && e.InnerException is StorageException))
        {
            var se = e as StorageException ?? e.InnerException as StorageException;
#else
        catch (StorageException se)
        {
#endif
            // Getting a storage exception is expected if the file doesn't
            // exist. In this case we won't error out, but set the
            // exist flag to false to indicate we're uploading
            // a new file instead of overwriting an existing one.
            if (null != se.RequestInformation &&
                se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
            {
                exist = false;
            }
            else
            {
                this.HandleFetchAttributesResult(se);
                throw;
            }
        }
        catch (Exception e)
        {
            this.HandleFetchAttributesResult(e);
            throw;
        }

        // If destination exists, query user whether to overwrite it.
        await this.Controller.CheckOverwriteAsync(
            exist,
            this.TransferJob.Source.Instance,
            this.TransferJob.Destination.Instance).ConfigureAwait(false);
    }

    if (this.TransferJob.Destination.Type == TransferLocationType.AzureBlob)
    {
        (this.TransferJob.Destination as AzureBlobLocation).CheckedAccessCondition = true;
    }
    else
    {
        (this.TransferJob.Destination as AzureFileLocation).CheckedAccessCondition = true;
    }

    this.Controller.UpdateProgressAddBytesTransferred(0);

    SingleObjectCheckpoint checkpoint = this.TransferJob.CheckPoint;

    // We should create the destination if there's no previous transfer progress in the checkpoint.
    // Create will clear all data if the destination already exists.
    bool shouldCreate = (checkpoint.EntryTransferOffset == 0) && (!checkpoint.TransferWindow.Any());

    if (!shouldCreate)
    {
        this.InitUpload();
    }
    else
    {
        this.state = State.Create;
    }

    // Release the work token so the next state can be processed.
    this.workToken = 1;
}
/// <summary>
/// Processes the result of fetching destination block blob attributes. Classifies any
/// exception (404 means "destination doesn't exist"; a blob-type mismatch becomes an
/// InvalidOperationException; anything else is rethrown), establishes the BlockIdPrefix,
/// confirms overwrite with the user, validates the destination blob type, precomputes the
/// block id sequence, and derives the next upload state from the checkpoint.
/// </summary>
/// <param name="e">Exception from the fetch-attributes call, or null on success.</param>
private void HandleFetchAttributesResult(Exception e)
{
    bool existingBlob = !this.Controller.IsForceOverwrite;

    if (null != e)
    {
        StorageException se = e as StorageException;

        if (null != se)
        {
            // Getting a storage exception is expected if the blob doesn't
            // exist. In this case we won't error out, but set the
            // existingBlob flag to false to indicate we're uploading
            // a new blob instead of overwriting an existing blob.
            if (null != se.RequestInformation &&
                se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
            {
                existingBlob = false;
            }
            // Removed redundant `null != se` re-check (already inside this branch).
            else if (string.Equals(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))
            {
                // Preserve the service error as inner exception, consistent with
                // the sibling HandleFetchAttributesResult in this file.
                throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch, se);
            }
            else
            {
                throw se;
            }
        }
        else
        {
            throw e;
        }
    }

    this.destLocation.CheckedAccessCondition = true;

    if (string.IsNullOrEmpty(this.destLocation.BlockIdPrefix))
    {
        // BlockIdPrefix is never set before that this is the first time to transfer this file.
        // In block blob upload, it stores uploaded but not committed blocks on Azure Storage.
        // In DM, we use block id to identify the blocks uploaded so we only need to upload it once.
        // Keep BlockIdPrefix in upload job object for restarting the transfer if anything happens.
        this.destLocation.BlockIdPrefix = Guid.NewGuid().ToString("N") + "-";
    }

    if (!this.Controller.IsForceOverwrite)
    {
        // If destination file exists, query user whether to overwrite it.
        this.Controller.CheckOverwrite(
            existingBlob,
            this.SharedTransferData.TransferJob.Source.Instance,
            this.destLocation.Blob);
    }

    this.Controller.UpdateProgressAddBytesTransferred(0);

    if (existingBlob)
    {
        if (this.destLocation.Blob.Properties.BlobType == BlobType.Unspecified)
        {
            throw new InvalidOperationException(Resources.FailedToGetBlobTypeException);
        }

        if (this.destLocation.Blob.Properties.BlobType != BlobType.BlockBlob)
        {
            throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch);
        }

        Debug.Assert(
            this.destLocation.Blob.Properties.BlobType == BlobType.BlockBlob,
            "BlobType should be BlockBlob if we reach here.");
    }

    // Calculate number of blocks.
    int numBlocks = (int)Math.Ceiling(
        this.SharedTransferData.TotalLength / (double)this.Scheduler.TransferOptions.BlockSize);

    // Create sequence array: block ids are the base64 form of prefix + zero-padded index.
    this.blockIdSequence = new string[numBlocks];

    for (int i = 0; i < numBlocks; ++i)
    {
        string blockIdSuffix = i.ToString("D6", CultureInfo.InvariantCulture);
        byte[] blockIdInBytes = System.Text.Encoding.UTF8.GetBytes(this.destLocation.BlockIdPrefix + blockIdSuffix);
        string blockId = Convert.ToBase64String(blockIdInBytes);

        this.blockIdSequence[i] = blockId;
    }

    SingleObjectCheckpoint checkpoint = this.SharedTransferData.TransferJob.CheckPoint;

    // Blocks still to upload = blocks past the entry offset + blocks in the restart window.
    int leftBlockCount = (int)Math.Ceiling(
        (this.SharedTransferData.TotalLength - checkpoint.EntryTransferOffset) / (double)this.Scheduler.TransferOptions.BlockSize) + checkpoint.TransferWindow.Count;

    if (0 == leftBlockCount)
    {
        this.state = State.Commit;
    }
    else
    {
        this.countdownEvent = new CountdownEvent(leftBlockCount);
        this.state = State.UploadBlob;
    }

    this.PreProcessed = true;
    this.hasWork = true;
}
/// <summary>
/// Processes the result of fetching destination block blob attributes. Classifies any
/// exception (404 means "destination doesn't exist"; a blob-type mismatch becomes an
/// InvalidOperationException; anything else is rethrown), confirms overwrite with the
/// user, validates the destination blob type, and selects the upload strategy
/// (single-request PutBlob vs. PutBlock/PutBlockList) from the total length.
/// </summary>
/// <param name="e">Exception from the fetch-attributes call, or null on success.</param>
private void HandleFetchAttributesResult(Exception e)
{
    // Unless force-overwrite is on, assume the destination exists until proven otherwise.
    bool destinationExists = !this.Controller.IsForceOverwrite;

    if (null != e)
    {
        StorageException storageException = e as StorageException;

        if (null == storageException)
        {
            // Not a storage error: nothing we can classify here.
            throw e;
        }

        if ((null != storageException.RequestInformation) &&
            ((int)HttpStatusCode.NotFound == storageException.RequestInformation.HttpStatusCode))
        {
            // A 404 is expected when the destination blob is absent; upload a
            // new blob instead of overwriting an existing one.
            destinationExists = false;
        }
        else if (0 == string.Compare(storageException.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch, storageException);
        }
        else
        {
            throw storageException;
        }
    }

    this.destLocation.CheckedAccessCondition = true;

    if (!this.Controller.IsForceOverwrite)
    {
        // If destination file exists, query user whether to overwrite it.
        this.Controller.CheckOverwrite(
            destinationExists,
            this.SharedTransferData.TransferJob.Source.Instance,
            this.destLocation.Blob);
    }

    this.Controller.UpdateProgressAddBytesTransferred(0);

    if (destinationExists)
    {
        BlobType destBlobType = this.destLocation.Blob.Properties.BlobType;

        if (BlobType.Unspecified == destBlobType)
        {
            throw new InvalidOperationException(Resources.FailedToGetBlobTypeException);
        }

        if (BlobType.BlockBlob != destBlobType)
        {
            throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch);
        }

        Debug.Assert(
            this.destLocation.Blob.Properties.BlobType == BlobType.BlockBlob,
            "BlobType should be BlockBlob if we reach here.");
    }

    SingleObjectCheckpoint restartCheckpoint = this.SharedTransferData.TransferJob.CheckPoint;

    // Blocks still to upload = blocks past the entry offset + blocks in the restart window.
    int remainingBlockCount =
        (int)Math.Ceiling((this.SharedTransferData.TotalLength - restartCheckpoint.EntryTransferOffset) / (double)this.SharedTransferData.BlockSize)
        + restartCheckpoint.TransferWindow.Count;

    // Sufficiently small blobs are sent with a single PutBlob request; larger
    // ones go through PutBlock + PutBlockList.
    bool useSingleRequest = this.SharedTransferData.TotalLength > 0 &&
        this.SharedTransferData.TotalLength <= Constants.SingleRequestBlobSizeThreshold;

    if (useSingleRequest)
    {
        this.PrepareForPutBlob(remainingBlockCount);
    }
    else
    {
        this.PrepareForPutBlockAndPutBlockList(remainingBlockCount);
    }

    this.PreProcessed = true;
    this.hasWork = true;
}