private static void CreateForTest(CloudBlockBlob blob, int blockCount, int blockSize, bool isAsync, bool commit = true)
{
    byte[] buffer = GetRandomBuffer(blockSize);
    List<string> blocks = GetBlockIdList(blockCount);

    using (AutoResetEvent waitHandle = new AutoResetEvent(false))
    {
        foreach (string block in blocks)
        {
            using (MemoryStream stream = new MemoryStream(buffer))
            {
                if (isAsync)
                {
                    IAsyncResult result = blob.BeginPutBlock(block, stream, null, ar => waitHandle.Set(), null);
                    waitHandle.WaitOne();
                    blob.EndPutBlock(result);
                }
                else
                {
                    blob.PutBlock(block, stream, null);
                }
            }
        }

        if (commit)
        {
            if (isAsync)
            {
                IAsyncResult result = blob.BeginPutBlockList(blocks, ar => waitHandle.Set(), null);
                waitHandle.WaitOne();
                blob.EndPutBlockList(result);
            }
            else
            {
                blob.PutBlockList(blocks);
            }
        }
    }
}
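// GetRandomBuffer and GetBlockIdList are helpers that are not shown in this file.
// The sketch below is a hypothetical, minimal version of what they are assumed to
// look like, based on how CreateForTest uses them: the block IDs handed to PutBlock
// must be valid Base64 strings, and the service expects all block IDs within a blob
// to have the same length, which a Base64-encoded GUID satisfies.
private static byte[] GetRandomBuffer(int size)
{
    byte[] buffer = new byte[size];
    new Random().NextBytes(buffer);
    return buffer;
}

private static List<string> GetBlockIdList(int count)
{
    List<string> blocks = new List<string>();
    for (int i = 0; i < count; i++)
    {
        blocks.Add(Convert.ToBase64String(Guid.NewGuid().ToByteArray()));
    }
    return blocks;
}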
/// <summary>
/// Test block blob creation (set block list), expecting success.
/// </summary>
/// <param name="testBlob">The block blob.</param>
/// <param name="blockList">The block list to set.</param>
/// <param name="testAccessCondition">The access condition to use.</param>
private void BlockBlobWriteExpectSuccess(CloudBlockBlob testBlob, IEnumerable<string> blockList, AccessCondition testAccessCondition)
{
    testBlob.PutBlockList(blockList, testAccessCondition, null /* options */);
}
static void UploadBlobInChunks(FileInfo fileInfo, CloudBlockBlob packageBlob, CloudBlobClient blobClient)
{
    var operationContext = new OperationContext();
    operationContext.ResponseReceived += delegate(object sender, RequestEventArgs args)
    {
        var statusCode = (int) args.Response.StatusCode;
        var statusDescription = args.Response.StatusDescription;
        Log.Verbose("Uploading, response received: " + statusCode + " " + statusDescription);

        if (statusCode >= 400)
        {
            Log.Error("Error when uploading the package. Azure returned an HTTP status code of: " + statusCode + " " + statusDescription);
            Log.Verbose("The upload will be retried");
        }
    };

    blobClient.SetServiceProperties(blobClient.GetServiceProperties(), operationContext: operationContext);

    Log.VerboseFormat("Uploading the package to blob storage. The package file is {0}.", fileInfo.Length.ToFileSizeString());

    using (var fileReader = fileInfo.OpenRead())
    {
        var blocklist = new List<string>();
        long uploadedSoFar = 0;
        var data = new byte[1024 * 1024];
        var id = 1;

        while (true)
        {
            id++;
            var read = fileReader.Read(data, 0, data.Length);
            if (read == 0)
            {
                // All blocks staged; commit them in order.
                packageBlob.PutBlockList(blocklist);
                break;
            }

            // Block IDs must be valid Base64 strings of equal length, so pad the counter before encoding.
            var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(id.ToString(CultureInfo.InvariantCulture).PadLeft(30, '0')));
            packageBlob.PutBlock(blockId, new MemoryStream(data, 0, read, true), null);
            blocklist.Add(blockId);

            uploadedSoFar += read;
            Log.VerboseFormat("Uploading package to blob storage: {0} of {1}", uploadedSoFar.ToFileSizeString(), fileInfo.Length.ToFileSizeString());
        }
    }

    Log.Verbose("Upload complete");
}
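// A hypothetical call site for UploadBlobInChunks, assuming a classic storage
// connection string; the container name and the use of the package file name as the
// blob name are illustrative only, not taken from the original code.
static void UploadPackage(string connectionString, string packagePath)
{
    var account = CloudStorageAccount.Parse(connectionString);
    var blobClient = account.CreateCloudBlobClient();

    var container = blobClient.GetContainerReference("packages");
    container.CreateIfNotExists();

    var fileInfo = new FileInfo(packagePath);
    var packageBlob = container.GetBlockBlobReference(fileInfo.Name);

    UploadBlobInChunks(fileInfo, packageBlob, blobClient);
}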
/// <summary>
/// Test block blob creation (block list setting), expecting lease failure.
/// </summary>
/// <param name="testBlob">The block blob.</param>
/// <param name="blockList">An appropriate block list to set.</param>
/// <param name="testAccessCondition">The failing access condition to use.</param>
/// <param name="expectedStatusCode">The expected HTTP status code.</param>
/// <param name="expectedErrorCode">The expected error code.</param>
/// <param name="description">The reason why these calls should fail.</param>
private void BlockBlobWriteExpectLeaseFailure(CloudBlockBlob testBlob, IEnumerable<string> blockList, AccessCondition testAccessCondition, HttpStatusCode expectedStatusCode, string expectedErrorCode, string description)
{
    TestHelper.ExpectedException(
        () => testBlob.PutBlockList(blockList, testAccessCondition, null /* options */),
        description + " (Put Block List)",
        expectedStatusCode,
        expectedErrorCode);
}
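// Hypothetical call sites for the two helpers above, assuming a blob that holds an
// active lease whose ID is in leaseId and whose blocks have already been staged.
// The status code and error code are what the service returns when the lease header
// is missing on a leased blob; the assertion message is illustrative only.
List<string> blockList = GetBlockIdList(2);
BlockBlobWriteExpectSuccess(leasedBlob, blockList, AccessCondition.GenerateLeaseCondition(leaseId));
BlockBlobWriteExpectLeaseFailure(
    leasedBlob,
    blockList,
    AccessCondition.GenerateEmptyCondition(),
    HttpStatusCode.PreconditionFailed,
    "LeaseIdMissing",
    "PutBlockList without the lease ID should fail against a leased blob");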
public void CloudBlobSASApiVersionQueryParam()
{
    CloudBlobContainer container = GetRandomContainerReference();
    try
    {
        container.Create();
        CloudBlob blob;

        SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
        {
            Permissions = SharedAccessBlobPermissions.Read,
            SharedAccessStartTime = DateTimeOffset.UtcNow.AddMinutes(-5),
            SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(30),
        };

        CloudBlockBlob blockBlob = container.GetBlockBlobReference("bb");
        blockBlob.PutBlockList(new string[] { });

        CloudPageBlob pageBlob = container.GetPageBlobReference("pb");
        pageBlob.Create(0);

        CloudAppendBlob appendBlob = container.GetAppendBlobReference("ab");
        appendBlob.CreateOrReplace();

        string blockBlobToken = blockBlob.GetSharedAccessSignature(policy);
        StorageCredentials blockBlobSAS = new StorageCredentials(blockBlobToken);
        Uri blockBlobSASUri = blockBlobSAS.TransformUri(blockBlob.Uri);
        StorageUri blockBlobSASStorageUri = blockBlobSAS.TransformUri(blockBlob.StorageUri);

        string pageBlobToken = pageBlob.GetSharedAccessSignature(policy);
        StorageCredentials pageBlobSAS = new StorageCredentials(pageBlobToken);
        Uri pageBlobSASUri = pageBlobSAS.TransformUri(pageBlob.Uri);
        StorageUri pageBlobSASStorageUri = pageBlobSAS.TransformUri(pageBlob.StorageUri);

        string appendBlobToken = appendBlob.GetSharedAccessSignature(policy);
        StorageCredentials appendBlobSAS = new StorageCredentials(appendBlobToken);
        Uri appendBlobSASUri = appendBlobSAS.TransformUri(appendBlob.Uri);
        StorageUri appendBlobSASStorageUri = appendBlobSAS.TransformUri(appendBlob.StorageUri);

        OperationContext apiVersionCheckContext = new OperationContext();
        apiVersionCheckContext.SendingRequest += (sender, e) =>
        {
            Assert.IsTrue(e.Request.RequestUri.Query.Contains("api-version"));
        };

        blob = new CloudBlob(blockBlobSASUri);
        blob.FetchAttributes(operationContext: apiVersionCheckContext);
        Assert.AreEqual(blob.BlobType, BlobType.BlockBlob);
        Assert.IsTrue(blob.StorageUri.PrimaryUri.Equals(blockBlob.Uri));
        Assert.IsNull(blob.StorageUri.SecondaryUri);

        blob = new CloudBlob(pageBlobSASUri);
        blob.FetchAttributes(operationContext: apiVersionCheckContext);
        Assert.AreEqual(blob.BlobType, BlobType.PageBlob);
        Assert.IsTrue(blob.StorageUri.PrimaryUri.Equals(pageBlob.Uri));
        Assert.IsNull(blob.StorageUri.SecondaryUri);

        blob = new CloudBlob(blockBlobSASStorageUri, null, null);
        blob.FetchAttributes(operationContext: apiVersionCheckContext);
        Assert.AreEqual(blob.BlobType, BlobType.BlockBlob);
        Assert.IsTrue(blob.StorageUri.Equals(blockBlob.StorageUri));

        blob = new CloudBlob(pageBlobSASStorageUri, null, null);
        blob.FetchAttributes(operationContext: apiVersionCheckContext);
        Assert.AreEqual(blob.BlobType, BlobType.PageBlob);
        Assert.IsTrue(blob.StorageUri.Equals(pageBlob.StorageUri));
    }
    finally
    {
        container.DeleteIfExists();
    }
}
public void CloudBlobSASSharedProtocolsQueryParam()
{
    CloudBlobContainer container = GetRandomContainerReference();
    try
    {
        container.Create();
        CloudBlob blob;

        SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
        {
            Permissions = SharedAccessBlobPermissions.Read,
            SharedAccessStartTime = DateTimeOffset.UtcNow.AddMinutes(-5),
            SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(30),
        };

        CloudBlockBlob blockBlob = container.GetBlockBlobReference("bb");
        blockBlob.PutBlockList(new string[] { });

        foreach (SharedAccessProtocol? protocol in new SharedAccessProtocol?[] { null, SharedAccessProtocol.HttpsOrHttp, SharedAccessProtocol.HttpsOnly })
        {
            string blockBlobToken = blockBlob.GetSharedAccessSignature(policy, null, null, protocol, null);
            StorageCredentials blockBlobSAS = new StorageCredentials(blockBlobToken);
            Uri blockBlobSASUri = new Uri(blockBlob.Uri + blockBlobSAS.SASToken);
            StorageUri blockBlobSASStorageUri = new StorageUri(
                new Uri(blockBlob.StorageUri.PrimaryUri + blockBlobSAS.SASToken),
                new Uri(blockBlob.StorageUri.SecondaryUri + blockBlobSAS.SASToken));

            int httpPort = blockBlobSASUri.Port;
            int securePort = 443;

            if (!string.IsNullOrEmpty(TestBase.TargetTenantConfig.BlobSecurePortOverride))
            {
                securePort = Int32.Parse(TestBase.TargetTenantConfig.BlobSecurePortOverride);
            }

            var schemesAndPorts = new[]
            {
                new { scheme = Uri.UriSchemeHttp, port = httpPort },
                new { scheme = Uri.UriSchemeHttps, port = securePort }
            };

            foreach (var item in schemesAndPorts)
            {
                blockBlobSASUri = TransformSchemeAndPort(blockBlobSASUri, item.scheme, item.port);
                blockBlobSASStorageUri = new StorageUri(
                    TransformSchemeAndPort(blockBlobSASStorageUri.PrimaryUri, item.scheme, item.port),
                    TransformSchemeAndPort(blockBlobSASStorageUri.SecondaryUri, item.scheme, item.port));

                if (protocol.HasValue && protocol.Value == SharedAccessProtocol.HttpsOnly && string.CompareOrdinal(item.scheme, Uri.UriSchemeHttp) == 0)
                {
                    blob = new CloudBlob(blockBlobSASUri);
                    TestHelper.ExpectedException(
                        () => blob.FetchAttributes(),
                        "Accessing a blob over a protocol that the SAS does not allow should fail",
                        HttpStatusCode.Unused);

                    blob = new CloudBlob(blockBlobSASStorageUri, null, null);
                    TestHelper.ExpectedException(
                        () => blob.FetchAttributes(),
                        "Accessing a blob over a protocol that the SAS does not allow should fail",
                        HttpStatusCode.Unused);
                }
                else
                {
                    blob = new CloudBlob(blockBlobSASUri);
                    blob.FetchAttributes();
                    Assert.AreEqual(blob.BlobType, BlobType.BlockBlob);

                    blob = new CloudBlob(blockBlobSASStorageUri, null, null);
                    blob.FetchAttributes();
                    Assert.AreEqual(blob.BlobType, BlobType.BlockBlob);
                }
            }
        }
    }
    finally
    {
        container.DeleteIfExists();
    }
}
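// TransformSchemeAndPort is referenced above but not shown here. A minimal sketch of
// what it is assumed to do, based on how the test uses it: rebuild the URI with a new
// scheme and port while leaving the host, path, and query (including the SAS token) intact.
private static Uri TransformSchemeAndPort(Uri input, string scheme, int port)
{
    UriBuilder builder = new UriBuilder(input)
    {
        Scheme = scheme,
        Port = port
    };
    return builder.Uri;
}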
public void CloudBlobSASApiVersionQueryParam()
{
    CloudBlobContainer container = GetRandomContainerReference();
    try
    {
        container.Create();
        ICloudBlob blob;

        SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
        {
            Permissions = SharedAccessBlobPermissions.Read,
            SharedAccessStartTime = DateTimeOffset.UtcNow.AddMinutes(-5),
            SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(30),
        };

        CloudBlockBlob blockBlob = container.GetBlockBlobReference("bb");
        blockBlob.PutBlockList(new string[] { });

        CloudPageBlob pageBlob = container.GetPageBlobReference("pb");
        pageBlob.Create(0);

        string blockBlobToken = blockBlob.GetSharedAccessSignature(policy);
        StorageCredentials blockBlobSAS = new StorageCredentials(blockBlobToken);
        Uri blockBlobSASUri = blockBlobSAS.TransformUri(blockBlob.Uri);
        StorageUri blockBlobSASStorageUri = blockBlobSAS.TransformUri(blockBlob.StorageUri);

        string pageBlobToken = pageBlob.GetSharedAccessSignature(policy);
        StorageCredentials pageBlobSAS = new StorageCredentials(pageBlobToken);
        Uri pageBlobSASUri = pageBlobSAS.TransformUri(pageBlob.Uri);
        StorageUri pageBlobSASStorageUri = pageBlobSAS.TransformUri(pageBlob.StorageUri);

        OperationContext apiVersionCheckContext = new OperationContext();
        apiVersionCheckContext.SendingRequest += (sender, e) =>
        {
            Assert.IsNull(e.Request.Headers.Get("x-ms-version"));
            Assert.IsTrue(e.Request.RequestUri.Query.Contains("api-version"));
        };

        blob = container.ServiceClient.GetBlobReferenceFromServer(blockBlobSASUri, operationContext: apiVersionCheckContext);
        Assert.IsInstanceOfType(blob, typeof(CloudBlockBlob));
        Assert.IsTrue(blob.StorageUri.PrimaryUri.Equals(blockBlob.Uri));
        Assert.IsNull(blob.StorageUri.SecondaryUri);

        blob = container.ServiceClient.GetBlobReferenceFromServer(pageBlobSASUri, operationContext: apiVersionCheckContext);
        Assert.IsInstanceOfType(blob, typeof(CloudPageBlob));
        Assert.IsTrue(blob.StorageUri.PrimaryUri.Equals(pageBlob.Uri));
        Assert.IsNull(blob.StorageUri.SecondaryUri);

        blob = container.ServiceClient.GetBlobReferenceFromServer(blockBlobSASStorageUri, operationContext: apiVersionCheckContext);
        Assert.IsInstanceOfType(blob, typeof(CloudBlockBlob));
        Assert.IsTrue(blob.StorageUri.Equals(blockBlob.StorageUri));

        blob = container.ServiceClient.GetBlobReferenceFromServer(pageBlobSASStorageUri, operationContext: apiVersionCheckContext);
        Assert.IsInstanceOfType(blob, typeof(CloudPageBlob));
        Assert.IsTrue(blob.StorageUri.Equals(pageBlob.StorageUri));
    }
    finally
    {
        container.DeleteIfExists();
    }
}
public void BlobIngressEgressCounters()
{
    CloudBlobContainer container = GetRandomContainerReference();
    container.CreateIfNotExists();
    CloudBlockBlob blob = container.GetBlockBlobReference("blob1");

    string[] blockIds = new string[]
    {
        Convert.ToBase64String(Guid.NewGuid().ToByteArray()),
        Convert.ToBase64String(Guid.NewGuid().ToByteArray()),
        Convert.ToBase64String(Guid.NewGuid().ToByteArray())
    };

    try
    {
        // 1 byte
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.PutBlock(blockIds[0], new MemoryStream(GetRandomBuffer(1)), null, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        // 1024 bytes
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.PutBlock(blockIds[1], new MemoryStream(GetRandomBuffer(1024)), null, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        // 98765 bytes
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.PutBlock(blockIds[2], new MemoryStream(GetRandomBuffer(98765)), null, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        // PutBlockList
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.PutBlockList(blockIds, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        // GetBlockList
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.DownloadBlockList(BlockListingFilter.All, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        // Download
        TestHelper.ValidateIngressEgress(Selectors.IfUrlContains(blob.Uri.ToString()), () =>
        {
            OperationContext opContext = new OperationContext();
            blob.DownloadToStream(Stream.Null, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, opContext);
            return opContext.LastResult;
        });

        Assert.AreEqual(blob.Properties.Length, 98765 + 1024 + 1);

        // Error case: downloading a blob that does not exist
        CloudBlockBlob nullBlob = container.GetBlockBlobReference("null");
        OperationContext errorContext = new OperationContext();
        try
        {
            nullBlob.DownloadToStream(Stream.Null, null, new BlobRequestOptions() { RetryPolicy = new RetryPolicies.NoRetry() }, errorContext);
            Assert.Fail("Downloading a blob that does not exist should fail.");
        }
        catch (StorageException)
        {
            Assert.IsTrue(errorContext.LastResult.IngressBytes > 0);
        }
    }
    finally
    {
        container.DeleteIfExists();
    }
}
/// <summary>
/// Adds the diagnostic message to the block blob.
/// </summary>
/// <param name="blob">The cloud blob.</param>
/// <param name="message">The message.</param>
protected virtual void AddMessageToBlock(CloudBlockBlob blob, string message)
{
    Sitecore.Diagnostics.Assert.ArgumentNotNull(blob, "blob");
    Sitecore.Diagnostics.Assert.ArgumentNotNull(message, "message");

    var blockIds = new List<string>();

    // Preserve the blocks that are already committed so the new block is appended after them.
    if (blob.Exists())
    {
        blockIds.AddRange(blob.DownloadBlockList().Select(b => b.Name));
    }

    string blockId = Guid.NewGuid().ToString().Replace("-", string.Empty);
    blockIds.Add(blockId);

    using (var blockData = new MemoryStream(LogStorageManager.DefaultTextEncoding.GetBytes(message), false))
    {
        blob.PutBlock(blockId, blockData, null);
        blob.PutBlockList(blockIds);
    }
}
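// A hypothetical call site for AddMessageToBlock from within a derived appender; the
// container and blob names are illustrative, and blobClient is assumed to be an
// existing CloudBlobClient for the logging storage account.
CloudBlobContainer container = blobClient.GetContainerReference("logs");
container.CreateIfNotExists();
CloudBlockBlob logBlob = container.GetBlockBlobReference("site.log");
this.AddMessageToBlock(logBlob, DateTime.UtcNow.ToString("O") + " INFO Application started" + Environment.NewLine);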
private static void CloudBlockBlobCopy(bool sourceIsSas, bool destinationIsSas)
{
    CloudBlobContainer container = GetRandomContainerReference();
    try
    {
        container.Create();

        // Create the source blob on the server
        CloudBlockBlob source = container.GetBlockBlobReference("source");
        string data = "String data";
        UploadText(source, data, Encoding.UTF8);
        source.Metadata["Test"] = "value";
        source.SetMetadata();

        // Create the destination blob on the server
        CloudBlockBlob destination = container.GetBlockBlobReference("destination");
        destination.PutBlockList(new string[] { });

        CloudBlockBlob copySource = source;
        CloudBlockBlob copyDestination = destination;

        if (sourceIsSas)
        {
            // The source SAS must have read permissions
            SharedAccessBlobPermissions permissions = SharedAccessBlobPermissions.Read;
            SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
            {
                SharedAccessStartTime = DateTimeOffset.UtcNow.AddMinutes(-5),
                SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(30),
                Permissions = permissions,
            };
            string sasToken = source.GetSharedAccessSignature(policy);

            // Access the source through the SAS
            StorageCredentials credentials = new StorageCredentials(sasToken);
            copySource = new CloudBlockBlob(credentials.TransformUri(source.Uri));
        }

        if (destinationIsSas)
        {
            if (!sourceIsSas)
            {
                // The source container must be public if the source is not accessed via SAS
                BlobContainerPermissions containerPermissions = new BlobContainerPermissions
                {
                    PublicAccess = BlobContainerPublicAccessType.Blob
                };
                container.SetPermissions(containerPermissions);
            }

            // The destination SAS must have write permissions
            SharedAccessBlobPermissions permissions = SharedAccessBlobPermissions.Write;
            SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
            {
                SharedAccessStartTime = DateTimeOffset.UtcNow.AddMinutes(-5),
                SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(30),
                Permissions = permissions,
            };
            string sasToken = destination.GetSharedAccessSignature(policy);

            // Access the destination through the SAS
            StorageCredentials credentials = new StorageCredentials(sasToken);
            copyDestination = new CloudBlockBlob(credentials.TransformUri(destination.Uri));
        }

        // Start the copy and wait for completion
        string copyId = copyDestination.StartCopyFromBlob(TestHelper.Defiddler(copySource));
        WaitForCopy(destination);

        // Check the copy state on the original destination reference
        Assert.AreEqual(CopyStatus.Success, destination.CopyState.Status);
        Assert.AreEqual(source.Uri.AbsolutePath, destination.CopyState.Source.AbsolutePath);
        Assert.AreEqual(data.Length, destination.CopyState.TotalBytes);
        Assert.AreEqual(data.Length, destination.CopyState.BytesCopied);
        Assert.AreEqual(copyId, destination.CopyState.CopyId);
        Assert.IsTrue(destination.CopyState.CompletionTime > DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(1)));

        if (!destinationIsSas)
        {
            // Abort Copy is not supported for a SAS destination
            TestHelper.ExpectedException(
                () => copyDestination.AbortCopy(copyId),
                "Aborting a copy operation after completion should fail",
                HttpStatusCode.Conflict,
                "NoPendingCopyOperation");
        }

        source.FetchAttributes();
        Assert.IsNotNull(destination.Properties.ETag);
        Assert.AreNotEqual(source.Properties.ETag, destination.Properties.ETag);
        Assert.IsTrue(destination.Properties.LastModified > DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(1)));

        string copyData = DownloadText(destination, Encoding.UTF8);
        Assert.AreEqual(data, copyData, "The data inside the copied blob does not match the source.");

        destination.FetchAttributes();
        BlobProperties prop1 = destination.Properties;
        BlobProperties prop2 = source.Properties;

        Assert.AreEqual(prop1.CacheControl, prop2.CacheControl);
        Assert.AreEqual(prop1.ContentEncoding, prop2.ContentEncoding);
        Assert.AreEqual(prop1.ContentLanguage, prop2.ContentLanguage);
        Assert.AreEqual(prop1.ContentMD5, prop2.ContentMD5);
        Assert.AreEqual(prop1.ContentType, prop2.ContentType);

        Assert.AreEqual("value", destination.Metadata["Test"], false, "The copied metadata does not match the source.");

        destination.Delete();
        source.Delete();
    }
    finally
    {
        container.DeleteIfExists();
    }
}
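// Hypothetical callers covering the four source/destination SAS combinations handled by the helper above.
CloudBlockBlobCopy(sourceIsSas: false, destinationIsSas: false);
CloudBlockBlobCopy(sourceIsSas: true, destinationIsSas: false);
CloudBlockBlobCopy(sourceIsSas: false, destinationIsSas: true);
CloudBlockBlobCopy(sourceIsSas: true, destinationIsSas: true);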
/// <summary>
/// Upload in parallel.
/// If the total size of the stream is smaller than the chunk size, then simply split the length by the parallel factor.
/// FIXME: Need to retest this!!!
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="blob">The target block blob.</param>
/// <param name="parallelFactor">The maximum number of blocks uploaded concurrently.</param>
/// <param name="chunkSizeInMB">The block size in megabytes.</param>
private void ParallelWriteBlockBlob(Stream stream, CloudBlockBlob blob, int parallelFactor, int chunkSizeInMB)
{
    long chunkSize = chunkSizeInMB * 1024L * 1024;
    var length = stream.Length;

    if (chunkSize > length)
    {
        chunkSize = length / parallelFactor;
    }

    var numberOfBlocks = (length / chunkSize) + 1;
    var blockIdList = new string[numberOfBlocks];
    var chunkSizeList = new int[numberOfBlocks];
    var taskList = new List<Task>();

    var count = numberOfBlocks - 1;

    // Read the data, spawn a task per block, then wait for all of them.
    while (count >= 0)
    {
        while (count >= 0 && taskList.Count < parallelFactor)
        {
            var index = (numberOfBlocks - count - 1);
            var chunkSizeToUpload = (int)Math.Min(chunkSize, length - (index * chunkSize));

            // Only upload if we have data to give; the last slot can be empty
            // when the length divides evenly by the chunk size.
            if (chunkSizeToUpload > 0)
            {
                chunkSizeList[index] = chunkSizeToUpload;
                var dataBuffer = new byte[chunkSizeToUpload];
                stream.Seek(index * chunkSize, SeekOrigin.Begin);

                // Stream.Read is not guaranteed to fill the buffer in one call, so loop until the chunk is complete.
                var bytesRead = 0;
                while (bytesRead < chunkSizeToUpload)
                {
                    var read = stream.Read(dataBuffer, bytesRead, chunkSizeToUpload - bytesRead);
                    if (read == 0)
                    {
                        break;
                    }
                    bytesRead += read;
                }

                var t = Task.Factory.StartNew(() =>
                {
                    var tempCount = index;
                    var uploadSize = chunkSizeList[tempCount];
                    var newBuffer = new byte[uploadSize];
                    Array.Copy(dataBuffer, newBuffer, dataBuffer.Length);

                    // Block IDs must be valid Base64 strings; a Base64-encoded GUID keeps them unique and equally sized.
                    var blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray());

                    using (var memStream = new MemoryStream(newBuffer, 0, uploadSize))
                    {
                        blob.PutBlock(blockId, memStream, null);
                    }

                    blockIdList[tempCount] = blockId;
                });

                taskList.Add(t);
            }

            count--;
        }

        // Free a slot before queuing the next block.
        if (taskList.Count > 0)
        {
            var waitedIndex = Task.WaitAny(taskList.ToArray());
            if (waitedIndex >= 0)
            {
                taskList.RemoveAt(waitedIndex);
            }
        }
    }

    Task.WaitAll(taskList.ToArray());

    // Commit only the blocks that were actually uploaded, in order.
    blob.PutBlockList(blockIdList.Where(t => t != null));
}
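// A hypothetical call site for ParallelWriteBlockBlob: upload a local file with four
// concurrent block uploads and 4 MB blocks. The file path, blob name, and container
// reference are illustrative only.
CloudBlockBlob backupBlob = container.GetBlockBlobReference("backup.dat");
using (FileStream fileStream = File.OpenRead(@"C:\temp\backup.dat"))
{
    this.ParallelWriteBlockBlob(fileStream, backupBlob, parallelFactor: 4, chunkSizeInMB: 4);
}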